gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.job.entries.ftp;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.regex.Pattern;

import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.KettleClientEnvironment;
import org.pentaho.di.core.Result;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.utils.TestUtils;

/**
 * Unit tests for {@link JobEntryFTP}.
 *
 * <p>Covers two areas: validation of the target directory (as a fixed path, as a
 * variable-resolved path, and as a VFS "file://" protocol path) and generation of
 * the target filename with the optional date/time suffix (PDI-5558).
 */
public class JobEntryFTPTest {
  private Job job;
  private JobEntryFTP entry;
  // Temp directory created fresh for each test in setUp(); removed in tearDown().
  private String existingDir;

  @Rule
  public TemporaryFolder tempFolder = new TemporaryFolder();

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    KettleClientEnvironment.init();
  }

  @Before
  public void setUp() throws Exception {
    job = new Job( null, new JobMeta() );
    // MockedJobEntryFTP stubs out the real FTP connection.
    entry = new MockedJobEntryFTP();
    job.getJobMeta().addJobEntry( new JobEntryCopy( entry ) );
    entry.setParentJob( job );
    job.setStopped( false );
    entry.setServerName( "some.server" );
    entry.setUserName( "anonymous" );
    entry.setFtpDirectory( "." );
    entry.setWildcard( "robots.txt" );
    entry.setBinaryMode( false );
    entry.setSuccessCondition( "success_if_no_errors" );
    existingDir = TestUtils.createTempDir();
  }

  @After
  public void tearDown() throws Exception {
    File fl = new File( existingDir );
    if ( !fl.exists() ) {
      return;
    }
    // Delete every downloaded file, then the directory itself. The previous
    // implementation removed only the first file and skipped the directory
    // entirely when it was empty, leaking temp folders between test runs.
    File[] fls = fl.listFiles();
    if ( fls != null ) {
      for ( File f : fls ) {
        f.delete();
      }
    }
    fl.delete();
  }

  @Test
  public void testFixedExistingTargetDir() throws Exception {
    entry.setTargetDirectory( existingDir );
    Result result = entry.execute( new Result(), 0 );
    assertTrue( "For existing folder should be true", result.getResult() );
    assertEquals( "There should be no errors", 0, result.getNrErrors() );
  }

  @Test
  public void testFixedNonExistingTargetDir() throws Exception {
    entry.setTargetDirectory( existingDir + File.separator + "sub" );
    Result result = entry.execute( new Result(), 0 );
    assertFalse( "For non existing folder should be false", result.getResult() );
    assertTrue( "There should be errors", 0 != result.getNrErrors() );
  }

  @Test
  public void testVariableExistingTargetDir() throws Exception {
    entry.setTargetDirectory( "${Internal.Job.Filename.Directory}" );
    entry.setVariable( "Internal.Job.Filename.Directory", existingDir );
    Result result = entry.execute( new Result(), 0 );
    assertTrue( "For existing folder should be true", result.getResult() );
    assertEquals( "There should be no errors", 0, result.getNrErrors() );
  }

  @Test
  public void testVariableNonExistingTargetDir() throws Exception {
    entry.setTargetDirectory( "${Internal.Job.Filename.Directory}/Worg" );
    entry.setVariable( "Internal.Job.Filename.Directory", existingDir + File.separator + "sub" );
    Result result = entry.execute( new Result(), 0 );
    assertFalse( "For non existing folder should be false", result.getResult() );
    assertTrue( "There should be errors", 0 != result.getNrErrors() );
  }

  @Test
  public void testProtocolVariableExistingTargetDir() throws Exception {
    entry.setTargetDirectory( "${Internal.Job.Filename.Directory}" );
    entry.setVariable( "Internal.Job.Filename.Directory", "file://" + existingDir );
    Result result = entry.execute( new Result(), 0 );
    assertTrue( "For existing folder should be true", result.getResult() );
    assertEquals( "There should be no errors", 0, result.getNrErrors() );
  }

  // Renamed from testPtotocolVariableNonExistingTargetDir (typo in "Protocol").
  @Test
  public void testProtocolVariableNonExistingTargetDir() throws Exception {
    entry.setTargetDirectory( "${Internal.Job.Filename.Directory}/Worg" );
    entry.setVariable( "Internal.Job.Filename.Directory", "file://" + existingDir + File.separator + "sub" );
    Result result = entry.execute( new Result(), 0 );
    assertFalse( "For non existing folder should be false", result.getResult() );
    assertTrue( "There should be errors", 0 != result.getNrErrors() );
  }

  @Test
  public void testTargetFilenameNoDateTime() throws Exception {
    File destFolder = tempFolder.newFolder( "pdi5558" );
    destFolder.deleteOnExit();
    JobEntryFTP entry = new JobEntryFTP();
    entry.setTargetDirectory( destFolder.getAbsolutePath() );
    entry.setAddDateBeforeExtension( false );
    // A null source filename yields no target filename at all.
    assertNull( entry.returnTargetFilename( null ) );
    assertEquals( destFolder.getAbsolutePath() + Const.FILE_SEPARATOR + "testFile",
      entry.returnTargetFilename( "testFile" ) );
    assertEquals( destFolder.getAbsolutePath() + Const.FILE_SEPARATOR + "testFile.txt",
      entry.returnTargetFilename( "testFile.txt" ) );
  }

  @Test
  public void testTargetFilenameWithDateTime() throws Exception {
    // SimpleDateFormat is kept (rather than java.time) so the expected strings
    // are produced the same way the production code produces them.
    SimpleDateFormat yyyyMMdd = new SimpleDateFormat( "yyyyMMdd" );
    SimpleDateFormat HHmmssSSS = new SimpleDateFormat( "HHmmssSSS" );
    SimpleDateFormat yyyyMMddHHmmssSSS = new SimpleDateFormat( "yyyyMMdd_HHmmssSSS" );
    File destFolder = tempFolder.newFolder( "pdi5558" );
    destFolder.deleteOnExit();
    String destFolderName = destFolder.getAbsolutePath();
    JobEntryFTP entry = new JobEntryFTP();
    entry.setTargetDirectory( destFolderName );
    entry.setAddDateBeforeExtension( true );

    // Test Date-Only
    entry.setDateInFilename( true );
    assertNull( entry.returnTargetFilename( null ) );
    assertEquals( "Test Add Date without file extension",
      destFolderName + Const.FILE_SEPARATOR + "testFile_" + yyyyMMdd.format( new Date() ),
      entry.returnTargetFilename( "testFile" ) );
    assertEquals( "Test Add Date with file extension",
      destFolderName + Const.FILE_SEPARATOR + "testFile_" + yyyyMMdd.format( new Date() ) + ".txt",
      entry.returnTargetFilename( "testFile.txt" ) );

    // Test Date-and-Time. The exact millisecond cannot be predicted, so the
    // result is checked against a format regex and bracketed lexicographically
    // between timestamps taken just before and just after the call.
    entry.setTimeInFilename( true );
    String beforeString = destFolderName + Const.FILE_SEPARATOR + "testFile_"
      + yyyyMMddHHmmssSSS.format( new Date() ) + ".txt";
    String actualValue = entry.returnTargetFilename( "testFile.txt" );
    String afterString = destFolderName + Const.FILE_SEPARATOR + "testFile_"
      + yyyyMMddHHmmssSSS.format( new Date() ) + ".txt";
    Pattern expectedFormat = Pattern.compile( Pattern.quote( destFolderName + Const.FILE_SEPARATOR
      + "testFile_" + yyyyMMdd.format( new Date() ) + "_" ) + "([\\d]{9})\\.txt" );
    assertTrue( "Output file matches expected format", expectedFormat.matcher( actualValue ).matches() );
    assertTrue( "The actual time is not too early for test run", actualValue.compareTo( beforeString ) >= 0 );
    assertTrue( "The actual time is not too late for test run", actualValue.compareTo( afterString ) <= 0 );

    // Test Time-Only
    entry.setDateInFilename( false );
    beforeString = destFolderName + Const.FILE_SEPARATOR + "testFile_" + HHmmssSSS.format( new Date() ) + ".txt";
    actualValue = entry.returnTargetFilename( "testFile.txt" );
    afterString = destFolderName + Const.FILE_SEPARATOR + "testFile_" + HHmmssSSS.format( new Date() ) + ".txt";
    expectedFormat = Pattern.compile( Pattern.quote( destFolderName + Const.FILE_SEPARATOR + "testFile_" )
      + "([\\d]{9})\\.txt" );
    assertTrue( "Output file matches expected format", expectedFormat.matcher( actualValue ).matches() );
    assertTrue( "The actual time is not too early for test run", actualValue.compareTo( beforeString ) >= 0 );
    assertTrue( "The actual time is not too late for test run", actualValue.compareTo( afterString ) <= 0 );
  }
}
package in.ankushs.browscap4j.domain;

import java.io.File;
import java.io.InputStream;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import gnu.trove.map.TCharObjectMap;
import gnu.trove.map.hash.TCharObjectHashMap;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import in.ankushs.browscap4j.service.ResourceBuilder;
import in.ankushs.browscap4j.utils.PreConditions;

import javax.annotation.PostConstruct;

/**
 * Entry point for browscap lookups: loads the browscap.csv patterns into a
 * wildcard trie and resolves a User-Agent string to its
 * {@link BrowserCapabilities}.
 *
 * <p>NOTE(review): the loaded data ({@code cache}, {@code tree},
 * {@code allLoaded}) is static and shared across all instances; the first
 * constructed instance wins and later constructions are no-ops.
 *
 * @author Ankush Sharma
 */
public class Browscap {
    private static final Logger logger = LoggerFactory.getLogger(Browscap.class);
    // Value used for every field of the fallback capabilities object.
    private static final String UNKNOWN = "Unknown";

    /*
     * A flag for indicating whether the browscap.csv file has been loaded into
     * memory. Its value is true if data has been loaded, and false otherwise.
     */
    private static boolean allLoaded;

    /*
     * Name patterns (from the browscap.csv file) as key and its capabilities in
     * the form of a BrowserCapabilities object as value.
     */
    private static Map<String, BrowserCapabilities> cache;
    private final static Trie tree = new Trie();

    /**
     * Create a new Browscap instance. Once an instance has been created, the
     * allLoaded flag is set to true. Any further initializations of the Browscap
     * object will not load data into memory again.
     *
     * @param csvFile The browscap.csv file as a File object.
     * @throws IllegalArgumentException if {@code csvFile} does not exist.
     */
    public Browscap(final File csvFile) {
        PreConditions.checkNull(csvFile, "csvFile cannot be null");
        PreConditions.checkExpression(!csvFile.exists(), "The csvFile does not exist");
        if (!allLoaded) {
            ResourceBuilder resourceBuilder = new ResourceBuilder(csvFile);
            loadData(resourceBuilder);
        } else {
            logger.debug("Data has already been loaded!");
        }
    }

    /**
     * Create a new Browscap instance. Once an instance has been created, the
     * allLoaded flag is set to true. Any further initializations of the Browscap
     * object will not load data into memory again.
     *
     * @param csvInputStream The browscap.csv file as an InputStream object.
     */
    public Browscap(final InputStream csvInputStream) {
        PreConditions.checkNull(csvInputStream, "csvInputStream cannot be null");
        if (!allLoaded) {
            ResourceBuilder resourceBuilder = new ResourceBuilder(csvInputStream);
            loadData(resourceBuilder);
        } else {
            logger.debug("Data has already been loaded!");
        }
    }

    // Builds the trie from every name pattern and keeps the pattern ->
    // capabilities map for lookup; marks the static data as loaded.
    private void loadData(final ResourceBuilder resourceBuilder) {
        logger.info("Loading data ");
        tree.makeTrie(resourceBuilder.getNamePatterns());
        cache = resourceBuilder.getNamePatternsToBrowserCapabilitiesMap();
        logger.info("Finished loading data");
        allLoaded = true;
    }

    /**
     * The main API method. Return the capabilities of a user agent.
     *
     * @param userAgent the user agent being queried.
     * @return a loaded BrowserCapabilities object, or an all-"Unknown"
     *         BrowserCapabilities object when nothing matches {@code userAgent}.
     */
    public BrowserCapabilities lookup(final String userAgent) throws Exception {
        PreConditions.checkNull(userAgent, "Cannot pass a null UserAgent String ! ");
        logger.debug("Attempting to find BrowserCapabilities for User Agent String {}", userAgent);
        BrowserCapabilities browserCapabilities = resolve(userAgent);
        if (browserCapabilities == null) {
            // No pattern matched: fall back to an explicit "Unknown" record.
            browserCapabilities = new BrowserCapabilities.Builder()
                    .browser(UNKNOWN)
                    .deviceBrandName(UNKNOWN)
                    .deviceCodeName(UNKNOWN)
                    .deviceName(UNKNOWN)
                    .deviceType(UNKNOWN)
                    .isMobile(false)
                    .isTablet(false)
                    .platform(UNKNOWN)
                    .platformMaker(UNKNOWN)
                    .platformVersion(UNKNOWN)
                    .build();
        }
        return browserCapabilities;
    }

    // Maps the user agent to its best (longest) matching name pattern and then
    // to the cached capabilities; null when no pattern matched.
    private BrowserCapabilities resolve(final String userAgent) throws Exception {
        final String namePattern = getPattern(userAgent);
        final BrowserCapabilities browserCapabilities = cache.get(namePattern);
        logger.debug("BrowserCapabilities {} found for user agent string {} ", browserCapabilities, userAgent);
        return browserCapabilities;
    }

    /**
     * A node of the wildcard-matching trie. Subclasses: {@link Node} (general
     * node with a char->child map) and {@link SingleChildNode} (memory-saving
     * variant for chains with exactly one child).
     */
    static abstract class AbstractNode {
        // Full pattern string terminating at this node, or null.
        protected String leaf = null;
        // Minimum number of user-agent chars still required below this node.
        protected short minLengthOfUserAgentSuffix;
        protected final char nodeChar;
        // Set of characters that must occur in the remaining user-agent suffix
        // for any pattern below this node to match (pruning aid).
        protected BitSetWithMask requiredCharacters;

        public AbstractNode(final char c) {
            this.nodeChar = c;
        }

        public abstract List<String> getLeafs();

        public abstract boolean hasChildren();

        public abstract void populateNextCheckNodes(char c, Collection<AbstractNode> nextToCheck);
    }

    // '*' matches any run of characters (including empty); '?' matches exactly one.
    private static final char ASTERIX = '*';
    private static final char QUESTION = '?';

    static class Node extends AbstractNode {
        // Cached references to the '*' and '?' children, refreshed on insert.
        private AbstractNode asterixNode = null;
        private TCharObjectMap<AbstractNode> children = new TCharObjectHashMap<>();
        private AbstractNode questionNode = null;

        public Node(final char c) {
            super(c);
        }

        // Highest character code used anywhere in this subtree; bounds the
        // per-character counters in Trie.getMatchedPatterns.
        private char calcMaxChar() {
            char max = this.nodeChar;
            for (final AbstractNode child : children.valueCollection()) {
                max = (char) Math.max(max, ((Node) child).calcMaxChar());
            }
            return max;
        }

        // Post-order computation of minLengthOfUserAgentSuffix. A '*' node does
        // not consume a character, hence the increment is skipped for it.
        private short calcMinLengthOfUserAgentSuffix() {
            if (leaf != null) {
                this.minLengthOfUserAgentSuffix = 0;
                return 0;
            }
            short min = Short.MAX_VALUE;
            for (final AbstractNode child : children.valueCollection()) {
                short childValue = ((Node) child).calcMinLengthOfUserAgentSuffix();
                if (this.nodeChar != ASTERIX) {
                    childValue++;
                }
                min = (short) Math.min(childValue, min);
            }
            this.minLengthOfUserAgentSuffix = min;
            return min;
        }

        // Post-order computation of requiredCharacters: the intersection (AND)
        // over all children of (child's required set + the child's own literal
        // char). A leaf imposes no requirement (empty set).
        private BitSetWithMask calcRequiredCharacters() {
            if (leaf != null) {
                // still need to calc required chars for children
                for (final AbstractNode child : children.valueCollection()) {
                    ((Node) child).calcRequiredCharacters();
                }
                return requiredCharacters = new BitSetWithMask();
            }
            BitSetWithMask result = null;
            for (final AbstractNode child : children.valueCollection()) {
                final BitSetWithMask childResult =
                        (BitSetWithMask) (((Node) child).calcRequiredCharacters()).clone();
                if (child.nodeChar != ASTERIX && child.nodeChar != QUESTION) {
                    childResult.set(child.nodeChar);
                }
                if (result == null) {
                    result = childResult;
                } else {
                    result.and(childResult);
                }
            }
            return this.requiredCharacters = result;
        }

        public List<String> getLeafs() {
            final List<String> result = new ArrayList<>(2);
            if (StringUtils.isNotBlank(this.leaf)) {
                result.add(leaf);
            }
            // A trailing '*' child also matches here (it can match "").
            if (this.asterixNode != null) {
                result.addAll(this.asterixNode.getLeafs());
            }
            return result;
        }

        public int getMinLengthOfUserAgentSuffix() {
            return minLengthOfUserAgentSuffix;
        }

        @Override
        public boolean hasChildren() {
            return !children.isEmpty();
        }

        // Recursive insert of pattern[i..] below this node.
        public void insertPattern(final String pattern, final char[] cs, final int i) {
            if (i == cs.length) { // this is the end of pattern
                if (this.leaf != null) {
                    throw new IllegalArgumentException("Duplicate pattern: '" + pattern + "'");
                }
                this.leaf = pattern;
                return;
            }
            final char c = cs[i];
            Node charNode = (Node) children.get(c);
            if (charNode == null) {
                charNode = new Node(c);
                children.put(c, charNode);
            }
            charNode.insertPattern(pattern, cs, i + 1);
            // Refresh wildcard shortcuts; cheap and always correct after insert.
            this.asterixNode = (Node) children.get(ASTERIX);
            this.questionNode = (Node) children.get(QUESTION);
        }

        // Shrinks the trie after construction: single-child chains become
        // SingleChildNode, maps are right-sized, and equal requiredCharacters
        // bitsets are deduplicated via the shared bitSets pool.
        private void optimize() {
            final Map<BitSetWithMask, BitSetWithMask> bitSets = new HashMap<>(1 << 18);
            optimizeImpl(bitSets);
        }

        public void optimizeImpl(final Map<BitSetWithMask, BitSetWithMask> bitSets) {
            for (final AbstractNode child : children.valueCollection()) {
                ((Node) child).optimizeImpl(bitSets);
            }
            for (final char c : children.keys()) {
                final Node child = (Node) children.get(c);
                if (child.children.size() == 1) {
                    final SingleChildNode singleChildNode = new SingleChildNode(child);
                    children.put(c, singleChildNode);
                    if (c == '*') {
                        this.asterixNode = singleChildNode;
                    } else if (c == '?') {
                        this.questionNode = singleChildNode;
                    }
                }
            }
            if (children.size() == 1) {
                final TCharObjectMap<AbstractNode> singletonMap = new TCharObjectHashMap<>(1);
                singletonMap.put(children.keys()[0], (AbstractNode) children.values()[0]);
                this.children = singletonMap;
            } else if (children.size() == 0) {
                this.children = new TCharObjectHashMap<>(0);
            } else {
                // Re-hash into a tightly-sized map.
                this.children = new TCharObjectHashMap<>(this.children);
            }
            if (bitSets.containsKey(this.requiredCharacters)) {
                this.requiredCharacters = bitSets.get(this.requiredCharacters);
            } else {
                bitSets.put(this.requiredCharacters, this.requiredCharacters);
            }
        }

        public void populateNextCheckNodes(final char c, final Collection<AbstractNode> nextToCheck) {
            final AbstractNode byChar = children.get(c);
            if (byChar != null) {
                nextToCheck.add(byChar);
            }
            if (asterixNode != null) {
                // '*' may match zero chars, so also try its continuations now.
                asterixNode.populateNextCheckNodes(c, nextToCheck);
            }
            if (questionNode != null) {
                nextToCheck.add(questionNode);
            }
            if (nodeChar == '*') {
                // '*' may also consume this char and stay active.
                nextToCheck.add(this);
            }
        }

        @Override
        public String toString() {
            return this.nodeChar + "=>[" + new String(this.children.keys()) + "]; " + this.leaf;
        }
    }

    /**
     * Compact replacement for a {@link Node} that has exactly one child,
     * created during {@link Node#optimize()}.
     */
    static class SingleChildNode extends AbstractNode {
        private final AbstractNode child;

        public SingleChildNode(final Node src) {
            super(src.nodeChar);
            this.leaf = src.leaf;
            this.minLengthOfUserAgentSuffix = src.minLengthOfUserAgentSuffix;
            this.requiredCharacters = src.requiredCharacters;
            this.child = (AbstractNode) src.children.values()[0];
        }

        public List<String> getLeafs() {
            final List<String> result = new ArrayList<>(2);
            if (StringUtils.isNotBlank(this.leaf)) {
                result.add(leaf);
            }
            if (this.child.nodeChar == '*') {
                result.addAll(this.child.getLeafs());
            }
            return result;
        }

        @Override
        public boolean hasChildren() {
            return true;
        }

        public void populateNextCheckNodes(final char c, final Collection<AbstractNode> nextToCheck) {
            if (this.child.nodeChar == c || this.child.nodeChar == '?') {
                nextToCheck.add(this.child);
            }
            if (this.child.nodeChar == '*') {
                this.child.populateNextCheckNodes(c, nextToCheck);
            }
            if (nodeChar == '*') {
                nextToCheck.add(this);
            }
        }
    }

    /**
     * Wildcard trie over all browscap name patterns. Matching walks the user
     * agent one character at a time against a frontier of candidate nodes,
     * pruning candidates by remaining-length and required-character checks.
     */
    static class Trie {
        private int maxPatternChar = 127;
        private final Node root = new Node((char) 0);

        public List<String> getMatchedPatterns(final String userAgent) {
            final int userAgentLength = userAgent.length();
            if (userAgentLength >= Short.MAX_VALUE) {
                // Lengths are tracked in shorts; absurdly long UAs match "*".
                return Collections.singletonList("*");
            }
            // Count remaining occurrences of each (pattern-range) character so
            // candidates can be pruned via requiredCharacters.
            final short[] charCounters = new short[maxPatternChar + 1];
            final BitSetWithMask charPresence = new BitSetWithMask(maxPatternChar);
            for (int i = 0; i < userAgentLength; i++) {
                char c = userAgent.charAt(i);
                if (c <= maxPatternChar) {
                    charCounters[c]++;
                    charPresence.set(c);
                }
            }
            final List<String> leafs = new ArrayList<>();
            final List<AbstractNode> toCheck = new ArrayList<>();
            toCheck.add(this.root);
            int currentChar = -1;
            final List<AbstractNode> nextToCheck = new ArrayList<>();
            while (!toCheck.isEmpty()) {
                currentChar++;
                final int uaCharsLeft = userAgentLength - currentChar;
                if (0 == uaCharsLeft) {
                    // Input exhausted: every surviving candidate's leafs match.
                    for (final AbstractNode node : toCheck) {
                        leafs.addAll(node.getLeafs());
                    }
                    break;
                }
                final char c = userAgent.charAt(currentChar);
                final int toCheckSize = toCheck.size();
                for (int i = 0; i < toCheckSize; i++) {
                    toCheck.get(i).populateNextCheckNodes(c, nextToCheck);
                }
                if (nextToCheck.isEmpty()) {
                    break;
                }
                toCheck.clear();
                // Character c has now been consumed; update remaining counts.
                if (c <= maxPatternChar) {
                    if (--charCounters[c] == 0) {
                        charPresence.clear(c);
                    }
                }
                final int nextToCheckSize = nextToCheck.size();
                for (int i = 0; i < nextToCheckSize; i++) {
                    final AbstractNode node = nextToCheck.get(i);
                    if (node.nodeChar == ASTERIX && !node.hasChildren()) {
                        // Trailing '*' matches the rest of the user agent.
                        leafs.addAll(node.getLeafs());
                    } else if (uaCharsLeft >= node.minLengthOfUserAgentSuffix
                            && charPresence.matchedMask(node.requiredCharacters)) {
                        toCheck.add(node);
                    }
                }
                nextToCheck.clear();
            }
            return leafs;
        }

        public void makeTrie(final Collection<String> patterns) {
            for (final String pattern : patterns) {
                root.insertPattern(pattern, pattern.toCharArray(), 0);
            }
            // Order matters: lengths and max char feed the matching-time
            // pruning; optimize() must run last since it restructures nodes.
            this.root.calcMinLengthOfUserAgentSuffix();
            this.maxPatternChar = this.root.calcMaxChar();
            this.root.calcRequiredCharacters();
            this.root.optimize();
        }
    }

    /**
     * Return the longest name pattern matching {@code userAgent}, or
     * {@code null} when no pattern matches.
     *
     * <p>Bug fix: the previous implementation sorted the matches and called
     * {@code get(0)}, which threw {@link IndexOutOfBoundsException} for an
     * unmatched user agent even though {@link #lookup(String)} is written to
     * fall back to "Unknown" capabilities on a missing match.
     */
    public String getPattern(final String userAgent) throws Exception {
        return tree.getMatchedPatterns(userAgent)
                .stream()
                .max(Comparator.comparingInt(String::length))
                .orElse(null);
    }
}
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1.stub; import static com.google.cloud.compute.v1.TargetHttpProxiesClient.AggregatedListPagedResponse; import static com.google.cloud.compute.v1.TargetHttpProxiesClient.ListPagedResponse; import com.google.api.client.http.HttpMethods; import com.google.api.core.BetaApi; import com.google.api.core.InternalApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.httpjson.ApiMethodDescriptor; import com.google.api.gax.httpjson.HttpJsonCallSettings; import com.google.api.gax.httpjson.HttpJsonOperationSnapshot; import com.google.api.gax.httpjson.HttpJsonStubCallableFactory; import com.google.api.gax.httpjson.ProtoMessageRequestFormatter; import com.google.api.gax.httpjson.ProtoMessageResponseParser; import com.google.api.gax.httpjson.ProtoRestSerializer; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.compute.v1.AggregatedListTargetHttpProxiesRequest; import com.google.cloud.compute.v1.DeleteTargetHttpProxyRequest; import com.google.cloud.compute.v1.GetTargetHttpProxyRequest; import com.google.cloud.compute.v1.InsertTargetHttpProxyRequest; import com.google.cloud.compute.v1.ListTargetHttpProxiesRequest; import com.google.cloud.compute.v1.Operation; import 
com.google.cloud.compute.v1.Operation.Status; import com.google.cloud.compute.v1.PatchTargetHttpProxyRequest; import com.google.cloud.compute.v1.SetUrlMapTargetHttpProxyRequest; import com.google.cloud.compute.v1.TargetHttpProxy; import com.google.cloud.compute.v1.TargetHttpProxyAggregatedList; import com.google.cloud.compute.v1.TargetHttpProxyList; import com.google.protobuf.TypeRegistry; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * REST stub implementation for the TargetHttpProxies service API. * * <p>This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator-java") @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public class HttpJsonTargetHttpProxiesStub extends TargetHttpProxiesStub { private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder().add(Operation.getDescriptor()).build(); private static final ApiMethodDescriptor< AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList> aggregatedListMethodDescriptor = ApiMethodDescriptor .<AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList>newBuilder() .setFullMethodName("google.cloud.compute.v1.TargetHttpProxies/AggregatedList") .setHttpMethod(HttpMethods.GET) .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<AggregatedListTargetHttpProxiesRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/aggregated/targetHttpProxies", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<AggregatedListTargetHttpProxiesRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, 
List<String>> fields = new HashMap<>(); ProtoRestSerializer<AggregatedListTargetHttpProxiesRequest> serializer = ProtoRestSerializer.create(); if (request.hasFilter()) { serializer.putQueryParam(fields, "filter", request.getFilter()); } if (request.hasIncludeAllScopes()) { serializer.putQueryParam( fields, "includeAllScopes", request.getIncludeAllScopes()); } if (request.hasMaxResults()) { serializer.putQueryParam( fields, "maxResults", request.getMaxResults()); } if (request.hasOrderBy()) { serializer.putQueryParam(fields, "orderBy", request.getOrderBy()); } if (request.hasPageToken()) { serializer.putQueryParam(fields, "pageToken", request.getPageToken()); } if (request.hasReturnPartialSuccess()) { serializer.putQueryParam( fields, "returnPartialSuccess", request.getReturnPartialSuccess()); } return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<TargetHttpProxyAggregatedList>newBuilder() .setDefaultInstance(TargetHttpProxyAggregatedList.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<DeleteTargetHttpProxyRequest, Operation> deleteMethodDescriptor = ApiMethodDescriptor.<DeleteTargetHttpProxyRequest, Operation>newBuilder() .setFullMethodName("google.cloud.compute.v1.TargetHttpProxies/Delete") .setHttpMethod(HttpMethods.DELETE) .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<DeleteTargetHttpProxyRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/targetHttpProxies/{targetHttpProxy}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<DeleteTargetHttpProxyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); serializer.putPathParam( fields, "targetHttpProxy", request.getTargetHttpProxy()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> 
fields = new HashMap<>(); ProtoRestSerializer<DeleteTargetHttpProxyRequest> serializer = ProtoRestSerializer.create(); if (request.hasRequestId()) { serializer.putQueryParam(fields, "requestId", request.getRequestId()); } return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (DeleteTargetHttpProxyRequest request, Operation response) -> { StringBuilder opName = new StringBuilder(response.getName()); opName.append(":").append(request.getProject()); return HttpJsonOperationSnapshot.newBuilder() .setName(opName.toString()) .setMetadata(response) .setDone(Status.DONE.equals(response.getStatus())) .setResponse(response) .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage()) .build(); }) .build(); private static final ApiMethodDescriptor<GetTargetHttpProxyRequest, TargetHttpProxy> getMethodDescriptor = ApiMethodDescriptor.<GetTargetHttpProxyRequest, TargetHttpProxy>newBuilder() .setFullMethodName("google.cloud.compute.v1.TargetHttpProxies/Get") .setHttpMethod(HttpMethods.GET) .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<GetTargetHttpProxyRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/targetHttpProxies/{targetHttpProxy}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<GetTargetHttpProxyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); serializer.putPathParam( fields, "targetHttpProxy", request.getTargetHttpProxy()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<GetTargetHttpProxyRequest> serializer = ProtoRestSerializer.create(); return fields; }) .setRequestBodyExtractor(request -> 
null) .build()) .setResponseParser( ProtoMessageResponseParser.<TargetHttpProxy>newBuilder() .setDefaultInstance(TargetHttpProxy.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<InsertTargetHttpProxyRequest, Operation> insertMethodDescriptor = ApiMethodDescriptor.<InsertTargetHttpProxyRequest, Operation>newBuilder() .setFullMethodName("google.cloud.compute.v1.TargetHttpProxies/Insert") .setHttpMethod(HttpMethods.POST) .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<InsertTargetHttpProxyRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/targetHttpProxies", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<InsertTargetHttpProxyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<InsertTargetHttpProxyRequest> serializer = ProtoRestSerializer.create(); if (request.hasRequestId()) { serializer.putQueryParam(fields, "requestId", request.getRequestId()); } return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody( "targetHttpProxyResource", request.getTargetHttpProxyResource())) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (InsertTargetHttpProxyRequest request, Operation response) -> { StringBuilder opName = new StringBuilder(response.getName()); opName.append(":").append(request.getProject()); return HttpJsonOperationSnapshot.newBuilder() .setName(opName.toString()) .setMetadata(response) .setDone(Status.DONE.equals(response.getStatus())) .setResponse(response) .setError(response.getHttpErrorStatusCode(), 
response.getHttpErrorMessage()) .build(); }) .build(); private static final ApiMethodDescriptor<ListTargetHttpProxiesRequest, TargetHttpProxyList> listMethodDescriptor = ApiMethodDescriptor.<ListTargetHttpProxiesRequest, TargetHttpProxyList>newBuilder() .setFullMethodName("google.cloud.compute.v1.TargetHttpProxies/List") .setHttpMethod(HttpMethods.GET) .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<ListTargetHttpProxiesRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/targetHttpProxies", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<ListTargetHttpProxiesRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<ListTargetHttpProxiesRequest> serializer = ProtoRestSerializer.create(); if (request.hasFilter()) { serializer.putQueryParam(fields, "filter", request.getFilter()); } if (request.hasMaxResults()) { serializer.putQueryParam( fields, "maxResults", request.getMaxResults()); } if (request.hasOrderBy()) { serializer.putQueryParam(fields, "orderBy", request.getOrderBy()); } if (request.hasPageToken()) { serializer.putQueryParam(fields, "pageToken", request.getPageToken()); } if (request.hasReturnPartialSuccess()) { serializer.putQueryParam( fields, "returnPartialSuccess", request.getReturnPartialSuccess()); } return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<TargetHttpProxyList>newBuilder() .setDefaultInstance(TargetHttpProxyList.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<PatchTargetHttpProxyRequest, Operation> patchMethodDescriptor = ApiMethodDescriptor.<PatchTargetHttpProxyRequest, Operation>newBuilder() 
.setFullMethodName("google.cloud.compute.v1.TargetHttpProxies/Patch") .setHttpMethod(HttpMethods.PATCH) .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<PatchTargetHttpProxyRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/targetHttpProxies/{targetHttpProxy}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<PatchTargetHttpProxyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); serializer.putPathParam( fields, "targetHttpProxy", request.getTargetHttpProxy()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<PatchTargetHttpProxyRequest> serializer = ProtoRestSerializer.create(); if (request.hasRequestId()) { serializer.putQueryParam(fields, "requestId", request.getRequestId()); } return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody( "targetHttpProxyResource", request.getTargetHttpProxyResource())) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (PatchTargetHttpProxyRequest request, Operation response) -> { StringBuilder opName = new StringBuilder(response.getName()); opName.append(":").append(request.getProject()); return HttpJsonOperationSnapshot.newBuilder() .setName(opName.toString()) .setMetadata(response) .setDone(Status.DONE.equals(response.getStatus())) .setResponse(response) .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage()) .build(); }) .build(); private static final ApiMethodDescriptor<SetUrlMapTargetHttpProxyRequest, Operation> setUrlMapMethodDescriptor = ApiMethodDescriptor.<SetUrlMapTargetHttpProxyRequest, Operation>newBuilder() 
.setFullMethodName("google.cloud.compute.v1.TargetHttpProxies/SetUrlMap") .setHttpMethod(HttpMethods.POST) .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<SetUrlMapTargetHttpProxyRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/targetHttpProxies/{targetHttpProxy}/setUrlMap", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<SetUrlMapTargetHttpProxyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); serializer.putPathParam( fields, "targetHttpProxy", request.getTargetHttpProxy()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<SetUrlMapTargetHttpProxyRequest> serializer = ProtoRestSerializer.create(); if (request.hasRequestId()) { serializer.putQueryParam(fields, "requestId", request.getRequestId()); } return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody( "urlMapReferenceResource", request.getUrlMapReferenceResource())) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (SetUrlMapTargetHttpProxyRequest request, Operation response) -> { StringBuilder opName = new StringBuilder(response.getName()); opName.append(":").append(request.getProject()); return HttpJsonOperationSnapshot.newBuilder() .setName(opName.toString()) .setMetadata(response) .setDone(Status.DONE.equals(response.getStatus())) .setResponse(response) .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage()) .build(); }) .build(); private final UnaryCallable<AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList> aggregatedListCallable; private final UnaryCallable<AggregatedListTargetHttpProxiesRequest, AggregatedListPagedResponse> 
aggregatedListPagedCallable; private final UnaryCallable<DeleteTargetHttpProxyRequest, Operation> deleteCallable; private final OperationCallable<DeleteTargetHttpProxyRequest, Operation, Operation> deleteOperationCallable; private final UnaryCallable<GetTargetHttpProxyRequest, TargetHttpProxy> getCallable; private final UnaryCallable<InsertTargetHttpProxyRequest, Operation> insertCallable; private final OperationCallable<InsertTargetHttpProxyRequest, Operation, Operation> insertOperationCallable; private final UnaryCallable<ListTargetHttpProxiesRequest, TargetHttpProxyList> listCallable; private final UnaryCallable<ListTargetHttpProxiesRequest, ListPagedResponse> listPagedCallable; private final UnaryCallable<PatchTargetHttpProxyRequest, Operation> patchCallable; private final OperationCallable<PatchTargetHttpProxyRequest, Operation, Operation> patchOperationCallable; private final UnaryCallable<SetUrlMapTargetHttpProxyRequest, Operation> setUrlMapCallable; private final OperationCallable<SetUrlMapTargetHttpProxyRequest, Operation, Operation> setUrlMapOperationCallable; private final BackgroundResource backgroundResources; private final HttpJsonGlobalOperationsStub httpJsonOperationsStub; private final HttpJsonStubCallableFactory callableFactory; public static final HttpJsonTargetHttpProxiesStub create(TargetHttpProxiesStubSettings settings) throws IOException { return new HttpJsonTargetHttpProxiesStub(settings, ClientContext.create(settings)); } public static final HttpJsonTargetHttpProxiesStub create(ClientContext clientContext) throws IOException { return new HttpJsonTargetHttpProxiesStub( TargetHttpProxiesStubSettings.newBuilder().build(), clientContext); } public static final HttpJsonTargetHttpProxiesStub create( ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { return new HttpJsonTargetHttpProxiesStub( TargetHttpProxiesStubSettings.newBuilder().build(), clientContext, callableFactory); } /** * Constructs an 
instance of HttpJsonTargetHttpProxiesStub, using the given settings. This is * protected so that it is easy to make a subclass, but otherwise, the static factory methods * should be preferred. */ protected HttpJsonTargetHttpProxiesStub( TargetHttpProxiesStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new HttpJsonTargetHttpProxiesCallableFactory()); } /** * Constructs an instance of HttpJsonTargetHttpProxiesStub, using the given settings. This is * protected so that it is easy to make a subclass, but otherwise, the static factory methods * should be preferred. */ protected HttpJsonTargetHttpProxiesStub( TargetHttpProxiesStubSettings settings, ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; this.httpJsonOperationsStub = HttpJsonGlobalOperationsStub.create(clientContext, callableFactory); HttpJsonCallSettings<AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList> aggregatedListTransportSettings = HttpJsonCallSettings .<AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList>newBuilder() .setMethodDescriptor(aggregatedListMethodDescriptor) .setTypeRegistry(typeRegistry) .build(); HttpJsonCallSettings<DeleteTargetHttpProxyRequest, Operation> deleteTransportSettings = HttpJsonCallSettings.<DeleteTargetHttpProxyRequest, Operation>newBuilder() .setMethodDescriptor(deleteMethodDescriptor) .setTypeRegistry(typeRegistry) .build(); HttpJsonCallSettings<GetTargetHttpProxyRequest, TargetHttpProxy> getTransportSettings = HttpJsonCallSettings.<GetTargetHttpProxyRequest, TargetHttpProxy>newBuilder() .setMethodDescriptor(getMethodDescriptor) .setTypeRegistry(typeRegistry) .build(); HttpJsonCallSettings<InsertTargetHttpProxyRequest, Operation> insertTransportSettings = HttpJsonCallSettings.<InsertTargetHttpProxyRequest, Operation>newBuilder() .setMethodDescriptor(insertMethodDescriptor) 
.setTypeRegistry(typeRegistry) .build(); HttpJsonCallSettings<ListTargetHttpProxiesRequest, TargetHttpProxyList> listTransportSettings = HttpJsonCallSettings.<ListTargetHttpProxiesRequest, TargetHttpProxyList>newBuilder() .setMethodDescriptor(listMethodDescriptor) .setTypeRegistry(typeRegistry) .build(); HttpJsonCallSettings<PatchTargetHttpProxyRequest, Operation> patchTransportSettings = HttpJsonCallSettings.<PatchTargetHttpProxyRequest, Operation>newBuilder() .setMethodDescriptor(patchMethodDescriptor) .setTypeRegistry(typeRegistry) .build(); HttpJsonCallSettings<SetUrlMapTargetHttpProxyRequest, Operation> setUrlMapTransportSettings = HttpJsonCallSettings.<SetUrlMapTargetHttpProxyRequest, Operation>newBuilder() .setMethodDescriptor(setUrlMapMethodDescriptor) .setTypeRegistry(typeRegistry) .build(); this.aggregatedListCallable = callableFactory.createUnaryCallable( aggregatedListTransportSettings, settings.aggregatedListSettings(), clientContext); this.aggregatedListPagedCallable = callableFactory.createPagedCallable( aggregatedListTransportSettings, settings.aggregatedListSettings(), clientContext); this.deleteCallable = callableFactory.createUnaryCallable( deleteTransportSettings, settings.deleteSettings(), clientContext); this.deleteOperationCallable = callableFactory.createOperationCallable( deleteTransportSettings, settings.deleteOperationSettings(), clientContext, httpJsonOperationsStub); this.getCallable = callableFactory.createUnaryCallable( getTransportSettings, settings.getSettings(), clientContext); this.insertCallable = callableFactory.createUnaryCallable( insertTransportSettings, settings.insertSettings(), clientContext); this.insertOperationCallable = callableFactory.createOperationCallable( insertTransportSettings, settings.insertOperationSettings(), clientContext, httpJsonOperationsStub); this.listCallable = callableFactory.createUnaryCallable( listTransportSettings, settings.listSettings(), clientContext); this.listPagedCallable = 
callableFactory.createPagedCallable( listTransportSettings, settings.listSettings(), clientContext); this.patchCallable = callableFactory.createUnaryCallable( patchTransportSettings, settings.patchSettings(), clientContext); this.patchOperationCallable = callableFactory.createOperationCallable( patchTransportSettings, settings.patchOperationSettings(), clientContext, httpJsonOperationsStub); this.setUrlMapCallable = callableFactory.createUnaryCallable( setUrlMapTransportSettings, settings.setUrlMapSettings(), clientContext); this.setUrlMapOperationCallable = callableFactory.createOperationCallable( setUrlMapTransportSettings, settings.setUrlMapOperationSettings(), clientContext, httpJsonOperationsStub); this.backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } @InternalApi public static List<ApiMethodDescriptor> getMethodDescriptors() { List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>(); methodDescriptors.add(aggregatedListMethodDescriptor); methodDescriptors.add(deleteMethodDescriptor); methodDescriptors.add(getMethodDescriptor); methodDescriptors.add(insertMethodDescriptor); methodDescriptors.add(listMethodDescriptor); methodDescriptors.add(patchMethodDescriptor); methodDescriptors.add(setUrlMapMethodDescriptor); return methodDescriptors; } @Override public UnaryCallable<AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList> aggregatedListCallable() { return aggregatedListCallable; } @Override public UnaryCallable<AggregatedListTargetHttpProxiesRequest, AggregatedListPagedResponse> aggregatedListPagedCallable() { return aggregatedListPagedCallable; } @Override public UnaryCallable<DeleteTargetHttpProxyRequest, Operation> deleteCallable() { return deleteCallable; } @Override public OperationCallable<DeleteTargetHttpProxyRequest, Operation, Operation> deleteOperationCallable() { return deleteOperationCallable; } @Override public UnaryCallable<GetTargetHttpProxyRequest, TargetHttpProxy> 
getCallable() { return getCallable; } @Override public UnaryCallable<InsertTargetHttpProxyRequest, Operation> insertCallable() { return insertCallable; } @Override public OperationCallable<InsertTargetHttpProxyRequest, Operation, Operation> insertOperationCallable() { return insertOperationCallable; } @Override public UnaryCallable<ListTargetHttpProxiesRequest, TargetHttpProxyList> listCallable() { return listCallable; } @Override public UnaryCallable<ListTargetHttpProxiesRequest, ListPagedResponse> listPagedCallable() { return listPagedCallable; } @Override public UnaryCallable<PatchTargetHttpProxyRequest, Operation> patchCallable() { return patchCallable; } @Override public OperationCallable<PatchTargetHttpProxyRequest, Operation, Operation> patchOperationCallable() { return patchOperationCallable; } @Override public UnaryCallable<SetUrlMapTargetHttpProxyRequest, Operation> setUrlMapCallable() { return setUrlMapCallable; } @Override public OperationCallable<SetUrlMapTargetHttpProxyRequest, Operation, Operation> setUrlMapOperationCallable() { return setUrlMapOperationCallable; } @Override public final void close() { try { backgroundResources.close(); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new IllegalStateException("Failed to close resource", e); } } @Override public void shutdown() { backgroundResources.shutdown(); } @Override public boolean isShutdown() { return backgroundResources.isShutdown(); } @Override public boolean isTerminated() { return backgroundResources.isTerminated(); } @Override public void shutdownNow() { backgroundResources.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return backgroundResources.awaitTermination(duration, unit); } }
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package org.spongepowered.common.mixin.core.world;

import com.flowpowered.math.vector.Vector2i;
import com.flowpowered.math.vector.Vector3d;
import com.flowpowered.math.vector.Vector3i;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.util.BlockPos;
import net.minecraft.world.ChunkCoordIntPair;
import net.minecraft.world.World;
import net.minecraft.world.WorldServer;
import net.minecraft.world.biome.BiomeGenBase;
import net.minecraft.world.biome.WorldChunkManager;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.block.BlockType;
import org.spongepowered.api.util.DiscreteTransform3;
import org.spongepowered.api.util.PositionOutOfBoundsException;
import org.spongepowered.api.util.annotation.NonnullByDefault;
import org.spongepowered.api.world.Chunk;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.biome.BiomeType;
import org.spongepowered.api.world.extent.Extent;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.common.util.SpongeHooks;
import org.spongepowered.common.util.VecHelper;
import org.spongepowered.common.world.extent.ExtentViewDownsize;
import org.spongepowered.common.world.extent.ExtentViewTransform;
import org.spongepowered.common.world.storage.SpongeChunkLayout;

/**
 * Mixin that implements the Sponge API {@link Chunk} interface directly on the
 * vanilla {@code net.minecraft.world.chunk.Chunk} class.
 *
 * <p>Cached min/max bounds (block and biome) are computed once in the injected
 * constructor callback and served from fields afterwards. Biome coordinates are
 * 2D (x, z); block coordinates are 3D.</p>
 *
 * <p>NOTE(review): mixin code — the {@code @Shadow} members and
 * {@code @Inject} targets must match the obfuscated/vanilla class exactly, so
 * the code below is intentionally left untouched.</p>
 */
@NonnullByDefault
@Mixin(net.minecraft.world.chunk.Chunk.class)
public abstract class MixinChunk implements Chunk {

    // 2D footprint of a chunk: CHUNK_SIZE with the Y component dropped.
    private static final Vector2i BIOME_SIZE = SpongeChunkLayout.CHUNK_SIZE.toVector2(true);

    // Cached geometry, filled in by onConstructed() below.
    private Vector3i chunkPos;
    private Vector3i blockMin;
    private Vector3i blockMax;
    private Vector2i biomeMin;
    private Vector2i biomeMax;
    private ChunkCoordIntPair chunkCoordIntPair;

    // Members shadowed from the vanilla chunk class.
    @Shadow private World worldObj;
    @Shadow public int xPosition;
    @Shadow public int zPosition;
    @Shadow private boolean isChunkLoaded;
    @Shadow private boolean isTerrainPopulated;
    @Shadow public abstract IBlockState getBlockState(BlockPos pos);
    @Shadow public abstract BiomeGenBase getBiome(BlockPos pos, WorldChunkManager chunkManager);
    @Shadow public abstract byte[] getBiomeArray();
    @Shadow public abstract void setBiomeArray(byte[] biomeArray);
    // Prefixed to avoid clashing with the API's getBlock(int, int, int) below.
    @Shadow(prefix = "shadow$")
    public abstract Block shadow$getBlock(int x, int y, int z);

    /**
     * Runs at the end of the vanilla chunk constructor and caches the chunk
     * position plus the inclusive block/biome bounds for this chunk.
     */
    @Inject(method = "<init>(Lnet/minecraft/world/World;II)V", at = @At("RETURN"), remap = false)
    public void onConstructed(World world, int x, int z, CallbackInfo ci) {
        this.chunkPos = new Vector3i(x, 0, z);
        this.blockMin = SpongeChunkLayout.instance.toWorld(this.chunkPos).get();
        // Max corner is inclusive, hence the sub(1, 1, 1).
        this.blockMax = this.blockMin.add(SpongeChunkLayout.CHUNK_SIZE).sub(1, 1, 1);
        this.biomeMin = this.blockMin.toVector2(true);
        this.biomeMax = this.blockMax.toVector2(true);
        this.chunkCoordIntPair = new ChunkCoordIntPair(x, z);
    }

    /** Logs chunk loads on the server side only. */
    @Inject(method = "onChunkLoad()V", at = @At("RETURN"))
    public void onChunkLoadInject(CallbackInfo ci) {
        if (!worldObj.isRemote) {
            SpongeHooks.logChunkLoad(this.worldObj, this.chunkPos);
        }
    }

    /** Logs chunk unloads on the server side only. */
    @Inject(method = "onChunkUnload()V", at = @At("RETURN"))
    public void onChunkUnloadInject(CallbackInfo ci) {
        if (!worldObj.isRemote) {
            SpongeHooks.logChunkUnload(this.worldObj, this.chunkPos);
        }
    }

    @Override
    public Vector3i getPosition() {
        return this.chunkPos;
    }

    @Override
    public boolean isLoaded() {
        return this.isChunkLoaded;
    }

    @Override
    public boolean isPopulated() {
        return this.isTerrainPopulated;
    }

    /**
     * Loads (and optionally generates) this chunk via the server chunk
     * provider. Returns {@code true} if a chunk instance was obtained.
     */
    @Override
    public boolean loadChunk(boolean generate) {
        WorldServer worldserver = (WorldServer) this.worldObj;
        net.minecraft.world.chunk.Chunk chunk = null;
        // Only ask the provider to load when the chunk already exists on disk,
        // unless the caller explicitly allows generation.
        if (worldserver.theChunkProviderServer.chunkExists(this.xPosition, this.zPosition) || generate) {
            chunk = worldserver.theChunkProviderServer.loadChunk(this.xPosition, this.zPosition);
        }
        return chunk != null;
    }

    @Override
    public org.spongepowered.api.world.World getWorld() {
        return (org.spongepowered.api.world.World) this.worldObj;
    }

    @Override
    public BiomeType getBiome(int x, int z) {
        checkBiomeBounds(x, z);
        return (BiomeType) getBiome(new BlockPos(x, 0, z), this.worldObj.getWorldChunkManager());
    }

    @Override
    public void setBiome(int x, int z, BiomeType biome) {
        checkBiomeBounds(x, z);
        // Taken from Chunk#getBiome: the biome array is indexed by the
        // chunk-local (x & 15, z & 15) coordinates packed as z << 4 | x.
        byte[] biomeArray = getBiomeArray();
        int i = x & 15;
        int j = z & 15;
        biomeArray[j << 4 | i] = (byte) (((BiomeGenBase) biome).biomeID & 255);
        setBiomeArray(biomeArray);
    }

    @Override
    public BlockState getBlock(int x, int y, int z) {
        checkBlockBounds(x, y, z);
        return (BlockState) getBlockState(new BlockPos(x, y, z));
    }

    @Override
    public void setBlock(int x, int y, int z, BlockState block) {
        checkBlockBounds(x, y, z);
        // Double cast: this mixin class IS the vanilla chunk at runtime.
        SpongeHooks.setBlockState((net.minecraft.world.chunk.Chunk) (Object) this, x, y, z, block);
    }

    @Override
    public BlockType getBlockType(int x, int y, int z) {
        checkBlockBounds(x, y, z);
        return (BlockType) shadow$getBlock(x, y, z);
    }

    @Override
    public Vector2i getBiomeMin() {
        return this.biomeMin;
    }

    @Override
    public Vector2i getBiomeMax() {
        return this.biomeMax;
    }

    @Override
    public Vector2i getBiomeSize() {
        return BIOME_SIZE;
    }

    @Override
    public Vector3i getBlockMin() {
        return this.blockMin;
    }

    @Override
    public Vector3i getBlockMax() {
        return this.blockMax;
    }

    @Override
    public Vector3i getBlockSize() {
        return SpongeChunkLayout.CHUNK_SIZE;
    }

    @Override
    public boolean containsBiome(int x, int z) {
        return VecHelper.inBounds(x, z, this.biomeMin, this.biomeMax);
    }

    @Override
    public boolean containsBlock(int x, int y, int z) {
        return VecHelper.inBounds(x, y, z, this.blockMin, this.blockMax);
    }

    // Throws PositionOutOfBoundsException when (x, z) lies outside this chunk.
    private void checkBiomeBounds(int x, int z) {
        if (!containsBiome(x, z)) {
            throw new PositionOutOfBoundsException(new Vector2i(x, z), this.biomeMin, this.biomeMax);
        }
    }

    // Throws PositionOutOfBoundsException when (x, y, z) lies outside this chunk.
    private void checkBlockBounds(int x, int y, int z) {
        if (!containsBlock(x, y, z)) {
            throw new PositionOutOfBoundsException(new Vector3i(x, y, z), this.blockMin, this.blockMax);
        }
    }

    /**
     * Returns a restricted view of this chunk; both corners must lie inside
     * the chunk's block bounds.
     */
    @Override
    public Extent getExtentView(Vector3i newMin, Vector3i newMax) {
        checkBlockBounds(newMin.getX(), newMin.getY(), newMin.getZ());
        checkBlockBounds(newMax.getX(), newMax.getY(), newMax.getZ());
        return ExtentViewDownsize.newInstance(this, newMin, newMax);
    }

    @Override
    public Extent getExtentView(DiscreteTransform3 transform) {
        return ExtentViewTransform.newInstance(this, transform);
    }

    /** View of this chunk translated so that its minimum corner is the origin. */
    @Override
    public Extent getRelativeExtentView() {
        return getExtentView(DiscreteTransform3.fromTranslation(getBlockMin().negate()));
    }
}
/* * Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of JSR-310 nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package java.time.chrono.global;

import static java.time.calendrical.ChronoField.EPOCH_DAY;

import java.io.Serializable;
import java.time.DateTimeException;
import java.time.calendrical.ChronoField;
import java.time.calendrical.DateTimeAccessor;
import java.time.calendrical.DateTimeValueRange;
import java.time.chrono.Chrono;
import java.time.chrono.ChronoLocalDate;
import java.time.chrono.Era;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;

/**
 * The Hijrah calendar system.
 * <p>
 * This chronology defines the rules of the Hijrah calendar system.
 * <p>
 * The implementation follows the Freeman-Grenville algorithm (*1) and has the following features:
 * <ul>
 * <li>A year has 12 months.</li>
 * <li>Over a cycle of 30 years there are 11 leap years.</li>
 * <li>There are 30 days in month number 1 (Muharram), 3 (Rabi'al-Awwal), 5 (Jumada l-Ula),
 * 7 (Rajab), 9 (Ramadan), and 11 (Dhu 'l-Qa`da), and 29 days in month number 2 (Safar),
 * 4 (Rabi'ath-Thani), 6 (Jumada t-Tania), 8 (Sha`ban), 10 (Shawwal), and 12 (Dhu 'l-Hijja).</li>
 * <li>In a leap year month 12 has 30 days.</li>
 * <li>In a 30 year cycle, year 2, 5, 7, 10, 13, 16, 18, 21, 24, 26, and 29 are leap years.</li>
 * <li>Total of 10631 days in a 30 years cycle.</li>
 * </ul>
 * <p>
 * (*1) The algorithm is taken from the book, The Muslim and Christian Calendars by
 * G.S.P. Freeman-Grenville.
 * <p>
 *
 * <h4>Implementation notes</h4> This class is immutable and thread-safe.
 */
public final class HijrahChrono extends Chrono<HijrahChrono> implements Serializable {

    /**
     * Singleton instance of the Hijrah chronology.
     */
    public static final HijrahChrono INSTANCE = new HijrahChrono();
    /**
     * The singleton instance for the era before the current one - Before Hijrah - which has the value 0.
     */
    public static final Era<HijrahChrono> ERA_BEFORE_AH = HijrahEra.BEFORE_AH;
    /**
     * The singleton instance for the current era - Hijrah - which has the value 1.
     */
    public static final Era<HijrahChrono> ERA_AH = HijrahEra.AH;
    /**
     * Serialization version.
     */
    private static final long serialVersionUID = 3127340209035924785L;
    /**
     * Narrow names for eras, keyed by language tag.
     */
    private static final HashMap<String, String[]> ERA_NARROW_NAMES = new HashMap<String, String[]>();
    /**
     * Short names for eras, keyed by language tag.
     */
    private static final HashMap<String, String[]> ERA_SHORT_NAMES = new HashMap<String, String[]>();
    /**
     * Full names for eras, keyed by language tag.
     */
    private static final HashMap<String, String[]> ERA_FULL_NAMES = new HashMap<String, String[]>();
    /**
     * Fallback language for the era names.
     */
    private static final String FALLBACK_LANGUAGE = "en";
    /**
     * Language that has the era names.
     */
    // private static final String TARGET_LANGUAGE = "ar";
    /**
     * Name data: only English fallback names are bundled; index 0 is the
     * era before Hijrah, index 1 the Hijrah era.
     */
    static {
        ERA_NARROW_NAMES.put(FALLBACK_LANGUAGE, new String[] { "BH", "HE" });
        ERA_SHORT_NAMES.put(FALLBACK_LANGUAGE, new String[] { "B.H.", "H.E." });
        ERA_FULL_NAMES.put(FALLBACK_LANGUAGE, new String[] { "Before Hijrah", "Hijrah Era" });
    }

    /**
     * Restrictive constructor.
     */
    private HijrahChrono() {
    }

    /**
     * Resolve singleton so deserialization never creates a second instance.
     *
     * @return the singleton instance, not null
     */
    private Object readResolve() {
        return INSTANCE;
    }

    // -----------------------------------------------------------------------
    /**
     * Gets the ID of the chronology - 'Hijrah'.
     * <p>
     * The ID uniquely identifies the {@code Chrono}. It can be used to lookup the {@code Chrono} using
     * {@link #of(String)}.
     *
     * @return the chronology ID - 'Hijrah'
     * @see #getCalendarType()
     */
    @Override
    public String getId() {
        return "Hijrah";
    }

    /**
     * Gets the calendar type of the underlying calendar system - 'islamicc'.
     * <p>
     * The calendar type is an identifier defined by the <em>Unicode Locale Data Markup Language (LDML)</em>
     * specification. It can be used to lookup the {@code Chrono} using {@link #of(String)}. It can also be used
     * as part of a locale, accessible via {@link Locale#getUnicodeLocaleType(String)} with the key 'ca'.
     *
     * @return the calendar system type - 'islamicc'
     * @see #getId()
     */
    @Override
    public String getCalendarType() {
        return "islamicc";
    }

    // -----------------------------------------------------------------------
    /**
     * Obtains a Hijrah date from the proleptic-year, month-of-year and day-of-month.
     *
     * @param prolepticYear  the proleptic year
     * @param month  the month-of-year, 1 to 12
     * @param dayOfMonth  the day-of-month
     * @return the Hijrah date, not null
     */
    @Override
    public ChronoLocalDate<HijrahChrono> date(int prolepticYear, int month, int dayOfMonth) {
        return HijrahDate.of(prolepticYear, month, dayOfMonth);
    }

    /**
     * Obtains a Hijrah date from the proleptic-year and day-of-year.
     *
     * @param prolepticYear  the proleptic year
     * @param dayOfYear  the day-of-year, 1 or greater
     * @return the Hijrah date, not null
     */
    @Override
    public ChronoLocalDate<HijrahChrono> dateYearDay(int prolepticYear, int dayOfYear) {
        // Computed by stepping from the first day of the year; a direct
        // year/day-of-year factory would avoid the intermediate date.
        return HijrahDate.of(prolepticYear, 1, 1).plusDays(dayOfYear - 1);  // TODO better
    }

    /**
     * Obtains a Hijrah date from another date-time object, converting via the epoch-day.
     *
     * @param dateTime  the date-time object to convert, not null
     * @return the Hijrah date, not null
     */
    @Override
    public ChronoLocalDate<HijrahChrono> date(DateTimeAccessor dateTime) {
        if (dateTime instanceof HijrahDate) {
            return (HijrahDate) dateTime;
        }
        return HijrahDate.ofEpochDay(dateTime.getLong(EPOCH_DAY));
    }

    // -----------------------------------------------------------------------
    /**
     * Checks if the specified proleptic year is a leap year in the Hijrah calendar.
     *
     * @param prolepticYear  the proleptic year to check
     * @return true if the year is a leap year
     */
    @Override
    public boolean isLeapYear(long prolepticYear) {
        return HijrahDate.isLeapYear(prolepticYear);
    }

    /**
     * Calculates the proleptic year from an era and year-of-era.
     *
     * @param era  the era, must be a {@code HijrahEra}, not null
     * @param yearOfEra  the year-of-era
     * @return the proleptic year
     * @throws DateTimeException if the era is not a {@code HijrahEra}
     */
    @Override
    public int prolepticYear(Era<HijrahChrono> era, int yearOfEra) {
        if (!(era instanceof HijrahEra)) {
            throw new DateTimeException("Era must be HijrahEra");
        }
        // BEFORE_AH counts backwards: year-of-era 1 is proleptic year 0.
        return (era == HijrahEra.AH ? yearOfEra : 1 - yearOfEra);
    }

    /**
     * Gets the era for the given numeric value: 0 is Before Hijrah, 1 is Hijrah.
     *
     * @param eraValue  the era value, 0 or 1
     * @return the Hijrah era, not null
     * @throws DateTimeException if the value is not 0 or 1
     */
    @Override
    public Era<HijrahChrono> eraOf(int eraValue) {
        switch (eraValue) {
            case 0:
                return HijrahEra.BEFORE_AH;
            case 1:
                return HijrahEra.AH;
            default:
                throw new DateTimeException("invalid Hijrah era");
        }
    }

    /**
     * Gets the list of eras for the chronology.
     *
     * @return the list of eras, not null
     */
    @Override
    public List<Era<HijrahChrono>> eras() {
        return Arrays.<Era<HijrahChrono>> asList(HijrahEra.values());
    }

    // -----------------------------------------------------------------------
    /**
     * Gets the range of valid values for the specified field.
     *
     * @param field  the field to query the range for, not null
     * @return the range of valid values for the field, not null
     */
    @Override
    public DateTimeValueRange range(ChronoField field) {
        return field.range();
    }
}
/**
 * Copyright 2004-2014 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.rice.test.launch;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

import javax.servlet.Servlet;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.resource.ResourceCollection;
import org.eclipse.jetty.webapp.WebAppContext;

/**
 * Starts and stops an embedded Jetty {@link Server} for use in integration tests.
 *
 * <p>The launcher can host either one or more exploded webapp roots (via a
 * {@link WebAppContext}) or a single {@link Servlet} class (via a plain
 * {@link ServletContextHandler}), depending on which constructor arguments are supplied.
 */
public class JettyLauncher {

    private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(JettyLauncher.class);

    /**
     * The name of an attribute we set in the ServletContext to indicate to the webapp
     * that it is running within unit tests, in case it needs to alter its configuration
     * or behavior.
     */
    public static final String JETTYSERVER_TESTMODE_ATTRIB = "JETTYSERVER_TESTMODE";

    private int port;
    private String contextName;
    // Relative (to the "basedir" system property) webapp document roots; empty when
    // the launcher is configured to host a single servlet class instead.
    private List<String> relativeWebappRoots = new ArrayList<String>();
    private Class<? extends Servlet> servletClass;
    private Server server;
    private ServletContextHandler context;
    private boolean failOnContextFailure;

    /**
     * Whether we are in test mode
     */
    private boolean testMode = false;

    public JettyLauncher() {
        this(8080);
    }

    public JettyLauncher(int port) {
        this(port, null, null, null);
    }

    public JettyLauncher(int port, String contextName) {
        this(port, contextName, null, null);
    }

    public JettyLauncher(int port, String contextName, String relativeWebappRoot) {
        this(port, contextName, relativeWebappRoot, null);
    }

    public JettyLauncher(int port, String contextName, Class<? extends Servlet> servletClass) {
        this(port, contextName, null, servletClass);
    }

    /**
     * Full constructor.
     *
     * @param port the HTTP port to listen on.
     * @param contextName the context path; may be null, in which case
     *        {@link #getContextName()} supplies a default.
     * @param relativeWebappRoots a comma-separated list of webapp roots relative to the
     *        "basedir" system property; may be null when hosting a servlet class instead.
     * @param servletClass the servlet to host when no webapp roots are given; may be null.
     */
    public JettyLauncher(int port, String contextName, String relativeWebappRoots, Class<? extends Servlet> servletClass) {
        this.port = port;
        this.contextName = contextName;
        // Guard against null: the delegating constructors pass null here when no webapp
        // roots are supplied, which previously caused a NullPointerException inside
        // StringTokenizer (so even "new JettyLauncher(8080)" would fail).
        if (relativeWebappRoots != null) {
            StringTokenizer tokenizer = new StringTokenizer(relativeWebappRoots, ",");
            while (tokenizer.hasMoreTokens()) {
                String relativeWebappRoot = tokenizer.nextToken();
                this.relativeWebappRoots.add(relativeWebappRoot);
            }
        }
        this.servletClass = servletClass;
    }

    public void setTestMode(boolean t) {
        this.testMode = t;
    }

    public boolean isTestMode() {
        return testMode;
    }

    public Server getServer() {
        return server;
    }

    public ServletContextHandler getContext() {
        return context;
    }

    /**
     * Creates and starts the server. When {@link #isFailOnContextFailure()} is set and the
     * web application context failed to deploy, the server is stopped again and an
     * exception is thrown so tests fail fast instead of running against a broken webapp.
     *
     * @throws Exception if the server cannot be started, or if the context failed to start
     *         and failOnContextFailure is enabled.
     */
    public void start() throws Exception {
        server = createServer();
        server.start();
        if (isFailOnContextFailure() && contextStartupFailed()) {
            try {
                server.stop();
            } catch (Exception e) {
                // Include the cause so the cleanup failure is diagnosable from the logs.
                LOG.warn("Failed to stop server after web application startup failure.", e);
            }
            throw new Exception("Failed to startup web application context! Check logs for specific error.");
        }
    }

    public void stop() throws Exception {
        server.stop();
    }

    public boolean isStarted() {
        return server.isStarted();
    }

    /**
     * Builds the Jetty server instance. Uses a {@link WebAppContext} when webapp roots were
     * configured, otherwise mounts the configured servlet class on a plain context handler.
     * In both cases the test-mode marker attribute is set on the servlet context.
     */
    protected Server createServer() {
        Server server = new Server(getPort());
        setBaseDirSystemProperty();
        if (useWebAppContext()) {
            File tmpDir = new File(System.getProperty("basedir") + "/target/jetty-tmp");
            tmpDir.mkdirs();
            WebAppContext webAppContext = new WebAppContext();
            webAppContext.setContextPath(getContextName());
            String[] fullRelativeWebappRoots = new String[this.relativeWebappRoots.size()];
            for (int i = 0; i < this.relativeWebappRoots.size(); i++) {
                String fullRelativeWebappRoot = this.relativeWebappRoots.get(i);
                fullRelativeWebappRoots[i] = System.getProperty("basedir") + fullRelativeWebappRoot;
                if (LOG.isInfoEnabled()) {
                    LOG.info("WebAppRoot = " + fullRelativeWebappRoots[i]);
                }
            }
            webAppContext.setBaseResource(new ResourceCollection(fullRelativeWebappRoots));
            webAppContext.setTempDirectory(tmpDir);
            webAppContext.setAttribute(JETTYSERVER_TESTMODE_ATTRIB, String.valueOf(isTestMode()));
            context = webAppContext;
            server.setHandler(context);
        } else {
            ServletContextHandler root = new ServletContextHandler(server, "/", ServletContextHandler.SESSIONS);
            root.addServlet(new ServletHolder(servletClass), getContextName());
            root.setAttribute(JETTYSERVER_TESTMODE_ATTRIB, String.valueOf(isTestMode()));
            context = root;
        }
        return server;
    }

    /** Defaults the "basedir" system property to the working directory when unset. */
    protected void setBaseDirSystemProperty() {
        if (System.getProperty("basedir") == null) {
            System.setProperty("basedir", System.getProperty("user.dir"));
        }
    }

    private boolean useWebAppContext() {
        return CollectionUtils.isNotEmpty(this.relativeWebappRoots);
    }

    protected boolean contextStartupFailed() throws Exception {
        return !context.isAvailable();
    }

    public String getContextName() {
        if (contextName == null) {
            return "/SampleRiceClient";
        }
        return contextName;
    }

    public void setContextName(String contextName) {
        this.contextName = contextName;
    }

    public int getPort() {
        return port;
    }

    public void setPort(int port) {
        this.port = port;
    }

    public boolean isFailOnContextFailure() {
        return this.failOnContextFailure;
    }

    public void setFailOnContextFailure(boolean failOnContextFailure) {
        this.failOnContextFailure = failOnContextFailure;
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this).append("port", port)
                .append("contextName", contextName)
                .append("relativeWebappRoots", relativeWebappRoots)
                .append("servletClass", servletClass)
                .toString();
    }

    /**
     * Command-line entry point: args are [port [contextName [relativeWebappRoot]]].
     */
    public static void main(String[] args) {
        int port = args.length > 0 ? Integer.parseInt(args[0]) : 8080;
        String contextName = args.length > 1 ? args[1] : null;
        String relativeWebappRoot = args.length > 2 ? args[2] : null;
        try {
            new JettyLauncher(port, contextName, relativeWebappRoot).start();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
/*
 * MIT License
 *
 * Copyright (c) 2016
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 */
package com.codingrodent.emulator.emulator.display;

import javax.swing.*;

/*
 * convert video memory writes into bit displays
 */
public class PrimaryDisplay extends JFrame {
    private static final long serialVersionUID = 3832623997442863920L;
    //
    // Menu bar and the individual menu items whose enabled/checked state is
    // toggled by the package-private setters below.
    private final JMenuBar menuBar;
    private final JMenuItem loadFileRAM;
    private final JMenuItem saveFile;
    // CPU speed options: mutually exclusive check boxes (see enable* methods).
    private final JCheckBoxMenuItem MHz2;
    private final JCheckBoxMenuItem MHz4;
    private final JCheckBoxMenuItem MHz6;
    private final JCheckBoxMenuItem MHzMax;
    private final JMenuItem runUntil;
    private final JMenuItem singleStep;
    private final JMenuItem halt;
    private final JMenuItem reset;
    private final JMenuItem restart;
    // Single listener that receives all menu action events.
    private final GUIListener guiListener;
    // Scratch reference reused while building each menu.
    private JMenu menu;

    /*
     * put up windows to hold the video display and register display
     */
    public PrimaryDisplay() {
        super("Primary Display");
        /* the nascom 48*16 video display */
        //setBounds(0, 0, 800, 640); -- bounds are now calculated based on contents
        // addWindowListener(new WindowHandler());
        // screenFrame.addKeyListener(new KeyboardHandler(keyboard));
        //
        // add in all the menu items etc
        guiListener = new GUIListener(this);
        // context.setGUIListener(guiListener);
        menuBar = new JMenuBar();
        // Heavyweight popups so menus render correctly over the emulator display.
        JPopupMenu.setDefaultLightWeightPopupEnabled(false);
        setJMenuBar(menuBar);
        // set the file items
        menu = new JMenu("File");
        menuBar.add(menu);
        loadFileRAM = new JMenuItem("Load File (RAM)");
        loadFileRAM.addActionListener(guiListener);
        menu.add(loadFileRAM);
        saveFile = new JMenuItem("Save Memory Image");
        saveFile.addActionListener(guiListener);
        menu.add(saveFile);
        menu.addSeparator();
        JMenuItem exit = new JMenuItem("Exit");
        exit.addActionListener(guiListener);
        menu.add(exit);
        // execute options
        menu = new JMenu("Execute");
        menuBar.add(menu);
        runUntil = new JMenuItem("Run Until");
        runUntil.setEnabled(false);
        runUntil.addActionListener(guiListener);
        menu.add(runUntil);
        singleStep = new JMenuItem("Single Step");
        singleStep.addActionListener(guiListener);
        singleStep.setEnabled(false);
        menu.add(singleStep);
        halt = new JMenuItem("Stop");
        halt.addActionListener(guiListener);
        halt.setEnabled(true);
        menu.add(halt);
        reset = new JMenuItem("Reset");
        reset.addActionListener(guiListener);
        menu.add(reset);
        restart = new JMenuItem("Restart");
        restart.addActionListener(guiListener);
        restart.setEnabled(false);
        menu.add(restart);
        JMenuItem nmi = new JMenuItem("NMI");
        nmi.addActionListener(guiListener);
        menu.add(nmi);
        // performance options
        menu = new JMenu("Performance");
        menuBar.add(menu);
        MHz2 = new JCheckBoxMenuItem("2 MHz");
        MHz2.addActionListener(guiListener);
        menu.add(MHz2);
        MHz4 = new JCheckBoxMenuItem("4 MHz");
        MHz4.addActionListener(guiListener);
        menu.add(MHz4);
        MHz6 = new JCheckBoxMenuItem("6 MHz");
        MHz6.addActionListener(guiListener);
        menu.add(MHz6);
        // 4 MHz is the default speed selection at startup.
        MHz4.setState(true);
        MHzMax = new JCheckBoxMenuItem("Maximum");
        MHzMax.addActionListener(guiListener);
        menu.add(MHzMax);
    }

    /**
     * Add the final menu items
     */
    public void displayMenu() {
        // set the help items
        menu = new JMenu("Help");
        menuBar.add(menu);
        JMenuItem information = new JMenuItem("Information");
        information.addActionListener(guiListener);
        menu.add(information);
        JMenuItem about = new JMenuItem("About");
        about.addActionListener(guiListener);
        menu.add(about);
        // Set up the panel, enable this close and enable event handling
        // and show the window
        setVisible(true);
        //setResizable(false);
    }

    /** Enables/disables the "Load File (RAM)" menu item. */
    void setLoadFileRAM(boolean state) {
        loadFileRAM.setEnabled(state);
    }

    /** Enables/disables the "Save Memory Image" menu item. */
    void setSaveFile(boolean state) {
        saveFile.setEnabled(state);
    }

    //
    void set2MHz() {
        MHz2.setEnabled(true);
    }

    /** Marks 2 MHz as the selected speed, clearing the other speed check boxes. */
    void enable2MHz() {
        MHz2.setState(true);
        MHz4.setState(false);
        MHz6.setState(false);
        MHzMax.setState(false);
    }

    void set4MHz() {
        MHz4.setEnabled(true);
    }

    /** Marks 4 MHz as the selected speed, clearing the other speed check boxes. */
    void enable4MHz() {
        MHz2.setState(false);
        MHz4.setState(true);
        MHz6.setState(false);
        MHzMax.setState(false);
    }

    void setMHzMax() {
        MHzMax.setEnabled(true);
    }

    /** Marks "Maximum" as the selected speed, clearing the other speed check boxes. */
    void enableMHzMax() {
        MHz2.setState(false);
        MHz4.setState(false);
        MHz6.setState(false);
        MHzMax.setState(true);
    }

    void set6MHz() {
        MHz6.setEnabled(true);
    }

    /** Marks 6 MHz as the selected speed, clearing the other speed check boxes. */
    void enable6MHz() {
        MHz2.setState(false);
        MHz4.setState(false);
        MHz6.setState(true);
        MHzMax.setState(false);
    }

    //
    void setRunUntil(boolean state) {
        runUntil.setEnabled(state);
    }

    void setSingleStep(boolean state) {
        singleStep.setEnabled(state);
    }

    void setHalt(boolean state) {
        halt.setEnabled(state);
    }

    void setReset(boolean state) {
        reset.setEnabled(state);
    }

    void setRestart(boolean state) {
        restart.setEnabled(state);
    }
}
/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is part of dcm4che, an implementation of DICOM(TM) in
 * Java(TM), hosted at http://sourceforge.net/projects/dcm4che.
 *
 * The Initial Developer of the Original Code is
 * TIANI Medgraph AG.
 * Portions created by the Initial Developer are Copyright (C) 2002-2005
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 * Gunter Zeilinger <gunter.zeilinger@tiani.com>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either the GNU General Public License Version 2 or later (the "GPL"), or
 * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

package de.iftm.dcm4che.services;

import java.io.File;
import java.io.InputStream;
import java.io.IOException;
import java.net.URL;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.MalformedURLException;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;

import org.dcm4che.data.*;
import org.dcm4che.dict.*;

import org.apache.log4j.*;

/**
 * Extension of the Properties class.
 * <p>Based on dcm4che 1.4.0 sample: Configuration.java revision date 2005-10-05
 * <p>Method uriToFile added.
 * <p>No modifications.
 *
 * @author Thomas Hacklaender
 * @version 2006-08-24
 */
public class ConfigProperties extends Properties {

    static final Logger log = Logger.getLogger("ConfigProperties");

    // Static --------------------------------------------------------

    /** Returns {@code to} when {@code val} equals {@code from}, otherwise {@code val}. */
    private static String replace(String val, String from, String to) {
        return from.equals(val) ? to : val;
    }

    // Constructors --------------------------------------------------

    /**
     * Create an ConfigurationProperty object (extension of Propreties) without
     * any Properties included.
     */
    public ConfigProperties() {
    }

    /**
     * Create an ConfigurationProperty object (extension of Propreties) and load
     * Properties from given URL.
     *
     * @param url the URL containing the Properties.
     * @throws IOException if the Properties could not be loaded from the URL; the
     *         original exception is attached as the cause.
     */
    public ConfigProperties(URL url) throws IOException {
        InputStream in = null;
        try {
            load(in = url.openStream());
        } catch (Exception e) {
            // Chain the original exception so the root cause is not lost
            // (previously only the URL was reported and the cause discarded).
            IOException ioe = new IOException("Could not load configuration from " + url);
            ioe.initCause(e);
            throw ioe;
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignore) {
                    // Best-effort close; nothing useful can be done at this point.
                }
            }
        }
    }

    // Public --------------------------------------------------------

    /**
     * Looks up the property for {@code key} (falling back to {@code defaultValue})
     * and substitutes it with {@code to} when it equals {@code replace}.
     */
    public String getProperty(String key, String defaultValue, String replace, String to) {
        return replace(getProperty(key, defaultValue), replace, to);
    }

    /**
     * Splits {@code s} on commas/spaces and appends the tokens to {@code result}.
     * A token starting with '$' is treated as a property reference and expanded
     * recursively.
     *
     * @param s the string to tokenize.
     * @param result the list the tokens are appended to.
     * @return the same {@code result} list, for chaining.
     */
    public List tokenize(String s, List result) {
        StringTokenizer stk = new StringTokenizer(s, ", ");
        while (stk.hasMoreTokens()) {
            String tk = stk.nextToken();
            if (tk.startsWith("$")) {
                // Indirect token: expand the referenced property (empty default).
                tokenize(getProperty(tk.substring(1), ""), result);
            } else {
                result.add(tk);
            }
        }
        return result;
    }

    /**
     * Convenience overload of {@link #tokenize(String, List)} returning an array.
     *
     * @param s the string to tokenize; may be null.
     * @return the tokens, or null when {@code s} is null.
     */
    public String[] tokenize(String s) {
        if (s == null) return null;
        List l = tokenize(s, new LinkedList());
        return (String[]) l.toArray(new String[l.size()]);
    }

    /**
     * Create a File from an URI.
     * <span style="font-style: italic;">file-uri</span>
     * <p>See the API-Doc of the URI class. For Windows-OS the absolute URI
     * "file:/c:/user/tom/foo.txt" describes the file
     * "C:\\user\\tom\\foo.txt". Relative URI's, e.g. without the "file:"
     * schema-prefix, are relativ to the user-directory, given by the system
     * property user.dir.
     * <p>For example: If the user.dir is "C:\\user\\tom\\"
     * and the relative URI is "/abc/foo.txt" the referenced file is
     * "C:\\user\\tom\\abc\\foo.txt". The abbreviations "." for the current
     * and ".." for the upper directory are valid to form a relative URI.
     *
     * @param uriString The string-description of an absolute or relative URI.
     * @return the file which is described by the uriString. Returns null, if
     * uriString is null or "". Returns null also, if a conversion error occures.
     */
    static public File uriToFile(String uriString) {
        URI baseURI;
        URI uri;

        if (uriString == null) {
            return null;
        }
        if (uriString.equals("")) {
            return null;
        }
        try {
            uri = new URI(uriString);
            // Remove redundant elements:
            // Commented out because a URI of the form "./a.b" (as opposed to "./a/b.c")
            // leads to an ArrayIndexOutOfBoundsException. Reason unclear. Internal error?
            // uri = uri.normalize();
            // Example of an absolute URI: file://de.iftm/abc/def/g.txt
            // Relative URIs do not have a "schema". Example: ./abc/def/g.txt
            if (!uri.isAbsolute()) {
                // Relative URIs are resolved against the user.dir directory.
                baseURI = (new File(System.getProperty("user.dir"))).toURI();
                uri = baseURI.resolve(uri);
            }
            return new File(uri);
        } catch (Exception e) {
            // Any syntax or conversion problem is mapped to null by contract.
            return null;
        }
    }

    /**
     * Returns a URL of a reference to a file. If the file reference is a valid
     * absolute URI, this URI is converted directly to a URL. If the file reference
     * is a relative URI this is resolved relative to a given base URL.
     * <p>Example: For a class de.iftm.dcm4che.servicesCDimseService the method call
     * fileRefToURL(CDimseService.class.getResource(""), "resources/certificates/test_sys_1.p12")
     * results to the URL "file:/D:/DcmServices/build/classes/de/iftm/dcm4che/services/resources/certificates/test_sys_1.p12"
     *
     * @param baseURL the base URL to which relative file references are resolved.
     * May be null, if the fileRef is a absolute reference.
     * @param fileRef the reference to file file. May be an absolute reference
     * (file:/C:/a/b/c.cfg) or relative reference (b/c.cfg).
     * @return the URL of a file reference. The String representation is of the form "file:/a/b/c.cfg".
     * @throws URISyntaxException if the fileRef is not formed as a URI.
     * @throws MalformedURLException if the fileRef is not a reference to a file or
     * baseURL is null for relative file references.
     */
    static public URL fileRefToURL(URL baseURL, String fileRef) throws URISyntaxException, MalformedURLException {
        URL resultURL = null;
        URI fileRefURI;
        URI baseURI;

        // log.info("baseURL: " + baseURL.toString());
        // log.info("fileRef: " + fileRef);

        fileRefURI = new URI(fileRef);
        if (fileRefURI.isAbsolute()) {
            // Absolute URI (file:/C:/a/b/c.cfg):
            resultURL = fileRefURI.toURL();
        } else {
            // Relativ URI (b/c.cfg):
            resultURL = new URL(baseURL, fileRef);
        }

        // log.info("result URL: " + resultURL.toString());

        return resultURL;
    }
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.vision.v1p3beta1.stub;

import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.vision.v1p3beta1.AsyncBatchAnnotateFilesRequest;
import com.google.cloud.vision.v1p3beta1.AsyncBatchAnnotateFilesResponse;
import com.google.cloud.vision.v1p3beta1.BatchAnnotateImagesRequest;
import com.google.cloud.vision.v1p3beta1.BatchAnnotateImagesResponse;
import com.google.cloud.vision.v1p3beta1.OperationMetadata;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.GrpcOperationsStub;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * gRPC stub implementation for the ImageAnnotator service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 *
 * <p>NOTE(review): this class is generated by gapic-generator-java; do not hand-edit
 * logic here — regenerate instead.
 */
@BetaApi
@Generated("by gapic-generator-java")
public class GrpcImageAnnotatorStub extends ImageAnnotatorStub {
  // Static gRPC method descriptors: shared, immutable definitions of the two
  // unary RPCs exposed by the ImageAnnotator service.
  private static final MethodDescriptor<BatchAnnotateImagesRequest, BatchAnnotateImagesResponse>
      batchAnnotateImagesMethodDescriptor =
          MethodDescriptor.<BatchAnnotateImagesRequest, BatchAnnotateImagesResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.vision.v1p3beta1.ImageAnnotator/BatchAnnotateImages")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(BatchAnnotateImagesRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(BatchAnnotateImagesResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<AsyncBatchAnnotateFilesRequest, Operation>
      asyncBatchAnnotateFilesMethodDescriptor =
          MethodDescriptor.<AsyncBatchAnnotateFilesRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.vision.v1p3beta1.ImageAnnotator/AsyncBatchAnnotateFiles")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(AsyncBatchAnnotateFilesRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .build();

  // Per-instance callables created from the descriptors above plus the client's settings.
  private final UnaryCallable<BatchAnnotateImagesRequest, BatchAnnotateImagesResponse>
      batchAnnotateImagesCallable;
  private final UnaryCallable<AsyncBatchAnnotateFilesRequest, Operation>
      asyncBatchAnnotateFilesCallable;
  private final OperationCallable<
          AsyncBatchAnnotateFilesRequest, AsyncBatchAnnotateFilesResponse, OperationMetadata>
      asyncBatchAnnotateFilesOperationCallable;

  // Aggregates all background resources so lifecycle methods (close/shutdown/...)
  // can delegate to a single object.
  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  public static final GrpcImageAnnotatorStub create(ImageAnnotatorStubSettings settings)
      throws IOException {
    return new GrpcImageAnnotatorStub(settings, ClientContext.create(settings));
  }

  public static final GrpcImageAnnotatorStub create(ClientContext clientContext)
      throws IOException {
    return new GrpcImageAnnotatorStub(
        ImageAnnotatorStubSettings.newBuilder().build(), clientContext);
  }

  public static final GrpcImageAnnotatorStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcImageAnnotatorStub(
        ImageAnnotatorStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcImageAnnotatorStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcImageAnnotatorStub(ImageAnnotatorStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcImageAnnotatorCallableFactory());
  }

  /**
   * Constructs an instance of GrpcImageAnnotatorStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcImageAnnotatorStub(
      ImageAnnotatorStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    // Initialization order matters: the factory is needed to build the operations
    // stub and every callable below.
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // Bind each static method descriptor into transport-level call settings.
    GrpcCallSettings<BatchAnnotateImagesRequest, BatchAnnotateImagesResponse>
        batchAnnotateImagesTransportSettings =
            GrpcCallSettings.<BatchAnnotateImagesRequest, BatchAnnotateImagesResponse>newBuilder()
                .setMethodDescriptor(batchAnnotateImagesMethodDescriptor)
                .build();
    GrpcCallSettings<AsyncBatchAnnotateFilesRequest, Operation>
        asyncBatchAnnotateFilesTransportSettings =
            GrpcCallSettings.<AsyncBatchAnnotateFilesRequest, Operation>newBuilder()
                .setMethodDescriptor(asyncBatchAnnotateFilesMethodDescriptor)
                .build();

    this.batchAnnotateImagesCallable =
        callableFactory.createUnaryCallable(
            batchAnnotateImagesTransportSettings,
            settings.batchAnnotateImagesSettings(),
            clientContext);
    this.asyncBatchAnnotateFilesCallable =
        callableFactory.createUnaryCallable(
            asyncBatchAnnotateFilesTransportSettings,
            settings.asyncBatchAnnotateFilesSettings(),
            clientContext);
    // Long-running-operation variant: polls via the operations stub.
    this.asyncBatchAnnotateFilesOperationCallable =
        callableFactory.createOperationCallable(
            asyncBatchAnnotateFilesTransportSettings,
            settings.asyncBatchAnnotateFilesOperationSettings(),
            clientContext,
            operationsStub);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  @Override
  public UnaryCallable<BatchAnnotateImagesRequest, BatchAnnotateImagesResponse>
      batchAnnotateImagesCallable() {
    return batchAnnotateImagesCallable;
  }

  @Override
  public UnaryCallable<AsyncBatchAnnotateFilesRequest, Operation>
      asyncBatchAnnotateFilesCallable() {
    return asyncBatchAnnotateFilesCallable;
  }

  @Override
  public OperationCallable<
          AsyncBatchAnnotateFilesRequest, AsyncBatchAnnotateFilesResponse, OperationMetadata>
      asyncBatchAnnotateFilesOperationCallable() {
    return asyncBatchAnnotateFilesOperationCallable;
  }

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Runtime failures propagate unchanged; checked ones are wrapped below.
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
/*
 * Copyright The Sett Ltd, 2005 to 2014.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thesett.aima.search.examples.eightpuzzle;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

import org.apache.log4j.NDC;

import com.thesett.aima.search.Operator;
import com.thesett.aima.search.TraversableStateTest;

/**
 * EightPuzzleStateTest is a pure unit test class for the {@link EightPuzzleState} class.
 *
 * <pre><p/><table id="crc"><caption>CRC Card</caption>
 * <tr><th> Responsibilities <th> Collaborations
 * <tr><td> Check that the empty tile is reported consistently by the getEmpty and getForTile methods.
 * <tr><td> Check that the applied operators correctly move the empty tile.
 * <tr><td> Check that the applied operators correctly move the non-empty tile.
 * <tr><td> Check that the goal state passes the goal test.
 * <tr><td> Check that non-goal test states do not pass the goal test.
 * </table></pre>
 *
 * @author Rupert Smith
 */
public class EightPuzzleStateTest extends TestCase
{
    /** */
    /* private static final Logger log = Logger.getLogger(EightPuzzleStateTest.class.getName()); */

    /** The states to test. */
    private static Collection<EightPuzzleState> testStates;

    /** Default constructor that will result in the tests being run on the {@link EightPuzzleState} class. */
    public EightPuzzleStateTest(String testName)
    {
        super(testName);

        // Create the test states to use
        generateTestStates();
    }

    /**
     * Generates a collection of test states to run the tests on. This consists of one state for each position of the
     * empty tile.
     *
     * @todo Could randomize the positions of the non-empty tiles too.
     */
    public static void generateTestStates()
    {
        testStates = new ArrayList<EightPuzzleState>(9);

        // Loop over all the tile positions generating a puzzle with the empty tile in each.
        for (int i = 0; i < 9; i++)
        {
            // Used to keep track of the current tile
            int tile = 1;

            // Used to build the puzzle string in
            char[] chars = new char[9];

            // Generate the characters up to i and place them in the string.
            // Character.forDigit replaces the deprecated new Integer(...).toString()
            // boxing idiom; tile is always in 1..8 so the digit character is identical.
            for (int j = 0; j < i; j++)
            {
                chars[j] = Character.forDigit(tile++, 10);
            }

            // Add in the empty tile in the string
            chars[i] = 'E';

            // Generate the characters after i and place them in the string.
            for (int k = i + 1; k < 9; k++)
            {
                chars[k] = Character.forDigit(tile++, 10);
            }

            String puzzleString = new String(chars);
            testStates.add(EightPuzzleState.getStateFromString(puzzleString));
        }
    }

    /** Compile all the tests for the default test implementation of a search node into a test suite. */
    public static Test suite()
    {
        // Build a new test suite
        TestSuite suite = new TestSuite("EightPuzzleState Tests");

        // Add all tests defined in the TraversableStateTest class for each puzzles with the empty square in all
        // possible board positions
        generateTestStates();

        for (EightPuzzleState nextState : testStates)
        {
            suite.addTest(new TraversableStateTest("testNoDuplicateOperators", nextState));
            suite.addTest(new TraversableStateTest("testNoDuplicateOperatorsInSuccessors", nextState));
            suite.addTest(new TraversableStateTest("testApplyOperatorWorksAllValidOperators", nextState));
            suite.addTest(new TraversableStateTest("testExactlyOneSuccessorStateForEachValidOperator", nextState));
            suite.addTest(new TraversableStateTest("testSuccessorStateCostsMatchAppliedOperators", nextState));
            suite.addTest(new TraversableStateTest("testSuccessorStatesMatchAppliedOperatorStates", nextState));
        }

        // Add all the tests defined in this class (using the default constructor)
        suite.addTestSuite(EightPuzzleStateTest.class);

        return suite;
    }

    /**
     * Check that the empty tile 'E' is reported to be in the location where it is consistently by the getEmpty
     * getForTile methods.
     */
    public void testGetEmptyAndGetForTileConsistent() throws Exception
    {
        String errorMessage = "";

        // Loop over the whole set of test puzzles
        for (EightPuzzleState nextState : testStates)
        {
            // Get the empty tile coordinates using the getEmpty methods.
            int emptyX = nextState.getEmptyX();
            int emptyY = nextState.getEmptyY();

            // Get the empty tile coordinates using the getForTile methods.
            int getForTileX = nextState.getXForTile('E');
            int getForTileY = nextState.getYForTile('E');

            // Check that they match and add to the error message if not.
            if ((emptyX != getForTileX) || (emptyY != getForTileY))
            {
                errorMessage +=
                    "The coordinates reported by the getEmpty and getForTile methods do not match.\n" +
                    "getForTileX = " + getForTileX + ", emptyX = " + emptyX + ", getForTileY = " + getForTileY +
                    ", emptyY = " + emptyY + " for puzzle board:\n" + nextState.prettyPrint();
            }

            // Get the tile at the reported empty tile location and check it really is the empty tile.
            if (nextState.getTileAt(emptyX, emptyY) != 'E')
            {
                errorMessage +=
                    "The repored empty tile location, X = " + emptyX + ", Y = " + emptyY +
                    ", does not return the empty tile using getTileAt.\n" + " for puzzle board:\n" +
                    nextState.prettyPrint();
            }
        }

        // Assert that there were no error messages and print them if there were
        assertTrue(errorMessage, "".equals(errorMessage));
    }

    /** Check that the applied operators correctly move the empty tile. */
    public void testOperatorsMoveEmptyTile() throws Exception
    {
        String errorMessage = "";

        // Loop over the whole set of test puzzles
        for (EightPuzzleState nextState : testStates)
        {
            // Get the current empty tile position.
            int emptyX = nextState.getEmptyX();
            int emptyY = nextState.getEmptyY();

            // Loop over all of the valid operators on the puzzle
            for (Iterator<Operator<String>> j = nextState.validOperators(false); j.hasNext();)
            {
                Operator operator = j.next();
                char op = ((String) operator.getOp()).charAt(0);

                // Work out where it should be after the operator is applied.
                int newX = emptyX;
                int newY = emptyY;

                switch (op)
                {
                case 'U':
                {
                    newY--;
                    break;
                }

                case 'D':
                {
                    newY++;
                    break;
                }

                case 'L':
                {
                    newX--;
                    break;
                }

                case 'R':
                {
                    newX++;
                    break;
                }
                }

                // Apply the operator to get a new state.
                EightPuzzleState newState = (EightPuzzleState) nextState.getChildStateForOperator(operator);

                // Check that empty tile has really moved to the correct location.
                if ((newState.getEmptyX() != newX) || (newState.getEmptyY() != newY))
                {
                    errorMessage +=
                        "After applying the operator, " + operator +
                        ", the empty tile does not move to its expected location, X = " + newX + ", Y = " + newY +
                        ", but moves instead to, X = " + newState.getEmptyX() + ", Y = " + newState.getEmptyY() +
                        " for puzzle board, X = " + emptyX + ", Y = " + emptyY + ":\n" + nextState.prettyPrint();
                }
            }
        }

        // Assert that there were no error messages and print them if there were
        assertTrue(errorMessage, "".equals(errorMessage));
    }

    /** Check that the applied operators correctly move the non-empty tile. */
    public void testOperatorsMoveNonEmptyTile() throws Exception
    {
        String errorMessage = "";

        // Loop over the whole set of test puzzles
        for (EightPuzzleState nextState : testStates)
        {
            // Get the current empty tile position.
            int emptyX = nextState.getEmptyX();
            int emptyY = nextState.getEmptyY();

            // Loop over all of the valid operators on the puzzle
            for (Iterator<Operator<String>> j = nextState.validOperators(false); j.hasNext();)
            {
                Operator operator = j.next();
                char op = ((String) operator.getOp()).charAt(0);

                // Work out where it should be after the operator is applied.
                int newX = emptyX;
                int newY = emptyY;

                switch (op)
                {
                case 'U':
                {
                    newY--;
                    break;
                }

                case 'D':
                {
                    newY++;
                    break;
                }

                case 'L':
                {
                    newX--;
                    break;
                }

                case 'R':
                {
                    newX++;
                    break;
                }
                }

                // Get the tile at that location.
                char t = nextState.getTileAt(newX, newY);

                // Apply the operators to get a new state.
                EightPuzzleState newState = (EightPuzzleState) nextState.getChildStateForOperator(operator);

                // Check that the displaced tile has been moved to the empty tiles old location.
                if (t != newState.getTileAt(emptyX, emptyY))
                {
                    errorMessage +=
                        "After applying the operator, " + operator +
                        ", the displaced tile does not move to its expected location, X = " + emptyX + ", Y = " +
                        emptyY + ", but this location contains the tile, " + newState.getTileAt(emptyX, emptyY) +
                        ", instead " + " for puzzle board:\n" + nextState.prettyPrint();
                }
            }
        }

        // Assert that there were no error messages and print them if there were
        assertTrue(errorMessage, "".equals(errorMessage));
    }

    /**
     * Check that the goal state passes the goal test and that the positions of all tiles matches the positions reported
     * for them by the getGoalForTile methods.
     */
    public void testGoalStateIsGoalState() throws Exception
    {
        String errorMessage = "";

        // Generate the goal state.
        EightPuzzleState goalState = EightPuzzleState.getGoalState();

        // Check that it really does pass the isGoal test.
        assertTrue("The goal state does not pass the isGoal test.", goalState.isGoal());

        // Check all tiles are really in their getGoalForTile positions.
        for (int j = 0; j < 3; j++)
        {
            for (int i = 0; i < 3; i++)
            {
                // Get the tile at the current location.
                char t = goalState.getTileAt(i, j);

                // Get the goal location for that tile.
                int goalX = goalState.getGoalXForTile(t);
                int goalY = goalState.getGoalYForTile(t);

                // Check it matches its current location.
                if ((i != goalX) || (j != goalY))
                {
                    errorMessage +=
                        "Tile at " + i + ", " + j + " reports goal location at " + goalX + ", " + goalY +
                        " for a board position that passes the isGoal test.";
                }
            }
        }

        // Assert that there were no error messages and print them if there were
        assertTrue(errorMessage, "".equals(errorMessage));
    }

    /** Check that non-goal test states do not pass the goal test. */
    public void testNonGoalStateIsNotGoalState() throws Exception
    {
        String errorMessage = "";

        // Generate the goal state.
        EightPuzzleState goalState = EightPuzzleState.getGoalState();

        // Take a copy of the test puzzles and add some random solvable puzzles to it.
        ArrayList<EightPuzzleState> expandedTestStates = new ArrayList<EightPuzzleState>(testStates);

        for (int j = 0; j < 50; j++)
        {
            expandedTestStates.add(EightPuzzleState.getRandomStartState());
        }

        // Loop over the whole expanded set of test puzzles
        for (EightPuzzleState nextState : expandedTestStates)
        {
            // Check if the state is not equal to the goal state.
            if (!nextState.equals(goalState))
            {
                // Check that the state does not pass the isGoal test.
                assertFalse("Non-goal state passed the isGoal test for puzzle board:\n" + nextState.prettyPrint(),
                    nextState.isGoal());
            }
        }

        // Assert that there were no error messages and print them if there were
        assertTrue(errorMessage, "".equals(errorMessage));
    }

    /** Checks that the hashCode method runs ok. */
    public void testHashCode() throws Exception
    {
        // Generate the goal state.
        EightPuzzleState goalState = EightPuzzleState.getGoalState();

        // Get its hash code
        goalState.hashCode();
    }

    /** Pushes the test name onto the log4j diagnostic context before each test. @throws Exception */
    protected void setUp() throws Exception
    {
        NDC.push(getName());
    }

    /** Pops the log4j diagnostic context after each test. @throws Exception */
    protected void tearDown() throws Exception
    {
        NDC.pop();
    }
}
/* * This file is generated by jOOQ. */ package io.cattle.platform.core.model.tables.records; import io.cattle.platform.core.model.VolumeStoragePoolMap; import io.cattle.platform.core.model.tables.VolumeStoragePoolMapTable; import io.cattle.platform.db.jooq.utils.TableRecordJaxb; import java.util.Date; import java.util.Map; import javax.annotation.Generated; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Table; import org.jooq.Field; import org.jooq.Record1; import org.jooq.Record12; import org.jooq.Row12; import org.jooq.impl.UpdatableRecordImpl; /** * This class is generated by jOOQ. */ @Generated( value = { "http://www.jooq.org", "jOOQ version:3.9.3" }, comments = "This class is generated by jOOQ" ) @SuppressWarnings({ "all", "unchecked", "rawtypes" }) @Entity @Table(name = "volume_storage_pool_map", schema = "cattle") public class VolumeStoragePoolMapRecord extends UpdatableRecordImpl<VolumeStoragePoolMapRecord> implements TableRecordJaxb, Record12<Long, String, String, String, String, String, Date, Date, Date, Map<String,Object>, Long, Long>, VolumeStoragePoolMap { private static final long serialVersionUID = 871783002; /** * Setter for <code>cattle.volume_storage_pool_map.id</code>. */ @Override public void setId(Long value) { set(0, value); } /** * Getter for <code>cattle.volume_storage_pool_map.id</code>. */ @Id @GeneratedValue(strategy = GenerationType.IDENTITY) @Column(name = "id", unique = true, nullable = false, precision = 19) @Override public Long getId() { return (Long) get(0); } /** * Setter for <code>cattle.volume_storage_pool_map.name</code>. */ @Override public void setName(String value) { set(1, value); } /** * Getter for <code>cattle.volume_storage_pool_map.name</code>. 
*/ @Column(name = "name", length = 255) @Override public String getName() { return (String) get(1); } /** * Setter for <code>cattle.volume_storage_pool_map.kind</code>. */ @Override public void setKind(String value) { set(2, value); } /** * Getter for <code>cattle.volume_storage_pool_map.kind</code>. */ @Column(name = "kind", nullable = false, length = 255) @Override public String getKind() { return (String) get(2); } /** * Setter for <code>cattle.volume_storage_pool_map.uuid</code>. */ @Override public void setUuid(String value) { set(3, value); } /** * Getter for <code>cattle.volume_storage_pool_map.uuid</code>. */ @Column(name = "uuid", unique = true, nullable = false, length = 128) @Override public String getUuid() { return (String) get(3); } /** * Setter for <code>cattle.volume_storage_pool_map.description</code>. */ @Override public void setDescription(String value) { set(4, value); } /** * Getter for <code>cattle.volume_storage_pool_map.description</code>. */ @Column(name = "description", length = 1024) @Override public String getDescription() { return (String) get(4); } /** * Setter for <code>cattle.volume_storage_pool_map.state</code>. */ @Override public void setState(String value) { set(5, value); } /** * Getter for <code>cattle.volume_storage_pool_map.state</code>. */ @Column(name = "state", nullable = false, length = 128) @Override public String getState() { return (String) get(5); } /** * Setter for <code>cattle.volume_storage_pool_map.created</code>. */ @Override public void setCreated(Date value) { set(6, value); } /** * Getter for <code>cattle.volume_storage_pool_map.created</code>. */ @Column(name = "created") @Override public Date getCreated() { return (Date) get(6); } /** * Setter for <code>cattle.volume_storage_pool_map.removed</code>. */ @Override public void setRemoved(Date value) { set(7, value); } /** * Getter for <code>cattle.volume_storage_pool_map.removed</code>. 
*/ @Column(name = "removed") @Override public Date getRemoved() { return (Date) get(7); } /** * Setter for <code>cattle.volume_storage_pool_map.remove_time</code>. */ @Override public void setRemoveTime(Date value) { set(8, value); } /** * Getter for <code>cattle.volume_storage_pool_map.remove_time</code>. */ @Column(name = "remove_time") @Override public Date getRemoveTime() { return (Date) get(8); } /** * Setter for <code>cattle.volume_storage_pool_map.data</code>. */ @Override public void setData(Map<String,Object> value) { set(9, value); } /** * Getter for <code>cattle.volume_storage_pool_map.data</code>. */ @Column(name = "data", length = 16777215) @Override public Map<String,Object> getData() { return (Map<String,Object>) get(9); } /** * Setter for <code>cattle.volume_storage_pool_map.volume_id</code>. */ @Override public void setVolumeId(Long value) { set(10, value); } /** * Getter for <code>cattle.volume_storage_pool_map.volume_id</code>. */ @Column(name = "volume_id", precision = 19) @Override public Long getVolumeId() { return (Long) get(10); } /** * Setter for <code>cattle.volume_storage_pool_map.storage_pool_id</code>. */ @Override public void setStoragePoolId(Long value) { set(11, value); } /** * Getter for <code>cattle.volume_storage_pool_map.storage_pool_id</code>. 
*/ @Column(name = "storage_pool_id", precision = 19) @Override public Long getStoragePoolId() { return (Long) get(11); } // ------------------------------------------------------------------------- // Primary key information // ------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public Record1<Long> key() { return (Record1) super.key(); } // ------------------------------------------------------------------------- // Record12 type implementation // ------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public Row12<Long, String, String, String, String, String, Date, Date, Date, Map<String,Object>, Long, Long> fieldsRow() { return (Row12) super.fieldsRow(); } /** * {@inheritDoc} */ @Override public Row12<Long, String, String, String, String, String, Date, Date, Date, Map<String,Object>, Long, Long> valuesRow() { return (Row12) super.valuesRow(); } /** * {@inheritDoc} */ @Override public Field<Long> field1() { return VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.ID; } /** * {@inheritDoc} */ @Override public Field<String> field2() { return VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.NAME; } /** * {@inheritDoc} */ @Override public Field<String> field3() { return VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.KIND; } /** * {@inheritDoc} */ @Override public Field<String> field4() { return VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.UUID; } /** * {@inheritDoc} */ @Override public Field<String> field5() { return VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.DESCRIPTION; } /** * {@inheritDoc} */ @Override public Field<String> field6() { return VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.STATE; } /** * {@inheritDoc} */ @Override public Field<Date> field7() { return VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.CREATED; } /** * {@inheritDoc} */ @Override public Field<Date> field8() { return 
VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.REMOVED; } /** * {@inheritDoc} */ @Override public Field<Date> field9() { return VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.REMOVE_TIME; } /** * {@inheritDoc} */ @Override public Field<Map<String,Object>> field10() { return VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.DATA; } /** * {@inheritDoc} */ @Override public Field<Long> field11() { return VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.VOLUME_ID; } /** * {@inheritDoc} */ @Override public Field<Long> field12() { return VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP.STORAGE_POOL_ID; } /** * {@inheritDoc} */ @Override public Long value1() { return getId(); } /** * {@inheritDoc} */ @Override public String value2() { return getName(); } /** * {@inheritDoc} */ @Override public String value3() { return getKind(); } /** * {@inheritDoc} */ @Override public String value4() { return getUuid(); } /** * {@inheritDoc} */ @Override public String value5() { return getDescription(); } /** * {@inheritDoc} */ @Override public String value6() { return getState(); } /** * {@inheritDoc} */ @Override public Date value7() { return getCreated(); } /** * {@inheritDoc} */ @Override public Date value8() { return getRemoved(); } /** * {@inheritDoc} */ @Override public Date value9() { return getRemoveTime(); } /** * {@inheritDoc} */ @Override public Map<String,Object> value10() { return getData(); } /** * {@inheritDoc} */ @Override public Long value11() { return getVolumeId(); } /** * {@inheritDoc} */ @Override public Long value12() { return getStoragePoolId(); } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord value1(Long value) { setId(value); return this; } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord value2(String value) { setName(value); return this; } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord value3(String value) { setKind(value); return this; } /** * {@inheritDoc} */ @Override public 
VolumeStoragePoolMapRecord value4(String value) { setUuid(value); return this; } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord value5(String value) { setDescription(value); return this; } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord value6(String value) { setState(value); return this; } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord value7(Date value) { setCreated(value); return this; } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord value8(Date value) { setRemoved(value); return this; } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord value9(Date value) { setRemoveTime(value); return this; } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord value10(Map<String,Object> value) { setData(value); return this; } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord value11(Long value) { setVolumeId(value); return this; } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord value12(Long value) { setStoragePoolId(value); return this; } /** * {@inheritDoc} */ @Override public VolumeStoragePoolMapRecord values(Long value1, String value2, String value3, String value4, String value5, String value6, Date value7, Date value8, Date value9, Map<String,Object> value10, Long value11, Long value12) { value1(value1); value2(value2); value3(value3); value4(value4); value5(value5); value6(value6); value7(value7); value8(value8); value9(value9); value10(value10); value11(value11); value12(value12); return this; } // ------------------------------------------------------------------------- // FROM and INTO // ------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public void from(VolumeStoragePoolMap from) { setId(from.getId()); setName(from.getName()); setKind(from.getKind()); setUuid(from.getUuid()); setDescription(from.getDescription()); setState(from.getState()); 
setCreated(from.getCreated()); setRemoved(from.getRemoved()); setRemoveTime(from.getRemoveTime()); setData(from.getData()); setVolumeId(from.getVolumeId()); setStoragePoolId(from.getStoragePoolId()); } /** * {@inheritDoc} */ @Override public <E extends VolumeStoragePoolMap> E into(E into) { into.from(this); return into; } // ------------------------------------------------------------------------- // Constructors // ------------------------------------------------------------------------- /** * Create a detached VolumeStoragePoolMapRecord */ public VolumeStoragePoolMapRecord() { super(VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP); } /** * Create a detached, initialised VolumeStoragePoolMapRecord */ public VolumeStoragePoolMapRecord(Long id, String name, String kind, String uuid, String description, String state, Date created, Date removed, Date removeTime, Map<String,Object> data, Long volumeId, Long storagePoolId) { super(VolumeStoragePoolMapTable.VOLUME_STORAGE_POOL_MAP); set(0, id); set(1, name); set(2, kind); set(3, uuid); set(4, description); set(5, state); set(6, created); set(7, removed); set(8, removeTime); set(9, data); set(10, volumeId); set(11, storagePoolId); } }
/*
 * Copyright 2010-2013 Ning, Inc.
 * Copyright 2014-2018 Groupon, Inc
 * Copyright 2014-2018 The Billing Project, LLC
 *
 * The Billing Project licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at:
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.killbill.billing.beatrix.integration;

import java.math.BigDecimal;
import java.util.Collection;

import org.joda.time.DateTime;
import org.killbill.billing.ObjectType;
import org.killbill.billing.account.api.Account;
import org.killbill.billing.api.TestApiListener.NextEvent;
import org.killbill.billing.catalog.api.BillingPeriod;
import org.killbill.billing.catalog.api.ProductCategory;
import org.killbill.billing.entitlement.api.DefaultEntitlement;
import org.killbill.billing.invoice.api.Invoice;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;

/**
 * Integration tests for the AUTO_PAY_OFF control tag: invoices generated while the
 * tag is set must stay unpaid, and removing the tag must trigger (or retry) payment.
 */
public class TestIntegrationWithAutoPayOff extends TestIntegrationBase {

    private Account account;
    private String productName;
    private BillingPeriod term;

    @Override
    @BeforeMethod(groups = "slow")
    public void beforeMethod() throws Exception {
        if (hasFailed()) {
            return;
        }
        super.beforeMethod();
        account = createAccountWithNonOsgiPaymentMethod(getAccountData(25));
        assertNotNull(account);
        productName = "Shotgun";
        term = BillingPeriod.MONTHLY;
    }

    /**
     * With AUTO_PAY_OFF set, the post-trial invoice remains fully unpaid; removing the
     * tag triggers the payment and brings the invoice balance to zero.
     */
    @Test(groups = "slow")
    public void testAutoPayOff() throws Exception {
        clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));

        add_AUTO_PAY_OFF_Tag(account.getId(), ObjectType.ACCOUNT);

        final DefaultEntitlement bpEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term,
                                                                                            NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
        assertNotNull(bpEntitlement);

        Collection<Invoice> invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        assertEquals(invoices.size(), 1);

        // Move past the trial: a PHASE change and a new (non-zero) invoice, but no payment
        // because of AUTO_PAY_OFF.
        busHandler.pushExpectedEvents(NextEvent.PHASE, NextEvent.INVOICE);
        clock.addDays(40); // After trial
        assertListenerStatus();

        invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        assertEquals(invoices.size(), 2);
        for (Invoice cur : invoices) {
            if (cur.getChargedAmount().compareTo(BigDecimal.ZERO) == 0) {
                continue;
            }
            // Nothing was paid: the full charged amount is still outstanding.
            assertEquals(cur.getBalance(), cur.getChargedAmount());
        }

        remove_AUTO_PAY_OFF_Tag(account.getId(), ObjectType.ACCOUNT, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
        addDelayBecauseOfLackOfCorrectSynchro();

        invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        assertEquals(invoices.size(), 2);
        for (Invoice cur : invoices) {
            if (cur.getChargedAmount().compareTo(BigDecimal.ZERO) == 0) {
                continue;
            }
            // Removing the tag triggered the payment: balance is zero, paid == charged.
            assertTrue(cur.getBalance().compareTo(BigDecimal.ZERO) == 0);
            assertTrue(cur.getPaidAmount().compareTo(cur.getChargedAmount()) == 0);
        }
        assertListenerStatus();
    }

    /**
     * Same as {@link #testAutoPayOff()}, but the first payment attempt (after tag removal)
     * fails; the automatic retry after the configured number of days must succeed.
     */
    @Test(groups = "slow")
    public void testAutoPayOffWithPaymentFailure() throws Exception {
        clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));

        add_AUTO_PAY_OFF_Tag(account.getId(), ObjectType.ACCOUNT);

        final DefaultEntitlement bpEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term,
                                                                                            NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
        assertNotNull(bpEntitlement);

        Collection<Invoice> invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        assertEquals(invoices.size(), 1);

        busHandler.pushExpectedEvents(NextEvent.PHASE, NextEvent.INVOICE);
        clock.addDays(40); // After trial
        assertListenerStatus();

        invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        assertEquals(invoices.size(), 2);
        for (Invoice cur : invoices) {
            if (cur.getChargedAmount().compareTo(BigDecimal.ZERO) == 0) {
                continue;
            }
            assertEquals(cur.getBalance(), cur.getChargedAmount());
        }

        // The payment triggered by removing the tag will fail at the plugin level.
        paymentPlugin.makeNextPaymentFailWithError();
        remove_AUTO_PAY_OFF_Tag(account.getId(), ObjectType.ACCOUNT, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
        addDelayBecauseOfLackOfCorrectSynchro();

        invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        assertEquals(invoices.size(), 2);
        for (Invoice cur : invoices) {
            if (cur.getChargedAmount().compareTo(BigDecimal.ZERO) == 0) {
                continue;
            }
            // Payment failed: the invoice is still fully outstanding.
            assertEquals(cur.getBalance(), cur.getChargedAmount());
        }
        assertListenerStatus();

        int nbDaysBeforeRetry = paymentConfig.getPaymentFailureRetryDays(internalCallContext).get(0);

        // MOVE TIME FOR RETRY TO HAPPEN
        busHandler.pushExpectedEvents(NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
        clock.addDays(nbDaysBeforeRetry + 1);
        assertListenerStatus();

        invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        for (Invoice cur : invoices) {
            if (cur.getChargedAmount().compareTo(BigDecimal.ZERO) == 0) {
                continue;
            }
            assertTrue(cur.getBalance().compareTo(BigDecimal.ZERO) == 0);
            assertTrue(cur.getPaidAmount().compareTo(cur.getChargedAmount()) == 0);
        }
        assertListenerStatus();
    }

    /**
     * Payment fails, then AUTO_PAY_OFF is re-set before the retry window: the scheduled
     * retry must be discarded while the tag is on, and removing the tag again must
     * finally trigger the successful payment.
     */
    @Test(groups = "slow")
    public void testAutoPayOffWithPaymentFailureOn_AUTO_PAY_OFF() throws Exception {
        clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));

        add_AUTO_PAY_OFF_Tag(account.getId(), ObjectType.ACCOUNT);

        final DefaultEntitlement bpEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term,
                                                                                            NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
        assertNotNull(bpEntitlement);

        Collection<Invoice> invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        assertEquals(invoices.size(), 1);

        // CREATE FIRST NON NULL INVOICE + FIRST PAYMENT/ATTEMPT -> AUTO_PAY_OFF
        busHandler.pushExpectedEvents(NextEvent.PHASE, NextEvent.INVOICE);
        clock.addDays(31); // After trial
        assertListenerStatus();

        invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        assertEquals(invoices.size(), 2);
        for (Invoice cur : invoices) {
            if (cur.getChargedAmount().compareTo(BigDecimal.ZERO) == 0) {
                continue;
            }
            assertEquals(cur.getBalance(), cur.getChargedAmount());
        }

        // NOW SET PLUGIN TO THROW FAILURES
        paymentPlugin.makeNextPaymentFailWithError();
        remove_AUTO_PAY_OFF_Tag(account.getId(), ObjectType.ACCOUNT, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
        addDelayBecauseOfLackOfCorrectSynchro();

        invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        assertEquals(invoices.size(), 2);
        for (Invoice cur : invoices) {
            if (cur.getChargedAmount().compareTo(BigDecimal.ZERO) == 0) {
                continue;
            }
            assertEquals(cur.getBalance(), cur.getChargedAmount());
        }
        assertListenerStatus();

        // RE-ADD AUTO_PAY_OFF to ON
        int nbDaysBeforeRetry = paymentConfig.getPaymentFailureRetryDays(internalCallContext).get(0);
        add_AUTO_PAY_OFF_Tag(account.getId(), ObjectType.ACCOUNT);

        // MOVE TIME FOR RETRY TO HAPPEN -> WILL BE DISCARDED SINCE AUTO_PAY_OFF IS SET
        clock.addDays(nbDaysBeforeRetry + 1);
        assertListenerStatus();

        invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        assertEquals(invoices.size(), 2);
        for (Invoice cur : invoices) {
            if (cur.getChargedAmount().compareTo(BigDecimal.ZERO) == 0) {
                continue;
            }
            assertEquals(cur.getBalance(), cur.getChargedAmount());
        }

        // We want to give some time for the retry to fail before we start clearing the state.
        addDelayBecauseOfLackOfCorrectSynchro();

        // REMOVE AUTO_PAY_OFF -> WILL SCHEDULE A PAYMENT_RETRY
        paymentPlugin.clear();
        remove_AUTO_PAY_OFF_Tag(account.getId(), ObjectType.ACCOUNT, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
        addDelayBecauseOfLackOfCorrectSynchro();

        invoices = invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext);
        for (Invoice cur : invoices) {
            if (cur.getChargedAmount().compareTo(BigDecimal.ZERO) == 0) {
                continue;
            }
            assertTrue(cur.getBalance().compareTo(BigDecimal.ZERO) == 0);
            assertTrue(cur.getPaidAmount().compareTo(cur.getChargedAmount()) == 0);
        }
        assertListenerStatus();
    }

    /**
     * Sleeps briefly to let the payment listener finish processing bus events before the
     * clock is moved again.
     */
    private void addDelayBecauseOfLackOfCorrectSynchro() {
        // TODO When removing the tag, the payment system will schedule retries for payments that are in non terminal state
        // The issue is that at this point we know the event went on the bus but we don't know if the listener in payment completed
        // so we add some delay to ensure that it had time to complete. Failure to do so introduces some flakiness in the test because the clock
        // is moved right after that, and so payment may see the new value.
        try {
            Thread.sleep(2000);
        } catch (InterruptedException ignored) {
            // Restore the interrupt status so callers up the stack can observe it.
            Thread.currentThread().interrupt();
        }
    }
}
package brooklyn.rest.apidoc;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;

import com.google.common.collect.ImmutableList;
import com.sun.jersey.api.core.ResourceConfig;
import com.wordnik.swagger.core.Api;
import com.wordnik.swagger.core.ApiOperation;
import com.wordnik.swagger.core.Documentation;
import com.wordnik.swagger.core.DocumentationEndPoint;
import com.wordnik.swagger.jaxrs.ConfigReader;
import com.wordnik.swagger.jaxrs.HelpApi;
import com.wordnik.swagger.jaxrs.JaxrsApiReader;
import com.wordnik.swagger.jaxrs.JaxrsApiSpecParser;

@Produces({"application/json"})
/** Create a concrete subclass for this, annotated with the Path where
 * this resource should live.
 * <p>
 * Like Swagger ApiListing (and based on that) but:
 * supports singletons as well as classes;
 * supports simpler Apidoc annotation (doesn't repeat path, in common case);
 * doesn't support listingPath/Class that swagger does (but describes it under /apidoc/name.of.Class);
 * does not support auth filters. */
abstract public class ApidocResource {

    // Lazily created in getConfigReader(); shared across all subclass instances.
    static ConfigReader configReader;

    static {
        // Disable swagger's ".{format}" suffix in generated paths.
        JaxrsApiReader.setFormatString("");
    }

    /** Returns true for the media types this listing documents (JSON and XML only). */
    protected boolean isSupportedMediaType(String type) {
        if ("application/json".equals(type)) return true;
        if ("application/xml".equals(type)) return true;
        return false;
    }

    /** Whether the given resource class should appear in the API listing at all. */
    protected boolean isIncludedForDocumentation(Class<?> resource) {
        // TODO currently only support @Produces, not Content-type header, or Accept header (which original ApiListing does support)
        Produces produces = getAnnotation(resource, Produces.class);
        if (produces == null) return false;
        for (String type: produces.value())
            if (isSupportedMediaType(type)) return true;
        return false;
    }

    /**
     * Annotation lookup with a specific precedence: the class itself, then the
     * direct superclass, then directly-implemented interfaces, then (recursively)
     * the superclass hierarchy. In short, things lower down are preferred.
     */
    protected <A extends Annotation> A getAnnotation(Class<?> r, Class<A> annotationClass) {
        A result = r.getAnnotation(annotationClass);
        if (result == null) {
            // first look at things directly on superclass (not inherited)
            if (r.getSuperclass()!=null)
                result = r.getSuperclass().getAnnotation(annotationClass);
        }
        if (result == null) {
            // then look at interfaces here (not inherited)
            // we look at superclasses next so don't have to here
            for(Class<?> parentInterface : r.getInterfaces()) {
                result = parentInterface.getAnnotation(annotationClass);
                if (result != null) break;
            }
        }
        if (result == null) {
            // lastly take annotations on superclass and interfaces on superclass, recursively
            // (so in short we prefer things lower down)
            if (r.getSuperclass()!=null)
                result = getAnnotation(r.getSuperclass(), annotationClass);
        }
        return result;
    }

    /**
     * Builds the apidoc link for a resource, rooted at this subclass's own @Path.
     * NOTE(review): the {@code path} parameter is unused here — only the resource
    * class determines the link; confirm whether that is intentional.
     */
    protected String getLinkFor(String path, Class<?> resource) {
        return getClass().getAnnotation(Path.class).value()+"/"+getLinkWordFor(resource);
    }

    /** Stable identifier for a resource in links: its canonical name, or a hash for anonymous/local classes. */
    protected String getLinkWordFor(Class<?> resource) {
        if (resource.getCanonicalName()!=null) return resource.getCanonicalName();
        else return Integer.toHexString(resource.hashCode());
    }

    /** Inverse of {@link #getLinkWordFor(Class)}: resolves a link word back to the resource class, or null. */
    protected Class<?> getResourceOfLink(ResourceConfig rc, String link) {
        for (Class<?> r: getResourceClasses(rc)) {
            if (getLinkWordFor(r).equals(link)) return r;
        }
        return null;
    }

    /** Lists every documented API resource endpoint known to the Jersey ResourceConfig. */
    @GET
    @ApiOperation(value = "Returns list of all available API resource endpoints",
        responseClass = "DocumentationEndPoint", multiValueResponse = true)
    public Response getAllApis(
            @Context ResourceConfig rc,
            @Context HttpHeaders headers,
            @Context UriInfo uriInfo) {
        String apiVersion = getConfigReader().getApiVersion();
        String swaggerVersion = getConfigReader().getSwaggerVersion();
        String basePath = getConfigReader().getBasePath();

        Set<Class<?>> resources = getResourceClasses(rc);
        Documentation allApiDoc = new Documentation();

        List<ApidocEndpoint> endpoints = new ArrayList<ApidocEndpoint>();

        for (Class<?> resource : resources) {
            if (!isIncludedForDocumentation(resource)) continue;
            // Accept either our own @Apidoc or swagger's @Api.
            Apidoc apidoc = getAnnotation(resource, Apidoc.class);
            Api apidocX = getAnnotation(resource, Api.class);
            Path rsPath = getAnnotation(resource, Path.class);
            if (apidoc==null && apidocX == null) continue;
            // NOTE(review): rsPath is dereferenced without a null check here, unlike
            // in details(); resources are filtered on @Path in getResourceClasses so
            // this is probably safe, but confirm.
            String path = rsPath.value();
            String name = null;
            String description;
            if (apidoc!=null) {
                name = apidoc.value();
                description = apidoc.description();
            } else {
                // NOTE(review): for @Api-only resources, 'path' is overwritten with the
                // @Api value and 'name' stays null — verify ApidocEndpoint tolerates that.
                path = apidocX.value();
                description = apidocX.description();
            }
            endpoints.add(new ApidocEndpoint(name, path, description, getLinkFor(path, resource)));
        }

        Collections.sort(endpoints, ApidocEndpoint.COMPARATOR);
        // De-duplicate by path before adding to the Documentation.
        for (ApidocEndpoint api: endpoints) {
            if (!isApiAdded(allApiDoc, api)) {
                allApiDoc.addApi(api);
            }
        }

        allApiDoc.setSwaggerVersion(swaggerVersion);
        allApiDoc.setBasePath(basePath);
        allApiDoc.setApiVersion(apiVersion);

        return Response.ok().entity(allApiDoc).build();
    }

    /** Collects root resource classes and singleton resources from Jersey, keeping only @Path-annotated ones. */
    protected Set<Class<?>> getResourceClasses(ResourceConfig rc) {
        Set<Class<?>> resourceClasses = rc.getRootResourceClasses();
        Set<Object> resourceObjects = rc.getRootResourceSingletons();
        Set<Class<?>> resources = new LinkedHashSet<Class<?>>();
        // @Path should always be set on a root resource, right? unless something is odd;
        // filter on it defensively either way.
        for (Class<?> r: resourceClasses)
            if (r.getAnnotation(Path.class)!=null)
                resources.add(r);
        for (Object r: resourceObjects) {
            if (getAnnotation(r.getClass(), Path.class)!=null) {
                resources.add(r.getClass());
            }
        }
        return resources;
    }

    /** True if an endpoint with the same path is already present in the Documentation. */
    private boolean isApiAdded(Documentation allApiDoc, DocumentationEndPoint endpoint) {
        boolean isAdded = false;
        if (allApiDoc.getApis() != null) {
            for (DocumentationEndPoint addedApi : allApiDoc.getApis()) {
                if (endpoint.getPath().equals(addedApi.getPath())) isAdded = true;
            }
        }
        return isAdded;
    }

    /** Detail view for a single resource endpoint, looked up by its link word; 404 when unknown. */
    @GET
    @Path("/{resource}")
    @ApiOperation(value = "Returns detail on the given API resource endpoint",
        responseClass = "DocumentationEndPoint", multiValueResponse = true)
    public Response details(
            @Context ResourceConfig rc,
            @Context HttpHeaders headers,
            @Context UriInfo uriInfo,
            @PathParam("resource") String resource) {
        Class<?> target = getResourceOfLink(rc, resource);
        if (target==null) return Response.status(Response.Status.NOT_FOUND).build();

        // roughly duplicates JavaHelp
        String apiVersion = getConfigReader().getApiVersion();
        String swaggerVersion = getConfigReader().getSwaggerVersion();
        String basePath = getConfigReader().getBasePath();
        String apiFilterClassName = getConfigReader().getApiFilterClassName();

        Apidoc apidoc = getAnnotation(target, Apidoc.class);
        Api apidocX = getAnnotation(target, Api.class);
        Path rsPath = getAnnotation(target, Path.class);
        if ((apidoc==null && apidocX==null) || rsPath==null)
            return Response.status(Response.Status.NOT_FOUND).build();
        String apiPath = apidoc!=null ? rsPath.value() : apidocX.value();

        HelpApi helpApi = new HelpApi(apiFilterClassName);
        Documentation doc = read(target, apiVersion, swaggerVersion, basePath, apiPath);
        Documentation docs = helpApi.filterDocs(doc, headers, uriInfo, apiPath, apiPath);
        return Response.ok().entity(docs).build();
    }

    // items below here simply override the swagger Jaxrs* classes/behaviour so we can use @Path/@Apidoc instead of @Api

    /** Lazily creates the shared swagger ConfigReader (no servlet config). */
    protected ConfigReader getConfigReader() {
        if (configReader==null) configReader = new ConfigReader(null);
        return configReader;
    }

    // Cache of parsed Documentation per resource class.
    // NOTE(review): static and unsynchronized — concurrent first requests may race;
    // confirm whether this is acceptable for the deployment model.
    static protected Map<Class<?>,Documentation> endpointsCache = new LinkedHashMap<Class<?>, Documentation>();

    /** Parses (and caches) the swagger Documentation for the given resource class. */
    protected Documentation read(Class<?> target, String apiVersion, String swaggerVersion, String basePath, String apiPath) {
        Documentation result = endpointsCache.get(target);
        if (result!=null) return result;
        JaxrsApiSpecParser parser = new ApidocJaxrsSpecParser(target, apiVersion, swaggerVersion, basePath, apiPath);
        result = parser.parse();
        endpointsCache.put(target, result);
        return result;
    }

    /**
     * Spec parser that pretends every class carries an @Api annotation, so swagger's
     * machinery works for @Path/@Apidoc-annotated resources too. The class-level
     * {@code @Api("ignored")} exists only to be handed back from apiEndpoint().
     */
    @Api("ignored")
    static class ApidocJaxrsSpecParser extends JaxrsApiSpecParser {
        public ApidocJaxrsSpecParser(Class<?> target, String apiVersion, String swaggerVersion, String basePath, String apiPath) {
            super(target, apiVersion, swaggerVersion, basePath, apiPath);
        }

        @Override
        public Api apiEndpoint() {
            // return an ignored item; all clients do is check it isn't null
            return ApidocJaxrsSpecParser.class.getAnnotation(Api.class);
        }

        @Override
        public Class<?> hostClass() {
            // Overriding to make sure we have a look at the interfaces (Jersey jaxrs implementation doesn't bother)
            // Note this means we require the @Path class annotation on the same class as all the method annotations
            for (Class<?> tryMe : ImmutableList.<Class<?>>builder().add(super.hostClass()).add(super.hostClass().getInterfaces()).build()) {
                if (tryMe.getAnnotation(Path.class) != null) {
                    return tryMe;
                }
            }
            return super.hostClass();
        }

        /** Joins the host class's @Path with the method's @Path (if any); null when the class has none. */
        public String getPath(Method method) {
            Path cwsPath = hostClass().getAnnotation(Path.class);
            Path mwsPath = method.getAnnotation(Path.class);
            if (cwsPath==null) return null;
            return cwsPath.value() + (mwsPath!=null ? mwsPath.value() : "");
        }
    }
}
/* * Copyright (c) 2009 University of Durham, England All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * Redistributions in binary * form must reproduce the above copyright notice, this list of conditions and * the following disclaimer in the documentation and/or other materials provided * with the distribution. * Neither the name of 'SynergyNet' nor the names of * its contributors may be used to endorse or promote products derived from this * software without specific prior written permission. THIS SOFTWARE IS PROVIDED * BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

package apps.control.controlmenu;

import java.awt.Color;
import java.util.List;

import synergynetframework.appsystem.contentsystem.ContentSystem;
import synergynetframework.appsystem.contentsystem.items.BackgroundController;
import synergynetframework.appsystem.contentsystem.items.ContentItem;
import synergynetframework.appsystem.contentsystem.items.ListContainer;
import synergynetframework.appsystem.contentsystem.items.SimpleButton;
import synergynetframework.appsystem.contentsystem.items.listener.ItemEventAdapter;
import synergynetframework.appsystem.contentsystem.items.listener.ListEventAdapter;
import synergynetframework.appsystem.contentsystem.items.listener.SimpleButtonAdapter;
import synergynetframework.appsystem.services.net.localpresence.TableIdentity;
import synergynetframework.appsystem.services.net.networkedcontentmanager.NetworkedContentManager;
import synergynetframework.appsystem.services.net.networkedcontentmanager.utils.TableSwapDialogue;
import apps.mysteries.SubAppMenu;
import core.SynergyNetDesktop;

/**
 * Teacher-side control menu for the networked table applications.
 * <p>
 * Builds a {@link ListContainer} of buttons that drive the
 * {@link NetworkedContentManager}: locking/unlocking student tables,
 * enabling/disabling the student menu, clearing/broadcasting desktops,
 * synchronisation and collaboration toggles, table swapping, directed
 * send/request of data, network flick, remote desktop viewing and
 * projector discovery. When a {@link BackgroundController} is supplied,
 * a long press on the background toggles the menu's visibility at the
 * touch point.
 */
public class ControlMenu {

	/** Background controller used to toggle menu visibility on long press; may be null. */
	protected BackgroundController backgroundController;

	/** Content system through which every menu item is created. */
	protected ContentSystem contentSystem;

	/** The top-level menu container. */
	protected ListContainer controlMenu;

	/** Manager that performs the networked actions triggered by the menu buttons. */
	protected NetworkedContentManager networkedContentManager;

	/** Optional application sub-menu shown under "Set Tables"; may be null. */
	protected SubAppMenu subAppMenu;

	/**
	 * Instantiates the control menu and immediately builds its contents.
	 *
	 * @param contentSystem
	 *            the content system
	 * @param networkedContentManager
	 *            the networked content manager
	 * @param subAppMenu
	 *            the sub app menu, or null for none
	 * @param backgroundController
	 *            the background controller, or null for none
	 */
	public ControlMenu(ContentSystem contentSystem,
			NetworkedContentManager networkedContentManager,
			SubAppMenu subAppMenu, BackgroundController backgroundController) {
		this.contentSystem = contentSystem;
		this.subAppMenu = subAppMenu;
		this.backgroundController = backgroundController;
		this.networkedContentManager = networkedContentManager;
		LoadControlMenu();
	}

	/**
	 * Creates a standard fixed-size, light-gray menu button with the given
	 * label. Extracted helper: the original code repeated the same four-call
	 * setup sequence for every button in the menu.
	 *
	 * @param text
	 *            the button label
	 * @return the configured button
	 */
	private SimpleButton createMenuButton(String text) {
		SimpleButton button = (SimpleButton) contentSystem
				.createContentItem(SimpleButton.class);
		button.setAutoFitSize(false);
		button.setText(text);
		button.setBackgroundColour(Color.lightGray);
		return button;
	}

	/**
	 * Load control menu.
	 *
	 * @return the list container
	 */
	private ListContainer LoadControlMenu() {
		controlMenu = (ListContainer) contentSystem
				.createContentItem(ListContainer.class);
		controlMenu.setBackgroundColour(Color.BLUE);
		controlMenu.setWidth(200);
		controlMenu.setItemHeight(30);

		// Long press on the background shows/hides the menu at the touch point.
		if (backgroundController != null) {
			backgroundController.addItemListener(new ItemEventAdapter() {
				public void cursorLongHeld(ContentItem b, long id, float x,
						float y, float pressure) {
					if (controlMenu.isVisible()) {
						controlMenu.setVisible(false);
					} else {
						controlMenu.setVisible(true);
						// Screen y grows downwards; content y grows upwards.
						controlMenu.setLocalLocation(x,
								contentSystem.getScreenHeight() - y);
					}
				}
			});
		}

		final TableSwapDialogue tableSwapDialogue = new TableSwapDialogue(
				contentSystem, this.networkedContentManager);
		tableSwapDialogue.setVisible(false);

		// Toggle: lock/unlock the student tables. The label always names the
		// action the NEXT click will perform.
		final SimpleButton blockButton = createMenuButton("Block Student Table");
		blockButton.addButtonListener(new SimpleButtonAdapter() {
			public void buttonClicked(SimpleButton b, long id, float x,
					float y, float pressure) {
				if (networkedContentManager.isRemoteLocked()) {
					blockButton.setText("Block Student Table");
					networkedContentManager.setRemoteLocked(false);
				} else {
					blockButton.setText("Unblock Student Table");
					networkedContentManager.setRemoteLocked(true);
				}
			}
		});

		// Toggle: enable/disable the menu on the student tables.
		final SimpleButton disableMenuButton = createMenuButton("Disable Students Menu");
		disableMenuButton.addButtonListener(new SimpleButtonAdapter() {
			public void buttonClicked(SimpleButton b, long id, float x,
					float y, float pressure) {
				if (networkedContentManager.isRemoteMenuOn()) {
					disableMenuButton.setText("Enable Students Menu");
					networkedContentManager.setRemoteMenuOn(false);
				} else {
					disableMenuButton.setText("Disable Students Menu");
					networkedContentManager.setRemoteMenuOn(true);
				}
			}
		});

		// One-shot: wipe the student tables' desktops.
		final SimpleButton clearTableButton = createMenuButton("Clear Student Table");
		clearTableButton.addButtonListener(new SimpleButtonAdapter() {
			public void buttonClicked(SimpleButton b, long id, float x,
					float y, float pressure) {
				networkedContentManager.clearRemoteDeskTop();
			}
		});

		// One-shot: push the current desktop to all tables.
		SimpleButton broadcastButton = createMenuButton("Broadcast Data");
		broadcastButton.addButtonListener(new SimpleButtonAdapter() {
			public void buttonClicked(SimpleButton b, long id, float x,
					float y, float pressure) {
				networkedContentManager.broadcastCurrentDeskTop();
			}
		});

		// Toggle: one-way synchronisation.
		final SimpleButton synchroniseButton = createMenuButton("Synchronise");
		synchroniseButton.addButtonListener(new SimpleButtonAdapter() {
			public void buttonClicked(SimpleButton b, long id, float x,
					float y, float pressure) {
				if (networkedContentManager.isSynchronisationOn()) {
					synchroniseButton.setText("Synchronise");
					networkedContentManager.setSynchronisationOn(false);
				} else {
					synchroniseButton.setText("Stop Synchronise");
					networkedContentManager.setSynchronisationOn(true);
				}
			}
		});

		// Toggle: bi-directional collaboration. Collaboration implies
		// synchronisation, so the synchronise button is kept consistent.
		final SimpleButton collaborationButton = createMenuButton("Collaborate");
		collaborationButton.addButtonListener(new SimpleButtonAdapter() {
			public void buttonClicked(SimpleButton b, long id, float x,
					float y, float pressure) {
				if (networkedContentManager.isBiSynchronisationEnabled()) {
					collaborationButton.setText("Collaborate");
					networkedContentManager.setBiSynchronisationEnabled(false);
					synchroniseButton.setText("Synchronise");
					networkedContentManager.setSynchronisationOn(false);
				} else {
					collaborationButton.setText("Stop Collaborate");
					networkedContentManager.setBiSynchronisationEnabled(true);
					synchroniseButton.setText("Stop Synchronise");
					networkedContentManager.setSynchronisationOn(true);
				}
			}
		});

		// Opens the table-swap dialogue created above.
		final SimpleButton swapTableButton = createMenuButton("Swap Tables");
		swapTableButton.addButtonListener(new SimpleButtonAdapter() {
			public void buttonClicked(SimpleButton b, long id, float x,
					float y, float pressure) {
				tableSwapDialogue.setVisible(true);
			}
		});

		controlMenu.setLocalLocation(200, 200);
		controlMenu.addSubItem(blockButton);
		controlMenu.addSubItem(disableMenuButton);

		// Returns to the SynergyNet main menu.
		SimpleButton backToMainMenuButton = createMenuButton("Back To Main Menu");
		backToMainMenuButton.addButtonListener(new SimpleButtonAdapter() {
			public void buttonClicked(SimpleButton b, long id, float x,
					float y, float pressure) {
				try {
					// setSafeExit();
					SynergyNetDesktop.getInstance().showMainMenu();
				} catch (InstantiationException e) {
					e.printStackTrace();
				} catch (IllegalAccessException e) {
					e.printStackTrace();
				} catch (ClassNotFoundException e) {
					e.printStackTrace();
				}
			}
		});

		// Toggle: show/hide the student tables' desktops. State is tracked
		// through the button label itself.
		final SimpleButton getDesktopButton = createMenuButton("Get student desktops");
		getDesktopButton.addButtonListener(new SimpleButtonAdapter() {
			public void buttonClicked(SimpleButton b, long id, float x,
					float y, float pressure) {
				if (networkedContentManager.getRemoteDesktopController() != null) {
					if (getDesktopButton.getText().equals(
							"Get student desktops")) {
						networkedContentManager.getRemoteDesktopController()
								.requestRemoteDesktops(true);
						getDesktopButton.setText("Hide student desktops");
					} else {
						networkedContentManager.getRemoteDesktopController()
								.requestRemoteDesktops(false);
						getDesktopButton.setText("Get student desktops");
					}
				}
			}
		});

		// One-shot: discover available projectors.
		final SimpleButton getProjectorButton = createMenuButton("Search projectors");
		getProjectorButton.addButtonListener(new SimpleButtonAdapter() {
			public void buttonClicked(SimpleButton b, long id, float x,
					float y, float pressure) {
				if (networkedContentManager.getProjectorController() != null) {
					networkedContentManager.getProjectorController()
							.demandProjectors();
				}
			}
		});

		// Sub-menu rebuilt each time it is shown: one button per online table
		// (excluding this table), sending the current data to that table.
		final ListContainer sendDataToMenu = (ListContainer) contentSystem
				.createContentItem(ListContainer.class);
		sendDataToMenu.setWidth(300);
		sendDataToMenu.setItemHeight(30);
		sendDataToMenu.getBackgroundFrame().setBackgroundColour(Color.gray);
		sendDataToMenu.addListEventListener(new ListEventAdapter() {
			public void listHiden() {
				sendDataToMenu.clear();
			}

			@Override
			public void listShown() {
				if (networkedContentManager.getTableCommsClientService() == null) {
					return;
				}
				List<TableIdentity> onlineTables = networkedContentManager
						.getTableCommsClientService().getCurrentlyOnline();
				for (final TableIdentity table : onlineTables) {
					if (table.hashCode() != TableIdentity.getTableIdentity()
							.hashCode()) {
						SimpleButton onlineTableButton = createMenuButton(table
								.toString());
						onlineTableButton
								.addButtonListener(new SimpleButtonAdapter() {
									public void buttonClicked(SimpleButton b,
											long id, float x, float y,
											float pressure) {
										networkedContentManager
												.sendDataTo(table);
										sendDataToMenu.setVisible(false);
									}
								});
						sendDataToMenu.addSubItem(onlineTableButton);
					}
				}
			}
		});

		// Mirror of sendDataToMenu, but requests data FROM the chosen table.
		final ListContainer requestDataFromMenu = (ListContainer) contentSystem
				.createContentItem(ListContainer.class);
		requestDataFromMenu.setWidth(300);
		requestDataFromMenu.setItemHeight(30);
		requestDataFromMenu.getBackgroundFrame()
				.setBackgroundColour(Color.gray);
		requestDataFromMenu.addListEventListener(new ListEventAdapter() {
			public void listHiden() {
				requestDataFromMenu.clear();
			}

			@Override
			public void listShown() {
				if (networkedContentManager.getTableCommsClientService() == null) {
					return;
				}
				List<TableIdentity> onlineTables = networkedContentManager
						.getTableCommsClientService().getCurrentlyOnline();
				for (final TableIdentity table : onlineTables) {
					if (table.hashCode() != TableIdentity.getTableIdentity()
							.hashCode()) {
						SimpleButton onlineTableButton = createMenuButton(table
								.toString());
						onlineTableButton
								.addButtonListener(new SimpleButtonAdapter() {
									public void buttonClicked(SimpleButton b,
											long id, float x, float y,
											float pressure) {
										networkedContentManager
												.requestDataFrom(table);
										requestDataFromMenu.setVisible(false);
									}
								});
						requestDataFromMenu.addSubItem(onlineTableButton);
					}
				}
			}
		});

		// Toggle: item flick between tables.
		final SimpleButton flickButton = createMenuButton("Enable Item Flick");
		flickButton.addButtonListener(new SimpleButtonAdapter() {
			public void buttonClicked(SimpleButton b, long id, float x,
					float y, float pressure) {
				if (!networkedContentManager.getNetworkedFlickController()
						.isFlickEnabled()) {
					flickButton.setText("Disable Item Flick");
					networkedContentManager.getNetworkedFlickController()
							.setNetworkFlickEnabled(true);
				} else {
					flickButton.setText("Enable Item Flick");
					networkedContentManager.getNetworkedFlickController()
							.setNetworkFlickEnabled(false);
				}
			}
		});

		if (subAppMenu != null) {
			controlMenu.addSubMenu(subAppMenu.getSubAppMenu(), "Set Tables");
		}
		controlMenu.addSubItem(clearTableButton);
		controlMenu.addSubItem(broadcastButton);
		controlMenu.addSubItem(synchroniseButton);
		controlMenu.addSubItem(collaborationButton);
		controlMenu.addSubMenu(sendDataToMenu, "Send Data To...");
		controlMenu.addSubMenu(requestDataFromMenu, "Request Data From...");
		controlMenu.addSubItem(swapTableButton);
		controlMenu.addSubItem(flickButton);
		controlMenu.addSubItem(getDesktopButton);
		controlMenu.addSubItem(getProjectorButton);
		controlMenu.addSubItem(backToMainMenuButton);

		return controlMenu;
	}

	/**
	 * Sets the menu's location.
	 *
	 * @param x
	 *            the x
	 * @param y
	 *            the y
	 */
	public void setLocation(float x, float y) {
		controlMenu.setLocalLocation(x, y);
	}
}
/** * The MIT License * Copyright (c) 2011 Kuali Mobility Team * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.kuali.mobility.news.entity; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Table; import javax.xml.bind.annotation.XmlRootElement; import java.io.Serializable; import java.text.SimpleDateFormat; import java.util.Date; /** * Represents a single article present in a news feed. 
* * @author Kuali Mobility Team (mobility.dev@kuali.org) */ @Entity @Table(name = "NEWS_ARTICLE_T") @XmlRootElement(name = "article") public class NewsArticleImpl implements Serializable, Comparable<NewsArticle>, NewsArticle { private static final long serialVersionUID = -133725965130444787L; @Id @GeneratedValue(strategy = GenerationType.TABLE) @Column(name = "ID") private String articleId; @Column(name = "TITLE") private String title; @Column(name = "LINK") private String link; @Column(name = "DESCRIPTION", columnDefinition = "CLOB") private String description; @Column(name = "PUBLISHERDATE") private Date publishDate; @Column(name = "SOURCEID") private long sourceId; @Column(name = "PUBLISHERDATEDISPALY") private String publishDateDisplay; private final SimpleDateFormat format = new SimpleDateFormat("EEEE, MMMM dd, yyyy h:mm a"); @Override public NewsArticle copy() { NewsArticle copy = new NewsArticleImpl(); if (title != null) { copy.setTitle(new String(title)); } if (link != null) { copy.setLink(new String(link)); } if (description != null) { copy.setDescription(new String(description)); } if (articleId != null) { copy.setArticleId(new String(articleId)); } copy.setSourceId(sourceId); copy.setPublishDate(new Date(publishDate.getTime())); return copy; } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#getPublishDateDisplay() */ @Override public String getPublishDateDisplay() { return this.publishDateDisplay; } public void setPublishDateDisplay(String publishDateDisplay) { this.publishDateDisplay = publishDateDisplay; } @Override public int compareTo(NewsArticle arg0) { return publishDate.compareTo(arg0.getPublishDate()); } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#getTitle() */ @Override public String getTitle() { return title; } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#setTitle(java.lang.String) */ @Override public void setTitle(String title) { this.title = title; } /* (non-Javadoc) * @see 
org.kuali.mobility.news.entity.NewsArticle#getLink() */ @Override public String getLink() { return link; } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#setLink(java.lang.String) */ @Override public void setLink(String link) { this.link = link; } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#getDescription() */ @Override public String getDescription() { return description; } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#setDescription(java.lang.String) */ @Override public void setDescription(String description) { this.description = description; } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#getPublishDate() */ @Override public Date getPublishDate() { return publishDate; } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#setPublishDate(java.sql.Timestamp) */ @Override public void setPublishDate(Date publishDate) { this.publishDate = publishDate; this.publishDateDisplay = format.format(publishDate); } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#getArticleId() */ @Override public String getArticleId() { return articleId; } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#setArticleId(java.lang.String) */ @Override public void setArticleId(String articleId) { this.articleId = articleId; } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#getSourceId() */ @Override public long getSourceId() { return sourceId; } /* (non-Javadoc) * @see org.kuali.mobility.news.entity.NewsArticle#setSourceId(long) */ @Override public void setSourceId(long sourceId) { this.sourceId = sourceId; } }
package cs.si.stavor.simulator;

import org.orekit.attitudes.AttitudeProvider;
import org.orekit.attitudes.NadirPointing;
import org.orekit.bodies.BodyShape;
import org.orekit.bodies.CelestialBodyFactory;
import org.orekit.bodies.OneAxisEllipsoid;
import org.orekit.errors.OrekitException;
import org.orekit.errors.PropagationException;
import org.orekit.frames.Frame;
import org.orekit.frames.FramesFactory;
import org.orekit.orbits.KeplerianOrbit;
import org.orekit.orbits.Orbit;
import org.orekit.orbits.PositionAngle;
import org.orekit.propagation.Propagator;
import org.orekit.propagation.SpacecraftState;
import org.orekit.propagation.analytical.KeplerianPropagator;
import org.orekit.time.AbsoluteDate;
import cs.si.stavor.R;
import cs.si.stavor.app.Parameters;
import cs.si.stavor.mission.Mission;
import android.os.Handler;

/**
 * Worker thread that runs the local (on-device) mission simulation using the
 * Orekit Keplerian propagator, and publishes results back to the UI via the
 * supplied {@link Handler}.
 * <p>
 * NOTE(review): the class name shadows {@code java.lang.ThreadLocal}; renaming
 * it (e.g. to {@code LocalSimulationThread}) would avoid confusion, but that
 * would change the public interface, so it is only flagged here.
 */
public class ThreadLocal extends Thread{
	/** Handler used to post UI/model updates onto its owning thread. */
	private final Handler mHandler;
	/** Simulator facade: status, progress, flags (reset/cancel) and results sink. */
	private Simulator simulator;
	/** Mission definition: initial orbit, frames, duration and step. */
	private Mission mission;

	/**
	 * Creates the simulation thread (does not start it).
	 *
	 * @param handler handler for posting results back to the UI thread
	 * @param simu the simulator this thread reports to
	 * @param mis the mission to simulate
	 */
	ThreadLocal(Handler handler, Simulator simu, Mission mis) {
		mHandler = handler;
		simulator = simu;
		mission = mis;
	}

	/**
	 * Main simulation loop. First connects (initialises the propagator),
	 * then repeatedly: honours reset/pause/cancel flags, throttles to the
	 * configured refresh rate, propagates one step and publishes the state.
	 * Always ends by marking the simulator disconnected.
	 */
	@Override
	public void run() {
		// Background work: keep UI responsive.
		Thread.currentThread().setPriority(Thread.MIN_PRIORITY);
		if(simulator.getSimulatorStatus().equals(SimulatorStatus.Disconnected)){
			//initialize simulation
			try {
				simulator.setProgress(60 * 100);
				setSimulationParameters();
				simulator.setProgress(80 * 100);
				setConnected();
				clearBrowserPath();
				simulator.goToHud();
				simulator.showMessage(simulator.getContext().getString(R.string.sim_local_simulator_connected));
			} catch (OrekitException e) {
				// Initialisation failed: report and stay disconnected.
				e.printStackTrace();
				simulator.showMessage(simulator.getContext().getString(R.string.sim_orekit_init_error)+": "+e.getMessage());
				setDisconnected();
			}
		}
		if(simulator.getSimulatorStatus().equals(SimulatorStatus.Connected)){
			try {
				while (true){//Infinite simulation loop
					// Reset flag: re-initialise and pause before continuing.
					if(simulator.reset){
						simulator.reset=false;
						setSimulationParameters();
						simulator.pause();
						clearBrowserPath();
					}
					// Blocks while the simulation is paused.
					simulator.playCondition.block();
					// Cancel is checked both after unblocking and after the
					// propagation step, so a stop request is honoured promptly.
					if(simulator.cancel){
						simulator.cancel=false;
						break;
					}
					//Fix simulation speed to desired FPS
					long dur = (System.nanoTime()-time_tmp_data);
					if(dur<(Parameters.Simulator.min_hud_model_refreshing_interval_ns-Parameters.Simulator.model_refreshing_interval_safe_guard_ns)){
						try {
							// Sleep off the remainder of the frame interval (ns -> ms).
							long sleep_dur = (Parameters.Simulator.min_hud_model_refreshing_interval_ns-dur)/1000000;
							if(sleep_dur>0){
								Thread.sleep(sleep_dur);
							}
						} catch (InterruptedException e) {
							e.printStackTrace();
						}
					}else{
						try {
							// Frame already late: still yield a minimal guard interval.
							Thread.sleep(Parameters.Simulator.model_refreshing_interval_safe_guard_ns/1000000);
						} catch (InterruptedException e) {
							e.printStackTrace();
						}
					}
					time_tmp_data = System.nanoTime();
					// Advance the propagator one step; null means mission time is over.
					SpacecraftState sstate = propagate();
					// Percentage of mission elapsed: durationFrom(finalDate) is
					// negative until the end, reaching 0 at finalDate.
					int progress = (int)(((mission.sim_duration+extrapDate.durationFrom(finalDate))/mission.sim_duration)*100);
					if(sstate!=null){
						simulator.getSimulationResults().updateSimulation(sstate, progress);
						publishProgress();
					}else{
						simulator.stop();
						simulator.showMessage(simulator.getContext().getString(R.string.sim_mission_ended));
					}
					if(simulator.cancel){
						simulator.cancel=false;
						break;
					}
					Thread.yield();
				}
			} catch (OrekitException e) {
				e.printStackTrace();
				simulator.showMessage(simulator.getContext().getString(R.string.sim_orekit_prop_error)+": "+e.getMessage());
			}
		}
		setDisconnected();
	}

	// Timestamp (ns) of the last propagation step, for frame-rate throttling.
	private long time_tmp_data = 0;
	// Timestamp (ns) of the last HUD panel refresh, to rate-limit UI updates.
	private long time_tmp_gui = 0;

	/**
	 * Posts the latest simulation state to the UI thread: always pushes the
	 * 3D model update, but refreshes the HUD panel at most once per
	 * {@code min_hud_panel_refreshing_interval_ns}.
	 */
	private void publishProgress(){
		mHandler.post(new Runnable() {
			@Override
			public void run() {
				//Update model by push
				simulator.getSimulationResults().pushSimulationModel();
				//Update GUI HUD
				if(time_tmp_gui==0 || (System.nanoTime()-time_tmp_gui)>Parameters.Simulator.min_hud_panel_refreshing_interval_ns){
					time_tmp_gui = System.nanoTime();
					simulator.getSimulationResults().updateHUD();
				}
			}
		});
	}

	/** Clears the ground-track path buffer on the UI thread. */
	private void clearBrowserPath(){
		mHandler.post(new Runnable() {
			@Override
			public void run() {
				//Clear model by push
				simulator.getSimulationResults().resetMapPathBuffer();
			}
		});
	}

	/** Marks the simulator as connected. */
	private void setConnected(){
		//Log.d("Sim",System.currentTimeMillis()+": "+"Simulator connected");
		simulator.setSimulatorStatus(SimulatorStatus.Connected);
	}

	/** Marks the simulator as disconnected and clears the selected mission. */
	public void setDisconnected(){
		//Log.d("Sim",System.currentTimeMillis()+": "+"Simulator disconnected");
		simulator.setSimulatorStatus(SimulatorStatus.Disconnected);
		simulator.resetSelectedMissionId();
	}

	// Inertial frame selected from the mission configuration.
	private Frame inertialFrame;
	//private Frame rotatingFrame;
	// Orekit propagator built by setSimulationParameters().
	private Propagator propagator;
	// Current propagation epoch and mission end epoch.
	private AbsoluteDate extrapDate, finalDate;

	/**
	 * Initialize simulation: resolves the inertial frame, builds the initial
	 * Keplerian orbit, a nadir-pointing attitude law and the propagator, and
	 * resets the start/end epochs.
	 *
	 * @throws OrekitException if Orekit frame/body/propagator setup fails
	 */
	private void setSimulationParameters() throws OrekitException{
		switch(mission.inertialFrame){
			case GCRF:
				inertialFrame = FramesFactory.getGCRF();
				break;
			case EME2000:
				inertialFrame = FramesFactory.getEME2000();
				break;
			case MOD:
				inertialFrame = FramesFactory.getMOD(true);
				break;
			case TOD:
				inertialFrame = FramesFactory.getTOD(true);
				break;
			case TEME:
				inertialFrame = FramesFactory.getTEME();
				break;
			case Veis1959:
				// NOTE(review): enum says 1959 but Orekit's frame is Veis 1950 —
				// confirm this mapping is intentional.
				inertialFrame = FramesFactory.getVeis1950();
				break;
			default:
				inertialFrame = FramesFactory.getEME2000();
				break;
		}
		/*switch(mission.rotatingFrame){
		case ITRF:
			rotatingFrame = FramesFactory.getITRF(IERSConventions.IERS_2010, true);
			break;
		case GTOD:
			rotatingFrame = FramesFactory.getGTOD(true);
			break;
		default:
			rotatingFrame = FramesFactory.getITRF(IERSConventions.IERS_2010, true);
			break;
		}*/
		Orbit initialOrbit = new KeplerianOrbit(mission.initial_orbit.a, mission.initial_orbit.e, mission.initial_orbit.i, mission.initial_orbit.omega, mission.initial_orbit.raan, mission.initial_orbit.lM, PositionAngle.MEAN, inertialFrame, mission.initial_date, mission.initial_orbit.mu);
		// WGS84 Earth ellipsoid used as the target of the nadir-pointing law.
		BodyShape earth = new OneAxisEllipsoid(org.orekit.utils.Constants.WGS84_EARTH_EQUATORIAL_RADIUS,org.orekit.utils.Constants.WGS84_EARTH_FLATTENING,CelestialBodyFactory.getEarth().getBodyOrientedFrame());
		AttitudeProvider attitudeProvider = new NadirPointing(earth);
		SpacecraftState old_st;
		switch(mission.propagatorType){
			case Keplerian://FIXME:PROPAGATOR implement other propagators and put a flag to use each one
				//kepler = new KeplerianPropagator(initialOrbit,null,mission.initial_orbit.mu,mission.initial_mass);
				propagator = new KeplerianPropagator(initialOrbit,attitudeProvider,mission.initial_orbit.mu, mission.initial_mass);
				propagator.setSlaveMode();
				// Re-seed the initial state so it carries the mission's mass.
				old_st = propagator.getInitialState();
				propagator.resetInitialState(new SpacecraftState(old_st.getOrbit(), old_st.getAttitude() , mission.initial_mass));
				break;
			default:
				//kepler = new KeplerianPropagator(initialOrbit,"DEFAULT_LAW",mission.initial_orbit.mu,mission.initial_mass);
				propagator = new KeplerianPropagator(initialOrbit,attitudeProvider,mission.initial_orbit.mu, mission.initial_mass);
				propagator.setSlaveMode();
				old_st = propagator.getInitialState();
				propagator.resetInitialState(new SpacecraftState(old_st.getOrbit(), old_st.getAttitude() , mission.initial_mass));
		}
		//Apply EventDetectors for maneuvers
		/*for(Entry<String, ManeuverImpulse> ma : simulation.mission.maneuvers.entrySet()){
			ManeuverImpulse mi = (ManeuverImpulse) ma.getValue();
			propagator.addEventDetector(mi.getOrekitManeuver());
		}*/
		extrapDate = mission.initial_date;
		finalDate = new AbsoluteDate(mission.initial_date, mission.sim_duration);
		//step=simulation.mission.sim_step;
	}

	/**
	 * Propagate simulation by one step.
	 *
	 * @return the spacecraft state at the current epoch, or null once the
	 *         epoch has passed the mission end date
	 * @throws PropagationException if Orekit propagation fails
	 */
	private SpacecraftState propagate() throws PropagationException{
		if(extrapDate.compareTo(finalDate) <= 0){
			SpacecraftState currentState = propagator.propagate(extrapDate);
			// Advance the epoch for the next call.
			extrapDate = new AbsoluteDate(extrapDate, mission.sim_step);
			return currentState;
		}else{
			return null;
		}
	}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive.statistics; import com.facebook.presto.hive.HiveColumnHandle; import com.facebook.presto.hive.HivePartition; import com.facebook.presto.hive.HiveTableHandle; import com.facebook.presto.hive.PartitionStatistics; import com.facebook.presto.hive.metastore.HiveColumnStatistics; import com.facebook.presto.hive.metastore.Partition; import com.facebook.presto.hive.metastore.SemiTransactionalHiveMetastore; import com.facebook.presto.hive.metastore.Table; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ColumnMetadata; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.ConnectorTableHandle; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.statistics.ColumnStatistics; import com.facebook.presto.spi.statistics.Estimate; import com.facebook.presto.spi.statistics.RangeColumnStatistics; import com.facebook.presto.spi.statistics.TableStatistics; import com.facebook.presto.spi.type.TypeManager; import com.google.common.collect.ImmutableMap; import javax.annotation.Nullable; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.OptionalDouble; import java.util.OptionalLong; import java.util.PrimitiveIterator; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.DoubleStream; 
import static com.facebook.presto.hive.HiveSessionProperties.isStatisticsEnabled; import static com.google.common.base.Preconditions.checkArgument; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.toList; public class MetastoreHiveStatisticsProvider implements HiveStatisticsProvider { private final TypeManager typeManager; private final SemiTransactionalHiveMetastore metastore; public MetastoreHiveStatisticsProvider(TypeManager typeManager, SemiTransactionalHiveMetastore metastore) { this.typeManager = requireNonNull(typeManager, "typeManager is null"); this.metastore = requireNonNull(metastore, "metastore is null"); } @Override public TableStatistics getTableStatistics(ConnectorSession session, ConnectorTableHandle tableHandle, List<HivePartition> hivePartitions, Map<String, ColumnHandle> tableColumns) { if (!isStatisticsEnabled(session)) { return TableStatistics.EMPTY_STATISTICS; } Map<String, PartitionStatistics> partitionStatistics = getPartitionsStatistics((HiveTableHandle) tableHandle, hivePartitions, tableColumns.keySet()); TableStatistics.Builder tableStatistics = TableStatistics.builder(); Estimate rowCount = calculateRowsCount(partitionStatistics); tableStatistics.setRowCount(rowCount); for (Map.Entry<String, ColumnHandle> columnEntry : tableColumns.entrySet()) { String columnName = columnEntry.getKey(); HiveColumnHandle hiveColumnHandle = (HiveColumnHandle) columnEntry.getValue(); RangeColumnStatistics.Builder rangeStatistics = RangeColumnStatistics.builder(); Estimate nullsFraction; if (hiveColumnHandle.isPartitionKey()) { rangeStatistics.setDistinctValuesCount(countDistinctPartitionKeys(hiveColumnHandle, hivePartitions)); nullsFraction = calculateNullsFractionForPartitioningKey(hiveColumnHandle, hivePartitions, partitionStatistics); } else { rangeStatistics.setDistinctValuesCount(calculateDistinctValuesCount(partitionStatistics, columnName)); nullsFraction = 
calculateNullsFraction(partitionStatistics, columnName, rowCount); } rangeStatistics.setFraction(nullsFraction.map(value -> 1.0 - value)); ColumnStatistics.Builder columnStatistics = ColumnStatistics.builder(); columnStatistics.setNullsFraction(nullsFraction); columnStatistics.addRange(rangeStatistics.build()); tableStatistics.setColumnStatistics(hiveColumnHandle, columnStatistics.build()); } return tableStatistics.build(); } private Estimate calculateRowsCount(Map<String, PartitionStatistics> partitionStatistics) { List<Long> knownPartitionRowCounts = partitionStatistics.values().stream() .map(PartitionStatistics::getRowCount) .filter(OptionalLong::isPresent) .map(OptionalLong::getAsLong) .collect(toList()); long knownPartitionRowCountsSum = knownPartitionRowCounts.stream().mapToLong(a -> a).sum(); long partitionsWithStatsCount = knownPartitionRowCounts.size(); long allPartitionsCount = partitionStatistics.size(); if (partitionsWithStatsCount == 0) { return Estimate.unknownValue(); } return new Estimate(1.0 * knownPartitionRowCountsSum / partitionsWithStatsCount * allPartitionsCount); } private Estimate calculateDistinctValuesCount(Map<String, PartitionStatistics> statisticsByPartitionName, String column) { return summarizePartitionStatistics( statisticsByPartitionName.values(), column, columnStatistics -> { if (columnStatistics.getDistinctValuesCount().isPresent()) { return OptionalDouble.of(columnStatistics.getDistinctValuesCount().getAsLong()); } else { return OptionalDouble.empty(); } }, DoubleStream::max); } private Estimate calculateNullsFraction(Map<String, PartitionStatistics> statisticsByPartitionName, String column, Estimate totalRowsCount) { Estimate totalNullsCount = summarizePartitionStatistics( statisticsByPartitionName.values(), column, columnStatistics -> { if (columnStatistics.getNullsCount().isPresent()) { return OptionalDouble.of(columnStatistics.getNullsCount().getAsLong()); } else { return OptionalDouble.empty(); } }, nullsCountStream -> { 
double nullsCount = 0; long partitionsWithStatisticsCount = 0; for (PrimitiveIterator.OfDouble nullsCountIterator = nullsCountStream.iterator(); nullsCountIterator.hasNext(); ) { nullsCount += nullsCountIterator.nextDouble(); partitionsWithStatisticsCount++; } if (partitionsWithStatisticsCount == 0) { return OptionalDouble.empty(); } else { int allPartitionsCount = statisticsByPartitionName.size(); return OptionalDouble.of(allPartitionsCount / partitionsWithStatisticsCount * nullsCount); } }); if (totalNullsCount.isValueUnknown() || totalRowsCount.isValueUnknown()) { return Estimate.unknownValue(); } if (totalRowsCount.getValue() == 0.0) { return Estimate.zeroValue(); } return new Estimate(totalNullsCount.getValue() / totalRowsCount.getValue()); } private Estimate countDistinctPartitionKeys(HiveColumnHandle partitionColumn, List<HivePartition> partitions) { return new Estimate(partitions.stream() .map(HivePartition::getKeys) .map(keys -> keys.get(partitionColumn)) .distinct() .count()); } private Estimate calculateNullsFractionForPartitioningKey(HiveColumnHandle partitionColumn, List<HivePartition> partitions, Map<String, PartitionStatistics> partitionStatistics) { OptionalDouble rowsPerPartition = partitionStatistics.values().stream() .map(PartitionStatistics::getRowCount) .filter(OptionalLong::isPresent) .mapToLong(OptionalLong::getAsLong) .average(); if (!rowsPerPartition.isPresent()) { return Estimate.unknownValue(); } double estimatedTotalRowsCount = rowsPerPartition.getAsDouble() * partitions.size(); if (estimatedTotalRowsCount == 0.0) { return Estimate.zeroValue(); } double estimatedNullsCount = partitions.stream() .filter(partition -> partition.getKeys().get(partitionColumn).isNull()) .map(HivePartition::getPartitionId) .mapToLong(partitionId -> partitionStatistics.get(partitionId).getRowCount().orElse((long) rowsPerPartition.getAsDouble())) .sum(); return new Estimate(estimatedNullsCount / estimatedTotalRowsCount); } private Estimate 
summarizePartitionStatistics( Collection<PartitionStatistics> partitionStatistics, String column, Function<HiveColumnStatistics, OptionalDouble> valueExtractFunction, Function<DoubleStream, OptionalDouble> valueAggregateFunction) { DoubleStream intermediateStream = partitionStatistics.stream() .map(PartitionStatistics::getColumnStatistics) .filter(stats -> stats.containsKey(column)) .map(stats -> stats.get(column)) .map(valueExtractFunction) .filter(OptionalDouble::isPresent) .mapToDouble(OptionalDouble::getAsDouble); OptionalDouble statisticsValue = valueAggregateFunction.apply(intermediateStream); if (statisticsValue.isPresent()) { return new Estimate(statisticsValue.getAsDouble()); } else { return Estimate.unknownValue(); } } private Map<String, PartitionStatistics> getPartitionsStatistics(HiveTableHandle tableHandle, List<HivePartition> hivePartitions, Set<String> tableColumns) { if (hivePartitions.isEmpty()) { return ImmutableMap.of(); } boolean unpartitioned = hivePartitions.stream().anyMatch(partition -> partition.getPartitionId().equals(HivePartition.UNPARTITIONED_ID)); if (unpartitioned) { checkArgument(hivePartitions.size() == 1, "expected only one hive partition"); } if (unpartitioned) { return ImmutableMap.of(HivePartition.UNPARTITIONED_ID, getTableStatistics(tableHandle.getSchemaTableName(), tableColumns)); } else { return getPartitionsStatistics(tableHandle.getSchemaTableName(), hivePartitions, tableColumns); } } private Map<String, PartitionStatistics> getPartitionsStatistics(SchemaTableName schemaTableName, List<HivePartition> hivePartitions, Set<String> tableColumns) { String databaseName = schemaTableName.getSchemaName(); String tableName = schemaTableName.getTableName(); ImmutableMap.Builder<String, PartitionStatistics> resultMap = ImmutableMap.builder(); List<String> partitionNames = hivePartitions.stream().map(HivePartition::getPartitionId).collect(Collectors.toList()); Map<String, Map<String, HiveColumnStatistics>> partitionColumnStatisticsMap 
= metastore.getPartitionColumnStatistics(databaseName, tableName, new HashSet<>(partitionNames), tableColumns) .orElse(ImmutableMap.of()); Map<String, Optional<Partition>> partitionsByNames = metastore.getPartitionsByNames(databaseName, tableName, partitionNames); for (String partitionName : partitionNames) { Map<String, String> partitionParameters = partitionsByNames.get(partitionName) .map(Partition::getParameters) .orElseThrow(() -> new IllegalArgumentException(format("Could not get metadata for partition %s.%s.%s", databaseName, tableName, partitionName))); Map<String, HiveColumnStatistics> partitionColumnStatistics = partitionColumnStatisticsMap.getOrDefault(partitionName, ImmutableMap.of()); resultMap.put(partitionName, readStatisticsFromParameters(partitionParameters, partitionColumnStatistics)); } return resultMap.build(); } private PartitionStatistics getTableStatistics(SchemaTableName schemaTableName, Set<String> tableColumns) { String databaseName = schemaTableName.getSchemaName(); String tableName = schemaTableName.getTableName(); Table table = metastore.getTable(databaseName, tableName) .orElseThrow(() -> new IllegalArgumentException(format("Could not get metadata for table %s.%s", databaseName, tableName))); Map<String, HiveColumnStatistics> tableColumnStatistics = metastore.getTableColumnStatistics(databaseName, tableName, tableColumns).orElse(ImmutableMap.of()); return readStatisticsFromParameters(table.getParameters(), tableColumnStatistics); } private PartitionStatistics readStatisticsFromParameters(Map<String, String> parameters, Map<String, HiveColumnStatistics> columnStatistics) { boolean columnStatsAcurate = Boolean.valueOf(Optional.ofNullable(parameters.get("COLUMN_STATS_ACCURATE")).orElse("false")); OptionalLong numFiles = convertStringParameter(parameters.get("numFiles")); OptionalLong numRows = convertStringParameter(parameters.get("numRows")); OptionalLong rawDataSize = convertStringParameter(parameters.get("rawDataSize")); OptionalLong 
totalSize = convertStringParameter(parameters.get("totalSize")); return new PartitionStatistics(columnStatsAcurate, numFiles, numRows, rawDataSize, totalSize, columnStatistics); } private OptionalLong convertStringParameter(@Nullable String parameterValue) { if (parameterValue == null) { return OptionalLong.empty(); } try { long longValue = Long.parseLong(parameterValue); if (longValue < 0) { return OptionalLong.empty(); } return OptionalLong.of(longValue); } catch (NumberFormatException e) { return OptionalLong.empty(); } } private ColumnMetadata getColumnMetadata(ColumnHandle columnHandle) { return ((HiveColumnHandle) columnHandle).getColumnMetadata(typeManager); } }
/**
 * Copyright 2010 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.guvnor.client;

import java.util.Collection;

import org.drools.guvnor.client.common.GenericCallback;
import org.drools.guvnor.client.explorer.AuthorPerspectivePlace;
import org.drools.guvnor.client.explorer.ClientFactory;
import org.drools.guvnor.client.explorer.GuvnorActivityMapper;
import org.drools.guvnor.client.explorer.GuvnorPlaceHistoryMapper;
import org.drools.guvnor.client.explorer.LoadPerspectives;
import org.drools.guvnor.client.explorer.Perspective;
import org.drools.guvnor.client.explorer.PerspectiveLoader;
import org.drools.guvnor.client.explorer.PerspectivesPanel;
import org.drools.guvnor.client.explorer.Preferences;
import org.drools.guvnor.client.messages.Constants;
import org.drools.guvnor.client.resources.GuvnorResources;
import org.drools.guvnor.client.resources.OperatorsResource;
import org.drools.guvnor.client.resources.RoundedCornersResource;
import org.drools.guvnor.client.rpc.ConfigurationService;
import org.drools.guvnor.client.rpc.ConfigurationServiceAsync;
import org.drools.guvnor.client.rpc.RepositoryServiceFactory;
import org.drools.guvnor.client.rpc.UserSecurityContext;
import org.drools.guvnor.client.ruleeditor.StandaloneEditorManager;
import org.drools.guvnor.client.security.CapabilitiesManager;

import com.google.gwt.activity.shared.ActivityManager;
import com.google.gwt.activity.shared.ActivityMapper;
import com.google.gwt.animation.client.Animation;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Style.Visibility;
import com.google.gwt.event.shared.EventBus;
import com.google.gwt.place.shared.PlaceController;
import com.google.gwt.place.shared.PlaceHistoryHandler;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.RootLayoutPanel;
import com.google.gwt.user.client.ui.RootPanel;

/**
 * This is the main launching/entry point for the JBRMS web console. It
 * essentially sets the initial layout.
 * <p/>
 * If you hadn't noticed, this is using GWT from google. Refer to GWT docs if
 * GWT is new to you (it is quite a different way of building web apps).
 */
public class JBRMSEntryPoint implements EntryPoint {

    private Constants constants = GWT.create( Constants.class );

    // Set lazily in createMain(); showMain() writes the user name into it afterwards.
    private PerspectivesPanel perspectivesPanel;

    /**
     * GWT module entry point: injects the CSS bundles, fades out the static
     * "loading" pop-up, then kicks off the asynchronous login check.
     */
    public void onModuleLoad() {
        loadStyles();
        hideLoadingPopup();
        checkLogIn();
    }

    /** Injects the application's CSS resource bundles into the page. */
    private void loadStyles() {
        GuvnorResources.INSTANCE.headerCss().ensureInjected();
        RoundedCornersResource.INSTANCE.roundCornersCss().ensureInjected();
        OperatorsResource.INSTANCE.operatorsCss().ensureInjected();
    }

    /**
     * Check if user is logged in, if not, then show prompt. If it is, then we
     * show the app, in all its glory !
     */
    private void checkLogIn() {
        RepositoryServiceFactory.getSecurityService().getCurrentUser( new GenericCallback<UserSecurityContext>() {
            public void onSuccess(UserSecurityContext userSecurityContext) {
                String userName = userSecurityContext.getUserName();
                // A null user name means there is no active session yet.
                if ( userName != null ) {
                    showMain( userName );
                } else {
                    logIn();
                }
            }
        } );
    }

    /**
     * Shows the login widget; once the user has logged in successfully the
     * main application is shown for that user.
     */
    private void logIn() {
        final LoginWidget loginWidget = new LoginWidget();
        loginWidget.setLoggedInEvent( new Command() {
            public void execute() {
                showMain( loginWidget.getUserName() );
            }
        } );
        loginWidget.show();
    }

    /**
     * Loads the user's allowed capabilities and preferences, then builds the
     * main UI and displays the user name in the perspectives panel. The
     * browser status bar is used as a crude progress indicator.
     */
    private void showMain(final String userName) {

        Window.setStatus( constants.LoadingUserPermissions() );
        CapabilitiesManager.getInstance().refreshAllowedCapabilities( new Command() {
            public void execute() {

                Preferences.INSTANCE.loadPrefs( CapabilitiesManager.getInstance().getCapabilities() );

                Window.setStatus( " " );
                createMain();
                perspectivesPanel.setUserName( userName );
            }
        } );
    }

    /**
     * Creates the main view of Guvnor. The path used to invoke guvnor is used
     * to identify the view to show: If the path contains
     * "StandaloneEditor.html" then the StandaloneGuidedEditorManager is used to
     * render the view. If not, the default view is shown.
     */
    private void createMain() {
        if ( Window.Location.getPath().contains( "StandaloneEditor.html" ) ) {
            // Standalone editor mode: no perspectives, just the editor layout.
            RootLayoutPanel.get().add( new StandaloneEditorManager().getBaseLayout() );
        } else {
            // Full console mode: wire up GWT Places/Activities with browser history.
            ClientFactory clientFactory = GWT.create( ClientFactory.class );

            EventBus eventBus = clientFactory.getEventBus();
            PlaceController placeController = clientFactory.getPlaceController();

            Perspective defaultPlace = new AuthorPerspectivePlace();

            perspectivesPanel = new PerspectivesPanel( clientFactory.getPerspectivesPanelView( hideTitle() ),
                                                       placeController );

            loadPerspectives();

            // TODO: Hide the dropdown if the default one is the only one -Rikkola-

            ActivityMapper activityMapper = new GuvnorActivityMapper( clientFactory );
            ActivityManager activityManager = new ActivityManager( activityMapper,
                                                                   eventBus );
            // The activity manager must know its display before history is replayed.
            activityManager.setDisplay( perspectivesPanel );

            GuvnorPlaceHistoryMapper historyMapper = GWT.create( GuvnorPlaceHistoryMapper.class );
            PlaceHistoryHandler historyHandler = new PlaceHistoryHandler( historyMapper );
            historyHandler.register( placeController,
                                     eventBus,
                                     defaultPlace );

            // Dispatch to the place encoded in the current URL (or the default place).
            historyHandler.handleCurrentHistory();

            RootLayoutPanel.get().add( perspectivesPanel.getView() );
        }
    }

    /**
     * Asynchronously loads the configured perspectives and adds each one to
     * the perspectives panel's drop-down.
     */
    private void loadPerspectives() {
        ConfigurationServiceAsync configurationServiceAsync = GWT.create( ConfigurationService.class );

        PerspectiveLoader perspectiveLoader = new PerspectiveLoader( configurationServiceAsync );
        perspectiveLoader.loadPerspectives( new LoadPerspectives() {
            public void loadPerspectives(Collection<Perspective> perspectives) {
                for ( Perspective perspective : perspectives ) {
                    perspectivesPanel.addPerspective( perspective );
                }
            }
        } );
    }

    /**
     * Reads the "nochrome" URL parameter to decide whether the title bar is
     * hidden.
     * NOTE(review): when the parameter is absent this returns true (title
     * hidden by default) — confirm that default is intended, as "nochrome"
     * sounds like an opt-in flag.
     */
    private boolean hideTitle() {
        String parameter = Window.Location.getParameter( "nochrome" );

        if ( parameter == null ) {
            return true;
        } else {
            return parameter.equals( "true" );
        }
    }

    // Fade out the "Loading application" pop-up (the static HTML element with id
    // "loading") over half a second, then hide it entirely.
    private void hideLoadingPopup() {
        final Element e = RootPanel.get( "loading" ).getElement();

        Animation r = new Animation() {

            @Override
            protected void onUpdate(double progress) {
                e.getStyle().setOpacity( 1.0 - progress );
            }

            @Override
            protected void onComplete() {
                e.getStyle().setVisibility( Visibility.HIDDEN );
            }
        };

        r.run( 500 );
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.lucene.index;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FilteredDocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * A frequency TermsEnum that returns frequencies derived from a collection of
 * cached leaf termEnums. It also allows to provide a filter to explicitly
 * compute frequencies only for docs that match the filter (heavier!).
 */
public class FilterableTermsEnum extends TermsEnum {

    // Per-leaf state: the leaf's TermsEnum, a reusable PostingsEnum, and the
    // (optional) set of docs in that leaf which are live and match the filter.
    static class Holder {
        final TermsEnum termsEnum;
        @Nullable
        PostingsEnum docsEnum;
        @Nullable
        final Bits bits;

        Holder(TermsEnum termsEnum, Bits bits) {
            this.termsEnum = termsEnum;
            this.bits = bits;
        }
    }

    static final String UNSUPPORTED_MESSAGE = "This TermsEnum only supports #seekExact(BytesRef) as well as #docFreq() and #totalTermFreq()";
    protected final static int NOT_FOUND = -1;
    private final Holder[] enums;
    // Frequencies of the term last sought via seekExact; NOT_FOUND when the
    // term was absent (or matched no filtered docs).
    protected int currentDocFreq = 0;
    protected long currentTotalTermFreq = 0;
    protected BytesRef current;
    // Either PostingsEnum.FREQS (also accumulate totalTermFreq) or
    // PostingsEnum.NONE (doc frequency only) — validated in the constructor.
    protected final int docsEnumFlag;

    /**
     * Builds one {@link Holder} per leaf of {@code reader} for {@code field}.
     * When {@code filter} is non-null, a per-leaf bitset of the docs matching
     * the filter (intersected with live docs) is precomputed; leaves with no
     * terms for the field or no matching docs are skipped entirely.
     */
    public FilterableTermsEnum(IndexReader reader, String field, int docsEnumFlag, @Nullable Query filter) throws IOException {
        if ((docsEnumFlag != PostingsEnum.FREQS) && (docsEnumFlag != PostingsEnum.NONE)) {
            throw new IllegalArgumentException("invalid docsEnumFlag of " + docsEnumFlag);
        }
        this.docsEnumFlag = docsEnumFlag;
        List<LeafReaderContext> leaves = reader.leaves();
        List<Holder> enums = new ArrayList<>(leaves.size());
        final Weight weight;
        if (filter == null) {
            weight = null;
        } else {
            final IndexSearcher searcher = new IndexSearcher(reader);
            // don't pollute the query cache with this one-off filter
            searcher.setQueryCache(null);
            weight = searcher.createNormalizedWeight(filter, false);
        }
        for (LeafReaderContext context : leaves) {
            Terms terms = context.reader().terms(field);
            if (terms == null) {
                continue;
            }
            TermsEnum termsEnum = terms.iterator();
            if (termsEnum == null) {
                continue;
            }
            BitSet bits = null;
            if (weight != null) {
                Scorer scorer = weight.scorer(context);
                if (scorer == null) {
                    // fully filtered, none matching, no need to iterate on this
                    continue;
                }
                DocIdSetIterator docs = scorer.iterator();

                // we want to force apply deleted docs
                final Bits liveDocs = context.reader().getLiveDocs();
                if (liveDocs != null) {
                    docs = new FilteredDocIdSetIterator(docs) {
                        @Override
                        protected boolean match(int doc) {
                            return liveDocs.get(doc);
                        }
                    };
                }

                bits = BitSet.of(docs, context.reader().maxDoc());
            }
            enums.add(new Holder(termsEnum, bits));
        }
        this.enums = enums.toArray(new Holder[enums.size()]);
    }

    @Override
    public BytesRef term() throws IOException {
        return current;
    }

    /**
     * Seeks every leaf enum to {@code text} and accumulates docFreq (and,
     * when docsEnumFlag == FREQS, totalTermFreq) across leaves. With no
     * filter bits the cheap leaf-level statistics are used; with bits the
     * postings are walked doc-by-doc (heavier). Returns false and resets
     * state to NOT_FOUND when no (matching) doc contains the term.
     */
    @Override
    public boolean seekExact(BytesRef text) throws IOException {
        int docFreq = 0;
        long totalTermFreq = 0;
        for (Holder anEnum : enums) {
            if (anEnum.termsEnum.seekExact(text)) {
                if (anEnum.bits == null) {
                    docFreq += anEnum.termsEnum.docFreq();
                    if (docsEnumFlag == PostingsEnum.FREQS) {
                        long leafTotalTermFreq = anEnum.termsEnum.totalTermFreq();
                        // -1 is Lucene's "not recorded" sentinel; once any leaf reports
                        // it, the aggregate total is unknowable and stays -1.
                        if (totalTermFreq == -1 || leafTotalTermFreq == -1) {
                            totalTermFreq = -1;
                            continue;
                        }
                        totalTermFreq += leafTotalTermFreq;
                    }
                } else {
                    final PostingsEnum docsEnum = anEnum.docsEnum = anEnum.termsEnum.postings(anEnum.docsEnum, docsEnumFlag);
                    // 2 choices for performing same heavy loop - one attempts to calculate totalTermFreq and other does not
                    if (docsEnumFlag == PostingsEnum.FREQS) {
                        for (int docId = docsEnum.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = docsEnum.nextDoc()) {
                            // bits is known non-null in this branch; the check is defensive
                            if (anEnum.bits != null && anEnum.bits.get(docId) == false) {
                                continue;
                            }
                            docFreq++;
                            // docsEnum.freq() returns 1 if doc indexed with IndexOptions.DOCS_ONLY so no way of knowing if value
                            // is really 1 or unrecorded when filtering like this
                            totalTermFreq += docsEnum.freq();
                        }
                    } else {
                        for (int docId = docsEnum.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = docsEnum.nextDoc()) {
                            if (anEnum.bits != null && anEnum.bits.get(docId) == false) {
                                continue;
                            }
                            // docsEnum.freq() behaviour is undefined if docsEnumFlag==PostingsEnum.FLAG_NONE so don't bother with call
                            docFreq++;
                        }
                    }
                }
            }
        }
        if (docFreq > 0) {
            currentDocFreq = docFreq;
            currentTotalTermFreq = totalTermFreq;
            current = text;
            return true;
        } else {
            currentDocFreq = NOT_FOUND;
            currentTotalTermFreq = NOT_FOUND;
            current = null;
            return false;
        }
    }

    @Override
    public int docFreq() throws IOException {
        return currentDocFreq;
    }

    @Override
    public long totalTermFreq() throws IOException {
        return currentTotalTermFreq;
    }

    // The remaining TermsEnum operations are deliberately unsupported: this enum
    // only answers point lookups done through seekExact(BytesRef).

    @Override
    public void seekExact(long ord) throws IOException {
        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
    }

    @Override
    public SeekStatus seekCeil(BytesRef text) throws IOException {
        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
    }

    @Override
    public long ord() throws IOException {
        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
    }

    @Override
    public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException {
        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
    }

    @Override
    public BytesRef next() throws IOException {
        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.lang.ref.WeakReference;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.HeapMemoryTuneObserver;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.StringUtils;

import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
 * Does the management of memstoreLAB chunk creations. A monotonically incrementing id is associated
 * with every chunk
 */
@InterfaceAudience.Private
public class ChunkCreator {
  private static final Log LOG = LogFactory.getLog(ChunkCreator.class);
  // monotonically increasing chunkid
  private AtomicInteger chunkID = new AtomicInteger(1);
  // maps the chunk against the monotonically increasing chunk id. We need to preserve the
  // natural ordering of the key
  // CellChunkMap creation should convert the weak ref to hard reference

  // chunk id of each chunk is the first integer written on each chunk,
  // the header size need to be changed in case chunk id size is changed
  public static final int SIZEOF_CHUNK_HEADER = Bytes.SIZEOF_INT;

  // An object pointed by a weak reference can be garbage collected, in opposite to an object
  // referenced by a strong (regular) reference. Every chunk created via ChunkCreator is referenced
  // from either weakChunkIdMap or strongChunkIdMap.
  // Upon chunk C creation, C's ID is mapped into weak reference to C, in order not to disturb C's
  // GC in case all other reference to C are going to be removed.
  // When chunk C is referenced from CellChunkMap (via C's ID) it is possible to GC the chunk C.
  // To avoid that upon inserting C into CellChunkMap, C's ID is mapped into strong (regular)
  // reference to C.

  // map that doesn't influence GC
  private Map<Integer, WeakReference<Chunk>> weakChunkIdMap =
      new ConcurrentHashMap<Integer, WeakReference<Chunk>>();

  // map that keeps chunks from garbage collection
  private Map<Integer, Chunk> strongChunkIdMap = new ConcurrentHashMap<Integer, Chunk>();

  private final int chunkSize;
  private final boolean offheap;
  @VisibleForTesting
  static ChunkCreator INSTANCE;
  @VisibleForTesting
  static boolean chunkPoolDisabled = false;
  // null when pooling is disabled (see initializePool)
  private MemStoreChunkPool pool;

  @VisibleForTesting
  ChunkCreator(int chunkSize, boolean offheap, long globalMemStoreSize, float poolSizePercentage,
      float initialCountPercentage, HeapMemoryManager heapMemoryManager) {
    this.chunkSize = chunkSize;
    this.offheap = offheap;
    this.pool = initializePool(globalMemStoreSize, poolSizePercentage, initialCountPercentage);
    if (heapMemoryManager != null && this.pool != null) {
      // Register with Heap Memory manager
      heapMemoryManager.registerTuneObserver(this.pool);
    }
  }

  /**
   * Initializes the instance of MSLABChunkCreator
   * @param chunkSize the chunkSize
   * @param offheap indicates if the chunk is to be created offheap or not
   * @param globalMemStoreSize the global memstore size
   * @param poolSizePercentage pool size percentage
   * @param initialCountPercentage the initial count of the chunk pool if any
   * @param heapMemoryManager the heapmemory manager
   * @return singleton MSLABChunkCreator
   */
  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "LI_LAZY_INIT_STATIC",
      justification = "Method is called by single thread at the starting of RS")
  @VisibleForTesting
  public static ChunkCreator initialize(int chunkSize, boolean offheap, long globalMemStoreSize,
      float poolSizePercentage, float initialCountPercentage, HeapMemoryManager heapMemoryManager) {
    // Unsynchronized lazy init is acceptable here: per the findbugs justification above,
    // only one thread calls this during regionserver startup.
    if (INSTANCE != null) return INSTANCE;
    INSTANCE = new ChunkCreator(chunkSize, offheap, globalMemStoreSize, poolSizePercentage,
        initialCountPercentage, heapMemoryManager);
    return INSTANCE;
  }

  /** @return the singleton instance, or null before {@link #initialize} has been called */
  static ChunkCreator getInstance() {
    return INSTANCE;
  }

  /**
   * Creates and inits a chunk.
   * @return the chunk that was initialized
   */
  Chunk getChunk() {
    Chunk chunk = null;
    if (pool != null) {
      //  the pool creates the chunk internally. The chunk#init() call happens here
      chunk = this.pool.getChunk();
      // the pool has run out of maxCount
      if (chunk == null) {
        if (LOG.isTraceEnabled()) {
          LOG.trace("The chunk pool is full. Reached maxCount= " + this.pool.getMaxCount()
              + ". Creating chunk onheap.");
        }
      }
    }
    if (chunk == null) {
      chunk = createChunk();
    }

    // put this chunk initially into the weakChunkIdMap
    this.weakChunkIdMap.put(chunk.getId(), new WeakReference<>(chunk));

    // now we need to actually do the expensive memory allocation step in case of a new chunk,
    // else only the offset is set to the beginning of the chunk to accept allocations
    chunk.init();
    return chunk;
  }

  // Convenience for creating an on-demand (non-pool) chunk.
  private Chunk createChunk() {
    return createChunk(false);
  }

  /**
   * Creates the chunk either onheap or offheap
   * @param pool indicates if the chunks have to be created which will be used by the Pool
   * @return the chunk
   */
  private Chunk createChunk(boolean pool) {
    int id = chunkID.getAndIncrement();
    assert id > 0;
    // do not create offheap chunk on demand
    if (pool && this.offheap) {
      return new OffheapChunk(chunkSize, id, pool);
    } else {
      return new OnheapChunk(chunkSize, id, pool);
    }
  }

  @VisibleForTesting
  // Used to translate the ChunkID into a chunk ref
  Chunk getChunk(int id) {
    // Look in the weak map first; the chunk may have been promoted to the strong map.
    WeakReference<Chunk> ref = weakChunkIdMap.get(id);
    if (ref != null) {
      return ref.get();
    }
    // check also the strong mapping
    return strongChunkIdMap.get(id);
  }

  // transfer the weak pointer to be a strong chunk pointer
  // (called when a chunk becomes referenced from CellChunkMap by id, so it must not be GCed)
  Chunk saveChunkFromGC(int chunkID) {
    Chunk c = strongChunkIdMap.get(chunkID); // check whether the chunk is already protected
    if (c != null)                           // with strong pointer
      return c;
    WeakReference<Chunk> ref = weakChunkIdMap.get(chunkID);
    if (ref != null) {
      c = ref.get();
    }
    if (c != null) {
      // put this strong reference to chunk into the strongChunkIdMap
      // the read of the weakMap is always happening before the read of the strongMap
      // so no synchronization issues here
      this.strongChunkIdMap.put(chunkID, c);
      this.weakChunkIdMap.remove(chunkID);
      return c;
    }
    // we should actually never return null as someone should not ask to save from GC a chunk,
    // which is already released. However, we are not asserting it here and we let the caller
    // to deal with the return value an assert if needed
    return null;
  }

  int getChunkSize() {
    return this.chunkSize;
  }

  boolean isOffheap() {
    return this.offheap;
  }

  // Drops the given ids from both maps without returning the chunks.
  private void removeChunks(Set<Integer> chunkIDs) {
    this.weakChunkIdMap.keySet().removeAll(chunkIDs);
    this.strongChunkIdMap.keySet().removeAll(chunkIDs);
  }

  // Removes the chunk with the given id from both maps and returns it.
  // May return null if the weak reference was already cleared by GC.
  Chunk removeChunk(int chunkId) {
    WeakReference<Chunk> weak = this.weakChunkIdMap.remove(chunkId);
    Chunk strong = this.strongChunkIdMap.remove(chunkId);
    if (weak != null) {
      return weak.get();
    }
    return strong;
  }

  @VisibleForTesting
  // the chunks in the weakChunkIdMap may already be released so we shouldn't relay
  // on this counting for strong correctness. This method is used only in testing.
  int size() {
    return this.weakChunkIdMap.size()+this.strongChunkIdMap.size();
  }

  @VisibleForTesting
  void clearChunkIds() {
    this.strongChunkIdMap.clear();
    this.weakChunkIdMap.clear();
  }

  /**
   * A pool of {@link Chunk} instances.
   *
   * MemStoreChunkPool caches a number of retired chunks for reusing, it could
   * decrease allocating bytes when writing, thereby optimizing the garbage
   * collection on JVM.
   */
  private class MemStoreChunkPool implements HeapMemoryTuneObserver {
    // May shrink/grow at runtime via onHeapMemoryTune
    private int maxCount;

    // A queue of reclaimed chunks
    private final BlockingQueue<Chunk> reclaimedChunks;
    private final float poolSizePercentage;

    /** Statistics thread schedule pool */
    private final ScheduledExecutorService scheduleThreadPool;
    /** Statistics thread */
    private static final int statThreadPeriod = 60 * 5;
    private final AtomicLong chunkCount = new AtomicLong();
    private final AtomicLong reusedChunkCount = new AtomicLong();

    MemStoreChunkPool(int maxCount, int initialCount, float poolSizePercentage) {
      this.maxCount = maxCount;
      this.poolSizePercentage = poolSizePercentage;
      this.reclaimedChunks = new LinkedBlockingQueue<>();
      // pre-allocate the initial set of pooled chunks
      for (int i = 0; i < initialCount; i++) {
        Chunk chunk = createChunk(true);
        chunk.init();
        reclaimedChunks.add(chunk);
      }
      chunkCount.set(initialCount);
      final String n = Thread.currentThread().getName();
      scheduleThreadPool = Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder()
          .setNameFormat(n + "-MemStoreChunkPool Statistics").setDaemon(true).build());
      this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(), statThreadPeriod,
          statThreadPeriod, TimeUnit.SECONDS);
    }

    /**
     * Poll a chunk from the pool, reset it if not null, else create a new chunk to return if we have
     * not yet created max allowed chunks count. When we have already created max allowed chunks and
     * no free chunks as of now, return null. It is the responsibility of the caller to make a chunk
     * then.
     * Note: Chunks returned by this pool must be put back to the pool after its use.
     * @return a chunk
     * @see #putbackChunks(Set)
     */
    Chunk getChunk() {
      Chunk chunk = reclaimedChunks.poll();
      if (chunk != null) {
        chunk.reset();
        reusedChunkCount.incrementAndGet();
      } else {
        // Make a chunk iff we have not yet created the maxCount chunks
        // CAS loop: several threads may race to claim the remaining slots below maxCount
        while (true) {
          long created = this.chunkCount.get();
          if (created < this.maxCount) {
            if (this.chunkCount.compareAndSet(created, created + 1)) {
              chunk = createChunk(true);
              break;
            }
          } else {
            break;
          }
        }
      }
      return chunk;
    }

    /**
     * Add the chunks to the pool, when the pool achieves the max size, it will skip the remaining
     * chunks
     * @param chunks
     */
    private void putbackChunks(Set<Integer> chunks) {
      int toAdd = Math.min(chunks.size(), this.maxCount - reclaimedChunks.size());
      Iterator<Integer> iterator = chunks.iterator();
      while (iterator.hasNext()) {
        Integer chunkId = iterator.next();
        // remove the chunks every time though they are from the pool or not
        Chunk chunk = ChunkCreator.this.removeChunk(chunkId);
        if (chunk != null) {
          if (chunk.isFromPool() && toAdd > 0) {
            reclaimedChunks.add(chunk);
          }
          toAdd--;
        }
      }
    }

    // Periodically logs pool usage statistics (created/reused counts, reuse ratio).
    private class StatisticsThread extends Thread {
      StatisticsThread() {
        super("MemStoreChunkPool.StatisticsThread");
        setDaemon(true);
      }

      @Override
      public void run() {
        logStats();
      }

      private void logStats() {
        if (!LOG.isDebugEnabled()) return;
        long created = chunkCount.get();
        long reused = reusedChunkCount.get();
        long total = created + reused;
        LOG.debug("Stats: current pool size=" + reclaimedChunks.size()
            + ",created chunk count=" + created
            + ",reused chunk count=" + reused
            + ",reuseRatio=" + (total == 0 ? "0" : StringUtils.formatPercent(
                (float) reused / (float) total, 2)));
      }
    }

    private int getMaxCount() {
      return this.maxCount;
    }

    @Override
    public void onHeapMemoryTune(long newMemstoreSize, long newBlockCacheSize) {
      // don't do any tuning in case of offheap memstore
      if (isOffheap()) {
        LOG.warn("Not tuning the chunk pool as it is offheap");
        return;
      }
      int newMaxCount = (int) (newMemstoreSize * poolSizePercentage / getChunkSize());
      if (newMaxCount != this.maxCount) {
        // We need an adjustment in the chunks numbers
        if (newMaxCount > this.maxCount) {
          // Max chunks getting increased. Just change the variable. Later calls to getChunk() would
          // create and add them to Q
          LOG.info("Max count for chunks increased from " + this.maxCount + " to " + newMaxCount);
          this.maxCount = newMaxCount;
        } else {
          // Max chunks getting decreased. We may need to clear off some of the pooled chunks now
          // itself. If the extra chunks are serving already, do not pool those when we get them back
          LOG.info("Max count for chunks decreased from " + this.maxCount + " to " + newMaxCount);
          this.maxCount = newMaxCount;
          if (this.reclaimedChunks.size() > newMaxCount) {
            synchronized (this) {
              while (this.reclaimedChunks.size() > newMaxCount) {
                this.reclaimedChunks.poll();
              }
            }
          }
        }
      }
    }
  }

  @VisibleForTesting
  static void clearDisableFlag() {
    chunkPoolDisabled = false;
  }

  // Builds the MemStoreChunkPool, or returns null when pooling is disabled or
  // poolSizePercentage <= 0. Validates the percentage configs.
  private MemStoreChunkPool initializePool(long globalMemStoreSize, float poolSizePercentage,
      float initialCountPercentage) {
    if (poolSizePercentage <= 0) {
      LOG.info("PoolSizePercentage is less than 0. So not using pool");
      return null;
    }
    if (chunkPoolDisabled) {
      return null;
    }
    if (poolSizePercentage > 1.0) {
      throw new IllegalArgumentException(
          MemStoreLAB.CHUNK_POOL_MAXSIZE_KEY + " must be between 0.0 and 1.0");
    }
    int maxCount = (int) (globalMemStoreSize * poolSizePercentage / getChunkSize());
    if (initialCountPercentage > 1.0 || initialCountPercentage < 0) {
      throw new IllegalArgumentException(
          MemStoreLAB.CHUNK_POOL_INITIALSIZE_KEY + " must be between 0.0 and 1.0");
    }
    int initialCount = (int) (initialCountPercentage * maxCount);
    LOG.info("Allocating MemStoreChunkPool with chunk size "
        + StringUtils.byteDesc(getChunkSize()) + ", max count " + maxCount
        + ", initial count " + initialCount);
    return new MemStoreChunkPool(maxCount, initialCount, poolSizePercentage);
  }

  @VisibleForTesting
  int getMaxCount() {
    if (pool != null) {
      return pool.getMaxCount();
    }
    return 0;
  }

  @VisibleForTesting
  int getPoolSize() {
    if (pool != null) {
      return pool.reclaimedChunks.size();
    }
    return 0;
  }

  /*
   * Only used in testing
   */
  @VisibleForTesting
  void clearChunksInPool() {
    if (pool != null) {
      pool.reclaimedChunks.clear();
    }
  }

  // Returns the chunks to the pool when one exists; otherwise just unmaps them.
  synchronized void putbackChunks(Set<Integer> chunks) {
    if (pool != null) {
      pool.putbackChunks(chunks);
    } else {
      this.removeChunks(chunks);
    }
  }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.tools.consistency;

import com.facebook.buck.log.thrift.rulekeys.Value;
import com.facebook.buck.tools.consistency.DifferState.DiffResult;
import com.facebook.buck.tools.consistency.DifferState.MaxDifferencesException;
import com.facebook.buck.tools.consistency.RuleKeyDiffPrinter.TargetScope;
import com.facebook.buck.tools.consistency.RuleKeyDiffPrinter.TargetScope.PropertyScope;
import com.facebook.buck.tools.consistency.RuleKeyFileParser.ParsedRuleKeyFile;
import com.facebook.buck.tools.consistency.RuleKeyFileParser.RuleKeyNode;
import com.google.common.collect.Sets;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

/** Traverses a graph of rule keys and prints out the differences between them. */
public class RuleKeyDiffer {

  private final RuleKeyDiffPrinter printer;

  /**
   * Signals that there was an issue while traversing one of the graphs. This generally means that
   * the graph is malformed
   */
  public class GraphTraversalException extends Exception {
    GraphTraversalException(String message, Object... formatObjects) {
      super(String.format(message, formatObjects));
    }

    GraphTraversalException(Throwable e, String message, Object... formatObjects) {
      super(String.format(message, formatObjects), e);
    }
  }

  public RuleKeyDiffer(RuleKeyDiffPrinter printer) {
    this.printer = printer;
  }

  /**
   * Prints the differences between two files' rule key graphs
   *
   * @param originalFile The parsed original file
   * @param newFile The new file to use
   * @throws MaxDifferencesException Thrown if the maximum number of differences has been found
   * @throws GraphTraversalException Thrown if the two files do not share the same root nodes, or
   *     if an unexpected error occurs while walking either graph
   * @return the printer's accumulated result indicating whether any changes were recorded
   */
  public DiffResult printDiff(ParsedRuleKeyFile originalFile, ParsedRuleKeyFile newFile)
      throws MaxDifferencesException, GraphTraversalException {
    // The two graphs are only comparable if they were built for the same set of targets.
    if (!originalFile.rootNodes.keySet().equals(newFile.rootNodes.keySet())) {
      String originalTargets =
          originalFile.rootNodes.keySet().stream()
              .map(RuleKeyFileParser.targetNameAndConf::toString)
              .collect(Collectors.joining(","));
      String newTargets =
          newFile.rootNodes.keySet().stream()
              .map(RuleKeyFileParser.targetNameAndConf::toString)
              .collect(Collectors.joining(","));
      throw new GraphTraversalException(
          "Root nodes in %s do not match root nodes in %s. %s vs %s",
          originalFile.filename, newFile.filename, originalTargets, newTargets);
    }
    // Each root gets its own visit id so the cycle/revisit guard below is scoped per root.
    int visitId = 1;
    for (Map.Entry<RuleKeyFileParser.targetNameAndConf, RuleKeyNode> entry :
        originalFile.rootNodes.entrySet()) {
      printDiff(
          originalFile, entry.getValue(), newFile, newFile.rootNodes.get(entry.getKey()), visitId);
      visitId++;
    }
    return printer.hasChanges();
  }

  /**
   * Diffs a pair of rule key nodes, printing removed/added/changed properties and recursing into
   * properties present in both keys.
   *
   * <p>Nodes whose keys are identical are skipped entirely; nodes already stamped with this
   * {@code visitId} are skipped to avoid re-walking shared subgraphs (and to terminate on cycles).
   */
  private void printDiff(
      ParsedRuleKeyFile originalFile,
      RuleKeyNode originalRuleKey,
      ParsedRuleKeyFile newFile,
      RuleKeyNode newRuleKey,
      int visitId)
      throws MaxDifferencesException, GraphTraversalException {
    // Identical hashes imply identical subgraphs: nothing to report.
    if (originalRuleKey.ruleKey.key.equals(newRuleKey.ruleKey.key)) {
      return;
    }
    // Revisit guard: both sides already seen during this root's traversal.
    if (originalRuleKey.lastVisitId == visitId && newRuleKey.lastVisitId == visitId) {
      return;
    }
    originalRuleKey.lastVisitId = visitId;
    newRuleKey.lastVisitId = visitId;

    // Partition the property names into removed / added / common.
    Set<String> originalRuleKeyProperties = originalRuleKey.ruleKey.values.keySet();
    Set<String> newRuleKeyProperties = newRuleKey.ruleKey.values.keySet();
    Set<String> onlyInOriginalKey =
        Sets.difference(originalRuleKeyProperties, newRuleKeyProperties);
    Set<String> onlyInNewKey = Sets.difference(newRuleKeyProperties, originalRuleKeyProperties);
    Set<String> inBoth = Sets.intersection(originalRuleKeyProperties, newRuleKeyProperties);

    String target = RuleKeyDiffPrinter.getRuleKeyName(originalFile, originalRuleKey.ruleKey);
    try (TargetScope targetScope =
        printer.addTarget(target, originalRuleKey.ruleKey.key, newRuleKey.ruleKey.key)) {
      try (PropertyScope rootProperty = targetScope.addProperty("")) {
        for (String key : onlyInOriginalKey) {
          try (PropertyScope propertyScope = rootProperty.addNestedProperty(key)) {
            propertyScope.removed(originalFile, originalRuleKey.ruleKey.values.get(key));
          }
        }
        for (String key : onlyInNewKey) {
          try (PropertyScope propertyScope = rootProperty.addNestedProperty(key)) {
            propertyScope.added(newFile, newRuleKey.ruleKey.values.get(key));
          }
        }
        for (String key : inBoth) {
          try (PropertyScope propertyScope = rootProperty.addNestedProperty(key)) {
            printDiff(
                propertyScope,
                originalFile,
                originalRuleKey.ruleKey.values.get(key),
                newFile,
                newRuleKey.ruleKey.values.get(key));
          }
        }
      }
    } catch (MaxDifferencesException e) {
      // Deliberately rethrown unchanged so the caller can distinguish "hit the limit"
      // from a genuine traversal failure.
      throw e;
    } catch (Exception e) {
      throw new GraphTraversalException(e, "Unexpected error while traversing rule key graph");
    }
  }

  /**
   * Diffs a pair of property {@link Value}s, dispatching on the thrift union's set field.
   * Scalar-like fields are reported as a plain change; container/wrapper/hash fields recurse.
   */
  private void printDiff(
      PropertyScope propertiesSoFar,
      ParsedRuleKeyFile originalFile,
      Value originalValue,
      ParsedRuleKeyFile newFile,
      Value newValue)
      throws MaxDifferencesException, GraphTraversalException {
    if (originalValue.equals(newValue)) {
      return;
    }
    // Different union arms (e.g. string vs list) cannot be compared structurally.
    if (originalValue.getSetField() != newValue.getSetField()) {
      propertiesSoFar.changed(originalFile, originalValue, newFile, newValue);
      return;
    }

    try {
      switch (originalValue.getSetField()) {
        case STRING_VALUE:
        case NUMBER_VALUE:
        case BOOL_VALUE:
        case NULL_VALUE:
        case HASHED_PATH:
        case PATH:
        case SHA1_HASH:
        case PATTERN:
        case BYTE_ARRAY:
        case ARCHIVE_MEMBER_PATH:
        case BUILD_RULE_TYPE:
        case BUILD_TARGET:
        case TARGET_PATH:
          // Leaf values: already known unequal, just report the change.
          propertiesSoFar.changed(originalFile, originalValue, newFile, newValue);
          break;
        case CONTAINER_MAP:
          printContainerMapDiff(propertiesSoFar, originalFile, originalValue, newFile, newValue);
          break;
        case CONTAINER_LIST:
          printContainerListDiff(propertiesSoFar, originalFile, originalValue, newFile, newValue);
          break;
        case WRAPPER:
          printWrapperDiff(propertiesSoFar, originalFile, originalValue, newFile, newValue);
          break;
        case RULE_KEY_HASH:
          printRuleKeyHashDiff(propertiesSoFar, originalFile, originalValue, newFile, newValue);
          break;
        case KEY:
          // Intentionally ignored: KEY entries carry no diffable payload here.
          break;
      }
    } catch (MaxDifferencesException e) {
      throw e;
    } catch (Exception e) {
      throw new GraphTraversalException(
          e, "Unexpected error while traversing rule key's properties");
    }
  }

  /** Diffs two map-valued properties key by key (removed / added / recursively compared). */
  private void printContainerMapDiff(
      PropertyScope propertiesSoFar,
      ParsedRuleKeyFile originalFile,
      Value originalValue,
      ParsedRuleKeyFile newFile,
      Value newValue)
      throws Exception {
    Set<String> originalKeyProperties = originalValue.getContainerMap().keySet();
    Set<String> newKeyProperties = newValue.getContainerMap().keySet();
    Set<String> onlyInOrigKey = Sets.difference(originalKeyProperties, newKeyProperties);
    Set<String> onlyInNewKey = Sets.difference(newKeyProperties, originalKeyProperties);
    Set<String> inBoth = Sets.intersection(originalKeyProperties, newKeyProperties);

    for (String k : onlyInOrigKey) {
      try (PropertyScope propertyScope = propertiesSoFar.addNestedProperty(k)) {
        propertyScope.removed(originalFile, originalValue.getContainerMap().get(k));
      }
    }
    for (String k : onlyInNewKey) {
      try (PropertyScope propertyScope = propertiesSoFar.addNestedProperty(k)) {
        propertyScope.added(newFile, newValue.getContainerMap().get(k));
      }
    }
    for (String k : inBoth) {
      try (PropertyScope propertyScope = propertiesSoFar.addNestedProperty(k)) {
        printDiff(
            propertyScope,
            originalFile,
            originalValue.getContainerMap().get(k),
            newFile,
            newValue.getContainerMap().get(k));
      }
    }
  }

  /**
   * Diffs two list-valued properties positionally: common indices are compared recursively,
   * trailing elements present on only one side are reported as removed/added.
   */
  private void printContainerListDiff(
      PropertyScope propertiesSoFar,
      ParsedRuleKeyFile originalFile,
      Value originalValue,
      ParsedRuleKeyFile newFile,
      Value newValue)
      throws Exception {
    int commonListLength =
        Math.min(originalValue.getContainerList().size(), newValue.getContainerList().size());
    for (int i = 0; i < commonListLength; i++) {
      try (PropertyScope propertyScope = propertiesSoFar.addNestedProperty(String.valueOf(i))) {
        printDiff(
            propertyScope,
            originalFile,
            originalValue.getContainerList().get(i),
            newFile,
            newValue.getContainerList().get(i));
      }
    }
    for (int i = commonListLength; i < originalValue.getContainerList().size(); i++) {
      try (PropertyScope propertyScope = propertiesSoFar.addNestedProperty(String.valueOf(i))) {
        propertyScope.removed(originalFile, originalValue.getContainerList().get(i));
      }
    }
    for (int i = commonListLength; i < newValue.getContainerList().size(); i++) {
      try (PropertyScope propertyScope = propertiesSoFar.addNestedProperty(String.valueOf(i))) {
        propertyScope.added(newFile, newValue.getContainerList().get(i));
      }
    }
  }

  /**
   * Diffs two rule-key-hash properties by resolving the referenced rule keys in each file.
   *
   * <p>NOTE(review): {@code rules.get(...)} may return null for a dangling hash; the resulting
   * NPE would be wrapped by the caller's catch block into a generic GraphTraversalException —
   * consider a dedicated "missing rule key" message. Also note the recursion passes a fixed
   * visit id of 1, not the caller's current visit id — TODO confirm this is intentional.
   */
  private void printRuleKeyHashDiff(
      PropertyScope propertiesSoFar,
      ParsedRuleKeyFile originalFile,
      Value originalValue,
      ParsedRuleKeyFile newFile,
      Value newValue)
      throws MaxDifferencesException, GraphTraversalException {
    RuleKeyNode nextOriginalRuleKey = originalFile.rules.get(originalValue.getRuleKeyHash().sha1);
    RuleKeyNode nextNewRuleKey = newFile.rules.get(newValue.getRuleKeyHash().sha1);
    String originalName =
        RuleKeyDiffPrinter.getRuleKeyName(originalFile, nextOriginalRuleKey.ruleKey);
    String newName = RuleKeyDiffPrinter.getRuleKeyName(newFile, nextNewRuleKey.ruleKey);
    if (originalName.equals(newName)) {
      // Only recurse if we've got the same name, otherwise we can already tell
      // where the divergence happened
      propertiesSoFar.recordEmptyChange();
      printDiff(originalFile, nextOriginalRuleKey, newFile, nextNewRuleKey, 1);
    } else {
      // If the rule keys are significantly different, just print their names out
      propertiesSoFar.changed(originalFile, originalValue, newFile, newValue);
    }
  }

  /**
   * Diffs two wrapper values: a type mismatch is reported directly, otherwise the wrapped
   * values are compared under a nested property named after the wrapper type.
   */
  private void printWrapperDiff(
      PropertyScope propertiesSoFar,
      ParsedRuleKeyFile originalFile,
      Value originalValue,
      ParsedRuleKeyFile newFile,
      Value newValue)
      throws Exception {
    if (!originalValue.getWrapper().type.equals(newValue.getWrapper().type)) {
      propertiesSoFar.changed(originalFile, originalValue, newFile, newValue);
    } else {
      try (PropertyScope propertyScope =
          propertiesSoFar.addNestedProperty(originalValue.getWrapper().type)) {
        printDiff(
            propertyScope,
            originalFile,
            originalValue.getWrapper().value,
            newFile,
            newValue.getWrapper().value);
      }
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.api.operators;

import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
import org.apache.flink.runtime.state.KeyGroupRangeAssignment;
import org.apache.flink.runtime.state.KeyGroupsList;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.util.InstantiationUtil;

/**
 * The watermark callback service allows to register a {@link OnWatermarkCallback OnWatermarkCallback}
 * and multiple keys, for which the callback will be invoked every time a new {@link Watermark} is received
 * (after the registration of the key).
 *
 * <p><b>NOTE: </b> This service is only available to <b>keyed</b> operators.
 *
 * @param <K> The type of key returned by the {@code KeySelector}.
 */
@Internal
public class InternalWatermarkCallbackService<K> {

	//////////////				Information about the keyed state				//////////

	private final KeyGroupsList localKeyGroupRange;

	private final int totalKeyGroups;

	private final int localKeyGroupRangeStartIdx;

	private final KeyContext keyContext;

	/**
	 * An array of sets of keys keeping the registered keys split
	 * by the key-group they belong to. Each key-group has one set.
	 */
	private final Set<K>[] registeredKeysByKeyGroup;

	/**
	 * An array of sets of keys keeping the keys "to delete" split
	 * by the key-group they belong to. Each key-group has one set.
	 *
	 * <p>This is used to avoid potential concurrent modification
	 * exception when deleting keys from inside the
	 * {@link #invokeOnWatermarkCallback(Watermark)}.
	 */
	private final Set<K>[] deletedKeysByKeyGroup;

	/** A serializer for the registered keys. */
	private TypeSerializer<K> keySerializer;

	/**
	 * The {@link OnWatermarkCallback} to be invoked for each
	 * registered key upon reception of the watermark.
	 */
	private OnWatermarkCallback<K> callback;

	/**
	 * Creates the service for the given slice of the key-group space.
	 *
	 * @param totalKeyGroups the total number of key groups (max parallelism)
	 * @param localKeyGroupRange the key groups this task is responsible for
	 * @param keyContext used to set the current key before invoking the callback
	 */
	public InternalWatermarkCallbackService(int totalKeyGroups, KeyGroupsList localKeyGroupRange, KeyContext keyContext) {
		this.totalKeyGroups = totalKeyGroups;
		this.localKeyGroupRange = checkNotNull(localKeyGroupRange);
		this.keyContext = checkNotNull(keyContext);

		// find the starting index of the local key-group range
		int startIdx = Integer.MAX_VALUE;
		for (Integer keyGroupIdx : localKeyGroupRange) {
			startIdx = Math.min(keyGroupIdx, startIdx);
		}
		this.localKeyGroupRangeStartIdx = startIdx;

		// the list of ids of the key-groups this task is responsible for
		// (generic array creation: sets are created lazily, entries start as null)
		int localKeyGroups = this.localKeyGroupRange.getNumberOfKeyGroups();
		this.registeredKeysByKeyGroup = new Set[localKeyGroups];
		this.deletedKeysByKeyGroup = new Set[localKeyGroups];
	}

	/**
	 * Registers a {@link OnWatermarkCallback} with the current {@link InternalWatermarkCallbackService} service.
	 * Before this method is called and the callback is set, the service is unusable.
	 *
	 * <p>The callback can be set at most once; a second call fails.
	 *
	 * @param watermarkCallback The callback to be registered.
	 * @param keySerializer A serializer for the registered keys.
	 */
	public void setWatermarkCallback(OnWatermarkCallback<K> watermarkCallback, TypeSerializer<K> keySerializer) {
		if (callback == null) {
			this.keySerializer = keySerializer;
			this.callback = watermarkCallback;
		} else {
			throw new RuntimeException("The watermark callback has already been initialized.");
		}
	}

	/**
	 * Registers a key with the service. This will lead to the {@link OnWatermarkCallback}
	 * being invoked for this key upon reception of each subsequent watermark.
	 *
	 * @param key The key to be registered.
	 * @return {@code true} if the key was not already registered for its key-group.
	 */
	public boolean registerKeyForWatermarkCallback(K key) {
		return getRegisteredKeysForKeyGroup(key).add(key);
	}

	/**
	 * Unregisters the provided key from the service.
	 *
	 * <p>Deletion is lazy: the key is queued in the per-key-group "deleted" set and
	 * physically removed by {@link #cleanupRegisteredKeys()} on the next watermark
	 * invocation or snapshot.
	 *
	 * @param key The key to be unregistered.
	 * @return {@code true} if the key was not already queued for deletion.
	 */
	public boolean unregisterKeyFromWatermarkCallback(K key) {
		return getDeletedKeysForKeyGroup(key).add(key);
	}

	/**
	 * Invokes the registered callback for all the registered keys.
	 *
	 * @param watermark The watermark that triggered the invocation.
	 */
	public void invokeOnWatermarkCallback(Watermark watermark) throws IOException {
		// clean up any keys registered for deletion before calling the callback
		cleanupRegisteredKeys();

		if (callback != null) {
			for (Set<K> keySet : registeredKeysByKeyGroup) {
				if (keySet != null) {
					for (K key : keySet) {
						// the key context must reflect the key being processed
						keyContext.setCurrentKey(key);
						callback.onWatermark(key, watermark);
					}
				}
			}
		}
	}

	/**
	 * Does the actual deletion of any keys registered for deletion using the
	 * {@link #unregisterKeyFromWatermarkCallback(Object)}.
	 */
	private void cleanupRegisteredKeys() {
		for (int keyGroupIdx = 0; keyGroupIdx < registeredKeysByKeyGroup.length; keyGroupIdx++) {

			Set<K> deletedKeys = deletedKeysByKeyGroup[keyGroupIdx];
			if (deletedKeys != null) {

				Set<K> registeredKeys = registeredKeysByKeyGroup[keyGroupIdx];
				if (registeredKeys != null) {

					registeredKeys.removeAll(deletedKeys);
					if (registeredKeys.isEmpty()) {
						// drop empty sets so unused key-groups cost nothing
						registeredKeysByKeyGroup[keyGroupIdx] = null;
					}
				}
				deletedKeysByKeyGroup[keyGroupIdx] = null;
			}
		}
	}

	/**
	 * Retrieve the set of keys for the key-group this key belongs to.
	 *
	 * @param key the key whose key-group we are searching.
	 * @return the set of registered keys for the key-group.
	 */
	private Set<K> getRegisteredKeysForKeyGroup(K key) {
		checkArgument(localKeyGroupRange != null, "The operator has not been initialized.");
		int keyGroupIdx = KeyGroupRangeAssignment.assignToKeyGroup(key, totalKeyGroups);
		return getRegisteredKeysForKeyGroup(keyGroupIdx);
	}

	/**
	 * Retrieve the set of keys for the requested key-group.
	 *
	 * <p>Never returns {@code null}: an empty set is created on demand.
	 *
	 * @param keyGroupIdx the index of the key group we are interested in.
	 * @return the set of keys for the key-group.
	 */
	private Set<K> getRegisteredKeysForKeyGroup(int keyGroupIdx) {
		int localIdx = getIndexForKeyGroup(keyGroupIdx);
		Set<K> keys = registeredKeysByKeyGroup[localIdx];
		if (keys == null) {
			keys = new HashSet<>();
			registeredKeysByKeyGroup[localIdx] = keys;
		}
		return keys;
	}

	/** Retrieves (creating on demand) the deletion set for the key-group this key maps to. */
	private Set<K> getDeletedKeysForKeyGroup(K key) {
		checkArgument(localKeyGroupRange != null, "The operator has not been initialized.");
		int keyGroupIdx = KeyGroupRangeAssignment.assignToKeyGroup(key, totalKeyGroups);
		return getDeletedKeysForKeyGroup(keyGroupIdx);
	}

	/** Retrieves (creating on demand) the deletion set for the requested key-group. */
	private Set<K> getDeletedKeysForKeyGroup(int keyGroupIdx) {
		int localIdx = getIndexForKeyGroup(keyGroupIdx);
		Set<K> keys = deletedKeysByKeyGroup[localIdx];
		if (keys == null) {
			keys = new HashSet<>();
			deletedKeysByKeyGroup[localIdx] = keys;
		}
		return keys;
	}

	/**
	 * Computes the index of the requested key-group in the local datastructures.
	 *
	 * <p>Currently we assume that each task is assigned a continuous range of key-groups,
	 * e.g. 1,2,3,4, and not 1,3,5. We leverage this to keep the different states
	 * key-grouped in arrays instead of maps, where the offset for each key-group is
	 * the key-group id (an int) minus the id of the first key-group in the local range.
	 * This is for performance reasons.
	 */
	private int getIndexForKeyGroup(int keyGroupIdx) {
		checkArgument(localKeyGroupRange.contains(keyGroupIdx),
				"Key Group " + keyGroupIdx + " does not belong to the local range.");
		return keyGroupIdx - localKeyGroupRangeStartIdx;
	}

	//////////////				Fault Tolerance Methods				///////////////

	/**
	 * Writes the registered keys of one key-group to the snapshot stream.
	 *
	 * <p>Format: key count (int), then the serialized key serializer, then the keys.
	 * {@link #restoreKeysForKeyGroup} must read in exactly this order.
	 *
	 * <p>NOTE(review): {@link #getRegisteredKeysForKeyGroup(int)} never returns null
	 * (it creates an empty set on demand), so the {@code else} branch below appears
	 * unreachable — TODO confirm before relying on it.
	 */
	public void snapshotKeysForKeyGroup(DataOutputViewStreamWrapper stream, int keyGroupIdx) throws Exception {
		// we cleanup also here to avoid checkpointing the deletion set
		cleanupRegisteredKeys();

		Set<K> keySet = getRegisteredKeysForKeyGroup(keyGroupIdx);
		if (keySet != null) {
			stream.writeInt(keySet.size());

			InstantiationUtil.serializeObject(stream, keySerializer);

			for (K key : keySet) {
				keySerializer.serialize(key, stream);
			}
		} else {
			stream.writeInt(0);
		}
	}

	/**
	 * Restores the registered keys of one key-group from a snapshot stream written by
	 * {@link #snapshotKeysForKeyGroup}. Rejects a restored serializer that does not
	 * match an already-configured one.
	 */
	public void restoreKeysForKeyGroup(DataInputViewStreamWrapper stream, int keyGroupIdx,
									ClassLoader userCodeClassLoader) throws IOException, ClassNotFoundException {

		checkArgument(localKeyGroupRange.contains(keyGroupIdx),
				"Key Group " + keyGroupIdx + " does not belong to the local range.");

		int numKeys = stream.readInt();
		if (numKeys > 0) {

			TypeSerializer<K> tmpKeyDeserializer = InstantiationUtil.deserializeObject(stream, userCodeClassLoader);

			if (keySerializer != null && !keySerializer.equals(tmpKeyDeserializer)) {
				throw new IllegalArgumentException("Tried to restore keys " +
						"for the watermark callback service with mismatching serializers.");
			}

			this.keySerializer = tmpKeyDeserializer;

			Set<K> keys = getRegisteredKeysForKeyGroup(keyGroupIdx);
			for (int i = 0; i < numKeys; i++) {
				keys.add(keySerializer.deserialize(stream));
			}
		}
	}

	//////////////				Testing Methods				///////////////

	/** @return total number of keys currently registered across all local key-groups */
	@VisibleForTesting
	public int numKeysForWatermarkCallback() {
		int count = 0;
		for (Set<K> keyGroup: registeredKeysByKeyGroup) {
			if (keyGroup != null) {
				count += keyGroup.size();
			}
		}
		return count;
	}
}
package core;

import java.awt.Color;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.TreeMap;

import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JColorChooser;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSlider;
import javax.swing.colorchooser.AbstractColorChooserPanel;

/*
 * Listener for threshold options.
 *
 * Opens a modal dialog with sliders for the region-extraction parameters and a
 * color chooser, re-runs the region-prediction algorithms on "Apply", and
 * repaints the chromatogram panel while preserving the current scroll position.
 */
public class ThresholdActionListener implements ActionListener {

	private CentralParameterStorage centralPara = null;
	ArrayList<CentralPanelStorage> centralList = null;
	private CentralPanelStorage centralPanel = null;

	// Per-frame chromatogram data, refreshed from the active panel on each event.
	private byte[] qualScore;
	private byte[] qualScoreNuc;
	private short[] channel1Data;
	private short[] channel2Data;
	private short[] channel3Data;
	private short[] channel4Data;
	private short[] peakLocation;
	private Color[] colors;
	// Display toggles copied from the active panel.
	private boolean reversed;
	private boolean basePositionOn;
	private boolean baseCallOn;
	private boolean qualityScoreOn;
	private boolean toggleRegionOn;
	private String filename;
	private TreeMap<Integer, Integer> predict;
	private int verticalSegmentNumber;
	private int verticalUnit;

	/**
	 * @param centralPara global parameter storage (current frame index, main frame)
	 * @param centralPanel initially selected panel (replaced on each event by the active frame's panel)
	 * @param centralList all open panels, indexed by frame
	 */
	public ThresholdActionListener(CentralParameterStorage centralPara, CentralPanelStorage centralPanel,
			ArrayList<CentralPanelStorage> centralList) {
		this.centralPara = centralPara;
		this.centralPanel = centralPanel;
		this.centralList = centralList;
	}

	@Override
	public void actionPerformed(ActionEvent arg0) {
		//get current frame; do nothing if no file is open
		if(centralPara.getNumOpen() > 0){
			centralPanel = centralList.get(centralPara.getCurrentFrame());

			// Pull the active frame's data and display settings into the fields.
			verticalSegmentNumber = centralPanel.getVerticalSegmentNumber();
			qualScore = centralPanel.getQualScore();
			qualScoreNuc = centralPanel.getQualScoreNuc();
			channel1Data = centralPanel.getChannel1Data();
			channel2Data = centralPanel.getChannel2Data();
			channel3Data = centralPanel.getChannel3Data();
			channel4Data = centralPanel.getChannel4Data();
			peakLocation = centralPanel.getPeakLocation();
			colors = centralPanel.getColors();
			predict = centralPanel.getPredict();
			reversed = centralPanel.getReversed();
			basePositionOn = centralPanel.getBasePositionOn();
			baseCallOn = centralPanel.getBaseCallOn();
			qualityScoreOn = centralPanel.getQualityScoreOn();
			toggleRegionOn = centralPanel.getToggleRegionOn();
			verticalUnit = centralPanel.getVerticalUnit();

			//---------------------------------------------------
			// get all data from central parameter before writing
			// NOTE(review): these getters repeat the fetches above (only
			// getFileName() is new) — looks like leftover duplication; verify
			// the getters are side-effect-free before removing.
			qualScore = centralPanel.getQualScore();
			qualScoreNuc = centralPanel.getQualScoreNuc();
			channel1Data = centralPanel.getChannel1Data();
			channel2Data = centralPanel.getChannel2Data();
			channel3Data = centralPanel.getChannel3Data();
			channel4Data = centralPanel.getChannel4Data();
			peakLocation = centralPanel.getPeakLocation();
			filename = centralPanel.getFileName();

			ABIFframe parent = centralPara.getMainFrame();

			JPanel dialog = new JPanel();
			dialog.setLayout(new BoxLayout(dialog, BoxLayout.PAGE_AXIS));

			// Slider: threshold for entering a good region (range 20-40).
			int threshold = centralPanel.getThreshold();
			JSlider qualityThreshold = new JSlider(JSlider.HORIZONTAL, 20, 40, threshold);
			qualityThreshold.setFocusable(false);
			qualityThreshold.setMajorTickSpacing(10);
			qualityThreshold.setMinorTickSpacing(1);
			qualityThreshold.setPaintLabels(true);
			qualityThreshold.setPaintTicks(true);

			// Slider: threshold for staying in a good region (range 10-25).
			int threshold2 = centralPanel.getThreshold2();
			JSlider qualityThreshold2 = new JSlider(JSlider.HORIZONTAL, 10, 25, threshold2);
			qualityThreshold2.setFocusable(false);
			qualityThreshold2.setMajorTickSpacing(10);
			qualityThreshold2.setMinorTickSpacing(1);
			qualityThreshold2.setPaintLabels(true);
			qualityThreshold2.setPaintTicks(true);

			// Slider: peak clearance level (range 20-50).
			int clearance = centralPanel.getClearanceLevel();
			JSlider clearanceSlider = new JSlider(JSlider.HORIZONTAL, 20, 50, clearance);
			clearanceSlider.setFocusable(false);
			clearanceSlider.setMajorTickSpacing(10);
			clearanceSlider.setMinorTickSpacing(1);
			clearanceSlider.setPaintLabels(true);
			clearanceSlider.setPaintTicks(true);

			// Slider: peak-to-valley differential (range 20-50).
			int differential = centralPanel.getPeak2ValleyDiff();
			JSlider differentialSlider = new JSlider(JSlider.HORIZONTAL, 20, 50, differential);
			differentialSlider.setFocusable(false);
			differentialSlider.setMajorTickSpacing(10);
			differentialSlider.setMinorTickSpacing(1);
			differentialSlider.setPaintLabels(true);
			differentialSlider.setPaintTicks(true);

			// Slider: sliding window size (range 3-29).
			int window = centralPanel.getWindow();
			JSlider windowSlider = new JSlider(JSlider.HORIZONTAL, 3, 29, window);
			windowSlider.setFocusable(false);
			windowSlider.setMajorTickSpacing(10);
			windowSlider.setMinorTickSpacing(1);
			windowSlider.setPaintLabels(true);
			windowSlider.setPaintTicks(true);

			JButton color = new JButton();
			color.setPreferredSize( new Dimension(125, 50));
			color.setText("Select");
			color.setFocusPainted(false);

			// Color chooser seeded with the region color (colors[5]); strip every
			// built-in chooser panel except HSV.
			JColorChooser chooser = new JColorChooser(colors[5]);
			AbstractColorChooserPanel[] panels = chooser.getChooserPanels();
			for(AbstractColorChooserPanel panel: panels){
				System.out.println(panel.getDisplayName());
				if(!panel.getDisplayName().equals("HSV")){
					chooser.removeChooserPanel(panel);
				} else{
					//modify the chooser panel.
				}
			}

			//create custom preview
			MyPreviewPanel pview = new MyPreviewPanel(chooser);
			//check for color change and apply it to preview
			chooser.getSelectionModel().addChangeListener(e->{
				Color col = chooser.getColor();
				pview.setColor(col);
			});
			chooser.setPreviewPanel(pview);

			// Labels and components interleaved for JOptionPane's message area.
			Object[] inputFields = {"Threshold Sensitivity for entering good region", qualityThreshold,
					"Threshold for staying in good region", qualityThreshold2,
					"Peak Clearance", clearanceSlider,
					"Peak to valley differential", differentialSlider,
					"Sliding window size", windowSlider,
					"Choose color", chooser};

			int option = JOptionPane.showOptionDialog(parent, inputFields, "Modify Region Extraction Specs",
					JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE, null,
					new String[]{"Apply", "Cancel"}, "default");

			if (option == JOptionPane.OK_OPTION) {
				JScrollPane scrollPane = centralPanel.getDrawPanel();
				// get current scroll values to maintain them after repaint
				int scrollVertical = scrollPane.getVerticalScrollBar().getValue();
				centralPanel.setScrollVertical(scrollVertical);
				int scrollHorizontal = scrollPane.getHorizontalScrollBar().getValue();
				centralPanel.setScrollHorizontal(scrollHorizontal);

				// Re-run the analysis pipeline with the slider values chosen above.
				ArrayList<Integer> coList = AnalyzeABIFdata.coAlgorithm(channel1Data, channel2Data,
						channel3Data, channel4Data, peakLocation);
				short sil[] = AnalyzeABIFdata.getMaxValArray(channel1Data, channel2Data, channel3Data, channel4Data);
				short[] valley = AnalyzeABIFdata.valleyFinder(peakLocation, sil);
				float[] doList = AnalyzeABIFdata.doAlgorithm(peakLocation, valley, sil);
				TreeMap<Integer, Integer> predicter = AnalyzeABIFdata.predictiveAlgorithm(qualScore,
						qualityThreshold.getValue(), qualityThreshold2.getValue(), coList,
						clearanceSlider.getValue(), doList, differentialSlider.getValue(), windowSlider.getValue());
				//centralPanel.setPredict(predicter);

				// Apply the chosen region color.
				Color custom = chooser.getColor();
				colors[5] = custom;

				// Rebuild the drawing panel with the new prediction and color.
				DrawRawChannelDataPanel paintPanel = new DrawRawChannelDataPanel(channel1Data.length + 200,
						verticalSegmentNumber + 200, qualScore, qualScoreNuc, channel1Data, channel2Data,
						channel3Data, channel4Data, peakLocation, colors, verticalUnit, verticalSegmentNumber,
						100, 100, predicter,reversed,basePositionOn,baseCallOn, qualityScoreOn,toggleRegionOn);
				paintPanel.setPreferredSize(new Dimension(channel1Data.length + 200, verticalSegmentNumber + 200));

				scrollPane.remove(centralPanel.getPaintPanel());
				scrollPane.setSize(600, 400);
				scrollPane.getViewport().add(paintPanel);
				// maintain current position in scroll pane
				scrollPane.getVerticalScrollBar().setValue(scrollVertical);
				scrollPane.getHorizontalScrollBar().setValue(scrollHorizontal);
				//centralPanel.setDrawPanel(scrollPane);
				scrollPane.revalidate();
				scrollPane.repaint();

				// Persist the applied settings back onto the panel.
				centralPanel.setThreshold(qualityThreshold.getValue());
				centralPanel.setThreshold2(qualityThreshold2.getValue());
				centralPanel.setclearanceLevel(clearanceSlider.getValue());
				centralPanel.setPeak2ValleyDiff(differentialSlider.getValue());
				centralPanel.setWindow(windowSlider.getValue());
			}

			// NOTE(review): a large block of commented-out legacy dialog code (an
			// earlier slider + color-chooser implementation using ChromDialog and a
			// button-launched JColorChooser) was removed here; recover it from
			// version control if ever needed.
		}
	}
}
package assignment.test;

import static org.junit.Assert.*;

import java.io.File;
import java.net.URL;
import java.util.Random;

import org.junit.Before;
import org.junit.Test;

import assignment.BoardModel;
import assignment.MyColor;

/**
 * Unit tests for {@link BoardModel}: construction bounds, grid filling,
 * the pencil/brush/spill drawing tools, frame management, and the
 * save/load round trip.
 */
public class TestBoardModel {

    // Fresh board created before every test by setupBoardTesting().
    private BoardModel board;
    private Random randomizer = new Random();
    private File saveFile;
    private File loadFile;

    private static final int MAX_RGB_VALUE = 255;
    // Both files are resolved relative to this class on the classpath.
    private static final String FILE_PATH_LOAD = "loadtest.txt";
    private static final String FILE_PATH_SAVE = "savetest.txt";

    /** Creates the default 50x50 board used by most tests. */
    @Before
    public void setupBoardTesting() {
        board = new BoardModel(50, 50);
    }

    @Test
    public void testCreateBoardModelWithParameters() {
        BoardModel board = new BoardModel(50, 50);
        assertEquals("Board dimensions incorrect", 50, board.getGridDimensions());
    }

    /** Out-of-range dimensions are expected to fall back to the default of 30. */
    @Test
    public void testCreateBoardModelWithIllegalHighParameters() {
        BoardModel board = new BoardModel(150, 150);
        assertEquals("Board dimensions too high", 30, board.getGridDimensions());
    }

    @Test
    public void testCreateBoardModelWithIllegalLowParameters() {
        BoardModel board = new BoardModel(-1, -1);
        assertEquals("Board dimensions too low", 30, board.getGridDimensions());
    }

    /** A new grid is filled with the default colour character 'Z'. */
    @Test
    public void testNewGridIsFilledCorrectly() {
        char characterAtElement = 'Z';
        assertEquals("Character is not equal to expected", characterAtElement,
                board.getColor(5, 5));
    }

    @Test
    public void testNewGridWithChosenColor() {
        board.newGrid(50, 50, new MyColor(MAX_RGB_VALUE, 51, 0, '_'));
        assertEquals("Characters do not match on new colour", '_',
                board.getColor(5, 5));
    }

    @Test
    public void testPopulateFillColour() {
        board.populate(new MyColor(MAX_RGB_VALUE, 102, 0, '`'));
        assertEquals("Characters do not match after populate", '`',
                board.getColor(15, 15));
    }

    @Test
    public void testClearFillColour() {
        board.clear('d');
        assertEquals("Characters do not match after clear", 'd',
                board.getColor(15, 15));
    }

    /** Characters below the accepted range must be rejected, leaving 'Z'. */
    @Test
    public void testClearFillColourBelowBounds() {
        board.clear('Y');
        assertEquals("Characters do not match after clear", 'Z',
                board.getColor(15, 15));
    }

    /** Characters above the accepted range must be rejected, leaving 'Z'. */
    @Test
    public void testClearFillColourAboveBounds() {
        board.clear('~');
        assertEquals("Characters do not match after clear", 'Z',
                board.getColor(15, 15));
    }

    /** Tool 1 (pencil) paints exactly one cell. */
    @Test
    public void testSettingAnIndividualElementColorWithPencil() {
        board.setValue(20, 20, 't', 1);
        assertEquals("Characters do not match on specific colour setting", 't',
                board.getColor(20, 20));
    }

    /** Tool 2 (paintbrush) paints the target cell plus its four neighbours. */
    @Test
    public void testSettingAnIndividualElementColorWithPaintbrush() {
        board.setValue(20, 20, 'm', 2);
        assertEquals("Characters do not match - Brush center", 'm', board.getColor(20, 20));
        assertEquals("Characters do not match - Brush bottom", 'm', board.getColor(21, 20));
        assertEquals("Characters do not match - Brush right", 'm', board.getColor(20, 21));
        assertEquals("Characters do not match - Brush top", 'm', board.getColor(19, 20));
        assertEquals("Characters do not match - Brush left", 'm', board.getColor(20, 19));
    }

    /**
     * Spill should flood-fill the whole board, so three random cells are
     * sampled and must all carry the new colour.
     */
    @Test
    public void testSettingAnIndividualElementColorWithSpill() {
        BoardModel board = new BoardModel(50, 50);
        int loc1X = randomizer.nextInt(50) + 1;
        int loc1Y = randomizer.nextInt(50) + 1;
        int loc2X = randomizer.nextInt(50) + 1;
        int loc2Y = randomizer.nextInt(50) + 1;
        int loc3X = randomizer.nextInt(50) + 1;
        int loc3Y = randomizer.nextInt(50) + 1;
        // NOTE(review): tool code 2 is the paintbrush per the test above, which
        // only paints a plus-shape; a spill test presumably needs the spill tool
        // code instead — confirm against BoardModel.setValue and adjust.
        board.setValue(22, 22, 'l', 2);
        assertEquals("Characters do not match after Spill", 'l', board.getColor(loc1X, loc1Y));
        assertEquals("Characters do not match after Spill", 'l', board.getColor(loc2X, loc2Y));
        assertEquals("Characters do not match after Spill", 'l', board.getColor(loc3X, loc3Y));
    }

    @Test
    public void testNewProject() {
        board.newProject(15, new MyColor(MAX_RGB_VALUE, MAX_RGB_VALUE, MAX_RGB_VALUE, 'Z'));
        assertEquals("New Project size does not match", 15, board.getGridDimensions());
        assertEquals("New Project colours do not match", 'Z', board.getColor(5, 5));
        assertEquals("Project Frames still holds more than the single new Frame", 1,
                board.getTotalFrames());
    }

    /**
     * Resolves the load-test fixture from the classpath.
     * NOTE(review): getResource returns null when the fixture is missing,
     * which would fail here with an NPE rather than a clear message.
     */
    @Before
    public void setupFileForLoadTesting() {
        URL url = getClass().getResource(FILE_PATH_LOAD);
        loadFile = new File(url.getPath());
    }

    @Test
    public void testOpeningAFile() {
        board.openFile(loadFile);
        char[][] testGridArray =
                new char[board.getGridDimensions() + 2][board.getGridDimensions() + 2];
        for (int i = 1; i <= board.getGridDimensions(); i++) {
            for (int j = 1; j <= board.getGridDimensions(); j++) {
                testGridArray[j][i] = board.getColor(j, i);
            }
        }
        // NOTE(review): this compares the board against a copy of itself, so it
        // can never fail — the expected grid should come from the fixture file,
        // not from the freshly loaded board. TODO: build the expected grid
        // independently of BoardModel.
        for (int i = 1; i <= board.getGridDimensions(); i++) {
            for (int j = 1; j <= board.getGridDimensions(); j++) {
                assertEquals("Grids do not match up - Loading unsuccessful",
                        testGridArray[j][i], board.getColor(j, i));
            }
        }
        assertEquals("Frame amount is not that expected from load", 14,
                board.getTotalFrames());
    }

    /** Resolves the save-test fixture from the classpath (same caveat as load). */
    @Before
    public void setupFileForSaveTesting() {
        URL url = getClass().getResource(FILE_PATH_SAVE);
        saveFile = new File(url.getPath());
    }

    @Test
    public void testSavingToFile() {
        BoardModel board = new BoardModel(10, 10);
        board.clear('r');
        board.setValue(2, 2, 'f', 1);
        board.saveFile(saveFile);
        board.openFile(saveFile);
        char[][] testGridArray =
                new char[board.getGridDimensions() + 2][board.getGridDimensions() + 2];
        for (int i = 1; i <= board.getGridDimensions(); i++) {
            for (int j = 1; j <= board.getGridDimensions(); j++) {
                testGridArray[j][i] = board.getColor(j, i);
            }
        }
        // NOTE(review): same self-comparison issue as testOpeningAFile — the
        // expected grid ('r' everywhere except 'f' at (2,2)) should be asserted
        // directly rather than copied back out of the board.
        for (int i = 1; i <= board.getGridDimensions(); i++) {
            for (int j = 1; j <= board.getGridDimensions(); j++) {
                assertEquals("Grids do not match up - Loading unsuccessful",
                        testGridArray[j][i], board.getColor(j, i));
            }
        }
        assertEquals("Frame amount is not that expected from load", 1,
                board.getTotalFrames());
    }

    @Test
    public void testNewFrame() {
        board.newFrame();
        char[][] testGrid = board.getGrid(2);
        assertEquals("Not as many Frames as expected", 2, board.getTotalFrames());
        assertEquals("New Frame is not as expected", 'Z', testGrid[4][4]);
    }

    /** copy() duplicates the current frame, including its colours. */
    @Test
    public void testCopyForNewFrame() {
        board.clear('h');
        board.copy();
        char[][] testGrid = board.getGrid(2);
        assertEquals("Not as many Frames as expected", 2, board.getTotalFrames());
        assertEquals("New Frame is not as expected", 'h', testGrid[4][4]);
    }

    @Test
    public void testDeleteFrame() {
        assertEquals("Did not start with correct Frames", 1, board.getTotalFrames());
        board.newFrame();
        board.deleteFrame();
        assertEquals("Did not correctly delete Frame", 1, board.getTotalFrames());
    }

    /** Frame navigation: previous/previous then next should land on frame 3. */
    @Test
    public void testNextAndPreviousFrame() {
        board.newFrame();
        board.newFrame();
        board.newFrame();
        assertEquals("Board did not start with correct amount of Frames", 4,
                board.getTotalFrames());
        board.previousFrame();
        board.previousFrame();
        assertEquals("Board did not change Frames correctly - Previous", 2,
                board.getFrameNo());
        board.nextFrame();
        assertEquals("Board did not change Frames correctly - Next", 3,
                board.getFrameNo());
    }
}
/**
 *
 */
package es.upm.fi.dia.oeg.morph.r2rml.rdb.mappingsgenerator.main;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Properties;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;

import es.upm.fi.dia.oeg.morph.r2rml.rdb.mappingsgenerator.control.R2RMLProcess;
import es.upm.fi.dia.oeg.morph.r2rml.rdb.mappingsgenerator.exception.R2RMLException;
import es.upm.fi.dia.oeg.morph.r2rml.rdb.mappingsgenerator.util.VerboseMode;

/**
 * Command-line entry point for the MIRROR R2RML mappings generator.
 * Loads a Java properties file (path given as the first program argument),
 * copies the configuration into an {@link R2RMLProcess}, and runs the
 * generation pipeline (optional schema creation, preprocessing, mapping
 * build, print, and temporary-schema drop).
 *
 * @author Luciano Frontino de Medeiros
 */
public class R2RMLMapper {

    /** Holds every generation parameter and performs the actual R2RML steps. */
    private R2RMLProcess p = new R2RMLProcess();

    private static final Logger log = Logger.getLogger(R2RMLMapper.class.getName());

    /**
     * Loads the properties file named by {@code args[0]} and runs the mapper.
     *
     * @param args {@code args[0]} must be the path to the properties file
     * @throws R2RMLException if the argument is missing, the file cannot be
     *         found, or it cannot be read
     */
    public static void main(String[] args) throws R2RMLException {
        // Fail with a domain exception instead of ArrayIndexOutOfBoundsException.
        if (args.length < 1) {
            throw new R2RMLException("Missing argument: path to the R2RML properties file");
        }
        Properties properties = new Properties();
        // try-with-resources: the stream was previously never closed (leak).
        try (FileInputStream arquivoDePropriedades = new FileInputStream(args[0])) {
            properties.load(arquivoDePropriedades);
        } catch (FileNotFoundException exc) {
            StringBuffer mensagem = new StringBuffer("R2RML file properties not found");
            mensagem.append("\nMotive: " + exc.getMessage());
            log.log(Level.SEVERE, exc.toString(), exc);
            throw new R2RMLException(mensagem.toString());
        } catch (IOException exc) {
            StringBuffer mensagem = new StringBuffer("I/O error in loading properties");
            mensagem.append("\nMotive: " + exc.getMessage());
            log.log(Level.SEVERE, exc.toString(), exc);
            throw new R2RMLException(mensagem.toString());
        }
        R2RMLMapper mapper = new R2RMLMapper();
        mapper.run(properties);
    }

    /**
     * Copies the configuration from {@code properties} into the process object,
     * applying defaults for every missing key, then runs the generation.
     *
     * @param properties the loaded configuration; see the individual
     *        {@code getProperty} keys below for the recognized entries
     */
    public void run(Properties properties) {
        VerboseMode vm = new VerboseMode();

        // Database driver: "driver" wins, with "database.type[0]" as fallback.
        String db = properties.getProperty("driver");
        if (db == null) {
            db = properties.getProperty("database.type[0]");
        }
        // Guard: the original dereferenced db unconditionally and threw an NPE
        // when neither key was present. An unrecognized/missing value leaves
        // p.driver at its default.
        if (db != null) {
            if (db.equalsIgnoreCase("mysql")) {
                p.driver = p.DB_MYSQL;
            } else if (db.equalsIgnoreCase("postgresql")) {
                p.driver = p.DB_POSTGRESQL;
            }
        }

        p.properties = properties;
        p.database = properties.getProperty("database");
        p.inputFile = properties.getProperty("inputfile");

        p.schema = properties.getProperty("schema");
        if (p.schema == null) {
            p.schema = properties.getProperty("database.name[0]");
        }
        p.outputFile = properties.getProperty("outputfile");
        if (p.outputFile == null) {
            p.outputFile = p.schema + "-mappings.ttl";
        }
        p.filelog = properties.getProperty("logfile");
        if (p.filelog == null) {
            p.filelog = p.schema + ".log";
        }
        p.prefix = properties.getProperty("prefix");
        if (p.prefix == null) {
            p.prefix = "ex";
        }
        p.IRI = properties.getProperty("IRI");
        if (p.IRI == null) {
            p.IRI = "http://example.com";
        }
        p.owner = properties.getProperty("owner");
        if (p.owner == null) {
            p.owner = "TEST";
        }
        p.qualifier = properties.getProperty("qualifier");
        if (p.qualifier == null) {
            p.qualifier = "R2RML";
        }

        // Boolean.valueOf(null) is false, so missing keys default to false.
        p.comments = Boolean.valueOf(properties.getProperty("comments"));
        p.externalSchema = Boolean.valueOf(properties.getProperty("externalschema"));
        p.SQLInsert = Boolean.valueOf(properties.getProperty("sqlinsert"));
        p.prefixedTables = Boolean.valueOf(properties.getProperty("prefixedtables"));

        // Target dialect: only "virtuoso" selects Virtuoso-compatible output;
        // every other value (including null and "w3c") means plain W3C.
        String compatible = properties.getProperty("compatible");
        if (compatible != null && compatible.equalsIgnoreCase("virtuoso")) {
            p.compatible = p.COMPATIBLE_VIRTUOSO;
        } else {
            p.compatible = p.COMPATIBLE_W3C;
        }

        // Verbose mode: 0 = commands only, 1 = + text, 2 = + SQL.
        String verbose = properties.getProperty("verbose");
        if (verbose == null) {
            verbose = "0";
        }
        try {
            p.verbose = Integer.valueOf(verbose);
        } catch (NumberFormatException e) {
            // Malformed value: default to the chatty-but-safe level 1.
            p.verbose = 1;
        }
        if (p.verbose == 0) {
            vm.verbose_mode = (byte) (vm.VERBOSE_COMMAND);
        }
        if (p.verbose == 1) {
            vm.verbose_mode = (byte) (vm.VERBOSE_COMMAND + vm.VERBOSE_TEXT);
        }
        if (p.verbose == 2) {
            vm.verbose_mode = (byte) (vm.VERBOSE_COMMAND + vm.VERBOSE_TEXT + vm.VERBOSE_SQL);
        }

        // Optional parameters.
        String templateSeparator = properties.getProperty("templateseparator");
        if (templateSeparator == null) {
            templateSeparator = ".";
        }
        p.templateSeparator = templateSeparator;

        String joinString = properties.getProperty("joinstring");
        if (joinString == null) {
            joinString = "#ref-";
        }
        p.joinString = joinString;

        if (properties.getProperty("saturation") != null) {
            p.saturation = Boolean.valueOf(properties.getProperty("saturation"));
        }
        if (properties.getProperty("triplesmapmode") != null) {
            p.triplesMapMode = Byte.valueOf(properties.getProperty("triplesmapmode"));
        } else {
            // Default is all (data + onto).
            p.triplesMapMode = 3;
        }
        if (properties.getProperty("showviews") != null) {
            p.showViews = Boolean.valueOf(properties.getProperty("showviews"));
        } else {
            p.showViews = false;
        }
        if (properties.getProperty("firstcharcase") != null) {
            p.firstCharCase = Byte.valueOf(properties.getProperty("firstcharcase"));
        } else {
            p.firstCharCase = 0;
        }

        int len;
        // Read the external schema file into memory, if configured.
        try {
            if (p.verbose >= 1) {
                System.out.println("MIRROR: MappIng from Relational to Rdf generatOR");
                System.out.println("v.0.4 beta");
                System.out.println("---------------------------------------------------------");
                System.out.println("");
            }
            if (p.externalSchema) {
                byte[] encoded = Files.readAllBytes(Paths.get(p.inputFile));
                p.schemaContent = new String(encoded, Charset.defaultCharset());
                len = p.schemaContent.length();
                if (p.verbose >= 1) {
                    System.out.println("Processing schema: " + p.schema + " (" + len + " bytes)");
                }
                if (p.verbose >= 2) {
                    System.out.println(p.schemaContent);
                }
            }
        } catch (IOException e) {
            // FileNotFoundException is an IOException; both were handled identically.
            log.log(Level.SEVERE, e.toString(), e);
            e.printStackTrace();
        }

        // Run the generation pipeline. With an external schema we create a
        // temporary DB schema first and drop it afterwards.
        try {
            if (p.externalSchema) {
                if (p.verbose >= 1) {
                    System.out.println("Starting generation...");
                }
                if (p.verbose >= 1) {
                    System.out.println("Creating schema " + p.schema + " in DB (" + db + ") ...");
                }
                p.R2RMLCreateSchema();
                if (p.verbose >= 1) {
                    System.out.println("Temporary schema created " + p.dbName + " in DB (" + db + ") ...");
                }
                if (p.verbose >= 1) {
                    System.out.println("Preprocessing: " + p.inputFile + "...");
                }
                p.R2RMLPreprocessing();
                if (p.verbose >= 1) {
                    System.out.println("Building R2RML mapping: " + p.outputFile + "...");
                }
                p.R2RMLBuild();
                p.R2RMLPrint();
                if (p.verbose >= 1) {
                    System.out.println("Dropping temporary schema " + p.dbName + "...");
                }
                p.R2RMLDropSchema();
                if (p.verbose >= 1) {
                    System.out.println("End of generation.");
                }
            } else {
                if (p.verbose >= 1) {
                    System.out.println("Starting generation...");
                    System.out.println("Using schema " + p.schema + " in DB (" + db + ") ...");
                    System.out.println("Preprocessing: " + p.inputFile + "...");
                }
                p.R2RMLPreprocessing();
                if (p.verbose >= 1) {
                    System.out.println("Building R2RML mapping: " + p.outputFile + "...");
                }
                p.R2RMLBuild();
                p.R2RMLPrint();
                if (p.verbose >= 1) {
                    System.out.println("End of generation.");
                }
            }
        } catch (R2RMLException e) {
            log.log(Level.SEVERE, e.toString(), e);
            e.printStackTrace();
        }
        System.out.println("Mappings generated.");
        System.out.println();
    }

    /**
     * Returns the path of the generated mappings file, or {@code null} if the
     * process object is unavailable.
     */
    public String getGeneratedMappingsFile() {
        if (this.p != null) {
            return this.p.outputFile;
        } else {
            return null;
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.plugin.raptor.legacy;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;
import io.airlift.bootstrap.LifeCycleManager;
import io.airlift.log.Logger;
import io.trino.plugin.raptor.legacy.metadata.ForMetadata;
import io.trino.plugin.raptor.legacy.metadata.MetadataDao;
import io.trino.spi.NodeManager;
import io.trino.spi.connector.Connector;
import io.trino.spi.connector.ConnectorAccessControl;
import io.trino.spi.connector.ConnectorMetadata;
import io.trino.spi.connector.ConnectorNodePartitioningProvider;
import io.trino.spi.connector.ConnectorPageSinkProvider;
import io.trino.spi.connector.ConnectorPageSourceProvider;
import io.trino.spi.connector.ConnectorSplitManager;
import io.trino.spi.connector.ConnectorTransactionHandle;
import io.trino.spi.connector.SystemTable;
import io.trino.spi.session.PropertyMetadata;
import io.trino.spi.transaction.IsolationLevel;
import org.skife.jdbi.v2.IDBI;

import javax.annotation.PostConstruct;
import javax.annotation.concurrent.GuardedBy;
import javax.inject.Inject;

import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ScheduledExecutorService;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Verify.verify;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.trino.plugin.raptor.legacy.util.DatabaseUtil.onDemandDao;
import static io.trino.spi.transaction.IsolationLevel.READ_COMMITTED;
import static io.trino.spi.transaction.IsolationLevel.checkConnectorSupports;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
import static java.util.concurrent.TimeUnit.SECONDS;

/**
 * Raptor {@link Connector} implementation. Tracks one {@link RaptorMetadata}
 * per open transaction and blocks table maintenance while a transaction has
 * pending deletes, unblocking it again when the last such transaction
 * finishes (retrying asynchronously if the unblock fails).
 */
public class RaptorConnector
        implements Connector
{
    private static final Logger log = Logger.get(RaptorConnector.class);

    private final LifeCycleManager lifeCycleManager;
    private final RaptorMetadataFactory metadataFactory;
    private final RaptorSplitManager splitManager;
    private final RaptorPageSourceProvider pageSourceProvider;
    private final RaptorPageSinkProvider pageSinkProvider;
    private final RaptorNodePartitioningProvider nodePartitioningProvider;
    private final List<PropertyMetadata<?>> sessionProperties;
    private final List<PropertyMetadata<?>> tableProperties;
    private final Set<SystemTable> systemTables;
    private final MetadataDao dao;
    private final ConnectorAccessControl accessControl;
    // True when this node is the coordinator; only the coordinator resets
    // maintenance state on startup.
    private final boolean coordinator;

    // Live transactions and their metadata, keyed by transaction handle.
    private final ConcurrentMap<ConnectorTransactionHandle, RaptorMetadata> transactions = new ConcurrentHashMap<>();

    // Retries failed maintenance unblocks; shut down in shutdown().
    private final ScheduledExecutorService unblockMaintenanceExecutor =
            newSingleThreadScheduledExecutor(daemonThreadsNamed("raptor-unblock-maintenance"));

    // Table ID -> transactions that have begun a delete on that table.
    // Maintenance stays blocked while any entry exists for the table.
    @GuardedBy("this")
    private final SetMultimap<Long, UUID> deletions = HashMultimap.create();

    @Inject
    public RaptorConnector(
            LifeCycleManager lifeCycleManager,
            NodeManager nodeManager,
            RaptorMetadataFactory metadataFactory,
            RaptorSplitManager splitManager,
            RaptorPageSourceProvider pageSourceProvider,
            RaptorPageSinkProvider pageSinkProvider,
            RaptorNodePartitioningProvider nodePartitioningProvider,
            RaptorSessionProperties sessionProperties,
            RaptorTableProperties tableProperties,
            Set<SystemTable> systemTables,
            ConnectorAccessControl accessControl,
            @ForMetadata IDBI dbi)
    {
        this.lifeCycleManager = requireNonNull(lifeCycleManager, "lifeCycleManager is null");
        this.metadataFactory = requireNonNull(metadataFactory, "metadataFactory is null");
        this.splitManager = requireNonNull(splitManager, "splitManager is null");
        this.pageSourceProvider = requireNonNull(pageSourceProvider, "pageSourceProvider is null");
        this.pageSinkProvider = requireNonNull(pageSinkProvider, "pageSinkProvider is null");
        this.nodePartitioningProvider = requireNonNull(nodePartitioningProvider, "nodePartitioningProvider is null");
        this.sessionProperties = requireNonNull(sessionProperties, "sessionProperties is null").getSessionProperties();
        this.tableProperties = requireNonNull(tableProperties, "tableProperties is null").getTableProperties();
        this.systemTables = requireNonNull(systemTables, "systemTables is null");
        this.accessControl = requireNonNull(accessControl, "accessControl is null");
        this.dao = onDemandDao(dbi, MetadataDao.class);
        this.coordinator = nodeManager.getCurrentNode().isCoordinator();
    }

    /**
     * Clears any maintenance blocks left over from a previous run; deletes that
     * held them cannot have survived the restart.
     */
    @PostConstruct
    public void start()
    {
        if (coordinator) {
            dao.unblockAllMaintenance();
        }
    }

    @Override
    public ConnectorTransactionHandle beginTransaction(IsolationLevel isolationLevel, boolean readOnly)
    {
        checkConnectorSupports(READ_COMMITTED, isolationLevel);
        RaptorTransactionHandle transaction = new RaptorTransactionHandle();
        // The metadata's delete callback ties each delete to this transaction
        // so we can unblock maintenance when the transaction ends.
        transactions.put(transaction, metadataFactory.create(tableId -> beginDelete(tableId, transaction.getUuid())));
        return transaction;
    }

    @Override
    public void commit(ConnectorTransactionHandle transaction)
    {
        checkArgument(transactions.remove(transaction) != null, "no such transaction: %s", transaction);
        finishDelete(((RaptorTransactionHandle) transaction).getUuid());
    }

    @Override
    public void rollback(ConnectorTransactionHandle transaction)
    {
        RaptorMetadata metadata = transactions.remove(transaction);
        checkArgument(metadata != null, "no such transaction: %s", transaction);
        finishDelete(((RaptorTransactionHandle) transaction).getUuid());
        metadata.rollback();
    }

    @Override
    public ConnectorPageSourceProvider getPageSourceProvider()
    {
        return pageSourceProvider;
    }

    @Override
    public ConnectorPageSinkProvider getPageSinkProvider()
    {
        return pageSinkProvider;
    }

    @Override
    public ConnectorMetadata getMetadata(ConnectorTransactionHandle transaction)
    {
        RaptorMetadata metadata = transactions.get(transaction);
        checkArgument(metadata != null, "no such transaction: %s", transaction);
        return metadata;
    }

    @Override
    public ConnectorSplitManager getSplitManager()
    {
        return splitManager;
    }

    @Override
    public ConnectorNodePartitioningProvider getNodePartitioningProvider()
    {
        return nodePartitioningProvider;
    }

    @Override
    public List<PropertyMetadata<?>> getSessionProperties()
    {
        return sessionProperties;
    }

    @Override
    public List<PropertyMetadata<?>> getTableProperties()
    {
        return tableProperties;
    }

    @Override
    public Set<SystemTable> getSystemTables()
    {
        return systemTables;
    }

    @Override
    public ConnectorAccessControl getAccessControl()
    {
        return accessControl;
    }

    @Override
    public final void shutdown()
    {
        try {
            lifeCycleManager.stop();
        }
        finally {
            // Fix: the retry scheduler was previously never shut down, leaking
            // its daemon thread (and any pending retries) past connector shutdown.
            unblockMaintenanceExecutor.shutdownNow();
        }
    }

    /** Blocks maintenance for the table and records the owning transaction. */
    private synchronized void beginDelete(long tableId, UUID transactionId)
    {
        dao.blockMaintenance(tableId);
        verify(deletions.put(tableId, transactionId));
    }

    /**
     * Removes this transaction's delete record (at most one per transaction)
     * and unblocks maintenance for the table if no other transaction is still
     * deleting from it.
     */
    private synchronized void finishDelete(UUID transactionId)
    {
        deletions.entries().stream()
                .filter(entry -> entry.getValue().equals(transactionId))
                .findFirst()
                .ifPresent(entry -> {
                    long tableId = entry.getKey();
                    deletions.remove(tableId, transactionId);
                    if (!deletions.containsKey(tableId)) {
                        unblockMaintenance(tableId);
                    }
                });
    }

    /** Unblocks maintenance, retrying in 2s on any failure (e.g. DB hiccup). */
    private void unblockMaintenance(long tableId)
    {
        try {
            dao.unblockMaintenance(tableId);
        }
        catch (Throwable t) {
            log.warn(t, "Failed to unblock maintenance for table ID %s, will retry", tableId);
            unblockMaintenanceExecutor.schedule(() -> unblockMaintenance(tableId), 2, SECONDS);
        }
    }
}
package com.graphhopper.http.resources; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.graphhopper.config.CHProfile; import com.graphhopper.config.Profile; import com.graphhopper.http.GraphHopperApplication; import com.graphhopper.http.GraphHopperServerConfiguration; import com.graphhopper.http.util.GraphHopperServerTestConfiguration; import com.graphhopper.routing.weighting.custom.CustomProfile; import com.graphhopper.util.CustomModel; import com.graphhopper.util.Helper; import io.dropwizard.testing.junit5.DropwizardAppExtension; import io.dropwizard.testing.junit5.DropwizardExtensionsSupport; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; import javax.ws.rs.client.Entity; import javax.ws.rs.core.Response; import java.io.File; import java.io.IOException; import java.lang.reflect.Array; import java.util.Arrays; import java.util.Collections; import static com.graphhopper.http.util.TestUtils.clientTarget; import static com.graphhopper.json.Statement.If; import static com.graphhopper.json.Statement.Op.LIMIT; import static com.graphhopper.json.Statement.Op.MULTIPLY; import static org.junit.jupiter.api.Assertions.*; @ExtendWith(DropwizardExtensionsSupport.class) public class RouteResourceCustomModelTest { private static final String DIR = "./target/north-bayreuth-gh/"; private static final DropwizardAppExtension<GraphHopperServerConfiguration> app = new DropwizardAppExtension<>(GraphHopperApplication.class, createConfig()); private static GraphHopperServerConfiguration createConfig() { GraphHopperServerConfiguration config = new GraphHopperServerTestConfiguration(); config.getGraphHopperConfiguration(). 
putObject("graph.flag_encoders", "bike,car,foot"). putObject("prepare.min_network_size", 200). putObject("datareader.file", "../core/files/north-bayreuth.osm.gz"). putObject("graph.location", DIR). putObject("graph.encoded_values", "max_height,max_weight,max_width,hazmat,toll,surface,track_type"). putObject("custom_model_folder", "./src/test/resources/com/graphhopper/http/resources"). setProfiles(Arrays.asList( new CustomProfile("car").setCustomModel(new CustomModel()).setVehicle("car"), new CustomProfile("bike").setCustomModel(new CustomModel()).setVehicle("bike"), new CustomProfile("truck").setVehicle("car"). putHint("custom_model_file", "truck.yml"), new CustomProfile("cargo_bike").setVehicle("bike"). putHint("custom_model_file", "cargo_bike.yml"), new CustomProfile("json_bike").setVehicle("bike"). putHint("custom_model_file", "json_bike.json"), new Profile("foot_profile").setVehicle("foot").setWeighting("fastest"), new CustomProfile("car_no_unclassified").setCustomModel( new CustomModel(new CustomModel(). addToPriority(If("road_class == UNCLASSIFIED", LIMIT, 0)))). setVehicle("car"), new CustomProfile("custom_bike"). setCustomModel(new CustomModel(). addToSpeed(If("road_class == PRIMARY", LIMIT, 28)). addToPriority(If("max_width < 1.2", MULTIPLY, 0))). setVehicle("bike"), new CustomProfile("custom_bike2").setCustomModel( new CustomModel(new CustomModel(). addToPriority(If("road_class == TERTIARY || road_class == TRACK", MULTIPLY, 0)))). setVehicle("bike"))). 
setCHProfiles(Arrays.asList(new CHProfile("truck"), new CHProfile("car_no_unclassified"))); return config; } @BeforeAll @AfterAll public static void cleanUp() { Helper.removeDir(new File(DIR)); } @Test public void testBlockAreaNotAllowed() { String body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"car\", \"custom_model\": {}, \"block_area\": \"abc\", \"ch.disable\": true}"; JsonNode jsonNode = query(body, 400).readEntity(JsonNode.class); assertMessageStartsWith(jsonNode, "When using `custom_model` do not use `block_area`. Use `areas` in the custom model instead"); } @Test public void testCHPossibleWithoutCustomModel() { // the truck profile is a custom profile and we can use its CH preparation as long as we do not add a custom model String body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"truck\"}"; JsonNode json = query(body, 200).readEntity(JsonNode.class); JsonNode path = json.get("paths").get(0); assertEquals(path.get("distance").asDouble(), 1500, 10); assertEquals(path.get("time").asLong(), 151_000, 1_000); } @Test public void testDisableCHAndUseCustomModel() { // If we specify a custom model we get an error, because it does not work with CH. String body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"truck\", \"custom_model\": {" + "\"speed\": [{\"if\": \"road_class == PRIMARY\", \"multiply_by\": 0.9}]" + "}}"; JsonNode json = query(body, 400).readEntity(JsonNode.class); assertMessageStartsWith(json, "The 'custom_model' parameter is currently not supported for speed mode, you need to disable speed mode with `ch.disable=true`."); // ... 
// NOTE(review): this chunk begins mid-method — the enclosing test class declaration and the
// start of this first test are outside the visible range; only the tail is documented here.
// even when the custom model is just an empty object
body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"truck\", \"custom_model\": {}}";
json = query(body, 400).readEntity(JsonNode.class);
assertMessageStartsWith(json, "The 'custom_model' parameter is currently not supported for speed mode, you need to disable speed mode with `ch.disable=true`.");
// ... but when we disable CH it works of course
body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"truck\", \"custom_model\": {}, \"ch.disable\": true}";
json = query(body, 200).readEntity(JsonNode.class);
JsonNode path = json.get("paths").get(0);
assertEquals(path.get("distance").asDouble(), 1500, 10);
assertEquals(path.get("time").asLong(), 151_000, 1_000);
}

/** A custom_model request without the 'profile' parameter must be rejected with HTTP 400. */
@Test
public void testMissingProfile() {
    String body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"custom_model\": {}, \"ch.disable\": true}";
    JsonNode jsonNode = query(body, 400).readEntity(JsonNode.class);
    assertMessageStartsWith(jsonNode, "The 'profile' parameter is required when you use the `custom_model` parameter");
}

/** Requesting a profile that is not configured yields a 400 whose message lists the available profiles. */
@Test
public void testUnknownProfile() {
    String body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"unknown\", \"custom_model\": {}, \"ch.disable\": true}";
    JsonNode jsonNode = query(body, 400).readEntity(JsonNode.class);
    assertMessageStartsWith(jsonNode, "The requested profile 'unknown' does not exist.\nAvailable profiles: [car, bike, truck, cargo_bike, json_bike, foot_profile, car_no_unclassified, custom_bike, custom_bike2]");
}

/** custom_model may only be combined with profiles configured with weighting=custom. */
@Test
public void testCustomWeightingRequired() {
    String body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"foot_profile\", \"custom_model\": {}, \"ch.disable\": true}";
    JsonNode jsonNode = query(body, 400).readEntity(JsonNode.class);
    assertEquals("The requested profile 'foot_profile' cannot be used with `custom_model`, because it has weighting=fastest", jsonNode.get("message").asText());
}

/** The legacy 'vehicle' and 'weighting' parameters must not be mixed with the 'profile' parameter. */
@Test
public void testWeightingAndVehicleNotAllowed() {
    String body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"truck\"," +
            " \"custom_model\": {}, \"ch.disable\": true, \"vehicle\": \"truck\"}";
    JsonNode jsonNode = query(body, 400).readEntity(JsonNode.class);
    assertEquals("Since you are using the 'profile' parameter, do not use the 'vehicle' parameter. You used 'vehicle=truck'", jsonNode.get("message").asText());
    body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"truck\"," +
            " \"custom_model\": {}, \"ch.disable\": true, \"weighting\": \"custom\"}";
    jsonNode = query(body, 400).readEntity(JsonNode.class);
    assertEquals("Since you are using the 'profile' parameter, do not use the 'weighting' parameter. You used 'weighting=custom'", jsonNode.get("message").asText());
}

/**
 * Penalizing a polygon area via custom_model changes the chosen route; the expected distance
 * depends on how strongly the area is penalized (parameterized over 'priority').
 */
@ParameterizedTest
@CsvSource(value = {"0.05,3073", "0.5,1498"})
public void testAvoidArea(double priority, double expectedDistance) {
    String bodyFragment = "\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"car\", \"ch.disable\": true";
    // baseline route without any custom-model restriction
    JsonNode jsonNode = query("{" + bodyFragment + ", \"custom_model\": {}}", 200).readEntity(JsonNode.class);
    JsonNode path = jsonNode.get("paths").get(0);
    assertEquals(path.get("distance").asDouble(), 661, 10);
    // 'blocking' the area either leads to a route that still crosses it (but on a faster road) or to a road
    // going all the way around it depending on the priority, see #2021
    String body = "{" + bodyFragment + ", \"custom_model\": {" +
            "\"priority\":[{" + // a faster road (see #2021)? or maybe do both?
            " \"if\": \"in_custom1\"," +
            " \"multiply_by\": " + priority +
            "}], " +
            "\"areas\":{" +
            " \"custom1\":{" +
            " \"type\": \"Feature\"," +
            " \"geometry\": { \"type\": \"Polygon\", \"coordinates\": [[[11.5818,50.0126], [11.5818,50.0119], [11.5861,50.0119], [11.5861,50.0126], [11.5818,50.0126]]] }" +
            " }" +
            "}}" +
            "}";
    jsonNode = query(body, 200).readEntity(JsonNode.class);
    path = jsonNode.get("paths").get(0);
    assertEquals(expectedDistance, path.get("distance").asDouble(), 10);
}

/**
 * A cargo_bike custom model (loaded from a YAML resource) must lengthen the plain bike route,
 * and the query-side model must give the same result as the equivalent server-side profile.
 */
@Test
public void testCargoBike() throws IOException {
    // plain bike baseline
    String body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"bike\", \"custom_model\": {}, \"ch.disable\": true}";
    JsonNode jsonNode = query(body, 200).readEntity(JsonNode.class);
    JsonNode path = jsonNode.get("paths").get(0);
    assertEquals(path.get("distance").asDouble(), 661, 5);
    // same request with the cargo_bike model sent as a query-side custom_model
    String jsonFromYamlFile = yamlToJson(Helper.isToString(getClass().getResourceAsStream("cargo_bike.yml")));
    body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"bike\", \"custom_model\":" + jsonFromYamlFile + ", \"ch.disable\": true}";
    jsonNode = query(body, 200).readEntity(JsonNode.class);
    path = jsonNode.get("paths").get(0);
    assertEquals(path.get("distance").asDouble(), 1007, 5);
    // results should be identical be it via server-side profile or query profile:
    body = "{\"points\": [[11.58199, 50.0141], [11.5865, 50.0095]], \"profile\": \"cargo_bike\", \"custom_model\": {}, \"ch.disable\": true}";
    jsonNode = query(body, 200).readEntity(JsonNode.class);
    JsonNode path2 = jsonNode.get("paths").get(0);
    assertEquals(path.get("distance").asDouble(), path2.get("distance").asDouble(), 1);
}

/** The json_bike profile answers an empty-custom-model request without errors. */
@Test
public void testJsonBike() {
    String jsonQuery = "{" +
            " \"points\": [[11.58199, 50.0141], [11.5865, 50.0095]]," +
            " \"profile\": \"json_bike\"," +
            " \"custom_model\": {}," +
            " \"ch.disable\": true" +
            "}";
    final Response response = query(jsonQuery, 200);
    JsonNode json = response.readEntity(JsonNode.class);
    JsonNode infoJson = json.get("info");
    assertFalse(infoJson.has("errors"));
    JsonNode path = json.get("paths").get(0);
    assertEquals(path.get("distance").asDouble(), 660, 10);
}

/** custom_bike is expected to route identically (within tolerance) to json_bike. */
@Test
public void customBikeShouldBeLikeJsonBike() {
    String jsonQuery = "{" +
            " \"points\": [[11.58199, 50.0141], [11.5865, 50.0095]]," +
            " \"profile\": \"custom_bike\"," +
            " \"custom_model\": {}," +
            " \"ch.disable\": true" +
            "}";
    final Response response = query(jsonQuery, 200);
    JsonNode json = response.readEntity(JsonNode.class);
    JsonNode infoJson = json.get("info");
    assertFalse(infoJson.has("errors"));
    JsonNode path = json.get("paths").get(0);
    assertEquals(path.get("distance").asDouble(), 660, 10);
}

/** The same profile must give the same route with and without CH; other profiles stay independent. */
@Test
public void testSubnetworkRemovalPerProfile() {
    // none-CH
    String body = "{\"points\": [[11.556416,50.007739], [11.528864,50.021638]]," +
            " \"profile\": \"car_no_unclassified\"," +
            " \"ch.disable\": true" +
            "}";
    JsonNode jsonNode = query(body, 200).readEntity(JsonNode.class);
    JsonNode path = jsonNode.get("paths").get(0);
    assertEquals(8754, path.get("distance").asDouble(), 5);
    // CH
    body = "{\"points\": [[11.556416,50.007739], [11.528864,50.021638]]," +
            " \"profile\": \"car_no_unclassified\"" +
            "}";
    jsonNode = query(body, 200).readEntity(JsonNode.class);
    path = jsonNode.get("paths").get(0);
    assertEquals(8754, path.get("distance").asDouble(), 5);
    // different profile
    body = "{\"points\": [[11.494446, 50.027814], [11.511483, 49.987628]]," +
            " \"profile\": \"custom_bike2\"," +
            " \"ch.disable\": true" +
            "}";
    jsonNode = query(body, 200).readEntity(JsonNode.class);
    path = jsonNode.get("paths").get(0);
    assertEquals(5370, path.get("distance").asDouble(), 5);
}

/** Asserts the response's "message" field exists and starts with the expected error prefix. */
private void assertMessageStartsWith(JsonNode jsonNode, String message) {
    assertNotNull(jsonNode.get("message"));
    assertTrue(jsonNode.get("message").asText().startsWith(message),
            "Expected error message to start with:\n" + message + "\nbut got:\n" + jsonNode.get("message").asText());
}

/**
 * POSTs the given JSON body to /route, buffers the entity so it can be read again by the
 * caller, and asserts the expected HTTP status (including any error message in the failure text).
 */
Response query(String body, int code) {
    Response response = clientTarget(app, "/route").request().post(Entity.json(body));
    response.bufferEntity();
    JsonNode jsonNode = response.readEntity(JsonNode.class);
    assertEquals(code, response.getStatus(), jsonNode.has("message") ? jsonNode.get("message").toString() : "no error message");
    return response;
}

/** Converts a YAML document to its JSON representation via Jackson's YAML factory. */
private static String yamlToJson(String yaml) {
    try {
        ObjectMapper yamlReader = new ObjectMapper(new YAMLFactory());
        Object obj = yamlReader.readValue(yaml, Object.class);
        ObjectMapper jsonWriter = new ObjectMapper();
        return jsonWriter.writeValueAsString(obj);
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }
}
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.plugin.mongodb;

import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Shorts;
import com.google.common.primitives.SignedBytes;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.InsertManyOptions;
import io.airlift.slice.Slice;
import io.trino.spi.Page;
import io.trino.spi.StandardErrorCode;
import io.trino.spi.TrinoException;
import io.trino.spi.block.Block;
import io.trino.spi.connector.ConnectorPageSink;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.type.BigintType;
import io.trino.spi.type.BooleanType;
import io.trino.spi.type.CharType;
import io.trino.spi.type.DateType;
import io.trino.spi.type.DecimalType;
import io.trino.spi.type.DoubleType;
import io.trino.spi.type.IntegerType;
import io.trino.spi.type.NamedTypeSignature;
import io.trino.spi.type.RealType;
import io.trino.spi.type.SmallintType;
import io.trino.spi.type.TimeType;
import io.trino.spi.type.TimestampWithTimeZoneType;
import io.trino.spi.type.TinyintType;
import io.trino.spi.type.Type;
import io.trino.spi.type.TypeSignatureParameter;
import io.trino.spi.type.VarbinaryType;
import io.trino.spi.type.VarcharType;
import org.bson.BsonInvalidOperationException;
import org.bson.Document;
import org.bson.types.Binary;
import org.bson.types.ObjectId;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

import static io.trino.plugin.mongodb.ObjectIdType.OBJECT_ID;
import static io.trino.plugin.mongodb.TypeUtils.isArrayType;
import static io.trino.plugin.mongodb.TypeUtils.isJsonType;
import static io.trino.plugin.mongodb.TypeUtils.isMapType;
import static io.trino.plugin.mongodb.TypeUtils.isRowType;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.type.Chars.padSpaces;
import static io.trino.spi.type.DateTimeEncoding.unpackMillisUtc;
import static io.trino.spi.type.Decimals.readBigDecimal;
import static io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS;
import static io.trino.spi.type.Timestamps.MICROSECONDS_PER_MILLISECOND;
import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_MILLISECOND;
import static io.trino.spi.type.Timestamps.roundDiv;
import static java.lang.Float.intBitsToFloat;
import static java.lang.Math.floorDiv;
import static java.lang.Math.toIntExact;
import static java.util.Collections.unmodifiableList;
import static java.util.Collections.unmodifiableMap;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.CompletableFuture.completedFuture;

/**
 * {@link ConnectorPageSink} that writes Trino pages into a MongoDB collection.
 * Each page position becomes one BSON {@link Document}; the batch for a page is
 * written with an ordered {@code insertMany}.
 */
public class MongoPageSink
        implements ConnectorPageSink
{
    private final MongoSession mongoSession;
    private final SchemaTableName schemaTableName;
    private final List<MongoColumnHandle> columns;
    // Prefix that marks "implicit" (unnamed) ROW fields; see isImplicitRowType().
    private final String implicitPrefix;

    public MongoPageSink(
            MongoClientConfig config,
            MongoSession mongoSession,
            SchemaTableName schemaTableName,
            List<MongoColumnHandle> columns)
    {
        this.mongoSession = mongoSession;
        this.schemaTableName = schemaTableName;
        this.columns = columns;
        this.implicitPrefix = requireNonNull(config.getImplicitRowFieldPrefix(), "config.getImplicitRowFieldPrefix() is null");
    }

    /**
     * Converts every position of the page to a Document (one field per channel,
     * named after the column handle) and inserts the whole batch in order.
     * Always returns {@code NOT_BLOCKED}: the insert happens synchronously here.
     */
    @Override
    public CompletableFuture<?> appendPage(Page page)
    {
        MongoCollection<Document> collection = mongoSession.getCollection(schemaTableName);
        List<Document> batch = new ArrayList<>(page.getPositionCount());

        for (int position = 0; position < page.getPositionCount(); position++) {
            Document doc = new Document();

            for (int channel = 0; channel < page.getChannelCount(); channel++) {
                MongoColumnHandle column = columns.get(channel);
                doc.append(column.getName(), getObjectValue(columns.get(channel).getType(), page.getBlock(channel), position));
            }
            batch.add(doc);
        }

        collection.insertMany(batch, new InsertManyOptions().ordered(true));
        return NOT_BLOCKED;
    }

    /**
     * Maps a single Trino value to the Java object the MongoDB driver should store.
     * Recurses for ARRAY / MAP / ROW. Throws {@link TrinoException} with
     * {@code NOT_SUPPORTED} for types without a mapping.
     */
    private Object getObjectValue(Type type, Block block, int position)
    {
        if (block.isNull(position)) {
            if (type.equals(OBJECT_ID)) {
                // a NULL ObjectId column is materialized as a freshly generated id
                return new ObjectId();
            }
            return null;
        }

        if (type.equals(OBJECT_ID)) {
            return new ObjectId(block.getSlice(position, 0, block.getSliceLength(position)).getBytes());
        }
        if (type.equals(BooleanType.BOOLEAN)) {
            return type.getBoolean(block, position);
        }
        if (type.equals(BigintType.BIGINT)) {
            return type.getLong(block, position);
        }
        if (type.equals(IntegerType.INTEGER)) {
            return toIntExact(type.getLong(block, position));
        }
        if (type.equals(SmallintType.SMALLINT)) {
            return Shorts.checkedCast(type.getLong(block, position));
        }
        if (type.equals(TinyintType.TINYINT)) {
            return SignedBytes.checkedCast(type.getLong(block, position));
        }
        if (type.equals(RealType.REAL)) {
            // REAL is stored as the int bit pattern of the float
            return intBitsToFloat(toIntExact(type.getLong(block, position)));
        }
        if (type.equals(DoubleType.DOUBLE)) {
            return type.getDouble(block, position);
        }
        if (type instanceof VarcharType) {
            return type.getSlice(block, position).toStringUtf8();
        }
        if (type instanceof CharType) {
            // CHAR values are space-padded to their declared length before storing
            return padSpaces(type.getSlice(block, position), ((CharType) type)).toStringUtf8();
        }
        if (type.equals(VarbinaryType.VARBINARY)) {
            return new Binary(type.getSlice(block, position).getBytes());
        }
        if (type.equals(DateType.DATE)) {
            // DATE is days since epoch; Mongo stores it as a java.util.Date at UTC midnight
            long days = type.getLong(block, position);
            return new Date(TimeUnit.DAYS.toMillis(days));
        }
        if (type.equals(TimeType.TIME)) {
            // TIME carries picosecond precision; rounded to milliseconds for Mongo
            long picos = type.getLong(block, position);
            return new Date(roundDiv(picos, PICOSECONDS_PER_MILLISECOND));
        }
        if (type.equals(TIMESTAMP_MILLIS)) {
            // TIMESTAMP(3) carries microseconds; floorDiv truncates to milliseconds
            long millisUtc = floorDiv(type.getLong(block, position), MICROSECONDS_PER_MILLISECOND);
            return new Date(millisUtc);
        }
        if (type.equals(TimestampWithTimeZoneType.TIMESTAMP_TZ_MILLIS)) {
            // packed value = millis + zone id; only the UTC millis are kept
            long millisUtc = unpackMillisUtc(type.getLong(block, position));
            return new Date(millisUtc);
        }
        if (type instanceof DecimalType) {
            return readBigDecimal((DecimalType) type, block, position);
        }
        if (isJsonType(type)) {
            String json = type.getSlice(block, position).toStringUtf8();
            try {
                return Document.parse(json);
            }
            catch (BsonInvalidOperationException e) {
                // e.g. top-level JSON scalar/array that Document.parse cannot represent
                throw new TrinoException(NOT_SUPPORTED, "Can't convert json to MongoDB Document: " + json, e);
            }
        }
        if (isArrayType(type)) {
            Type elementType = type.getTypeParameters().get(0);

            Block arrayBlock = block.getObject(position, Block.class);

            List<Object> list = new ArrayList<>(arrayBlock.getPositionCount());
            for (int i = 0; i < arrayBlock.getPositionCount(); i++) {
                Object element = getObjectValue(elementType, arrayBlock, i);
                list.add(element);
            }

            return unmodifiableList(list);
        }
        if (isMapType(type)) {
            Type keyType = type.getTypeParameters().get(0);
            Type valueType = type.getTypeParameters().get(1);

            Block mapBlock = block.getObject(position, Block.class);

            // map type is converted into list of fixed keys document
            // entries are interleaved in the block: even index = key, odd index = value
            List<Object> values = new ArrayList<>(mapBlock.getPositionCount() / 2);
            for (int i = 0; i < mapBlock.getPositionCount(); i += 2) {
                Map<String, Object> mapValue = new HashMap<>();
                mapValue.put("key", getObjectValue(keyType, mapBlock, i));
                mapValue.put("value", getObjectValue(valueType, mapBlock, i + 1));
                values.add(mapValue);
            }

            return unmodifiableList(values);
        }
        if (isRowType(type)) {
            Block rowBlock = block.getObject(position, Block.class);

            List<Type> fieldTypes = type.getTypeParameters();
            if (fieldTypes.size() != rowBlock.getPositionCount()) {
                throw new TrinoException(StandardErrorCode.GENERIC_INTERNAL_ERROR, "Expected row value field count does not match type field count");
            }

            if (isImplicitRowType(type)) {
                // anonymous row: stored positionally as a list
                List<Object> rowValue = new ArrayList<>();
                for (int i = 0; i < rowBlock.getPositionCount(); i++) {
                    Object element = getObjectValue(fieldTypes.get(i), rowBlock, i);
                    rowValue.add(element);
                }
                return unmodifiableList(rowValue);
            }

            // named row: stored as a sub-document keyed by field name (fallback "fieldN")
            Map<String, Object> rowValue = new HashMap<>();
            for (int i = 0; i < rowBlock.getPositionCount(); i++) {
                rowValue.put(
                        type.getTypeSignature().getParameters().get(i).getNamedTypeSignature().getName().orElse("field" + i),
                        getObjectValue(fieldTypes.get(i), rowBlock, i));
            }
            return unmodifiableMap(rowValue);
        }

        throw new TrinoException(NOT_SUPPORTED, "unsupported type: " + type);
    }

    /** A row type is "implicit" when every named field starts with the configured prefix. */
    private boolean isImplicitRowType(Type type)
    {
        return type.getTypeSignature().getParameters()
                .stream()
                .map(TypeSignatureParameter::getNamedTypeSignature)
                .map(NamedTypeSignature::getName)
                .filter(Optional::isPresent)
                .map(Optional::get)
                .allMatch(name -> name.startsWith(implicitPrefix));
    }

    /** Nothing is buffered across appendPage calls, so there is nothing left to flush. */
    @Override
    public CompletableFuture<Collection<Slice>> finish()
    {
        return completedFuture(ImmutableList.of());
    }

    /** No rollback: rows already inserted by appendPage are not removed. */
    @Override
    public void abort() {}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hive.service.auth.ldap;

import java.io.IOException;
import java.util.Arrays;
import javax.naming.NamingException;
import javax.security.sasl.AuthenticationException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;
import org.junit.Before;
import org.mockito.Mock;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@link GroupFilterFactory}: which filter implementation the
 * factory returns for a given configuration, and whether the returned filter
 * accepts/rejects users based on a mocked {@link DirSearch}.
 * A filter signals rejection by throwing {@link AuthenticationException};
 * the "positive" tests therefore assert only that apply() returns normally.
 */
@RunWith(MockitoJUnitRunner.class)
public class TestGroupFilter {

  private FilterFactory factory;
  private HiveConf conf;

  // LDAP directory access is fully mocked; no real server is contacted.
  @Mock
  private DirSearch search;

  @Before
  public void setup() {
    conf = new HiveConf();
    conf.set("hive.root.logger", "DEBUG,console");
    factory = new GroupFilterFactory();
  }

  /** No group filter configured → the factory produces no filter at all. */
  @Test
  public void testGetInstanceWhenGroupFilterIsEmpty() {
    conf.unset(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER.varname);
    assertNull(factory.getInstance(conf));
  }

  /** Group filter alone → group-membership-key based filter. */
  @Test
  public void testGetInstanceOfGroupMembershipKeyFilter() {
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "G1");
    Filter instance = factory.getInstance(conf);
    assertNotNull(instance);
    assertThat(instance, instanceOf(GroupFilterFactory.GroupMembershipKeyFilter.class));
  }

  /** Group filter plus a user-membership key → user-membership-key based filter. */
  @Test
  public void testGetInstanceOfUserMembershipKeyFilter() {
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "G1");
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY, "memberof");
    Filter instance = factory.getInstance(conf);
    assertNotNull(instance);
    assertThat(instance, instanceOf(GroupFilterFactory.UserMembershipKeyFilter.class));
  }

  /**
   * Users given as plain id, full DN, or id@domain are all resolved to their DN
   * and accepted when one of their groups matches the configured "HiveUsers".
   */
  @Test
  public void testGroupMembershipKeyFilterApplyPositive()
      throws AuthenticationException, NamingException, IOException {
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "HiveUsers");

    when(search.findUserDn(eq("user1")))
        .thenReturn("cn=user1,ou=People,dc=example,dc=com");
    when(search.findUserDn(eq("cn=user2,dc=example,dc=com")))
        .thenReturn("cn=user2,ou=People,dc=example,dc=com");
    when(search.findUserDn(eq("user3@mydomain.com")))
        .thenReturn("cn=user3,ou=People,dc=example,dc=com");

    when(search.findGroupsForUser(eq("cn=user1,ou=People,dc=example,dc=com")))
        .thenReturn(Arrays.asList(
            "cn=SuperUsers,ou=Groups,dc=example,dc=com",
            "cn=Office1,ou=Groups,dc=example,dc=com",
            "cn=HiveUsers,ou=Groups,dc=example,dc=com",
            "cn=G1,ou=Groups,dc=example,dc=com"));
    when(search.findGroupsForUser(eq("cn=user2,ou=People,dc=example,dc=com")))
        .thenReturn(Arrays.asList(
            "cn=HiveUsers,ou=Groups,dc=example,dc=com"));
    when(search.findGroupsForUser(eq("cn=user3,ou=People,dc=example,dc=com")))
        .thenReturn(Arrays.asList(
            "cn=HiveUsers,ou=Groups,dc=example,dc=com",
            "cn=G1,ou=Groups,dc=example,dc=com",
            "cn=G2,ou=Groups,dc=example,dc=com"));

    Filter filter = factory.getInstance(conf);
    // no exception expected for any of the three users
    filter.apply(search, "user1");
    filter.apply(search, "cn=user2,dc=example,dc=com");
    filter.apply(search, "user3@mydomain.com");
  }

  /** Group name comparison is case-insensitive: "hiveusers,g1" matches HiveUsers/G1. */
  @Test
  public void testGroupMembershipKeyCaseInsensitiveFilterApplyPositive()
      throws AuthenticationException, NamingException, IOException {
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "hiveusers,g1");

    when(search.findUserDn(eq("user1")))
        .thenReturn("cn=user1,ou=People,dc=example,dc=com");
    when(search.findUserDn(eq("cn=user2,dc=example,dc=com")))
        .thenReturn("cn=user2,ou=People,dc=example,dc=com");
    when(search.findUserDn(eq("user3@mydomain.com")))
        .thenReturn("cn=user3,ou=People,dc=example,dc=com");

    when(search.findGroupsForUser(eq("cn=user1,ou=People,dc=example,dc=com")))
        .thenReturn(Arrays.asList(
            "cn=SuperUsers,ou=Groups,dc=example,dc=com",
            "cn=Office1,ou=Groups,dc=example,dc=com",
            "cn=HiveUsers,ou=Groups,dc=example,dc=com",
            "cn=G1,ou=Groups,dc=example,dc=com"));
    when(search.findGroupsForUser(eq("cn=user2,ou=People,dc=example,dc=com")))
        .thenReturn(Arrays.asList(
            "cn=HiveUsers,ou=Groups,dc=example,dc=com"));
    when(search.findGroupsForUser(eq("cn=user3,ou=People,dc=example,dc=com")))
        .thenReturn(Arrays.asList(
            "cn=G1,ou=Groups,dc=example,dc=com",
            "cn=G2,ou=Groups,dc=example,dc=com"));

    Filter filter = factory.getInstance(conf);
    // no exception expected for any of the three users
    filter.apply(search, "user1");
    filter.apply(search, "cn=user2,dc=example,dc=com");
    filter.apply(search, "user3@mydomain.com");
  }

  /** Case-insensitive matching is not substring matching: "containsg1" must NOT match "G1". */
  @Test(expected = AuthenticationException.class)
  public void testGroupMembershipKeyCaseInsensitiveFilterApplyNegative()
      throws AuthenticationException, NamingException, IOException {
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "hiveusers,containsg1");
    lenient().when(search.findGroupsForUser(eq("user1")))
        .thenReturn(Arrays.asList("SuperUsers", "Office1", "G1", "G2"));
    Filter filter = factory.getInstance(conf);
    filter.apply(search, "user1");
  }

  /** A user whose groups do not include the configured group is rejected. */
  @Test(expected = AuthenticationException.class)
  public void testGroupMembershipKeyFilterApplyNegative()
      throws AuthenticationException, NamingException, IOException {
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "HiveUsers");
    lenient().when(search.findGroupsForUser(eq("user1")))
        .thenReturn(Arrays.asList("SuperUsers", "Office1", "G1", "G2"));
    Filter filter = factory.getInstance(conf);
    filter.apply(search, "user1");
  }

  /** UserMembershipKeyFilter accepts a plain user id once one group DN matches. */
  @Test
  public void testUserMembershipKeyFilterApplyPositiveWithUserId()
      throws AuthenticationException, NamingException, IOException {
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY, "memberOf");
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "Group1,Group2");
    when(search.findGroupDn("Group1")).thenReturn("cn=Group1,dc=a,dc=b");
    when(search.findGroupDn("Group2")).thenReturn("cn=Group2,dc=a,dc=b");
    when(search.isUserMemberOfGroup("User1", "cn=Group2,dc=a,dc=b")).thenReturn(true);
    Filter filter = factory.getInstance(conf);
    filter.apply(search, "User1");
  }

  /** Same as above but the user is identified by a full DN. */
  @Test
  public void testUserMembershipKeyFilterApplyPositiveWithUserDn()
      throws AuthenticationException, NamingException, IOException {
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY, "memberOf");
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "Group1,Group2");
    when(search.findGroupDn("Group1")).thenReturn("cn=Group1,dc=a,dc=b");
    when(search.findGroupDn("Group2")).thenReturn("cn=Group2,dc=a,dc=b");
    when(search.isUserMemberOfGroup("cn=User1,dc=a,dc=b", "cn=Group2,dc=a,dc=b")).thenReturn(true);
    Filter filter = factory.getInstance(conf);
    filter.apply(search, "cn=User1,dc=a,dc=b");
  }

  /** No isUserMemberOfGroup stub returns true → membership check fails → rejection. */
  @Test(expected = AuthenticationException.class)
  public void testUserMembershipKeyFilterApplyNegative()
      throws AuthenticationException, NamingException, IOException {
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY, "memberOf");
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "Group1,Group2");
    when(search.findGroupDn("Group1")).thenReturn("cn=Group1,dc=a,dc=b");
    when(search.findGroupDn("Group2")).thenReturn("cn=Group2,dc=a,dc=b");
    Filter filter = factory.getInstance(conf);
    filter.apply(search, "User1");
  }
}
package nl.esciencecenter.neon.math;

import java.nio.FloatBuffer;

/* Copyright 2013 Netherlands eScience Center
 *
 * Licensed under the Apache License, Version 2.0 (the "License")
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * A 2-place float vector implementation.
 *
 * @author Maarten van Meersbergen <m.van.meersbergen@esciencecenter.nl>
 *
 */
public class Float2Vector implements FloatVector {
    /** The number of elements in this vector */
    private static final int SIZE = 2;

    private float x, y;

    /**
     * Creates a new vector, initialized to 0.
     */
    public Float2Vector() {
        this(0f, 0f);
    }

    /**
     * Creates a new vector by copying the given vector.
     *
     * @param v
     *            The vector to be copied.
     */
    public Float2Vector(Float2Vector v) {
        this(v.getX(), v.getY());
    }

    /**
     * Creates a new vector with the given values.
     *
     * @param x
     *            The value to be put in the first position.
     * @param y
     *            The value to be put in the second position.
     */
    public Float2Vector(float x, float y) {
        this.x = x;
        this.y = y;
    }

    /**
     * Gives the negated vector of this vector.
     *
     * @return The new negated vector.
     */
    public Float2Vector neg() {
        return new Float2Vector(-x, -y);
    }

    /**
     * Adds the given vector to the current vector, and returns the result.
     *
     * @param u
     *            The vector to be added to this vector.
     * @return The new vector.
     */
    public Float2Vector add(Float2Vector u) {
        return new Float2Vector(x + u.getX(), y + u.getY());
    }

    /**
     * Substracts the given vector from this vector.
     *
     * @param u
     *            The vector to be substracted from this one.
     * @return The new Vector, which is a result of the substraction.
     */
    public Float2Vector sub(Float2Vector u) {
        return new Float2Vector(x - u.getX(), y - u.getY());
    }

    /**
     * Multiplies the given scalar with this vector.
     *
     * @param n
     *            The scalar to be multiplied with this one.
     * @return The new Vector, which is a result of the multiplication.
     */
    public Float2Vector mul(Number n) {
        final float factor = n.floatValue();
        return new Float2Vector(x * factor, y * factor);
    }

    /**
     * Divides the current vector with the given scalar. Division by zero
     * yields the zero vector instead of Infinity/NaN components.
     *
     * @param n
     *            The scalar to be divided with.
     * @return The new Vector, which is a result of the division.
     */
    public Float2Vector div(Number n) {
        final float divisor = n.floatValue();
        if (divisor == 0f) {
            return new Float2Vector();
        }
        // multiply by the reciprocal once instead of dividing per component
        final float reciprocal = 1f / divisor;
        return new Float2Vector(x * reciprocal, y * reciprocal);
    }

    @Override
    public FloatBuffer asBuffer() {
        final FloatBuffer buffer = FloatBuffer.allocate(SIZE);
        buffer.put(x).put(y);
        buffer.rewind();
        return buffer;
    }

    @Override
    public int getSize() {
        return SIZE;
    }

    /**
     * Getter for x.
     *
     * @return the x.
     */
    public float getX() {
        return x;
    }

    /**
     * Setter for x.
     *
     * @param x
     *            the x to set
     */
    public void setX(float x) {
        this.x = x;
    }

    /**
     * Getter for y.
     *
     * @return the y.
     */
    public float getY() {
        return y;
    }

    /**
     * Setter for y.
     *
     * @param y
     *            the y to set
     */
    public void setY(float y) {
        this.y = y;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hash = 1;
        hash = prime * hash + Float.floatToIntBits(x);
        hash = prime * hash + Float.floatToIntBits(y);
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final Float2Vector that = (Float2Vector) obj;
        // bitwise comparison, consistent with hashCode (NaN == NaN, 0.0 != -0.0)
        return Float.floatToIntBits(x) == Float.floatToIntBits(that.x)
                && Float.floatToIntBits(y) == Float.floatToIntBits(that.y);
    }

    @Override
    public String toString() {
        return "Float2Vector [x=" + x + ", y=" + y + "]";
    }

    /**
     * Gives the components of this vector as a newly allocated array.
     *
     * @return a float[2] containing {x, y}.
     */
    public float[] asArray() {
        return new float[] { x, y };
    }
}
package net.giovannibotta.trees;

import static net.giovannibotta.trees.VEBTreeUtils.firstLargerPowerOf2;
import static net.giovannibotta.trees.VEBTreeUtils.lowerSquareRoot;
import static net.giovannibotta.trees.VEBTreeUtils.upperSquareRoot;

import java.util.AbstractSet;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.SortedSet;
import java.util.Stack;

// TODO: parameterize and turn into a Map<Integer, E>
/**
 * A {@link SortedSet} of non-negative ints backed by a van Emde Boas tree
 * (min/max kept outside the clusters, with a summary structure over cluster
 * indices — the layout presented in CLRS ch. 20). Clusters are allocated
 * lazily in a HashMap, so memory grows with the number of populated clusters
 * rather than with the universe size.
 * Elements must be in [0, capacity'); null is rejected.
 */
public class VEBTree extends AbstractSet<Integer> implements SortedSet<Integer> {

    /** Root of the recursive vEB structure. */
    private final VEB veb;
    /** Element count; maintained by add/remove/clear (the VEB nodes do not track it). */
    private int size = 0;

    /**
     * @param capacity upper bound (exclusive, after rounding up to a power of two)
     *                 for the values this set can hold; must be >= 1
     * @throws IllegalArgumentException if capacity < 1
     */
    public VEBTree(int capacity) {
        if (capacity < 1) {
            throw new IllegalArgumentException(
                    "VEB tree capacity must be at least 1 but " + capacity + " was provided");
        }
        veb = new VEB(firstLargerPowerOf2(capacity));
    }

    /**
     * One recursive vEB node. The int value {@code -1} is used throughout as the
     * "no element" sentinel (valid elements are always >= 0).
     */
    private static final class VEB {
        // u is always a power of 2
        final int u;
        final int lowSqrtU;
        int min, max;
        final VEB summary;
        // clusters are created lazily: an absent key means an empty cluster
        final HashMap<Integer, VEB> cluster;

        VEB(int u) {
            this.u = u;
            lowSqrtU = lowerSquareRoot(u);
            int upperSqrtU = upperSquareRoot(u);
            if (u > 2) {
                summary = new VEB(upperSqrtU);
                cluster = new HashMap<>();
            } else {
                // there are no summary/cluster for the base case
                summary = null;
                cluster = null;
            }
            min = -1;
            max = -1;
        }

        /** Empties this node and all descendants without rebuilding the structure. */
        void reset() {
            min = -1;
            max = -1;
            if (cluster != null) {
                cluster.clear();
            }
            if (summary != null) {
                summary.reset();
            }
        }

        int min() {
            return min;
        }

        /** min of cluster c, or -1 if that cluster was never materialized. */
        int clusterMin(int c) {
            VEB cVEB = cluster.get(c);
            if (cVEB == null) {
                return -1;
            }
            return cVEB.min();
        }

        int max() {
            return max;
        }

        /** max of cluster c, or -1 if that cluster was never materialized. */
        int clusterMax(int c) {
            VEB cVEB = cluster.get(c);
            if (cVEB == null) {
                return -1;
            }
            return cVEB.max();
        }

        /** Membership test; O(lg lg u) via one cluster recursion per level. */
        boolean member(final int x) {
            if (x == min || x == max) {
                return true;
            }
            if (u == 2) {
                return false;
            }
            final int h = high(x);
            final int l = low(x);
            return clusterMember(h, l);
        }

        boolean clusterMember(int c, int x) {
            VEB cVEB = cluster.get(c);
            if (cVEB == null) {
                return false;
            }
            return cVEB.member(x);
        }

        /** Smallest element strictly greater than x, or -1 if none. */
        int successor(final int x) {
            if (u == 2) {
                // base case: the only possible successor of 0 is 1
                if (x == 0 && max == 1) {
                    return 1;
                }
                return -1;
            }
            if (min != -1 && x < min) {
                return min;
            }
            final int h = high(x);
            final int l = low(x);
            // successor lives in x's own cluster iff something there is larger than l
            int maxLow = clusterMax(h);
            if (maxLow != -1 && l < maxLow) {
                final int offset = clusterSuccessor(h, l);
                return index(h, offset);
            }
            // otherwise: first element of the next non-empty cluster (via the summary)
            final int succCluster = summary.successor(h);
            if (succCluster == -1) {
                return -1;
            }
            final int offset = clusterMin(succCluster);
            return index(succCluster, offset);
        }

        int clusterSuccessor(int c, int x) {
            VEB cVEB = cluster.get(c);
            if (cVEB == null) {
                return -1;
            }
            return cVEB.successor(x);
        }

        /** Largest element strictly smaller than x, or -1 if none. */
        int predecessor(final int x) {
            if (u == 2) {
                if (x == 1 && min == 0) {
                    return 0;
                }
                return -1;
            }
            if (max != -1 && x > max) {
                return max;
            }
            final int h = high(x);
            final int l = low(x);
            final int minLow = clusterMin(h);
            if (minLow != -1 && l > minLow) {
                final int offset = clusterPredecessor(h, l);
                return index(h, offset);
            }
            int predCluster = summary.predecessor(h);
            if (predCluster == -1) {
                // min is stored outside the clusters, so it must be checked explicitly
                if (min != -1 && x > min) {
                    return min;
                }
                return -1;
            }
            final int offset = clusterMax(predCluster);
            return index(predCluster, offset);
        }

        int clusterPredecessor(int c, int x) {
            VEB cVEB = cluster.get(c);
            if (cVEB == null) {
                return -1;
            }
            return cVEB.predecessor(x);
        }

        /** O(1) insert into an empty node. */
        void emptyInsert(final int x) {
            min = x;
            max = x;
        }

        void clusterEmptyInsert(int c, int x) {
            VEB cVEB = cluster.get(c);
            if (cVEB == null) {
                cVEB = new VEB(lowSqrtU);
                cluster.put(c, cVEB);
            }
            cVEB.emptyInsert(x);
        }

        /** @return true iff x was not already present */
        boolean insert(int x) {
            if (min == x || max == x) {
                return false;
            }
            if (min == -1) {
                emptyInsert(x);
                return true;
            }
            boolean added = false;
            if (x < min) {
                // x becomes the new min; the old min is pushed down instead
                int tmp = min;
                min = x;
                x = tmp;
                added = true;
            }
            if (u > 2) {
                final int h = high(x);
                final int l = low(x);
                if (clusterMin(h) == -1) {
                    // first element of this cluster: record it in the summary,
                    // then do the O(1) empty insert
                    summary.insert(h);
                    clusterEmptyInsert(h, l);
                    added = true;
                } else {
                    added = clusterInsert(h, l);
                }
            }
            if (x > max) {
                max = x;
                added = true;
            }
            return added;
        }

        boolean clusterInsert(int c, int x) {
            VEB cVEB = cluster.get(c);
            if (cVEB == null) {
                cVEB = new VEB(lowSqrtU);
                cluster.put(c, cVEB);
            }
            return cVEB.insert(x);
        }

        /** Removes x; callers must ensure x is present. */
        void delete(int x) {
            if (min == max) {
                // last element of this node
                min = -1;
                max = -1;
            } else if (u == 2) {
                // two elements in the base case: keep the other one
                if (x == 0) {
                    min = 1;
                } else {
                    min = 0;
                }
                max = min;
            } else {
                if (x == min) {
                    // promote the overall second-smallest element to min,
                    // then delete it from its cluster instead
                    int firstCluster = summary.min();
                    x = index(firstCluster, clusterMin(firstCluster));
                    min = x;
                }
                clusterDelete(high(x), low(x));
                if (clusterMin(high(x)) == -1) {
                    // cluster became empty: drop it from the summary and fix max
                    summary.delete(high(x));
                    if (x == max) {
                        int summaryMax = summary.max();
                        if (summaryMax == -1) {
                            max = min;
                        } else {
                            max = index(summaryMax, clusterMax(summaryMax));
                        }
                    }
                } else if (x == max) {
                    max = index(high(x), clusterMax(high(x)));
                }
            }
        }

        void clusterDelete(int c, int x) {
            VEB cVEB = cluster.get(c);
            if (cVEB != null) {
                cVEB.delete(x);
            }
        }

        /** Cluster index of x. */
        int high(int x) {
            return x / lowSqrtU;
        }

        /** Position of x inside its cluster. */
        int low(int x) {
            return x % lowSqrtU;
        }

        /** Inverse of high/low: rebuilds the element from (cluster, offset). */
        int index(int x, int y) {
            return x * lowSqrtU + y;
        }
    }

    /**
     * NOTE(review): unfinished draft of an O(n) iterator (a tree walk instead of
     * repeated successor() calls). It is not referenced anywhere and its next()
     * is incomplete — do not use until finished.
     */
    private static final class OptVEBIterator implements Iterator<Integer> {
        final Stack<VEB> toProcess;
        int nextSubtree = -1;
        int add = 0;

        private OptVEBIterator(VEB root) {
            toProcess = new Stack<VEB>();
            if (root.min() != -1) {
                toProcess.push(root);
            }
        }

        @Override
        public boolean hasNext() {
            return !toProcess.empty();
        }

        @Override
        public Integer next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            VEB node = toProcess.peek();
            if (nextSubtree == -1) {
                return add + node.min();
            }
            // TODO: descend into the next non-empty subtree (never implemented)
            return null;
        }
    }

    // TODO: this is not optimal, it calls successor n times, so it runs in
    // O(n*lglgu) instead of O(n).
    /** Ascending-order iterator driven by repeated successor() calls. Does not support remove(). */
    private static final class VEBIterator implements Iterator<Integer> {
        final VEB root;
        int current = -1;
        int next = -1;

        private VEBIterator(VEB root) {
            this.root = root;
            next = root.min();
        }

        @Override
        public boolean hasNext() {
            return next != -1;
        }

        @Override
        public Integer next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            current = next;
            next = root.successor(current);
            return current;
        }
    }

    /**
     * Adds e to the set.
     *
     * @throws IllegalArgumentException if e is null, negative, or >= the capacity
     */
    @Override
    public boolean add(Integer e) {
        if (e == null) {
            throw new IllegalArgumentException("Can not insert null element into this set");
        }
        int elem = e;
        if (elem < 0) {
            throw new IllegalArgumentException("Can not insert negative value into this set, " + elem);
        }
        if (elem >= veb.u) {
            throw new IllegalArgumentException(
                    "Can not insert value larger than or equals to allowed capacity " + veb.u + " into this set, " + elem);
        }
        if (veb.insert(elem)) {
            size++;
            return true;
        }
        return false;
    }

    @Override
    public boolean contains(Object o) {
        if (!(o instanceof Integer)) {
            return false;
        }
        int i = (Integer) o;
        // out-of-universe values are simply absent, not an error
        if (i < 0 || i >= veb.u) {
            return false;
        }
        return veb.member(i);
    }

    @Override
    public boolean remove(Object o) {
        if (!contains(o)) {
            return false;
        }
        veb.delete((Integer) o);
        // BUG FIX: size was never decremented here, so size() drifted upward
        // after any successful removal.
        size--;
        return true;
    }

    /**
     * Largest element strictly smaller than x, or null if there is none
     * (also null for a null argument or when x <= the smallest element).
     */
    public Integer predecessor(Integer x) {
        if (x == null) {
            return null;
        }
        int i = x;
        if (i < 0) {
            return null;
        }
        // anything >= capacity has the overall max as predecessor (if any);
        // map the internal -1 sentinel to null in both branches (previously the
        // i >= u path could leak -1 to the caller on an empty set)
        int p = (i >= veb.u) ? veb.max() : veb.predecessor(i);
        return p >= 0 ? p : null;
    }

    /**
     * Smallest element strictly greater than x, or null if there is none
     * (also null for a null argument or when x >= the largest element).
     */
    public Integer successor(Integer x) {
        if (x == null) {
            return null;
        }
        int i = x;
        if (i >= veb.u) {
            return null;
        }
        // anything negative has the overall min as successor (if any);
        // map the internal -1 sentinel to null in both branches (previously the
        // i < 0 path could leak -1 to the caller on an empty set)
        int s = (i < 0) ? veb.min() : veb.successor(i);
        return s >= 0 ? s : null;
    }

    /** Natural ordering: per the SortedSet contract this is signalled by returning null. */
    @Override
    public Comparator<? super Integer> comparator() {
        return null;
    }

    /** Not implemented yet (was a stub returning null, which violates the SortedSet contract). */
    @Override
    public SortedSet<Integer> subSet(Integer fromElement, Integer toElement) {
        throw new UnsupportedOperationException("subSet is not implemented yet");
    }

    /** Not implemented yet (was a stub returning null, which violates the SortedSet contract). */
    @Override
    public SortedSet<Integer> headSet(Integer toElement) {
        throw new UnsupportedOperationException("headSet is not implemented yet");
    }

    /** Not implemented yet (was a stub returning null, which violates the SortedSet contract). */
    @Override
    public SortedSet<Integer> tailSet(Integer fromElement) {
        throw new UnsupportedOperationException("tailSet is not implemented yet");
    }

    /** @throws NoSuchElementException if the set is empty (was: returned the -1 sentinel) */
    @Override
    public Integer first() {
        if (size == 0) {
            throw new NoSuchElementException("set is empty");
        }
        return veb.min();
    }

    /** @throws NoSuchElementException if the set is empty (was: returned the -1 sentinel) */
    @Override
    public Integer last() {
        if (size == 0) {
            throw new NoSuchElementException("set is empty");
        }
        return veb.max();
    }

    @Override
    public Iterator<Integer> iterator() {
        return new VEBIterator(veb);
    }

    @Override
    public int size() {
        return size;
    }

    @Override
    public void clear() {
        veb.reset();
        size = 0;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.security.generator;

import java.security.Principal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.apache.logging.log4j.Logger;

import org.apache.geode.cache.operations.OperationContext.OperationCode;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.security.AccessControl;
import org.apache.geode.security.templates.DummyAuthorization;
import org.apache.geode.security.templates.XmlAuthorization;

/**
 * Encapsulates obtaining authorized and unauthorized credentials for a given operation in a region.
 * Implementations will be for different kinds of authorization scheme and authentication scheme
 * combos.
 *
 * @since GemFire 5.5
 */
public abstract class AuthzCredentialGenerator {

  private static final Logger logger = LogService.getLogger();

  /**
   * The {@link CredentialGenerator} being used.
   */
  protected CredentialGenerator generator;

  /**
   * A set of system properties that should be added to the gemfire system properties before using
   * the authorization module.
   */
  private Properties systemProperties;

  /**
   * A factory method to create a new instance of an {@link AuthzCredentialGenerator} for the given
   * {@link ClassCode}. Caller is supposed to invoke {@link AuthzCredentialGenerator#init}
   * immediately after obtaining the instance.
   *
   * @param classCode the {@code ClassCode} of the {@code AuthzCredentialGenerator} implementation
   *
   * @return an instance of {@code AuthzCredentialGenerator} for the given class code, or null when
   *         the class code is not recognized
   */
  public static AuthzCredentialGenerator create(final ClassCode classCode) {
    switch (classCode.classType) {
      case ClassCode.ID_DUMMY:
        return new DummyAuthzCredentialGenerator();
      case ClassCode.ID_XML:
        return new XmlAuthzCredentialGenerator();
      default:
        return null;
    }
  }

  /**
   * Initialize the authorized credential generator.
   *
   * @param generator an instance of {@link CredentialGenerator} of the credential implementation
   *        for which to obtain authorized/unauthorized credentials.
   *
   * @return false when the given {@link CredentialGenerator} is incompatible with this
   *         authorization module.
   */
  public boolean init(final CredentialGenerator generator) {
    this.generator = generator;
    try {
      this.systemProperties = init();
    } catch (IllegalArgumentException ex) {
      // The supplied CredentialGenerator cannot be combined with this authorization module.
      return false;
    }
    return true;
  }

  /**
   * @return A set of extra properties that should be added to Gemfire system properties when not
   *         null.
   */
  public Properties getSystemProperties() {
    return this.systemProperties;
  }

  /**
   * Get the {@link CredentialGenerator} being used by this instance.
   */
  public CredentialGenerator getCredentialGenerator() {
    return this.generator;
  }

  /**
   * Initialize the authorized credential generator.
   *
   * Required to be implemented by concrete classes that implement this abstract class.
   *
   * @return A set of extra properties that should be added to Gemfire system properties when not
   *         null.
   *
   * @throws IllegalArgumentException when the {@link CredentialGenerator} is incompatible with this
   *         authorization module.
   */
  protected abstract Properties init() throws IllegalArgumentException;

  /**
   * The {@link ClassCode} of the particular implementation.
   *
   * @return the {@code ClassCode}
   */
  public abstract ClassCode classCode();

  /**
   * The name of the {@link AccessControl} factory function that should be used as the authorization
   * module on the server side.
   *
   * @return name of the {@code AccessControl} factory function
   */
  public abstract String getAuthorizationCallback();

  /**
   * Get a set of credentials generated using the given index allowed to perform the given
   * {@link OperationCode}s for the given regions.
   *
   * @param opCodes the list of {@link OperationCode}s of the operations requiring authorization;
   *        should not be null
   * @param regionNames list of the region names requiring authorization; a value of null indicates
   *        all regions
   * @param index used to generate multiple such credentials by passing different values for this
   *
   * @return the set of credentials authorized to perform the given operation in the given regions,
   *         or null when no principal yields valid credentials
   */
  public Properties getAllowedCredentials(final OperationCode[] opCodes, final String[] regionNames,
      final int index) {
    int numTries = getNumPrincipalTries(opCodes, regionNames);
    if (numTries <= 0) {
      numTries = 1;
    }

    for (int tries = 0; tries < numTries; tries++) {
      final Principal principal =
          getAllowedPrincipal(opCodes, regionNames, (index + tries) % numTries);
      try {
        return this.generator.getValidCredentials(principal);
      } catch (IllegalArgumentException ex) {
        // This principal is not valid for the credential generator; try the next index.
      }
    }
    return null;
  }

  /**
   * Get a set of credentials generated using the given index not allowed to perform the given
   * {@link OperationCode}s for the given regions. The credentials are required to be valid for
   * authentication.
   *
   * @param opCodes the {@link OperationCode}s of the operations requiring authorization failure;
   *        should not be null
   * @param regionNames list of the region names requiring authorization failure; a value of null
   *        indicates all regions
   * @param index used to generate multiple such credentials by passing different values for this
   *
   * @return the set of credentials that are not authorized to perform the given operation in the
   *         given region, or null when no principal yields valid credentials
   */
  public Properties getDisallowedCredentials(final OperationCode[] opCodes,
      final String[] regionNames, final int index) {
    // This may not be very correct since we use the value of
    // getNumPrincipalTries() but is used to avoid adding another method.
    // Also something like getNumDisallowedPrincipals() will be normally always
    // infinite, and the number here is just to perform some number of tries
    // before giving up.
    int numTries = getNumPrincipalTries(opCodes, regionNames);
    if (numTries <= 0) {
      numTries = 1;
    }

    for (int tries = 0; tries < numTries; tries++) {
      final Principal principal =
          getDisallowedPrincipal(opCodes, regionNames, (index + tries) % numTries);
      try {
        return this.generator.getValidCredentials(principal);
      } catch (IllegalArgumentException ex) {
        // This principal is not valid for the credential generator; try the next index.
      }
    }
    return null;
  }

  /**
   * Get the number of tries to be done for obtaining valid credentials for the given operations in
   * the given region. It is required that {@link #getAllowedPrincipal} method returns valid
   * principals for values of {@code index} from 0 through (n-1) where {@code n} is the value
   * returned by this method. It is recommended that the principals so returned be unique for
   * efficiency.
   *
   * This will be used by {@link #getAllowedCredentials} to step through different principals and
   * obtain a set of valid credentials.
   *
   * Required to be implemented by concrete classes that implement this abstract class.
   *
   * @param opCodes the {@link OperationCode}s of the operations requiring authorization
   * @param regionNames list of the region names requiring authorization; a value of null indicates
   *        all regions
   *
   * @return the number of principals allowed to perform the given operation in the given region
   */
  protected abstract int getNumPrincipalTries(final OperationCode[] opCodes,
      final String[] regionNames);

  /**
   * Get a {@link Principal} generated using the given index allowed to perform the given
   * {@link OperationCode}s for the given region.
   *
   * Required to be implemented by concrete classes that implement this abstract class.
   *
   * @param opCodes the {@link OperationCode}s of the operations requiring authorization
   * @param regionNames list of the region names requiring authorization; a value of null indicates
   *        all regions
   * @param index used to generate multiple such principals by passing different values for this
   *
   * @return the {@link Principal} authorized to perform the given operation in the given region
   */
  protected abstract Principal getAllowedPrincipal(final OperationCode[] opCodes,
      final String[] regionNames, final int index);

  /**
   * Get a {@link Principal} generated using the given index not allowed to perform the given
   * {@link OperationCode}s for the given region.
   *
   * Required to be implemented by concrete classes that implement this abstract class.
   *
   * @param opCodes the {@link OperationCode}s of the operations requiring authorization failure
   * @param regionNames list of the region names requiring authorization failure; a value of null
   *        indicates all regions
   * @param index used to generate multiple such principals by passing different values for this
   *
   * @return a {@link Principal} not authorized to perform the given operation in the given region
   */
  protected abstract Principal getDisallowedPrincipal(final OperationCode[] opCodes,
      final String[] regionNames, final int index);

  /**
   * Enumeration for various {@link AuthzCredentialGenerator} implementations.
   *
   * <p>
   * The following schemes are supported as of now:
   * <ul>
   * <li>{@code DummyAuthorization} with {@code DummyAuthenticator}</li>
   * <li>{@code XMLAuthorization} with {@code DummyAuthenticator}</li>
   * <li>{@code XMLAuthorization} with {@code LDAPAuthenticator}</li>
   * <li>{@code XMLAuthorization} with {@code PKCSAuthenticator}</li>
   * <li>{@code XMLAuthorization} when using SSL sockets</li>
   * </ul>
   *
   * <p>
   * To add a new authorization scheme the following needs to be done:
   * <ul>
   * <li>Add implementation for {@link AccessControl}.</li>
   * <li>Choose the authentication schemes that it shall work with from
   * {@link CredentialGenerator.ClassCode}</li>
   * <li>Add a new enumeration value for the scheme in this class. Notice the size of {@code VALUES}
   * array and increase that if it is getting overflowed. Note the methods and fields for existing
   * schemes and add for the new one in a similar manner.</li>
   * <li>Add an implementation for {@link AuthzCredentialGenerator}. Note the
   * {@link AuthzCredentialGenerator#init} method where different authentication schemes can be
   * passed and initialize differently for the authentication schemes that shall be handled.</li>
   * <li>Modify the {@link AuthzCredentialGenerator#create} method to add creation of an instance of
   * the new implementation for the {@code ClassCode} enumeration value.</li>
   * </ul>
   *
   * <p>
   * All dunit tests will automagically start testing the new implementation after this.
   *
   * @since GemFire 5.5
   */
  public static class ClassCode {

    private static byte nextOrdinal = 0;

    private static final byte ID_DUMMY = 1;
    private static final byte ID_XML = 2;

    private static final ClassCode[] VALUES = new ClassCode[10];
    private static final Map<String, ClassCode> CODE_NAME_MAP = new HashMap<String, ClassCode>();

    public static final ClassCode DUMMY =
        new ClassCode(DummyAuthorization.class.getName() + ".create", ID_DUMMY);
    public static final ClassCode XML =
        new ClassCode(XmlAuthorization.class.getName() + ".create", ID_XML);

    /** The name of this class. */
    private final String name;

    /** byte used as ordinal to represent this class */
    private final byte ordinal;

    /**
     * One of the following: ID_DUMMY, ID_XML
     */
    private final byte classType;

    /** Creates a new instance of class code. */
    private ClassCode(final String name, final byte classType) {
      this.name = name;
      this.classType = classType;
      this.ordinal = nextOrdinal++;
      VALUES[this.ordinal] = this;
      CODE_NAME_MAP.put(name, this);
    }

    public boolean isDummy() {
      return this.classType == ID_DUMMY;
    }

    public boolean isXml() {
      return this.classType == ID_XML;
    }

    /**
     * Returns the {@code ClassCode} represented by specified ordinal.
     */
    public static ClassCode fromOrdinal(final byte ordinal) {
      return VALUES[ordinal];
    }

    /**
     * Returns the {@code ClassCode} represented by specified string.
     */
    public static ClassCode parse(final String operationName) {
      return CODE_NAME_MAP.get(operationName);
    }

    /**
     * Returns all the possible values.
     */
    public static List<ClassCode> getAll() {
      return new ArrayList<ClassCode>(CODE_NAME_MAP.values());
    }

    /**
     * Returns the ordinal for this class code.
     *
     * @return the ordinal of this class code.
     */
    public byte toOrdinal() {
      return this.ordinal;
    }

    /**
     * Returns a string representation for this class code.
     *
     * @return the name of this class code.
     */
    @Override
    public String toString() {
      return this.name;
    }

    /**
     * Indicates whether other object is same as this one.
     *
     * @return true if other object is same as this one.
     */
    @Override
    public boolean equals(final Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof ClassCode)) {
        return false;
      }
      final ClassCode other = (ClassCode) obj;
      return other.ordinal == this.ordinal;
    }

    /**
     * Indicates whether other {@code ClassCode} is same as this one.
     *
     * @return true if other {@code ClassCode} is same as this one.
     */
    public boolean equals(final ClassCode opCode) {
      return opCode != null && opCode.ordinal == this.ordinal;
    }

    /**
     * Returns a hash code value for this {@code ClassCode} which is the same as its ordinal.
     *
     * @return the ordinal of this {@code ClassCode}.
     */
    @Override
    public int hashCode() {
      return this.ordinal;
    }
  }
}
/**
 * Copyright 2011-2019 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.directio.hive.orc;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.CompressionKind;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.junit.Assume;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import com.asakusafw.directio.hive.serde.DataModelDescriptorEditor;
import com.asakusafw.directio.hive.serde.DataModelMapping.ExceptionHandlingStrategy;
import com.asakusafw.directio.hive.serde.DataModelMapping.FieldMappingStrategy;
import com.asakusafw.directio.hive.serde.FieldPropertyDescriptor;
import com.asakusafw.directio.hive.serde.ValueSerde;
import com.asakusafw.directio.hive.serde.mock.MockSimple;
import com.asakusafw.info.hive.BuiltinStorageFormatInfo;
import com.asakusafw.info.hive.StorageFormatInfo;
import com.asakusafw.runtime.directio.Counter;
import com.asakusafw.runtime.directio.DirectInputFragment;
import com.asakusafw.runtime.directio.hadoop.StripedDataFormat;
import com.asakusafw.runtime.io.ModelInput;
import com.asakusafw.runtime.io.ModelOutput;
import com.asakusafw.runtime.value.IntOption;
import com.asakusafw.runtime.value.StringOption;
import com.asakusafw.runtime.windows.WindowsSupport;

/**
 * Test for {@link OrcFileFormat}.
 */
public class OrcFileFormatTest {

    /**
     * Windows platform support.
     */
    @ClassRule
    public static final WindowsSupport WINDOWS_SUPPORT = new WindowsSupport();

    /**
     * A temporary folder for testing.
     */
    @Rule
    public final TemporaryFolder folder = new TemporaryFolder();

    /**
     * Creates a format for the given data model type without any serde edits,
     * optionally removing the named properties from the model descriptor.
     */
    private <T> OrcFileFormat<T> format(Class<T> type, String... removes) {
        return format(type, Collections.emptyMap(), removes);
    }

    /**
     * Creates a format for the given data model type, applying the given serde
     * edits and removing the named properties from the model descriptor.
     * The format is bound to a fresh (default) Hadoop configuration.
     */
    private <T> OrcFileFormat<T> format(
            Class<T> type,
            Map<String, ? extends ValueSerde> edits,
            String... removes) {
        OrcFileFormat<T> format = new OrcFileFormat<>(
                "testing",
                new OrcFormatConfiguration(),
                new DataModelDescriptorEditor(FieldPropertyDescriptor.extract(type))
                    .editAll(edits)
                    .removeAll(Arrays.asList(removes))
                    .build());
        format.setConf(new org.apache.hadoop.conf.Configuration());
        return format;
    }

    /**
     * Test method for {@link AbstractOrcFileFormat#getSchema()}.
     */
    @Test
    public void format_name() {
        assertThat(
                format(MockSimple.class).getSchema().getStorageFormat(),
                equalTo((Object) BuiltinStorageFormatInfo.of(StorageFormatInfo.FormatKind.ORC)));
    }

    /**
     * Test method for {@link AbstractOrcFileFormat#getSupportedType()}.
     */
    @Test
    public void supported_type() {
        assertThat(format(MockSimple.class).getSupportedType(), equalTo((Object) MockSimple.class));
    }

    /**
     * {@code tblproperties} for default settings.
     */
    @Test
    public void table_properties_default() {
        Map<String, String> props = format(MockSimple.class).getSchema().getProperties();
        assertThat(props.size(), is(2));
        assertThat(props, hasEntry("orc.compress", "SNAPPY"));
        assertThat(props, hasEntry("orc.stripe.size", String.valueOf(64L * 1024 * 1024)));
    }

    /**
     * {@code tblproperties} for custom settings.
     */
    @Test
    public void table_properties_custom() {
        long stripeSize = 99L * 1024 * 1024;
        OrcFileFormat<MockSimple> format = format(MockSimple.class);
        format.getFormatConfiguration()
            .withFormatVersion(OrcFile.Version.V_0_11.name())
            .withCompressionKind(CompressionKind.ZLIB.name())
            .withStripeSize(stripeSize);

        Map<String, String> props = format.getSchema().getProperties();
        assertThat(props.size(), is(2));
        assertThat(props, hasEntry("orc.compress", "ZLIB"));
        assertThat(props, hasEntry("orc.stripe.size", String.valueOf(stripeSize)));
    }

    /**
     * simple I/O.
     * @throws Exception if failed
     */
    @Test
    public void io_simple() throws Exception {
        OrcFileFormat<MockSimple> format = format(MockSimple.class);

        MockSimple in = new MockSimple(100, "Hello, world!");
        MockSimple out = restore(format, in);
        assertThat(out.number, is(in.number));
        assertThat(out.string, is(in.string));
    }

    /**
     * I/O with projection.
     * @throws Exception if failed
     */
    @Test
    public void io_projection() throws Exception {
        OrcFileFormat<MockSimple> format1 = format(MockSimple.class);
        // format2 has the "string" property removed, so it reads a projection of the file
        OrcFileFormat<MockSimple> format2 = format(MockSimple.class, "string");
        format2.getFormatConfiguration()
            .withFieldMappingStrategy(FieldMappingStrategy.NAME)
            .withOnMissingTarget(ExceptionHandlingStrategy.IGNORE);

        MockSimple in = new MockSimple(100, "Hello, world!");
        File file = save(format1, Arrays.asList(in));
        List<MockSimple> restored = load(format2, file);
        assertThat(restored, hasSize(1));

        MockSimple out = restored.get(0);
        assertThat(out.number, is(in.number));
        assertThat(out.string, is(new StringOption())); // null
    }

    /**
     * I/O with fragment.
     * @throws Exception if failed
     */
    @Test
    public void io_fragment() throws Exception {
        File file = folder.newFile();
        // the output path must not exist yet when the format creates it
        Assume.assumeThat(file.delete() || file.exists() == false, is(true));

        OrcFileFormat<MockSimple> format = format(MockSimple.class);
        LocalFileSystem fs = FileSystem.getLocal(format.getConf());
        try (ModelOutput<MockSimple> output = format.createOutput(
                MockSimple.class,
                fs, new Path(file.toURI()),
                new Counter());) {
            output.write(new MockSimple(100, "Hello, world!"));
        }
        assertThat(file.exists(), is(true));

        FileStatus stat = fs.getFileStatus(new Path(file.toURI()));
        List<DirectInputFragment> fragments =
                format.computeInputFragments(new StripedDataFormat.InputContext(
                        MockSimple.class,
                        Arrays.asList(stat), fs,
                        -1L, -1L,
                        false, false));
        assertThat(fragments, hasSize(1));

        DirectInputFragment first = fragments.get(0);
        try (ModelInput<MockSimple> input = format.createInput(
                MockSimple.class,
                fs, new Path(first.getPath()),
                first.getOffset(), first.getSize(),
                new Counter())) {
            MockSimple buf = new MockSimple();
            assertThat(input.readTo(buf), is(true));
            assertThat(buf.number, is(new IntOption(100)));
            assertThat(buf.string, is(new StringOption("Hello, world!")));
            assertThat(input.readTo(buf), is(false));
        }
    }

    /**
     * I/O with {@code 0.11}.
     * @throws Exception if failed
     */
    @Test
    public void io_v_0_11() throws Exception {
        OrcFileFormat<MockSimple> format = format(MockSimple.class);
        format.getFormatConfiguration().withFormatVersion(OrcFile.Version.V_0_11.name());

        MockSimple in = new MockSimple(100, "Hello, world!");
        MockSimple out = restore(format, in);
        assertThat(out.number, is(in.number));
        assertThat(out.string, is(in.string));
    }

    /**
     * Round-trips a single value through the format and returns the restored copy.
     */
    private <T> T restore(OrcFileFormat<T> format, T value) throws IOException, InterruptedException {
        List<T> in = new ArrayList<>();
        in.add(value);
        return restore(format, in).get(0);
    }

    /**
     * Round-trips a list of values through the format and returns the restored list.
     * Asserts that the restored list has the same size as the input.
     */
    private <T> List<T> restore(OrcFileFormat<T> format, List<T> values) throws IOException, InterruptedException {
        File file = save(format, values);
        List<T> results = load(format, file);
        assertThat(values, hasSize(results.size()));
        return results;
    }

    /**
     * Writes the given values through the format into a fresh temporary file
     * and returns that file.
     */
    private <T> File save(OrcFileFormat<T> format, List<T> values) throws IOException, InterruptedException {
        File file = folder.newFile();
        // the output path must not exist yet when the format creates it
        Assume.assumeThat(file.delete() || file.exists() == false, is(true));
        LocalFileSystem fs = FileSystem.getLocal(format.getConf());
        try (ModelOutput<T> output = format.createOutput(
                format.getSupportedType(),
                fs, new Path(file.toURI()),
                new Counter())) {
            for (T value : values) {
                output.write(value);
            }
        }
        assertThat(file.exists(), is(true));
        return file;
    }

    /**
     * Reads every record from the given file through the format and returns them.
     */
    private <T> List<T> load(OrcFileFormat<T> format, File file) throws IOException, InterruptedException {
        LocalFileSystem fs = FileSystem.getLocal(format.getConf());
        try (ModelInput<T> input = format.createInput(
                format.getSupportedType(),
                fs, new Path(file.toURI()),
                0, file.length(),
                new Counter())) {
            List<T> results = new ArrayList<>();
            while (true) {
                @SuppressWarnings("unchecked")
                T value = (T) format.getDataModelDescriptor().createDataModelObject();
                if (input.readTo(value) == false) {
                    break;
                }
                results.add(value);
            }
            return results;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.apache.sling.hc.util;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.sling.hc.api.HealthCheck;
import org.apache.sling.hc.api.execution.HealthCheckSelector;
import org.osgi.annotation.versioning.ProviderType;
import org.osgi.framework.BundleContext;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.sling.hc.api.execution.HealthCheckSelector.tags;
import static org.apache.sling.hc.api.execution.HealthCheckSelector.empty;

/**
 * Select from available {@link HealthCheck} services.
 * Once this filter object and the returned health check services are no longer
 * being used {@link #dispose()} should be called, to free the service
 * references.
 *
 * This class is not thread safe and instances shouldn't be used concurrently
 * from different threads.
 */
@ProviderType
public class HealthCheckFilter {

    private final Logger log = LoggerFactory.getLogger(getClass());

    private final BundleContext bundleContext;

    /** Prefix that negates a tag or name: "-foo" excludes services tagged/named "foo". */
    public static final String OMIT_PREFIX = "-";

    // Service references handed out by this filter; released again in dispose().
    private final Set<ServiceReference> usedReferences = new HashSet<ServiceReference>();

    /**
     * Create a new filter object
     */
    public HealthCheckFilter(final BundleContext bc) {
        bundleContext = bc;
    }

    /**
     * Returns the {@link HealthCheck} services matching the given selector,
     * sorted by their service references. Each returned service is tracked in
     * {@link #usedReferences} so it can be released by {@link #dispose()}.
     */
    public List<HealthCheck> getHealthChecks(final HealthCheckSelector selector) {
        final ServiceReference [] refs = this.getHealthCheckServiceReferences(selector);
        final List<HealthCheck> result = new ArrayList<HealthCheck>();

        if ( refs != null ) {
            // Arrays.asList returns a fixed-size view backed by refs; sorting it in
            // place is allowed. ServiceReference ordering follows the OSGi contract.
            final List<ServiceReference> sortedRefs = Arrays.asList(refs);
            Collections.sort(sortedRefs);

            for(final ServiceReference ref : sortedRefs) {
                final HealthCheck hc = (HealthCheck)bundleContext.getService(ref);
                log.debug("Selected HealthCheck service {}", hc);
                if ( hc != null ) {
                    // remember the reference so dispose() can unget the service later
                    this.usedReferences.add(ref);
                    result.add(hc);
                }
            }
        }

        return result;
    }

    /**
     * Returns the health check service references matching the given selector,
     * combining tags with logical "and".
     */
    public ServiceReference[] getHealthCheckServiceReferences(final HealthCheckSelector selector) {
        return getHealthCheckServiceReferences(selector, false);
    }

    /**
     * Returns the health check service references matching the given selector.
     *
     * @param combineTagsWithOr if true, a service matching any one of the tags is
     *        selected; if false, all tags must match
     */
    public ServiceReference[] getHealthCheckServiceReferences(final HealthCheckSelector selector, boolean combineTagsWithOr) {
        // a null selector behaves like an empty one: select all health checks
        final CharSequence filterBuilder = selector != null ? getServiceFilter(selector, combineTagsWithOr) : getServiceFilter(empty(), combineTagsWithOr);

        log.debug("OSGi service filter in getHealthCheckServiceReferences(): {}", filterBuilder);

        try {
            final String filterString = filterBuilder.length() == 0 ? null : filterBuilder.toString();
            bundleContext.createFilter(filterString); // check syntax early
            final ServiceReference[] refs = bundleContext.getServiceReferences(HealthCheck.class.getName(), filterString);
            if (refs == null) {
                log.debug("Found no HealthCheck services with filter [{}]", filterString);
                return new ServiceReference[0];
            } else {
                log.debug("Found {} HealthCheck services with filter [{}]", refs.length, filterString);
            }
            return refs;
        } catch (final InvalidSyntaxException ise) {
            // this should not happen, but we fail gracefully
            log.error("Invalid OSGi filter syntax in '" + filterBuilder + "'", ise);
            return new ServiceReference[0];
        }
    }

    /**
     * Get all health check services with one of the supplied tags.
     * @return A list of services - might be the empty list if none matches
     * @deprecated use getHealthChecks() instead
     */
    @Deprecated
    public List<HealthCheck> getTaggedHealthChecks(final String... tags) {
        final HealthCheckSelector selector = tags(tags);
        return getHealthChecks(selector);
    }

    /**
     * Get all service references for health check services with one of the supplied tags. Uses logical "and" to combine tags.
     * @return An array of service references - might be an empty array if none matches
     * @deprecated use getHealthCheckServiceReferences() instead
     */
    @Deprecated
    public ServiceReference[] getTaggedHealthCheckServiceReferences(final String... tags) {
        return getHealthCheckServiceReferences(tags(tags), false);
    }

    /**
     * Get all service references for health check services with one of the supplied tags.
     *
     * @param combineWithOr If true will return all health checks that have at least one of the tags set.
     *                      If false will return only health checks that have all given tags assigned.
     * @param tags the tags to look for
     * @return An array of service references - might be an empty array if none matches
     * @deprecated use getHealthCheckServiceReferences() instead
     */
    @Deprecated
    public ServiceReference[] getTaggedHealthCheckServiceReferences(boolean combineWithOr, final String... tags) {
        final HealthCheckSelector selector = tags(tags);
        return getHealthCheckServiceReferences(selector, combineWithOr);
    }

    /**
     * Dispose all used service references
     */
    public void dispose() {
        for(final ServiceReference ref : this.usedReferences) {
            this.bundleContext.ungetService(ref);
        }
        this.usedReferences.clear();
    }

    /**
     * Builds the OSGi service filter expression for the given selector.
     *
     * <p>The result always has the shape {@code (&(objectClass=HealthCheck)...)}.
     * Negated ("-"-prefixed) tags and names become top-level exclusion clauses.
     * Positive names are always OR-combined; positive tags are AND-combined
     * unless {@code combineTagsWithOr} is set. When both names and tags are
     * present, the tag conjunction is OR-combined with the names.
     */
    CharSequence getServiceFilter(HealthCheckSelector selector, boolean combineTagsWithOr) {
        // Build service filter
        final StringBuilder filterBuilder = new StringBuilder();
        filterBuilder.append("(&(objectClass=").append(HealthCheck.class.getName()).append(")");

        final int prefixLen = HealthCheckFilter.OMIT_PREFIX.length();

        final StringBuilder filterBuilderForOrOperator = new StringBuilder(); // or filters
        final StringBuilder tagsBuilder = new StringBuilder();
        int tagsAndClauses = 0;
        if (selector.tags() != null) {
            for (String tag : selector.tags()) {
                tag = tag.trim();
                if (tag.length() == 0) {
                    continue;
                }
                if (tag.startsWith(HealthCheckFilter.OMIT_PREFIX)) {
                    // omit tags always have to be added as and-clause
                    filterBuilder.append("(!(").append(HealthCheck.TAGS).append("=").append(tag.substring(prefixLen)).append("))");
                } else {
                    // add regular tags in the list either to outer and-clause or inner or-clause
                    if (combineTagsWithOr) {
                        filterBuilderForOrOperator.append("(").append(HealthCheck.TAGS).append("=").append(tag).append(")");
                    } else {
                        tagsBuilder.append("(").append(HealthCheck.TAGS).append("=").append(tag).append(")");
                        tagsAndClauses++;
                    }
                }
            }
        }
        boolean addedNameToOrBuilder = false;
        if (selector.names() != null) {
            for (String name : selector.names()) {
                name = name.trim();
                if (name.length() == 0) {
                    continue;
                }
                if (name.startsWith(HealthCheckFilter.OMIT_PREFIX)) {
                    // omit names always have to be added as and-clause
                    filterBuilder.append("(!(").append(HealthCheck.NAME).append("=").append(name.substring(prefixLen)).append("))");
                } else {
                    // names are always ORd
                    filterBuilderForOrOperator.append("(").append(HealthCheck.NAME).append("=").append(name).append(")");
                    addedNameToOrBuilder = true;
                }
            }
        }

        if (addedNameToOrBuilder) {
            // names are present: OR the (possibly AND-wrapped) tag clauses with them
            if (tagsAndClauses > 1) {
                filterBuilderForOrOperator.append("(&").append(tagsBuilder).append(")");
            } else {
                filterBuilderForOrOperator.append(tagsBuilder);
            }
        } else {
            // no names: tag clauses go straight into the outer and-clause
            filterBuilder.append(tagsBuilder);
        }

        // add "or" clause if we have accumulated any
        if (filterBuilderForOrOperator.length() > 0) {
            filterBuilder.append("(|").append(filterBuilderForOrOperator).append(")");
        }

        filterBuilder.append(")");

        return filterBuilder;
    }
}
/* * Copyright (c) 2010-2019 Evolveum and contributors * * This work is dual-licensed under the Apache License 2.0 * and European Union Public License. See LICENSE file for details. */ package com.evolveum.midpoint.model.impl.lens.projector.policy.evaluators; import com.evolveum.midpoint.common.LocalizationService; import com.evolveum.midpoint.model.api.context.AssignmentPath; import com.evolveum.midpoint.model.api.context.EvaluatedExclusionTrigger; import com.evolveum.midpoint.model.api.context.EvaluatedPolicyRule; import com.evolveum.midpoint.model.api.context.EvaluatedPolicyRuleTrigger; import com.evolveum.midpoint.model.impl.lens.EvaluatedAssignmentImpl; import com.evolveum.midpoint.model.impl.lens.EvaluatedAssignmentTargetImpl; import com.evolveum.midpoint.model.impl.lens.projector.policy.AssignmentPolicyRuleEvaluationContext; import com.evolveum.midpoint.model.impl.lens.projector.policy.PolicyRuleEvaluationContext; import com.evolveum.midpoint.prism.PrismContext; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.PrismObjectDefinition; import com.evolveum.midpoint.prism.match.MatchingRuleRegistry; import com.evolveum.midpoint.prism.polystring.PolyString; import com.evolveum.midpoint.prism.query.ObjectFilter; import com.evolveum.midpoint.schema.RelationRegistry; import com.evolveum.midpoint.schema.constants.SchemaConstants; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.util.ObjectTypeUtil; import com.evolveum.midpoint.util.LocalizableMessage; import com.evolveum.midpoint.util.LocalizableMessageBuilder; import com.evolveum.midpoint.util.exception.*; import com.evolveum.midpoint.xml.ns._public.common.common_3.*; import com.evolveum.prism.xml.ns._public.query_3.SearchFilterType; import com.evolveum.prism.xml.ns._public.types_3.EvaluationTimeType; import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; import 
org.springframework.stereotype.Component; import javax.xml.bind.JAXBElement; import javax.xml.namespace.QName; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; /** * @author semancik * @author mederly */ @Component public class ExclusionConstraintEvaluator implements PolicyConstraintEvaluator<ExclusionPolicyConstraintType> { private static final String OP_EVALUATE = ExclusionConstraintEvaluator.class.getName() + ".evaluate"; private static final String CONSTRAINT_KEY = "exclusion"; @Autowired private ConstraintEvaluatorHelper evaluatorHelper; @Autowired private PrismContext prismContext; @Autowired private MatchingRuleRegistry matchingRuleRegistry; @Autowired private RelationRegistry relationRegistry; @Autowired private LocalizationService localizationService; @Override public <AH extends AssignmentHolderType> EvaluatedPolicyRuleTrigger evaluate(JAXBElement<ExclusionPolicyConstraintType> constraint, PolicyRuleEvaluationContext<AH> rctx, OperationResult parentResult) throws SchemaException, ExpressionEvaluationException, ObjectNotFoundException, CommunicationException, ConfigurationException, SecurityViolationException { OperationResult result = parentResult.subresult(OP_EVALUATE) .setMinor() .build(); try { if (!(rctx instanceof AssignmentPolicyRuleEvaluationContext)) { return null; } AssignmentPolicyRuleEvaluationContext<AH> ctx = (AssignmentPolicyRuleEvaluationContext<AH>) rctx; if (!ctx.inPlus && !ctx.inZero) { return null; } List<OrderConstraintsType> sourceOrderConstraints = defaultIfEmpty(constraint.getValue().getOrderConstraint()); List<OrderConstraintsType> targetOrderConstraints = defaultIfEmpty(constraint.getValue().getTargetOrderConstraint()); if (ctx.policyRule.isGlobal()) { if (!pathMatches(ctx.policyRule.getAssignmentPath(), sourceOrderConstraints)) { System.out.println("[global] Source assignment path does not match: " + ctx.policyRule.getAssignmentPath()); return null; } } else { // It is not clear how to 
match orderConstraint with assignment path of the constraint. // Let us try the following test: we consider it matching if there's at least one segment // on the path that matches the constraint. boolean found = ctx.policyRule.getAssignmentPath().getSegments().stream() .anyMatch(segment -> segment.matches(sourceOrderConstraints)); if (!found) { // System.out.println("Source assignment path does not match: constraints=" + sourceOrderConstraints + ", whole path=" + ctx.policyRule.getAssignmentPath()); return null; } } // We consider all policy rules, i.e. also from induced targets. (It is not possible to collect local // rules for individual targets in the chain - rules are computed only for directly evaluated assignments.) // // In order to avoid false positives, we consider all targets from the current assignment as "allowed" // Set<String> allowedTargetOids = ctx.evaluatedAssignment.getNonNegativeTargets().stream() // .filter(t -> t.appliesToFocus()) // .map(t -> t.getOid()) // .collect(Collectors.toSet()); List<EvaluatedAssignmentTargetImpl> nonNegativeTargetsA = ctx.evaluatedAssignment.getNonNegativeTargets(); for (EvaluatedAssignmentImpl<AH> assignmentB : ctx.evaluatedAssignmentTriple.getNonNegativeValues()) { if (assignmentB.equals(ctx.evaluatedAssignment)) { // TODO (value instead of reference equality?) continue; } targetB: for (EvaluatedAssignmentTargetImpl targetB : assignmentB.getNonNegativeTargets()) { if (!pathMatches(targetB.getAssignmentPath(), targetOrderConstraints)) { // System.out.println("Target assignment path does not match: constraints=" + targetOrderConstraints + ", whole path=" + targetB.getAssignmentPath()); continue; } if (!oidMatches(constraint.getValue().getTargetRef(), targetB, prismContext, matchingRuleRegistry, "exclusion constraint")) { continue; } // To avoid false positives let us check if this target is not already covered by assignment being evaluated // (is this really needed?) 
for (EvaluatedAssignmentTargetImpl targetA : nonNegativeTargetsA) { if (targetA.appliesToFocusWithAnyRelation(relationRegistry) && targetA.getOid() != null && targetA.getOid().equals(targetB.getOid()) && targetA.getAssignmentPath().equivalent(targetB.getAssignmentPath())) { continue targetB; } } EvaluatedPolicyRuleTrigger rv = createTrigger(ctx.evaluatedAssignment, assignmentB, targetB, constraint, ctx.policyRule, ctx, result); result.addReturn("trigger", rv.toDiagShortcut()); return rv; } } return null; } catch (Throwable t) { result.recordFatalError(t.getMessage(), t); throw t; } finally { result.computeStatusIfUnknown(); } } @SuppressWarnings("BooleanMethodIsAlwaysInverted") private boolean pathMatches(AssignmentPath assignmentPath, List<OrderConstraintsType> definedOrderConstraints) { if (assignmentPath == null) { throw new IllegalStateException("Check this. Assignment path is null."); } if (assignmentPath.isEmpty()) { throw new IllegalStateException("Check this. Assignment path is empty."); } return assignmentPath.matches(definedOrderConstraints); } @NotNull private List<OrderConstraintsType> defaultIfEmpty(List<OrderConstraintsType> definedOrderConstraints) { return !definedOrderConstraints.isEmpty() ? 
definedOrderConstraints : defaultOrderConstraints(); } private List<OrderConstraintsType> defaultOrderConstraints() { return Collections.singletonList(new OrderConstraintsType(prismContext).order(1)); } static boolean oidMatches(ObjectReferenceType targetRef, EvaluatedAssignmentTargetImpl assignmentTarget, PrismContext prismContext, MatchingRuleRegistry matchingRuleRegistry, String context) throws SchemaException { if (targetRef == null) { return true; // this means we rely on comparing relations } if (assignmentTarget.getOid() == null) { return false; // shouldn't occur } if (targetRef.getOid() != null) { return assignmentTarget.getOid().equals(targetRef.getOid()); } if (targetRef.getResolutionTime() == EvaluationTimeType.RUN) { SearchFilterType filterType = targetRef.getFilter(); if (filterType == null) { throw new SchemaException("No filter in " + context); } QName typeQName = targetRef.getType(); @SuppressWarnings("rawtypes") PrismObjectDefinition objDef = prismContext.getSchemaRegistry().findObjectDefinitionByType(typeQName); ObjectFilter filter = prismContext.getQueryConverter().parseFilter(filterType, objDef); PrismObject<? 
extends AssignmentHolderType> target = assignmentTarget.getTarget(); return filter.match(target.getValue(), matchingRuleRegistry); } else { throw new SchemaException("No OID in " + context); } } private <AH extends AssignmentHolderType> EvaluatedExclusionTrigger createTrigger(EvaluatedAssignmentImpl<AH> assignmentA, @NotNull EvaluatedAssignmentImpl<AH> assignmentB, EvaluatedAssignmentTargetImpl targetB, JAXBElement<ExclusionPolicyConstraintType> constraintElement, EvaluatedPolicyRule policyRule, AssignmentPolicyRuleEvaluationContext<AH> ctx, OperationResult result) throws ExpressionEvaluationException, ObjectNotFoundException, SchemaException, CommunicationException, ConfigurationException, SecurityViolationException { AssignmentPath pathA = policyRule.getAssignmentPath(); AssignmentPath pathB = targetB.getAssignmentPath(); LocalizableMessage infoA = createObjectInfo(pathA, assignmentA.getTarget(), true); LocalizableMessage infoB = createObjectInfo(pathB, targetB.getTarget(), false); ObjectType objectA = getConflictingObject(pathA, assignmentA.getTarget()); ObjectType objectB = getConflictingObject(pathB, targetB.getTarget()); LocalizableMessage message = createMessage(infoA, infoB, constraintElement, ctx, result); LocalizableMessage shortMessage = createShortMessage(infoA, infoB, constraintElement, ctx, result); return new EvaluatedExclusionTrigger(constraintElement.getValue(), message, shortMessage, assignmentB, objectA, objectB, pathA, pathB); } @NotNull private <AH extends AssignmentHolderType> LocalizableMessage createMessage(LocalizableMessage infoA, LocalizableMessage infoB, JAXBElement<ExclusionPolicyConstraintType> constraintElement, PolicyRuleEvaluationContext<AH> ctx, OperationResult result) throws ExpressionEvaluationException, ObjectNotFoundException, SchemaException, CommunicationException, ConfigurationException, SecurityViolationException { LocalizableMessage builtInMessage = new LocalizableMessageBuilder() 
.key(SchemaConstants.DEFAULT_POLICY_CONSTRAINT_KEY_PREFIX + CONSTRAINT_KEY) .args(infoA, infoB) .build(); return evaluatorHelper.createLocalizableMessage(constraintElement, ctx, builtInMessage, result); } @NotNull private <AH extends AssignmentHolderType> LocalizableMessage createShortMessage(LocalizableMessage infoA, LocalizableMessage infoB, JAXBElement<ExclusionPolicyConstraintType> constraintElement, PolicyRuleEvaluationContext<AH> ctx, OperationResult result) throws ExpressionEvaluationException, ObjectNotFoundException, SchemaException, CommunicationException, ConfigurationException, SecurityViolationException { LocalizableMessage builtInMessage = new LocalizableMessageBuilder() .key(SchemaConstants.DEFAULT_POLICY_CONSTRAINT_SHORT_MESSAGE_KEY_PREFIX + CONSTRAINT_KEY) .args(infoA, infoB) .build(); return evaluatorHelper.createLocalizableShortMessage(constraintElement, ctx, builtInMessage, result); } private ObjectType getConflictingObject(AssignmentPath path, PrismObject<?> defaultObject) { if (path == null) { return ObjectTypeUtil.toObjectable(defaultObject); } List<ObjectType> objects = path.getFirstOrderChain(); return objects.isEmpty() ? 
ObjectTypeUtil.toObjectable(defaultObject) : objects.get(objects.size()-1); } private LocalizableMessage createObjectInfo(AssignmentPath path, PrismObject<?> defaultObject, boolean startsWithUppercase) { if (path == null) { return ObjectTypeUtil.createDisplayInformation(defaultObject, startsWithUppercase); } List<ObjectType> objects = path.getFirstOrderChain(); if (objects.isEmpty()) { // shouldn't occur return ObjectTypeUtil.createDisplayInformation(defaultObject, startsWithUppercase); } PrismObject<?> last = objects.get(objects.size()-1).asPrismObject(); if (objects.size() == 1) { return ObjectTypeUtil.createDisplayInformation(last, startsWithUppercase); } String pathString = objects.stream() .map(o -> PolyString.getOrig(o.getName())) .collect(Collectors.joining(" -> ")); return ObjectTypeUtil.createDisplayInformationWithPath(last, startsWithUppercase, pathString); } }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage; import java.beans.*; /** * * @author William.Stockhausen */ public class EggStageParametersBeanInfo extends SimpleBeanInfo { // Bean descriptor//GEN-FIRST:BeanDescriptor /*lazy BeanDescriptor*/ private static BeanDescriptor getBdescriptor(){ BeanDescriptor beanDescriptor = new BeanDescriptor ( wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class , null ); // NOI18N//GEN-HEADEREND:BeanDescriptor // Here you can add code for customizing the BeanDescriptor. return beanDescriptor; }//GEN-LAST:BeanDescriptor // Property identifiers//GEN-FIRST:Properties private static final int PROPERTY_CSV = 0; private static final int PROPERTY_CSVHeader = 1; private static final int PROPERTY_IBMFunctionCategories = 2; private static final int PROPERTY_IBMParameterNames = 3; private static final int PROPERTY_keys = 4; private static final int PROPERTY_typeName = 5; // Property array /*lazy PropertyDescriptor*/ private static PropertyDescriptor[] getPdescriptor(){ PropertyDescriptor[] properties = new PropertyDescriptor[6]; try { properties[PROPERTY_CSV] = new PropertyDescriptor ( "CSV", wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class, "getCSV", null ); // NOI18N properties[PROPERTY_CSVHeader] = new PropertyDescriptor ( "CSVHeader", wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class, "getCSVHeader", null ); // NOI18N properties[PROPERTY_IBMFunctionCategories] = new PropertyDescriptor ( "IBMFunctionCategories", wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class, "getIBMFunctionCategories", null ); // NOI18N properties[PROPERTY_IBMParameterNames] = new PropertyDescriptor ( "IBMParameterNames", wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class, "getIBMParameterNames", null ); // NOI18N 
properties[PROPERTY_keys] = new PropertyDescriptor ( "keys", wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class, "getKeys", null ); // NOI18N properties[PROPERTY_typeName] = new PropertyDescriptor ( "typeName", wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class, "getTypeName", "setTypeName" ); // NOI18N } catch(IntrospectionException e) { e.printStackTrace(); }//GEN-HEADEREND:Properties // Here you can add code for customizing the properties array. return properties; }//GEN-LAST:Properties // EventSet identifiers//GEN-FIRST:Events private static final int EVENT_propertyChangeListener = 0; // EventSet array /*lazy EventSetDescriptor*/ private static EventSetDescriptor[] getEdescriptor(){ EventSetDescriptor[] eventSets = new EventSetDescriptor[1]; try { eventSets[EVENT_propertyChangeListener] = new EventSetDescriptor ( wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class, "propertyChangeListener", java.beans.PropertyChangeListener.class, new String[] {"propertyChange"}, "addPropertyChangeListener", "removePropertyChangeListener" ); // NOI18N } catch(IntrospectionException e) { e.printStackTrace(); }//GEN-HEADEREND:Events // Here you can add code for customizing the event sets array. 
return eventSets; }//GEN-LAST:Events // Method identifiers//GEN-FIRST:Methods private static final int METHOD_clone0 = 0; private static final int METHOD_createInstance1 = 1; private static final int METHOD_getIBMFunction2 = 2; private static final int METHOD_getIBMFunctionNamesByCategory3 = 3; private static final int METHOD_getIBMParameter4 = 4; private static final int METHOD_getSelectedIBMFunctionForCategory5 = 5; private static final int METHOD_getValue6 = 6; private static final int METHOD_getValue7 = 7; private static final int METHOD_getValue8 = 8; private static final int METHOD_getValue9 = 9; private static final int METHOD_getValue10 = 10; private static final int METHOD_getValue11 = 11; private static final int METHOD_getValue12 = 12; private static final int METHOD_getValue13 = 13; private static final int METHOD_getValue14 = 14; private static final int METHOD_getValue15 = 15; private static final int METHOD_selectIBMFunctionForCategory16 = 16; private static final int METHOD_setIBMFunction17 = 17; private static final int METHOD_setValue18 = 18; private static final int METHOD_setValue19 = 19; private static final int METHOD_setValue20 = 20; private static final int METHOD_setValue21 = 21; private static final int METHOD_setValue22 = 22; // Method array /*lazy MethodDescriptor*/ private static MethodDescriptor[] getMdescriptor(){ MethodDescriptor[] methods = new MethodDescriptor[23]; try { methods[METHOD_clone0] = new MethodDescriptor(wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class.getMethod("clone", new Class[] {})); // NOI18N methods[METHOD_clone0].setDisplayName ( "" ); methods[METHOD_createInstance1] = new MethodDescriptor(wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class.getMethod("createInstance", new Class[] {java.lang.String[].class})); // NOI18N methods[METHOD_createInstance1].setDisplayName ( "" ); methods[METHOD_getIBMFunction2] = new 
MethodDescriptor(wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class.getMethod("getIBMFunction", new Class[] {java.lang.String.class, java.lang.String.class})); // NOI18N methods[METHOD_getIBMFunction2].setDisplayName ( "" ); methods[METHOD_getIBMFunctionNamesByCategory3] = new MethodDescriptor(wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class.getMethod("getIBMFunctionNamesByCategory", new Class[] {java.lang.String.class})); // NOI18N methods[METHOD_getIBMFunctionNamesByCategory3].setDisplayName ( "" ); methods[METHOD_getIBMParameter4] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getIBMParameter", new Class[] {java.lang.String.class})); // NOI18N methods[METHOD_getIBMParameter4].setDisplayName ( "" ); methods[METHOD_getSelectedIBMFunctionForCategory5] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getSelectedIBMFunctionForCategory", new Class[] {java.lang.String.class})); // NOI18N methods[METHOD_getSelectedIBMFunctionForCategory5].setDisplayName ( "" ); methods[METHOD_getValue6] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getValue", new Class[] {java.lang.String.class, java.lang.Boolean.class})); // NOI18N methods[METHOD_getValue6].setDisplayName ( "" ); methods[METHOD_getValue7] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getValue", new Class[] {java.lang.String.class, java.lang.Double.class})); // NOI18N methods[METHOD_getValue7].setDisplayName ( "" ); methods[METHOD_getValue8] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getValue", new Class[] {java.lang.String.class, java.lang.Integer.class})); // NOI18N methods[METHOD_getValue8].setDisplayName ( "" ); methods[METHOD_getValue9] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getValue", new 
Class[] {java.lang.String.class, java.lang.Long.class})); // NOI18N methods[METHOD_getValue9].setDisplayName ( "" ); methods[METHOD_getValue10] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getValue", new Class[] {java.lang.String.class, boolean.class})); // NOI18N methods[METHOD_getValue10].setDisplayName ( "" ); methods[METHOD_getValue11] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getValue", new Class[] {java.lang.String.class, double.class})); // NOI18N methods[METHOD_getValue11].setDisplayName ( "" ); methods[METHOD_getValue12] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getValue", new Class[] {java.lang.String.class, int.class})); // NOI18N methods[METHOD_getValue12].setDisplayName ( "" ); methods[METHOD_getValue13] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getValue", new Class[] {java.lang.String.class, long.class})); // NOI18N methods[METHOD_getValue13].setDisplayName ( "" ); methods[METHOD_getValue14] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getValue", new Class[] {java.lang.String.class, java.lang.String.class})); // NOI18N methods[METHOD_getValue14].setDisplayName ( "" ); methods[METHOD_getValue15] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("getValue", new Class[] {java.lang.String.class})); // NOI18N methods[METHOD_getValue15].setDisplayName ( "" ); methods[METHOD_selectIBMFunctionForCategory16] = new MethodDescriptor(wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class.getMethod("selectIBMFunctionForCategory", new Class[] {java.lang.String.class, java.lang.String.class})); // NOI18N methods[METHOD_selectIBMFunctionForCategory16].setDisplayName ( "" ); methods[METHOD_setIBMFunction17] = new 
MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("setIBMFunction", new Class[] {java.lang.String.class, java.lang.String.class, wts.models.DisMELS.framework.IBMFunctions.IBMFunctionInterface.class})); // NOI18N methods[METHOD_setIBMFunction17].setDisplayName ( "" ); methods[METHOD_setValue18] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("setValue", new Class[] {java.lang.String.class, double.class})); // NOI18N methods[METHOD_setValue18].setDisplayName ( "" ); methods[METHOD_setValue19] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("setValue", new Class[] {java.lang.String.class, float.class})); // NOI18N methods[METHOD_setValue19].setDisplayName ( "" ); methods[METHOD_setValue20] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("setValue", new Class[] {java.lang.String.class, int.class})); // NOI18N methods[METHOD_setValue20].setDisplayName ( "" ); methods[METHOD_setValue21] = new MethodDescriptor(wts.models.DisMELS.framework.AbstractLHSParameters.class.getMethod("setValue", new Class[] {java.lang.String.class, long.class})); // NOI18N methods[METHOD_setValue21].setDisplayName ( "" ); methods[METHOD_setValue22] = new MethodDescriptor(wts.models.DisMELS.IBMs.ArrowtoothFlounder.EggStage.EggStageParameters.class.getMethod("setValue", new Class[] {java.lang.String.class, java.lang.Object.class})); // NOI18N methods[METHOD_setValue22].setDisplayName ( "" ); } catch( Exception e) {}//GEN-HEADEREND:Methods // Here you can add code for customizing the methods array. 
return methods; }//GEN-LAST:Methods private static java.awt.Image iconColor16 = null;//GEN-BEGIN:IconsDef private static java.awt.Image iconColor32 = null; private static java.awt.Image iconMono16 = null; private static java.awt.Image iconMono32 = null;//GEN-END:IconsDef private static String iconNameC16 = null;//GEN-BEGIN:Icons private static String iconNameC32 = null; private static String iconNameM16 = null; private static String iconNameM32 = null;//GEN-END:Icons private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx private static final int defaultEventIndex = -1;//GEN-END:Idx //GEN-FIRST:Superclass // Here you can add code for customizing the Superclass BeanInfo. //GEN-LAST:Superclass /** * Gets the bean's * <code>BeanDescriptor</code>s. * * @return BeanDescriptor describing the editable properties of this bean. * May return null if the information should be obtained by automatic * analysis. */ @Override public BeanDescriptor getBeanDescriptor() { return getBdescriptor(); } /** * Gets the bean's * <code>PropertyDescriptor</code>s. * * @return An array of PropertyDescriptors describing the editable * properties supported by this bean. May return null if the information * should be obtained by automatic analysis. <p> If a property is indexed, * then its entry in the result array will belong to the * IndexedPropertyDescriptor subclass of PropertyDescriptor. A client of * getPropertyDescriptors can use "instanceof" to check if a given * PropertyDescriptor is an IndexedPropertyDescriptor. */ @Override public PropertyDescriptor[] getPropertyDescriptors() { return getPdescriptor(); } /** * Gets the bean's * <code>EventSetDescriptor</code>s. * * @return An array of EventSetDescriptors describing the kinds of events * fired by this bean. May return null if the information should be obtained * by automatic analysis. 
*/ @Override public EventSetDescriptor[] getEventSetDescriptors() { return getEdescriptor(); } /** * Gets the bean's * <code>MethodDescriptor</code>s. * * @return An array of MethodDescriptors describing the methods implemented * by this bean. May return null if the information should be obtained by * automatic analysis. */ @Override public MethodDescriptor[] getMethodDescriptors() { return getMdescriptor(); } /** * A bean may have a "default" property that is the property that will * mostly commonly be initially chosen for update by human's who are * customizing the bean. * * @return Index of default property in the PropertyDescriptor array * returned by getPropertyDescriptors. <P> Returns -1 if there is no default * property. */ @Override public int getDefaultPropertyIndex() { return defaultPropertyIndex; } /** * A bean may have a "default" event that is the event that will mostly * commonly be used by human's when using the bean. * * @return Index of default event in the EventSetDescriptor array returned * by getEventSetDescriptors. <P> Returns -1 if there is no default event. */ @Override public int getDefaultEventIndex() { return defaultEventIndex; } /** * This method returns an image object that can be used to represent the * bean in toolboxes, toolbars, etc. Icon images will typically be GIFs, but * may in future include other formats. <p> Beans aren't required to provide * icons and may return null from this method. <p> There are four possible * flavors of icons (16x16 color, 32x32 color, 16x16 mono, 32x32 mono). If a * bean choses to only support a single icon we recommend supporting 16x16 * color. <p> We recommend that icons have a "transparent" background so * they can be rendered onto an existing background. * * @param iconKind The kind of icon requested. This should be one of the * constant values ICON_COLOR_16x16, ICON_COLOR_32x32, ICON_MONO_16x16, or * ICON_MONO_32x32. * @return An image object representing the requested icon. 
May return null * if no suitable icon is available. */ @Override public java.awt.Image getIcon(int iconKind) { switch (iconKind) { case ICON_COLOR_16x16: if (iconNameC16 == null) { return null; } else { if (iconColor16 == null) { iconColor16 = loadImage(iconNameC16); } return iconColor16; } case ICON_COLOR_32x32: if (iconNameC32 == null) { return null; } else { if (iconColor32 == null) { iconColor32 = loadImage(iconNameC32); } return iconColor32; } case ICON_MONO_16x16: if (iconNameM16 == null) { return null; } else { if (iconMono16 == null) { iconMono16 = loadImage(iconNameM16); } return iconMono16; } case ICON_MONO_32x32: if (iconNameM32 == null) { return null; } else { if (iconMono32 == null) { iconMono32 = loadImage(iconNameM32); } return iconMono32; } default: return null; } } }
/* * Copyright Terracotta, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ehcache.clustered.replication; import org.ehcache.Cache; import org.ehcache.PersistentCacheManager; import org.ehcache.Status; import org.ehcache.clustered.ClusteredTests; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.clustered.client.config.builders.TimeoutsBuilder; import org.ehcache.clustered.common.Consistency; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.testing.rules.Cluster; import java.io.File; import java.io.Serializable; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import 
java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.terracotta.testing.rules.BasicExternalClusterBuilder.newCluster;

/**
 * This test asserts Active-Passive fail-over with
 * multi-threaded/multi-client scenarios.
 * Note that fail-over is happening while client threads are still writing
 * Finally the same key set correctness is asserted.
 */
@RunWith(Parameterized.class)
public class BasicClusteredCacheOpsReplicationMultiThreadedTest extends ClusteredTests {

  private static final int NUM_OF_THREADS = 10;
  private static final int JOB_SIZE = 100;

  private static final String RESOURCE_CONFIG =
      "<config xmlns:ohr='http://www.terracotta.org/config/offheap-resource'>"
      + "<ohr:offheap-resources>"
      + "<ohr:resource name=\"primary-server-resource\" unit=\"MB\">16</ohr:resource>"
      + "</ohr:offheap-resources>"
      + "</config>\n";

  private static PersistentCacheManager CACHE_MANAGER1;
  private static PersistentCacheManager CACHE_MANAGER2;
  private static Cache<Long, BlobValue> CACHE1;
  private static Cache<Long, BlobValue> CACHE2;

  /** Every test runs once per cache consistency level. */
  @Parameters(name = "consistency={0}")
  public static Consistency[] data() {
    return Consistency.values();
  }

  @Parameter
  public Consistency cacheConsistency;

  @ClassRule
  public static Cluster CLUSTER =
      newCluster(2).in(new File("build/cluster")).withServiceFragment(RESOURCE_CONFIG).build();

  private final Logger log = LoggerFactory.getLogger(getClass());

  // Two cache managers / caches simulate two independent clients of the same clustered tier.
  private List<Cache<Long, BlobValue>> caches;
  private final ThreadLocalRandom random = ThreadLocalRandom.current();
  private final ExecutorService executorService = Executors.newWorkStealingPool(NUM_OF_THREADS);

  /**
   * Brings up the active and passive servers and creates two cache-manager
   * "clients" sharing one clustered cache.
   *
   * @throws Exception on cluster or cache-manager start failure.
   */
  @Before
  public void startServers() throws Exception {
    CLUSTER.getClusterControl().startAllServers();
    CLUSTER.getClusterControl().waitForActive();
    CLUSTER.getClusterControl().waitForRunningPassivesInStandby();
    final CacheManagerBuilder<PersistentCacheManager> clusteredCacheManagerBuilder
        = CacheManagerBuilder.newCacheManagerBuilder()
        .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm-replication"))
            .timeouts(TimeoutsBuilder.timeouts()
                // we need to give some time for the failover to occur
                .read(Duration.ofMinutes(1))
                .write(Duration.ofMinutes(1)))
            .autoCreate()
            .defaultServerResource("primary-server-resource"));
    CACHE_MANAGER1 = clusteredCacheManagerBuilder.build(true);
    CACHE_MANAGER2 = clusteredCacheManagerBuilder.build(true);
    CacheConfiguration<Long, BlobValue> config = CacheConfigurationBuilder
        .newCacheConfigurationBuilder(Long.class, BlobValue.class,
            ResourcePoolsBuilder.newResourcePoolsBuilder().heap(500, EntryUnit.ENTRIES)
                .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB)))
        .add(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency))
        .build();
    CACHE1 = CACHE_MANAGER1.createCache("clustered-cache", config);
    CACHE2 = CACHE_MANAGER2.createCache("clustered-cache", config);
    caches = Arrays.asList(CACHE1, CACHE2);
  }

  /**
   * Restarts any terminated server (tests kill the active) so close()/destroy()
   * below can reach the cluster, then shuts down the executor and cache managers.
   *
   * @throws Exception on cluster restart or cache-manager shutdown failure.
   */
  @After
  public void tearDown() throws Exception {
    CLUSTER.getClusterControl().startAllServers();
    CLUSTER.getClusterControl().waitForRunningPassivesInStandby();
    List<Runnable> unprocessed = executorService.shutdownNow();
    if(!unprocessed.isEmpty()) {
      // FIX: message used to read "{} unprocess task" while being passed the task
      // list itself; log the count the placeholder implies, with correct grammar.
      log.warn("Tearing down with {} unprocessed tasks", unprocessed.size());
    }
    if(CACHE_MANAGER1 != null && CACHE_MANAGER1.getStatus() != Status.UNINITIALIZED) {
      CACHE_MANAGER1.close();
    }
    if(CACHE_MANAGER2 != null && CACHE_MANAGER2.getStatus() != Status.UNINITIALIZED) {
      CACHE_MANAGER2.close();
      // destroy() once is enough: both managers share the same server-side state.
      CACHE_MANAGER2.destroy();
    }
  }

  /**
   * Concurrent single-key puts from both clients, active killed mid-stream;
   * afterwards both clients must agree on the surviving key set.
   */
  @Test(timeout=180000)
  public void testCRUD() throws Exception {
    Set<Long> universalSet = ConcurrentHashMap.newKeySet();
    List<Future<?>> futures = new ArrayList<>();
    caches.forEach(cache -> {
      for (int i = 0; i < NUM_OF_THREADS; i++) {
        futures.add(executorService.submit(() -> random.longs().limit(JOB_SIZE).forEach(x -> {
          cache.put(x, new BlobValue());
          universalSet.add(x);
        })));
      }
    });

    //This step is to add values in local tier randomly to test invalidations happen correctly
    futures.add(executorService.submit(() -> universalSet.forEach(x -> {
      CACHE1.get(x);
      CACHE2.get(x);
    })));

    CLUSTER.getClusterControl().terminateActive();

    drainTasks(futures);

    Set<Long> readKeysByCache1AfterFailOver = new HashSet<>();
    Set<Long> readKeysByCache2AfterFailOver = new HashSet<>();
    universalSet.forEach(x -> {
      if (CACHE1.get(x) != null) {
        readKeysByCache1AfterFailOver.add(x);
      }
      if (CACHE2.get(x) != null) {
        readKeysByCache2AfterFailOver.add(x);
      }
    });

    assertThat(readKeysByCache2AfterFailOver.size(), equalTo(readKeysByCache1AfterFailOver.size()));
    readKeysByCache2AfterFailOver.forEach(y -> assertThat(readKeysByCache1AfterFailOver.contains(y), is(true)));
  }

  /**
   * Same as {@link #testCRUD()} but with bulk putAll operations.
   */
  @Test(timeout=180000)
  public void testBulkOps() throws Exception {
    Set<Long> universalSet = ConcurrentHashMap.newKeySet();
    List<Future<?>> futures = new ArrayList<>();
    caches.forEach(cache -> {
      for (int i = 0; i < NUM_OF_THREADS; i++) {
        Map<Long, BlobValue> map = random.longs().limit(JOB_SIZE).collect(HashMap::new,
            (hashMap, x) -> hashMap.put(x, new BlobValue()), HashMap::putAll);
        futures.add(executorService.submit(() -> {
          cache.putAll(map);
          universalSet.addAll(map.keySet());
        }));
      }
    });

    //This step is to add values in local tier randomly to test invalidations happen correctly
    futures.add(executorService.submit(() -> {
      universalSet.forEach(x -> {
        CACHE1.get(x);
        CACHE2.get(x);
      });
    }));

    CLUSTER.getClusterControl().terminateActive();

    drainTasks(futures);

    Set<Long> readKeysByCache1AfterFailOver = new HashSet<>();
    Set<Long> readKeysByCache2AfterFailOver = new HashSet<>();
    universalSet.forEach(x -> {
      if (CACHE1.get(x) != null) {
        readKeysByCache1AfterFailOver.add(x);
      }
      if (CACHE2.get(x) != null) {
        readKeysByCache2AfterFailOver.add(x);
      }
    });

    assertThat(readKeysByCache2AfterFailOver.size(), equalTo(readKeysByCache1AfterFailOver.size()));
    readKeysByCache2AfterFailOver.forEach(y -> assertThat(readKeysByCache1AfterFailOver.contains(y), is(true)));
  }

  @Ignore("This is currently unstable as if the clear does not complete before the failover,"
      + "there is no future operation that will trigger the code in ClusterTierActiveEntity.invokeServerStoreOperation"
      + "dealing with in-flight invalidation reconstructed from reconnect data")
  @Test(timeout=180000)
  public void testClear() throws Exception {
    List<Future<?>> futures = new ArrayList<>();
    Set<Long> universalSet = ConcurrentHashMap.newKeySet();
    caches.forEach(cache -> {
      for (int i = 0; i < NUM_OF_THREADS; i++) {
        Map<Long, BlobValue> map = random.longs().limit(JOB_SIZE).collect(HashMap::new,
            (hashMap, x) -> hashMap.put(x, new BlobValue()), HashMap::putAll);
        futures.add(executorService.submit(() -> {
          cache.putAll(map);
          universalSet.addAll(map.keySet());
        }));
      }
    });

    drainTasks(futures);

    // Populate the local (heap) tier of both clients so clear() has invalidations to propagate.
    universalSet.forEach(x -> {
      CACHE1.get(x);
      CACHE2.get(x);
    });

    Future<?> clearFuture = executorService.submit(() -> CACHE1.clear());

    CLUSTER.getClusterControl().terminateActive();

    clearFuture.get();

    universalSet.forEach(x -> assertThat(CACHE2.get(x), nullValue()));
  }

  /**
   * Waits up to 60s for each submitted task; fails the test (rather than
   * hanging until the method timeout) identifying the stuck task index.
   *
   * @param futures tasks to await, in submission order.
   */
  private void drainTasks(List<Future<?>> futures) throws InterruptedException, java.util.concurrent.ExecutionException {
    for (int i = 0; i < futures.size(); i++) {
      try {
        futures.get(i).get(60, TimeUnit.SECONDS);
      } catch (TimeoutException e) {
        fail("Stuck on number " + i);
      }
    }
  }

  /** 10KB payload, sized so the dedicated pool spills and replicates non-trivially. */
  private static class BlobValue implements Serializable {

    private static final long serialVersionUID = 1L;

    private final byte[] data = new byte[10 * 1024];
  }
}
/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ package org.tensorflow.lite.support.audio; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertThrows; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import android.media.AudioFormat; import android.media.AudioRecord; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; import org.robolectric.RobolectricTestRunner; import org.tensorflow.lite.support.audio.TensorAudio.TensorAudioFormat; /** Test for {@link TensorAudio}. */ @RunWith(Suite.class) @SuiteClasses({ TensorAudioTest.General.class, }) public class TensorAudioTest { /** General tests of TensorAudio. 
*/ @RunWith(RobolectricTestRunner.class) public static final class General extends TensorAudioTest { @Test public void createSucceedsWithTensorAudioFormat() throws Exception { TensorAudio tensor = TensorAudio.create( TensorAudioFormat.builder().setChannels(1).setSampleRate(2).build(), 100); assertThat(tensor.getFormat().getChannels()).isEqualTo(1); assertThat(tensor.getFormat().getSampleRate()).isEqualTo(2); assertThat(tensor.getTensorBuffer().getFlatSize()).isEqualTo(100); } @Test public void createSucceedsWithTensorAudioFormatWithMultipleChannels() throws Exception { TensorAudio tensor = TensorAudio.create( TensorAudioFormat.builder().setChannels(5).setSampleRate(2).build(), 100); assertThat(tensor.getFormat().getChannels()).isEqualTo(5); assertThat(tensor.getFormat().getSampleRate()).isEqualTo(2); assertThat(tensor.getTensorBuffer().getFlatSize()).isEqualTo(500); } @Test public void createSucceededsWithDefaultArguments() throws Exception { TensorAudio tensor = TensorAudio.create(TensorAudioFormat.builder().setSampleRate(20).build(), 1000); // Number of channels defaults to 1. 
assertThat(tensor.getFormat().getChannels()).isEqualTo(1); assertThat(tensor.getFormat().getSampleRate()).isEqualTo(20); assertThat(tensor.getTensorBuffer().getFlatSize()).isEqualTo(1000); } @Test public void createSucceedsWithAudioFormat() throws Exception { AudioFormat format = new AudioFormat.Builder() .setChannelMask(AudioFormat.CHANNEL_IN_STEREO) .setEncoding(AudioFormat.ENCODING_PCM_16BIT) .setSampleRate(16000) .build(); TensorAudio tensor = TensorAudio.create(format, 100); // STEREO has 2 channels assertThat(tensor.getFormat().getChannels()).isEqualTo(2); assertThat(tensor.getFormat().getSampleRate()).isEqualTo(16000); // flatSize = channelCount * sampleCount assertThat(tensor.getTensorBuffer().getFlatSize()).isEqualTo(200); } @Test public void createFailedWithInvalidSampleRate() throws Exception { IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, () -> TensorAudio.create(TensorAudioFormat.builder().setSampleRate(0).build(), 100)); // Sample rate 0 is not allowed assertThat(exception).hasMessageThat().ignoringCase().contains("sample rate"); } @Test public void createFailedWithInvalidChannels() throws Exception { IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, () -> TensorAudio.create( TensorAudioFormat.builder().setSampleRate(1).setChannels(-1).build(), 100)); // Negative channels is not allowed assertThat(exception).hasMessageThat().ignoringCase().contains("channels"); } @Test public void loadSucceedsFromArray() throws Exception { TensorAudioFormat format = TensorAudioFormat.builder().setChannels(2).setSampleRate(2).build(); TensorAudio tensor = TensorAudio.create(format, 2); assertThat(tensor.getTensorBuffer().getFloatArray()).isEqualTo(new float[4]); tensor.load(new float[] {2.f, 0}); assertThat(tensor.getTensorBuffer().getFloatArray()) .usingTolerance(0.001f) .containsExactly(new float[] {0, 0, 2.f, 0}); tensor.load(new float[] {2.f, 3.f}, 0, 2); 
assertThat(tensor.getTensorBuffer().getFloatArray()) .usingTolerance(0.001f) .containsExactly(new float[] {2.f, 0, 2.f, 3.f}); tensor.load(new short[] {Short.MAX_VALUE, Short.MIN_VALUE}); assertThat(tensor.getTensorBuffer().getFloatArray()) .usingTolerance(0.001f) .containsExactly(new float[] {2.f, 3.f, 1.f, -1.f}); tensor.load(new short[] {1, 2, 3, 0, 1, Short.MIN_VALUE, 3, 4, 5}, 3, 6); // The entire sequence becomes {2.f, 0, 2.f, 3.f, 1.f, -1.f, 0, 0, -1.f, 0, 0, 0} but the ring // buffer is only keep the last 4 results. assertThat(tensor.getTensorBuffer().getFloatArray()) .usingTolerance(0.001f) .containsExactly(new float[] {-1.f, 0, 0, 0}); } @Test public void loadFailsWithIndexOutOfRange() throws Exception { TensorAudioFormat format = TensorAudioFormat.builder().setSampleRate(2).build(); TensorAudio tensor = TensorAudio.create(format, 5); assertThrows(IllegalArgumentException.class, () -> tensor.load(new short[100], 99, 2)); assertThrows(IllegalArgumentException.class, () -> tensor.load(new float[100], 99, 2)); } @Test public void loadFailsWithIncompatibleInputSize() throws Exception { TensorAudioFormat format = TensorAudioFormat.builder().setChannels(3).setSampleRate(2).build(); TensorAudio tensor = TensorAudio.create(format, 5); assertThrows(IllegalArgumentException.class, () -> tensor.load(new float[1])); assertThrows(IllegalArgumentException.class, () -> tensor.load(new short[2])); assertThrows(IllegalArgumentException.class, () -> tensor.load(new float[2], 1, 1)); assertThrows(IllegalArgumentException.class, () -> tensor.load(new short[5], 2, 4)); } @Test public void loadAudioRecordSucceeds() throws Exception { TensorAudio tensor = TensorAudio.create(TensorAudioFormat.builder().setSampleRate(16000).build(), 4); tensor.load(new float[] {1, 2, 3, 4, 5}); assertThat(tensor.getTensorBuffer().getFloatArray()) .isEqualTo(new float[] {2.f, 3.f, 4.f, 5.f}); AudioRecord record = mock(AudioRecord.class); when(record.getBufferSizeInFrames()).thenReturn(5); 
when(record.getChannelCount()).thenReturn(1); when(record.getAudioFormat()).thenReturn(AudioFormat.ENCODING_PCM_FLOAT); when(record.getFormat()) .thenReturn( new AudioFormat.Builder() .setChannelMask(AudioFormat.CHANNEL_IN_MONO) .setEncoding(AudioFormat.ENCODING_PCM_FLOAT) .setSampleRate(16000) .build()); // Unused when(record.read(any(short[].class), anyInt(), anyInt(), eq(AudioRecord.READ_NON_BLOCKING))) .thenReturn(AudioRecord.ERROR_INVALID_OPERATION); // Used when(record.read(any(float[].class), anyInt(), anyInt(), eq(AudioRecord.READ_NON_BLOCKING))) .thenReturn(1); assertThat(tensor.load(record)).isEqualTo(1); assertThat(tensor.getTensorBuffer().getFloatArray()) .isEqualTo(new float[] {3.f, 4.f, 5.f, 0}); record = mock(AudioRecord.class); when(record.getBufferSizeInFrames()).thenReturn(5); when(record.getChannelCount()).thenReturn(1); when(record.getAudioFormat()).thenReturn(AudioFormat.ENCODING_PCM_16BIT); when(record.getFormat()) .thenReturn( new AudioFormat.Builder() .setChannelMask(AudioFormat.CHANNEL_IN_MONO) .setEncoding(AudioFormat.ENCODING_PCM_16BIT) .setSampleRate(16000) .build()); // Used when(record.read(any(short[].class), anyInt(), anyInt(), eq(AudioRecord.READ_NON_BLOCKING))) .thenReturn(2); // Unused when(record.read(any(float[].class), anyInt(), anyInt(), eq(AudioRecord.READ_NON_BLOCKING))) .thenReturn(AudioRecord.ERROR_INVALID_OPERATION); assertThat(tensor.load(record)).isEqualTo(2); assertThat(tensor.getTensorBuffer().getFloatArray()).isEqualTo(new float[] {5.f, 0, 0, 0}); } @Test public void loadAudioRecordFailsWithErrorState() throws Exception { TensorAudio tensor = TensorAudio.create(TensorAudioFormat.builder().setSampleRate(16000).build(), 4); tensor.load(new float[] {1, 2, 3, 4, 5}); assertThat(tensor.getTensorBuffer().getFloatArray()) .isEqualTo(new float[] {2.f, 3.f, 4.f, 5.f}); AudioRecord record = mock(AudioRecord.class); when(record.getAudioFormat()).thenReturn(AudioFormat.ENCODING_PCM_FLOAT); when(record.getFormat()) .thenReturn( 
new AudioFormat.Builder() .setChannelMask(AudioFormat.CHANNEL_IN_MONO) .setEncoding(AudioFormat.ENCODING_PCM_FLOAT) .setSampleRate(16000) .build()); // Unused when(record.read(any(short[].class), anyInt(), anyInt(), eq(AudioRecord.READ_NON_BLOCKING))) .thenReturn(AudioRecord.ERROR_INVALID_OPERATION); // Used when(record.read(any(float[].class), anyInt(), anyInt(), eq(AudioRecord.READ_NON_BLOCKING))) .thenReturn(AudioRecord.ERROR_DEAD_OBJECT); IllegalStateException exception = assertThrows(IllegalStateException.class, () -> tensor.load(record)); assertThat(exception).hasMessageThat().contains("ERROR_DEAD_OBJECT"); } @Test public void loadAudioRecordFailsWithUnsupportedAudioEncoding() throws Exception { TensorAudio tensor = TensorAudio.create(TensorAudioFormat.builder().setSampleRate(16000).build(), 4); AudioRecord record = mock(AudioRecord.class); when(record.getFormat()) .thenReturn( new AudioFormat.Builder() .setChannelMask(AudioFormat.CHANNEL_IN_MONO) .setEncoding(AudioFormat.ENCODING_PCM_8BIT) // Not supported .setSampleRate(16000) .build()); when(record.getAudioFormat()).thenReturn(AudioFormat.ENCODING_PCM_8BIT); IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> tensor.load(record)); assertThat(exception).hasMessageThat().ignoringCase().contains("unsupported encoding"); } @Test public void loadAudioRecordFailsWithIncompatibleAudioFormat() throws Exception { TensorAudio tensor = TensorAudio.create(TensorAudioFormat.builder().setSampleRate(16000).build(), 4); AudioRecord record = mock(AudioRecord.class); when(record.getFormat()) .thenReturn( new AudioFormat.Builder() .setChannelMask(AudioFormat.CHANNEL_IN_MONO) .setEncoding(AudioFormat.ENCODING_PCM_FLOAT) .setSampleRate(44100) // Mismatch .build()); IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> tensor.load(record)); assertThat(exception).hasMessageThat().ignoringCase().contains("Incompatible audio format"); } } }
package ws.wamp.jawampa.roles; import static org.mockito.Matchers.any; import static org.mockito.Matchers.argThat; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentMatcher; import org.mockito.InOrder; import org.mockito.Mock; import rx.Observer; import rx.subjects.PublishSubject; import ws.wamp.jawampa.ApplicationError; import ws.wamp.jawampa.PubSubData; import ws.wamp.jawampa.ids.PublicationId; import ws.wamp.jawampa.ids.RequestId; import ws.wamp.jawampa.ids.SubscriptionId; import ws.wamp.jawampa.io.BaseClient; import ws.wamp.jawampa.messages.ErrorMessage; import ws.wamp.jawampa.messages.EventMessage; import ws.wamp.jawampa.messages.SubscribeMessage; import ws.wamp.jawampa.messages.SubscribedMessage; import ws.wamp.jawampa.messages.UnsubscribeMessage; import ws.wamp.jawampa.messages.UnsubscribedMessage; import ws.wamp.jawampa.messages.WampMessage; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; public class SubscriberTest { private static final RequestId REQUEST_ID = RequestId.of( 42L ); private static final SubscriptionId SUBSCRIPTION_ID = SubscriptionId.of( 23L ); private static final PublicationId PUBLICATION_ID = PublicationId.of( 17L ); private static final RequestId UN_REQUEST_ID = RequestId.of( 57L ); @Mock private BaseClient baseClient; private String topic = "some_topic"; @Mock private ArrayNode arguments; @Mock private ObjectNode kwArguments; private Subscriber subject; private PublishSubject<PubSubData> resultSubject; @Mock private Observer<PubSubData> publicationObserver; private PublishSubject<Void> unsubscribeSubject; @Mock private Observer<Void> unsubscriptionObserver; @Before public void setup() { initMocks( this ); subject = new Subscriber( 
baseClient ); resultSubject = PublishSubject.create(); resultSubject.subscribe( publicationObserver ); unsubscribeSubject = PublishSubject.create(); unsubscribeSubject.subscribe( unsubscriptionObserver ); when( baseClient.getNewRequestId() ).thenReturn( REQUEST_ID ) .thenReturn( UN_REQUEST_ID ) .thenThrow( new IllegalStateException( "No more request ids for you!" ) ); } @Test public void testSubscribeSendsSubscribeMessage() { subject.subscribe( topic, resultSubject ); ArgumentMatcher<WampMessage> messageMatcher = new ArgumentMatcher<WampMessage>() { @Override public boolean matches( Object argument ) { SubscribeMessage message = (SubscribeMessage)argument; if ( !message.requestId.equals( REQUEST_ID ) ) return false; if ( message.topic != topic ) return false; return true; } }; verify( baseClient ).scheduleMessageToRouter( argThat( messageMatcher ) ); } @Test public void testEventIsDeliveredAfterSubscription() { subject.subscribe( topic, resultSubject ); subject.onSubscribed( new SubscribedMessage( REQUEST_ID, SUBSCRIPTION_ID ) ); subject.onEvent( new EventMessage( SUBSCRIPTION_ID, PUBLICATION_ID, null, arguments, kwArguments ) ); ArgumentMatcher<PubSubData> dataMatcher = new ArgumentMatcher<PubSubData>() { @Override public boolean matches( Object argument ) { PubSubData data = (PubSubData)argument; if ( data.arguments() != arguments ) return false; if ( data.keywordArguments() != kwArguments ) return false; return true; } }; verify( publicationObserver ).onNext( argThat( dataMatcher ) ); verify( publicationObserver, never()).onCompleted(); verify( publicationObserver, never()).onError( any( Throwable.class ) ); } @Test public void testSubscriptionErrorIsDeliveredToClient() { subject.subscribe( topic, resultSubject ); subject.onSubscribeError( new ErrorMessage( SubscribeMessage.ID, REQUEST_ID, null, ApplicationError.INVALID_ARGUMENT, null, null ) ); verify( publicationObserver, never() ).onNext( any( PubSubData.class ) ); verify( publicationObserver, 
never()).onCompleted(); verify( publicationObserver).onError( any( Throwable.class ) ); } @Test public void testUnsubscribeSendsUnsubscribeMessage() { subject.subscribe( topic, resultSubject ); subject.onSubscribed( new SubscribedMessage( REQUEST_ID, SUBSCRIPTION_ID ) ); subject.unsubscribe( topic, unsubscribeSubject ); ArgumentMatcher<WampMessage> messageMatcher = new ArgumentMatcher<WampMessage>() { @Override public boolean matches( Object argument ) { UnsubscribeMessage message = (UnsubscribeMessage)argument; if ( !message.requestId.equals( UN_REQUEST_ID ) ) return false; if ( !message.subscriptionId.equals( SUBSCRIPTION_ID ) ) return false; return true; } }; InOrder inOrder = inOrder( baseClient ); inOrder.verify( baseClient ).scheduleMessageToRouter( any( WampMessage.class ) ); inOrder.verify( baseClient ).scheduleMessageToRouter( argThat( messageMatcher ) ); } @Test public void testSuccessfulUnsubscribeIsDeliveredToClient() { subject.subscribe( topic, resultSubject ); subject.onSubscribed( new SubscribedMessage( REQUEST_ID, SUBSCRIPTION_ID ) ); subject.unsubscribe( topic, unsubscribeSubject ); subject.onUnsubscribed( new UnsubscribedMessage( UN_REQUEST_ID ) ); verify( unsubscriptionObserver, never() ).onNext( any( Void.class ) ); verify( unsubscriptionObserver ).onCompleted(); verify( unsubscriptionObserver, never() ).onError( any( Throwable.class ) ); } @Test public void testUnsubscribeErrorIsDeliveredToClient() { subject.subscribe( topic, resultSubject ); subject.onSubscribed( new SubscribedMessage( REQUEST_ID, SUBSCRIPTION_ID ) ); subject.unsubscribe( topic, unsubscribeSubject ); subject.onUnsubscribeError( new ErrorMessage( SubscribeMessage.ID, UN_REQUEST_ID, null, ApplicationError.INVALID_ARGUMENT, null, null ) ); verify( unsubscriptionObserver, never() ).onNext( any( Void.class ) ); verify( unsubscriptionObserver, never() ).onCompleted(); verify( unsubscriptionObserver ).onError( any( ApplicationError.class ) ); } @Test public void 
testEventAfterUnsubscribeIsAnError() { subject.subscribe( topic, resultSubject ); subject.onSubscribed( new SubscribedMessage( REQUEST_ID, SUBSCRIPTION_ID ) ); subject.unsubscribe( topic, unsubscribeSubject ); subject.onUnsubscribed( new UnsubscribedMessage( UN_REQUEST_ID ) ); subject.onEvent( new EventMessage( SUBSCRIPTION_ID, PUBLICATION_ID, null, arguments, kwArguments ) ); verify( baseClient ).onProtocolError(); } @Test public void testRegistrationSubjectIsCompletedWhenUnsubscribeIsSuccessful() { subject.subscribe( topic, resultSubject ); subject.onSubscribed( new SubscribedMessage( REQUEST_ID, SUBSCRIPTION_ID ) ); subject.unsubscribe( topic, unsubscribeSubject ); subject.onUnsubscribed( new UnsubscribedMessage( UN_REQUEST_ID ) ); verify( publicationObserver ).onCompleted(); verify( publicationObserver, never()).onError( any( Throwable.class ) ); } @Test public void testUnsubscribeBeforeSubscribedMessage() { subject.subscribe( topic, resultSubject ); subject.unsubscribe( topic, unsubscribeSubject ); subject.onSubscribed( new SubscribedMessage( REQUEST_ID, SUBSCRIPTION_ID ) ); ArgumentMatcher<WampMessage> messageMatcher = new ArgumentMatcher<WampMessage>() { @Override public boolean matches( Object argument ) { UnsubscribeMessage message = (UnsubscribeMessage)argument; if ( !message.requestId.equals( UN_REQUEST_ID ) ) return false; if ( !message.subscriptionId.equals( SUBSCRIPTION_ID ) ) return false; return true; } }; InOrder inOrder = inOrder( baseClient ); inOrder.verify( baseClient ).scheduleMessageToRouter( any( WampMessage.class ) ); inOrder.verify( baseClient ).scheduleMessageToRouter( argThat( messageMatcher ) ); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.client;

import java.nio.channels.ClosedChannelException;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.client.impl.connection.GridClientConnectionResetException;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testsuites.IgniteIgnore;

/**
 * Tests client reconnection behavior against a set of local fake REST servers
 * ({@link ClientTestRestServer}): transparent fail-over to another server,
 * initial-connect fallback, hard failure when all servers reset connections,
 * and idle-connection re-open.
 */
public class ClientReconnectionSelfTest extends GridCommonAbstractTest {
    /** Host all test servers bind to. */
    public static final String HOST = "127.0.0.1";

    /** Test servers, indexed consistently with the client's address list. */
    private ClientTestRestServer[] srvs = new ClientTestRestServer[ClientTestRestServer.SERVERS_CNT];

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        for (int i = 0; i < srvs.length; i++) {
            ClientTestRestServer srv = srvs[i];

            if (srv != null)
                srv.stop();

            srvs[i] = null;
        }

        super.afterTest();
    }

    /**
     * @return Client for test.
     * @throws GridClientException In case of error.
     */
    private GridClient client() throws GridClientException {
        return client(HOST);
    }

    /**
     * @param host - server host
     * @return Client for test.
     * @throws GridClientException In case of error.
     */
    private GridClient client(String host) throws GridClientException {
        GridClientConfiguration cfg = new GridClientConfiguration();

        cfg.setProtocol(GridClientProtocol.TCP);

        // Address list covers every test server port; connection order follows list order.
        Collection<String> addrs = new ArrayList<>();

        for (int port = ClientTestRestServer.FIRST_SERVER_PORT; port < ClientTestRestServer.FIRST_SERVER_PORT + ClientTestRestServer.SERVERS_CNT; port++)
            addrs.add(host + ":" + port);

        cfg.setServers(addrs);

        // Effectively disable background topology refresh (1 hour) so only explicit
        // requests open connections; keeps the per-server connect counters deterministic.
        cfg.setTopologyRefreshFrequency(60 * 60 * 1000);

        return GridClientFactory.start(cfg);
    }

    /**
     * Repeatedly fails the currently-contacted server and verifies the client
     * fails over to exactly one other (non-failed) server each time.
     *
     * @throws Exception If failed.
     */
    public void testNoFailedReconnection() throws Exception {
        for (int i = 0; i < ClientTestRestServer.SERVERS_CNT; i++)
            runServer(i, false);

        try (GridClient client = client()) { // Here client opens initial connection and fetches topology.
            // Only first server in list should be contacted.
            assertEquals(1, srvs[0].getConnectCount());

            for (int i = 1; i < ClientTestRestServer.SERVERS_CNT; i++)
                assertEquals(0, srvs[i].getConnectCount());

            srvs[0].resetCounters();

            int contactedSrv = 0;

            for (int i = 0; i < 100; i++) {
                int failed = contactedSrv;

                srvs[failed].fail();

                // Sometimes session close missing on client side. Retry few times until request succeeds.
                while (true)
                    try {
                        client.compute().refreshTopology(false, false);

                        break;
                    }
                    catch (GridClientConnectionResetException e) {
                        info("Exception caught: " + e);
                    }

                // Check which servers were contacted,
                int connects = 0;

                for (int srv = 0; srv < ClientTestRestServer.SERVERS_CNT; srv++) {
                    if (srvs[srv].getSuccessfulConnectCount() > 0) {
                        assertTrue("Failed server was contacted: " + srv, srv != failed);

                        contactedSrv = srv;
                    }

                    connects += srvs[srv].getSuccessfulConnectCount();
                }

                assertEquals(1, connects); // Only one new connection should be opened.

                srvs[failed].repair();

                srvs[contactedSrv].resetCounters(); // It should be the only server with non-0 counters.
            }
        }
    }

    /**
     * First server fails incoming connections; client must fall through to the
     * second server during initial connect and touch no others.
     *
     * @throws Exception If failed.
     */
    public void testCorrectInit() throws Exception {
        for (int i = 0; i < ClientTestRestServer.SERVERS_CNT; i++)
            runServer(i, i == 0);

        try (GridClient ignored = client()) { // Here client opens initial connection and fetches topology.
            // First and second should be contacted, due to failure in initial request to the first.
            for (int i = 0; i < 2; i++)
                assertEquals("Iteration: " + i, 1, srvs[i].getConnectCount());

            for (int i = 2; i < ClientTestRestServer.SERVERS_CNT; i++)
                assertEquals(0, srvs[i].getConnectCount());
        }
    }

    /**
     * All servers fail connections; client operation must surface a disconnect
     * exception after exhausting its connect attempts on every server.
     *
     * @throws Exception If failed.
     */
    public void testFailedInit() throws Exception {
        for (int i = 0; i < ClientTestRestServer.SERVERS_CNT; i++)
            runServer(i, true);

        GridClient c = client();

        try {
            c.compute().execute("fake", "arg");

            fail("Client operation should fail when server resets connections.");
        }
        catch (GridClientDisconnectedException e) {
            assertTrue("Thrown exception doesn't have an expected cause: " + X.getFullStackTrace(e),
                X.hasCause(e, GridClientConnectionResetException.class, ClosedChannelException.class));
        }

        for (int i = 0; i < ClientTestRestServer.SERVERS_CNT; i++)
            // Connection manager does 3 attempts to get topology before failure.
            assertEquals("Server: " + i, 3, srvs[i].getConnectCount());
    }

    /**
     * TODO: IGNITE-590.
     *
     * Lets the connection idle out, then verifies a new request transparently
     * re-opens it against the same (first) server.
     *
     * @throws Exception If failed.
     */
    @IgniteIgnore(value = "https://issues.apache.org/jira/browse/IGNITE-590", forceFailure = true)
    public void testIdleConnection() throws Exception {
        int srvsCnt = 4; // TODO: IGNITE-590 it may be wrong value. Need to investigate after IGNITE-590 will be fixed.

        for (int i = 0; i < srvsCnt; i++)
            runServer(i, false);

        GridClient client = client(); // Here client opens initial connection and fetches topology.

        try {
            // Only first server in list should be contacted.
            assertEquals(1, srvs[0].getConnectCount());

            Thread.sleep(35000); // Timeout as idle.

            assertEquals(1, srvs[0].getDisconnectCount());

            for (int i = 1; i < srvsCnt; i++)
                assertEquals(0, srvs[i].getConnectCount());

            srvs[0].resetCounters();

            // On new request connection should be re-opened.
            client.compute().refreshTopology(false, false);

            assertEquals(1, srvs[0].getConnectCount());

            for (int i = 1; i < srvsCnt; i++)
                assertEquals(0, srvs[i].getConnectCount());
        }
        finally {
            GridClientFactory.stop(client.id());
        }
    }

    /**
     * Runs a new server with given index.
     *
     * @param idx Server index, same as in client configuration's servers property.
     * @param failOnConnect If {@code true} the server should fail incoming connection immediately.
     * @return Server instance.
     * @throws IgniteCheckedException If failed.
     */
    private ClientTestRestServer runServer(int idx, boolean failOnConnect) throws IgniteCheckedException {
        ClientTestRestServer srv = new ClientTestRestServer(ClientTestRestServer.FIRST_SERVER_PORT + idx, failOnConnect, log());

        srv.start();

        srvs[idx] = srv;

        return srv;
    }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/redis/v1/cloud_redis.proto

package com.google.cloud.redis.v1;

/**
 *
 *
 * <pre>
 * Request for [Export][google.cloud.redis.v1.CloudRedis.ExportInstance].
 * </pre>
 *
 * Protobuf type {@code google.cloud.redis.v1.ExportInstanceRequest}
 *
 * <p>NOTE(review): generated protobuf message. Instances are immutable; create them via
 * {@link #newBuilder()} or the static {@code parseFrom} methods. Do not edit by hand —
 * regenerate from the .proto source instead.
 */
public final class ExportInstanceRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.redis.v1.ExportInstanceRequest)
    ExportInstanceRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ExportInstanceRequest.newBuilder() to construct.
  private ExportInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the shared default instance; fields take proto3 defaults.
  private ExportInstanceRequest() {
    name_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ExportInstanceRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tags until end-of-stream, preserving any
  // fields this generated class does not know about in `unknownFields`.
  private ExportInstanceRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // tag 0 signals end of the input stream
            done = true;
            break;
          case 10: // field 1 (`name`), wire type 2: length-delimited UTF-8 string
            {
              java.lang.String s = input.readStringRequireUtf8();

              name_ = s;
              break;
            }
          case 26: // field 3 (`output_config`), wire type 2: embedded message
            {
              com.google.cloud.redis.v1.OutputConfig.Builder subBuilder = null;
              // If the field was already set (repeated occurrence on the wire),
              // merge the new message into the existing one per proto semantics.
              if (outputConfig_ != null) {
                subBuilder = outputConfig_.toBuilder();
              }
              outputConfig_ =
                  input.readMessage(
                      com.google.cloud.redis.v1.OutputConfig.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(outputConfig_);
                outputConfig_ = subBuilder.buildPartial();
              }

              break;
            }
          default:
            {
              // Unknown tag: stash it (or stop if the tag indicates group-end/EOF).
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
        .internal_static_google_cloud_redis_v1_ExportInstanceRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
        .internal_static_google_cloud_redis_v1_ExportInstanceRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.redis.v1.ExportInstanceRequest.class,
            com.google.cloud.redis.v1.ExportInstanceRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted (and cached) on access.
  private volatile java.lang.Object name_;
  /**
   *
   *
   * <pre>
   * Required. Redis instance resource name using the form:
   * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
   * where `location_id` refers to a GCP region.
   * </pre>
   *
   * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later calls skip the UTF-8 decode.
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Redis instance resource name using the form:
   * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
   * where `location_id` refers to a GCP region.
   * </pre>
   *
   * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for subsequent byte-level access.
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int OUTPUT_CONFIG_FIELD_NUMBER = 3;
  private com.google.cloud.redis.v1.OutputConfig outputConfig_;
  /**
   *
   *
   * <pre>
   * Required. Specify data to be exported.
   * </pre>
   *
   * <code>
   * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the outputConfig field is set.
   */
  @java.lang.Override
  public boolean hasOutputConfig() {
    return outputConfig_ != null;
  }
  /**
   *
   *
   * <pre>
   * Required. Specify data to be exported.
   * </pre>
   *
   * <code>
   * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The outputConfig. Never null: falls back to the default instance when unset.
   */
  @java.lang.Override
  public com.google.cloud.redis.v1.OutputConfig getOutputConfig() {
    return outputConfig_ == null
        ? com.google.cloud.redis.v1.OutputConfig.getDefaultInstance()
        : outputConfig_;
  }
  /**
   *
   *
   * <pre>
   * Required. Specify data to be exported.
   * </pre>
   *
   * <code>
   * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.redis.v1.OutputConfigOrBuilder getOutputConfigOrBuilder() {
    return getOutputConfig();
  }

  // Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 scalar fields are only written when non-default (empty string is skipped).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (outputConfig_ != null) {
      output.writeMessage(3, getOutputConfig());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (outputConfig_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getOutputConfig());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.redis.v1.ExportInstanceRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.redis.v1.ExportInstanceRequest other =
        (com.google.cloud.redis.v1.ExportInstanceRequest) obj;

    if (!getName().equals(other.getName())) return false;
    if (hasOutputConfig() != other.hasOutputConfig()) return false;
    if (hasOutputConfig()) {
      if (!getOutputConfig().equals(other.getOutputConfig())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Cached after first computation; 0 doubles as the "not computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    if (hasOutputConfig()) {
      hash = (37 * hash) + OUTPUT_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getOutputConfig().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.redis.v1.ExportInstanceRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty Builder; otherwise seed the Builder with this message.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request for [Export][google.cloud.redis.v1.CloudRedis.ExportInstance].
   * </pre>
   *
   * Protobuf type {@code google.cloud.redis.v1.ExportInstanceRequest}
   *
   * <p>Mutable builder companion for {@link ExportInstanceRequest}. Not thread-safe.
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.redis.v1.ExportInstanceRequest)
      com.google.cloud.redis.v1.ExportInstanceRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
          .internal_static_google_cloud_redis_v1_ExportInstanceRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
          .internal_static_google_cloud_redis_v1_ExportInstanceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.redis.v1.ExportInstanceRequest.class,
              com.google.cloud.redis.v1.ExportInstanceRequest.Builder.class);
    }

    // Construct using com.google.cloud.redis.v1.ExportInstanceRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No nested builders need eager initialization for this message type.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";

      if (outputConfigBuilder_ == null) {
        outputConfig_ = null;
      } else {
        outputConfig_ = null;
        outputConfigBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
          .internal_static_google_cloud_redis_v1_ExportInstanceRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.redis.v1.ExportInstanceRequest getDefaultInstanceForType() {
      return com.google.cloud.redis.v1.ExportInstanceRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.redis.v1.ExportInstanceRequest build() {
      com.google.cloud.redis.v1.ExportInstanceRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.redis.v1.ExportInstanceRequest buildPartial() {
      com.google.cloud.redis.v1.ExportInstanceRequest result =
          new com.google.cloud.redis.v1.ExportInstanceRequest(this);
      result.name_ = name_;
      // Either the raw field value or the nested builder holds output_config,
      // depending on whether the lazy field builder was ever created.
      if (outputConfigBuilder_ == null) {
        result.outputConfig_ = outputConfig_;
      } else {
        result.outputConfig_ = outputConfigBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.redis.v1.ExportInstanceRequest) {
        return mergeFrom((com.google.cloud.redis.v1.ExportInstanceRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.redis.v1.ExportInstanceRequest other) {
      if (other == com.google.cloud.redis.v1.ExportInstanceRequest.getDefaultInstance())
        return this;
      // Proto3 merge semantics: only non-default fields from `other` overwrite.
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      if (other.hasOutputConfig()) {
        mergeOutputConfig(other.getOutputConfig());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.redis.v1.ExportInstanceRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was successfully parsed before the failure, then rethrow.
        parsedMessage = (com.google.cloud.redis.v1.ExportInstanceRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * Required. Redis instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a GCP region.
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so later calls skip the UTF-8 decode.
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Redis instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a GCP region.
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString for subsequent byte-level access.
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Redis instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a GCP region.
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The name to set. Must not be null.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      name_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Redis instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a GCP region.
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {

      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Redis instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a GCP region.
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for name to set. Must not be null and must be valid UTF-8.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      name_ = value;
      onChanged();
      return this;
    }

    private com.google.cloud.redis.v1.OutputConfig outputConfig_;
    // Lazily created; once non-null it owns the output_config value instead of outputConfig_.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.redis.v1.OutputConfig,
            com.google.cloud.redis.v1.OutputConfig.Builder,
            com.google.cloud.redis.v1.OutputConfigOrBuilder>
        outputConfigBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Specify data to be exported.
     * </pre>
     *
     * <code>
     * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the outputConfig field is set.
     */
    public boolean hasOutputConfig() {
      return outputConfigBuilder_ != null || outputConfig_ != null;
    }
    /**
     *
     *
     * <pre>
     * Required. Specify data to be exported.
     * </pre>
     *
     * <code>
     * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The outputConfig. Never null: falls back to the default instance when unset.
     */
    public com.google.cloud.redis.v1.OutputConfig getOutputConfig() {
      if (outputConfigBuilder_ == null) {
        return outputConfig_ == null
            ? com.google.cloud.redis.v1.OutputConfig.getDefaultInstance()
            : outputConfig_;
      } else {
        return outputConfigBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Specify data to be exported.
     * </pre>
     *
     * <code>
     * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setOutputConfig(com.google.cloud.redis.v1.OutputConfig value) {
      if (outputConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        outputConfig_ = value;
        onChanged();
      } else {
        outputConfigBuilder_.setMessage(value);
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Specify data to be exported.
     * </pre>
     *
     * <code>
     * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setOutputConfig(com.google.cloud.redis.v1.OutputConfig.Builder builderForValue) {
      if (outputConfigBuilder_ == null) {
        outputConfig_ = builderForValue.build();
        onChanged();
      } else {
        outputConfigBuilder_.setMessage(builderForValue.build());
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Specify data to be exported.
     * </pre>
     *
     * <code>
     * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeOutputConfig(com.google.cloud.redis.v1.OutputConfig value) {
      if (outputConfigBuilder_ == null) {
        // Merge field-by-field into the existing message, if any; otherwise just take `value`.
        if (outputConfig_ != null) {
          outputConfig_ =
              com.google.cloud.redis.v1.OutputConfig.newBuilder(outputConfig_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          outputConfig_ = value;
        }
        onChanged();
      } else {
        outputConfigBuilder_.mergeFrom(value);
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Specify data to be exported.
     * </pre>
     *
     * <code>
     * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearOutputConfig() {
      if (outputConfigBuilder_ == null) {
        outputConfig_ = null;
        onChanged();
      } else {
        outputConfig_ = null;
        outputConfigBuilder_ = null;
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Specify data to be exported.
     * </pre>
     *
     * <code>
     * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.redis.v1.OutputConfig.Builder getOutputConfigBuilder() {

      onChanged();
      return getOutputConfigFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Specify data to be exported.
     * </pre>
     *
     * <code>
     * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.redis.v1.OutputConfigOrBuilder getOutputConfigOrBuilder() {
      if (outputConfigBuilder_ != null) {
        return outputConfigBuilder_.getMessageOrBuilder();
      } else {
        return outputConfig_ == null
            ? com.google.cloud.redis.v1.OutputConfig.getDefaultInstance()
            : outputConfig_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Specify data to be exported.
     * </pre>
     *
     * <code>
     * .google.cloud.redis.v1.OutputConfig output_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.redis.v1.OutputConfig,
            com.google.cloud.redis.v1.OutputConfig.Builder,
            com.google.cloud.redis.v1.OutputConfigOrBuilder>
        getOutputConfigFieldBuilder() {
      // Lazily create the field builder; from then on it owns the field value
      // (outputConfig_ is nulled out to avoid two sources of truth).
      if (outputConfigBuilder_ == null) {
        outputConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.redis.v1.OutputConfig,
                com.google.cloud.redis.v1.OutputConfig.Builder,
                com.google.cloud.redis.v1.OutputConfigOrBuilder>(
                getOutputConfig(), getParentForChildren(), isClean());
        outputConfig_ = null;
      }
      return outputConfigBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.redis.v1.ExportInstanceRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.redis.v1.ExportInstanceRequest)
  private static final com.google.cloud.redis.v1.ExportInstanceRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.redis.v1.ExportInstanceRequest();
  }

  public static com.google.cloud.redis.v1.ExportInstanceRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ExportInstanceRequest> PARSER =
      new com.google.protobuf.AbstractParser<ExportInstanceRequest>() {
        @java.lang.Override
        public ExportInstanceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ExportInstanceRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<ExportInstanceRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ExportInstanceRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.redis.v1.ExportInstanceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* Generated by camel build tools - do NOT edit this file! */ package org.apache.camel.component.debezium; import java.util.Map; import org.apache.camel.CamelContext; import org.apache.camel.spi.ConfigurerStrategy; import org.apache.camel.spi.GeneratedPropertyConfigurer; import org.apache.camel.spi.PropertyConfigurerGetter; import org.apache.camel.util.CaseInsensitiveMap; import org.apache.camel.support.component.PropertyConfigurerSupport; /** * Generated by camel build tools - do NOT edit this file! */ @SuppressWarnings("unchecked") public class DebeziumSqlserverComponentConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter { private static final Map<String, Object> ALL_OPTIONS; static { Map<String, Object> map = new CaseInsensitiveMap(); map.put("additionalProperties", java.util.Map.class); map.put("bridgeErrorHandler", boolean.class); map.put("configuration", org.apache.camel.component.debezium.configuration.SqlServerConnectorEmbeddedDebeziumConfiguration.class); map.put("internalKeyConverter", java.lang.String.class); map.put("internalValueConverter", java.lang.String.class); map.put("offsetCommitPolicy", java.lang.String.class); map.put("offsetCommitTimeoutMs", long.class); map.put("offsetFlushIntervalMs", long.class); map.put("offsetStorage", java.lang.String.class); map.put("offsetStorageFileName", java.lang.String.class); map.put("offsetStoragePartitions", int.class); map.put("offsetStorageReplicationFactor", int.class); map.put("offsetStorageTopic", java.lang.String.class); map.put("basicPropertyBinding", boolean.class); map.put("columnBlacklist", java.lang.String.class); map.put("columnExcludeList", java.lang.String.class); map.put("columnIncludeList", java.lang.String.class); map.put("columnPropagateSourceType", java.lang.String.class); map.put("columnWhitelist", java.lang.String.class); map.put("converters", java.lang.String.class); map.put("databaseDbname", java.lang.String.class); 
map.put("databaseHistory", java.lang.String.class); map.put("databaseHistoryFileFilename", java.lang.String.class); map.put("databaseHistoryKafkaBootstrapServers", java.lang.String.class); map.put("databaseHistoryKafkaRecoveryAttempts", int.class); map.put("databaseHistoryKafkaRecoveryPollIntervalMs", int.class); map.put("databaseHistoryKafkaTopic", java.lang.String.class); map.put("databaseHostname", java.lang.String.class); map.put("databaseInstance", java.lang.String.class); map.put("databasePassword", java.lang.String.class); map.put("databasePort", int.class); map.put("databaseServerName", java.lang.String.class); map.put("databaseServerTimezone", java.lang.String.class); map.put("databaseUser", java.lang.String.class); map.put("datatypePropagateSourceType", java.lang.String.class); map.put("decimalHandlingMode", java.lang.String.class); map.put("eventProcessingFailureHandlingMode", java.lang.String.class); map.put("heartbeatIntervalMs", int.class); map.put("heartbeatTopicsPrefix", java.lang.String.class); map.put("includeSchemaChanges", boolean.class); map.put("maxBatchSize", int.class); map.put("maxQueueSize", int.class); map.put("messageKeyColumns", java.lang.String.class); map.put("pollIntervalMs", long.class); map.put("provideTransactionMetadata", boolean.class); map.put("queryFetchSize", int.class); map.put("retriableRestartConnectorWaitMs", long.class); map.put("sanitizeFieldNames", boolean.class); map.put("skippedOperations", java.lang.String.class); map.put("snapshotDelayMs", long.class); map.put("snapshotFetchSize", int.class); map.put("snapshotIsolationMode", java.lang.String.class); map.put("snapshotLockTimeoutMs", long.class); map.put("snapshotMode", java.lang.String.class); map.put("snapshotSelectStatementOverrides", java.lang.String.class); map.put("sourceStructVersion", java.lang.String.class); map.put("sourceTimestampMode", java.lang.String.class); map.put("tableBlacklist", java.lang.String.class); map.put("tableExcludeList", 
// NOTE(review): this appears to be machine-generated Apache Camel property-configurer
// code for the Debezium SQL Server component (the ALL_OPTIONS map + exhaustive
// name->setter/getter switches are the standard camel tooling output) -- TODO confirm
// against the component's generated sources. If confirmed, regenerate rather than
// hand-edit: the lowercase/camelCase case pairs must stay in sync with ALL_OPTIONS.
// The static initializer populating ALL_OPTIONS begins above this chunk.
java.lang.String.class); map.put("tableIgnoreBuiltin", boolean.class); map.put("tableIncludeList", java.lang.String.class); map.put("tableWhitelist", java.lang.String.class); map.put("timePrecisionMode", java.lang.String.class); map.put("tombstonesOnDelete", boolean.class); ALL_OPTIONS = map; ConfigurerStrategy.addConfigurerClearer(DebeziumSqlserverComponentConfigurer::clearConfigurers); }

// Lazily creates the component-level Debezium configuration so option setters
// always have a target object, even before the user supplies a configuration.
private org.apache.camel.component.debezium.configuration.SqlServerConnectorEmbeddedDebeziumConfiguration getOrCreateConfiguration(DebeziumSqlserverComponent target) { if (target.getConfiguration() == null) { target.setConfiguration(new org.apache.camel.component.debezium.configuration.SqlServerConnectorEmbeddedDebeziumConfiguration()); } return target.getConfiguration(); }

// Generated dispatch: maps an option name (lowercase or camelCase) to the matching
// setter, converting the raw value via property(). Returns false for unknown names.
@Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { DebeziumSqlserverComponent target = (DebeziumSqlserverComponent) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "additionalproperties": case "additionalProperties": getOrCreateConfiguration(target).setAdditionalProperties(property(camelContext, java.util.Map.class, value)); return true; case "basicpropertybinding": case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true; case "bridgeerrorhandler": case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true; case "columnblacklist": case "columnBlacklist": getOrCreateConfiguration(target).setColumnBlacklist(property(camelContext, java.lang.String.class, value)); return true; case "columnexcludelist": case "columnExcludeList": getOrCreateConfiguration(target).setColumnExcludeList(property(camelContext, java.lang.String.class, value)); return true; case "columnincludelist": case "columnIncludeList": getOrCreateConfiguration(target).setColumnIncludeList(property(camelContext, java.lang.String.class, value)); return
true; case "columnpropagatesourcetype": case "columnPropagateSourceType": getOrCreateConfiguration(target).setColumnPropagateSourceType(property(camelContext, java.lang.String.class, value)); return true; case "columnwhitelist": case "columnWhitelist": getOrCreateConfiguration(target).setColumnWhitelist(property(camelContext, java.lang.String.class, value)); return true; case "configuration": target.setConfiguration(property(camelContext, org.apache.camel.component.debezium.configuration.SqlServerConnectorEmbeddedDebeziumConfiguration.class, value)); return true; case "converters": getOrCreateConfiguration(target).setConverters(property(camelContext, java.lang.String.class, value)); return true; case "databasedbname": case "databaseDbname": getOrCreateConfiguration(target).setDatabaseDbname(property(camelContext, java.lang.String.class, value)); return true; case "databasehistory": case "databaseHistory": getOrCreateConfiguration(target).setDatabaseHistory(property(camelContext, java.lang.String.class, value)); return true; case "databasehistoryfilefilename": case "databaseHistoryFileFilename": getOrCreateConfiguration(target).setDatabaseHistoryFileFilename(property(camelContext, java.lang.String.class, value)); return true; case "databasehistorykafkabootstrapservers": case "databaseHistoryKafkaBootstrapServers": getOrCreateConfiguration(target).setDatabaseHistoryKafkaBootstrapServers(property(camelContext, java.lang.String.class, value)); return true; case "databasehistorykafkarecoveryattempts": case "databaseHistoryKafkaRecoveryAttempts": getOrCreateConfiguration(target).setDatabaseHistoryKafkaRecoveryAttempts(property(camelContext, int.class, value)); return true; case "databasehistorykafkarecoverypollintervalms": case "databaseHistoryKafkaRecoveryPollIntervalMs": getOrCreateConfiguration(target).setDatabaseHistoryKafkaRecoveryPollIntervalMs(property(camelContext, int.class, value)); return true; case "databasehistorykafkatopic": case
"databaseHistoryKafkaTopic": getOrCreateConfiguration(target).setDatabaseHistoryKafkaTopic(property(camelContext, java.lang.String.class, value)); return true; case "databasehostname": case "databaseHostname": getOrCreateConfiguration(target).setDatabaseHostname(property(camelContext, java.lang.String.class, value)); return true; case "databaseinstance": case "databaseInstance": getOrCreateConfiguration(target).setDatabaseInstance(property(camelContext, java.lang.String.class, value)); return true; case "databasepassword": case "databasePassword": getOrCreateConfiguration(target).setDatabasePassword(property(camelContext, java.lang.String.class, value)); return true; case "databaseport": case "databasePort": getOrCreateConfiguration(target).setDatabasePort(property(camelContext, int.class, value)); return true; case "databaseservername": case "databaseServerName": getOrCreateConfiguration(target).setDatabaseServerName(property(camelContext, java.lang.String.class, value)); return true; case "databaseservertimezone": case "databaseServerTimezone": getOrCreateConfiguration(target).setDatabaseServerTimezone(property(camelContext, java.lang.String.class, value)); return true; case "databaseuser": case "databaseUser": getOrCreateConfiguration(target).setDatabaseUser(property(camelContext, java.lang.String.class, value)); return true; case "datatypepropagatesourcetype": case "datatypePropagateSourceType": getOrCreateConfiguration(target).setDatatypePropagateSourceType(property(camelContext, java.lang.String.class, value)); return true; case "decimalhandlingmode": case "decimalHandlingMode": getOrCreateConfiguration(target).setDecimalHandlingMode(property(camelContext, java.lang.String.class, value)); return true; case "eventprocessingfailurehandlingmode": case "eventProcessingFailureHandlingMode": getOrCreateConfiguration(target).setEventProcessingFailureHandlingMode(property(camelContext, java.lang.String.class, value)); return true; case "heartbeatintervalms": case
"heartbeatIntervalMs": getOrCreateConfiguration(target).setHeartbeatIntervalMs(property(camelContext, int.class, value)); return true; case "heartbeattopicsprefix": case "heartbeatTopicsPrefix": getOrCreateConfiguration(target).setHeartbeatTopicsPrefix(property(camelContext, java.lang.String.class, value)); return true; case "includeschemachanges": case "includeSchemaChanges": getOrCreateConfiguration(target).setIncludeSchemaChanges(property(camelContext, boolean.class, value)); return true; case "internalkeyconverter": case "internalKeyConverter": getOrCreateConfiguration(target).setInternalKeyConverter(property(camelContext, java.lang.String.class, value)); return true; case "internalvalueconverter": case "internalValueConverter": getOrCreateConfiguration(target).setInternalValueConverter(property(camelContext, java.lang.String.class, value)); return true; case "maxbatchsize": case "maxBatchSize": getOrCreateConfiguration(target).setMaxBatchSize(property(camelContext, int.class, value)); return true; case "maxqueuesize": case "maxQueueSize": getOrCreateConfiguration(target).setMaxQueueSize(property(camelContext, int.class, value)); return true; case "messagekeycolumns": case "messageKeyColumns": getOrCreateConfiguration(target).setMessageKeyColumns(property(camelContext, java.lang.String.class, value)); return true; case "offsetcommitpolicy": case "offsetCommitPolicy": getOrCreateConfiguration(target).setOffsetCommitPolicy(property(camelContext, java.lang.String.class, value)); return true; case "offsetcommittimeoutms": case "offsetCommitTimeoutMs": getOrCreateConfiguration(target).setOffsetCommitTimeoutMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "offsetflushintervalms": case "offsetFlushIntervalMs": getOrCreateConfiguration(target).setOffsetFlushIntervalMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "offsetstorage": case "offsetStorage":
getOrCreateConfiguration(target).setOffsetStorage(property(camelContext, java.lang.String.class, value)); return true; case "offsetstoragefilename": case "offsetStorageFileName": getOrCreateConfiguration(target).setOffsetStorageFileName(property(camelContext, java.lang.String.class, value)); return true; case "offsetstoragepartitions": case "offsetStoragePartitions": getOrCreateConfiguration(target).setOffsetStoragePartitions(property(camelContext, int.class, value)); return true; case "offsetstoragereplicationfactor": case "offsetStorageReplicationFactor": getOrCreateConfiguration(target).setOffsetStorageReplicationFactor(property(camelContext, int.class, value)); return true; case "offsetstoragetopic": case "offsetStorageTopic": getOrCreateConfiguration(target).setOffsetStorageTopic(property(camelContext, java.lang.String.class, value)); return true; case "pollintervalms": case "pollIntervalMs": getOrCreateConfiguration(target).setPollIntervalMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "providetransactionmetadata": case "provideTransactionMetadata": getOrCreateConfiguration(target).setProvideTransactionMetadata(property(camelContext, boolean.class, value)); return true; case "queryfetchsize": case "queryFetchSize": getOrCreateConfiguration(target).setQueryFetchSize(property(camelContext, int.class, value)); return true; case "retriablerestartconnectorwaitms": case "retriableRestartConnectorWaitMs": getOrCreateConfiguration(target).setRetriableRestartConnectorWaitMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "sanitizefieldnames": case "sanitizeFieldNames": getOrCreateConfiguration(target).setSanitizeFieldNames(property(camelContext, boolean.class, value)); return true; case "skippedoperations": case "skippedOperations": getOrCreateConfiguration(target).setSkippedOperations(property(camelContext, java.lang.String.class, value)); return true; case "snapshotdelayms": case
"snapshotDelayMs": getOrCreateConfiguration(target).setSnapshotDelayMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "snapshotfetchsize": case "snapshotFetchSize": getOrCreateConfiguration(target).setSnapshotFetchSize(property(camelContext, int.class, value)); return true; case "snapshotisolationmode": case "snapshotIsolationMode": getOrCreateConfiguration(target).setSnapshotIsolationMode(property(camelContext, java.lang.String.class, value)); return true; case "snapshotlocktimeoutms": case "snapshotLockTimeoutMs": getOrCreateConfiguration(target).setSnapshotLockTimeoutMs(property(camelContext, java.time.Duration.class, value).toMillis()); return true; case "snapshotmode": case "snapshotMode": getOrCreateConfiguration(target).setSnapshotMode(property(camelContext, java.lang.String.class, value)); return true; case "snapshotselectstatementoverrides": case "snapshotSelectStatementOverrides": getOrCreateConfiguration(target).setSnapshotSelectStatementOverrides(property(camelContext, java.lang.String.class, value)); return true; case "sourcestructversion": case "sourceStructVersion": getOrCreateConfiguration(target).setSourceStructVersion(property(camelContext, java.lang.String.class, value)); return true; case "sourcetimestampmode": case "sourceTimestampMode": getOrCreateConfiguration(target).setSourceTimestampMode(property(camelContext, java.lang.String.class, value)); return true; case "tableblacklist": case "tableBlacklist": getOrCreateConfiguration(target).setTableBlacklist(property(camelContext, java.lang.String.class, value)); return true; case "tableexcludelist": case "tableExcludeList": getOrCreateConfiguration(target).setTableExcludeList(property(camelContext, java.lang.String.class, value)); return true; case "tableignorebuiltin": case "tableIgnoreBuiltin": getOrCreateConfiguration(target).setTableIgnoreBuiltin(property(camelContext, boolean.class, value)); return true; case "tableincludelist": case
"tableIncludeList": getOrCreateConfiguration(target).setTableIncludeList(property(camelContext, java.lang.String.class, value)); return true; case "tablewhitelist": case "tableWhitelist": getOrCreateConfiguration(target).setTableWhitelist(property(camelContext, java.lang.String.class, value)); return true; case "timeprecisionmode": case "timePrecisionMode": getOrCreateConfiguration(target).setTimePrecisionMode(property(camelContext, java.lang.String.class, value)); return true; case "tombstonesondelete": case "tombstonesOnDelete": getOrCreateConfiguration(target).setTombstonesOnDelete(property(camelContext, boolean.class, value)); return true; default: return false; } }

// Exposes the option-name -> value-type map built in the static initializer.
@Override public Map<String, Object> getAllOptions(Object target) { return ALL_OPTIONS; }

// No bootstrap-phase configurers are registered for this component.
public static void clearBootstrapConfigurers() { }

// Registered with ConfigurerStrategy above; empties ALL_OPTIONS on shutdown/reload.
public static void clearConfigurers() { ALL_OPTIONS.clear(); }

// Generated dispatch: mirrors configure() with the matching getter for each option;
// returns null for unknown names.
@Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { DebeziumSqlserverComponent target = (DebeziumSqlserverComponent) obj; switch (ignoreCase ?
name.toLowerCase() : name) { case "additionalproperties": case "additionalProperties": return getOrCreateConfiguration(target).getAdditionalProperties(); case "basicpropertybinding": case "basicPropertyBinding": return target.isBasicPropertyBinding(); case "bridgeerrorhandler": case "bridgeErrorHandler": return target.isBridgeErrorHandler(); case "columnblacklist": case "columnBlacklist": return getOrCreateConfiguration(target).getColumnBlacklist(); case "columnexcludelist": case "columnExcludeList": return getOrCreateConfiguration(target).getColumnExcludeList(); case "columnincludelist": case "columnIncludeList": return getOrCreateConfiguration(target).getColumnIncludeList(); case "columnpropagatesourcetype": case "columnPropagateSourceType": return getOrCreateConfiguration(target).getColumnPropagateSourceType(); case "columnwhitelist": case "columnWhitelist": return getOrCreateConfiguration(target).getColumnWhitelist(); case "configuration": return target.getConfiguration(); case "converters": return getOrCreateConfiguration(target).getConverters(); case "databasedbname": case "databaseDbname": return getOrCreateConfiguration(target).getDatabaseDbname(); case "databasehistory": case "databaseHistory": return getOrCreateConfiguration(target).getDatabaseHistory(); case "databasehistoryfilefilename": case "databaseHistoryFileFilename": return getOrCreateConfiguration(target).getDatabaseHistoryFileFilename(); case "databasehistorykafkabootstrapservers": case "databaseHistoryKafkaBootstrapServers": return getOrCreateConfiguration(target).getDatabaseHistoryKafkaBootstrapServers(); case "databasehistorykafkarecoveryattempts": case "databaseHistoryKafkaRecoveryAttempts": return getOrCreateConfiguration(target).getDatabaseHistoryKafkaRecoveryAttempts(); case "databasehistorykafkarecoverypollintervalms": case "databaseHistoryKafkaRecoveryPollIntervalMs": return getOrCreateConfiguration(target).getDatabaseHistoryKafkaRecoveryPollIntervalMs(); case
"databasehistorykafkatopic": case "databaseHistoryKafkaTopic": return getOrCreateConfiguration(target).getDatabaseHistoryKafkaTopic(); case "databasehostname": case "databaseHostname": return getOrCreateConfiguration(target).getDatabaseHostname(); case "databaseinstance": case "databaseInstance": return getOrCreateConfiguration(target).getDatabaseInstance(); case "databasepassword": case "databasePassword": return getOrCreateConfiguration(target).getDatabasePassword(); case "databaseport": case "databasePort": return getOrCreateConfiguration(target).getDatabasePort(); case "databaseservername": case "databaseServerName": return getOrCreateConfiguration(target).getDatabaseServerName(); case "databaseservertimezone": case "databaseServerTimezone": return getOrCreateConfiguration(target).getDatabaseServerTimezone(); case "databaseuser": case "databaseUser": return getOrCreateConfiguration(target).getDatabaseUser(); case "datatypepropagatesourcetype": case "datatypePropagateSourceType": return getOrCreateConfiguration(target).getDatatypePropagateSourceType(); case "decimalhandlingmode": case "decimalHandlingMode": return getOrCreateConfiguration(target).getDecimalHandlingMode(); case "eventprocessingfailurehandlingmode": case "eventProcessingFailureHandlingMode": return getOrCreateConfiguration(target).getEventProcessingFailureHandlingMode(); case "heartbeatintervalms": case "heartbeatIntervalMs": return getOrCreateConfiguration(target).getHeartbeatIntervalMs(); case "heartbeattopicsprefix": case "heartbeatTopicsPrefix": return getOrCreateConfiguration(target).getHeartbeatTopicsPrefix(); case "includeschemachanges": case "includeSchemaChanges": return getOrCreateConfiguration(target).isIncludeSchemaChanges(); case "internalkeyconverter": case "internalKeyConverter": return getOrCreateConfiguration(target).getInternalKeyConverter(); case "internalvalueconverter": case "internalValueConverter": return getOrCreateConfiguration(target).getInternalValueConverter(); case
"maxbatchsize": case "maxBatchSize": return getOrCreateConfiguration(target).getMaxBatchSize(); case "maxqueuesize": case "maxQueueSize": return getOrCreateConfiguration(target).getMaxQueueSize(); case "messagekeycolumns": case "messageKeyColumns": return getOrCreateConfiguration(target).getMessageKeyColumns(); case "offsetcommitpolicy": case "offsetCommitPolicy": return getOrCreateConfiguration(target).getOffsetCommitPolicy(); case "offsetcommittimeoutms": case "offsetCommitTimeoutMs": return getOrCreateConfiguration(target).getOffsetCommitTimeoutMs(); case "offsetflushintervalms": case "offsetFlushIntervalMs": return getOrCreateConfiguration(target).getOffsetFlushIntervalMs(); case "offsetstorage": case "offsetStorage": return getOrCreateConfiguration(target).getOffsetStorage(); case "offsetstoragefilename": case "offsetStorageFileName": return getOrCreateConfiguration(target).getOffsetStorageFileName(); case "offsetstoragepartitions": case "offsetStoragePartitions": return getOrCreateConfiguration(target).getOffsetStoragePartitions(); case "offsetstoragereplicationfactor": case "offsetStorageReplicationFactor": return getOrCreateConfiguration(target).getOffsetStorageReplicationFactor(); case "offsetstoragetopic": case "offsetStorageTopic": return getOrCreateConfiguration(target).getOffsetStorageTopic(); case "pollintervalms": case "pollIntervalMs": return getOrCreateConfiguration(target).getPollIntervalMs(); case "providetransactionmetadata": case "provideTransactionMetadata": return getOrCreateConfiguration(target).isProvideTransactionMetadata(); case "queryfetchsize": case "queryFetchSize": return getOrCreateConfiguration(target).getQueryFetchSize(); case "retriablerestartconnectorwaitms": case "retriableRestartConnectorWaitMs": return getOrCreateConfiguration(target).getRetriableRestartConnectorWaitMs(); case "sanitizefieldnames": case "sanitizeFieldNames": return getOrCreateConfiguration(target).isSanitizeFieldNames(); case "skippedoperations": case
"skippedOperations": return getOrCreateConfiguration(target).getSkippedOperations(); case "snapshotdelayms": case "snapshotDelayMs": return getOrCreateConfiguration(target).getSnapshotDelayMs(); case "snapshotfetchsize": case "snapshotFetchSize": return getOrCreateConfiguration(target).getSnapshotFetchSize(); case "snapshotisolationmode": case "snapshotIsolationMode": return getOrCreateConfiguration(target).getSnapshotIsolationMode(); case "snapshotlocktimeoutms": case "snapshotLockTimeoutMs": return getOrCreateConfiguration(target).getSnapshotLockTimeoutMs(); case "snapshotmode": case "snapshotMode": return getOrCreateConfiguration(target).getSnapshotMode(); case "snapshotselectstatementoverrides": case "snapshotSelectStatementOverrides": return getOrCreateConfiguration(target).getSnapshotSelectStatementOverrides(); case "sourcestructversion": case "sourceStructVersion": return getOrCreateConfiguration(target).getSourceStructVersion(); case "sourcetimestampmode": case "sourceTimestampMode": return getOrCreateConfiguration(target).getSourceTimestampMode(); case "tableblacklist": case "tableBlacklist": return getOrCreateConfiguration(target).getTableBlacklist(); case "tableexcludelist": case "tableExcludeList": return getOrCreateConfiguration(target).getTableExcludeList(); case "tableignorebuiltin": case "tableIgnoreBuiltin": return getOrCreateConfiguration(target).isTableIgnoreBuiltin(); case "tableincludelist": case "tableIncludeList": return getOrCreateConfiguration(target).getTableIncludeList(); case "tablewhitelist": case "tableWhitelist": return getOrCreateConfiguration(target).getTableWhitelist(); case "timeprecisionmode": case "timePrecisionMode": return getOrCreateConfiguration(target).getTimePrecisionMode(); case "tombstonesondelete": case "tombstonesOnDelete": return getOrCreateConfiguration(target).isTombstonesOnDelete(); default: return null; } } }
package org.axway.grapes.server.webapp;

import org.axway.grapes.commons.datamodel.*;
import org.axway.grapes.server.db.DataUtils;

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;

import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

/**
 * Data Validator
 *
 * <p>Validates the posted mime to make sure that no corrupted data is sent to Grapes.
 * Every check failure is reported as a 400 (Bad Request) {@link WebApplicationException}
 * whose entity is a human-readable explanation.</p>
 *
 * @author jdcoffre
 */
public final class DataValidator {

    /** Characters rejected inside a license regexp ('&amp;', '%' and '/'); compiled once. */
    private static final Pattern FORBIDDEN_LICENSE_REGEXP_CHARS = Pattern.compile("[&%//]");

    /** Matches any character that is not a hexadecimal digit; used for checksum validation. */
    private static final Pattern NON_HEX_CHARS = Pattern.compile("[^A-Fa-f0-9]");

    private DataValidator() {
        // Hide utility class constructor
    }

    /**
     * Builds a 400 Bad Request exception carrying the given message as entity.
     *
     * @param message the explanation returned to the client
     * @return the exception to throw
     */
    private static WebApplicationException badRequest(final String message) {
        return new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
                .entity(message)
                .build());
    }

    /**
     * Checks if the provided artifact is valid and could be stored into the database.
     *
     * <p>The groupId is mandatory only for Maven artifacts (origin null or "maven").</p>
     *
     * @param artifact the artifact to test
     * @throws WebApplicationException if the data is corrupted
     */
    public static void validate(final Artifact artifact) {
        if ((artifact.getOrigin() == null || "maven".equals(artifact.getOrigin()))
                && (artifact.getGroupId() == null || artifact.getGroupId().isEmpty())) {
            throw badRequest("Artifact groupId should not be null or empty");
        }
        if (artifact.getArtifactId() == null || artifact.getArtifactId().isEmpty()) {
            throw badRequest("Artifact artifactId should not be null or empty");
        }
        if (artifact.getVersion() == null || artifact.getVersion().isEmpty()) {
            throw badRequest("Artifact version should not be null or empty");
        }
    }

    /**
     * Checks if the provided artifact is valid and could be stored into the database.
     * In addition to {@link #validate(Artifact)}, posted artifacts must carry an
     * extension and a 64-character SHA256 checksum.
     *
     * @param artifact the artifact to test
     * @throws WebApplicationException if the data is corrupted
     */
    public static void validatePostArtifact(final Artifact artifact) {
        validate(artifact);

        if (artifact.getExtension() == null || artifact.getExtension().isEmpty()) {
            throw badRequest("Artifact extension should not be null or empty");
        }
        if (artifact.getSha256() == null || artifact.getSha256().isEmpty()) {
            throw badRequest("Artifact SHA256 checksum should not be null or empty");
        }
        if (artifact.getSha256().length() != 64) {
            throw badRequest("Artifact SHA256 checksum length should be 64");
        }
    }

    /**
     * Checks if the provided license is valid and could be stored into the database.
     *
     * @param license the license to test
     * @throws WebApplicationException if the data is corrupted
     */
    public static void validate(final License license) {
        // A license should have a name
        if (license.getName() == null || license.getName().isEmpty()) {
            throw badRequest("License name should not be empty!");
        }
        // A license should have a long name
        if (license.getLongName() == null || license.getLongName().isEmpty()) {
            throw badRequest("License long name should not be empty!");
        }
        // If there is a regexp, it should compile and must not contain forbidden characters
        if (license.getRegexp() != null && !license.getRegexp().isEmpty()) {
            try {
                Pattern.compile(license.getRegexp());
            } catch (PatternSyntaxException e) {
                throw badRequest("License regexp does not compile!");
            }
            // FIX: this branch used to report "does not compile" even though the
            // regexp compiled fine -- it is rejected because it contains '&', '%' or '/'.
            if (FORBIDDEN_LICENSE_REGEXP_CHARS.matcher(license.getRegexp()).find()) {
                throw badRequest("License regexp contains forbidden characters (&, % or /)!");
            }
        }
    }

    /**
     * Checks if the provided module is valid and could be stored into the database.
     * Recursively validates all contained artifacts and dependency targets.
     *
     * @param module the module to test
     * @throws WebApplicationException if the data is corrupted
     */
    public static void validate(final Module module) {
        if (null == module) {
            throw badRequest("Module cannot be null!");
        }
        if (module.getName() == null || module.getName().isEmpty()) {
            throw badRequest("Module name cannot be null or empty!");
        }
        if (module.getVersion() == null || module.getVersion().isEmpty()) {
            throw badRequest("Module version cannot be null or empty!");
        }
        // Check artifacts
        for (final Artifact artifact : DataUtils.getAllArtifacts(module)) {
            validate(artifact);
        }
        // Check dependencies
        for (final Dependency dependency : DataUtils.getAllDependencies(module)) {
            validate(dependency.getTarget());
        }
    }

    /**
     * Checks if the provided organization is valid and could be stored into the database.
     *
     * @param organization Organization
     * @throws WebApplicationException if the data is corrupted
     */
    public static void validate(final Organization organization) {
        if (organization.getName() == null || organization.getName().isEmpty()) {
            throw badRequest("Organization name cannot be null or empty!");
        }
    }

    /**
     * Checks if the provided artifactQuery is valid.
     *
     * <p>NOTE(review): the checksum here only needs to be at least 64 hex characters,
     * whereas {@link #validatePostArtifact(Artifact)} requires exactly 64 -- confirm
     * the asymmetry is intentional.</p>
     *
     * @param artifactQuery ArtifactQuery
     * @throws WebApplicationException if the data is corrupted
     */
    public static void validate(final ArtifactQuery artifactQuery) {
        if (artifactQuery.getUser() == null || artifactQuery.getUser().isEmpty()) {
            throw badRequest("Mandatory field [user] missing");
        }
        if (artifactQuery.getStage() != 0 && artifactQuery.getStage() != 1) {
            throw badRequest("Invalid [stage] value (supported 0 | 1)");
        }
        if (artifactQuery.getName() == null || artifactQuery.getName().isEmpty()) {
            throw badRequest("Mandatory field [name] missing, it should be the file name");
        }
        if (artifactQuery.getSha256() == null || artifactQuery.getSha256().isEmpty()) {
            throw badRequest("Mandatory field [sha256] missing");
        }
        if (artifactQuery.getSha256().length() < 64
                || NON_HEX_CHARS.matcher(artifactQuery.getSha256()).find()) {
            throw badRequest("Invalid file checksum value");
        }
        if (artifactQuery.getType() == null || artifactQuery.getType().isEmpty()) {
            throw badRequest("Mandatory field [type] missing");
        }
    }
}
import com.google.gson.*;

import java.awt.*;
import java.io.*;
import java.net.*;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Scanner;

/**
 * Command-line VK audio downloader: obtains an OAuth token, fetches the caller's
 * (or another owner's/album's) audio list via the VK API and downloads each track.
 *
 * Created by qjex on 10/6/15.
 */
public class Main {

    /** Target directory for downloaded files (set from the -d argument). */
    public static Path path;
    public static String getToken = "https://oauth.vk.com/authorize?client_id=5096323&scope=audio&display=page&v=5.37&response_type=token";
    public static String getApiCode = "https://oauth.vk.com/authorize?client_id=5096323&scope=audio&display=page&v=5.37&response_type=code&redirect_uri=http://127.0.0.1:7856/";
    public static String getApiToken = "https://oauth.vk.com/access_token?client_id=5096323&client_secret=I8ErT0wpoMX9RHxecJ3I&redirect_uri=http://127.0.0.1:7856/&code=";
    /** When true, files are named "<owner>_<aid>" instead of "Artist - Title.mp3". */
    public static boolean mobileNames = false;
    /** -1 means "not specified" for both optional API parameters below. */
    public static int albumId = -1;
    public static int ownerId = -1;

    /**
     * Entry point. Flags: -d dir, -k api key, -m mobile filenames, -a album id,
     * -o owner id, -h help.
     */
    public static void main(String[] args) {
        String key = null;
        String dir = ".";
        for (int i = 0; i < args.length; i++) {
            if (args[i].equals("-d")) {
                dir = args[i + 1];
                i++;
            }
            if (args[i].equals("-k")) {
                key = args[i + 1];
                i++;
            }
            if (args[i].equals("-m")) {
                mobileNames = true;
            }
            if (args[i].equals("-h")) {
                System.out.println("-d <dir> \n-k <api key> \n-m Enable mobile filenames\n-a <album_id>\n-o <owner_id>\n");
                System.exit(0);
            }
            if (args[i].equals("-a")) {
                albumId = Integer.parseInt(args[i + 1]);
                i++;
            }
            if (args[i].equals("-o")) {
                ownerId = Integer.parseInt(args[i + 1]);
                i++;
            }
        }
        path = Paths.get(dir);
        if (key == null) {
            key = getAccessKey();
        }
        String response = getResponse(key);
        ArrayList<Song> songList = getList(response);
        System.out.println("Got " + songList.size() + " songs");
        for (Song song : songList) {
            downloadSong(song);
        }
    }

    /**
     * Downloads a single song into {@link #path}, skipping files that already
     * exist with the expected size, and printing a progress bar while copying.
     *
     * @param song the song descriptor (artist, title, aid, owner_id, url)
     */
    public static void downloadSong(Song song) {
        try {
            String filename = song.artist + " - " + song.title + ".mp3";
            if (mobileNames) filename = song.owner_id + "_" + song.aid;
            File file = new File(path.toFile(), filename);
            System.out.println("Downloading " + "\"" + song.artist + " - " + song.title + "\" " + "id: " + song.aid);
            URL url = new URL(song.url.replaceAll("\\\\", ""));
            HttpURLConnection con = (HttpURLConnection) url.openConnection();
            con.setRequestMethod("GET");
            // FIX: Float.parseFloat(null) used to throw when the server omits
            // Content-Length; treat a missing header as "size unknown" (-1).
            String lengthHeader = con.getHeaderField("Content-Length");
            float fileSize = lengthHeader != null ? Float.parseFloat(lengthHeader) : -1f;
            if (file.exists() && file.length() == fileSize) {
                System.out.println("Already downloaded");
                return;
            }
            // FIX: both streams used to leak (never closed); try-with-resources
            // guarantees they are closed even on I/O errors.
            try (BufferedInputStream in = new BufferedInputStream(con.getInputStream());
                 FileOutputStream fout = new FileOutputStream(file)) {
                final int BUFF_SZ = 1024;
                byte[] buff = new byte[BUFF_SZ];
                int data;
                long totalDownloaded = 0;
                long downloadStartTime = System.currentTimeMillis();
                while ((data = in.read(buff, 0, BUFF_SZ)) != -1) {
                    totalDownloaded += data;
                    // Guard against elapsed == 0 which made the speed Infinity.
                    long cur = Math.max(1, System.currentTimeMillis() - downloadStartTime);
                    float speed = 1000f * totalDownloaded / cur;
                    printProgress(Math.round(100f * totalDownloaded / fileSize), speed);
                    fout.write(buff, 0, data);
                }
            }
            System.out.print('\n');
            if (file.length() != fileSize) {
                System.out.println("\rError downloading " + song.aid);
            }
        } catch (MalformedURLException e) {
            System.err.println("Error downloading " + song.aid);
        } catch (IOException e) {
            // ProtocolException is an IOException; both were handled identically before.
            e.printStackTrace();
        }
    }

    /**
     * Renders a 20-segment progress bar plus the current transfer speed.
     *
     * @param pos   completed percentage (0-100)
     * @param speed transfer speed in bytes per second
     */
    public static void printProgress(int pos, float speed) {
        int size = 20;
        int current = pos * size / 100;
        StringBuilder bar = new StringBuilder();
        bar.append("[");
        for (int i = 0; i < current; i++) bar.append("#");
        for (int i = current; i < size; i++) bar.append(" ");
        bar.append("]");
        System.out.print("\r" + bar + " " + getSpeed(speed));
    }

    /**
     * Formats a byte-per-second value with a human-readable unit (B/s, KB/s, MB/s).
     *
     * @param speed speed in bytes per second
     * @return formatted speed string, e.g. "1.25 MB/s"
     */
    public static String getSpeed(float speed) {
        String ut = "B/s";
        if (speed > (1024 * 1024)) {
            speed /= (1024 * 1024);
            ut = "MB/s";
        }
        if (speed > 1024) {
            speed /= 1024;
            ut = "KB/s";
        }
        String res = String.format("%.2f", speed);
        return (res + " " + ut);
    }

    /**
     * Parses the JSON body of audio.get into Song objects.
     * Exits the process on a malformed response or an API error.
     *
     * @param response raw JSON response text
     * @return the parsed song list (never null)
     */
    private static ArrayList<Song> getList(String response) {
        ArrayList<Song> list = new ArrayList<>();
        System.out.println("Parsing song list");
        try {
            JsonElement jse = new JsonParser().parse(response);
            JsonObject resp = jse.getAsJsonObject();
            if (resp.has("error")) {
                System.err.println("Wrong permissions");
                System.exit(1);
            }
            if (!resp.has("response")) throw new JsonParseException("No song data in response");
            JsonArray items = resp.getAsJsonArray("response");
            Gson gson = new GsonBuilder().create();
            for (JsonElement item : items) {
                if (item.isJsonObject()) {
                    list.add(gson.fromJson(item, Song.class));
                }
            }
        } catch (JsonParseException e) {
            System.err.println("Error parsing song list");
            e.printStackTrace();
            System.exit(1);
        }
        return list;
    }

    /**
     * Runs a one-shot local HTTP server on port 7856, opens the OAuth page in the
     * browser and waits for VK to redirect back with the authorization code.
     *
     * @return the OAuth authorization code captured by the local server
     */
    private static String getAppCode() throws IOException {
        URL urlCode = null;
        try {
            urlCode = new URL(getApiCode);
        } catch (MalformedURLException e) {
            e.printStackTrace();
            System.exit(1);
        }
        LightHTTP httpServer = new LightHTTP(7856);
        Thread t = new Thread(httpServer);
        t.start();
        openWebpage(urlCode);
        try {
            t.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        httpServer.destroy();
        return httpServer.getCode();
    }

    /**
     * Exchanges the OAuth code for an access token. On network failure, falls back
     * to opening the token URL in a browser and reading the key from stdin.
     *
     * @return the access token, or a key typed by the user
     */
    private static String getAccessKey() {
        String accessKey = null;
        URL url = null;
        try {
            url = new URL(getToken);
        } catch (MalformedURLException e) {
            e.printStackTrace();
            System.exit(1);
        }
        try {
            String code = getAppCode();
            URL urlToken = new URL(getApiToken + code);
            HttpURLConnection connection = (HttpURLConnection) urlToken.openConnection();
            StringBuilder response = new StringBuilder();
            // FIX: the reader used to leak when an exception interrupted the read loop.
            try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
                String line;
                while ((line = in.readLine()) != null) {
                    response.append(line);
                }
            }
            JsonElement jse = new JsonParser().parse(response.toString());
            JsonObject resp = jse.getAsJsonObject();
            if (resp.has("error")) {
                System.err.println("Wrong permissions");
                System.exit(1);
            }
            if (!resp.has("access_token")) throw new JsonParseException("No access token data in response");
            return resp.getAsJsonPrimitive("access_token").getAsString();
        } catch (IOException e) {
            openWebpage(url);
            e.printStackTrace();
            Scanner sc = new Scanner(System.in);
            System.out.println("Enter api key");
            String key = sc.next();
            return key;
        } catch (JsonParseException e) {
            System.err.println("Error parsing access token");
            e.printStackTrace();
            System.exit(1);
        }
        return accessKey;
    }

    /**
     * Calls the VK audio.get method, appending owner_id / album_id only when set.
     * Exits the process on a non-200 response or I/O failure.
     *
     * @param key the access token
     * @return the raw JSON response body
     */
    private static String getResponse(String key) {
        String request = "https://api.vk.com/method/audio.get?access_token=" + key;
        if (ownerId != -1) request += "&owner_id=" + ownerId;
        // FIX: this used to test ownerId (copy-paste), so -a without -o was ignored.
        if (albumId != -1) request += "&album_id=" + albumId;
        StringBuilder response = new StringBuilder();
        try {
            URL url = new URL(request);
            HttpURLConnection con = (HttpURLConnection) url.openConnection();
            con.setRequestMethod("GET");
            if (con.getResponseCode() != 200) {
                System.err.println("Bad api key");
                System.exit(0);
            }
            // FIX: reader closed via try-with-resources even on exception paths.
            try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
                String line;
                while ((line = in.readLine()) != null) {
                    response.append(line);
                }
            }
        } catch (MalformedURLException e) {
            e.printStackTrace();
            System.exit(0);
        } catch (IOException e) {
            System.err.println("Error downloading music list");
            e.printStackTrace();
            System.exit(1);
        }
        return response.toString();
    }

    /**
     * Opens the given URL in the default browser when supported, otherwise prints
     * it so the user can open it manually.
     *
     * @param url the page to open
     */
    public static void openWebpage(URL url) {
        URI uri;
        try {
            uri = url.toURI();
        } catch (URISyntaxException e) {
            e.printStackTrace();
            return;
        }
        Desktop desktop = Desktop.isDesktopSupported() ? Desktop.getDesktop() : null;
        if (desktop != null && desktop.isSupported(Desktop.Action.BROWSE)) {
            try {
                desktop.browse(uri);
            } catch (Exception e) {
                e.printStackTrace();
            }
        } else {
            System.out.println("Get api key: " + uri.toString());
        }
    }
}
package fr.baloomba.feeligo.model;

import android.os.Parcel;
import android.os.Parcelable;
import fr.baloomba.feeligo.helper.JSONHelper;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Parcelable model of a sticker: an id, a chat-embeddable "sendable code" of the
 * form "[s:p/<code>]", optional search tags, an image URL and its image metadata.
 * Instances are built through {@link Builder} or parsed from JSON / message text
 * via {@link Factory}.
 */
public class Sticker implements Parcelable {

    // <editor-fold desc="VARIABLES">
    private final static String TAG = Sticker.class.getSimpleName();
    // Sticker identifier; Builder default is -1L.
    private Long mId;
    // Embeddable code "[s:p/<code>]" (see Init.setSendableCode).
    private String mSendableCode;
    // Optional search tags; may be null.
    private ArrayList<String> mTags;
    private String mImageURL;
    private StickerImage mImage;
    // </editor-fold>

    // <editor-fold desc="CONSTRUCTORS">

    /** Builds a sticker from the values collected by a {@link Init} builder. */
    public Sticker(Init<?> builder) {
        mId = builder.mId;
        mSendableCode = builder.mSendableCode;
        mTags = builder.mTags;
        mImageURL = builder.mImageURL;
        mImage = builder.mImage;
    }

    /**
     * Restores a sticker from a parcel; field order must match
     * {@link #writeToParcel(Parcel, int)} exactly.
     * NOTE(review): an empty (non-null) tag list is written as size 0 and read
     * back as null, so "empty" does not round-trip -- confirm callers do not
     * depend on the distinction.
     */
    public Sticker(Parcel in) {
        mId = in.readLong();
        mSendableCode = in.readString();
        mTags = null;
        int size = in.readInt();
        if (size != 0) {
            mTags = new ArrayList<String>();
            for (int i = 0; i < size; i++) {
                mTags.add(in.readString());
            }
        }
        mImageURL = in.readString();
        mImage = in.readParcelable(StickerImage.class.getClassLoader());
    }
    // </editor-fold>

    // <editor-fold desc="SETTERS">
    public void setId(Long id) { mId = id; }
    public void setSendableCode(String code) { mSendableCode = code; }
    public void setTags(ArrayList<String> tags) { mTags = tags; }
    public void setImageURL(String url) { mImageURL = url; }
    public void setImages(StickerImage image) { mImage = image; }
    // </editor-fold>

    // <editor-fold desc="GETTERS">
    public Long getId() { return mId; }
    public String getSendableCode() { return mSendableCode; }
    public ArrayList<String> getTags() { return mTags; }
    public String getImageURL() { return mImageURL; }
    public StickerImage getImage() { return mImage; }
    // Delegates to the image; NOTE(review): throws NullPointerException when mImage is null.
    public String getMessage() { return mImage.getMessage(); }
    // </editor-fold>

    // <editor-fold desc="PARCELABLE METHODS IMPLEMENTATION">
    @Override
    public int describeContents() {
        return 0;
    }

    /**
     * Serializes the sticker; keep the write order in sync with {@link #Sticker(Parcel)}.
     * NOTE(review): writeLong(mId) auto-unboxes and NPEs if mId is null (the
     * Builder defaults it to -1L, so this only bites hand-built instances).
     */
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeLong(mId);
        dest.writeString(mSendableCode);
        dest.writeInt(mTags != null ? mTags.size() : 0);
        if (mTags != null) {
            for (String tag : mTags) {
                dest.writeString(tag);
            }
        }
        dest.writeString(mImageURL);
        dest.writeParcelable(mImage, flags);
    }

    public static final Creator<Sticker> CREATOR = new Creator<Sticker>() {
        public Sticker createFromParcel(Parcel in) {
            return new Sticker(in);
        }

        public Sticker[] newArray(int size) {
            return new Sticker[size];
        }
    };
    // </editor-fold>

    // <editor-fold desc="FACTORY">

    /** Singleton factory creating stickers from API JSON or from message text. */
    public static class Factory {

        // <editor-fold desc="VARIABLES">
        private static Factory sInstance = new Factory();
        // </editor-fold>

        // <editor-fold desc="INSTANCE">
        public static Factory getInstance() {
            return sInstance;
        }
        // </editor-fold>

        // <editor-fold desc="FACTORY METHODS">

        /**
         * Builds a sticker from an API JSON object; returns null for null input.
         *
         * @throws JSONException if a mandatory field ("id", "image_url") is missing
         */
        public Sticker stickerFromJSON(JSONObject object) throws JSONException {
            if (object == null) return null;
            ArrayList<String> tags = null;
            if (object.has("tags")) {
                tags = new ArrayList<String>();
                JSONArray array = object.getJSONArray("tags");
                for (int i = 0; i < array.length(); i++) {
                    tags.add(array.getString(i));
                }
            }
            // NOTE(review): the sendable code is first seeded from "image_url"
            // and then derived again inside setImageURL below, overwriting it --
            // the first setSendableCode call looks redundant; confirm against
            // the API schema before touching it.
            return new Builder()
                    .setId(object.getLong("id"))
                    .setSendableCode(object.getString("image_url"))
                    .setTags(tags)
                    .setImageURL(object.getString("image_url"))
                    .setImages(StickerImage.Factory.getInstance()
                            .stickerImageFromJSON(JSONHelper.getJSONObject(object, "image")))
                    .build();
        }

        /**
         * Extracts the first "[s:...]" token from message text and builds a
         * sticker from it; returns null when no token is present.
         * NOTE(review): assumes the captured code contains at least two '/'
         * segments overall -- setSendableCode splits on '/' again and would
         * throw ArrayIndexOutOfBoundsException otherwise; TODO confirm the
         * code format.
         */
        public Sticker sticker(String text) {
            Sticker sticker = null;
            Pattern pattern = Pattern.compile("\\[s:([a-zA-Z0-9\\/\\.\\?\\=]+[a-zA-Z0-9]*)\\]");
            Matcher matcher = pattern.matcher(text);
            if (matcher.find()) {
                String code = matcher.group(1).split("/")[1];
                sticker = new Builder()
                        .setSendableCode(code)
                        .setImages(StickerImage.Factory.getInstance().stickerImage(code))
                        .build();
            }
            return sticker;
        }
        // </editor-fold>
    }
    // </editor-fold>

    // <editor-fold desc="INIT BUILDER CLASS">

    /** Curiously-recurring builder base so subclasses keep their own type on chaining. */
    protected static abstract class Init<T extends Init<T>> {

        // <editor-fold desc="VARIABLES">
        private Long mId;
        private String mSendableCode;
        private ArrayList<String> mTags;
        private String mImageURL;
        private StickerImage mImage;
        // </editor-fold>

        // <editor-fold desc="CONSTRUCTORS">
        public Init() {
            // Defaults: id -1, empty strings, no tags, no image.
            mId = -1L;
            mSendableCode = "";
            mTags = null;
            mImageURL = "";
            mImage = null;
        }
        // </editor-fold>

        // <editor-fold desc="SETTERS">
        public T setId(Long id) {
            mId = id;
            return self();
        }

        // Wraps the second '/'-separated segment as "[s:p/<segment>]".
        // NOTE(review): throws ArrayIndexOutOfBoundsException when code has no '/'.
        public T setSendableCode(String code) {
            mSendableCode = "[s:p/" + code.split("/")[1] + "]";
            return self();
        }

        public T setTags(ArrayList<String> tags) {
            mTags = tags;
            return self();
        }

        // Also re-derives the sendable code from the part after "http://stkr.es/".
        // NOTE(review): assumes the URL starts with that exact host -- TODO confirm.
        public T setImageURL(String url) {
            mImageURL = url;
            setSendableCode(url.split("http://stkr.es/")[1]);
            return self();
        }

        public T setImages(StickerImage image) {
            mImage = image;
            return self();
        }
        // </editor-fold>

        // <editor-fold desc="METHODS">
        protected abstract T self();

        public Sticker build() {
            return new Sticker(this);
        }
        // </editor-fold>
    }
    // </editor-fold>

    // <editor-fold desc="BUILDER">
    public static class Builder extends Init<Builder> {
        // <editor-fold desc="OVERRIDDEN INIT<BUILDER> METHODS">
        @Override
        protected Builder self() {
            return this;
        }
        // </editor-fold>
    }
    // </editor-fold>
}
package by.vshkl.translate.activities; import android.content.pm.PackageManager; import android.os.Build; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.design.widget.Snackbar; import android.support.v7.app.AppCompatActivity; import android.view.View; import android.webkit.CookieManager; import android.webkit.GeolocationPermissions; import android.webkit.WebChromeClient; import android.webkit.WebResourceRequest; import android.webkit.WebResourceResponse; import android.webkit.WebSettings; import android.webkit.WebView; import android.webkit.WebViewClient; import android.widget.FrameLayout; import android.widget.ImageButton; import android.widget.TextView; import com.google.firebase.analytics.FirebaseAnalytics; import com.mikepenz.materialdrawer.AccountHeaderBuilder; import com.mikepenz.materialdrawer.Drawer; import com.mikepenz.materialdrawer.DrawerBuilder; import com.mikepenz.materialdrawer.model.PrimaryDrawerItem; import com.mikepenz.materialdrawer.model.interfaces.IDrawerItem; import com.wang.avi.AVLoadingIndicatorView; import java.util.List; import by.vshkl.translate.R; import by.vshkl.translate.listeners.DeleteConfirmationListener; import by.vshkl.translate.listeners.StopEditListener; import by.vshkl.translate.listeners.StopsDialogListener; import by.vshkl.translate.model.Stop; import by.vshkl.translate.receivers.NetworkAndLocationStateReceiver; import by.vshkl.translate.utilities.BroadcastReceiverHelper; import by.vshkl.translate.utilities.CookieHelper; import by.vshkl.translate.utilities.DbHelper; import by.vshkl.translate.utilities.DialogHelper; import by.vshkl.translate.utilities.LocationHelper; import by.vshkl.translate.utilities.NetworkHelper; import by.vshkl.translate.utilities.PermissionsHelper; import by.vshkl.translate.utilities.UrlHelper; import io.reactivex.android.schedulers.AndroidSchedulers; import io.reactivex.functions.Consumer; import 
io.reactivex.schedulers.Schedulers; public class MapActivity extends AppCompatActivity implements NetworkAndLocationStateReceiver.NetworkAndLocationStateReceiverCallback, StopEditListener, StopsDialogListener, DeleteConfirmationListener { private static final int REQUEST_CODE = 42; private static final String URL_BASE = "http://www.minsktrans.by"; private static final String URL_MAP = "http://www.minsktrans.by/lookout_yard/Home/Index/minsk?neareststops"; private static final String URL_STOP = "http://www.minsktrans.by/lookout_yard/Home/Index/minsk?neareststops&s="; private FrameLayout rootView; private WebView wvMap; private ImageButton btnLocation; private ImageButton btnFavourite; private AVLoadingIndicatorView pbLoading; private Drawer drawer; private List<Stop> stops; private boolean hasSavedState = false; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_map); rootView = (FrameLayout) findViewById(R.id.root_view_map); wvMap = (WebView) findViewById(R.id.wv_map); btnLocation = (ImageButton) findViewById(R.id.btn_location); btnFavourite = (ImageButton) findViewById(R.id.btn_favourite); pbLoading = (AVLoadingIndicatorView) findViewById(R.id.pb_loading); hasSavedState = savedInstanceState != null; initializeDrawer(); checkNetworkAndLocation(); enableBroadcastReceiver(); btnLocation.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { checkNetworkAndLocation(); } }); btnFavourite.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { String url = wvMap.getUrl(); if (isStopInList(url)) { DialogHelper.showDeleteConfirmationDialog(MapActivity.this, MapActivity.this, findStop(url)); } else { DialogHelper.showEditStopDialog(MapActivity.this, wvMap.getUrl(), "", "", MapActivity.this); } } }); } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { super.onRestoreInstanceState(savedInstanceState); 
wvMap.restoreState(savedInstanceState); } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); wvMap.saveState(outState); } @Override protected void onDestroy() { disableBroadcastReceiver(); super.onDestroy(); } @Override public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { switch (requestCode) { case REQUEST_CODE: if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED && grantResults[1] == PackageManager.PERMISSION_GRANTED) { initializeWebView(); } break; default: super.onRequestPermissionsResult(requestCode, permissions, grantResults); } } @Override public void onBackPressed() { if (wvMap.canGoBack()) { wvMap.goBackOrForward(-1); wvMap.clearHistory(); } else { super.onBackPressed(); } } @Override public void onStateChangeReceived() { checkNetworkAndLocation(); } //------------------------------------------------------------------------------------------------------------------ @Override public void onStopEdited(String stopUrl, String stopName, String stopDirection) { updateStopFavouriteIcon(wvMap.getUrl()); addStopToFavourite(stopUrl, stopName, stopDirection); } @Override public void onStopEdit(int stopPosition) { Stop stop = stops.get(stopPosition); if (stop != null) { DialogHelper.showEditStopDialog(MapActivity.this, stop.getUrl(), stop.getName(), stop.getDirection(), MapActivity.this); } } @Override public void onStopDelete(int stopPosition) { DialogHelper.showDeleteConfirmationDialog(MapActivity.this, MapActivity.this, stopPosition); } @Override public void onDeleteConfirmed(int stopPosition) { updateStopFavouriteIcon(wvMap.getUrl()); Stop stop = stops.get(stopPosition); if (stop != null) { deleteStop(stop); } } //------------------------------------------------------------------------------------------------------------------ private void addStopToFavourite(String stopUrl, @Nullable String stopName, @Nullable String 
stopDirection) { DbHelper.writeStop(stopUrl, stopName, stopDirection) .subscribeOn(Schedulers.io()) .observeOn(AndroidSchedulers.mainThread()) .subscribe(new Consumer<Boolean>() { @Override public void accept(Boolean aBoolean) throws Exception { getAllStops(); logAddStopAction(); } }); } private void deleteStop(Stop stop) { DbHelper.deleteStop(UrlHelper.extractStopId(stop.getUrl())) .subscribeOn(Schedulers.io()) .observeOn(AndroidSchedulers.mainThread()) .subscribe(new Consumer<Boolean>() { @Override public void accept(Boolean aBoolean) throws Exception { getAllStops(); } }); } private void getAllStops() { DbHelper.readStops() .subscribeOn(Schedulers.io()) .observeOn(AndroidSchedulers.mainThread()) .subscribe(new Consumer<List<Stop>>() { @Override public void accept(List<Stop> stops) throws Exception { if (stops != null) { updateDrawerItems(stops); } } }); } //------------------------------------------------------------------------------------------------------------------ private void initializeWebView() { CookieManager.getInstance().setCookie(URL_BASE, CookieHelper.getCookies(getApplicationContext())); wvMap.clearHistory(); if (Build.VERSION.SDK_INT >= 19) { wvMap.setLayerType(View.LAYER_TYPE_HARDWARE, null); } else { wvMap.setLayerType(View.LAYER_TYPE_SOFTWARE, null); } wvMap.getSettings().setCacheMode(WebSettings.LOAD_DEFAULT); wvMap.getSettings().setAppCacheEnabled(true); wvMap.getSettings().setDomStorageEnabled(true); wvMap.getSettings().setLayoutAlgorithm(WebSettings.LayoutAlgorithm.NARROW_COLUMNS); wvMap.getSettings().setJavaScriptEnabled(true); wvMap.getSettings().setGeolocationEnabled(true); wvMap.setWebViewClient(new WebViewClient() { @Override public void onPageFinished(WebView view, String url) { super.onPageFinished(view, url); hideLoading(); } @Override public WebResourceResponse shouldInterceptRequest(final WebView view, WebResourceRequest request) { runOnUiThread(new Runnable() { @Override public void run() { String url = view.getUrl(); if 
(url.startsWith(URL_STOP)) { btnLocation.setVisibility(View.GONE); btnFavourite.setVisibility(View.VISIBLE); updateStopFavouriteIcon(url); } else if (url.equals(URL_MAP)) { btnFavourite.setVisibility(View.GONE); btnLocation.setVisibility(View.VISIBLE); } } }); return super.shouldInterceptRequest(view, request); } }); wvMap.setWebChromeClient(new WebChromeClient() { @Override public void onGeolocationPermissionsShowPrompt(String origin, GeolocationPermissions.Callback callback) { callback.invoke(origin, true, false); } }); if (!hasSavedState) { wvMap.loadUrl(URL_MAP); } } private void loadStopTimetable(int stopPosition) { if (stops != null) { Stop stop = stops.get(stopPosition); wvMap.loadUrl(stop.getUrl()); } } private void initializeDrawer() { drawer = new DrawerBuilder().withActivity(this) .withAccountHeader(new AccountHeaderBuilder() .withActivity(this) .withCompactStyle(true) .withHeaderBackground(R.drawable.header) .build()) .withOnDrawerItemClickListener(new Drawer.OnDrawerItemClickListener() { @Override public boolean onItemClick(View view, int position, IDrawerItem drawerItem) { loadStopTimetable((int) drawerItem.getIdentifier()); return false; } }) .withOnDrawerItemLongClickListener(new Drawer.OnDrawerItemLongClickListener() { @Override public boolean onItemLongClick(View view, int position, IDrawerItem drawerItem) { DialogHelper.showStopsDialog(MapActivity.this, MapActivity.this, (int) drawerItem.getIdentifier()); return false; } }) .build(); getAllStops(); } private void updateDrawerItems(List<Stop> stops) { if (drawer != null) { drawer.removeAllItems(); this.stops = stops; int size = this.stops.size(); for (int i = 0; i < size; i++) { Stop stop = this.stops.get(i); PrimaryDrawerItem item = new PrimaryDrawerItem() .withIdentifier(i) .withIcon(R.drawable.ic_stop_marker) .withName(stop.getName()) .withDescription(stop.getDirection()); drawer.addItem(item); } } } private void showLoading() { wvMap.setVisibility(View.GONE); 
btnLocation.setVisibility(View.GONE); pbLoading.show(); pbLoading.setVisibility(View.VISIBLE); } private void hideLoading() { pbLoading.hide(); pbLoading.setVisibility(View.GONE); wvMap.setVisibility(View.VISIBLE); btnLocation.setVisibility(View.VISIBLE); } private void updateStopFavouriteIcon(String url) { btnFavourite.setImageResource(!isStopInList(url) ? R.drawable.ic_star : R.drawable.ic_star_selected); } private boolean isStopInList(String url) { for (Stop stop : stops) { if (stop.getUrl().equals(url)) { return true; } } return false; } private int findStop(String url) { for (int i = 0; i < stops.size(); i++) { if (stops.get(i).getUrl().equals(url)) { return i; } } return -1; } private void enableBroadcastReceiver() { BroadcastReceiverHelper.enableBroadcastReceiver(this); NetworkAndLocationStateReceiver.setCallback(this); } private void disableBroadcastReceiver() { BroadcastReceiverHelper.disableBroadcastReceiver(this); NetworkAndLocationStateReceiver.removeCallback(); } private void checkPermissionsAndShowMap() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { if (!PermissionsHelper.hasLocationPermissions(MapActivity.this)) { if (PermissionsHelper.shouldShowRationale(MapActivity.this)) { showRationale(); } else { PermissionsHelper.requestLocationPermissions(MapActivity.this, REQUEST_CODE); } } else { showLoading(); initializeWebView(); } } else { showLoading(); initializeWebView(); } } private void showRationale() { Snackbar.make(rootView, getString(R.string.location_permissions_rationale), Snackbar.LENGTH_INDEFINITE) .setAction(android.R.string.ok, new View.OnClickListener() { @Override public void onClick(View v) { PermissionsHelper.requestLocationPermissions(MapActivity.this, REQUEST_CODE); } }) .show(); } private void checkNetworkAndLocation() { FrameLayout emptyView = (FrameLayout) findViewById(R.id.empty_view_map); TextView tvAlertMessage = (TextView) findViewById(R.id.tv_alert_message); boolean hasNetwork = 
NetworkHelper.hasNetworkConnection(MapActivity.this); boolean hasLocation = LocationHelper.hasLocationEnabled(MapActivity.this); if (hasNetwork && hasLocation) { emptyView.setVisibility(View.GONE); wvMap.setVisibility(View.VISIBLE); btnLocation.setVisibility(View.VISIBLE); checkPermissionsAndShowMap(); } else { wvMap.setVisibility(View.GONE); btnLocation.setVisibility(View.GONE); emptyView.setVisibility(View.VISIBLE); if (!hasNetwork && !hasLocation) { tvAlertMessage.setText(getString(R.string.message_template_both, getString(R.string.message_network_needed), getString(R.string.message_location_needed))); } else if (!hasNetwork) { tvAlertMessage.setText(getString(R.string.message_template_one, getString(R.string.message_network_needed))); } else { tvAlertMessage.setText(getString(R.string.message_template_one, getString(R.string.message_location_needed))); } } } private void logAddStopAction() { Bundle bundle = new Bundle(); bundle.putString(FirebaseAnalytics.Param.VALUE, ""); FirebaseAnalytics.getInstance(getApplicationContext()).logEvent(getString(R.string.event_stop_added), bundle); } }
/*
** 2013 October 27
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
**    May you do good and not evil.
**    May you find forgiveness for yourself and forgive others.
**    May you share freely, never taking more than you give.
*/
package info.ata4.minecraft.dragon.server.entity.helper;

import info.ata4.minecraft.dragon.server.entity.EntityTameableDragon;
import info.ata4.minecraft.dragon.server.entity.ai.air.EntityAICatchOwnerAir;
import info.ata4.minecraft.dragon.server.entity.ai.air.EntityAILand;
import info.ata4.minecraft.dragon.server.entity.ai.air.EntityAIRideAir;
import info.ata4.minecraft.dragon.server.entity.ai.ground.*;
import info.ata4.minecraft.dragon.server.util.ClientServerSynchronisedTickCount;
import info.ata4.minecraft.dragon.server.util.EntityClassPredicate;
import net.minecraft.block.Block;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.ai.*;
import net.minecraft.entity.ai.attributes.IAttributeInstance;
import net.minecraft.entity.passive.*;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.pathfinding.PathNavigateGround;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumParticleTypes;
import net.minecraft.util.MathHelper;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import static info.ata4.minecraft.dragon.server.entity.helper.DragonLifeStage.*;

/**
 * Tracks and applies a dragon's life stage (EGG -> HATCHLING -> JUVENILE ->
 * ADULT), driven by the ticks elapsed since creation. The server owns the
 * tick counter and publishes it through the DataWatcher every
 * {@code TICKS_SINCE_CREATION_UPDATE_INTERVAL} ticks; the client interpolates
 * between updates with a {@link ClientServerSynchronisedTickCount}. Stage
 * transitions resize the dragon, rescale its attributes and swap its AI tasks.
 *
 * @author Nico Bergemann <barracuda415 at yahoo.de>
 */
public class DragonLifeStageHelper extends DragonHelper {

    private static final Logger L = LogManager.getLogger();

    private static final String NBT_TICKS_SINCE_CREATION = "TicksSinceCreation";
    // How often (in ticks) the server pushes the tick counter to clients.
    private static final int TICKS_SINCE_CREATION_UPDATE_INTERVAL = 100;

    // Last stage seen by updateLifeStage(), used to detect transitions.
    private DragonLifeStage lifeStagePrev;
    // Shared attribute modifier applied to max health and attack damage.
    private final DragonScaleModifier scaleModifier = new DragonScaleModifier();
    // Remaining ticks of egg wiggle animation on each horizontal axis.
    private int eggWiggleX;
    private int eggWiggleZ;

    // the ticks since creation is used to control the dragon's life stage. It is only updated by the server occasionally.
    // the client keeps a cached copy of it and uses client ticks to interpolate in the gaps.
    // when the watcher is updated from the server, the client will tick it faster or slower to resynchronise
    private final int dataIndexTicksSinceCreation;
    private int ticksSinceCreationServer;
    private final ClientServerSynchronisedTickCount ticksSinceCreationClient;

    /**
     * @param dragon the dragon this helper belongs to
     * @param dataWatcherIndex DataWatcher slot used to sync the tick counter
     */
    public DragonLifeStageHelper(EntityTameableDragon dragon, int dataWatcherIndex) {
        super(dragon);
        dataIndexTicksSinceCreation = dataWatcherIndex;
        ticksSinceCreationServer = 0;
        dataWatcher.addObject(dataIndexTicksSinceCreation, ticksSinceCreationServer);
        ticksSinceCreationClient = new ClientServerSynchronisedTickCount(TICKS_SINCE_CREATION_UPDATE_INTERVAL);
        ticksSinceCreationClient.reset(ticksSinceCreationServer);
    }

    /** Applies the current-scale modifier to health and attack attributes. */
    @Override
    public void applyEntityAttributes() {
        scaleModifier.setScale(getScale());

        dragon.getEntityAttribute(SharedMonsterAttributes.maxHealth).applyModifier(scaleModifier);
        dragon.getEntityAttribute(SharedMonsterAttributes.attackDamage).applyModifier(scaleModifier);
    }

    /**
     * Generates some egg shell particles and a breaking sound.
     */
    public void playEggCrackEffect() {
        // aux SFX 2001 is the vanilla "block break" effect at the egg's block.
        int bx = (int) Math.round(dragon.posX - 0.5);
        int by = (int) Math.round(dragon.posY);
        int bz = (int) Math.round(dragon.posZ - 0.5);
        dragon.worldObj.playAuxSFX(2001, new BlockPos(bx, by, bz), Block.getIdFromBlock(Blocks.dragon_egg));
    }

    public int getEggWiggleX() {
        return eggWiggleX;
    }

    public int getEggWiggleZ() {
        return eggWiggleZ;
    }

    /**
     * Returns the current life stage of the dragon.
     *
     * @return current life stage
     */
    public DragonLifeStage getLifeStage() {
        int age = getTicksSinceCreation();
        return DragonLifeStage.getLifeStageFromTickCount(age);
    }

    /**
     * Returns the size multiplier for the current age.
     *
     * @return size
     */
//    public float getScale() {
//        int age = getTicksSinceCreation();
//        return DragonLifeStage.getScaleFromTickCount(age);
//    }

    /**
     * Age in ticks: authoritative server counter on the server, interpolated
     * synchronised counter on the client.
     */
    public int getTicksSinceCreation() {
        if (!dragon.worldObj.isRemote) {
            return ticksSinceCreationServer;
        }
        return ticksSinceCreationClient.getCurrentTickCount();
    }

    @Override
    public void writeToNBT(NBTTagCompound nbt) {
        nbt.setInteger(NBT_TICKS_SINCE_CREATION, getTicksSinceCreation());
    }

    @Override
    public void readFromNBT(NBTTagCompound nbt) {
        // Clamp the stored value to a valid range before adopting it and
        // republishing it to clients.
        int ticksRead = nbt.getInteger(NBT_TICKS_SINCE_CREATION);
        ticksRead = DragonLifeStage.clipTickCountToValid(ticksRead);
        ticksSinceCreationServer = ticksRead;
        dataWatcher.updateObject(dataIndexTicksSinceCreation, ticksSinceCreationServer);
    }

    /**
     * Returns the size multiplier for the current age.
     * Constant for EGG and ADULT; linear interpolation across HATCHLING
     * (0.33 -> 0.66) and JUVENILE (0.66 -> 1.0) based on the fraction of the
     * stage elapsed.
     *
     * @return size
     */
    public float getScale() {
        DragonLifeStage lifeStage = getLifeStage();
        int stageStartTicks = lifeStage.startTicks;
        int timeInThisStage = getTicksSinceCreation() - stageStartTicks;
        float fractionOfStage = timeInThisStage / (float) lifeStage.durationTicks;
        fractionOfStage = MathHelper.clamp_float(fractionOfStage, 0.0F, 1.0F);

        // EGG_SIZE is chosen so the rendered egg is 0.9 blocks wide.
        final float EGG_SIZE = 0.9F / EntityTameableDragon.BASE_WIDTH;
        final float HATCHLING_SIZE = 0.33F;
        final float JUVENILE_SIZE = 0.66F;
        final float ADULT_SIZE = 1.0F;

        switch (getLifeStage()) {
            case EGG: {
                // constant size for egg stage
                return EGG_SIZE;
            }
            case HATCHLING: {
                return HATCHLING_SIZE + fractionOfStage * (JUVENILE_SIZE - HATCHLING_SIZE);
            }
            case JUVENILE: {
                return JUVENILE_SIZE + fractionOfStage * (ADULT_SIZE - JUVENILE_SIZE);
            }
            case ADULT: {
                return ADULT_SIZE;
            }
            default: {
                L.error("Illegal lifestage in getScale():" + getLifeStage());
                return 1;
            }
        }
    }

    /**
     * Transforms the dragon to an egg (item form)
     */
    public void transformToEgg() {
        if (dragon.getHealth() <= 0) {
            // no can do
            return;
        }

        L.debug("transforming to egg");

        float volume = 1;
        float pitch = 0.5f + (0.5f - rand.nextFloat()) * 0.1f;
        dragon.worldObj.playSoundAtEntity(dragon, "mob.endermen.portal", volume, pitch);

        // Drop the saddle (if any) and the egg block, then remove the entity.
        if (dragon.isSaddled()) {
            dragon.dropItem(Items.saddle, 1);
        }

        dragon.entityDropItem(new ItemStack(Blocks.dragon_egg), 0);
        dragon.setDead();
    }

    /**
     * Sets a new life stage for the dragon. Server-side only: rewinds the tick
     * counter to the stage's start and publishes it; calling on the client is
     * an error.
     *
     * @param lifeStage
     */
    public final void setLifeStage(DragonLifeStage lifeStage) {
        L.trace("setLifeStage({})", lifeStage);
        if (!dragon.worldObj.isRemote) {
            ticksSinceCreationServer = lifeStage.startTicks;
            dataWatcher.updateObject(dataIndexTicksSinceCreation, ticksSinceCreationServer);
        } else {
            L.error("setLifeStage called on Client");
        }
        updateLifeStage();
    }

    /**
     * Called when the dragon enters a new life stage. Client side plays the
     * hatch effect; server side updates flight ability, navigation, AI tasks
     * and attribute modifiers, then heals to the new max health.
     */
    private void onNewLifeStage(DragonLifeStage lifeStage, DragonLifeStage prevLifeStage) {
        L.trace("onNewLifeStage({},{})", prevLifeStage, lifeStage);

        if (dragon.isClient()) {
            if (prevLifeStage != null && prevLifeStage == EGG && lifeStage == HATCHLING) {
                playEggCrackEffect();
            }
        } else {
            // eggs and hatchlings can't fly
            dragon.setCanFly(lifeStage != EGG && lifeStage != HATCHLING);

//            // only hatchlings are small enough for doors
//            // (eggs don't move on their own anyway and are ignored)
//            dragon.getNavigator().setEnterDoors(lifeStage == HATCHLING);
            // guessed, based on EntityAIRestrictOpenDoor - break the door down, don't open it
            if (dragon.getNavigator() instanceof PathNavigateGround) {
                PathNavigateGround pathNavigateGround = (PathNavigateGround) dragon.getNavigator();
                pathNavigateGround.func_179691_c(lifeStage == HATCHLING);
            }

            // update AI states so the egg won't move
//            dragon.setNoAI(lifeStage == EGG);  stops egg from sitting on the ground properly :(
            changeAITasks(lifeStage, prevLifeStage);

            // update attribute modifier
            IAttributeInstance healthAttrib = dragon.getEntityAttribute(SharedMonsterAttributes.maxHealth);
            IAttributeInstance damageAttrib = dragon.getEntityAttribute(SharedMonsterAttributes.attackDamage);

            // remove old size modifiers
            healthAttrib.removeModifier(scaleModifier);
            damageAttrib.removeModifier(scaleModifier);

            // update modifier
            scaleModifier.setScale(getScale());

            // set new size modifiers
            healthAttrib.applyModifier(scaleModifier);
            damageAttrib.applyModifier(scaleModifier);

            // heal dragon to updated full health
            dragon.setHealth(dragon.getMaxHealth());
        }
    }

    @Override
    public void onLivingUpdate() {
        // testing code
//        if (dragon.isServer()) {
//            dragon.setGrowingAge((int) ((((Math.sin(Math.toRadians(dragon.ticksExisted))) + 1) * 0.5) * EGG.ageLimit));
//        }

        // if the dragon is not an adult, update its growth ticks
        if (!dragon.worldObj.isRemote) {
            if (getLifeStage() != ADULT) {
                ++ticksSinceCreationServer;
                // Publish to clients only every update interval to limit traffic.
                if (ticksSinceCreationServer % TICKS_SINCE_CREATION_UPDATE_INTERVAL == 0) {
                    dataWatcher.updateObject(dataIndexTicksSinceCreation, ticksSinceCreationServer);
                }
            }
        } else {
            // Client: resynchronise against the watched value, then tick locally.
            ticksSinceCreationClient.updateFromServer(dataWatcher.getWatchableObjectInt(
                    dataIndexTicksSinceCreation));
            if (getLifeStage() != ADULT) {
                ticksSinceCreationClient.tick();
            }
        }

        updateLifeStage();
        updateEgg();
        updateScale();
    }

    private void updateLifeStage() {
        // trigger event when a new life stage was reached
        DragonLifeStage lifeStage = getLifeStage();
        if (lifeStagePrev != lifeStage) {
            onNewLifeStage(lifeStage, lifeStagePrev);
            lifeStagePrev = lifeStage;
        }
    }

    /** Egg-stage only: wiggle animation and ambient portal particles. */
    private void updateEgg() {
        if (!isEgg()) {
            return;
        }

        // animate egg wiggle based on the time the eggs take to hatch
        int age = getTicksSinceCreation();
        // NOTE(review): this reads HATCHLING.durationTicks, not EGG's, as the
        // hatch time — confirm against DragonLifeStage whether that is intended.
        int hatchAge = DragonLifeStage.HATCHLING.durationTicks;
        float fractionComplete = age / (float) hatchAge;

        // wait until the egg is nearly hatched
        if (fractionComplete > 0.66f) {
            float wiggleChance = fractionComplete / 60;

            if (eggWiggleX > 0) {
                eggWiggleX--;
            } else if (rand.nextFloat() < wiggleChance) {
                eggWiggleX = rand.nextBoolean() ? 10 : 20;
                playEggCrackEffect();
            }

            if (eggWiggleZ > 0) {
                eggWiggleZ--;
            } else if (rand.nextFloat() < wiggleChance) {
                eggWiggleZ = rand.nextBoolean() ? 10 : 20;
                playEggCrackEffect();
            }
        }

        // spawn generic particles
        double px = dragon.posX + (rand.nextDouble() - 0.5);
        double py = dragon.posY + (rand.nextDouble() - 0.5);
        double pz = dragon.posZ + (rand.nextDouble() - 0.5);
        double ox = (rand.nextDouble() - 0.5) * 2;
        double oy = (rand.nextDouble() - 0.5) * 2;
        double oz = (rand.nextDouble() - 0.5) * 2;
        dragon.worldObj.spawnParticle(EnumParticleTypes.PORTAL, px, py, pz, ox, oy, oz);
    }

    private void updateScale() {
        dragon.setScalePublic(getScale());
    }

    @Override
    public void onDeath() {
        if (dragon.isClient() && isEgg()) {
            playEggCrackEffect();
        }
    }

    public boolean isEgg() {
        return getLifeStage() == EGG;
    }

    public boolean isHatchling() {
        return getLifeStage() == HATCHLING;
    }

    public boolean isJuvenile() {
        return getLifeStage() == JUVENILE;
    }

    public boolean isAdult() {
        return getLifeStage() == ADULT;
    }

    /**
     * Rebuilds the dragon's AI task lists for a new life stage. Only the
     * EGG <-> non-EGG transitions (and initial NBT load, where a stage may be
     * null) actually change the task set; other transitions return early.
     * Eggs get no tasks at all.
     */
    private void changeAITasks(DragonLifeStage newLifeStage, DragonLifeStage previousLifeStage) {
        // handle initialisation after load from NBT
        if (newLifeStage != null && previousLifeStage != null) {
            if (newLifeStage == previousLifeStage) {
                return;
            }
            if (newLifeStage != EGG && previousLifeStage != EGG) {
                return;
            }
        }

        EntityAITasks tasks = dragon.tasks;
        EntityAITasks airTasks = dragon.airTasks;
        EntityAITasks targetTasks = dragon.targetTasks;

        // Clear all three task lists; removeTask mutates taskEntries, hence
        // the drain-from-front loops rather than iteration.
        while (!tasks.taskEntries.isEmpty()) {
            EntityAIBase entityAIBase = ((EntityAITasks.EntityAITaskEntry) tasks.taskEntries.get(0)).action;
            tasks.removeTask(entityAIBase);
        }
        while (!airTasks.taskEntries.isEmpty()) {
            EntityAIBase entityAIBase = ((EntityAITasks.EntityAITaskEntry) airTasks.taskEntries.get(0)).action;
            airTasks.removeTask(entityAIBase);
        }
        while (!targetTasks.taskEntries.isEmpty()) {
            EntityAIBase entityAIBase = ((EntityAITasks.EntityAITaskEntry) targetTasks.taskEntries.get(0)).action;
            targetTasks.removeTask(entityAIBase);
        }

        if (newLifeStage == EGG) {
            return;
        }

        // mutex 1: movement
        // mutex 2: looking
        // mutex 4: special state
        tasks.addTask(0, new EntityAICatchOwnerGround(dragon)); // mutex all
        tasks.addTask(1, new EntityAIRideGround(dragon, 1)); // mutex all
        tasks.addTask(2, new EntityAISwimming(dragon)); // mutex 4
        tasks.addTask(3, dragon.getAISit()); // mutex 4+1
        tasks.addTask(4, new EntityAIDragonMate(dragon, 0.6)); // mutex 2+1
        tasks.addTask(5, new EntityAITempt(dragon, 0.75, dragon.FAVORITE_FOOD, false)); // mutex 2+1
        tasks.addTask(6, new EntityAIAttackOnCollide(dragon, 1, true)); // mutex 2+1
        tasks.addTask(7, new EntityAIFollowParent(dragon, 0.8)); // mutex 2+1
        tasks.addTask(8, new EntityAIDragonFollowOwner(dragon, 1, 12, 128)); // mutex 2+1
        tasks.addTask(8, new EntityAIPanicChild(dragon, 1)); // mutex 1
        tasks.addTask(9, new EntityAIWander(dragon, 1)); // mutex 1
        tasks.addTask(10, new EntityAIWatchIdle(dragon)); // mutex 2
        tasks.addTask(10, new EntityAIWatchLiving(dragon, 16, 0.05f)); // mutex 2

        // mutex 1: waypointing
        // mutex 2: continuous waypointing
        airTasks.addTask(0, new EntityAIRideAir(dragon)); // mutex all
        airTasks.addTask(0, new EntityAILand(dragon)); // mutex 0
        airTasks.addTask(0, new EntityAICatchOwnerAir(dragon)); // mutex all

        // mutex 1: generic targeting
        targetTasks.addTask(1, new EntityAIOwnerHurtByTarget(dragon)); // mutex 1
        targetTasks.addTask(2, new EntityAIOwnerHurtTarget(dragon)); // mutex 1
        targetTasks.addTask(3, new EntityAIHurtByTarget(dragon, false)); // mutex 1
        targetTasks.addTask(4, new EntityAIHunt(dragon, EntityAnimal.class, false,
                new EntityClassPredicate(
                        EntitySheep.class, EntityPig.class, EntityChicken.class, EntityRabbit.class
                )
        )); // mutex 1
    }
}
package org.jaudiotagger.audio.mp3; /** * @author : Paul Taylor * @author : Eric Farng * <p> * Version @version:$Id$ * <p> * MusicTag Copyright (C)2003,2004 * <p> * This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser * General Public License as published by the Free Software Foundation; either version 2.1 of the License, * or (at your option) any later version. * <p> * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even * the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * <p> * You should have received a copy of the GNU Lesser General Public License along with this library; if not, * you can get a copy from http://www.opensource.org/licenses/lgpl-license.php or write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ import org.jaudiotagger.audio.AudioFile; import org.jaudiotagger.audio.exceptions.CannotReadException; import org.jaudiotagger.audio.exceptions.CannotWriteException; import org.jaudiotagger.audio.exceptions.InvalidAudioFrameException; import org.jaudiotagger.audio.exceptions.NoWritePermissionsException; import org.jaudiotagger.audio.exceptions.ReadOnlyFileException; import org.jaudiotagger.audio.exceptions.UnableToModifyFileException; import org.jaudiotagger.logging.AbstractTagDisplayFormatter; import org.jaudiotagger.logging.ErrorMessage; import org.jaudiotagger.logging.Hex; import org.jaudiotagger.logging.PlainTextTagDisplayFormatter; import org.jaudiotagger.logging.XMLTagDisplayFormatter; import org.jaudiotagger.tag.Tag; import org.jaudiotagger.tag.TagException; import org.jaudiotagger.tag.TagNotFoundException; import org.jaudiotagger.tag.TagOptionSingleton; import org.jaudiotagger.tag.id3.AbstractID3v2Tag; import org.jaudiotagger.tag.id3.AbstractTag; import org.jaudiotagger.tag.id3.ID3v11Tag; import 
org.jaudiotagger.tag.id3.ID3v1Tag; import org.jaudiotagger.tag.id3.ID3v22Tag; import org.jaudiotagger.tag.id3.ID3v23Tag; import org.jaudiotagger.tag.id3.ID3v24Tag; import org.jaudiotagger.tag.lyrics3.AbstractLyrics3; import org.jaudiotagger.tag.reference.ID3V2Version; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.logging.Level; /** * This class represents a physical MP3 File */ public class MP3File extends AudioFile { private static final int MINIMUM_FILESIZE = 150; protected static AbstractTagDisplayFormatter tagFormatter; /** * the ID3v2 tag that this file contains. */ private AbstractID3v2Tag id3v2tag = null; /** * Representation of the idv2 tag as a idv24 tag */ private ID3v24Tag id3v2Asv24tag = null; /** * The Lyrics3 tag that this file contains. */ private AbstractLyrics3 lyrics3tag = null; /** * The ID3v1 tag that this file contains. */ private ID3v1Tag id3v1tag = null; /** * Creates a new empty MP3File datatype that is not associated with a * specific file. */ public MP3File() { } /** * Creates a new MP3File datatype and parse the tag from the given filename. * * @param filename MP3 file * @throws IOException on any I/O error * @throws TagException on any exception generated by this library. 
* @throws org.jaudiotagger.audio.exceptions.ReadOnlyFileException * @throws org.jaudiotagger.audio.exceptions.InvalidAudioFrameException */ public MP3File(String filename) throws IOException, TagException, ReadOnlyFileException, CannotReadException, InvalidAudioFrameException { this(new File(filename)); } /* Load ID3V1tag if exists */ public static final int LOAD_IDV1TAG = 2; /* Load ID3V2tag if exists */ public static final int LOAD_IDV2TAG = 4; /** * This option is currently ignored */ public static final int LOAD_LYRICS3 = 8; public static final int LOAD_ALL = LOAD_IDV1TAG | LOAD_IDV2TAG | LOAD_LYRICS3; /** * Creates a new MP3File dataType and parse the tag from the given file * Object, files must be writable to use this constructor. * * @param file MP3 file * @param loadOptions decide what tags to load * @throws IOException on any I/O error * @throws TagException on any exception generated by this library. * @throws org.jaudiotagger.audio.exceptions.ReadOnlyFileException * @throws org.jaudiotagger.audio.exceptions.InvalidAudioFrameException */ public MP3File(File file, int loadOptions) throws IOException, TagException, ReadOnlyFileException, CannotReadException, InvalidAudioFrameException { this(file, loadOptions, false); } /** * Read v1 tag * * @param file * @param newFile * @param loadOptions * @throws IOException */ private void readV1Tag(File file, RandomAccessFile newFile, int loadOptions) throws IOException { if ((loadOptions & LOAD_IDV1TAG) != 0) { logger.finer("Attempting to read id3v1tags"); try { id3v1tag = new ID3v11Tag(newFile, file.getName()); } catch (TagNotFoundException ex) { logger.config("No ids3v11 tag found"); } try { if (id3v1tag == null) { id3v1tag = new ID3v1Tag(newFile, file.getName()); } } catch (TagNotFoundException ex) { logger.config("No id3v1 tag found"); } } } /** * Read V2tag, if exists. 
* <p> * TODO:shouldn't we be handing TagExceptions:when will they be thrown * * @param file the file to read tags from * @param loadOptions load options * @throws IOException IO issues * @throws TagException tag issues */ private void readV2Tag(File file, int loadOptions, int startByte) throws IOException, TagException { //We know where the actual Audio starts so load all the file from start to that point into //a buffer then we can read the IDv2 information without needing any more File I/O if (startByte >= AbstractID3v2Tag.TAG_HEADER_LENGTH) { logger.finer("Attempting to read id3v2tags"); final FileInputStream fis = new FileInputStream(file); final ByteBuffer bb = ByteBuffer.allocateDirect(startByte); fis.getChannel().read(bb, 0); bb.rewind(); if ((loadOptions & LOAD_IDV2TAG) != 0) { logger.config("Attempting to read id3v2tags"); try { this.setID3v2Tag(new ID3v24Tag(bb, file.getName())); } catch (TagNotFoundException ex) { logger.config("No id3v24 tag found"); } try { if (id3v2tag == null) { this.setID3v2Tag(new ID3v23Tag(bb, file.getName())); } } catch (TagNotFoundException ex) { logger.config("No id3v23 tag found"); } try { if (id3v2tag == null) { this.setID3v2Tag(new ID3v22Tag(bb, file.getName())); } } catch (TagNotFoundException ex) { logger.config("No id3v22 tag found"); } } } else { logger.config("Not enough room for valid id3v2 tag:" + startByte); } } /** * Read lyrics3 Tag * <p> * TODO:not working * * @param file * @param newFile * @param loadOptions * @throws IOException */ private void readLyrics3Tag(File file, RandomAccessFile newFile, int loadOptions) throws IOException { /*if ((loadOptions & LOAD_LYRICS3) != 0) { try { lyrics3tag = new Lyrics3v2(newFile); } catch (TagNotFoundException ex) { } try { if (lyrics3tag == null) { lyrics3tag = new Lyrics3v1(newFile); } } catch (TagNotFoundException ex) { } } */ } /** * @param startByte * @param endByte * @return true if all the bytes between in the file between startByte and endByte are null, false * 
otherwise * @throws Exception */ private boolean isFilePortionNull(int startByte, int endByte) throws IOException { logger.config("Checking file portion:" + Hex.asHex(startByte) + ":" + Hex.asHex(endByte)); FileInputStream fis = null; FileChannel fc = null; try { fis = new FileInputStream(file); fc = fis.getChannel(); fc.position(startByte); ByteBuffer bb = ByteBuffer.allocateDirect(endByte - startByte); fc.read(bb); while (bb.hasRemaining()) { if (bb.get() != 0) { return false; } } } finally { if (fc != null) { fc.close(); } if (fis != null) { fis.close(); } } return true; } /** * Regets the audio header starting from start of file, and write appropriate logging to indicate * potential problem to user. * * @param startByte * @param firstHeaderAfterTag * @return * @throws IOException * @throws InvalidAudioFrameException */ private MP3AudioHeader checkAudioStart(long startByte, MP3AudioHeader firstHeaderAfterTag) throws IOException, InvalidAudioFrameException { MP3AudioHeader headerOne; MP3AudioHeader headerTwo; logger.warning(ErrorMessage.MP3_ID3TAG_LENGTH_INCORRECT.getMsg(file.getPath(), Hex.asHex(startByte), Hex.asHex(firstHeaderAfterTag.getMp3StartByte()))); //because we cant agree on start location we reread the audioheader from the start of the file, at least //this way we cant overwrite the audio although we might overwrite part of the tag if we write this file //back later headerOne = new MP3AudioHeader(file, 0); logger.config("Checking from start:" + headerOne); //Although the id3 tag size appears to be incorrect at least we have found the same location for the start //of audio whether we start searching from start of file or at the end of the alleged of file so no real //problem if (firstHeaderAfterTag.getMp3StartByte() == headerOne.getMp3StartByte()) { logger.config(ErrorMessage.MP3_START_OF_AUDIO_CONFIRMED.getMsg(file.getPath(), Hex.asHex(headerOne.getMp3StartByte()))); return firstHeaderAfterTag; } else { //We get a different value if read from start, 
can't guarantee 100% correct lets do some more checks logger.config((ErrorMessage.MP3_RECALCULATED_POSSIBLE_START_OF_MP3_AUDIO.getMsg(file.getPath(), Hex.asHex(headerOne.getMp3StartByte())))); //Same frame count so probably both audio headers with newAudioHeader being the first one if (firstHeaderAfterTag.getNumberOfFrames() == headerOne.getNumberOfFrames()) { logger.warning((ErrorMessage.MP3_RECALCULATED_START_OF_MP3_AUDIO.getMsg(file.getPath(), Hex.asHex(headerOne.getMp3StartByte())))); return headerOne; } //If the size reported by the tag header is a little short and there is only nulls between the recorded value //and the start of the first audio found then we stick with the original header as more likely that currentHeader //DataInputStream not really a header if (isFilePortionNull((int) startByte, (int) firstHeaderAfterTag.getMp3StartByte())) { return firstHeaderAfterTag; } //Skip to the next header (header 2, counting from start of file) headerTwo = new MP3AudioHeader(file, headerOne.getMp3StartByte() + headerOne.mp3FrameHeader.getFrameLength()); //It matches the header we found when doing the original search from after the ID3Tag therefore it //seems that newAudioHeader was a false match and the original header was correct if (headerTwo.getMp3StartByte() == firstHeaderAfterTag.getMp3StartByte()) { logger.warning((ErrorMessage.MP3_START_OF_AUDIO_CONFIRMED.getMsg(file.getPath(), Hex.asHex(firstHeaderAfterTag.getMp3StartByte())))); return firstHeaderAfterTag; } //It matches the frameCount the header we just found so lends weight to the fact that the audio does indeed start at new header //however it maybe that neither are really headers and just contain the same data being misrepresented as headers. 
if (headerTwo.getNumberOfFrames() == headerOne.getNumberOfFrames()) { logger.warning((ErrorMessage.MP3_RECALCULATED_START_OF_MP3_AUDIO.getMsg(file.getPath(), Hex.asHex(headerOne.getMp3StartByte())))); return headerOne; } ///Doesnt match the frameCount lets go back to the original header else { logger.warning((ErrorMessage.MP3_RECALCULATED_START_OF_MP3_AUDIO.getMsg(file.getPath(), Hex.asHex(firstHeaderAfterTag.getMp3StartByte())))); return firstHeaderAfterTag; } } } /** * Creates a new MP3File dataType and parse the tag from the given file * Object, files can be opened read only if required. * * @param file MP3 file * @param loadOptions decide what tags to load * @param readOnly causes the files to be opened readonly * @throws IOException on any I/O error * @throws TagException on any exception generated by this library. * @throws org.jaudiotagger.audio.exceptions.ReadOnlyFileException * @throws org.jaudiotagger.audio.exceptions.InvalidAudioFrameException */ public MP3File(File file, int loadOptions, boolean readOnly) throws IOException, TagException, ReadOnlyFileException, CannotReadException, InvalidAudioFrameException { RandomAccessFile newFile = null; try { this.file = file; //Check File accessibility newFile = checkFilePermissions(file, readOnly); //Read ID3v2 tag size (if tag exists) to allow audioHeader parsing to skip over tag long tagSizeReportedByHeader = AbstractID3v2Tag.getV2TagSizeIfExists(file); logger.config("TagHeaderSize:" + Hex.asHex(tagSizeReportedByHeader)); audioHeader = new MP3AudioHeader(file, tagSizeReportedByHeader); //If the audio header is not straight after the end of the tag then search from start of file if (tagSizeReportedByHeader != ((MP3AudioHeader) audioHeader).getMp3StartByte()) { logger.config("First header found after tag:" + audioHeader); audioHeader = checkAudioStart(tagSizeReportedByHeader, (MP3AudioHeader) audioHeader); } //Read v1 tags (if any) readV1Tag(file, newFile, loadOptions); //Read v2 tags (if any) readV2Tag(file, 
loadOptions, (int) ((MP3AudioHeader) audioHeader).getMp3StartByte()); //If we have a v2 tag use that, if we do not but have v1 tag use that //otherwise use nothing //TODO:if have both should we merge //rather than just returning specific ID3v22 tag, would it be better to return v24 version ? if (this.getID3v2Tag() != null) { tag = this.getID3v2Tag(); } else if (id3v1tag != null) { tag = id3v1tag; } } finally { if (newFile != null) { newFile.close(); } } } /** * Used by tags when writing to calculate the location of the music file * * @param file * @return the location within the file that the audio starts * @throws java.io.IOException * @throws org.jaudiotagger.audio.exceptions.InvalidAudioFrameException */ public long getMP3StartByte(File file) throws InvalidAudioFrameException, IOException { try { //Read ID3v2 tag size (if tag exists) to allow audio header parsing to skip over tag long startByte = AbstractID3v2Tag.getV2TagSizeIfExists(file); MP3AudioHeader audioHeader = new MP3AudioHeader(file, startByte); if (startByte != audioHeader.getMp3StartByte()) { logger.config("First header found after tag:" + audioHeader); audioHeader = checkAudioStart(startByte, audioHeader); } return audioHeader.getMp3StartByte(); } catch (InvalidAudioFrameException iafe) { throw iafe; } catch (IOException ioe) { throw ioe; } } /** * Extracts the raw ID3v2 tag data into a file. * <p> * This provides access to the raw data before manipulation, the data is written from the start of the file * to the start of the Audio Data. This is primarily useful for manipulating corrupted tags that are not * (fully) loaded using the standard methods. 
* * @param outputFile to write the data to * @return * @throws TagNotFoundException * @throws IOException */ public File extractID3v2TagDataIntoFile(File outputFile) throws TagNotFoundException, IOException { int startByte = (int) ((MP3AudioHeader) audioHeader).getMp3StartByte(); if (startByte >= 0) { //Read byte into buffer FileInputStream fis = new FileInputStream(file); FileChannel fc = fis.getChannel(); ByteBuffer bb = ByteBuffer.allocate(startByte); fc.read(bb); //Write bytes to outputFile FileOutputStream out = new FileOutputStream(outputFile); out.write(bb.array()); out.close(); fc.close(); fis.close(); return outputFile; } throw new TagNotFoundException("There is no ID3v2Tag data in this file"); } /** * Return audio header * * @return */ public MP3AudioHeader getMP3AudioHeader() { return (MP3AudioHeader) getAudioHeader(); } /** * Returns true if this datatype contains an <code>Id3v1</code> tag * * @return true if this datatype contains an <code>Id3v1</code> tag */ public boolean hasID3v1Tag() { return (id3v1tag != null); } /** * Returns true if this datatype contains an <code>Id3v2</code> tag * * @return true if this datatype contains an <code>Id3v2</code> tag */ public boolean hasID3v2Tag() { return (id3v2tag != null); } /** * Returns true if this datatype contains a <code>Lyrics3</code> tag * TODO disabled until Lyrics3 fixed * @return true if this datatype contains a <code>Lyrics3</code> tag */ /* public boolean hasLyrics3Tag() { return (lyrics3tag != null); } */ /** * Creates a new MP3File datatype and parse the tag from the given file * Object. * * @param file MP3 file * @throws IOException on any I/O error * @throws TagException on any exception generated by this library. 
* @throws org.jaudiotagger.audio.exceptions.ReadOnlyFileException * @throws org.jaudiotagger.audio.exceptions.InvalidAudioFrameException */ public MP3File(File file) throws IOException, TagException, ReadOnlyFileException, CannotReadException, InvalidAudioFrameException { this(file, LOAD_ALL); } /** * Sets the ID3v1(_1)tag to the tag provided as an argument. * * @param id3v1tag */ public void setID3v1Tag(ID3v1Tag id3v1tag) { logger.config("setting tagv1:v1 tag"); this.id3v1tag = id3v1tag; } public void setID3v1Tag(Tag id3v1tag) { logger.config("setting tagv1:v1 tag"); this.id3v1tag = (ID3v1Tag) id3v1tag; } /** * Sets the <code>ID3v1</code> tag for this dataType. A new * <code>ID3v1_1</code> dataType is created from the argument and then used * here. * * @param mp3tag Any MP3Tag dataType can be used and will be converted into a * new ID3v1_1 dataType. */ public void setID3v1Tag(AbstractTag mp3tag) { logger.config("setting tagv1:abstract"); id3v1tag = new ID3v11Tag(mp3tag); } /** * Returns the <code>ID3v1</code> tag for this dataType. * * @return the <code>ID3v1</code> tag for this dataType */ public ID3v1Tag getID3v1Tag() { return id3v1tag; } /** * Calculates hash with given algorithm. Buffer size is 32768 byte. * Hash is calculated EXCLUDING meta-data, like id3v1 or id3v2 * * @param algorithm options MD5,SHA-1,SHA-256 * @return hash value in byte * @throws IOException * @throws InvalidAudioFrameException * @throws NoSuchAlgorithmException */ public byte[] getHash(String algorithm) throws NoSuchAlgorithmException, InvalidAudioFrameException, IOException { return getHash(algorithm, 32768); } /** * Calculates hash with given buffer size. 
* Hash is calculated EXCLUDING meta-data, like id3v1 or id3v2 * * @param buffer * @return byte[] hash value in byte * @throws IOException * @throws InvalidAudioFrameException * @throws NoSuchAlgorithmException */ public byte[] getHash(int buffer) throws NoSuchAlgorithmException, InvalidAudioFrameException, IOException { return getHash("MD5", buffer); } /** * Calculates hash with algorithm "MD5". Buffer size is 32768 byte. * Hash is calculated EXCLUDING meta-data, like id3v1 or id3v2 * * @return byte[] hash value. * @throws IOException * @throws InvalidAudioFrameException * @throws NoSuchAlgorithmException */ public byte[] getHash() throws NoSuchAlgorithmException, InvalidAudioFrameException, IOException { return getHash("MD5", 32768); } /** * Calculates hash with algorithm "MD5", "SHA-1" or SHA-256". * Hash is calculated EXCLUDING meta-data, like id3v1 or id3v2 * * @return byte[] hash value in byte * @throws IOException * @throws InvalidAudioFrameException * @throws NoSuchAlgorithmException */ public byte[] getHash(String algorithm, int bufferSize) throws InvalidAudioFrameException, IOException, NoSuchAlgorithmException { File mp3File = getFile(); long startByte = getMP3StartByte(mp3File); int id3v1TagSize = 0; if (hasID3v1Tag()) { ID3v1Tag id1tag = getID3v1Tag(); id3v1TagSize = id1tag.getSize(); } InputStream inStream = new FileInputStream(mp3File); byte[] buffer = new byte[bufferSize]; MessageDigest digest = MessageDigest.getInstance(algorithm); inStream.skip(startByte); int read; long totalSize = mp3File.length() - startByte - id3v1TagSize; int pointer = buffer.length; while (pointer <= totalSize) { read = inStream.read(buffer); digest.update(buffer, 0, read); pointer += buffer.length; } read = inStream.read(buffer, 0, (int) totalSize - pointer + buffer.length); digest.update(buffer, 0, read); byte[] hash = digest.digest(); return hash; } /** * Sets the <code>ID3v2</code> tag for this dataType. 
A new * <code>ID3v2_4</code> dataType is created from the argument and then used * here. * * @param mp3tag Any MP3Tag dataType can be used and will be converted into a * new ID3v2_4 dataType. */ public void setID3v2Tag(AbstractTag mp3tag) { id3v2tag = new ID3v24Tag(mp3tag); } /** * Sets the v2 tag to the v2 tag provided as an argument. * Also store a v24 version of tag as v24 is the interface to be used * when talking with client applications. * * @param id3v2tag */ public void setID3v2Tag(AbstractID3v2Tag id3v2tag) { this.id3v2tag = id3v2tag; if (id3v2tag instanceof ID3v24Tag) { this.id3v2Asv24tag = (ID3v24Tag) this.id3v2tag; } else { this.id3v2Asv24tag = new ID3v24Tag(id3v2tag); } } /** * Set v2 tag ,don't need to set v24 tag because saving * * @param id3v2tag */ //TODO temp its rather messy public void setID3v2TagOnly(AbstractID3v2Tag id3v2tag) { this.id3v2tag = id3v2tag; this.id3v2Asv24tag = null; } /** * Returns the <code>ID3v2</code> tag for this datatype. * * @return the <code>ID3v2</code> tag for this datatype */ public AbstractID3v2Tag getID3v2Tag() { return id3v2tag; } /** * @return a representation of tag as v24 */ public ID3v24Tag getID3v2TagAsv24() { return id3v2Asv24tag; } /** * Sets the <code>Lyrics3</code> tag for this dataType. A new * <code>Lyrics3v2</code> dataType is created from the argument and then * * used here. * * @param mp3tag Any MP3Tag dataType can be used and will be converted into a * new Lyrics3v2 dataType. */ /* public void setLyrics3Tag(AbstractTag mp3tag) { lyrics3tag = new Lyrics3v2(mp3tag); } */ /** * * * @param lyrics3tag */ /* public void setLyrics3Tag(AbstractLyrics3 lyrics3tag) { this.lyrics3tag = lyrics3tag; } */ /** * Returns the <code>ID3v1</code> tag for this datatype. 
* * @return the <code>ID3v1</code> tag for this datatype */ /* public AbstractLyrics3 getLyrics3Tag() { return lyrics3tag; } */ /** * Remove tag from file * * @param mp3tag * @throws FileNotFoundException * @throws IOException */ public void delete(AbstractTag mp3tag) throws FileNotFoundException, IOException { RandomAccessFile raf = new RandomAccessFile(this.file, "rw"); mp3tag.delete(raf); raf.close(); if (mp3tag instanceof ID3v1Tag) { id3v1tag = null; } if (mp3tag instanceof AbstractID3v2Tag) { id3v2tag = null; } } /** * Saves the tags in this dataType to the file referred to by this dataType. * * @throws IOException on any I/O error * @throws TagException on any exception generated by this library. */ public void save() throws IOException, TagException { save(this.file); } /** * Overridden for compatibility with merged code * * @throws NoWritePermissionsException if the file could not be written to due to file permissions * @throws CannotWriteException */ public void commit() throws CannotWriteException { try { save(); } catch (UnableToModifyFileException umfe) { throw new NoWritePermissionsException(umfe); } catch (IOException ioe) { throw new CannotWriteException(ioe); } catch (TagException te) { throw new CannotWriteException(te); } } /** * Check can write to file * * @param file * @throws IOException */ public void precheck(File file) throws IOException { if (!file.exists()) { logger.severe(ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE_FILE_NOT_FOUND.getMsg(file.getName())); throw new IOException(ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE_FILE_NOT_FOUND.getMsg(file.getName())); } if (TagOptionSingleton.getInstance().isCheckIsWritable() && !file.canWrite()) { logger.severe(ErrorMessage.GENERAL_WRITE_FAILED.getMsg(file.getName())); throw new IOException(ErrorMessage.GENERAL_WRITE_FAILED.getMsg(file.getName())); } if (file.length() <= MINIMUM_FILESIZE) { logger.severe(ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE_FILE_IS_TOO_SMALL.getMsg(file.getName())); throw new 
IOException(ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE_FILE_IS_TOO_SMALL.getMsg(file.getName())); } } /** * Saves the tags in this dataType to the file argument. It will be saved as * TagConstants.MP3_FILE_SAVE_WRITE * * @param fileToSave file to save the this dataTypes tags to * @throws FileNotFoundException if unable to find file * @throws IOException on any I/O error */ public void save(File fileToSave) throws IOException { //Ensure we are dealing with absolute filepaths not relative ones File file = fileToSave.getAbsoluteFile(); logger.config("Saving : " + file.getPath()); //Checks before starting write precheck(file); RandomAccessFile rfile = null; try { //ID3v2 Tag if (TagOptionSingleton.getInstance().isId3v2Save()) { if (id3v2tag == null) { rfile = new RandomAccessFile(file, "rw"); (new ID3v24Tag()).delete(rfile); (new ID3v23Tag()).delete(rfile); (new ID3v22Tag()).delete(rfile); logger.config("Deleting ID3v2 tag:" + file.getName()); rfile.close(); } else { logger.config("Writing ID3v2 tag:" + file.getName()); final MP3AudioHeader mp3AudioHeader = (MP3AudioHeader) this.getAudioHeader(); final long mp3StartByte = mp3AudioHeader.getMp3StartByte(); final long newMp3StartByte = id3v2tag.write(file, mp3StartByte); if (mp3StartByte != newMp3StartByte) { logger.config("New mp3 start byte: " + newMp3StartByte); mp3AudioHeader.setMp3StartByte(newMp3StartByte); } } } rfile = new RandomAccessFile(file, "rw"); //Lyrics 3 Tag if (TagOptionSingleton.getInstance().isLyrics3Save()) { if (lyrics3tag != null) { lyrics3tag.write(rfile); } } //ID3v1 tag if (TagOptionSingleton.getInstance().isId3v1Save()) { logger.config("Processing ID3v1"); if (id3v1tag == null) { logger.config("Deleting ID3v1"); (new ID3v1Tag()).delete(rfile); } else { logger.config("Saving ID3v1"); id3v1tag.write(rfile); } } } catch (FileNotFoundException ex) { logger.log(Level.SEVERE, ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE_FILE_NOT_FOUND.getMsg(file.getName()), ex); throw ex; } catch (IOException iex) { 
logger.log(Level.SEVERE, ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE.getMsg(file.getName(), iex.getMessage()), iex); throw iex; } catch (RuntimeException re) { logger.log(Level.SEVERE, ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE.getMsg(file.getName(), re.getMessage()), re); throw re; } finally { if (rfile != null) { rfile.close(); } } } /** * Displays MP3File Structure */ public String displayStructureAsXML() { createXMLStructureFormatter(); tagFormatter.openHeadingElement("file", this.getFile().getAbsolutePath()); if (this.getID3v1Tag() != null) { this.getID3v1Tag().createStructure(); } if (this.getID3v2Tag() != null) { this.getID3v2Tag().createStructure(); } tagFormatter.closeHeadingElement("file"); return tagFormatter.toString(); } /** * Displays MP3File Structure */ public String displayStructureAsPlainText() { createPlainTextStructureFormatter(); tagFormatter.openHeadingElement("file", this.getFile().getAbsolutePath()); if (this.getID3v1Tag() != null) { this.getID3v1Tag().createStructure(); } if (this.getID3v2Tag() != null) { this.getID3v2Tag().createStructure(); } tagFormatter.closeHeadingElement("file"); return tagFormatter.toString(); } private static void createXMLStructureFormatter() { tagFormatter = new XMLTagDisplayFormatter(); } private static void createPlainTextStructureFormatter() { tagFormatter = new PlainTextTagDisplayFormatter(); } public static AbstractTagDisplayFormatter getStructureFormatter() { return tagFormatter; } /** * Set the Tag * <p> * If the parameter tag is a v1tag then the v1 tag is set if v2tag then the v2tag. 
* * @param tag */ public void setTag(Tag tag) { this.tag = tag; if (tag instanceof ID3v1Tag) { setID3v1Tag((ID3v1Tag) tag); } else { setID3v2Tag((AbstractID3v2Tag) tag); } } /** * Create Default Tag * * @return */ @Override public Tag createDefaultTag() { if (TagOptionSingleton.getInstance().getID3V2Version() == ID3V2Version.ID3_V24) { return new ID3v24Tag(); } else if (TagOptionSingleton.getInstance().getID3V2Version() == ID3V2Version.ID3_V23) { return new ID3v23Tag(); } else if (TagOptionSingleton.getInstance().getID3V2Version() == ID3V2Version.ID3_V22) { return new ID3v22Tag(); } //Default in case not set somehow return new ID3v24Tag(); } /** * Overridden to only consider ID3v2 Tag * * @return */ @Override public Tag getTagOrCreateDefault() { Tag tag = getID3v2Tag(); if (tag == null) { return createDefaultTag(); } return tag; } /** * Get the ID3v2 tag and convert to preferred version or if the file doesn't have one at all * create a default tag of preferred version and set it. The file may already contain a ID3v1 tag but because * this is not terribly useful the v1tag is not considered for this problem. * * @return */ @Override public Tag getTagAndConvertOrCreateAndSetDefault() { Tag tag = getTagOrCreateDefault(); Tag convertedTag = convertID3Tag((AbstractID3v2Tag) tag, TagOptionSingleton.getInstance().getID3V2Version()); if (convertedTag != null) { setTag(convertedTag); } else { setTag(tag); } return getTag(); } }
/*
 * Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package com.sun.tools.javac.util;

/**
 * Access to the compiler's name table. Standard names are defined,
 * as well as methods to create new names.
 *
 * <p>All frequently-used names (operators, reserved identifiers, class-file
 * attribute names, well-known classes and members) are interned once in the
 * constructor and exposed as {@code public final} fields, so later lookups
 * are identity-based rather than string-based.
 *
 * <p><b>This is NOT part of any supported API.
 * If you write code that depends on this, you do so at your own risk.
 * This code and its internal interfaces are subject to change or
 * deletion without notice.</b>
 */
public class Names {

    /** Context key under which the per-compilation {@code Names} instance is registered. */
    public static final Context.Key<Names> namesKey = new Context.Key<Names>();

    /**
     * Returns the {@code Names} instance for the given context, creating and
     * registering one on first use (context-scoped singleton).
     */
    public static Names instance(Context context) {
        Names instance = context.get(namesKey);
        if (instance == null) {
            instance = new Names(context);
            context.put(namesKey, instance);
        }
        return instance;
    }

    // Operators and punctuation.
    public final Name slash;
    public final Name hyphen;
    public final Name T;
    public final Name slashequals;
    public final Name deprecated;

    // Special method names used in class files: instance and class initializers.
    public final Name init;
    public final Name clinit;

    // Placeholder names used internally by the compiler for error recovery
    // and inference ("<error>", "<any>", etc. are not legal Java identifiers).
    public final Name error;
    public final Name any;
    public final Name empty;
    public final Name one;
    public final Name period;
    public final Name comma;
    public final Name semicolon;
    public final Name asterisk;

    // Reserved words that cannot be used as Java field names directly,
    // hence the leading underscore on the field (the Name text has none).
    public final Name _this;
    public final Name _super;
    public final Name _default;
    public final Name _class;

    // Well-known packages and classes.
    public final Name java_lang;
    public final Name java_lang_Object;
    public final Name java_lang_Class;
    public final Name java_lang_Cloneable;
    public final Name java_io_Serializable;
    public final Name serialVersionUID;
    public final Name java_lang_Enum;
    public final Name java_lang_invoke_MethodHandle;
    public final Name package_info;

    // Class-file attribute names (JVMS section 4.7).
    public final Name ConstantValue;
    public final Name LineNumberTable;
    public final Name LocalVariableTable;
    public final Name LocalVariableTypeTable;
    public final Name CharacterRangeTable;
    public final Name StackMap;
    public final Name StackMapTable;
    public final Name SourceID;
    public final Name CompilationID;
    public final Name Code;
    public final Name Exceptions;
    public final Name SourceFile;
    public final Name InnerClasses;
    public final Name Synthetic;
    public final Name Bridge;
    public final Name Deprecated;
    public final Name Enum;
    public final Name _name;
    public final Name Signature;
    public final Name Varargs;
    public final Name Annotation;
    public final Name RuntimeVisibleAnnotations;
    public final Name RuntimeInvisibleAnnotations;
    public final Name RuntimeVisibleTypeAnnotations;
    public final Name RuntimeInvisibleTypeAnnotations;
    public final Name RuntimeVisibleParameterAnnotations;
    public final Name RuntimeInvisibleParameterAnnotations;
    public final Name Value;
    public final Name EnclosingMethod;

    // Well-known method and member names referenced during lowering/desugaring.
    public final Name desiredAssertionStatus;
    public final Name append;
    public final Name family;
    public final Name forName;
    public final Name toString;
    public final Name length;
    public final Name valueOf;
    public final Name value;
    public final Name getMessage;
    public final Name getClass;

    // Names of java.lang.annotation.ElementType / RetentionPolicy constants.
    public final Name TYPE;
    public final Name TYPE_USE;
    public final Name TYPE_PARAMETER;
    public final Name FIELD;
    public final Name METHOD;
    public final Name PARAMETER;
    public final Name CONSTRUCTOR;
    public final Name LOCAL_VARIABLE;
    public final Name ANNOTATION_TYPE;
    public final Name PACKAGE;
    public final Name SOURCE;
    public final Name CLASS;
    public final Name RUNTIME;

    public final Name Array;
    public final Name Method;
    public final Name Bound;
    public final Name clone;
    public final Name getComponentType;
    public final Name getClassLoader;
    public final Name initCause;

    // Members used when desugaring enums and enhanced-for loops.
    public final Name values;
    public final Name iterator;
    public final Name hasNext;
    public final Name next;
    public final Name AnnotationDefault;
    public final Name ordinal;
    public final Name equals;
    public final Name hashCode;
    public final Name compareTo;
    public final Name getDeclaringClass;
    public final Name ex;
    public final Name finalize;

    // Members used when desugaring try-with-resources.
    public final Name java_lang_AutoCloseable;
    public final Name close;
    public final Name addSuppressed;

    /** The underlying name table that performs the actual interning. */
    public final Name.Table table;

    /**
     * Interns every standard name. The backing table implementation
     * (shared vs. unshared) is chosen from the compiler options.
     */
    public Names(Context context) {
        Options options = Options.instance(context);
        table = createTable(options);

        slash = fromString("/");
        hyphen = fromString("-");
        T = fromString("T");
        slashequals = fromString("/=");
        deprecated = fromString("deprecated");

        init = fromString("<init>");
        clinit = fromString("<clinit>");
        error = fromString("<error>");
        any = fromString("<any>");
        empty = fromString("");
        one = fromString("1");
        period = fromString(".");
        comma = fromString(",");
        semicolon = fromString(";");
        asterisk = fromString("*");
        _this = fromString("this");
        _super = fromString("super");
        _default = fromString("default");

        _class = fromString("class");
        java_lang = fromString("java.lang");
        java_lang_Object = fromString("java.lang.Object");
        java_lang_Class = fromString("java.lang.Class");
        java_lang_Cloneable = fromString("java.lang.Cloneable");
        java_io_Serializable = fromString("java.io.Serializable");
        java_lang_Enum = fromString("java.lang.Enum");
        java_lang_invoke_MethodHandle = fromString("java.lang.invoke.MethodHandle");
        package_info = fromString("package-info");
        serialVersionUID = fromString("serialVersionUID");

        ConstantValue = fromString("ConstantValue");
        LineNumberTable = fromString("LineNumberTable");
        LocalVariableTable = fromString("LocalVariableTable");
        LocalVariableTypeTable = fromString("LocalVariableTypeTable");
        CharacterRangeTable = fromString("CharacterRangeTable");
        StackMap = fromString("StackMap");
        StackMapTable = fromString("StackMapTable");
        SourceID = fromString("SourceID");
        CompilationID = fromString("CompilationID");
        Code = fromString("Code");
        Exceptions = fromString("Exceptions");
        SourceFile = fromString("SourceFile");
        InnerClasses = fromString("InnerClasses");
        Synthetic = fromString("Synthetic");
        Bridge = fromString("Bridge");
        Deprecated = fromString("Deprecated");
        Enum = fromString("Enum");
        _name = fromString("name");
        Signature = fromString("Signature");
        Varargs = fromString("Varargs");
        Annotation = fromString("Annotation");
        RuntimeVisibleAnnotations = fromString("RuntimeVisibleAnnotations");
        RuntimeInvisibleAnnotations = fromString("RuntimeInvisibleAnnotations");
        RuntimeVisibleTypeAnnotations = fromString("RuntimeVisibleTypeAnnotations");
        RuntimeInvisibleTypeAnnotations = fromString("RuntimeInvisibleTypeAnnotations");
        RuntimeVisibleParameterAnnotations = fromString("RuntimeVisibleParameterAnnotations");
        RuntimeInvisibleParameterAnnotations = fromString("RuntimeInvisibleParameterAnnotations");
        Value = fromString("Value");
        EnclosingMethod = fromString("EnclosingMethod");

        desiredAssertionStatus = fromString("desiredAssertionStatus");

        append = fromString("append");
        family = fromString("family");
        forName = fromString("forName");
        toString = fromString("toString");
        length = fromString("length");
        valueOf = fromString("valueOf");
        value = fromString("value");
        getMessage = fromString("getMessage");
        getClass = fromString("getClass");

        TYPE = fromString("TYPE");
        TYPE_USE = fromString("TYPE_USE");
        TYPE_PARAMETER = fromString("TYPE_PARAMETER");
        FIELD = fromString("FIELD");
        METHOD = fromString("METHOD");
        PARAMETER = fromString("PARAMETER");
        CONSTRUCTOR = fromString("CONSTRUCTOR");
        LOCAL_VARIABLE = fromString("LOCAL_VARIABLE");
        ANNOTATION_TYPE = fromString("ANNOTATION_TYPE");
        PACKAGE = fromString("PACKAGE");
        SOURCE = fromString("SOURCE");
        CLASS = fromString("CLASS");
        RUNTIME = fromString("RUNTIME");

        Array = fromString("Array");
        Method = fromString("Method");
        Bound = fromString("Bound");
        clone = fromString("clone");
        getComponentType = fromString("getComponentType");
        getClassLoader = fromString("getClassLoader");
        initCause = fromString("initCause");
        values = fromString("values");
        iterator = fromString("iterator");
        hasNext = fromString("hasNext");
        next = fromString("next");
        AnnotationDefault = fromString("AnnotationDefault");
        ordinal = fromString("ordinal");
        equals = fromString("equals");
        hashCode = fromString("hashCode");
        compareTo = fromString("compareTo");
        getDeclaringClass = fromString("getDeclaringClass");
        ex = fromString("ex");
        finalize = fromString("finalize");

        java_lang_AutoCloseable = fromString("java.lang.AutoCloseable");
        close = fromString("close");
        addSuppressed = fromString("addSuppressed");
    }

    /**
     * Chooses the name-table implementation. {@code -XDuseUnsharedTable}
     * selects {@link UnsharedNameTable} (one byte array per name);
     * otherwise the default {@link SharedNameTable} (one shared, growable
     * byte array for all names) is used.
     */
    protected Name.Table createTable(Options options) {
        boolean useUnsharedTable = options.isSet("useUnsharedTable");
        if (useUnsharedTable)
            return new UnsharedNameTable(this);
        else
            return new SharedNameTable(this);
    }

    /** Releases the table's resources (e.g. returns shared buffers to a free list). */
    public void dispose() {
        table.dispose();
    }

    // The remaining methods simply delegate interning to the backing table.

    /** Interns the {@code len} characters of {@code cs} starting at {@code start}. */
    public Name fromChars(char[] cs, int start, int len) {
        return table.fromChars(cs, start, len);
    }

    /** Interns the given string. */
    public Name fromString(String s) {
        return table.fromString(s);
    }

    /** Interns the given modified-UTF-8 bytes. */
    public Name fromUtf(byte[] cs) {
        return table.fromUtf(cs);
    }

    /** Interns {@code len} modified-UTF-8 bytes of {@code cs} starting at {@code start}. */
    public Name fromUtf(byte[] cs, int start, int len) {
        return table.fromUtf(cs, start, len);
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.util;

import com.intellij.ide.IdeBundle;
import com.intellij.ide.commander.CommanderPanel;
import com.intellij.ide.commander.ProjectListBuilder;
import com.intellij.ide.structureView.StructureViewBuilder;
import com.intellij.ide.structureView.StructureViewModel;
import com.intellij.ide.structureView.StructureViewTreeElement;
import com.intellij.ide.structureView.TreeBasedStructureViewBuilder;
import com.intellij.ide.structureView.newStructureView.TreeModelWrapper;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.ide.util.treeView.smartTree.*;
import com.intellij.lang.LanguageStructureViewBuilder;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.fileEditor.ex.IdeDocumentHistory;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ex.IdeFocusTraversalPolicy;
import com.intellij.pom.Navigatable;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.MinusculeMatcher;
import com.intellij.psi.codeStyle.NameUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.*;
import com.intellij.ui.docking.DockManager;
import com.intellij.ui.speedSearch.SpeedSearchSupply;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.List;

/**
 * Modal "File Structure" dialog: shows a commander-style list of the
 * structure-view elements of the file open in {@code myEditor}, with
 * speed-search, optional narrowing of the list while typing, and one
 * checkbox per structure filter / node provider.
 */
public class FileStructureDialog extends DialogWrapper {
  private final Editor myEditor;
  // Fallback navigation target when the selected element is not itself Navigatable.
  private final Navigatable myNavigatable;
  private final Project myProject;
  private MyCommanderPanel myCommanderPanel;
  // Model actually shown (possibly wrapped to apply sorters/filters).
  private final StructureViewModel myTreeModel;
  // Unwrapped model supplied by the caller.
  private final StructureViewModel myBaseTreeModel;
  private SmartTreeStructure myTreeStructure;
  // Non-null only when applySortAndFilter was requested; tracks which tree actions are active.
  private final TreeStructureActionsOwner myTreeActionsOwner;

  // Persisted key for the "narrow down the list on typing" checkbox state.
  @NonNls private static final String ourPropertyKey = "FileStructure.narrowDown";
  // When true, getChildElements() filters children against the speed-search prefix.
  private boolean myShouldNarrowDown = false;

  /**
   * @param structureViewModel model describing the file's structure
   * @param editor             editor whose caret position selects the initial element
   * @param navigatable        fallback navigation target for non-navigatable selections
   * @param auxDisposable      disposed together with this dialog
   * @param applySortAndFilter whether to honor the model's sorters/filters via a TreeModelWrapper
   */
  public FileStructureDialog(@NotNull StructureViewModel structureViewModel,
                             @NotNull Editor editor,
                             @NotNull Project project,
                             Navigatable navigatable,
                             @NotNull final Disposable auxDisposable,
                             final boolean applySortAndFilter) {
    super(project, true);
    myProject = project;
    myEditor = editor;
    myNavigatable = navigatable;
    myBaseTreeModel = structureViewModel;
    if (applySortAndFilter) {
      myTreeActionsOwner = new TreeStructureActionsOwner(myBaseTreeModel);
      myTreeModel = new TreeModelWrapper(structureViewModel, myTreeActionsOwner);
    }
    else {
      myTreeActionsOwner = null;
      myTreeModel = structureViewModel;
    }

    PsiFile psiFile = getPsiFile(project);

    final PsiElement psiElement = getCurrentElement(psiFile);

    //myDialog.setUndecorated(true);

    init();

    // Position the list on the element at the caret: containers are entered,
    // leaf elements are merely selected.
    if (psiElement != null) {
      if (structureViewModel.shouldEnterElement(psiElement)) {
        myCommanderPanel.getBuilder().enterElement(psiElement, PsiUtilCore.getVirtualFile(psiElement));
      }
      else {
        myCommanderPanel.getBuilder().selectElement(psiElement, PsiUtilCore.getVirtualFile(psiElement));
      }
    }

    Disposer.register(myDisposable, auxDisposable);
  }

  /** Returns the PSI file backing the editor's document. Overridable for tests. */
  protected PsiFile getPsiFile(@NotNull Project project) {
    return PsiDocumentManager.getInstance(project).getPsiFile(myEditor.getDocument());
  }

  // No border: the commander panel supplies its own (see createCenterPanel).
  @Override
  @Nullable
  protected Border createContentPaneBorder() {
    return null;
  }

  @Override
  public void dispose() {
    myCommanderPanel.dispose();
    super.dispose();
  }

  @Override
  protected String getDimensionServiceKey() {
    return DockManager.getInstance(myProject).getDimensionKeyForFocus("#com.intellij.ide.util.FileStructureDialog");
  }

  @Override
  public JComponent getPreferredFocusedComponent() {
    return IdeFocusTraversalPolicy.getPreferredFocusedComponent(myCommanderPanel);
  }

  /**
   * Returns the PSI element at the editor caret according to the tree model,
   * or null when there is no file or no PSI element under the caret.
   * Commits pending documents first so the PSI is up to date.
   */
  @Nullable
  protected PsiElement getCurrentElement(@Nullable final PsiFile psiFile) {
    if (psiFile == null) return null;

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    Object elementAtCursor = myTreeModel.getCurrentEditorElement();
    if (elementAtCursor instanceof PsiElement) {
      return (PsiElement)elementAtCursor;
    }

    return null;
  }

  @Override
  protected JComponent createCenterPanel() {
    myCommanderPanel = new MyCommanderPanel(myProject);
    myTreeStructure = new MyStructureTreeStructure();

    // Collect the filters/node providers that should get a checkbox; enable
    // all filters by default so the initial view matches the structure view.
    List<FileStructureFilter> fileStructureFilters = new ArrayList<FileStructureFilter>();
    List<FileStructureNodeProvider> fileStructureNodeProviders = new ArrayList<FileStructureNodeProvider>();
    if (myTreeActionsOwner != null) {
      for(Filter filter: myBaseTreeModel.getFilters()) {
        if (filter instanceof FileStructureFilter) {
          final FileStructureFilter fsFilter = (FileStructureFilter)filter;
          myTreeActionsOwner.setActionIncluded(fsFilter, true);
          fileStructureFilters.add(fsFilter);
        }
      }
      if (myBaseTreeModel instanceof ProvidingTreeModel) {
        for (NodeProvider provider : ((ProvidingTreeModel)myBaseTreeModel).getNodeProviders()) {
          if (provider instanceof FileStructureNodeProvider) {
            fileStructureNodeProviders.add((FileStructureNodeProvider)provider);
          }
        }
      }
    }

    PsiFile psiFile = getPsiFile(myProject);
    boolean showRoot = isShowRoot(psiFile);
    ProjectListBuilder projectListBuilder = new ProjectListBuilder(myProject, myCommanderPanel, myTreeStructure, null, showRoot) {
      @Override
      protected boolean shouldEnterSingleTopLevelElement(Object rootChild) {
        Object element = ((StructureViewTreeElement)((AbstractTreeNode)rootChild).getValue()).getValue();
        return myBaseTreeModel.shouldEnterElement(element);
      }

      @Override
      protected boolean nodeIsAcceptableForElement(AbstractTreeNode node, Object element) {
        return Comparing.equal(((StructureViewTreeElement)node.getValue()).getValue(), element);
      }

      @Override
      protected void refreshSelection() {
        myCommanderPanel.scrollSelectionInView();
        if (myShouldNarrowDown) {
          myCommanderPanel.updateSpeedSearch();
        }
      }

      @Override
      protected List<AbstractTreeNode> getAllAcceptableNodes(final Object[] childElements, VirtualFile file) {
        ArrayList<AbstractTreeNode> result = new ArrayList<AbstractTreeNode>();
        for (Object childElement : childElements) {
          result.add((AbstractTreeNode)childElement);
        }
        return result;
      }
    };
    myCommanderPanel.setBuilder(projectListBuilder);
    myCommanderPanel.setTitlePanelVisible(false);

    // Bind the IDE's "Edit Source" shortcut to navigate-and-close; the
    // shortcut is unregistered after a successful navigation because the
    // dialog is closing.
    new AnAction() {
      @Override
      public void actionPerformed(AnActionEvent e) {
        final boolean succeeded = myCommanderPanel.navigateSelectedElement();
        if (succeeded) {
          unregisterCustomShortcutSet(myCommanderPanel);
        }
      }
    }.registerCustomShortcutSet(ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE).getShortcutSet(), myCommanderPanel);

    myCommanderPanel.setPreferredSize(JBUI.size(400, 500));

    JPanel panel = new JPanel(new BorderLayout());
    JPanel comboPanel = new JPanel(new GridLayout(0, 2, 0, 0));

    addNarrowDownCheckbox(comboPanel);

    for(FileStructureFilter filter: fileStructureFilters) {
      addCheckbox(comboPanel, filter);
    }

    for (FileStructureNodeProvider provider : fileStructureNodeProviders) {
      addCheckbox(comboPanel, provider);
    }

    myCommanderPanel.setBorder(IdeBorderFactory.createBorder(SideBorder.TOP));
    panel.add(comboPanel, BorderLayout.NORTH);
    panel.add(myCommanderPanel, BorderLayout.CENTER);
    //new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1, 1, GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));
    return panel;
  }

  /** Whether the structure's root node should be shown, per the language's view builder. */
  protected boolean isShowRoot(final PsiFile psiFile) {
    StructureViewBuilder viewBuilder = LanguageStructureViewBuilder.INSTANCE.getStructureViewBuilder(psiFile);
    return viewBuilder instanceof TreeBasedStructureViewBuilder && ((TreeBasedStructureViewBuilder)viewBuilder).isRootNodeShown();
  }

  /**
   * Adds the "narrow down the list on typing" checkbox. Its state is
   * persisted under {@link #ourPropertyKey} and toggling it refreshes
   * the list.
   */
  private void addNarrowDownCheckbox(final JPanel panel) {
    final JCheckBox checkBox = new JCheckBox(IdeBundle.message("checkbox.narrow.down.the.list.on.typing"));
    checkBox.setSelected(PropertiesComponent.getInstance().isTrueValue(ourPropertyKey));
    checkBox.addChangeListener(new ChangeListener() {
      @Override
      public void stateChanged(ChangeEvent e) {
        myShouldNarrowDown = checkBox.isSelected();
        PropertiesComponent.getInstance().setValue(ourPropertyKey, Boolean.toString(myShouldNarrowDown));

        ProjectListBuilder builder = (ProjectListBuilder)myCommanderPanel.getBuilder();
        if (builder == null) {
          return;
        }
        builder.addUpdateRequest();
      }
    });

    checkBox.setFocusable(false);
    panel.add(checkBox);
    //,new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 5, 0, 5), 0, 0));
  }

  /**
   * Adds one checkbox for a filter or node provider. Toggling it flips the
   * action's inclusion state (inverted for filters: checked means "show",
   * i.e. filter OFF), rebuilds the tree, and tries to keep the current
   * parent element in view. The action's keyboard shortcut, if any, is
   * shown in the label and bound to click the checkbox.
   */
  private void addCheckbox(final JPanel panel, final TreeAction action) {
    String text = action instanceof FileStructureFilter ? ((FileStructureFilter)action).getCheckBoxText() :
                  action instanceof FileStructureNodeProvider ? ((FileStructureNodeProvider)action).getCheckBoxText() : null;

    if (text == null) return;

    Shortcut[] shortcuts = FileStructurePopup.extractShortcutFor(action);

    final JCheckBox chkFilter = new JCheckBox();
    chkFilter.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        ProjectListBuilder builder = (ProjectListBuilder)myCommanderPanel.getBuilder();
        // Remember the PSI element currently entered so it can be re-entered
        // after the tree is rebuilt with the new filter state.
        PsiElement currentParent = null;
        if (builder != null) {
          final AbstractTreeNode parentNode = builder.getParentNode();
          final Object value = parentNode.getValue();
          if (value instanceof StructureViewTreeElement) {
            final Object elementValue = ((StructureViewTreeElement)value).getValue();
            if (elementValue instanceof PsiElement) {
              currentParent = (PsiElement) elementValue;
            }
          }
        }
        final boolean state = chkFilter.isSelected();
        // Filters are inverted: a checked filter checkbox means the filter is NOT applied.
        myTreeActionsOwner.setActionIncluded(action, action instanceof FileStructureFilter ? !state : state);
        myTreeStructure.rebuildTree();
        if (builder != null) {
          if (currentParent != null) {
            // Temporarily disable narrowing so enterElement sees the full child list.
            boolean oldNarrowDown = myShouldNarrowDown;
            myShouldNarrowDown = false;
            try {
              builder.enterElement(currentParent, PsiUtilCore.getVirtualFile(currentParent));
            }
            finally {
              myShouldNarrowDown = oldNarrowDown;
            }
          }
          builder.updateList(true);
        }

        // Keep an active speed-search selection consistent with the new list contents.
        if (SpeedSearchBase.hasActiveSpeedSearch(myCommanderPanel.getList())) {
          final SpeedSearchSupply supply = SpeedSearchSupply.getSupply(myCommanderPanel.getList());
          if (supply != null && supply.isPopupActive()) supply.refreshSelection();
        }
      }
    });
    chkFilter.setFocusable(false);

    if (shortcuts.length > 0) {
      text += " (" + KeymapUtil.getShortcutText(shortcuts[0]) + ")";
      new AnAction() {
        @Override
        public void actionPerformed(final AnActionEvent e) {
          chkFilter.doClick();
        }
      }.registerCustomShortcutSet(new CustomShortcutSet(shortcuts), myCommanderPanel);
    }
    chkFilter.setText(text);
    panel.add(chkFilter);
    //,new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 5, 0, 5), 0, 0));
  }

  // The dialog has no OK/Cancel buttons; selection navigates and closes.
  @Override
  @Nullable
  protected JComponent createSouthPanel() {
    return null;
  }

  public CommanderPanel getPanel() {
    return myCommanderPanel;
  }

  /**
   * Commander panel specialized for this dialog: single selection,
   * speed-search driving list updates, and navigation that closes the
   * dialog on success.
   */
  private class MyCommanderPanel extends CommanderPanel implements DataProvider {
    @Override
    protected boolean shouldDrillDownOnEmptyElement(final AbstractTreeNode node) {
      return false;
    }

    public MyCommanderPanel(Project _project) {
      super(_project, false, true);
      myList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
      myListSpeedSearch.addChangeListener(new PropertyChangeListener() {
        @Override
        public void propertyChange(PropertyChangeEvent evt) {
          ProjectListBuilder builder = (ProjectListBuilder)getBuilder();
          if (builder == null) {
            return;
          }
          builder.addUpdateRequest(hasPrefixShortened(evt));
          // After the model update, restore a sane selection: re-select the
          // previous index if still valid, otherwise ensure something is selected.
          ApplicationManager.getApplication().invokeLater(new Runnable() {
            @Override
            public void run() {
              int index = myList.getSelectedIndex();
              if (index != -1 && index < myList.getModel().getSize()) {
                myList.clearSelection();
                ListScrollingUtil.selectItem(myList, index);
              }
              else {
                ListScrollingUtil.ensureSelectionExists(myList);
              }
            }
          });
        }
      });
      myListSpeedSearch.setComparator(createSpeedSearchComparator());
    }

    // True when the user deleted characters from the speed-search prefix
    // (the list then needs a full, not incremental, refresh).
    private boolean hasPrefixShortened(final PropertyChangeEvent evt) {
      return evt.getNewValue() != null && evt.getOldValue() != null &&
             ((String)evt.getNewValue()).length() < ((String)evt.getOldValue()).length();
    }

    /**
     * Navigates to the selected element inside a command (so it becomes a
     * back/forward navigation point) and closes the dialog on success.
     */
    @Override
    public boolean navigateSelectedElement() {
      final Ref<Boolean> succeeded = new Ref<Boolean>();
      final CommandProcessor commandProcessor = CommandProcessor.getInstance();
      commandProcessor.executeCommand(myProject, new Runnable() {
        @Override
        public void run() {
          succeeded.set(MyCommanderPanel.super.navigateSelectedElement());
          IdeDocumentHistory.getInstance(myProject).includeCurrentCommandAsNavigation();
        }
      }, "Navigate", null);
      if (succeeded.get()) {
        close(CANCEL_EXIT_CODE);
      }
      return succeeded.get();
    }

    @Override
    public Object getData(String dataId) {
      Object selectedElement = myCommanderPanel.getSelectedValue();

      if (selectedElement instanceof TreeElement) selectedElement = ((StructureViewTreeElement)selectedElement).getValue();

      if (CommonDataKeys.NAVIGATABLE.is(dataId)) {
        return selectedElement instanceof Navigatable ? selectedElement : myNavigatable;
      }

      if (OpenFileDescriptor.NAVIGATE_IN_EDITOR.is(dataId)) return myEditor;

      return getDataImpl(dataId);
    }

    public String getEnteredPrefix() {
      return myListSpeedSearch.getEnteredPrefix();
    }

    public void updateSpeedSearch() {
      myListSpeedSearch.refreshSelection();
    }

    public void scrollSelectionInView() {
      int selectedIndex = myList.getSelectedIndex();
      if (selectedIndex >= 0) {
        ListScrollingUtil.ensureIndexIsVisible(myList, selectedIndex, 0);
      }
    }
  }

  /**
   * Tree structure that, when narrowing is on, filters children against
   * the current speed-search prefix using the minuscule matcher.
   */
  private class MyStructureTreeStructure extends SmartTreeStructure {
    public MyStructureTreeStructure() {
      super(FileStructureDialog.this.myProject, myTreeModel);
    }

    @Override
    public Object[] getChildElements(Object element) {
      Object[] childElements = super.getChildElements(element);

      if (!myShouldNarrowDown) {
        return childElements;
      }

      String enteredPrefix = myCommanderPanel.getEnteredPrefix();
      if (enteredPrefix == null) {
        return childElements;
      }

      ArrayList<Object> filteredElements = new ArrayList<Object>(childElements.length);
      SpeedSearchComparator speedSearchComparator = createSpeedSearchComparator();

      for (Object child : childElements) {
        if (child instanceof AbstractTreeNode) {
          Object value = ((AbstractTreeNode)child).getValue();
          if (value instanceof TreeElement) {
            String name = ((TreeElement)value).getPresentation().getPresentableText();
            if (name == null) {
              continue;
            }
            // Skip children whose presentable text does not match the typed prefix.
            if (speedSearchComparator.matchingFragments(enteredPrefix, name) == null) {
              continue;
            }
          }
        }
        filteredElements.add(child);
      }
      return ArrayUtil.toObjectArray(filteredElements);
    }

    @Override
    public void rebuildTree() {
      getChildElements(getRootElement()); // for some reason necessary to rebuild tree correctly
      super.rebuildTree();
    }
  }

  /** Speed-search comparator backed by {@link #createFileStructureMatcher}. */
  private static SpeedSearchComparator createSpeedSearchComparator() {
    return new SpeedSearchComparator(false) {
      @NotNull
      @Override
      protected MinusculeMatcher createMatcher(@NotNull String pattern) {
        return createFileStructureMatcher(pattern);
      }
    };
  }

  /** Case-insensitive matcher; space, '(' and ')' act as hard separators. */
  @NotNull
  public static MinusculeMatcher createFileStructureMatcher(@NotNull String pattern) {
    return new MinusculeMatcher(pattern, NameUtil.MatchingCaseSensitivity.NONE, " ()");
  }
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2014.08.09 at 09:22:32 PM IST // package org.w3.math.mathml; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import javax.xml.bind.JAXBElement; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAnyAttribute; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElementRef; import javax.xml.bind.annotation.XmlElementRefs; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlIDREF; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import javax.xml.namespace.QName; /** * <p>Java class for mfenced.type complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType name="mfenced.type"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;group ref="{http://www.w3.org/1998/Math/MathML}Presentation-expr.class" maxOccurs="unbounded" minOccurs="0"/> * &lt;attGroup ref="{http://www.w3.org/1998/Math/MathML}mfenced.attlist"/> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "mfenced.type", propOrder = { "misAndMosAndMns" }) @XmlRootElement(name = "mfenced") public class Mfenced { @XmlElementRefs({ @XmlElementRef(name = "ceiling", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "image", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "domainofapplication", namespace = "http://www.w3.org/1998/Math/MathML", type = Domainofapplication.class, required = false), @XmlElementRef(name = "integers", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "ident", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "cosh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "exists", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "vectorproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = Vectorproduct.class, required = false), @XmlElementRef(name = "root", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "leq", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "mode", namespace = "http://www.w3.org/1998/Math/MathML", type = Mode.class, required = 
false), @XmlElementRef(name = "variance", namespace = "http://www.w3.org/1998/Math/MathML", type = Variance.class, required = false), @XmlElementRef(name = "msubsup", namespace = "http://www.w3.org/1998/Math/MathML", type = Msubsup.class, required = false), @XmlElementRef(name = "plus", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "conjugate", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "mfenced", namespace = "http://www.w3.org/1998/Math/MathML", type = Mfenced.class, required = false), @XmlElementRef(name = "factorof", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "reals", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "arccoth", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "card", namespace = "http://www.w3.org/1998/Math/MathML", type = Card.class, required = false), @XmlElementRef(name = "mean", namespace = "http://www.w3.org/1998/Math/MathML", type = Mean.class, required = false), @XmlElementRef(name = "list", namespace = "http://www.w3.org/1998/Math/MathML", type = org.w3.math.mathml.List.class, required = false), @XmlElementRef(name = "arg", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "lt", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "rem", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "in", namespace = "http://www.w3.org/1998/Math/MathML", type = In.class, required = false), @XmlElementRef(name = "partialdiff", namespace = "http://www.w3.org/1998/Math/MathML", type = Partialdiff.class, required 
= false), @XmlElementRef(name = "sinh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "equivalent", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "imaginaryi", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "logbase", namespace = "http://www.w3.org/1998/Math/MathML", type = Logbase.class, required = false), @XmlElementRef(name = "power", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "mrow", namespace = "http://www.w3.org/1998/Math/MathML", type = Mrow.class, required = false), @XmlElementRef(name = "uplimit", namespace = "http://www.w3.org/1998/Math/MathML", type = Uplimit.class, required = false), @XmlElementRef(name = "lowlimit", namespace = "http://www.w3.org/1998/Math/MathML", type = Lowlimit.class, required = false), @XmlElementRef(name = "times", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "outerproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = Outerproduct.class, required = false), @XmlElementRef(name = "determinant", namespace = "http://www.w3.org/1998/Math/MathML", type = Determinant.class, required = false), @XmlElementRef(name = "cos", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "cn", namespace = "http://www.w3.org/1998/Math/MathML", type = Cn.class, required = false), @XmlElementRef(name = "csymbol", namespace = "http://www.w3.org/1998/Math/MathML", type = Csymbol.class, required = false), @XmlElementRef(name = "apply", namespace = "http://www.w3.org/1998/Math/MathML", type = Apply.class, required = false), @XmlElementRef(name = "notanumber", namespace = "http://www.w3.org/1998/Math/MathML", type = 
JAXBElement.class, required = false), @XmlElementRef(name = "csch", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "mstyle", namespace = "http://www.w3.org/1998/Math/MathML", type = Mstyle.class, required = false), @XmlElementRef(name = "intersect", namespace = "http://www.w3.org/1998/Math/MathML", type = Intersect.class, required = false), @XmlElementRef(name = "approx", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "union", namespace = "http://www.w3.org/1998/Math/MathML", type = Union.class, required = false), @XmlElementRef(name = "munder", namespace = "http://www.w3.org/1998/Math/MathML", type = Munder.class, required = false), @XmlElementRef(name = "msub", namespace = "http://www.w3.org/1998/Math/MathML", type = Msub.class, required = false), @XmlElementRef(name = "mspace", namespace = "http://www.w3.org/1998/Math/MathML", type = Mspace.class, required = false), @XmlElementRef(name = "notsubset", namespace = "http://www.w3.org/1998/Math/MathML", type = Notsubset.class, required = false), @XmlElementRef(name = "set", namespace = "http://www.w3.org/1998/Math/MathML", type = Set.class, required = false), @XmlElementRef(name = "gcd", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "emptyset", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "eq", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "rationals", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "infinity", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "arccot", namespace = "http://www.w3.org/1998/Math/MathML", type = 
JAXBElement.class, required = false), @XmlElementRef(name = "lcm", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "and", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "abs", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "inverse", namespace = "http://www.w3.org/1998/Math/MathML", type = Inverse.class, required = false), @XmlElementRef(name = "sec", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "cartesianproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = Cartesianproduct.class, required = false), @XmlElementRef(name = "gt", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "mover", namespace = "http://www.w3.org/1998/Math/MathML", type = Mover.class, required = false), @XmlElementRef(name = "mphantom", namespace = "http://www.w3.org/1998/Math/MathML", type = Mphantom.class, required = false), @XmlElementRef(name = "codomain", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "momentabout", namespace = "http://www.w3.org/1998/Math/MathML", type = Momentabout.class, required = false), @XmlElementRef(name = "sech", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "imaginary", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "naturalnumbers", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "arccsc", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "mn", namespace = 
"http://www.w3.org/1998/Math/MathML", type = Mn.class, required = false), @XmlElementRef(name = "maligngroup", namespace = "http://www.w3.org/1998/Math/MathML", type = Maligngroup.class, required = false), @XmlElementRef(name = "max", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "floor", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "tendsto", namespace = "http://www.w3.org/1998/Math/MathML", type = Tendsto.class, required = false), @XmlElementRef(name = "diff", namespace = "http://www.w3.org/1998/Math/MathML", type = Diff.class, required = false), @XmlElementRef(name = "curl", namespace = "http://www.w3.org/1998/Math/MathML", type = Curl.class, required = false), @XmlElementRef(name = "sdev", namespace = "http://www.w3.org/1998/Math/MathML", type = Sdev.class, required = false), @XmlElementRef(name = "ln", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "arccsch", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "maction", namespace = "http://www.w3.org/1998/Math/MathML", type = Maction.class, required = false), @XmlElementRef(name = "piecewise", namespace = "http://www.w3.org/1998/Math/MathML", type = Piecewise.class, required = false), @XmlElementRef(name = "exp", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "arccos", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "arcsech", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "msup", namespace = "http://www.w3.org/1998/Math/MathML", type = Msup.class, required = false), @XmlElementRef(name = "mi", namespace = 
"http://www.w3.org/1998/Math/MathML", type = Mi.class, required = false), @XmlElementRef(name = "int", namespace = "http://www.w3.org/1998/Math/MathML", type = Int.class, required = false), @XmlElementRef(name = "implies", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "factorial", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "min", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "divergence", namespace = "http://www.w3.org/1998/Math/MathML", type = Divergence.class, required = false), @XmlElementRef(name = "product", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "arctan", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "geq", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "selector", namespace = "http://www.w3.org/1998/Math/MathML", type = Selector.class, required = false), @XmlElementRef(name = "domain", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "divide", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "ms", namespace = "http://www.w3.org/1998/Math/MathML", type = Ms.class, required = false), @XmlElementRef(name = "forall", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "not", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "setdiff", namespace = "http://www.w3.org/1998/Math/MathML", type = Setdiff.class, required = false), @XmlElementRef(name = "neq", namespace = 
"http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "scalarproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = Scalarproduct.class, required = false), @XmlElementRef(name = "mmultiscripts", namespace = "http://www.w3.org/1998/Math/MathML", type = Mmultiscripts.class, required = false), @XmlElementRef(name = "notin", namespace = "http://www.w3.org/1998/Math/MathML", type = Notin.class, required = false), @XmlElementRef(name = "laplacian", namespace = "http://www.w3.org/1998/Math/MathML", type = Laplacian.class, required = false), @XmlElementRef(name = "arcsin", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "msqrt", namespace = "http://www.w3.org/1998/Math/MathML", type = Msqrt.class, required = false), @XmlElementRef(name = "coth", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "subset", namespace = "http://www.w3.org/1998/Math/MathML", type = Subset.class, required = false), @XmlElementRef(name = "complexes", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "primes", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "mpadded", namespace = "http://www.w3.org/1998/Math/MathML", type = Mpadded.class, required = false), @XmlElementRef(name = "munderover", namespace = "http://www.w3.org/1998/Math/MathML", type = Munderover.class, required = false), @XmlElementRef(name = "moment", namespace = "http://www.w3.org/1998/Math/MathML", type = Moment.class, required = false), @XmlElementRef(name = "sin", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "tan", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = 
"mtable", namespace = "http://www.w3.org/1998/Math/MathML", type = Mtable.class, required = false), @XmlElementRef(name = "condition", namespace = "http://www.w3.org/1998/Math/MathML", type = Condition.class, required = false), @XmlElementRef(name = "pi", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "vector", namespace = "http://www.w3.org/1998/Math/MathML", type = Vector.class, required = false), @XmlElementRef(name = "tanh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "real", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "exponentiale", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "log", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "matrix", namespace = "http://www.w3.org/1998/Math/MathML", type = Matrix.class, required = false), @XmlElementRef(name = "minus", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "degree", namespace = "http://www.w3.org/1998/Math/MathML", type = Degree.class, required = false), @XmlElementRef(name = "bvar", namespace = "http://www.w3.org/1998/Math/MathML", type = Bvar.class, required = false), @XmlElementRef(name = "csc", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "mfrac", namespace = "http://www.w3.org/1998/Math/MathML", type = Mfrac.class, required = false), @XmlElementRef(name = "notprsubset", namespace = "http://www.w3.org/1998/Math/MathML", type = Notprsubset.class, required = false), @XmlElementRef(name = "semantics", namespace = "http://www.w3.org/1998/Math/MathML", type = Semantics.class, required = false), @XmlElementRef(name = 
"mtext", namespace = "http://www.w3.org/1998/Math/MathML", type = Mtext.class, required = false), @XmlElementRef(name = "sum", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "or", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "eulergamma", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "limit", namespace = "http://www.w3.org/1998/Math/MathML", type = Limit.class, required = false), @XmlElementRef(name = "mo", namespace = "http://www.w3.org/1998/Math/MathML", type = Mo.class, required = false), @XmlElementRef(name = "interval", namespace = "http://www.w3.org/1998/Math/MathML", type = Interval.class, required = false), @XmlElementRef(name = "false", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "menclose", namespace = "http://www.w3.org/1998/Math/MathML", type = Menclose.class, required = false), @XmlElementRef(name = "prsubset", namespace = "http://www.w3.org/1998/Math/MathML", type = Prsubset.class, required = false), @XmlElementRef(name = "arcsinh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "declare", namespace = "http://www.w3.org/1998/Math/MathML", type = Declare.class, required = false), @XmlElementRef(name = "mroot", namespace = "http://www.w3.org/1998/Math/MathML", type = Mroot.class, required = false), @XmlElementRef(name = "arccosh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "ci", namespace = "http://www.w3.org/1998/Math/MathML", type = Ci.class, required = false), @XmlElementRef(name = "malignmark", namespace = "http://www.w3.org/1998/Math/MathML", type = Malignmark.class, required = false), @XmlElementRef(name = "lambda", namespace 
= "http://www.w3.org/1998/Math/MathML", type = Lambda.class, required = false), @XmlElementRef(name = "arctanh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "true", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "grad", namespace = "http://www.w3.org/1998/Math/MathML", type = Grad.class, required = false), @XmlElementRef(name = "xor", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "median", namespace = "http://www.w3.org/1998/Math/MathML", type = Median.class, required = false), @XmlElementRef(name = "transpose", namespace = "http://www.w3.org/1998/Math/MathML", type = Transpose.class, required = false), @XmlElementRef(name = "arcsec", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "cot", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "compose", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false), @XmlElementRef(name = "merror", namespace = "http://www.w3.org/1998/Math/MathML", type = Merror.class, required = false), @XmlElementRef(name = "quotient", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class, required = false) }) protected java.util.List<Object> misAndMosAndMns; @XmlAttribute(name = "open") protected String open; @XmlAttribute(name = "close") protected String close; @XmlAttribute(name = "separators") protected String separators; @XmlAttribute(name = "class") @XmlSchemaType(name = "NMTOKENS") protected java.util.List<String> clazzs; @XmlAttribute(name = "style") protected String style; @XmlAttribute(name = "xref") @XmlIDREF @XmlSchemaType(name = "IDREF") protected Object xref; @XmlAttribute(name = "id") 
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlID
@XmlSchemaType(name = "ID")
protected String id; // XML ID of this element (collapsed whitespace)
@XmlAttribute(name = "href", namespace = "http://www.w3.org/1999/xlink")
@XmlSchemaType(name = "anyURI")
protected String href; // xlink:href URI, if present
@XmlAnyAttribute
private Map<QName, String> otherAttributes = new HashMap<QName, String>(); // catch-all for attributes not bound to a typed property
/**
 * Gets the value of the misAndMosAndMns property.
 *
 * <p>This accessor returns a reference to the live list, not a snapshot;
 * any modification made to the returned list is reflected inside this
 * JAXB object. That is why there is no {@code set} method for this
 * property. To add an item: {@code getMisAndMosAndMns().add(newItem);}
 *
 * <p>The list accepts the MathML child element types and
 * {@link JAXBElement} wrappers enumerated in the {@code @XmlElementRefs}
 * annotation declared on this field (presentation elements such as
 * {@code mi}, {@code mo}, {@code mn}, {@code mrow}, and content elements
 * such as {@code apply}, {@code ci}, {@code cn}, etc.).
 */
public java.util.List<Object> getMisAndMosAndMns() {
    if (misAndMosAndMns == null) {
        // Lazily created so an unmarshalled element without children costs nothing.
        misAndMosAndMns = new ArrayList<Object>();
    }
    return this.misAndMosAndMns;
}
/**
 * Gets the value of the open property.
 *
 * @return the {@code open} attribute, or the default {@code "("} when unset
 */
public String getOpen() {
    if (open == null) {
        return "(";
    } else {
        return open;
    }
}
/**
 * Sets the value of the open property.
 *
 * @param value allowed object is {@link String }
 */
public void setOpen(String value) {
    this.open = value;
}
/**
 * Gets the value of the close property.
 *
 * @return the {@code close} attribute, or the default {@code ")"} when unset
 */
public String getClose() {
    if (close == null) {
        return ")";
    } else {
        return close;
    }
}
/**
 * Sets the value of the close property.
 *
 * @param value allowed object is {@link String }
 */
public void setClose(String value) {
    this.close = value;
}
/**
 * Gets the value of the separators property.
 *
 * @return the {@code separators} attribute, or the default {@code ","} when unset
 */
public String getSeparators() {
    if (separators == null) {
        return ",";
    } else {
        return separators;
    }
}
/**
 * Sets the value of the separators property.
 *
 * @param value allowed object is {@link String }
 */
public void setSeparators(String value) {
    this.separators = value;
}
/**
 * Gets the value of the clazzs property ({@code class} attribute, NMTOKENS).
 *
 * <p>Returns the live list, not a snapshot; modifications are reflected in
 * this JAXB object, which is why there is no {@code set} method.
 * List elements are of type {@link String }.
 */
public java.util.List<String> getClazzs() {
    if (clazzs == null) {
        clazzs = new ArrayList<String>();
    }
    return this.clazzs;
}
/**
 * Gets the value of the style property.
 *
 * @return possible object is {@link String }
 */
public String getStyle() {
    return style;
}
/**
 * Sets the value of the style property.
 *
 * @param value allowed object is {@link String }
 */
public void setStyle(String value) {
    this.style = value;
}
/**
 * Gets the value of the xref property (IDREF to another element).
 *
 * @return possible object is {@link Object }
 */
public Object getXref() {
    return xref;
}
/**
 * Sets the value of the xref property.
 *
 * @param value allowed object is {@link Object }
 */
public void setXref(Object value) {
    this.xref = value;
}
/**
 * Gets the value of the id property.
 *
 * @return possible object is {@link String }
 */
public String getId() {
    return id;
}
/**
 * Sets the value of the id property.
 *
 * @param value allowed object is {@link String }
 */
public void setId(String value) {
    this.id = value;
}
/**
 * Gets the value of the href property (xlink:href).
 *
 * @return possible object is {@link String }
 */
public String getHref() {
    return href;
}
/**
 * Sets the value of the href property.
 *
 * @param value allowed object is {@link String }
 */
public void setHref(String value) {
    this.href = value;
}
/**
 * Gets a map that contains attributes that aren't bound to any typed
 * property on this class.
 *
 * <p>The map is keyed by attribute {@link QName} with the attribute's
 * string value. The returned map is live: new attributes may be added by
 * updating it directly, which is why there is no setter.
 *
 * @return always non-null
 */
public Map<QName, String> getOtherAttributes() {
    return otherAttributes;
}
}
/*
 * Copyright (c) 2015, Alachisoft. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alachisoft.tayzgrid.caching.cacheloader;
import com.alachisoft.tayzgrid.runtime.exceptions.OperationFailedException;
import com.alachisoft.tayzgrid.caching.LockAccessType;
import com.alachisoft.tayzgrid.caching.Cache;
import com.alachisoft.tayzgrid.caching.OperationContext;
import com.alachisoft.tayzgrid.caching.OperationContextFieldName;
import com.alachisoft.tayzgrid.caching.OperationContextOperationType;
import com.alachisoft.tayzgrid.caching.UserBinaryObject;
import com.alachisoft.tayzgrid.common.IDisposable;
import com.alachisoft.tayzgrid.common.logger.ILogger;
import com.alachisoft.tayzgrid.common.util.LanguageContext;
import com.alachisoft.tayzgrid.runtime.cacheloader.CacheLoader;
import com.alachisoft.tayzgrid.runtime.exceptions.ConfigurationException;
import com.alachisoft.tayzgrid.caching.evictionpolicies.EvictionHint;
import com.alachisoft.tayzgrid.caching.evictionpolicies.PriorityEvictionHint;
import com.alachisoft.tayzgrid.common.BitSet;
import com.alachisoft.tayzgrid.common.util.AuthenticateFeature;
import com.alachisoft.tayzgrid.runtime.caching.ProviderCacheItem;
import com.alachisoft.tayzgrid.caching.autoexpiration.ExpirationHint;
import com.alachisoft.tayzgrid.caching.datasourceproviders.ClassPaths;
import com.alachisoft.tayzgrid.common.DirectoryUtil;
import com.alachisoft.tayzgrid.common.caching.expiration.ExpirationType;
import com.alachisoft.tayzgrid.common.enums.EventType;
import com.alachisoft.tayzgrid.common.logger.EventLogger;
import com.alachisoft.tayzgrid.runtime.cacheloader.LoaderState;
import com.alachisoft.tayzgrid.runtime.util.TimeSpan;
import com.alachisoft.tayzgrid.serialization.standard.CompactBinaryFormatter;
import com.alachisoft.tayzgrid.serialization.util.SerializationBitSet;
import com.alachisoft.tayzgrid.serialization.util.SerializationUtil;
import com.alachisoft.tayzgrid.serialization.util.TypeInfoMap;
import java.io.File;
import java.io.FileInputStream;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
/**
 * Runs a user-supplied {@link CacheLoader} at cache startup: loads the
 * loader class from a deployed jar, then pumps items returned by
 * {@code loadNext} into the cache with the configured retry policy.
 */
public class CacheStartupLoader implements IDisposable {
    private LoadCacheTask _task;               // background task driving LoadCache(); disposed on completion
    private CacheLoader _cacheLoader;          // user-provided loader instance created in Initialize()
    private Cache _cache;                      // target cache items are inserted into
    private int _noOfRetries;                  // per-item insert retries ("retries" property)
    private int _retryInterval;                // seconds between retries ("retry-interval" property)
    private java.util.Map _properties;         // raw configuration properties passed to the constructor
    private boolean _loadCache = false;        // whether the loader should actually be executed
    private ILogger cacheLog;
    private boolean _enabled;                  // "enabled" property; Initialize() only runs when true
    private LanguageContext _languageContext = LanguageContext.NONE; // set to JAVA once a jar/class loader is initialized
    // Fully-qualified name of the loader interface; not referenced in the
    // code visible here — presumably used by the class-scanning helpers below.
    private static final String clInterface = "com.alachisoft.tayzgrid.runtime.cacheloader.CacheLoader";
    private JarFileLoader _loader;             // classloader over the cache's deploy folder; created lazily
    private boolean _isTaskCompleted = false;
    private boolean _isloaderTaskIntruppted = false; // NOTE: "Intruppted" spelling is part of the public accessor names; do not change
    public final boolean getIsCacheLoaderTaskCompleted() {
        return _isTaskCompleted;
    }
    public final void setIsCacheLoaderTaskCompleted(boolean value) {
        _isTaskCompleted = value;
    }
    public final boolean getIsCacheLoaderTaskIntruppted() {
        return _isloaderTaskIntruppted;
    }
    public final void setIsCacheLoaderTaskIntruppted(boolean value) {
        _isloaderTaskIntruppted = value;
    }
    /**
     * Reads retry configuration and, when "enabled" is true, eagerly
     * initializes the user cache-loader. Initialization failures are
     * logged to the event log and rethrown.
     *
     * @param properities configuration map (keys: "retries",
     *                    "retry-interval", "enabled", plus the keys read
     *                    by Initialize); note the misspelled parameter
     *                    name is kept as-is
     * @param cache       cache that loaded items will be inserted into
     * @param cacheLog    logger for error reporting
     * @throws Exception rethrown from Initialize when loader setup fails
     */
    public CacheStartupLoader(java.util.Map properities, Cache cache, ILogger cacheLog) throws Exception {
        if (properities.containsKey("retries")) {
            _noOfRetries = Integer.parseInt(properities.get("retries").toString());
        } else {
            _noOfRetries = 0;
        }
        if (properities.containsKey("retry-interval")) {
            _retryInterval = Integer.parseInt(properities.get("retry-interval").toString());
        } else {
            _retryInterval = 0;
        }
        if (properities.containsKey("enabled")) {
            _enabled = Boolean.parseBoolean(properities.get("enabled").toString());
        }
        _cache = cache;
        this.cacheLog = cacheLog;
        _properties = properities;
        try {
            if (_enabled) {
                Initialize(properities);
            }
        } catch (Exception ex) {
            EventLogger.LogEvent("CacheLoader.Initialize. Error:" + ex.toString(), EventType.WARNING);
            cacheLog.Error("CacheStartupLoader.Initialize", ex.toString());
            throw ex;
        }
    }
    public final boolean getIsCacheloaderEnabled() {
        return _enabled;
    }
    public final boolean getExecuteCacheLoader() {
        return _loadCache;
    }
    public final void setExecuteCacheLoader(boolean value) {
        _loadCache = value;
    }
    public final LoadCacheTask getTask() {
        return _task;
    }
    public final void setTask(LoadCacheTask value) {
        _task = value;
    }
    public final int getNoOfRetries() {
        return _noOfRetries;
    }
    public final void setNoOfRetries(int value) {
        _noOfRetries = value;
    }
    // Configured in seconds, returned in milliseconds (used directly by Thread.sleep).
    public final int getRetryInterval() {
        return _retryInterval * 1000;
    }
    public final void setRetryInterval(int value) {
        _retryInterval = value;
    }
    public final java.util.Map getProperties() {
        return _properties;
    }
    public final void setProperties(java.util.Map value) {
        _properties = value;
    }
    // Convenience overload: re-initialize from the stored properties.
    public final void Initialize() throws ConfigurationException {
        Initialize(getProperties());
    }
    /**
     * Method that allows the object to initialize itself. Passes the property
     * map down the object hierarchy so that other objects may configure
     * themselves as well.
     *
     * @param properties properties collection for this cache.
*/
private void Initialize(java.util.Map properties) throws ConfigurationException {
    if (properties == null) {
        throw new IllegalArgumentException("properties");
    }
    try {
        // "assembly" and "classname" are mandatory configuration keys.
        if (!properties.containsKey("assembly")) {
            throw new ConfigurationException("Missing assembly name");
        }
        if (!properties.containsKey("classname")) {
            throw new ConfigurationException("Missing class name");
        }
        // NOTE(review): 'assembly' is read but never used in the visible code.
        String assembly = String.valueOf(properties.get("assembly"));
        String classname = String.valueOf(properties.get("classname"));
        String assemblyFullName = String.valueOf(properties.get("full-name"));
        // This is added to load the .exe and .dll providers
        // to keep previous provider running this bad chunk of code is written
        // later on you can directly provide the provider name read from config.
        String extension = ".dll";
        if (properties.containsKey("full-name")) {
            // Take the last dot-separated segment of the full name as the extension.
            extension = assemblyFullName.split("\\.")[assemblyFullName.split("\\.").length - 1];
        }
        // Optional startup parameters handed to CacheLoader.init(); null if absent or of the wrong type.
        java.util.HashMap startupparams = (java.util.HashMap) ((properties.get("parameters") instanceof java.util.Map) ? properties.get("parameters") : null);
        if (startupparams == null) {
            startupparams = new java.util.HashMap();
        }
        try {
            if (extension.endsWith(".dll") || extension.endsWith(".exe")) {
                // .NET providers are explicitly rejected by this (Java) implementation.
                throw new UnsupportedOperationException(".exe or .dll not allowed as of yet");
            } else if (extension.endsWith("jar") || extension.endsWith("class"))
            //else if (extension.endsWith("\\."+"jar") || extension.endsWith("\\."+"class"))
            {
                AuthenticateFeature.Authenticate(LanguageContext.JAVA);
                _languageContext = LanguageContext.JAVA;
                // Resolve the per-cache deploy folder and build a classloader over it (once).
                File path = DirectoryUtil.createDeployAssemblyFolder(_cache.getName());
                if (_loader == null) {
                    _loader = ClassPaths.addPath(path, this.cacheLog);
                }
                // Instantiate the user loader; a non-CacheLoader class triggers
                // the ClassCastException branch below.
                _cacheLoader = (CacheLoader) _loader.loadClass(classname).newInstance();
                _cacheLoader.init(startupparams);
                SerializationUtil.registerTypeInfoMap(_cache.getName(), _cache.GetTypeInfoMap());
            }
        } catch (ClassCastException e) {
            throw new ConfigurationException("The class specified does not implement ICacheLoader");
        } catch (Exception e) {
            throw new ConfigurationException(e.getMessage(), e);
        }
    } catch (ConfigurationException e2) {
        throw e2;
    } catch (Exception e) {
        throw new ConfigurationException("Configuration Error: " + e.toString(), e);
    }
}
/**
 * Drives the user loader: repeatedly calls {@code loadNext} until it
 * returns false, inserting each returned {@link ProviderCacheItem} into
 * the cache with expiration/eviction hints and the configured retry
 * policy. Returns early (without marking completion) if the cache stops
 * running mid-load.
 */
public final void LoadCache() {
    boolean userReturn = true;          // loadNext's "more data follows" flag; loop continues while true
    int retryCount = 0;
    byte[] serializedObject = null;
    java.util.LinkedHashMap data = new java.util.LinkedHashMap();   // batch buffer filled by loadNext
    LoaderState index = new LoaderState();                          // opaque cursor handed back to loadNext each round
    do {
        userReturn = false;
        data.clear();
        try {
            userReturn = _cacheLoader.loadNext(data, index);
        } catch (Exception e) {
            // NOTE(review): the 'continue' only fires when error logging is
            // enabled; otherwise the (empty) batch falls through below.
            if (cacheLog != null && cacheLog.getIsErrorEnabled()) {
                EventLogger.LogEvent("ICacheLoader.LoadNext. Error:" + e.toString(), EventType.WARNING);
                cacheLog.Error("CacheStartupLoader.Load()", e.toString());
                continue;
            }
        }
        try {
            if (data != null) {
                Map.Entry keyValue;
                Iterator de = data.entrySet().iterator();
                while (de.hasNext()) {
                    keyValue = (Map.Entry) de.next();
                    retryCount = 0;
                    // NOTE(review): the skip of non-ProviderCacheItem values is also
                    // guarded by the logging check — with logging disabled the bad
                    // value reaches the cast below and throws ClassCastException.
                    if (!(keyValue.getValue() instanceof ProviderCacheItem)) {
                        if (cacheLog != null && cacheLog.getIsErrorEnabled()) {
                            cacheLog.Error("CacheStartupLoader.Load()", "Invalid Key/Value type specified");
                            continue;
                        }
                    }
                    Object key = keyValue.getKey();
                    ProviderCacheItem item = (ProviderCacheItem) keyValue.getValue();
                    if (item == null) {
                        continue;
                    }
                    //expiration hints...
                    // Map the client-specified absolute/sliding expirations through the
                    // cache-wide expiration contract, when one is configured.
                    if(_cache.getContext().ExpirationContract!=null){
                        java.util.HashMap resolutionMap = _cache.getContext().ExpirationContract.resolveClientExpirations(item.getAbsoluteExpiration(), item.getSlidingExpiration());
                        item.setAbsoluteExpiration((Date)resolutionMap.get(ExpirationType.FixedExpiration));
                        item.setSlidingExpiration((TimeSpan)resolutionMap.get(ExpirationType.SlidingExpiration));
                    }
                    // NOTE(review): expType is computed (validating the parameters as a
                    // side effect, presumably) but its value is never used.
                    int expType = CacheLoaderUtil.EvaluateExpirationParameters(item.getAbsoluteExpiration(), item.getSlidingExpiration());
                    ExpirationHint expiration = com.alachisoft.tayzgrid.caching.autoexpiration.DependencyHelper.GetExpirationHint(item.getAbsoluteExpiration(), item.getSlidingExpiration());
                    if (expiration != null) {
                        if (item.isResyncItemOnExpiration()) {
                            expiration.SetBit(ExpirationHint.NEEDS_RESYNC);
                        }
                    }
                    String resyncProviderName = item.getResyncProviderName() == null ? null : item.getResyncProviderName().toLowerCase();
                    //query and tag info...
                    java.util.HashMap queryInfo = new java.util.HashMap();
                    TypeInfoMap typeMap = _cache.GetTypeInfoMap();
                    BitSet flag = new BitSet();
                    switch (_languageContext) {
                        case DOTNET:
                            if (typeMap != null) {
                                queryInfo.put("query-info", CacheLoaderUtil.GetQueryInfo(item.getValue(), typeMap));
                            }
                            try {
                                if (item.getTags() != null) {
                                    queryInfo.put("tag-info", CacheLoaderUtil.GetTagInfo(item.getValue(), item.getTags()));
                                }
                                if (item.getNamedTags() != null) {
                                    java.util.HashMap namedTagInfo = CacheLoaderUtil.GetNamedTagsInfo(item.getValue(), item.getNamedTags(), typeMap);
                                    if (namedTagInfo != null) {
                                        queryInfo.put("named-tag-info", namedTagInfo);
                                    }
                                }
                            } catch (IllegalArgumentException exception) {
                                // Bad tag/named-tag data: log and skip this item.
                                cacheLog.Error("CacheStartupLoader.Load()", exception.getMessage());
                                continue;
                            }
                            break;
                        case JAVA:
                            ProviderCacheItem jItem = (ProviderCacheItem) item;
                            Object javaQueryInfoMap = jItem.getValue();
                            if (javaQueryInfoMap != null) {
                                // Serialize the Java value here (the DOTNET path serializes
                                // after the switch instead), tracking format bits in 'flag'.
                                com.alachisoft.tayzgrid.serialization.util.SerializationBitSet tempFlag = new SerializationBitSet(flag.getData());
                                serializedObject = (byte[]) SerializationUtil.safeSerialize(javaQueryInfoMap, _cache.getName(), tempFlag);
                                flag.setData(tempFlag.getData());
                            }
                            if (typeMap != null) {
                                queryInfo.put("query-info", CacheLoaderUtil.GetQueryInfo(item.getValue(), typeMap));
                            }
                            try {
                                if (item.getTags() != null) {
                                    queryInfo.put("tag-info", CacheLoaderUtil.GetTagInfo(item.getValue(), item.getTags()));
                                }
                                if (item.getNamedTags() != null) {
                                    java.util.HashMap namedTagInfo = CacheLoaderUtil.GetNamedTagsInfo(item.getValue(), item.getNamedTags(), typeMap);
                                    if (namedTagInfo != null) {
                                        queryInfo.put("named-tag-info", namedTagInfo);
                                    }
                                }
                            } catch (IllegalArgumentException exception) {
                                cacheLog.Error("CacheStartupLoader.Load()", exception.getMessage());
                                continue;
                            }
                            break;
                    }
                    ////verify group/subgroup and tags
                    //eviction hint...
                    EvictionHint eviction = new PriorityEvictionHint(item.getItemPriority());
                    //object serialization...
                    if (_languageContext == LanguageContext.DOTNET) {
                        Object tempVar = Safeserialize((Object) item.getValue());
                        serializedObject = (byte[]) ((tempVar instanceof byte[]) ? tempVar : null);
                    }
                    //convert to user binary object
                    UserBinaryObject ubObject = null;
                    if (serializedObject != null) {
                        ubObject = UserBinaryObject.CreateUserBinaryObject(serializedObject);
                    }
                    // Insert with retries: sleep getRetryInterval() ms between attempts;
                    // after the final failure, log (when enabled) and move on.
                    while (retryCount <= getNoOfRetries()) {
                        if (_cache.getIsRunning()) {
                            try {
                                _cache.Insert(key, ubObject, expiration, eviction, item.getGroup(), item.getSubGroup(), queryInfo, flag, null, 0, LockAccessType.IGNORE_LOCK, null, resyncProviderName, new OperationContext(OperationContextFieldName.OperationType, OperationContextOperationType.CacheOperation));
                                break;
                            } catch (Exception e) {
                                retryCount++;
                                // NOTE(review): InterruptedException from sleep is swallowed by
                                // the outer catch below; the interrupt flag is not restored.
                                Thread.sleep(getRetryInterval());
                                if (retryCount > getNoOfRetries()) {
                                    if (e instanceof OperationFailedException) {
                                        // Non-traceable operation failures are logged quietly.
                                        if (!((OperationFailedException) e).getIsTracable()) {
                                            if (cacheLog != null && cacheLog.getIsErrorEnabled()) {
                                                cacheLog.Error("CacheStartupLoader.Load()", e.toString());
                                                break;
                                            }
                                        }
                                    } else {
                                        if (cacheLog != null && cacheLog.getIsErrorEnabled()) {
                                            cacheLog.Error("CacheStartupLoader.Load()", e.toString());
                                            break;
                                        }
                                    }
                                }
                            }
                        } else {
                            // Cache stopped mid-load: abandon the remaining items without
                            // marking the task completed or disposing it.
                            return;
                        }
                    }
                }
            }
        } catch (Exception e) {
            if (cacheLog != null && cacheLog.getIsErrorEnabled()) {
                EventLogger.LogEvent("CacheStartupLoader.Load(): " + e.toString(), EventType.WARNING);
                cacheLog.Error("CacheStartupLoader.Load()", e.toString());
                continue;
            }
        }
    } while (userReturn);
    _isTaskCompleted = true;
    _isloaderTaskIntruppted = false;
    getTask().dispose();
}
/**
 * Scans the jar (or path) for classes implementing the given interface.
 * Body continues beyond this chunk; only the jar-entry iteration head is
 * visible here.
 */
private String[] loadClasses(String path, String interfaceName) throws Exception {
    List list = new ArrayList();
    ArrayList jarClasses = new ArrayList();
    if (path.endsWith(".jar")) {
        JarInputStream jarFile = new JarInputStream(new FileInputStream(path));
        JarEntry jarEntry;
        while (true) {
            jarEntry = jarFile.getNextJarEntry();
            if (jarEntry == null) {
                break;
            }
            if (jarEntry.isDirectory()) {
                continue;
            }
            if
((jarEntry.getName().endsWith(".class"))) { jarClasses.add(jarEntry.getName().replaceAll("/", "\\.")); } } if (jarClasses != null && jarClasses.size() <= 0) { return null; } try { URL urls[] = {}; JarFileLoader cl = new JarFileLoader(urls); cl.addFile(path); Class interFace = Class.forName(interfaceName); for (int i = 0; i < jarClasses.size(); i++) { String[] tempClass = jarClasses.get(i).toString().split(".class"); Class cls = cl.loadClass(tempClass[0]); boolean match = !cls.isInterface() && !cls.isEnum() && interFace.isAssignableFrom(cls); if (match) { list.add(tempClass[0]); } } return (String[]) list.toArray(new String[0]); } catch (Exception ex) { throw ex; } } else if (path.endsWith(".class")) { try { File tempFile = new File(path); String pth = path.replaceAll(tempFile.getName(), ""); File file = new File(pth); URL url = file.toURL(); URL[] urls = new URL[]{ url }; Class interFace = Class.forName(interfaceName); String[] tempClass = (tempFile.getName()).split("\\."); URLClassLoader clLoader = new URLClassLoader(urls); Class cls = clLoader.loadClass(tempClass[0]); boolean match = !cls.isInterface() && !cls.isEnum() && interFace.isAssignableFrom(cls); if (match) { list.add(tempClass[0]); } } catch (Exception e) { throw e; } } return (String[]) list.toArray(new String[0]); } private Object Safeserialize(Object serializableObject) throws java.io.IOException { if (serializableObject != null) { serializableObject = CompactBinaryFormatter.toByteBuffer(serializableObject, _cache.getName()); } return serializableObject; } public final void dispose() { if (_cacheLoader != null) { try { _cacheLoader.dispose(); } catch (Exception ex) { EventLogger.LogEvent("ICacheLoader.Dispose. Error:" + ex.toString(), EventType.ERROR); cacheLog.Error("CacheStartupLoader.Dispose(): ", ex.toString()); } _cacheLoader = null; } if (_task != null && _task.isIsRunnung()) { _task.dispose(); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.cache.query.internal.index; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.apache.geode.cache.Region; import org.apache.geode.cache.query.Index; import org.apache.geode.cache.query.IndexExistsException; import org.apache.geode.cache.query.IndexNameConflictException; import org.apache.geode.cache.query.IndexStatistics; import org.apache.geode.cache.query.IndexType; import org.apache.geode.cache.query.QueryException; import org.apache.geode.cache.query.QueryInvocationTargetException; import org.apache.geode.cache.query.SelectResults; import org.apache.geode.cache.query.TypeMismatchException; import org.apache.geode.cache.query.internal.CompiledValue; import org.apache.geode.cache.query.internal.ExecutionContext; import org.apache.geode.cache.query.internal.RuntimeIterator; import org.apache.geode.cache.query.types.ObjectType; import org.apache.geode.internal.cache.BucketRegion; import org.apache.geode.internal.cache.InternalCache; import org.apache.geode.internal.cache.PartitionedRegion; 
import org.apache.geode.internal.cache.PartitionedRegionDataStore; import org.apache.geode.internal.cache.RegionEntry; import org.apache.geode.internal.cache.execute.BucketMovedException; /** * This class implements a Partitioned index over a group of partitioned region buckets. * * @since GemFire 5.1 */ public class PartitionedIndex extends AbstractIndex { /** * Contains the reference for all the local indexed buckets. */ private Map<Region, List<Index>> bucketIndexes = Collections.synchronizedMap(new HashMap<Region, List<Index>>()); // An arbitrary bucket index from this PartiionedIndex that is used as a representative // index for the entire PartitionIndex. Usually used for scoring/sizing of an index when // selecting which index to use private volatile Index arbitraryBucketIndex; /** * Type on index represented by this partitioned index. * * @see IndexType#FUNCTIONAL * @see IndexType#PRIMARY_KEY * @see IndexType#HASH */ private IndexType type; /** * Number of remote buckets indexed when creating an index on the partitioned region instance. */ private int numRemoteBucektsIndexed; /** * String for imports if needed for index creations */ private String imports; protected Set mapIndexKeys = Collections.newSetFromMap(new ConcurrentHashMap()); // Flag indicating that the populationg of this index is in progress private volatile boolean populateInProgress; /** * Constructor for partitioned indexed. Creates the partitioned index on given a partitioned * region. An index can be created programmatically or through cache.xml during initialization. 
*/ public PartitionedIndex(InternalCache cache, IndexType iType, String indexName, Region r, String indexedExpression, String fromClause, String imports) { super(cache, indexName, r, fromClause, indexedExpression, null, fromClause, indexedExpression, null, null); this.type = iType; this.imports = imports; if (iType == IndexType.HASH) { if (!getRegion().getAttributes().getIndexMaintenanceSynchronous()) { throw new UnsupportedOperationException( "Hash index is currently not supported for regions with Asynchronous index maintenance."); } } } /** * Adds an index on a bucket to the list of already indexed buckets in the partitioned region. * * @param index bucket index to be added to the list. */ public void addToBucketIndexes(Region r, Index index) { synchronized (this.bucketIndexes) { setArbitraryBucketIndex(index); List<Index> indexes = this.bucketIndexes.get(r); if (indexes == null) { indexes = new ArrayList<Index>(); } indexes.add(index); bucketIndexes.put(r, indexes); } } public void removeFromBucketIndexes(Region r, Index index) { synchronized (this.bucketIndexes) { List<Index> indexes = this.bucketIndexes.get(r); if (indexes != null) { indexes.remove(index); if (indexes.isEmpty()) { this.bucketIndexes.remove(r); } } if (index == arbitraryBucketIndex) { setArbitraryBucketIndex(retrieveArbitraryBucketIndex()); } } } /** * Returns the number of locally indexed buckets. * * @return int number of buckets. */ public int getNumberOfIndexedBuckets() { synchronized (this.bucketIndexes) { int size = 0; for (List<Index> indexList : bucketIndexes.values()) { size += indexList.size(); } return size; } } /** * Gets a collection of all the bucket indexes created so far. * * @return bucketIndexes collection of all the bucket indexes. 
*/ public List getBucketIndexes() { synchronized (this.bucketIndexes) { List<Index> indexes = new ArrayList<>(); for (List<Index> indexList : bucketIndexes.values()) { indexes.addAll(indexList); } return indexes; } } public List<Index> getBucketIndexes(Region r) { synchronized (this.bucketIndexes) { List<Index> indexes = new ArrayList<Index>(); List<Index> indexList = bucketIndexes.get(r); if (indexList != null) { indexes.addAll(indexList); } return indexes; } } public void setArbitraryBucketIndex(Index index) { if (arbitraryBucketIndex == null) { arbitraryBucketIndex = index; } } public Index retrieveArbitraryBucketIndex() { Index index = null; synchronized (this.bucketIndexes) { if (this.bucketIndexes.size() > 0) { List<Index> indexList = this.bucketIndexes.values().iterator().next(); if (indexList != null && indexList.size() > 0) { index = indexList.get(0); } } } return index; } public Index getBucketIndex() { return arbitraryBucketIndex; } protected Map.Entry<Region, List<Index>> getFirstBucketIndex() { Map.Entry<Region, List<Index>> firstIndexEntry = null; synchronized (this.bucketIndexes) { if (this.bucketIndexes.size() > 0) { firstIndexEntry = this.bucketIndexes.entrySet().iterator().next(); } } return firstIndexEntry; } /** * Returns the type of index this partitioned index represents. * * @return indexType type of partitioned index. */ @Override public IndexType getType() { return type; } /** * Returns the index for the bucket. 
*/ public static AbstractIndex getBucketIndex(PartitionedRegion pr, String indexName, Integer bId) throws QueryInvocationTargetException { try { pr.checkReadiness(); } catch (Exception ex) { throw new QueryInvocationTargetException(ex.getMessage()); } PartitionedRegionDataStore prds = pr.getDataStore(); BucketRegion bukRegion; bukRegion = (BucketRegion) prds.getLocalBucketById(bId); if (bukRegion == null) { throw new BucketMovedException("Bucket not found for the id :" + bId); } AbstractIndex index = null; if (bukRegion.getIndexManager() != null) { index = (AbstractIndex) (bukRegion.getIndexManager().getIndex(indexName)); } else { if (pr.getCache().getLogger().fineEnabled()) { pr.getCache().getLogger().fine("Index Manager not found for the bucket region " + bukRegion.getFullPath() + " unable to fetch the index " + indexName); } throw new QueryInvocationTargetException( "Index Manager not found, " + " unable to fetch the index " + indexName); } return index; } /** * Verify if the index is available of the buckets. If not create index on the bucket. */ public void verifyAndCreateMissingIndex(List buckets) throws QueryInvocationTargetException { PartitionedRegion pr = (PartitionedRegion) this.getRegion(); PartitionedRegionDataStore prds = pr.getDataStore(); for (Object bId : buckets) { // create index BucketRegion bukRegion = (BucketRegion) prds.getLocalBucketById((Integer) bId); if (bukRegion == null) { throw new QueryInvocationTargetException("Bucket not found for the id :" + bId); } IndexManager im = IndexUtils.getIndexManager(cache, bukRegion, true); if (im != null && im.getIndex(indexName) == null) { try { if (pr.getCache().getLogger().fineEnabled()) { pr.getCache().getLogger() .fine("Verifying index presence on bucket region. 
" + " Found index " + this.indexName + " not present on the bucket region " + bukRegion.getFullPath() + ", index will be created on this region."); } ExecutionContext externalContext = new ExecutionContext(null, bukRegion.getCache()); externalContext.setBucketRegion(pr, bukRegion); im.createIndex(this.indexName, this.type, this.originalIndexedExpression, this.fromClause, this.imports, externalContext, this, true); } catch (IndexExistsException iee) { // Index exists. } catch (IndexNameConflictException ince) { // ignore. } } } } @Override protected boolean isCompactRangeIndex() { return false; } /** * Set the number of remotely indexed buckets when this partitioned index was created. * * @param remoteBucketsIndexed int representing number of remote buckets. */ public void setRemoteBucketesIndexed(int remoteBucketsIndexed) { this.numRemoteBucektsIndexed = remoteBucketsIndexed; } /** * Returns the number of remotely indexed buckets by this partitioned index. * * @return int number of remote indexed buckets. */ public int getNumRemoteBucketsIndexed() { return this.numRemoteBucektsIndexed; } /** * The Region this index is on. * * @return the Region for this index */ @Override public Region getRegion() { return super.getRegion(); } /** * Not supported on partitioned index. */ @Override void addMapping(RegionEntry entry) throws IMQException { throw new RuntimeException( "Not supported on partitioned index"); } /** * Not supported on partitioned index. */ @Override public void initializeIndex(boolean loadEntries) throws IMQException { throw new RuntimeException( "Not supported on partitioned index"); } /** * Not supported on partitioned index. 
*/ @Override void lockedQuery(Object key, int operator, Collection results, CompiledValue iterOps, RuntimeIterator indpndntItr, ExecutionContext context, List projAttrib, SelectResults intermediateResults, boolean isIntersection) { throw new RuntimeException( "Not supported on partitioned index"); } /** * Not supported on partitioned index. */ @Override void recreateIndexData() throws IMQException { throw new RuntimeException( "Not supported on partitioned index"); } /** * Not supported on partitioned index. */ @Override void removeMapping(RegionEntry entry, int opCode) { throw new RuntimeException( "Not supported on partitioned index"); } /** * Returns false, clear is not supported on partitioned index. */ @Override public boolean clear() throws QueryException { return false; } /* * Not supported on partitioned index. */ /* * public void destroy() { throw new * RuntimeException("Not supported on partitioned index". * toLocalizedString()); } */ /** * Not supported on partitioned index. */ @Override public IndexStatistics getStatistics() { return this.internalIndexStats; } /** * Returns string representing imports. */ public String getImports() { return imports; } /** * String representing the state. * * @return string representing all the relevant information. */ public String toString() { StringBuffer st = new StringBuffer(); st.append(super.toString()).append("imports : ").append(imports); return st.toString(); } @Override protected InternalIndexStatistics createStats(String indexName) { if (this.internalIndexStats == null) { this.internalIndexStats = new PartitionedIndexStatistics(this.indexName); } return this.internalIndexStats; } /** * This will create extra {@link IndexStatistics} statistics for MapType PartitionedIndex. * * @return new PartitionedIndexStatistics */ protected InternalIndexStatistics createExplicitStats(String indexName) { return new PartitionedIndexStatistics(indexName); } /** * Internal class for partitioned index statistics. 
Statistics are not supported right now. */ class PartitionedIndexStatistics extends InternalIndexStatistics { private IndexStats vsdStats; public PartitionedIndexStatistics(String indexName) { this.vsdStats = new IndexStats(getRegion().getCache().getDistributedSystem(), indexName); } /** * Return the total number of times this index has been updated */ @Override public long getNumUpdates() { return this.vsdStats.getNumUpdates(); } @Override public void incNumValues(int delta) { this.vsdStats.incNumValues(delta); } @Override public void incNumUpdates() { this.vsdStats.incNumUpdates(); } @Override public void incNumUpdates(int delta) { this.vsdStats.incNumUpdates(delta); } @Override public void updateNumKeys(long numKeys) { this.vsdStats.updateNumKeys(numKeys); } @Override public void incNumKeys(long numKeys) { this.vsdStats.incNumKeys(numKeys); } @Override public void incNumMapIndexKeys(long numKeys) { this.vsdStats.incNumMapIndexKeys(numKeys); } @Override public void incUpdateTime(long delta) { this.vsdStats.incUpdateTime(delta); } @Override public void incUpdatesInProgress(int delta) { this.vsdStats.incUpdatesInProgress(delta); } @Override public void incNumUses() { this.vsdStats.incNumUses(); } @Override public void incUseTime(long delta) { this.vsdStats.incUseTime(delta); } @Override public void incUsesInProgress(int delta) { this.vsdStats.incUsesInProgress(delta); } @Override public void incReadLockCount(int delta) { this.vsdStats.incReadLockCount(delta); } @Override public void incNumBucketIndexes(int delta) { this.vsdStats.incNumBucketIndexes(delta); } /** * Returns the number of keys in this index at the highest level */ @Override public long getNumberOfMapIndexKeys() { return this.vsdStats.getNumberOfMapIndexKeys(); } /** * Returns the total amount of time (in nanoseconds) spent updating this index. 
*/ @Override public long getTotalUpdateTime() { return this.vsdStats.getTotalUpdateTime(); } /** * Returns the total number of times this index has been accessed by a query. */ @Override public long getTotalUses() { return this.vsdStats.getTotalUses(); } /** * Returns the number of keys in this index. */ @Override public long getNumberOfKeys() { return this.vsdStats.getNumberOfKeys(); } /** * Returns the number of values in this index. */ @Override public long getNumberOfValues() { return this.vsdStats.getNumberOfValues(); } /** * Return the number of read locks taken on this index */ @Override public int getReadLockCount() { return this.vsdStats.getReadLockCount(); } @Override public int getNumberOfBucketIndexes() { return vsdStats.getNumberOfBucketIndexes(); } @Override public void close() { this.vsdStats.close(); } public String toString() { StringBuffer sb = new StringBuffer(); sb.append("No Keys = ").append(getNumberOfKeys()).append("\n"); sb.append("No Map Index Keys = ").append(getNumberOfMapIndexKeys()).append("\n"); sb.append("No Values = ").append(getNumberOfValues()).append("\n"); sb.append("No Uses = ").append(getTotalUses()).append("\n"); sb.append("No Updates = ").append(getNumUpdates()).append("\n"); sb.append("Total Update time = ").append(getTotalUpdateTime()).append("\n"); return sb.toString(); } } @Override void instantiateEvaluator(IndexCreationHelper indexCreationHelper) { throw new UnsupportedOperationException(); } @Override public ObjectType getResultSetType() { throw new UnsupportedOperationException(); } /** * Not supported on partitioned index. 
*/ @Override void lockedQuery(Object lowerBoundKey, int lowerBoundOperator, Object upperBoundKey, int upperBoundOperator, Collection results, Set keysToRemove, ExecutionContext context) throws TypeMismatchException { throw new RuntimeException( "Not supported on partitioned index"); } @Override public int getSizeEstimate(Object key, int op, int matchLevel) { throw new UnsupportedOperationException("This method should not have been invoked"); } @Override void lockedQuery(Object key, int operator, Collection results, Set keysToRemove, ExecutionContext context) throws TypeMismatchException { throw new RuntimeException("Not supported on partitioned index"); } @Override void addMapping(Object key, Object value, RegionEntry entry) throws IMQException { throw new RuntimeException( "Not supported on partitioned index"); } @Override void saveMapping(Object key, Object value, RegionEntry entry) throws IMQException { throw new RuntimeException( "Not supported on partitioned index"); } public void incNumMapKeysStats(Object mapKey) { if (internalIndexStats != null) { if (!mapIndexKeys.contains(mapKey)) { mapIndexKeys.add(mapKey); this.internalIndexStats.incNumMapIndexKeys(1); } } } public void incNumBucketIndexes() { if (internalIndexStats != null) { this.internalIndexStats.incNumBucketIndexes(1); } } @Override public boolean isEmpty() { boolean empty = true; for (Object index : getBucketIndexes()) { empty = ((AbstractIndex) index).isEmpty(); if (!empty) { return false; } } return empty; } public boolean isPopulateInProgress() { return populateInProgress; } public void setPopulateInProgress(boolean populateInProgress) { this.populateInProgress = populateInProgress; } }
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.clouddirectory.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the Cloud Directory ListPolicyAttachments operation.
 *
 * NOTE(review): this class is code-generator output (@Generated) — do not hand-edit
 * behavior; regenerate from the service model instead.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/clouddirectory-2016-05-10/ListPolicyAttachments"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListPolicyAttachmentsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * ARN associated with the <a>Directory</a> where objects reside. For more information, see <a>arns</a>.
     * </p>
     */
    private String directoryArn;
    /**
     * <p>
     * Reference that identifies the policy object.
     * </p>
     */
    private ObjectReference policyReference;
    /**
     * <p>
     * The pagination token.
     * </p>
     */
    private String nextToken;
    /**
     * <p>
     * Maximum number of items to be retrieved in a single call. This is an approximate number.
     * </p>
     */
    private Integer maxResults;
    /**
     * <p>
     * Represents the manner and timing in which the successful write or update of an object is reflected in a
     * subsequent read operation of that same object.
     * </p>
     */
    private String consistencyLevel;

    /**
     * <p>
     * ARN associated with the <a>Directory</a> where objects reside. For more information, see <a>arns</a>.
     * </p>
     *
     * @param directoryArn
     *        ARN associated with the <a>Directory</a> where objects reside. For more information, see <a>arns</a>.
     */
    public void setDirectoryArn(String directoryArn) {
        this.directoryArn = directoryArn;
    }

    /**
     * <p>
     * ARN associated with the <a>Directory</a> where objects reside. For more information, see <a>arns</a>.
     * </p>
     *
     * @return ARN associated with the <a>Directory</a> where objects reside. For more information, see <a>arns</a>.
     */
    public String getDirectoryArn() {
        return this.directoryArn;
    }

    /**
     * <p>
     * ARN associated with the <a>Directory</a> where objects reside. For more information, see <a>arns</a>.
     * </p>
     *
     * @param directoryArn
     *        ARN associated with the <a>Directory</a> where objects reside. For more information, see <a>arns</a>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListPolicyAttachmentsRequest withDirectoryArn(String directoryArn) {
        setDirectoryArn(directoryArn);
        return this;
    }

    /**
     * <p>
     * Reference that identifies the policy object.
     * </p>
     *
     * @param policyReference
     *        Reference that identifies the policy object.
     */
    public void setPolicyReference(ObjectReference policyReference) {
        this.policyReference = policyReference;
    }

    /**
     * <p>
     * Reference that identifies the policy object.
     * </p>
     *
     * @return Reference that identifies the policy object.
     */
    public ObjectReference getPolicyReference() {
        return this.policyReference;
    }

    /**
     * <p>
     * Reference that identifies the policy object.
     * </p>
     *
     * @param policyReference
     *        Reference that identifies the policy object.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListPolicyAttachmentsRequest withPolicyReference(ObjectReference policyReference) {
        setPolicyReference(policyReference);
        return this;
    }

    /**
     * <p>
     * The pagination token.
     * </p>
     *
     * @param nextToken
     *        The pagination token.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * The pagination token.
     * </p>
     *
     * @return The pagination token.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * The pagination token.
     * </p>
     *
     * @param nextToken
     *        The pagination token.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListPolicyAttachmentsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * <p>
     * Maximum number of items to be retrieved in a single call. This is an approximate number.
     * </p>
     *
     * @param maxResults
     *        Maximum number of items to be retrieved in a single call. This is an approximate number.
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * <p>
     * Maximum number of items to be retrieved in a single call. This is an approximate number.
     * </p>
     *
     * @return Maximum number of items to be retrieved in a single call. This is an approximate number.
     */
    public Integer getMaxResults() {
        return this.maxResults;
    }

    /**
     * <p>
     * Maximum number of items to be retrieved in a single call. This is an approximate number.
     * </p>
     *
     * @param maxResults
     *        Maximum number of items to be retrieved in a single call. This is an approximate number.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListPolicyAttachmentsRequest withMaxResults(Integer maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /**
     * <p>
     * Represents the manner and timing in which the successful write or update of an object is reflected in a
     * subsequent read operation of that same object.
     * </p>
     *
     * @param consistencyLevel
     *        Represents the manner and timing in which the successful write or update of an object is reflected in a
     *        subsequent read operation of that same object.
     * @see ConsistencyLevel
     */
    public void setConsistencyLevel(String consistencyLevel) {
        this.consistencyLevel = consistencyLevel;
    }

    /**
     * <p>
     * Represents the manner and timing in which the successful write or update of an object is reflected in a
     * subsequent read operation of that same object.
     * </p>
     *
     * @return Represents the manner and timing in which the successful write or update of an object is reflected in a
     *         subsequent read operation of that same object.
     * @see ConsistencyLevel
     */
    public String getConsistencyLevel() {
        return this.consistencyLevel;
    }

    /**
     * <p>
     * Represents the manner and timing in which the successful write or update of an object is reflected in a
     * subsequent read operation of that same object.
     * </p>
     *
     * @param consistencyLevel
     *        Represents the manner and timing in which the successful write or update of an object is reflected in a
     *        subsequent read operation of that same object.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ConsistencyLevel
     */
    public ListPolicyAttachmentsRequest withConsistencyLevel(String consistencyLevel) {
        setConsistencyLevel(consistencyLevel);
        return this;
    }

    /**
     * <p>
     * Represents the manner and timing in which the successful write or update of an object is reflected in a
     * subsequent read operation of that same object.
     * </p>
     *
     * NOTE(review): throws NullPointerException when passed null (calls toString() on the
     * enum); standard generated-SDK behavior — callers should pass a non-null value.
     *
     * @param consistencyLevel
     *        Represents the manner and timing in which the successful write or update of an object is reflected in a
     *        subsequent read operation of that same object.
     * @see ConsistencyLevel
     */
    public void setConsistencyLevel(ConsistencyLevel consistencyLevel) {
        this.consistencyLevel = consistencyLevel.toString();
    }

    /**
     * <p>
     * Represents the manner and timing in which the successful write or update of an object is reflected in a
     * subsequent read operation of that same object.
     * </p>
     *
     * @param consistencyLevel
     *        Represents the manner and timing in which the successful write or update of an object is reflected in a
     *        subsequent read operation of that same object.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ConsistencyLevel
     */
    public ListPolicyAttachmentsRequest withConsistencyLevel(ConsistencyLevel consistencyLevel) {
        setConsistencyLevel(consistencyLevel);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDirectoryArn() != null)
            sb.append("DirectoryArn: ").append(getDirectoryArn()).append(",");
        if (getPolicyReference() != null)
            sb.append("PolicyReference: ").append(getPolicyReference()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken()).append(",");
        if (getMaxResults() != null)
            sb.append("MaxResults: ").append(getMaxResults()).append(",");
        if (getConsistencyLevel() != null)
            sb.append("ConsistencyLevel: ").append(getConsistencyLevel());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof ListPolicyAttachmentsRequest == false)
            return false;
        ListPolicyAttachmentsRequest other = (ListPolicyAttachmentsRequest) obj;
        // Generated null-safe field comparison: XOR catches "one null, one not";
        // the second check compares values when both are non-null.
        if (other.getDirectoryArn() == null ^ this.getDirectoryArn() == null)
            return false;
        if (other.getDirectoryArn() != null && other.getDirectoryArn().equals(this.getDirectoryArn()) == false)
            return false;
        if (other.getPolicyReference() == null ^ this.getPolicyReference() == null)
            return false;
        if (other.getPolicyReference() != null && other.getPolicyReference().equals(this.getPolicyReference()) == false)
            return false;
        if (other.getNextToken() == null ^ this.getNextToken() == null)
            return false;
        if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
            return false;
        if (other.getMaxResults() == null ^ this.getMaxResults() == null)
            return false;
        if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
            return false;
        if (other.getConsistencyLevel() == null ^ this.getConsistencyLevel() == null)
            return false;
        if (other.getConsistencyLevel() != null && other.getConsistencyLevel().equals(this.getConsistencyLevel()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getDirectoryArn() == null) ? 0 : getDirectoryArn().hashCode());
        hashCode = prime * hashCode + ((getPolicyReference() == null) ? 0 : getPolicyReference().hashCode());
        hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
        hashCode = prime * hashCode + ((getConsistencyLevel() == null) ? 0 : getConsistencyLevel().hashCode());
        return hashCode;
    }

    @Override
    public ListPolicyAttachmentsRequest clone() {
        return (ListPolicyAttachmentsRequest) super.clone();
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.parquet.schema; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.StringTokenizer; import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName; import org.apache.parquet.schema.Type.Repetition; import org.apache.parquet.schema.Types.GroupBuilder; import org.apache.parquet.schema.Types.PrimitiveBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Parses a schema from a textual format similar to that described in the Dremel paper. 
*/ public class MessageTypeParser { private static final Logger LOG = LoggerFactory.getLogger(MessageTypeParser.class); private static class Tokenizer { private StringTokenizer st; private int line = 0; private StringBuffer currentLine = new StringBuffer(); public Tokenizer(String schemaString, String string) { st = new StringTokenizer(schemaString, " ,;{}()\n\t=", true); } public String nextToken() { while (st.hasMoreTokens()) { String t = st.nextToken(); if (t.equals("\n")) { ++ line; currentLine.setLength(0); } else { currentLine.append(t); } if (!isWhitespace(t)) { return t; } } throw new IllegalArgumentException("unexpected end of schema"); } private boolean isWhitespace(String t) { return t.equals(" ") || t.equals("\t") || t.equals("\n"); } public String getLocationString() { return "line " + line + ": " + currentLine.toString(); } } private MessageTypeParser() {} /** * * @param input the text representation of the schema to parse * @return the corresponding object representation */ public static MessageType parseMessageType(String input) { return parse(input); } private static MessageType parse(String schemaString) { Tokenizer st = new Tokenizer(schemaString, " ;{}()\n\t"); Types.MessageTypeBuilder builder = Types.buildMessage(); String t = st.nextToken(); check(t, "message", "start with 'message'", st); String name = st.nextToken(); addGroupTypeFields(st.nextToken(), st, builder); return builder.named(name); } private static void addGroupTypeFields(String t, Tokenizer st, Types.GroupBuilder builder) { check(t, "{", "start of message", st); while (!(t = st.nextToken()).equals("}")) { addType(t, st, builder); } } private static void addType(String t, Tokenizer st, Types.GroupBuilder builder) { Repetition repetition = asRepetition(t, st); // Read type. 
String type = st.nextToken(); if ("group".equalsIgnoreCase(type)) { addGroupType(st, repetition, builder); } else { addPrimitiveType(st, asPrimitive(type, st), repetition, builder); } } private static void addGroupType(Tokenizer st, Repetition r, GroupBuilder<?> builder) { GroupBuilder<?> childBuilder = builder.group(r); String t; String name = st.nextToken(); // Read annotation, if any. t = st.nextToken(); OriginalType originalType = null; if (t.equalsIgnoreCase("(")) { originalType = OriginalType.valueOf(st.nextToken()); childBuilder.as(originalType); check(st.nextToken(), ")", "original type ended by )", st); t = st.nextToken(); } if (t.equals("=")) { childBuilder.id(Integer.parseInt(st.nextToken())); t = st.nextToken(); } try { addGroupTypeFields(t, st, childBuilder); } catch (IllegalArgumentException e) { throw new IllegalArgumentException("problem reading type: type = group, name = " + name + ", original type = " + originalType, e); } childBuilder.named(name); } private static void addPrimitiveType(Tokenizer st, PrimitiveTypeName type, Repetition r, Types.GroupBuilder<?> builder) { PrimitiveBuilder<?> childBuilder = builder.primitive(type, r); String t; if (type == PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY) { t = st.nextToken(); // Read type length if the type is fixed_len_byte_array. if (!t.equalsIgnoreCase("(")) { throw new IllegalArgumentException("expecting (length) for field of type fixed_len_byte_array"); } childBuilder.length(Integer.parseInt(st.nextToken())); check(st.nextToken(), ")", "type length ended by )", st); } String name = st.nextToken(); // Read annotation, if any. 
t = st.nextToken(); OriginalType originalType = null; if (t.equalsIgnoreCase("(")) { t = st.nextToken(); if (isLogicalType(t)) { LogicalTypeAnnotation.LogicalTypeToken logicalType = LogicalTypeAnnotation.LogicalTypeToken.valueOf(t); t = st.nextToken(); List<String> tokens = new ArrayList<>(); if ("(".equals(t)) { while (!")".equals(t)) { if (!(",".equals(t) || "(".equals(t) || ")".equals(t))) { tokens.add(t); } t = st.nextToken(); } t = st.nextToken(); } LogicalTypeAnnotation logicalTypeAnnotation = logicalType.fromString(tokens); childBuilder.as(logicalTypeAnnotation); } else { // Try to parse as old logical type, called OriginalType originalType = OriginalType.valueOf(t); childBuilder.as(originalType); if (OriginalType.DECIMAL == originalType) { t = st.nextToken(); // parse precision and scale if (t.equalsIgnoreCase("(")) { childBuilder.precision(Integer.parseInt(st.nextToken())); t = st.nextToken(); if (t.equalsIgnoreCase(",")) { childBuilder.scale(Integer.parseInt(st.nextToken())); t = st.nextToken(); } check(t, ")", "decimal type ended by )", st); t = st.nextToken(); } } else { t = st.nextToken(); } } check(t, ")", "logical type ended by )", st); t = st.nextToken(); } if (t.equals("=")) { childBuilder.id(Integer.parseInt(st.nextToken())); t = st.nextToken(); } check(t, ";", "field ended by ';'", st); try { childBuilder.named(name); } catch (IllegalArgumentException e) { throw new IllegalArgumentException("problem reading type: type = " + type + ", name = " + name + ", original type = " + originalType, e); } } private static boolean isLogicalType(String t) { return Arrays.stream(LogicalTypeAnnotation.LogicalTypeToken.values()).anyMatch((type) -> type.name().equals(t)); } private static PrimitiveTypeName asPrimitive(String t, Tokenizer st) { try { return PrimitiveTypeName.valueOf(t.toUpperCase(Locale.ENGLISH)); } catch (IllegalArgumentException e) { throw new IllegalArgumentException("expected one of " + Arrays.toString(PrimitiveTypeName.values()) +" got " + t + 
" at " + st.getLocationString(), e); } } private static Repetition asRepetition(String t, Tokenizer st) { try { return Repetition.valueOf(t.toUpperCase(Locale.ENGLISH)); } catch (IllegalArgumentException e) { throw new IllegalArgumentException("expected one of " + Arrays.toString(Repetition.values()) +" got " + t + " at " + st.getLocationString(), e); } } private static void check(String t, String expected, String message, Tokenizer tokenizer) { if (!t.equalsIgnoreCase(expected)) { throw new IllegalArgumentException(message+ ": expected '" + expected + "' but got '" + t + "' at " + tokenizer.getLocationString()); } } }
/********************************************************************************** * $URL$ * $Id$ ********************************************************************************** * * Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.component.app.scheduler; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.text.ParseException; import java.util.Collections; import java.util.Hashtable; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeMap; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.quartz.CronTrigger; import org.quartz.Job; import org.quartz.JobDataMap; import org.quartz.JobDetail; import org.quartz.JobListener; import org.quartz.Scheduler; import org.quartz.SchedulerException; import org.quartz.SchedulerFactory; import org.quartz.Trigger; import org.quartz.TriggerListener; import org.quartz.impl.StdSchedulerFactory; import org.sakaiproject.api.app.scheduler.ConfigurableJobProperty; import 
org.sakaiproject.api.app.scheduler.ConfigurableJobPropertyValidationException;
import org.sakaiproject.api.app.scheduler.ConfigurableJobPropertyValidator;
import org.sakaiproject.api.app.scheduler.JobBeanWrapper;
import org.sakaiproject.api.app.scheduler.SchedulerManager;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.component.app.scheduler.jobs.SpringConfigurableJobBeanWrapper;
import org.sakaiproject.component.app.scheduler.jobs.SpringInitialJobSchedule;
import org.sakaiproject.component.app.scheduler.jobs.SpringJobBeanWrapper;
import org.sakaiproject.db.api.SqlService;

/**
 * Spring-managed implementation of {@link SchedulerManager} that configures and starts
 * a Quartz scheduler for Sakai: it loads quartz properties, verifies configured job
 * classes, optionally runs DDL, purges persisted jobs whose classes are gone, registers
 * global listeners and (optionally) loads preconfigured job schedules before starting.
 */
public class SchedulerManagerImpl implements SchedulerManager
{

  private static final Log LOG = LogFactory.getLog(SchedulerManagerImpl.class);

  // Server configuration property controlling whether preconfigured jobs are loaded.
  public final static String SCHEDULER_LOADJOBS = "scheduler.loadjobs";

  private DataSource dataSource;
  private String serverId;
  // Fully-qualified class names of candidate quartz jobs, injected by Spring.
  private Set<String> qrtzJobs;
  private Map<String, String> qrtzQualifiedJobs = new TreeMap<String, String>(); // map for SelectItems
  /** The properties file from the classpath */
  private String qrtzPropFile;
  /** The properties file from sakai.home */
  private String qrtzPropFileSakai;
  private Properties qrtzProperties;
  private TriggerListener globalTriggerListener;
  private Boolean autoDdl;
  private boolean startScheduler = true;
  private Map<String, JobBeanWrapper> beanJobs = new Hashtable<String, JobBeanWrapper>();

  private static final String JOB_INTERFACE = "org.quartz.Job";
  private static final String STATEFULJOB_INTERFACE = "org.quartz.StatefulJob";

  // Service dependencies
  private ServerConfigurationService serverConfigurationService;

  private SchedulerFactory schedFactory;
  private Scheduler scheduler;
  private SqlService sqlService;
  private LinkedList<TriggerListener> globalTriggerListeners = new LinkedList<TriggerListener>();
  private LinkedList<JobListener> globalJobListeners = new LinkedList<JobListener>();

  private LinkedList<SpringInitialJobSchedule> initialJobSchedule = null;

  /**
   * Initializes and (optionally) starts the Quartz scheduler.
   * The sequence is order-dependent: properties, job-class validation, DDL,
   * initial-startup detection, scheduler creation, orphaned-job purge, listener
   * registration, preconfigured-job loading, then start.
   *
   * Any failure is rethrown as IllegalStateException so the component fails fast.
   */
  public void init()
  {
    try
    {
      qrtzProperties = initQuartzConfiguration();

      qrtzProperties.setProperty("org.quartz.scheduler.instanceId", serverId);

      // note: becuase job classes are jarred , it is impossible to iterate
      // through a directory by calling listFiles on a file object.
      // Therefore, we need the class list list from spring.

      // find quartz jobs from specified 'qrtzJobs' and verify they
      // that these jobs implement the Job interface
      Iterator<String> qrtzJobsIterator = qrtzJobs.iterator();
      while (qrtzJobsIterator.hasNext())
      {
        String className = (String) qrtzJobsIterator.next();
        Class cl = null;
        try
        {
          cl = Class.forName(className);
        }
        catch (ClassNotFoundException e)
        {
          LOG.warn("Could not locate class: " + className + " on classpath");
        }
        if (cl != null)
        {
          // check that each class implements the Job interface
          if (doesImplementJobInterface(cl))
          {
            qrtzQualifiedJobs.put(cl.getName(), cl.getName());
          }
          else
          {
            LOG.warn("Class: " + className
                + " does not implement quartz Job interface");
          }
        }
      }

      // run ddl
      if (autoDdl.booleanValue()){
        try
        {
           sqlService.ddl(this.getClass().getClassLoader(), "quartz");
        }
        catch (Throwable t)
        {
          // DDL failure is logged but not fatal; the tables may already exist.
          LOG.warn(this + ".init(): ", t);
        }
      }

      boolean isInitialStartup = isInitialStartup(sqlService);
      if (isInitialStartup && autoDdl.booleanValue())
      {
         LOG.info("Performing initial population of the Quartz tables.");
         sqlService.ddl(this.getClass().getClassLoader(), "init_locks");
      }
      /*
         Determine whether or not to load the jobs defined in the initialJobSchedules list.
         These jobs will be loaded under the following conditions:
            1) the server configuration property "scheduler.loadjobs" is "true"
            2) "scheduler.loadjobs" is "init" and this is the first startup for the scheduler (eg. this is a new Sakai instance)
         "scheduler.loadjobs" is set to "init" by default
       */
      String
        loadJobs = serverConfigurationService.getString(SCHEDULER_LOADJOBS, "init").trim();

      List<SpringInitialJobSchedule>
        initSchedules = getInitialJobSchedules();

      boolean
        loadInitSchedules = (initSchedules != null) && (initSchedules.size() > 0) &&
                              (("init".equalsIgnoreCase(loadJobs) && isInitialStartup) ||
                               "true".equalsIgnoreCase(loadJobs));

      if (loadInitSchedules)
        LOG.debug ("Preconfigured jobs will be loaded");
      else
        LOG.debug ("Preconfigured jobs will not be loaded");


      // start scheduler and load jobs
      schedFactory = new StdSchedulerFactory(qrtzProperties);
      scheduler = schedFactory.getScheduler();

      // loop through persisted jobs removing both the job and associated
      // triggers for jobs where the associated job class is not found
      String[] arrJobs = scheduler.getJobNames(Scheduler.DEFAULT_GROUP);
      for (int i = 0; i < arrJobs.length; i++)
      {
        try
        {
          JobDetail detail = scheduler.getJobDetail(arrJobs[i],
              Scheduler.DEFAULT_GROUP);
          String bean = detail.getJobDataMap().getString(JobBeanWrapper.SPRING_BEAN_NAME);
          Job job = (Job) ComponentManager.get(bean);
          if (job == null)
          {
            LOG.warn("scheduler cannot load class for persistent job:" + arrJobs[i]);
            scheduler.deleteJob(arrJobs[i], Scheduler.DEFAULT_GROUP);
            LOG.warn("deleted persistent job:" + arrJobs[i]);
          }
        }
        catch (SchedulerException e)
        {
          // job detail could not be read at all; delete it and its triggers too
          LOG.warn("scheduler cannot load class for persistent job:" + arrJobs[i]);
          scheduler.deleteJob(arrJobs[i], Scheduler.DEFAULT_GROUP);
          LOG.warn("deleted persistent job:" + arrJobs[i]);
        }
      }

      for (TriggerListener tListener : globalTriggerListeners)
      {
        scheduler.addGlobalTriggerListener(tListener);
      }

      for (JobListener jListener : globalJobListeners)
      {
        scheduler.addGlobalJobListener(jListener);
      }

      if (loadInitSchedules)
      {
        LOG.debug ("Loading preconfigured jobs");
        loadInitialSchedules();
      }

      //scheduler.addGlobalTriggerListener(globalTriggerListener);
      if (isStartScheduler())
      {
        scheduler.start();
      }
      else
      {
        LOG.info("Scheduler Not Started, startScheduler=false");
      }
    }
    catch (Exception e)
    {
      LOG.error("Failed to start scheduler.", e);
      throw new IllegalStateException("Scheduler cannot start!", e);
    }
  }

  /**
   * This loads the configurations for quartz.
   * It loads the defaults from the classpath and then loads override values from
   * sakai.home.
   * @return The quartz properties.
   * @throws IOException When we can't load the default values.
   */
  private Properties initQuartzConfiguration() throws IOException
  {
    InputStream propertiesInputStream = null;
    Properties properties = new Properties();
    // load the default quartz properties file
    // if this fails we want to propogate the error to stop startup.
    try
    {
      propertiesInputStream = this.getClass().getResourceAsStream(qrtzPropFile);
      properties.load(propertiesInputStream);
    }
    finally
    {
      if (propertiesInputStream != null)
      {
        try
        {
          propertiesInputStream.close();
        }
        catch (IOException e)
        {
          LOG.debug("Failed to close stream.", e);
        }
      }
    }

    // load the configuration out of sakai home
    // any failures here shouldn't result in startup failing.
    File file = new File(serverConfigurationService.getSakaiHomePath(), qrtzPropFileSakai);
    if (file.exists() && file.isFile())
    {
      try
      {
        propertiesInputStream = new FileInputStream(file);
        properties.load(propertiesInputStream);
        LOG.info("Loaded extra configuration from: "+ file.getAbsolutePath());
      }
      catch (IOException e)
      {
        LOG.warn("Failed to load file: "+ file, e);
      }
      finally
      {
        if (propertiesInputStream != null)
        {
          try
          {
            propertiesInputStream.close();
          }
          catch (IOException e)
          {
            LOG.debug("Failed to close stream.", e);
          }
        }
      }
    }
    return properties;
  }

  // Returns true when cl (or one of its direct interfaces) is quartz Job or StatefulJob.
  // NOTE(review): only direct interfaces are checked, not inherited ones — confirm intended.
  private boolean doesImplementJobInterface(Class cl)
  {
    Class[] classArr = cl.getInterfaces();
    for (int i = 0; i < classArr.length; i++)
    {
      if (classArr[i].getName().equals(JOB_INTERFACE)
          || classArr[i].getName().equals(STATEFULJOB_INTERFACE))
      {
        return true;
      }
    }
    return false;
  }

  /**
   * Runs an SQL select statement to determine if the Quartz lock rows exist in the database. If the rows do not exist
   * this method assumes this is the first time the scheduler has been started. The select statement will be defined
   * in the {vendor}/checkTables.sql file within the shared library deployed by this project. The statement should be
   * of the form "SELECT COUNT(*) from QUARTZ_LOCKS;". If the count is zero it is assumed this is a new install.
   * If the count is non-zero it is assumed the QUARTZ_LOCKS table has been initialized and this is not a new install.
   *
   * @param sqlService the service used to run the vendor-specific check statement
   * @return true if this appears to be a new installation, false otherwise (or on any read failure)
   */
  private boolean isInitialStartup(SqlService sqlService)
  {
    String checkTablesScript = sqlService.getVendor() + "/checkTables.sql";
    ClassLoader loader = this.getClass().getClassLoader();
    String chkStmt = null;
    InputStream in = null;
    BufferedReader r = null;
    try
    {
      // find the resource from the loader
      in = loader.getResourceAsStream(checkTablesScript);

      r = new BufferedReader(new InputStreamReader(in));

      // only the first line of the script is executed
      chkStmt = r.readLine();
    }
    catch (Exception e)
    {
      LOG.error("Could not read the file " + checkTablesScript + " to determine if this is a new installation. Preconfigured jobs will only be loaded if the server property scheduler.loadjobs is \"true\"", e);
      return false;
    }
    finally
    {
      // best-effort close; r/in may be null if the resource was missing
      try
      {
        r.close();
      }
      catch (Exception e){}
      try
      {
        in.close();
      }
      catch (Exception e){}
    }

    List<String> l = sqlService.dbRead(chkStmt);
    if (l != null && l.size() > 0)
    {
      // a count of "0" means the QUARTZ_LOCKS table has never been populated
      return (l.get(0).equalsIgnoreCase("0"));
    }
    else
    {
      return false;
    }
  }

  /**
   * Loads jobs and schedules triggers for preconfigured jobs.
   * Jobs with missing or invalid required configuration are skipped entirely.
   */
  private void loadInitialSchedules()
  {
    for (SpringInitialJobSchedule sched : getInitialJobSchedules())
    {
      SpringJobBeanWrapper wrapper = sched.getJobBeanWrapper();

      LOG.debug ("Loading schedule for preconfigured job \"" + wrapper.getJobType() + "\"");

      JobDetail jd = new JobDetail (sched.getJobName(), Scheduler.DEFAULT_GROUP, wrapper.getJobClass(), false, true, true);

      JobDataMap map = jd.getJobDataMap();

      map.put(JobBeanWrapper.SPRING_BEAN_NAME, wrapper.getBeanId());
      map.put(JobBeanWrapper.JOB_TYPE, wrapper.getJobType());

      if (SpringConfigurableJobBeanWrapper.class.isAssignableFrom(wrapper.getClass()))
      {
        // configurable jobs carry extra properties that must be present and valid
        SpringConfigurableJobBeanWrapper
          confJob = (SpringConfigurableJobBeanWrapper) wrapper;
        ConfigurableJobPropertyValidator
          validator = confJob.getConfigurableJobPropertyValidator();
        Map<String, String>
          conf = sched.getConfiguration();
        boolean fail = false;

        for (ConfigurableJobProperty cProp : confJob.getConfigurableJobProperties())
        {
          String key = cProp.getLabelResourceKey(),
                 val = conf.get(key);

          LOG.debug ("job property '" + key + "' is set to '" + val + "'");

          if (val == null && cProp.isRequired())
          {
            // fall back to the declared default before failing the job
            val = cProp.getDefaultValue();

            if (val == null)
            {
              LOG.error ("job property '" + key + "' is required but has no value; job '" + sched.getJobName()
                         + "' of type '" + wrapper.getJobClass() + "' will not be configured");

              fail = true;
              break;
            }
            LOG.debug ("job property '" + key + "' set to default value '" + val + "'");
          }

          if (val != null)
          {
            try
            {
              validator.assertValid(key, val);
            }
            catch (ConfigurableJobPropertyValidationException cjpve)
            {
              LOG.error ("job property '" + key + "' was set to an invalid value '" + val + "'; job '" + sched.getJobName()
                         + "' of type '" + wrapper.getJobClass() + "' will not be configured");

              fail = true;
              break;
            }
            map.put (key, val);
          }
        }

        if (fail) continue;
      }

      try
      {
        // replace = false: never overwrite a job that already exists in the store
        scheduler.addJob(jd, false);
      }
      catch (SchedulerException e)
      {
        LOG.error ("Failed to schedule job '" + sched.getJobName() + "' of type '" + wrapper.getJobClass() + "'");
        continue;
      }

      Trigger trigger = null;
      try
      {
        trigger = new CronTrigger(sched.getTriggerName(), Scheduler.DEFAULT_GROUP,
                                  jd.getName(), Scheduler.DEFAULT_GROUP,
                                  sched.getCronExpression());
      }
      catch (ParseException e)
      {
        LOG.error ("Error parsing cron exception. Failed to schedule job '" + sched.getJobName() + "' of type '" + wrapper.getJobClass() + "'");
      }

      try
      {
        scheduler.scheduleJob(trigger);
      }
      catch (SchedulerException e)
      {
        LOG.error ("Trigger could not be scheduled. Failed to schedule job '" + sched.getJobName() + "' of type '" + wrapper.getJobClass() + "'");
      }
    }
  }

  /**
   * Shuts the scheduler down if it is running.
   * @see org.sakaiproject.api.app.scheduler.SchedulerManager#destroy()
   */
  public void destroy()
  {
    try{
      if (!scheduler.isShutdown()){
        scheduler.shutdown();
      }
    }
    catch (Throwable t){
      LOG.error("An error occurred while stopping the scheduler", t);
    }
  }

  /**
   * @return the preconfigured job schedules, or null if none were injected
   */
  public List<SpringInitialJobSchedule> getInitialJobSchedules()
  {
    return initialJobSchedule;
  }

  /**
   * @param jobSchedule the preconfigured job schedules; null or empty lists are ignored
   */
  public void setInitialJobSchedules(List<SpringInitialJobSchedule> jobSchedule)
  {
    if(jobSchedule == null || jobSchedule.size() < 1)
      return;

    this.initialJobSchedule = new LinkedList<SpringInitialJobSchedule> ();

    initialJobSchedule.addAll(jobSchedule);
  }

  /**
   * @deprecated use {@link #getGlobalTriggerListeners()}
   * @return Returns the globalTriggerListener.
   */
  public TriggerListener getGlobalTriggerListener()
  {
    return globalTriggerListener;
  }

  /**
   * @deprecated use {@link #setGlobalTriggerListeners(List)}
   * @param globalTriggerListener The globalTriggerListener to set.
   */
  public void setGlobalTriggerListener(TriggerListener globalTriggerListener)
  {
    this.globalTriggerListener = globalTriggerListener;

    // the legacy single listener is kept at the head of the listener list
    if (globalTriggerListeners != null)
    {
      globalTriggerListeners.addFirst(globalTriggerListener);
    }
  }

  /**
   * Replaces all global trigger listeners, keeping the legacy single listener (if set) first.
   */
  public void setGlobalTriggerListeners (final List<TriggerListener> listeners)
  {
    globalTriggerListeners.clear();

    if (globalTriggerListener != null)
    {
      globalTriggerListeners.add(globalTriggerListener);
    }

    if (listeners != null)
    {
      globalTriggerListeners.addAll(listeners);
    }
  }

  /**
   * @return an unmodifiable view of the global trigger listeners
   */
  public List<TriggerListener> getGlobalTriggerListeners()
  {
    return Collections.unmodifiableList(globalTriggerListeners);
  }

  /**
   * Replaces all global job listeners.
   */
  public void setGlobalJobListeners (final List<JobListener> listeners)
  {
    globalJobListeners.clear();

    if (listeners != null)
    {
      globalJobListeners.addAll(listeners);
    }
  }

  /**
   * @return an unmodifiable view of the global job listeners
   */
  public List<JobListener> getGlobalJobListeners()
  {
    return Collections.unmodifiableList(globalJobListeners);
  }

  /**
   * @return Returns the serverId.
   */
  public String getServerId()
  {
    return serverId;
  }

  /**
   * @param serverId The serverId to set.
   */
  public void setServerId(String serverId)
  {
    this.serverId = serverId;
  }

  /**
   * @return Returns the dataSource.
   */
  public DataSource getDataSource()
  {
    return dataSource;
  }

  /**
   * @param dataSource The dataSource to set.
   */
  public void setDataSource(DataSource dataSource)
  {
    this.dataSource = dataSource;
  }

  /**
   * @param sqlService the SqlService used for DDL and startup checks
   */
  public void setSqlService(SqlService sqlService)
  {
    this.sqlService = sqlService;
  }

  /**
   * @return Returns the qrtzQualifiedJobs.
   */
  public Map<String, String> getQrtzQualifiedJobs()
  {
    return qrtzQualifiedJobs;
  }

  /**
   * @param qrtzQualifiedJobs The qrtzQualifiedJobs to set.
   */
  public void setQrtzQualifiedJobs(Map<String, String> qrtzQualifiedJobs)
  {
    this.qrtzQualifiedJobs = qrtzQualifiedJobs;
  }

  /**
   * @return Returns the qrtzJobs.
   */
  public Set<String> getQrtzJobs()
  {
    return qrtzJobs;
  }

  /**
   * @param qrtzJobs The qrtzJobs to set.
   */
  public void setQrtzJobs(Set<String> qrtzJobs)
  {
    this.qrtzJobs = qrtzJobs;
  }

  /**
   * @return Returns the qrtzPropFile.
   */
  public String getQrtzPropFile()
  {
    return qrtzPropFile;
  }

  /**
   * @param qrtzPropFile The qrtzPropFile to set.
   */
  public void setQrtzPropFile(String qrtzPropFile)
  {
    this.qrtzPropFile = qrtzPropFile;
  }

  /**
   * @param qrtzPropFileSakai the sakai.home override properties file name
   */
  public void setQrtzPropFileSakai(String qrtzPropFileSakai)
  {
    this.qrtzPropFileSakai = qrtzPropFileSakai;
  }

  /**
   * @return Returns the scheduler.
   */
  public Scheduler getScheduler()
  {
    return scheduler;
  }

  /**
   * @param scheduler The sched to set.
   */
  public void setScheduler(Scheduler scheduler)
  {
    this.scheduler = scheduler;
  }

  /**
   * @param serverConfigurationService The ServerConfigurationService to get our configuation from.
   */
  public void setServerConfigurationService(ServerConfigurationService serverConfigurationService)
  {
    this.serverConfigurationService = serverConfigurationService;
  }

  /**
   * @see org.sakaiproject.api.app.scheduler.SchedulerManager#setAutoDdl(java.lang.Boolean)
   */
  public void setAutoDdl(Boolean b)
  {
    autoDdl = b;
  }

  /**
   * @return the registered job bean wrappers, keyed by job name
   */
  public Map<String, JobBeanWrapper> getBeanJobs()
  {
    return beanJobs;
  }

  /**
   * Registers a Spring-bean-backed job under the given name.
   */
  public void registerBeanJob(String jobName, JobBeanWrapper job)
  {
    getBeanJobs().put(jobName, job);
  }

  /**
   * @return the job bean wrapper registered under the given id, or null
   */
  public JobBeanWrapper getJobBeanWrapper(String beanWrapperId)
  {
    return (JobBeanWrapper) getBeanJobs().get(beanWrapperId);
  }

  /**
   * @return whether init() will actually start the scheduler after configuring it
   */
  public boolean isStartScheduler()
  {
    return startScheduler;
  }

  /**
   * @param startScheduler whether init() should start the scheduler after configuring it
   */
  public void setStartScheduler(boolean startScheduler)
  {
    this.startScheduler = startScheduler;
  }
}
/* * Copyright (c) 2018, Psikoi <https://github.com/Psikoi> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package net.runelite.client.plugins.worldhopper;

import com.google.common.collect.Ordering;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import lombok.AccessLevel;
import lombok.Setter;
import net.runelite.client.ui.ColorScheme;
import net.runelite.client.ui.DynamicGridLayout;
import net.runelite.client.ui.PluginPanel;
import net.runelite.http.api.worlds.World;
import net.runelite.http.api.worlds.WorldType;

/**
 * Plugin side panel listing game worlds in a sortable table (world, player count,
 * activity, ping). Rows can be filtered by subscription/region, favorites are pinned
 * to the top, and clicking a row hops to that world via {@link WorldHopperPlugin}.
 */
class WorldSwitcherPanel extends PluginPanel
{
	private static final Color ODD_ROW = new Color(44, 44, 44);

	private static final int WORLD_COLUMN_WIDTH = 60;
	private static final int PLAYERS_COLUMN_WIDTH = 40;
	private static final int PING_COLUMN_WIDTH = 47;

	private final JPanel listContainer = new JPanel();

	private WorldTableHeader worldHeader;
	private WorldTableHeader playersHeader;
	private WorldTableHeader activityHeader;
	private WorldTableHeader pingHeader;

	// Current sort column and direction; both drive updateList().
	private WorldOrder orderIndex = WorldOrder.WORLD;
	private boolean ascendingOrder = true;

	private final ArrayList<WorldTableRow> rows = new ArrayList<>();
	private final WorldHopperPlugin plugin;

	@Setter(AccessLevel.PACKAGE)
	private SubscriptionFilterMode subscriptionFilterMode;
	@Setter(AccessLevel.PACKAGE)
	private Set<RegionFilterMode> regionFilterMode;

	WorldSwitcherPanel(WorldHopperPlugin plugin)
	{
		this.plugin = plugin;

		setBorder(null);
		setLayout(new DynamicGridLayout(0, 1));

		JPanel headerContainer = buildHeader();

		listContainer.setLayout(new GridLayout(0, 1));

		add(headerContainer);
		add(listContainer);
	}

	/**
	 * Moves the "current world" highlight from lastWorld's row to newWorld's row.
	 */
	void switchCurrentHighlight(int newWorld, int lastWorld)
	{
		for (WorldTableRow row : rows)
		{
			if (row.getWorld().getId() == newWorld)
			{
				row.recolour(true);
			}
			else if (row.getWorld().getId() == lastWorld)
			{
				row.recolour(false);
			}
		}
	}

	/**
	 * Updates each row's player count from a map of world id to player count.
	 */
	void updateListData(Map<Integer, Integer> worldData)
	{
		for (WorldTableRow worldTableRow : rows)
		{
			World world = worldTableRow.getWorld();
			Integer playerCount = worldData.get(world.getId());
			if (playerCount != null)
			{
				worldTableRow.updatePlayerCount(playerCount);
			}
		}

		// If the list is being ordered by player count, then it has to be re-painted
		// to properly display the new data
		if (orderIndex == WorldOrder.PLAYERS)
		{
			updateList();
		}
	}

	/**
	 * Updates the ping shown for a single world, re-sorting if sorted by ping.
	 */
	void updatePing(int world, int ping)
	{
		for (WorldTableRow worldTableRow : rows)
		{
			if (worldTableRow.getWorld().getId() == world)
			{
				worldTableRow.setPing(ping);

				// If the panel is sorted by ping, re-sort it
				if (orderIndex == WorldOrder.PING)
				{
					updateList();
				}

				break;
			}
		}
	}

	// Hides the ping value on every row.
	void hidePing()
	{
		for (WorldTableRow worldTableRow : rows)
		{
			worldTableRow.hidePing();
		}
	}

	// Shows the ping value on every row.
	void showPing()
	{
		for (WorldTableRow worldTableRow : rows)
		{
			worldTableRow.showPing();
		}
	}

	/**
	 * Re-sorts the rows by the active column, then stable-sorts favorites to the
	 * top, and rebuilds the list container with alternating row colors.
	 */
	void updateList()
	{
		rows.sort((r1, r2) ->
		{
			switch (orderIndex)
			{
				case PING:
					// Leave worlds with unknown ping at the bottom
					return getCompareValue(r1, r2, row ->
					{
						int ping = row.getPing();
						return ping > 0 ? ping : null;
					});
				case WORLD:
					return getCompareValue(r1, r2, row -> row.getWorld().getId());
				case PLAYERS:
					return getCompareValue(r1, r2, WorldTableRow::getUpdatedPlayerCount);
				case ACTIVITY:
					// Leave empty activity worlds on the bottom of the list
					return getCompareValue(r1, r2, row ->
					{
						String activity = row.getWorld().getActivity();
						return !activity.equals("-") ? activity : null;
					});
				default:
					return 0;
			}
		});

		// second (stable) pass: favorites always sort before non-favorites
		rows.sort((r1, r2) ->
		{
			boolean b1 = plugin.isFavorite(r1.getWorld());
			boolean b2 = plugin.isFavorite(r2.getWorld());
			return Boolean.compare(b2, b1);
		});

		listContainer.removeAll();

		for (int i = 0; i < rows.size(); i++)
		{
			WorldTableRow row = rows.get(i);
			row.setBackground(i % 2 == 0 ? ODD_ROW : ColorScheme.DARK_GRAY_COLOR);
			listContainer.add(row);
		}

		listContainer.revalidate();
		listContainer.repaint();
	}

	/**
	 * Compares two rows by the key extracted with compareByFn, honoring the current
	 * sort direction; null keys always sort last regardless of direction.
	 */
	private int getCompareValue(WorldTableRow row1, WorldTableRow row2, Function<WorldTableRow, Comparable> compareByFn)
	{
		Ordering<Comparable> ordering = Ordering.natural();

		if (!ascendingOrder)
		{
			ordering = ordering.reverse();
		}

		ordering = ordering.nullsLast();

		return ordering.compare(compareByFn.apply(row1), compareByFn.apply(row2));
	}

	/**
	 * Updates the favorite context-menu state on the row for the given world.
	 */
	void updateFavoriteMenu(int world, boolean favorite)
	{
		for (WorldTableRow row : rows)
		{
			if (row.getWorld().getId() == world)
			{
				row.setFavoriteMenu(favorite);
			}
		}
	}

	/**
	 * Rebuilds all rows from the given world list, applying the subscription and
	 * region filters, then re-sorts and repaints via updateList().
	 */
	void populate(List<World> worlds)
	{
		rows.clear();

		for (int i = 0; i < worlds.size(); i++)
		{
			World world = worlds.get(i);

			switch (subscriptionFilterMode)
			{
				case FREE:
					if (world.getTypes().contains(WorldType.MEMBERS))
					{
						continue;
					}
					break;
				case MEMBERS:
					if (!world.getTypes().contains(WorldType.MEMBERS))
					{
						continue;
					}
					break;
			}

			if (!regionFilterMode.isEmpty() && !regionFilterMode.contains(RegionFilterMode.of(world.getRegion())))
			{
				continue;
			}

			rows.add(buildRow(world, i % 2 == 0, world.getId() == plugin.getCurrentWorld() && plugin.getLastWorld() != 0, plugin.isFavorite(world)));
		}

		updateList();
	}

	/**
	 * Highlights the header for the chosen sort column and re-sorts the list.
	 */
	private void orderBy(WorldOrder order)
	{
		pingHeader.highlight(false, ascendingOrder);
		worldHeader.highlight(false, ascendingOrder);
		playersHeader.highlight(false, ascendingOrder);
		activityHeader.highlight(false, ascendingOrder);

		switch (order)
		{
			case PING:
				pingHeader.highlight(true, ascendingOrder);
				break;
			case WORLD:
				worldHeader.highlight(true, ascendingOrder);
				break;
			case PLAYERS:
				playersHeader.highlight(true, ascendingOrder);
				break;
			case ACTIVITY:
				activityHeader.highlight(true, ascendingOrder);
				break;
		}

		orderIndex = order;
		updateList();
	}

	/**
	 * Builds the entire table header. Left-clicking a column header toggles the
	 * sort direction when it is already the active column, otherwise sorts
	 * ascending by that column; right-clicks are ignored.
	 */
	private JPanel buildHeader()
	{
		JPanel header = new JPanel(new BorderLayout());
		JPanel leftSide = new JPanel(new BorderLayout());
		JPanel rightSide = new JPanel(new BorderLayout());

		pingHeader = new WorldTableHeader("Ping", orderIndex == WorldOrder.PING, ascendingOrder, plugin::refresh);
		pingHeader.setPreferredSize(new Dimension(PING_COLUMN_WIDTH, 0));
		pingHeader.addMouseListener(new MouseAdapter()
		{
			@Override
			public void mousePressed(MouseEvent mouseEvent)
			{
				if (SwingUtilities.isRightMouseButton(mouseEvent))
				{
					return;
				}
				ascendingOrder = orderIndex != WorldOrder.PING || !ascendingOrder;
				orderBy(WorldOrder.PING);
			}
		});

		worldHeader = new WorldTableHeader("World", orderIndex == WorldOrder.WORLD, ascendingOrder, plugin::refresh);
		worldHeader.setPreferredSize(new Dimension(WORLD_COLUMN_WIDTH, 0));
		worldHeader.addMouseListener(new MouseAdapter()
		{
			@Override
			public void mousePressed(MouseEvent mouseEvent)
			{
				if (SwingUtilities.isRightMouseButton(mouseEvent))
				{
					return;
				}
				ascendingOrder = orderIndex != WorldOrder.WORLD || !ascendingOrder;
				orderBy(WorldOrder.WORLD);
			}
		});

		playersHeader = new WorldTableHeader("#", orderIndex == WorldOrder.PLAYERS, ascendingOrder, plugin::refresh);
		playersHeader.setPreferredSize(new Dimension(PLAYERS_COLUMN_WIDTH, 0));
		playersHeader.addMouseListener(new MouseAdapter()
		{
			@Override
			public void mousePressed(MouseEvent mouseEvent)
			{
				if (SwingUtilities.isRightMouseButton(mouseEvent))
				{
					return;
				}
				ascendingOrder = orderIndex != WorldOrder.PLAYERS || !ascendingOrder;
				orderBy(WorldOrder.PLAYERS);
			}
		});

		activityHeader = new WorldTableHeader("Activity", orderIndex == WorldOrder.ACTIVITY, ascendingOrder, plugin::refresh);
		activityHeader.addMouseListener(new MouseAdapter()
		{
			@Override
			public void mousePressed(MouseEvent mouseEvent)
			{
				if (SwingUtilities.isRightMouseButton(mouseEvent))
				{
					return;
				}
				ascendingOrder = orderIndex != WorldOrder.ACTIVITY || !ascendingOrder;
				orderBy(WorldOrder.ACTIVITY);
			}
		});

		leftSide.add(worldHeader, BorderLayout.WEST);
		leftSide.add(playersHeader, BorderLayout.CENTER);
		rightSide.add(activityHeader, BorderLayout.CENTER);
		rightSide.add(pingHeader, BorderLayout.EAST);

		header.add(leftSide, BorderLayout.WEST);
		header.add(rightSide, BorderLayout.CENTER);

		return header;
	}

	/**
	 * Builds a table row, that displays the world's information.
	 */
	private WorldTableRow buildRow(World world, boolean stripe, boolean current, boolean favorite)
	{
		WorldTableRow row = new WorldTableRow(
			world, current, favorite,
			plugin.getStoredPing(world),
			plugin::hopTo,
			(world12, add) ->
			{
				if (add)
				{
					plugin.addToFavorites(world12);
				}
				else
				{
					plugin.removeFromFavorites(world12);
				}

				updateList();
			}
		);
		row.setBackground(stripe ? ODD_ROW : ColorScheme.DARK_GRAY_COLOR);
		return row;
	}

	/**
	 * Enumerates the multiple ordering options for the world list.
	 */
	private enum WorldOrder
	{
		WORLD,
		PLAYERS,
		ACTIVITY,
		PING
	}
}
package tracker.bencode; import play.exceptions.UnexpectedException; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.util.List; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; public class Encoder { private Charset charset = Charset.forName("UTF-8"); private Charset byteCharset = Charset.forName("ISO-8859-1"); public ByteArrayOutputStream encode(Float value) throws IOException { return this.encode(String.valueOf(value)); } public ByteArrayOutputStream encode(String value) throws IOException { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); ByteBuffer byteBuffer = this.getCharset().encode(value); this.write(outputStream, String.valueOf(byteBuffer.limit())); this.write(outputStream, ':'); this.write(outputStream, byteBuffer); return outputStream; } public ByteArrayOutputStream encode(Long value) throws IOException { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); this.write(outputStream, 'i'); this.write(outputStream, value); this.write(outputStream, 'e'); return outputStream; } public ByteArrayOutputStream encode(Integer value) throws IOException { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); this.write(outputStream, 'i'); this.write(outputStream, value); this.write(outputStream, 'e'); return outputStream; } public ByteArrayOutputStream encode(ByteBuffer value) throws IOException { return this.encode(value.array()); } public ByteArrayOutputStream encode(byte[] value) throws IOException { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); this.write(outputStream, String.valueOf(value.length)); this.write(outputStream, ':'); this.write(outputStream, value); return outputStream; } public ByteArrayOutputStream encode(List value) throws IOException { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); this.write(outputStream, 'l'); for 
(Object listValue : value) { this.encode(listValue.getClass().cast(listValue)).writeTo(outputStream); } this.write(outputStream, 'e'); return outputStream; } public ByteArrayOutputStream encode(Map<String, Object> value) throws IOException { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); this.write(outputStream, 'd'); SortedMap tree; if (value instanceof TreeMap) { tree = (TreeMap) value; } else { tree = new TreeMap<String, Object>(value); // do map sorting here } for (Object entryObject : tree.entrySet()) { Map.Entry entry = (Map.Entry)entryObject; Object entryKey = entry.getKey(); Object entryValue = entry.getValue(); if (entryValue == null) { continue; } this.encode(entryKey.getClass().cast(entryKey)).writeTo(outputStream); this.encode(entryValue.getClass().cast(entryValue)).writeTo(outputStream); } this.write(outputStream, 'e'); return outputStream; } public ByteArrayOutputStream encode(Object value) throws IOException { if (value instanceof Float) { return this.encode((Float)value); } if (value instanceof String) { return this.encode((String)value); } if (value instanceof Long) { return this.encode((Long)value); } if (value instanceof Integer) { return this.encode((Integer)value); } if (value instanceof ByteBuffer) { return this.encode((ByteBuffer)value); } if (value instanceof ByteArrayOutputStream) { return this.encode(((ByteArrayOutputStream)value).toByteArray()); } if (value instanceof byte[]) { return this.encode((byte[])value); } if (value instanceof List) { return this.encode((List)value); } if (value instanceof Map) { @SuppressWarnings("unchecked") Map<String, Object> mapValue = (TreeMap<String, Object>)value; return this.encode(mapValue); } throw new UnexpectedException(value.getClass().getName() + " class encoding not implemented."); } protected void write(OutputStream outputStream, Long value) throws IOException { this.write(outputStream, this.getCharset().encode(value.toString())); } protected void write(OutputStream 
outputStream, Integer value) throws IOException { this.write(outputStream, this.getCharset().encode(value.toString())); } protected void write(OutputStream outputStream, String value) throws IOException { this.write(outputStream, this.getCharset().encode(value)); } protected void write(OutputStream outputStream, Character value) throws IOException { outputStream.write(value); } protected void write(OutputStream outputStream, ByteBuffer byteBuffer) throws IOException { outputStream.write(byteBuffer.array(), 0, byteBuffer.limit()); } protected void write(OutputStream outputStream, byte[] byteArray) throws IOException { outputStream.write(byteArray, 0, byteArray.length); } public Encoder() { } public Encoder(String charset) { this(Charset.forName(charset)); } public Encoder(Charset charset) { this.charset = charset; } public Encoder(String charset, String byteCharset) { this(Charset.forName(charset), Charset.forName(byteCharset)); } public Encoder(Charset charset, Charset byteCharset) { this.charset = charset; this.byteCharset = byteCharset; } public Charset getCharset() { return this.charset; } public Encoder setCharset(String charset) { this.setCharset(Charset.forName(charset)); return this; } public Encoder setCharset(Charset charset) { this.charset = charset; return this; } public Charset getByteCharset() { return this.byteCharset; } public Encoder setByteCharset(String charset) { this.setByteCharset(Charset.forName(charset)); return this; } public Encoder setByteCharset(Charset charset) { this.byteCharset = charset; return this; } public static Encoder get() { return new Encoder(); } }
/* * Copyright (c) 2002-2018 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.shell.kernel.apps; import java.rmi.RemoteException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.neo4j.function.Predicate; import org.neo4j.graphdb.Direction; import org.neo4j.graphdb.Label; import org.neo4j.graphdb.Node; import org.neo4j.graphdb.Path; import org.neo4j.graphdb.Relationship; import org.neo4j.graphdb.RelationshipType; import org.neo4j.graphdb.traversal.BranchState; import org.neo4j.helpers.Service; import org.neo4j.helpers.collection.FilteringIterator; import org.neo4j.kernel.impl.util.SingleNodePath; import org.neo4j.shell.App; import org.neo4j.shell.AppCommandParser; import org.neo4j.shell.ColumnPrinter; import org.neo4j.shell.Continuation; import org.neo4j.shell.OptionDefinition; import org.neo4j.shell.OptionValueType; import org.neo4j.shell.Output; import org.neo4j.shell.Session; import org.neo4j.shell.ShellException; /** * Mimics the POSIX application with the same name, i.e. 
lists
 * properties/relationships on a node or a relationship.
 */
@Service.Implementation( App.class )
public class Ls extends TransactionProvidingApp
{
    // Default cap used by the -m option when no explicit value is supplied.
    private static final int DEFAULT_MAX_RELS_PER_TYPE_LIMIT = 10;

    // Instance initializer: registers every command-line option this app understands.
    {
        addOptionDefinition( "b", new OptionDefinition( OptionValueType.NONE,
            "Brief summary instead of full content" ) );
        addOptionDefinition( "v", new OptionDefinition( OptionValueType.NONE,
            "Verbose mode" ) );
        addOptionDefinition( "q", new OptionDefinition( OptionValueType.NONE,
            "Quiet mode" ) );
        addOptionDefinition( "p", new OptionDefinition( OptionValueType.NONE,
            "Lists properties" ) );
        addOptionDefinition( "r", new OptionDefinition( OptionValueType.NONE,
            "Lists relationships" ) );
        addOptionDefinition( "f", new OptionDefinition( OptionValueType.MUST,
            "Filters property keys/values and relationship types. Supplied either as a single value " +
            "or as a JSON string where both keys and values can contain regex. " +
            "Starting/ending {} brackets are optional. Examples:\n" +
            " \"username\"\n\tproperty/relationship 'username' gets listed\n" +
            " \".*name:ma.*, age:''\"\n\tproperties with keys matching '.*name' and values matching 'ma.*' " +
            "gets listed,\n\tas well as the 'age' property. Also relationships matching '.*name' or 'age'\n\tgets listed\n" +
            " \"KNOWS:out,LOVES:in\"\n\toutgoing KNOWS and incoming LOVES relationships gets listed" ) );
        addOptionDefinition( "i", new OptionDefinition( OptionValueType.NONE,
            "Filters are case-insensitive (case-sensitive by default)" ) );
        addOptionDefinition( "l", new OptionDefinition( OptionValueType.NONE,
            "Filters matches more loosely, i.e. it's considered a match if just " +
            "a part of a value matches the pattern, not necessarily the whole value" ) );
        addOptionDefinition( "s", new OptionDefinition( OptionValueType.NONE,
            "Sorts relationships by type." ) );
        addOptionDefinition( "m", new OptionDefinition( OptionValueType.MAY,
            "Display a maximum of M relationships per type (default " + DEFAULT_MAX_RELS_PER_TYPE_LIMIT +
            " if no value given)" ) );
        addOptionDefinition( "a", new OptionDefinition( OptionValueType.NONE,
            "Allows for cd:ing to a node not connected to the current node (e.g. 'absolute')" ) );
    }

    @Override
    public String getDescription()
    {
        return "Lists the contents of the current node or relationship. " +
            "Optionally supply\n" +
            "node id for listing a certain node using \"ls <node-id>\"";
    }

    /**
     * Entry point: parses the options, resolves the target entity (current
     * entity, or the node id given as first argument), then delegates to the
     * display helpers. With neither -p nor -r given, both are shown.
     * -v and -q cancel each other out when combined.
     */
    @Override
    protected Continuation exec( AppCommandParser parser, Session session, Output out )
            throws ShellException, RemoteException
    {
        boolean brief = parser.options().containsKey( "b" );
        boolean verbose = parser.options().containsKey( "v" );
        boolean quiet = parser.options().containsKey( "q" );
        if ( verbose && quiet )
        {
            verbose = false;
            quiet = false;
        }
        boolean displayProperties = parser.options().containsKey( "p" );
        boolean displayRelationships = parser.options().containsKey( "r" );
        boolean caseInsensitiveFilters = parser.options().containsKey( "i" );
        boolean looseFilters = parser.options().containsKey( "l" );
        Map<String, Object> filterMap = parseFilter( parser.options().get( "f" ), out );
        if ( !displayProperties && !displayRelationships )
        {
            displayProperties = true;
            displayRelationships = true;
        }

        NodeOrRelationship thing = null;
        if ( parser.arguments().isEmpty() )
        {
            thing = this.getCurrent( session );
        }
        else
        {
            thing = NodeOrRelationship.wrap( this.getNodeById( Long
                .parseLong( parser.arguments().get( 0 ) ) ) );
        }

        if ( displayProperties )
        {
            // NOTE(review): displayLabels calls thing.asNode() unconditionally,
            // even when 'thing' is a relationship — verify asNode() behaviour
            // for relationships.
            displayLabels( thing, out, filterMap, caseInsensitiveFilters, looseFilters, brief );
            displayProperties( thing, out, verbose, quiet, filterMap, caseInsensitiveFilters,
                looseFilters, brief );
        }
        if ( displayRelationships )
        {
            if ( thing.isNode() )
            {
                displayRelationships( parser, thing, session, out, verbose, quiet,
                    filterMap, caseInsensitiveFilters, looseFilters, brief );
            }
            else
            {
                displayNodes( parser, thing, session, out );
            }
        }
        return Continuation.INPUT_COMPLETE;
    }

    /**
     * For a relationship target, prints its start node, the relationship
     * itself and its end node on one line (start --REL-> end).
     */
    private void displayNodes( AppCommandParser parser, NodeOrRelationship thing,
        Session session, Output out ) throws RemoteException, ShellException
    {
        Relationship rel = thing.asRelationship();
        out.println( getDisplayName( getServer(), session, rel.getStartNode(), false ) +
            " --" + getDisplayName( getServer(), session, rel, true, false ) + "-> " +
            getDisplayName( getServer(), session, rel.getEndNode(), false ) );
    }

    /** Returns the keys sorted case-insensitively (copy; source untouched). */
    private Iterable<String> sortKeys( Iterable<String> source )
    {
        List<String> list = new ArrayList<String>();
        for ( String item : source )
        {
            list.add( item );
        }
        Collections.sort( list, new Comparator<String>()
        {
            @Override
            public int compare( String item1, String item2 )
            {
                return item1.toLowerCase().compareTo( item2.toLowerCase() );
            }
        } );
        return list;
    }

    /**
     * Prints the entity's properties that pass the filter. In quiet mode only
     * keys are printed; in verbose mode the value's type is appended; in brief
     * mode only the matching-property count is printed.
     */
    private void displayProperties( NodeOrRelationship thing, Output out,
        boolean verbose, boolean quiet, Map<String, Object> filterMap,
        boolean caseInsensitiveFilters, boolean looseFilters, boolean brief )
        throws RemoteException
    {
        ColumnPrinter columnPrinter = quiet ? new ColumnPrinter( "*" ) :
            new ColumnPrinter( "*", "=" );
        int count = 0;
        for ( String key : sortKeys( thing.getPropertyKeys() ) )
        {
            Object value = thing.getProperty( key );
            if ( !filterMatches( filterMap, caseInsensitiveFilters, looseFilters, key, value ) )
            {
                continue;
            }
            count++;
            if ( !brief )
            {
                if ( quiet )
                {
                    columnPrinter.add( key );
                }
                else
                {
                    columnPrinter.add( key, verbose ?
                        format( value, true ) + " (" + getNiceType( value ) + ")" :
                        format( value, true ) );
                }
            }
        }
        columnPrinter.print( out );
        if ( brief )
        {
            out.println( "Property count: " + count );
        }
    }

    /**
     * Prints the node's labels (":Label" per line) that pass the filter, or
     * just the label count in brief mode. Assumes 'thing' wraps a node.
     */
    private void displayLabels( NodeOrRelationship thing, Output out, Map<String, Object> filterMap,
            boolean caseInsensitiveFilters, boolean looseFilters, boolean brief ) throws RemoteException
    {
        List<String> labelNames = new ArrayList<String>();
        for ( Label label : thing.asNode().getLabels() )
            labelNames.add( label.name() );

        if ( brief )
        {
            out.println( "Label count: " + labelNames.size() );
        }
        else
        {
            for ( String label : sortKeys( labelNames ) )
            {
                if ( filterMatches( filterMap, caseInsensitiveFilters, looseFilters, label, "" ) )
                    out.println( ":" + label );
            }
        }
    }

    /**
     * Prints the node's relationships. Brief mode prints one line per
     * relationship type with a count (which relies on type-sorted iteration,
     * hence sortByType|brief below — a non-short-circuit OR on booleans).
     * Otherwise each relationship is printed on its own line, optionally
     * capped per type by the -m option.
     */
    private void displayRelationships( AppCommandParser parser, NodeOrRelationship thing,
        Session session, Output out, boolean verbose, boolean quiet, Map<String, Object> filterMap,
        boolean caseInsensitiveFilters, boolean looseFilters, boolean brief )
        throws ShellException, RemoteException
    {
        boolean sortByType = parser.options().containsKey( "s" );
        Node node = thing.asNode();
        Iterable<Relationship> relationships = getRelationships( node, filterMap,
            caseInsensitiveFilters, looseFilters, sortByType|brief );
        if ( brief )
        {
            Iterator<Relationship> iterator = relationships.iterator();
            if ( !iterator.hasNext() )
            {
                return;
            }
            // Count consecutive relationships of the same type; the iterator is
            // type-sorted, so a type change means the previous group is complete.
            Relationship sampleRelationship = iterator.next();
            RelationshipType lastType = sampleRelationship.getType();
            int currentCounter = 1;
            while ( iterator.hasNext() )
            {
                Relationship rel = iterator.next();
                if ( !rel.isType( lastType ) )
                {
                    displayBriefRelationships( thing, session, out, sampleRelationship, currentCounter );
                    sampleRelationship = rel;
                    lastType = sampleRelationship.getType();
                    currentCounter = 1;
                }
                else
                {
                    currentCounter++;
                }
            }
            // Flush the final group.
            displayBriefRelationships( thing, session, out, sampleRelationship, currentCounter );
        }
        else
        {
            Iterator<Relationship> iterator = relationships.iterator();
            if ( parser.options().containsKey( "m" ) )
            {
                iterator = wrapInLimitingIterator( parser, iterator, filterMap,
                    caseInsensitiveFilters, looseFilters );
            }

            while ( iterator.hasNext() )
            {
                Relationship rel = iterator.next();
                StringBuffer buf = new StringBuffer( getDisplayName(
                    getServer(), session, thing, true ) );
                String relDisplay = quiet ? "" : getDisplayName( getServer(), session, rel, verbose, true );
                buf.append( withArrows( rel, relDisplay, thing.asNode() ) );
                buf.append( getDisplayName( getServer(), session, rel.getOtherNode( node ), true ) );
                out.println( buf );
            }
        }
    }

    /**
     * Wraps the iterator so that at most M relationships per type are emitted
     * (-m option); iteration is aborted entirely once every type hit its cap.
     */
    private Iterator<Relationship> wrapInLimitingIterator( AppCommandParser parser,
        Iterator<Relationship> iterator, Map<String, Object> filterMap, boolean caseInsensitiveFilters,
        boolean looseFilters ) throws ShellException
    {
        final AtomicBoolean handBreak = new AtomicBoolean();
        int maxRelsPerType = parser.optionAsNumber( "m", DEFAULT_MAX_RELS_PER_TYPE_LIMIT ).intValue();
        Map<String, Direction> types = filterMapToTypes( getServer().getDb(),
            Direction.BOTH, filterMap, caseInsensitiveFilters, looseFilters );
        return new FilteringIterator<Relationship>( iterator,
            new LimitPerTypeFilter( maxRelsPerType, types, handBreak ) )
        {
            @Override
            protected Relationship fetchNextOrNull()
            {
                // Once the filter sets the hand brake, stop fetching eagerly.
                return handBreak.get() ? null : super.fetchNextOrNull();
            }
        };
    }

    /**
     * Per-type counting predicate: accepts a relationship until its type has
     * reached the cap; raises the shared hand-brake flag when all known types
     * are maxed out.
     */
    private static class LimitPerTypeFilter implements Predicate<Relationship>
    {
        private final int maxRelsPerType;
        // Counter per relationship-type name, seeded from the filter's type map.
        private final Map<String, AtomicInteger> encounteredRelationships = new HashMap<String, AtomicInteger>();
        private int typesMaxedOut = 0;
        private final AtomicBoolean iterationHalted;

        public LimitPerTypeFilter( int maxRelsPerType, Map<String, Direction> types,
                AtomicBoolean handBreak )
        {
            this.maxRelsPerType = maxRelsPerType;
            this.iterationHalted = handBreak;
            for ( String type : types.keySet() )
            {
                encounteredRelationships.put( type, new AtomicInteger() );
            }
        }

        @Override
        public boolean test( Relationship item )
        {
            // NOTE(review): if a relationship's type is not in the seeded map
            // (e.g. empty filter), get() returns null and this NPEs — verify
            // the -m option is only reachable with a populated type filter.
            AtomicInteger counter = encounteredRelationships.get( item.getType().name() );
            int count = counter.get();
            if ( count < maxRelsPerType )
            {
                if ( counter.incrementAndGet() == maxRelsPerType )
                {
                    // NOTE(review): the counter is bumped a second time here once
                    // the cap is reached — looks deliberate (pushes it past the
                    // cap so the outer check fails fast) but verify intent.
                    counter.incrementAndGet();
                    if ( (++typesMaxedOut) >= encounteredRelationships.size() )
                    {
                        iterationHalted.set( true );
                    }
                    return true;
                }
                return true;
            }
            return false;
        }
    }

    /**
     * Returns the node's relationships, via a (possibly sorting) expander when
     * type sorting or a filter is in effect, or directly from the node otherwise.
     */
    private Iterable<Relationship> getRelationships( final Node node,
        Map<String, Object> filterMap, boolean caseInsensitiveFilters, boolean looseFilters,
        boolean sortByType ) throws ShellException
    {
        if ( sortByType )
        {
            Path nodeAsPath = new SingleNodePath( node );
            return toSortedExpander( getServer().getDb(), Direction.BOTH, filterMap,
                caseInsensitiveFilters, looseFilters ).expand( nodeAsPath, BranchState.NO_STATE );
        }
        else
        {
            if ( filterMap.isEmpty() )
            {
                return node.getRelationships();
            }
            else
            {
                Path nodeAsPath = new SingleNodePath( node );
                return toExpander( getServer().getDb(), Direction.BOTH, filterMap,
                    caseInsensitiveFilters, looseFilters ).expand( nodeAsPath, BranchState.NO_STATE );
            }
        }
    }

    /** Prints one "node --TYPE-> x<count>" summary line for a relationship group. */
    private void displayBriefRelationships( NodeOrRelationship thing, Session session,
        Output out, Relationship sampleRelationship, int count ) throws ShellException, RemoteException
    {
        String relDisplay = withArrows( sampleRelationship, getDisplayName( getServer(), session,
            sampleRelationship, false, true ), thing.asNode() );
        out.println( getDisplayName( getServer(), session, thing, true ) + relDisplay + " x" + count );
    }

    // 'Set' here is a shell utility class from this package, not java.util.Set
    // (the latter is never imported).
    private static String getNiceType( Object value )
    {
        return Set.getValueTypeName( value.getClass() );
    }
}
package hudson.model.queue; import com.google.common.collect.Iterables; import hudson.Extension; import jenkins.util.SystemProperties; import hudson.model.Computer; import hudson.model.Executor; import jenkins.model.Jenkins; import hudson.model.InvisibleAction; import hudson.model.Queue.BuildableItem; import hudson.model.queue.MappingWorksheet.ExecutorChunk; import hudson.model.queue.MappingWorksheet.ExecutorSlot; import hudson.model.queue.MappingWorksheet.Mapping; import hudson.model.queue.MappingWorksheet.WorkChunk; import hudson.util.TimeUnit2; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; /** * Experimental. * * @author Kohsuke Kawaguchi */ public class BackFiller extends LoadPredictor { private boolean recursion = false; @Override public Iterable<FutureLoad> predict(MappingWorksheet plan, Computer computer, long start, long end) { TimeRange timeRange = new TimeRange(start, end - start); List<FutureLoad> loads = new ArrayList<FutureLoad>(); for (BuildableItem bi : Jenkins.getInstance().getQueue().getBuildableItems()) { TentativePlan tp = bi.getAction(TentativePlan.class); if (tp==null) {// do this even for bi==plan.item ensures that we have FIFO semantics in tentative plans. tp = makeTentativePlan(bi); if (tp==null) continue; // no viable plan. 
} if (tp.isStale()) { // if the tentative plan is stale, just keep on pushing it to the current time // (if we recreate the plan, it'll be put at the end of the queue, whereas this job // should actually get priority over others) tp.range.shiftTo(System.currentTimeMillis()); } // don't let its own tentative plan count when considering a scheduling for a job if (plan.item==bi) continue; // no overlap in the time span, meaning this plan is for a distant future if (!timeRange.overlapsWith(tp.range)) continue; // if this tentative plan has no baring on this computer, that's ignorable Integer i = tp.footprint.get(computer); if (i==null) continue; return Collections.singleton(tp.range.toFutureLoad(i)); } return loads; } private static final class PseudoExecutorSlot extends ExecutorSlot { private Executor executor; private PseudoExecutorSlot(Executor executor) { this.executor = executor; } @Override public Executor getExecutor() { return executor; } @Override public boolean isAvailable() { return true; } // this slot isn't executable @Override protected void set(WorkUnit p) { throw new UnsupportedOperationException(); } } private TentativePlan makeTentativePlan(BuildableItem bi) { if (recursion) return null; recursion = true; try { // pretend for now that all executors are available and decide some assignment that's executable. List<PseudoExecutorSlot> slots = new ArrayList<PseudoExecutorSlot>(); for (Computer c : Jenkins.getInstance().getComputers()) { if (c.isOffline()) continue; for (Executor e : c.getExecutors()) { slots.add(new PseudoExecutorSlot(e)); } } // also ignore all load predictions as we just want to figure out some executable assignment // and we are not trying to figure out if this task is executable right now. 
MappingWorksheet worksheet = new MappingWorksheet(bi, slots, Collections.<LoadPredictor>emptyList()); Mapping m = Jenkins.getInstance().getQueue().getLoadBalancer().map(bi.task, worksheet); if (m==null) return null; // figure out how many executors we need on each computer? Map<Computer,Integer> footprint = new HashMap<Computer, Integer>(); for (Entry<WorkChunk, ExecutorChunk> e : m.toMap().entrySet()) { Computer c = e.getValue().computer; Integer v = footprint.get(c); if (v==null) v = 0; v += e.getKey().size(); footprint.put(c,v); } // the point of a tentative plan is to displace other jobs to create a point in time // where this task can start executing. An incorrectly estimated duration is not // a problem in this regard, as we just need enough idle executors in the right moment. // The downside of guessing the duration wrong is that we can end up creating tentative plans // afterward that may be incorrect, but those plans will be rebuilt. long d = bi.task.getEstimatedDuration(); if (d<=0) d = TimeUnit2.MINUTES.toMillis(5); TimeRange slot = new TimeRange(System.currentTimeMillis(), d); // now, based on the real predicted loads, figure out the approximation of when we can // start executing this guy. for (Entry<Computer, Integer> e : footprint.entrySet()) { Computer computer = e.getKey(); Timeline timeline = new Timeline(); for (LoadPredictor lp : LoadPredictor.all()) { for (FutureLoad fl : Iterables.limit(lp.predict(worksheet, computer, slot.start, slot.end),100)) { timeline.insert(fl.startTime, fl.startTime+fl.duration, fl.numExecutors); } } Long x = timeline.fit(slot.start, slot.duration, computer.countExecutors()-e.getValue()); // if no suitable range was found in [slot.start,slot.end), slot.end would be a good approximation if (x==null) x = slot.end; slot = slot.shiftTo(x); } TentativePlan tp = new TentativePlan(footprint, slot); bi.addAction(tp); return tp; } finally { recursion = false; } } /** * Represents a duration in time. 
*/ private static final class TimeRange { public final long start; public final long duration; public final long end; private TimeRange(long start, long duration) { this.start = start; this.duration = duration; this.end = start+duration; } public boolean overlapsWith(TimeRange that) { return (this.start <= that.start && that.start <=this.end) || (that.start <= this.start && this.start <=that.end); } public FutureLoad toFutureLoad(int size) { return new FutureLoad(start,duration,size); } public TimeRange shiftTo(long newStart) { if (newStart==start) return this; return new TimeRange(newStart,duration); } } public static final class TentativePlan extends InvisibleAction { private final Map<Computer,Integer> footprint; public final TimeRange range; public TentativePlan(Map<Computer, Integer> footprint, TimeRange range) { this.footprint = footprint; this.range = range; } public Object writeReplace() {// don't persist return null; } public boolean isStale() { return range.end < System.currentTimeMillis(); } } /** * Once this feature stabilizes, move it to the heavyjob plugin */ @Extension public static BackFiller newInstance() { if (SystemProperties.getBoolean(BackFiller.class.getName())) return new BackFiller(); return null; } }
package com.kalessil.phpStorm.phpInspectionsEA.inspectors.semanticalAnalysis;

import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.codeInspection.ProblemsHolder;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.php.lang.lexer.PhpTokenTypes;
import com.jetbrains.php.lang.psi.elements.*;
import com.jetbrains.php.lang.psi.elements.impl.StatementImpl;
import com.jetbrains.php.lang.psi.elements.impl.UnaryExpressionImpl;
import com.kalessil.phpStorm.phpInspectionsEA.openApi.BasePhpElementVisitor;
import com.kalessil.phpStorm.phpInspectionsEA.openApi.BasePhpInspection;
import com.kalessil.phpStorm.phpInspectionsEA.utils.ExpressionSemanticUtil;
import org.jetbrains.annotations.NotNull;

import java.util.HashMap;
import java.util.HashSet;

/**
 * Inspection that flags top-level statements inside a PHP foreach body which
 * do not depend on any variable the loop modifies — such statements are
 * "disconnected" from the loop and could (usually) be hoisted out of it.
 * Also suggests cloning instead of creating DOM elements inside a loop.
 */
public class DisconnectedForeachInstructionInspector extends BasePhpInspection {
    private static final String strProblemDescription = "This statement seems to be disconnected from parent foreach";
    private static final String strProblemUseClone = "Objects should be created outside of a loop and cloned instead";

    @NotNull
    public String getShortName() {
        return "DisconnectedForeachInstructionInspection";
    }

    // Classification of a foreach-body statement; several of these kinds are
    // deliberately exempt from the "disconnected" warning below.
    private static enum ExpressionType {
        IF, INCREMENT, DECREMENT, CLONE, NEW, REASSIGN, DOM_ELEMENT_CREATE, ACCUMULATE_IN_ARRAY, OTHER
    }

    @Override
    public PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, final boolean isOnTheFly) {
        return new BasePhpElementVisitor() {
            /**
             * Two-pass analysis of each foreach:
             * pass 1 records, per statement, which variables it reads
             * (dependencies) and collects all variables written anywhere in
             * the body; pass 2 reports statements whose dependencies contain
             * no written variable.
             */
            public void visitPhpForeach(ForeachStatement foreach) {
                Variable value = foreach.getValue();
                GroupStatement foreachBody = ExpressionSemanticUtil.getGroupStatement(foreach);
                /* ensure foreach structure is ready for inspection */
                if (null != foreachBody && null != value && null != value.getName()) {
                    /* pre-collect introduced and internally used variables */
                    // The loop's value (and key, if any) count as modified on
                    // every iteration.
                    HashSet<String> allModifiedVariables = new HashSet<String>();
                    allModifiedVariables.add(value.getName());
                    Variable key = foreach.getKey();
                    if (null != key && null != key.getName()) {
                        allModifiedVariables.add(key.getName());
                    }

                    HashMap<PsiElement, HashSet<String>> instructionDependencies = new HashMap<PsiElement, HashSet<String>>();
                    /* iteration 1 - investigate what are dependencies and influence */
                    for (PsiElement oneInstruction : foreachBody.getStatements()) {
                        if (oneInstruction instanceof PhpPsiElement && !(oneInstruction instanceof PsiComment)) {
                            HashSet<String> individualDependencies = new HashSet<String>();
                            // "this" is a sentinel entry: size() > 1 later means
                            // "depends on something besides $this".
                            individualDependencies.add("this");
                            instructionDependencies.put(oneInstruction, individualDependencies);
                            investigateInfluence((PhpPsiElement) oneInstruction, individualDependencies, allModifiedVariables);
                        }
                    }

                    /* iteration 2 - analyse dependencies */
                    for (PsiElement oneInstruction : foreachBody.getStatements()) {
                        if (oneInstruction instanceof PhpPsiElement) {
                            boolean isDependOnModifiedVariables = false;
                            boolean hasDependencies = false;

                            /* check if any dependency is overridden */
                            HashSet<String> individualDependencies = instructionDependencies.get(oneInstruction);
                            if (null != individualDependencies && individualDependencies.size() > 1) {
                                hasDependencies = true; /* contains not only this */
                                for (String dependencyName : individualDependencies) {
                                    if (allModifiedVariables.contains(dependencyName)) {
                                        isDependOnModifiedVariables = true;
                                        break;
                                    }
                                }
                            }

                            /* verify and report if violation detected */
                            if (!isDependOnModifiedVariables && hasDependencies) {
                                ExpressionType target = getExpressionType(oneInstruction);
                                /**
                                 * TODO: hint using clone instead of '$var = new ...';
                                 */
                                // These statement kinds are exempt: control flow,
                                // counters, reassignments, clones and array pushes
                                // legitimately live inside loops.
                                if (
                                    ExpressionType.IF != target &&
                                    ExpressionType.REASSIGN != target &&
                                    ExpressionType.CLONE != target &&
                                    ExpressionType.INCREMENT != target &&
                                    ExpressionType.DECREMENT != target &&
                                    ExpressionType.DOM_ELEMENT_CREATE != target &&
                                    ExpressionType.ACCUMULATE_IN_ARRAY != target
                                ) {
                                    holder.registerProblem(oneInstruction, strProblemDescription, ProblemHighlightType.WEAK_WARNING);
                                }
                                // DOM element creation gets its own, more specific hint.
                                if (ExpressionType.DOM_ELEMENT_CREATE == target) {
                                    holder.registerProblem(oneInstruction, strProblemUseClone, ProblemHighlightType.WEAK_WARNING);
                                }
                            }

                            /* cleanup dependencies details */
                            if (null != individualDependencies) {
                                individualDependencies.clear();
                            }
                        }
                    }
                    /* empty dependencies details container */
                    instructionDependencies.clear();
                }
            }

            /**
             * Walks all variables inside one statement, adding written ones
             * (assignment targets, increment/decrement operands) to
             * {@code allModifiedVariables} and read ones to
             * {@code individualDependencies}.
             */
            private void investigateInfluence(
                    PhpPsiElement oneInstruction,
                    HashSet<String> individualDependencies,
                    HashSet<String> allModifiedVariables
            ) {
                for (PsiElement variable : PsiTreeUtil.findChildrenOfType(oneInstruction, Variable.class)) {
                    Variable castedVariable = (Variable) variable;
                    String variableName = castedVariable.getName();
                    if (null != variableName) {
                        if (variable.getParent() instanceof AssignmentExpression) {
                            AssignmentExpression assignment = (AssignmentExpression) variable.getParent();
                            // Only the left-hand side of the assignment counts as a write.
                            if (assignment.getVariable() == variable) {
                                allModifiedVariables.add(variableName);
                                continue;
                            }
                        }

                        /* increment/decrement are also write operations */
                        ExpressionType type = getExpressionType(variable.getParent());
                        if (ExpressionType.INCREMENT == type || ExpressionType.DECREMENT == type) {
                            allModifiedVariables.add(variableName);
                            continue;
                        }

                        /* TODO: lookup for array access and property access */
                        individualDependencies.add(variableName);
                    }
                }
            }

            /**
             * Classifies a statement/expression into one of the
             * {@link ExpressionType} kinds; anything unrecognised is OTHER.
             * Statements are unwrapped to their first child expression.
             */
            private ExpressionType getExpressionType(PsiElement expression) {
                if (expression instanceof If) {
                    return ExpressionType.IF;
                }

                if (expression instanceof StatementImpl) {
                    return getExpressionType(((StatementImpl) expression).getFirstPsiChild());
                }

                if (expression instanceof UnaryExpressionImpl) {
                    PsiElement operation = ((UnaryExpressionImpl) expression).getOperation();
                    IElementType operationType = null;
                    if (null != operation) {
                        operationType = operation.getNode().getElementType();
                    }
                    if (PhpTokenTypes.opINCREMENT == operationType) {
                        return ExpressionType.INCREMENT;
                    }
                    if (PhpTokenTypes.opDECREMENT == operationType) {
                        return ExpressionType.DECREMENT;
                    }
                }

                if (expression instanceof AssignmentExpression) {
                    AssignmentExpression assignment = (AssignmentExpression) expression;
                    PsiElement variable = assignment.getVariable();
                    if (variable instanceof Variable) {
                        PsiElement value = assignment.getValue();
                        if (value instanceof NewExpression) {
                            return ExpressionType.NEW;
                        }
                        if (value instanceof Variable) {
                            return ExpressionType.REASSIGN;
                        }
                        if (value instanceof UnaryExpressionImpl) {
                            PsiElement operation = ((UnaryExpressionImpl) value).getOperation();
                            if (null != operation && PhpTokenTypes.kwCLONE == operation.getNode().getElementType()) {
                                return ExpressionType.CLONE;
                            }
                        }
                        // $x = $doc->createElement(...) resolving to
                        // \DOMDocument::createElement is reported separately.
                        if (value instanceof MethodReference) {
                            MethodReference call = (MethodReference) value;
                            String methodName = call.getName();
                            if (!StringUtil.isEmpty(methodName) && methodName.equals("createElement")) {
                                PsiElement resolved = call.resolve();
                                if (resolved instanceof Method) {
                                    String fqn = ((Method) resolved).getFQN();
                                    if (!StringUtil.isEmpty(fqn) && fqn.equals("\\DOMDocument.createElement")) {
                                        return ExpressionType.DOM_ELEMENT_CREATE;
                                    }
                                }
                            }
                        }
                    }
                    // $arr[] = ... (push without explicit index) is accumulation.
                    if (variable instanceof ArrayAccessExpression) {
                        ArrayAccessExpression storage = (ArrayAccessExpression) variable;
                        if (null == storage.getIndex() || null == storage.getIndex().getValue()) {
                            return ExpressionType.ACCUMULATE_IN_ARRAY;
                        }
                    }
                }

                return ExpressionType.OTHER;
            }
        };
    }
}
package com.mcgowan.timetable.android.utility;

import android.content.ContentValues;
import android.content.Context;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.graphics.Typeface;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.preference.PreferenceManager;
import android.text.Spannable;
import android.text.SpannableString;
import android.util.Log;
import android.view.MenuItem;

import com.mcgowan.timetable.android.CustomTypefaceSpan;
import com.mcgowan.timetable.android.R;
import com.mcgowan.timetable.android.TimeTableWeekFragment;
import com.mcgowan.timetable.android.data.TimetableContract;
import com.mcgowan.timetable.android.sync.TimetableSyncAdapter;
import com.mcgowan.timetable.scraper.Course;
import com.mcgowan.timetable.scraper.TimeTable;

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Vector;

/**
 * Static helper methods shared across the timetable app: day-name/number mapping,
 * preference access, cursor-to-today matching, scraper-to-ContentValues conversion,
 * database bulk operations, connectivity checks and small UI utilities.
 *
 * <p>Day numbering convention throughout this class: Monday = 1 .. Sunday = 7
 * (see {@link #getDayNumberFromDay(String)}); 0 means "unknown day".
 */
public class Utility {

    private static final String LOG_TAG = Utility.class.getSimpleName();

    // Cached menu typeface. Typeface.createFromAsset allocates a native typeface
    // on every call and leaks that allocation on pre-Lollipop devices, so the
    // font is loaded once and reused for every menu item.
    private static Typeface sMenuFont;

    /**
     * Maps an English day name to its number, Monday = 1 through Sunday = 7.
     *
     * @param day full English day name, e.g. "Monday"
     * @return 1..7 for a recognised day, 0 otherwise
     */
    public static int getDayNumberFromDay(String day) {
        switch (day) {
            case "Monday":
                return 1;
            case "Tuesday":
                return 2;
            case "Wednesday":
                return 3;
            case "Thursday":
                return 4;
            case "Friday":
                return 5;
            case "Saturday":
                return 6;
            case "Sunday":
                return 7;
            default:
                return 0;
        }
    }

    /**
     * Reads the configured student id from the default shared preferences.
     *
     * @param context any context
     * @return the stored student id, or the default declared in resources
     */
    public static String getStudentId(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString(
                context.getString(R.string.student_id_key),
                context.getString(R.string.student_id_default));
    }

    /**
     * Resolves the drawable resource id for a day icon. The icon for the current
     * day is coloured; all other days get the "_bw" (black & white) variant.
     *
     * @param context any context
     * @param name    English day name, e.g. "Monday"
     * @return drawable resource id, or 0 if no matching drawable exists
     */
    public static int getDayIcon(Context context, String name) {
        String today = currentDayName();
        // Locale.ENGLISH: drawable names are ASCII; the default locale would break
        // the lookup (e.g. Turkish lower-cases 'I' to dotless 'ı').
        String fileName = String.format("%s_130", name.toLowerCase(Locale.ENGLISH));
        if (!today.equals(name)) {
            fileName = fileName.concat("_bw");
        }
        return context.getResources().getIdentifier(fileName, "drawable", context.getPackageName());
    }

    /**
     * Scans a timetable cursor for the row whose day column matches today.
     * The cursor is advanced as a side effect and left positioned on the match
     * (or past the end when there is none).
     *
     * @param c cursor containing a day column at {@link TimeTableWeekFragment#COL_TIMETABLE_DAY}
     * @return the matching row position, or -1 when today has no row
     */
    public static int checkCursorForToday(Cursor c) {
        String today = currentDayName();
        while (c.moveToNext()) {
            String day = c.getString(TimeTableWeekFragment.COL_TIMETABLE_DAY);
            // equalsIgnoreCase: locale-safe comparison, and null-safe for a null column.
            if (today.equalsIgnoreCase(day)) {
                return c.getPosition();
            }
        }
        return -1;
    }

    /**
     * Flattens a scraped {@link TimeTable} into ContentValues rows ready for
     * bulk insertion into the timetable table.
     *
     * @param t         scraped timetable, keyed by day name
     * @param studentID student id to stamp on every row
     * @return one ContentValues per course, in day-map iteration order
     */
    public static Vector<ContentValues> convertTimeTableToVector(TimeTable t, String studentID) {
        Map<String, List<Course>> days = t.getDays();
        Vector<ContentValues> cvVector = new Vector<>();

        for (Map.Entry<String, List<Course>> entry : days.entrySet()) {
            for (Course c : entry.getValue()) {
                ContentValues classValues = new ContentValues();
                classValues.put(TimetableContract.TimetableEntry.COLUMN_DAY, c.getDay());
                classValues.put(TimetableContract.TimetableEntry.COLUMN_LECTURER, c.getLecturer());
                classValues.put(TimetableContract.TimetableEntry.COLUMN_START_TIME, c.getStartTime());
                classValues.put(TimetableContract.TimetableEntry.COLUMN_TIME, c.getTime());
                classValues.put(TimetableContract.TimetableEntry.COLUMN_END_TIME, c.getEndTime());
                classValues.put(TimetableContract.TimetableEntry.COLUMN_STUDENT_ID, studentID);
                classValues.put(TimetableContract.TimetableEntry.COLUMN_SUBJECT, c.getSubject());
                classValues.put(TimetableContract.TimetableEntry.COLUMN_ROOM, c.getRoom());
                classValues.put(TimetableContract.TimetableEntry.COLUMN_DAY_ID,
                        Utility.getDayNumberFromDay(c.getDay()));
                cvVector.add(classValues);
            }
        }
        return cvVector;
    }

    /**
     * Deletes all timetable rows belonging to one student. Called before a fresh
     * sync insert so stale classes do not linger.
     *
     * @param context   any context
     * @param studentId rows with this student id are removed
     */
    public static void deleteRecordsFromDatabase(Context context, String studentId) {
        context.getContentResolver().delete(
                TimetableContract.TimetableEntry.CONTENT_URI,
                TimetableContract.TimetableEntry.COLUMN_STUDENT_ID + " = ?",
                new String[]{studentId}
        );
        Log.d(LOG_TAG, "Records deleted before insertion");
    }

    /**
     * Deletes every row in the timetable table, for all students.
     *
     * @param context any context
     */
    public static void deleteAllRecordsFromDatabase(Context context) {
        context.getContentResolver().delete(
                TimetableContract.TimetableEntry.CONTENT_URI,
                null,
                null
        );
        Log.d(LOG_TAG, "All Records deleted");
    }

    /**
     * Bulk-inserts the supplied rows into the timetable table.
     *
     * @param context  any context
     * @param cvVector rows produced by {@link #convertTimeTableToVector}
     */
    public static void addRecordsToDatabase(Context context, Vector<ContentValues> cvVector) {
        ContentValues[] cvArray = new ContentValues[cvVector.size()];
        cvVector.toArray(cvArray);
        int inserted = context.getContentResolver().bulkInsert(
                TimetableContract.TimetableEntry.CONTENT_URI,
                cvArray
        );
        Log.d(LOG_TAG, "addRecordsToDatabase Complete. " + inserted + " records inserted");
    }

    /**
     * @return true when a network is connected or in the process of connecting
     */
    public static boolean hasNetworkConnectivity(Context context) {
        ConnectivityManager cm =
                (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo activeNetwork = cm.getActiveNetworkInfo();
        return activeNetwork != null && activeNetwork.isConnectedOrConnecting();
    }

    /**
     * Reads the last recorded sync-server status from shared preferences.
     *
     * @return one of the {@link TimetableSyncAdapter.ServerStatus} constants
     */
    @SuppressWarnings("ResourceType")
    public static @TimetableSyncAdapter.ServerStatus int getServerStatus(Context c) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(c);
        return prefs.getInt(c.getString(R.string.server_status_key),
                TimetableSyncAdapter.SERVER_STATUS_UNKNOWN);
    }

    /**
     * Resets the stored server status back to {@code SERVER_STATUS_UNKNOWN}.
     */
    public static void resetServerStatus(Context c) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(c);
        SharedPreferences.Editor spe = prefs.edit();
        spe.putInt(c.getString(R.string.server_status_key),
                TimetableSyncAdapter.SERVER_STATUS_UNKNOWN);
        spe.apply();
    }

    /**
     * Resolves the clock-face drawable for an hour-of-day reference (9..18).
     * Unknown hours fall back to the "blank" drawable.
     *
     * @param context any context
     * @param ref     24-hour hour of day
     * @return drawable resource id, or 0 if the drawable is missing
     */
    public static int getImageForPeriod(Context context, int ref) {
        String fileName;
        switch (ref) {
            case 9:
                fileName = "nine";
                break;
            case 10:
                fileName = "ten";
                break;
            case 11:
                fileName = "eleven";
                break;
            case 12:
                fileName = "twelve";
                break;
            case 13:
                fileName = "one";
                break;
            case 14:
                fileName = "two";
                break;
            case 15:
                fileName = "three";
                break;
            case 16:
                fileName = "four";
                break;
            case 17:
                fileName = "five";
                break;
            case 18:
                fileName = "six";
                break;
            default:
                fileName = "blank";
                break;
        }
        return context.getResources().getIdentifier(fileName, "drawable", context.getPackageName());
    }

    /**
     * Applies the app's custom menu font to a menu item title.
     *
     * @param context any context (used to access assets on first call)
     * @param mi      the menu item to restyle
     */
    public static void applyFontToMenuItem(Context context, MenuItem mi) {
        if (sMenuFont == null) {
            sMenuFont = Typeface.createFromAsset(context.getAssets(), "fonts/RockSalt.ttf");
        }
        SpannableString newTitle = new SpannableString(mi.getTitle());
        newTitle.setSpan(new CustomTypefaceSpan("", sMenuFont), 0, newTitle.length(),
                Spannable.SPAN_INCLUSIVE_INCLUSIVE);
        mi.setTitle(newTitle);
    }

    /**
     * Returns today's day number as a string using this class's convention
     * (Monday = "1" .. Sunday = "7"), matching {@link #getDayNumberFromDay(String)}.
     *
     * <p>Bug fix: the previous implementation returned {@code DAY_OF_WEEK - 1},
     * which is "0" on Sundays (Calendar.SUNDAY == 1) and therefore never matched
     * the Sunday day-id (7) stored in the database. Note that
     * {@code setFirstDayOfWeek} does not change {@code DAY_OF_WEEK} values, so it
     * could not mask this.
     */
    public static String getDayNumberAsString() {
        Calendar cal = Calendar.getInstance();
        int dayOfWeek = cal.get(Calendar.DAY_OF_WEEK); // Calendar: Sunday=1 .. Saturday=7
        int isoDay = (dayOfWeek == Calendar.SUNDAY) ? 7 : dayOfWeek - 1;
        return String.valueOf(isoDay);
    }

    /** @return today's full English day name, e.g. "Monday". */
    private static String currentDayName() {
        return new SimpleDateFormat("EEEE", Locale.ENGLISH).format(new Date());
    }
}
// Copyright 2013 The Chromium Authors. All rights reserved. // Copyright (c) 2013-2014 Intel Corporation. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.xwalk.core.internal; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.graphics.Rect; import android.os.Build.VERSION; import android.os.Build.VERSION_CODES; import android.os.Bundle; import android.view.View; import android.view.WindowManager; import android.text.TextUtils; import android.util.AttributeSet; import android.util.Log; import android.view.ViewGroup; import android.webkit.ValueCallback; import android.webkit.WebResourceResponse; import android.widget.FrameLayout; import java.io.IOException; import java.io.InputStream; import java.lang.annotation.Annotation; import org.chromium.base.CalledByNative; import org.chromium.base.JNINamespace; import org.chromium.base.ThreadUtils; import org.chromium.components.navigation_interception.InterceptNavigationDelegate; import org.chromium.content.browser.ContentView; import org.chromium.content.browser.ContentViewCore; import org.chromium.content.browser.ContentViewRenderView; import org.chromium.content.browser.ContentViewRenderView.CompositingSurfaceType; import org.chromium.content.browser.ContentViewStatics; import org.chromium.content.common.CleanupReference; import org.chromium.content_public.browser.JavaScriptCallback; import org.chromium.content_public.browser.LoadUrlParams; import org.chromium.content_public.browser.NavigationHistory; import org.chromium.content_public.browser.NavigationController; import org.chromium.content_public.browser.WebContents; import org.chromium.media.MediaPlayerBridge; import org.chromium.ui.base.ActivityWindowAndroid; import org.chromium.ui.gfx.DeviceDisplayInfo; @JNINamespace("xwalk") /** * This class is the implementation class for 
XWalkViewInternal by calling internal * various classes. */ class XWalkContent extends FrameLayout implements XWalkPreferencesInternal.KeyValueChangeListener { private static String TAG = "XWalkContent"; private static Class<? extends Annotation> javascriptInterfaceClass = null; private ContentViewCore mContentViewCore; private ContentView mContentView; private ContentViewRenderView mContentViewRenderView; private ActivityWindowAndroid mWindow; private XWalkDevToolsServer mDevToolsServer; private XWalkViewInternal mXWalkView; private XWalkContentsClientBridge mContentsClientBridge; private XWalkContentsIoThreadClient mIoThreadClient; private XWalkWebContentsDelegateAdapter mXWalkContentsDelegateAdapter; private XWalkSettings mSettings; private XWalkGeolocationPermissions mGeolocationPermissions; private XWalkLaunchScreenManager mLaunchScreenManager; private NavigationController mNavigationController; private WebContents mWebContents; long mNativeContent; long mNativeWebContents; static void setJavascriptInterfaceClass(Class<? extends Annotation> clazz) { assert(javascriptInterfaceClass == null); javascriptInterfaceClass = clazz; } private static final class DestroyRunnable implements Runnable { private final long mNativeContent; private DestroyRunnable(long nativeXWalkContent) { mNativeContent = nativeXWalkContent; } @Override public void run() { nativeDestroy(mNativeContent); } } // Reference to the active mNativeContent pointer while it is active use // (ie before it is destroyed). private CleanupReference mCleanupReference; public XWalkContent(Context context, AttributeSet attrs, XWalkViewInternal xwView) { super(context, attrs); // Initialize the WebContensDelegate. 
mXWalkView = xwView; mContentsClientBridge = new XWalkContentsClientBridge(mXWalkView); mXWalkContentsDelegateAdapter = new XWalkWebContentsDelegateAdapter( mContentsClientBridge); mIoThreadClient = new XWalkIoThreadClientImpl(); // Initialize mWindow which is needed by content mWindow = new ActivityWindowAndroid(xwView.getActivity()); SharedPreferences sharedPreferences = new InMemorySharedPreferences(); mGeolocationPermissions = new XWalkGeolocationPermissions(sharedPreferences); MediaPlayerBridge.setResourceLoadingFilter( new XWalkMediaPlayerResourceLoadingFilter()); XWalkPreferencesInternal.load(this); setNativeContent(nativeInit()); } private void setNativeContent(long newNativeContent) { if (mNativeContent != 0) { destroy(); mContentViewCore = null; } assert mNativeContent == 0 && mCleanupReference == null && mContentViewCore == null; // Initialize ContentViewRenderView boolean animated = XWalkPreferencesInternal.getValue( XWalkPreferencesInternal.ANIMATABLE_XWALK_VIEW); CompositingSurfaceType surfaceType = animated ? CompositingSurfaceType.TEXTURE_VIEW : CompositingSurfaceType.SURFACE_VIEW; mContentViewRenderView = new ContentViewRenderView(getContext(), surfaceType) { protected void onReadyToRender() { // Anything depending on the underlying Surface readiness should // be placed here. } }; mContentViewRenderView.onNativeLibraryLoaded(mWindow); mLaunchScreenManager = new XWalkLaunchScreenManager(getContext(), mXWalkView); mContentViewRenderView.registerFirstRenderedFrameListener(mLaunchScreenManager); addView(mContentViewRenderView, new FrameLayout.LayoutParams( FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT)); mNativeContent = newNativeContent; // The native side object has been bound to this java instance, so now is the time to // bind all the native->java relationships. 
mCleanupReference = new CleanupReference(this, new DestroyRunnable(mNativeContent)); mNativeWebContents = nativeGetWebContents(mNativeContent); // Initialize ContentView. mContentViewCore = new ContentViewCore(getContext()); mContentView = ContentView.newInstance(getContext(), mContentViewCore); mContentViewCore.initialize(mContentView, mContentView, mNativeWebContents, mWindow); mWebContents = mContentViewCore.getWebContents(); mNavigationController = mWebContents.getNavigationController(); addView(mContentView, new FrameLayout.LayoutParams( FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT)); mContentViewCore.setContentViewClient(mContentsClientBridge); mContentViewRenderView.setCurrentContentViewCore(mContentViewCore); // For addJavascriptInterface mContentsClientBridge.installWebContentsObserver(mWebContents); // Set DIP scale. mContentsClientBridge.setDIPScale(DeviceDisplayInfo.create(getContext()).getDIPScale()); mContentViewCore.setDownloadDelegate(mContentsClientBridge); // Set the third argument isAccessFromFileURLsGrantedByDefault to false, so that // the members mAllowUniversalAccessFromFileURLs and mAllowFileAccessFromFileURLs // won't be changed from false to true at the same time in the constructor of // XWalkSettings class. mSettings = new XWalkSettings(getContext(), mNativeWebContents, false); // Enable AllowFileAccessFromFileURLs, so that files under file:// path could be // loaded by XMLHttpRequest. 
mSettings.setAllowFileAccessFromFileURLs(true); // Enable this by default to suppport new window creation mSettings.setSupportMultipleWindows(true); nativeSetJavaPeers(mNativeContent, this, mXWalkContentsDelegateAdapter, mContentsClientBridge, mIoThreadClient, mContentsClientBridge.getInterceptNavigationDelegate()); } public void supplyContentsForPopup(XWalkContent newContents) { if (mNativeContent == 0) return; long popupNativeXWalkContent = nativeReleasePopupXWalkContent(mNativeContent); if (popupNativeXWalkContent == 0) { Log.w(TAG, "Popup XWalkView bind failed: no pending content."); if (newContents != null) newContents.destroy(); return; } if (newContents == null) { nativeDestroy(popupNativeXWalkContent); return; } newContents.receivePopupContents(popupNativeXWalkContent); } private void receivePopupContents(long popupNativeXWalkContents) { setNativeContent(popupNativeXWalkContents); mContentViewCore.onShow(); } void doLoadUrl(String url, String content) { // Handle the same url loading by parameters. 
if (url != null && !url.isEmpty() && TextUtils.equals(url, mWebContents.getUrl())) { mNavigationController.reload(true); } else { LoadUrlParams params = null; if (content == null || content.isEmpty()) { params = new LoadUrlParams(url); } else { params = LoadUrlParams.createLoadDataParamsWithBaseUrl( content, "text/html", false, url, null); } params.setOverrideUserAgent(LoadUrlParams.UA_OVERRIDE_TRUE); mNavigationController.loadUrl(params); } mContentView.requestFocus(); } public void loadUrl(String url, String data) { if (mNativeContent == 0) return; if ((url == null || url.isEmpty()) && (data == null || data.isEmpty())) { return; } doLoadUrl(url, data); } public void reload(int mode) { if (mNativeContent == 0) return; switch (mode) { case XWalkViewInternal.RELOAD_IGNORE_CACHE: mNavigationController.reloadIgnoringCache(true); break; case XWalkViewInternal.RELOAD_NORMAL: default: mNavigationController.reload(true); } } public String getUrl() { if (mNativeContent == 0) return null; String url = mWebContents.getUrl(); if (url == null || url.trim().isEmpty()) return null; return url; } public String getTitle() { if (mNativeContent == 0) return null; String title = mWebContents.getTitle().trim(); if (title == null) title = ""; return title; } public void addJavascriptInterface(Object object, String name) { if (mNativeContent == 0) return; mContentViewCore.addPossiblyUnsafeJavascriptInterface(object, name, javascriptInterfaceClass); } public void evaluateJavascript(String script, ValueCallback<String> callback) { if (mNativeContent == 0) return; final ValueCallback<String> fCallback = callback; JavaScriptCallback coreCallback = null; if (fCallback != null) { coreCallback = new JavaScriptCallback() { @Override public void handleJavaScriptResult(String jsonResult) { fCallback.onReceiveValue(jsonResult); } }; } mContentViewCore.evaluateJavaScript(script, coreCallback); } public void setUIClient(XWalkUIClientInternal client) { if (mNativeContent == 0) return; 
mContentsClientBridge.setUIClient(client); } public void setResourceClient(XWalkResourceClientInternal client) { if (mNativeContent == 0) return; mContentsClientBridge.setResourceClient(client); } public void setXWalkWebChromeClient(XWalkWebChromeClient client) { if (mNativeContent == 0) return; mContentsClientBridge.setXWalkWebChromeClient(client); } public XWalkWebChromeClient getXWalkWebChromeClient() { if (mNativeContent == 0) return null; return mContentsClientBridge.getXWalkWebChromeClient(); } public void setXWalkClient(XWalkClient client) { if (mNativeContent == 0) return; mContentsClientBridge.setXWalkClient(client); } public void setDownloadListener(DownloadListener listener) { if (mNativeContent == 0) return; mContentsClientBridge.setDownloadListener(listener); } public void setNavigationHandler(XWalkNavigationHandler handler) { if (mNativeContent == 0) return; mContentsClientBridge.setNavigationHandler(handler); } public void setNotificationService(XWalkNotificationService service) { if (mNativeContent == 0) return; mContentsClientBridge.setNotificationService(service); } public void onPause() { if (mNativeContent == 0) return; mContentViewCore.onHide(); } public void onResume() { if (mNativeContent == 0) return; mContentViewCore.onShow(); } public void onActivityResult(int requestCode, int resultCode, Intent data) { if (mNativeContent == 0) return; mWindow.onActivityResult(requestCode, resultCode, data); } public boolean onNewIntent(Intent intent) { if (mNativeContent == 0) return false; return mContentsClientBridge.onNewIntent(intent); } public void clearCache(boolean includeDiskFiles) { if (mNativeContent == 0) return; nativeClearCache(mNativeContent, includeDiskFiles); } public void clearHistory() { if (mNativeContent == 0) return; mNavigationController.clearHistory(); } public boolean canGoBack() { return (mNativeContent == 0) ? 
false : mNavigationController.canGoBack(); } public void goBack() { if (mNativeContent == 0) return; mNavigationController.goBack(); } public boolean canGoForward() { return (mNativeContent == 0) ? false : mNavigationController.canGoForward(); } public void goForward() { if (mNativeContent == 0) return; mNavigationController.goForward(); } void navigateTo(int offset) { mNavigationController.goToOffset(offset); } public void stopLoading() { if (mNativeContent == 0) return; mWebContents.stop(); mContentsClientBridge.onStopLoading(); } // Currently, timer pause/resume is actually // a global setting. And multiple pause will fail the // DCHECK in content (content_view_statics.cc:57). // Here uses a static boolean to avoid this issue. private static boolean timerPaused = false; // TODO(Guangzhen): ContentViewStatics will be removed in upstream, // details in content_view_statics.cc. // We need follow up after upstream updates that. public void pauseTimers() { if (timerPaused || (mNativeContent == 0)) return; ContentViewStatics.setWebKitSharedTimersSuspended(true); timerPaused = true; } public void resumeTimers() { if (!timerPaused || (mNativeContent == 0)) return; ContentViewStatics.setWebKitSharedTimersSuspended(false); timerPaused = false; } public String getOriginalUrl() { if (mNativeContent == 0) return null; NavigationHistory history = mNavigationController.getNavigationHistory(); int currentIndex = history.getCurrentEntryIndex(); if (currentIndex >= 0 && currentIndex < history.getEntryCount()) { return history.getEntryAtIndex(currentIndex).getOriginalUrl(); } return null; } public String getXWalkVersion() { if (mNativeContent == 0) return ""; return nativeGetVersion(mNativeContent); } public void setBackgroundColor(int color) { if (mNativeContent == 0) return; nativeSetBackgroundColor(mNativeContent, color); } public void setNetworkAvailable(boolean networkUp) { if (mNativeContent == 0) return; nativeSetJsOnlineProperty(mNativeContent, networkUp); } // For 
instrumentation test. public ContentViewCore getContentViewCoreForTest() { return mContentViewCore; } // For instrumentation test. public void installWebContentsObserverForTest(XWalkContentsClient contentClient) { if (mNativeContent == 0) return; contentClient.installWebContentsObserver(mContentViewCore.getWebContents()); } public String devToolsAgentId() { if (mNativeContent == 0) return ""; return nativeDevToolsAgentId(mNativeContent); } public XWalkSettings getSettings() { return mSettings; } public void loadAppFromManifest(String url, String data) { if (mNativeContent == 0 || ((url == null || url.isEmpty()) && (data == null || data.isEmpty()))) { return; } String content = data; // If the data of manifest.json is not set, try to load it. if (data == null || data.isEmpty()) { try { content = AndroidProtocolHandler.getUrlContent(mXWalkView.getActivity(), url); } catch (IOException e) { throw new RuntimeException("Failed to read the manifest: " + url); } } // Calculate the base url of manifestUrl. Used by native side. // TODO(yongsheng): It's from runtime side. Need to find a better way // to get base url. 
String baseUrl = url; int position = url.lastIndexOf("/"); if (position != -1) { baseUrl = url.substring(0, position + 1); } else { Log.w(TAG, "The url of manifest.json is probably not set correctly."); } if (!nativeSetManifest(mNativeContent, baseUrl, content)) { throw new RuntimeException("Failed to parse the manifest file: " + url); } } public XWalkNavigationHistoryInternal getNavigationHistory() { if (mNativeContent == 0) return null; return new XWalkNavigationHistoryInternal(mXWalkView, mNavigationController.getNavigationHistory()); } public static final String SAVE_RESTORE_STATE_KEY = "XWALKVIEW_STATE"; public XWalkNavigationHistoryInternal saveState(Bundle outState) { if (mNativeContent == 0 || outState == null) return null; byte[] state = nativeGetState(mNativeContent); if (state == null) return null; outState.putByteArray(SAVE_RESTORE_STATE_KEY, state); return getNavigationHistory(); } public XWalkNavigationHistoryInternal restoreState(Bundle inState) { if (mNativeContent == 0 || inState == null) return null; byte[] state = inState.getByteArray(SAVE_RESTORE_STATE_KEY); if (state == null) return null; boolean result = nativeSetState(mNativeContent, state); // The onUpdateTitle callback normally happens when a page is loaded, // but is optimized out in the restoreState case because the title is // already restored. See WebContentsImpl::UpdateTitleForEntry. So we // call the callback explicitly here. if (result) { mContentsClientBridge.onUpdateTitle(mWebContents.getTitle()); } return result ? 
getNavigationHistory() : null; } boolean hasEnteredFullscreen() { return mContentsClientBridge.hasEnteredFullscreen(); } void exitFullscreen() { if (hasEnteredFullscreen()) { mContentsClientBridge.exitFullscreen(mNativeWebContents); } } @CalledByNative public void onGetUrlFromManifest(String url) { if (url != null && !url.isEmpty()) { loadUrl(url, null); } } @CalledByNative public void onGetUrlAndLaunchScreenFromManifest(String url, String readyWhen, String imageBorder) { if (url == null || url.isEmpty()) return; mLaunchScreenManager.displayLaunchScreen(readyWhen, imageBorder); mContentsClientBridge.registerPageLoadListener(mLaunchScreenManager); loadUrl(url, null); } @CalledByNative public void onGetFullscreenFlagFromManifest(boolean enterFullscreen) { if (enterFullscreen) { if (VERSION.SDK_INT >= VERSION_CODES.KITKAT) { View decorView = mXWalkView.getActivity().getWindow().getDecorView(); decorView.setSystemUiVisibility( View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY); } else { mXWalkView.getActivity().getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); } } } public void destroy() { if (mNativeContent == 0) return; XWalkPreferencesInternal.unload(this); // Reset existing notification service in order to destruct it. setNotificationService(null); // Remove its children used for page rendering from view hierarchy. removeView(mContentView); removeView(mContentViewRenderView); mContentViewRenderView.setCurrentContentViewCore(null); // Destroy the native resources. 
        // NOTE(review): tail of a destroy/teardown method whose header is above this chunk —
        // releases the render view, content core and the native-side cleanup reference.
        mContentViewRenderView.destroy();
        mContentViewCore.destroy();
        mCleanupReference.cleanupNow();
        mCleanupReference = null;
        mNativeContent = 0;
    }

    /** Returns the routing id of the native WebContents peer. */
    public int getRoutingID() {
        return nativeGetRoutingID(mNativeContent);
    }

    //--------------------------------------------------------------------------------------------
    private class XWalkIoThreadClientImpl implements XWalkContentsIoThreadClient {
        // All methods are called on the IO thread.

        @Override
        public int getCacheMode() {
            return mSettings.getCacheMode();
        }

        /**
         * Gives the embedder a chance to intercept a resource request.
         *
         * @return the embedder-supplied response data, or {@code null} to let the network
         *         stack handle the request normally.
         */
        @Override
        public InterceptedRequestData shouldInterceptRequest(final String url,
                boolean isMainFrame) {
            // Notify a resource load is started. This is not the best place to start the callback
            // but it's a workable way.
            mContentsClientBridge.getCallbackHelper().postOnResourceLoadStarted(url);
            WebResourceResponse webResourceResponse =
                    mContentsClientBridge.shouldInterceptRequest(url);
            InterceptedRequestData interceptedRequestData = null;
            if (webResourceResponse == null) {
                // Not intercepted: report the load to the client as a normal resource load.
                mContentsClientBridge.getCallbackHelper().postOnLoadResource(url);
            } else {
                // Intercepted main-frame responses with no body are reported as load errors.
                if (isMainFrame && webResourceResponse.getData() == null) {
                    mContentsClientBridge.getCallbackHelper().postOnReceivedError(
                            XWalkResourceClientInternal.ERROR_UNKNOWN, null, url);
                }
                interceptedRequestData = new InterceptedRequestData(
                        webResourceResponse.getMimeType(),
                        webResourceResponse.getEncoding(),
                        webResourceResponse.getData());
            }
            return interceptedRequestData;
        }

        @Override
        public boolean shouldBlockContentUrls() {
            return !mSettings.getAllowContentAccess();
        }

        @Override
        public boolean shouldBlockFileUrls() {
            return !mSettings.getAllowFileAccess();
        }

        @Override
        public boolean shouldBlockNetworkLoads() {
            return mSettings.getBlockNetworkLoads();
        }

        @Override
        public void onDownloadStart(String url, String userAgent, String contentDisposition,
                String mimeType, long contentLength) {
            // Forward download notifications to the UI-thread client via the callback helper.
            mContentsClientBridge.getCallbackHelper().postOnDownloadStart(url, userAgent,
                    contentDisposition, mimeType, contentLength);
        }

        @Override
        public void newLoginRequest(String realm, String account, String args) {
            mContentsClientBridge.getCallbackHelper().postOnReceivedLoginRequest(
                    realm, account, args);
        }
    }

    /** Bridges geolocation permission decisions from the client back to the native side. */
    private class XWalkGeolocationCallback implements XWalkGeolocationPermissions.Callback {
        @Override
        public void invoke(final String origin, final boolean allow, final boolean retain) {
            // The decision may arrive on any thread; hop to the UI thread before touching
            // permission state and the native object.
            ThreadUtils.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    if (retain) {
                        // Persist the decision for future requests from this origin.
                        if (allow) {
                            mGeolocationPermissions.allow(origin);
                        } else {
                            mGeolocationPermissions.deny(origin);
                        }
                    }
                    nativeInvokeGeolocationCallback(mNativeContent, allow, origin);
                }
            });
        }
    }

    @CalledByNative
    private void onGeolocationPermissionsShowPrompt(String origin) {
        if (mNativeContent == 0) return;
        // Reject if geolocation is disabled, or the origin has a retained deny.
        if (!mSettings.getGeolocationEnabled()) {
            nativeInvokeGeolocationCallback(mNativeContent, false, origin);
            return;
        }
        // Allow if the origin has a retained allow.
        if (mGeolocationPermissions.hasOrigin(origin)) {
            nativeInvokeGeolocationCallback(mNativeContent,
                    mGeolocationPermissions.isOriginAllowed(origin), origin);
            return;
        }
        // No retained decision: ask the embedding client and report back asynchronously.
        mContentsClientBridge.onGeolocationPermissionsShowPrompt(
                origin, new XWalkGeolocationCallback());
    }

    @CalledByNative
    public void onGeolocationPermissionsHidePrompt() {
        mContentsClientBridge.onGeolocationPermissionsHidePrompt();
    }

    /**
     * Starts the DevTools server (idempotent) and restricts socket connections to
     * {@code allowedUid}.
     */
    public void enableRemoteDebugging(int allowedUid) {
        // Chrome looks for "devtools_remote" pattern in the name of a unix domain socket
        // to identify a debugging page
        final String socketName = getContext().getApplicationContext().getPackageName()
                + "_devtools_remote";
        if (mDevToolsServer == null) {
            mDevToolsServer = new XWalkDevToolsServer(socketName);
            mDevToolsServer.allowConnectionFromUid(allowedUid);
            mDevToolsServer.setRemoteDebuggingEnabled(true);
        }
    }

    // Enables remote debugging and returns the URL at which the dev tools server is listening
    // for commands. Only the current process is allowed to connect to the server.
    // NOTE(review): this overload is void — the URL is obtained via getRemoteDebuggingUrl().
    void enableRemoteDebugging() {
        enableRemoteDebugging(getContext().getApplicationInfo().uid);
    }

    /** Stops and tears down the DevTools server, if any. */
    void disableRemoteDebugging() {
        if (mDevToolsServer == null) return;

        if (mDevToolsServer.isRemoteDebuggingEnabled()) {
            mDevToolsServer.setRemoteDebuggingEnabled(false);
        }
        mDevToolsServer.destroy();
        mDevToolsServer = null;
    }

    /** Returns the websocket URL of the DevTools page endpoint, or "" if debugging is off. */
    public String getRemoteDebuggingUrl() {
        if (mDevToolsServer == null) return "";
        // devtools/page is hardcoded in devtools_http_handler_impl.cc (kPageUrlPrefix)
        return "ws://" + mDevToolsServer.getSocketName() + "/devtools/page/" + devToolsAgentId();
    }

    /** Reacts to XWalkPreferences changes by toggling the matching setting. */
    @Override
    public void onKeyValueChanged(String key, XWalkPreferencesInternal.PreferenceValue value) {
        if (key == null) return;
        if (key.equals(XWalkPreferencesInternal.REMOTE_DEBUGGING)) {
            if (value.getBooleanValue()) enableRemoteDebugging();
            else disableRemoteDebugging();
        } else if (key.equals(XWalkPreferencesInternal.ENABLE_JAVASCRIPT)) {
            if (mSettings != null) {
                mSettings.setJavaScriptEnabled(value.getBooleanValue());
            }
        } else if (key.equals(XWalkPreferencesInternal.JAVASCRIPT_CAN_OPEN_WINDOW)) {
            if (mSettings != null) {
                mSettings.setJavaScriptCanOpenWindowsAutomatically(value.getBooleanValue());
            }
        } else if (key.equals(XWalkPreferencesInternal.ALLOW_UNIVERSAL_ACCESS_FROM_FILE)) {
            if (mSettings != null) {
                mSettings.setAllowUniversalAccessFromFileURLs(value.getBooleanValue());
            }
        } else if (key.equals(XWalkPreferencesInternal.SUPPORT_MULTIPLE_WINDOWS)) {
            if (mSettings != null) {
                mSettings.setSupportMultipleWindows(value.getBooleanValue());
            }
        }
    }

    public void setOverlayVideoMode(boolean enabled) {
        if (mContentViewRenderView != null) {
            mContentViewRenderView.setOverlayVideoMode(enabled);
        }
    }

    // JNI entry points implemented by the native XWalkContent peer.
    private native long nativeInit();
    private static native void nativeDestroy(long nativeXWalkContent);
    private native long nativeGetWebContents(long nativeXWalkContent);
    private native long nativeReleasePopupXWalkContent(long nativeXWalkContent);
    private native void nativeSetJavaPeers(
            long nativeXWalkContent, XWalkContent xwalkContent,
            XWalkWebContentsDelegateAdapter xwalkContentsDelegate,
            XWalkContentsClientBridge contentsClientBridge,
            XWalkContentsIoThreadClient ioThreadClient,
            InterceptNavigationDelegate navigationInterceptionDelegate);
    private native void nativeClearCache(long nativeXWalkContent, boolean includeDiskFiles);
    private native String nativeDevToolsAgentId(long nativeXWalkContent);
    private native String nativeGetVersion(long nativeXWalkContent);
    private native void nativeSetJsOnlineProperty(long nativeXWalkContent, boolean networkUp);
    private native boolean nativeSetManifest(long nativeXWalkContent, String path, String manifest);
    private native int nativeGetRoutingID(long nativeXWalkContent);
    private native void nativeInvokeGeolocationCallback(
            long nativeXWalkContent, boolean value, String requestingFrame);
    private native byte[] nativeGetState(long nativeXWalkContent);
    private native boolean nativeSetState(long nativeXWalkContent, byte[] state);
    private native void nativeSetBackgroundColor(long nativeXWalkContent, int color);
}
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.extractor.ts; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.audio.AacUtil; import com.google.android.exoplayer2.extractor.DummyTrackOutput; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; import java.util.Arrays; import java.util.Collections; import org.checkerframework.checker.nullness.qual.EnsuresNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** * Parses a continuous ADTS byte stream and extracts individual frames. 
 */
public final class AdtsReader implements ElementaryStreamReader {

  private static final String TAG = "AdtsReader";

  // States of the parsing state machine driven by consume().
  private static final int STATE_FINDING_SAMPLE = 0;
  private static final int STATE_CHECKING_ADTS_HEADER = 1;
  private static final int STATE_READING_ID3_HEADER = 2;
  private static final int STATE_READING_ADTS_HEADER = 3;
  private static final int STATE_READING_SAMPLE = 4;

  private static final int HEADER_SIZE = 5;
  private static final int CRC_SIZE = 2;

  // Match states used while looking for the next sample
  private static final int MATCH_STATE_VALUE_SHIFT = 8;
  private static final int MATCH_STATE_START = 1 << MATCH_STATE_VALUE_SHIFT;
  private static final int MATCH_STATE_FF = 2 << MATCH_STATE_VALUE_SHIFT;
  private static final int MATCH_STATE_I = 3 << MATCH_STATE_VALUE_SHIFT;
  private static final int MATCH_STATE_ID = 4 << MATCH_STATE_VALUE_SHIFT;

  private static final int ID3_HEADER_SIZE = 10;
  private static final int ID3_SIZE_OFFSET = 6;
  private static final byte[] ID3_IDENTIFIER = {'I', 'D', '3'};
  private static final int VERSION_UNSET = -1;

  private final boolean exposeId3;
  // Scratch holds up to one ADTS header plus its optional CRC.
  private final ParsableBitArray adtsScratch;
  private final ParsableByteArray id3HeaderBuffer;
  @Nullable private final String language;

  private @MonotonicNonNull String formatId;
  private @MonotonicNonNull TrackOutput output;
  private @MonotonicNonNull TrackOutput id3Output;

  private int state;
  // Bytes read so far of the item (header or sample payload) currently being read.
  private int bytesRead;

  private int matchState;

  private boolean hasCrc;
  private boolean foundFirstFrame;

  // Used to verify sync words
  private int firstFrameVersion;
  private int firstFrameSampleRateIndex;

  private int currentFrameVersion;

  // Used when parsing the header.
  private boolean hasOutputFormat;
  private long sampleDurationUs;
  private int sampleSize;

  // Used when reading the samples.
  private long timeUs;

  // The output currently being written to (audio output or ID3 output).
  private @MonotonicNonNull TrackOutput currentOutput;
  private long currentSampleDuration;

  /**
   * @param exposeId3 True if the reader should expose ID3 information.
   */
  public AdtsReader(boolean exposeId3) {
    this(exposeId3, null);
  }

  /**
   * @param exposeId3 True if the reader should expose ID3 information.
   * @param language Track language.
   */
  public AdtsReader(boolean exposeId3, @Nullable String language) {
    adtsScratch = new ParsableBitArray(new byte[HEADER_SIZE + CRC_SIZE]);
    // Pre-populate the buffer with the "ID3" identifier; only the remaining header
    // bytes are read from the stream (see setReadingId3HeaderState()).
    id3HeaderBuffer = new ParsableByteArray(Arrays.copyOf(ID3_IDENTIFIER, ID3_HEADER_SIZE));
    setFindingSampleState();
    firstFrameVersion = VERSION_UNSET;
    firstFrameSampleRateIndex = C.INDEX_UNSET;
    sampleDurationUs = C.TIME_UNSET;
    this.exposeId3 = exposeId3;
    this.language = language;
  }

  /** Returns whether an integer matches an ADTS SYNC word. */
  public static boolean isAdtsSyncWord(int candidateSyncWord) {
    return (candidateSyncWord & 0xFFF6) == 0xFFF0;
  }

  @Override
  public void seek() {
    resetSync();
  }

  @Override
  public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) {
    idGenerator.generateNewId();
    formatId = idGenerator.getFormatId();
    output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_AUDIO);
    currentOutput = output;
    if (exposeId3) {
      idGenerator.generateNewId();
      id3Output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_METADATA);
      id3Output.format(
          new Format.Builder()
              .setId(idGenerator.getFormatId())
              .setSampleMimeType(MimeTypes.APPLICATION_ID3)
              .build());
    } else {
      // ID3 data is parsed but discarded.
      id3Output = new DummyTrackOutput();
    }
  }

  @Override
  public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) {
    timeUs = pesTimeUs;
  }

  /**
   * Consumes the buffer, advancing the state machine until the buffer is exhausted. Each state
   * handler either consumes bytes or transitions state; partial headers/samples are carried
   * across calls via {@link #bytesRead}.
   */
  @Override
  public void consume(ParsableByteArray data) throws ParserException {
    assertTracksCreated();
    while (data.bytesLeft() > 0) {
      switch (state) {
        case STATE_FINDING_SAMPLE:
          findNextSample(data);
          break;
        case STATE_READING_ID3_HEADER:
          if (continueRead(data, id3HeaderBuffer.data, ID3_HEADER_SIZE)) {
            parseId3Header();
          }
          break;
        case STATE_CHECKING_ADTS_HEADER:
          checkAdtsHeader(data);
          break;
        case STATE_READING_ADTS_HEADER:
          int targetLength = hasCrc ? HEADER_SIZE + CRC_SIZE : HEADER_SIZE;
          if (continueRead(data, adtsScratch.data, targetLength)) {
            parseAdtsHeader();
          }
          break;
        case STATE_READING_SAMPLE:
          readSample(data);
          break;
        default:
          throw new IllegalStateException();
      }
    }
  }

  @Override
  public void packetFinished() {
    // Do nothing.
  }

  /**
   * Returns the duration in microseconds per sample, or {@link C#TIME_UNSET} if the sample duration
   * is not available.
   */
  public long getSampleDurationUs() {
    return sampleDurationUs;
  }

  private void resetSync() {
    foundFirstFrame = false;
    setFindingSampleState();
  }

  /**
   * Continues a read from the provided {@code source} into a given {@code target}. It's assumed
   * that the data should be written into {@code target} starting from an offset of zero.
   *
   * @param source The source from which to read.
   * @param target The target into which data is to be read.
   * @param targetLength The target length of the read.
   * @return Whether the target length was reached.
   */
  private boolean continueRead(ParsableByteArray source, byte[] target, int targetLength) {
    int bytesToRead = Math.min(source.bytesLeft(), targetLength - bytesRead);
    source.readBytes(target, bytesRead, bytesToRead);
    bytesRead += bytesToRead;
    return bytesRead == targetLength;
  }

  /**
   * Sets the state to STATE_FINDING_SAMPLE.
   */
  private void setFindingSampleState() {
    state = STATE_FINDING_SAMPLE;
    bytesRead = 0;
    matchState = MATCH_STATE_START;
  }

  /**
   * Sets the state to STATE_READING_ID3_HEADER and resets the fields required for
   * {@link #parseId3Header()}.
   */
  private void setReadingId3HeaderState() {
    state = STATE_READING_ID3_HEADER;
    // The "ID3" identifier was already matched; only the remaining header bytes are read.
    bytesRead = ID3_IDENTIFIER.length;
    sampleSize = 0;
    id3HeaderBuffer.setPosition(0);
  }

  /**
   * Sets the state to STATE_READING_SAMPLE.
   *
   * @param outputToUse TrackOutput object to write the sample to
   * @param currentSampleDuration Duration of the sample to be read
   * @param priorReadBytes Size of prior read bytes
   * @param sampleSize Size of the sample
   */
  private void setReadingSampleState(TrackOutput outputToUse, long currentSampleDuration,
      int priorReadBytes, int sampleSize) {
    state = STATE_READING_SAMPLE;
    bytesRead = priorReadBytes;
    this.currentOutput = outputToUse;
    this.currentSampleDuration = currentSampleDuration;
    this.sampleSize = sampleSize;
  }

  /**
   * Sets the state to STATE_READING_ADTS_HEADER.
   */
  private void setReadingAdtsHeaderState() {
    state = STATE_READING_ADTS_HEADER;
    bytesRead = 0;
  }

  /** Sets the state to STATE_CHECKING_ADTS_HEADER. */
  private void setCheckingAdtsHeaderState() {
    state = STATE_CHECKING_ADTS_HEADER;
    bytesRead = 0;
  }

  /**
   * Locates the next sample start, advancing the position to the byte that immediately follows
   * identifier. If a sample was not located, the position is advanced to the limit.
   *
   * @param pesBuffer The buffer whose position should be advanced.
   */
  private void findNextSample(ParsableByteArray pesBuffer) {
    byte[] adtsData = pesBuffer.data;
    int position = pesBuffer.getPosition();
    int endOffset = pesBuffer.limit();
    while (position < endOffset) {
      int data = adtsData[position++] & 0xFF;
      if (matchState == MATCH_STATE_FF && isAdtsSyncBytes((byte) 0xFF, (byte) data)) {
        if (foundFirstFrame
            || checkSyncPositionValid(pesBuffer, /* syncPositionCandidate= */ position - 2)) {
          // Bit 3 of the second sync byte is the MPEG version; bit 0 (inverted) signals CRC.
          currentFrameVersion = (data & 0x8) >> 3;
          hasCrc = (data & 0x1) == 0;
          if (!foundFirstFrame) {
            setCheckingAdtsHeaderState();
          } else {
            setReadingAdtsHeaderState();
          }
          pesBuffer.setPosition(position);
          return;
        }
      }
      // Combine the match state with the current byte to match "0xFF" or "ID3" prefixes.
      switch (matchState | data) {
        case MATCH_STATE_START | 0xFF:
          matchState = MATCH_STATE_FF;
          break;
        case MATCH_STATE_START | 'I':
          matchState = MATCH_STATE_I;
          break;
        case MATCH_STATE_I | 'D':
          matchState = MATCH_STATE_ID;
          break;
        case MATCH_STATE_ID | '3':
          setReadingId3HeaderState();
          pesBuffer.setPosition(position);
          return;
        default:
          if (matchState != MATCH_STATE_START) {
            // If matching fails in a later state, revert to MATCH_STATE_START and
            // check this byte again
            matchState = MATCH_STATE_START;
            position--;
          }
          break;
      }
    }
    pesBuffer.setPosition(position);
  }

  /**
   * Peeks the Adts header of the current frame and checks if it is valid. If the header is valid,
   * transition to {@link #STATE_READING_ADTS_HEADER}; else, transition to {@link
   * #STATE_FINDING_SAMPLE}.
   */
  private void checkAdtsHeader(ParsableByteArray buffer) {
    if (buffer.bytesLeft() == 0) {
      // Not enough data to check yet, defer this check.
      return;
    }
    // Peek the next byte of buffer into scratch array.
    adtsScratch.data[0] = buffer.data[buffer.getPosition()];

    adtsScratch.setPosition(2);
    int currentFrameSampleRateIndex = adtsScratch.readBits(4);
    if (firstFrameSampleRateIndex != C.INDEX_UNSET
        && currentFrameSampleRateIndex != firstFrameSampleRateIndex) {
      // Invalid header.
      resetSync();
      return;
    }

    if (!foundFirstFrame) {
      foundFirstFrame = true;
      firstFrameVersion = currentFrameVersion;
      firstFrameSampleRateIndex = currentFrameSampleRateIndex;
    }
    setReadingAdtsHeaderState();
  }

  /**
   * Checks whether a candidate SYNC word position is likely to be the position of a real SYNC word.
   * The caller must check that the first byte of the SYNC word is 0xFF before calling this method.
   * This method performs the following checks:
   *
   * <ul>
   *   <li>The MPEG version of this frame must match the previously detected version.
   *   <li>The sample rate index of this frame must match the previously detected sample rate index.
   *   <li>The frame size must be at least 7 bytes
   *   <li>The bytes following the frame must be either another SYNC word with the same MPEG
   *       version, or the start of an ID3 header.
   * </ul>
   *
   * With the exception of the first check, if there is insufficient data in the buffer then checks
   * are optimistically skipped and {@code true} is returned.
   *
   * @param pesBuffer The buffer containing at data to check.
   * @param syncPositionCandidate The candidate SYNC word position. May be -1 if the first byte of
   *     the candidate was the last byte of the previously consumed buffer.
   * @return True if all checks were passed or skipped, indicating the position is likely to be the
   *     position of a real SYNC word. False otherwise.
   */
  private boolean checkSyncPositionValid(ParsableByteArray pesBuffer, int syncPositionCandidate) {
    pesBuffer.setPosition(syncPositionCandidate + 1);
    if (!tryRead(pesBuffer, adtsScratch.data, 1)) {
      return false;
    }

    // The MPEG version of this frame must match the previously detected version.
    adtsScratch.setPosition(4);
    int currentFrameVersion = adtsScratch.readBits(1);
    if (firstFrameVersion != VERSION_UNSET && currentFrameVersion != firstFrameVersion) {
      return false;
    }

    // The sample rate index of this frame must match the previously detected sample rate index.
    if (firstFrameSampleRateIndex != C.INDEX_UNSET) {
      if (!tryRead(pesBuffer, adtsScratch.data, 1)) {
        // Insufficient data for further checks.
        return true;
      }
      adtsScratch.setPosition(2);
      int currentFrameSampleRateIndex = adtsScratch.readBits(4);
      if (currentFrameSampleRateIndex != firstFrameSampleRateIndex) {
        return false;
      }
      pesBuffer.setPosition(syncPositionCandidate + 2);
    }

    // The frame size must be at least 7 bytes.
    if (!tryRead(pesBuffer, adtsScratch.data, 4)) {
      // Insufficient data for further checks.
      return true;
    }
    adtsScratch.setPosition(14);
    int frameSize = adtsScratch.readBits(13);
    if (frameSize < 7) {
      return false;
    }

    // The bytes following the frame must be either another SYNC word with the same MPEG version, or
    // the start of an ID3 header.
    byte[] data = pesBuffer.data;
    int dataLimit = pesBuffer.limit();
    int nextSyncPosition = syncPositionCandidate + frameSize;
    if (nextSyncPosition >= dataLimit) {
      // Insufficient data for further checks.
      return true;
    }
    if (data[nextSyncPosition] == (byte) 0xFF) {
      if (nextSyncPosition + 1 == dataLimit) {
        // Insufficient data for further checks.
        return true;
      }
      return isAdtsSyncBytes((byte) 0xFF, data[nextSyncPosition + 1])
          && ((data[nextSyncPosition + 1] & 0x8) >> 3) == currentFrameVersion;
    } else {
      if (data[nextSyncPosition] != 'I') {
        return false;
      }
      if (nextSyncPosition + 1 == dataLimit) {
        // Insufficient data for further checks.
        return true;
      }
      if (data[nextSyncPosition + 1] != 'D') {
        return false;
      }
      if (nextSyncPosition + 2 == dataLimit) {
        // Insufficient data for further checks.
        return true;
      }
      return data[nextSyncPosition + 2] == '3';
    }
  }

  private boolean isAdtsSyncBytes(byte firstByte, byte secondByte) {
    int syncWord = (firstByte & 0xFF) << 8 | (secondByte & 0xFF);
    return isAdtsSyncWord(syncWord);
  }

  /** Reads {@code targetLength} bytes into target, and returns whether the read succeeded. */
  private boolean tryRead(ParsableByteArray source, byte[] target, int targetLength) {
    if (source.bytesLeft() < targetLength) {
      return false;
    }
    source.readBytes(target, /* offset= */ 0, targetLength);
    return true;
  }

  /** Parses the Id3 header. */
  @RequiresNonNull("id3Output")
  private void parseId3Header() {
    id3Output.sampleData(id3HeaderBuffer, ID3_HEADER_SIZE);
    id3HeaderBuffer.setPosition(ID3_SIZE_OFFSET);
    // The ID3 payload size is a synchsafe int at offset 6; read it as the "sample" size.
    setReadingSampleState(id3Output, 0, ID3_HEADER_SIZE,
        id3HeaderBuffer.readSynchSafeInt() + ID3_HEADER_SIZE);
  }

  /** Parses the sample header. */
  @RequiresNonNull("output")
  private void parseAdtsHeader() throws ParserException {
    adtsScratch.setPosition(0);

    if (!hasOutputFormat) {
      int audioObjectType = adtsScratch.readBits(2) + 1;
      if (audioObjectType != 2) {
        // The stream indicates AAC-Main (1), AAC-SSR (3) or AAC-LTP (4). When the stream indicates
        // AAC-Main it's more likely that the stream contains HE-AAC (5), which cannot be
        // represented correctly in the 2 bit audio_object_type field in the ADTS header. In
        // practice when the stream indicates AAC-SSR or AAC-LTP it more commonly contains AAC-LC or
        // HE-AAC. Since most Android devices don't support AAC-Main, AAC-SSR or AAC-LTP, and since
        // indicating AAC-LC works for HE-AAC streams, we pretend that we're dealing with AAC-LC and
        // hope for the best. In practice this often works.
        // See: https://github.com/google/ExoPlayer/issues/774
        // See: https://github.com/google/ExoPlayer/issues/1383
        Log.w(TAG, "Detected audio object type: " + audioObjectType + ", but assuming AAC LC.");
        audioObjectType = 2;
      }

      adtsScratch.skipBits(5);
      int channelConfig = adtsScratch.readBits(3);

      byte[] audioSpecificConfig =
          AacUtil.buildAudioSpecificConfig(
              audioObjectType, firstFrameSampleRateIndex, channelConfig);
      AacUtil.Config aacConfig = AacUtil.parseAudioSpecificConfig(audioSpecificConfig);

      Format format =
          new Format.Builder()
              .setId(formatId)
              .setSampleMimeType(MimeTypes.AUDIO_AAC)
              .setCodecs(aacConfig.codecs)
              .setChannelCount(aacConfig.channelCount)
              .setSampleRate(aacConfig.sampleRateHz)
              .setInitializationData(Collections.singletonList(audioSpecificConfig))
              .setLanguage(language)
              .build();
      // In this class a sample is an access unit, but the MediaFormat sample rate specifies the
      // number of PCM audio samples per second.
      sampleDurationUs = (C.MICROS_PER_SECOND * 1024) / format.sampleRate;
      output.format(format);
      hasOutputFormat = true;
    } else {
      adtsScratch.skipBits(10);
    }

    adtsScratch.skipBits(4);
    int sampleSize = adtsScratch.readBits(13) - 2 /* the sync word */ - HEADER_SIZE;
    if (hasCrc) {
      sampleSize -= CRC_SIZE;
    }

    setReadingSampleState(output, sampleDurationUs, 0, sampleSize);
  }

  /** Reads the rest of the sample */
  @RequiresNonNull("currentOutput")
  private void readSample(ParsableByteArray data) {
    int bytesToRead = Math.min(data.bytesLeft(), sampleSize - bytesRead);
    currentOutput.sampleData(data, bytesToRead);
    bytesRead += bytesToRead;
    if (bytesRead == sampleSize) {
      // Entire sample written: emit metadata, advance the timestamp, and resume searching.
      currentOutput.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null);
      timeUs += currentSampleDuration;
      setFindingSampleState();
    }
  }

  @EnsuresNonNull({"output", "currentOutput", "id3Output"})
  private void assertTracksCreated() {
    Assertions.checkNotNull(output);
    Util.castNonNull(currentOutput);
    Util.castNonNull(id3Output);
  }
}
package com.muqdd.iuob2.features.my_schedule; import android.annotation.SuppressLint; import android.app.Dialog; import android.appwidget.AppWidgetManager; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.view.LayoutInflater; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.LinearLayout; import android.widget.TextView; import com.google.android.material.snackbar.Snackbar; import com.muqdd.iuob2.R; import com.muqdd.iuob2.app.BaseFragment; import com.muqdd.iuob2.databinding.FragmentMyScheduleBinding; import com.muqdd.iuob2.features.main.Menu; import com.muqdd.iuob2.features.widgets.FullScheduleWidget; import com.muqdd.iuob2.models.RestResponse; import com.muqdd.iuob2.models.Timing; import com.muqdd.iuob2.models.User; import com.muqdd.iuob2.network.ServiceGenerator; import com.muqdd.iuob2.network.UOBSchedule; import com.orhanobut.logger.Logger; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.concurrent.TimeUnit; import androidx.annotation.NonNull; import androidx.appcompat.content.res.AppCompatResources; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; /** * Created by Ali Yusuf on 3/11/2017. 
* iUOB-2 */ @SuppressWarnings("FieldCanBeLocal") public class MyScheduleFragment extends BaseFragment { private FragmentMyScheduleBinding binding; private Drawable notificationActive; private Drawable notificationOff; private Map<Timing, MyCourse> uList; private Map<Timing, MyCourse> mList; private Map<Timing, MyCourse> tList; private Map<Timing, MyCourse> wList; private Map<Timing, MyCourse> hList; public MyScheduleFragment() { // Required empty public constructor } public static MyScheduleFragment newInstance() { MyScheduleFragment fragment = new MyScheduleFragment(); Bundle bundle = new Bundle(); bundle.putString(TITLE, Menu.MY_SCHEDULE.toString()); fragment.setArguments(bundle); return fragment; } @Override public void onAttach(@NonNull Context context) { super.onAttach(context); notificationActive = AppCompatResources.getDrawable(requireContext(), R.drawable.ic_notifications_active_24dp); notificationOff = AppCompatResources.getDrawable(requireContext(), R.drawable.ic_notifications_off_24dp); } @Override public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { super.onCreateView(inflater,container,savedInstanceState); if (binding == null) { // Inflate the layout for this fragment binding = FragmentMyScheduleBinding.inflate(inflater, container, false); initiate(); } setHasOptionsMenu(true); return binding.getRoot(); } @Override public void onCreateOptionsMenu(@NonNull android.view.Menu menu, @NonNull MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); inflater.inflate(R.menu.my_schedule_menu, menu); // init notification icon menu.findItem(R.id.notification) .setIcon(User.isNotificationOn(getContext()) ? 
notificationActive : notificationOff) .setVisible(false); } @Override public boolean onOptionsItemSelected(MenuItem item) { // handle item selection int itemId = item.getItemId(); if (itemId == R.id.edit) { if (User.isFetchingData()) { Snackbar.make(binding.mainContent, "Please wait fetching data", Snackbar.LENGTH_SHORT).show(); } else { AddCoursesFragment fragment = AddCoursesFragment.newInstance(getString(R.string.fragment_add_courses)); displayFragment(fragment); } return true; } else if (itemId == R.id.notification) { changeNotificationState(item); return true; } return super.onOptionsItemSelected(item); } @Override public void onResume() { super.onResume(); toolbar.setTitle(title); // build schedule binding.mainContent.setRefreshing(true); if (User.isCoursesUpdated(getContext())) { // update UI buildMySchedule(); } else { fetchMyScheduleData(); } } private void initiate() { //binding.mainContent.setRefreshing(true); binding.mainContent.setColorSchemeColors( getThemeColor(R.attr.colorAccent), getThemeColor(R.attr.colorPrimary), getThemeColor(R.attr.colorPrimaryVariant) ); binding.mainContent.setOnRefreshListener(this::fetchMyScheduleData); // section time comparator Comparator<Timing> comparator = (t1, t2) -> { // compare from then to then room to sort sections int f,t; return (f = t1.getTimeFrom().compareTo(t2.getTimeFrom())) == 0 ? ((t = t1.getLocation().compareTo(t2.getLocation())) == 0 ? 
t1.getLocation().compareTo(t2.getLocation()) : t ): f; }; // init lists with comparator uList = new TreeMap<>(comparator); mList = new TreeMap<>(comparator); tList = new TreeMap<>(comparator); wList = new TreeMap<>(comparator); hList = new TreeMap<>(comparator); } private void fetchMyScheduleData() { MySchedule mySchedule = User.getMySchedule(getContext()); Logger.d(mySchedule.getYear()); Logger.d(mySchedule.getSemester()); ServiceGenerator.createService(UOBSchedule.class).sectionsList(mySchedule.getYear(), mySchedule.getSemester(), mySchedule.getSectionsParam()) .enqueue(new Callback<RestResponse<List<List<MyCourse>>>>() { @Override public void onResponse(@NonNull Call<RestResponse<List<List<MyCourse>>>> call, @NonNull Response<RestResponse<List<List<MyCourse>>>> response) { if (response.body() != null && response.body().getStatusCode() == 200) { User.updateCourses(getContext(), MySchedule.getCoursesList(response.body().getData())); Logger.d("build"); requestReviewFlowDelayed(5); buildMySchedule(); } else { binding.mainContent.setRefreshing(false); Logger.e("error accord while fetching data"); } } @Override public void onFailure(@NonNull Call<RestResponse<List<List<MyCourse>>>> call, @NonNull Throwable t) { binding.mainContent.setRefreshing(false); Logger.e("error accord while fetching data"); } }); } private void buildMySchedule() { // clear lists uList.clear(); mList.clear(); tList.clear(); wList.clear(); hList.clear(); for (MyCourse course : User.getMySchedule(getContext()).getCourseList()) { if (course.getTimingLegacy() != null) { for (Timing time : course.getTimingLegacy()) { if (time.getDay().contains("U")) { uList.put(time, course); } if (time.getDay().contains("M")) { mList.put(time, course); } if (time.getDay().contains("T")) { tList.put(time, course); } if (time.getDay().contains("W")) { wList.put(time, course); } if (time.getDay().contains("H")) { hList.put(time, course); } } } } // update UI addCoursesForLayout(binding.uLayout,uList); 
addCoursesForLayout(binding.mLayout,mList); addCoursesForLayout(binding.tLayout,tList); addCoursesForLayout(binding.wLayout,wList); addCoursesForLayout(binding.hLayout,hList); if (getActivity() != null) { Intent intent = new Intent(getActivity(), FullScheduleWidget.class); intent.setAction(AppWidgetManager.ACTION_APPWIDGET_UPDATE); int[] ids = AppWidgetManager.getInstance(getActivity().getApplication()) .getAppWidgetIds(new ComponentName(getActivity().getApplication(), FullScheduleWidget.class)); intent.putExtra(AppWidgetManager.EXTRA_APPWIDGET_IDS, ids); getActivity().sendBroadcast(intent); } binding.mainContent.setRefreshing(false); } private void addCoursesForLayout(LinearLayout layout, Map<Timing, MyCourse> list) { layout.removeAllViews(); for (Timing time : list.keySet()){ MyCourse course = list.get(time); if (course != null) { layout.addView(createScheduleCell(course, time)); } else { Logger.i(time.toString()); } } } @SuppressLint("SetTextI18n") private View createScheduleCell(final MyCourse course, Timing time) { View view = LayoutInflater.from(getContext()).inflate(R.layout.cell_schedule, binding.getRoot(), false); view.findViewById(R.id.layout).setBackgroundColor(course.getBgColor()); ((TextView)view.findViewById(R.id.course)).setText(course.getCourseId()); ((TextView)view.findViewById(R.id.time_from)).setText(time.getTimeFrom()); ((TextView)view.findViewById(R.id.time_to)).setText(time.getTimeTo()); ((TextView)view.findViewById(R.id.room)).setText(time.getLocation()); view.setOnClickListener(view1 -> { Bundle bundle = new Bundle(); bundle.putString("course_id", course.getCourseId()); bundle.putString("section_number", course.getSectionNo()); mFirebaseAnalytics.logEvent("view_my_course", bundle); final Dialog dialog = new Dialog(getBaseActivity()); // prepare dialog layout LayoutInflater inflater = (LayoutInflater)getBaseActivity().getSystemService(Context.LAYOUT_INFLATER_SERVICE); final View dialogView = inflater.inflate(R.layout.dialog_course_details, 
binding.getRoot(), false); ((TextView)dialogView.findViewById((R.id.title))).setText(course.getCourseId()); ((TextView)dialogView.findViewById((R.id.section))).setText("Section: "+course.getSectionNo()); ((TextView)dialogView.findViewById((R.id.doctor))).setText("Doctor: "+course.getInstructor()); if (course.getExam() != null) { ((TextView) dialogView.findViewById((R.id.final_time))).setText("Final : " + course.getExam().toString()); } else { dialogView.findViewById((R.id.final_time)).setVisibility(View.GONE); } dialogView.findViewById((R.id.close)).setOnClickListener(view11 -> { if (dialog.isShowing()) dialog.dismiss(); }); // show dialog dialog.setContentView(dialogView); dialog.show(); }); return view; } private void changeNotificationState(MenuItem item) { // change current notification state boolean notificationState = User.setNotification(getContext(), !User.isNotificationOn(getContext())); // set new icon based on state item.setIcon(notificationState ? notificationActive : notificationOff); } }
/*
 * <!--
 *  ~ Copyright 2015-2017 OpenCB
 *  ~
 *  ~ Licensed under the Apache License, Version 2.0 (the "License");
 *  ~ you may not use this file except in compliance with the License.
 *  ~ You may obtain a copy of the License at
 *  ~
 *  ~     http://www.apache.org/licenses/LICENSE-2.0
 *  ~
 *  ~ Unless required by applicable law or agreed to in writing, software
 *  ~ distributed under the License is distributed on an "AS IS" BASIS,
 *  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  ~ See the License for the specific language governing permissions and
 *  ~ limitations under the License.
 *  -->
 *
 */

package org.opencb.biodata.formats.variant.vcf4;

import org.opencb.biodata.formats.variant.VariantFactory;
import org.opencb.biodata.models.variant.StudyEntry;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.biodata.models.variant.VariantFileMetadata;
import org.opencb.biodata.models.variant.avro.AlternateCoordinate;
import org.opencb.biodata.models.variant.avro.FileEntry;
import org.opencb.biodata.models.variant.avro.SampleEntry;
import org.opencb.biodata.models.variant.exceptions.NonStandardCompliantSampleField;
import org.opencb.biodata.models.variant.exceptions.NotAVariantException;
import org.opencb.biodata.models.variant.metadata.VariantStudyMetadata;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;

/**
 * Factory that turns raw tab-separated VCF data lines into {@link Variant} objects.
 *
 * @author Alejandro Aleman Ramos &lt;aaleman@cipf.es&gt;
 * @author Cristina Yenyxe Gonzalez Garcia &lt;cyenyxe@ebi.ac.uk&gt;
 * @author Jose Miguel Mut Lopez &lt;jmmut@ebi.ac.uk&gt;
 */
public class VariantVcfFactory implements VariantFactory {

    // Legacy attribute keys; QUAL/FILTER/SRC now delegate to the StudyEntry constants.
    @Deprecated
    public static final String ORI = "ori";
    @Deprecated
    public static final String QUAL = StudyEntry.QUAL;
    @Deprecated
    public static final String FILTER = StudyEntry.FILTER;
    @Deprecated
    public static final String SRC = StudyEntry.SRC;

    /**
     * Creates a list of Variant objects using the fields in a record of a VCF
     * file. A single Variant is built for the first (main) alternate allele;
     * any further comma-separated alternates are stored as secondary
     * alternate coordinates on the same Variant.
     *
     * Start/end coordinates assignment tries to work as similarly as possible
     * as Ensembl does, except for insertions, where start is greater than end:
     * http://www.ensembl.org/info/docs/tools/vep/vep_formats.html#vcf
     *
     * @param metadata Origin of the variants information
     * @param line     Contents of the line in the file (tab-separated VCF record)
     * @return The list of Variant objects that can be created using the fields
     * from a VCF record (always a singleton list here)
     * @throws IllegalArgumentException if the line has fewer than 8 tab-separated fields
     * @throws NotAVariantException     declared for interface compatibility; not thrown
     *                                  here (the '.' alternate check is disabled)
     */
    @Override
    public List<Variant> create(VariantStudyMetadata metadata, String line)
            throws IllegalArgumentException, NotAVariantException {
        String[] fields = line.split("\t");
        if (fields.length < 8) {
            throw new IllegalArgumentException("Not enough fields provided (min 8)");
        }

        // '.' placeholders are normalized: null id, empty reference/filter/info,
        // quality -1. The alternate is taken as-is, so a '.' alternate is NOT
        // rejected (a previous NotAVariantException check is intentionally disabled).
        String chromosome = fields[0];
        int position = Integer.parseInt(fields[1]);
        String id = fields[2].equals(".") ? null : fields[2];
        List<String> ids = id == null? Collections.emptyList() : Arrays.asList(id.split(";"));
        String reference = fields[3].equals(".") ? "" : fields[3];
        String alternate = fields[4];
        String[] alternateAlleles = alternate.split(",");
        String mainAlternate = alternateAlleles[0];
        float quality = fields[5].equals(".") ? -1 : Float.parseFloat(fields[5]);
        String filter = fields[6].equals(".") ? "" : fields[6];
        String info = fields[7].equals(".") ? "" : fields[7];
        String format = (fields.length <= 8 || fields[8].equals(".")) ? "" : fields[8];

        // End is computed from the reference length, so for insertions end < start.
        int end = position + reference.length() - 1;
        Variant variant = new Variant(chromosome, position, end, reference, mainAlternate);
        // Alternates beyond the first become secondary AlternateCoordinates with
        // unknown start/end/reference (nulls filled in later by normalization).
        List<AlternateCoordinate> secondaryAlternatesMap = Arrays.stream(alternateAlleles, 1, alternateAlleles.length)
                .map(a -> new AlternateCoordinate(chromosome, null, null, null, a, null))
                .collect(Collectors.toList());
        StudyEntry entry = new StudyEntry(metadata.getId(), secondaryAlternatesMap, Arrays.asList(format.split(":")));
        VariantFileMetadata fileMetadata = new VariantFileMetadata(metadata.getFiles().get(0));
        entry.setFileId(fileMetadata.getId());
        variant.addStudyEntry(entry);
        try {
            parseSplitSampleData(entry, fileMetadata, fields, reference, alternateAlleles);
            // Fill the rest of fields (after samples because INFO depends on them)
            setOtherFields(variant, entry, fileMetadata, ids, quality, filter, info, format, alternateAlleles, line);
        } catch (NonStandardCompliantSampleField ex) {
            // Sample parsing failure: log and drop the variant's sample data rather
            // than aborting the whole load. The variant itself is still returned.
            Logger.getLogger(VariantFactory.class.getName()).log(Level.SEVERE,
                    String.format("Variant %s:%d:%s>%s will not be saved\n%s",
                            chromosome, position, reference, alternate, ex.getMessage()));
        }

        return Collections.singletonList(variant);
    }

    /**
     * Parses the per-sample columns (index 9 onward) into SampleEntry objects
     * and attaches them to the study entry.
     *
     * @param entry            study entry receiving the samples
     * @param fileMetadata     source of the sample-name -&gt; position mapping
     * @param fields           the full tab-split VCF record
     * @param reference        reference allele (currently unused here)
     * @param alternateAlleles all alternate alleles (currently unused here)
     * @throws NonStandardCompliantSampleField declared for subclasses that normalize sample fields
     */
    protected void parseSplitSampleData(StudyEntry entry, VariantFileMetadata fileMetadata, String[] fields,
                                        String reference, String[] alternateAlleles) throws NonStandardCompliantSampleField {
        if (fields.length < 9) {
            // No FORMAT column: record carries no genotype data at all.
            entry.setSamples(Collections.emptyList());
            entry.setSamplesPosition(Collections.emptyMap());
            return;
        }
        List<String> formatFields = Arrays.asList(fields[8].split(":"));
        entry.setSamplesPosition(fileMetadata.getSamplesPosition());
        // Fixed-size list view over an array; filled positionally with set() below.
        List<SampleEntry> samplesData = Arrays.asList(new SampleEntry[fields.length - 9]);
        for (int i = 9; i < fields.length; i++) {
            List<String> data = Arrays.asList(fields[i].split(":"));
            if (data.size() < formatFields.size()) {
                // Trailing FORMAT fields may be omitted in VCF; pad with "." (missing).
                List<String> correctSizeData = new ArrayList<>(formatFields.size());
                correctSizeData.addAll(data);
                while (correctSizeData.size() < formatFields.size()) {
                    correctSizeData.add(".");
                }
                data = correctSizeData;
            }
            samplesData.set(i - 9, new SampleEntry(null, null, data));
        }

        // Add samples data to the variant entry in the fileMetadata file
        entry.setSamples(samplesData);
    }

    /**
     * Checks whether a sample should be included in a variant's list of
     * samples. If current allele index is not found in the genotype and not all
     * alleles are references/missing, then the sample must not be included.
     *
     * NOTE(review): both contains() calls are substring checks — e.g. for
     * alleleIdx 1 a genotype "0/10" matches "1". Only correct while allele
     * indices stay single-digit; confirm before use with &gt;9 alternates.
     *
     * @param genotype  The genotype
     * @param alleleIdx The index of the allele
     * @return If the sample should be associated to the variant
     */
    private boolean shouldAddSampleToVariant(String genotype, int alleleIdx) {
        if (genotype.contains(String.valueOf(alleleIdx))) {
            return true;
        }

        if (!genotype.contains("0") && !genotype.contains(".")) {
            return false;
        }

        // Only include the sample if every allele is reference (0) or missing (.).
        String[] alleles = genotype.split("[/|]");
        for (String allele : alleles) {
            if (!allele.equals("0") && !allele.equals(".")) {
                return false;
            }
        }
        return true;
    }

    /**
     * Fills the variant fields that do not depend on REF/ALT structure:
     * ids, QUAL, FILTER, the parsed INFO attributes, and the raw source line.
     *
     * @param variant          variant being populated
     * @param study            study entry attached to the variant
     * @param fileMetadata     file whose id keys the per-file data
     * @param ids              variant ids (empty list when the ID column was '.')
     * @param quality          QUAL value, or -1 when it was '.' (then omitted)
     * @param filter           FILTER value, or empty when it was '.' (then omitted)
     * @param info             INFO column, or empty when it was '.' (then omitted)
     * @param format           FORMAT column (unused here)
     * @param alternateAlleles all alternates (unused here)
     * @param line             the raw VCF line, stored under the SRC key
     */
    protected void setOtherFields(Variant variant, StudyEntry study, VariantFileMetadata fileMetadata, List<String> ids,
                                  float quality, String filter, String info, String format,
                                  String[] alternateAlleles, String line) {
        // Fields not affected by the structure of REF and ALT fields
        if (!ids.isEmpty()) {
            variant.setIds(ids);
        }
        if (quality > -1) {
            study.addFileData(fileMetadata.getId(), StudyEntry.QUAL, String.valueOf(quality));
        }
        if (!filter.isEmpty()) {
            study.addFileData(fileMetadata.getId(), StudyEntry.FILTER, filter);
        }
        if (!info.isEmpty()) {
            parseInfo(variant, fileMetadata.getId(), study.getStudyId(), info);
        }
        study.addFileData(fileMetadata.getId(), StudyEntry.SRC, line);
    }

    /**
     * Splits the semicolon-separated INFO column into key=value attributes on
     * the variant's file entry. Flag entries (no '=') are stored with an empty
     * value.
     *
     * NOTE(review): split("=") yields more than 2 parts when the value itself
     * contains '=', and such entries fall into the flag branch, losing the
     * value — confirm whether values with '=' can occur in this pipeline.
     */
    protected void parseInfo(Variant variant, String fileId, String studyId, String info) {
        StudyEntry study = variant.getStudy(studyId);
        FileEntry file = study.getFile(fileId);

        for (String var : info.split(";")) {
            String[] splits = var.split("=");
            if (splits.length == 2) {
                file.getData().put(splits[0], splits[1]);
            } else {
                // Flag attribute (e.g. "DB"): present but valueless.
                file.getData().put(splits[0], "");
            }
        }
    }

    /**
     * In multiallelic variants, we have a list of alternates, where numAllele is the one whose variant we are parsing now.
     * If we are parsing the first variant (numAllele == 0) A1 refers to the first alternative (i.e. alternateAlleles[0]), A2 to the
     * second alternative (alternateAlleles[1]), and so on.
     * However, if numAllele == 1, A1 refers to the second alternate (alternateAlleles[1]), A2 to the first (alternateAlleles[0]) and higher alleles remain unchanged.
     * Moreover, if numAllele == 2, A1 is the third alternate, A2 is the first alternate and A3 is the second alternate.
     * It's also assumed that A0 would be the reference, so it remains unchanged too.
     *
     * This pattern of the first allele moving along (and swapping) is what this function describes.
     * Also, see VariantVcfFactory.getSecondaryAlternates().
     *
     * @param parsedAllele the value of parsed alleles. e.g. 1 if genotype was "A1" (first allele).
     * @param numAllele    current variant of the alternates.
     * @return the correct allele index depending on numAllele.
     */
    public static int mapToMultiallelicIndex (int parsedAllele, int numAllele) {
        int correctedAllele = parsedAllele;
        if (parsedAllele > 0) {
            if (parsedAllele == numAllele + 1) {
                // The allele currently being parsed always maps to index 1.
                correctedAllele = 1;
            } else if (parsedAllele < numAllele + 1) {
                // Alleles before the current one shift up by one to make room.
                correctedAllele = parsedAllele + 1;
            }
            // Alleles after the current one keep their index.
        }
        return correctedAllele;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.glaf.matrix.resource.query;

import java.util.*;

import com.glaf.core.query.DataQuery;

/**
 * Query object for page resources. Carries exact-match and LIKE filter values
 * for path, file name, name, type and content type, plus a createTime range,
 * and maps sortable column aliases to their physical column names.
 */
public class PageResourceQuery extends DataQuery {
	private static final long serialVersionUID = 1L;

	// Exact-match and LIKE filter values; LIKE getters wrap the value in '%'.
	protected String resPath;
	protected String resPathLike;
	protected String resFileName;
	protected String resFileNameLike;
	protected String resName;
	protected String resNameLike;
	protected String resType;
	protected String resTypeLike;
	protected String resContentType;
	protected String resContentTypeLike;
	protected Date createTimeGreaterThanOrEqual;
	protected Date createTimeLessThanOrEqual;

	public PageResourceQuery() {
	}

	/**
	 * Ensures a non-blank value is wrapped in SQL LIKE wildcards ('%...%').
	 * Blank or null values are returned unchanged.
	 */
	private String wrapLike(String value) {
		if (value != null && value.trim().length() > 0) {
			if (!value.startsWith("%")) {
				value = "%" + value;
			}
			if (!value.endsWith("%")) {
				value = value + "%";
			}
		}
		return value;
	}

	public PageResourceQuery createTimeGreaterThanOrEqual(Date createTimeGreaterThanOrEqual) {
		if (createTimeGreaterThanOrEqual == null) {
			throw new RuntimeException("createTime is null");
		}
		this.createTimeGreaterThanOrEqual = createTimeGreaterThanOrEqual;
		return this;
	}

	public PageResourceQuery createTimeLessThanOrEqual(Date createTimeLessThanOrEqual) {
		if (createTimeLessThanOrEqual == null) {
			throw new RuntimeException("createTime is null");
		}
		this.createTimeLessThanOrEqual = createTimeLessThanOrEqual;
		return this;
	}

	public Date getCreateTimeGreaterThanOrEqual() {
		return createTimeGreaterThanOrEqual;
	}

	public Date getCreateTimeLessThanOrEqual() {
		return createTimeLessThanOrEqual;
	}

	/**
	 * Maps the logical sort column to its "E."-qualified physical column and
	 * appends the sort direction (defaults to " asc "). Returns the inherited
	 * orderBy unchanged when no sort column is set or none matches.
	 */
	public String getOrderBy() {
		if (sortColumn != null) {
			String direction = " asc ";
			if (sortOrder != null) {
				direction = sortOrder;
			}
			if ("resPath".equals(sortColumn)) {
				orderBy = "E.PATH_" + direction;
			} else if ("resFileName".equals(sortColumn)) {
				orderBy = "E.FILENAME_" + direction;
			} else if ("resName".equals(sortColumn)) {
				orderBy = "E.NAME_" + direction;
			} else if ("resType".equals(sortColumn)) {
				orderBy = "E.TYPE_" + direction;
			} else if ("resContentType".equals(sortColumn)) {
				orderBy = "E.CONTENTTYPE_" + direction;
			} else if ("createTime".equals(sortColumn)) {
				orderBy = "E.CREATETIME_" + direction;
			}
		}
		return orderBy;
	}

	public String getResContentType() {
		return resContentType;
	}

	public String getResContentTypeLike() {
		resContentTypeLike = wrapLike(resContentTypeLike);
		return resContentTypeLike;
	}

	public String getResFileName() {
		return resFileName;
	}

	public String getResFileNameLike() {
		resFileNameLike = wrapLike(resFileNameLike);
		return resFileNameLike;
	}

	public String getResName() {
		return resName;
	}

	public String getResNameLike() {
		resNameLike = wrapLike(resNameLike);
		return resNameLike;
	}

	public String getResPath() {
		return resPath;
	}

	public String getResPathLike() {
		resPathLike = wrapLike(resPathLike);
		return resPathLike;
	}

	public String getResType() {
		return resType;
	}

	public String getResTypeLike() {
		resTypeLike = wrapLike(resTypeLike);
		return resTypeLike;
	}

	@Override
	public void initQueryColumns() {
		super.initQueryColumns();
		addColumn("id", "ID_");
		addColumn("resPath", "PATH_");
		addColumn("resFileName", "FILENAME_");
		addColumn("resName", "NAME_");
		addColumn("resType", "TYPE_");
		addColumn("resContentType", "CONTENTTYPE_");
		addColumn("createTime", "CREATETIME_");
	}

	public PageResourceQuery resContentType(String resContentType) {
		if (resContentType == null) {
			throw new RuntimeException("resContentType is null");
		}
		this.resContentType = resContentType;
		return this;
	}

	public PageResourceQuery resContentTypeLike(String resContentTypeLike) {
		if (resContentTypeLike == null) {
			throw new RuntimeException("resContentType is null");
		}
		this.resContentTypeLike = resContentTypeLike;
		return this;
	}

	public PageResourceQuery resFileName(String resFileName) {
		if (resFileName == null) {
			throw new RuntimeException("resFileName is null");
		}
		this.resFileName = resFileName;
		return this;
	}

	public PageResourceQuery resFileNameLike(String resFileNameLike) {
		if (resFileNameLike == null) {
			throw new RuntimeException("resFileName is null");
		}
		this.resFileNameLike = resFileNameLike;
		return this;
	}

	public PageResourceQuery resName(String resName) {
		if (resName == null) {
			throw new RuntimeException("resName is null");
		}
		this.resName = resName;
		return this;
	}

	public PageResourceQuery resNameLike(String resNameLike) {
		if (resNameLike == null) {
			throw new RuntimeException("resName is null");
		}
		this.resNameLike = resNameLike;
		return this;
	}

	public PageResourceQuery resPath(String resPath) {
		if (resPath == null) {
			throw new RuntimeException("resPath is null");
		}
		this.resPath = resPath;
		return this;
	}

	public PageResourceQuery resPathLike(String resPathLike) {
		if (resPathLike == null) {
			throw new RuntimeException("resPath is null");
		}
		this.resPathLike = resPathLike;
		return this;
	}

	public PageResourceQuery resType(String resType) {
		if (resType == null) {
			throw new RuntimeException("resType is null");
		}
		this.resType = resType;
		return this;
	}

	public PageResourceQuery resTypeLike(String resTypeLike) {
		if (resTypeLike == null) {
			throw new RuntimeException("resType is null");
		}
		this.resTypeLike = resTypeLike;
		return this;
	}

	public void setCreateTimeGreaterThanOrEqual(Date createTimeGreaterThanOrEqual) {
		this.createTimeGreaterThanOrEqual = createTimeGreaterThanOrEqual;
	}

	public void setCreateTimeLessThanOrEqual(Date createTimeLessThanOrEqual) {
		this.createTimeLessThanOrEqual = createTimeLessThanOrEqual;
	}

	public void setResContentType(String resContentType) {
		this.resContentType = resContentType;
	}

	public void setResContentTypeLike(String resContentTypeLike) {
		this.resContentTypeLike = resContentTypeLike;
	}

	public void setResFileName(String resFileName) {
		this.resFileName = resFileName;
	}

	public void setResFileNameLike(String resFileNameLike) {
		this.resFileNameLike = resFileNameLike;
	}

	public void setResName(String resName) {
		this.resName = resName;
	}

	public void setResNameLike(String resNameLike) {
		this.resNameLike = resNameLike;
	}

	public void setResPath(String resPath) {
		this.resPath = resPath;
	}

	public void setResPathLike(String resPathLike) {
		this.resPathLike = resPathLike;
	}

	public void setResType(String resType) {
		this.resType = resType;
	}

	public void setResTypeLike(String resTypeLike) {
		this.resTypeLike = resTypeLike;
	}

}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.compiler.ant;

import com.intellij.compiler.ant.taskdefs.*;
import com.intellij.openapi.compiler.CompilerBundle;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NonNls;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

/**
 * Generates the ant targets that compile one module chunk: a main target that
 * depends on a ".production" target (production classes + resource copy) and a
 * ".tests" target (test classes + resource copy, skippable via the skip-tests
 * property).
 *
 * @author Eugene Zhuravlev
 *         Date: Mar 19, 2004
 */
public class CompileModuleChunkTarget extends CompositeGenerator {
  private final Target myMainTarget;
  private final Target myProductionTarget;
  private final Target myTestsTarget;

  public CompileModuleChunkTarget(final Project project,
                                  ModuleChunk moduleChunk,
                                  VirtualFile[] sourceRoots,
                                  VirtualFile[] testSourceRoots,
                                  File baseDir,
                                  GenerationOptions genOptions) {
    final String moduleChunkName = moduleChunk.getName();
    // Shared tags referenced from both the production and the tests javac calls.
    //noinspection HardCodedStringLiteral
    final Tag compilerArgs = new Tag("compilerarg", Pair.create("line", BuildProperties.propertyRef(
      BuildProperties.getModuleChunkCompilerArgsProperty(moduleChunkName))));
    //noinspection HardCodedStringLiteral
    final Pair<String, String> classpathRef = Pair.create("refid", BuildProperties.getClasspathProperty(moduleChunkName));
    final Tag classpathTag = new Tag("classpath", classpathRef);
    //noinspection HardCodedStringLiteral
    final Tag bootclasspathTag =
      new Tag("bootclasspath", Pair.create("refid", BuildProperties.getBootClasspathProperty(moduleChunkName)));
    final PatternSetRef compilerExcludes = new PatternSetRef(BuildProperties.getExcludedFromCompilationProperty(moduleChunkName));

    final String mainTargetName = BuildProperties.getCompileTargetName(moduleChunkName);
    final @NonNls String productionTargetName = mainTargetName + ".production";
    final @NonNls String testsTargetName = mainTargetName + ".tests";

    final int modulesCount = moduleChunk.getModules().length;
    // Main target only aggregates; production depends on dependent chunks'
    // compile targets; tests depend on production and honor the skip flag.
    myMainTarget = new Target(mainTargetName, productionTargetName + "," + testsTargetName,
                              CompilerBundle.message("generated.ant.build.compile.modules.main.target.comment", modulesCount,
                                                     moduleChunkName), null);
    myProductionTarget = new Target(productionTargetName, getChunkDependenciesString(moduleChunk),
                                    CompilerBundle.message("generated.ant.build.compile.modules.production.classes.target.comment",
                                                           modulesCount, moduleChunkName), null);
    myTestsTarget = new Target(testsTargetName, productionTargetName,
                               CompilerBundle.message("generated.ant.build.compile.modules.tests.target.comment", modulesCount,
                                                      moduleChunkName), BuildProperties.PROPERTY_SKIP_TESTS);
    final ChunkCustomCompilerExtension[] customCompilers = moduleChunk.getCustomCompilers();
    if (sourceRoots.length > 0) {
      final String outputPathRef = BuildProperties.propertyRef(BuildProperties.getOutputPathProperty(moduleChunkName));
      final Tag srcTag = new Tag("src", Pair.create("refid", BuildProperties.getSourcepathProperty(moduleChunkName)));
      myProductionTarget.add(new Mkdir(outputPathRef));
      createCustomCompilerTasks(project, moduleChunk, genOptions, false, customCompilers, compilerArgs, bootclasspathTag, classpathTag,
                                compilerExcludes, srcTag, outputPathRef);
      // Plain javac runs when there is no custom compiler, or when the form
      // compiler is enabled (forms still need javac output).
      if (customCompilers.length == 0 || genOptions.enableFormCompiler) {
        final Javac javac = new Javac(genOptions, moduleChunk, outputPathRef);
        javac.add(compilerArgs);
        javac.add(bootclasspathTag);
        javac.add(classpathTag);
        //noinspection HardCodedStringLiteral
        javac.add(srcTag);
        javac.add(compilerExcludes);
        myProductionTarget.add(javac);
      }
      myProductionTarget.add(createCopyTask(project, moduleChunk, sourceRoots, outputPathRef, baseDir, genOptions));
    }
    if (testSourceRoots.length > 0) {
      final String testOutputPathRef = BuildProperties.propertyRef(BuildProperties.getOutputPathForTestsProperty(moduleChunkName));
      final Tag srcTag = new Tag("src", Pair.create("refid", BuildProperties.getTestSourcepathProperty(moduleChunkName)));
      // Test classpath = chunk classpath + production output directory.
      final Tag testClassPath = new Tag("classpath");
      testClassPath.add(new Tag("path", classpathRef));
      testClassPath.add(new PathElement(BuildProperties.propertyRef(BuildProperties.getOutputPathProperty(moduleChunkName))));
      myTestsTarget.add(new Mkdir(testOutputPathRef));
      createCustomCompilerTasks(project, moduleChunk, genOptions, true, customCompilers, compilerArgs, bootclasspathTag, testClassPath,
                                compilerExcludes, srcTag, testOutputPathRef);
      if (customCompilers.length == 0 || genOptions.enableFormCompiler) {
        final Javac javac = new Javac(genOptions, moduleChunk, testOutputPathRef);
        javac.add(compilerArgs);
        javac.add(classpathTag);
        //noinspection HardCodedStringLiteral
        javac.add(testClassPath);
        //noinspection HardCodedStringLiteral
        javac.add(srcTag);
        javac.add(compilerExcludes);
        myTestsTarget.add(javac);
      }
      myTestsTarget.add(createCopyTask(project, moduleChunk, testSourceRoots, testOutputPathRef, baseDir, genOptions));
    }
    add(myMainTarget);
    add(myProductionTarget, 1);
    add(myTestsTarget, 1);
  }

  /**
   * Create custom compiler tasks.
   *
   * NOTE(review): both the "fail" tag and ext.generateCustomCompile(...) are
   * attached to myProductionTarget even when compileTests is true — it looks
   * like the tests branch should use myTestsTarget instead; confirm against
   * the ChunkCustomCompilerExtension contract before changing.
   *
   * @param project          the project
   * @param moduleChunk      the module chunk
   * @param genOptions       generation options
   * @param compileTests     if true, tests are being compiled
   * @param customCompilers  an array of custom compilers for this chunk
   * @param compilerArgs     the javac compiler arguments
   * @param bootclasspathTag the boot classpath element for the javac compiler
   * @param classpathTag     the classpath tag for the javac compiler
   * @param compilerExcludes the compiler excludes tag
   * @param srcTag           the source tag
   * @param outputPathRef    the output path reference
   */
  private void createCustomCompilerTasks(Project project,
                                         ModuleChunk moduleChunk,
                                         GenerationOptions genOptions,
                                         boolean compileTests,
                                         ChunkCustomCompilerExtension[] customCompilers,
                                         Tag compilerArgs,
                                         Tag bootclasspathTag,
                                         Tag classpathTag,
                                         PatternSetRef compilerExcludes,
                                         Tag srcTag,
                                         String outputPathRef) {
    // More than one custom compiler per chunk is unsupported: emit a <fail>.
    if (customCompilers.length > 1) {
      myProductionTarget.add(new Tag("fail", Pair.create("message", CompilerBundle.message(
        "generated.ant.build.compile.modules.fail.custom.comipilers"))));
    }
    for (ChunkCustomCompilerExtension ext : customCompilers) {
      ext.generateCustomCompile(project, moduleChunk, genOptions, compileTests, myProductionTarget, compilerArgs, bootclasspathTag,
                                classpathTag, compilerExcludes, srcTag, outputPathRef);
    }
  }

  /**
   * Builds the comma-separated list of compile-target names for the chunks
   * this chunk depends on (empty string when there are no dependencies).
   */
  private String getChunkDependenciesString(ModuleChunk moduleChunk) {
    final StringBuffer moduleDependencies = new StringBuffer();
    final ModuleChunk[] dependencies = moduleChunk.getDependentChunks();
    for (int idx = 0; idx < dependencies.length; idx++) {
      final ModuleChunk dependency = dependencies[idx];
      if (idx > 0) {
        moduleDependencies.append(",");
      }
      moduleDependencies.append(BuildProperties.getCompileTargetName(dependency.getName()));
    }
    return moduleDependencies.toString();
  }

  /**
   * Creates <copy> tasks that move resource files from each source root into
   * the output directory, preserving package-prefix subdirectories. Roots that
   * map to the same target directory share one <copy> task.
   */
  private static Generator createCopyTask(final Project project,
                                          ModuleChunk chunk,
                                          VirtualFile[] sourceRoots,
                                          String toDir,
                                          File baseDir,
                                          final GenerationOptions genOptions) {
    //noinspection HardCodedStringLiteral
    final Tag filesSelector = new Tag("type", Pair.create("type", "file"));
    final PatternSetRef excludes = CompilerExcludes.isAvailable(project) ? new PatternSetRef(
      BuildProperties.getExcludedFromCompilationProperty(chunk.getName())) : null;
    final PatternSetRef resourcePatternsPatternSet = new PatternSetRef(BuildProperties.PROPERTY_COMPILER_RESOURCE_PATTERNS);
    final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(project).getFileIndex();
    final CompositeGenerator composite = new CompositeGenerator();
    final Map<String, Copy> outputDirToTaskMap = new HashMap<String, Copy>();
    for (final VirtualFile root : sourceRoots) {
      // A root with package prefix "a.b" copies into <toDir>/a/b.
      final String packagePrefix = fileIndex.getPackageNameByDirectory(root);
      final String targetDir = packagePrefix != null && packagePrefix.length() > 0
                               ? toDir + "/" + packagePrefix.replace('.', '/')
                               : toDir;
      Copy copy = outputDirToTaskMap.get(targetDir);
      if (copy == null) {
        copy = new Copy(targetDir);
        outputDirToTaskMap.put(targetDir, copy);
        composite.add(copy);
      }
      final FileSet fileSet = new FileSet(
        GenerationUtils.toRelativePath(root, baseDir, BuildProperties.getModuleChunkBasedirProperty(chunk), genOptions));
      fileSet.add(resourcePatternsPatternSet);
      fileSet.add(filesSelector);
      if (excludes != null) {
        fileSet.add(excludes);
      }
      copy.add(fileSet);
    }
    return composite;
  }
}
package nam.ui2;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import javax.annotation.PostConstruct;
import javax.faces.application.ConfigurableNavigationHandler;
import javax.faces.application.NavigationCase;
import javax.faces.application.NavigationHandler;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ManagedProperty;
import javax.faces.bean.SessionScoped;
import javax.faces.context.ExternalContext;
import javax.faces.context.FacesContext;

import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;

/**
 * JSF session bean that tracks the currently selected area (module) and
 * section from the "area"/"section" view parameters, and resolves the
 * corresponding view / include URIs through the navigation handler.
 *
 * NOTE: the managed-bean annotations are commented out, so this class is
 * presumably registered elsewhere (faces-config or CDI) — confirm before use.
 */
//@ManagedBean
//@SessionScoped
public class AreaNavigator implements Serializable {

	//private static final String MODULE_VIEW_PARAMETER = "area";
	private static final String AREA_VIEW_PARAMETER = "area";
	private static final String SECTION_VIEW_PARAMETER = "section";
	private static final String SEPARATOR = "/";
	private static final String CONTENTS_FOLDER = "contents/";
	private static final String CONTENT_POSTFIX = "-content";

	// Navigation structure: groups of modules (areas), each with sections.
	//@ManagedProperty(value = "#{navigationParser.groupList}")
	private List<GroupDescriptor> groups;

	// Currently resolved area/section, refreshed from view parameters on access.
	private ModuleDescriptor currentArea;
	private SectionDescriptor currentSection;
	private String sectionId;
	private String areaId;

	public String getArea() {
		return areaId;
	}

	public void setArea(String areaId) {
		this.areaId = areaId;
	}

	public String getSection() {
		return sectionId;
	}

	public void setSection(String sectionId) {
		this.sectionId = sectionId;
	}

	/** Returns the id of the active section, else the active area, else null. */
	public String getActivePage() {
		if (currentSection != null)
			return currentSection.getId();
		if (currentArea != null)
			return currentArea.getId();
		return null;
	}

	public List<GroupDescriptor> getGroups() {
		return groups;
	}

	public void setGroups(List<GroupDescriptor> groups) {
		this.groups = groups;
	}

	// NOTE(review): empty body and void return despite the getter name — looks
	// like an unfinished stub; confirm whether it is still referenced from views.
	public void getFilteredGroups() {
	}

	/**
	 * Resets the current selection and keeps only groups that have at least
	 * one enabled item. Intended as a @PostConstruct hook (annotation is
	 * commented out, so it must be invoked explicitly).
	 */
	//@PostConstruct
	public void init() {
		currentArea = null;
		currentSection = null;
		groups = new ArrayList<GroupDescriptor>(Collections2.filter(groups, new Predicate<GroupDescriptor>() {
			public boolean apply(GroupDescriptor input) {
				return input.hasEnabledItems();
			}
		}));
	}

	/**
	 * Resolves the current area from the "area" view parameter, re-looking it
	 * up (and clearing the current section) when the parameter changed.
	 * Side effect: mutates currentArea/currentSection.
	 */
	public ModuleDescriptor getCurrentArea() {
		String areaId = getViewParameter(AREA_VIEW_PARAMETER);
		if (currentArea == null || !currentArea.getId().equals(areaId)) {
			if (areaId != null) {
				currentArea = findAreaById(areaId);
				currentSection = null;
			}
		}
		return currentArea;
	}

	/**
	 * Resolves the current section from the "section" view parameter, falling
	 * back to the current area's first section when none matches.
	 */
	public SectionDescriptor getCurrentSection() {
		String id = getViewParameter(SECTION_VIEW_PARAMETER);
		if (currentSection == null || !currentSection.getId().equals(id)) {
			if (id != null) {
				currentSection = getCurrentArea().getTabById(id);
			}
			if (currentSection == null) {
				currentSection = getCurrentArea().getSections().iterator().next();
			}
		}
		return currentSection;
	}

	/** Returns the named request parameter, or null when absent or blank. */
	private String getViewParameter(String name) {
		FacesContext facesContext = FacesContext.getCurrentInstance();
		ExternalContext externalContext = facesContext.getExternalContext();
		String param = (String) externalContext.getRequestParameterMap().get(name);
		if (param != null && param.trim().length() > 0)
			return param;
		return null;
	}

	/** Linear search over all groups' modules for the module with the given id. */
	public ModuleDescriptor findAreaById(String id) {
		Iterator<GroupDescriptor> iterator = groups.iterator();
		while (iterator.hasNext()) {
			GroupDescriptor group = iterator.next();
			Iterator<ModuleDescriptor> dit = group.getModules().iterator();
			while (dit.hasNext()) {
				ModuleDescriptor locDemo = (ModuleDescriptor) dit.next();
				if (locDemo.getId().equals(id)) {
					return locDemo;
				}
			}
		}
		return null;
	}

	/**
	 * Resolves the view id for the current area via the navigation case
	 * "/bookshop2/&lt;areaId&gt;/&lt;areaId&gt;". Side effect: updates areaId.
	 * Returns null when the navigation handler is not configurable.
	 */
	public String getAreaURI() {
		FacesContext context = FacesContext.getCurrentInstance();
		NavigationHandler handler = context.getApplication().getNavigationHandler();
		if (handler instanceof ConfigurableNavigationHandler) {
			ConfigurableNavigationHandler navigationHandler = (ConfigurableNavigationHandler) handler;
			areaId = getCurrentArea().getId();
			//sectionId = getCurrentSection().getId();
			NavigationCase navCase = navigationHandler.getNavigationCase(context, null,
					"/bookshop2" + SEPARATOR + areaId + SEPARATOR + areaId);
			// NOTE(review): no null check here, unlike getSectionURI() — a missing
			// navigation case would throw NPE on navCase.getToViewId().
			return navCase.getToViewId(context);
		}
		return null;
	}

	/**
	 * Resolves the view id for the current section via the navigation case
	 * "/bookshop2/&lt;areaId&gt;/&lt;sectionId&gt;", falling back to the area
	 * URI when no case matches. Side effect: updates areaId and sectionId.
	 */
	public String getSectionURI() {
		FacesContext context = FacesContext.getCurrentInstance();
		NavigationHandler handler = context.getApplication().getNavigationHandler();
		if (handler instanceof ConfigurableNavigationHandler) {
			ConfigurableNavigationHandler navigationHandler = (ConfigurableNavigationHandler) handler;
			areaId = getCurrentArea().getId();
			sectionId = getCurrentSection().getId();
			NavigationCase navCase = navigationHandler.getNavigationCase(context, null,
					"/bookshop2" + SEPARATOR + areaId + SEPARATOR + sectionId);
			if (navCase != null)
				return navCase.getToViewId(context);
			return getAreaURI();
		}
		return null;
	}

	/**
	 * @return actual sample inclusion src. Consider that: 1) all the samples should be placed in "samples" subfolder of the
	 *         actual sample 2) all the samples pages should use the same name as main sample page with "-sample" prefix
	 *
	 * NOTE(review): this body is identical to getSectionIncludeURI() except for
	 * the unused areaURI local (getAreaURI() does mutate areaId, so the call may
	 * be kept for its side effect — confirm). Also, fileNameOffset adds
	 * currentSectionId.length() twice; the commented-out alternative used
	 * CONTENT_POSTFIX.length() — only equivalent when the section id happens to
	 * be 8 characters ("-content".length() - 1 apart otherwise); verify.
	 */
	public String getAreaIncludeURI() {
		String areaURI = getAreaURI();
		String sectionURI = getSectionURI();
		String currentSectionId = currentSection.getId();
		StringBuffer sectionURIBuffer = new StringBuffer(sectionURI);
		int folderOffset = sectionURIBuffer.lastIndexOf(currentSectionId);
		int fileNameOffset = sectionURIBuffer.lastIndexOf(currentSectionId) + currentSectionId.length() + currentSectionId.length() + 1;
		// Inserts "<sectionId>/" before the file name and "-content" before the extension.
		String result = new StringBuffer(sectionURI).insert(folderOffset, currentSectionId+"/").insert(fileNameOffset, CONTENT_POSTFIX).toString();
		return result;
	}

	/**
	 * @return actual sample inclusion src. Consider that: 1) all the samples should be placed in "samples" subfolder of the
	 *         actual sample 2) all the samples pages should use the same name as main sample page with "-sample" prefix
	 *
	 * NOTE(review): duplicate of getAreaIncludeURI() minus the getAreaURI()
	 * call; the same fileNameOffset concern applies (see above).
	 */
	public String getSectionIncludeURI() {
		String sectionURI = getSectionURI();
		String currentSectionId = currentSection.getId();
		StringBuffer sectionURIBuffer = new StringBuffer(sectionURI);
		int folderOffset = sectionURIBuffer.lastIndexOf(currentSectionId);
		int fileNameOffset = sectionURIBuffer.lastIndexOf(currentSectionId) + currentSectionId.length() + currentSectionId.length() + 1;
		// Inserts "<sectionId>/" before the file name and "-content" before the extension.
		String result = new StringBuffer(sectionURI).insert(folderOffset, currentSectionId+"/").insert(fileNameOffset, CONTENT_POSTFIX).toString();
		return result;
	}

}
package com.perforce.p4java.impl.mapbased.server.cmd;

import static com.perforce.p4java.core.file.FileSpecOpStatus.VALID;
import static com.perforce.p4java.exception.MessageSeverityCode.E_INFO;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.CLIENT_REC_DELETED;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.CODE0;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.FMT0;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.INTEGRATION_REC_ADDED;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.INTEGRATION_REC_DELETED;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.LABEL_REC_DELETED;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.PURGE_FILE;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.PURGE_REV;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.REPORT_ONLY;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.REVISION_REC_DELETED;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.WORKING_REC_DELETED;
import static com.perforce.p4java.server.CmdSpec.OBLITERATE;
import static org.apache.commons.lang3.StringUtils.EMPTY;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import com.perforce.p4java.AbstractP4JavaUnitTest;
import com.perforce.p4java.core.file.IFileSpec;
import com.perforce.p4java.core.file.IObliterateResult;
import com.perforce.p4java.exception.P4JavaException;
import com.perforce.p4java.impl.mapbased.server.Server;
import com.perforce.p4java.option.server.ObliterateFilesOptions;
import com.perforce.p4java.tests.UnitTestGiven;
import com.perforce.p4java.tests.UnitTestThen;

/**
 * Unit tests for {@code ObliterateDelegator#obliterateFiles}, exercising how
 * the delegator translates raw {@code p4 obliterate} result maps into
 * {@link IObliterateResult} instances.
 *
 * @author Sean Shou
 * @since 4/10/2016
 */
public class ObliterateDelegatorTest extends AbstractP4JavaUnitTest {
    // Lowest message code treated as an "info" severity message by the server.
    private static final String MESSAGE_CODE_IN_INFO_RANGE = "268435456";
    // The -y flag actually executes the obliterate (as opposed to report-only).
    private static final String EXECUTE_OBLITERATE = "-y";
    private static final String FILE_DEPOT_PATH = "//depot/dev/test.txt";
    private static final String[] CMD_ARGUMENTS = {EXECUTE_OBLITERATE, FILE_DEPOT_PATH};

    /**
     * Rule for expected exception verification
     */
    @Rule
    public ExpectedException thrown = ExpectedException.none();

    private ObliterateDelegator obliterateDelegator;
    private Map<String, Object> resultMap;
    private Map<String, Object> resultMap2;
    private List<Map<String, Object>> resultMaps;
    private List<IFileSpec> fileSpecs;
    private ObliterateFilesOptions opts;

    /**
     * Runs before every test: wires a mocked {@code Server} into the delegator
     * and prepares two mocked result maps plus a single valid file spec.
     */
    @SuppressWarnings("unchecked")
    @Before
    public void beforeEach() throws P4JavaException {
        server = mock(Server.class);
        obliterateDelegator = new ObliterateDelegator(server);

        resultMap = mock(Map.class);
        resultMap2 = mock(Map.class);
        resultMaps = new ArrayList<Map<String, Object>>();
        resultMaps.add(resultMap);
        resultMaps.add(resultMap2);

        fileSpecs = new ArrayList<IFileSpec>();
        IFileSpec fileSpec = mock(IFileSpec.class);
        when(fileSpec.getOpStatus()).thenReturn(VALID);
        when(fileSpec.getAnnotatedPreferredPathString()).thenReturn(FILE_DEPOT_PATH);
        fileSpecs.add(fileSpec);

        opts = new ObliterateFilesOptions(EXECUTE_OBLITERATE);
    }

    /**
     * Expects a <code>NullPointerException</code> to be thrown when fileSpecs
     * is null.
     *
     * @throws Exception if the precondition check is missing
     */
    @Test
    public void shouldThrownNullPointerExceptionWhenFileSpecsIsNull() throws Exception {
        thrown.expect(NullPointerException.class);
        fileSpecs = null;
        obliterateDelegator.obliterateFiles(fileSpecs, opts);
    }

    /**
     * Expects an empty obliterate-file list when the command returns null
     * result maps.
     *
     * @throws Exception on unexpected test failure
     */
    @Test
    public void shouldReturnEmptyListWhenResultMapsIsNull() throws Exception {
        expectedReturnEmptyList(new UnitTestGiven() {
            @Override
            public void given() throws P4JavaException {
                when(server.execMapCmdList(
                    eq(OBLITERATE.toString()),
                    eq(CMD_ARGUMENTS),
                    eq((Map<String, Object>)null))).thenReturn(null);
            }
        });
    }

    /**
     * Expects an empty obliterate-file list when the command returns non-null
     * result maps, but they only include purge files.
     *
     * @throws Exception on unexpected test failure
     */
    @Test
    public void shouldReturnEmptyListWhenAllResultMapsArePurgeFile() throws Exception {
        expectedReturnEmptyList(new UnitTestGiven() {
            @Override
            public void given() throws P4JavaException {
                when(resultMap.containsKey(PURGE_FILE)).thenReturn(true);
                when(resultMap2.containsKey(PURGE_FILE)).thenReturn(true);
            }
        });
    }

    /**
     * Expects an empty obliterate-file list when the command returns non-null
     * result maps that contain neither purge files nor deleted revision
     * record entries.
     *
     * @throws Exception on unexpected test failure
     */
    @Test
    public void shouldReturnEmptyListWhenAllResultMapIsNotPurgeFileAndIsNotContainsDeletedRevisionRecord()
            throws Exception {
        expectedReturnEmptyList(
            new UnitTestGiven() {
                @Override
                public void given() throws P4JavaException {
                    when(resultMap.containsKey(PURGE_FILE)).thenReturn(false);
                    when(resultMap2.containsKey(PURGE_FILE)).thenReturn(false);
                    when(resultMap.containsKey(REVISION_REC_DELETED)).thenReturn(false);
                    when(resultMap2.containsKey(REVISION_REC_DELETED)).thenReturn(false);
                }
            });
    }

    // Shared driver: applies the given stubbing, runs obliterateFiles, and
    // asserts that the result list is empty.
    private void expectedReturnEmptyList(UnitTestGiven unitTestGiven) throws Exception {
        //given
        unitTestGiven.given();
        when(server.execMapCmdList(eq(OBLITERATE.toString()), eq(CMD_ARGUMENTS),
            eq((Map<String, Object>)null))).thenReturn(resultMaps);
        //when
        List<IObliterateResult> obliterateResults =
            obliterateDelegator.obliterateFiles(fileSpecs, opts);
        //then
        assertThat(obliterateResults.size(), is(0));
    }

    /**
     * Expects the result to contain one 'deleted revision records' entry.
     *
     * @throws Exception on unexpected test failure
     */
    @Test
    public void shouldReturnOneDeletedRevisionRecord() throws Exception {
        shouldReturnNonEmptyList(
            new UnitTestGiven() {
                @Override
                public void given() throws P4JavaException {
                    when(resultMap.containsKey(PURGE_FILE)).thenReturn(true);
                    when(resultMap2.containsKey(PURGE_FILE)).thenReturn(false);
                    when(resultMap.containsKey(REVISION_REC_DELETED)).thenReturn(false);
                    when(resultMap2.containsKey(REVISION_REC_DELETED)).thenReturn(true);
                    mockingPureFileMap(resultMap);
                    mockingRevisionRecDeletedMap(resultMap2);
                }
            },
            new UnitTestThen<List<IObliterateResult>>() {
                @Override
                public void then(List<IObliterateResult> resultList) throws P4JavaException {
                    assertThat(resultList.size(), is(1));
                    verify(resultMap).get(PURGE_FILE);
                    verify(resultMap2).get(INTEGRATION_REC_ADDED);
                    IObliterateResult obliterateResult = resultList.get(0);
                    assertThat(obliterateResult.getLabelRecDeleted(), is(2));
                    assertThat(obliterateResult.isReportOnly(), is(true));
                }
            });
    }

    /**
     * Expects the result to contain one 'deleted revision record' entry and
     * one 'info or error obliterate' entry.
     *
     * @throws Exception on unexpected test failure
     */
    @Test
    public void shouldReturnOneDeletedRevisionRecordAndOneInfoOrErrorObliterates() throws Exception {
        shouldReturnNonEmptyList(
            new UnitTestGiven() {
                @Override
                public void given() throws P4JavaException {
                    when(resultMap.get(E_INFO)).thenReturn(EMPTY);
                    when(resultMap.containsKey(PURGE_FILE)).thenReturn(false);
                    when(resultMap.containsKey(REVISION_REC_DELETED)).thenReturn(true);
                    mockingRevisionRecDeletedMap(resultMap);

                    when(resultMap2.get(E_INFO)).thenReturn("not blank");
                    when(resultMap2.get(CODE0)).thenReturn(MESSAGE_CODE_IN_INFO_RANGE);
                    when(resultMap2.get(FMT0)).thenReturn(FILE_DEPOT_PATH);
                }
            },
            new UnitTestThen<List<IObliterateResult>>() {
                @Override
                public void then(List<IObliterateResult> resultList) throws P4JavaException {
                    assertThat(resultList.size(), is(2));
                    IObliterateResult revisionRecDeleteObliterateResult = resultList.get(0);
                    assertThat(revisionRecDeleteObliterateResult.getLabelRecDeleted(), is(2));
                    assertThat(revisionRecDeleteObliterateResult.isReportOnly(), is(false));
                    IObliterateResult infoObliterateResult = resultList.get(1);
                    assertThat(infoObliterateResult.getLabelRecDeleted(), is(0));
                    assertThat(infoObliterateResult.getFileSpecs().size(), is(1));
                }
            });
    }

    // Shared driver for the non-empty-result tests: stubs the command result,
    // runs obliterateFiles, asserts the list is non-empty, then delegates the
    // detailed assertions to the supplied "then" callback.
    private void shouldReturnNonEmptyList(
            UnitTestGiven unitTestGiven,
            UnitTestThen<List<IObliterateResult>> unitTestThen) throws Exception {
        //given
        when(server.execMapCmdList(eq(OBLITERATE.toString()), eq(CMD_ARGUMENTS),
            eq((Map<String, Object>)null)))
            .thenReturn(resultMaps);
        unitTestGiven.given();
        //when
        List<IObliterateResult> obliterateResults = obliterateDelegator.obliterateFiles(
            fileSpecs, opts);
        //then
        assertThat(obliterateResults.size() > 0, is(true));
        unitTestThen.then(obliterateResults);
    }

    // Stubs a result map so that it looks like a 'revision record deleted'
    // report-only entry with distinct counters per record type.
    private void mockingRevisionRecDeletedMap(Map<String, Object> map) {
        when(map.containsKey(REPORT_ONLY)).thenReturn(true);
        when(map.get(INTEGRATION_REC_ADDED)).thenReturn(1);
        when(map.get(LABEL_REC_DELETED)).thenReturn(2);
        when(map.get(CLIENT_REC_DELETED)).thenReturn(3);
        when(map.get(INTEGRATION_REC_DELETED)).thenReturn(4);
        when(map.get(WORKING_REC_DELETED)).thenReturn(5);
        when(map.get(REVISION_REC_DELETED)).thenReturn(6);
    }

    // Stubs a result map so that it looks like a purge-file entry.
    private void mockingPureFileMap(Map<String, Object> map) {
        when(map.get(PURGE_FILE)).thenReturn("pure file");
        when(map.get(PURGE_REV)).thenReturn(12);
    }
}
/*
 * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
 *
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.java.sip.communicator.impl.protocol.jabber;

import java.lang.reflect.*;
import java.util.*;

import net.java.sip.communicator.impl.protocol.jabber.extensions.colibri.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.ContentPacketExtension.SendersEnum;
import net.java.sip.communicator.impl.protocol.jabber.jinglesdp.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.util.*;

import org.jitsi.service.neomedia.*;
import org.jivesoftware.smack.*;
import org.jivesoftware.smack.filter.*;
import org.jivesoftware.smack.packet.*;
import org.jivesoftware.smack.util.*;
import org.jivesoftware.smackx.packet.*;

/**
 * Implements a Jabber <tt>CallPeer</tt>.
 *
 * @author Emil Ivov
 * @author Lyubomir Marinov
 * @author Boris Grozev
 */
public class CallPeerJabberImpl
    extends AbstractCallPeerJabberGTalkImpl
        <CallJabberImpl, CallPeerMediaHandlerJabberImpl, JingleIQ>
{
    /**
     * The <tt>Logger</tt> used by the <tt>CallPeerJabberImpl</tt> class and its
     * instances for logging output.
     */
    private static final Logger logger
        = Logger.getLogger(CallPeerJabberImpl.class);

    /**
     * If the call is cancelled before session-initiate is sent.
     */
    private boolean cancelled = false;

    /**
     * Synchronization object for candidates available.
     */
    private final Object candSyncRoot = new Object();

    /**
     * If the content-add does not contains candidates.
     */
    private boolean contentAddWithNoCands = false;

    /**
     * If we have processed the session initiate.
     */
    private boolean sessionInitiateProcessed = false;

    /**
     * Synchronization object guarding {@link #sessionInitiateProcessed}:
     * processSessionInitiate notifies it once the initial offer has been
     * handled, and processTransportInfo waits on it before applying remote
     * candidates.
     */
    private final Object sessionInitiateSyncRoot = new Object();

    /**
     * Synchronization object for SID.
     */
    private final Object sidSyncRoot = new Object();

    /**
     * The current value of the 'senders' field of the audio content in the
     * Jingle session with this <tt>CallPeer</tt>.
     * <tt>null</tt> should be interpreted as 'both', which is the default in
     * Jingle if the XML attribute is missing.
     */
    private SendersEnum audioSenders = SendersEnum.none;

    /**
     * The current value of the 'senders' field of the video content in the
     * Jingle session with this <tt>CallPeer</tt>.
     * <tt>null</tt> should be interpreted as 'both', which is the default in
     * Jingle if the XML attribute is missing.
     */
    private SendersEnum videoSenders = SendersEnum.none;

    /**
     * Creates a new call peer with address <tt>peerAddress</tt>.
     *
     * @param peerAddress the Jabber address of the new call peer.
     * @param owningCall the call that contains this call peer.
     */
    public CallPeerJabberImpl(String peerAddress, CallJabberImpl owningCall)
    {
        super(peerAddress, owningCall);
        setMediaHandler(new CallPeerMediaHandlerJabberImpl(this));
    }

    /**
     * Creates a new call peer with address <tt>peerAddress</tt>.
     *
     * @param peerAddress the Jabber address of the new call peer.
     * @param owningCall the call that contains this call peer.
     * @param sessionIQ The session-initiate <tt>JingleIQ</tt> which was
     * received from <tt>peerAddress</tt> and caused the creation of this
     * <tt>CallPeerJabberImpl</tt>
     */
    public CallPeerJabberImpl(String peerAddress,
                              CallJabberImpl owningCall,
                              JingleIQ sessionIQ)
    {
        this(peerAddress, owningCall);
        this.sessionInitIQ = sessionIQ;
    }

    /**
     * Send a session-accept <tt>JingleIQ</tt> to this <tt>CallPeer</tt>.
     * On failure of either the answer generation or the subsequent media
     * start, sends a session-terminate instead and moves this peer to the
     * FAILED state.
     *
     * @throws OperationFailedException if we fail to create or send the
     * response.
     */
    public synchronized void answer()
        throws OperationFailedException
    {
        Iterable<ContentPacketExtension> answer;
        CallPeerMediaHandlerJabberImpl mediaHandler = getMediaHandler();

        try
        {
            mediaHandler
                .getTransportManager()
                    .wrapupConnectivityEstablishment();
            answer = mediaHandler.generateSessionAccept();
            for (ContentPacketExtension c : answer)
                setSenders(getMediaType(c), c.getSenders());
        }
        catch(Exception exc)
        {
            logger.info("Failed to answer an incoming call", exc);

            //send an error response
            String reasonText = "Error: " + exc.getMessage();
            JingleIQ errResp
                = JinglePacketFactory.createSessionTerminate(
                        sessionInitIQ.getTo(),
                        sessionInitIQ.getFrom(),
                        sessionInitIQ.getSID(),
                        Reason.FAILED_APPLICATION,
                        reasonText);

            setState(CallPeerState.FAILED, reasonText);
            getProtocolProvider().getConnection().sendPacket(errResp);
            return;
        }

        JingleIQ response
            = JinglePacketFactory.createSessionAccept(
                    sessionInitIQ.getTo(),
                    sessionInitIQ.getFrom(),
                    getSID(),
                    answer);

        //send the packet first and start the stream later in case the media
        //relay needs to see it before letting hole punching techniques through.
        getProtocolProvider().getConnection().sendPacket(response);

        try
        {
            mediaHandler.start();
        }
        catch(UndeclaredThrowableException e)
        {
            Throwable exc = e.getUndeclaredThrowable();

            logger.info("Failed to establish a connection", exc);

            //send an error response
            String reasonText = "Error: " + exc.getMessage();
            JingleIQ errResp
                = JinglePacketFactory.createSessionTerminate(
                        sessionInitIQ.getTo(),
                        sessionInitIQ.getFrom(),
                        sessionInitIQ.getSID(),
                        Reason.GENERAL_ERROR,
                        reasonText);

            setState(CallPeerState.FAILED, reasonText);
            getProtocolProvider().getConnection().sendPacket(errResp);
            return;
        }

        //tell everyone we are connected so that the audio notifications would
        //stop
        setState(CallPeerState.CONNECTED);
    }

    /**
     * Returns the session ID of the Jingle session associated with this call.
     *
     * @return the session ID of the Jingle session associated with this call,
     * or <tt>null</tt> if no session-initiate has been seen/sent yet.
     */
    @Override
    public String getSID()
    {
        return sessionInitIQ != null ? sessionInitIQ.getSID() : null;
    }

    /**
     * Returns the IQ ID of the Jingle session-initiate packet associated with
     * this call.
     *
     * @return the IQ ID of the Jingle session-initiate packet associated with
     * this call.
     */
    public JingleIQ getSessionIQ()
    {
        return sessionInitIQ;
    }

    /**
     * Ends the call with this <tt>CallPeer</tt>. Depending on the state
     * of the peer the method would send a CANCEL, BYE, or BUSY_HERE message
     * and set the new state to DISCONNECTED.
     *
     * @param failed indicates if the hangup is following to a call failure or
     * simply a disconnect
     * @param reasonText the text, if any, to be set on the
     * <tt>ReasonPacketExtension</tt> as the value of its text
     * @param reasonOtherExtension the <tt>PacketExtension</tt>, if any, to be
     * set on the <tt>ReasonPacketExtension</tt> as the value of its
     * <tt>otherExtension</tt> property
     */
    public void hangup(boolean failed,
                       String reasonText,
                       PacketExtension reasonOtherExtension)
    {
        CallPeerState prevPeerState = getState();

        // do nothing if the call is already ended
        if (CallPeerState.DISCONNECTED.equals(prevPeerState)
                || CallPeerState.FAILED.equals(prevPeerState))
        {
            if (logger.isDebugEnabled())
                logger.debug("Ignoring a request to hangup a call peer "
                        + "that is already DISCONNECTED");
            return;
        }

        setState(
                failed ? CallPeerState.FAILED : CallPeerState.DISCONNECTED,
                reasonText);

        JingleIQ responseIQ = null;

        if (prevPeerState.equals(CallPeerState.CONNECTED)
                || CallPeerState.isOnHold(prevPeerState))
        {
            responseIQ = JinglePacketFactory.createBye(
                    getProtocolProvider().getOurJID(), peerJID, getSID());
        }
        else if (CallPeerState.CONNECTING.equals(prevPeerState)
                || CallPeerState.CONNECTING_WITH_EARLY_MEDIA.equals(prevPeerState)
                || CallPeerState.ALERTING_REMOTE_SIDE.equals(prevPeerState))
        {
            String jingleSID = getSID();

            if(jingleSID == null)
            {
                synchronized(sidSyncRoot)
                {
                    // we cancelled the call too early because the jingleSID
                    // is null (i.e. the session-initiate has not been created)
                    // and no need to send the session-terminate
                    cancelled = true;
                    return;
                }
            }

            responseIQ = JinglePacketFactory.createCancel(
                    getProtocolProvider().getOurJID(), peerJID, getSID());
        }
        else if (prevPeerState.equals(CallPeerState.INCOMING_CALL))
        {
            responseIQ = JinglePacketFactory.createBusy(
                    getProtocolProvider().getOurJID(), peerJID, getSID());
        }
        else if (prevPeerState.equals(CallPeerState.BUSY)
                || prevPeerState.equals(CallPeerState.FAILED))
        {
            // For FAILED and BUSY we only need to update CALL_STATUS
            // as everything else has been done already.
        }
        else
        {
            logger.info("Could not determine call peer state!");
        }

        if (responseIQ != null)
        {
            if (reasonOtherExtension != null)
            {
                ReasonPacketExtension reason
                    = (ReasonPacketExtension)
                        responseIQ.getExtension(
                                ReasonPacketExtension.ELEMENT_NAME,
                                ReasonPacketExtension.NAMESPACE);

                if (reason != null)
                {
                    reason.setOtherExtension(reasonOtherExtension);
                }
                else if(reasonOtherExtension instanceof ReasonPacketExtension)
                {
                    responseIQ.setReason(
                            (ReasonPacketExtension)reasonOtherExtension);
                }
            }

            getProtocolProvider().getConnection().sendPacket(responseIQ);
        }
    }

    /**
     * Creates and sends a session-initiate {@link JingleIQ}.
     *
     * @param sessionInitiateExtensions a collection of additional and optional
     * <tt>PacketExtension</tt>s to be added to the <tt>session-initiate</tt>
     * {@link JingleIQ} which is to initiate the session with this
     * <tt>CallPeerJabberImpl</tt>
     * @throws OperationFailedException exception
     */
    protected synchronized void initiateSession(
            Iterable<PacketExtension> sessionInitiateExtensions)
        throws OperationFailedException
    {
        initiator = false;

        //Create the media description that we'd like to send to the other side.
        List<ContentPacketExtension> offer
            = getMediaHandler().createContentList();

        ProtocolProviderServiceJabberImpl protocolProvider
            = getProtocolProvider();

        synchronized(sidSyncRoot)
        {
            sessionInitIQ
                = JinglePacketFactory.createSessionInitiate(
                        protocolProvider.getOurJID(),
                        this.peerJID,
                        JingleIQ.generateSID(),
                        offer);

            if(cancelled)
            {
                // we cancelled the call too early so no need to send the
                // session-initiate to peer
                getMediaHandler().getTransportManager().close();
                return;
            }
        }

        if (sessionInitiateExtensions != null)
        {
            for (PacketExtension sessionInitiateExtension
                    : sessionInitiateExtensions)
            {
                sessionInitIQ.addExtension(sessionInitiateExtension);
            }
        }

        protocolProvider.getConnection().sendPacket(sessionInitIQ);
    }

    /**
     * Notifies this instance that a specific <tt>ColibriConferenceIQ</tt> has
     * been received. This <tt>CallPeerJabberImpl</tt> uses the part of the
     * information provided in the specified <tt>conferenceIQ</tt> which
     * concerns it only.
     *
     * @param conferenceIQ the <tt>ColibriConferenceIQ</tt> which has been
     * received
     */
    void processColibriConferenceIQ(ColibriConferenceIQ conferenceIQ)
    {
        /*
         * CallPeerJabberImpl does not itself/directly know the specifics
         * related to the channels allocated on the Jitsi Videobridge server.
         * The channels contain transport and media-related information so
         * forward the notification to CallPeerMediaHandlerJabberImpl.
         */
        getMediaHandler().processColibriConferenceIQ(conferenceIQ);
    }

    /**
     * Processes the content-accept {@link JingleIQ}.
     *
     * @param content The {@link JingleIQ} that contains content that remote
     * peer has accepted
     */
    public void processContentAccept(JingleIQ content)
    {
        List<ContentPacketExtension> contents = content.getContentList();
        CallPeerMediaHandlerJabberImpl mediaHandler = getMediaHandler();

        try
        {
            mediaHandler
                .getTransportManager()
                    .wrapupConnectivityEstablishment();
            mediaHandler.processAnswer(contents);
            for (ContentPacketExtension c : contents)
                setSenders(getMediaType(c), c.getSenders());
        }
        catch (Exception e)
        {
            logger.warn("Failed to process a content-accept", e);

            // Send an error response.
            String reason = "Error: " + e.getMessage();
            JingleIQ errResp
                = JinglePacketFactory.createSessionTerminate(
                        getProtocolProvider().getOurJID(),
                        peerJID,
                        sessionInitIQ.getSID(),
                        Reason.INCOMPATIBLE_PARAMETERS,
                        reason);

            setState(CallPeerState.FAILED, reason);
            getProtocolProvider().getConnection().sendPacket(errResp);
            return;
        }

        mediaHandler.start();
    }

    /**
     * Processes the content-add {@link JingleIQ}.
     *
     * @param content The {@link JingleIQ} that contains content that remote
     * peer wants to be added
     */
    public void processContentAdd(final JingleIQ content)
    {
        CallPeerMediaHandlerJabberImpl mediaHandler = getMediaHandler();
        List<ContentPacketExtension> contents = content.getContentList();
        Iterable<ContentPacketExtension> answerContents;
        JingleIQ contentIQ;
        boolean noCands = false;
        MediaStream oldVideoStream = mediaHandler.getStream(MediaType.VIDEO);

        if(logger.isInfoEnabled())
            logger.info("Looking for candidates in content-add.");

        try
        {
            if(!contentAddWithNoCands)
            {
                mediaHandler.processOffer(contents);

                /*
                 * Gingle transport will not put candidate in session-initiate
                 * and content-add.
                 */
                for(ContentPacketExtension c : contents)
                {
                    if(JingleUtils.getFirstCandidate(c, 1) == null)
                    {
                        contentAddWithNoCands = true;
                        noCands = true;
                    }
                }
            }

            // if no candidates are present, launch a new Thread which will
            // process and wait for the connectivity establishment (otherwise
            // the existing thread will be blocked and thus cannot receive
            // transport-info with candidates
            if(noCands)
            {
                new Thread()
                {
                    @Override
                    public void run()
                    {
                        try
                        {
                            // Wait until processTransportInfo signals that
                            // candidates have arrived.
                            synchronized(candSyncRoot)
                            {
                                candSyncRoot.wait();
                            }
                        }
                        catch(InterruptedException e)
                        {
                        }

                        processContentAdd(content);
                        contentAddWithNoCands = false;
                    }
                }.start();

                if(logger.isInfoEnabled())
                    logger.info("No candidates found in content-add, started "
                            + "new thread.");
                return;
            }

            mediaHandler
                .getTransportManager()
                    .wrapupConnectivityEstablishment();
            if(logger.isInfoEnabled())
                logger.info("Wrapping up connectivity establishment");
            answerContents = mediaHandler.generateSessionAccept();
            contentIQ = null;
        }
        catch(Exception e)
        {
            logger.warn("Exception occurred", e);

            answerContents = null;
            contentIQ
                = JinglePacketFactory.createContentReject(
                        getProtocolProvider().getOurJID(),
                        this.peerJID,
                        getSID(),
                        answerContents);
        }

        if(contentIQ == null)
        {
            /* send content-accept */
            contentIQ
                = JinglePacketFactory.createContentAccept(
                        getProtocolProvider().getOurJID(),
                        this.peerJID,
                        getSID(),
                        answerContents);
            for (ContentPacketExtension c : answerContents)
                setSenders(getMediaType(c), c.getSenders());
        }

        getProtocolProvider().getConnection().sendPacket(contentIQ);
        mediaHandler.start();

        /*
         * If a remote peer turns her video on in a conference which is hosted
         * by the local peer and the local peer is not streaming her local
         * video, reinvite the other remote peers to enable RTP translation.
         */
        if (oldVideoStream == null)
        {
            MediaStream newVideoStream
                = mediaHandler.getStream(MediaType.VIDEO);

            if ((newVideoStream != null)
                    && mediaHandler.isRTPTranslationEnabled(MediaType.VIDEO))
            {
                try
                {
                    getCall().modifyVideoContent();
                }
                catch (OperationFailedException ofe)
                {
                    logger.error("Failed to enable RTP translation", ofe);
                }
            }
        }
    }

    /**
     * Processes the content-modify {@link JingleIQ}.
     *
     * @param content The {@link JingleIQ} that contains content that remote
     * peer wants to be modified
     */
    public void processContentModify(JingleIQ content)
    {
        ContentPacketExtension ext = content.getContentList().get(0);
        MediaType mediaType = getMediaType(ext);

        try
        {
            // Only a content-modify carrying an RTP description triggers a
            // full content re-init; otherwise just the direction is updated.
            boolean modify
                = (ext.getFirstChildOfType(RtpDescriptionPacketExtension.class)
                        != null);

            getMediaHandler().reinitContent(ext.getName(), ext, modify);
            setSenders(mediaType, ext.getSenders());

            if (MediaType.VIDEO.equals(mediaType))
                getCall().modifyVideoContent();
        }
        catch(Exception e)
        {
            logger.info("Failed to process an incoming content-modify", e);

            // Send an error response.
            String reason = "Error: " + e.getMessage();
            JingleIQ errResp
                = JinglePacketFactory.createSessionTerminate(
                        getProtocolProvider().getOurJID(),
                        peerJID,
                        sessionInitIQ.getSID(),
                        Reason.INCOMPATIBLE_PARAMETERS,
                        reason);

            setState(CallPeerState.FAILED, reason);
            getProtocolProvider().getConnection().sendPacket(errResp);
            return;
        }
    }

    /**
     * Processes the content-reject {@link JingleIQ}.
     *
     * @param content The {@link JingleIQ}
     */
    public void processContentReject(JingleIQ content)
    {
        if(content.getContentList().isEmpty())
        {
            //send an error response;
            JingleIQ errResp
                = JinglePacketFactory.createSessionTerminate(
                        sessionInitIQ.getTo(),
                        sessionInitIQ.getFrom(),
                        sessionInitIQ.getSID(),
                        Reason.INCOMPATIBLE_PARAMETERS,
                        "Error: content rejected");

            setState(CallPeerState.FAILED, "Error: content rejected");
            getProtocolProvider().getConnection().sendPacket(errResp);
            return;
        }
    }

    /**
     * Processes the content-remove {@link JingleIQ}.
     *
     * @param content The {@link JingleIQ} that contains content that remote
     * peer wants to be removed
     */
    public void processContentRemove(JingleIQ content)
    {
        List<ContentPacketExtension> contents = content.getContentList();
        boolean videoContentRemoved = false;

        if (!contents.isEmpty())
        {
            CallPeerMediaHandlerJabberImpl mediaHandler = getMediaHandler();

            for(ContentPacketExtension c : contents)
            {
                mediaHandler.removeContent(c.getName());

                MediaType mediaType = getMediaType(c);
                setSenders(mediaType, SendersEnum.none);

                if (MediaType.VIDEO.equals(mediaType))
                    videoContentRemoved = true;
            }

            /*
             * TODO XEP-0166: Jingle says: If the content-remove results in zero
             * content definitions for the session, the entity that receives the
             * content-remove SHOULD send a session-terminate action to the
             * other party (since a session with no content definitions is
             * void).
             */
        }

        if (videoContentRemoved)
        {
            // removing of the video content might affect the other sessions
            // in the call
            try
            {
                getCall().modifyVideoContent();
            }
            catch (Exception e)
            {
                logger.warn("Failed to update Jingle sessions");
            }
        }
    }

    /**
     * Processes a session-accept {@link JingleIQ}.
     *
     * @param sessionInitIQ The session-accept {@link JingleIQ} to process.
     */
    public void processSessionAccept(JingleIQ sessionInitIQ)
    {
        this.sessionInitIQ = sessionInitIQ;

        CallPeerMediaHandlerJabberImpl mediaHandler = getMediaHandler();
        List<ContentPacketExtension> answer = sessionInitIQ.getContentList();

        try
        {
            mediaHandler
                .getTransportManager()
                    .wrapupConnectivityEstablishment();
            mediaHandler.processAnswer(answer);
            for (ContentPacketExtension c : answer)
                setSenders(getMediaType(c), c.getSenders());
        }
        catch(Exception exc)
        {
            if (logger.isInfoEnabled())
                logger.info("Failed to process a session-accept", exc);

            //send an error response;
            JingleIQ errResp
                = JinglePacketFactory.createSessionTerminate(
                        sessionInitIQ.getTo(),
                        sessionInitIQ.getFrom(),
                        sessionInitIQ.getSID(),
                        Reason.INCOMPATIBLE_PARAMETERS,
                        exc.getClass().getName() + ": " + exc.getMessage());

            setState(CallPeerState.FAILED, "Error: " + exc.getMessage());
            getProtocolProvider().getConnection().sendPacket(errResp);
            return;
        }

        //tell everyone we are connected so that the audio notifications would
        //stop
        setState(CallPeerState.CONNECTED);

        mediaHandler.start();

        /*
         * If video was added to the call after we sent the session-initiate
         * to this peer, it needs to be added to this peer's session with a
         * content-add.
         */
        sendModifyVideoContent();
    }

    /**
     * Handles the specified session <tt>info</tt> packet according to its
     * content.
     *
     * @param info the {@link SessionInfoPacketExtension} that we just received.
     */
    public void processSessionInfo(SessionInfoPacketExtension info)
    {
        switch (info.getType())
        {
        case ringing:
            setState(CallPeerState.ALERTING_REMOTE_SIDE);
            break;
        case hold:
            getMediaHandler().setRemotelyOnHold(true);
            reevalRemoteHoldStatus();
            break;
        case unhold:
        case active:
            getMediaHandler().setRemotelyOnHold(false);
            reevalRemoteHoldStatus();
            break;
        default:
            logger.warn("Received SessionInfoPacketExtension of unknown type");
        }
    }

    /**
     * Processes the session initiation {@link JingleIQ} that we were created
     * with, passing its content to the media handler and then sends either a
     * "session-info/ringing" or a "session-terminate" response.
     *
     * @param sessionInitIQ The {@link JingleIQ} that created the session that
     * we are handling here.
     */
    protected synchronized void processSessionInitiate(JingleIQ sessionInitIQ)
    {
        // Do initiate the session.
        this.sessionInitIQ = sessionInitIQ;
        this.initiator = true;

        // This is the SDP offer that came from the initial session-initiate.
        // Contrary to SIP, we are guaranteed to have content because XEP-0166
        // says: "A session consists of at least one content type at a time."
        List<ContentPacketExtension> offer = sessionInitIQ.getContentList();

        try
        {
            getMediaHandler().processOffer(offer);

            CoinPacketExtension coin = null;

            for(PacketExtension ext : sessionInitIQ.getExtensions())
            {
                if(ext.getElementName().equals(
                        CoinPacketExtension.ELEMENT_NAME))
                {
                    coin = (CoinPacketExtension)ext;
                    break;
                }
            }

            /* does the call peer acts as a conference focus ? */
            if(coin != null)
            {
                setConferenceFocus(Boolean.parseBoolean(
                        (String)coin.getAttribute("isfocus")));
            }
        }
        catch(Exception ex)
        {
            logger.info("Failed to process an incoming session initiate", ex);

            //send an error response;
            String reasonText = "Error: " + ex.getMessage();
            JingleIQ errResp
                = JinglePacketFactory.createSessionTerminate(
                        sessionInitIQ.getTo(),
                        sessionInitIQ.getFrom(),
                        sessionInitIQ.getSID(),
                        Reason.INCOMPATIBLE_PARAMETERS,
                        reasonText);

            setState(CallPeerState.FAILED, reasonText);
            getProtocolProvider().getConnection().sendPacket(errResp);
            return;
        }

        // If we do not get the info about the remote peer yet. Get it right
        // now.
        if(this.getDiscoveryInfo() == null)
        {
            String calleeURI = sessionInitIQ.getFrom();
            retrieveDiscoveryInfo(calleeURI);
        }

        //send a ringing response
        if (logger.isTraceEnabled())
            logger.trace("will send ringing response: ");
        getProtocolProvider().getConnection().sendPacket(
                JinglePacketFactory.createRinging(sessionInitIQ));

        // Unblock any transport-info handler waiting for the offer to be
        // processed (see processTransportInfo).
        synchronized(sessionInitiateSyncRoot)
        {
            sessionInitiateProcessed = true;
            sessionInitiateSyncRoot.notify();
        }

        //if this is a 3264 initiator, let's give them an early peek at our
        //answer so that they could start ICE (SIP-2-Jingle gateways won't
        //be able to send their candidates unless they have this)
        DiscoverInfo discoverInfo = getDiscoveryInfo();

        if ((discoverInfo != null)
                && discoverInfo.containsFeature(
                        ProtocolProviderServiceJabberImpl.URN_IETF_RFC_3264))
        {
            getProtocolProvider().getConnection().sendPacket(
                    JinglePacketFactory.createDescriptionInfo(
                            sessionInitIQ.getTo(),
                            sessionInitIQ.getFrom(),
                            sessionInitIQ.getSID(),
                            getMediaHandler().getLocalContentList()));
        }
    }

    /**
     * Puts this peer into a {@link CallPeerState#DISCONNECTED}, indicating a
     * reason to the user, if there is one.
     *
     * @param jingleIQ the {@link JingleIQ} that's terminating our session.
     */
    public void processSessionTerminate(JingleIQ jingleIQ)
    {
        String reasonStr = "Call ended by remote side.";
        ReasonPacketExtension reasonExt = jingleIQ.getReason();

        if(reasonExt != null)
        {
            Reason reason = reasonExt.getReason();

            if(reason != null)
                reasonStr += " Reason: " + reason.toString() + ".";

            String text = reasonExt.getText();

            if(text != null)
                reasonStr += " " + text;
        }

        setState(CallPeerState.DISCONNECTED, reasonStr);
    }

    /**
     * Processes a specific "XEP-0251: Jingle Session Transfer"
     * <tt>transfer</tt> packet (extension).
     *
     * @param transfer the "XEP-0251: Jingle Session Transfer" transfer packet
     * (extension) to process
     * @throws OperationFailedException if anything goes wrong while processing
     * the specified <tt>transfer</tt> packet (extension)
     */
    public void processTransfer(TransferPacketExtension transfer)
        throws OperationFailedException
    {
        String attendantAddress = transfer.getFrom();

        if (attendantAddress == null)
        {
            throw new OperationFailedException(
                    "Session transfer must contain a \'from\' attribute value.",
                    OperationFailedException.ILLEGAL_ARGUMENT);
        }

        String calleeAddress = transfer.getTo();

        if (calleeAddress == null)
        {
            throw new OperationFailedException(
                    "Session transfer must contain a \'to\' attribute value.",
                    OperationFailedException.ILLEGAL_ARGUMENT);
        }

        // Checks if the transfer remote peer is contained by the roster of
        // this account. NOTE(review): on failure this only sets FAILED state
        // and logs — it still proceeds to create the outgoing call below;
        // confirm that is intentional.
        Roster roster = getProtocolProvider().getConnection().getRoster();

        if(!roster.contains(StringUtils.parseBareAddress(calleeAddress)))
        {
            String failedMessage =
                    "Transfer impossible:\n"
                    + "Account roster does not contain transfer peer: "
                    + StringUtils.parseBareAddress(calleeAddress);
            setState(CallPeerState.FAILED, failedMessage);
            logger.info(failedMessage);
        }

        OperationSetBasicTelephonyJabberImpl basicTelephony
            = (OperationSetBasicTelephonyJabberImpl)
                getProtocolProvider()
                    .getOperationSet(OperationSetBasicTelephony.class);
        CallJabberImpl calleeCall = new CallJabberImpl(basicTelephony);
        TransferPacketExtension calleeTransfer = new TransferPacketExtension();
        String sid = transfer.getSID();

        calleeTransfer.setFrom(attendantAddress);
        // NOTE(review): setTo is only applied for attended transfers
        // (sid != null); confirm an unattended transfer is not supposed to
        // carry the 'to' attribute here.
        if (sid != null)
        {
            calleeTransfer.setSID(sid);
            calleeTransfer.setTo(calleeAddress);
        }
        basicTelephony.createOutgoingCall(
                calleeCall,
                calleeAddress,
                Arrays.asList(new PacketExtension[] { calleeTransfer }));
    }

    /**
     * Processes the <tt>transport-info</tt> {@link JingleIQ}.
     *
     * @param jingleIQ the <tt>transport-info</tt> {@link JingleIQ} to process
     */
    public void processTransportInfo(JingleIQ jingleIQ)
    {
        /*
         * The transport-info action is used to exchange transport candidates
         * so it only concerns the mediaHandler.
         */
        try
        {
            if(isInitiator())
            {
                // Candidates can only be applied once the session-initiate's
                // offer has been processed — wait for that if necessary.
                synchronized(sessionInitiateSyncRoot)
                {
                    if(!sessionInitiateProcessed)
                    {
                        try
                        {
                            sessionInitiateSyncRoot.wait();
                        }
                        catch(InterruptedException e)
                        {
                        }
                    }
                }
            }

            getMediaHandler().processTransportInfo(
                    jingleIQ.getContentList());
        }
        catch (OperationFailedException ofe)
        {
            logger.warn("Failed to process an incoming transport-info", ofe);

            //send an error response
            String reasonText = "Error: " + ofe.getMessage();
            JingleIQ errResp
                = JinglePacketFactory.createSessionTerminate(
                        getProtocolProvider().getOurJID(),
                        peerJID,
                        sessionInitIQ.getSID(),
                        Reason.GENERAL_ERROR,
                        reasonText);

            setState(CallPeerState.FAILED, reasonText);
            getProtocolProvider().getConnection().sendPacket(errResp);
            return;
        }

        // Wake up any content-add handler waiting for candidates
        // (see processContentAdd).
        synchronized(candSyncRoot)
        {
            candSyncRoot.notify();
        }
    }

    /**
     * Puts the <tt>CallPeer</tt> represented by this instance on or off hold.
     *
     * @param onHold <tt>true</tt> to have the <tt>CallPeer</tt> put on hold;
     * <tt>false</tt>, otherwise
     *
     * @throws OperationFailedException if we fail to construct or send the
     * INVITE request putting the remote side on/off hold.
     */
    public void putOnHold(boolean onHold)
        throws OperationFailedException
    {
        CallPeerMediaHandlerJabberImpl mediaHandler = getMediaHandler();

        mediaHandler.setLocallyOnHold(onHold);

        SessionInfoType type;

        if(onHold)
            type = SessionInfoType.hold;
        else
        {
            type = SessionInfoType.unhold;
            getMediaHandler().reinitAllContents();
        }

        //we are now on hold and need to realize this before potentially
        //spoiling it all with an exception while sending the packet :).
        reevalLocalHoldStatus();

        JingleIQ onHoldIQ = JinglePacketFactory.createSessionInfo(
                getProtocolProvider().getOurJID(),
                peerJID,
                getSID(),
                type);

        getProtocolProvider().getConnection().sendPacket(onHoldIQ);
    }

    /**
     * Send a <tt>content-add</tt> to add video setup.
 */
private void sendAddVideoContent()
{
    List<ContentPacketExtension> contents;

    try
    {
        // Build the local video content description/transport pair.
        contents = getMediaHandler().createContentList(MediaType.VIDEO);
    }
    catch(Exception exc)
    {
        // Best effort: if we cannot gather the content, just skip the add.
        logger.warn("Failed to gather content for video type", exc);
        return;
    }

    ProtocolProviderServiceJabberImpl protocolProvider
        = getProtocolProvider();
    JingleIQ contentIQ
        = JinglePacketFactory.createContentAdd(
                protocolProvider.getOurJID(),
                this.peerJID,
                getSID(),
                contents);

    protocolProvider.getConnection().sendPacket(contentIQ);
}

/**
 * Sends a <tt>session-info</tt> message carrying a COIN (conference info)
 * extension to reflect changes in the setup such as the local peer/user
 * becoming a conference focus.
 */
public void sendCoinSessionInfo()
{
    JingleIQ sessionInfoIQ
        = JinglePacketFactory.createSessionInfo(
                getProtocolProvider().getOurJID(),
                this.peerJID,
                getSID());
    // Advertise whether our Call currently acts as a conference focus.
    CoinPacketExtension coinExt
        = new CoinPacketExtension(getCall().isConferenceFocus());

    sessionInfoIQ.addExtension(coinExt);
    getProtocolProvider().getConnection().sendPacket(sessionInfoIQ);
}

/**
 * Returns the <tt>MediaDirection</tt> that should be set for the content
 * of type <tt>mediaType</tt> in the Jingle session for this
 * <tt>CallPeer</tt>.
 * If we are the focus of a conference and are doing RTP translation,
 * takes into account the other <tt>CallPeer</tt>s in the <tt>Call</tt>.
 *
 * @param mediaType the <tt>MediaType</tt> for which to return the
 * <tt>MediaDirection</tt>
 * @return the <tt>MediaDirection</tt> that should be used for the content
 * of type <tt>mediaType</tt> in the Jingle session for this
 * <tt>CallPeer</tt>.
 */
private MediaDirection getDirectionForJingle(MediaType mediaType)
{
    MediaDirection direction = MediaDirection.INACTIVE;

    CallPeerMediaHandlerJabberImpl mediaHandler = getMediaHandler();

    // If we are streaming media, the direction should allow sending
    if ( (MediaType.AUDIO == mediaType
            && mediaHandler.isLocalAudioTransmissionEnabled())
        || (MediaType.VIDEO == mediaType
            && isLocalVideoStreaming()))
        direction = direction.or(MediaDirection.SENDONLY);

    // If we are receiving media from this CallPeer, the direction should
    // allow receiving
    SendersEnum senders = getSenders(mediaType);

    if (senders == null || senders == SendersEnum.both
        || (isInitiator() && senders == SendersEnum.initiator)
        || (!isInitiator() && senders == SendersEnum.responder))
        direction = direction.or(MediaDirection.RECVONLY);

    // If we are the focus of a conference and we are receiving media from
    // another CallPeer in the same Call, the direction should allow sending
    CallJabberImpl call = getCall();

    if (call != null && call.isConferenceFocus())
    {
        for (CallPeerJabberImpl peer : call.getCallPeerList())
        {
            if (peer != this)
            {
                senders = peer.getSenders(mediaType);
                if (senders == null || senders == SendersEnum.both
                    || (peer.isInitiator()
                        && senders == SendersEnum.initiator)
                    || (!peer.isInitiator()
                        && senders == SendersEnum.responder))
                {
                    // At least one other peer is sending to us, so as the
                    // focus we must be able to forward (send) it on.
                    direction = direction.or(MediaDirection.SENDONLY);
                    break;
                }
            }
        }
    }

    return direction;
}

/**
 * Send, if necessary, a jingle <tt>content</tt> message to reflect change
 * in video setup. Whether the jingle session should have a video content,
 * and if so, the value of the <tt>senders</tt> field is determined
 * based on whether we are streaming local video and, if we are the focus
 * of a conference, on the other peers in the conference.
 * The message can be content-modify if video content exists (and the
 * <tt>senders</tt> field changes), content-add or content-remove.
 *
 * @return <tt>true</tt> if a jingle <tt>content</tt> message was sent.
*/ public boolean sendModifyVideoContent() { CallPeerMediaHandlerJabberImpl mediaHandler = getMediaHandler(); MediaDirection direction = getDirectionForJingle(MediaType.VIDEO); ContentPacketExtension remoteContent = mediaHandler.getLocalContent(MediaType.VIDEO.toString()); if (remoteContent == null) { if (direction == MediaDirection.INACTIVE) { // no video content, none needed return false; } else { if (getState() == CallPeerState.CONNECTED) { if (logger.isInfoEnabled()) logger.info("Adding video content for " + this); sendAddVideoContent(); return true; } return false; } } else { if (direction == MediaDirection.INACTIVE) { sendRemoveVideoContent(); return true; } } SendersEnum senders = getSenders(MediaType.VIDEO); if (senders == null) senders = SendersEnum.both; SendersEnum newSenders = SendersEnum.none; if (MediaDirection.SENDRECV == direction) newSenders = SendersEnum.both; else if (MediaDirection.RECVONLY == direction) newSenders = isInitiator() ? SendersEnum.initiator : SendersEnum.responder; else if (MediaDirection.SENDONLY == direction) newSenders = isInitiator() ? 
SendersEnum.responder : SendersEnum.initiator; /* * Send Content-Modify */ ContentPacketExtension ext = new ContentPacketExtension(); String remoteContentName = remoteContent.getName(); ext.setSenders(newSenders); ext.setCreator(remoteContent.getCreator()); ext.setName(remoteContentName); if (newSenders != senders) { if (logger.isInfoEnabled()) logger.info("Sending content modify, senders: " + senders + "->" + newSenders); ProtocolProviderServiceJabberImpl protocolProvider = getProtocolProvider(); JingleIQ contentIQ = JinglePacketFactory.createContentModify( protocolProvider.getOurJID(), this.peerJID, getSID(), ext); protocolProvider.getConnection().sendPacket(contentIQ); } try { mediaHandler.reinitContent(remoteContentName, ext, false); mediaHandler.start(); } catch(Exception e) { logger.warn("Exception occurred during media reinitialization", e); } return (newSenders != senders); } /** * Send a <tt>content</tt> message to reflect change in video setup (start * or stop). */ public void sendModifyVideoResolutionContent() { CallPeerMediaHandlerJabberImpl mediaHandler = getMediaHandler(); ContentPacketExtension remoteContent = mediaHandler.getRemoteContent(MediaType.VIDEO.toString()); ContentPacketExtension content; logger.info("send modify-content to change resolution"); // send content-modify with RTP description // create content list with resolution try { content = mediaHandler.createContentForMedia(MediaType.VIDEO); } catch (Exception e) { logger.warn("Failed to gather content for video type", e); return; } // if we are only receiving video senders is null SendersEnum senders = remoteContent.getSenders(); if (senders != null) content.setSenders(senders); ProtocolProviderServiceJabberImpl protocolProvider = getProtocolProvider(); JingleIQ contentIQ = JinglePacketFactory.createContentModify( protocolProvider.getOurJID(), this.peerJID, getSID(), content); protocolProvider.getConnection().sendPacket(contentIQ); try { 
mediaHandler.reinitContent(remoteContent.getName(), content, false); mediaHandler.start(); } catch(Exception e) { logger.warn("Exception occurred when media reinitialization", e); } } /** * Send a <tt>content-remove</tt> to remove video setup. */ private void sendRemoveVideoContent() { CallPeerMediaHandlerJabberImpl mediaHandler = getMediaHandler(); ContentPacketExtension content = new ContentPacketExtension(); ContentPacketExtension remoteContent = mediaHandler.getRemoteContent(MediaType.VIDEO.toString()); if (remoteContent == null) return; String remoteContentName = remoteContent.getName(); content.setName(remoteContentName); content.setCreator(remoteContent.getCreator()); content.setSenders(remoteContent.getSenders()); ProtocolProviderServiceJabberImpl protocolProvider = getProtocolProvider(); JingleIQ contentIQ = JinglePacketFactory.createContentRemove( protocolProvider.getOurJID(), this.peerJID, getSID(), Arrays.asList(content)); protocolProvider.getConnection().sendPacket(contentIQ); mediaHandler.removeContent(remoteContentName); setSenders(MediaType.VIDEO, SendersEnum.none); } /** * Sends local candidate addresses from the local peer to the remote peer * using the <tt>transport-info</tt> {@link JingleIQ}. 
* * @param contents the local candidate addresses to be sent from the local * peer to the remote peer using the <tt>transport-info</tt> * {@link JingleIQ} */ protected void sendTransportInfo(Iterable<ContentPacketExtension> contents) { // if the call is canceled, do not start sending candidates in // transport-info if(cancelled) return; JingleIQ transportInfo = new JingleIQ(); for (ContentPacketExtension content : contents) transportInfo.addContent(content); ProtocolProviderServiceJabberImpl protocolProvider = getProtocolProvider(); transportInfo.setAction(JingleAction.TRANSPORT_INFO); transportInfo.setFrom(protocolProvider.getOurJID()); transportInfo.setSID(getSID()); transportInfo.setTo(getAddress()); transportInfo.setType(IQ.Type.SET); PacketCollector collector = protocolProvider.getConnection().createPacketCollector( new PacketIDFilter(transportInfo.getPacketID())); protocolProvider.getConnection().sendPacket(transportInfo); collector.nextResult(SmackConfiguration.getPacketReplyTimeout()); collector.cancel(); } @Override public void setState(CallPeerState newState, String reason, int reasonCode) { CallPeerState oldState = getState(); try { /* * We need to dispose of the transport manager before the * 'call' field is set to null, because if Jitsi Videobridge is in * use, it (the call) is needed in order to expire the * Videobridge channels. */ if (CallPeerState.DISCONNECTED.equals(newState) || CallPeerState.FAILED.equals(newState)) getMediaHandler().getTransportManager().close(); } finally { super.setState(newState, reason, reasonCode); } if (CallPeerState.isOnHold(oldState) && CallPeerState.CONNECTED.equals(newState)) { try { getCall().modifyVideoContent(); } catch (OperationFailedException ofe) { logger.error("Failed to update call video state after " + "'hold' status removed for "+this); } } } /** * Transfer (in the sense of call transfer) this <tt>CallPeer</tt> to a * specific callee address which may optionally be participating in an * active <tt>Call</tt>. 
* * @param to the address of the callee to transfer this <tt>CallPeer</tt> to * @param sid the Jingle session ID of the active <tt>Call</tt> between the * local peer and the callee in the case of attended transfer; <tt>null</tt> * in the case of unattended transfer * @throws OperationFailedException if something goes wrong */ protected void transfer(String to, String sid) throws OperationFailedException { JingleIQ transferSessionInfo = new JingleIQ(); ProtocolProviderServiceJabberImpl protocolProvider = getProtocolProvider(); transferSessionInfo.setAction(JingleAction.SESSION_INFO); transferSessionInfo.setFrom(protocolProvider.getOurJID()); transferSessionInfo.setSID(getSID()); transferSessionInfo.setTo(getAddress()); transferSessionInfo.setType(IQ.Type.SET); TransferPacketExtension transfer = new TransferPacketExtension(); // Attended transfer. if (sid != null) { /* * Not really sure what the value of the "from" attribute of the * "transfer" element should be but the examples in "XEP-0251: * Jingle Session Transfer" has it in the case of attended transfer. */ transfer.setFrom(protocolProvider.getOurJID()); transfer.setSID(sid); // Puts on hold the 2 calls before making the attended transfer. 
OperationSetBasicTelephonyJabberImpl basicTelephony = (OperationSetBasicTelephonyJabberImpl) protocolProvider.getOperationSet( OperationSetBasicTelephony.class); CallPeerJabberImpl callPeer = basicTelephony.getActiveCallPeer(sid); if(callPeer != null) { if(!CallPeerState.isOnHold(callPeer.getState())) { callPeer.putOnHold(true); } } if(!CallPeerState.isOnHold(this.getState())) { this.putOnHold(true); } } transfer.setTo(to); transferSessionInfo.addExtension(transfer); Connection connection = protocolProvider.getConnection(); PacketCollector collector = connection.createPacketCollector( new PacketIDFilter(transferSessionInfo.getPacketID())); protocolProvider.getConnection().sendPacket(transferSessionInfo); Packet result = collector.nextResult(SmackConfiguration.getPacketReplyTimeout()); if(result == null) { // Log the failed transfer call and notify the user. throw new OperationFailedException( "No response to the \"transfer\" request.", OperationFailedException.ILLEGAL_ARGUMENT); } else if (((IQ) result).getType() != IQ.Type.RESULT) { // Log the failed transfer call and notify the user. throw new OperationFailedException( "Remote peer does not manage call \"transfer\"." + "Response to the \"transfer\" request is: " + ((IQ) result).getType(), OperationFailedException.ILLEGAL_ARGUMENT); } else { String message = ((sid == null) ? "Unattended" : "Attended") + " transfer to: " + to; // Implements the SIP behavior: once the transfer is accepted, the // current call is closed. hangup( false, message, new ReasonPacketExtension(Reason.SUCCESS, message, new TransferredPacketExtension())); } } /** * {@inheritDoc} */ public String getEntity() { return getAddress(); } /** * {@inheritDoc} * * In Jingle there isn't an actual "direction" parameter. We use the * <tt>senders</tt> field to calculate the direction. 
 */
@Override
public MediaDirection getDirection(MediaType mediaType)
{
    SendersEnum senders = getSenders(mediaType);

    if (senders == SendersEnum.none)
    {
        return MediaDirection.INACTIVE;
    }
    else if (senders == null || senders == SendersEnum.both)
    {
        return MediaDirection.SENDRECV;
    }
    else if (senders == SendersEnum.initiator)
    {
        // Only the initiator sends; if we initiated the session we are
        // therefore sending, otherwise receiving.
        return isInitiator()
                ? MediaDirection.RECVONLY
                : MediaDirection.SENDONLY;
    }
    else //senders == SendersEnum.responder
    {
        return isInitiator()
                ? MediaDirection.SENDONLY
                : MediaDirection.RECVONLY;
    }
}

/**
 * Gets the current value of the <tt>senders</tt> field of the content with
 * name <tt>mediaType</tt> in the Jingle session with this
 * <tt>CallPeer</tt>.
 *
 * @param mediaType the <tt>MediaType</tt> for which to get the current
 * value of the <tt>senders</tt> field.
 * @return the current value of the <tt>senders</tt> field of the content
 * with name <tt>mediaType</tt> in the Jingle session with this
 * <tt>CallPeer</tt>.
 */
public SendersEnum getSenders(MediaType mediaType)
{
    switch (mediaType)
    {
    case AUDIO:
        return audioSenders;
    case DATA:
        /*
         * FIXME DATA has been introduced as a MediaType but explicit
         * support for DATA content has not been added yet.
         */
        return SendersEnum.none;
    case VIDEO:
        return videoSenders;
    default:
        throw new IllegalArgumentException("mediaType");
    }
}

/**
 * Set the current value of the <tt>senders</tt> field of the content with
 * name <tt>mediaType</tt> in the Jingle session with this <tt>CallPeer</tt>
 *
 * @param mediaType the <tt>MediaType</tt> for which to get the current
 * value of the <tt>senders</tt> field.
 * @param senders the value to set
 */
public void setSenders(MediaType mediaType, SendersEnum senders)
{
    switch(mediaType)
    {
    case AUDIO:
        this.audioSenders = senders;
        break;
    case VIDEO:
        this.videoSenders = senders;
        break;
    default:
        // DATA (and anything else) is not supported here; see getSenders.
        throw new IllegalArgumentException("mediaType");
    }
}

/**
 * Gets the <tt>MediaType</tt> of <tt>content</tt>. If <tt>content</tt>
 * does not have a <tt>description</tt> child and therefore no
 * <tt>MediaType</tt> can be associated with it, tries to take the
 * <tt>MediaType</tt> from the session's already established contents with
 * the same name as <tt>content</tt>.
 *
 * @param content the <tt>ContentPacketExtension</tt> for which to get the
 * <tt>MediaType</tt>
 * @return the <tt>MediaType</tt> of <tt>content</tt>.
 */
public MediaType getMediaType(ContentPacketExtension content)
{
    String contentName = content.getName();
    if (contentName == null)
        return null;

    MediaType mediaType = JingleUtils.getMediaType(content);
    if (mediaType == null)
    {
        CallPeerMediaHandlerJabberImpl mediaHandler = getMediaHandler();

        // Fall back to the already-established content (remote first, then
        // local) carrying the same name.
        for (MediaType m : MediaType.values())
        {
            ContentPacketExtension sessionContent
                = mediaHandler.getRemoteContent(m.toString());

            if (sessionContent == null)
                sessionContent = mediaHandler.getLocalContent(m.toString());

            if (sessionContent != null
                    && contentName.equals(sessionContent.getName()))
            {
                mediaType = m;
                break;
            }
        }
    }

    return mediaType;
}
}
package com.ngdata.lily.security.hbase.client; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.OperationWithAttributes; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Row; import org.apache.hadoop.hbase.client.RowLock; import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; import org.apache.hadoop.hbase.util.Bytes; import javax.annotation.Nullable; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Set; /** * An implementation of, and wrapper around, HTableInterface that automatically adds * the authentication context onto requests. This is just helper code to assure correct * communication of the user context on all requests. * * <p>Some methods don't support passing attributes, and for those we can't add authentication * info. These methods throw an exception. It is of course possible to bypass the authentication * by using the wrapped HTable instance, but then no authorization filtering will be applied!</p> * * <p>This method will modify the operation objects (the Get, Put, etc.) by adding an attribute * to them (cfr. HBase's OperationWithAttributes). 
This means it is not a good idea to have, say,
 * a Get object shared between threads.</p>
 */
public class AuthEnabledHTable implements HTableInterface {
    // All state is set once in the constructor; fields are final.
    private final HTableInterface delegate;
    private final AuthorizationContextProvider authzCtxProvider;
    private final byte[] extraPermissions;
    private final byte[] appName;
    private final boolean failWhenNotAuthenticated;
    // Fix: was a per-instance field; a shared constant is the Java idiom.
    private static final String ERROR_MSG = "Method not supported with authentication";

    /**
     *
     * @param failWhenNotAuthenticated throw an exception if there is no authorization context (= authenticated
     *                                 user) available, i.o.w. refuse to do requests that will not be subject to
     *                                 authorization filtering.
     * @param appName application name, identifies the permissions that should be active for this application.
     *                Permissions start with "appName:...".
     * @param extraPermissions extra permissions which will be passed upon each request and which extend the
     *                         permissions of the user. See {@link HBaseAuthzUtil#EXTRA_PERMISSION_ATT}. This
     *                         overwrites any per-request permissions which might already have been set.
     */
    public AuthEnabledHTable(AuthorizationContextProvider authzCtxProvider, boolean failWhenNotAuthenticated,
            String appName, @Nullable Set<String> extraPermissions, HTableInterface delegate) {
        this.authzCtxProvider = authzCtxProvider;
        this.appName = Bytes.toBytes(appName);
        this.extraPermissions = extraPermissions != null ? HBaseAuthzUtil.serialize(extraPermissions) : null;
        this.failWhenNotAuthenticated = failWhenNotAuthenticated;
        this.delegate = delegate;
    }

    /**
     * Returns the exception thrown by methods that cannot carry the authentication
     * context. Fix: UnsupportedOperationException (a RuntimeException subclass, so
     * existing callers catching RuntimeException keep working) describes the
     * condition better than a bare RuntimeException.
     */
    private static RuntimeException unsupported() {
        return new UnsupportedOperationException(ERROR_MSG);
    }

    /** Adds the authentication context to a single operation. */
    private void addAuthInfo(OperationWithAttributes op) {
        addAuthInfo(Arrays.<OperationWithAttributes>asList(op));
    }

    /**
     * Adds the authentication context to each operation.
     *
     * <p>For multi-ops: since the list of operations might be split to be sent to
     * different servers, we need to add the authentication context to all of them.</p>
     */
    private void addAuthInfo(Iterable<? extends OperationWithAttributes> ops) {
        AuthorizationContext authzCtx = authzCtxProvider.getAuthorizationContext();
        if (authzCtx == null) {
            if (failWhenNotAuthenticated) {
                // Fix: IllegalStateException (still a RuntimeException) better
                // conveys "no authenticated user" than a bare RuntimeException.
                throw new IllegalStateException("No authenticated user available.");
            }
            // No context and not required: pass the operation through unmodified.
            return;
        }

        byte[] userAsBytes = authzCtx.serialize();
        for (OperationWithAttributes op : ops) {
            op.setAttribute(AuthorizationContext.OPERATION_ATTRIBUTE, userAsBytes);
            op.setAttribute(HBaseAuthzUtil.APP_NAME_ATT, appName);
            if (extraPermissions != null) {
                op.setAttribute(HBaseAuthzUtil.EXTRA_PERMISSION_ATT, extraPermissions);
            }
        }
    }

    @Override
    public byte[] getTableName() {
        return delegate.getTableName();
    }

    @Override
    public Configuration getConfiguration() {
        return delegate.getConfiguration();
    }

    @Override
    public HTableDescriptor getTableDescriptor() throws IOException {
        return delegate.getTableDescriptor();
    }

    @Override
    public boolean exists(Get get) throws IOException {
        addAuthInfo(get);
        return delegate.exists(get);
    }

    @Override
    public void batch(List<? extends Row> actions, Object[] results) throws IOException, InterruptedException {
        // Row does not expose attributes, so the auth context cannot be attached.
        throw unsupported();
    }

    @Override
    public Object[] batch(List<? extends Row> actions) throws IOException, InterruptedException {
        throw unsupported();
    }

    @Override
    public Result get(Get get) throws IOException {
        addAuthInfo(get);
        return delegate.get(get);
    }

    @Override
    public Result[] get(List<Get> gets) throws IOException {
        addAuthInfo(gets);
        return delegate.get(gets);
    }

    @Override
    public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
        throw unsupported();
    }

    @Override
    public ResultScanner getScanner(Scan scan) throws IOException {
        addAuthInfo(scan);
        return delegate.getScanner(scan);
    }

    @Override
    public ResultScanner getScanner(byte[] family) throws IOException {
        throw unsupported();
    }

    @Override
    public ResultScanner getScanner(byte[] family, byte[] qualifier) throws IOException {
        throw unsupported();
    }

    @Override
    public void put(Put put) throws IOException {
        addAuthInfo(put);
        delegate.put(put);
    }

    @Override
    public void put(List<Put> puts) throws IOException {
        addAuthInfo(puts);
        delegate.put(puts);
    }

    @Override
    public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, byte[] value, Put put)
            throws IOException {
        addAuthInfo(put);
        return delegate.checkAndPut(row, family, qualifier, value, put);
    }

    @Override
    public void delete(Delete delete) throws IOException {
        addAuthInfo(delete);
        delegate.delete(delete);
    }

    @Override
    public void delete(List<Delete> deletes) throws IOException {
        addAuthInfo(deletes);
        delegate.delete(deletes);
    }

    @Override
    public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value, Delete delete)
            throws IOException {
        addAuthInfo(delete);
        return delegate.checkAndDelete(row, family, qualifier, value, delete);
    }

    @Override
    public void mutateRow(RowMutations rm) throws IOException {
        throw unsupported();
    }

    @Override
    public Result append(Append append) throws IOException {
        addAuthInfo(append);
        return delegate.append(append);
    }

    @Override
    public Result increment(Increment increment) throws IOException {
        throw unsupported();
    }

    @Override
    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException {
        throw unsupported();
    }

    @Override
    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL)
            throws IOException {
        throw unsupported();
    }

    @Override
    public boolean isAutoFlush() {
        return delegate.isAutoFlush();
    }

    @Override
    public void flushCommits() throws IOException {
        delegate.flushCommits();
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }

    @Override
    public RowLock lockRow(byte[] row) throws IOException {
        throw unsupported();
    }

    @Override
    public void unlockRow(RowLock rl) throws IOException {
        throw unsupported();
    }

    @Override
    public <T extends CoprocessorProtocol> T coprocessorProxy(Class<T> protocol, byte[] row) {
        return delegate.coprocessorProxy(protocol, row);
    }

    @Override
    public <T extends CoprocessorProtocol, R> Map<byte[], R> coprocessorExec(Class<T> protocol, byte[] startKey,
            byte[] endKey, Batch.Call<T, R> callable) throws IOException, Throwable {
        return delegate.coprocessorExec(protocol, startKey, endKey, callable);
    }

    @Override
    public <T extends CoprocessorProtocol, R> void coprocessorExec(Class<T> protocol, byte[] startKey,
            byte[] endKey, Batch.Call<T, R> callable, Batch.Callback<R> callback) throws IOException, Throwable {
        delegate.coprocessorExec(protocol, startKey, endKey, callable, callback);
    }

    @Override
    public void setAutoFlush(boolean autoFlush) {
        delegate.setAutoFlush(autoFlush);
    }

    @Override
    public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
        delegate.setAutoFlush(autoFlush, clearBufferOnFail);
    }

    @Override
    public long getWriteBufferSize() {
        return delegate.getWriteBufferSize();
    }

    @Override
    public void setWriteBufferSize(long writeBufferSize) throws IOException {
        delegate.setWriteBufferSize(writeBufferSize);
    }
}
package ca.uhn.fhirtest.interceptor;

import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
import static org.apache.commons.lang3.StringUtils.isBlank;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;

import javax.annotation.PreDestroy;

import org.apache.commons.io.IOUtils;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.springframework.scheduling.annotation.Scheduled;

import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.client.apache.ApacheRestfulClientFactory;
import ca.uhn.fhir.rest.server.interceptor.InterceptorAdapter;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.UrlUtil;

/**
 * Server interceptor that buffers one analytics event per incoming request and
 * periodically submits them to the Google Analytics Measurement Protocol batch
 * endpoint.
 */
public class AnalyticsInterceptor extends InterceptorAdapter {
	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(AnalyticsInterceptor.class);

	private String myAnalyticsTid;
	// Hard cap on buffered events; beyond this, new events are dropped.
	private int myCollectThreshold = 100000;
	private final LinkedList<AnalyticsEvent> myEventBuffer = new LinkedList<AnalyticsEvent>();
	private String myHostname;
	private HttpClient myHttpClient;
	private long myLastFlushed;
	private long mySubmitPeriod = 60000;
	private int mySubmitThreshold = 1000;

	/**
	 * Constructor
	 */
	public AnalyticsInterceptor() {
		myHttpClient = new ApacheRestfulClientFactory().getNativeHttpClient();
		try {
			myHostname = InetAddress.getLocalHost().getHostName();
		} catch (UnknownHostException e) {
			myHostname = "Unknown";
		}
	}

	@PreDestroy
	public void destroy() {
		if (myHttpClient instanceof CloseableHttpClient) {
			IOUtils.closeQuietly((CloseableHttpClient) myHttpClient);
		}
	}

	/**
	 * Drains up to 20 buffered events (the GA batch endpoint limit per request)
	 * and submits them in a single POST.
	 */
	protected void doFlush() {
		// Fix: myLastFlushed was never assigned anywhere, so the period-based
		// throttle in flush() always fired. Record the flush time here.
		myLastFlushed = System.currentTimeMillis();

		List<AnalyticsEvent> eventsToFlush;
		synchronized (myEventBuffer) {
			int size = myEventBuffer.size();
			if (size > 20) {
				size = 20;
			}
			eventsToFlush = new ArrayList<AnalyticsEvent>(size);
			for (int i = 0; i < size; i++) {
				AnalyticsEvent nextEvent = myEventBuffer.pollFirst();
				if (nextEvent != null) {
					eventsToFlush.add(nextEvent);
				}
			}
		}

		// One Measurement Protocol hit per line.
		StringBuilder b = new StringBuilder();
		for (AnalyticsEvent next : eventsToFlush) {
			b.append("v=1");
			b.append("&tid=").append(myAnalyticsTid);
			b.append("&t=event");
			b.append("&an=").append(UrlUtil.escape(myHostname)).append('+').append(UrlUtil.escape(next.getApplicationName()));
			b.append("&ec=").append(next.getResourceName());
			b.append("&ea=").append(next.getRestOperation());
			b.append("&cid=").append(next.getClientId());
			b.append("&uip=").append(UrlUtil.escape(next.getSourceIp()));
			b.append("&ua=").append(UrlUtil.escape(next.getUserAgent()));
			b.append("\n");
		}

		String contents = b.toString();
		HttpPost post = new HttpPost("https://www.google-analytics.com/batch");
		post.setEntity(new StringEntity(contents, ContentType.APPLICATION_FORM_URLENCODED));
		CloseableHttpResponse response = null;
		try {
			response = (CloseableHttpResponse) myHttpClient.execute(post);
			ourLog.trace("Analytics response: {}", response);
			ourLog.info("Flushed {} analytics events and got HTTP {} {}", eventsToFlush.size(), response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase());
		} catch (Exception e) {
			ourLog.error("Failed to submit analytics:", e);
		} finally {
			if (response != null) {
				IOUtils.closeQuietly(response);
			}
		}
	}

	/**
	 * Scheduled entry point: flushes when either the submit period has elapsed
	 * or enough events have accumulated.
	 */
	@Scheduled(fixedDelay = 5000)
	public synchronized void flush() {
		int pendingEvents;
		synchronized (myEventBuffer) {
			pendingEvents = myEventBuffer.size();
		}
		if (pendingEvents == 0) {
			return;
		}

		if (System.currentTimeMillis() - myLastFlushed > mySubmitPeriod) {
			doFlush();
			return;
		}
		if (pendingEvents >= mySubmitThreshold) {
			doFlush();
			return;
		}
	}

	@Override
	public void incomingRequestPreHandled(RestOperationTypeEnum theOperation, ActionRequestDetails theRequest) {
		ServletRequestDetails details = (ServletRequestDetails) theRequest.getRequestDetails();

		// Make sure we only send one event per request
		if (details.getUserData().containsKey(getClass().getName())) {
			return;
		}
		details.getUserData().put(getClass().getName(), "");

		// Prefer the proxy-supplied client address; take only the first hop.
		String sourceIp = details.getHeader("x-forwarded-for");
		if (isBlank(sourceIp)) {
			sourceIp = details.getServletRequest().getRemoteAddr();
		}
		// Fix: guard against a null remote address before substring handling.
		sourceIp = defaultIfBlank(sourceIp, "");
		if (sourceIp.contains(", ")) {
			sourceIp = sourceIp.substring(0, sourceIp.indexOf(", "));
		}

		AnalyticsEvent event = new AnalyticsEvent();
		event.setSourceIp(sourceIp);
		// Fix: setRestOperation was invoked twice with the same value; once is enough.
		event.setRestOperation(theOperation);
		event.setUserAgent(details.getHeader("User-Agent"));
		event.setApplicationName(details.getServletRequest().getServletPath());
		event.setResourceName(defaultIfBlank(details.getResourceName(), "SERVER"));
		event.setClientId(UUID.randomUUID().toString());

		synchronized (myEventBuffer) {
			if (myEventBuffer.size() > myCollectThreshold) {
				ourLog.warn("Not collecting analytics on request! Event buffer has {} items in it", myEventBuffer.size());
				// Fix: honour the warning and actually drop the event; the
				// original added it to the buffer anyway.
				return;
			}
			myEventBuffer.add(event);
		}
	}

	public void setAnalyticsTid(String theAnalyticsTid) {
		myAnalyticsTid = theAnalyticsTid;
	}

	/** Immutable-ish value object describing one analytics hit. */
	public static class AnalyticsEvent {
		private String myApplicationName;
		private String myClientId;
		private String myResourceName;
		private RestOperationTypeEnum myRestOperation;
		private String mySourceIp;
		private String myUserAgent;

		public String getApplicationName() {
			return myApplicationName;
		}

		public String getClientId() {
			return myClientId;
		}

		public String getResourceName() {
			return myResourceName;
		}

		public RestOperationTypeEnum getRestOperation() {
			return myRestOperation;
		}

		public String getSourceIp() {
			return mySourceIp;
		}

		public String getUserAgent() {
			return myUserAgent;
		}

		public void setApplicationName(String theApplicationName) {
			myApplicationName = theApplicationName;
		}

		public void setClientId(String theClientId) {
			myClientId = theClientId;
		}

		public void setResourceName(String theResourceName) {
			myResourceName = theResourceName;
		}

		public void setRestOperation(RestOperationTypeEnum theRestOperation) {
			myRestOperation = theRestOperation;
		}

		public void setSourceIp(String theSourceIp) {
			mySourceIp = theSourceIp;
		}

		public void setUserAgent(String theUserAgent) {
			myUserAgent = theUserAgent;
		}
	}
}
/*
 * Copyright 2019 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.dmn.client.editors.types.listview.constraint;

import com.google.gwt.event.dom.client.ClickEvent;
import elemental2.dom.DOMTokenList;
import elemental2.dom.Element;
import elemental2.dom.HTMLAnchorElement;
import elemental2.dom.HTMLButtonElement;
import elemental2.dom.HTMLDivElement;
import elemental2.dom.HTMLElement;
import elemental2.dom.Node;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.dmn.api.definition.v1_1.ConstraintType;
import org.kie.workbench.common.dmn.client.editors.types.listview.constraint.common.DataTypeConstraintComponent;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.uberfire.client.views.pfly.selectpicker.JQuery;
import org.uberfire.client.views.pfly.selectpicker.JQuery.CallbackFunction;
import org.uberfire.client.views.pfly.selectpicker.JQueryEvent;
import org.uberfire.client.views.pfly.selectpicker.JQuerySelectPickerEvent;
import org.uberfire.client.views.pfly.selectpicker.JQuerySelectPickerTarget;
import org.uberfire.mvp.Command;

import static org.junit.Assert.assertEquals;
import static org.kie.workbench.common.dmn.api.definition.v1_1.ConstraintType.ENUMERATION;
import static org.kie.workbench.common.dmn.api.definition.v1_1.ConstraintType.EXPRESSION;
import static org.kie.workbench.common.dmn.client.editors.types.common.HiddenHelper.HIDDEN_CSS_CLASS;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.powermock.api.mockito.PowerMockito.mockStatic;

/**
 * Unit tests for {@code DataTypeConstraintModalView}.
 *
 * <p>The view under test is a {@code spy} wrapping a real instance built from mocked DOM
 * elements, so individual tests can stub out select-picker/DOM helpers ({@code doNothing},
 * {@code doReturn}) while still exercising the method under test. PowerMock is used only to
 * statically mock {@code JQuery.$} in {@link #testSetupOnHideHandler()}.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest({JQuery.class})
public class DataTypeConstraintModalViewTest {

    @Mock
    private HTMLDivElement header;

    @Mock
    private HTMLDivElement body;

    @Mock
    private HTMLDivElement footer;

    @Mock
    private HTMLDivElement componentContainer;

    @Mock
    private HTMLButtonElement okButton;

    @Mock
    private HTMLButtonElement cancelButton;

    @Mock
    private HTMLAnchorElement clearAllAnchor;

    @Mock
    private HTMLElement type;

    @Mock
    private HTMLDivElement selectConstraint;

    @Mock
    private HTMLDivElement constraintWarningMessage;

    @Mock
    private HTMLButtonElement closeConstraintWarningMessage;

    @Mock
    private DataTypeConstraintModal presenter;

    // Spy over a real view so tests can stub DOM-touching helpers selectively.
    private DataTypeConstraintModalView view;

    @Before
    public void setup() {
        view = spy(new DataTypeConstraintModalView(header, body, footer, componentContainer, okButton, cancelButton, clearAllAnchor, type, selectConstraint, constraintWarningMessage, closeConstraintWarningMessage));
        view.init(presenter);
    }

    @Test
    public void testInit() {
        // Stub the setup helpers so init() can be verified without touching the DOM.
        doNothing().when(view).setupSelectPicker();
        doNothing().when(view).setupSelectPickerOnChangeHandler();
        doNothing().when(view).setupEmptyContainer();

        view.init();

        verify(view).setupSelectPicker();
        verify(view).setupSelectPickerOnChangeHandler();
        verify(view).setupEmptyContainer();
    }

    @Test
    public void testGetHeader() {
        final String expectedHeader = "header";
        header.textContent = expectedHeader;

        final String actualHeader = view.getHeader();

        assertEquals(expectedHeader, actualHeader);
    }

    @Test
    public void testGetBody() {
        assertEquals(body, view.getBody());
    }

    @Test
    public void testGetFooter() {
        assertEquals(footer, view.getFooter());
    }

    @Test
    public void testOnOkButtonClick() {
        // Clicking OK delegates straight to the presenter.
        view.onOkButtonClick(mock(ClickEvent.class));
        verify(presenter).save();
    }

    @Test
    public void testOnCancelButtonClick() {
        view.onCancelButtonClick(mock(ClickEvent.class));
        verify(presenter).hide();
    }

    @Test
    public void testOnClearAllAnchorClick() {
        view.onClearAllAnchorClick(mock(ClickEvent.class));
        verify(presenter).clearAll();
    }

    @Test
    public void testSetType() {
        final String expectedText = "type";
        this.type.textContent = "something";

        view.setType(expectedText);

        final String actualText = this.type.textContent;
        assertEquals(expectedText, actualText);
    }

    @Test
    public void testOnSelectChangeWhenValueIsNotBlank() {
        final JQuerySelectPickerEvent jQueryEvent = mock(JQuerySelectPickerEvent.class);
        final JQuerySelectPickerTarget pickerTarget = mock(JQuerySelectPickerTarget.class);
        final ConstraintType constraintType = ENUMERATION;

        doNothing().when(view).loadComponent(ENUMERATION);
        jQueryEvent.target = pickerTarget;
        pickerTarget.value = constraintType.value();

        view.onSelectChange(jQueryEvent);

        // A non-blank picker value must load the matching constraint component.
        verify(view).loadComponent(constraintType);
    }

    @Test
    public void testSetupEmptyContainer() {
        componentContainer.innerHTML = "something";

        view.setupEmptyContainer();

        // The container is cleared and the placeholder "select constraint" element re-attached.
        assertEquals("", componentContainer.innerHTML);
        verify(componentContainer).appendChild(selectConstraint);
    }

    @Test
    public void testLoadComponent() {
        final ConstraintType constraintType = ENUMERATION;
        final DataTypeConstraintComponent constrainComponent = mock(DataTypeConstraintComponent.class);
        final Element element = mock(Element.class);

        when(presenter.getCurrentComponent()).thenReturn(constrainComponent);
        when(constrainComponent.getElement()).thenReturn(element);
        componentContainer.innerHTML = "something";

        view.loadComponent(constraintType);

        // Old content is discarded before the presenter-provided component element is attached.
        assertEquals("", componentContainer.innerHTML);
        verify(presenter).setupComponent(constraintType);
        verify(componentContainer).appendChild(element);
    }

    @Test
    public void testOnShowWhenConstraintValueIsBlank() {
        final Element selectPicker = mock(HTMLElement.class);

        when(presenter.getConstraintValue()).thenReturn(null);
        when(presenter.inferComponentType(any())).thenCallRealMethod();
        doReturn(selectPicker).when(view).getSelectPicker();
        doNothing().when(view).setPickerValue(any(), anyString());

        view.onShow();

        // No stored constraint: the picker defaults to EXPRESSION.
        verify(view).setPickerValue(selectPicker, EXPRESSION.value());
    }

    @Test
    public void testOnShowWhenConstraintValueIsNotBlank() {
        final Element selectPicker = mock(HTMLElement.class);
        final String constraint = "1,2,3";

        when(presenter.getConstraintValue()).thenReturn(constraint);
        when(presenter.inferComponentType(constraint)).thenReturn(ENUMERATION);
        doReturn(selectPicker).when(view).getSelectPicker();
        doNothing().when(view).setPickerValue(any(), anyString());

        view.onShow();

        // The picker reflects the type inferred from the existing constraint value.
        verify(view).setPickerValue(selectPicker, ENUMERATION.value());
    }

    @Test
    public void testSetupSelectPicker() {
        final Element element = mock(Element.class);

        doReturn(element).when(view).getSelectPicker();
        doNothing().when(view).triggerPickerAction(any(), anyString());

        view.setupSelectPicker();

        verify(view).triggerPickerAction(element, "refresh");
    }

    @Test
    public void testSetupSelectPickerOnChangeHandler() {
        final Element element = mock(Element.class);

        doReturn(element).when(view).getSelectPicker();
        doNothing().when(view).setupOnChangeHandler(any());

        view.setupSelectPickerOnChangeHandler();

        verify(view).setupOnChangeHandler(element);
    }

    @Test
    public void testGetSelectPicker() {
        final HTMLElement expectedSelect = mock(HTMLElement.class);

        when(body.querySelector(".selectpicker")).thenReturn(expectedSelect);

        final Element actualSelect = view.getSelectPicker();

        assertEquals(expectedSelect, actualSelect);
    }

    @Test
    public void testOnCloseConstraintWarningClick() {
        constraintWarningMessage.classList = mock(DOMTokenList.class);

        view.onCloseConstraintWarningClick(mock(ClickEvent.class));

        // Closing the warning hides it via the shared "hidden" CSS class.
        verify(constraintWarningMessage.classList).add(HIDDEN_CSS_CLASS);
    }

    @Test
    public void testShowConstraintWarningMessage() {
        constraintWarningMessage.classList = mock(DOMTokenList.class);

        view.showConstraintWarningMessage();

        verify(constraintWarningMessage.classList).remove(HIDDEN_CSS_CLASS);
    }

    @Test
    public void testSetupOnHideHandler() {
        // Build the parentNode chain body -> modal-body -> modal-content -> modal-dialog -> modal,
        // since the handler is registered on the outermost modal element.
        final HTMLElement body = mock(HTMLElement.class);
        final Node modalBody = mock(Node.class);
        final Node modalContent = mock(Node.class);
        final Node modalDialog = mock(Node.class);
        final Node modalComponent = mock(Node.class);
        final Command command = mock(Command.class);
        final JQuery jQuery = mock(JQuery.class);
        final ArgumentCaptor<CallbackFunction> captor = ArgumentCaptor.forClass(CallbackFunction.class);

        body.parentNode = modalBody;
        modalBody.parentNode = modalContent;
        modalContent.parentNode = modalDialog;
        modalDialog.parentNode = modalComponent;
        doReturn(body).when(view).getBody();
        mockStatic(JQuery.class);
        PowerMockito.when(JQuery.$(modalComponent)).thenReturn(jQuery);

        view.setupOnHideHandler(command);

        // Capture the registered Bootstrap "hidden" callback and fire it manually.
        verify(jQuery).on(eq("hidden.bs.modal"), captor.capture());
        captor.getValue().call(mock(JQueryEvent.class));
        verify(command).execute();
    }
}
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.unscramble; import com.intellij.openapi.util.text.StringUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * @author yole */ public class ThreadDumpParser { private static final Pattern ourThreadStartPattern = Pattern.compile("^\"(.+)\".+prio=\\d+ tid=[^\\s]+ nid=[^\\s]+ ([^\\[]+)"); private static final Pattern ourThreadStatePattern = Pattern.compile("java\\.lang\\.Thread\\.State: (.+) \\((.+)\\)"); private static final Pattern ourThreadStatePattern2 = Pattern.compile("java\\.lang\\.Thread\\.State: (.+)"); private static final Pattern ourWaitingForLockPattern = Pattern.compile("- waiting (on|to lock) <(.+)>"); private static final Pattern ourParkingToWaitForLockPattern = Pattern.compile("- parking to wait for <(.+)>"); @NonNls private static final String PUMP_EVENT = "java.awt.EventDispatchThread.pumpOneEventForFilters"; private static final Pattern ourIdleTimerThreadPattern = Pattern.compile("java.lang.Object.wait\\([^()]+\\)\\s+at java.util.TimerThread.mainLoop"); private static final Pattern ourIdleSwingTimerThreadPattern = Pattern.compile("java.lang.Object.wait\\([^()]+\\)\\s+at javax.swing.TimerQueue.run"); private ThreadDumpParser() { } public 
static List<ThreadState> parse(String threadDump) { List<ThreadState> result = new ArrayList<ThreadState>(); StringBuilder lastThreadStack = new StringBuilder(); ThreadState lastThreadState = null; boolean expectingThreadState = false; boolean haveNonEmptyStackTrace = false; for(@NonNls String line: StringUtil.tokenize(threadDump, "\r\n")) { if (line.startsWith("============") || line.contains("Java-level deadlock")) { break; } ThreadState state = tryParseThreadStart(line); if (state != null) { if (lastThreadState != null) { lastThreadState.setStackTrace(lastThreadStack.toString(), !haveNonEmptyStackTrace); } lastThreadState = state; result.add(lastThreadState); lastThreadStack.setLength(0); haveNonEmptyStackTrace = false; lastThreadStack.append(line).append("\n"); expectingThreadState = true; } else { boolean parsedThreadState = false; if (expectingThreadState) { expectingThreadState = false; parsedThreadState = tryParseThreadState(line, lastThreadState); } lastThreadStack.append(line).append("\n"); if (!parsedThreadState && line.trim().startsWith("at")) { haveNonEmptyStackTrace = true; } } } if (lastThreadState != null) { lastThreadState.setStackTrace(lastThreadStack.toString(), !haveNonEmptyStackTrace); } for(ThreadState threadState: result) { inferThreadStateDetail(threadState); final String s = findWaitingForLock(threadState.getStackTrace()); if (s != null) { for(ThreadState lockOwner : result) { if (lockOwner == threadState) { continue; } final String marker = "- locked <" + s + ">"; if (lockOwner.getStackTrace().contains(marker)) { if (threadState.isAwaitedBy(lockOwner)) { threadState.addDeadlockedThread(lockOwner); lockOwner.addDeadlockedThread(threadState); } lockOwner.addWaitingThread(threadState); break; } } } } sortThreads(result); return result; } public static void sortThreads(List<ThreadState> result) { Collections.sort(result, new Comparator<ThreadState>() { public int compare(final ThreadState o1, final ThreadState o2) { return getInterestLevel(o2) 
- getInterestLevel(o1); } }); } @Nullable private static String findWaitingForLock(final String stackTrace) { Matcher m = ourWaitingForLockPattern.matcher(stackTrace); if (m.find()) { return m.group(2); } m = ourParkingToWaitForLockPattern.matcher(stackTrace); if (m.find()) { return m.group(1); } return null; } private static int getInterestLevel(final ThreadState state) { if (state.isEmptyStackTrace()) return -10; if (isKnownJdkThread(state)) return -5; if (state.isSleeping()) { return -2; } if (state.getOperation() == ThreadOperation.Socket) { return -1; } return state.getStackTrace().split("\n").length; } public static boolean isKnownJdkThread(final ThreadState state) { @NonNls String stackTrace = state.getStackTrace(); return stackTrace.contains("java.lang.ref.Reference$ReferenceHandler.run") || stackTrace.contains("java.lang.ref.Finalizer$FinalizerThread.run") || stackTrace.contains("sun.awt.AWTAutoShutdown.run") || stackTrace.contains("sun.java2d.Disposer.run") || stackTrace.contains("sun.awt.windows.WToolkit.eventLoop") || ourIdleTimerThreadPattern.matcher(stackTrace).find() || ourIdleSwingTimerThreadPattern.matcher(stackTrace).find(); } public static void inferThreadStateDetail(final ThreadState threadState) { @NonNls String stackTrace = threadState.getStackTrace(); if (stackTrace.contains("at java.net.PlainSocketImpl.socketAccept") || stackTrace.contains("at java.net.PlainDatagramSocketImpl.receive") || stackTrace.contains("at java.net.SocketInputStream.socketRead") || stackTrace.contains("at java.net.PlainSocketImpl.socketConnect")) { threadState.setOperation(ThreadOperation.Socket); } else if (stackTrace.contains("at java.io.FileInputStream.readBytes")) { threadState.setOperation(ThreadOperation.IO); } else if (stackTrace.contains("at java.lang.Thread.sleep")) { final String javaThreadState = threadState.getJavaThreadState(); if (!Thread.State.RUNNABLE.name().equals(javaThreadState)) { threadState.setThreadStateDetail("sleeping"); // JDK 1.6 sets this 
explicitly, but JDK 1.5 does not } } if (threadState.isEDT()) { if (stackTrace.contains("java.awt.EventQueue.getNextEvent")) { threadState.setThreadStateDetail("idle"); } int modality = 0; int pos = 0; while(true) { pos = stackTrace.indexOf(PUMP_EVENT, pos); if (pos < 0) break; modality++; pos += PUMP_EVENT.length(); } threadState.setExtraState("modality level " + modality); } } @Nullable private static ThreadState tryParseThreadStart(final String line) { Matcher m = ourThreadStartPattern.matcher(line); if (m.find()) { return new ThreadState(m.group(1), m.group(2)); } return null; } private static boolean tryParseThreadState(final String line, final ThreadState threadState) { Matcher m = ourThreadStatePattern.matcher(line); if (m.find()) { threadState.setJavaThreadState(m.group(1)); threadState.setThreadStateDetail(m.group(2).trim()); return true; } m = ourThreadStatePattern2.matcher(line); if (m.find()) { threadState.setJavaThreadState(m.group(1)); return true; } return false; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.tinkerpop.gremlin.structure.io.graphson;

import org.apache.tinkerpop.gremlin.process.traversal.Path;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategy;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.MutablePath;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.Tree;
import org.apache.tinkerpop.gremlin.process.traversal.util.DefaultTraversalMetrics;
import org.apache.tinkerpop.gremlin.process.traversal.util.ImmutableExplanation;
import org.apache.tinkerpop.gremlin.process.traversal.util.Metrics;
import org.apache.tinkerpop.gremlin.process.traversal.util.MutableMetrics;
import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalExplanation;
import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalMetrics;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.apache.tinkerpop.gremlin.structure.Property;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.structure.VertexProperty;
import org.apache.tinkerpop.gremlin.structure.util.Comparators;
import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedEdge;
import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedFactory;
import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedProperty;
import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex;
import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty;
import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils;
import org.apache.tinkerpop.shaded.jackson.core.JsonGenerationException;
import org.apache.tinkerpop.shaded.jackson.core.JsonGenerator;
import org.apache.tinkerpop.shaded.jackson.core.JsonParser;
import org.apache.tinkerpop.shaded.jackson.core.JsonProcessingException;
import org.apache.tinkerpop.shaded.jackson.core.JsonToken;
import org.apache.tinkerpop.shaded.jackson.databind.DeserializationContext;
import org.apache.tinkerpop.shaded.jackson.databind.JavaType;
import org.apache.tinkerpop.shaded.jackson.databind.JsonNode;
import org.apache.tinkerpop.shaded.jackson.databind.SerializerProvider;
import org.apache.tinkerpop.shaded.jackson.databind.deser.std.StdDeserializer;
import org.apache.tinkerpop.shaded.jackson.databind.jsontype.TypeSerializer;
import org.apache.tinkerpop.shaded.jackson.databind.node.ArrayNode;
import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdKeySerializer;
import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdScalarSerializer;
import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdSerializer;
import org.apache.tinkerpop.shaded.jackson.databind.type.TypeFactory;
import org.javatuples.Pair;
import org.javatuples.Triplet;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import static org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONUtil.safeWriteObjectField;

/**
 * GraphSON serializers for graph-based objects such as vertices, edges, properties, and paths. These serializers
 * present a generalized way to serialize the implementations of core interfaces.
 *
 * @author Stephen Mallette (http://stephen.genoprime.com)
 */
class GraphSONSerializersV2d0 {

    // Static utility holder - not meant to be instantiated.
    private GraphSONSerializersV2d0() {
    }

    ////////////////////////////// SERIALIZERS /////////////////////////////////

    /**
     * Writes a {@link Vertex} as {id, label, properties}; when {@code normalize} is set, keys and
     * properties are emitted in a stable sorted order so output is deterministic.
     */
    final static class VertexJacksonSerializer extends StdScalarSerializer<Vertex> {

        private final boolean normalize;

        public VertexJacksonSerializer(final boolean normalize) {
            super(Vertex.class);
            this.normalize = normalize;
        }

        @Override
        public void serialize(final Vertex vertex, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
                throws IOException {
            jsonGenerator.writeStartObject();
            jsonGenerator.writeObjectField(GraphSONTokens.ID, vertex.id());
            jsonGenerator.writeStringField(GraphSONTokens.LABEL, vertex.label());
            writeProperties(vertex, jsonGenerator);
            jsonGenerator.writeEndObject();
        }

        // Emits the "properties" object; each key maps to an array of vertex property values.
        private void writeProperties(final Vertex vertex, final JsonGenerator jsonGenerator) throws IOException {
            if (vertex.keys().size() == 0) return;
            jsonGenerator.writeFieldName(GraphSONTokens.PROPERTIES);
            jsonGenerator.writeStartObject();

            final List<String> keys = normalize ?
                    IteratorUtils.list(vertex.keys().iterator(), Comparator.naturalOrder()) : new ArrayList<>(vertex.keys());
            for (String key : keys) {
                final Iterator<VertexProperty<Object>> vertexProperties = normalize ?
                        IteratorUtils.list(vertex.properties(key), Comparators.PROPERTY_COMPARATOR).iterator() : vertex.properties(key);
                if (vertexProperties.hasNext()) {
                    jsonGenerator.writeFieldName(key);
                    jsonGenerator.writeStartArray();
                    while (vertexProperties.hasNext()) {
                        jsonGenerator.writeObject(vertexProperties.next());
                    }
                    jsonGenerator.writeEndArray();
                }
            }
            jsonGenerator.writeEndObject();
        }
    }

    /**
     * Writes an {@link Edge} as {id, label, inVLabel, outVLabel, inV, outV, properties}.
     */
    final static class EdgeJacksonSerializer extends StdScalarSerializer<Edge> {

        private final boolean normalize;

        public EdgeJacksonSerializer(final boolean normalize) {
            super(Edge.class);
            this.normalize = normalize;
        }

        @Override
        public void serialize(final Edge edge, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
                throws IOException {
            jsonGenerator.writeStartObject();
            jsonGenerator.writeObjectField(GraphSONTokens.ID, edge.id());
            jsonGenerator.writeStringField(GraphSONTokens.LABEL, edge.label());
            jsonGenerator.writeStringField(GraphSONTokens.IN_LABEL, edge.inVertex().label());
            jsonGenerator.writeStringField(GraphSONTokens.OUT_LABEL, edge.outVertex().label());
            jsonGenerator.writeObjectField(GraphSONTokens.IN, edge.inVertex().id());
            jsonGenerator.writeObjectField(GraphSONTokens.OUT, edge.outVertex().id());
            writeProperties(edge, jsonGenerator);
            jsonGenerator.writeEndObject();
        }

        // Edge properties are single-valued, so they serialize as a flat key->property object.
        private void writeProperties(final Edge edge, final JsonGenerator jsonGenerator) throws IOException {
            final Iterator<Property<Object>> elementProperties = normalize ?
                    IteratorUtils.list(edge.properties(), Comparators.PROPERTY_COMPARATOR).iterator() : edge.properties();
            if (elementProperties.hasNext()) {
                jsonGenerator.writeFieldName(GraphSONTokens.PROPERTIES);
                jsonGenerator.writeStartObject();
                elementProperties.forEachRemaining(prop -> safeWriteObjectField(jsonGenerator, prop.key(), prop));
                jsonGenerator.writeEndObject();
            }
        }
    }

    /** Writes a plain {@link Property} as {key, value}. */
    final static class PropertyJacksonSerializer extends StdScalarSerializer<Property> {

        public PropertyJacksonSerializer() {
            super(Property.class);
        }

        @Override
        public void serialize(final Property property, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
                throws IOException {
            jsonGenerator.writeStartObject();
            jsonGenerator.writeObjectField(GraphSONTokens.KEY, property.key());
            jsonGenerator.writeObjectField(GraphSONTokens.VALUE, property.value());
            jsonGenerator.writeEndObject();
        }
    }

    /**
     * Writes a {@link VertexProperty} as {id, value[, label], properties}; the label is optional
     * because vertex serialization already names the key the property sits under.
     */
    final static class VertexPropertyJacksonSerializer extends StdScalarSerializer<VertexProperty> {

        private final boolean normalize;
        private final boolean includeLabel;

        public VertexPropertyJacksonSerializer(final boolean normalize, final boolean includeLabel) {
            super(VertexProperty.class);
            this.normalize = normalize;
            this.includeLabel = includeLabel;
        }

        @Override
        public void serialize(final VertexProperty property, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
                throws IOException {
            jsonGenerator.writeStartObject();
            jsonGenerator.writeObjectField(GraphSONTokens.ID, property.id());
            jsonGenerator.writeObjectField(GraphSONTokens.VALUE, property.value());
            if (includeLabel)
                jsonGenerator.writeStringField(GraphSONTokens.LABEL, property.label());
            tryWriteMetaProperties(property, jsonGenerator, normalize);
            jsonGenerator.writeEndObject();
        }

        private static void tryWriteMetaProperties(final VertexProperty property, final JsonGenerator jsonGenerator,
                                                   final boolean normalize) throws IOException {
            // when "detached" you can't check features of the graph it detached from so it has to be
            // treated differently from a regular VertexProperty implementation.
            if (property instanceof DetachedVertexProperty) {
                // only write meta properties key if they exist
                if (property.properties().hasNext()) {
                    writeMetaProperties(property, jsonGenerator, normalize);
                }
            } else {
                // still attached - so we can check the features to see if it's worth even trying to write the
                // meta properties key
                if (property.graph().features().vertex().supportsMetaProperties() && property.properties().hasNext()) {
                    writeMetaProperties(property, jsonGenerator, normalize);
                }
            }
        }

        private static void writeMetaProperties(final VertexProperty property, final JsonGenerator jsonGenerator,
                                                final boolean normalize) throws IOException {
            jsonGenerator.writeFieldName(GraphSONTokens.PROPERTIES);
            jsonGenerator.writeStartObject();

            final Iterator<Property<Object>> metaProperties = normalize ?
                    IteratorUtils.list((Iterator<Property<Object>>) property.properties(), Comparators.PROPERTY_COMPARATOR).iterator() : property.properties();
            while (metaProperties.hasNext()) {
                final Property<Object> metaProperty = metaProperties.next();
                jsonGenerator.writeObjectField(metaProperty.key(), metaProperty.value());
            }
            jsonGenerator.writeEndObject();
        }
    }

    /** Writes a {@link Path} as {labels, objects}, detaching it first so elements drop their properties. */
    final static class PathJacksonSerializer extends StdScalarSerializer<Path> {

        public PathJacksonSerializer() {
            super(Path.class);
        }

        @Override
        public void serialize(final Path path, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
                throws IOException, JsonGenerationException {
            jsonGenerator.writeStartObject();

            // paths shouldn't serialize with properties if the path contains graph elements
            final Path p = DetachedFactory.detach(path, false);
            jsonGenerator.writeObjectField(GraphSONTokens.LABELS, p.labels());
            jsonGenerator.writeObjectField(GraphSONTokens.OBJECTS, p.objects());
            jsonGenerator.writeEndObject();
        }
    }

    /** Writes a {@link Tree} as an array of {key, value} entry objects. */
    final static class TreeJacksonSerializer extends StdScalarSerializer<Tree> {

        public TreeJacksonSerializer() {
            super(Tree.class);
        }

        @Override
        public void serialize(final Tree tree, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
                throws IOException, JsonGenerationException {
            jsonGenerator.writeStartArray();
            final Set<Map.Entry<Element, Tree>> set = tree.entrySet();
            for (Map.Entry<Element, Tree> entry : set) {
                jsonGenerator.writeStartObject();
                jsonGenerator.writeObjectField(GraphSONTokens.KEY, entry.getKey());
                jsonGenerator.writeObjectField(GraphSONTokens.VALUE, entry.getValue());
                jsonGenerator.writeEndObject();
            }
            jsonGenerator.writeEndArray();
        }
    }

    /**
     * Writes a {@link TraversalExplanation} as {original, intermediate, final}: the original step list,
     * each strategy's resulting step list, and the fully compiled step list.
     */
    final static class TraversalExplanationJacksonSerializer extends StdScalarSerializer<TraversalExplanation> {

        public TraversalExplanationJacksonSerializer() {
            super(TraversalExplanation.class);
        }

        @Override
        public void serialize(final TraversalExplanation traversalExplanation, final JsonGenerator jsonGenerator,
                              final SerializerProvider serializerProvider) throws IOException {
            final Map<String, Object> m = new HashMap<>();
            m.put(GraphSONTokens.ORIGINAL, getStepsAsList(traversalExplanation.getOriginalTraversal()));

            final List<Pair<TraversalStrategy, Traversal.Admin<?, ?>>> strategyTraversals = traversalExplanation.getStrategyTraversals();

            final List<Map<String, Object>> intermediates = new ArrayList<>();
            for (final Pair<TraversalStrategy, Traversal.Admin<?, ?>> pair : strategyTraversals) {
                final Map<String, Object> intermediate = new HashMap<>();
                intermediate.put(GraphSONTokens.STRATEGY, pair.getValue0().toString());
                intermediate.put(GraphSONTokens.CATEGORY, pair.getValue0().getTraversalCategory().getSimpleName());
                intermediate.put(GraphSONTokens.TRAVERSAL, getStepsAsList(pair.getValue1()));
                intermediates.add(intermediate);
            }
            m.put(GraphSONTokens.INTERMEDIATE, intermediates);

            // "final" is the last strategy's traversal; with no strategies it equals the original.
            if (strategyTraversals.isEmpty())
                m.put(GraphSONTokens.FINAL, getStepsAsList(traversalExplanation.getOriginalTraversal()));
            else
                m.put(GraphSONTokens.FINAL, getStepsAsList(strategyTraversals.get(strategyTraversals.size() - 1).getValue1()));

            jsonGenerator.writeObject(m);
        }

        private List<String> getStepsAsList(final Traversal.Admin<?, ?> t) {
            final List<String> steps = new ArrayList<>();
            t.getSteps().iterator().forEachRemaining(s -> steps.add(s.toString()));
            return steps;
        }
    }

    /** Writes an {@link Integer} as a plain JSON number. */
    final static class IntegerGraphSONSerializer extends StdScalarSerializer<Integer> {
        public IntegerGraphSONSerializer() {
            super(Integer.class);
        }

        @Override
        public void serialize(final Integer integer, final JsonGenerator jsonGenerator,
                              final SerializerProvider serializerProvider) throws IOException {
            jsonGenerator.writeNumber(((Integer) integer).intValue());
        }
    }

    /** Writes a {@link Double} as a plain JSON number. */
    final static class DoubleGraphSONSerializer extends StdScalarSerializer<Double> {
        public DoubleGraphSONSerializer() {
            super(Double.class);
        }

        @Override
        public void serialize(final Double doubleValue, final JsonGenerator jsonGenerator,
                              final SerializerProvider serializerProvider) throws IOException {
            jsonGenerator.writeNumber(doubleValue);
        }
    }

    /** Writes {@link TraversalMetrics} as {dur (ms), metrics: [...]}. */
    final static class TraversalMetricsJacksonSerializer extends StdScalarSerializer<TraversalMetrics> {
        public TraversalMetricsJacksonSerializer() {
            super(TraversalMetrics.class);
        }

        @Override
        public void serialize(final TraversalMetrics traversalMetrics, final JsonGenerator jsonGenerator,
                              final SerializerProvider serializerProvider) throws IOException {
            // creation of the map enables all the fields to be properly written with their type if required
            final Map<String, Object> m = new HashMap<>();
            // duration is reported in milliseconds (nanoseconds / 1e6)
            m.put(GraphSONTokens.DURATION, traversalMetrics.getDuration(TimeUnit.NANOSECONDS) / 1000000d);
            final List<Metrics> metrics = new ArrayList<>();
            metrics.addAll(traversalMetrics.getMetrics());
            m.put(GraphSONTokens.METRICS, metrics);

            jsonGenerator.writeObject(m);
        }
    }

    /** Writes a single {@link Metrics} entry, including optional annotations and nested metrics. */
    final static class MetricsJacksonSerializer extends StdScalarSerializer<Metrics> {
        public MetricsJacksonSerializer() {
            super(Metrics.class);
        }

        @Override
        public void serialize(final Metrics metrics, final JsonGenerator jsonGenerator,
                              final SerializerProvider serializerProvider) throws IOException {
            final Map<String, Object> m = new HashMap<>();
            m.put(GraphSONTokens.ID, metrics.getId());
            m.put(GraphSONTokens.NAME, metrics.getName());
            m.put(GraphSONTokens.COUNTS, metrics.getCounts());
            m.put(GraphSONTokens.DURATION, metrics.getDuration(TimeUnit.NANOSECONDS) / 1000000d);

            if (!metrics.getAnnotations().isEmpty()) {
                m.put(GraphSONTokens.ANNOTATIONS, metrics.getAnnotations());
            }

            if (!metrics.getNested().isEmpty()) {
                final List<Metrics> nested = new ArrayList<>();
                metrics.getNested().forEach(it -> nested.add(it));
                m.put(GraphSONTokens.METRICS, nested);
            }

            jsonGenerator.writeObject(m);
        }
    }

    /**
     * Maps in the JVM can have {@link Object} as a key, but in JSON they must be a {@link String}.
     */
    final static class GraphSONKeySerializer extends StdKeySerializer {

        @Override
        public void serialize(final Object o, final JsonGenerator jsonGenerator,
                              final SerializerProvider serializerProvider) throws IOException {
            ser(o, jsonGenerator, serializerProvider);
        }

        @Override
        public void serializeWithType(final Object o, final JsonGenerator jsonGenerator,
                                      final SerializerProvider serializerProvider, final TypeSerializer typeSerializer) throws IOException {
            ser(o, jsonGenerator, serializerProvider);
        }

        // Graph elements key by their id's string form; everything else uses the default key serializer.
        private void ser(final Object o, final JsonGenerator jsonGenerator,
                         final SerializerProvider serializerProvider) throws IOException {
            if (Element.class.isAssignableFrom(o.getClass()))
                jsonGenerator.writeFieldName((((Element) o).id()).toString());
            else
                super.serialize(o, jsonGenerator, serializerProvider);
        }
    }

    //////////////////////////// DESERIALIZERS ///////////////////////////

    /** Reads a vertex object written by {@link VertexJacksonSerializer} into a {@link DetachedVertex}. */
    static class VertexJacksonDeserializer extends StdDeserializer<Vertex> {

        public VertexJacksonDeserializer() {
            super(Vertex.class);
        }

        public Vertex deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext)
                throws IOException, JsonProcessingException {
            final DetachedVertex.Builder v = DetachedVertex.build();
            while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
                if (jsonParser.getCurrentName().equals(GraphSONTokens.ID)) {
                    jsonParser.nextToken();
                    v.setId(deserializationContext.readValue(jsonParser, Object.class));
                } else if (jsonParser.getCurrentName().equals(GraphSONTokens.LABEL)) {
                    jsonParser.nextToken();
                    v.setLabel(jsonParser.getText());
                } else if (jsonParser.getCurrentName().equals(GraphSONTokens.PROPERTIES)) {
                    jsonParser.nextToken();
                    // properties is an object of key -> array-of-vertex-properties
                    while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
                        jsonParser.nextToken();
                        while (jsonParser.nextToken() != JsonToken.END_ARRAY) {
                            v.addProperty((DetachedVertexProperty) deserializationContext.readValue(jsonParser, VertexProperty.class));
                        }
                    }
                }
            }

            return v.create();
        }

        @Override
        public boolean isCachable() {
            return true;
        }
    }

    /** Reads an edge object written by {@link EdgeJacksonSerializer} into a {@link DetachedEdge}. */
    static class EdgeJacksonDeserializer extends StdDeserializer<Edge> {

        public EdgeJacksonDeserializer() {
            super(Edge.class);
        }

        @Override
        public Edge deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext)
                throws IOException, JsonProcessingException {
            final DetachedEdge.Builder e = DetachedEdge.build();
            final DetachedVertex.Builder inV = DetachedVertex.build();
            final DetachedVertex.Builder outV = DetachedVertex.build();
            while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
                if (jsonParser.getCurrentName().equals(GraphSONTokens.ID)) {
                    jsonParser.nextToken();
                    e.setId(deserializationContext.readValue(jsonParser, Object.class));
                } else if (jsonParser.getCurrentName().equals(GraphSONTokens.LABEL)) {
                    jsonParser.nextToken();
                    e.setLabel(jsonParser.getText());
                } else if (jsonParser.getCurrentName().equals(GraphSONTokens.OUT)) {
                    jsonParser.nextToken();
                    outV.setId(deserializationContext.readValue(jsonParser, Object.class));
                } else if (jsonParser.getCurrentName().equals(GraphSONTokens.OUT_LABEL)) {
                    jsonParser.nextToken();
                    outV.setLabel(jsonParser.getText());
                } else if (jsonParser.getCurrentName().equals(GraphSONTokens.IN)) {
                    jsonParser.nextToken();
                    inV.setId(deserializationContext.readValue(jsonParser, Object.class));
                } else if (jsonParser.getCurrentName().equals(GraphSONTokens.IN_LABEL)) {
                    jsonParser.nextToken();
                    inV.setLabel(jsonParser.getText());
                } else if (jsonParser.getCurrentName().equals(GraphSONTokens.PROPERTIES)) {
                    jsonParser.nextToken();
                    while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
                        jsonParser.nextToken();
                        e.addProperty(deserializationContext.readValue(jsonParser, Property.class));
                    }
                }
            }

            e.setInV(inV.create());
            e.setOutV(outV.create());

            return e.create();
        }

        @Override
        public boolean isCachable() {
            return true;
        }
    }

    /** Reads a {key, value} property object into a {@link DetachedProperty}. */
    static class PropertyJacksonDeserializer extends StdDeserializer<Property> {

        public PropertyJacksonDeserializer() {
            super(Property.class);
        }

        @Override
        public Property deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext)
                throws IOException, JsonProcessingException {
            String key = null;
            Object value = null;
            while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
                if (jsonParser.getCurrentName().equals(GraphSONTokens.KEY)) {
                    jsonParser.nextToken();
                    key = jsonParser.getText();
                } else if (jsonParser.getCurrentName().equals(GraphSONTokens.VALUE)) {
                    jsonParser.nextToken();
                    value = deserializationContext.readValue(jsonParser, Object.class);
                }
            }
            return new DetachedProperty<>(key, value);
        }

        @Override
        public boolean isCachable() {
            return true;
        }
    }

    /** Reads a {labels, objects} path object back into a {@link MutablePath}. */
    static class PathJacksonDeserializer extends StdDeserializer<Path> {
        private static final JavaType setType = TypeFactory.defaultInstance().constructCollectionType(HashSet.class, String.class);

        public PathJacksonDeserializer() {
            super(Path.class);
        }

        @Override
        public Path deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext)
                throws IOException, JsonProcessingException {
            final JsonNode n = jsonParser.readValueAsTree();
            final Path p = MutablePath.make();

            final ArrayNode labels = (ArrayNode) n.get(GraphSONTokens.LABELS);
            final ArrayNode objects = (ArrayNode) n.get(GraphSONTokens.OBJECTS);

            for (int i = 0; i < objects.size(); i++) {
                final JsonParser po =
objects.get(i).traverse(); po.nextToken(); final JsonParser pl = labels.get(i).traverse(); pl.nextToken(); p.extend(deserializationContext.readValue(po, Object.class), deserializationContext.readValue(pl, setType)); } return p; } @Override public boolean isCachable() { return true; } } static class VertexPropertyJacksonDeserializer extends StdDeserializer<VertexProperty> { private static final JavaType propertiesType = TypeFactory.defaultInstance().constructMapType(HashMap.class, String.class, Object.class); protected VertexPropertyJacksonDeserializer() { super(VertexProperty.class); } @Override public VertexProperty deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException { final DetachedVertexProperty.Builder vp = DetachedVertexProperty.build(); while (jsonParser.nextToken() != JsonToken.END_OBJECT) { if (jsonParser.getCurrentName().equals(GraphSONTokens.ID)) { jsonParser.nextToken(); vp.setId(deserializationContext.readValue(jsonParser, Object.class)); } else if (jsonParser.getCurrentName().equals(GraphSONTokens.LABEL)) { jsonParser.nextToken(); vp.setLabel(jsonParser.getText()); } else if (jsonParser.getCurrentName().equals(GraphSONTokens.VALUE)) { jsonParser.nextToken(); vp.setValue(deserializationContext.readValue(jsonParser, Object.class)); } else if (jsonParser.getCurrentName().equals(GraphSONTokens.PROPERTIES)) { jsonParser.nextToken(); while (jsonParser.nextToken() != JsonToken.END_OBJECT) { final String key = jsonParser.getCurrentName(); jsonParser.nextToken(); final Object val = deserializationContext.readValue(jsonParser, Object.class); vp.addProperty(new DetachedProperty(key, val)); } } } return vp.create(); } @Override public boolean isCachable() { return true; } } static class TraversalExplanationJacksonDeserializer extends StdDeserializer<TraversalExplanation> { public TraversalExplanationJacksonDeserializer() { super(TraversalExplanation.class); } @Override public 
TraversalExplanation deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException { final Map<String, Object> explainData = deserializationContext.readValue(jsonParser, Map.class); final String originalTraversal = explainData.get(GraphSONTokens.ORIGINAL).toString(); final List<Triplet<String, String, String>> intermediates = new ArrayList<>(); final List<Map<String,Object>> listMap = (List<Map<String,Object>>) explainData.get(GraphSONTokens.INTERMEDIATE); for (Map<String,Object> m : listMap) { intermediates.add(Triplet.with(m.get(GraphSONTokens.STRATEGY).toString(), m.get(GraphSONTokens.CATEGORY).toString(), m.get(GraphSONTokens.TRAVERSAL).toString())); } return new ImmutableExplanation(originalTraversal, intermediates); } @Override public boolean isCachable() { return true; } } static class MetricsJacksonDeserializer extends AbstractObjectDeserializer<Metrics> { public MetricsJacksonDeserializer() { super(Metrics.class); } @Override public Metrics createObject(final Map<String, Object> metricsData) { final MutableMetrics m = new MutableMetrics((String)metricsData.get(GraphSONTokens.ID), (String)metricsData.get(GraphSONTokens.NAME)); m.setDuration(Math.round((Double) metricsData.get(GraphSONTokens.DURATION) * 1000000), TimeUnit.NANOSECONDS); for (Map.Entry<String, Long> count : ((Map<String, Long>)metricsData.getOrDefault(GraphSONTokens.COUNTS, new HashMap<>(0))).entrySet()) { m.setCount(count.getKey(), count.getValue()); } for (Map.Entry<String, Long> count : ((Map<String, Long>) metricsData.getOrDefault(GraphSONTokens.ANNOTATIONS, new HashMap<>(0))).entrySet()) { m.setAnnotation(count.getKey(), count.getValue()); } for (MutableMetrics nested : (List<MutableMetrics>)metricsData.getOrDefault(GraphSONTokens.METRICS, new ArrayList<>(0))) { m.addNested(nested); } return m; } } static class TraversalMetricsJacksonDeserializer extends AbstractObjectDeserializer<TraversalMetrics> { public 
TraversalMetricsJacksonDeserializer() { super(TraversalMetrics.class); } @Override public TraversalMetrics createObject(final Map<String, Object> traversalMetricsData) { return new DefaultTraversalMetrics( Math.round((Double) traversalMetricsData.get(GraphSONTokens.DURATION) * 1000000), (List<MutableMetrics>) traversalMetricsData.get(GraphSONTokens.METRICS) ); } } static class TreeJacksonDeserializer extends StdDeserializer<Tree> { public TreeJacksonDeserializer() { super(Tree.class); } @Override public Tree deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException { final List<Map> data = deserializationContext.readValue(jsonParser, List.class); final Tree t = new Tree(); for (Map<String, Object> entry : data) { t.put(entry.get(GraphSONTokens.KEY), entry.get(GraphSONTokens.VALUE)); } return t; } @Override public boolean isCachable() { return true; } } static class IntegerJacksonDeserializer extends StdDeserializer<Integer> { protected IntegerJacksonDeserializer() { super(Integer.class); } @Override public Integer deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException { return jsonParser.getIntValue(); } @Override public boolean isCachable() { return true; } } static class DoubleJacksonDeserializer extends StdDeserializer<Double> { protected DoubleJacksonDeserializer() { super(Double.class); } @Override public Double deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException { if (jsonParser.getCurrentToken().isNumeric()) return jsonParser.getDoubleValue(); else { final String numberText = jsonParser.getValueAsString(); if ("NaN".equalsIgnoreCase(numberText)) return Double.NaN; else if ("-Infinity".equals(numberText) || "-INF".equalsIgnoreCase(numberText)) return Double.NEGATIVE_INFINITY; else if ("Infinity".equals(numberText) 
|| "INF".equals(numberText)) return Double.POSITIVE_INFINITY; else throw new IllegalStateException("Double value unexpected: " + numberText); } } @Override public boolean isCachable() { return true; } } }
/*
 * Copyright 2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.stream.app.syslog.source;

import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import javax.net.SocketFactory;

import org.hamcrest.Matchers;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;

import org.springframework.beans.DirectFieldAccessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.test.IntegrationTest;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.cloud.stream.app.test.PropertiesInitializer;
import org.springframework.cloud.stream.messaging.Source;
import org.springframework.cloud.stream.test.binder.MessageCollector;
import org.springframework.context.ApplicationContext;
import org.springframework.integration.ip.tcp.connection.AbstractServerConnectionFactory;
import org.springframework.integration.ip.tcp.connection.TcpNetServerConnectionFactory;
import org.springframework.integration.ip.tcp.connection.TcpNioServerConnectionFactory;
import org.springframework.integration.ip.udp.UnicastReceivingChannelAdapter;
import org.springframework.integration.syslog.inbound.UdpSyslogReceivingChannelAdapter;
import org.springframework.integration.test.util.TestUtils;
import org.springframework.messaging.Message;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

/**
 * Tests for SyslogSource.
 *
 * <p>The concrete scenarios live in the static nested subclasses below, each of which
 * re-runs the whole Spring context with its own {@code @IntegrationTest} property set.
 *
 * @author Gary Russell
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = SyslogSourceTests.SyslogSourceApplication.class, initializers = PropertiesInitializer.class)
@DirtiesContext
@IntegrationTest("syslog.port = 0")
public abstract class SyslogSourceTests {

	// Sample RFC 3164 (BSD) syslog datagram; hostname is "WEBERN".
	protected static final String RFC3164_PACKET = "<157>JUL 26 22:08:35 WEBERN TESTING[70729]: TEST SYSLOG MESSAGE";

	// Sample RFC 5424 syslog message; hostname is "loggregator".
	protected static final String RFC5424_PACKET = "<14>1 2014-06-20T09:14:07+00:00 loggregator d0602076-b14a-4c55-852a-981e7afeed38 DEA - "
			+ "[exampleSDID@32473 iut=\\\"3\\\" eventSource=\\\"Application\\\" eventID=\\\"1011\\\"]"
			+ "[exampleSDID@32473 iut=\\\"3\\\" eventSource=\\\"Application\\\" eventID=\\\"1011\\\"] "
			+ "Removing instance";

	@Autowired
	protected Source channels;

	@Autowired
	protected MessageCollector messageCollector;

	// Only present when the TCP (or "both") protocol is configured.
	@Autowired(required = false)
	protected AbstractServerConnectionFactory connectionFactory;

	// Only present when the UDP (or "both") protocol is configured.
	@Autowired(required = false)
	protected UdpSyslogReceivingChannelAdapter udpAdapter;

	@Autowired
	protected SyslogSourceProperties properties;

	@Autowired
	protected ApplicationContext context;

	// Actual port chosen in configureSource() and injected back from the environment.
	@Value("${syslog.port}")
	private int port;

	@BeforeClass
	public static void configureSource() throws Throwable {
		// TODO: we can remove this when SI 4.3 is used; UDP can then specify port=0, TCP can already do it.
		// Grab an ephemeral port by briefly binding a socket, then hand it to the context
		// via PropertiesInitializer. NOTE(review): the port could be re-taken between
		// close() and context startup — inherent (small) race in this workaround.
		DatagramSocket socket = new DatagramSocket(0);
		int port = socket.getLocalPort();
		socket.close();
		Properties properties = new Properties();
		properties.put("syslog.port", Integer.toString(port));
		PropertiesInitializer.PROPERTIES = properties;
	}

	/** Verifies that explicit NIO/lookup/timeout/buffer properties reach the connection factory. */
	@IntegrationTest({ "syslog.port = 0", "syslog.nio = true", "syslog.reverseLookup = true", "syslog.socketTimeout = 123",
			"syslog.bufferSize = 5" })
	public static class PropertiesPopulatedTests extends SyslogSourceTests {

		@Test
		public void test() throws Exception {
			assertThat(this.connectionFactory, Matchers.instanceOf(TcpNioServerConnectionFactory.class));
			assertTrue(TestUtils.getPropertyValue(this.connectionFactory, "lookupHost", Boolean.class));
			assertEquals(123, TestUtils.getPropertyValue(this.connectionFactory, "soTimeout"));
			assertEquals(5, TestUtils.getPropertyValue(this.connectionFactory, "deserializer.maxMessageSize"));
		}

	}

	/** Verifies the non-NIO defaults (net factory, no reverse lookup, 2KB buffer). */
	public static class NotNioTests extends SyslogSourceTests {

		@Test
		public void test() throws Exception {
			assertThat(this.connectionFactory, Matchers.instanceOf(TcpNetServerConnectionFactory.class));
			assertFalse(TestUtils.getPropertyValue(this.connectionFactory, "lookupHost", Boolean.class));
			assertEquals(0, TestUtils.getPropertyValue(this.connectionFactory, "soTimeout"));
			assertEquals(2048, TestUtils.getPropertyValue(this.connectionFactory, "deserializer.maxMessageSize"));
		}

	}

	/** RFC 3164 over TCP: payload is converted to a map with the parsed HOST. */
	public static class Tcp3164Tests extends SyslogSourceTests {

		@Test
		public void test() throws Exception {
			sendTcp(SyslogSourceTests.RFC3164_PACKET + "\n");
			Message<?> syslog = messageCollector.forChannel(channels.output()).poll(10, TimeUnit.SECONDS);
			assertNotNull(syslog);
			assertThat(syslog.getPayload(), instanceOf(Map.class));
			assertEquals("WEBERN", ((Map<?, ?>) syslog.getPayload()).get("HOST"));
		}

	}

	/** RFC 5424 over TCP; "253 " is the octet-count framing prefix. */
	@IntegrationTest({ "syslog.port = 0", "syslog.rfc = 5424" })
	public static class Tcp5424Tests extends SyslogSourceTests {

		@Test
		public void test() throws Exception {
			sendTcp("253 " + RFC5424_PACKET);
			Message<?> syslog = messageCollector.forChannel(channels.output()).poll(10, TimeUnit.SECONDS);
			assertNotNull(syslog);
			assertThat(syslog.getPayload(), instanceOf(Map.class));
			assertEquals("loggregator", ((Map<?, ?>) syslog.getPayload()).get("syslog_HOST"));
		}

	}

	/** RFC 3164 over UDP. */
	@IntegrationTest("syslog.protocol = udp")
	public static class Udp3164Tests extends SyslogSourceTests {

		@Test
		public void test() throws Exception {
			sendUdp(RFC3164_PACKET);
			Message<?> syslog = messageCollector.forChannel(channels.output()).poll(10, TimeUnit.SECONDS);
			assertNotNull(syslog);
			assertThat(syslog.getPayload(), instanceOf(Map.class));
			assertEquals("WEBERN", ((Map<?, ?>) syslog.getPayload()).get("HOST"));
		}

	}

	/** RFC 5424 over UDP. */
	@IntegrationTest({ "syslog.protocol = udp", "syslog.rfc = 5424" })
	public static class Udp5424Tests extends SyslogSourceTests {

		@Test
		public void test() throws Exception {
			sendUdp(RFC5424_PACKET);
			Message<?> syslog = messageCollector.forChannel(channels.output()).poll(10, TimeUnit.SECONDS);
			assertNotNull(syslog);
			assertThat(syslog.getPayload(), instanceOf(Map.class));
			assertEquals("loggregator", ((Map<?, ?>) syslog.getPayload()).get("syslog_HOST"));
		}

	}

	/** "both" protocol, RFC 3164: one message via TCP then one via UDP. */
	@IntegrationTest("syslog.protocol = both")
	public static class TcpAndUdp3164Tests extends SyslogSourceTests {

		@Test
		public void test() throws Exception {
			sendTcp(SyslogSourceTests.RFC3164_PACKET + "\n");
			Message<?> syslog = messageCollector.forChannel(channels.output()).poll(10, TimeUnit.SECONDS);
			assertNotNull(syslog);
			assertThat(syslog.getPayload(), instanceOf(Map.class));
			assertEquals("WEBERN", ((Map<?, ?>) syslog.getPayload()).get("HOST"));
			sendUdp(RFC3164_PACKET);
			syslog = messageCollector.forChannel(channels.output()).poll(10, TimeUnit.SECONDS);
			assertNotNull(syslog);
			assertThat(syslog.getPayload(), instanceOf(Map.class));
			assertEquals("WEBERN", ((Map<?, ?>) syslog.getPayload()).get("HOST"));
		}

	}

	/** "both" protocol, RFC 5424: one message via TCP then one via UDP. */
	@IntegrationTest({ "syslog.protocol = both", "syslog.rfc = 5424" })
	public static class TcpAndUdp5424Tests extends SyslogSourceTests {

		@Test
		public void test() throws Exception {
			sendTcp("253 " + RFC5424_PACKET);
			Message<?> syslog = messageCollector.forChannel(channels.output()).poll(10, TimeUnit.SECONDS);
			assertNotNull(syslog);
			assertThat(syslog.getPayload(), instanceOf(Map.class));
			assertEquals("loggregator", ((Map<?, ?>) syslog.getPayload()).get("syslog_HOST"));
			sendUdp(RFC5424_PACKET);
			syslog = messageCollector.forChannel(channels.output()).poll(10, TimeUnit.SECONDS);
			assertNotNull(syslog);
			assertThat(syslog.getPayload(), instanceOf(Map.class));
			assertEquals("loggregator", ((Map<?, ?>) syslog.getPayload()).get("syslog_HOST"));
		}

	}

	// Writes the raw bytes to the TCP listener; waits for it to be listening first.
	protected void sendTcp(String syslog) throws Exception {
		int port = getPort();
		Socket socket = SocketFactory.getDefault().createSocket("localhost", port);
		socket.getOutputStream().write(syslog.getBytes());
		socket.close();
	}

	// Sends one datagram to the UDP adapter's configured port; waits for it to be listening first.
	protected void sendUdp(String syslog) throws Exception {
		waitUdp();
		DatagramSocket socket = new DatagramSocket();
		DatagramPacket packet = new DatagramPacket(syslog.getBytes(), syslog.length());
		packet.setSocketAddress(new InetSocketAddress("localhost", this.port));
		socket.send(packet);
		socket.close();
	}

	// Polls up to ~10s for the TCP server to start listening, then returns its bound port.
	private int getPort() throws Exception {
		int n = 0;
		while (n++ < 100 && !this.connectionFactory.isListening()) {
			Thread.sleep(100);
		}
		assertTrue("server failed to start listening", this.connectionFactory.isListening());
		int port = this.connectionFactory.getPort();
		assertTrue("server stopped listening", port > 0);
		return port;
	}

	// Polls up to ~10s for the inner UnicastReceivingChannelAdapter (reached via reflection,
	// since the syslog adapter does not expose it) to start listening.
	private void waitUdp() throws Exception {
		int n = 0;
		DirectFieldAccessor dfa = new DirectFieldAccessor(this.udpAdapter);
		while (n++ < 100 && !((UnicastReceivingChannelAdapter) dfa.getPropertyValue("udpAdapter")).isListening()) {
			Thread.sleep(100);
		}
	}

	// Minimal boot application used as the test context root.
	@SpringBootApplication
	public static class SyslogSourceApplication {

	}

}
package com.nhl.link.rest.runtime.encoder;

import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.apache.cayenne.DataObject;

import com.nhl.link.rest.EntityProperty;
import com.nhl.link.rest.ResourceEntity;
import com.nhl.link.rest.encoder.Encoder;
import com.nhl.link.rest.encoder.GenericEncoder;
import com.nhl.link.rest.encoder.ISODateEncoder;
import com.nhl.link.rest.encoder.ISODateTimeEncoder;
import com.nhl.link.rest.encoder.ISOTimeEncoder;
import com.nhl.link.rest.encoder.IdEncoder;
import com.nhl.link.rest.meta.LrAttribute;
import com.nhl.link.rest.meta.LrEntity;
import com.nhl.link.rest.meta.LrPersistentAttribute;
import com.nhl.link.rest.meta.LrPersistentRelationship;
import com.nhl.link.rest.meta.LrRelationship;
import com.nhl.link.rest.meta.cayenne.CayenneLrEntity;
import com.nhl.link.rest.property.BeanPropertyReader;
import com.nhl.link.rest.property.IdPropertyReader;
import com.nhl.link.rest.property.PropertyBuilder;

/**
 * Builds and caches {@link EntityProperty} instances (attribute, relationship and ID
 * properties) with the appropriate value {@link Encoder} for each Java/JDBC type.
 *
 * <p>Thread-safety: all caches are {@link ConcurrentMap}s populated with the
 * put-if-absent idiom, so concurrent callers may occasionally build a duplicate
 * property, but every caller observes a single canonical cached instance.
 */
public class AttributeEncoderFactory implements IAttributeEncoderFactory {

	static final Class<?> UTIL_DATE = Date.class;
	static final Class<?> SQL_DATE = java.sql.Date.class;
	static final Class<?> SQL_TIME = Time.class;
	static final Class<?> SQL_TIMESTAMP = Timestamp.class;

	// these are explicit overrides for named attributes
	// FIX: declared as ConcurrentMap (they were always ConcurrentHashMaps) so the
	// atomic putIfAbsent contract is visible at the type level.
	private ConcurrentMap<String, EntityProperty> attributePropertiesByPath;
	private ConcurrentMap<String, EntityProperty> idPropertiesByEntity;
	private ConcurrentMap<LrEntity<?>, IdPropertyReader> idPropertyReaders;

	public AttributeEncoderFactory() {
		this.attributePropertiesByPath = new ConcurrentHashMap<>();
		this.idPropertiesByEntity = new ConcurrentHashMap<>();
		this.idPropertyReaders = new ConcurrentHashMap<>();
	}

	/**
	 * Returns the (cached) property for a named attribute of the given entity.
	 * Cache key is "entityName.attributeName".
	 */
	@Override
	public EntityProperty getAttributeProperty(LrEntity<?> entity, LrAttribute attribute) {
		String key = entity.getName() + "." + attribute.getName();

		EntityProperty property = attributePropertiesByPath.get(key);
		if (property == null) {
			// FIX: race-safe caching via putIfAbsent (was a non-atomic get/put that could
			// let two threads observe different instances); same idiom as
			// getOrCreateIdPropertyReader below.
			property = buildAttributeProperty(entity, attribute);
			EntityProperty existing = attributePropertiesByPath.putIfAbsent(key, property);
			if (existing != null) {
				property = existing;
			}
		}

		return property;
	}

	/**
	 * Returns a property for the given relationship. Not cached, because the target
	 * encoder is supplied per call.
	 */
	@Override
	public EntityProperty getRelationshipProperty(LrEntity<?> entity, LrRelationship relationship, Encoder encoder) {
		// TODO: can't cache, as target encoder is dynamic...
		return buildRelationshipProperty(entity, relationship, encoder);
	}

	/**
	 * Returns the (cached) ID property for the given entity; cache key is the entity name.
	 */
	@Override
	public EntityProperty getIdProperty(ResourceEntity<?> entity) {
		String key = entity.getLrEntity().getName();

		EntityProperty property = idPropertiesByEntity.get(key);
		if (property == null) {
			// FIX: same race-safe putIfAbsent caching as getAttributeProperty.
			property = buildIdProperty(entity);
			EntityProperty existing = idPropertiesByEntity.putIfAbsent(key, property);
			if (existing != null) {
				property = existing;
			}
		}

		return property;
	}

	// Persistent relationships on Cayenne DataObjects read via the DataObject API;
	// everything else falls back to generic bean-style access.
	protected EntityProperty buildRelationshipProperty(LrEntity<?> entity, LrRelationship relationship,
			Encoder encoder) {
		boolean persistent = relationship instanceof LrPersistentRelationship;
		if (persistent && DataObject.class.isAssignableFrom(entity.getType())) {
			return PropertyBuilder.dataObjectProperty().encodedWith(encoder);
		} else {
			return PropertyBuilder.property().encodedWith(encoder);
		}
	}

	// Picks an encoder from the attribute's Java type plus (for persistent attributes)
	// its JDBC type; non-persistent attributes get Integer.MIN_VALUE as a "no JDBC type" marker.
	protected EntityProperty buildAttributeProperty(LrEntity<?> entity, LrAttribute attribute) {
		boolean persistent = attribute instanceof LrPersistentAttribute;
		int jdbcType = persistent ? ((LrPersistentAttribute) attribute).getJdbcType() : Integer.MIN_VALUE;
		Encoder encoder = buildEncoder(attribute.getType(), jdbcType);

		if (persistent && DataObject.class.isAssignableFrom(entity.getType())) {
			return PropertyBuilder.dataObjectProperty().encodedWith(encoder);
		} else {
			return PropertyBuilder.property().encodedWith(encoder);
		}
	}

	// Builds the ID property: a single- or multi-value IdEncoder for Cayenne entities,
	// a plain bean property (or a no-op for ID-less types) for POJOs.
	protected EntityProperty buildIdProperty(ResourceEntity<?> entity) {

		Collection<LrAttribute> ids = entity.getLrEntity().getIds();

		if (entity.getLrEntity() instanceof CayenneLrEntity) {

			// Cayenne object - PK is an ObjectId (even if it is also a
			// meaningful object property)

			if (ids.size() > 1) {
				// keeping attribute encoders in alphabetical order
				Map<String, Encoder> valueEncoders = new TreeMap<>();
				for (LrAttribute id : ids) {
					Encoder valueEncoder = buildEncoder(id.getType(), getJdbcType(id));
					valueEncoders.put(id.getName(), valueEncoder);
				}

				return PropertyBuilder.property(getOrCreateIdPropertyReader(entity.getLrEntity()))
						.encodedWith(new IdEncoder(valueEncoders));
			} else {
				// NOTE(review): assumes a Cayenne entity always has at least one ID;
				// an empty collection here would throw NoSuchElementException.
				LrAttribute id = ids.iterator().next();
				Encoder valueEncoder = buildEncoder(id.getType(), getJdbcType(id));

				return PropertyBuilder.property(getOrCreateIdPropertyReader(entity.getLrEntity()))
						.encodedWith(new IdEncoder(valueEncoder));
			}
		} else {

			// POJO - PK is an object property

			if (ids.isEmpty()) {
				// use fake ID encoder
				return PropertyBuilder.doNothingProperty();
			}

			// TODO: multi-attribute ID?

			LrAttribute id = ids.iterator().next();
			return PropertyBuilder.property(BeanPropertyReader.reader(id.getName()));
		}
	}

	// JDBC type of a persistent attribute; Integer.MIN_VALUE means "not persistent / unknown".
	private int getJdbcType(LrAttribute attribute) {
		if (attribute instanceof LrPersistentAttribute) {
			return ((LrPersistentAttribute) attribute).getJdbcType();
		} else {
			return Integer.MIN_VALUE;
		}
	}

	// Race-safe lazy creation of the per-entity ID reader.
	private IdPropertyReader getOrCreateIdPropertyReader(LrEntity<?> entity) {

		IdPropertyReader reader = idPropertyReaders.get(entity);
		if (reader == null) {
			reader = new IdPropertyReader(entity);
			IdPropertyReader oldReader = idPropertyReaders.putIfAbsent(entity, reader);
			reader = (oldReader == null) ? reader : oldReader;
		}

		return reader;
	}

	/**
	 * Maps a Java type (plus optional JDBC type for java.util.Date) to a value encoder.
	 *
	 * @since 1.12
	 */
	protected Encoder buildEncoder(Class<?> javaType, int jdbcType) {

		if (UTIL_DATE.equals(javaType)) {
			// java.util.Date is ambiguous; disambiguate by the column's JDBC type
			if (jdbcType == Types.DATE) {
				return ISODateEncoder.encoder();
			}
			if (jdbcType == Types.TIME) {
				return ISOTimeEncoder.encoder();
			} else {
				// JDBC TIMESTAMP or something entirely unrecognized
				return ISODateTimeEncoder.encoder();
			}
		}
		// less common cases of mapping to java.sql.* types...
		else if (SQL_TIMESTAMP.equals(javaType)) {
			return ISODateTimeEncoder.encoder();
		} else if (SQL_DATE.equals(javaType)) {
			return ISODateEncoder.encoder();
		} else if (SQL_TIME.equals(javaType)) {
			return ISOTimeEncoder.encoder();
		}

		return GenericEncoder.encoder();
	}
}
/*
 * Copyright 2017 Lookout, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.netflix.spinnaker.clouddriver.ecs.cache;

import static com.netflix.spinnaker.clouddriver.core.provider.agent.Namespace.HEALTH;
import static com.netflix.spinnaker.clouddriver.ecs.EcsCloudProvider.ID;

import com.google.common.base.CaseFormat;
import com.netflix.spinnaker.clouddriver.cache.KeyParser;
import java.util.HashMap;
import java.util.Map;

/**
 * Builds and parses ECS cache keys of the form
 * {@code ecs;<type>;<account>;<region>;<identifier>} (some namespaces use fewer segments).
 */
public class Keys implements KeyParser {

  public enum Namespace {
    APPLICATIONS,
    IAM_ROLE,
    SERVICES,
    ECS_CLUSTERS,
    TASKS,
    CONTAINER_INSTANCES,
    TASK_DEFINITIONS,
    ALARMS,
    SCALABLE_TARGETS,
    SECRETS,
    SERVICE_DISCOVERY_REGISTRIES,
    TARGET_HEALTHS;

    // lowerCamel form of the constant name; used as the key's type segment.
    public final String ns;

    Namespace() {
      ns = CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.LOWER_CAMEL, this.name());
    }

    public String toString() {
      return ns;
    }
  }

  public static final String SEPARATOR = ";";

  @Override
  public String getCloudProvider() {
    return ID;
  }

  @Override
  public Map<String, String> parseKey(String key) {
    return parse(key);
  }

  @Override
  public Boolean canParseType(String type) {
    return canParse(type);
  }

  // True when the type segment matches one of this provider's namespaces.
  private static Boolean canParse(String type) {
    for (Namespace key : Namespace.values()) {
      if (key.toString().equals(type)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Splits a cache key into its named components.
   *
   * @return the component map, or {@code null} when the key is not a well-formed key of
   *     this provider (wrong provider prefix, too few segments, or an unknown type).
   */
  public static Map<String, String> parse(String key) {
    String[] parts = key.split(SEPARATOR);

    if (parts.length < 3 || !parts[0].equals(ID)) {
      return null;
    }

    Map<String, String> result = new HashMap<>();
    result.put("provider", parts[0]);
    result.put("type", parts[1]);

    // Health keys use the shared core namespace rather than an ECS one.
    if (parts[1].equals(HEALTH.getNs())) {
      // FIX: short health keys used to throw ArrayIndexOutOfBoundsException; treat
      // them like any other malformed key.
      if (parts.length < 5) {
        return null;
      }
      result.put("account", parts[2]);
      result.put("region", parts[3]);
      result.put("taskId", parts[4]);
      return result;
    }

    Namespace namespace;
    try {
      namespace = Namespace.valueOf(CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, parts[1]));
    } catch (IllegalArgumentException e) {
      // FIX: an unknown type segment used to propagate IllegalArgumentException from
      // valueOf; return null instead, consistent with the other invalid-key paths.
      return null;
    }

    switch (namespace) {
      case APPLICATIONS:
        result.put("application", parts[2]);
        break;
      case SERVICES:
        result.put("account", parts[2]);
        result.put("region", parts[3]);
        result.put("serviceName", parts[4]);
        break;
      case ECS_CLUSTERS:
        result.put("account", parts[2]);
        result.put("region", parts[3]);
        result.put("clusterName", parts[4]);
        break;
      case TASKS:
        result.put("account", parts[2]);
        result.put("region", parts[3]);
        result.put("taskId", parts[4]);
        break;
      case CONTAINER_INSTANCES:
        result.put("account", parts[2]);
        result.put("region", parts[3]);
        result.put("containerInstanceArn", parts[4]);
        break;
      case TASK_DEFINITIONS:
        result.put("account", parts[2]);
        result.put("region", parts[3]);
        result.put("taskDefinitionArn", parts[4]);
        break;
      case ALARMS:
        result.put("account", parts[2]);
        result.put("region", parts[3]);
        result.put("alarmArn", parts[4]);
        break;
      case IAM_ROLE:
        // IAM role keys carry no region segment.
        result.put("account", parts[2]);
        result.put("roleName", parts[3]);
        break;
      case SECRETS:
        result.put("account", parts[2]);
        result.put("region", parts[3]);
        result.put("secretName", parts[4]);
        break;
      case SERVICE_DISCOVERY_REGISTRIES:
        result.put("account", parts[2]);
        result.put("region", parts[3]);
        result.put("serviceId", parts[4]);
        break;
      case SCALABLE_TARGETS:
        result.put("account", parts[2]);
        result.put("region", parts[3]);
        result.put("resource", parts[4]);
        break;
      case TARGET_HEALTHS:
        result.put("account", parts[2]);
        result.put("region", parts[3]);
        result.put("targetGroupArn", parts[4]);
        break;
      default:
        break;
    }

    return result;
  }

  @Override
  public Boolean canParseField(String type) {
    return false;
  }

  public static String getServiceKey(String account, String region, String serviceName) {
    return buildKey(Namespace.SERVICES.ns, account, region, serviceName);
  }

  public static String getClusterKey(String account, String region, String clusterName) {
    return buildKey(Namespace.ECS_CLUSTERS.ns, account, region, clusterName);
  }

  // Application names are normalized to lower case in the key.
  public static String getApplicationKey(String name) {
    return ID + SEPARATOR + Namespace.APPLICATIONS + SEPARATOR + name.toLowerCase();
  }

  public static String getTaskKey(String account, String region, String taskId) {
    return buildKey(Namespace.TASKS.ns, account, region, taskId);
  }

  // Uses the shared core HEALTH namespace, not an ECS-local one.
  public static String getTaskHealthKey(String account, String region, String taskId) {
    return buildKey(HEALTH.getNs(), account, region, taskId);
  }

  public static String getTargetHealthKey(String account, String region, String targetGroupArn) {
    return buildKey(Namespace.TARGET_HEALTHS.ns, account, region, targetGroupArn);
  }

  public static String getContainerInstanceKey(
      String account, String region, String containerInstanceArn) {
    return buildKey(Namespace.CONTAINER_INSTANCES.ns, account, region, containerInstanceArn);
  }

  public static String getTaskDefinitionKey(
      String account, String region, String taskDefinitionArn) {
    return buildKey(Namespace.TASK_DEFINITIONS.ns, account, region, taskDefinitionArn);
  }

  public static String getAlarmKey(String account, String region, String alarmArn) {
    return buildKey(Namespace.ALARMS.ns, account, region, alarmArn);
  }

  public static String getScalableTargetKey(String account, String region, String resourceId) {
    return buildKey(Namespace.SCALABLE_TARGETS.ns, account, region, resourceId);
  }

  // Four segments only: no region for IAM roles.
  public static String getIamRoleKey(String account, String iamRoleName) {
    return ID + SEPARATOR + Namespace.IAM_ROLE + SEPARATOR + account + SEPARATOR + iamRoleName;
  }

  public static String getSecretKey(String account, String region, String secretName) {
    return buildKey(Namespace.SECRETS.ns, account, region, secretName);
  }

  public static String getServiceDiscoveryRegistryKey(
      String account, String region, String registryId) {
    return buildKey(Namespace.SERVICE_DISCOVERY_REGISTRIES.ns, account, region, registryId);
  }

  // Canonical five-segment key: provider;type;account;region;identifier
  private static String buildKey(
      String namespace, String account, String region, String identifier) {
    return ID + SEPARATOR + namespace + SEPARATOR + account + SEPARATOR + region + SEPARATOR + identifier;
  }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.security.cli;

import joptsimple.OptionSet;

import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;

import org.bouncycastle.asn1.DERIA5String;
import org.bouncycastle.asn1.DEROctetString;
import org.bouncycastle.asn1.DLSequence;
import org.bouncycastle.asn1.pkcs.Attribute;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.x509.Extension;
import org.bouncycastle.asn1.x509.Extensions;
import org.bouncycastle.asn1.x509.GeneralName;
import org.bouncycastle.asn1.x509.GeneralNames;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;
import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequest;
import org.bouncycastle.util.io.pem.PemObject;
import org.bouncycastle.util.io.pem.PemReader;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.common.CheckedBiFunction;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.ssl.PemUtils;
import org.elasticsearch.core.CheckedFunction;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ssl.CertParsingUtils;
import org.elasticsearch.xpack.security.cli.HttpCertificateCommand.FileType;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.BeforeClass;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.Signature;
import java.security.cert.Certificate;
import java.security.cert.CertificateParsingException;
import java.security.cert.X509Certificate;
import java.security.interfaces.RSAKey;
import java.time.Instant;
import java.time.Period;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.security.auth.x500.X500Principal;

import static org.elasticsearch.test.FileMatchers.isDirectory;
import static org.elasticsearch.test.FileMatchers.isRegularFile;
import static org.elasticsearch.test.FileMatchers.pathExists;
import static org.elasticsearch.xpack.security.cli.HttpCertificateCommand.guessFileType;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.in;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.oneOf;

/**
 * Tests for the interactive {@link HttpCertificateCommand}.
 * <p>
 * Each test drives the command through a scripted {@link MockTerminal} session (answers are queued
 * in exactly the order the command prompts for them) and then unpacks the generated zip file to
 * verify its contents: CSRs / certificates / keys, README files and sample yml snippets.
 * All file I/O happens on an in-memory Jimfs file system, so nothing touches the real disk.
 */
public class HttpCertificateCommandTests extends ESTestCase {
    private static final String CA_PASSWORD = "ca-password";

    // In-memory (Jimfs) file system; recreated per test by createTestDir().
    private FileSystem jimfs;
    private Path testRoot;

    @Before
    public void createTestDir() throws Exception {
        // "posix" attribute view is required because the command sets file permissions.
        Configuration conf = Configuration.unix().toBuilder().setAttributeViews("posix").build();
        jimfs = Jimfs.newFileSystem(conf);
        testRoot = jimfs.getPath(getClass().getSimpleName() + "-" + getTestName());
        IOUtils.rm(testRoot);
        Files.createDirectories(testRoot);
    }

    @BeforeClass
    public static void muteInFips() {
        assumeFalse("Can't run in a FIPS JVM", inFipsJvm());
    }

    /**
     * CSR mode: answer "yes" to the CSR question and verify that the zip contains a CSR + private
     * key (optionally encrypted), README and sample yml — but no CA material.
     */
    public void testGenerateSingleCertificateSigningRequest() throws Exception {
        final Path outFile = testRoot.resolve("csr.zip").toAbsolutePath();

        final List<String> hostNames = randomHostNames();
        final List<String> ipAddresses = randomIpAddresses();
        final String certificateName = hostNames.get(0);

        final HttpCertificateCommand command = new PathAwareHttpCertificateCommand(outFile);

        final MockTerminal terminal = new MockTerminal();

        terminal.addTextInput("y"); // generate CSR
        terminal.addTextInput(randomBoolean() ? "n" : ""); // cert-per-node

        // enter hostnames
        hostNames.forEach(terminal::addTextInput);
        terminal.addTextInput(""); // end-of-hosts
        terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct

        // enter ip names
        ipAddresses.forEach(terminal::addTextInput);
        terminal.addTextInput(""); // end-of-ips
        terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct

        terminal.addTextInput(randomBoolean() ? "n" : ""); // don't change advanced settings

        final String password = randomPassword(false);
        terminal.addSecretInput(password);
        if ("".equals(password) == false) {
            terminal.addSecretInput(password); // confirm
        }

        terminal.addTextInput(outFile.toString());

        final Environment env = newEnvironment();
        final OptionSet options = command.getParser().parse(new String[0]);
        command.execute(terminal, options, env);

        Path zipRoot = getZipRoot(outFile);

        assertThat(zipRoot.resolve("elasticsearch"), isDirectory());

        final Path csrPath = zipRoot.resolve("elasticsearch/http-" + certificateName + ".csr");
        final PKCS10CertificationRequest csr = readPemObject(csrPath, "CERTIFICATE REQUEST", PKCS10CertificationRequest::new);

        final Path keyPath = zipRoot.resolve("elasticsearch/http-" + certificateName + ".key");
        final AtomicBoolean wasEncrypted = new AtomicBoolean(false);
        final PrivateKey privateKey = PemUtils.readPrivateKey(keyPath, () -> {
            wasEncrypted.set(true);
            return password.toCharArray();
        });
        if ("".equals(password) == false) {
            assertTrue("Password should have been required to decrypt key", wasEncrypted.get());
        }

        final Path esReadmePath = zipRoot.resolve("elasticsearch/README.txt");
        assertThat(esReadmePath, isRegularFile());
        final String esReadme = Files.readString(esReadmePath);

        final Path ymlPath = zipRoot.resolve("elasticsearch/sample-elasticsearch.yml");
        assertThat(ymlPath, isRegularFile());
        final String yml = Files.readString(ymlPath);

        // Verify the CSR was built correctly
        verifyCertificationRequest(csr, certificateName, hostNames, ipAddresses);

        // Verify the key
        assertMatchingPair(getPublicKey(csr), privateKey);

        final String csrName = csrPath.getFileName().toString();
        final String crtName = csrName.substring(0, csrName.length() - 4) + ".crt";

        // Verify the README
        assertThat(esReadme, containsString(csrName));
        assertThat(esReadme, containsString(crtName));
        assertThat(esReadme, containsString(keyPath.getFileName().toString()));
        assertThat(esReadme, containsString(ymlPath.getFileName().toString()));
        if ("".equals(password) == false) {
            assertThat(esReadme, not(containsString(password)));
        }

        // Verify the yml
        assertThat(yml, not(containsString(csrName)));
        assertThat(yml, containsString(crtName));
        assertThat(yml, containsString(keyPath.getFileName().toString()));
        if ("".equals(password) == false) {
            assertThat(yml, not(containsString(password)));
        }

        // Should not be a CA directory in CSR mode
        assertThat(zipRoot.resolve("ca"), not(pathExists()));

        // No CA in CSR mode
        verifyKibanaDirectory(
            zipRoot,
            false,
            List.of("Certificate Signing Request"),
            Stream.of(password, csrName).filter(s -> "".equals(s) == false).collect(Collectors.toList())
        );
    }

    /**
     * Signed-certificate mode using a pre-existing CA (loaded from test fixtures as PEM cert+key or
     * a PKCS#12 bundle, in random order). Verifies the generated http.p12, its chain to the CA, the
     * README/yml output, and the >50 char password OpenSSL warning behaviour.
     */
    public void testGenerateSingleCertificateWithExistingCA() throws Exception {
        final Path outFile = testRoot.resolve("certs.zip").toAbsolutePath();

        final List<String> hostNames = randomHostNames();
        final List<String> ipAddresses = randomIpAddresses();
        final String certificateName = hostNames.get(0);

        final Path caCertPath = getDataPath("ca.crt");
        assertThat(caCertPath, isRegularFile());
        final Path caKeyPath = getDataPath("ca.key");
        assertThat(caKeyPath, isRegularFile());
        final String caPassword = CA_PASSWORD;

        final int years = randomIntBetween(1, 8);

        final HttpCertificateCommand command = new PathAwareHttpCertificateCommand(outFile);

        final MockTerminal terminal = new MockTerminal();

        terminal.addTextInput(randomBoolean() ? "n" : ""); // don't generate CSR
        terminal.addTextInput("y"); // existing CA

        // randomise between cert+key, key+cert, PKCS12 : the tool is smart enough to handle any of those.
        switch (randomFrom(FileType.PEM_CERT, FileType.PEM_KEY, FileType.PKCS12)) {
            case PEM_CERT:
                terminal.addTextInput(caCertPath.toAbsolutePath().toString());
                terminal.addTextInput(caKeyPath.toAbsolutePath().toString());
                break;
            case PEM_KEY:
                terminal.addTextInput(caKeyPath.toAbsolutePath().toString());
                terminal.addTextInput(caCertPath.toAbsolutePath().toString());
                break;
            case PKCS12:
                terminal.addTextInput(getDataPath("ca.p12").toAbsolutePath().toString());
                break;
        }
        terminal.addSecretInput(caPassword);
        terminal.addTextInput(years + "y"); // validity period

        terminal.addTextInput(randomBoolean() ? "n" : ""); // don't use cert-per-node

        // enter hostnames
        hostNames.forEach(terminal::addTextInput);
        terminal.addTextInput(""); // end-of-hosts
        terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct

        // enter ip names
        ipAddresses.forEach(terminal::addTextInput);
        terminal.addTextInput(""); // end-of-ips
        terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct

        terminal.addTextInput(randomBoolean() ? "n" : ""); // don't change advanced settings

        final String password = randomPassword(randomBoolean());
        terminal.addSecretInput(password);
        if ("".equals(password) == false) {
            terminal.addSecretInput(password);
            if (password.length() > 50) {
                terminal.addTextInput("y"); // Accept OpenSSL issue
            }
        } // confirm

        terminal.addTextInput(outFile.toString());

        final Environment env = newEnvironment();
        final OptionSet options = command.getParser().parse(new String[0]);

        command.execute(terminal, options, env);

        // Passwords longer than 50 characters trigger an OpenSSL-compatibility warning.
        if (password.length() > 50) {
            assertThat(terminal.getOutput(), containsString("OpenSSL"));
        } else {
            assertThat(terminal.getOutput(), not(containsString("OpenSSL")));
        }

        Path zipRoot = getZipRoot(outFile);

        assertThat(zipRoot.resolve("elasticsearch"), isDirectory());

        final Path p12Path = zipRoot.resolve("elasticsearch/http.p12");

        final Path readmePath = zipRoot.resolve("elasticsearch/README.txt");
        assertThat(readmePath, isRegularFile());
        final String readme = Files.readString(readmePath);

        final Path ymlPath = zipRoot.resolve("elasticsearch/sample-elasticsearch.yml");
        assertThat(ymlPath, isRegularFile());
        final String yml = Files.readString(ymlPath);

        final Tuple<X509Certificate, PrivateKey> certAndKey = readCertificateAndKey(p12Path, password.toCharArray());

        // Verify the Cert was built correctly
        verifyCertificate(certAndKey.v1(), certificateName, years, hostNames, ipAddresses);
        assertThat(getRSAKeySize(certAndKey.v1().getPublicKey()), is(HttpCertificateCommand.DEFAULT_CERT_KEY_SIZE));
        assertThat(getRSAKeySize(certAndKey.v2()), is(HttpCertificateCommand.DEFAULT_CERT_KEY_SIZE));

        final X509Certificate caCert = CertParsingUtils.readX509Certificate(caCertPath);
        verifyChain(certAndKey.v1(), caCert);

        // Verify the README
        assertThat(readme, containsString(p12Path.getFileName().toString()));
        assertThat(readme, containsString(ymlPath.getFileName().toString()));
        if ("".equals(password) == false) {
            assertThat(readme, not(containsString(password)));
        }
        assertThat(readme, not(containsString(caPassword)));

        // Verify the yml
        assertThat(yml, containsString(p12Path.getFileName().toString()));
        if ("".equals(password) == false) {
            assertThat(yml, not(containsString(password)));
        }
        assertThat(yml, not(containsString(caPassword)));

        // Should not be a CA directory when using an existing CA.
        assertThat(zipRoot.resolve("ca"), not(pathExists()));

        verifyKibanaDirectory(
            zipRoot,
            true,
            List.of("2. elasticsearch-ca.pem"),
            Stream.of(password, caPassword, caKeyPath.getFileName().toString())
                .filter(s -> "".equals(s) == false)
                .collect(Collectors.toList())
        );
    }

    /**
     * Multi-node mode with a newly generated CA: randomly customises the CA defaults, exercises the
     * "retry on mismatched password" and "long password" interactions, then verifies the CA bundle
     * and one signed cert (plus README/yml) per node.
     */
    public void testGenerateMultipleCertificateWithNewCA() throws Exception {
        final Path outFile = testRoot.resolve("certs.zip").toAbsolutePath();

        final int numberCerts = randomIntBetween(3, 6);
        final String[] certNames = new String[numberCerts];
        final String[] hostNames = new String[numberCerts];
        for (int i = 0; i < numberCerts; i++) {
            certNames[i] = randomAlphaOfLengthBetween(6, 12);
            hostNames[i] = randomAlphaOfLengthBetween(4, 8);
        }

        final HttpCertificateCommand command = new PathAwareHttpCertificateCommand(outFile);

        final MockTerminal terminal = new MockTerminal();

        terminal.addTextInput(randomBoolean() ? "n" : ""); // don't generate CSR
        terminal.addTextInput(randomBoolean() ? "n" : ""); // no existing CA

        final String caDN;
        final int caYears;
        final int caKeySize;
        // randomise whether to change CA defaults.
        if (randomBoolean()) {
            terminal.addTextInput("y"); // Change defaults
            caDN = "CN=" + randomAlphaOfLengthBetween(3, 8);
            caYears = randomIntBetween(1, 3);
            caKeySize = randomFrom(2048, 3072, 4096);
            terminal.addTextInput(caDN);
            terminal.addTextInput(caYears + "y");
            terminal.addTextInput(Integer.toString(caKeySize));
            terminal.addTextInput("n"); // Don't change values
        } else {
            terminal.addTextInput(randomBoolean() ? "n" : ""); // Don't change defaults
            caDN = HttpCertificateCommand.DEFAULT_CA_NAME.toString();
            caYears = HttpCertificateCommand.DEFAULT_CA_VALIDITY.getYears();
            caKeySize = HttpCertificateCommand.DEFAULT_CA_KEY_SIZE;
        }

        final String caPassword = randomPassword(randomBoolean());
        boolean expectLongPasswordWarning = caPassword.length() > 50;
        // randomly enter a long password here, and then say "no" on the warning prompt
        if (randomBoolean()) {
            String longPassword = randomAlphaOfLengthBetween(60, 120);
            terminal.addSecretInput(longPassword);
            terminal.addSecretInput(longPassword);
            terminal.addTextInput("n"); // Change our mind
            expectLongPasswordWarning = true;
        }
        terminal.addSecretInput(caPassword);
        if ("".equals(caPassword) == false) {
            terminal.addSecretInput(caPassword);
            if (caPassword.length() > 50) {
                terminal.addTextInput("y"); // Acknowledge possible OpenSSL issue
            }
        } // confirm

        final int certYears = randomIntBetween(1, 8);
        terminal.addTextInput(certYears + "y"); // node cert validity period

        terminal.addTextInput("y"); // cert-per-node

        for (int i = 0; i < numberCerts; i++) {
            if (i != 0) {
                terminal.addTextInput(randomBoolean() ? "y" : ""); // another cert
            }

            // certificate / node name
            terminal.addTextInput(certNames[i]);

            // enter hostname
            terminal.addTextInput(hostNames[i]);
            terminal.addTextInput(""); // end-of-hosts
            terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct

            // no ip
            terminal.addTextInput(""); // end-of-ip
            terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct

            terminal.addTextInput(randomBoolean() ? "n" : ""); // don't change advanced settings
        }
        terminal.addTextInput("n"); // no more certs

        final String password = randomPassword(false);
        // randomly enter an incorrect password here which will fail the "enter twice" check and prompt to try again
        if (randomBoolean()) {
            String wrongPassword = randomAlphaOfLengthBetween(8, 20);
            terminal.addSecretInput(wrongPassword);
            terminal.addSecretInput("__" + wrongPassword);
        }
        terminal.addSecretInput(password);
        if ("".equals(password) == false) {
            terminal.addSecretInput(password);
        } // confirm

        terminal.addTextInput(outFile.toString());

        final Environment env = newEnvironment();
        final OptionSet options = command.getParser().parse(new String[0]);

        command.execute(terminal, options, env);

        if (expectLongPasswordWarning) {
            assertThat(terminal.getOutput(), containsString("OpenSSL"));
        } else {
            assertThat(terminal.getOutput(), not(containsString("OpenSSL")));
        }

        Path zipRoot = getZipRoot(outFile);

        // Should have a CA directory with the generated CA.
        assertThat(zipRoot.resolve("ca"), isDirectory());
        final Path caPath = zipRoot.resolve("ca/ca.p12");
        final Tuple<X509Certificate, PrivateKey> caCertKey = readCertificateAndKey(caPath, caPassword.toCharArray());
        verifyCertificate(caCertKey.v1(), caDN.replaceFirst("CN=", ""), caYears, List.of(), List.of());
        assertThat(getRSAKeySize(caCertKey.v1().getPublicKey()), is(caKeySize));
        assertThat(getRSAKeySize(caCertKey.v2()), is(caKeySize));

        assertThat(zipRoot.resolve("elasticsearch"), isDirectory());

        for (int i = 0; i < numberCerts; i++) {
            assertThat(zipRoot.resolve("elasticsearch/" + certNames[i]), isDirectory());

            final Path p12Path = zipRoot.resolve("elasticsearch/" + certNames[i] + "/http.p12");
            assertThat(p12Path, isRegularFile());

            final Path readmePath = zipRoot.resolve("elasticsearch/" + certNames[i] + "/README.txt");
            assertThat(readmePath, isRegularFile());
            final String readme = Files.readString(readmePath);

            final Path ymlPath = zipRoot.resolve("elasticsearch/" + certNames[i] + "/sample-elasticsearch.yml");
            assertThat(ymlPath, isRegularFile());
            final String yml = Files.readString(ymlPath);

            final Tuple<X509Certificate, PrivateKey> certAndKey = readCertificateAndKey(p12Path, password.toCharArray());

            // Verify the Cert was built correctly
            verifyCertificate(certAndKey.v1(), certNames[i], certYears, List.of(hostNames[i]), List.of());
            verifyChain(certAndKey.v1(), caCertKey.v1());
            assertThat(getRSAKeySize(certAndKey.v1().getPublicKey()), is(HttpCertificateCommand.DEFAULT_CERT_KEY_SIZE));
            assertThat(getRSAKeySize(certAndKey.v2()), is(HttpCertificateCommand.DEFAULT_CERT_KEY_SIZE));

            // Verify the README
            assertThat(readme, containsString(p12Path.getFileName().toString()));
            assertThat(readme, containsString(ymlPath.getFileName().toString()));
            if ("".equals(password) == false) {
                assertThat(readme, not(containsString(password)));
            }
            if ("".equals(caPassword) == false) {
                assertThat(readme, not(containsString(caPassword)));
            }

            // Verify the yml
            assertThat(yml, containsString(p12Path.getFileName().toString()));
            if ("".equals(password) == false) {
                assertThat(yml, not(containsString(password)));
            }
            if ("".equals(caPassword) == false) {
                assertThat(yml, not(containsString(caPassword)));
            }
        }

        verifyKibanaDirectory(
            zipRoot,
            true,
            List.of("2. elasticsearch-ca.pem"),
            Stream.of(password, caPassword, caPath.getFileName().toString()).filter(s -> "".equals(s) == false).collect(Collectors.toList())
        );
    }

    /** Exercises {@code readPeriodInput}: valid formats, re-prompt on bad input, defaults, minimum days. */
    public void testParsingValidityPeriod() throws Exception {
        final HttpCertificateCommand command = new HttpCertificateCommand();
        final MockTerminal terminal = new MockTerminal();

        terminal.addTextInput("2y");
        assertThat(command.readPeriodInput(terminal, "", null, 1), is(Period.ofYears(2)));

        terminal.addTextInput("18m");
        assertThat(command.readPeriodInput(terminal, "", null, 1), is(Period.ofMonths(18)));

        terminal.addTextInput("90d");
        assertThat(command.readPeriodInput(terminal, "", null, 1), is(Period.ofDays(90)));

        terminal.addTextInput("1y, 6m");
        assertThat(command.readPeriodInput(terminal, "", null, 1), is(Period.ofYears(1).withMonths(6)));

        // Test: Re-prompt on bad input.
        terminal.addTextInput("2m & 4d");
        terminal.addTextInput("2m 4d");
        assertThat(command.readPeriodInput(terminal, "", null, 1), is(Period.ofMonths(2).withDays(4)));

        terminal.addTextInput("1y, 6m");
        assertThat(command.readPeriodInput(terminal, "", null, 1), is(Period.ofYears(1).withMonths(6)));

        // Test: Accept default value
        final Period p = Period.of(randomIntBetween(1, 5), randomIntBetween(0, 11), randomIntBetween(0, 30));
        terminal.addTextInput("");
        assertThat(command.readPeriodInput(terminal, "", p, 1), is(p));

        final int y = randomIntBetween(1, 5);
        final int m = randomIntBetween(1, 11);
        final int d = randomIntBetween(1, 30);
        terminal.addTextInput(y + "y " + m + "m " + d + "d");
        assertThat(command.readPeriodInput(terminal, "", null, 1), is(Period.of(y, m, d)));

        // Test: Minimum Days
        final int shortDays = randomIntBetween(1, 20);

        terminal.addTextInput(shortDays + "d");
        terminal.addTextInput("y"); // I'm sure
        assertThat(command.readPeriodInput(terminal, "", null, 21), is(Period.ofDays(shortDays)));

        terminal.addTextInput(shortDays + "d");
        terminal.addTextInput("n"); // I'm not sure
        terminal.addTextInput("30d");
        assertThat(command.readPeriodInput(terminal, "", null, 21), is(Period.ofDays(30)));

        terminal.addTextInput("2m");
        terminal.addTextInput("n"); // I'm not sure
        terminal.addTextInput("2y");
        assertThat(command.readPeriodInput(terminal, "", null, 90), is(Period.ofYears(2)));
    }

    /** Verifies the compact string rendering of validity periods (e.g. {@code 2y}, {@code 1y,6m,3d}). */
    public void testValidityPeriodToString() throws Exception {
        assertThat(HttpCertificateCommand.toString(Period.ofYears(2)), is("2y"));
        assertThat(HttpCertificateCommand.toString(Period.ofMonths(5)), is("5m"));
        assertThat(HttpCertificateCommand.toString(Period.ofDays(60)), is("60d"));
        assertThat(HttpCertificateCommand.toString(Period.ZERO), is("0d"));
        assertThat(HttpCertificateCommand.toString(null), is("N/A"));

        final int y = randomIntBetween(1, 5);
        final int m = randomIntBetween(1, 11);
        final int d = randomIntBetween(1, 30);
        assertThat(HttpCertificateCommand.toString(Period.of(y, m, d)), is(y + "y," + m + "m," + d + "d"));
    }

    /**
     * File type detection: PEM cert/key/chain by content, keystores by extension, and keystores
     * with a neutral extension by probing the actual store format.
     */
    public void testGuessFileType() throws Exception {
        MockTerminal terminal = new MockTerminal();

        final Path caCert = getDataPath("ca.crt");
        final Path caKey = getDataPath("ca.key");
        assertThat(guessFileType(caCert, terminal), is(FileType.PEM_CERT));
        assertThat(guessFileType(caKey, terminal), is(FileType.PEM_KEY));

        final Path certChain = testRoot.resolve("ca.pem");
        try (OutputStream out = Files.newOutputStream(certChain)) {
            Files.copy(getDataPath("testnode.crt"), out);
            Files.copy(caCert, out);
        }
        assertThat(guessFileType(certChain, terminal), is(FileType.PEM_CERT_CHAIN));

        // Non-existent paths are classified purely by extension.
        final Path tmpP12 = testRoot.resolve("tmp.p12");
        assertThat(guessFileType(tmpP12, terminal), is(FileType.PKCS12));
        final Path tmpJks = testRoot.resolve("tmp.jks");
        assertThat(guessFileType(tmpJks, terminal), is(FileType.JKS));

        // A ".keystore" file is probed for its real format.
        final Path tmpKeystore = testRoot.resolve("tmp.keystore");
        writeDummyKeystore(tmpKeystore, "PKCS12");
        assertThat(guessFileType(tmpKeystore, terminal), is(FileType.PKCS12));
        writeDummyKeystore(tmpKeystore, "jks");
        assertThat(guessFileType(tmpKeystore, terminal), is(FileType.JKS));
    }

    /** Template engine: ${var} substitution and #if / #else / #endif conditional sections. */
    public void testTextFileSubstitutions() throws Exception {
        CheckedBiFunction<String, Map<String, String>, String, Exception> copy = (source, subs) -> {
            try (
                InputStream in = new ByteArrayInputStream(source.getBytes(StandardCharsets.UTF_8));
                StringWriter out = new StringWriter();
                PrintWriter writer = new PrintWriter(out)
            ) {
                HttpCertificateCommand.copyWithSubstitutions(in, writer, subs);
                // normalise line endings so the assertions are platform independent
                return out.toString().replace("\r\n", "\n");
            }
        };
        assertThat(copy.apply("abc\n", Map.of()), is("abc\n"));
        assertThat(copy.apply("${not_a_var}\n", Map.of()), is("${not_a_var}\n"));
        assertThat(copy.apply("${var}\n", Map.of("var", "xyz")), is("xyz\n"));
        assertThat(copy.apply("#if not\nbody\n#endif\n", Map.of()), is(""));
        assertThat(copy.apply("#if blank\nbody\n#endif\n", Map.of("blank", "")), is(""));
        assertThat(copy.apply("#if yes\nbody\n#endif\n", Map.of("yes", "true")), is("body\n"));
        assertThat(copy.apply("#if yes\ntrue\n#else\nfalse\n#endif\n", Map.of("yes", "*")), is("true\n"));
        assertThat(copy.apply("#if blank\ntrue\n#else\nfalse\n#endif\n", Map.of("blank", "")), is("false\n"));
        assertThat(copy.apply("#if var\n--> ${var} <--\n#else\n(${var})\n#endif\n", Map.of("var", "foo")), is("--> foo <--\n"));
    }

    /**
     * Opens {@code outFile} as a zip file system and returns its root path.
     * NOTE: the returned {@link Path}s are only valid while the file system is open, so it is
     * deliberately left open for the remainder of the test; the in-memory Jimfs store (and
     * everything on it) is discarded when the test ends.
     */
    private Path getZipRoot(Path outFile) throws IOException, URISyntaxException {
        assertThat(outFile, isRegularFile());

        FileSystem fileSystem = FileSystems.newFileSystem(new URI("jar:" + outFile.toUri()), Collections.emptyMap());
        return fileSystem.getPath("/");
    }

    /** @return 0-3 random IPv4 addresses in string form. */
    private List<String> randomIpAddresses() throws UnknownHostException {
        final int ipCount = randomIntBetween(0, 3);
        final List<String> ipAddresses = new ArrayList<>(ipCount);
        for (int i = 0; i < ipCount; i++) {
            String ip = randomIpAddress();
            ipAddresses.add(ip);
        }
        return ipAddresses;
    }

    private String randomIpAddress() throws UnknownHostException {
        return formatIpAddress(randomByteArrayOfLength(4));
    }

    private String formatIpAddress(byte[] addr) throws UnknownHostException {
        return NetworkAddress.format(InetAddress.getByAddress(addr));
    }

    /** @return 1-5 random dotted host names; entries after the first may be wildcards ({@code *.foo}). */
    private List<String> randomHostNames() {
        final int hostCount = randomIntBetween(1, 5);
        final List<String> hostNames = new ArrayList<>(hostCount);
        for (int i = 0; i < hostCount; i++) {
            String host = String.join(".", randomArray(1, 4, String[]::new, () -> randomAlphaOfLengthBetween(3, 8)));
            if (i > 0 && randomBoolean()) {
                host = "*." + host;
            }
            hostNames.add(host);
        }
        return hostNames;
    }

    private String randomPassword(boolean longPassword) {
        // We want to assert that this password doesn't end up in any output files, so we need to make sure we
        // don't randomly generate a real word.
        return randomFrom(
            "",
            randomAlphaOfLengthBetween(4, 8) + randomFrom('~', '*', '%', '$', '|') + randomAlphaOfLength(longPassword ? 100 : 4)
        );
    }

    /** Asserts the CSR's subject DN and its subject-alternative-name extension match the requested hosts/IPs. */
    private void verifyCertificationRequest(
        PKCS10CertificationRequest csr,
        String certificateName,
        List<String> hostNames,
        List<String> ipAddresses
    ) throws IOException {
        // We rebuild the DN from the encoding because BC uses openSSL style toString, but we use LDAP style.
        assertThat(new X500Principal(csr.getSubject().getEncoded()).toString(), is("CN=" + certificateName.replaceAll("\\.", ", DC=")));

        final Attribute[] extensionAttributes = csr.getAttributes(PKCSObjectIdentifiers.pkcs_9_at_extensionRequest);
        assertThat(extensionAttributes, arrayWithSize(1));
        assertThat(extensionAttributes[0].getAttributeValues(), arrayWithSize(1));
        assertThat(extensionAttributes[0].getAttributeValues()[0], instanceOf(DLSequence.class));

        // We register 1 extension - the subject alternative names
        final Extensions extensions = Extensions.getInstance(extensionAttributes[0].getAttributeValues()[0]);
        assertThat(extensions, notNullValue());
        final GeneralNames names = GeneralNames.fromExtensions(extensions, Extension.subjectAlternativeName);
        assertThat(names.getNames(), arrayWithSize(hostNames.size() + ipAddresses.size()));
        for (GeneralName name : names.getNames()) {
            assertThat(name.getTagNo(), oneOf(GeneralName.dNSName, GeneralName.iPAddress));
            if (name.getTagNo() == GeneralName.dNSName) {
                final String dns = DERIA5String.getInstance(name.getName()).getString();
                assertThat(dns, in(hostNames));
            } else if (name.getTagNo() == GeneralName.iPAddress) {
                final String ip = formatIpAddress(DEROctetString.getInstance(name.getName()).getOctets());
                assertThat(ip, in(ipAddresses));
            }
        }
    }

    /** Asserts the certificate's subject, SANs and validity window match what was requested. */
    private void verifyCertificate(
        X509Certificate cert,
        String certificateName,
        int years,
        List<String> hostNames,
        List<String> ipAddresses
    ) throws CertificateParsingException {
        assertThat(cert.getSubjectX500Principal().toString(), is("CN=" + certificateName.replaceAll("\\.", ", DC=")));
        final Collection<List<?>> san = cert.getSubjectAlternativeNames();
        final int expectedSanEntries = hostNames.size() + ipAddresses.size();
        if (expectedSanEntries > 0) {
            assertThat(san, hasSize(expectedSanEntries));
            for (List<?> name : san) {
                assertThat(name, hasSize(2));
                assertThat(name.get(0), Matchers.instanceOf(Integer.class));
                assertThat(name.get(1), Matchers.instanceOf(String.class));
                final Integer tag = (Integer) name.get(0);
                final String value = (String) name.get(1);
                assertThat(tag, oneOf(GeneralName.dNSName, GeneralName.iPAddress));
                if (tag.intValue() == GeneralName.dNSName) {
                    assertThat(value, in(hostNames));
                } else if (tag.intValue() == GeneralName.iPAddress) {
                    assertThat(value, in(ipAddresses));
                }
            }
        } else if (san != null) {
            assertThat(san, hasSize(0));
        }

        // We don't know exactly when the certificate was generated, but it should have been in the last 10 minutes
        long now = System.currentTimeMillis();
        long nowMinus10Minutes = now - TimeUnit.MINUTES.toMillis(10);
        assertThat(cert.getNotBefore().getTime(), Matchers.lessThanOrEqualTo(now));
        assertThat(cert.getNotBefore().getTime(), Matchers.greaterThanOrEqualTo(nowMinus10Minutes));

        final ZonedDateTime expiry = Instant.ofEpochMilli(cert.getNotBefore().getTime()).atZone(ZoneOffset.UTC).plusYears(years);
        assertThat(cert.getNotAfter().getTime(), is(expiry.toInstant().toEpochMilli()));
    }

    /** Verifies each cert is signed by its successor and that the final cert is self-issued. */
    private void verifyChain(X509Certificate... chain) throws GeneralSecurityException {
        for (int i = 1; i < chain.length; i++) {
            assertThat(chain[i - 1].getIssuerX500Principal(), is(chain[i].getSubjectX500Principal()));
            chain[i - 1].verify(chain[i].getPublicKey());
        }
        final X509Certificate root = chain[chain.length - 1];
        assertThat(root.getIssuerX500Principal(), is(root.getSubjectX500Principal()));
    }

    /**
     * Checks that a public + private key are a matching pair.
     */
    private void assertMatchingPair(PublicKey publicKey, PrivateKey privateKey) throws GeneralSecurityException {
        final byte[] bytes = randomByteArrayOfLength(128);
        final Signature rsa = Signature.getInstance("SHA512withRSA");

        rsa.initSign(privateKey);
        rsa.update(bytes);
        final byte[] signature = rsa.sign();

        rsa.initVerify(publicKey);
        rsa.update(bytes);
        assertTrue("PublicKey and PrivateKey are not a matching pair", rsa.verify(signature));
    }

    /** Asserts the "kibana" directory of the generated zip: optional CA pem, README and sample yml contents. */
    private void verifyKibanaDirectory(
        Path zipRoot,
        boolean expectCAFile,
        Iterable<String> readmeShouldContain,
        Iterable<String> shouldNotContain
    ) throws IOException {
        assertThat(zipRoot.resolve("kibana"), isDirectory());
        if (expectCAFile) {
            assertThat(zipRoot.resolve("kibana/elasticsearch-ca.pem"), isRegularFile());
        } else {
            assertThat(zipRoot.resolve("kibana/elasticsearch-ca.pem"), not(pathExists()));
        }

        final Path kibanaReadmePath = zipRoot.resolve("kibana/README.txt");
        assertThat(kibanaReadmePath, isRegularFile());
        final String kibanaReadme = Files.readString(kibanaReadmePath);

        final Path kibanaYmlPath = zipRoot.resolve("kibana/sample-kibana.yml");
        assertThat(kibanaYmlPath, isRegularFile());
        final String kibanaYml = Files.readString(kibanaYmlPath);

        assertThat(kibanaReadme, containsString(kibanaYmlPath.getFileName().toString()));
        assertThat(kibanaReadme, containsString("elasticsearch.hosts"));
        assertThat(kibanaReadme, containsString("https://"));
        assertThat(kibanaReadme, containsString("elasticsearch-ca.pem"));
        readmeShouldContain.forEach(s -> assertThat(kibanaReadme, containsString(s)));
        shouldNotContain.forEach(s -> assertThat(kibanaReadme, not(containsString(s))));

        assertThat(kibanaYml, containsString("elasticsearch.ssl.certificateAuthorities: [ \"config/elasticsearch-ca.pem\" ]"));
        assertThat(kibanaYml, containsString("https://"));
        shouldNotContain.forEach(s -> assertThat(kibanaYml, not(containsString(s))));
    }

    private PublicKey getPublicKey(PKCS10CertificationRequest pkcs) throws GeneralSecurityException {
        return new JcaPKCS10CertificationRequest(pkcs).getPublicKey();
    }

    private int getRSAKeySize(Key key) {
        assertThat(key, instanceOf(RSAKey.class));
        final RSAKey rsa = (RSAKey) key;
        return rsa.getModulus().bitLength();
    }

    /**
     * Reads the single certificate + private key pair from a PKCS#12 file and asserts that the
     * two keys actually match before returning them.
     */
    private Tuple<X509Certificate, PrivateKey> readCertificateAndKey(Path pkcs12, char[] password) throws IOException,
        GeneralSecurityException {

        final Map<Certificate, Key> entries = CertParsingUtils.readPkcs12KeyPairs(pkcs12, password, alias -> password);
        assertThat(entries.entrySet(), Matchers.hasSize(1));

        Certificate cert = entries.keySet().iterator().next();
        Key key = entries.get(cert);

        assertThat(cert, instanceOf(X509Certificate.class));
        assertThat(key, instanceOf(PrivateKey.class));

        assertMatchingPair(cert.getPublicKey(), (PrivateKey) key);
        return new Tuple<>((X509Certificate) cert, (PrivateKey) key);
    }

    /**
     * Reads a single PEM object of {@code expectedType} from {@code path} and converts it via
     * {@code factory}. The reader is closed via try-with-resources (previously it leaked).
     */
    private <T> T readPemObject(Path path, String expectedType, CheckedFunction<? super byte[], T, IOException> factory)
        throws IOException {
        assertThat(path, isRegularFile());
        try (PemReader csrReader = new PemReader(Files.newBufferedReader(path))) {
            final PemObject csrPem = csrReader.readPemObject();
            assertThat(csrPem.getType(), is(expectedType));
            return factory.apply(csrPem.getContent());
        }
    }

    /** Writes a (possibly empty) keystore of the given type, used to test format probing. */
    private void writeDummyKeystore(Path path, String type) throws GeneralSecurityException, IOException {
        Files.deleteIfExists(path);
        KeyStore ks = KeyStore.getInstance(type);
        ks.load(null);
        if (randomBoolean()) {
            final X509Certificate cert = CertParsingUtils.readX509Certificate(getDataPath("ca.crt"));
            ks.setCertificateEntry(randomAlphaOfLength(4), cert);
        }
        try (OutputStream out = Files.newOutputStream(path)) {
            ks.store(out, randomAlphaOfLength(8).toCharArray());
        }
    }

    /**
     * A special version of {@link HttpCertificateCommand} that can resolve input strings back to JIMFS paths.
     * Static to avoid carrying a useless reference to the enclosing test instance.
     */
    private static class PathAwareHttpCertificateCommand extends HttpCertificateCommand {

        final Map<String, Path> paths;

        PathAwareHttpCertificateCommand(Path... configuredPaths) {
            paths = Stream.of(configuredPaths).collect(Collectors.toUnmodifiableMap(Path::toString, Function.identity()));
        }

        @Override
        protected Path resolvePath(String name) {
            return Optional.ofNullable(this.paths.get(name)).orElseGet(() -> super.resolvePath(name));
        }
    }
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2/answer_record.proto package com.google.cloud.dialogflow.v2; /** * * * <pre> * Answer records are records to manage answer history and feedbacks for * Dialogflow. * Currently, answer record includes: * - human agent assistant article suggestion * - human agent assistant faq article * It doesn't include: * - `DetectIntent` intent matching * - `DetectIntent` knowledge * Answer records are not related to the conversation history in the * Dialogflow Console. A Record is generated even when the end-user disables * conversation history in the console. Records are created when there's a human * agent assistant suggestion generated. * A typical workflow for customers provide feedback to an answer is: * 1. For human agent assistant, customers get suggestion via ListSuggestions * API. Together with the answers, [AnswerRecord.name][google.cloud.dialogflow.v2.AnswerRecord.name] are returned to the * customers. * 2. The customer uses the [AnswerRecord.name][google.cloud.dialogflow.v2.AnswerRecord.name] to call the * [UpdateAnswerRecord][] method to send feedback about a specific answer * that they believe is wrong. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.AnswerRecord} */ public final class AnswerRecord extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.AnswerRecord) AnswerRecordOrBuilder { private static final long serialVersionUID = 0L; // Use AnswerRecord.newBuilder() to construct. private AnswerRecord(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AnswerRecord() { name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new AnswerRecord(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private AnswerRecord( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); name_ = s; break; } case 18: { com.google.cloud.dialogflow.v2.AnswerFeedback.Builder subBuilder = null; if (answerFeedback_ != null) { subBuilder = answerFeedback_.toBuilder(); } answerFeedback_ = input.readMessage( com.google.cloud.dialogflow.v2.AnswerFeedback.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(answerFeedback_); answerFeedback_ = subBuilder.buildPartial(); } break; } case 34: { com.google.cloud.dialogflow.v2.AgentAssistantRecord.Builder subBuilder = null; if (recordCase_ == 4) { subBuilder = ((com.google.cloud.dialogflow.v2.AgentAssistantRecord) record_).toBuilder(); } record_ = input.readMessage( 
com.google.cloud.dialogflow.v2.AgentAssistantRecord.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom((com.google.cloud.dialogflow.v2.AgentAssistantRecord) record_); record_ = subBuilder.buildPartial(); } recordCase_ = 4; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.AnswerRecordsProto .internal_static_google_cloud_dialogflow_v2_AnswerRecord_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.AnswerRecordsProto .internal_static_google_cloud_dialogflow_v2_AnswerRecord_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.AnswerRecord.class, com.google.cloud.dialogflow.v2.AnswerRecord.Builder.class); } private int recordCase_ = 0; private java.lang.Object record_; public enum RecordCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { AGENT_ASSISTANT_RECORD(4), RECORD_NOT_SET(0); private final int value; private RecordCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static RecordCase valueOf(int value) { return forNumber(value); } public static RecordCase forNumber(int value) { switch (value) { case 4: return AGENT_ASSISTANT_RECORD; case 0: return RECORD_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public RecordCase getRecordCase() { return RecordCase.forNumber(recordCase_); } public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * * * <pre> * The unique identifier of this answer record. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/answerRecords/&lt;Answer Record ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * The unique identifier of this answer record. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/answerRecords/&lt;Answer Record ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ANSWER_FEEDBACK_FIELD_NUMBER = 2; private com.google.cloud.dialogflow.v2.AnswerFeedback answerFeedback_; /** * * * <pre> * Required. The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. 
* </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the answerFeedback field is set. */ @java.lang.Override public boolean hasAnswerFeedback() { return answerFeedback_ != null; } /** * * * <pre> * Required. The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. * </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The answerFeedback. */ @java.lang.Override public com.google.cloud.dialogflow.v2.AnswerFeedback getAnswerFeedback() { return answerFeedback_ == null ? com.google.cloud.dialogflow.v2.AnswerFeedback.getDefaultInstance() : answerFeedback_; } /** * * * <pre> * Required. The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. * </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.v2.AnswerFeedbackOrBuilder getAnswerFeedbackOrBuilder() { return getAnswerFeedback(); } public static final int AGENT_ASSISTANT_RECORD_FIELD_NUMBER = 4; /** * * * <pre> * Output only. The record for human agent assistant. * </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the agentAssistantRecord field is set. */ @java.lang.Override public boolean hasAgentAssistantRecord() { return recordCase_ == 4; } /** * * * <pre> * Output only. The record for human agent assistant. 
* </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The agentAssistantRecord. */ @java.lang.Override public com.google.cloud.dialogflow.v2.AgentAssistantRecord getAgentAssistantRecord() { if (recordCase_ == 4) { return (com.google.cloud.dialogflow.v2.AgentAssistantRecord) record_; } return com.google.cloud.dialogflow.v2.AgentAssistantRecord.getDefaultInstance(); } /** * * * <pre> * Output only. The record for human agent assistant. * </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.v2.AgentAssistantRecordOrBuilder getAgentAssistantRecordOrBuilder() { if (recordCase_ == 4) { return (com.google.cloud.dialogflow.v2.AgentAssistantRecord) record_; } return com.google.cloud.dialogflow.v2.AgentAssistantRecord.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (answerFeedback_ != null) { output.writeMessage(2, getAnswerFeedback()); } if (recordCase_ == 4) { output.writeMessage(4, (com.google.cloud.dialogflow.v2.AgentAssistantRecord) record_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (answerFeedback_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getAnswerFeedback()); } if (recordCase_ == 4) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 4, (com.google.cloud.dialogflow.v2.AgentAssistantRecord) record_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2.AnswerRecord)) { return super.equals(obj); } com.google.cloud.dialogflow.v2.AnswerRecord other = (com.google.cloud.dialogflow.v2.AnswerRecord) obj; if (!getName().equals(other.getName())) return false; if (hasAnswerFeedback() != other.hasAnswerFeedback()) return false; if (hasAnswerFeedback()) { if (!getAnswerFeedback().equals(other.getAnswerFeedback())) return false; } if (!getRecordCase().equals(other.getRecordCase())) return false; switch (recordCase_) { case 4: if (!getAgentAssistantRecord().equals(other.getAgentAssistantRecord())) return false; break; case 0: default: } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); if (hasAnswerFeedback()) { hash = (37 * hash) + ANSWER_FEEDBACK_FIELD_NUMBER; hash = (53 * hash) + getAnswerFeedback().hashCode(); } switch (recordCase_) { case 4: hash = (37 * hash) + AGENT_ASSISTANT_RECORD_FIELD_NUMBER; hash = (53 * hash) + getAgentAssistantRecord().hashCode(); break; case 0: default: } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2.AnswerRecord parseFrom(java.nio.ByteBuffer data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.AnswerRecord parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.AnswerRecord parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.AnswerRecord parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.AnswerRecord parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.AnswerRecord parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.AnswerRecord parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.AnswerRecord parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.AnswerRecord parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public 
static com.google.cloud.dialogflow.v2.AnswerRecord parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.AnswerRecord parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.AnswerRecord parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dialogflow.v2.AnswerRecord prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Answer records are records to manage answer history and feedbacks for * Dialogflow. * Currently, answer record includes: * - human agent assistant article suggestion * - human agent assistant faq article * It doesn't include: * - `DetectIntent` intent matching * - `DetectIntent` knowledge * Answer records are not related to the conversation history in the * Dialogflow Console. A Record is generated even when the end-user disables * conversation history in the console. 
Records are created when there's a human * agent assistant suggestion generated. * A typical workflow for customers provide feedback to an answer is: * 1. For human agent assistant, customers get suggestion via ListSuggestions * API. Together with the answers, [AnswerRecord.name][google.cloud.dialogflow.v2.AnswerRecord.name] are returned to the * customers. * 2. The customer uses the [AnswerRecord.name][google.cloud.dialogflow.v2.AnswerRecord.name] to call the * [UpdateAnswerRecord][] method to send feedback about a specific answer * that they believe is wrong. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.AnswerRecord} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.AnswerRecord) com.google.cloud.dialogflow.v2.AnswerRecordOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.AnswerRecordsProto .internal_static_google_cloud_dialogflow_v2_AnswerRecord_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.AnswerRecordsProto .internal_static_google_cloud_dialogflow_v2_AnswerRecord_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.AnswerRecord.class, com.google.cloud.dialogflow.v2.AnswerRecord.Builder.class); } // Construct using com.google.cloud.dialogflow.v2.AnswerRecord.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); name_ = ""; if (answerFeedbackBuilder_ == 
null) { answerFeedback_ = null; } else { answerFeedback_ = null; answerFeedbackBuilder_ = null; } recordCase_ = 0; record_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2.AnswerRecordsProto .internal_static_google_cloud_dialogflow_v2_AnswerRecord_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.v2.AnswerRecord getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2.AnswerRecord.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.v2.AnswerRecord build() { com.google.cloud.dialogflow.v2.AnswerRecord result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.v2.AnswerRecord buildPartial() { com.google.cloud.dialogflow.v2.AnswerRecord result = new com.google.cloud.dialogflow.v2.AnswerRecord(this); result.name_ = name_; if (answerFeedbackBuilder_ == null) { result.answerFeedback_ = answerFeedback_; } else { result.answerFeedback_ = answerFeedbackBuilder_.build(); } if (recordCase_ == 4) { if (agentAssistantRecordBuilder_ == null) { result.record_ = record_; } else { result.record_ = agentAssistantRecordBuilder_.build(); } } result.recordCase_ = recordCase_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, 
int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2.AnswerRecord) { return mergeFrom((com.google.cloud.dialogflow.v2.AnswerRecord) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.v2.AnswerRecord other) { if (other == com.google.cloud.dialogflow.v2.AnswerRecord.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); } if (other.hasAnswerFeedback()) { mergeAnswerFeedback(other.getAnswerFeedback()); } switch (other.getRecordCase()) { case AGENT_ASSISTANT_RECORD: { mergeAgentAssistantRecord(other.getAgentAssistantRecord()); break; } case RECORD_NOT_SET: { break; } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.dialogflow.v2.AnswerRecord parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.dialogflow.v2.AnswerRecord) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int recordCase_ = 0; private java.lang.Object record_; public RecordCase getRecordCase() { return RecordCase.forNumber(recordCase_); } public Builder clearRecord() { recordCase_ = 0; record_ = null; onChanged(); return 
this; } private java.lang.Object name_ = ""; /** * * * <pre> * The unique identifier of this answer record. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/answerRecords/&lt;Answer Record ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The unique identifier of this answer record. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/answerRecords/&lt;Answer Record ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The unique identifier of this answer record. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/answerRecords/&lt;Answer Record ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; onChanged(); return this; } /** * * * <pre> * The unique identifier of this answer record. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/answerRecords/&lt;Answer Record ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * * * <pre> * The unique identifier of this answer record. 
* Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/answerRecords/&lt;Answer Record ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; onChanged(); return this; } private com.google.cloud.dialogflow.v2.AnswerFeedback answerFeedback_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.AnswerFeedback, com.google.cloud.dialogflow.v2.AnswerFeedback.Builder, com.google.cloud.dialogflow.v2.AnswerFeedbackOrBuilder> answerFeedbackBuilder_; /** * * * <pre> * Required. The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. * </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the answerFeedback field is set. */ public boolean hasAnswerFeedback() { return answerFeedbackBuilder_ != null || answerFeedback_ != null; } /** * * * <pre> * Required. The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. * </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The answerFeedback. */ public com.google.cloud.dialogflow.v2.AnswerFeedback getAnswerFeedback() { if (answerFeedbackBuilder_ == null) { return answerFeedback_ == null ? com.google.cloud.dialogflow.v2.AnswerFeedback.getDefaultInstance() : answerFeedback_; } else { return answerFeedbackBuilder_.getMessage(); } } /** * * * <pre> * Required. 
The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. * </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setAnswerFeedback(com.google.cloud.dialogflow.v2.AnswerFeedback value) { if (answerFeedbackBuilder_ == null) { if (value == null) { throw new NullPointerException(); } answerFeedback_ = value; onChanged(); } else { answerFeedbackBuilder_.setMessage(value); } return this; } /** * * * <pre> * Required. The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. * </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setAnswerFeedback( com.google.cloud.dialogflow.v2.AnswerFeedback.Builder builderForValue) { if (answerFeedbackBuilder_ == null) { answerFeedback_ = builderForValue.build(); onChanged(); } else { answerFeedbackBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. 
* </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeAnswerFeedback(com.google.cloud.dialogflow.v2.AnswerFeedback value) { if (answerFeedbackBuilder_ == null) { if (answerFeedback_ != null) { answerFeedback_ = com.google.cloud.dialogflow.v2.AnswerFeedback.newBuilder(answerFeedback_) .mergeFrom(value) .buildPartial(); } else { answerFeedback_ = value; } onChanged(); } else { answerFeedbackBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Required. The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. * </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearAnswerFeedback() { if (answerFeedbackBuilder_ == null) { answerFeedback_ = null; onChanged(); } else { answerFeedback_ = null; answerFeedbackBuilder_ = null; } return this; } /** * * * <pre> * Required. The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. * </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.v2.AnswerFeedback.Builder getAnswerFeedbackBuilder() { onChanged(); return getAnswerFeedbackFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. 
* </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.v2.AnswerFeedbackOrBuilder getAnswerFeedbackOrBuilder() { if (answerFeedbackBuilder_ != null) { return answerFeedbackBuilder_.getMessageOrBuilder(); } else { return answerFeedback_ == null ? com.google.cloud.dialogflow.v2.AnswerFeedback.getDefaultInstance() : answerFeedback_; } } /** * * * <pre> * Required. The AnswerFeedback for this record. You can set this with * [AnswerRecords.UpdateAnswerRecord][google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord] in order to give us feedback about * this answer. * </pre> * * <code> * .google.cloud.dialogflow.v2.AnswerFeedback answer_feedback = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.AnswerFeedback, com.google.cloud.dialogflow.v2.AnswerFeedback.Builder, com.google.cloud.dialogflow.v2.AnswerFeedbackOrBuilder> getAnswerFeedbackFieldBuilder() { if (answerFeedbackBuilder_ == null) { answerFeedbackBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.AnswerFeedback, com.google.cloud.dialogflow.v2.AnswerFeedback.Builder, com.google.cloud.dialogflow.v2.AnswerFeedbackOrBuilder>( getAnswerFeedback(), getParentForChildren(), isClean()); answerFeedback_ = null; } return answerFeedbackBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.AgentAssistantRecord, com.google.cloud.dialogflow.v2.AgentAssistantRecord.Builder, com.google.cloud.dialogflow.v2.AgentAssistantRecordOrBuilder> agentAssistantRecordBuilder_; /** * * * <pre> * Output only. The record for human agent assistant. * </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the agentAssistantRecord field is set. 
*/ @java.lang.Override public boolean hasAgentAssistantRecord() { return recordCase_ == 4; } /** * * * <pre> * Output only. The record for human agent assistant. * </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The agentAssistantRecord. */ @java.lang.Override public com.google.cloud.dialogflow.v2.AgentAssistantRecord getAgentAssistantRecord() { if (agentAssistantRecordBuilder_ == null) { if (recordCase_ == 4) { return (com.google.cloud.dialogflow.v2.AgentAssistantRecord) record_; } return com.google.cloud.dialogflow.v2.AgentAssistantRecord.getDefaultInstance(); } else { if (recordCase_ == 4) { return agentAssistantRecordBuilder_.getMessage(); } return com.google.cloud.dialogflow.v2.AgentAssistantRecord.getDefaultInstance(); } } /** * * * <pre> * Output only. The record for human agent assistant. * </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setAgentAssistantRecord( com.google.cloud.dialogflow.v2.AgentAssistantRecord value) { if (agentAssistantRecordBuilder_ == null) { if (value == null) { throw new NullPointerException(); } record_ = value; onChanged(); } else { agentAssistantRecordBuilder_.setMessage(value); } recordCase_ = 4; return this; } /** * * * <pre> * Output only. The record for human agent assistant. * </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setAgentAssistantRecord( com.google.cloud.dialogflow.v2.AgentAssistantRecord.Builder builderForValue) { if (agentAssistantRecordBuilder_ == null) { record_ = builderForValue.build(); onChanged(); } else { agentAssistantRecordBuilder_.setMessage(builderForValue.build()); } recordCase_ = 4; return this; } /** * * * <pre> * Output only. 
The record for human agent assistant. * </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergeAgentAssistantRecord( com.google.cloud.dialogflow.v2.AgentAssistantRecord value) { if (agentAssistantRecordBuilder_ == null) { if (recordCase_ == 4 && record_ != com.google.cloud.dialogflow.v2.AgentAssistantRecord.getDefaultInstance()) { record_ = com.google.cloud.dialogflow.v2.AgentAssistantRecord.newBuilder( (com.google.cloud.dialogflow.v2.AgentAssistantRecord) record_) .mergeFrom(value) .buildPartial(); } else { record_ = value; } onChanged(); } else { if (recordCase_ == 4) { agentAssistantRecordBuilder_.mergeFrom(value); } agentAssistantRecordBuilder_.setMessage(value); } recordCase_ = 4; return this; } /** * * * <pre> * Output only. The record for human agent assistant. * </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearAgentAssistantRecord() { if (agentAssistantRecordBuilder_ == null) { if (recordCase_ == 4) { recordCase_ = 0; record_ = null; onChanged(); } } else { if (recordCase_ == 4) { recordCase_ = 0; record_ = null; } agentAssistantRecordBuilder_.clear(); } return this; } /** * * * <pre> * Output only. The record for human agent assistant. * </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloud.dialogflow.v2.AgentAssistantRecord.Builder getAgentAssistantRecordBuilder() { return getAgentAssistantRecordFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. The record for human agent assistant. 
* </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.v2.AgentAssistantRecordOrBuilder getAgentAssistantRecordOrBuilder() { if ((recordCase_ == 4) && (agentAssistantRecordBuilder_ != null)) { return agentAssistantRecordBuilder_.getMessageOrBuilder(); } else { if (recordCase_ == 4) { return (com.google.cloud.dialogflow.v2.AgentAssistantRecord) record_; } return com.google.cloud.dialogflow.v2.AgentAssistantRecord.getDefaultInstance(); } } /** * * * <pre> * Output only. The record for human agent assistant. * </pre> * * <code> * .google.cloud.dialogflow.v2.AgentAssistantRecord agent_assistant_record = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.AgentAssistantRecord, com.google.cloud.dialogflow.v2.AgentAssistantRecord.Builder, com.google.cloud.dialogflow.v2.AgentAssistantRecordOrBuilder> getAgentAssistantRecordFieldBuilder() { if (agentAssistantRecordBuilder_ == null) { if (!(recordCase_ == 4)) { record_ = com.google.cloud.dialogflow.v2.AgentAssistantRecord.getDefaultInstance(); } agentAssistantRecordBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.AgentAssistantRecord, com.google.cloud.dialogflow.v2.AgentAssistantRecord.Builder, com.google.cloud.dialogflow.v2.AgentAssistantRecordOrBuilder>( (com.google.cloud.dialogflow.v2.AgentAssistantRecord) record_, getParentForChildren(), isClean()); record_ = null; } recordCase_ = 4; onChanged(); ; return agentAssistantRecordBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return 
super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.AnswerRecord) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.AnswerRecord) private static final com.google.cloud.dialogflow.v2.AnswerRecord DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.AnswerRecord(); } public static com.google.cloud.dialogflow.v2.AnswerRecord getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AnswerRecord> PARSER = new com.google.protobuf.AbstractParser<AnswerRecord>() { @java.lang.Override public AnswerRecord parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new AnswerRecord(input, extensionRegistry); } }; public static com.google.protobuf.Parser<AnswerRecord> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AnswerRecord> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2.AnswerRecord getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/**
 * Appcelerator Titanium Mobile
 * Copyright (c) 2009-2012 by Appcelerator, Inc. All Rights Reserved.
 * Licensed under the terms of the Apache Public License
 * Please see the LICENSE included with this distribution for details.
 */
package ti.modules.titanium.ui.widget.picker;

import java.text.DateFormatSymbols;
import java.text.DecimalFormat;
import java.text.FieldPosition;
import java.text.NumberFormat;
import java.text.ParsePosition;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;

import kankan.wheel.widget.WheelView;

import org.appcelerator.kroll.KrollDict;
import org.appcelerator.kroll.KrollProxy;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.titanium.proxy.TiViewProxy;
import org.appcelerator.titanium.util.TiConvert;
import org.appcelerator.titanium.util.TiUIHelper;
import org.appcelerator.titanium.view.TiUIView;

import android.app.Activity;
import android.widget.LinearLayout;

/**
 * Date picker built from three {@link WheelView} spinners (month, day, year).
 *
 * The widget keeps a single {@link Calendar} ({@code calendar}) as the source of
 * truth for the current value, clamped to the [minDate, maxDate] range. Whenever
 * the value or range changes, the three wheel adapters are rebuilt so that only
 * valid months/days are selectable, then the wheels are repositioned to match
 * {@code calendar}. The "value" proxy property mirrors the current selection, and
 * a "change" event is fired on user-visible changes unless suppressed.
 */
public class TiUIDateSpinner extends TiUIView implements WheelView.OnItemSelectedListener {
  private static final String TAG = "TiUIDateSpinner";

  // One wheel per date component.
  private WheelView monthWheel;
  private WheelView dayWheel;
  private WheelView yearWheel;
  // Adapters supply the selectable value range + formatting for each wheel.
  private FormatNumericWheelAdapter monthAdapter;
  private FormatNumericWheelAdapter dayAdapter;
  private FormatNumericWheelAdapter yearAdapter;

  // Guards against firing "change" events during programmatic updates.
  private boolean suppressChangeEvent = false;
  // Set while adapters/positions are being rebuilt so onItemSelected() ignores
  // the spurious selection callbacks those rebuilds trigger.
  private boolean ignoreItemSelection = false;

  // Allowed range; defaults are set in createNativeView() (+/- 100 years).
  private Calendar maxDate = Calendar.getInstance(), minDate = Calendar.getInstance();
  private Locale locale = Locale.getDefault();
  private boolean dayBeforeMonth = false; // wheel ordering: day-month-year vs month-day-year
  private boolean numericMonths = false;  // render months as "01".."12" instead of names
  // Current value of the spinner.
  private Calendar calendar = Calendar.getInstance();

  public TiUIDateSpinner(TiViewProxy proxy) {
    super(proxy);
  }

  public TiUIDateSpinner(TiViewProxy proxy, Activity activity) {
    this(proxy);
    createNativeView(activity);
  }

  /**
   * Builds the native view: three wheels inside a horizontal LinearLayout,
   * ordered according to the "dayBeforeMonth" proxy property.
   */
  private void createNativeView(Activity activity) {
    // defaults: allow +/- 100 years around the current year
    maxDate.set(calendar.get(Calendar.YEAR) + 100, 11, 31);
    minDate.set(calendar.get(Calendar.YEAR) - 100, 0, 1);

    monthWheel = new WheelView(activity);
    dayWheel = new WheelView(activity);
    yearWheel = new WheelView(activity);
    monthWheel.setTextSize(20);
    // keep all three wheels visually consistent
    dayWheel.setTextSize(monthWheel.getTextSize());
    yearWheel.setTextSize(monthWheel.getTextSize());
    monthWheel.setItemSelectedListener(this);
    dayWheel.setItemSelectedListener(this);
    yearWheel.setItemSelectedListener(this);

    LinearLayout layout = new LinearLayout(activity) {
      @Override
      protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        // notify the Titanium proxy once Android has laid the view out
        TiUIHelper.firePostLayoutEvent(proxy);
      }
    };
    layout.setOrientation(LinearLayout.HORIZONTAL);

    if (proxy.hasProperty("dayBeforeMonth")) { // TODO
      dayBeforeMonth = TiConvert.toBoolean(proxy.getProperties(), "dayBeforeMonth");
    }

    if (dayBeforeMonth) {
      layout.addView(dayWheel);
      layout.addView(monthWheel);
    } else {
      layout.addView(monthWheel);
      layout.addView(dayWheel);
    }
    layout.addView(yearWheel);
    setNativeView(layout);
  }

  /**
   * Applies the initial property dictionary: value, min/max bounds, locale and
   * display options. Clamps the initial value into [minDate, maxDate] and writes
   * the (possibly clamped) value back to the proxy if it wasn't set there.
   */
  @Override
  public void processProperties(KrollDict d) {
    super.processProperties(d);

    boolean valueExistsInProxy = false;
    if (d.containsKey("value")) {
      calendar.setTime((Date) d.get("value"));
      valueExistsInProxy = true;
    }
    if (d.containsKey("minDate")) {
      Calendar c = Calendar.getInstance();
      minDate.setTime(TiConvert.toDate(d, "minDate"));
      c.setTime(minDate.getTime());
    }
    if (d.containsKey("maxDate")) {
      Calendar c = Calendar.getInstance();
      maxDate.setTime(TiConvert.toDate(d, "maxDate"));
      c.setTime(maxDate.getTime());
    }
    if (d.containsKey("locale")) {
      setLocale(TiConvert.toString(d, "locale"));
    }
    if (d.containsKey("dayBeforeMonth")) {
      dayBeforeMonth = TiConvert.toBoolean(d, "dayBeforeMonth");
    }
    if (d.containsKey("numericMonths")) {
      numericMonths = TiConvert.toBoolean(d, "numericMonths");
    }

    // Degenerate range (max < min): collapse the range to minDate.
    if (maxDate.before(minDate)) {
      maxDate.setTime(minDate.getTime());
    }

    // If initial value is out-of-bounds, set date to nearest bound
    if (calendar.after(maxDate)) {
      calendar.setTime(maxDate.getTime());
    } else if (calendar.before(minDate)) {
      calendar.setTime(minDate.getTime());
    }

    // suppressEvent=true: initial population must not fire a "change" event.
    setValue(calendar.getTimeInMillis(), true);
    if (!valueExistsInProxy) {
      proxy.setProperty("value", calendar.getTime());
    }
  }

  /** Reacts to runtime changes of the "value" and "locale" proxy properties. */
  @Override
  public void propertyChanged(String key, Object oldValue, Object newValue, KrollProxy proxy) {
    if ("value".equals(key)) {
      Date date = (Date) newValue;
      setValue(date.getTime());
    } else if ("locale".equals(key)) {
      setLocale(TiConvert.toString(newValue));
    }
    super.propertyChanged(key, oldValue, newValue, proxy);
  }

  // Rebuild all three adapters. Order matters: month limits depend on the
  // selected year, and day limits depend on the selected year + month.
  private void setAdapters() {
    setYearAdapter();
    setMonthAdapter();
    setDayAdapter();
  }

  /** (Re)creates the year adapter only when the min/max year range changed. */
  private void setYearAdapter() {
    int minYear = minDate.get(Calendar.YEAR);
    int maxYear = maxDate.get(Calendar.YEAR);
    if (yearAdapter != null && yearAdapter.getMinValue() == minYear && yearAdapter.getMaxValue() == maxYear) {
      return; // unchanged — avoid resetting the wheel
    }
    yearAdapter = new FormatNumericWheelAdapter(minYear, maxYear, new DecimalFormat("0000"), 4);
    ignoreItemSelection = true;
    yearWheel.setAdapter(yearAdapter);
    ignoreItemSelection = false;
  }

  private void setMonthAdapter() {
    setMonthAdapter(false);
  }

  /**
   * (Re)creates the month adapter. In boundary years the selectable months are
   * restricted by minDate/maxDate. forceUpdate is used by locale changes, which
   * alter formatting even when the numeric range is unchanged.
   */
  private void setMonthAdapter(boolean forceUpdate) {
    int setMinMonth = 1;
    int setMaxMonth = 12;
    int currentMin = -1, currentMax = -1;
    if (monthAdapter != null) {
      currentMin = monthAdapter.getMinValue();
      currentMax = monthAdapter.getMaxValue();
    }
    int maxYear = maxDate.get(Calendar.YEAR);
    int minYear = minDate.get(Calendar.YEAR);
    int selYear = getSelectedYear();
    // Calendar.MONTH is 0-based; wheel values are 1-based.
    if (selYear == maxYear) {
      setMaxMonth = maxDate.get(Calendar.MONTH) + 1;
    }
    if (selYear == minYear) {
      setMinMonth = minDate.get(Calendar.MONTH) + 1;
    }
    if (currentMin != setMinMonth || currentMax != setMaxMonth || forceUpdate) {
      NumberFormat format;
      int width = 4;
      if (numericMonths) {
        format = new DecimalFormat("00");
      } else {
        // Localized month names; wheel width sized to the longest name.
        format = new MonthFormat(this.locale);
        width = ((MonthFormat) format).getLongestMonthName();
      }
      monthAdapter = new FormatNumericWheelAdapter(setMinMonth, setMaxMonth, format, width);
      ignoreItemSelection = true;
      monthWheel.setAdapter(monthAdapter);
      ignoreItemSelection = false;
    }
  }

  /**
   * (Re)creates the day adapter for the currently selected month/year,
   * restricting the first/last day in the boundary month of a boundary year.
   */
  private void setDayAdapter() {
    int setMinDay = 1;
    int setMaxDay = calendar.getActualMaximum(Calendar.DAY_OF_MONTH);
    int currentMin = -1, currentMax = -1;
    if (dayAdapter != null) {
      currentMin = dayAdapter.getMinValue();
      currentMax = dayAdapter.getMaxValue();
    }
    int maxYear = maxDate.get(Calendar.YEAR);
    int minYear = minDate.get(Calendar.YEAR);
    int selYear = getSelectedYear();
    int maxMonth = maxDate.get(Calendar.MONTH) + 1;
    int minMonth = minDate.get(Calendar.MONTH) + 1;
    int selMonth = getSelectedMonth();
    if (selYear == maxYear && selMonth == maxMonth) {
      setMaxDay = maxDate.get(Calendar.DAY_OF_MONTH);
    }
    if (selYear == minYear && selMonth == minMonth) {
      setMinDay = minDate.get(Calendar.DAY_OF_MONTH);
    }
    if (currentMin != setMinDay || currentMax != setMaxDay) {
      dayAdapter = new FormatNumericWheelAdapter(setMinDay, setMaxDay, new DecimalFormat("00"), 4);
      ignoreItemSelection = true;
      dayWheel.setAdapter(dayAdapter);
      ignoreItemSelection = false;
    }
  }

  // Move all three wheels to the positions matching `calendar`, without
  // triggering selection callbacks.
  private void syncWheels() {
    ignoreItemSelection = true;
    yearWheel.setCurrentItem(yearAdapter.getIndex(calendar.get(Calendar.YEAR)));
    monthWheel.setCurrentItem(monthAdapter.getIndex(calendar.get(Calendar.MONTH) + 1));
    dayWheel.setCurrentItem(dayAdapter.getIndex(calendar.get(Calendar.DAY_OF_MONTH)));
    ignoreItemSelection = false;
  }

  public void setValue(long value) {
    setValue(value, false);
  }

  /**
   * Sets the current value (epoch millis), clamping to [minDate, maxDate],
   * rebuilds adapters, repositions wheels, mirrors the value to the proxy and —
   * unless suppressed — fires a "change" event when the value actually changed.
   */
  public void setValue(long value, boolean suppressEvent) {
    Date oldVal, newVal;
    oldVal = calendar.getTime();

    setCalendar(value);
    newVal = calendar.getTime();

    // Clamp to the allowed range.
    if (newVal.after(maxDate.getTime())) {
      newVal = maxDate.getTime();
      setCalendar(newVal);
    } else if (newVal.before(minDate.getTime())) {
      newVal = minDate.getTime();
      setCalendar(newVal);
    }

    boolean isChanged = (!newVal.equals(oldVal));

    setAdapters();
    syncWheels();
    proxy.setProperty("value", newVal);

    if (isChanged && !suppressEvent) {
      if (!suppressChangeEvent) {
        KrollDict data = new KrollDict();
        data.put("value", newVal);
        proxy.fireEvent("change", data);
      }
    }
  }

  public void setValue(Date value, boolean suppressEvent) {
    long millis = value.getTime();
    setValue(millis, suppressEvent);
  }

  public void setValue(Date value) {
    setValue(value, false);
  }

  // Adopt whatever the wheels currently show as the new value.
  public void setValue() {
    setValue(getSelectedDate());
  }

  /**
   * Parses a BCP-47-ish locale string ("en", "en-US", "en_US", optionally with a
   * variant) into a {@link Locale}; falls back to the default locale on anything
   * it cannot understand. A locale change re-renders the month names.
   */
  private void setLocale(String localeString) {
    Locale locale = Locale.getDefault();
    if (localeString != null && localeString.length() > 1) {
      // Strip separators, then split by fixed positions: 2-char language,
      // 2-char country, rest = variant.
      String stripped = localeString.replaceAll("-", "").replaceAll("_", "");
      if (stripped.length() == 2) {
        locale = new Locale(stripped);
      } else if (stripped.length() >= 4) {
        String language = stripped.substring(0, 2);
        String country = stripped.substring(2, 4);
        if (stripped.length() > 4) {
          locale = new Locale(language, country, stripped.substring(4));
        } else {
          locale = new Locale(language, country);
        }
      } else {
        Log.w(TAG, "Locale string '" + localeString + "' not understood. Using default locale.");
      }
    }

    if (!this.locale.equals(locale)) {
      this.locale = locale;
      // force month adapter rebuild so month names pick up the new locale
      setMonthAdapter(true);
      syncWheels();
    }
  }

  private void setCalendar(long millis) {
    calendar.setTimeInMillis(millis);
  }

  private void setCalendar(Date date) {
    calendar.setTime(date);
  }

  // Current wheel selections, translated back to calendar values
  // (month is 1-based here, unlike Calendar.MONTH).
  private int getSelectedYear() {
    return yearAdapter.getValue(yearWheel.getCurrentItem());
  }

  private int getSelectedMonth() {
    return monthAdapter.getValue(monthWheel.getCurrentItem());
  }

  private int getSelectedDay() {
    return dayAdapter.getValue(dayWheel.getCurrentItem());
  }

  /** Combines the three wheel selections into a Date (time-of-day inherited from "now"). */
  private Date getSelectedDate() {
    int year = getSelectedYear();
    int month = getSelectedMonth() - 1;
    int day = getSelectedDay();
    Calendar c = Calendar.getInstance();
    c.set(year, month, day);
    return c.getTime();
  }

  /** Wheel callback: user moved a wheel — adopt the new selection as the value. */
  @Override
  public void onItemSelected(WheelView view, int index) {
    if (ignoreItemSelection) {
      return; // programmatic adapter/position change, not a user action
    }
    setValue();
  }

  /**
   * NumberFormat that maps 1-based month numbers to localized month names
   * (via {@link DateFormatSymbols}) and back. Used as the month wheel's
   * formatter when {@code numericMonths} is false.
   */
  class MonthFormat extends NumberFormat {
    private static final long serialVersionUID = 1L;
    private DateFormatSymbols symbols = new DateFormatSymbols(Locale.getDefault());

    public MonthFormat(Locale locale) {
      super();
      setLocale(locale);
    }

    @Override
    public StringBuffer format(double value, StringBuffer buffer, FieldPosition position) {
      return format((long) value, buffer, position);
    }

    /** value is the 1-based month number; appends its localized name. */
    @Override
    public StringBuffer format(long value, StringBuffer buffer, FieldPosition position) {
      buffer.append(symbols.getMonths()[((int) value) - 1]);
      return buffer;
    }

    /** Reverse lookup: month name -> 1-based month number, or null if unknown. */
    @Override
    public Number parse(String value, ParsePosition position) {
      String[] months = symbols.getMonths();
      for (int i = 0; i < months.length; i++) {
        if (months[i].equals(value)) {
          // NOTE(review): new Long(...) is deprecated boxing; Long.valueOf would
          // be preferred in a code change.
          return new Long(i + 1);
        }
      }
      return null;
    }

    public void setLocale(Locale locale) {
      symbols = new DateFormatSymbols(locale);
    }

    /** Length (in chars) of the longest localized month name; used as wheel width. */
    public int getLongestMonthName() {
      int max = 0;
      for (String month : symbols.getMonths()) {
        max = (month.length() > max) ? month.length() : max;
      }
      return max;
    }
  }
}
// Copyright 2000-2017 JetBrains s.r.o.
// Use of this source code is governed by the Apache 2.0 license that can be
// found in the LICENSE file.
package com.intellij.codeInsight.daemon.impl.quickfix;

import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.daemon.QuickFixBundle;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightControlFlowUtil;
import com.intellij.codeInsight.intention.HighPriorityAction;
import com.intellij.codeInspection.LocalQuickFixAndIntentionActionOnPsiElement;
import com.intellij.openapi.command.undo.UndoUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.controlFlow.AnalysisCanceledException;
import com.intellij.psi.controlFlow.ControlFlow;
import com.intellij.psi.controlFlow.ControlFlowUtil;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.util.*;
import com.intellij.refactoring.changeSignature.ChangeSignatureProcessor;
import com.intellij.refactoring.changeSignature.OverriderUsageInfo;
import com.intellij.refactoring.changeSignature.ParameterInfoImpl;
import com.intellij.refactoring.typeMigration.TypeMigrationProcessor;
import com.intellij.refactoring.typeMigration.TypeMigrationRules;
import com.intellij.usageView.UsageInfo;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

/**
 * Quick fix that changes a method's declared return type to a desired type.
 *
 * Depending on {@code myFixWholeHierarchy} it either changes just the target
 * method or the deepest super methods (and, via the change-signature
 * refactoring, all overriders). After the signature change it inserts/adjusts
 * {@code return} statements in affected bodies when the new type is non-void,
 * and selects the inserted return value in the editor. When the hierarchy root
 * declares the return type via a class type parameter, it instead runs a type
 * migration on the subclass's extends/implements type arguments
 * (see {@link #changeClassTypeArgument}).
 */
public class MethodReturnTypeFix extends LocalQuickFixAndIntentionActionOnPsiElement implements HighPriorityAction {
  // NOTE(review): logger category still says "MethodReturnBooleanFix" — looks
  // like a leftover from a rename of this class.
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.daemon.impl.quickfix.MethodReturnBooleanFix");

  // Smart pointer so the desired type survives PSI changes between
  // isAvailable() and invoke().
  private final SmartTypePointer myReturnTypePointer;
  private final boolean myFixWholeHierarchy;
  private final String myName;          // method name, captured for the intention text
  private final String myCanonicalText; // desired return type, captured for the intention text

  public MethodReturnTypeFix(@NotNull PsiMethod method, @NotNull PsiType returnType, boolean fixWholeHierarchy) {
    super(method);
    myReturnTypePointer = SmartTypePointerManager.getInstance(method.getProject()).createSmartTypePointer(returnType);
    myFixWholeHierarchy = fixWholeHierarchy;
    myName = method.getName();
    myCanonicalText = returnType.getCanonicalText();
  }

  @NotNull
  @Override
  public String getText() {
    return QuickFixBundle.message("fix.return.type.text", myName, myCanonicalText);
  }

  @Override
  @NotNull
  public String getFamilyName() {
    return QuickFixBundle.message("fix.return.type.family");
  }

  /**
   * Available only when the method is a valid, in-project method whose current
   * return type is valid and differs from the (non-null-type) desired type, and
   * every type parameter referenced by the desired type is resolvable at the
   * method's position.
   */
  @Override
  public boolean isAvailable(@NotNull Project project,
                             @NotNull PsiFile file,
                             @NotNull PsiElement startElement,
                             @NotNull PsiElement endElement) {
    final PsiMethod myMethod = (PsiMethod)startElement;
    final PsiType myReturnType = myReturnTypePointer.getType();
    if (myMethod.isValid() &&
        myMethod.getManager().isInProject(myMethod) &&
        myReturnType != null &&
        myReturnType.isValid() &&
        !TypeConversionUtil.isNullType(myReturnType)) {
      final PsiType returnType = myMethod.getReturnType();
      if (returnType != null && returnType.isValid() && !Comparing.equal(myReturnType, returnType)) {
        return allTypeParametersResolved(myMethod, myReturnType);
      }
    }
    return false;
  }

  // True when every type parameter occurring in the desired return type is
  // accessible from the method (its own, or an enclosing class's).
  private static boolean allTypeParametersResolved(PsiMethod myMethod, PsiType myReturnType) {
    PsiTypesUtil.TypeParameterSearcher searcher = new PsiTypesUtil.TypeParameterSearcher();
    myReturnType.accept(searcher);
    Set<PsiTypeParameter> parameters = searcher.getTypeParameters();
    return parameters.stream().allMatch(parameter -> isAccessibleAt(parameter, myMethod));
  }

  // A type parameter is accessible when it belongs to the method itself, or to
  // an enclosing class whose instance context reaches the method.
  private static boolean isAccessibleAt(PsiTypeParameter parameter, PsiMethod method) {
    PsiTypeParameterListOwner owner = parameter.getOwner();
    if(owner == method) return true;
    if(owner instanceof PsiClass) {
      return PsiTreeUtil.isAncestor(owner, method, true)
             && InheritanceUtil.hasEnclosingInstanceInScope((PsiClass)owner, method, false, false);
    }
    return false;
  }

  /**
   * Performs the fix: optionally retargets the change to the hierarchy root
   * (or a type-argument migration), runs the change-signature refactoring, then
   * adds/repairs return statements and selects the result in the editor.
   */
  @Override
  public void invoke(@NotNull Project project,
                     @NotNull PsiFile file,
                     Editor editor,
                     @NotNull PsiElement startElement,
                     @NotNull PsiElement endElement) {
    final PsiMethod myMethod = (PsiMethod)startElement;

    if (!FileModificationService.getInstance().prepareFileForWrite(myMethod.getContainingFile())) return;
    final PsiType myReturnType = myReturnTypePointer.getType();
    if (myReturnType == null) return;
    if (myFixWholeHierarchy) {
      final PsiMethod superMethod = myMethod.findDeepestSuperMethod();
      final PsiType superReturnType = superMethod == null ? null : superMethod.getReturnType();
      // If the super return type is generic, prefer migrating the subclass's
      // type arguments; changeClassTypeArgument returns false when it handled
      // (started) that migration, in which case we are done here.
      if (superReturnType != null &&
          !Comparing.equal(myReturnType, superReturnType) &&
          !changeClassTypeArgument(myMethod, project, superReturnType, superMethod.getContainingClass(), editor, myReturnType)) {
        return;
      }
    }

    final List<PsiMethod> affectedMethods = changeReturnType(myMethod, myReturnType);

    PsiElementFactory factory = JavaPsiFacade.getInstance(project).getElementFactory();
    PsiReturnStatement statementToSelect = null;
    if (!PsiType.VOID.equals(myReturnType)) {
      // A non-void method needs its returns fixed up in every affected body.
      final ReturnStatementAdder adder = new ReturnStatementAdder(factory, myReturnType);

      for (PsiMethod affectedMethod : affectedMethods) {
        PsiReturnStatement statement = adder.addReturnForMethod(file, affectedMethod);
        if (statement != null && affectedMethod == myMethod) {
          statementToSelect = statement; // only highlight in the method the user invoked on
        }
      }
    }

    if (statementToSelect != null) {
      Editor editorForMethod = getEditorForMethod(myMethod, project, editor, file);
      if (editorForMethod != null) {
        selectReturnValueInEditor(statementToSelect, editorForMethod);
      }
    }
  }

  // to clearly separate data
  /** Inserts or converts a trailing return statement in a method body so it conforms to the new type. */
  private static class ReturnStatementAdder {
    @NotNull private final PsiElementFactory factory;
    @NotNull private final PsiType myTargetType;

    private ReturnStatementAdder(@NotNull final PsiElementFactory factory, @NotNull final PsiType targetType) {
      this.factory = factory;
      myTargetType = targetType;
    }

    /**
     * Returns the return statement to select, or null for abstract/bodyless
     * methods or when control-flow analysis fails.
     */
    private PsiReturnStatement addReturnForMethod(final PsiFile file, final PsiMethod method) {
      final PsiModifierList modifiers = method.getModifierList();
      if (modifiers.hasModifierProperty(PsiModifier.ABSTRACT) || method.getBody() == null) {
        return null;
      }

      try {
        final ConvertReturnStatementsVisitor visitor = new ConvertReturnStatementsVisitor(factory, method, myTargetType);

        ControlFlow controlFlow;
        try {
          controlFlow = HighlightControlFlowUtil.getControlFlowNoConstantEvaluate(method.getBody());
        }
        catch (AnalysisCanceledException e) {
          return null; //must be an error
        }
        PsiReturnStatement returnStatement;
        if (ControlFlowUtil.processReturns(controlFlow, visitor)) {
          // extra return statement not needed
          // get latest modified return statement and select...
          returnStatement = visitor.getLatestReturn();
        }
        else {
          returnStatement = visitor.createReturnInLastStatement();
        }
        if (method.getContainingFile() != file) {
          // the edit landed in another file — make sure undo covers it
          UndoUtil.markPsiFileForUndo(file);
        }
        return returnStatement;
      }
      catch (IncorrectOperationException e) {
        LOG.error(e);
      }
      return null;
    }
  }

  // Open (or reuse) an editor for the file that actually contains the method.
  private static Editor getEditorForMethod(PsiMethod myMethod,
                                           @NotNull final Project project,
                                           final Editor editor,
                                           final PsiFile file) {
    PsiFile containingFile = myMethod.getContainingFile();
    if (containingFile != file) {
      OpenFileDescriptor descriptor = new OpenFileDescriptor(project, containingFile.getVirtualFile());
      return FileEditorManager.getInstance(project).openTextEditor(descriptor, true);
    }
    return editor;
  }

  /**
   * Which methods the signature change should start from: the method itself, or
   * (when fixing the whole hierarchy) its deepest super methods — unless one of
   * those already has the desired return type, in which case only the method
   * itself is changed.
   */
  @NotNull
  private PsiMethod[] getChangeRoots(final PsiMethod method, @NotNull PsiType returnType) {
    if (!myFixWholeHierarchy) return new PsiMethod[]{method};

    final PsiMethod[] methods = method.findDeepestSuperMethods();

    if (methods.length > 0) {
      for (PsiMethod psiMethod : methods) {
        if (returnType.equals(psiMethod.getReturnType())) {
          // the super method already has the desired type — change only `method`
          return new PsiMethod[] {method};
        }
      }
      return methods;
    }
    // no - only base
    return new PsiMethod[] {method};
  }

  /**
   * Runs a change-signature refactoring per change root and collects every
   * method (roots + overriders) whose signature was affected.
   */
  @NotNull
  private List<PsiMethod> changeReturnType(final PsiMethod method, @NotNull final PsiType returnType) {
    final PsiMethod[] methods = getChangeRoots(method, returnType);

    final MethodSignatureChangeVisitor methodSignatureChangeVisitor = new MethodSignatureChangeVisitor();
    for (PsiMethod targetMethod : methods) {
      methodSignatureChangeVisitor.addBase(targetMethod);
      ChangeSignatureProcessor processor = new UsagesAwareChangeSignatureProcessor(method.getProject(), targetMethod,
                                                                                   false, null,
                                                                                   myName,
                                                                                   returnType,
                                                                                   RemoveUnusedParameterFix.getNewParametersInfo(targetMethod, null),
                                                                                   methodSignatureChangeVisitor);
      processor.run();
    }
    return methodSignatureChangeVisitor.getAffectedMethods();
  }

  /** Accumulates the base methods plus every overrider the refactoring touches. */
  private static class MethodSignatureChangeVisitor implements UsageVisitor {
    private final List<PsiMethod> myAffectedMethods;

    private MethodSignatureChangeVisitor() {
      myAffectedMethods = new ArrayList<>();
    }

    public void addBase(final PsiMethod baseMethod) {
      myAffectedMethods.add(baseMethod);
    }

    @Override
    public void visit(final UsageInfo usage) {
      if (usage instanceof OverriderUsageInfo) {
        myAffectedMethods.add(((OverriderUsageInfo) usage).getOverridingMethod());
      }
    }

    public List<PsiMethod> getAffectedMethods() {
      return myAffectedMethods;
    }

    /**
     * Drops covariant-overrider usages for methods we are changing ourselves,
     * so the refactoring does not skip them as "already covariant".
     */
    @Override
    public void preprocessCovariantOverriders(final List<UsageInfo> covariantOverriderInfos) {
      for (Iterator<UsageInfo> usageInfoIterator = covariantOverriderInfos.iterator(); usageInfoIterator.hasNext();) {
        final UsageInfo info = usageInfoIterator.next();
        if (info instanceof OverriderUsageInfo) {
          final OverriderUsageInfo overrideUsage = (OverriderUsageInfo) info;
          if (myAffectedMethods.contains(overrideUsage.getOverridingMethod())) {
            usageInfoIterator.remove();
          }
        }
      }
    }
  }

  /** Callback interface for observing the usages a change-signature run processes. */
  private interface UsageVisitor {
    void visit(final UsageInfo usage);

    void preprocessCovariantOverriders(final List<UsageInfo> covariantOverriderInfos);
  }

  /** ChangeSignatureProcessor that reports processed usages to a {@link UsageVisitor}. */
  private static class UsagesAwareChangeSignatureProcessor extends ChangeSignatureProcessor {
    private final UsageVisitor myUsageVisitor;

    private UsagesAwareChangeSignatureProcessor(final Project project,
                                                final PsiMethod method,
                                                final boolean generateDelegate,
                                                @PsiModifier.ModifierConstant final String newVisibility,
                                                final String newName,
                                                final PsiType newType,
                                                @NotNull final ParameterInfoImpl[] parameterInfo,
                                                final UsageVisitor usageVisitor) {
      super(project, method, generateDelegate, newVisibility, newName, newType, parameterInfo);
      myUsageVisitor = usageVisitor;
    }

    @Override
    protected void preprocessCovariantOverriders(final List<UsageInfo> covariantOverriderInfos) {
      myUsageVisitor.preprocessCovariantOverriders(covariantOverriderInfos);
    }

    @Override
    protected void performRefactoring(@NotNull final UsageInfo[] usages) {
      super.performRefactoring(usages);

      for (UsageInfo usage : usages) {
        myUsageVisitor.visit(usage);
      }
    }
  }

  // Move the caret to the (new) return value and select it.
  static void selectReturnValueInEditor(final PsiReturnStatement returnStatement, final Editor editor) {
    final PsiExpression returnValue = returnStatement.getReturnValue();
    LOG.assertTrue(returnValue != null, returnStatement);
    TextRange range = returnValue.getTextRange();
    int offset = range.getStartOffset();

    editor.getCaretModel().moveToOffset(offset);
    editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
    editor.getSelectionModel().setSelection(range.getEndOffset(), range.getStartOffset());
  }

  /**
   * When the super method's return type involves the super class's type
   * parameters, changing the return type really means changing the type
   * arguments the derived class passes to its super class/interface. This
   * infers those arguments and starts an interactive type migration.
   *
   * @return true when there is nothing to migrate (caller should proceed with a
   *         plain signature change); false when a migration was started.
   */
  private static boolean changeClassTypeArgument(PsiMethod myMethod,
                                                 Project project,
                                                 PsiType superReturnType,
                                                 PsiClass superClass,
                                                 Editor editor,
                                                 PsiType returnType) {
    if (superClass == null || !superClass.hasTypeParameters()) return true;
    final PsiClass superReturnTypeClass = PsiUtil.resolveClassInType(superReturnType);
    if (superReturnTypeClass == null || !(superReturnTypeClass instanceof PsiTypeParameter || superReturnTypeClass.hasTypeParameters())) return true;

    final PsiClass derivedClass = myMethod.getContainingClass();
    if (derivedClass == null) return true;

    final PsiReferenceParameterList referenceParameterList = findTypeArgumentsList(superClass, derivedClass);
    if (referenceParameterList == null) return true;

    final PsiElement resolve = ((PsiJavaCodeReferenceElement)referenceParameterList.getParent()).resolve();
    if (!(resolve instanceof PsiClass)) return true;
    final PsiClass baseClass = (PsiClass)resolve;

    if (returnType instanceof PsiPrimitiveType) {
      // type arguments cannot be primitive — box first
      returnType = ((PsiPrimitiveType)returnType).getBoxedType(derivedClass);
    }

    // Express the super return type in terms of baseClass's type parameters,
    // then infer which arguments make it equal to the desired return type.
    final PsiSubstitutor superClassSubstitutor =
      TypeConversionUtil.getSuperClassSubstitutor(superClass, baseClass, PsiSubstitutor.EMPTY);
    final PsiType superReturnTypeInBaseClassType = superClassSubstitutor.substitute(superReturnType);
    final PsiResolveHelper resolveHelper = JavaPsiFacade.getInstance(project).getResolveHelper();
    final PsiSubstitutor psiSubstitutor =
      resolveHelper.inferTypeArguments(PsiTypesUtil.filterUnusedTypeParameters(superReturnTypeInBaseClassType, baseClass.getTypeParameters()),
                                       new PsiType[]{superReturnTypeInBaseClassType},
                                       new PsiType[]{returnType},
                                       PsiUtil.getLanguageLevel(superClass));

    final TypeMigrationRules rules = new TypeMigrationRules(project);
    final PsiSubstitutor compoundSubstitutor =
      TypeConversionUtil.getSuperClassSubstitutor(superClass, derivedClass, PsiSubstitutor.EMPTY).putAll(psiSubstitutor);
    rules.setBoundScope(new LocalSearchScope(derivedClass));
    TypeMigrationProcessor.runHighlightingTypeMigration(project, editor, rules, referenceParameterList,
                                                        JavaPsiFacade.getElementFactory(project).createType(baseClass, compoundSubstitutor));

    return false;
  }

  /**
   * The type-argument list the derived class uses for {@code superClass}:
   * from the anonymous-class base reference, or the implements list, or the
   * extends list — whichever resolves to (an inheritor of) the super class.
   */
  @Nullable
  private static PsiReferenceParameterList findTypeArgumentsList(final PsiClass superClass, final PsiClass derivedClass) {
    PsiReferenceParameterList referenceParameterList = null;
    if (derivedClass instanceof PsiAnonymousClass) {
      referenceParameterList = ((PsiAnonymousClass)derivedClass).getBaseClassReference().getParameterList();
    } else {
      final PsiReferenceList implementsList = derivedClass.getImplementsList();
      if (implementsList != null) {
        referenceParameterList = extractReferenceParameterList(superClass, implementsList);
      }
      if (referenceParameterList == null) {
        final PsiReferenceList extendsList = derivedClass.getExtendsList();
        if (extendsList != null) {
          referenceParameterList = extractReferenceParameterList(superClass, extendsList);
        }
      }
    }
    return referenceParameterList;
  }

  // First reference in the list that resolves to superClass (or an inheritor).
  @Nullable
  private static PsiReferenceParameterList extractReferenceParameterList(final PsiClass superClass,
                                                                         final PsiReferenceList extendsList) {
    for (PsiJavaCodeReferenceElement referenceElement : extendsList.getReferenceElements()) {
      final PsiElement element = referenceElement.resolve();
      if (element instanceof PsiClass &&
          InheritanceUtil.isInheritorOrSelf((PsiClass)element, superClass, true)) {
        return referenceElement.getParameterList();
      }
    }
    return null;
  }

  // The refactoring processors manage their own write actions.
  @Override
  public boolean startInWriteAction() {
    return false;
  }
}
package com.mariux.teleport.lib; import android.content.Context; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.os.AsyncTask; import android.os.Bundle; import android.os.Handler; import android.util.Log; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.common.api.ResultCallback; import com.google.android.gms.common.data.FreezableUtils; import com.google.android.gms.wearable.Asset; import com.google.android.gms.wearable.DataApi; import com.google.android.gms.wearable.DataEvent; import com.google.android.gms.wearable.DataEventBuffer; import com.google.android.gms.wearable.DataMap; import com.google.android.gms.wearable.DataMapItem; import com.google.android.gms.wearable.MessageApi; import com.google.android.gms.wearable.MessageEvent; import com.google.android.gms.wearable.Node; import com.google.android.gms.wearable.NodeApi; import com.google.android.gms.wearable.PutDataMapRequest; import com.google.android.gms.wearable.PutDataRequest; import com.google.android.gms.wearable.Wearable; import java.io.InputStream; import java.util.Collection; import java.util.HashSet; import java.util.List; /** * Created by Mario Viviani on 09/07/2014. 
*/ public class TeleportClient implements DataApi.DataListener, MessageApi.MessageListener, NodeApi.NodeListener, GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener { private static final String TAG = "TeleportClient"; private GoogleApiClient mGoogleApiClient; // private AsyncTask<?,?,?> asyncTask; private OnSyncDataItemTask onSyncDataItemTask; private OnGetMessageTask onGetMessageTask; private Handler mHandler; public TeleportClient(Context context) { mGoogleApiClient = new GoogleApiClient.Builder(context) .addApi(Wearable.API) .addConnectionCallbacks(this) .addOnConnectionFailedListener(this) .build(); } public void connect() { mGoogleApiClient.connect(); } @Override public void onConnected(Bundle bundle) { Log.e(TAG, "onConnected"); Wearable.DataApi.addListener(mGoogleApiClient, this); Wearable.MessageApi.addListener(mGoogleApiClient, this); Wearable.NodeApi.addListener(mGoogleApiClient, this); } public void disconnect() { Log.e(TAG, "disconnect"); Wearable.DataApi.removeListener(mGoogleApiClient, this); Wearable.MessageApi.removeListener(mGoogleApiClient, this); Wearable.NodeApi.removeListener(mGoogleApiClient, this); mGoogleApiClient.disconnect(); } @Override public void onConnectionSuspended(int i) { } //--------------SYNC DATAITEM ------------------// @Override public void onDataChanged(DataEventBuffer dataEvents) { final List<DataEvent> events = FreezableUtils.freezeIterable(dataEvents); for (DataEvent event : events) { if (event.getType() == DataEvent.TYPE_CHANGED) { DataMapItem dataMapItem = DataMapItem.fromDataItem(event.getDataItem()); DataMap dataMap = dataMapItem.getDataMap(); if (onSyncDataItemTask!=null) onSyncDataItemTask.execute(dataMap); } else if (event.getType() == DataEvent.TYPE_DELETED) { Log.d("DataItem Deleted", event.getDataItem().toString()); } } } //sync String public void syncString(String key, String item) { PutDataMapRequest putDataMapRequest = PutDataMapRequest.create("/" + key); 
putDataMapRequest.getDataMap().putString(key, item);
syncDataItem(putDataMapRequest);
}

/** Syncs a single int value to the Wear Data Layer under path "/&lt;key&gt;". */
public void syncInt(String key, int item) {
    PutDataMapRequest putDataMapRequest = PutDataMapRequest.create("/" + key);
    putDataMapRequest.getDataMap().putInt(key, item);
    syncDataItem(putDataMapRequest);
}

/** Syncs a single long value to the Wear Data Layer under path "/&lt;key&gt;". */
public void syncLong(String key, long item) {
    PutDataMapRequest putDataMapRequest = PutDataMapRequest.create("/" + key);
    putDataMapRequest.getDataMap().putLong(key, item);
    syncDataItem(putDataMapRequest);
}

/** Syncs a single boolean value to the Wear Data Layer under path "/&lt;key&gt;". */
public void syncBoolean(String key, boolean item) {
    PutDataMapRequest putDataMapRequest = PutDataMapRequest.create("/" + key);
    putDataMapRequest.getDataMap().putBoolean(key, item);
    syncDataItem(putDataMapRequest);
}

/** Syncs a byte array to the Wear Data Layer under path "/&lt;key&gt;". */
public void syncByteArray(String key, byte[] item) {
    PutDataMapRequest putDataMapRequest = PutDataMapRequest.create("/" + key);
    putDataMapRequest.getDataMap().putByteArray(key, item);
    syncDataItem(putDataMapRequest);
}

/** Syncs a single byte to the Wear Data Layer under path "/&lt;key&gt;". */
public void syncByte(String key, byte item) {
    PutDataMapRequest putDataMapRequest = PutDataMapRequest.create("/" + key);
    putDataMapRequest.getDataMap().putByte(key, item);
    syncDataItem(putDataMapRequest);
}

/** Syncs an {@link Asset} (e.g. an image blob) to the Wear Data Layer under path "/&lt;key&gt;". */
public void syncAsset(String key, Asset item) {
    PutDataMapRequest putDataMapRequest = PutDataMapRequest.create("/" + key);
    putDataMapRequest.getDataMap().putAsset(key, item);
    syncDataItem(putDataMapRequest);
}

/**
 * Copies every entry of the given DataMap and syncs the whole map under the
 * fixed path "/dataMap" (unlike the single-value helpers, which use "/&lt;key&gt;").
 */
public void syncAll(DataMap item) {
    PutDataMapRequest putDataMapRequest = PutDataMapRequest.create("/dataMap");
    putDataMapRequest.getDataMap().putAll(item);
    syncDataItem(putDataMapRequest);
}

//General method to sync data in the Data Layer
// Converts the request to a PutDataRequest and hands it to the DataApi.
// NOTE(review): if the GoogleApiClient is not connected the item is silently
// dropped (early return below) — callers get no failure signal.
public void syncDataItem(PutDataMapRequest putDataMapRequest) {
    PutDataRequest request = putDataMapRequest.asPutDataRequest();
    Log.d(TAG, "Generating DataItem: " + request);
    if (!mGoogleApiClient.isConnected()) {
        return;
    }
    //let's send the dataItem to the DataLayer API
    Wearable.DataApi.putDataItem(mGoogleApiClient, request)
            .setResultCallback(new ResultCallback<DataApi.DataItemResult>() {
                @Override
                public void onResult(DataApi.DataItemResult dataItemResult) {
                    // Failures are only logged; there is no retry here.
                    if (!dataItemResult.getStatus().isSuccess()) {
                        Log.e(TAG, "ERROR: failed to putDataItem, status code: "
                                + dataItemResult.getStatus().getStatusCode());
                    }
                }
            });
}

/**
 * Get the TeleportTask that will be executed when a DataItem is synced
 *
 */
public OnSyncDataItemTask getOnSyncDataItemTask() {
    return onSyncDataItemTask;
}

/**
 * Set the TeleportTask to be executed when a DataItem is synced
 *
 * @param onSyncDataItemTask A Task that extends TeleportTask that should be executed when a
 *                           DataItem is Synced. Keep in mind it will be executed only once,
 *                           so you might need to reset it.
 */
public void setOnSyncDataItemTask(OnSyncDataItemTask onSyncDataItemTask) {
    this.onSyncDataItemTask = onSyncDataItemTask;
}

/**
 * AsyncTask that will be executed when a DataItem is synced. You should extend this task and
 * implement the onPostExecute() method when implementing your Activity.
 *
 * doInBackground simply forwards the received DataMap unchanged to onPostExecute.
 */
public abstract static class OnSyncDataItemTask extends AsyncTask<DataMap, Void, DataMap> {
    protected DataMap doInBackground(DataMap... param) {
        //DataMap dataMap = DataMap.fromByteArray((byte[]) param[0]);
        return param[0];
        //return param[0];
    }

    protected abstract void onPostExecute(DataMap result);
}

//-----------------MESSAGING------------------//

// Collects the ids of all currently connected nodes.
// NOTE(review): await() blocks — this must not run on the main thread
// (it is only called from the AsyncTask below).
private Collection<String> getNodes() {
    HashSet<String> results = new HashSet<String>();
    NodeApi.GetConnectedNodesResult nodes =
            Wearable.NodeApi.getConnectedNodes(mGoogleApiClient).await();
    for (Node node : nodes.getNodes()) {
        results.add(node.getId());
    }
    return results;
}

//Task to send messages to nodes
// args[0] is the message path (String), args[1] the payload (byte[]).
private class StartTeleportMessageTask extends AsyncTask<Object, Void, Object> {
    @Override
    protected Void doInBackground(Object... args) {
        Collection<String> nodes = getNodes();
        for (String node : nodes) {
            propagateMessageToNodes(node, (String) args[0], (byte[]) args[1]);
        }
        return null;
    }
}

//propagate message to nodes
// Sends one message to one node; failures are logged but not retried.
private void propagateMessageToNodes(String node, String path, byte[] payload) {
    Wearable.MessageApi.sendMessage(
            mGoogleApiClient, node, path, payload).setResultCallback(
            new ResultCallback<MessageApi.SendMessageResult>() {
                @Override
                public void onResult(MessageApi.SendMessageResult sendMessageResult) {
                    if (!sendMessageResult.getStatus().isSuccess()) {
                        Log.e(TAG, "Failed to send message with status code: "
                                + sendMessageResult.getStatus().getStatusCode());
                    }
                }
            }
    );
}

/** Broadcasts a message with the given path and payload to every connected node. */
public void sendMessage(String path, byte[] payload) {
    //Start a StartTeleportMessageTask with proper Path and Payload
    new StartTeleportMessageTask().execute(path, payload);
}

@Override
public void onMessageReceived(MessageEvent messageEvent) {
    Log.d(TAG, "onMessageReceived() A message from watch was received:"
            + messageEvent.getRequestId() + " " + messageEvent.getPath());
    // Only the message PATH is forwarded to the task; the payload is ignored here.
    if (onGetMessageTask != null) {
        String messagePath = messageEvent.getPath();
        onGetMessageTask.execute(messagePath);
    }
}

/**
 * AsyncTask that will be executed when a Message is received You should extend this task and
 * implement the onPostExecute() method when implementing your Activity.
 *
 * doInBackground forwards the message path unchanged to onPostExecute.
 */
public abstract static class OnGetMessageTask extends AsyncTask<String, Void, String> {
    protected String doInBackground(String... path) {
        return path[0];
    }

    protected abstract void onPostExecute(String path);
}

/** Get the task executed when a message is received. */
public OnGetMessageTask getOnGetMessageTask() {
    return onGetMessageTask;
}

/** Set the task executed when a message is received (runs once per message). */
public void setOnGetMessageTask(OnGetMessageTask onGetMessageTask) {
    this.onGetMessageTask = onGetMessageTask;
}

//---END MESSAGING ------

// Node lifecycle callbacks are intentionally no-ops.
@Override
public void onPeerConnected(Node node) {
}

@Override
public void onPeerDisconnected(Node node) {
}

// Connection failures are intentionally ignored here.
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
}

public GoogleApiClient getGoogleApiClient() {
    return mGoogleApiClient;
}

public void setGoogleApiClient(GoogleApiClient mGoogleApiClient) {
    this.mGoogleApiClient = mGoogleApiClient;
}

/**
 * Loads Bitmap from Asset
 *
 * NOTE(review): await() blocks until the file descriptor is ready — call off
 * the main thread. Returns null for an unknown asset; throws for a null one.
 *
 * @param asset Asset to be converted to Bitmap
 */
public Bitmap loadBitmapFromAsset(Asset asset) {
    if (asset == null) {
        throw new IllegalArgumentException("Asset must be non-null");
    }
    // convert asset into a file descriptor and block until it's ready
    InputStream assetInputStream = Wearable.DataApi.getFdForAsset(
            mGoogleApiClient, asset).await().getInputStream();
    if (assetInputStream == null) {
        Log.w(TAG, "Requested an unknown Asset.");
        return null;
    }
    // decode the stream into a bitmap
    return BitmapFactory.decodeStream(assetInputStream);
}
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

/**
 * DeregisterImageType.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: 1.5.6  Built on : Aug 30, 2011 (10:01:01 CEST)
 *
 * NOTE: generated ADB bean — do not hand-edit logic; regenerate from the WSDL instead.
 */

package com.amazon.ec2;

/**
 * DeregisterImageType bean class
 *
 * ADB bean for the EC2 DeregisterImage request; carries a single required
 * imageId element in the 2012-08-15 EC2 namespace.
 */
public class DeregisterImageType implements org.apache.axis2.databinding.ADBBean {
    /* This type was generated from the piece of schema that had
       name = DeregisterImageType
       Namespace URI = http://ec2.amazonaws.com/doc/2012-08-15/
       Namespace Prefix = ns1
    */

    // Returns the canonical "ns1" prefix for the EC2 namespace, otherwise a fresh unique prefix.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if (namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")) {
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for ImageId
     */
    protected java.lang.String localImageId;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getImageId() {
        return localImageId;
    }

    /**
     * Auto generated setter method
     * @param param ImageId
     */
    public void setImageId(java.lang.String param) {
        this.localImageId = param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try {
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(
                    reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        } catch (java.lang.IllegalArgumentException e) {
            // Reader does not recognise the property: treat as not MTOM-aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OMElement.
     *
     * @param parentQName element name to serialize under
     * @param factory OM factory used to build the element
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement(
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory)
            throws org.apache.axis2.databinding.ADBException {
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this, parentQName) {
                    public void serialize(
                            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
                            throws javax.xml.stream.XMLStreamException {
                        DeregisterImageType.this.serialize(parentQName, factory, xmlWriter);
                    }
                };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName, factory, dataSource);
    }

    // Convenience overload: serialize without an xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {
        serialize(parentQName, factory, xmlWriter, false);
    }

    /**
     * Streams this bean as XML: the parent element, an optional xsi:type attribute,
     * then the required imageId child. Throws ADBException if imageId is null.
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {

        java.lang.String prefix = null;
        java.lang.String namespace = null;

        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();

        // Open the parent element, binding its namespace prefix if the writer
        // has not already declared one.
        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }

        // Optionally emit xsi:type="ns:DeregisterImageType".
        if (serializeType) {
            java.lang.String namespacePrefix =
                    registerPrefix(xmlWriter, "http://ec2.amazonaws.com/doc/2012-08-15/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        namespacePrefix + ":DeregisterImageType", xmlWriter);
            } else {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        "DeregisterImageType", xmlWriter);
            }
        }

        // Emit the required <imageId> child in the EC2 namespace.
        namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
        if (!namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null) {
                prefix = generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix, "imageId", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace, "imageId");
            }
        } else {
            xmlWriter.writeStartElement("imageId");
        }

        if (localImageId == null) {
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("imageId cannot be null!!");
        } else {
            xmlWriter.writeCharacters(localImageId);
        }

        xmlWriter.writeEndElement();

        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix, java.lang.String namespace,
                                java.lang.String attName, java.lang.String attValue,
                                javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue,
                                javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attValue);
        }
    }

    /**
     * Util method to write an attribute without the ns prefix
     * (attribute value is a QName rendered as "prefix:localPart").
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname,
                                     javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {

        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }

            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":"
                        + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes a space-separated list of QNames, declaring any needed prefixes first.
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {

        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }

                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":").append(
                                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(
                                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter,
                                            java.lang.String namespace)
            throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until the prefix is not already bound to another URI.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     * (a pull-parser view over the element/attribute lists). Throws if imageId is null.
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException {

        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        elementList.add(new javax.xml.namespace.QName(
                "http://ec2.amazonaws.com/doc/2012-08-15/", "imageId"));

        if (localImageId != null) {
            elementList.add(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localImageId));
        } else {
            throw new org.apache.axis2.databinding.ADBException("imageId cannot be null!!");
        }

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(
                qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory {

        /**
         * static method to create the object
         * Precondition:  If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         *                If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         *                If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static DeregisterImageType parse(javax.xml.stream.XMLStreamReader reader)
                throws java.lang.Exception {
            DeregisterImageType object = new DeregisterImageType();

            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix = "";
            java.lang.String namespaceuri = "";
            try {
                // Advance to the first start/end element.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // If an xsi:type attribute names a subtype, delegate to the ExtensionMapper.
                if (reader.getAttributeValue(
                        "http://www.w3.org/2001/XMLSchema-instance", "type") != null) {
                    java.lang.String fullTypeName = reader.getAttributeValue(
                            "http://www.w3.org/2001/XMLSchema-instance", "type");
                    if (fullTypeName != null) {
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1) {
                            nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix == null ? "" : nsPrefix;

                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1);
                        if (!"DeregisterImageType".equals(type)) {
                            //find namespace for the prefix
                            java.lang.String nsUri =
                                    reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (DeregisterImageType) com.amazon.ec2.ExtensionMapper.getTypeObject(
                                    nsUri, type, reader);
                        }
                    }
                }

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                reader.next();

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // Expect exactly one <imageId> child.
                if (reader.isStartElement() && new javax.xml.namespace.QName(
                        "http://ec2.amazonaws.com/doc/2012-08-15/", "imageId").equals(reader.getName())) {

                    java.lang.String content = reader.getElementText();

                    object.setImageId(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));

                    reader.next();

                }  // End of if for expected property start element
                else {
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException(
                            "Unexpected subelement " + reader.getLocalName());
                }

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                if (reader.isStartElement())
                    // A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException(
                            "Unexpected subelement " + reader.getLocalName());

            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }

    }//end of factory class

}
/* * ConnectBot: simple, powerful, open-source SSH client for Android * Copyright 2007 Kenny Root, Jeffrey Sharkey * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sk.vx.connectbot; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.security.KeyPair; import java.security.PrivateKey; import java.security.PublicKey; import java.util.EventListener; import java.util.List; import sk.vx.connectbot.bean.PubkeyBean; import sk.vx.connectbot.service.TerminalManager; import sk.vx.connectbot.util.FileChooser; import sk.vx.connectbot.util.FileChooserCallback; import sk.vx.connectbot.util.PubkeyDatabase; import sk.vx.connectbot.util.PubkeyUtils; import android.app.AlertDialog; import android.app.ListActivity; import android.content.ComponentName; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.ServiceConnection; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.os.IBinder; import android.os.Message; import android.text.ClipboardManager; import android.util.Log; import android.view.ContextMenu; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.MenuItem.OnMenuItemClickListener; import android.view.View; import android.view.ViewGroup; import 
android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ArrayAdapter; import android.widget.EditText; import android.widget.ImageView; import android.widget.TableRow; import android.widget.TextView; import android.widget.Toast; import com.trilead.ssh2.crypto.Base64; import com.trilead.ssh2.crypto.PEMDecoder; import com.trilead.ssh2.crypto.PEMStructure; /** * List public keys in database by nickname and describe their properties. Allow users to import, * generate, rename, and delete key pairs. * * @author Kenny Root */ public class PubkeyListActivity extends ListActivity implements EventListener, FileChooserCallback { public final static String TAG = "ConnectBot.PubkeyListActivity"; private static final int MAX_KEYFILE_SIZE = 8192; private static final int KEYTYPE_PUBLIC = 0; private static final int KEYTYPE_PRIVATE = 1; protected PubkeyDatabase pubkeydb; private List<PubkeyBean> pubkeys; protected ClipboardManager clipboard; protected LayoutInflater inflater = null; protected TerminalManager bound = null; private MenuItem onstartToggle = null; private MenuItem confirmUse = null; private ServiceConnection connection = new ServiceConnection() { public void onServiceConnected(ComponentName className, IBinder service) { bound = ((TerminalManager.TerminalBinder) service).getService(); // update our listview binder to find the service updateList(); } public void onServiceDisconnected(ComponentName className) { bound = null; updateList(); } }; @Override public void onStart() { super.onStart(); bindService(new Intent(this, TerminalManager.class), connection, Context.BIND_AUTO_CREATE); if(pubkeydb == null) pubkeydb = new PubkeyDatabase(this); } @Override public void onStop() { super.onStop(); unbindService(connection); if(pubkeydb != null) { pubkeydb.close(); pubkeydb = null; } } @Override public void onCreate(Bundle icicle) { super.onCreate(icicle); setContentView(R.layout.act_pubkeylist); this.setTitle(String.format("%s: 
%s", getResources().getText(R.string.app_name), getResources().getText(R.string.title_pubkey_list))); // connect with hosts database and populate list pubkeydb = new PubkeyDatabase(this); updateList(); registerForContextMenu(getListView()); getListView().setOnItemClickListener(new OnItemClickListener() { public void onItemClick(AdapterView<?> adapter, View view, int position, long id) { PubkeyBean pubkey = (PubkeyBean) getListView().getItemAtPosition(position); boolean loaded = bound.isKeyLoaded(pubkey.getNickname()); // handle toggling key in-memory on/off if(loaded) { bound.removeKey(pubkey.getNickname()); updateHandler.sendEmptyMessage(-1); } else { handleAddKey(pubkey); } } }); clipboard = (ClipboardManager)getSystemService(CLIPBOARD_SERVICE); inflater = LayoutInflater.from(this); } /** * Read given file into memory as <code>byte[]</code>. */ protected static byte[] readRaw(File file) throws Exception { InputStream is = new FileInputStream(file); ByteArrayOutputStream os = new ByteArrayOutputStream(); int bytesRead; byte[] buffer = new byte[1024]; while ((bytesRead = is.read(buffer)) != -1) { os.write(buffer, 0, bytesRead); } os.flush(); os.close(); is.close(); return os.toByteArray(); } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); MenuItem generatekey = menu.add(R.string.pubkey_generate); generatekey.setIcon(android.R.drawable.ic_menu_manage); generatekey.setIntent(new Intent(PubkeyListActivity.this, GeneratePubkeyActivity.class)); MenuItem importkey = menu.add(R.string.pubkey_import); importkey.setIcon(android.R.drawable.ic_menu_upload); importkey.setOnMenuItemClickListener(new OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { FileChooser.selectFile(PubkeyListActivity.this, PubkeyListActivity.this, FileChooser.REQUEST_CODE_SELECT_FILE, getString(R.string.file_chooser_select_file,getString(R.string.select_for_key_import))); return true; } }); return true; } protected void 
handleAddKey(final PubkeyBean pubkey) { if (pubkey.isEncrypted()) { final View view = inflater.inflate(R.layout.dia_password, null); final EditText passwordField = (EditText)view.findViewById(android.R.id.text1); new AlertDialog.Builder(PubkeyListActivity.this) .setView(view) .setPositiveButton(R.string.pubkey_unlock, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { handleAddKey(pubkey, passwordField.getText().toString()); } }) .setNegativeButton(android.R.string.cancel, null).create().show(); } else { handleAddKey(pubkey, null); } } protected void handleAddKey(PubkeyBean pubkey, String password) { Object trileadKey = null; if(PubkeyDatabase.KEY_TYPE_IMPORTED.equals(pubkey.getType())) { // load specific key using pem format try { trileadKey = PEMDecoder.decode(new String(pubkey.getPrivateKey()).toCharArray(), password); } catch(Exception e) { String message = getResources().getString(R.string.pubkey_failed_add, pubkey.getNickname()); Log.e(TAG, message, e); Toast.makeText(PubkeyListActivity.this, message, Toast.LENGTH_LONG).show(); } } else { // load using internal generated format PrivateKey privKey = null; PublicKey pubKey = null; try { privKey = PubkeyUtils.decodePrivate(pubkey.getPrivateKey(), pubkey.getType(), password); pubKey = pubkey.getPublicKey(); } catch (Exception e) { String message = getResources().getString(R.string.pubkey_failed_add, pubkey.getNickname()); Log.e(TAG, message, e); Toast.makeText(PubkeyListActivity.this, message, Toast.LENGTH_LONG).show(); return; } // convert key to trilead format trileadKey = PubkeyUtils.convertToTrilead(privKey, pubKey); Log.d(TAG, "Unlocked key " + PubkeyUtils.formatKey(pubKey)); } if(trileadKey == null) return; Log.d(TAG, String.format("Unlocked key '%s'", pubkey.getNickname())); // save this key in memory bound.addKey(pubkey, trileadKey, true); updateHandler.sendEmptyMessage(-1); } @Override public void onCreateContextMenu(ContextMenu menu, View v, 
ContextMenu.ContextMenuInfo menuInfo) { // Create menu to handle deleting and editing pubkey AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) menuInfo; final PubkeyBean pubkey = (PubkeyBean) getListView().getItemAtPosition(info.position); menu.setHeaderTitle(pubkey.getNickname()); // TODO: option load/unload key from in-memory list // prompt for password as needed for passworded keys // cant change password or clipboard imported keys final boolean imported = PubkeyDatabase.KEY_TYPE_IMPORTED.equals(pubkey.getType()); final boolean loaded = bound.isKeyLoaded(pubkey.getNickname()); MenuItem load = menu.add(loaded ? R.string.pubkey_memory_unload : R.string.pubkey_memory_load); load.setOnMenuItemClickListener(new OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { if(loaded) { bound.removeKey(pubkey.getNickname()); updateHandler.sendEmptyMessage(-1); } else { handleAddKey(pubkey); //bound.addKey(nickname, trileadKey); } return true; } }); onstartToggle = menu.add(R.string.pubkey_load_on_start); onstartToggle.setVisible(!pubkey.isEncrypted()); onstartToggle.setCheckable(true); onstartToggle.setChecked(pubkey.isStartup()); onstartToggle.setOnMenuItemClickListener(new OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { // toggle onstart status pubkey.setStartup(!pubkey.isStartup()); pubkeydb.savePubkey(pubkey); updateHandler.sendEmptyMessage(-1); return true; } }); MenuItem changePassword = menu.add(R.string.pubkey_change_password); changePassword.setVisible(!imported); changePassword.setOnMenuItemClickListener(new OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { final View changePasswordView = inflater.inflate(R.layout.dia_changepassword, null, false); ((TableRow)changePasswordView.findViewById(R.id.old_password_prompt)) .setVisibility(pubkey.isEncrypted() ? 
View.VISIBLE : View.GONE); new AlertDialog.Builder(PubkeyListActivity.this) .setView(changePasswordView) .setPositiveButton(R.string.button_change, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { String oldPassword = ((EditText)changePasswordView.findViewById(R.id.old_password)).getText().toString(); String password1 = ((EditText)changePasswordView.findViewById(R.id.password1)).getText().toString(); String password2 = ((EditText)changePasswordView.findViewById(R.id.password2)).getText().toString(); if (!password1.equals(password2)) { new AlertDialog.Builder(PubkeyListActivity.this) .setMessage(R.string.alert_passwords_do_not_match_msg) .setPositiveButton(android.R.string.ok, null) .create().show(); return; } try { if (!pubkey.changePassword(oldPassword, password1)) new AlertDialog.Builder(PubkeyListActivity.this) .setMessage(R.string.alert_wrong_password_msg) .setPositiveButton(android.R.string.ok, null) .create().show(); else { pubkeydb.savePubkey(pubkey); updateHandler.sendEmptyMessage(-1); } } catch (Exception e) { Log.e(TAG, "Could not change private key password", e); new AlertDialog.Builder(PubkeyListActivity.this) .setMessage(R.string.alert_key_corrupted_msg) .setPositiveButton(android.R.string.ok, null) .create().show(); } } }) .setNegativeButton(android.R.string.cancel, null).create().show(); return true; } }); confirmUse = menu.add(R.string.pubkey_confirm_use); confirmUse.setCheckable(true); confirmUse.setChecked(pubkey.isConfirmUse()); confirmUse.setOnMenuItemClickListener(new OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { // toggle confirm use pubkey.setConfirmUse(!pubkey.isConfirmUse()); pubkeydb.savePubkey(pubkey); updateHandler.sendEmptyMessage(-1); return true; } }); MenuItem copyPublicToClipboard = menu.add(R.string.pubkey_copy_public); copyPublicToClipboard.setVisible(!imported); copyPublicToClipboard.setOnMenuItemClickListener(new OnMenuItemClickListener() { public 
boolean onMenuItemClick(MenuItem item) {
    // Copy the public key (OpenSSH text form) to the system clipboard.
    String keyString = PubkeyUtils.getPubkeyString(pubkey);
    if (keyString != null)
        clipboard.setText(keyString);
    return true;
} });

// "Export public key" — only offered for keys generated locally (not imported).
MenuItem exportPublic = menu.add(R.string.pubkey_export_public);
exportPublic.setVisible(!imported);
exportPublic.setOnMenuItemClickListener(new OnMenuItemClickListener() {
    public boolean onMenuItemClick(MenuItem item) {
        String keyString = PubkeyUtils.getPubkeyString(pubkey);
        if (keyString != null)
            saveKeyToFile(keyString, pubkey.getNickname(), KEYTYPE_PUBLIC);
        return true;
    }
});

// "Copy private key" — only for keys whose private material is readable here:
// unencrypted local keys, or imported keys (stored verbatim).
MenuItem copyPrivateToClipboard = menu.add(R.string.pubkey_copy_private);
copyPrivateToClipboard.setVisible(!pubkey.isEncrypted() || imported);
copyPrivateToClipboard.setOnMenuItemClickListener(new OnMenuItemClickListener() {
    public boolean onMenuItemClick(MenuItem item) {
        // null password: works only because encrypted keys were filtered out above.
        String keyString = PubkeyUtils.getPrivkeyString(pubkey, null);
        if (keyString != null)
            clipboard.setText(keyString);
        return true;
    }
});

// "Export private key" — same visibility rule as the clipboard copy above.
MenuItem exportPrivate = menu.add(R.string.pubkey_export_private);
exportPrivate.setVisible(!pubkey.isEncrypted() || imported);
exportPrivate.setOnMenuItemClickListener(new OnMenuItemClickListener() {
    public boolean onMenuItemClick(MenuItem item) {
        // Disabled password-unlock path, kept for reference; the menu item is
        // hidden for encrypted keys, so the null-password branch below suffices.
        /*
        if (pubkey.isEncrypted()) {
            final View view = inflater.inflate(R.layout.dia_password, null);
            final EditText passwordField = (EditText)view.findViewById(android.R.id.text1);
            new AlertDialog.Builder(PubkeyListActivity.this)
                .setView(view)
                .setPositiveButton(R.string.pubkey_unlock, new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int which) {
                        String keyString = PubkeyUtils.getPrivkeyString(pubkey, passwordField.getText().toString());
                        if (keyString != null)
                            saveKeyToFile(keyString, pubkey.getNickname(), KEYTYPE_PRIVATE);
                    }
                })
                .setNegativeButton(android.R.string.cancel, null).create().show();
        } else {
        */
        String keyString = PubkeyUtils.getPrivkeyString(pubkey, null);
        if (keyString != null)
            saveKeyToFile(keyString, pubkey.getNickname(), KEYTYPE_PRIVATE);
        // }
        return true;
    }
});

// "Delete key" — confirmed via dialog before touching memory or database.
MenuItem delete = menu.add(R.string.pubkey_delete);
delete.setOnMenuItemClickListener(new OnMenuItemClickListener() {
    public boolean onMenuItemClick(MenuItem item) {
        // prompt user to make sure they really want this
        new AlertDialog.Builder(PubkeyListActivity.this)
            .setMessage(getString(R.string.delete_message, pubkey.getNickname()))
            .setPositiveButton(R.string.delete_pos, new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int which) {
                    // dont forget to remove from in-memory
                    if(loaded)
                        bound.removeKey(pubkey.getNickname());
                    // delete from backend database and update gui
                    pubkeydb.deletePubkey(pubkey);
                    updateHandler.sendEmptyMessage(-1);
                }
            })
            .setNegativeButton(R.string.delete_neg, null).create().show();
        return true;
    }
});
}

// Posting any message to this handler refreshes the on-screen key list;
// callers in this file always send -1 (the value is ignored).
protected Handler updateHandler = new Handler() {
    @Override
    public void handleMessage(Message msg) {
        updateList();
    }
};

/** Reloads all keys from the database and rebinds the list adapter. */
protected void updateList() {
    if (pubkeydb == null) return;
    pubkeys = pubkeydb.allPubkeys();
    PubkeyAdapter adapter = new PubkeyAdapter(this, pubkeys);
    this.setListAdapter(adapter);
}

// Receives the file picked in the external FileChooser and hands it to the importer.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent intent) {
    super.onActivityResult(requestCode, resultCode, intent);
    switch (requestCode) {
    case FileChooser.REQUEST_CODE_SELECT_FILE:
        if (resultCode == RESULT_OK && intent != null) {
            File file = FileChooser.getSelectedFile(intent);
            if (file != null)
                readKeyFromFile(file);
        }
        break;
    }
}

/**
 * Imports a private key from disk into the key database. PKCS#8-framed
 * content is decoded into a key pair; anything else is treated as PEM and
 * stored verbatim as an "imported" key. Failures surface as a toast.
 *
 * @param file key file chosen by the user; rejected if larger than
 *             MAX_KEYFILE_SIZE
 */
private void readKeyFromFile(File file) {
    PubkeyBean pubkey = new PubkeyBean();
    // find the exact file selected
    pubkey.setNickname(file.getName());
    if (file.length() > MAX_KEYFILE_SIZE) {
        Toast.makeText(PubkeyListActivity.this, R.string.pubkey_import_parse_problem, Toast.LENGTH_LONG).show();
        return;
    }
    // parse the actual key once to check if its encrypted
    // then save original file contents into our database
    try {
        // readRaw() is defined elsewhere in this class — presumably reads the
        // whole file into a byte[]; TODO confirm.
        byte[] raw = readRaw(file);
        String data = new String(raw);
        if (data.startsWith(PubkeyUtils.PKCS8_START)) {
            // Strip the PKCS#8 BEGIN/END markers and base64-decode the payload.
            int start = data.indexOf(PubkeyUtils.PKCS8_START) + PubkeyUtils.PKCS8_START.length();
            int end = data.indexOf(PubkeyUtils.PKCS8_END);
            if (end > start) {
                // NOTE(review): substring(start, end - 1) drops the last payload
                // character before the END marker — off-by-one? verify intent.
                char[] encoded = data.substring(start, end - 1).toCharArray();
                // NOTE(review): this logs private-key material at debug level.
                Log.d(TAG, "encoded: " + new String(encoded));
                byte[] decoded = Base64.decode(encoded);
                KeyPair kp = PubkeyUtils.recoverKeyPair(decoded);
                pubkey.setType(kp.getPrivate().getAlgorithm());
                pubkey.setPrivateKey(kp.getPrivate().getEncoded());
                pubkey.setPublicKey(kp.getPublic().getEncoded());
            } else {
                Log.e(TAG, "Problem parsing PKCS#8 file; corrupt?");
                Toast.makeText(PubkeyListActivity.this, R.string.pubkey_import_parse_problem, Toast.LENGTH_LONG).show();
            }
        } else {
            // Not PKCS#8: parse as PEM just to learn the encryption flag,
            // then store the raw bytes untouched.
            PEMStructure struct = PEMDecoder.parsePEM(new String(raw).toCharArray());
            pubkey.setEncrypted(PEMDecoder.isPEMEncrypted(struct));
            pubkey.setType(PubkeyDatabase.KEY_TYPE_IMPORTED);
            pubkey.setPrivateKey(raw);
        }
        // write new value into database
        if (pubkeydb == null)
            pubkeydb = new PubkeyDatabase(this);
        pubkeydb.savePubkey(pubkey);
        updateHandler.sendEmptyMessage(-1);
    } catch(Exception e) {
        Log.e(TAG, "Problem parsing imported private key", e);
        Toast.makeText(PubkeyListActivity.this, R.string.pubkey_import_parse_problem, Toast.LENGTH_LONG).show();
    }
}

/**
 * Prompts for a path (pre-filled with an SD-card location derived from the
 * nickname) and writes the given key text to that file, reporting success or
 * failure via toast.
 *
 * @param keyString key material to write, as text
 * @param nickName  key nickname used to seed the suggested filename; may be null
 * @param keyType   KEYTYPE_PRIVATE or KEYTYPE_PUBLIC — selects dialog strings
 *                  and whether a ".pub" suffix is appended
 */
private void saveKeyToFile(final String keyString, final String nickName, int keyType) {
    final int titleId, messageId, successId, errorId;
    final String errorString;
    if (keyType == KEYTYPE_PRIVATE) {
        titleId = R.string.pubkey_private_save_as;
        messageId = R.string.pubkey_private_save_as_desc;
        successId = R.string.pubkey_private_export_success;
        errorId = R.string.pubkey_private_export_problem;
        errorString = "Error exporting private key";
    } else {
        titleId = R.string.pubkey_public_save_as;
        messageId = R.string.pubkey_public_save_as_desc;
        errorId = R.string.pubkey_public_export_problem;
        successId = R.string.pubkey_public_export_success;
        errorString = "Error exporting public key";
    }
    final String sdcard = Environment.getExternalStorageDirectory().toString();
    final EditText fileName = new EditText(PubkeyListActivity.this);
    fileName.setSingleLine();
    if (nickName != null) {
        if (keyType == KEYTYPE_PRIVATE)
            fileName.setText(sdcard + "/" + nickName.trim());
        else
            fileName.setText(sdcard + "/" + nickName.trim() + ".pub");
    }
    new AlertDialog.Builder(PubkeyListActivity.this)
        .setTitle(titleId)
        .setMessage(messageId)
        .setView(fileName)
        .setPositiveButton(R.string.save, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                File keyFile = new File(fileName.getText().toString());
                if (!keyFile.exists()) {
                    try {
                        keyFile.createNewFile();
                    } catch (IOException e) {
                        Log.e(TAG, errorString);
                        Toast.makeText(PubkeyListActivity.this, errorId, Toast.LENGTH_LONG).show();
                        return;
                    }
                }
                // NOTE(review): fout is never closed (no finally) — descriptor
                // leak on both the success and the failure path.
                FileOutputStream fout = null;
                try {
                    fout = new FileOutputStream(keyFile);
                    fout.write(keyString.getBytes(),0,keyString.getBytes().length);
                    fout.flush();
                } catch (Exception e) {
                    Log.e(TAG, errorString);
                    Toast.makeText(PubkeyListActivity.this, errorId, Toast.LENGTH_LONG).show();
                    return;
                }
                Toast.makeText(PubkeyListActivity.this,
                        getResources().getString(successId,keyFile.getPath().toString()),
                        Toast.LENGTH_LONG).show();
            }
        }).setNegativeButton(android.R.string.cancel, null).create().show();
}

/** Callback for in-app file selection; delegates to the same import path. */
public void fileSelected(File f) {
    Log.d(TAG, "File chooser returned " + f);
    readKeyFromFile(f);
}

/**
 * List adapter rendering one row per stored key: nickname, a caption with
 * the key type/description, and a state icon showing whether the key is
 * currently loaded in the bound agent service.
 */
class PubkeyAdapter extends ArrayAdapter<PubkeyBean> {
    private List<PubkeyBean> pubkeys;

    // Standard view-holder to avoid repeated findViewById on recycled rows.
    class ViewHolder {
        public TextView nickname;
        public TextView caption;
        public ImageView icon;
    }

    public PubkeyAdapter(Context context, List<PubkeyBean> pubkeys) {
        super(context, R.layout.item_pubkey, pubkeys);
        this.pubkeys = pubkeys;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        ViewHolder holder;
        if (convertView == null) {
            convertView = inflater.inflate(R.layout.item_pubkey, null, false);
            holder = new ViewHolder();
            holder.nickname = (TextView) convertView.findViewById(android.R.id.text1);
            holder.caption = (TextView) convertView.findViewById(android.R.id.text2);
            holder.icon = (ImageView) convertView.findViewById(android.R.id.icon1);
            convertView.setTag(holder);
        } else
            holder = (ViewHolder) convertView.getTag();
        PubkeyBean pubkey = pubkeys.get(position);
        holder.nickname.setText(pubkey.getNickname());
        boolean imported = PubkeyDatabase.KEY_TYPE_IMPORTED.equals(pubkey.getType());
        if (imported) {
            // Imported keys store raw PEM: re-parse just to label RSA vs DSA.
            try {
                PEMStructure struct = PEMDecoder.parsePEM(new String(pubkey.getPrivateKey()).toCharArray());
                String type = (struct.pemType == PEMDecoder.PEM_RSA_PRIVATE_KEY) ? "RSA" : "DSA";
                holder.caption.setText(String.format("%s unknown-bit", type));
            } catch (IOException e) {
                Log.e(TAG, "Error decoding IMPORTED public key at " + pubkey.getId(), e);
            }
        } else {
            try {
                holder.caption.setText(pubkey.getDescription());
            } catch (Exception e) {
                Log.e(TAG, "Error decoding public key at " + pubkey.getId(), e);
                holder.caption.setText(R.string.pubkey_unknown_format);
            }
        }
        // Icon only shown when the agent service is bound; "checked" state
        // marks keys that are currently loaded in memory.
        if (bound == null) {
            holder.icon.setVisibility(View.GONE);
        } else {
            holder.icon.setVisibility(View.VISIBLE);
            if (bound.isKeyLoaded(pubkey.getNickname()))
                holder.icon.setImageState(new int[] { android.R.attr.state_checked }, true);
            else
                holder.icon.setImageState(new int[] { }, true);
        }
        return convertView;
    }
}
}
package vxt.uielement.timedurationpicker; import vxt.abmulani.customtimepicker.R; import android.R.color; import android.app.Activity; import android.app.Dialog; import android.app.Service; import android.content.Context; import android.content.res.Resources; import android.graphics.Color; import android.graphics.drawable.ColorDrawable; import android.os.SystemClock; import android.os.Vibrator; import android.util.DisplayMetrics; import android.view.Gravity; import android.view.View; import android.view.View.OnClickListener; import android.view.Window; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.widget.Button; import android.widget.FrameLayout; import android.widget.TextView; /** * * */ public class CustomTimePicker { private int defaultTextColor; private Dialog _DIALOG; private OnTimePickerChangeListener listener = null; private Context mContext; private int radialWidth; private Vibrator mVibrator; private boolean mVibrate = false; private long mLastVibrate = 0; private RadialTextsView radialHoursTexts, radialMinutesText; private FrameLayout hoursLayout, minutesLayout; private int _HOURS = 12, _MINUTES = 0; private TextView hoursTextView, minutesTextView; private Animation enterAnim, exitAnim; private HoursPicker hourPicker; private MinutesPicker minutesPicker; private Button submitButton; private Activity mActivity; public CustomTimePicker(Activity mContext) { this(mContext, null); } public CustomTimePicker(Activity mContext, OnTimePickerChangeListener listener) { setOnTimePickerChangeListener(listener); this.mActivity = mContext; this.mContext = mContext; defaultTextColor = mContext.getResources().getColor(R.color.text_color); InitializeDialogView(); } public void setOnTimePickerChangeListener( OnTimePickerChangeListener listener) { if (listener != null) { this.listener = listener; } } private void InitializeDialogView() { _DIALOG = new Dialog(mContext); _DIALOG.setContentView(R.layout.dialog_layout); 
_DIALOG.getWindow().addFlags(Window.FEATURE_NO_TITLE); _DIALOG.getWindow().setBackgroundDrawable(new ColorDrawable(0)); mVibrator = (Vibrator) mContext .getSystemService(Service.VIBRATOR_SERVICE); int screenDensityHeight = getWindowHeight(); int screenDensityWidth = getWindowWidth(); radialWidth = Math.min(screenDensityHeight, screenDensityWidth); radialWidth = (int) (radialWidth * 0.6); hoursTextView = (TextView) _DIALOG.findViewById(R.id.date_hours); minutesTextView = (TextView) _DIALOG.findViewById(R.id.date_minutes); hoursTextView.setOnClickListener(onHoursClicklistener); minutesTextView.setOnClickListener(onMinutesClicklistener); enterAnim = AnimationUtils.loadAnimation(mContext, R.anim.bottom_up); exitAnim = AnimationUtils.loadAnimation(mContext, R.anim.bottom_down); InitHoursLayout(); InitMinutesLayout(); submitButton = (Button) _DIALOG.findViewById(R.id.submit_button); submitButton.setOnClickListener(onSubmitClicklistener); switchToHoursLayout(); } public Dialog create() { return _DIALOG; } public void setVibration(boolean doVibrate) { mVibrate = doVibrate; } OnClickListener onSubmitClicklistener = new OnClickListener() { @Override public void onClick(View v) { if (listener != null) listener.onSubmitClicked(get_HOURS(), get_MINUTES(), _DIALOG); } }; OnClickListener onMinutesClicklistener = new OnClickListener() { @Override public void onClick(View v) { switchToMinutesLayout(); } }; OnClickListener onHoursClicklistener = new OnClickListener() { @Override public void onClick(View v) { switchToHoursLayout(); } }; private void switchToMinutesLayout() { tryVibrate(15); if (minutesLayout.getVisibility() == View.GONE) { minutesTextView.setTextColor(Color.GREEN); minutesTextView.setSelected(true); hoursTextView.setSelected(false); minutesLayout.startAnimation(enterAnim); minutesLayout.setVisibility(View.VISIBLE); hoursTextView.setTextColor(defaultTextColor); hoursLayout.startAnimation(exitAnim); hoursLayout.setVisibility(View.GONE); minutesPicker.setVisible(true); 
hourPicker.setVisible(false); } } private void switchToHoursLayout() { mVibrator.vibrate(15); if (hoursLayout.getVisibility() == View.GONE) { minutesTextView.setTextColor(defaultTextColor); minutesTextView.setSelected(false); hoursTextView.setSelected(true); minutesLayout.startAnimation(exitAnim); minutesLayout.setVisibility(View.GONE); hoursTextView.setTextColor(Color.RED); hoursLayout.startAnimation(enterAnim); hoursLayout.setVisibility(View.VISIBLE); minutesPicker.setVisible(false); hourPicker.setVisible(true); } } private void InitMinutesLayout() { minutesLayout = (FrameLayout) _DIALOG.findViewById(R.id.minutes_layout); radialMinutesText = new RadialTextsView(mContext); radialMinutesText.setLayoutParams(new FrameLayout.LayoutParams( radialWidth, radialWidth, Gravity.CENTER)); Resources res = mContext.getResources(); int[] minutes = { 0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55 }; String[] minutesTexts = new String[12]; for (int i = 0; i < 12; i++) { minutesTexts[i] = String.format("%02d", minutes[i]); } radialMinutesText.initialize(res, minutesTexts, null, false, true); radialMinutesText.invalidate(); minutesPicker = new MinutesPicker(mContext,radialWidth); minutesPicker.setLayoutParams(new FrameLayout.LayoutParams(radialWidth, radialWidth, Gravity.CENTER)); minutesPicker.setOnSeekBarChangeListener(oncircleListener); minutesPicker.setBackgroundColor(color.holo_blue_bright); minutesLayout.addView(minutesPicker); minutesLayout.addView(radialMinutesText); } private void InitHoursLayout() { hoursLayout = (FrameLayout) _DIALOG.findViewById(R.id.hours_layout); radialHoursTexts = new RadialTextsView(mContext); radialHoursTexts.setLayoutParams(new FrameLayout.LayoutParams( radialWidth, radialWidth, Gravity.CENTER)); Resources res = mContext.getResources(); int[] hours = { 12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 }; String[] hoursTexts = new String[12]; for (int i = 0; i < 12; i++) { hoursTexts[i] = String.format("%d", hours[i]); } radialHoursTexts.initialize(res, 
hoursTexts, null, false, true); radialHoursTexts.invalidate(); hourPicker = new HoursPicker(mContext, radialWidth); hourPicker.setLayoutParams(new FrameLayout.LayoutParams(radialWidth, radialWidth, Gravity.CENTER)); hourPicker.setOnSeekBarChangeListener(oncircleListener); hoursLayout.addView(hourPicker); hoursLayout.addView(radialHoursTexts); } private void tryVibrate() { if (mVibrate && mVibrator != null) { long now = SystemClock.uptimeMillis(); // We want to try to vibrate each individual tick discretely. if (now - mLastVibrate >= 125) { mVibrator.vibrate(5); mLastVibrate = now; } } } private void tryVibrate(int val) { if (mVibrate && mVibrator != null) { mVibrator.vibrate(val); } } /** Gets Window width */ private int getWindowWidth() { DisplayMetrics metrics = new DisplayMetrics(); (mActivity).getWindowManager().getDefaultDisplay().getMetrics(metrics); return metrics.widthPixels; } OnCircleSeekBarChangeListener oncircleListener = new OnCircleSeekBarChangeListener() { @Override public void onProgressChanged(Object seekBar, int progress, boolean fromHour) { tryVibrate(); if (fromHour) { set_HOURS(progress); } else { set_MINUTES(progress); } if (listener != null) listener.onProgressChanged(get_HOURS(), get_MINUTES()); } @Override public void onScrollRelease(HoursPicker seekBar, int progress, boolean fromUser) { switchToMinutesLayout(); } }; private int get_HOURS() { return _HOURS; } private void set_HOURS(int _HOURS) { if (_HOURS < 0) { _HOURS = 0; } if (_HOURS > 11) { _HOURS = 0; } hoursTextView.setText(getPadding(_HOURS)); this._HOURS = _HOURS; } private int get_MINUTES() { return _MINUTES; } private void set_MINUTES(int _MINUTES) { if (_MINUTES < 0) { _MINUTES = 0; } if (_MINUTES >= 60) { _MINUTES = 59; } minutesTextView.setText(getPadding(_MINUTES)); this._MINUTES = _MINUTES; } private CharSequence getPadding(int _VALUE) { if (_VALUE < 10) { return "0" + _VALUE; } return _VALUE + ""; } /** Gets Height width */ private int getWindowHeight() { DisplayMetrics 
metrics = new DisplayMetrics(); (mActivity).getWindowManager().getDefaultDisplay().getMetrics(metrics); return metrics.heightPixels; } }
/* * Copyright 2015-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.cli.net; import static org.onosproject.cli.net.DevicesListCommand.getSortedDevices; import static org.onosproject.net.DeviceId.deviceId; import java.util.Comparator; import java.util.List; import java.util.concurrent.TimeUnit; import com.google.common.collect.Lists; import org.apache.karaf.shell.api.action.Argument; import org.apache.karaf.shell.api.action.Command; import org.apache.karaf.shell.api.action.Completion; import org.apache.karaf.shell.api.action.lifecycle.Service; import org.apache.karaf.shell.api.action.Option; import org.onosproject.cli.AbstractShellCommand; import org.onosproject.net.Device; import org.onosproject.net.DeviceId; import org.onosproject.net.PortNumber; import org.onosproject.net.device.DeviceService; import org.onosproject.net.device.PortStatistics; /** * Lists port statistic of all ports in the system. 
*/ @Service @Command(scope = "onos", name = "portstats", description = "Lists statistics of all ports in the system") public class DevicePortStatsCommand extends AbstractShellCommand { @Option(name = "-nz", aliases = "--nonzero", description = "Show only non-zero portstats", required = false, multiValued = false) private boolean nonzero = false; @Option(name = "-d", aliases = "--delta", description = "Show delta port statistics," + "only for the last polling interval", required = false, multiValued = false) private boolean delta = false; @Option(name = "-t", aliases = "--table", description = "Show delta port statistics in table format " + "using human readable unit", required = false, multiValued = false) private boolean table = false; @Argument(index = 0, name = "uri", description = "Device ID", required = false, multiValued = false) @Completion(DeviceIdCompleter.class) String uri = null; @Argument(index = 1, name = "portNumber", description = "Port Number", required = false, multiValued = false) @Completion(PortNumberCompleter.class) String portNumberStr = null; PortNumber portNumber = null; private static final String FORMAT = " port=%s, pktRx=%s, pktTx=%s, bytesRx=%s, bytesTx=%s, pktRxDrp=%s, pktTxDrp=%s, Dur=%s%s"; @Override protected void doExecute() { DeviceService deviceService = get(DeviceService.class); if (portNumberStr != null) { portNumber = PortNumber.fromString(portNumberStr); } if (uri == null) { for (Device d : getSortedDevices(deviceService)) { if (delta) { if (table) { printPortStatsDeltaTable(d.id(), deviceService.getPortDeltaStatistics(d.id())); } else { printPortStatsDelta(d.id(), deviceService.getPortDeltaStatistics(d.id())); } } else { printPortStats(d.id(), deviceService.getPortStatistics(d.id())); } } } else { Device d = deviceService.getDevice(deviceId(uri)); if (d == null) { error("No such device %s", uri); } else if (delta) { if (table) { printPortStatsDeltaTable(d.id(), deviceService.getPortDeltaStatistics(d.id())); } else { 
printPortStatsDelta(d.id(), deviceService.getPortDeltaStatistics(d.id())); } } else { printPortStats(d.id(), deviceService.getPortStatistics(d.id())); } } } /** * Prints Port Statistics. * * @param deviceId * @param portStats */ private void printPortStats(DeviceId deviceId, Iterable<PortStatistics> portStats) { print("deviceId=%s", deviceId); for (PortStatistics stat : sortByPort(portStats)) { if (isIrrelevant(stat)) { continue; } if (nonzero && stat.isZero()) { continue; } print(FORMAT, stat.portNumber(), stat.packetsReceived(), stat.packetsSent(), stat.bytesReceived(), stat.bytesSent(), stat.packetsRxDropped(), stat.packetsTxDropped(), stat.durationSec(), annotations(stat.annotations())); } } private boolean isIrrelevant(PortStatistics stat) { // TODO revisit logical port (e.g., ALL) handling return portNumber != null && !portNumber.equals(stat.portNumber()); } /** * Prints Port delta statistics. * * @param deviceId * @param portStats */ private void printPortStatsDelta(DeviceId deviceId, Iterable<PortStatistics> portStats) { final String formatDelta = " port=%s, pktRx=%s, pktTx=%s, bytesRx=%s, bytesTx=%s," + " rateRx=%s, rateTx=%s, pktRxDrp=%s, pktTxDrp=%s, interval=%s"; print("deviceId=%s", deviceId); for (PortStatistics stat : sortByPort(portStats)) { if (isIrrelevant(stat)) { continue; } if (nonzero && stat.isZero()) { continue; } float duration = ((float) stat.durationSec()) + (((float) stat.durationNano()) / TimeUnit.SECONDS.toNanos(1)); float rateRx = stat.bytesReceived() * 8 / duration; float rateTx = stat.bytesSent() * 8 / duration; print(formatDelta, stat.portNumber(), stat.packetsReceived(), stat.packetsSent(), stat.bytesReceived(), stat.bytesSent(), String.format("%.1f", rateRx), String.format("%.1f", rateTx), stat.packetsRxDropped(), stat.packetsTxDropped(), String.format("%.3f", duration)); } } /** * Prints human readable table with delta Port Statistics for specific device. 
* * @param deviceId * @param portStats */ private void printPortStatsDeltaTable(DeviceId deviceId, Iterable<PortStatistics> portStats) { final String formatDeltaTable = "|%5s | %7s | %7s | %7s | %7s | %7s | %7s | %7s | %7s |%9s |"; print("+---------------------------------------------------------------------------------------------------+"); print("| DeviceId = %-86s |", deviceId); print("|---------------------------------------------------------------------------------------------------|"); print("| | Receive | Transmit | Time [s] |"); print("| Port | Packets | Bytes | Rate bps | Drop | Packets | Bytes | Rate bps | Drop | Interval |"); print("|---------------------------------------------------------------------------------------------------|"); for (PortStatistics stat : sortByPort(portStats)) { if (isIrrelevant(stat)) { continue; } if (nonzero && stat.isZero()) { continue; } float duration = ((float) stat.durationSec()) + (((float) stat.durationNano()) / TimeUnit.SECONDS.toNanos(1)); float rateRx = duration > 0 ? stat.bytesReceived() * 8 / duration : 0; float rateTx = duration > 0 ? stat.bytesSent() * 8 / duration : 0; print(formatDeltaTable, stat.portNumber(), humanReadable(stat.packetsReceived()), humanReadable(stat.bytesReceived()), humanReadableBps(rateRx), humanReadable(stat.packetsRxDropped()), humanReadable(stat.packetsSent()), humanReadable(stat.bytesSent()), humanReadableBps(rateTx), humanReadable(stat.packetsTxDropped()), String.format("%.3f", duration)); } print("+---------------------------------------------------------------------------------------------------+"); } /** * Converts bytes to human readable string with Kilo, Mega, Giga, etc. 
* * @param bytes input byte array * @return human readble string */ public static String humanReadable(long bytes) { int unit = 1000; if (bytes < unit) { return String.format("%s ", bytes); } int exp = (int) (Math.log(bytes) / Math.log(unit)); Character pre = ("KMGTPE").charAt(exp - 1); return String.format("%.2f%s", bytes / Math.pow(unit, exp), pre); } /** * Converts bps to human readable format. * * @param bps input rate * @return human readble string */ public static String humanReadableBps(float bps) { int unit = 1000; if (bps < unit) { return String.format("%.0f ", bps); } int exp = (int) (Math.log(bps) / Math.log(unit)); Character pre = ("KMGTPE").charAt(exp - 1); return String.format("%.2f%s", bps / Math.pow(unit, exp), pre); } private static List<PortStatistics> sortByPort(Iterable<PortStatistics> portStats) { List<PortStatistics> portStatsList = Lists.newArrayList(portStats); portStatsList.sort(Comparator.comparing(ps -> ps.portNumber().toLong())); return portStatsList; } }
/*
 * SampleImporterFromLAICPMSLegacyCVSFile_UH.java
 *
 * Copyright 2006-2018 James F. Bowring, CIRDLES.org, and Earth-Time.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.earthtime.UPb_Redux.samples.sampleImporters;

import java.io.File;
import java.io.FileNotFoundException;
import java.math.BigDecimal;
import java.util.Scanner;
import java.util.Vector;
import org.earthtime.UPb_Redux.ReduxConstants;
import org.earthtime.UPb_Redux.fractions.FractionI;
import org.earthtime.UPb_Redux.fractions.UPbReduxFractions.UPbLegacyFraction;
import org.earthtime.UPb_Redux.valueModels.ValueModel;
import org.earthtime.UPb_Redux.valueModels.definedValueModels.PercentDiscordance;
import org.earthtime.dataDictionaries.RadDates;
import org.earthtime.fractions.ETFractionInterface;

/**
 * Importer for a legacy LA-ICP-MS CSV layout: each fraction row carries
 * U/Th concentrations, measured and radiogenic isotope ratios with 2-sigma
 * percent uncertainties, and 206/238, 207/235, 207/206 ages with 2-sigma
 * absolute uncertainties. Columns are consumed strictly in order via a
 * single advancing index, so the column walk below must not be reordered.
 *
 * @author James F. Bowring
 */
public class SampleImporterFromLAICPMSLegacyCVSFile_UH //
        extends AbstractSampleImporterFromLegacyCSVFile {

    /**
     * Notes: 1. Built for Tom Lapin University of Houston
     *
     * Parses the CSV line by line. The aliquot name is taken from the line
     * after the "Spot" header row; every subsequent non-blank row (first
     * cell != "0") becomes one legacy UPb fraction. 2-sigma uncertainties in
     * the file are halved to the 1-sigma values the fraction models store.
     *
     * @param file legacy CSV file to import
     * @return the fractions parsed from the file, in file order
     * @throws FileNotFoundException if the file cannot be opened
     */
    @Override
    protected Vector<ETFractionInterface> extractFractionsFromFile ( File file )
            throws FileNotFoundException {
        Vector<ETFractionInterface> retFractions = new Vector<>();
        boolean readingFractions = false;

        //first use a Scanner to get each line
        Scanner scanner = new Scanner( file );
        try {
            while (scanner.hasNextLine()) {
                // get content of line
                Vector<String> myFractionData = processLegacyCSVLine( scanner.nextLine() );

                // determine content of line where a zero for fraction name = blank line
                if ( readingFractions &&  ! myFractionData.get( 0 ).equalsIgnoreCase( "0" ) ) {
                    // process fraction line
                    System.out.println( "Reading Fraction " + myFractionData.get( 0 ) );

                    FractionI myFraction = new UPbLegacyFraction( "NONE" );
                    myFraction.setRatioType( "UPb" );

                    // Single advancing column cursor: every get(index++) below
                    // consumes exactly one CSV column, left to right.
                    int index = 0;

                    myFraction.setFractionID( myFractionData.get( index ++ ) );
                    myFraction.setGrainID( myFraction.getFractionID() );

                    // concentration U (ppm in file -> movePointLeft(6) to fraction)
                    String ratioName = "concU";
                    myFraction.getCompositionalMeasureByName( ratioName )//
                            .setValue( readCSVCell( myFractionData.get( index ++ ) ).//
                                    movePointLeft( 6 ) );

                    // concentration Th, same ppm scaling
                    ratioName = "concTh";
                    myFraction.getCompositionalMeasureByName( ratioName )//
                            .setValue( readCSVCell( myFractionData.get( index ++ ) ).//
                                    movePointLeft( 6 ) );

                    //Sets Ratio ThU
                    myFraction.getCompositionalMeasureByName( "rTh_Usample" ).//
                            setValue( readCSVCell( myFractionData.get( index ++ ) ) );

                    // skip column e
                    index ++;

                    //Sets r206_204m ratio
                    ratioName = "r206_204m";
                    myFraction.getMeasuredRatioByName( ratioName ).//
                            setValue( readCSVCell( myFractionData.get( index ++ ) ) );

                    //Sets r206_238r ratio and sigma
                    ratioName = "r206_238r";
                    myFraction.getRadiogenicIsotopeRatioByName( ratioName ).//
                            setValue( readCSVCell( myFractionData.get( index ++ ) ) );
                    ValueModel ratio = myFraction.getRadiogenicIsotopeRatioByName( ratioName );
                    // 1-sigma from 2-sigma%
                    // NOTE(review): new BigDecimal( 2.0 ) uses the double ctor;
                    // exact here, but the String ctor is the safer idiom.
                    BigDecimal oneSigmaPct = readCSVCell( myFractionData.get( index ++ ) )//
                            .divide(new BigDecimal( 2.0 ) , //
                                    ReduxConstants.mathContext15 );
                    myFraction.getRadiogenicIsotopeRatioByName( ratioName )//
                            .setOneSigma( ValueModel.convertOneSigmaPctToAbsIfRequired( ratio, oneSigmaPct ) );

                    //skip column i
                    index ++;

                    //Sets r207_235r ratio and sigma
                    ratioName = "r207_235r";
                    myFraction.getRadiogenicIsotopeRatioByName( ratioName ).//
                            setValue( readCSVCell( myFractionData.get( index ++ ) ) );
                    ratio = myFraction.getRadiogenicIsotopeRatioByName( ratioName );
                    // 1-sigma from 2-sigma%
                    oneSigmaPct = readCSVCell( myFractionData.get( index ++ ) )//
                            .divide(new BigDecimal( 2.0 ) , //
                                    ReduxConstants.mathContext15 );
                    myFraction.getRadiogenicIsotopeRatioByName( ratioName )//
                            .setOneSigma( ValueModel.convertOneSigmaPctToAbsIfRequired( ratio, oneSigmaPct ) );

                    //skip column l
                    index ++;

                    //Sets r207_206r ratio and sigma
                    ratioName = "r207_206r";
                    myFraction.getRadiogenicIsotopeRatioByName( ratioName ).//
                            setValue( readCSVCell( myFractionData.get( index ++ ) ) );
                    ratio = myFraction.getRadiogenicIsotopeRatioByName( ratioName );
                    // 1-sigma from 2-sigma%
                    oneSigmaPct = readCSVCell( myFractionData.get( index ++ ) )//
                            .divide(new BigDecimal( 2.0 ) , //
                                    ReduxConstants.mathContext15 );
                    myFraction.getRadiogenicIsotopeRatioByName( ratioName )//
                            .setOneSigma( ValueModel.convertOneSigmaPctToAbsIfRequired( ratio, oneSigmaPct ) );

                    //Sets rhoR206_238r__r207_235r (correlation Coeff)
                    myFraction.getRadiogenicIsotopeRatioByName( "rhoR206_238r__r207_235r" ).//
                            setValue( readCSVCell( myFractionData.get( index ++ ) ) );
                    // derive the Tera-Wasserburg correlation from the one just set
                    ((UPbLegacyFraction) myFraction).calculateTeraWasserburgRho();

                    //skip column p
                    index ++;

                    //Sets age206_238r (Ma in file -> movePointRight(6) to years;
                    // 2-sigma halved to 1-sigma)
                    ratioName = RadDates.age206_238r.getName();
                    myFraction.getRadiogenicIsotopeDateByName( ratioName )//
                            .setValue( readCSVCell( myFractionData.get( index ++ ) ).//
                                    movePointRight( 6 ) );
                    myFraction.getRadiogenicIsotopeDateByName( ratioName )//
                            .setOneSigma( readCSVCell( myFractionData.get( index ++ ) ).//
                                    divide( new BigDecimal( 2.0 ) ).//
                                    movePointRight( 6 ) );

                    //skip column s
                    index ++;

                    //Sets age207_235r
                    ratioName = RadDates.age207_235r.getName();
                    myFraction.getRadiogenicIsotopeDateByName( ratioName )//
                            .setValue( readCSVCell( myFractionData.get( index ++ ) ).//
                                    movePointRight( 6 ) );
                    myFraction.getRadiogenicIsotopeDateByName( ratioName )//
                            .setOneSigma( readCSVCell( myFractionData.get( index ++ ) ).//
                                    divide( new BigDecimal( 2.0 ) ).//
                                    movePointRight( 6 ) );

                    //skip column v
                    index ++;

                    //Sets age207_206r
                    ratioName = RadDates.age207_206r.getName();
                    myFraction.getRadiogenicIsotopeDateByName( ratioName )//
                            .setValue( readCSVCell( myFractionData.get( index ++ ) ).//
                                    movePointRight( 6 ) );
                    myFraction.getRadiogenicIsotopeDateByName( ratioName )//
                            .setOneSigma( readCSVCell( myFractionData.get( index ++ ) ).//
                                    divide( new BigDecimal( 2.0 ) ).//
                                    movePointRight( 6 ) );

                    // calculate percentDiscordance from the 206/238 and 207/206 ages
                    ValueModel percentDiscordance = new PercentDiscordance();
                    myFraction.setRadiogenicIsotopeDateByName( RadDates.percentDiscordance, percentDiscordance );
                    percentDiscordance.calculateValue(
                            new ValueModel[]{
                                myFraction.getRadiogenicIsotopeDateByName( RadDates.age206_238r ),
                                myFraction.getRadiogenicIsotopeDateByName( RadDates.age207_206r )},
                            null );

                    retFractions.add( myFraction );
                }

                if ( (myFractionData.get( 0 ).compareToIgnoreCase( "Spot" ) == 0)//
                        // ||
                        // (myFractionData.get( 0 ).compareToIgnoreCase( "0" ) == 0)
                        ) {
                    // the next line contains the aliquot name
                    // NOTE(review): nextLine() here has no hasNextLine() guard —
                    // a file ending right after the header would throw.
                    myFractionData = processLegacyCSVLine( scanner.nextLine() );
                    aliquotName = myFractionData.get( 0 );
                    // and then the next line(s) contain fraction data or are blank
                    readingFractions = true;
                }
            }
        } finally {
            //ensure the underlying stream is always closed
            scanner.close();
        }

        return retFractions;
    }

    /**
     * No-op placeholder retained for interface/menu parity with other
     * legacy importers in this package.
     */
    public static void writeAndOpenCSVFileOfLegacyDataSampleFieldNames () {
    }
}
package org.fcrepo.server.validation.ecm;

import org.fcrepo.server.Context;
import org.fcrepo.server.errors.LowlevelStorageException;
import org.fcrepo.server.errors.ServerException;
import org.fcrepo.server.storage.DOReader;
import org.fcrepo.server.storage.RepositoryReader;
import org.fcrepo.server.storage.types.Datastream;
import org.fcrepo.server.storage.types.RelationshipTuple;
import org.fcrepo.server.storage.types.Validation;
import org.fcrepo.server.validation.ecm.jaxb.DsCompositeModel;
import org.fcrepo.server.validation.ecm.jaxb.DsTypeModel;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;
import org.semanticweb.owlapi.util.OWLClassExpressionVisitorAdapter;
import org.semanticweb.owlapi.util.OWLOntologyMerger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.xml.bind.JAXB;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;

/**
 * Validates an object's relationships against the OWL ontologies published by its
 * content models.
 * <p/>
 * Each content model may carry an ONTOLOGY datastream. All such ontologies are
 * merged, OWL restrictions (cardinality, someValuesFrom, allValuesFrom) are
 * collected for the classes corresponding to the object and its datastreams, and
 * every restriction is checked against the object's actual relationships.
 * Violations are recorded on the supplied {@link Validation} object.
 */
public class OwlValidator {

    private static final Logger logger = LoggerFactory.getLogger(OwlValidator.class);

    /** Used to resolve content-model objects and relation targets. */
    private RepositoryReader doMgr;

    public OwlValidator(RepositoryReader doMgr) {
        this.doMgr = doMgr;
    }

    /**
     * Validates the given object against the ontologies of its content models.
     * <p/>
     * Steps:
     * 1. retrieve the list of content models from the object;
     * 2. load the ONTOLOGY datastream (if any) from each of these content models;
     * 3. merge the ontologies, collect the OWL restrictions for each datastream
     *    class and for the object class, and check the object's relationships
     *    against them, recording violations on {@code validation}.
     *
     * @param context             the call context used for repository reads
     * @param asOfDateTime        if non-null, content models created at or after this
     *                            instant are ignored, and datastreams are resolved
     *                            as of this time
     * @param currentObjectReader reader for the object being validated
     * @param validation          accumulator for validation problems; marked invalid
     *                            on any violation
     * @throws ServerException if repository access fails
     */
    public void validate(Context context,
                         Date asOfDateTime,
                         DOReader currentObjectReader,
                         Validation validation) throws ServerException {

        OWLOntologyManager owlManager = OWLManager.createOWLOntologyManager();

        // Step 1 + 2: load the ontology (if any) from each content model.
        List<String> contentmodels = currentObjectReader.getContentModels();
        for (String contentmodel : contentmodels) {
            contentmodel = contentmodel.substring("info:fedora/".length());
            DOReader contentmodelReader;
            try {
                contentmodelReader = doMgr.getReader(false, context, contentmodel);
            } catch (LowlevelStorageException e) {
                // Content model object could not be found; nothing to validate against.
                continue;
            }
            if (asOfDateTime != null
                    && !contentmodelReader.getCreateDate().before(asOfDateTime)) {
                // Disregard content models created after the asOfDateTime.
                continue;
            }
            Datastream ontologyDS =
                    contentmodelReader.GetDatastream("ONTOLOGY", asOfDateTime);
            if (ontologyDS == null) {
                // No ontology in this content model; continue with the next one.
                continue;
            }
            InputStream ontologyStream = ontologyDS.getContentStream();
            try {
                owlManager.loadOntologyFromOntologyDocument(ontologyStream);
            } catch (OWLOntologyCreationException e) {
                logger.debug("Failed to load ontology for object "
                        + currentObjectReader.GetObjectPID(), e);
            } finally {
                // FIX: the stream was previously never closed (resource leak).
                try {
                    ontologyStream.close();
                } catch (IOException e) {
                    logger.debug("Failed to close ontology stream for object "
                            + currentObjectReader.GetObjectPID(), e);
                }
            }
        }

        // Merge all loaded ontologies into one, so restrictions from every
        // content model are visible together.
        OWLOntologyMerger merger = new OWLOntologyMerger(owlManager);
        IRI mergedOntologyIRI = IRI.create("http://www.semanticweb.com/mymergedont");
        OWLOntology mergedOntology = null;
        try {
            mergedOntology = merger.createMergedOntology(owlManager, mergedOntologyIRI);
        } catch (OWLOntologyCreationException e) {
            logger.debug("Failed to merge ontologies for object "
                    + currentObjectReader.GetObjectPID(), e);
        }
        if (mergedOntology == null) {
            // FIX: previously execution fell through to a guaranteed
            // NullPointerException on mergedOntology below. Without a merged
            // ontology there are no OWL restrictions to enforce, so skip
            // OWL validation for this object.
            logger.warn("Could not merge ontologies for object "
                    + currentObjectReader.GetObjectPID()
                    + "; skipping OWL validation");
            return;
        }

        // Make a new restriction visitor over the merged ontology.
        RestrictionVisitor restrictionVisitor =
                new RestrictionVisitor(Collections.singleton(mergedOntology));

        Set<RelationshipTuple> relations = currentObjectReader.getRelationships();

        // Step 3a: check restrictions declared on each datastream class.
        for (String contentmodel : contentmodels) {
            List<String> datastreamNames =
                    getDatastreamNames(context, contentmodel, asOfDateTime);
            for (String datastreamName : datastreamNames) {
                IRI datastreamDeclaration = toIRI(contentmodel, datastreamName);
                OWLClass datastreamClass =
                        owlManager.getOWLDataFactory().getOWLClass(datastreamDeclaration);
                for (OWLSubClassOfAxiom ax
                        : mergedOntology.getSubClassAxiomsForSubClass(datastreamClass)) {
                    // Ask the superclass to accept a visit from the
                    // RestrictionVisitor - if it is a restriction the visitor
                    // records it; anything else is ignored.
                    ax.getSuperClass().accept(restrictionVisitor);
                }
                String datastream = "info:fedora/" + currentObjectReader.GetObjectPID()
                        + "/" + datastreamName;
                Set<RelationshipTuple> relationsAbout =
                        getRelationsSubjectTo(relations, datastream);
                checkMinCardinality(datastream, relationsAbout, restrictionVisitor, validation);
                checkMaxCardinality(datastream, relationsAbout, restrictionVisitor, validation);
                checkExactCardinality(datastream, relationsAbout, restrictionVisitor, validation);
                checkSomeValuesFrom(datastream, relationsAbout, restrictionVisitor, validation, context);
                checkAllValuesFrom(datastream, relationsAbout, restrictionVisitor, validation, context);
                restrictionVisitor.reset();
            }
        }

        // Step 3b: check restrictions declared on the object class itself.
        for (String contentmodel : contentmodels) {
            IRI objectDeclaration = toIRI(contentmodel, null);
            OWLClass objectClass =
                    owlManager.getOWLDataFactory().getOWLClass(objectDeclaration);
            for (OWLSubClassOfAxiom ax
                    : mergedOntology.getSubClassAxiomsForSubClass(objectClass)) {
                ax.getSuperClass().accept(restrictionVisitor);
            }
            String pid = "info:fedora/" + currentObjectReader.GetObjectPID();
            Set<RelationshipTuple> relationsAbout = getRelationsSubjectTo(relations, pid);
            checkMinCardinality(pid, relationsAbout, restrictionVisitor, validation);
            checkMaxCardinality(pid, relationsAbout, restrictionVisitor, validation);
            checkExactCardinality(pid, relationsAbout, restrictionVisitor, validation);
            checkSomeValuesFrom(pid, relationsAbout, restrictionVisitor, validation, context);
            checkAllValuesFrom(pid, relationsAbout, restrictionVisitor, validation, context);
            restrictionVisitor.reset();
        }
    }

    /**
     * Returns the subset of {@code relations} whose subject equals
     * {@code subject} (an object pid or datastream URI).
     */
    private Set<RelationshipTuple> getRelationsSubjectTo(Set<RelationshipTuple> relations,
                                                         String subject) {
        HashSet<RelationshipTuple> found = new HashSet<RelationshipTuple>();
        for (RelationshipTuple relation : relations) {
            if (relation.subject.equals(subject)) {
                found.add(relation);
            }
        }
        return found;
    }

    /**
     * Checks owl:allValuesFrom restrictions: every target of a restricted
     * relation must belong to the required class.
     */
    private void checkAllValuesFrom(String subject,
                                    Set<RelationshipTuple> relations,
                                    RestrictionVisitor restrictionVisitor,
                                    Validation validation,
                                    Context context) throws ServerException {
        for (Map.Entry<OWLObjectProperty, OWLClass> entry
                : restrictionVisitor.getAllValuesFrom().entrySet()) {
            String ontologyrelation = entry.getKey().getIRI().toString();
            OWLClass requiredclass = entry.getValue();
            String requiredTarget = requiredclass.getIRI().toString();
            for (RelationshipTuple relation : relations) {
                if (!relation.predicate.equals(ontologyrelation)) {
                    continue; // not one of the restricted relations
                }
                String target = relation.object;
                List<String> classes;
                try {
                    classes = getClassesOfTarget(target, context);
                } catch (ServerException e) {
                    // Target object could not be read: report and move on.
                    validation.setValid(false);
                    validation.getObjectProblems().add(Errors.missingObjectViolation(
                            subject, ontologyrelation, requiredTarget, target));
                    continue;
                }
                boolean found = false;
                for (String aClass : classes) {
                    if (aClass.equals(requiredTarget)) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    validation.setValid(false);
                    validation.getObjectProblems().add(Errors.allValuesFromViolation(
                            subject, ontologyrelation, requiredTarget));
                }
            }
        }
    }

    /**
     * Computes the OWL class IRIs of a relation target.
     * <p/>
     * A target of the form {@code info:fedora/pid} yields one class per content
     * model of the object; {@code info:fedora/pid/dsid} yields the datastream
     * classes instead. Targets outside the {@code info:fedora/} scheme have no
     * classes.
     *
     * @throws ServerException if the target object cannot be read
     */
    private List<String> getClassesOfTarget(String target, Context context)
            throws ServerException {
        List<String> classes = new ArrayList<String>();
        if (!target.startsWith("info:fedora/")) {
            // External URI: not a repository object, so it has no classes.
            return classes;
        }
        target = target.substring("info:fedora/".length());

        int lastIndexOfSlash = target.lastIndexOf("/");
        String targetPid;
        String dsname = "";
        if (lastIndexOfSlash > 0) {
            // Target is a datastream.
            targetPid = target.substring(0, lastIndexOfSlash);
            dsname = "datastreams/" + target.substring(lastIndexOfSlash + 1) + "/";
        } else {
            // Target is an object.
            targetPid = target;
        }
        DOReader targetReader = doMgr.getReader(false, context, targetPid);
        for (String targetContentModel : targetReader.getContentModels()) {
            classes.add(targetContentModel + "#" + dsname + "class");
        }
        return classes;
    }

    /**
     * Checks owl:someValuesFrom restrictions: at least one relation with the
     * restricted predicate must exist, and at least one of its targets must
     * belong to the required class.
     */
    private void checkSomeValuesFrom(String subject,
                                     Set<RelationshipTuple> relations,
                                     RestrictionVisitor restrictionVisitor,
                                     Validation validation,
                                     Context context) throws ServerException {
        for (Map.Entry<OWLObjectProperty, OWLClass> entry
                : restrictionVisitor.getSomeValuesFrom().entrySet()) {
            String ontologyrelation = entry.getKey().getIRI().toString();
            OWLClass requiredclass = entry.getValue();
            String requiredTarget = requiredclass.getIRI().toString();

            if (countRelations(ontologyrelation, relations) < 1) {
                validation.setValid(false);
                validation.getObjectProblems().add(
                        Errors.someValuesFromViolationNoSuchRelation(
                                subject, ontologyrelation, requiredTarget));
                continue;
            }
            boolean found = false;
            for (RelationshipTuple relation : relations) {
                if (!relation.predicate.equals(ontologyrelation)) {
                    continue;
                }
                List<String> classes;
                try {
                    classes = getClassesOfTarget(relation.object, context);
                } catch (ServerException e) {
                    // Target object not found; try the next relation.
                    continue;
                }
                for (String aClass : classes) {
                    if (aClass.equals(requiredTarget)) {
                        found = true;
                        break;
                    }
                }
            }
            if (!found) {
                validation.setValid(false);
                validation.getObjectProblems().add(
                        Errors.someValuesFromViolationWrongClassOfTarget(
                                subject, ontologyrelation, requiredTarget));
            }
        }
    }

    /** Checks owl:minCardinality restrictions against the actual relation counts. */
    private void checkMinCardinality(String subject,
                                     Set<RelationshipTuple> relations,
                                     RestrictionVisitor restrictionVisitor,
                                     Validation validation) {
        for (Map.Entry<OWLObjectProperty, Integer> entry
                : restrictionVisitor.getMinCardinality().entrySet()) {
            String ontologyrelation = entry.getKey().getIRI().toString();
            int count = countRelations(ontologyrelation, relations);
            int min = entry.getValue();
            if (count < min) {
                validation.setValid(false);
                validation.getObjectProblems()
                        .add(Errors.minCardinalityViolation(subject, ontologyrelation, min));
            }
        }
    }

    /** Checks owl:maxCardinality restrictions against the actual relation counts. */
    private void checkMaxCardinality(String subject,
                                     Set<RelationshipTuple> relations,
                                     RestrictionVisitor restrictionVisitor,
                                     Validation validation) {
        for (Map.Entry<OWLObjectProperty, Integer> entry
                : restrictionVisitor.getMaxCardinality().entrySet()) {
            String ontologyrelation = entry.getKey().getIRI().toString();
            int count = countRelations(ontologyrelation, relations);
            int max = entry.getValue();
            if (count > max) {
                validation.setValid(false);
                validation.getObjectProblems()
                        .add(Errors.maxCardinalityViolation(subject, ontologyrelation, max));
            }
        }
    }

    /** Checks owl:cardinality (exact) restrictions against the actual relation counts. */
    private void checkExactCardinality(String subject,
                                       Set<RelationshipTuple> relations,
                                       RestrictionVisitor restrictionVisitor,
                                       Validation validation) {
        for (Map.Entry<OWLObjectProperty, Integer> entry
                : restrictionVisitor.getCardinality().entrySet()) {
            String ontologyrelation = entry.getKey().getIRI().toString();
            int count = countRelations(ontologyrelation, relations);
            Integer exact = entry.getValue();
            if (count != exact) {
                validation.setValid(false);
                validation.getObjectProblems()
                        .add(Errors.exactCardinalityViolation(subject, ontologyrelation, exact));
            }
        }
    }

    /**
     * Private utility method. Counts the number of relations with a given name
     * in a set of relations.
     *
     * @param relationName    the relation (predicate) name
     * @param objectRelations the set of relations, may be null
     * @return the number of relations with relationName in the set
     */
    private int countRelations(String relationName, Set<RelationshipTuple> objectRelations) {
        if (objectRelations == null) {
            return 0;
        }
        int count = 0;
        for (RelationshipTuple objectRelation : objectRelations) {
            if (objectRelation.predicate.equals(relationName)) {
                count++;
            }
        }
        return count;
    }

    /**
     * Builds the class IRI for a content model's object class
     * ({@code <cm>#class}) or one of its datastream classes
     * ({@code <cm>#datastreams/<dsid>/class} when {@code datastreamName} is
     * non-null).
     */
    private IRI toIRI(String contentmodel, String datastreamName) {
        if (!contentmodel.startsWith("info:fedora/")) {
            contentmodel = "info:fedora/" + contentmodel;
        }
        if (datastreamName != null) {
            datastreamName = "datastreams/" + datastreamName + "/";
        } else {
            datastreamName = "";
        }
        return IRI.create(contentmodel + "#" + datastreamName + "class");
    }

    /**
     * Reads the DS-COMPOSITE-MODEL datastream of a content model and returns
     * the IDs of the datastreams it declares. Returns an empty list when the
     * content model has no DS-COMPOSITE-MODEL.
     */
    private List<String> getDatastreamNames(Context context,
                                            String contentmodel,
                                            Date asOfDateTime) throws ServerException {
        ArrayList<String> names = new ArrayList<String>();
        if (contentmodel.startsWith("info:fedora/")) {
            contentmodel = contentmodel.substring("info:fedora/".length());
        }
        DOReader reader = doMgr.getReader(false, context, contentmodel);
        Datastream dscompmodelDS = reader.GetDatastream("DS-COMPOSITE-MODEL", asOfDateTime);
        if (dscompmodelDS == null) {
            // No DS-COMPOSITE-MODEL, that's okay; the content model declares
            // no datastreams.
            return names;
        }
        DsCompositeModel dscompobject =
                JAXB.unmarshal(dscompmodelDS.getContentStream(context), DsCompositeModel.class);
        for (DsTypeModel typeModel : dscompobject.getDsTypeModel()) {
            names.add(typeModel.getID());
        }
        return names;
    }

    /**
     * Visits class expressions and collects the properties that are restricted
     * by cardinality, someValuesFrom and allValuesFrom restrictions.
     * Not thread-safe; call {@link #reset()} between uses.
     */
    private static class RestrictionVisitor extends OWLClassExpressionVisitorAdapter {

        // When true, restrictions inherited via subclass axioms are processed too.
        private boolean processInherited = true;

        // Classes already visited, to avoid infinite loops on taxonomy cycles.
        private Set<OWLClass> processedClasses;

        private Map<OWLObjectProperty, OWLClass> someValuesFrom;
        private Map<OWLObjectProperty, OWLClass> allValuesFrom;
        private Map<OWLObjectProperty, Integer> minCardinality;
        private Map<OWLObjectProperty, Integer> cardinality;
        private Map<OWLObjectProperty, Integer> maxCardinality;

        // Ontologies searched for superclass axioms.
        private Set<OWLOntology> onts;

        public RestrictionVisitor(Set<OWLOntology> onts) {
            someValuesFrom = new HashMap<OWLObjectProperty, OWLClass>();
            allValuesFrom = new HashMap<OWLObjectProperty, OWLClass>();
            minCardinality = new HashMap<OWLObjectProperty, Integer>();
            cardinality = new HashMap<OWLObjectProperty, Integer>();
            maxCardinality = new HashMap<OWLObjectProperty, Integer>();
            processedClasses = new HashSet<OWLClass>();
            this.onts = onts;
        }

        @SuppressWarnings("unused")
        public void setProcessInherited(boolean processInherited) {
            this.processInherited = processInherited;
        }

        @Override
        public void visit(OWLClass desc) {
            if (processInherited && !processedClasses.contains(desc)) {
                // If we are processing inherited restrictions then we
                // recursively visit named supers. Note that we need to keep
                // track of the classes that we have processed so that we don't
                // get caught out by cycles in the taxonomy.
                processedClasses.add(desc);
                for (OWLOntology ont : onts) {
                    for (OWLSubClassOfAxiom ax : ont.getSubClassAxiomsForSubClass(desc)) {
                        ax.getSuperClass().accept(this);
                    }
                }
            }
        }

        /** Clears all collected restrictions and the visited-class memory. */
        public void reset() {
            processedClasses.clear();
            someValuesFrom.clear();
            allValuesFrom.clear();
            minCardinality.clear();
            cardinality.clear();
            maxCardinality.clear();
        }

        @Override
        public void visit(OWLObjectExactCardinality desc) {
            cardinality.put(desc.getProperty().asOWLObjectProperty(), desc.getCardinality());
        }

        @Override
        public void visit(OWLObjectMaxCardinality desc) {
            maxCardinality.put(desc.getProperty().asOWLObjectProperty(), desc.getCardinality());
        }

        @Override
        public void visit(OWLObjectMinCardinality desc) {
            minCardinality.put(desc.getProperty().asOWLObjectProperty(), desc.getCardinality());
        }

        @Override
        public void visit(OWLObjectAllValuesFrom desc) {
            allValuesFrom.put(desc.getProperty().asOWLObjectProperty(),
                    desc.getFiller().asOWLClass());
        }

        @Override
        public void visit(OWLObjectSomeValuesFrom desc) {
            // Called when a class expression is an existential (someValuesFrom)
            // restriction and it asks us to visit it.
            someValuesFrom.put(desc.getProperty().asOWLObjectProperty(),
                    desc.getFiller().asOWLClass());
        }

        // Data-property restrictions are deliberately not validated.
        @Override
        public void visit(OWLDataSomeValuesFrom desc) {
        }

        @Override
        public void visit(OWLDataAllValuesFrom desc) {
        }

        @Override
        public void visit(OWLDataMinCardinality desc) {
        }

        @Override
        public void visit(OWLDataExactCardinality desc) {
        }

        @Override
        public void visit(OWLDataMaxCardinality desc) {
        }

        public Map<OWLObjectProperty, OWLClass> getSomeValuesFrom() {
            return someValuesFrom;
        }

        public Map<OWLObjectProperty, OWLClass> getAllValuesFrom() {
            return allValuesFrom;
        }

        public Map<OWLObjectProperty, Integer> getMinCardinality() {
            return minCardinality;
        }

        public Map<OWLObjectProperty, Integer> getCardinality() {
            return cardinality;
        }

        public Map<OWLObjectProperty, Integer> getMaxCardinality() {
            return maxCardinality;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.processor.internals; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.MockConsumer; import org.apache.kafka.clients.consumer.OffsetResetStrategy; import org.apache.kafka.clients.producer.MockProducer; import org.apache.kafka.common.KafkaException; import org.apache.kafka.common.MetricName; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.errors.ProducerFencedException; import org.apache.kafka.common.errors.TimeoutException; import org.apache.kafka.common.metrics.JmxReporter; import org.apache.kafka.common.metrics.KafkaMetric; import org.apache.kafka.common.metrics.MetricConfig; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.common.metrics.Sensor; import org.apache.kafka.common.record.TimestampType; import org.apache.kafka.common.serialization.Deserializer; import org.apache.kafka.common.serialization.IntegerSerializer; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.serialization.Serializer; import org.apache.kafka.common.utils.LogContext; import org.apache.kafka.common.utils.MockTime; import org.apache.kafka.common.utils.Utils; 
import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.errors.DefaultProductionExceptionHandler; import org.apache.kafka.streams.errors.StreamsException; import org.apache.kafka.streams.errors.TaskMigratedException; import org.apache.kafka.streams.processor.PunctuationType; import org.apache.kafka.streams.processor.Punctuator; import org.apache.kafka.streams.processor.StateRestoreListener; import org.apache.kafka.streams.processor.StateStore; import org.apache.kafka.streams.processor.TaskId; import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl; import org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender; import org.apache.kafka.streams.state.internals.OffsetCheckpoint; import org.apache.kafka.test.MockKeyValueStore; import org.apache.kafka.test.MockProcessorNode; import org.apache.kafka.test.MockSourceNode; import org.apache.kafka.test.MockStateRestoreListener; import org.apache.kafka.test.MockTimestampExtractor; import org.apache.kafka.test.NoOpRecordCollector; import org.apache.kafka.test.TestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.time.Duration; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import static java.util.Arrays.asList; import static java.util.Collections.singletonList; import static org.apache.kafka.common.utils.Utils.mkEntry; import static org.apache.kafka.common.utils.Utils.mkMap; import static org.apache.kafka.common.utils.Utils.mkProperties; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static 
org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class StreamTaskTest { private final Serializer<Integer> intSerializer = Serdes.Integer().serializer(); private final Serializer<byte[]> bytesSerializer = Serdes.ByteArray().serializer(); private final Deserializer<Integer> intDeserializer = Serdes.Integer().deserializer(); private final String topic1 = "topic1"; private final String topic2 = "topic2"; private final TopicPartition partition1 = new TopicPartition(topic1, 1); private final TopicPartition partition2 = new TopicPartition(topic2, 1); private final Set<TopicPartition> partitions = Utils.mkSet(partition1, partition2); private final MockSourceNode<Integer, Integer> source1 = new MockSourceNode<>(new String[]{topic1}, intDeserializer, intDeserializer); private final MockSourceNode<Integer, Integer> source2 = new MockSourceNode<>(new String[]{topic2}, intDeserializer, intDeserializer); private final MockSourceNode<Integer, Integer> source3 = new MockSourceNode<Integer, Integer>(new String[]{topic2}, intDeserializer, intDeserializer) { @Override public void process(final Integer key, final Integer value) { throw new RuntimeException("KABOOM!"); } @Override public void close() { throw new RuntimeException("KABOOM!"); } }; private final MockProcessorNode<Integer, Integer> processorStreamTime = new MockProcessorNode<>(10L); private final MockProcessorNode<Integer, Integer> processorSystemTime = new MockProcessorNode<>(10L, PunctuationType.WALL_CLOCK_TIME); private final String storeName = "store"; private final StateStore stateStore = new MockKeyValueStore(storeName, false); private final TopicPartition changelogPartition = new TopicPartition("store-changelog", 0); private final Long offset = 543L; private final ProcessorTopology topology = withSources( asList(source1, source2, processorStreamTime, processorSystemTime), mkMap(mkEntry(topic1, source1), 
mkEntry(topic2, source2)) ); private final MockConsumer<byte[], byte[]> consumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); private MockProducer<byte[], byte[]> producer; private final MockConsumer<byte[], byte[]> restoreStateConsumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); private final StateRestoreListener stateRestoreListener = new MockStateRestoreListener(); private final StoreChangelogReader changelogReader = new StoreChangelogReader(restoreStateConsumer, Duration.ZERO, stateRestoreListener, new LogContext("stream-task-test ")) { @Override public Map<TopicPartition, Long> restoredOffsets() { return Collections.singletonMap(changelogPartition, offset); } }; private final byte[] recordValue = intSerializer.serialize(null, 10); private final byte[] recordKey = intSerializer.serialize(null, 1); private final Metrics metrics = new Metrics(new MetricConfig().recordLevel(Sensor.RecordingLevel.DEBUG)); private final StreamsMetricsImpl streamsMetrics = new MockStreamsMetrics(metrics); private final TaskId taskId00 = new TaskId(0, 0); private final MockTime time = new MockTime(); private final File baseDir = TestUtils.tempDirectory(); private StateDirectory stateDirectory; private StreamTask task; private long punctuatedAt; private final Punctuator punctuator = new Punctuator() { @Override public void punctuate(final long timestamp) { punctuatedAt = timestamp; } }; static ProcessorTopology withRepartitionTopics(final List<ProcessorNode> processorNodes, final Map<String, SourceNode> sourcesByTopic, final Set<String> repartitionTopics) { return new ProcessorTopology(processorNodes, sourcesByTopic, Collections.emptyMap(), Collections.emptyList(), Collections.emptyList(), Collections.emptyMap(), repartitionTopics); } static ProcessorTopology withSources(final List<ProcessorNode> processorNodes, final Map<String, SourceNode> sourcesByTopic) { return new ProcessorTopology(processorNodes, sourcesByTopic, Collections.emptyMap(), Collections.emptyList(), 
Collections.emptyList(), Collections.emptyMap(), Collections.emptySet()); } private StreamsConfig createConfig(final boolean enableEoS) { final String canonicalPath; try { canonicalPath = baseDir.getCanonicalPath(); } catch (final IOException e) { throw new RuntimeException(e); } return new StreamsConfig(mkProperties(mkMap( mkEntry(StreamsConfig.APPLICATION_ID_CONFIG, "stream-task-test"), mkEntry(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:2171"), mkEntry(StreamsConfig.BUFFERED_RECORDS_PER_PARTITION_CONFIG, "3"), mkEntry(StreamsConfig.STATE_DIR_CONFIG, canonicalPath), mkEntry(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, MockTimestampExtractor.class.getName()), mkEntry(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, enableEoS ? StreamsConfig.EXACTLY_ONCE : StreamsConfig.AT_LEAST_ONCE), mkEntry(StreamsConfig.MAX_TASK_IDLE_MS_CONFIG, "100") ))); } @Before public void setup() { consumer.assign(asList(partition1, partition2)); stateDirectory = new StateDirectory(createConfig(false), new MockTime(), true); } @After public void cleanup() throws IOException { try { if (task != null) { try { task.close(true, false); } catch (final Exception e) { // swallow } } } finally { Utils.delete(baseDir); } } @Test public void shouldHandleInitTransactionsTimeoutExceptionOnCreation() { final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(); final ProcessorTopology topology = withSources( asList(source1, source2, processorStreamTime, processorSystemTime), mkMap(mkEntry(topic1, (SourceNode) source1), mkEntry(topic2, (SourceNode) source2)) ); source1.addChild(processorStreamTime); source2.addChild(processorStreamTime); source1.addChild(processorSystemTime); source2.addChild(processorSystemTime); try { new StreamTask( taskId00, partitions, topology, consumer, changelogReader, createConfig(true), streamsMetrics, stateDirectory, null, time, () -> producer = new MockProducer<byte[], byte[]>(false, bytesSerializer, bytesSerializer) { @Override public void 
initTransactions() { throw new TimeoutException("test"); } }, null ); fail("Expected an exception"); } catch (final StreamsException expected) { // make sure we log the explanation as an ERROR assertTimeoutErrorLog(appender); // make sure we report the correct message assertThat(expected.getMessage(), is("task [0_0] Failed to initialize task 0_0 due to timeout.")); // make sure we preserve the cause assertEquals(expected.getCause().getClass(), TimeoutException.class); assertThat(expected.getCause().getMessage(), is("test")); } LogCaptureAppender.unregister(appender); } @Test public void shouldHandleInitTransactionsTimeoutExceptionOnResume() { final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(); final ProcessorTopology topology = withSources( asList(source1, source2, processorStreamTime, processorSystemTime), mkMap(mkEntry(topic1, (SourceNode) source1), mkEntry(topic2, (SourceNode) source2)) ); source1.addChild(processorStreamTime); source2.addChild(processorStreamTime); source1.addChild(processorSystemTime); source2.addChild(processorSystemTime); final AtomicBoolean timeOut = new AtomicBoolean(false); final StreamTask testTask = new StreamTask( taskId00, partitions, topology, consumer, changelogReader, createConfig(true), streamsMetrics, stateDirectory, null, time, () -> producer = new MockProducer<byte[], byte[]>(false, bytesSerializer, bytesSerializer) { @Override public void initTransactions() { if (timeOut.get()) { throw new TimeoutException("test"); } else { super.initTransactions(); } } }, null ); testTask.initializeTopology(); testTask.suspend(); timeOut.set(true); try { testTask.resume(); fail("Expected an exception"); } catch (final StreamsException expected) { // make sure we log the explanation as an ERROR assertTimeoutErrorLog(appender); // make sure we report the correct message assertThat(expected.getMessage(), is("task [0_0] Failed to initialize task 0_0 due to timeout.")); // make sure we preserve the cause 
assertEquals(expected.getCause().getClass(), TimeoutException.class); assertThat(expected.getCause().getMessage(), is("test")); } LogCaptureAppender.unregister(appender); } private void assertTimeoutErrorLog(final LogCaptureAppender appender) { final String expectedErrorLogMessage = "task [0_0] Timeout exception caught when initializing transactions for task 0_0. " + "This might happen if the broker is slow to respond, if the network " + "connection to the broker was interrupted, or if similar circumstances arise. " + "You can increase producer parameter `max.block.ms` to increase this timeout."; final List<String> expectedError = appender .getEvents() .stream() .filter(event -> event.getMessage().equals(expectedErrorLogMessage)) .map(LogCaptureAppender.Event::getLevel) .collect(Collectors.toList()); assertThat(expectedError, is(singletonList("ERROR"))); } @SuppressWarnings("unchecked") @Test public void testProcessOrder() { task = createStatelessTask(createConfig(false)); task.addRecords(partition1, asList( getConsumerRecord(partition1, 10), getConsumerRecord(partition1, 20), getConsumerRecord(partition1, 30) )); task.addRecords(partition2, asList( getConsumerRecord(partition2, 25), getConsumerRecord(partition2, 35), getConsumerRecord(partition2, 45) )); assertTrue(task.process()); assertEquals(5, task.numBuffered()); assertEquals(1, source1.numReceived); assertEquals(0, source2.numReceived); assertTrue(task.process()); assertEquals(4, task.numBuffered()); assertEquals(2, source1.numReceived); assertEquals(0, source2.numReceived); assertTrue(task.process()); assertEquals(3, task.numBuffered()); assertEquals(2, source1.numReceived); assertEquals(1, source2.numReceived); assertTrue(task.process()); assertEquals(2, task.numBuffered()); assertEquals(3, source1.numReceived); assertEquals(1, source2.numReceived); assertTrue(task.process()); assertEquals(1, task.numBuffered()); assertEquals(3, source1.numReceived); assertEquals(2, source2.numReceived); 
assertTrue(task.process()); assertEquals(0, task.numBuffered()); assertEquals(3, source1.numReceived); assertEquals(3, source2.numReceived); } @Test public void testMetrics() { task = createStatelessTask(createConfig(false)); assertNotNull(getMetric("%s-latency-avg", "The average latency of %s operation.", task.id().toString())); assertNotNull(getMetric("%s-latency-max", "The max latency of %s operation.", task.id().toString())); assertNotNull(getMetric("%s-rate", "The average number of occurrence of %s operation per second.", task.id().toString())); assertNotNull(getMetric("%s-latency-avg", "The average latency of %s operation.", "all")); assertNotNull(getMetric("%s-latency-max", "The max latency of %s operation.", "all")); assertNotNull(getMetric("%s-rate", "The average number of occurrence of %s operation per second.", "all")); final JmxReporter reporter = new JmxReporter("kafka.streams"); metrics.addReporter(reporter); assertTrue(reporter.containsMbean(String.format("kafka.streams:type=stream-task-metrics,client-id=test,task-id=%s", task.id.toString()))); assertTrue(reporter.containsMbean("kafka.streams:type=stream-task-metrics,client-id=test,task-id=all")); } private KafkaMetric getMetric(final String nameFormat, final String descriptionFormat, final String taskId) { return metrics.metrics().get(metrics.metricName( String.format(nameFormat, "commit"), "stream-task-metrics", String.format(descriptionFormat, "commit"), mkMap(mkEntry("task-id", taskId), mkEntry("client-id", "test")) )); } @SuppressWarnings("unchecked") @Test public void testPauseResume() { task = createStatelessTask(createConfig(false)); task.addRecords(partition1, asList( getConsumerRecord(partition1, 10), getConsumerRecord(partition1, 20) )); task.addRecords(partition2, asList( getConsumerRecord(partition2, 35), getConsumerRecord(partition2, 45), getConsumerRecord(partition2, 55), getConsumerRecord(partition2, 65) )); assertTrue(task.process()); assertEquals(1, source1.numReceived); 
        // NOTE(review): this chunk begins inside a test whose opening lines are
        // above the visible region; the statements below are its tail.
        // partition2 stays paused while the buffered partition1 records drain.
        assertEquals(0, source2.numReceived);
        assertEquals(1, consumer.paused().size());
        assertTrue(consumer.paused().contains(partition2));

        task.addRecords(partition1, asList(
            getConsumerRecord(partition1, 30),
            getConsumerRecord(partition1, 40),
            getConsumerRecord(partition1, 50)
        ));

        // both partitions now have buffered records -> both paused
        assertEquals(2, consumer.paused().size());
        assertTrue(consumer.paused().contains(partition1));
        assertTrue(consumer.paused().contains(partition2));

        assertTrue(task.process());
        assertEquals(2, source1.numReceived);
        assertEquals(0, source2.numReceived);
        assertEquals(1, consumer.paused().size());
        assertTrue(consumer.paused().contains(partition2));

        assertTrue(task.process());
        assertEquals(3, source1.numReceived);
        assertEquals(0, source2.numReceived);
        assertEquals(1, consumer.paused().size());
        assertTrue(consumer.paused().contains(partition2));

        // last buffered record consumed -> nothing left to pause for
        assertTrue(task.process());
        assertEquals(3, source1.numReceived);
        assertEquals(1, source2.numReceived);
        assertEquals(0, consumer.paused().size());
    }

    /**
     * Stream-time punctuation (interval 100) must fire only once after a gap
     * larger than the interval, then re-align on the original schedule.
     */
    @SuppressWarnings("unchecked")
    @Test
    public void shouldPunctuateOnceStreamTimeAfterGap() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();
        task.addRecords(partition1, asList(
            getConsumerRecord(partition1, 20),
            getConsumerRecord(partition1, 142),
            getConsumerRecord(partition1, 155),
            getConsumerRecord(partition1, 160)
        ));
        task.addRecords(partition2, asList(
            getConsumerRecord(partition2, 25),
            getConsumerRecord(partition2, 145),
            getConsumerRecord(partition2, 159),
            getConsumerRecord(partition2, 161)
        ));

        // st: -1 (no record processed yet)
        assertFalse(task.maybePunctuateStreamTime());

        // st: 20 -> punctuate at 20
        assertTrue(task.process());
        assertEquals(7, task.numBuffered());
        assertEquals(1, source1.numReceived);
        assertEquals(0, source2.numReceived);
        assertTrue(task.maybePunctuateStreamTime());

        // st: 25
        assertTrue(task.process());
        assertEquals(6, task.numBuffered());
        assertEquals(1, source1.numReceived);
        assertEquals(1, source2.numReceived);
        assertFalse(task.maybePunctuateStreamTime());

        // st: 142 -> punctuate at 142; only one punctuation after the >100ms gap
        assertTrue(task.process());
        assertEquals(5, task.numBuffered());
        assertEquals(2, source1.numReceived);
        assertEquals(1, source2.numReceived);
        assertTrue(task.maybePunctuateStreamTime());

        // st: 145
        assertTrue(task.process());
        assertEquals(4, task.numBuffered());
        assertEquals(2, source1.numReceived);
        assertEquals(2, source2.numReceived);
        assertFalse(task.maybePunctuateStreamTime());

        // st: 155 -> punctuate at 155
        assertTrue(task.process());
        assertEquals(3, task.numBuffered());
        assertEquals(3, source1.numReceived);
        assertEquals(2, source2.numReceived);
        assertTrue(task.maybePunctuateStreamTime());

        // st: 159
        assertTrue(task.process());
        assertEquals(2, task.numBuffered());
        assertEquals(3, source1.numReceived);
        assertEquals(3, source2.numReceived);
        assertFalse(task.maybePunctuateStreamTime());

        // st: 160, aligned at 0
        assertTrue(task.process());
        assertEquals(1, task.numBuffered());
        assertEquals(4, source1.numReceived);
        assertEquals(3, source2.numReceived);
        assertTrue(task.maybePunctuateStreamTime());

        // st: 161
        assertTrue(task.process());
        assertEquals(0, task.numBuffered());
        assertEquals(4, source1.numReceived);
        assertEquals(4, source2.numReceived);
        assertFalse(task.maybePunctuateStreamTime());

        processorStreamTime.mockProcessor.checkAndClearPunctuateResult(PunctuationType.STREAM_TIME, 20L, 142L, 155L, 160L);
    }

    /** Cancelling a stream-time punctuation schedule must stop further punctuations. */
    @Test
    public void shouldRespectPunctuateCancellationStreamTime() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();
        task.addRecords(partition1, asList(
            getConsumerRecord(partition1, 20),
            getConsumerRecord(partition1, 30),
            getConsumerRecord(partition1, 40)
        ));
        task.addRecords(partition2, asList(
            getConsumerRecord(partition2, 25),
            getConsumerRecord(partition2, 35),
            getConsumerRecord(partition2, 45)
        ));

        assertFalse(task.maybePunctuateStreamTime());

        // st is now 20
        assertTrue(task.process());
        assertTrue(task.maybePunctuateStreamTime());

        // st is now 25
        assertTrue(task.process());
        assertFalse(task.maybePunctuateStreamTime());

        // st is now 30
        assertTrue(task.process());
        // cancel the schedule: punctuation at 30 must no longer fire
        processorStreamTime.mockProcessor.scheduleCancellable.cancel();
        assertFalse(task.maybePunctuateStreamTime());

        processorStreamTime.mockProcessor.checkAndClearPunctuateResult(PunctuationType.STREAM_TIME, 20L);
    }

    /** Cancelling a wall-clock punctuation schedule must stop further punctuations. */
    @Test
    public void shouldRespectPunctuateCancellationSystemTime() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();
        final long now = time.milliseconds();
        time.sleep(10);
        assertTrue(task.maybePunctuateSystemTime());
        processorSystemTime.mockProcessor.scheduleCancellable.cancel();
        time.sleep(10);
        assertFalse(task.maybePunctuateSystemTime());
        processorSystemTime.mockProcessor.checkAndClearPunctuateResult(PunctuationType.WALL_CLOCK_TIME, now + 10);
    }

    /** commitNeeded() becomes true after processing or any punctuation, and resets on commit(). */
    @Test
    public void shouldRespectCommitNeeded() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();
        assertFalse(task.commitNeeded());

        task.addRecords(partition1, singletonList(getConsumerRecord(partition1, 0)));
        assertTrue(task.process());
        assertTrue(task.commitNeeded());

        task.commit();
        assertFalse(task.commitNeeded());

        assertTrue(task.maybePunctuateStreamTime());
        assertTrue(task.commitNeeded());

        task.commit();
        assertFalse(task.commitNeeded());

        time.sleep(10);
        assertTrue(task.maybePunctuateSystemTime());
        assertTrue(task.commitNeeded());

        task.commit();
        assertFalse(task.commitNeeded());
    }

    /** requestCommit() must be reflected by commitRequested(). */
    @Test
    public void shouldRespectCommitRequested() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();
        task.requestCommit();
        assertTrue(task.commitRequested());
    }

    /** A task is processable only once every input partition has buffered records. */
    @Test
    public void shouldBeProcessableIfAllPartitionsBuffered() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();

        assertFalse(task.isProcessable(0L));

        final byte[] bytes = ByteBuffer.allocate(4).putInt(1).array();

        task.addRecords(partition1, Collections.singleton(new ConsumerRecord<>(topic1, 1, 0, bytes, bytes)));
        assertFalse(task.isProcessable(0L));

        task.addRecords(partition2, Collections.singleton(new ConsumerRecord<>(topic2, 1, 0, bytes, bytes)));
        assertTrue(task.isProcessable(0L));
    }

    /**
     * After waiting past the max idle time the task becomes processable with
     * partial data (enforced processing), tracked by the enforced-processing metric.
     */
    @Test
    public void shouldBeProcessableIfWaitedForTooLong() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();

        final MetricName enforcedProcessMetric = metrics.metricName("enforced-processing-total", "stream-task-metrics", mkMap(mkEntry("client-id", "test"), mkEntry("task-id", taskId00.toString())));

        assertFalse(task.isProcessable(0L));
        assertEquals(0.0, metrics.metric(enforcedProcessMetric).metricValue());

        final byte[] bytes = ByteBuffer.allocate(4).putInt(1).array();
        task.addRecords(partition1, Collections.singleton(new ConsumerRecord<>(topic1, 1, 0, bytes, bytes)));

        assertFalse(task.isProcessable(time.milliseconds()));
        assertFalse(task.isProcessable(time.milliseconds() + 50L));
        assertTrue(task.isProcessable(time.milliseconds() + 100L));
        assertEquals(1.0, metrics.metric(enforcedProcessMetric).metricValue());

        // once decided to enforce, continue doing that
        assertTrue(task.isProcessable(time.milliseconds() + 101L));
        assertEquals(2.0, metrics.metric(enforcedProcessMetric).metricValue());

        task.addRecords(partition2, Collections.singleton(new ConsumerRecord<>(topic2, 1, 0, bytes, bytes)));
        assertTrue(task.isProcessable(time.milliseconds() + 130L));
        assertEquals(2.0, metrics.metric(enforcedProcessMetric).metricValue());

        // one resumed to normal processing, the timer should be reset
        task.process();

        assertFalse(task.isProcessable(time.milliseconds() + 150L));
        assertEquals(2.0, metrics.metric(enforcedProcessMetric).metricValue());

        assertFalse(task.isProcessable(time.milliseconds() + 249L));
        assertEquals(2.0, metrics.metric(enforcedProcessMetric).metricValue());

        assertTrue(task.isProcessable(time.milliseconds() + 250L));
        assertEquals(3.0, metrics.metric(enforcedProcessMetric).metricValue());
    }

    /** Wall-clock punctuation (interval 10) fires each time the interval elapses. */
    @Test
    public void shouldPunctuateSystemTimeWhenIntervalElapsed() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();
        final long now = time.milliseconds();
        time.sleep(10);
        assertTrue(task.maybePunctuateSystemTime());
        time.sleep(10);
        assertTrue(task.maybePunctuateSystemTime());
        time.sleep(9);
        assertFalse(task.maybePunctuateSystemTime());
        time.sleep(1);
        assertTrue(task.maybePunctuateSystemTime());
        time.sleep(20);
        assertTrue(task.maybePunctuateSystemTime());
        assertFalse(task.maybePunctuateSystemTime());
        processorSystemTime.mockProcessor.checkAndClearPunctuateResult(PunctuationType.WALL_CLOCK_TIME, now + 10, now + 20, now + 30, now + 50);
    }

    /** No wall-clock punctuation before the interval has fully elapsed. */
    @Test
    public void shouldNotPunctuateSystemTimeWhenIntervalNotElapsed() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();
        assertFalse(task.maybePunctuateSystemTime());
        time.sleep(9);
        assertFalse(task.maybePunctuateSystemTime());
        processorSystemTime.mockProcessor.checkAndClearPunctuateResult(PunctuationType.WALL_CLOCK_TIME);
    }

    /** Like the stream-time variant: only one wall-clock punctuation after a long gap. */
    @Test
    public void shouldPunctuateOnceSystemTimeAfterGap() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();
        final long now = time.milliseconds();
        time.sleep(100);
        assertTrue(task.maybePunctuateSystemTime());
        assertFalse(task.maybePunctuateSystemTime());
        time.sleep(10);
        assertTrue(task.maybePunctuateSystemTime());
        time.sleep(12);
        assertTrue(task.maybePunctuateSystemTime());
        time.sleep(7);
        assertFalse(task.maybePunctuateSystemTime());
        time.sleep(1); // punctuate at now + 130
        assertTrue(task.maybePunctuateSystemTime());
        time.sleep(105); // punctuate at now + 235
        assertTrue(task.maybePunctuateSystemTime());
        assertFalse(task.maybePunctuateSystemTime());
        time.sleep(5); // punctuate at now + 240, still aligned on the initial punctuation
        assertTrue(task.maybePunctuateSystemTime());
        assertFalse(task.maybePunctuateSystemTime());
        processorSystemTime.mockProcessor.checkAndClearPunctuateResult(PunctuationType.WALL_CLOCK_TIME, now + 100, now + 110, now + 122, now + 130, now + 235, now + 240);
    }

    /** A processing failure must clear the "current node" context of the task. */
    @Test
    public void shouldWrapKafkaExceptionsWithStreamsExceptionAndAddContext() {
        task = createTaskThatThrowsException(false);
        task.initializeStateStores();
        task.initializeTopology();
        task.addRecords(partition2, singletonList(getConsumerRecord(partition2, 0)));

        try {
            task.process();
            fail("Should've thrown StreamsException");
        } catch (final Exception e) {
            assertThat(task.processorContext.currentNode(), nullValue());
        }
    }

    /** KafkaException thrown inside a stream-time punctuator is wrapped with processor context. */
    @Test
    public void shouldWrapKafkaExceptionsWithStreamsExceptionAndAddContextWhenPunctuatingStreamTime() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();

        try {
            task.punctuate(processorStreamTime, 1, PunctuationType.STREAM_TIME, new Punctuator() {
                @Override
                public void punctuate(final long timestamp) {
                    throw new KafkaException("KABOOM!");
                }
            });
            fail("Should've thrown StreamsException");
        } catch (final StreamsException e) {
            final String message = e.getMessage();
            assertTrue("message=" + message + " should contain processor", message.contains("processor '" + processorStreamTime.name() + "'"));
            assertThat(task.processorContext.currentNode(), nullValue());
        }
    }

    /** Same as above, but for a wall-clock-time punctuator. */
    @Test
    public void shouldWrapKafkaExceptionsWithStreamsExceptionAndAddContextWhenPunctuatingWallClockTimeTime() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();

        try {
            task.punctuate(processorSystemTime, 1, PunctuationType.WALL_CLOCK_TIME, new Punctuator() {
                @Override
                public void punctuate(final long timestamp) {
                    throw new KafkaException("KABOOM!");
                }
            });
            fail("Should've thrown StreamsException");
        } catch (final StreamsException e) {
            final String message = e.getMessage();
            assertTrue("message=" + message + " should contain processor", message.contains("processor '" + processorSystemTime.name() + "'"));
            assertThat(task.processorContext.currentNode(), nullValue());
        }
    }

    /** flushState() must flush the record collector. */
    @Test
    public void shouldFlushRecordCollectorOnFlushState() {
        final AtomicBoolean flushed = new AtomicBoolean(false);
        final StreamsMetricsImpl streamsMetrics = new MockStreamsMetrics(new Metrics());
        final StreamTask streamTask = new StreamTask(
            taskId00,
            partitions,
            topology,
            consumer,
            changelogReader,
            createConfig(false),
            streamsMetrics,
            stateDirectory,
            null,
            time,
            () -> producer = new MockProducer<>(false, bytesSerializer, bytesSerializer),
            new NoOpRecordCollector() {
                @Override
                public void flush() {
                    flushed.set(true);
                }
            });
        streamTask.flushState();
        assertTrue(flushed.get());
    }

    /** With EOS disabled, commit() writes a checkpoint file with the changelog offsets. */
    @Test
    public void shouldCheckpointOffsetsOnCommit() throws IOException {
        task = createStatefulTask(createConfig(false), true);
        task.initializeStateStores();
        task.initializeTopology();
        task.commit();
        final OffsetCheckpoint checkpoint = new OffsetCheckpoint(
            new File(stateDirectory.directoryForTask(taskId00), ProcessorStateManager.CHECKPOINT_FILE_NAME)
        );

        assertThat(checkpoint.read(), equalTo(Collections.singletonMap(changelogPartition, offset)));
    }

    /** With EOS enabled, commit() must NOT write a checkpoint file. */
    @Test
    public void shouldNotCheckpointOffsetsOnCommitIfEosIsEnabled() {
        task = createStatefulTask(createConfig(true), true);
        task.initializeStateStores();
        task.initializeTopology();
        task.commit();
        final File checkpointFile = new File(
            stateDirectory.directoryForTask(taskId00),
            ProcessorStateManager.CHECKPOINT_FILE_NAME
        );

        assertFalse(checkpointFile.exists());
    }

    /** punctuate() requires that no node is currently being processed. */
    @Test
    public void shouldThrowIllegalStateExceptionIfCurrentNodeIsNotNullWhenPunctuateCalled() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();
        task.processorContext.setCurrentNode(processorStreamTime);
        try {
            task.punctuate(processorStreamTime, 10, PunctuationType.STREAM_TIME, punctuator);
            fail("Should throw illegal state exception as current node is not null");
        } catch (final IllegalStateException e) {
            // pass
        }
    }

    /** punctuate() must invoke the punctuator on the processor node passed in. */
    @Test
    public void shouldCallPunctuateOnPassedInProcessorNode() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();
        task.punctuate(processorStreamTime, 5, PunctuationType.STREAM_TIME, punctuator);
        assertThat(punctuatedAt, equalTo(5L));
        task.punctuate(processorStreamTime, 10, PunctuationType.STREAM_TIME, punctuator);
        assertThat(punctuatedAt, equalTo(10L));
    }

    /** After a successful punctuate the context's current node is reset to null. */
    @Test
    public void shouldSetProcessorNodeOnContextBackToNullAfterSuccessfulPunctuate() {
        task = createStatelessTask(createConfig(false));
        task.initializeStateStores();
        task.initializeTopology();
        task.punctuate(processorStreamTime, 5, PunctuationType.STREAM_TIME, punctuator);
        assertThat(((ProcessorContextImpl) task.context()).currentNode(), nullValue());
    }

    /** schedule() outside of a processor (current node == null) is illegal. */
    @Test(expected = IllegalStateException.class)
    public void shouldThrowIllegalStateExceptionOnScheduleIfCurrentNodeIsNull() {
        task = createStatelessTask(createConfig(false));
        task.schedule(1, PunctuationType.STREAM_TIME, new Punctuator() {
            @Override
            public void punctuate(final long timestamp) {
                // no-op
            }
        });
    }

    /** schedule() is allowed while a processor node is set. */
    @Test
    public void shouldNotThrowExceptionOnScheduleIfCurrentNodeIsNotNull() {
        task = createStatelessTask(createConfig(false));
        task.processorContext.setCurrentNode(processorStreamTime);
        task.schedule(1, PunctuationType.STREAM_TIME, new Punctuator() {
            @Override
            public void punctuate(final long timestamp) {
                // no-op
            }
        });
    }

    // ------------------------------------------------------------------
    // Producer lifecycle on close: with EOS disabled the producer is shared
    // and must never be closed by the task; with EOS enabled the task owns
    // its producer and must commit/abort the transaction and close it.
    // ------------------------------------------------------------------

    @Test
    public void shouldNotCloseProducerOnCleanCloseWithEosDisabled() {
        task = createStatelessTask(createConfig(false));
        task.close(true, false);
        task = null;

        assertFalse(producer.closed());
    }

    @Test
    public void shouldNotCloseProducerOnUncleanCloseWithEosDisabled() {
        task = createStatelessTask(createConfig(false));
        task.close(false, false);
        task = null;

        assertFalse(producer.closed());
    }

    @Test
    public void shouldNotCloseProducerOnErrorDuringCleanCloseWithEosDisabled() {
        task = createTaskThatThrowsException(false);

        try {
            task.close(true, false);
            fail("should have thrown runtime exception");
        } catch (final RuntimeException expected) {
            task = null;
        }

        assertFalse(producer.closed());
    }

    @Test
    public void shouldNotCloseProducerOnErrorDuringUncleanCloseWithEosDisabled() {
        task = createTaskThatThrowsException(false);

        task.close(false, false);
        task = null;

        assertFalse(producer.closed());
    }

    @Test
    public void shouldCommitTransactionAndCloseProducerOnCleanCloseWithEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        task.close(true, false);
        task = null;

        assertTrue(producer.transactionCommitted());
        assertFalse(producer.transactionInFlight());
        assertTrue(producer.closed());
    }

    @Test
    public void shouldNotAbortTransactionAndNotCloseProducerOnErrorDuringCleanCloseWithEosEnabled() {
        task = createTaskThatThrowsException(true);
        task.initializeTopology();

        try {
            task.close(true, false);
            fail("should have thrown runtime exception");
        } catch (final RuntimeException expected) {
            task = null;
        }

        assertTrue(producer.transactionInFlight());
        assertFalse(producer.closed());
    }

    @Test
    public void shouldOnlyCloseProducerIfFencedOnCommitDuringCleanCloseWithEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        producer.fenceProducer();

        try {
            task.close(true, false);
            fail("should have thrown TaskMigratedException");
        } catch (final TaskMigratedException expected) {
            task = null;
            assertTrue(expected.getCause() instanceof ProducerFencedException);
        }

        assertFalse(producer.transactionCommitted());
        assertTrue(producer.transactionInFlight());
        assertFalse(producer.transactionAborted());
        // NOTE(review): duplicated assertion below — kept byte-identical on purpose
        assertFalse(producer.transactionCommitted());
        assertTrue(producer.closed());
    }

    @Test
    public void shouldNotCloseProducerIfFencedOnCloseDuringCleanCloseWithEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        producer.fenceProducerOnClose();

        try {
            task.close(true, false);
            fail("should have thrown TaskMigratedException");
        } catch (final TaskMigratedException expected) {
            task = null;
            assertTrue(expected.getCause() instanceof ProducerFencedException);
        }

        assertTrue(producer.transactionCommitted());
        assertFalse(producer.transactionInFlight());
        assertFalse(producer.closed());
    }

    @Test
    public void shouldAbortTransactionAndCloseProducerOnUncleanCloseWithEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        task.close(false, false);
        task = null;

        assertTrue(producer.transactionAborted());
        assertFalse(producer.transactionInFlight());
        assertTrue(producer.closed());
    }

    @Test
    public void shouldAbortTransactionAndCloseProducerOnErrorDuringUncleanCloseWithEosEnabled() {
        task = createTaskThatThrowsException(true);
        task.initializeTopology();

        task.close(false, false);

        assertTrue(producer.transactionAborted());
        assertTrue(producer.closed());
    }

    @Test
    public void shouldOnlyCloseProducerIfFencedOnAbortDuringUncleanCloseWithEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        producer.fenceProducer();

        task.close(false, false);
        task = null;

        assertTrue(producer.transactionInFlight());
        assertFalse(producer.transactionAborted());
        assertFalse(producer.transactionCommitted());
        assertTrue(producer.closed());
    }

    @Test
    public void shouldOnlyCloseFencedProducerOnUncleanClosedWithEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        producer.fenceProducer();

        task.close(false, true);
        task = null;

        assertFalse(producer.transactionAborted());
        assertTrue(producer.closed());
    }

    @Test
    public void shouldAbortTransactionButNotCloseProducerIfFencedOnCloseDuringUncleanCloseWithEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        producer.fenceProducerOnClose();

        task.close(false, false);
        task = null;

        assertTrue(producer.transactionAborted());
        assertFalse(producer.closed());
    }

    /** Close failures must not prevent the topology's nodes from being closed. */
    @Test
    public void shouldThrowExceptionIfAnyExceptionsRaisedDuringCloseButStillCloseAllProcessorNodesTopology() {
        task = createTaskThatThrowsException(false);
        task.initializeStateStores();
        task.initializeTopology();
        try {
            task.close(true, false);
            fail("should have thrown runtime exception");
        } catch (final RuntimeException expected) {
            task = null;
        }
        assertTrue(processorSystemTime.closed);
        assertTrue(processorStreamTime.closed);
        assertTrue(source1.closed);
    }

    @Test
    public void shouldInitAndBeginTransactionOnCreateIfEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();

        assertTrue(producer.transactionInitialized());
        assertTrue(producer.transactionInFlight());
    }

    @Test
    public void shouldWrapProducerFencedExceptionWithTaskMigratedExceptionForBeginTransaction() {
        task = createStatelessTask(createConfig(true));
        producer.fenceProducer();

        try {
            task.initializeTopology();
            fail("Should have throws TaskMigratedException");
        } catch (final TaskMigratedException expected) {
            assertTrue(expected.getCause() instanceof ProducerFencedException);
        }
    }

    @Test
    public void shouldNotThrowOnCloseIfTaskWasNotInitializedWithEosEnabled() {
        task = createStatelessTask(createConfig(true));

        assertFalse(producer.transactionInFlight());
        task.close(false, false);
    }

    @Test
    public void shouldNotInitOrBeginTransactionOnCreateIfEosDisabled() {
        task = createStatelessTask(createConfig(false));

        assertFalse(producer.transactionInitialized());
        assertFalse(producer.transactionInFlight());
    }

    @Test
    public void shouldSendOffsetsAndCommitTransactionButNotStartNewTransactionOnSuspendIfEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        task.addRecords(partition1, singletonList(getConsumerRecord(partition1, 0)));
        task.process();
        task.suspend();

        assertTrue(producer.sentOffsets());
        assertTrue(producer.transactionCommitted());
        assertFalse(producer.transactionInFlight());
    }

    @Test
    public void shouldCommitTransactionOnSuspendEvenIfTransactionIsEmptyIfEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        task.suspend();

        assertTrue(producer.transactionCommitted());
        assertFalse(producer.transactionInFlight());
    }

    @Test
    public void shouldNotSendOffsetsAndCommitTransactionNorStartNewTransactionOnSuspendIfEosDisabled() {
        task = createStatelessTask(createConfig(false));
        task.addRecords(partition1, singletonList(getConsumerRecord(partition1, 0)));
        task.process();
        task.suspend();

        assertFalse(producer.sentOffsets());
        assertFalse(producer.transactionCommitted());
        assertFalse(producer.transactionInFlight());
    }

    @Test
    public void shouldWrapProducerFencedExceptionWithTaskMigragedExceptionInSuspendWhenCommitting() {
        task = createStatelessTask(createConfig(true));
        producer.fenceProducer();

        try {
            task.suspend();
            fail("Should have throws TaskMigratedException");
        } catch (final TaskMigratedException expected) {
            assertTrue(expected.getCause() instanceof ProducerFencedException);
        }
        task = null;

        assertFalse(producer.transactionCommitted());
    }

    @Test
    public void shouldWrapProducerFencedExceptionWithTaskMigragedExceptionInSuspendWhenClosingProducer() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();

        producer.fenceProducerOnClose();
        try {
            task.suspend();
            fail("Should have throws TaskMigratedException");
        } catch (final TaskMigratedException expected) {
            assertTrue(expected.getCause() instanceof ProducerFencedException);
        }

        assertTrue(producer.transactionCommitted());
    }

    @Test
    public void shouldStartNewTransactionOnResumeIfEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();

        task.addRecords(partition1, singletonList(getConsumerRecord(partition1, 0)));
        task.process();
        task.suspend();

        task.resume();
        task.initializeTopology();
        assertTrue(producer.transactionInFlight());
    }

    @Test
    public void shouldNotStartNewTransactionOnResumeIfEosDisabled() {
        task = createStatelessTask(createConfig(false));

        task.addRecords(partition1, singletonList(getConsumerRecord(partition1, 0)));
        task.process();
        task.suspend();

        task.resume();
        assertFalse(producer.transactionInFlight());
    }

    @Test
    public void shouldStartNewTransactionOnCommitIfEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();

        task.addRecords(partition1, singletonList(getConsumerRecord(partition1, 0)));
        task.process();

        task.commit();
        assertTrue(producer.transactionInFlight());
    }

    @Test
    public void shouldNotStartNewTransactionOnCommitIfEosDisabled() {
        task = createStatelessTask(createConfig(false));

        task.addRecords(partition1, singletonList(getConsumerRecord(partition1, 0)));
        task.process();

        task.commit();
        assertFalse(producer.transactionInFlight());
    }

    @Test
    public void shouldNotAbortTransactionOnZombieClosedIfEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.close(false, true);
        task = null;

        assertFalse(producer.transactionAborted());
    }

    @Test
    public void shouldNotAbortTransactionOnDirtyClosedIfEosDisabled() {
        task = createStatelessTask(createConfig(false));
        task.close(false, false);
        task = null;

        assertFalse(producer.transactionAborted());
    }

    @Test
    public void shouldCloseProducerOnCloseWhenEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        task.close(true, false);
        task = null;

        assertTrue(producer.closed());
    }

    @Test
    public void shouldCloseProducerOnUncleanCloseNotZombieWhenEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        task.close(false, false);
        task = null;

        assertTrue(producer.closed());
    }

    @Test
    public void shouldCloseProducerOnUncleanCloseIsZombieWhenEosEnabled() {
        task = createStatelessTask(createConfig(true));
        task.initializeTopology();
        task.close(false, true);
        task = null;

        assertTrue(producer.closed());
    }

    /** A flush failure during commit must surface, not be swallowed (at-least-once). */
    @Test
    public void shouldNotViolateAtLeastOnceWhenExceptionOccursDuringFlushing() {
        task = createTaskThatThrowsException(false);
        task.initializeStateStores();
        task.initializeTopology();

        try {
            task.commit();
            fail("should have thrown an exception");
        } catch (final Exception e) {
            // all good
        }
    }

    /** A flush failure during suspend must surface as well. */
    @Test
    public void shouldNotViolateAtLeastOnceWhenExceptionOccursDuringTaskSuspension() {
        final StreamTask task = createTaskThatThrowsException(false);

        task.initializeStateStores();
        task.initializeTopology();
        try {
            task.suspend();
            fail("should have thrown an exception");
        } catch (final Exception e) {
            // all good
        }
    }

    /** Even when close fails, the state manager (and its stores) must be closed. */
    @Test
    public void shouldCloseStateManagerIfFailureOnTaskClose() {
        task = createStatefulTaskThatThrowsExceptionOnClose();
        task.initializeStateStores();
        task.initializeTopology();

        try {
            task.close(true, false);
            fail("should have thrown an exception");
        } catch (final Exception e) {
            // all good
        }

        task = null;
        assertFalse(stateStore.isOpen());
    }

    /** Closing a never-initialized task must not touch (and fail on) the topology. */
    @Test
    public void shouldNotCloseTopologyProcessorNodesIfNotInitialized() {
        final StreamTask task = createTaskThatThrowsException(false);

        try {
            task.close(false, false);
        } catch (final Exception e) {
            fail("should have not closed non-initialized topology");
        }
    }

    @Test
    public void shouldBeInitializedIfChangelogPartitionsIsEmpty() {
        final StreamTask task = createStatefulTask(createConfig(false), false);

        assertTrue(task.initializeStateStores());
    }

    @Test
    public void shouldNotBeInitializedIfChangelogPartitionsIsNonEmpty() {
        final StreamTask task = createStatefulTask(createConfig(false), true);

        assertFalse(task.initializeStateStores());
    }

    /** Committed offsets of repartition topics are exposed for log purging. */
    @Test
    public void shouldReturnOffsetsForRepartitionTopicsForPurging() {
        final TopicPartition repartition = new TopicPartition("repartition", 1);

        final ProcessorTopology topology = withRepartitionTopics(
            asList(source1, source2),
            mkMap(mkEntry(topic1, source1), mkEntry(repartition.topic(), source2)),
            Collections.singleton(repartition.topic())
        );
        consumer.assign(asList(partition1, repartition));

        task = new StreamTask(
            taskId00,
            Utils.mkSet(partition1, repartition),
            topology,
            consumer,
            changelogReader,
            createConfig(false),
            streamsMetrics,
            stateDirectory,
            null,
            time,
            () -> producer = new MockProducer<>(false, bytesSerializer, bytesSerializer));
        task.initializeStateStores();
        task.initializeTopology();

        task.addRecords(partition1, singletonList(getConsumerRecord(partition1, 5L)));
        task.addRecords(repartition, singletonList(getConsumerRecord(repartition, 10L)));

        assertTrue(task.process());
        assertTrue(task.process());

        task.commit();

        final Map<TopicPartition, Long> map = task.purgableOffsets();

        assertThat(map, equalTo(Collections.singletonMap(repartition, 11L)));
    }

    /** Clean close with an open EOS transaction (never initialized topology) is illegal. */
    @Test
    public void shouldThrowOnCleanCloseTaskWhenEosEnabledIfTransactionInFlight() {
        task = createStatelessTask(createConfig(true));
        try {
            task.close(true, false);
            fail("should have throw IllegalStateException");
        } catch (final IllegalStateException expected) {
            // pass
        }
        task = null;

        assertTrue(producer.closed());
    }

    /** With EOS, a commit must flush even records sent outside process()/punctuate() bookkeeping. */
    @Test
    public void shouldAlwaysCommitIfEosEnabled() {
        task = createStatelessTask(createConfig(true));

        final RecordCollectorImpl recordCollector = new RecordCollectorImpl("StreamTask", new LogContext("StreamTaskTest "), new DefaultProductionExceptionHandler(), new Metrics().sensor("skipped-records"));
        recordCollector.init(producer);

        task.initializeStateStores();
        task.initializeTopology();
        task.punctuate(processorSystemTime, 5, PunctuationType.WALL_CLOCK_TIME, new Punctuator() {
            @Override
            public void punctuate(final long timestamp) {
                recordCollector.send("result-topic1", 3, 5, null, 0, time.milliseconds(), new IntegerSerializer(), new IntegerSerializer());
            }
        });
        task.commit();
        assertEquals(1, producer.history().size());
    }

    /**
     * Builds a task over a topology with one state store; {@code logged} controls
     * whether the store has a changelog topic.
     */
    private StreamTask createStatefulTask(final StreamsConfig config, final boolean logged) {
        final ProcessorTopology topology = ProcessorTopologyFactories.with(
            asList(source1, source2),
            mkMap(mkEntry(topic1, source1), mkEntry(topic2, source2)),
            singletonList(stateStore),
            logged ? Collections.singletonMap(storeName, storeName + "-changelog") : Collections.emptyMap());

        return new StreamTask(
            taskId00,
            partitions,
            topology,
            consumer,
            changelogReader,
            config,
            streamsMetrics,
            stateDirectory,
            null,
            time,
            () -> producer = new MockProducer<>(false, bytesSerializer, bytesSerializer));
    }

    /** Stateful task whose source3 throws on close (EOS enabled). */
    private StreamTask createStatefulTaskThatThrowsExceptionOnClose() {
        final ProcessorTopology topology = ProcessorTopologyFactories.with(
            asList(source1, source3),
            mkMap(mkEntry(topic1, source1), mkEntry(topic2, source3)),
            singletonList(stateStore),
            Collections.emptyMap());

        return new StreamTask(
            taskId00,
            partitions,
            topology,
            consumer,
            changelogReader,
            createConfig(true),
            streamsMetrics,
            stateDirectory,
            null,
            time,
            () -> producer = new MockProducer<>(false, bytesSerializer, bytesSerializer));
    }

    /** Stateless task with both punctuating processors wired as children of the sources. */
    private StreamTask createStatelessTask(final StreamsConfig streamsConfig) {
        final ProcessorTopology topology = withSources(
            asList(source1, source2, processorStreamTime, processorSystemTime),
            mkMap(mkEntry(topic1, source1), mkEntry(topic2, source2))
        );

        source1.addChild(processorStreamTime);
        source2.addChild(processorStreamTime);
        source1.addChild(processorSystemTime);
        source2.addChild(processorSystemTime);

        return new StreamTask(
            taskId00,
            partitions,
            topology,
            consumer,
            changelogReader,
            streamsConfig,
            streamsMetrics,
            stateDirectory,
            null,
            time,
            () -> producer = new MockProducer<>(false, bytesSerializer, bytesSerializer));
    }

    // this task will throw exception when processing (on partition2), flushing, suspending and closing
    private StreamTask createTaskThatThrowsException(final boolean enableEos) {
        final ProcessorTopology topology = withSources(
            asList(source1, source3, processorStreamTime, processorSystemTime),
            mkMap(mkEntry(topic1, source1), mkEntry(topic2, source3))
        );

        source1.addChild(processorStreamTime);
        source3.addChild(processorStreamTime);
        source1.addChild(processorSystemTime);
        source3.addChild(processorSystemTime);

        return new StreamTask(
            taskId00,
            partitions,
            topology,
            consumer,
            changelogReader,
            createConfig(enableEos),
            streamsMetrics,
            stateDirectory,
            null,
            time,
            () -> producer = new MockProducer<>(false, bytesSerializer, bytesSerializer)) {
            @Override
            protected void flushState() {
                throw new RuntimeException("KABOOM!");
            }
        };
    }

    /** Builds a consumer record whose timestamp equals its offset. */
    private ConsumerRecord<byte[], byte[]> getConsumerRecord(final TopicPartition topicPartition, final long offset) {
        return new ConsumerRecord<>(
            topicPartition.topic(),
            topicPartition.partition(),
            offset,
            offset, // use the offset as the timestamp
            TimestampType.CREATE_TIME,
            0L,
            0,
            0,
            recordKey,
            recordValue
        );
    }
}
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ package org.apache.poi.hdf.event; import org.apache.poi.hdf.model.util.BTreeSet; import org.apache.poi.hdf.model.util.NumberFormatter; import org.apache.poi.hdf.model.hdftypes.*; import org.apache.poi.util.LittleEndian; import java.util.ArrayList; public final class EventBridge implements HDFLowLevelParsingListener { private static int HEADER_EVEN_INDEX = 0; private static int HEADER_ODD_INDEX = 1; private static int FOOTER_EVEN_INDEX = 2; private static int FOOTER_ODD_INDEX = 3; private static int HEADER_FIRST_INDEX = 4; private static int FOOTER_FIRST_INDEX = 5; /** This class translates low level events into high level events for this * listener */ HDFParsingListener _listener; /** stylesheet for this document */ StyleSheet _stsh; /** name says it all */ DocumentProperties _dop; /** StyleDescription for the current paragraph. 
*/ StyleDescription _currentStd; /** List info for this doc */ ListTables _listTables; /** "WordDocument" from the POIFS */ byte[] _mainDocument; /** Table0 or Table1 from POIFS */ byte[] _tableStream; /** text offset in main stream */ int _fcMin; int _ccpText; int _ccpFtn; int _hdrSize; int _hdrOffset; /** text pieces */ BTreeSet _text = new BTreeSet(); private boolean _beginHeaders; BTreeSet _hdrSections = new BTreeSet(); BTreeSet _hdrParagraphs = new BTreeSet(); BTreeSet _hdrCharacterRuns = new BTreeSet(); int _sectionCounter = 1; ArrayList _hdrs = new ArrayList(); private boolean _holdParagraph = false; private int _endHoldIndex = -1; private ArrayList _onHold; public EventBridge(HDFParsingListener listener) { _listener = listener; } public void mainDocument(byte[] mainDocument) { _mainDocument = mainDocument; } public void tableStream(byte[] tableStream) { _tableStream = tableStream; } public void miscellaneous(int fcMin, int ccpText, int ccpFtn, int fcPlcfhdd, int lcbPlcfhdd) { _fcMin = fcMin; _ccpText = ccpText; _ccpFtn = ccpFtn; _hdrOffset = fcPlcfhdd; _hdrSize = lcbPlcfhdd; } public void document(DocumentProperties dop) { _dop = dop; } public void bodySection(SepxNode sepx) { SectionProperties sep = (SectionProperties)StyleSheet.uncompressProperty(sepx.getSepx(), new SectionProperties(), _stsh); HeaderFooter[] hdrArray = findSectionHdrFtrs(_sectionCounter); _hdrs.add(hdrArray); _listener.section(sep, sepx.getStart() - _fcMin, sepx.getEnd() - _fcMin); _sectionCounter++; } public void hdrSection(SepxNode sepx) { _beginHeaders = true; _hdrSections.add(sepx); } public void endSections() { for (int x = 1; x < _sectionCounter; x++) { HeaderFooter[] hdrArray = (HeaderFooter[])_hdrs.get(x-1); HeaderFooter hf = null; if (!hdrArray[HeaderFooter.HEADER_EVEN - 1].isEmpty()) { hf = hdrArray[HeaderFooter.HEADER_EVEN - 1]; _listener.header(x - 1, HeaderFooter.HEADER_EVEN); flushHeaderProps(hf.getStart(), hf.getEnd()); } if (!hdrArray[HeaderFooter.HEADER_ODD - 
1].isEmpty()) { hf = hdrArray[HeaderFooter.HEADER_ODD - 1]; _listener.header(x - 1, HeaderFooter.HEADER_ODD); flushHeaderProps(hf.getStart(), hf.getEnd()); } if (!hdrArray[HeaderFooter.FOOTER_EVEN - 1].isEmpty()) { hf = hdrArray[HeaderFooter.FOOTER_EVEN - 1]; _listener.footer(x - 1, HeaderFooter.FOOTER_EVEN); flushHeaderProps(hf.getStart(), hf.getEnd()); } if (!hdrArray[HeaderFooter.FOOTER_ODD - 1].isEmpty()) { hf = hdrArray[HeaderFooter.FOOTER_EVEN - 1]; _listener.footer(x - 1, HeaderFooter.FOOTER_EVEN); flushHeaderProps(hf.getStart(), hf.getEnd()); } if (!hdrArray[HeaderFooter.HEADER_FIRST - 1].isEmpty()) { hf = hdrArray[HeaderFooter.HEADER_FIRST - 1]; _listener.header(x - 1, HeaderFooter.HEADER_FIRST); flushHeaderProps(hf.getStart(), hf.getEnd()); } if (!hdrArray[HeaderFooter.FOOTER_FIRST - 1].isEmpty()) { hf = hdrArray[HeaderFooter.FOOTER_FIRST - 1]; _listener.footer(x - 1, HeaderFooter.FOOTER_FIRST); flushHeaderProps(hf.getStart(), hf.getEnd()); } } } public void paragraph(PapxNode papx) { if (_beginHeaders) { _hdrParagraphs.add(papx); } byte[] bytePapx = papx.getPapx(); int istd = LittleEndian.getShort(bytePapx, 0); _currentStd = _stsh.getStyleDescription(istd); ParagraphProperties pap = (ParagraphProperties)StyleSheet.uncompressProperty(bytePapx, _currentStd.getPAP(), _stsh); if (pap.getFTtp() > 0) { TableProperties tap = (TableProperties)StyleSheet.uncompressProperty(bytePapx, new TableProperties(), _stsh); _listener.tableRowEnd(tap, papx.getStart() - _fcMin, papx.getEnd() - _fcMin); } else if (pap.getIlfo() > 0) { _holdParagraph = true; _endHoldIndex = papx.getEnd(); _onHold.add(papx); } else { _listener.paragraph(pap, papx.getStart() - _fcMin, papx.getEnd() - _fcMin); } } public void characterRun(ChpxNode chpx) { if (_beginHeaders) { _hdrCharacterRuns.add(chpx); } int start = chpx.getStart(); int end = chpx.getEnd(); //check to see if we should hold this characterRun if (_holdParagraph) { _onHold.add(chpx); if (end >= _endHoldIndex) { _holdParagraph = 
false; _endHoldIndex = -1; flushHeldParagraph(); _onHold = new ArrayList(); } } byte[] byteChpx = chpx.getChpx(); CharacterProperties chp = (CharacterProperties)StyleSheet.uncompressProperty(byteChpx, _currentStd.getCHP(), _stsh); ArrayList textList = BTreeSet.findProperties(start, end, _text.root); String text = getTextFromNodes(textList, start, end); _listener.characterRun(chp, text, start - _fcMin, end - _fcMin); } public void text(TextPiece t) { _text.add(t); } public void fonts(FontTable fontTbl) { } public void lists(ListTables listTbl) { _listTables = listTbl; } public void styleSheet(StyleSheet stsh) { _stsh = stsh; } private void flushHeaderProps(int start, int end) { ArrayList list = BTreeSet.findProperties(start, end, _hdrSections.root); int size = list.size(); for (int x = 0; x < size; x++) { SepxNode oldNode = (SepxNode)list.get(x); int secStart = Math.max(oldNode.getStart(), start); int secEnd = Math.min(oldNode.getEnd(), end); //SepxNode node = new SepxNode(-1, secStart, secEnd, oldNode.getSepx()); //bodySection(node); ArrayList parList = BTreeSet.findProperties(secStart, secEnd, _hdrParagraphs.root); int parSize = parList.size(); for (int y = 0; y < parSize; y++) { PapxNode oldParNode = (PapxNode)parList.get(y); int parStart = Math.max(oldParNode.getStart(), secStart); int parEnd = Math.min(oldParNode.getEnd(), secEnd); PapxNode parNode = new PapxNode(parStart, parEnd, oldParNode.getPapx()); paragraph(parNode); ArrayList charList = BTreeSet.findProperties(parStart, parEnd, _hdrCharacterRuns.root); int charSize = charList.size(); for (int z = 0; z < charSize; z++) { ChpxNode oldCharNode = (ChpxNode)charList.get(z); int charStart = Math.max(oldCharNode.getStart(), parStart); int charEnd = Math.min(oldCharNode.getEnd(), parEnd); ChpxNode charNode = new ChpxNode(charStart, charEnd, oldCharNode.getChpx()); characterRun(charNode); } } } } private String getTextFromNodes(ArrayList list, int start, int end) { int size = list.size(); StringBuffer sb = new 
StringBuffer(); for (int x = 0; x < size; x++) { TextPiece piece = (TextPiece)list.get(x); int charStart = Math.max(start, piece.getStart()); int charEnd = Math.min(end, piece.getEnd()); if(piece.usesUnicode()) { for (int y = charStart; y < charEnd; y += 2) { sb.append((char)LittleEndian.getShort(_mainDocument, y)); } } else { for (int y = charStart; y < charEnd; y++) { sb.append(_mainDocument[y]); } } } return sb.toString(); } private void flushHeldParagraph() { PapxNode papx = (PapxNode)_onHold.get(0); byte[] bytePapx = papx.getPapx(); int istd = LittleEndian.getShort(bytePapx, 0); StyleDescription std = _stsh.getStyleDescription(istd); ParagraphProperties pap = (ParagraphProperties)StyleSheet.uncompressProperty(bytePapx, _currentStd.getPAP(), _stsh); LVL lvl = _listTables.getLevel(pap.getIlfo(), pap.getIlvl()); pap = (ParagraphProperties)StyleSheet.uncompressProperty(lvl._papx, pap, _stsh, false); int size = _onHold.size() - 1; CharacterProperties numChp = (CharacterProperties)StyleSheet.uncompressProperty(((ChpxNode)_onHold.get(size)).getChpx(), std.getCHP(), _stsh); numChp = (CharacterProperties)StyleSheet.uncompressProperty(lvl._chpx, numChp, _stsh); String bulletText = getBulletText(lvl, pap); _listener.listEntry(bulletText, numChp, pap, papx.getStart() - _fcMin, papx.getEnd() - _fcMin); for (int x = 1; x <= size; x++) { characterRun((ChpxNode)_onHold.get(x)); } } private String getBulletText(LVL lvl, ParagraphProperties pap) { StringBuffer bulletBuffer = new StringBuffer(); for(int x = 0; x < lvl._xst.length; x++) { if(lvl._xst[x] < 9) { LVL numLevel = _listTables.getLevel(pap.getIlfo(), lvl._xst[x]); int num = numLevel._iStartAt; if(lvl == numLevel) { numLevel._iStartAt++; } else if(num > 1) { num--; } bulletBuffer.append(NumberFormatter.getNumber(num, lvl._nfc)); } else { bulletBuffer.append(lvl._xst[x]); } } switch (lvl._ixchFollow) { case 0: bulletBuffer.append('\u0009'); break; case 1: bulletBuffer.append(' '); break; } return bulletBuffer.toString(); 
} private HeaderFooter[] findSectionHdrFtrs(int index) { HeaderFooter[] hdrArray = new HeaderFooter[6]; for (int x = 1; x < 7; x++) { hdrArray[x-1] = createSectionHdrFtr(index, x); } return hdrArray; } private HeaderFooter createSectionHdrFtr(int index, int type) { if(_hdrSize < 50) { return new HeaderFooter(0,0,0); } int start = _fcMin + _ccpText + _ccpFtn; int end = start; int arrayIndex = 0; switch(type) { case HeaderFooter.HEADER_EVEN: arrayIndex = (HEADER_EVEN_INDEX + (index * 6)); break; case HeaderFooter.FOOTER_EVEN: arrayIndex = (FOOTER_EVEN_INDEX + (index * 6)); break; case HeaderFooter.HEADER_ODD: arrayIndex = (HEADER_ODD_INDEX + (index * 6)); break; case HeaderFooter.FOOTER_ODD: arrayIndex = (FOOTER_ODD_INDEX + (index * 6)); break; case HeaderFooter.HEADER_FIRST: arrayIndex = (HEADER_FIRST_INDEX + (index * 6)); break; case HeaderFooter.FOOTER_FIRST: arrayIndex = (FOOTER_FIRST_INDEX + (index * 6)); break; } start += LittleEndian.getInt(_tableStream, _hdrOffset + (arrayIndex * 4)); end += LittleEndian.getInt(_tableStream, _hdrOffset + (arrayIndex + 1) * 4); HeaderFooter retValue = new HeaderFooter(type, start, end); if((end - start) == 0 && index > 1) { retValue = createSectionHdrFtr(type, index - 1); } return retValue; } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.transport.netty;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ActionNotFoundTransportException;
import org.elasticsearch.transport.RequestHandlerRegistry;
import org.elasticsearch.transport.TransportRequest;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collection;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;

/**
 * Integration test that installs an exception-throwing replacement for the
 * netty transport and verifies that a server-side exception raised in the
 * request dispatcher is propagated back to the transport client.
 */
@ClusterScope(scope = Scope.TEST, numDataNodes = 1)
public class NettyTransportIT extends ESIntegTestCase {
    // static so we can use it in anonymous classes
    private static String channelProfileName = null;

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        // Force network mode and select the exception-throwing transport
        // registered by the TestPlugin below.
        return settingsBuilder().put(super.nodeSettings(nodeOrdinal))
            .put("node.mode", "network")
            .put(NetworkModule.TRANSPORT_TYPE_KEY, "exception-throwing").build();
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginList(ExceptionThrowingNettyTransport.TestPlugin.class);
    }

    /**
     * A plain health request must succeed; the same request carrying the
     * "ERROR" header must fail with the header's message, proving the
     * server-side exception travelled back over the default profile channel.
     */
    public void testThatConnectionFailsAsIntended() throws Exception {
        Client transportClient = internalCluster().transportClient();
        ClusterHealthResponse clusterIndexHealths = transportClient.admin().cluster().prepareHealth().get();
        assertThat(clusterIndexHealths.getStatus(), is(ClusterHealthStatus.GREEN));
        try {
            transportClient.admin().cluster().prepareHealth().putHeader("ERROR", "MY MESSAGE").get();
            fail("Expected exception, but didnt happen");
        } catch (ElasticsearchException e) {
            assertThat(e.getMessage(), containsString("MY MESSAGE"));
            assertThat(channelProfileName, is(NettyTransport.DEFAULT_PROFILE));
        }
    }

    /**
     * A NettyTransport whose server pipeline replaces the standard dispatcher
     * with one that throws when a request carries the "ERROR" header.
     */
    public static final class ExceptionThrowingNettyTransport extends NettyTransport {

        /** Plugin wrapper that registers this transport under "exception-throwing". */
        public static class TestPlugin extends Plugin {
            @Override
            public String name() {
                return "exception-throwing-netty-transport";
            }
            @Override
            public String description() {
                return "an exception throwing transport for testing";
            }
            public void onModule(NetworkModule module) {
                module.registerTransport("exception-throwing", ExceptionThrowingNettyTransport.class);
            }
        }

        @Inject
        public ExceptionThrowingNettyTransport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, Version version, NamedWriteableRegistry namedWriteableRegistry) {
            super(settings, threadPool, networkService, bigArrays, version, namedWriteableRegistry);
        }

        @Override
        public ChannelPipelineFactory configureServerChannelPipelineFactory(String name, Settings groupSettings) {
            return new ErrorPipelineFactory(this, name, groupSettings);
        }

        /** Pipeline factory that swaps the "dispatcher" handler for the throwing one. */
        private static class ErrorPipelineFactory extends ServerChannelPipelineFactory {
            private final ESLogger logger;

            public ErrorPipelineFactory(ExceptionThrowingNettyTransport exceptionThrowingNettyTransport, String name, Settings groupSettings) {
                super(exceptionThrowingNettyTransport, name, groupSettings);
                this.logger = exceptionThrowingNettyTransport.logger;
            }

            @Override
            public ChannelPipeline getPipeline() throws Exception {
                ChannelPipeline pipeline = super.getPipeline();
                // Replace the stock dispatcher with a handler that mimics the
                // normal request path but throws when the "ERROR" header is set.
                pipeline.replace("dispatcher", "dispatcher", new MessageChannelHandler(nettyTransport, logger, NettyTransport.DEFAULT_PROFILE) {

                    @Override
                    protected String handleRequest(Channel channel, StreamInput buffer, long requestId, Version version) throws IOException {
                        final String action = buffer.readString();

                        final NettyTransportChannel transportChannel = new NettyTransportChannel(transport, transportServiceAdapter, action, channel, requestId, version, name);
                        try {
                            final RequestHandlerRegistry reg = transportServiceAdapter.getRequestHandler(action);
                            if (reg == null) {
                                throw new ActionNotFoundTransportException(action);
                            }
                            final TransportRequest request = reg.newRequest();
                            request.remoteAddress(new InetSocketTransportAddress((InetSocketAddress) channel.getRemoteAddress()));
                            request.readFrom(buffer);
                            // This is the hook under test: fail the request with
                            // the message the client put in the "ERROR" header.
                            if (request.hasHeader("ERROR")) {
                                throw new ElasticsearchException((String) request.getHeader("ERROR"));
                            }
                            if (reg.getExecutor() == ThreadPool.Names.SAME) {
                                //noinspection unchecked
                                reg.processMessageReceived(request, transportChannel);
                            } else {
                                threadPool.executor(reg.getExecutor()).execute(new RequestHandler(reg, request, transportChannel));
                            }
                        } catch (Throwable e) {
                            // Send the failure back to the client; if even that
                            // fails, log both exceptions.
                            try {
                                transportChannel.sendResponse(e);
                            } catch (IOException e1) {
                                // NOTE(review): e/e1 look swapped here compared to
                                // onFailure() below (which attaches the send
                                // failure to the first message) — confirm intent.
                                logger.warn("Failed to send error message back to client for action [" + action + "]", e);
                                logger.warn("Actual Exception", e1);
                            }
                        }
                        // Record which profile served the request so the test
                        // can assert on it.
                        channelProfileName = transportChannel.getProfileName();
                        return action;
                    }

                    /** Runs a request handler on its configured executor. */
                    class RequestHandler extends AbstractRunnable {
                        private final RequestHandlerRegistry reg;
                        private final TransportRequest request;
                        private final NettyTransportChannel transportChannel;

                        public RequestHandler(RequestHandlerRegistry reg, TransportRequest request, NettyTransportChannel transportChannel) {
                            this.reg = reg;
                            this.request = request;
                            this.transportChannel = transportChannel;
                        }

                        @SuppressWarnings({"unchecked"})
                        @Override
                        protected void doRun() throws Exception {
                            reg.processMessageReceived(request, transportChannel);
                        }

                        @Override
                        public boolean isForceExecution() {
                            return reg.isForceExecution();
                        }

                        @Override
                        public void onFailure(Throwable e) {
                            if (transport.lifecycleState() == Lifecycle.State.STARTED) {
                                // we can only send a response transport is started....
                                try {
                                    transportChannel.sendResponse(e);
                                } catch (Throwable e1) {
                                    logger.warn("Failed to send error message back to client for action [" + reg.getAction() + "]", e1);
                                    logger.warn("Actual Exception", e);
                                }
                            }
                        }
                    }
                });
                return pipeline;
            }
        }
    }
}
/** * OLAT - Online Learning and Training<br> * http://www.olat.org * <p> * Licensed under the Apache License, Version 2.0 (the "License"); <br> * you may not use this file except in compliance with the License.<br> * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing,<br> * software distributed under the License is distributed on an "AS IS" BASIS, <br> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br> * See the License for the specific language governing permissions and <br> * limitations under the License. * <p> * Copyright (c) 1999-2006 at Multimedia- & E-Learning Services (MELS),<br> * University of Zurich, Switzerland. * <p> */ package org.olat.system.commons; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.net.URLEncoder; import java.text.DateFormat; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang.StringEscapeUtils; import org.apache.log4j.Logger; import org.olat.system.exception.AssertException; import org.olat.system.logging.log4j.LoggerHelper; import org.owasp.esapi.ESAPI; /** * enclosing_type Description: <br> * helper class for formating Strings (not locale specific) * * @author Felix Jost */ public class StringHelper { private static final Logger LOG = LoggerHelper.getLogger(); private static final NumberFormat numFormatter; private static final String WHITESPACE_REGEXP = "^\\s*$"; private static final Pattern WHITESPACE_PATTERN = Pattern.compile(WHITESPACE_REGEXP); private static final 
Pattern p1 = Pattern.compile("\\+"); private static final Pattern p2 = Pattern.compile("%2F"); /** * regex for not allowing <code>;,:</code> <code>ALL_WITHOUT_COMMA_2POINT_STRPNT</code> */ public static final String ALL_WITHOUT_COMMA_2POINT_STRPNT = "^[^,;:]*$"; private static final Pattern ALL_WITHOUT_COMMA_2POINT_STRPNT_PATTERN = Pattern.compile(ALL_WITHOUT_COMMA_2POINT_STRPNT); private static final String X_MAC_ENC = "x-mac-"; private static final String MAC_ENC = "mac"; static { DecimalFormatSymbols dfs = new DecimalFormatSymbols(); dfs.setDecimalSeparator('.'); numFormatter = new DecimalFormat("#.#", dfs); } /** * unused * * @param in * @param delim * @return List */ public static List<String> getParts(String in, String delim) { List<String> li = new ArrayList<String>(); String part; int delimlen = delim.length(); int oldpos = 0; int k; while ((k = in.indexOf(delim, oldpos)) != -1) { part = in.substring(oldpos, k); li.add(part); oldpos = k + delimlen; } if (oldpos != 0) { // min. ein Trennzeichen -> nimm rest part = in.substring(oldpos); li.add(part); } return li; } /** * @param date * @param locale * @return formatted date */ public static String formatLocaleDate(long date, Locale locale) { if (date == -1) return "-"; return DateFormat.getDateInstance(DateFormat.SHORT, locale).format(new Date(date)); } /** * @param date * @param locale * @return formatted date */ public static String formatLocaleDateFull(long date, Locale locale) { if (date == -1) return "-"; return DateFormat.getDateInstance(DateFormat.FULL, locale).format(new Date(date)); } /** * @param date * @param locale * @return formatted date/time */ public static String formatLocaleDateTime(long date, Locale locale) { if (date == -1) return "-"; return DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT, locale).format(new Date(date)); } /** * @param time * @param locale * @return formatted time */ public static String formatLocaleTime(long time, Locale locale) { if (time == -1) return 
"-"; return DateFormat.getTimeInstance(DateFormat.SHORT, locale).format(new Date(time)); } /** * @param mem * @return formatted memory */ public static String formatMemory(long mem) { long kb = mem / 1024; long mb = kb / 1024; if (mb > 0) return mb + " MB"; else if (kb > 0) return kb + " KB"; else return mem + " B"; } /** * @param f * @param fractionDigits * @return formatted float */ public static String formatFloat(float f, int fractionDigits) { numFormatter.setMaximumFractionDigits(fractionDigits); return numFormatter.format(f); } /** * @param url * @return encoded string */ public static String urlEncodeISO88591(String url) { String part; try { part = URLEncoder.encode(url, "iso-8859-1"); } catch (UnsupportedEncodingException e) { throw new RuntimeException("encoding failed (iso-8859-1) for :" + url); } return part; } /** * @param url * @return encoded string */ public static String urlEncodeUTF8(String url) { String encodedURL; try { encodedURL = URLEncoder.encode(url, "UTF-8"); } catch (UnsupportedEncodingException e) { /* * from java.nio.Charset Standard charsets Every implementation of the Java platform is required to support the following standard charsets... ... UTF-8 * Eight-bit UCS Transformation Format ... */ throw new AssertException("utf-8 encoding is needed for proper encoding, but not offered on this java platform????"); } encodedURL = p1.matcher(encodedURL).replaceAll("%20"); encodedURL = p2.matcher(encodedURL).replaceAll("/"); return encodedURL; } /** * Converts all keys of a hash map to a string array. * * @param m * The (hash) map with the key and values * @return The string array containing all keys for this map */ public static String[] getMapKeysAsStringArray(Map m) { return (String[]) m.keySet().toArray(new String[m.size()]); } /** * Converts all values of a hash map to a string array. 
* * @param m * The (hash) map with the key and values * @return The string array containing all values for this map */ public static String[] getMapValuesAsStringArray(Map m) { return (String[]) m.values().toArray(new String[m.size()]); } /** * matches any but ^[^,;:]*$ * * @param s * @return true if does not match regexp */ public static boolean containsNoneOfCoDouSemi(String s) { if (s == null) return false; Matcher m = ALL_WITHOUT_COMMA_2POINT_STRPNT_PATTERN.matcher(s); return m.find(); } /** * Checks if a string has anything in it to display. Will return true if the string is not null and does contain at least one none-whitespace character. * * @param s * The String to be evaluated * @return true if the string contains any non-whitespace character, false otherwhise */ public static boolean containsNonWhitespace(String s) { if (s == null) return false; Matcher matcher = WHITESPACE_PATTERN.matcher(s); // if string matches whitespace pattern then string does not // contain non-whitespace return !matcher.find(); } /** * @param cellValue * @return stripped string */ public static String stripLineBreaks(String cellValue) { cellValue = cellValue.replace('\n', ' '); cellValue = cellValue.replace('\r', ' '); return cellValue; } /** * replaces all newlines and carriage returns with <code> <br/></code> <br/> * on windows: \r\n <br/> * on linux: \n <br/> */ public static String convertLineBreaksToHTML(String plainText) { if (plainText != null && plainText.indexOf("\n") != -1) { return plainText.replaceAll("(\r\n|\n)", "<br/>"); } return plainText; } /** * transforms a displayname to a name that causes no problems on the filesystem (e.g. 
Webclass Energie 2004/2005 -> Webclass_Energie_2004_2005) * * @param s * @return transformed string */ public static String transformDisplayNameToFileSystemName(String s) { s = s.replace('?', '_'); s = s.replace('/', '_'); s = s.replace(' ', '_'); return s; } /** * @param extractedCharset * @return */ public static String check4xMacRoman(String extractedCharset) { // OLAT-1844 // TODO:pb: why do http encoding names not match java encoding names? // the encoding name 'x-mac-roman' must be translated to javas 'x-MacRoman' // but it must be x-mac-roman for browser and htmleditor.. weird naming problem. if (extractedCharset == null) return null; if (extractedCharset.toLowerCase().startsWith(X_MAC_ENC)) { String tmp = extractedCharset.substring(6); String first = tmp.substring(0, 1); tmp = tmp.substring(1); // e.g. convert 'x-mac-roman' to 'x-MacRoman' extractedCharset = "x-Mac" + first.toUpperCase() + tmp; return extractedCharset; } else if (extractedCharset.toLowerCase().startsWith(MAC_ENC)) { // word for macintosh creates charset=macintosh which java does not know, load with iso-8859-1 return "iso-8859-1"; } return extractedCharset; } /** * set of strings to one string comma separated.<br> * e.g. ["a","b","c","s"] -> "a,b,c,s" * * @param selection * @return */ public static String formatAsCSVString(Set<String> entries) { boolean isFirst = true; String csvStr = null; for (Iterator<String> iter = entries.iterator(); iter.hasNext();) { String group = iter.next(); if (isFirst) { csvStr = group; isFirst = false; } else { csvStr += ", " + group; } } return csvStr; } /** * list of strings to one string comma separated.<br> * e.g. 
["a","b","c","s"] -> "a,b,c,s" * * @param selection * @return */ public static String formatAsCSVString(List<String> entries) { boolean isFirst = true; String csvStr = null; for (String entry : entries) { if (isFirst) { csvStr = entry; isFirst = false; } else { csvStr += ", " + entry; } } return csvStr; } /** * list of strings to one string comma separated.<br> * e.g. ["z","a","b","c","s","a"] -> "a, b, c, s, z" No duplicates, alphabetically sorted * * @param selection * @return */ public static String formatAsSortUniqCSVString(List<String> s) { Map<String, String> u = new HashMap<String, String>(); for (Iterator<String> si = s.iterator(); si.hasNext();) { u.put(si.next().trim(), null); } List<String> rv = new ArrayList<String>(); rv.addAll(u.keySet()); rv.remove(""); Collections.sort(rv); return formatAsCSVString(rv); } /** * list of strings to one string comma separated.<br> * e.g. ["z","a","b","c","s","a"] -> "a, b, c, s, z" No duplicates, alphabetically sorted * * @param selection * @return */ public static String formatAsSortUniqCSVString(Set<String> s) { Map<String, String> u = new HashMap<String, String>(); for (Iterator<String> si = s.iterator(); si.hasNext();) { u.put(si.next().trim(), null); } List<String> rv = new ArrayList<String>(); rv.addAll(u.keySet()); rv.remove(""); Collections.sort(rv); return formatAsCSVString(rv); } /** * Wraps whatever external library we are using for "output escaping" <br> * * @param str * the String to escape, may be null * @return a new escaped String, null if null string input */ public static final String escapeHtml(String str) { return StringEscapeUtils.escapeHtml(str); } public static final void escapeHtml(Writer writer, String str) { try { StringEscapeUtils.escapeHtml(writer, str); } catch (IOException e) { LOG.error("Error escaping HTML", e); } } public static final String escapeJavaScript(String str) { return ESAPI.encoder().encodeForJavaScript(str); // return StringEscapeUtils.escapeJavaScript(str); } public static 
final String escapeHtmlAttribute(String str) { return ESAPI.encoder().encodeForHTMLAttribute(str); } public static final String escapeXml(String str) { return StringEscapeUtils.escapeXml(str); } public static final String unescapeHtml(String str) { return StringEscapeUtils.unescapeHtml(str); } }
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium;

import org.apache.cordova.CordovaArgs;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaResourceApi;
import org.apache.cordova.PluginResult;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import android.app.Notification;
import android.app.PendingIntent;
import android.app.NotificationManager;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.support.v4.app.NotificationCompat;
import android.support.v4.content.IntentCompat;
import android.text.Html;
import android.util.Log;

import java.io.InputStream;
import java.util.List;
import java.util.ArrayList;

/**
 * Cordova plugin backing the chrome.notifications API on Android.
 *
 * Notification click/close/button events arrive as broadcast Intents (see
 * {@link #handleNotificationAction}); events that arrive before the webview's
 * message channel is established are queued in {@code pendingEvents} and
 * flushed when the "messageChannel" action is executed.
 */
public class ChromeNotifications extends CordovaPlugin {
    private static final String LOG_TAG = "ChromeNotifications";
    private static final String INTENT_PREFIX = "ChromeNotifications.";
    private static final String MAIN_ACTIVITY_LABEL = INTENT_PREFIX + "MainActivity";
    private static final String NOTIFICATION_CLICKED_ACTION = INTENT_PREFIX + "Click";
    private static final String NOTIFICATION_CLOSED_ACTION = INTENT_PREFIX + "Close";
    private static final String NOTIFICATION_BUTTON_CLICKED_ACTION = INTENT_PREFIX + "ButtonClick";

    // Static because the broadcast receiver may fire while no plugin instance
    // (or no webview) exists yet.
    private static ChromeNotifications pluginInstance;
    private static List<EventInfo> pendingEvents = new ArrayList<EventInfo>();

    private NotificationManager notificationManager;
    private CallbackContext messageChannel;

    /** Immutable record of a notification event queued before the JS side is ready. */
    private static class EventInfo {
        public String action;
        public String notificationId;
        public int buttonIndex;

        public EventInfo(String action, String notificationId, int buttonIndex) {
            this.action = action;
            this.notificationId = notificationId;
            this.buttonIndex = buttonIndex;
        }
    }

    /**
     * Called by the broadcast receiver when a notification is clicked, closed,
     * or one of its buttons is pressed. The Intent action is encoded as
     * "action|notificationId[|buttonIndex]".
     */
    public static void handleNotificationAction(Context context, Intent intent) {
        String[] strings = intent.getAction().split("\\|", 3);
        int buttonIndex = strings.length >= 3 ? Integer.parseInt(strings[2]) : -1;
        if (pluginInstance != null && pluginInstance.messageChannel != null) {
            Log.w(LOG_TAG, "Firing notification to already running webview");
            pluginInstance.sendNotificationMessage(strings[0], strings[1], buttonIndex);
        } else {
            // Webview not ready: queue the event and, if the app is not running
            // at all, relaunch the main activity so it can drain the queue.
            pendingEvents.add(new EventInfo(strings[0], strings[1], buttonIndex));
            if (pluginInstance == null) {
                Intent activityIntent = IntentCompat.makeMainActivity((ComponentName)intent.getParcelableExtra(MAIN_ACTIVITY_LABEL));
                activityIntent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_FROM_BACKGROUND);
                activityIntent.putExtra(MAIN_ACTIVITY_LABEL, MAIN_ACTIVITY_LABEL);
                context.startActivity(activityIntent);
            }
        }
    }

    @Override
    public void pluginInitialize() {
        // If we were launched solely to deliver a notification event, stay in
        // the background rather than bringing the UI forward.
        if (pluginInstance == null && cordova.getActivity().getIntent().hasExtra(MAIN_ACTIVITY_LABEL)) {
            cordova.getActivity().moveTaskToBack(true);
        }
        pluginInstance = this;
        notificationManager = (NotificationManager) cordova.getActivity().getSystemService(Context.NOTIFICATION_SERVICE);
    }

    @Override
    public void onReset() {
        // Page navigation invalidates the JS callback.
        messageChannel = null;
    }

    @Override
    public void onDestroy() {
        messageChannel = null;
    }

    /** Dispatches the JS-exposed actions: create, update, clear, messageChannel. */
    @Override
    public boolean execute(String action, CordovaArgs args, final CallbackContext callbackContext) throws JSONException {
        if ("create".equals(action)) {
            create(args, callbackContext);
            return true;
        } else if ("update".equals(action)) {
            update(args, callbackContext);
            return true;
        } else if ("clear".equals(action)) {
            clear(args, callbackContext);
            return true;
        } else if ("messageChannel".equals(action)) {
            messageChannel = callbackContext;
            // Drain events that arrived before the channel existed.
            for (EventInfo event : pendingEvents) {
                sendNotificationMessage(event.action, event.notificationId, event.buttonIndex);
            }
            pendingEvents.clear();
            return true;
        }
        return false;
    }

    /** Forwards one notification event over the persistent message channel to JS. */
    private void sendNotificationMessage(String action, String notificationId, int buttonIndex) {
        JSONObject obj = new JSONObject();
        try {
            obj.put("action", action.substring(INTENT_PREFIX.length()));
            obj.put("id", notificationId);
            if (NOTIFICATION_BUTTON_CLICKED_ACTION.equals(action)) {
                obj.put("buttonIndex", buttonIndex);
            }
        } catch (JSONException e) {
            // Fix: this exception was previously swallowed silently, hiding
            // malformed-event bugs. Log it; we still send whatever was built.
            Log.e(LOG_TAG, "Failed to build notification message", e);
        }
        PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, obj);
        pluginResult.setKeepCallback(true);
        messageChannel.sendPluginResult(pluginResult);
    }

    /**
     * A notification "exists" iff its click PendingIntent is still registered
     * (FLAG_NO_CREATE returns null when it is not).
     */
    private boolean doesNotificationExist(String notificationId) {
        return makePendingIntent(NOTIFICATION_CLICKED_ACTION, notificationId, -1, PendingIntent.FLAG_NO_CREATE) != null;
    }

    /**
     * Loads an image URL via the Cordova resource API and decodes it to a
     * Bitmap, optionally scaling. Returns null when the image cannot be opened
     * or decoded.
     */
    private Bitmap makeBitmap(String imageUrl, int scaledWidth, int scaledHeight) {
        InputStream largeIconStream;
        try {
            Uri uri = Uri.parse(imageUrl);
            CordovaResourceApi resourceApi = webView.getResourceApi();
            uri = resourceApi.remapUri(uri);
            largeIconStream = resourceApi.openForRead(uri).inputStream;
        } catch (Exception e) {
            Log.e(LOG_TAG, "Failed to open image file " + imageUrl + ": " + e);
            return null;
        }
        Bitmap unscaledBitmap = BitmapFactory.decodeStream(largeIconStream);
        try {
            largeIconStream.close();
        } catch (Exception e) {
            Log.e(LOG_TAG, "Failed to close image file");
        }
        // Fix: decodeStream returns null for undecodable data; the previous
        // code passed that straight into createScaledBitmap and crashed (NPE).
        if (unscaledBitmap == null) {
            Log.e(LOG_TAG, "Failed to decode image file " + imageUrl);
            return null;
        }
        if (scaledWidth != 0 && scaledHeight != 0) {
            return Bitmap.createScaledBitmap(unscaledBitmap, scaledWidth, scaledHeight, false);
        } else {
            return unscaledBitmap;
        }
    }

    /**
     * Builds the broadcast PendingIntent encoding
     * "action|notificationId[|buttonIndex]", carrying the main activity's
     * ComponentName so a cold start can be routed back to it.
     */
    public PendingIntent makePendingIntent(String action, String notificationId, int buttonIndex, int flags) {
        Intent intent = new Intent(cordova.getActivity(), ChromeNotificationsReceiver.class);
        String fullAction = action + "|" + notificationId;
        if (buttonIndex >= 0) {
            fullAction += "|" + buttonIndex;
        }
        intent.setAction(fullAction);
        intent.putExtra(MAIN_ACTIVITY_LABEL, cordova.getActivity().getIntent().getComponent());
        return PendingIntent.getBroadcast(cordova.getActivity(), 0, intent, flags);
    }

    /**
     * Builds and posts the notification described by args[1] under the id
     * args[0], supporting the basic/image/list/progress template types.
     * Must be called off the UI thread (it performs image I/O).
     */
    private void makeNotification(final CordovaArgs args) throws JSONException {
        String notificationId = args.getString(0);
        JSONObject options = args.getJSONObject(1);
        Resources resources = cordova.getActivity().getResources();
        Bitmap largeIcon = makeBitmap(options.getString("iconUrl"),
                                      resources.getDimensionPixelSize(android.R.dimen.notification_large_icon_width),
                                      resources.getDimensionPixelSize(android.R.dimen.notification_large_icon_height));
        // Prefer an app-provided "notification_icon" drawable, fall back to "icon".
        int smallIconId = resources.getIdentifier("notification_icon", "drawable", cordova.getActivity().getPackageName());
        if (smallIconId == 0) {
            smallIconId = resources.getIdentifier("icon", "drawable", cordova.getActivity().getPackageName());
        }
        NotificationCompat.Builder builder = new NotificationCompat.Builder(cordova.getActivity())
            .setSmallIcon(smallIconId)
            .setContentTitle(options.getString("title"))
            .setContentText(options.getString("message"))
            .setLargeIcon(largeIcon)
            .setPriority(options.optInt("priority"))
            .setContentIntent(makePendingIntent(NOTIFICATION_CLICKED_ACTION, notificationId, -1,
                                                PendingIntent.FLAG_CANCEL_CURRENT))
            .setDeleteIntent(makePendingIntent(NOTIFICATION_CLOSED_ACTION, notificationId, -1,
                                               PendingIntent.FLAG_CANCEL_CURRENT));
        double eventTime = options.optDouble("eventTime");
        if (eventTime != 0) {
            builder.setWhen(Math.round(eventTime));
        }
        JSONArray buttons = options.optJSONArray("buttons");
        if (buttons != null) {
            for (int i = 0; i < buttons.length(); i++) {
                JSONObject button = buttons.getJSONObject(i);
                builder.addAction(android.R.drawable.ic_dialog_info, button.getString("title"),
                                  makePendingIntent(NOTIFICATION_BUTTON_CLICKED_ACTION, notificationId, i,
                                                    PendingIntent.FLAG_CANCEL_CURRENT));
            }
        }
        String type = options.getString("type");
        Notification notification;
        if ("image".equals(type)) {
            NotificationCompat.BigPictureStyle bigPictureStyle = new NotificationCompat.BigPictureStyle(builder);
            String bigImageUrl = options.optString("imageUrl");
            if (!bigImageUrl.isEmpty()) {
                bigPictureStyle.bigPicture(makeBitmap(bigImageUrl, 0, 0));
            }
            notification = bigPictureStyle.build();
        } else if ("list".equals(type)) {
            NotificationCompat.InboxStyle inboxStyle = new NotificationCompat.InboxStyle(builder);
            JSONArray items = options.optJSONArray("items");
            if (items != null) {
                for (int i = 0; i < items.length(); i++) {
                    JSONObject item = items.getJSONObject(i);
                    inboxStyle.addLine(Html.fromHtml("<b>" + item.getString("title") + "</b>&nbsp;&nbsp;&nbsp;&nbsp;"
                                                     + item.getString("message")));
                }
            }
            notification = inboxStyle.build();
        } else {
            if ("progress".equals(type)) {
                int progress = options.optInt("progress");
                builder.setProgress(100, progress, false);
            }
            NotificationCompat.BigTextStyle bigTextStyle = new NotificationCompat.BigTextStyle(builder);
            bigTextStyle.bigText(options.getString("message"));
            notification = bigTextStyle.build();
        }
        // String ids from JS are mapped to Android's int ids via hashCode.
        notificationManager.notify(notificationId.hashCode(), notification);
    }

    /** chrome.notifications.create: builds the notification on a worker thread. */
    private void create(final CordovaArgs args, final CallbackContext callbackContext) {
        cordova.getThreadPool().execute(new Runnable() {
            @Override
            public void run() {
                try {
                    makeNotification(args);
                    callbackContext.success();
                } catch (Exception e) {
                    Log.e(LOG_TAG, "Could not create notification", e);
                    callbackContext.error("Could not create notification");
                }
            }
        });
    }

    /** chrome.notifications.update: re-posts only if the notification still exists; resolves 1/0. */
    private void update(final CordovaArgs args, final CallbackContext callbackContext) {
        cordova.getThreadPool().execute(new Runnable() {
            @Override
            public void run() {
                try {
                    if (doesNotificationExist(args.getString(0))) {
                        makeNotification(args);
                        callbackContext.success(1);
                    } else {
                        callbackContext.success(0);
                    }
                } catch (Exception e) {
                    Log.e(LOG_TAG, "Could not update notification", e);
                    callbackContext.error("Could not update notification");
                }
            }
        });
    }

    /** chrome.notifications.clear: cancels the notification and its click intent; resolves 1/0. */
    private void clear(final CordovaArgs args, final CallbackContext callbackContext) {
        try {
            String notificationId = args.getString(0);
            PendingIntent pendingIntent = makePendingIntent(NOTIFICATION_CLICKED_ACTION, notificationId, -1,
                                                            PendingIntent.FLAG_NO_CREATE);
            if (pendingIntent != null) {
                Log.w(LOG_TAG, "Cancel notification: " + notificationId);
                notificationManager.cancel(notificationId.hashCode());
                pendingIntent.cancel();
                callbackContext.success(1);
            } else {
                Log.w(LOG_TAG, "Cancel notification does not exist: " + notificationId);
                callbackContext.success(0);
            }
        } catch (Exception e) {
            Log.e(LOG_TAG, "Could not clear notification", e);
            callbackContext.error("Could not clear notification");
        }
    }
}
/*
 * Copyright 2005 Wavechain Consulting LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.firstopen.singularity.business.inventory;

import java.io.StringReader;

import javax.ejb.MessageDrivenContext;
import javax.jms.Message;
import javax.jms.TextMessage;
import javax.naming.InitialContext;
import javax.naming.NamingException;

import org.apache.log4j.Logger;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLReaderFactory;

/**
 * EJB 2.x message-driven bean that receives RFID count-report XML documents
 * over JMS and applies the resulting inventory deltas via InventorySLSB.
 *
 * The bean is its own SAX ContentHandler: onMessage() parses the TextMessage
 * payload and the startElement/characters/endElement callbacks below drive
 * updateInventory() calls. NOTE(review): the bean therefore carries parse
 * state (charArray, reportName) between callbacks; the container's
 * one-message-at-a-time delivery per instance is what makes this safe.
 */
public class InventorySynchBean implements javax.ejb.MessageDrivenBean,
        javax.jms.MessageListener, org.xml.sax.ContentHandler {

    /**
     *
     */
    private static final long serialVersionUID = -3340823785078394144L;

    // Container-provided EJB context, captured in setMessageDrivenContext().
    MessageDrivenContext ejbContext = null;

    // JNDI context used to look up the InventorySLSB session bean.
    InitialContext jndiContext = null;

    // Accumulates character data between endElement() calls; cleared in
    // endElement()'s finally block.
    StringBuffer charArray = new StringBuffer();

    // Length of the last characters() chunk. NOTE(review): written but never
    // read — appears to be vestigial.
    int length = 0;

    Logger log = null;

    // XML element / report names recognized by the handler.
    public static final String COUNT_REPORT = "countReport";

    public static final String COUNT = "count";

    public static final String STOCK_ID = "epc";

    // EPC URN prefix stripped from stock ids before the inventory update.
    public static final String DELIMITER = "urn:epc:tag:gid-64-i:";

    public static final String DELETION = "DELETION";

    // Remote interface of the inventory session bean; resolved once at
    // context-set time.
    InventorySLSBRemote iSLSB = null;

    // Location code for updates. NOTE(review): hard-coded to "AN1" in
    // endElement() — confirm whether this should come from the message.
    String locCode = null;

    // Report name captured from the listReport element's first attribute.
    String reportName = null;

    // Quantity deltas applied per EPC: +1 for additions, -1 for deletions.
    Double additionDouble = new Double(1.0);

    Double deletionDouble = new Double(-1.0);

    public void ejbCreate() {
    }

    /**
     * EJB lifecycle callback: stores the context, initializes logging, and
     * looks up / creates the InventorySLSB remote used by endElement().
     * Any lookup failure is only logged (at debug), leaving iSLSB null.
     */
    public void setMessageDrivenContext(MessageDrivenContext ctx) {
        try {
            log = Logger.getLogger(getClass());
            log.debug("setMessageDrivenContext()");
            jndiContext = new InitialContext();
            InventorySLSBHome iSLSBHome = (InventorySLSBHome) jndiContext
                    .lookup("ejb/inventory/InventorySLSB");
            iSLSB = iSLSBHome.create();
        } catch (Exception x) {
            log.debug("setMessageDrivenContext() Exception = " + x);
        }
        ejbContext = ctx;
    }

    /**
     * JMS entry point: parses the TextMessage body as XML with a Xerces SAX
     * parser, using this bean as the content handler. Non-text messages are
     * simply cleared. NOTE(review): failures go to printStackTrace and a
     * debug-level log only — the message is effectively dropped, not redelivered.
     */
    public void onMessage(Message message) {
        log.debug("onMessage()");
        InputSource inSource = null;
        StringReader stringReader = null;
        try {
            /*
             * if(System.getSecurityManager() == null) { log.debug("onMessage()
             * Instantiating new RMISecurityManager");
             * System.setSecurityManager(new RMISecurityManager()); }
             */
            if (message instanceof TextMessage) {
                stringReader = new StringReader(((TextMessage) message)
                        .getText());
                inSource = new InputSource(stringReader);
                XMLReader xmlReaderObj = XMLReaderFactory
                        .createXMLReader("org.apache.xerces.parsers.SAXParser");
                xmlReaderObj.setContentHandler(this);
                // Namespace-aware, no prefix reporting, no schema validation.
                xmlReaderObj.setFeature(
                        "http://xml.org/sax/features/namespaces", true);
                xmlReaderObj
                        .setFeature(
                                "http://xml.org/sax/features/namespace-prefixes",
                                false);
                xmlReaderObj.setFeature(
                        "http://apache.org/xml/features/validation/schema",
                        false);
                log.debug("onMessage() About to parse");
                xmlReaderObj.parse(inSource);
            } else {
                message.clearBody();
            }
        } catch (Exception x) {
            x.printStackTrace();
            log.debug("onMessage(): Exception creating thread! Exception = "
                    + x);
        } finally {
            try {
                if (stringReader != null)
                    stringReader.close();
            } catch (Exception x) {
                log.debug("could not close a stream");
            }
        }
    }

    /** EJB lifecycle callback: releases the JNDI context and EJB context. */
    public void ejbRemove() {
        log.debug("ejbRemove()");
        try {
            jndiContext.close();
            ejbContext = null;
        } catch (NamingException x) {
            log.debug("ejbRemove() NamingException = " + x);
        }
    }

    // --- SAX ContentHandler callbacks (mostly trace-only) ---

    public void startDocument() {
        log.debug("startDocument");
    }

    public void startPrefixMapping(java.lang.String prefix, String uri) {
        log.debug("startPrefixMapping() ");
    }

    public void skippedEntity(java.lang.String name) {
        log.debug("skippedEntity() id = " + name);
    }

    public void setDocumentLocator(org.xml.sax.Locator locator) {
        log.debug("setDocumentLocator() locator = " + locator);
    }

    public void endPrefixMapping(java.lang.String prefix) {
        log.debug("endPrefixMapping() prefix = " + prefix);
    }

    public void processingInstruction(java.lang.String target,
            java.lang.String data) {
        log.debug("processingInstruction() ");
    }

    /**
     * Captures the report name from the first attribute of the listReport
     * element; all other elements are ignored here.
     * NOTE(review): if no listReport element arrives, reportName stays null
     * and endElement()'s reportName.equals(...) would NPE (caught and logged
     * by its catch block).
     */
    public void startElement(String uri, String localName, String qName,
            Attributes attrs) {
        // log.debug("startElement: localName = " +localName);
        if (localName.equals("listReport")) {
            // log.debug("startElement: attributes = "+attrs.getLength());
            log.debug("startElement: attribute value = " + attrs.getValue(0));
            if (attrs.getValue(0) != null)
                reportName = attrs.getValue(0);
        }
    }

    public void ignorableWhitespace(char buf[], int offset, int len) {
        log.debug("ignorableWhitespace: offset = " + offset + "; length = "
                + len);
    }

    /**
     * Accumulates element text into charArray. Chunks beginning with '\n' are
     * skipped entirely (see original author's note below).
     */
    public void characters(char buf[], int offset, int len) {
        /*
         * While parsing a java.io.Reader (as opposed to a java.io.InputStream),
         * SAX parser calls this method when it encounters the '\n' character
         * ... no good for me 'cause in doing so, my charArray StringBuffer will
         * be over-written and populated with a '\n'.
         */
        if (buf[offset] == '\n')
            return;
        charArray.append(buf, offset, len);
        length = len;
    }

    /**
     * On each closing epc element, strips the EPC URN prefix from the
     * accumulated text and applies a +1 (addition) or -1 (DELETION report)
     * inventory delta. The text buffer is always cleared afterwards.
     */
    public void endElement(String nameSpaceURI, String localName, String qName) {
        try {
            // log.debug("endElement: localName = "+localName+" value =
            // "+charArray.toString());
            if (localName.equals(COUNT))
                log.debug("Count = " + charArray.toString());
            else if (localName.equals(STOCK_ID)) {
                String stockId = charArray.toString();
                log.debug("stockId = " + stockId);
                locCode = "AN1";
                // 21 == DELIMITER.length(); skips past "urn:epc:tag:gid-64-i:".
                // NOTE(review): if the delimiter is absent, indexOf yields -1
                // and start becomes 20 — presumably inputs always carry the
                // prefix; confirm against the message producer.
                int start = stockId.indexOf(DELIMITER) + 21;
                if (reportName.equals(DELETION))
                    iSLSB.updateInventory(locCode, stockId.substring(start),
                            deletionDouble, new Integer(2));
                else
                    iSLSB.updateInventory(locCode, stockId.substring(start),
                            additionDouble, new Integer(2));
            }
        } catch (Exception x) {
            log.debug("endElement() Exception = " + x);
        } finally {
            // Reset the text accumulator for the next element.
            int l = charArray.length();
            charArray.delete(0, l);
        }
    }

    public void endDocument() {
        log.debug("endDocument()");
    }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.machinelearning.model;

import java.io.Serializable;

/**
 * <p>
 * Describes the data specification of a <code>DataSource</code>.
 * </p>
 * <p>
 * Plain value holder with fluent <code>with*</code> mutators; equality,
 * hash code and <code>toString</code> are defined over all four fields.
 * </p>
 */
public class S3DataSpec implements Serializable, Cloneable {

    /**
     * The location of the data file(s) used by a <code>DataSource</code>: an
     * S3 URI naming a data file, or an S3 directory/bucket containing data
     * files.
     */
    private String dataLocationS3;

    /** Describes the splitting requirement of a <code>Datasource</code>. */
    private String dataRearrangement;

    /**
     * A JSON string holding the schema for an S3 <code>DataSource</code>:
     * a series of key-value pairs such as <code>"version"</code>,
     * <code>"recordAnnotationFieldName"</code>,
     * <code>"recordWeightFieldName"</code>, <code>"targetFieldName"</code>,
     * <code>"dataFormat"</code>, <code>"dataFileContainsHeader"</code>, an
     * <code>"attributes"</code> array of <code>fieldName</code>/
     * <code>fieldType</code> pairs, and
     * <code>"excludedVariableNames"</code>. It defines the structure of the
     * observation data in the referenced data file(s).
     */
    private String dataSchema;

    /** Describes the schema Location in Amazon S3. */
    private String dataSchemaLocationS3;

    /**
     * Sets the S3 location of the data file(s).
     *
     * @param dataLocationS3
     *        S3 URI of a data file, or of a directory/bucket of data files.
     */
    public void setDataLocationS3(String dataLocationS3) {
        this.dataLocationS3 = dataLocationS3;
    }

    /**
     * @return the S3 location of the data file(s).
     */
    public String getDataLocationS3() {
        return this.dataLocationS3;
    }

    /**
     * Fluent variant of {@link #setDataLocationS3(String)}.
     *
     * @param dataLocationS3
     *        S3 URI of a data file, or of a directory/bucket of data files.
     * @return this object, for call chaining.
     */
    public S3DataSpec withDataLocationS3(String dataLocationS3) {
        setDataLocationS3(dataLocationS3);
        return this;
    }

    /**
     * Sets the splitting requirement of the <code>Datasource</code>.
     *
     * @param dataRearrangement
     *        the splitting requirement.
     */
    public void setDataRearrangement(String dataRearrangement) {
        this.dataRearrangement = dataRearrangement;
    }

    /**
     * @return the splitting requirement of the <code>Datasource</code>.
     */
    public String getDataRearrangement() {
        return this.dataRearrangement;
    }

    /**
     * Fluent variant of {@link #setDataRearrangement(String)}.
     *
     * @param dataRearrangement
     *        the splitting requirement.
     * @return this object, for call chaining.
     */
    public S3DataSpec withDataRearrangement(String dataRearrangement) {
        setDataRearrangement(dataRearrangement);
        return this;
    }

    /**
     * Sets the JSON schema string for the S3 <code>DataSource</code> (see
     * {@link #dataSchema} for the expected key-value format).
     *
     * @param dataSchema
     *        JSON schema describing the observation data.
     */
    public void setDataSchema(String dataSchema) {
        this.dataSchema = dataSchema;
    }

    /**
     * @return the JSON schema string for the S3 <code>DataSource</code>.
     */
    public String getDataSchema() {
        return this.dataSchema;
    }

    /**
     * Fluent variant of {@link #setDataSchema(String)}.
     *
     * @param dataSchema
     *        JSON schema describing the observation data.
     * @return this object, for call chaining.
     */
    public S3DataSpec withDataSchema(String dataSchema) {
        setDataSchema(dataSchema);
        return this;
    }

    /**
     * Sets the schema Location in Amazon S3.
     *
     * @param dataSchemaLocationS3
     *        the schema location.
     */
    public void setDataSchemaLocationS3(String dataSchemaLocationS3) {
        this.dataSchemaLocationS3 = dataSchemaLocationS3;
    }

    /**
     * @return the schema Location in Amazon S3.
     */
    public String getDataSchemaLocationS3() {
        return this.dataSchemaLocationS3;
    }

    /**
     * Fluent variant of {@link #setDataSchemaLocationS3(String)}.
     *
     * @param dataSchemaLocationS3
     *        the schema location.
     * @return this object, for call chaining.
     */
    public S3DataSpec withDataSchemaLocationS3(String dataSchemaLocationS3) {
        setDataSchemaLocationS3(dataSchemaLocationS3);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null fields are included.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDataLocationS3() != null) {
            sb.append("DataLocationS3: ").append(getDataLocationS3()).append(",");
        }
        if (getDataRearrangement() != null) {
            sb.append("DataRearrangement: ").append(getDataRearrangement()).append(",");
        }
        if (getDataSchema() != null) {
            sb.append("DataSchema: ").append(getDataSchema()).append(",");
        }
        if (getDataSchemaLocationS3() != null) {
            sb.append("DataSchemaLocationS3: ").append(getDataSchemaLocationS3());
        }
        sb.append("}");
        return sb.toString();
    }

    /** Null-safe field comparison used by {@link #equals(Object)}. */
    private static boolean bothEqual(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof S3DataSpec)) {
            return false;
        }
        S3DataSpec that = (S3DataSpec) obj;
        return bothEqual(that.getDataLocationS3(), getDataLocationS3())
                && bothEqual(that.getDataRearrangement(), getDataRearrangement())
                && bothEqual(that.getDataSchema(), getDataSchema())
                && bothEqual(that.getDataSchemaLocationS3(), getDataSchemaLocationS3());
    }

    /** Null-safe hash contribution used by {@link #hashCode()}. */
    private static int hashOf(Object o) {
        return o == null ? 0 : o.hashCode();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + hashOf(getDataLocationS3());
        hashCode = prime * hashCode + hashOf(getDataRearrangement());
        hashCode = prime * hashCode + hashOf(getDataSchema());
        hashCode = prime * hashCode + hashOf(getDataSchemaLocationS3());
        return hashCode;
    }

    @Override
    public S3DataSpec clone() {
        try {
            return (S3DataSpec) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
/**
 */
package etlMetaModel.impl;

import etlMetaModel.*;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EDataType;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.impl.EFactoryImpl;
import org.eclipse.emf.ecore.plugin.EcorePlugin;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model <b>Factory</b>.
 * <p>
 * NOTE(review): bodies were hand-simplified (redundant locals removed, braces
 * added, enum-literal validation deduplicated into {@link #checkEnumerator});
 * behavior and all exception messages are unchanged. Regenerating with EMF
 * would overwrite these edits unless the methods are marked {@code @generated NOT}.
 * <!-- end-user-doc -->
 * @generated NOT
 */
public class EtlMetaModelFactoryImpl extends EFactoryImpl implements EtlMetaModelFactory {
	/**
	 * Creates the default factory implementation: returns the factory registered
	 * for {@link EtlMetaModelPackage#eNS_URI} if present, otherwise a new instance.
	 * Lookup failures are logged and fall through to the fresh instance.
	 */
	public static EtlMetaModelFactory init() {
		try {
			EtlMetaModelFactory theEtlMetaModelFactory = (EtlMetaModelFactory) EPackage.Registry.INSTANCE.getEFactory(EtlMetaModelPackage.eNS_URI);
			if (theEtlMetaModelFactory != null) {
				return theEtlMetaModelFactory;
			}
		}
		catch (Exception exception) {
			EcorePlugin.INSTANCE.log(exception);
		}
		return new EtlMetaModelFactoryImpl();
	}

	/**
	 * Creates an instance of the factory.
	 */
	public EtlMetaModelFactoryImpl() {
		super();
	}

	/**
	 * Dispatches on the classifier ID and returns a fresh instance of the
	 * corresponding model class.
	 *
	 * @throws IllegalArgumentException if {@code eClass} is not a classifier of this package
	 */
	@Override
	public EObject create(EClass eClass) {
		switch (eClass.getClassifierID()) {
			case EtlMetaModelPackage.MOF_NAMED_ELEMENT: return createMofNamedElement();
			case EtlMetaModelPackage.MOF_TYPE: return createMofType();
			case EtlMetaModelPackage.MOF_CLASS: return createMofClass();
			case EtlMetaModelPackage.MOF_TYPED_ELEMENT: return createMofTypedElement();
			case EtlMetaModelPackage.MOF_MULTIPLICITY_ELEMENT: return createMofMultiplicityElement();
			case EtlMetaModelPackage.MOF_PROPERTY: return createMofProperty();
			case EtlMetaModelPackage.MOF_ASSOCIATION: return createMofAssociation();
			case EtlMetaModelPackage.MOF_PRIMITIVE_TYPE: return createMofPrimitiveType();
			case EtlMetaModelPackage.MOF_BOOLEAN: return createMofBoolean();
			case EtlMetaModelPackage.MOF_INTEGER: return createMofInteger();
			case EtlMetaModelPackage.MOF_STRING: return createMofString();
			case EtlMetaModelPackage.ETL_MODULE: return createEtlModule();
			case EtlMetaModelPackage.ERL_NAMED_RULE: return createErlNamedRule();
			case EtlMetaModelPackage.ETL_TRANSFORMATION_RULE: return createEtlTransformationRule();
			case EtlMetaModelPackage.EOL_LIBRARY_MODULE: return createEolLibraryModule();
			case EtlMetaModelPackage.EOL_OPERATION: return createEolOperation();
			case EtlMetaModelPackage.EOL_MOF_CLASS_FORMAL_PARAMETER: return createEolMofClassFormalParameter();
			case EtlMetaModelPackage.EOL_EXECUTEABLE_ANNOTATION: return createEolExecuteableAnnotation();
			case EtlMetaModelPackage.SIMPLE_ANNOTATION: return createSimpleAnnotation();
			case EtlMetaModelPackage.MODEL_ELEMENT_TYPE: return createModelElementType();
			case EtlMetaModelPackage.EOL_NATIVE: return createEolNative();
			case EtlMetaModelPackage.EOL_BOOLEAN_TYPE: return createEolBooleanType();
			case EtlMetaModelPackage.EOL_REAL_TYPE: return createEolRealType();
			case EtlMetaModelPackage.EOL_INTEGER_TYPE: return createEolIntegerType();
			case EtlMetaModelPackage.EOL_STRING_TYPE: return createEolStringType();
			case EtlMetaModelPackage.EOL_MAP_TYPE: return createEolMapType();
			case EtlMetaModelPackage.EOL_ORDERED_SET_TYPE: return createEolOrderedSetType();
			case EtlMetaModelPackage.EOL_SET_TYPE: return createEolSetType();
			case EtlMetaModelPackage.EOL_BAG_TYPE: return createEolBagType();
			case EtlMetaModelPackage.EOL_SEQUENCE_TYPE: return createEolSequenceType();
			case EtlMetaModelPackage.EOL_EXPRESSION_OR_STATEMENT_BLOCK: return createEolExpressionOrStatementBlock();
			case EtlMetaModelPackage.EOL_LOGICAL_EXPRESSION: return createEolLogicalExpression();
			case EtlMetaModelPackage.EOL_CHAINED_LOGICAL_EXPRESSION: return createEolChainedLogicalExpression();
			case EtlMetaModelPackage.EOL_RELATIONAL_EXPRESSION: return createEolRelationalExpression();
			case EtlMetaModelPackage.EOL_CHAINED_RELATIONAL_EXPRESSION: return createEolChainedRelationalExpression();
			case EtlMetaModelPackage.EOL_COMPARING_TO_ADDITIVE_RELATIONAL_EXPRESSION: return createEolComparingToAdditiveRelationalExpression();
			case EtlMetaModelPackage.EOL_ADDITIVE_EXPRESSION: return createEolAdditiveExpression();
			case EtlMetaModelPackage.EOL_CHAINED_ADDITIVE_EXPRESSION: return createEolChainedAdditiveExpression();
			case EtlMetaModelPackage.EOL_MULTIPLICATIVE_EXPRESSION: return createEolMultiplicativeExpression();
			case EtlMetaModelPackage.EOL_CHAINED_MULTIPLICATIVE_EXPRESSION: return createEolChainedMultiplicativeExpression();
			case EtlMetaModelPackage.EOL_UNARY_EXPRESSION: return createEolUnaryExpression();
			case EtlMetaModelPackage.EOL_UNARY_OPRATOR_NULLABLE: return createEolUnaryOpratorNullable();
			case EtlMetaModelPackage.EOL_POSTFIX_EXPRESSION: return createEolPostfixExpression();
			case EtlMetaModelPackage.EOL_CHAINED_FEATURE_CALL_POSTFIX_EXPRESSION: return createEolChainedFeatureCallPostfixExpression();
			case EtlMetaModelPackage.EOL_ITEM_SELECTOR_EXPRESSION: return createEolItemSelectorExpression();
			case EtlMetaModelPackage.EOL_SIMPLE_FEATURE_CALL: return createEolSimpleFeatureCall();
			case EtlMetaModelPackage.EOL_MOF_PROPERTY_FEATURE_CALL: return createEolMofPropertyFeatureCall();
			case EtlMetaModelPackage.ETL_EQUIVALENT_METHOD_FEATURE_CALL: return createEtlEquivalentMethodFeatureCall();
			case EtlMetaModelPackage.EOL_IS_TYPE_OF_SOURCE_MOF_CLASS_FEATURE_CALL: return createEolIsTypeOfSourceMofClassFeatureCall();
			case EtlMetaModelPackage.EOL_PARAMETER_LIST: return createEolParameterList();
			case EtlMetaModelPackage.EOL_NEW_EXPRESSION: return createEolNewExpression();
			case EtlMetaModelPackage.EOL_VARIABLE_DECLARATION_EXPRESSION: return createEolVariableDeclarationExpression();
			case EtlMetaModelPackage.EOL_STRING_LITERAL: return createEolStringLiteral();
			case EtlMetaModelPackage.EOL_FORMAL_PARAMETER_REFERENCE_EXPRESSION: return createEolFormalParameterReferenceExpression();
			case EtlMetaModelPackage.EOL_VARIABLE_REFERENCE_EXPRESSION: return createEolVariableReferenceExpression();
			case EtlMetaModelPackage.EOL_STATEMENT_BLOCK: return createEolStatementBlock();
			case EtlMetaModelPackage.EOL_ASSIGNMENT_STATEMENT: return createEolAssignmentStatement();
			case EtlMetaModelPackage.EOL_ASSIGNMENT_EXPRESSION_STATEMENT: return createEolAssignmentExpressionStatement();
			case EtlMetaModelPackage.EOL_LOGICAL_EXPRESSION_STATEMENT: return createEolLogicalExpressionStatement();
			case EtlMetaModelPackage.EOL_FOR: return createEolFor();
			default:
				throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier");
		}
	}

	/**
	 * Parses a data-type literal into its enum value.
	 *
	 * @throws IllegalArgumentException if {@code eDataType} is not a data type of this package
	 */
	@Override
	public Object createFromString(EDataType eDataType, String initialValue) {
		switch (eDataType.getClassifierID()) {
			case EtlMetaModelPackage.EOL_LOGICAL_OPERATOR: return createEolLogicalOperatorFromString(eDataType, initialValue);
			case EtlMetaModelPackage.EOL_RELATIONAL_OPERATOR: return createEolRelationalOperatorFromString(eDataType, initialValue);
			case EtlMetaModelPackage.EOL_ADDITIVE_COMPARISON_OPERATOR: return createEolAdditiveComparisonOperatorFromString(eDataType, initialValue);
			case EtlMetaModelPackage.EOL_ADDITIVE_ARITHMETICAL_OPERATOR: return createEolAdditiveArithmeticalOperatorFromString(eDataType, initialValue);
			case EtlMetaModelPackage.EOL_MULTIPLICATIVE_ARITHMETICAL_OPERATOR: return createEolMultiplicativeArithmeticalOperatorFromString(eDataType, initialValue);
			case EtlMetaModelPackage.EOL_UNARY_OPRATOR: return createEolUnaryOpratorFromString(eDataType, initialValue);
			default:
				throw new IllegalArgumentException("The datatype '" + eDataType.getName() + "' is not a valid classifier");
		}
	}

	/**
	 * Converts an enum value of a package data type back to its literal form.
	 *
	 * @throws IllegalArgumentException if {@code eDataType} is not a data type of this package
	 */
	@Override
	public String convertToString(EDataType eDataType, Object instanceValue) {
		switch (eDataType.getClassifierID()) {
			case EtlMetaModelPackage.EOL_LOGICAL_OPERATOR: return convertEolLogicalOperatorToString(eDataType, instanceValue);
			case EtlMetaModelPackage.EOL_RELATIONAL_OPERATOR: return convertEolRelationalOperatorToString(eDataType, instanceValue);
			case EtlMetaModelPackage.EOL_ADDITIVE_COMPARISON_OPERATOR: return convertEolAdditiveComparisonOperatorToString(eDataType, instanceValue);
			case EtlMetaModelPackage.EOL_ADDITIVE_ARITHMETICAL_OPERATOR: return convertEolAdditiveArithmeticalOperatorToString(eDataType, instanceValue);
			case EtlMetaModelPackage.EOL_MULTIPLICATIVE_ARITHMETICAL_OPERATOR: return convertEolMultiplicativeArithmeticalOperatorToString(eDataType, instanceValue);
			case EtlMetaModelPackage.EOL_UNARY_OPRATOR: return convertEolUnaryOpratorToString(eDataType, instanceValue);
			default:
				throw new IllegalArgumentException("The datatype '" + eDataType.getName() + "' is not a valid classifier");
		}
	}

	/** Creates a new {@link MofNamedElement}. */
	public MofNamedElement createMofNamedElement() {
		return new MofNamedElementImpl();
	}

	/** Creates a new {@link MofType}. */
	public MofType createMofType() {
		return new MofTypeImpl();
	}

	/** Creates a new {@link MofClass}. */
	public MofClass createMofClass() {
		return new MofClassImpl();
	}

	/** Creates a new {@link MofTypedElement}. */
	public MofTypedElement createMofTypedElement() {
		return new MofTypedElementImpl();
	}

	/** Creates a new {@link MofMultiplicityElement}. */
	public MofMultiplicityElement createMofMultiplicityElement() {
		return new MofMultiplicityElementImpl();
	}

	/** Creates a new {@link MofProperty}. */
	public MofProperty createMofProperty() {
		return new MofPropertyImpl();
	}

	/** Creates a new {@link MofAssociation}. */
	public MofAssociation createMofAssociation() {
		return new MofAssociationImpl();
	}

	/** Creates a new {@link MofPrimitiveType}. */
	public MofPrimitiveType createMofPrimitiveType() {
		return new MofPrimitiveTypeImpl();
	}

	/** Creates a new {@link MofBoolean}. */
	public MofBoolean createMofBoolean() {
		return new MofBooleanImpl();
	}

	/** Creates a new {@link MofInteger}. */
	public MofInteger createMofInteger() {
		return new MofIntegerImpl();
	}

	/** Creates a new {@link MofString}. */
	public MofString createMofString() {
		return new MofStringImpl();
	}

	/** Creates a new {@link EtlModule}. */
	public EtlModule createEtlModule() {
		return new EtlModuleImpl();
	}

	/** Creates a new {@link ErlNamedRule}. */
	public ErlNamedRule createErlNamedRule() {
		return new ErlNamedRuleImpl();
	}

	/** Creates a new {@link EtlTransformationRule}. */
	public EtlTransformationRule createEtlTransformationRule() {
		return new EtlTransformationRuleImpl();
	}

	/** Creates a new {@link EolLibraryModule}. */
	public EolLibraryModule createEolLibraryModule() {
		return new EolLibraryModuleImpl();
	}

	/** Creates a new {@link EolOperation}. */
	public EolOperation createEolOperation() {
		return new EolOperationImpl();
	}

	/** Creates a new {@link EolMofClassFormalParameter}. */
	public EolMofClassFormalParameter createEolMofClassFormalParameter() {
		return new EolMofClassFormalParameterImpl();
	}

	/** Creates a new {@link EolExecuteableAnnotation}. */
	public EolExecuteableAnnotation createEolExecuteableAnnotation() {
		return new EolExecuteableAnnotationImpl();
	}

	/** Creates a new {@link SimpleAnnotation}. */
	public SimpleAnnotation createSimpleAnnotation() {
		return new SimpleAnnotationImpl();
	}

	/** Creates a new {@link ModelElementType}. */
	public ModelElementType createModelElementType() {
		return new ModelElementTypeImpl();
	}

	/** Creates a new {@link EolNative}. */
	public EolNative createEolNative() {
		return new EolNativeImpl();
	}

	/** Creates a new {@link EolBooleanType}. */
	public EolBooleanType createEolBooleanType() {
		return new EolBooleanTypeImpl();
	}

	/** Creates a new {@link EolRealType}. */
	public EolRealType createEolRealType() {
		return new EolRealTypeImpl();
	}

	/** Creates a new {@link EolIntegerType}. */
	public EolIntegerType createEolIntegerType() {
		return new EolIntegerTypeImpl();
	}

	/** Creates a new {@link EolStringType}. */
	public EolStringType createEolStringType() {
		return new EolStringTypeImpl();
	}

	/** Creates a new {@link EolMapType}. */
	public EolMapType createEolMapType() {
		return new EolMapTypeImpl();
	}

	/** Creates a new {@link EolOrderedSetType}. */
	public EolOrderedSetType createEolOrderedSetType() {
		return new EolOrderedSetTypeImpl();
	}

	/** Creates a new {@link EolSetType}. */
	public EolSetType createEolSetType() {
		return new EolSetTypeImpl();
	}

	/** Creates a new {@link EolBagType}. */
	public EolBagType createEolBagType() {
		return new EolBagTypeImpl();
	}

	/** Creates a new {@link EolSequenceType}. */
	public EolSequenceType createEolSequenceType() {
		return new EolSequenceTypeImpl();
	}

	/** Creates a new {@link EolExpressionOrStatementBlock}. */
	public EolExpressionOrStatementBlock createEolExpressionOrStatementBlock() {
		return new EolExpressionOrStatementBlockImpl();
	}

	/** Creates a new {@link EolLogicalExpression}. */
	public EolLogicalExpression createEolLogicalExpression() {
		return new EolLogicalExpressionImpl();
	}

	/** Creates a new {@link EolChainedLogicalExpression}. */
	public EolChainedLogicalExpression createEolChainedLogicalExpression() {
		return new EolChainedLogicalExpressionImpl();
	}

	/** Creates a new {@link EolRelationalExpression}. */
	public EolRelationalExpression createEolRelationalExpression() {
		return new EolRelationalExpressionImpl();
	}

	/** Creates a new {@link EolChainedRelationalExpression}. */
	public EolChainedRelationalExpression createEolChainedRelationalExpression() {
		return new EolChainedRelationalExpressionImpl();
	}

	/** Creates a new {@link EolComparingToAdditiveRelationalExpression}. */
	public EolComparingToAdditiveRelationalExpression createEolComparingToAdditiveRelationalExpression() {
		return new EolComparingToAdditiveRelationalExpressionImpl();
	}

	/** Creates a new {@link EolAdditiveExpression}. */
	public EolAdditiveExpression createEolAdditiveExpression() {
		return new EolAdditiveExpressionImpl();
	}

	/** Creates a new {@link EolChainedAdditiveExpression}. */
	public EolChainedAdditiveExpression createEolChainedAdditiveExpression() {
		return new EolChainedAdditiveExpressionImpl();
	}

	/** Creates a new {@link EolMultiplicativeExpression}. */
	public EolMultiplicativeExpression createEolMultiplicativeExpression() {
		return new EolMultiplicativeExpressionImpl();
	}

	/** Creates a new {@link EolChainedMultiplicativeExpression}. */
	public EolChainedMultiplicativeExpression createEolChainedMultiplicativeExpression() {
		return new EolChainedMultiplicativeExpressionImpl();
	}

	/** Creates a new {@link EolUnaryExpression}. */
	public EolUnaryExpression createEolUnaryExpression() {
		return new EolUnaryExpressionImpl();
	}

	/** Creates a new {@link EolUnaryOpratorNullable}. */
	public EolUnaryOpratorNullable createEolUnaryOpratorNullable() {
		return new EolUnaryOpratorNullableImpl();
	}

	/** Creates a new {@link EolPostfixExpression}. */
	public EolPostfixExpression createEolPostfixExpression() {
		return new EolPostfixExpressionImpl();
	}

	/** Creates a new {@link EolChainedFeatureCallPostfixExpression}. */
	public EolChainedFeatureCallPostfixExpression createEolChainedFeatureCallPostfixExpression() {
		return new EolChainedFeatureCallPostfixExpressionImpl();
	}

	/** Creates a new {@link EolItemSelectorExpression}. */
	public EolItemSelectorExpression createEolItemSelectorExpression() {
		return new EolItemSelectorExpressionImpl();
	}

	/** Creates a new {@link EolSimpleFeatureCall}. */
	public EolSimpleFeatureCall createEolSimpleFeatureCall() {
		return new EolSimpleFeatureCallImpl();
	}

	/** Creates a new {@link EolMofPropertyFeatureCall}. */
	public EolMofPropertyFeatureCall createEolMofPropertyFeatureCall() {
		return new EolMofPropertyFeatureCallImpl();
	}

	/** Creates a new {@link EtlEquivalentMethodFeatureCall}. */
	public EtlEquivalentMethodFeatureCall createEtlEquivalentMethodFeatureCall() {
		return new EtlEquivalentMethodFeatureCallImpl();
	}

	/** Creates a new {@link EolIsTypeOfSourceMofClassFeatureCall}. */
	public EolIsTypeOfSourceMofClassFeatureCall createEolIsTypeOfSourceMofClassFeatureCall() {
		return new EolIsTypeOfSourceMofClassFeatureCallImpl();
	}

	/** Creates a new {@link EolParameterList}. */
	public EolParameterList createEolParameterList() {
		return new EolParameterListImpl();
	}

	/** Creates a new {@link EolNewExpression}. */
	public EolNewExpression createEolNewExpression() {
		return new EolNewExpressionImpl();
	}

	/** Creates a new {@link EolVariableDeclarationExpression}. */
	public EolVariableDeclarationExpression createEolVariableDeclarationExpression() {
		return new EolVariableDeclarationExpressionImpl();
	}

	/** Creates a new {@link EolStringLiteral}. */
	public EolStringLiteral createEolStringLiteral() {
		return new EolStringLiteralImpl();
	}

	/** Creates a new {@link EolFormalParameterReferenceExpression}. */
	public EolFormalParameterReferenceExpression createEolFormalParameterReferenceExpression() {
		return new EolFormalParameterReferenceExpressionImpl();
	}

	/** Creates a new {@link EolVariableReferenceExpression}. */
	public EolVariableReferenceExpression createEolVariableReferenceExpression() {
		return new EolVariableReferenceExpressionImpl();
	}

	/** Creates a new {@link EolStatementBlock}. */
	public EolStatementBlock createEolStatementBlock() {
		return new EolStatementBlockImpl();
	}

	/** Creates a new {@link EolAssignmentStatement}. */
	public EolAssignmentStatement createEolAssignmentStatement() {
		return new EolAssignmentStatementImpl();
	}

	/** Creates a new {@link EolAssignmentExpressionStatement}. */
	public EolAssignmentExpressionStatement createEolAssignmentExpressionStatement() {
		return new EolAssignmentExpressionStatementImpl();
	}

	/** Creates a new {@link EolLogicalExpressionStatement}. */
	public EolLogicalExpressionStatement createEolLogicalExpressionStatement() {
		return new EolLogicalExpressionStatementImpl();
	}

	/** Creates a new {@link EolFor}. */
	public EolFor createEolFor() {
		return new EolForImpl();
	}

	/**
	 * Shared validation for the enum-literal parsers below: returns the parsed
	 * value or throws the standard EMF "not a valid enumerator" error.
	 * The message text is identical to the previously duplicated inline checks.
	 */
	private static <T> T checkEnumerator(T result, EDataType eDataType, String initialValue) {
		if (result == null) {
			throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
		}
		return result;
	}

	/** Parses an {@link EolLogicalOperator} literal. */
	public EolLogicalOperator createEolLogicalOperatorFromString(EDataType eDataType, String initialValue) {
		return checkEnumerator(EolLogicalOperator.get(initialValue), eDataType, initialValue);
	}

	/** Converts an {@link EolLogicalOperator} to its literal form. */
	public String convertEolLogicalOperatorToString(EDataType eDataType, Object instanceValue) {
		return instanceValue == null ? null : instanceValue.toString();
	}

	/** Parses an {@link EolRelationalOperator} literal. */
	public EolRelationalOperator createEolRelationalOperatorFromString(EDataType eDataType, String initialValue) {
		return checkEnumerator(EolRelationalOperator.get(initialValue), eDataType, initialValue);
	}

	/** Converts an {@link EolRelationalOperator} to its literal form. */
	public String convertEolRelationalOperatorToString(EDataType eDataType, Object instanceValue) {
		return instanceValue == null ? null : instanceValue.toString();
	}

	/** Parses an {@link EolAdditiveComparisonOperator} literal. */
	public EolAdditiveComparisonOperator createEolAdditiveComparisonOperatorFromString(EDataType eDataType, String initialValue) {
		return checkEnumerator(EolAdditiveComparisonOperator.get(initialValue), eDataType, initialValue);
	}

	/** Converts an {@link EolAdditiveComparisonOperator} to its literal form. */
	public String convertEolAdditiveComparisonOperatorToString(EDataType eDataType, Object instanceValue) {
		return instanceValue == null ? null : instanceValue.toString();
	}

	/** Parses an {@link EolAdditiveArithmeticalOperator} literal. */
	public EolAdditiveArithmeticalOperator createEolAdditiveArithmeticalOperatorFromString(EDataType eDataType, String initialValue) {
		return checkEnumerator(EolAdditiveArithmeticalOperator.get(initialValue), eDataType, initialValue);
	}

	/** Converts an {@link EolAdditiveArithmeticalOperator} to its literal form. */
	public String convertEolAdditiveArithmeticalOperatorToString(EDataType eDataType, Object instanceValue) {
		return instanceValue == null ? null : instanceValue.toString();
	}

	/** Parses an {@link EolMultiplicativeArithmeticalOperator} literal. */
	public EolMultiplicativeArithmeticalOperator createEolMultiplicativeArithmeticalOperatorFromString(EDataType eDataType, String initialValue) {
		return checkEnumerator(EolMultiplicativeArithmeticalOperator.get(initialValue), eDataType, initialValue);
	}

	/** Converts an {@link EolMultiplicativeArithmeticalOperator} to its literal form. */
	public String convertEolMultiplicativeArithmeticalOperatorToString(EDataType eDataType, Object instanceValue) {
		return instanceValue == null ? null : instanceValue.toString();
	}

	/** Parses an {@link EolUnaryOprator} literal. */
	public EolUnaryOprator createEolUnaryOpratorFromString(EDataType eDataType, String initialValue) {
		return checkEnumerator(EolUnaryOprator.get(initialValue), eDataType, initialValue);
	}

	/** Converts an {@link EolUnaryOprator} to its literal form. */
	public String convertEolUnaryOpratorToString(EDataType eDataType, Object instanceValue) {
		return instanceValue == null ? null : instanceValue.toString();
	}

	/** Returns the package this factory belongs to. */
	public EtlMetaModelPackage getEtlMetaModelPackage() {
		return (EtlMetaModelPackage) getEPackage();
	}

	/**
	 * @deprecated use {@link EtlMetaModelPackage#eINSTANCE} instead
	 */
	@Deprecated
	public static EtlMetaModelPackage getPackage() {
		return EtlMetaModelPackage.eINSTANCE;
	}

} //EtlMetaModelFactoryImpl
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.pinot; import com.facebook.presto.common.block.SortOrder; import com.facebook.presto.common.type.BigintType; import com.facebook.presto.common.type.BooleanType; import com.facebook.presto.common.type.CharType; import com.facebook.presto.common.type.DateTimeEncoding; import com.facebook.presto.common.type.DateType; import com.facebook.presto.common.type.DecimalType; import com.facebook.presto.common.type.DoubleType; import com.facebook.presto.common.type.IntegerType; import com.facebook.presto.common.type.RealType; import com.facebook.presto.common.type.SmallintType; import com.facebook.presto.common.type.TimestampType; import com.facebook.presto.common.type.TimestampWithTimeZoneType; import com.facebook.presto.common.type.TinyintType; import com.facebook.presto.common.type.Type; import com.facebook.presto.common.type.VarcharType; import com.facebook.presto.spi.plan.AggregationNode; import com.facebook.presto.spi.plan.TopNNode; import com.facebook.presto.spi.relation.CallExpression; import com.facebook.presto.spi.relation.ConstantExpression; import com.facebook.presto.spi.relation.RowExpression; import com.facebook.presto.spi.relation.VariableReferenceExpression; import com.google.common.collect.ImmutableList; import io.airlift.slice.Slice; import java.math.BigDecimal; import java.math.BigInteger; import java.math.MathContext; import java.util.HashSet; import java.util.LinkedHashMap; import 
java.util.List;
import java.util.Optional;
import java.util.Set;

import static com.facebook.presto.common.type.Decimals.decodeUnscaledValue;
import static com.facebook.presto.pinot.PinotErrorCode.PINOT_UNSUPPORTED_EXPRESSION;
import static com.google.common.base.Preconditions.checkState;
import static java.lang.Float.intBitsToFloat;
import static java.lang.String.format;

/**
 * Helpers used when pushing Presto plan fragments down to Pinot: classification of
 * aggregation outputs (group-by keys vs. aggregate calls), detection of the
 * count(distinct x) pattern, top-N ordering extraction, and literal-to-string rendering.
 */
public class PinotPushdownUtils
{
    public static final String PINOT_DISTINCT_COUNT_FUNCTION_NAME = "distinctCount";

    private static final String COUNT_FUNCTION_NAME = "count";
    private static final String DISTINCT_MASK = "$distinct";

    private PinotPushdownUtils() {}

    public enum ExpressionType
    {
        GROUP_BY,
        AGGREGATE,
    }

    /**
     * Group by field description
     */
    public static class GroupByColumnNode
            extends AggregationColumnNode
    {
        private final VariableReferenceExpression inputColumn;

        public GroupByColumnNode(VariableReferenceExpression inputColumn, VariableReferenceExpression output)
        {
            super(ExpressionType.GROUP_BY, output);
            this.inputColumn = inputColumn;
        }

        public VariableReferenceExpression getInputColumn()
        {
            return inputColumn;
        }

        @Override
        public String toString()
        {
            return inputColumn.toString();
        }
    }

    /**
     * Agg function description.
     */
    public static class AggregationFunctionColumnNode
            extends AggregationColumnNode
    {
        private final CallExpression callExpression;

        public AggregationFunctionColumnNode(VariableReferenceExpression output, CallExpression callExpression)
        {
            super(ExpressionType.AGGREGATE, output);
            this.callExpression = callExpression;
        }

        public CallExpression getCallExpression()
        {
            return callExpression;
        }

        @Override
        public String toString()
        {
            return callExpression.toString();
        }
    }

    /**
     * Throws a {@link PinotException} with {@code PINOT_UNSUPPORTED_EXPRESSION} when the
     * given condition does not hold.
     *
     * @param condition condition expected to be true
     * @param errorMessage {@link String#format} template for the failure message
     * @param errorMessageArgs arguments for the template
     */
    public static void checkSupported(boolean condition, String errorMessage, Object... errorMessageArgs)
    {
        if (!condition) {
            throw new PinotException(PINOT_UNSUPPORTED_EXPRESSION, Optional.empty(), String.format(errorMessage, errorMessageArgs));
        }
    }

    /**
     * Base description of one output column of an aggregation: either a group-by key
     * ({@link GroupByColumnNode}) or an aggregate call ({@link AggregationFunctionColumnNode}).
     */
    public abstract static class AggregationColumnNode
    {
        private final ExpressionType expressionType;
        private final VariableReferenceExpression outputColumn;

        public AggregationColumnNode(ExpressionType expressionType, VariableReferenceExpression outputColumn)
        {
            this.expressionType = expressionType;
            this.outputColumn = outputColumn;
        }

        public VariableReferenceExpression getOutputColumn()
        {
            return outputColumn;
        }

        public ExpressionType getExpressionType()
        {
            return expressionType;
        }
    }

    /**
     * Classifies every output variable of {@code aggregationNode} as either a group-by
     * column node or an aggregation function node, preserving the node's output order.
     *
     * @param aggregationNode the aggregation to classify
     * @return one {@link AggregationColumnNode} per output variable
     * @throws PinotException when the aggregation uses a feature Pinot cannot evaluate
     *         (filters, DISTINCT qualifiers, ORDER BY, or a mask other than
     *         count over a {@code $distinct} marker)
     */
    public static List<AggregationColumnNode> computeAggregationNodes(AggregationNode aggregationNode)
    {
        int groupByKeyIndex = 0;
        ImmutableList.Builder<AggregationColumnNode> nodeBuilder = ImmutableList.builder();
        for (VariableReferenceExpression outputColumn : aggregationNode.getOutputVariables()) {
            AggregationNode.Aggregation aggregation = aggregationNode.getAggregations().get(outputColumn);

            if (aggregation != null) {
                // Filters, DISTINCT qualifiers and per-aggregation ORDER BY cannot be expressed in Pinot.
                if (aggregation.getFilter().isPresent()
                        || aggregation.isDistinct()
                        || aggregation.getOrderBy().isPresent()) {
                    throw new PinotException(PINOT_UNSUPPORTED_EXPRESSION, Optional.empty(), "Unsupported aggregation node " + aggregationNode);
                }
                if (aggregation.getMask().isPresent()) {
                    // This block handles the case when a distinct aggregation is present in addition to another aggregation function.
                    // E.g. `SELECT count(distinct COL_A), sum(COL_B) FROM myTable` to Pinot as `SELECT distinctCount(COL_A), sum(COL_B) FROM myTable`
                    if (aggregation.getCall().getDisplayName().equalsIgnoreCase(COUNT_FUNCTION_NAME)
                            && aggregation.getMask().get().getName().contains(DISTINCT_MASK)) {
                        nodeBuilder.add(new AggregationFunctionColumnNode(
                                outputColumn,
                                new CallExpression(
                                        aggregation.getCall().getSourceLocation(),
                                        PINOT_DISTINCT_COUNT_FUNCTION_NAME,
                                        aggregation.getCall().getFunctionHandle(),
                                        aggregation.getCall().getType(),
                                        aggregation.getCall().getArguments())));
                        continue;
                    }
                    // Pinot doesn't support push down aggregation functions other than count on top of distinct function.
                    throw new PinotException(PINOT_UNSUPPORTED_EXPRESSION, Optional.empty(), "Unsupported aggregation node with mask " + aggregationNode);
                }
                if (handlePushDownSingleDistinctCount(nodeBuilder, aggregationNode, outputColumn, aggregation)) {
                    continue;
                }
                nodeBuilder.add(new AggregationFunctionColumnNode(outputColumn, aggregation.getCall()));
            }
            else {
                // group by output: grouping keys appear in output order, so a running index matches them up
                VariableReferenceExpression inputColumn = aggregationNode.getGroupingKeys().get(groupByKeyIndex);
                nodeBuilder.add(new GroupByColumnNode(inputColumn, outputColumn));
                groupByKeyIndex++;
            }
        }
        return nodeBuilder.build();
    }

    /**
     * Try to push down query like: `SELECT count(distinct $COLUMN) FROM myTable` to Pinot as `SELECT distinctCount($COLUMN) FROM myTable`.
     * This function only handles the case of an AggregationNode (COUNT on $COLUMN) on top of an AggregationNode(of non-aggregate on $COLUMN).
     *
     * @param nodeBuilder collector for the resulting column nodes
     * @param aggregationNode the outer (COUNT) aggregation
     * @param outputColumn the output variable being classified
     * @param aggregation the COUNT aggregation candidate
     * @return true if push down successfully otherwise false.
     */
    private static boolean handlePushDownSingleDistinctCount(ImmutableList.Builder<AggregationColumnNode> nodeBuilder, AggregationNode aggregationNode, VariableReferenceExpression outputColumn, AggregationNode.Aggregation aggregation)
    {
        if (!aggregation.getCall().getDisplayName().equalsIgnoreCase(COUNT_FUNCTION_NAME)) {
            return false;
        }
        List<RowExpression> arguments = aggregation.getCall().getArguments();
        if (arguments.size() != 1) {
            return false;
        }
        RowExpression aggregationArgument = arguments.get(0);
        // Handle the case of Count Aggregation on top of a Non-Agg GroupBy Aggregation.
        if (!(aggregationNode.getSource() instanceof AggregationNode)) {
            return false;
        }
        AggregationNode sourceAggregationNode = (AggregationNode) aggregationNode.getSource();
        Set<String> sourceAggregationGroupSet = getGroupKeys(sourceAggregationNode.getGroupingKeys());
        Set<String> aggregationGroupSet = getGroupKeys(aggregationNode.getGroupingKeys());
        aggregationGroupSet.add(aggregationArgument.toString());
        // FIX: the push-down is only valid when the source grouping keys are EXACTLY the outer
        // grouping keys plus the counted column (set equality). The previous form negated only
        // the first containsAll, so a source grouping set that was a strict superset slipped
        // through and produced a non-equivalent Pinot query.
        if (!(sourceAggregationGroupSet.containsAll(aggregationGroupSet)
                && aggregationGroupSet.containsAll(sourceAggregationGroupSet))) {
            return false;
        }
        nodeBuilder.add(
                new AggregationFunctionColumnNode(
                        outputColumn,
                        new CallExpression(
                                aggregation.getCall().getSourceLocation(),
                                PINOT_DISTINCT_COUNT_FUNCTION_NAME,
                                aggregation.getFunctionHandle(),
                                aggregation.getCall().getType(),
                                ImmutableList.of(aggregationArgument))));
        return true;
    }

    /**
     * Returns the set of grouping-key variable names.
     */
    private static Set<String> getGroupKeys(List<VariableReferenceExpression> groupingKeys)
    {
        Set<String> groupKeySet = new HashSet<>();
        groupingKeys.forEach(groupingKey -> groupKeySet.add(groupingKey.getName()));
        return groupKeySet;
    }

    /**
     * Extracts the top-N node's ordering as an insertion-ordered map of
     * order-by variable to its {@link SortOrder}.
     */
    public static LinkedHashMap<VariableReferenceExpression, SortOrder> getOrderingScheme(TopNNode topNNode)
    {
        LinkedHashMap<VariableReferenceExpression, SortOrder> orderingScheme = new LinkedHashMap<>();
        topNNode.getOrderingScheme().getOrderByVariables().forEach(value -> orderingScheme.put(value, topNNode.getOrderingScheme().getOrdering(value)));
        return orderingScheme;
    }

    // Rescales the unscaled decimal value using the type's scale/precision.
    private static Number decodeDecimal(BigInteger unscaledValue, DecimalType type)
    {
        return new BigDecimal(unscaledValue, type.getScale(), new MathContext(type.getPrecision()));
    }

    // Copied from com.facebook.presto.sql.planner.LiteralInterpreter.evaluate
    /**
     * Renders a non-null constant expression as the string Pinot expects in a query
     * (varchar/char values are single-quoted; numerics use their canonical text form).
     *
     * @throws PinotException for null constants or unsupported types
     */
    public static String getLiteralAsString(ConstantExpression node)
    {
        Type type = node.getType();
        if (node.getValue() == null) {
            throw new PinotException(PINOT_UNSUPPORTED_EXPRESSION, Optional.empty(), String.format("Null constant expression %s with value of type %s", node, type));
        }
        if (type instanceof BooleanType) {
            return String.valueOf(((Boolean) node.getValue()).booleanValue());
        }
        if (type instanceof BigintType || type instanceof TinyintType || type instanceof SmallintType || type instanceof IntegerType) {
            Number number = (Number) node.getValue();
            return format("%d", number.longValue());
        }
        if (type instanceof DoubleType) {
            return node.getValue().toString();
        }
        if (type instanceof RealType) {
            // REAL is carried as a Long whose low 32 bits are the float's bit pattern.
            Long number = (Long) node.getValue();
            return format("%f", intBitsToFloat(number.intValue()));
        }
        if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            if (decimalType.isShort()) {
                checkState(node.getValue() instanceof Long);
                return decodeDecimal(BigInteger.valueOf((long) node.getValue()), decimalType).toString();
            }
            checkState(node.getValue() instanceof Slice);
            Slice value = (Slice) node.getValue();
            return decodeDecimal(decodeUnscaledValue(value), decimalType).toString();
        }
        if (type instanceof VarcharType || type instanceof CharType) {
            return "'" + ((Slice) node.getValue()).toStringUtf8() + "'";
        }
        if (type instanceof TimestampType || type instanceof DateType) {
            return node.getValue().toString();
        }
        if (type instanceof TimestampWithTimeZoneType) {
            Long millisUtc = DateTimeEncoding.unpackMillisUtc((Long) node.getValue());
            return millisUtc.toString();
        }
        throw new PinotException(PINOT_UNSUPPORTED_EXPRESSION, Optional.empty(), String.format("Cannot handle the constant expression %s with value of type %s", node, type));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.ode.utils;

import org.apache.axis2.Constants;
import org.apache.axis2.addressing.AddressingConstants;
import org.apache.axis2.util.JavaUtils;
import org.apache.axis2.client.Options;
import org.apache.axis2.transport.http.HTTPConstants;
import org.apache.axis2.transport.http.HttpTransportProperties;
import org.apache.axis2.transport.jms.JMSConstants;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpVersion;
import org.apache.commons.httpclient.ProtocolException;
import org.apache.commons.httpclient.params.DefaultHttpParams;
import org.apache.commons.httpclient.params.HostParams;
import org.apache.commons.httpclient.params.HttpClientParams;
import org.apache.commons.httpclient.params.HttpConnectionParams;
import org.apache.commons.httpclient.params.HttpMethodParams;
import org.apache.commons.httpclient.params.HttpParams;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;

/**
 * Translates ODE endpoint property maps into the configuration objects expected by
 * the Axis2 client ({@link Options}) and by Commons HttpClient ({@link HttpParams}).
 *
 * @author <a href="mailto:midon@intalio.com">Alexis Midon</a>
 */
public class Properties {

    /**
     * Property used to define how long (in milliseconds) the message will wait for a response. Default value is {@link #DEFAULT_MEX_TIMEOUT}
     */
    public static final String PROP_MEX_TIMEOUT = "mex.timeout";

    /**
     * Property used to define how long (in milliseconds) the message will wait for a response for process-to-process invocations.
     */
    public static final String PROP_P2P_MEX_TIMEOUT = "p2p.mex.timeout";

    // its default value
    public static final int DEFAULT_MEX_TIMEOUT = 2 * 60 * 1000;

    public static final String PROP_HTTP_CONNECTION_TIMEOUT = HttpConnectionParams.CONNECTION_TIMEOUT;
    public static final String PROP_HTTP_SOCKET_TIMEOUT = HttpMethodParams.SO_TIMEOUT;
    public static final String PROP_HTTP_PROTOCOL_VERSION = HttpMethodParams.PROTOCOL_VERSION;
    // prefix for HTTP headers to send on every request, e.g. "http.default-headers.X-Foo=bar"
    public static final String PROP_HTTP_HEADER_PREFIX = "http.default-headers.";
    // prefix reserved for proxy configuration
    public static final String PROP_HTTP_PROXY_PREFIX = "http.proxy.";
    public static final String PROP_HTTP_PROXY_HOST = PROP_HTTP_PROXY_PREFIX + "host";
    public static final String PROP_HTTP_PROXY_PORT = PROP_HTTP_PROXY_PREFIX + "port";
    public static final String PROP_HTTP_PROXY_DOMAIN = PROP_HTTP_PROXY_PREFIX + "domain";
    public static final String PROP_HTTP_PROXY_USER = PROP_HTTP_PROXY_PREFIX + "user";
    public static final String PROP_HTTP_PROXY_PASSWORD = PROP_HTTP_PROXY_PREFIX + "password";

    /**
     * @deprecated use org.apache.commons.httpclient.params.HttpMethodParams#HTTP_CONTENT_CHARSET (="http.protocol.content-charset")
     */
    public static final String PROP_HTTP_PROTOCOL_ENCODING = "http.protocol.encoding";

    /**
     * Property to override the location set in soap:address or http:address
     */
    public static final String PROP_ADDRESS = "address";

    // Httpclient specific
    public static final String PROP_HTTP_MAX_REDIRECTS = HttpClientParams.MAX_REDIRECTS;

    // Axis2-specific
    public static final String PROP_HTTP_REQUEST_CHUNK = "http.request.chunk";
    public static final String PROP_HTTP_REQUEST_GZIP = "http.request.gzip";
    public static final String PROP_HTTP_ACCEPT_GZIP = "http.accept.gzip";
    public static final String PROP_SECURITY_POLICY = "security.policy.file";
    public static final String PROP_JMS_REPLY_DESTINATION = "jms.reply.destination";
    public static final String PROP_JMS_REPLY_TIMEOUT = "jms.reply.timeout";
    public static final String PROP_JMS_DESTINATION_TYPE = "jms.destination.type";
    public static final String PROP_SEND_WS_ADDRESSING_HEADERS = "ws-addressing.headers";

    protected static final Log log = LogFactory.getLog(Properties.class);

    /**
     * Scans the property map for proxy settings ({@link #PROP_HTTP_PROXY_PREFIX}) and
     * default HTTP headers ({@link #PROP_HTTP_HEADER_PREFIX}).
     *
     * @param properties endpoint property map
     * @return a two-element array: [0] = {@link HttpTransportProperties.ProxyProperties} or null,
     *         [1] = {@code ArrayList<Header>} or null
     */
    public static Object[] getProxyAndHeaders(Map<String, String> properties) {
        ArrayList<Header> headers = null; // /!\ Axis2 requires an ArrayList (not a List implementation)
        HttpTransportProperties.ProxyProperties proxy = null;
        for (Map.Entry<String, String> e : properties.entrySet()) {
            final String k = e.getKey();
            final String v = e.getValue();
            if (k.startsWith(PROP_HTTP_HEADER_PREFIX)) {
                if (headers == null) headers = new ArrayList<Header>();
                // extract the header name
                String name = k.substring(PROP_HTTP_HEADER_PREFIX.length());
                headers.add(new Header(name, v));
            } else if (k.startsWith(PROP_HTTP_PROXY_PREFIX)) {
                if (proxy == null) proxy = new HttpTransportProperties.ProxyProperties();
                if (PROP_HTTP_PROXY_HOST.equals(k)) proxy.setProxyName(v);
                else if (PROP_HTTP_PROXY_PORT.equals(k)) proxy.setProxyPort(Integer.parseInt(v));
                else if (PROP_HTTP_PROXY_DOMAIN.equals(k)) proxy.setDomain(v);
                else if (PROP_HTTP_PROXY_USER.equals(k)) proxy.setUserName(v);
                else if (PROP_HTTP_PROXY_PASSWORD.equals(k)) proxy.setPassWord(v);
                else if (log.isWarnEnabled())
                    log.warn("Unknown proxy properties [" + k + "]. " + PROP_HTTP_PROXY_PREFIX + " is a prefix reserved for proxy properties.");
            }
        }
        if (proxy != null) {
            String host = proxy.getProxyHostName();
            if (host == null || host.length() == 0) {
                // disable the proxy when no host name was provided
                proxy = null;
                if (log.isDebugEnabled()) log.debug("Proxy host is null. Proxy will not be taken into account.");
            }
        }
        return new Object[]{proxy, headers};
    }

    /**
     * Translation of endpoint properties into an Axis2 {@link Options} object.
     */
    public static class Axis2 {

        public static Options translate(Map<String, String> properties) {
            return translate(properties, new Options());
        }

        /**
         * Applies the property map onto the given {@link Options}. Unknown properties are
         * copied through as-is; known ones are converted to the types Axis2 expects.
         */
        public static Options translate(Map<String, String> properties, Options options) {
            if (log.isDebugEnabled()) log.debug("Translating Properties for Axis2");
            if (properties.isEmpty()) return options;

            // First set any default values to make sure they can be overwritten
            // set the default encoding for HttpClient (HttpClient uses ISO-8859-1 by default)
            options.setProperty(Constants.Configuration.CHARACTER_SET_ENCODING, "UTF-8");

            /*then add all property pairs so that new properties (with string value)
             are automatically handled (i.e no translation needed) */
            for (Map.Entry<String, String> e : properties.entrySet()) {
                options.setProperty(e.getKey(), e.getValue());
            }

            if (properties.containsKey(PROP_HTTP_CONNECTION_TIMEOUT)) {
                final String value = properties.get(PROP_HTTP_CONNECTION_TIMEOUT);
                try {
                    options.setProperty(HTTPConstants.CONNECTION_TIMEOUT, Integer.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_HTTP_CONNECTION_TIMEOUT + "=" + value + "]. Integer expected. Property will be skipped.");
                }
            }
            if (properties.containsKey(PROP_HTTP_SOCKET_TIMEOUT)) {
                final String value = properties.get(PROP_HTTP_SOCKET_TIMEOUT);
                try {
                    options.setProperty(HTTPConstants.SO_TIMEOUT, Integer.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_HTTP_SOCKET_TIMEOUT + "=" + value + "]. Integer expected. Property will be skipped.");
                }
            }
            if (properties.containsKey(PROP_HTTP_PROTOCOL_ENCODING)) {
                if (log.isWarnEnabled()) log.warn("Deprecated property: http.protocol.encoding. Use http.protocol.content-charset");
                options.setProperty(Constants.Configuration.CHARACTER_SET_ENCODING, properties.get(PROP_HTTP_PROTOCOL_ENCODING));
            }
            if (properties.containsKey(HttpMethodParams.HTTP_CONTENT_CHARSET)) {
                options.setProperty(Constants.Configuration.CHARACTER_SET_ENCODING, properties.get(HttpMethodParams.HTTP_CONTENT_CHARSET));
            }
            if (properties.containsKey(PROP_HTTP_PROTOCOL_VERSION)) {
                options.setProperty(HTTPConstants.HTTP_PROTOCOL_VERSION, properties.get(PROP_HTTP_PROTOCOL_VERSION));
            }
            if (properties.containsKey(PROP_HTTP_REQUEST_CHUNK)) {
                options.setProperty(HTTPConstants.CHUNKED, properties.get(PROP_HTTP_REQUEST_CHUNK));
            }
            if (properties.containsKey(PROP_HTTP_REQUEST_GZIP)) {
                options.setProperty(HTTPConstants.MC_GZIP_REQUEST, properties.get(PROP_HTTP_REQUEST_GZIP));
            }
            if (properties.containsKey(PROP_HTTP_ACCEPT_GZIP)) {
                options.setProperty(HTTPConstants.MC_ACCEPT_GZIP, properties.get(PROP_HTTP_ACCEPT_GZIP));
            }
            if (properties.containsKey(PROP_HTTP_MAX_REDIRECTS)) {
                if (log.isWarnEnabled()) log.warn("Property Not Supported: " + PROP_HTTP_MAX_REDIRECTS);
            }
            if (properties.containsKey(PROP_JMS_REPLY_DESTINATION)) {
                options.setProperty(JMSConstants.PARAM_REPLY_DESTINATION, properties.get(PROP_JMS_REPLY_DESTINATION));
            }
            if (properties.containsKey(PROP_JMS_REPLY_TIMEOUT)) {
                String value = properties.get(PROP_JMS_REPLY_TIMEOUT);
                // The value of this property must be a string object, not a long object.
                options.setProperty(JMSConstants.JMS_WAIT_REPLY, value);
            }
            if (properties.containsKey(PROP_JMS_DESTINATION_TYPE)) {
                String value = properties.get(PROP_JMS_DESTINATION_TYPE);
                try {
                    // NOTE(review): Long.valueOf looks suspicious for a destination TYPE
                    // (queue/topic); preserved as-is — confirm against the JMS transport's
                    // expectations before changing.
                    options.setProperty(JMSConstants.PARAM_DEST_TYPE, Long.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_JMS_DESTINATION_TYPE + "=" + value + "]. Long expected. Property will be skipped.");
                }
            }
            if (properties.containsKey(PROP_SEND_WS_ADDRESSING_HEADERS)) {
                String value = properties.get(PROP_SEND_WS_ADDRESSING_HEADERS);
                // Axis2 flag is "disable addressing", hence the negation.
                options.setProperty(AddressingConstants.DISABLE_ADDRESSING_FOR_OUT_MESSAGES, !Boolean.parseBoolean(value));
            }
            if (properties.containsKey("ws-adddressing.headers")) {
                if (log.isWarnEnabled()) log.warn("Deprecated property: ws-adddressing.headers (Mind the 3 d's). Use ws-addressing.headers");
                String value = properties.get("ws-adddressing.headers");
                options.setProperty(AddressingConstants.DISABLE_ADDRESSING_FOR_OUT_MESSAGES, !Boolean.parseBoolean(value));
            }

            // iterate through the properties to get Headers & Proxy information
            Object[] o = getProxyAndHeaders(properties);
            HttpTransportProperties.ProxyProperties proxy = (HttpTransportProperties.ProxyProperties) o[0];
            ArrayList<Header> headers = (ArrayList<Header>) o[1]; // /!\ Axis2 requires an ArrayList (not a List implementation)
            if (headers != null && !headers.isEmpty()) options.setProperty(HTTPConstants.HTTP_HEADERS, headers);
            if (proxy != null) options.setProperty(HTTPConstants.PROXY, proxy);

            // Set properties that canNOT be overridden
            if (JavaUtils.isTrueExplicitly(options.getProperty(HTTPConstants.REUSE_HTTP_CLIENT))) {
                if (log.isWarnEnabled())
                    log.warn("This property cannot be overridden, and must always be false. " + HTTPConstants.REUSE_HTTP_CLIENT);
            }
            options.setProperty(HTTPConstants.REUSE_HTTP_CLIENT, "false");
            return options;
        }
    }

    /**
     * Translation of endpoint properties into Commons HttpClient {@link HttpParams}.
     */
    public static class HttpClient {

        public static HttpParams translate(Map<String, String> properties) {
            return translate(properties, new DefaultHttpParams());
        }

        /**
         * Applies the property map onto the given {@link HttpParams} and returns a
         * read-only view of it. Unknown properties are copied through as-is.
         */
        public static HttpParams translate(Map<String, String> properties, HttpParams p) {
            if (log.isDebugEnabled())
                log.debug("Translating Properties for HttpClient. Properties size=" + properties.size());
            if (properties.isEmpty()) return p;

            // First set any default values to make sure they can be overwritten
            // set the default encoding for HttpClient (HttpClient uses ISO-8859-1 by default)
            p.setParameter(HttpMethodParams.HTTP_CONTENT_CHARSET, "UTF-8");

            /*then all property pairs so that new properties (with string value)
             are automatically handled (i.e no translation needed) */
            for (Map.Entry<String, String> e : properties.entrySet()) {
                p.setParameter(e.getKey(), e.getValue());
            }

            // initialize the collection of headers
            p.setParameter(HostParams.DEFAULT_HEADERS, new ArrayList());

            if (properties.containsKey(PROP_HTTP_CONNECTION_TIMEOUT)) {
                final String value = properties.get(PROP_HTTP_CONNECTION_TIMEOUT);
                try {
                    p.setParameter(HttpConnectionParams.CONNECTION_TIMEOUT, Integer.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_HTTP_CONNECTION_TIMEOUT + "=" + value + "] Property will be skipped.");
                }
            }
            if (properties.containsKey(PROP_HTTP_SOCKET_TIMEOUT)) {
                final String value = properties.get(PROP_HTTP_SOCKET_TIMEOUT);
                try {
                    p.setParameter(HttpMethodParams.SO_TIMEOUT, Integer.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_HTTP_SOCKET_TIMEOUT + "=" + value + "] Property will be skipped.");
                }
            }
            if (properties.containsKey(PROP_HTTP_PROTOCOL_ENCODING)) {
                if (log.isWarnEnabled()) log.warn("Deprecated property: http.protocol.encoding. Use http.protocol.content-charset");
                p.setParameter(HttpMethodParams.HTTP_CONTENT_CHARSET, properties.get(PROP_HTTP_PROTOCOL_ENCODING));
            }
            // the next one is redundant because HttpMethodParams.HTTP_CONTENT_CHARSET accepts a string and we use the same property name
            // so the property has already been added.
            if (properties.containsKey(HttpMethodParams.HTTP_CONTENT_CHARSET)) {
                p.setParameter(HttpMethodParams.HTTP_CONTENT_CHARSET, properties.get(HttpMethodParams.HTTP_CONTENT_CHARSET));
            }
            if (properties.containsKey(PROP_HTTP_PROTOCOL_VERSION)) {
                try {
                    p.setParameter(HttpMethodParams.PROTOCOL_VERSION, HttpVersion.parse(properties.get(PROP_HTTP_PROTOCOL_VERSION)));
                } catch (ProtocolException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + PROP_HTTP_PROTOCOL_VERSION + "]", e);
                }
            }
            if (properties.containsKey(PROP_HTTP_REQUEST_CHUNK)) {
                // see org.apache.commons.httpclient.methods.EntityEnclosingMethod.setContentChunked()
                p.setBooleanParameter(PROP_HTTP_REQUEST_CHUNK, Boolean.parseBoolean(properties.get(PROP_HTTP_REQUEST_CHUNK)));
            }
            if (properties.containsKey(PROP_HTTP_REQUEST_GZIP)) {
                if (log.isWarnEnabled())
                    log.warn("Property not supported by HTTP External Services: " + PROP_HTTP_REQUEST_GZIP);
            }
            if (Boolean.parseBoolean(properties.get(PROP_HTTP_ACCEPT_GZIP))) {
                // append gzip to the list of accepted encoding
                // HttpClient does not support compression natively
                // Additional code would be necessary to handle it.
//                ((Collection) p.getParameter(HostParams.DEFAULT_HEADERS)).add(new Header("Accept-Encoding", "gzip"));
                if (log.isWarnEnabled())
                    log.warn("Property not supported by HTTP External Services: " + PROP_HTTP_ACCEPT_GZIP);
            }
            if (properties.containsKey(PROP_HTTP_MAX_REDIRECTS)) {
                final String value = properties.get(PROP_HTTP_MAX_REDIRECTS);
                try {
                    p.setParameter(HttpClientParams.MAX_REDIRECTS, Integer.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_HTTP_MAX_REDIRECTS + "=" + value + "] Property will be skipped.");
                }
            }

            Object[] o = getProxyAndHeaders(properties);
            HttpTransportProperties.ProxyProperties proxy = (HttpTransportProperties.ProxyProperties) o[0];
            Collection headers = (Collection) o[1];
            if (headers != null && !headers.isEmpty())
                ((Collection) p.getParameter(HostParams.DEFAULT_HEADERS)).addAll(headers);
            if (proxy != null) p.setParameter(PROP_HTTP_PROXY_PREFIX, proxy);

            return new UnmodifiableHttpParams(p);
        }

        /**
         * Read-only wrapper around an {@link HttpParams}: all mutators throw
         * {@link UnsupportedOperationException}; all getters delegate.
         */
        static class UnmodifiableHttpParams implements HttpParams {

            final HttpParams p;

            private UnmodifiableHttpParams(HttpParams p) {
                this.p = p;
            }

            public void setBooleanParameter(String name, boolean value) {
                throw new UnsupportedOperationException();
            }

            public void setDefaults(HttpParams params) {
                throw new UnsupportedOperationException();
            }

            public void setDoubleParameter(String name, double value) {
                throw new UnsupportedOperationException();
            }

            public void setIntParameter(String name, int value) {
                throw new UnsupportedOperationException();
            }

            public void setLongParameter(String name, long value) {
                throw new UnsupportedOperationException();
            }

            public void setParameter(String name, Object value) {
                throw new UnsupportedOperationException();
            }

            public boolean getBooleanParameter(String name, boolean defaultValue) {
                return p.getBooleanParameter(name, defaultValue);
            }

            // Deliberately does not expose the wrapped params' defaults.
            public HttpParams getDefaults() {
                return null;
            }

            public double getDoubleParameter(String name, double defaultValue) {
                return p.getDoubleParameter(name, defaultValue);
            }

            public int getIntParameter(String name, int defaultValue) {
                return p.getIntParameter(name, defaultValue);
            }

            public long getLongParameter(String name, long defaultValue) {
                return p.getLongParameter(name, defaultValue);
            }

            public Object getParameter(String name) {
                return p.getParameter(name);
            }

            public boolean isParameterFalse(String name) {
                return p.isParameterFalse(name);
            }

            public boolean isParameterSet(String name) {
                return p.isParameterSet(name);
            }

            public boolean isParameterSetLocally(String name) {
                return p.isParameterSetLocally(name);
            }

            public boolean isParameterTrue(String name) {
                return p.isParameterTrue(name);
            }
        }
    }
}