repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
liuyuanyuan/dbeaver | plugins/org.jkiss.dbeaver.lang/src/org/jkiss/dbeaver/lang/SCMRoot.java | 2003 | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.lang;
import org.eclipse.jface.text.rules.IToken;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
/**
* Source code node
*/
/**
 * Root node of a parsed source-code tree. Owns the parser and scanner that
 * were used to build the tree and drives the top-level parse loop.
 */
public class SCMRoot extends SCMGroupNode {

    private final SCMSourceParser parser;
    private final SCMSourceScanner scanner;

    public SCMRoot(SCMSourceParser parser, SCMSourceScanner scanner) {
        super(null);
        this.parser = parser;
        this.scanner = scanner;
    }

    @NotNull
    @Override
    public SCMSourceText getSource() {
        return scanner.getSource();
    }

    @Nullable
    @Override
    public IToken parseComposite(@NotNull SCMSourceScanner scanner) {
        // Consume tokens until the scanner signals end-of-file; the root never
        // leaves a trailing token behind, so the result is always null.
        IToken nextToken = scanner.nextToken();
        while (!nextToken.isEOF()) {
            parseToken(this, nextToken);
            nextToken = scanner.nextToken();
        }
        return null;
    }

    // Parses a single token into a node under the given container. Composite
    // nodes consume their own children; if they hand back a trailing token it
    // belongs to the enclosing container, so it is re-dispatched recursively.
    private void parseToken(SCMCompositeNode container, IToken token) {
        SCMNode node = parser.parseNode(container, token, scanner);
        container.addChild(node);
        if (node instanceof SCMCompositeNode) {
            IToken trailing = ((SCMCompositeNode) node).parseComposite(scanner);
            if (trailing != null) {
                parseToken(container, trailing);
            }
        }
    }
}
| apache-2.0 |
deeplearning4j/nd4j | nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/StandardScaler.java | 4814 | package org.nd4j.linalg.dataset.api.iterator;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.ops.transforms.Transforms;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
/**
* Standard scaler calculates a moving column wise
* variance and mean
* http://www.johndcook.com/blog/standard_deviation/
*
* @deprecated Use {@link org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize} for equivalent functionality.
* NormalizerStandardize is more stable (for examples, when a column contains all the same values for every example) but
* otherwise provides equivalent functionality. See also {@link org.nd4j.linalg.dataset.api.preprocessor.NormalizerMinMaxScaler}
*/
/**
 * Standard scaler calculates a moving column wise
 * variance and mean
 * http://www.johndcook.com/blog/standard_deviation/
 *
 * @deprecated Use {@link org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize} for equivalent functionality.
 * NormalizerStandardize is more stable (for examples, when a column contains all the same values for every example) but
 * otherwise provides equivalent functionality. See also {@link org.nd4j.linalg.dataset.api.preprocessor.NormalizerMinMaxScaler}
 */
@Deprecated
public class StandardScaler {
    private static Logger logger = LoggerFactory.getLogger(StandardScaler.class);
    // Column-wise statistics; null until one of the fit() overloads is called.
    private INDArray mean, std;
    // Total number of examples seen across all batches so far.
    private long runningTotal = 0;
    // Number of examples in the most recently processed batch.
    private long batchCount = 0;

    /**
     * Fit the scaler to a single in-memory dataset, computing the column-wise
     * mean and standard deviation. Epsilon is added to the std to avoid
     * division by zero in {@link #transform(DataSet)}.
     *
     * @param dataSet the dataset to compute statistics from
     */
    public void fit(DataSet dataSet) {
        mean = dataSet.getFeatureMatrix().mean(0);
        std = dataSet.getFeatureMatrix().std(0);
        std.addi(Nd4j.scalar(Nd4j.EPS_THRESHOLD));
        // Bug fix: '==' compared INDArray object references and was always false,
        // so this warning could never be logged. equals() compares values.
        if (std.min(1).equals(Nd4j.scalar(Nd4j.EPS_THRESHOLD)))
            logger.info("API_INFO: Std deviation found to be zero. Transform will round upto epsilon to avoid nans.");
    }

    /**
     * Fit the given model, accumulating mean and variance batch-by-batch.
     * @param iterator the data to iterate over; it is reset when done
     */
    public void fit(DataSetIterator iterator) {
        while (iterator.hasNext()) {
            DataSet next = iterator.next();
            runningTotal += next.numExamples();
            batchCount = next.getFeatures().size(0);
            if (mean == null) {
                // First batch: seed the column-wise mean and (scaled) variance.
                mean = next.getFeatureMatrix().mean(0);
                // With a single example the variance is zero by definition.
                std = (batchCount == 1) ? Nd4j.zeros(mean.shape()) : Transforms.pow(next.getFeatureMatrix().std(0), 2);
                std.muli(batchCount);
            } else {
                // m_newM = m_oldM + (x - m_oldM)/m_n;
                // This only works if batch size is 1, m_newS = m_oldS + (x - m_oldM)*(x - m_newM);
                INDArray xMinusMean = next.getFeatureMatrix().subRowVector(mean);
                INDArray newMean = mean.add(xMinusMean.sum(0).divi(runningTotal));
                // Using http://i.stanford.edu/pub/cstr/reports/cs/tr/79/773/CS-TR-79-773.pdf
                // for a version of calc variance when dataset is partitioned into two sample sets
                // Also described in https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
                // delta = mean_B - mean_A; A is data seen so far, B is the current batch
                // M2 is the var*n
                // M2 = M2_A + M2_B + delta^2 * nA * nB/(nA+nB)
                INDArray meanB = next.getFeatureMatrix().mean(0);
                INDArray deltaSq = Transforms.pow(meanB.subRowVector(mean), 2);
                INDArray deltaSqScaled =
                                deltaSq.mul(((float) runningTotal - batchCount) * batchCount / (float) runningTotal);
                INDArray mtwoB = Transforms.pow(next.getFeatureMatrix().std(0), 2);
                mtwoB.muli(batchCount);
                std = std.add(mtwoB);
                std = std.add(deltaSqScaled);
                mean = newMean;
            }
        }
        // Convert the accumulated M2 (= var * n) into a standard deviation.
        std.divi(runningTotal);
        std = Transforms.sqrt(std);
        std.addi(Nd4j.scalar(Nd4j.EPS_THRESHOLD));
        // Bug fix: value comparison instead of reference comparison (see fit(DataSet)).
        if (std.min(1).equals(Nd4j.scalar(Nd4j.EPS_THRESHOLD)))
            logger.info("API_INFO: Std deviation found to be zero. Transform will round upto epsilon to avoid nans.");
        iterator.reset();
    }

    /**
     * Load the given mean and std
     * @param mean the mean file
     * @param std the std file
     * @throws IOException if either file cannot be read
     */
    public void load(File mean, File std) throws IOException {
        this.mean = Nd4j.readBinary(mean);
        this.std = Nd4j.readBinary(std);
    }

    /**
     * Save the current mean and std
     * @param mean the file to write the mean to
     * @param std the file to write the std to
     * @throws IOException if either file cannot be written
     */
    public void save(File mean, File std) throws IOException {
        Nd4j.saveBinary(this.mean, mean);
        Nd4j.saveBinary(this.std, std);
    }

    /**
     * Transform the data in place: subtract the fitted mean, then divide by
     * the fitted std, column-wise. {@code fit(...)} must have been called first.
     * @param dataSet the dataset to transform
     */
    public void transform(DataSet dataSet) {
        dataSet.setFeatures(dataSet.getFeatures().subRowVector(mean));
        dataSet.setFeatures(dataSet.getFeatures().divRowVector(std));
    }

    /** @return the fitted column-wise mean, or null if not yet fitted */
    public INDArray getMean() {
        return mean;
    }

    /** @return the fitted column-wise std (epsilon-padded), or null if not yet fitted */
    public INDArray getStd() {
        return std;
    }
}
| apache-2.0 |
jk1/intellij-community | plugins/gradle/src/org/jetbrains/plugins/gradle/service/project/AbstractProjectImportErrorHandler.java | 3012 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.gradle.service.project;
import com.intellij.openapi.externalSystem.model.ExternalSystemException;
import com.intellij.openapi.util.Pair;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.gradle.service.execution.GradleExecutionErrorHandler;
import java.util.regex.Matcher;
/**
* @author Vladislav.Soroka
* @since 10/16/13
*/
/**
 * Base class for Gradle project-import error handlers: translates low-level
 * Gradle failures into user-friendly {@link ExternalSystemException}s with
 * actionable quick-fix hints.
 *
 * @author Vladislav.Soroka
 * @since 10/16/13
 */
public abstract class AbstractProjectImportErrorHandler {

    public static final String OPEN_GRADLE_SETTINGS = "Please fix the project's Gradle settings.";
    public static final String SET_UP_HTTP_PROXY =
        "If you are behind an HTTP proxy, please configure the proxy settings either in IDE or Gradle.";
    public static final String UNEXPECTED_ERROR_FILE_BUG = "This is an unexpected error. Please file a bug containing the idea.log file.";
    public static final String FIX_GRADLE_VERSION =
        "Please point to a supported Gradle version in the project's Gradle settings or in the project's Gradle wrapper (if applicable.)";
    public static final String EMPTY_LINE = "\n\n";

    /**
     * Maps the given failure to a user-friendly exception, or returns null if
     * this handler does not recognize it.
     */
    @Nullable
    public abstract ExternalSystemException getUserFriendlyError(@NotNull Throwable error,
                                                                 @NotNull String projectPath,
                                                                 @Nullable String buildFilePath);

    /** Delegates to {@link GradleExecutionErrorHandler#createUserFriendlyError}. */
    @NotNull
    public ExternalSystemException createUserFriendlyError(@NotNull String msg, @Nullable String location, @NotNull String... quickFixes) {
        return GradleExecutionErrorHandler.createUserFriendlyError(msg, location, quickFixes);
    }

    /**
     * Extracts the method name from a "missing method" error message; returns
     * an empty string when the text does not match the pattern.
     */
    @NotNull
    public String parseMissingMethod(@NotNull String rootCauseText) {
        Matcher methodMatcher = GradleExecutionErrorHandler.MISSING_METHOD_PATTERN.matcher(rootCauseText);
        if (methodMatcher.find()) {
            return methodMatcher.group(1);
        }
        return "";
    }

    /**
     * @deprecated use {@link GradleExecutionErrorHandler#getRootCauseAndLocation(Throwable)}
     */
    @Deprecated
    @NotNull
    public Pair<Throwable, String> getRootCauseAndLocation(@NotNull Throwable error) {
        return GradleExecutionErrorHandler.getRootCauseAndLocation(error);
    }

    /**
     * @deprecated use {@link GradleExecutionErrorHandler#getLocationFrom(Throwable)}
     */
    @Deprecated
    @Nullable
    public String getLocationFrom(@NotNull Throwable error) {
        return GradleExecutionErrorHandler.getLocationFrom(error);
    }
}
| apache-2.0 |
asereda-gs/immutables | value-fixture/src/nonimmutables/SampleRuntimeException.java | 777 | /*
Copyright 2016 Immutables Authors and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nonimmutables;
/**
 * Simple unchecked exception used by value-fixture tests; carries only a
 * detail message.
 */
public class SampleRuntimeException extends RuntimeException {
    // Exceptions are Serializable; declare an explicit version id so the
    // serialized form stays stable across recompiles.
    private static final long serialVersionUID = 1L;

    /**
     * @param message detail message describing the failure
     */
    public SampleRuntimeException(String message) {
        super(message);
    }
}
| apache-2.0 |
callkalpa/carbon-commons | components/logging/org.wso2.carbon.logging.service/src/main/java/org/wso2/carbon/logging/service/appender/CarbonMemoryAppender.java | 6042 | /*
* Copyright 2005,2006 WSO2, Inc. http://www.wso2.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.logging.service.appender;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.wso2.carbon.bootstrap.logging.LoggingBridge;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.utils.logging.CircularBuffer;
import org.wso2.carbon.utils.logging.LoggingUtils;
import org.wso2.carbon.logging.service.internal.LoggingServiceComponent;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.core.tenant.TenantManager;
import org.wso2.carbon.utils.logging.TenantAwareLoggingEvent;
import org.wso2.carbon.utils.logging.handler.TenantDomainSetter;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.logging.LogRecord;
/**
* This appender will be used to capture the logs and later send to clients, if
* requested via the logging web service. This maintains a circular buffer, of
* some fixed amount (say 100).
*/
/**
 * This appender will be used to capture the logs and later send to clients, if
 * requested via the logging web service. This maintains a circular buffer, of
 * some fixed amount (say 100).
 */
public class CarbonMemoryAppender extends AppenderSkeleton implements LoggingBridge {

    private static final Log log = LogFactory.getLog(CarbonMemoryAppender.class);

    // In-memory ring buffer holding the most recent tenant-aware log events.
    private CircularBuffer<TenantAwareLoggingEvent> circularBuffer;
    // Requested buffer capacity; -1 means "use the CircularBuffer default".
    private int bufferSize = -1;
    private String columnList;

    public CarbonMemoryAppender() {
    }

    public CarbonMemoryAppender(CircularBuffer<TenantAwareLoggingEvent> circularBuffer) {
        this.circularBuffer = circularBuffer;
    }

    public String getColumnList() {
        return columnList;
    }

    public void setColumnList(String columnList) {
        this.columnList = columnList;
    }

    /**
     * Wraps the incoming log4j event in a {@link TenantAwareLoggingEvent},
     * resolving the current tenant id and application name from the Carbon
     * context, and appends it to the circular buffer (if one is configured).
     */
    protected synchronized void append(LoggingEvent loggingEvent) {
        // Resolve the tenant id in a privileged block so appending works even
        // when the calling code has restricted permissions.
        int tenantId = AccessController.doPrivileged(new PrivilegedAction<Integer>() {
            public Integer run() {
                return PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
            }
        });
        if (tenantId == MultitenantConstants.INVALID_TENANT_ID) {
            // Fall back to resolving the id from the thread's tenant domain.
            String tenantDomain = TenantDomainSetter.getTenantDomain();
            if (tenantDomain != null && !tenantDomain.equals("")) {
                try {
                    tenantId = getTenantIdForDomain(tenantDomain);
                } catch (UserStoreException e) {
                    log.warn("Cannot find tenant id for the given tenant domain.", e);
                    //Ignore this exception.
                }
            }
        }
        String appName = CarbonContext.getThreadLocalCarbonContext().getApplicationName();
        if (appName == null) {
            appName = TenantDomainSetter.getServiceName();
        }
        Logger logger = Logger.getLogger(loggingEvent.getLoggerName());
        TenantAwareLoggingEvent tenantEvent;
        if (loggingEvent.getThrowableInformation() != null) {
            tenantEvent = new TenantAwareLoggingEvent(loggingEvent.fqnOfCategoryClass, logger,
                    loggingEvent.timeStamp, loggingEvent.getLevel(), loggingEvent.getMessage(),
                    loggingEvent.getThrowableInformation().getThrowable());
        } else {
            tenantEvent = new TenantAwareLoggingEvent(loggingEvent.fqnOfCategoryClass, logger,
                    loggingEvent.timeStamp, loggingEvent.getLevel(), loggingEvent.getMessage(),
                    null);
        }
        tenantEvent.setTenantId(Integer.toString(tenantId));
        tenantEvent.setServiceName(appName);
        if (circularBuffer != null) {
            circularBuffer.append(tenantEvent);
        }
    }

    /**
     * Resolves a tenant domain to its id via the tenant manager; the empty or
     * null domain maps to the super tenant.
     *
     * @throws UserStoreException if the tenant manager lookup fails
     */
    public int getTenantIdForDomain(String tenantDomain) throws UserStoreException {
        int tenantId;
        TenantManager tenantManager = LoggingServiceComponent.getTenantManager();
        if (tenantDomain == null || tenantDomain.equals("")) {
            tenantId = MultitenantConstants.SUPER_TENANT_ID;
        } else {
            tenantId = tenantManager.getTenantId(tenantDomain);
        }
        return tenantId;
    }

    public void close() {
        // do we need to do anything here. I hope we do not need to reset the
        // queue
        // as it might still be exposed to others
    }

    public boolean requiresLayout() {
        return true;
    }

    // Fix: return the parameterized type instead of the raw CircularBuffer so
    // callers do not need unchecked casts; backward compatible for existing callers.
    public CircularBuffer<TenantAwareLoggingEvent> getCircularQueue() {
        return circularBuffer;
    }

    public void setCircularBuffer(CircularBuffer<TenantAwareLoggingEvent> circularBuffer) {
        this.circularBuffer = circularBuffer;
    }

    public void clearCircularBuffer() {
        // Fix: the buffer is only created lazily in activateOptions(), so guard
        // against an NPE when clear is invoked before activation.
        if (this.circularBuffer != null) {
            this.circularBuffer.clear();
        }
    }

    /** Creates the buffer, honoring a configured bufferSize when positive. */
    public void activateOptions() {
        if (bufferSize < 0) {
            if (circularBuffer == null) {
                this.circularBuffer = new CircularBuffer<TenantAwareLoggingEvent>();
            }
        } else {
            this.circularBuffer = new CircularBuffer<TenantAwareLoggingEvent>(bufferSize);
        }
    }

    public int getBufferSize() {
        return bufferSize;
    }

    public void setBufferSize(int bufferSize) {
        this.bufferSize = bufferSize;
    }

    /** LoggingBridge entry point: converts a JUL record to log4j and appends it. */
    public void push(LogRecord logRecord) {
        LoggingEvent loggingEvent = LoggingUtils.getLogEvent(logRecord);
        append(loggingEvent);
    }
}
| apache-2.0 |
MikeThomsen/nifi | nifi-api/src/test/java/org/apache/nifi/processor/TestDataUnit.java | 1919 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processor;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
*
*/
/**
 * Unit tests for {@link DataUnit#parseDataSize}: verifies that B/KB/MB/GB
 * suffixes are converted to bytes for both integer and decimal magnitudes.
 */
public class TestDataUnit {

    // Binary unit multipliers used by DataUnit (1 KB = 1024 B, etc.).
    private static final long K = 1024L;
    private static final double KD = 1024D;

    @Test
    public void testParseWithIntegerValue() {
        assertEquals(300L, DataUnit.parseDataSize("300 B", DataUnit.B).longValue());
        assertEquals(300L * K, DataUnit.parseDataSize("300 KB", DataUnit.B).longValue());
        assertEquals(300L * K * K, DataUnit.parseDataSize("300 MB", DataUnit.B).longValue());
        assertEquals(300L * K * K * K, DataUnit.parseDataSize("300 GB", DataUnit.B).longValue());
    }

    @Test
    public void testParseWithDecimalValue() {
        assertEquals(300L, DataUnit.parseDataSize("300 B", DataUnit.B).longValue());
        assertEquals((long) (3.22D * KD), DataUnit.parseDataSize("3.22 KB", DataUnit.B).longValue());
        assertEquals((long) (3.22D * KD * KD), DataUnit.parseDataSize("3.22 MB", DataUnit.B).longValue());
        assertEquals((long) (3.22D * KD * KD * KD), DataUnit.parseDataSize("3.22 GB", DataUnit.B).longValue());
    }
}
| apache-2.0 |
nmorel/gwt-jackson | gwt-jackson/src/test/java/com/github/nmorel/gwtjackson/client/ser/date/DateJsonSerializerTest.java | 1249 | /*
* Copyright 2013 Nicolas Morel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.nmorel.gwtjackson.client.ser.date;
import java.util.Date;
import com.github.nmorel.gwtjackson.client.ser.AbstractJsonSerializerTest;
import com.github.nmorel.gwtjackson.client.ser.BaseDateJsonSerializer.DateJsonSerializer;
/**
* @author Nicolas Morel
*/
/**
 * Tests serialization of {@link Date} values: a date must serialize to its
 * raw epoch-milliseconds representation.
 *
 * @author Nicolas Morel
 */
public class DateJsonSerializerTest extends AbstractJsonSerializerTest<Date> {

    @Override
    protected DateJsonSerializer createSerializer() {
        return DateJsonSerializer.getInstance();
    }

    public void testSerializeValue() {
        // 2012-08-18 12:45:56.543 UTC — expected output is the epoch timestamp.
        Date value = getUTCDate( 2012, 8, 18, 12, 45, 56, 543 );
        assertSerialization( String.valueOf( value.getTime() ), value );
    }
}
| apache-2.0 |
kierarad/gocd | util/src/main/java/com/thoughtworks/go/util/command/CheckedCommandLineException.java | 811 | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.util.command;
/**
 * Checked exception signalling a failure while building or running an external
 * command line; always wraps the underlying cause.
 */
public class CheckedCommandLineException extends CruiseControlException {
    /**
     * @param s detail message describing the command-line failure
     * @param t underlying cause
     */
    public CheckedCommandLineException(String s, Throwable t) {
        super(s, t);
    }
} | apache-2.0 |
MetSystem/jbpm-designer | jbpm-designer-backend/src/main/java/org/jbpm/designer/epn/impl/EpnStencil.java | 618 | /*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.designer.epn.impl;
// Placeholder stencil type for the EPN (Event Process Notation) profile.
// Currently carries no state or behavior of its own; presumably kept as an
// extension point for the designer backend — TODO confirm intended use.
public class EpnStencil {
}
| apache-2.0 |
nguyenphanhuynh/metasyntactic | java/org/metasyntactic/automata/compiler/python/scanner/delimiters/MinusEqualsDelimiterToken.java | 292 | package org.metasyntactic.automata.compiler.python.scanner.delimiters;
/**
 * Token for the Python "-=" (in-place subtraction) delimiter.
 * Stateless, so a single shared instance is exposed via {@link #instance}.
 */
public class MinusEqualsDelimiterToken extends DelimiterToken {
    /** Shared singleton instance; the constructor is private. */
    public static final MinusEqualsDelimiterToken instance = new MinusEqualsDelimiterToken();

    private MinusEqualsDelimiterToken() {
        super("-=");
    }
}
| apache-2.0 |
apache/gora | gora-redis/src/test/java/org/apache/gora/redis/store/RedisStoreHashTest.java | 2016 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gora.redis.store;
import org.apache.gora.redis.GoraRedisTestDriver;
import org.apache.gora.redis.util.ServerMode;
import org.apache.gora.redis.util.StorageMode;
import org.apache.gora.store.DataStoreTestBase;
import org.junit.Ignore;
import org.junit.Test;
/**
* Tests extending {@link org.apache.gora.store.DataStoreTestBase} which run the
* base JUnit test suite for Gora.
*/
/**
 * Tests extending {@link org.apache.gora.store.DataStoreTestBase} which run the
 * base JUnit test suite for Gora.
 */
public class RedisStoreHashTest extends DataStoreTestBase {

    static {
        // Run the shared test suite against a single Redis server using the
        // single-key (hash) storage layout.
        setTestDriver(new GoraRedisTestDriver(StorageMode.SINGLEKEY, ServerMode.SINGLE));
    }

    // Unsupported functionality due to the limitations in Redis:
    // Redis is schemaless, so the explicit schema lifecycle tests below are
    // disabled. Each override still delegates to super so the suite structure
    // is preserved if the @Ignore is ever lifted.
    @Test
    @Ignore("Explicit schema creation related functionality is not supported in Redis")
    @Override
    public void testTruncateSchema() throws Exception {
        super.testTruncateSchema();
    }

    @Test
    @Ignore("Explicit schema creation related functionality is not supported in Redis")
    @Override
    public void testDeleteSchema() throws Exception {
        super.testDeleteSchema();
    }

    @Test
    @Ignore("Explicit schema creation related functionality is not supported in Redis")
    @Override
    public void testSchemaExists() throws Exception {
        super.testSchemaExists();
    }
}
| apache-2.0 |
jean-merelis/keycloak | services/src/main/java/org/keycloak/exportimport/dir/DirExportProvider.java | 3522 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.exportimport.dir;
import org.keycloak.representations.VersionRepresentation;
import org.keycloak.exportimport.util.ExportUtils;
import org.keycloak.exportimport.util.MultipleStepsExportProvider;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserModel;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.util.JsonSerialization;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class DirExportProvider extends MultipleStepsExportProvider {
private final File rootDirectory;
public DirExportProvider() {
// Determine system tmp directory
String tempDir = System.getProperty("java.io.tmpdir");
this.rootDirectory = new File(tempDir + "/keycloak-export");
this.rootDirectory.mkdirs();
logger.infof("Exporting into directory %s", this.rootDirectory.getAbsolutePath());
}
public DirExportProvider(File rootDirectory) {
this.rootDirectory = rootDirectory;
this.rootDirectory.mkdirs();
logger.infof("Exporting into directory %s", this.rootDirectory.getAbsolutePath());
}
public static boolean recursiveDeleteDir(File dirPath) {
if (dirPath.exists()) {
File[] files = dirPath.listFiles();
for (int i = 0; i < files.length; i++) {
if (files[i].isDirectory()) {
recursiveDeleteDir(files[i]);
} else {
files[i].delete();
}
}
}
if (dirPath.exists())
return dirPath.delete();
else
return true;
}
@Override
public void writeRealm(String fileName, RealmRepresentation rep) throws IOException {
File file = new File(this.rootDirectory, fileName);
FileOutputStream stream = new FileOutputStream(file);
JsonSerialization.prettyMapper.writeValue(stream, rep);
}
@Override
protected void writeUsers(String fileName, KeycloakSession session, RealmModel realm, List<UserModel> users) throws IOException {
File file = new File(this.rootDirectory, fileName);
FileOutputStream os = new FileOutputStream(file);
ExportUtils.exportUsersToStream(session, realm, users, JsonSerialization.prettyMapper, os);
}
@Override
protected void writeVersion(String fileName, VersionRepresentation version) throws IOException {
File file = new File(this.rootDirectory, fileName);
FileOutputStream stream = new FileOutputStream(file);
JsonSerialization.prettyMapper.writeValue(stream, version);
}
@Override
public void close() {
}
}
| apache-2.0 |
payten/nyu-sakai-10.4 | evaluation/api/src/java/org/sakaiproject/evaluation/dao/EvalDaoInvoker.java | 1036 | /**
* $Id$
* $URL$
* EvalDaoInvoker.java - evaluation - Mar 7, 2008 1:17:03 PM - azeckoski
**************************************************************************
* Copyright (c) 2008 Centre for Applied Research in Educational Technologies, University of Cambridge
* Licensed under the Educational Community License version 1.0
*
* A copy of the Educational Community License has been included in this
* distribution and is available at: http://www.opensource.org/licenses/ecl1.php
*
* Aaron Zeckoski (azeckoski@gmail.com) (aaronz@vt.edu) (aaron@caret.cam.ac.uk)
*/
package org.sakaiproject.evaluation.dao;
/**
* Used to allow the tool to wrap our transactions,
* this allows us to have one large transaction per tool request
*
* @author Aaron Zeckoski (aaron@caret.cam.ac.uk)
*/
public interface EvalDaoInvoker {

    /**
     * Executes the given runnable inside a single DAO-controlled transaction,
     * allowing one large transaction to span an entire tool request.
     *
     * @param toInvoke the work to run inside the transaction
     */
    public void invokeTransactionalAccess(Runnable toInvoke);

}
| apache-2.0 |
dolfdijkstra/gst-foundation | gsf-legacy/src/main/java/com/fatwire/gst/foundation/taglib/GsfSimpleTag.java | 1654 | /*
* Copyright 2010 FatWire Corporation. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fatwire.gst.foundation.taglib;
import javax.servlet.jsp.PageContext;
import javax.servlet.jsp.tagext.SimpleTagSupport;
import com.fatwire.gst.foundation.controller.action.support.IcsFactoryUtil;
import COM.FutureTense.Interfaces.ICS;
/**
* Abstract class, usefull to get to ICS in a SimpleTag.
*
* @author Dolf Dijkstra
* @since Apr 11, 2011
*
* @deprecated as of release 12.x
*
*/
public abstract class GsfSimpleTag extends SimpleTagSupport {

    /**
     * Looks up the ICS object that {@code GsfRootTag} stored in page scope.
     *
     * @throws RuntimeException if no ICS object is present on the page context
     */
    protected final ICS getICS() {
        final Object candidate = getJspContext().getAttribute(GsfRootTag.ICS_VARIABLE_NAME, PageContext.PAGE_SCOPE);
        if (!(candidate instanceof ICS)) {
            throw new RuntimeException("Can't find ICS object on the page context.");
        }
        return (ICS) candidate;
    }

    /** @return the JSP context, narrowed to {@link PageContext} */
    protected final PageContext getPageContext() {
        return (PageContext) getJspContext();
    }

    /** Resolves a named service of the given type from the ICS object factory. */
    protected final <T> T getService(String name, Class<T> type) {
        return IcsFactoryUtil.getFactory(getICS()).getObject(name, type);
    }
}
| apache-2.0 |
ecarm002/incubator-asterixdb | asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/ToDoubleTypeComputer.java | 2063 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.om.typecomputer.impl;
import org.apache.asterix.om.typecomputer.base.AbstractResultTypeComputer;
import org.apache.asterix.om.types.AUnionType;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
public class ToDoubleTypeComputer extends AbstractResultTypeComputer {
public static final ToDoubleTypeComputer INSTANCE = new ToDoubleTypeComputer();
private ToDoubleTypeComputer() {
}
@Override
protected IAType getResultType(ILogicalExpression expr, IAType... strippedInputTypes) throws AlgebricksException {
IAType strippedInputType = strippedInputTypes[0];
switch (strippedInputType.getTypeTag()) {
case BOOLEAN:
case TINYINT:
case SMALLINT:
case INTEGER:
case BIGINT:
case FLOAT:
case DOUBLE:
// these types cannot result in NULL output
return BuiltinType.ADOUBLE;
default:
return AUnionType.createNullableType(BuiltinType.ADOUBLE);
}
}
}
| apache-2.0 |
dvandok/not-yet-commons-ssl-debian | src/java/org/apache/commons/ssl/asn1/ASN1StreamParser.java | 5634 | package org.apache.commons.ssl.asn1;
import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
/**
 * Streaming parser for BER/DER-encoded ASN.1 data: reads tag and length
 * fields from an input stream and produces parser objects for the content.
 */
public class ASN1StreamParser {
    // Underlying encoded input; package-visible so cooperating parsers can share it.
    InputStream _in;

    // Upper bound on any definite length read from the stream; used to reject
    // corrupted length fields that exceed the available data.
    private int _limit;
    // Set once end-of-file has been observed, so a second read past EOF fails loudly.
    private boolean _eofFound;

    public ASN1StreamParser(
        InputStream in) {
        // No known bound on the stream size — accept any non-negative length.
        this(in, Integer.MAX_VALUE);
    }

    public ASN1StreamParser(
        InputStream in,
        int limit) {
        this._in = in;
        this._limit = limit;
    }

    public ASN1StreamParser(
        byte[] encoding) {
        // The byte array's size is a natural limit for any embedded length field.
        this(new ByteArrayInputStream(encoding), encoding.length);
    }

    InputStream getParentStream() {
        return _in;
    }

    /**
     * Reads a BER/DER length field.
     *
     * Short form (first octet <= 127) encodes the length directly; 0x80 means
     * indefinite length (returned as -1); long form (first octet > 127) gives
     * the count of following length octets in its low 7 bits.
     *
     * @return the decoded length, or -1 for indefinite-length encoding
     * @throws IOException on EOF, a length wider than 4 bytes, a negative
     *         (overflowed) length, or a length at or beyond the stream limit
     */
    private int readLength()
        throws IOException {
        int length = _in.read();
        if (length < 0) {
            throw new EOFException("EOF found when length expected");
        }
        if (length == 0x80) {
            return -1; // indefinite-length encoding
        }
        if (length > 127) {
            // Long form: low 7 bits of the first octet = number of length octets.
            int size = length & 0x7f;
            if (size > 4) {
                throw new IOException("DER length more than 4 bytes");
            }
            length = 0;
            // Accumulate the big-endian length value octet by octet.
            for (int i = 0; i < size; i++) {
                int next = _in.read();
                if (next < 0) {
                    throw new EOFException("EOF found reading length");
                }
                length = (length << 8) + next;
            }
            if (length < 0) {
                throw new IOException("corrupted stream - negative length found");
            }
            if (length >= _limit) // after all we must have read at least 1 byte
            {
                throw new IOException("corrupted stream - out of bounds length found");
            }
        }
        return length;
    }
public DEREncodable readObject()
throws IOException {
int tag = _in.read();
if (tag == -1) {
if (_eofFound) {
throw new EOFException("attempt to read past end of file.");
}
_eofFound = true;
return null;
}
//
// turn of looking for "00" while we resolve the tag
//
set00Check(false);
//
// calculate tag number
//
int baseTagNo = tag & ~DERTags.CONSTRUCTED;
int tagNo = baseTagNo;
if ((tag & DERTags.TAGGED) != 0) {
tagNo = tag & 0x1f;
//
// with tagged object tag number is bottom 5 bits, or stored at the start of the content
//
if (tagNo == 0x1f) {
tagNo = 0;
int b = _in.read();
while ((b >= 0) && ((b & 0x80) != 0)) {
tagNo |= (b & 0x7f);
tagNo <<= 7;
b = _in.read();
}
if (b < 0) {
_eofFound = true;
throw new EOFException("EOF encountered inside tag value.");
}
tagNo |= (b & 0x7f);
}
}
//
// calculate length
//
int length = readLength();
if (length < 0) // indefinite length
{
IndefiniteLengthInputStream indIn = new IndefiniteLengthInputStream(_in);
switch (baseTagNo) {
case DERTags.NULL:
while (indIn.read() >= 0) {
// make sure we skip to end of object
}
return BERNull.INSTANCE;
case DERTags.OCTET_STRING:
return new BEROctetStringParser(new ASN1ObjectParser(tag, tagNo, indIn));
case DERTags.SEQUENCE:
return new BERSequenceParser(new ASN1ObjectParser(tag, tagNo, indIn));
case DERTags.SET:
return new BERSetParser(new ASN1ObjectParser(tag, tagNo, indIn));
default:
return new BERTaggedObjectParser(tag, tagNo, indIn);
}
} else {
DefiniteLengthInputStream defIn = new DefiniteLengthInputStream(_in, length);
switch (baseTagNo) {
case DERTags.INTEGER:
return new DERInteger(defIn.toByteArray());
case DERTags.NULL:
defIn.toByteArray(); // make sure we read to end of object bytes.
return DERNull.INSTANCE;
case DERTags.OBJECT_IDENTIFIER:
return new DERObjectIdentifier(defIn.toByteArray());
case DERTags.OCTET_STRING:
return new DEROctetString(defIn.toByteArray());
case DERTags.SEQUENCE:
return new DERSequence(loadVector(defIn, length)).parser();
case DERTags.SET:
return new DERSet(loadVector(defIn, length)).parser();
default:
return new BERTaggedObjectParser(tag, tagNo, defIn);
}
}
}
private void set00Check(boolean enabled) {
if (_in instanceof IndefiniteLengthInputStream) {
((IndefiniteLengthInputStream) _in).setEofOn00(enabled);
}
}
private ASN1EncodableVector loadVector(InputStream in, int length)
throws IOException {
ASN1InputStream aIn = new ASN1InputStream(in, length);
ASN1EncodableVector v = new ASN1EncodableVector();
DERObject obj;
while ((obj = aIn.readObject()) != null) {
v.add(obj);
}
return v;
}
}
| apache-2.0 |
Flipkart/elasticsearch | src/main/java/org/elasticsearch/common/io/stream/StreamInput.java | 14677 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.io.stream;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.StringAndBytesText;
import org.elasticsearch.common.text.Text;
import org.joda.time.DateTime;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.util.*;
/**
*
*/
/**
 * Base class for reading elasticsearch's binary wire format.
 *
 * Provides primitive readers (fixed- and variable-length integers, strings,
 * arrays, generic values) on top of the two abstract byte-level operations
 * {@link #readByte()} and {@link #readBytes(byte[], int, int)}. The encoding
 * must mirror the corresponding StreamOutput writers exactly.
 *
 * Instances are stateful (see the shared {@code spare} buffer) and therefore
 * not safe for concurrent use.
 */
public abstract class StreamInput extends InputStream {
    // Wire-format version of the peer that produced this stream; defaults to
    // the local node's version until explicitly set.
    private Version version = Version.CURRENT;

    public Version getVersion() {
        return this.version;
    }

    // Returns this for call chaining.
    public StreamInput setVersion(Version version) {
        this.version = version;
        return this;
    }

    /**
     * Reads and returns a single byte.
     */
    public abstract byte readByte() throws IOException;

    /**
     * Reads a specified number of bytes into an array at the specified offset.
     *
     * @param b      the array to read bytes into
     * @param offset the offset in the array to start storing bytes
     * @param len    the number of bytes to read
     */
    public abstract void readBytes(byte[] b, int offset, int len) throws IOException;

    /**
     * Reads a bytes reference from this stream, might hold an actual reference to the underlying
     * bytes of the stream. The length is encoded as a preceding vInt.
     */
    public BytesReference readBytesReference() throws IOException {
        int length = readVInt();
        return readBytesReference(length);
    }

    /**
     * Reads a bytes reference of the given length from this stream, might hold an actual
     * reference to the underlying bytes of the stream.
     */
    public BytesReference readBytesReference(int length) throws IOException {
        if (length == 0) {
            return BytesArray.EMPTY;
        }
        byte[] bytes = new byte[length];
        readBytes(bytes, 0, length);
        return new BytesArray(bytes, 0, length);
    }

    // Reads a vInt-length-prefixed Lucene BytesRef.
    public BytesRef readBytesRef() throws IOException {
        int length = readVInt();
        return readBytesRef(length);
    }

    // Reads a BytesRef of a known length (0 yields an empty ref).
    public BytesRef readBytesRef(int length) throws IOException {
        if (length == 0) {
            return new BytesRef();
        }
        byte[] bytes = new byte[length];
        readBytes(bytes, 0, length);
        return new BytesRef(bytes, 0, length);
    }

    // Fills the whole array from the stream.
    public void readFully(byte[] b) throws IOException {
        readBytes(b, 0, b.length);
    }

    // Two bytes, big-endian.
    public short readShort() throws IOException {
        return (short) (((readByte() & 0xFF) << 8) | (readByte() & 0xFF));
    }

    /**
     * Reads four bytes and returns an int (big-endian).
     */
    public int readInt() throws IOException {
        return ((readByte() & 0xFF) << 24) | ((readByte() & 0xFF) << 16)
                | ((readByte() & 0xFF) << 8) | (readByte() & 0xFF);
    }

    /**
     * Reads an int stored in variable-length format. Reads between one and
     * five bytes. Smaller values take fewer bytes. Negative numbers
     * will always use all 5 bytes and are therefore better serialized
     * using {@link #readInt}
     */
    public int readVInt() throws IOException {
        byte b = readByte();
        int i = b & 0x7F;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        i |= (b & 0x7F) << 7;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        i |= (b & 0x7F) << 14;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        i |= (b & 0x7F) << 21;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        // A well-formed 5th byte carries only the top 4 value bits.
        assert (b & 0x80) == 0;
        return i | ((b & 0x7F) << 28);
    }

    /**
     * Reads eight bytes and returns a long (big-endian).
     */
    public long readLong() throws IOException {
        return (((long) readInt()) << 32) | (readInt() & 0xFFFFFFFFL);
    }

    /**
     * Reads a long stored in variable-length format. Reads between one and
     * nine bytes. Smaller values take fewer bytes. Negative numbers are not
     * supported.
     */
    public long readVLong() throws IOException {
        byte b = readByte();
        long i = b & 0x7FL;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        i |= (b & 0x7FL) << 7;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        i |= (b & 0x7FL) << 14;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        i |= (b & 0x7FL) << 21;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        i |= (b & 0x7FL) << 28;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        i |= (b & 0x7FL) << 35;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        i |= (b & 0x7FL) << 42;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        i |= (b & 0x7FL) << 49;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = readByte();
        assert (b & 0x80) == 0;
        return i | ((b & 0x7FL) << 56);
    }

    // Length is a fixed 4-byte int here (not a vInt); -1 encodes null.
    @Nullable
    public Text readOptionalText() throws IOException {
        int length = readInt();
        if (length == -1) {
            return null;
        }
        return new StringAndBytesText(readBytesReference(length));
    }

    public Text readText() throws IOException {
        // use StringAndBytes so we can cache the string if its ever converted to it
        int length = readInt();
        return new StringAndBytesText(readBytesReference(length));
    }

    // A leading boolean flags presence; null is encoded as false.
    @Nullable
    public String readOptionalString() throws IOException {
        if (readBoolean()) {
            return readString();
        }
        return null;
    }

    @Nullable
    public Integer readOptionalVInt() throws IOException {
        if (readBoolean()) {
            return readVInt();
        }
        return null;
    }

    // Reused scratch buffer for readString(); shared mutable state makes this
    // class non-reentrant and not thread-safe.
    private final CharsRefBuilder spare = new CharsRefBuilder();

    // Reads a vInt char count followed by modified-UTF-8-style bytes.
    // NOTE(review): lead bytes with high nibble 8-11 (continuation bytes) and
    // 15 (4-byte sequences) fall through without appending a char, which would
    // spin in the while loop on malformed input; presumably the writer never
    // emits them - TODO confirm against StreamOutput.writeString.
    public String readString() throws IOException {
        final int charCount = readVInt();
        spare.clear();
        spare.grow(charCount);
        int c = 0;
        while (spare.length() < charCount) {
            c = readByte() & 0xff;
            switch (c >> 4) {
                case 0:
                case 1:
                case 2:
                case 3:
                case 4:
                case 5:
                case 6:
                case 7:
                    // single-byte (ASCII) char
                    spare.append((char) c);
                    break;
                case 12:
                case 13:
                    // two-byte sequence
                    spare.append((char) ((c & 0x1F) << 6 | readByte() & 0x3F));
                    break;
                case 14:
                    // three-byte sequence
                    spare.append((char) ((c & 0x0F) << 12 | (readByte() & 0x3F) << 6 | (readByte() & 0x3F) << 0));
                    break;
            }
        }
        return spare.toString();
    }

    public final float readFloat() throws IOException {
        return Float.intBitsToFloat(readInt());
    }

    public final double readDouble() throws IOException {
        return Double.longBitsToDouble(readLong());
    }

    /**
     * Reads a boolean (any non-zero byte is true).
     */
    public final boolean readBoolean() throws IOException {
        return readByte() != 0;
    }

    // Three-valued encoding: 2 = null, 1 = true, anything else = false.
    @Nullable
    public final Boolean readOptionalBoolean() throws IOException {
        byte val = readByte();
        if (val == 2) {
            return null;
        }
        if (val == 1) {
            return true;
        }
        return false;
    }

    /**
     * Resets the stream.
     */
    @Override
    public abstract void reset() throws IOException;

    /**
     * Closes the stream to further operations.
     */
    @Override
    public abstract void close() throws IOException;

    //    // IS
    //
    //    @Override public int read() throws IOException {
    //        return readByte();
    //    }
    //
    //    // Here, we assume that we always can read the full byte array
    //
    //    @Override public int read(byte[] b, int off, int len) throws IOException {
    //        readBytes(b, off, len);
    //        return len;
    //    }

    // vInt count followed by that many strings; empty arrays share a constant.
    public String[] readStringArray() throws IOException {
        int size = readVInt();
        if (size == 0) {
            return Strings.EMPTY_ARRAY;
        }
        String[] ret = new String[size];
        for (int i = 0; i < size; i++) {
            ret[i] = readString();
        }
        return ret;
    }

    // Convenience wrapper over readGenericValue() for map payloads.
    @Nullable
    public Map<String, Object> readMap() throws IOException {
        return (Map<String, Object>) readGenericValue();
    }

    // Reads a type-tagged generic value; the leading byte selects the decoder
    // and must match the tags written by StreamOutput.writeGenericValue.
    @SuppressWarnings({"unchecked"})
    @Nullable
    public Object readGenericValue() throws IOException {
        byte type = readByte();
        switch (type) {
            case -1:
                return null;
            case 0:
                return readString();
            case 1:
                return readInt();
            case 2:
                return readLong();
            case 3:
                return readFloat();
            case 4:
                return readDouble();
            case 5:
                return readBoolean();
            case 6:
                // raw byte[] payload
                int bytesSize = readVInt();
                byte[] value = new byte[bytesSize];
                readBytes(value, 0, bytesSize);
                return value;
            case 7:
                // List of nested generic values
                int size = readVInt();
                List list = new ArrayList(size);
                for (int i = 0; i < size; i++) {
                    list.add(readGenericValue());
                }
                return list;
            case 8:
                // Object[] of nested generic values
                int size8 = readVInt();
                Object[] list8 = new Object[size8];
                for (int i = 0; i < size8; i++) {
                    list8[i] = readGenericValue();
                }
                return list8;
            case 9:
                // insertion-ordered map
                int size9 = readVInt();
                Map map9 = new LinkedHashMap(size9);
                for (int i = 0; i < size9; i++) {
                    map9.put(readString(), readGenericValue());
                }
                return map9;
            case 10:
                // unordered map
                int size10 = readVInt();
                Map map10 = new HashMap(size10);
                for (int i = 0; i < size10; i++) {
                    map10.put(readString(), readGenericValue());
                }
                return map10;
            case 11:
                return readByte();
            case 12:
                return new Date(readLong());
            case 13:
                return new DateTime(readLong());
            case 14:
                return readBytesReference();
            case 15:
                return readText();
            case 16:
                return readShort();
            case 17:
                return readIntArray();
            case 18:
                return readLongArray();
            case 19:
                return readFloatArray();
            case 20:
                return readDoubleArray();
            case 21:
                return readBytesRef();
            default:
                throw new IOException("Can't read unknown type [" + type + "]");
        }
    }

    // vInt count followed by fixed-width ints.
    public int[] readIntArray() throws IOException {
        int length = readVInt();
        int[] values = new int[length];
        for (int i = 0; i < length; i++) {
            values[i] = readInt();
        }
        return values;
    }

    public long[] readLongArray() throws IOException {
        int length = readVInt();
        long[] values = new long[length];
        for (int i = 0; i < length; i++) {
            values[i] = readLong();
        }
        return values;
    }

    public float[] readFloatArray() throws IOException {
        int length = readVInt();
        float[] values = new float[length];
        for (int i = 0; i < length; i++) {
            values[i] = readFloat();
        }
        return values;
    }

    public double[] readDoubleArray() throws IOException {
        int length = readVInt();
        double[] values = new double[length];
        for (int i = 0; i < length; i++) {
            values[i] = readDouble();
        }
        return values;
    }

    public byte[] readByteArray() throws IOException {
        int length = readVInt();
        byte[] values = new byte[length];
        for (int i = 0; i < length; i++) {
            values[i] = readByte();
        }
        return values;
    }

    /**
     * Deserializes a potential null value: a leading boolean flags presence;
     * when present, the supplied streamable is populated and returned,
     * otherwise null is returned (and the argument is left untouched).
     */
    public <T extends Streamable> T readOptionalStreamable(T streamable) throws IOException {
        if (readBoolean()) {
            streamable.readFrom(this);
            return streamable;
        } else {
            return null;
        }
    }

    // NOTE(review): uses Java native serialization on the stream contents;
    // safe only because the transport layer is trusted intra-cluster traffic -
    // never feed this untrusted data.
    public <T extends Throwable> T readThrowable() throws IOException {
        try {
            ObjectInputStream oin = new ObjectInputStream(this);
            return (T) oin.readObject();
        } catch (ClassNotFoundException e) {
            throw new IOException("failed to deserialize exception", e);
        }
    }

    // Wraps a BytesReference; materializes a backing array first if needed.
    public static StreamInput wrap(BytesReference reference) {
        if (reference.hasArray() == false) {
            reference = reference.toBytesArray();
        }
        return wrap(reference.array(), reference.arrayOffset(), reference.length());
    }

    public static StreamInput wrap(byte[] bytes) {
        return wrap(bytes, 0, bytes.length);
    }

    public static StreamInput wrap(byte[] bytes, int offset, int length) {
        return new InputStreamStreamInput(new ByteArrayInputStream(bytes, offset, length));
    }
}
| apache-2.0 |
RLDevOps/Demo | src/main/java/org/olat/basesecurity/BaseSecurity.java | 15856 | /**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.basesecurity;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.olat.core.id.Identity;
import org.olat.core.id.OLATResourceable;
import org.olat.core.id.Roles;
import org.olat.core.id.User;
import org.olat.resource.OLATResource;
/**
* Description: <br>
* TODO: Class Description
* <P>
*
* @author Felix Jost
*/
/**
 * Core security manager interface of the OLAT base security layer: manages
 * identities, security groups, group memberships, policies (permissions on
 * OLAT resources), authentications and invitations.
 * <P>
 *
 * @author Felix Jost
 */
public interface BaseSecurity {
    /**
     * Initialization hook, called once at startup.
     */
    public void init();

    /**
     * Checks whether the identity holds the given permission on the resourceable.
     *
     * @param identity
     * @param permission
     * @param olatResourceable
     * @return true if permitted
     */
    public boolean isIdentityPermittedOnResourceable(Identity identity, String permission, OLATResourceable olatResourceable);

    /**
     * Get the identity's roles
     *
     * @param identity
     * @return The roles of the identity
     */
    public Roles getRoles(Identity identity);

    /**
     * @param identity
     * @param permission
     * @param olatResourceable
     * @param checkTypeRight
     * @return true if permitted
     */
    public boolean isIdentityPermittedOnResourceable(Identity identity, String permission, OLATResourceable olatResourceable, boolean checkTypeRight);

    /**
     * Use only if really needed. Normally better use isIdentityPermittedOnResourceable!
     *
     * @param identity
     * @param secGroup
     * @return true if the identity is in the group
     */
    public boolean isIdentityInSecurityGroup(Identity identity, SecurityGroup secGroup);

    /**
     * Lists all members of a security group.
     *
     * @param secGroup
     * @return list of Identities
     */
    public List<Identity> getIdentitiesOfSecurityGroup(SecurityGroup secGroup);

    /**
     * @param secGroup
     * @return a List of Object[] with the array[0] = Identity, array[1] = addedToGroupTimestamp
     */
    public List<Object[]> getIdentitiesAndDateOfSecurityGroup(SecurityGroup secGroup);

    /**
     * @see org.olat.basesecurity.Manager#getIdentitiesAndDateOfSecurityGroup(org.olat.basesecurity.SecurityGroup)
     * @param sortedByAddDate true = return list of identities sorted by added date
     */
    public List<Object[]> getIdentitiesAndDateOfSecurityGroup(SecurityGroup secGroup, boolean sortedByAddDate);

    /**
     * Get date where identity joined a security group
     *
     * @param secGroup
     * @param identity
     * @return join date of given security group. May return null if group doesn't exist or user isn't in this group
     */
    public Date getSecurityGroupJoinDateForIdentity(SecurityGroup secGroup, Identity identity);

    /**
     * @param securityGroupName
     * @return the security group
     */
    public SecurityGroup findSecurityGroupByName(String securityGroupName);

    /**
     * Find an identity by its name. This is an exact match. Use the getIdentitiesByPowerSearch() method if you also want to find substrings.
     * <p>
     * Be aware that this method does <b>not</b> check the identity's status! This method returns identities with any state, also deleted identities!
     *
     * @param identityName
     * @return the identity or null if not found
     */
    public Identity findIdentityByName(String identityName);

    /**
     * find an identity by the key instead of the username. Prefer this method as findByName will become deprecated soon.
     *
     * @param identityKey the key of the identity to load; may not be null or zero
     * @return the identity or an exception if not found
     */
    public Identity loadIdentityByKey(Long identityKey);

    /**
     * find an identity by the key or return null if no identity found
     *
     * @param identityKey the key of the identity to load; may not be null or zero
     * @param strict if true, missing identities raise an exception instead of returning null
     * @return the identity or null
     */
    public Identity loadIdentityByKey(Long identityKey, boolean strict);

    /**
     * get number of users with last login greater than lastLoginLimit
     *
     * @param lastLoginLimit
     * @return number of unique user logins since the given date
     */
    public Long countUniqueUserLoginsSince(Date lastLoginLimit);

    /**
     * @param secGroup
     * @return number of members in the security group
     */
    public int countIdentitiesOfSecurityGroup(SecurityGroup secGroup);

    /**
     * @param username the username
     * @param user the unpersisted User
     * @param provider the provider of the authentication ("OLAT" or "AAI"). If null, no authentication token is generated.
     * @param authusername the username used as authentication credential (=username for provider "OLAT")
     * @param credential the credentials or null if not used
     * @return the new identity
     */
    public Identity createAndPersistIdentity(String username, User user, String provider, String authusername, String credential);

    /**
     * @param username the username
     * @param user the unpersisted User
     * @param provider the provider of the authentication ("OLAT" or "AAI"). If null, no authentication token is generated.
     * @param authusername the username used as authentication credential (=username for provider "OLAT")
     * @param credential the credentials or null if not used
     * @return the new identity
     */
    public Identity createAndPersistIdentityAndUser(String username, User user, String provider, String authusername, String credential);

    /**
     * Return the List of associated Authentications.
     *
     * @param identity
     * @return a list of Authentication
     */
    public List<Authentication> getAuthentications(Identity identity);

    /**
     * @param identity
     * @param provider
     * @return Authentication for this identity and provider or NULL if not found
     */
    public Authentication findAuthentication(Identity identity, String provider);

    /**
     * @param identity
     * @param provider
     * @param authUsername
     * @param credential
     * @return an Authentication
     */
    public Authentication createAndPersistAuthentication(Identity identity, String provider, String authUsername, String credential);

    /**
     * @param authentication
     */
    public void deleteAuthentication(Authentication authentication);

    // --- SecGroup management

    /**
     * create only makes no sense, since there are no attributes to set
     *
     * @return a new persisted SecurityGroup or throws an Exception
     */
    public SecurityGroup createAndPersistSecurityGroup();

    /**
     * create only makes no sense, since there are no attributes to set
     *
     * @param groupName
     * @return the newly created security group
     */
    public SecurityGroup createAndPersistNamedSecurityGroup(String groupName);

    /**
     * Removes the group with all the identities contained in it; the identities themselves are of course not deleted.
     *
     * @param secGroup
     */
    public void deleteSecurityGroup(SecurityGroup secGroup);

    /**
     * @param identity
     * @param secGroup
     */
    public void addIdentityToSecurityGroup(Identity identity, SecurityGroup secGroup);

    /**
     * Removes the identity from this security group or does nothing if the identity is not in the group at all.
     *
     * @param identity
     * @param secGroup
     */
    public void removeIdentityFromSecurityGroup(Identity identity, SecurityGroup secGroup);

    // --- Policy management
    // again no pure RAM creation, since all attributes are mandatory and given by
    // the system, not by user input

    /**
     * the olatResourceable is not required to have some persisted implementation, but the manager will use the OLATResource to persist it. If the olatResourceable used
     * OLATResource as its persister, then the same OLATResource (same row in table) will be used by the manager use as internal reference in the Policy table
     *
     * @param secGroup
     * @param permission
     * @param olatResourceable
     * @return the newly created policy
     */
    public Policy createAndPersistPolicy(SecurityGroup secGroup, String permission, OLATResourceable olatResourceable);

    public Policy createAndPersistPolicy(SecurityGroup secGroup, String permission, Date from, Date to, OLATResourceable olatResourceable);

    /**
     * Creates and persists a policy for a certain OLAT-resource (instead of OLAT-resourceable)
     *
     * @param secGroup
     * @param permission
     * @param olatResource
     * @return the newly created policy
     */
    public Policy createAndPersistPolicyWithResource(SecurityGroup secGroup, String permission, OLATResource olatResource);

    public Policy createAndPersistPolicyWithResource(SecurityGroup secGroup, String permission, Date from, Date to, OLATResource olatResource);

    /**
     * Create and persist an invitation with its security group and security token.
     *
     * @return the new invitation
     */
    public Invitation createAndPersistInvitation();

    /**
     * Update the invitation
     *
     * @param invitation
     */
    public void updateInvitation(Invitation invitation);

    /**
     * Is the invitation linked to any valid policies
     *
     * @param token
     * @param atDate
     * @return true if at least one policy is valid at the given date
     */
    public boolean hasInvitationPolicies(String token, Date atDate);

    /**
     * Find an invitation by its security group
     *
     * @param secGroup
     * @return The invitation or null if not found
     */
    public Invitation findInvitation(SecurityGroup secGroup);

    /**
     * Find an invitation by its security token
     *
     * @param token
     * @return The invitation or null if not found
     */
    public Invitation findInvitation(String token);

    /**
     * Check if the identity has an invitation, valid or not
     *
     * @param identity
     * @return true if the identity is invited
     */
    public boolean isIdentityInvited(Identity identity);

    /**
     * Delete an invitation
     *
     * @param invitation
     */
    public void deleteInvitation(Invitation invitation);

    /**
     * Clean up old invitations and set temporary users to deleted
     */
    public void cleanUpInvitations();

    /**
     * @param secGroup
     * @param permission
     * @param olatResourceable
     */
    public void deletePolicy(SecurityGroup secGroup, String permission, OLATResourceable olatResourceable);

    // public void deletePolicy(Policy policy); //just deletes the policy, but not
    // the resource

    // some queries mainly for the group/groupcontext management

    /**
     * @param secGroup
     * @return a list of Policy objects
     */
    public List<Policy> getPoliciesOfSecurityGroup(SecurityGroup secGroup);

    /**
     * Return the policies
     *
     * @param resource The resource (mandatory)
     * @param securityGroup The security group (optional)
     * @return the matching policies
     */
    public List<Policy> getPoliciesOfResource(OLATResourceable resource, SecurityGroup securityGroup);

    /**
     * Update the policy valid dates
     *
     * @param policy
     * @param from
     * @param to
     */
    public void updatePolicy(Policy policy, Date from, Date to);

    /**
     * use for testing ONLY.
     *
     * @param permission
     * @param olatResourceable
     * @return a list of SecurityGroup objects
     */
    public List<SecurityGroup> getGroupsWithPermissionOnOlatResourceable(String permission, OLATResourceable olatResourceable);

    /**
     * use for testing ONLY.
     *
     * @param permission
     * @param olatResourceable
     * @return a list of Identity objects
     */
    public List<Identity> getIdentitiesWithPermissionOnOlatResourceable(String permission, OLATResourceable olatResourceable);

    /**
     * for debugging and info by the olat admins.
     * NOTE(review): the comment in the original described a scalar query returning
     * Object[] rows (SecurityGroupImpl, PolicyImpl, OLATResourceImpl) while the
     * declared return type is List&lt;Identity&gt; - confirm against the implementation.
     *
     * @param identity
     * @return see note above
     */
    public List<Identity> getPoliciesOfIdentity(Identity identity);

    /**
     * @param authusername
     * @param provider
     * @return Authentication for this authusername and provider or NULL if not found
     */
    public Authentication findAuthenticationByAuthusername(String authusername, String provider);

    /**
     * Get a list of identities that match the following conditions. All parameters are additive. NULL values mean "no constraints" (e.g. all parameters NULL would result
     * in a list with all identities of the entire system)
     *
     * @param login
     * @param userProperties Map of user properties that needs to be matched.
     * @param userPropertiesAsIntersectionSearch true: user properties and login name are combined with an AND query; false: user properties and login name are combined
     *            with an OR query
     * @param groups Array of SecurityGroups the user participates in. Search matches if user is in any of the groups (OR query)
     * @param permissionOnResources Array of resource permissions the user has. Search matches if user has any of the permissions (OR query)
     * @param authProviders Array of authentication providers the user has. Search matches if user has any of the authProviders (OR query)
     * @param createdAfter date after which the user has been created
     * @param createdBefore date before which the user has been created
     * @return List of identities
     */
    public List<Identity> getVisibleIdentitiesByPowerSearch(String login, Map<String, String> userProperties, boolean userPropertiesAsIntersectionSearch,
            SecurityGroup[] groups, PermissionOnResourceable[] permissionOnResources, String[] authProviders, Date createdAfter, Date createdBefore);

    /**
     * Get a list of identities that match the following conditions. All parameters are additive. NULL values mean "no constraints" (e.g. all parameters NULL would result
     * in a list with all identities of the entire system)
     *
     * @param login
     * @param userProperties Map of user properties that needs to be matched.
     * @param userPropertiesAsIntersectionSearch true: user properties and login name are combined with an AND query; false: user properties and login name are combined
     *            with an OR query
     * @param groups Array of SecurityGroups the user participates in. Search matches if user is in any of the groups (OR query)
     * @param permissionOnResources Array of resource permissions the user has. Search matches if user has any of the permissions (OR query)
     * @param authProviders Array of authentication providers the user has. Search matches if user has any of the authProviders (OR query)
     * @param createdAfter date after which the user has been created
     * @param createdBefore date before which the user has been created
     * @param userLoginAfter date after the user has logged in the last time
     * @param userLoginBefore date before the user has logged in the last time
     * @param status identity status, defined in interface Identity e.g. ACTIV, LOGIN_DENIED, DELETED
     * @return List of identities
     */
    public List<Identity> getIdentitiesByPowerSearch(String login, Map<String, String> userProperties, boolean userPropertiesAsIntersectionSearch,
            SecurityGroup[] groups, PermissionOnResourceable[] permissionOnResources, String[] authProviders, Date createdAfter, Date createdBefore, Date userLoginAfter,
            Date userLoginBefore, Integer status);

    /**
     * Save an identity's status
     *
     * @param identity Save this identity
     * @param status the new status to persist
     */
    public void saveIdentityStatus(Identity identity, Integer status);

    /**
     * Check if identity is visible. Deleted or login-denied users are not visible.
     *
     * @param identityName
     * @return true if the identity is visible
     */
    public boolean isIdentityVisible(String identityName);

    /**
     * Get all SecurityGroups an Identity is in
     *
     * @param identity
     * @return List with SecurityGroups
     */
    public List<SecurityGroup> getSecurityGroupsForIdentity(Identity identity);

    /**
     * Returns the anonymous identity for a given locale, normally used to log in as guest user
     *
     * @param locale
     * @return The identity
     */
    public Identity getAndUpdateAnonymousUserForLanguage(Locale locale);
}
groboclown/p4ic4idea | p4java/r19-1/src/test/java/com/perforce/p4java/tests/dev/unit/features101/GetMatchingLinesTest.java | 6090 | /**
*
*/
package com.perforce.p4java.tests.dev.unit.features101;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.List;
import org.junit.Test;
import com.perforce.p4java.core.IFileLineMatch;
import com.perforce.p4java.core.IFileLineMatch.MatchType;
import com.perforce.p4java.core.file.FileSpecBuilder;
import com.perforce.p4java.option.server.MatchingLinesOptions;
import com.perforce.p4java.server.IOptionsServer;
import com.perforce.p4java.tests.dev.annotations.TestId;
import com.perforce.p4java.tests.dev.unit.P4JavaTestCase;
/**
* Tests IOptionsServer.getMatchingLines as best it can...<p>
*
* Relies <i>heavily</i> on no one changing what's in DEPOT_PATH_ROOT
* files and file tree; all bets are off if these change.
*/
@TestId("Features101_GetMatchingLinesTest")
public class GetMatchingLinesTest extends P4JavaTestCase {
/** Wildcard depot path covering all fixture files used by the matching-lines tests. */
public static final String DEPOT_PATH_ROOT = "//depot/basic/readonly/grep/...";
/** Same root without the trailing wildcard; used to validate returned depot paths. */
public static final String DEPOT_PATH_ROOT_PREFIX = "//depot/basic/readonly/grep";
/** Default constructor; no per-test setup required. */
public GetMatchingLinesTest() {
}
/**
 * Exercises the basic getMatchingLines option combinations against the live
 * test server. The expected match counts (8, 5331, 117, 5222) are tied to the
 * fixture files under DEPOT_PATH_ROOT and break if those files change.
 */
@Test
public void testGetMatchingLinesBasics() {
    IOptionsServer server = null;
    try {
        server = getServer();
        // Default options: case-sensitive match, no line numbers.
        List<IFileLineMatch> matches = server.getMatchingLines(
            FileSpecBuilder.makeFileSpecList(DEPOT_PATH_ROOT),
            "P4Java",
            null);
        checkBasics(matches, 8, false);
        // Same search, requesting line numbers via the setter.
        matches = server.getMatchingLines(
            FileSpecBuilder.makeFileSpecList(DEPOT_PATH_ROOT),
            "P4Java",
            new MatchingLinesOptions().setIncludeLineNumbers(true));
        checkBasics(matches, 8, true);
        // Same again, requesting line numbers via the raw "-n" option string.
        matches = server.getMatchingLines(
            FileSpecBuilder.makeFileSpecList(DEPOT_PATH_ROOT),
            "P4Java",
            new MatchingLinesOptions("-n"));
        checkBasics(matches, 8, true);
        // Full-constructor form: non-matching lines with line numbers.
        matches = server.getMatchingLines(
            FileSpecBuilder.makeFileSpecList(DEPOT_PATH_ROOT),
            "P4Java",
            new MatchingLinesOptions(
                false,
                false,
                true,
                true,
                false,
                0,
                0,
                0,
                false));
        checkBasics(matches, 5331, true);
        // Case-insensitive match widens the result set.
        matches = server.getMatchingLines(
            FileSpecBuilder.makeFileSpecList(DEPOT_PATH_ROOT),
            "P4Java",
            new MatchingLinesOptions().setCaseInsensitive(true));
        checkBasics(matches, 117, false);
        // Case-insensitive inverted match (lines NOT containing the pattern).
        matches = server.getMatchingLines(
            FileSpecBuilder.makeFileSpecList(DEPOT_PATH_ROOT),
            "P4Java",
            new MatchingLinesOptions().setCaseInsensitive(true).setNonMatchingLines(true));
        checkBasics(matches, 5222, false);
    } catch (Exception exc) {
        fail("Unexpected exception: " + exc.getLocalizedMessage());
    } finally {
        if (server != null) {
            this.endServerSession(server);
        }
    }
}
/**
 * Exercises the context-line options of getMatchingLines (output context,
 * leading context, trailing context) against the live test server and checks
 * that only the expected match types appear in the results.
 */
@Test
public void testContexts() {
    IOptionsServer p4Server = null;
    try {
        p4Server = getServer();
        // Symmetric context (2 lines either side) with line numbers on:
        // expect MATCH, BEFORE and AFTER entries.
        List<IFileLineMatch> results = p4Server.getMatchingLines(
            FileSpecBuilder.makeFileSpecList(DEPOT_PATH_ROOT),
            "P4Java",
            new MatchingLinesOptions().setOutputContext(2).setIncludeLineNumbers(true));
        checkContexts(results, 0, true, true, true);
        // Leading context only, line numbers explicitly off:
        // expect MATCH and BEFORE entries.
        results = p4Server.getMatchingLines(
            FileSpecBuilder.makeFileSpecList(DEPOT_PATH_ROOT),
            "P4Java",
            new MatchingLinesOptions().setLeadingContext(2).setIncludeLineNumbers(false));
        checkContexts(results, 0, false, true, false);
        // Trailing context only: expect MATCH and AFTER entries.
        results = p4Server.getMatchingLines(
            FileSpecBuilder.makeFileSpecList(DEPOT_PATH_ROOT),
            "P4Java",
            new MatchingLinesOptions().setTrailingContext(2));
        checkContexts(results, 0, false, false, true);
    } catch (Exception exc) {
        fail("Unexpected exception: " + exc.getLocalizedMessage());
    } finally {
        if (p4Server != null) {
            this.endServerSession(p4Server);
        }
    }
}
/**
 * Verifies a plain (non-context) matching-lines result: every entry must be
 * a MATCH line under the expected depot root, with a line number only when
 * the line-number option was used.
 *
 * @param matches       result list to verify (must not be null)
 * @param expectedSize  exact expected result count; negative skips the check
 * @param checkLineNos  true if the request enabled line numbers
 */
private void checkBasics(List<IFileLineMatch> matches, int expectedSize,
        boolean checkLineNos) {
    assertNotNull(matches);
    if (expectedSize >= 0) {
        assertEquals(expectedSize, matches.size());
    }
    for (IFileLineMatch match : matches) {
        assertNotNull(match);
        // Fix: the original also called assertEquals(MatchType.MATCH, ...)
        // immediately after this, but checkMatchTypes(type, true, false,
        // false) already performs exactly that assertion; the duplicate
        // check has been removed.
        checkMatchTypes(match.getType(), true, false, false);
        assertNotNull(match.getDepotFile());
        assertTrue(match.getDepotFile().startsWith(DEPOT_PATH_ROOT_PREFIX));
        if (checkLineNos) {
            assertTrue("line number set with no -n option", match.getLineNumber() > 0);
        } else {
            assertEquals("line number not set with -n option", -1, match.getLineNumber());
        }
        assertTrue(match.getRevision() > 0);
    }
}
/**
 * Verifies a matching-lines result that may include context (BEFORE/AFTER)
 * lines in addition to MATCH lines.
 *
 * @param matches      result list to verify (must not be null)
 * @param expectedSize exact expected result count; values <= 0 skip the
 *                     size check (existing callers pass 0)
 * @param checkLineNos true if the request enabled line numbers
 * @param checkBefore  true if BEFORE (leading-context) entries are allowed
 * @param checkAfter   true if AFTER (trailing-context) entries are allowed
 */
private void checkContexts(List<IFileLineMatch> matches, int expectedSize, boolean checkLineNos,
        boolean checkBefore, boolean checkAfter) {
    assertNotNull(matches);
    // Fix: expectedSize was accepted but never used. Honor it when positive;
    // non-positive values keep the old "no size check" behavior so current
    // call sites (which pass 0) are unaffected.
    if (expectedSize > 0) {
        assertEquals(expectedSize, matches.size());
    }
    for (IFileLineMatch match : matches) {
        assertNotNull(match);
        checkMatchTypes(match.getType(), true, checkBefore, checkAfter);
        assertNotNull(match.getDepotFile());
        assertTrue(match.getDepotFile().startsWith(DEPOT_PATH_ROOT_PREFIX));
        if (checkLineNos) {
            assertTrue("line number set with no -n option", match.getLineNumber() > 0);
        } else {
            assertEquals("line number not set with -n option", -1, match.getLineNumber());
        }
        assertTrue(match.getRevision() > 0);
    }
}
/**
 * Asserts that the given match type is one of the kinds enabled by the
 * boolean flags. At least one flag must be set; calling with all flags
 * false is a test-authoring error and fails immediately.
 */
private void checkMatchTypes(MatchType matchType, boolean match, boolean before, boolean after) {
    assertNotNull(matchType);
    if (!match && !before && !after) {
        fail("?");
    }
    // The type is acceptable exactly when its corresponding flag is enabled.
    boolean acceptable = (match && matchType == MatchType.MATCH)
            || (before && matchType == MatchType.BEFORE)
            || (after && matchType == MatchType.AFTER);
    assertTrue(acceptable);
}
}
| apache-2.0 |
MaxRau/CoffeeMud | com/planet_ink/coffee_mud/core/collections/Converter.java | 698 | package com.planet_ink.coffee_mud.core.collections;
/*
Copyright 2000-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Minimal single-method contract for transforming an object of one type
 * into an object of another.
 *
 * @param <K> the source type accepted by the converter
 * @param <L> the target type produced by the converter
 */
public interface Converter<K, L>
{
    /**
     * Converts the given object into the target type.
     *
     * @param obj the object to convert
     * @return the converted value
     */
    public L convert(K obj);
}
| apache-2.0 |
joansmith/spring-boot | spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/ServerProperties.java | 25508 | /*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.web;
import java.io.File;
import java.net.InetAddress;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.SessionCookieConfig;
import javax.servlet.SessionTrackingMode;
import javax.validation.constraints.NotNull;
import org.apache.catalina.Context;
import org.apache.catalina.connector.Connector;
import org.apache.catalina.valves.AccessLogValve;
import org.apache.catalina.valves.RemoteIpValve;
import org.apache.coyote.AbstractProtocol;
import org.apache.coyote.ProtocolHandler;
import org.apache.coyote.http11.AbstractHttp11Protocol;
import org.springframework.boot.autoconfigure.web.ServerProperties.Session.Cookie;
import org.springframework.boot.cloud.CloudPlatform;
import org.springframework.boot.context.embedded.Compression;
import org.springframework.boot.context.embedded.ConfigurableEmbeddedServletContainer;
import org.springframework.boot.context.embedded.EmbeddedServletContainerCustomizer;
import org.springframework.boot.context.embedded.EmbeddedServletContainerCustomizerBeanPostProcessor;
import org.springframework.boot.context.embedded.EmbeddedServletContainerFactory;
import org.springframework.boot.context.embedded.InitParameterConfiguringServletContextInitializer;
import org.springframework.boot.context.embedded.JspServlet;
import org.springframework.boot.context.embedded.ServletContextInitializer;
import org.springframework.boot.context.embedded.Ssl;
import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainerFactory;
import org.springframework.boot.context.embedded.tomcat.TomcatConnectorCustomizer;
import org.springframework.boot.context.embedded.tomcat.TomcatContextCustomizer;
import org.springframework.boot.context.embedded.tomcat.TomcatEmbeddedServletContainerFactory;
import org.springframework.boot.context.embedded.undertow.UndertowEmbeddedServletContainerFactory;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.NestedConfigurationProperty;
import org.springframework.context.EnvironmentAware;
import org.springframework.core.Ordered;
import org.springframework.core.env.Environment;
import org.springframework.util.StringUtils;
/**
* {@link ConfigurationProperties} for a web server (e.g. port and path settings). Will be
* used to customize an {@link EmbeddedServletContainerFactory} when an
* {@link EmbeddedServletContainerCustomizerBeanPostProcessor} is active.
*
* @author Dave Syer
* @author Stephane Nicoll
* @author Andy Wilkinson
* @author Ivan Sopov
* @author Marcos Barbero
* @author Eddú Meléndez
*/
@ConfigurationProperties(prefix = "server", ignoreUnknownFields = true)
public class ServerProperties
implements EmbeddedServletContainerCustomizer, EnvironmentAware, Ordered {
/**
* Server HTTP port.
*/
private Integer port;
/**
* Network address to which the server should bind to.
*/
private InetAddress address;
/**
* Context path of the application.
*/
private String contextPath;
/**
* Display name of the application.
*/
private String displayName = "application";
@NestedConfigurationProperty
private ErrorProperties error = new ErrorProperties();
/**
* Path of the main dispatcher servlet.
*/
@NotNull
private String servletPath = "/";
/**
* ServletContext parameters.
*/
private final Map<String, String> contextParameters = new HashMap<String, String>();
/**
* If X-Forwarded-* headers should be applied to the HttpRequest.
*/
private Boolean useForwardHeaders;
/**
* Value to use for the server header (uses servlet container default if empty).
*/
private String serverHeader;
private Session session = new Session();
@NestedConfigurationProperty
private Ssl ssl;
@NestedConfigurationProperty
private Compression compression = new Compression();
@NestedConfigurationProperty
private JspServlet jspServlet;
private final Tomcat tomcat = new Tomcat();
private final Jetty jetty = new Jetty();
private final Undertow undertow = new Undertow();
private Environment environment;
@Override
public int getOrder() {
return 0;
}
@Override
public void setEnvironment(Environment environment) {
this.environment = environment;
}
@Override
public void customize(ConfigurableEmbeddedServletContainer container) {
    // Applies every configured "server.*" property to the container
    // factory. Null (unset) values are skipped so the container's own
    // defaults remain in effect.
    if (getPort() != null) {
        container.setPort(getPort());
    }
    if (getAddress() != null) {
        container.setAddress(getAddress());
    }
    if (getContextPath() != null) {
        container.setContextPath(getContextPath());
    }
    if (getDisplayName() != null) {
        container.setDisplayName(getDisplayName());
    }
    if (getSession().getTimeout() != null) {
        container.setSessionTimeout(getSession().getTimeout());
    }
    container.setPersistSession(getSession().isPersistent());
    container.setSessionStoreDir(getSession().getStoreDir());
    if (getSsl() != null) {
        container.setSsl(getSsl());
    }
    if (getJspServlet() != null) {
        container.setJspServlet(getJspServlet());
    }
    if (getCompression() != null) {
        container.setCompression(getCompression());
    }
    container.setServerHeader(getServerHeader());
    // Container-specific tuning: only the branch matching the concrete
    // factory type applies.
    if (container instanceof TomcatEmbeddedServletContainerFactory) {
        getTomcat().customizeTomcat(this,
                (TomcatEmbeddedServletContainerFactory) container);
    }
    if (container instanceof JettyEmbeddedServletContainerFactory) {
        getJetty().customizeJetty(this,
                (JettyEmbeddedServletContainerFactory) container);
    }
    if (container instanceof UndertowEmbeddedServletContainerFactory) {
        getUndertow().customizeUndertow(this,
                (UndertowEmbeddedServletContainerFactory) container);
    }
    // Servlet-context level initializers: session cookie/tracking-mode
    // configuration and user-supplied context init parameters.
    container.addInitializers(new SessionConfiguringInitializer(this.session));
    container.addInitializers(new InitParameterConfiguringServletContextInitializer(
            getContextParameters()));
}
/**
 * Derives the dispatcher servlet's URL mapping pattern from the configured
 * servlet path.
 *
 * @return "/" for an empty or root path, the path itself if it already
 *         contains a wildcard, otherwise the path suffixed with a wildcard
 */
public String getServletMapping() {
    String path = this.servletPath;
    if (path.equals("") || path.equals("/")) {
        return "/";
    }
    if (path.contains("*")) {
        // Already an explicit pattern; use it as-is.
        return path;
    }
    return path.endsWith("/") ? path + "*" : path + "/*";
}
/**
 * Prefixes the given path with the servlet prefix, inserting a leading
 * slash when the input lacks one.
 *
 * @param path the path to prefix
 * @return the servlet-prefixed path
 */
public String getPath(String path) {
    String normalized = path.startsWith("/") ? path : "/" + path;
    return getServletPrefix() + normalized;
}
/**
 * Returns the servlet path with any wildcard suffix and trailing slash
 * removed, suitable for use as a path prefix.
 */
public String getServletPrefix() {
    String prefix = this.servletPath;
    // Truncate at the first wildcard, if present.
    int wildcard = prefix.indexOf('*');
    if (wildcard >= 0) {
        prefix = prefix.substring(0, wildcard);
    }
    // Drop a single trailing slash.
    if (prefix.endsWith("/")) {
        prefix = prefix.substring(0, prefix.length() - 1);
    }
    return prefix;
}
/**
 * Applies {@link #getPath(String)} to every entry, preserving the
 * collection's iteration order.
 *
 * @param paths the paths to prefix
 * @return a new array of servlet-prefixed paths
 */
public String[] getPathsArray(Collection<String> paths) {
    String[] prefixed = new String[paths.size()];
    int index = 0;
    for (String path : paths) {
        prefixed[index] = getPath(path);
        index++;
    }
    return prefixed;
}
/**
 * Array variant of {@link #getPathsArray(Collection)}: applies
 * {@link #getPath(String)} to every element in order.
 *
 * @param paths the paths to prefix
 * @return a new array of servlet-prefixed paths
 */
public String[] getPathsArray(String[] paths) {
    String[] prefixed = new String[paths.length];
    for (int i = 0; i < paths.length; i++) {
        prefixed[i] = getPath(paths[i]);
    }
    return prefixed;
}
public void setLoader(String value) {
// no op to support Tomcat running as a traditional container (not embedded)
}
public Integer getPort() {
return this.port;
}
public void setPort(Integer port) {
this.port = port;
}
public InetAddress getAddress() {
return this.address;
}
public void setAddress(InetAddress address) {
this.address = address;
}
public String getContextPath() {
return this.contextPath;
}
public void setContextPath(String contextPath) {
this.contextPath = cleanContextPath(contextPath);
}
/**
 * Normalizes the context path by dropping a single trailing slash, so
 * "/app/" and "/app" are treated the same. Null/blank input is returned
 * unchanged.
 */
private String cleanContextPath(String contextPath) {
    boolean hasTrailingSlash = StringUtils.hasText(contextPath)
            && contextPath.endsWith("/");
    return hasTrailingSlash
            ? contextPath.substring(0, contextPath.length() - 1)
            : contextPath;
}
public String getDisplayName() {
return this.displayName;
}
public void setDisplayName(String displayName) {
this.displayName = displayName;
}
public String getServletPath() {
return this.servletPath;
}
public void setServletPath(String servletPath) {
this.servletPath = servletPath;
}
public Map<String, String> getContextParameters() {
return this.contextParameters;
}
public Boolean isUseForwardHeaders() {
return this.useForwardHeaders;
}
public void setUseForwardHeaders(Boolean useForwardHeaders) {
this.useForwardHeaders = useForwardHeaders;
}
public String getServerHeader() {
return this.serverHeader;
}
public void setServerHeader(String serverHeader) {
this.serverHeader = serverHeader;
}
/**
 * Resolves whether X-Forwarded-* headers should be honored: an explicit
 * "server.use-forward-headers" setting always wins; otherwise the value is
 * deduced from the active cloud platform (false when none is detected).
 */
protected final boolean getOrDeduceUseForwardHeaders() {
    if (this.useForwardHeaders != null) {
        return this.useForwardHeaders;
    }
    CloudPlatform platform = CloudPlatform.getActive(this.environment);
    return platform != null && platform.isUsingForwardHeaders();
}
public ErrorProperties getError() {
return this.error;
}
public Session getSession() {
return this.session;
}
public void setSession(Session session) {
this.session = session;
}
public Ssl getSsl() {
return this.ssl;
}
public void setSsl(Ssl ssl) {
this.ssl = ssl;
}
public Compression getCompression() {
return this.compression;
}
public JspServlet getJspServlet() {
return this.jspServlet;
}
public void setJspServlet(JspServlet jspServlet) {
this.jspServlet = jspServlet;
}
public Tomcat getTomcat() {
return this.tomcat;
}
private Jetty getJetty() {
return this.jetty;
}
public Undertow getUndertow() {
return this.undertow;
}
public static class Session {
/**
* Session timeout in seconds.
*/
private Integer timeout;
/**
* Session tracking modes (one or more of the following: "cookie", "url", "ssl").
*/
private Set<SessionTrackingMode> trackingModes;
/**
* Persist session data between restarts.
*/
private boolean persistent;
/**
* Directory used to store session data.
*/
private File storeDir;
private Cookie cookie = new Cookie();
public Cookie getCookie() {
return this.cookie;
}
public Integer getTimeout() {
return this.timeout;
}
public void setTimeout(Integer sessionTimeout) {
this.timeout = sessionTimeout;
}
public Set<SessionTrackingMode> getTrackingModes() {
return this.trackingModes;
}
public void setTrackingModes(Set<SessionTrackingMode> trackingModes) {
this.trackingModes = trackingModes;
}
public boolean isPersistent() {
return this.persistent;
}
public void setPersistent(boolean persistent) {
this.persistent = persistent;
}
public File getStoreDir() {
return this.storeDir;
}
public void setStoreDir(File storeDir) {
this.storeDir = storeDir;
}
public static class Cookie {
/**
* Session cookie name.
*/
private String name;
/**
* Domain for the session cookie.
*/
private String domain;
/**
* Path of the session cookie.
*/
private String path;
/**
* Comment for the session cookie.
*/
private String comment;
/**
* "HttpOnly" flag for the session cookie.
*/
private Boolean httpOnly;
/**
* "Secure" flag for the session cookie.
*/
private Boolean secure;
/**
* Maximum age of the session cookie in seconds.
*/
private Integer maxAge;
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public String getDomain() {
return this.domain;
}
public void setDomain(String domain) {
this.domain = domain;
}
public String getPath() {
return this.path;
}
public void setPath(String path) {
this.path = path;
}
public String getComment() {
return this.comment;
}
public void setComment(String comment) {
this.comment = comment;
}
public Boolean getHttpOnly() {
return this.httpOnly;
}
public void setHttpOnly(Boolean httpOnly) {
this.httpOnly = httpOnly;
}
public Boolean getSecure() {
return this.secure;
}
public void setSecure(Boolean secure) {
this.secure = secure;
}
public Integer getMaxAge() {
return this.maxAge;
}
public void setMaxAge(Integer maxAge) {
this.maxAge = maxAge;
}
}
}
public static class Tomcat {
/**
* Access log configuration.
*/
private final Accesslog accesslog = new Accesslog();
/**
* Regular expression that matches proxies that are to be trusted.
*/
private String internalProxies = "10\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}|" // 10/8
+ "192\\.168\\.\\d{1,3}\\.\\d{1,3}|" // 192.168/16
+ "169\\.254\\.\\d{1,3}\\.\\d{1,3}|" // 169.254/16
+ "127\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}|" // 127/8
+ "172\\.1[6-9]{1}\\.\\d{1,3}\\.\\d{1,3}|" // 172.16/12
+ "172\\.2[0-9]{1}\\.\\d{1,3}\\.\\d{1,3}|"
+ "172\\.3[0-1]{1}\\.\\d{1,3}\\.\\d{1,3}";
/**
* Header that holds the incoming protocol, usually named "X-Forwarded-Proto".
*/
private String protocolHeader;
/**
* Value of the protocol header that indicates that the incoming request uses SSL.
*/
private String protocolHeaderHttpsValue = "https";
/**
* Name of the HTTP header used to override the original port value.
*/
private String portHeader = "X-Forwarded-Port";
/**
* Name of the http header from which the remote ip is extracted..
*/
private String remoteIpHeader;
/**
* Tomcat base directory. If not specified a temporary directory will be used.
*/
private File basedir;
/**
* Delay in seconds between the invocation of backgroundProcess methods.
*/
private int backgroundProcessorDelay = 30; // seconds
/**
* Maximum amount of worker threads.
*/
private int maxThreads = 0; // Number of threads in protocol handler
/**
* Maximum size in bytes of the HTTP message header.
*/
private int maxHttpHeaderSize = 0; // bytes
/**
* Character encoding to use to decode the URI.
*/
private Charset uriEncoding;
public int getMaxThreads() {
return this.maxThreads;
}
public void setMaxThreads(int maxThreads) {
this.maxThreads = maxThreads;
}
public int getMaxHttpHeaderSize() {
return this.maxHttpHeaderSize;
}
public void setMaxHttpHeaderSize(int maxHttpHeaderSize) {
this.maxHttpHeaderSize = maxHttpHeaderSize;
}
public Accesslog getAccesslog() {
return this.accesslog;
}
public int getBackgroundProcessorDelay() {
return this.backgroundProcessorDelay;
}
public void setBackgroundProcessorDelay(int backgroundProcessorDelay) {
this.backgroundProcessorDelay = backgroundProcessorDelay;
}
public File getBasedir() {
return this.basedir;
}
public void setBasedir(File basedir) {
this.basedir = basedir;
}
public String getInternalProxies() {
return this.internalProxies;
}
public void setInternalProxies(String internalProxies) {
this.internalProxies = internalProxies;
}
public String getProtocolHeader() {
return this.protocolHeader;
}
public void setProtocolHeader(String protocolHeader) {
this.protocolHeader = protocolHeader;
}
public String getProtocolHeaderHttpsValue() {
return this.protocolHeaderHttpsValue;
}
public void setProtocolHeaderHttpsValue(String protocolHeaderHttpsValue) {
this.protocolHeaderHttpsValue = protocolHeaderHttpsValue;
}
public String getPortHeader() {
return this.portHeader;
}
public void setPortHeader(String portHeader) {
this.portHeader = portHeader;
}
public String getRemoteIpHeader() {
return this.remoteIpHeader;
}
public void setRemoteIpHeader(String remoteIpHeader) {
this.remoteIpHeader = remoteIpHeader;
}
public Charset getUriEncoding() {
return this.uriEncoding;
}
public void setUriEncoding(Charset uriEncoding) {
this.uriEncoding = uriEncoding;
}
// Applies all "server.tomcat.*" settings to the Tomcat factory. Zero and
// null values mean "keep Tomcat's own default" for the respective knob.
void customizeTomcat(ServerProperties serverProperties,
        TomcatEmbeddedServletContainerFactory factory) {
    if (getBasedir() != null) {
        factory.setBaseDirectory(getBasedir());
    }
    customizeBackgroundProcessorDelay(factory);
    customizeRemoteIpValve(serverProperties, factory);
    // maxThreads/maxHttpHeaderSize default to 0 == "not configured".
    if (this.maxThreads > 0) {
        customizeMaxThreads(factory);
    }
    if (this.maxHttpHeaderSize > 0) {
        customizeMaxHttpHeaderSize(factory);
    }
    if (this.accesslog.enabled) {
        customizeAccessLog(factory);
    }
    if (getUriEncoding() != null) {
        factory.setUriEncoding(getUriEncoding());
    }
}
private void customizeBackgroundProcessorDelay(
TomcatEmbeddedServletContainerFactory factory) {
factory.addContextCustomizers(new TomcatContextCustomizer() {
@Override
public void customize(Context context) {
context.setBackgroundProcessorDelay(
Tomcat.this.backgroundProcessorDelay);
}
});
}
// Installs Tomcat's RemoteIpValve so requests arriving via a proxy report
// the original client protocol/address. The valve is activated when either
// header property is set explicitly, or when forward-header support was
// deduced from the environment (see getOrDeduceUseForwardHeaders).
private void customizeRemoteIpValve(ServerProperties properties,
        TomcatEmbeddedServletContainerFactory factory) {
    String protocolHeader = getProtocolHeader();
    String remoteIpHeader = getRemoteIpHeader();
    // For back compatibility the valve is also enabled if protocol-header is set
    if (StringUtils.hasText(protocolHeader) || StringUtils.hasText(remoteIpHeader)
            || properties.getOrDeduceUseForwardHeaders()) {
        RemoteIpValve valve = new RemoteIpValve();
        // Fall back to the conventional header name when none was given.
        valve.setProtocolHeader(StringUtils.hasLength(protocolHeader)
                ? protocolHeader : "X-Forwarded-Proto");
        if (StringUtils.hasLength(remoteIpHeader)) {
            valve.setRemoteIpHeader(remoteIpHeader);
        }
        // The internal proxies default to a white list of "safe" internal IP
        // addresses
        valve.setInternalProxies(getInternalProxies());
        valve.setPortHeader(getPortHeader());
        valve.setProtocolHeaderHttpsValue(getProtocolHeaderHttpsValue());
        // ... so it's safe to add this valve by default.
        factory.addContextValves(valve);
    }
}
@SuppressWarnings("rawtypes")
private void customizeMaxThreads(TomcatEmbeddedServletContainerFactory factory) {
factory.addConnectorCustomizers(new TomcatConnectorCustomizer() {
@Override
public void customize(Connector connector) {
ProtocolHandler handler = connector.getProtocolHandler();
if (handler instanceof AbstractProtocol) {
AbstractProtocol protocol = (AbstractProtocol) handler;
protocol.setMaxThreads(Tomcat.this.maxThreads);
}
}
});
}
@SuppressWarnings("rawtypes")
private void customizeMaxHttpHeaderSize(
TomcatEmbeddedServletContainerFactory factory) {
factory.addConnectorCustomizers(new TomcatConnectorCustomizer() {
@Override
public void customize(Connector connector) {
ProtocolHandler handler = connector.getProtocolHandler();
if (handler instanceof AbstractHttp11Protocol) {
AbstractHttp11Protocol protocol = (AbstractHttp11Protocol) handler;
protocol.setMaxHttpHeaderSize(Tomcat.this.maxHttpHeaderSize);
}
}
});
}
/**
 * Registers a Tomcat access-log valve configured from the
 * "server.tomcat.accesslog.*" properties.
 */
private void customizeAccessLog(TomcatEmbeddedServletContainerFactory factory) {
    AccessLogValve accessLogValve = new AccessLogValve();
    accessLogValve.setPattern(this.accesslog.getPattern());
    accessLogValve.setDirectory(this.accesslog.getDirectory());
    accessLogValve.setPrefix(this.accesslog.getPrefix());
    accessLogValve.setSuffix(this.accesslog.getSuffix());
    factory.addContextValves(accessLogValve);
}
public static class Accesslog {
/**
* Enable access log.
*/
private boolean enabled = false;
/**
* Format pattern for access logs.
*/
private String pattern = "common";
/**
* Directory in which log files are created. Can be relative to the tomcat
* base dir or absolute.
*/
private String directory = "logs";
/**
* Log file name prefix.
*/
protected String prefix = "access_log";
/**
* Log file name suffix.
*/
private String suffix = ".log";
public boolean isEnabled() {
return this.enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public String getPattern() {
return this.pattern;
}
public void setPattern(String pattern) {
this.pattern = pattern;
}
public String getDirectory() {
return this.directory;
}
public void setDirectory(String directory) {
this.directory = directory;
}
public String getPrefix() {
return this.prefix;
}
public void setPrefix(String prefix) {
this.prefix = prefix;
}
public String getSuffix() {
return this.suffix;
}
public void setSuffix(String suffix) {
this.suffix = suffix;
}
}
}
/**
 * Jetty-specific customizations. Currently the only Jetty-level knob is
 * forward-header (X-Forwarded-*) support.
 */
private static class Jetty {
    void customizeJetty(ServerProperties serverProperties,
            JettyEmbeddedServletContainerFactory factory) {
        // Enabled explicitly or deduced from the active cloud platform.
        factory.setUseForwardHeaders(serverProperties.getOrDeduceUseForwardHeaders());
    }
}
public static class Undertow {
/**
* Size of each buffer in bytes.
*/
private Integer bufferSize;
/**
* Number of buffer per region.
*/
private Integer buffersPerRegion;
/**
* Number of I/O threads to create for the worker.
*/
private Integer ioThreads;
/**
* Number of worker threads.
*/
private Integer workerThreads;
/**
* Allocate buffers outside the Java heap.
*/
private Boolean directBuffers;
private final Accesslog accesslog = new Accesslog();
public Integer getBufferSize() {
return this.bufferSize;
}
public void setBufferSize(Integer bufferSize) {
this.bufferSize = bufferSize;
}
public Integer getBuffersPerRegion() {
return this.buffersPerRegion;
}
public void setBuffersPerRegion(Integer buffersPerRegion) {
this.buffersPerRegion = buffersPerRegion;
}
public Integer getIoThreads() {
return this.ioThreads;
}
public void setIoThreads(Integer ioThreads) {
this.ioThreads = ioThreads;
}
public Integer getWorkerThreads() {
return this.workerThreads;
}
public void setWorkerThreads(Integer workerThreads) {
this.workerThreads = workerThreads;
}
public Boolean getDirectBuffers() {
return this.directBuffers;
}
public void setDirectBuffers(Boolean directBuffers) {
this.directBuffers = directBuffers;
}
public Accesslog getAccesslog() {
return this.accesslog;
}
// Copies all "server.undertow.*" tuning values onto the factory. Null
// values are passed through and mean "use Undertow's own default".
void customizeUndertow(ServerProperties serverProperties,
        UndertowEmbeddedServletContainerFactory factory) {
    factory.setBufferSize(this.bufferSize);
    factory.setBuffersPerRegion(this.buffersPerRegion);
    factory.setIoThreads(this.ioThreads);
    factory.setWorkerThreads(this.workerThreads);
    factory.setDirectBuffers(this.directBuffers);
    // Access-log settings are applied unconditionally; the enabled flag
    // controls whether the log is actually produced.
    factory.setAccessLogDirectory(this.accesslog.dir);
    factory.setAccessLogPattern(this.accesslog.pattern);
    factory.setAccessLogEnabled(this.accesslog.enabled);
    factory.setUseForwardHeaders(serverProperties.getOrDeduceUseForwardHeaders());
}
public static class Accesslog {
/**
* Enable access log.
*/
private boolean enabled = false;
/**
* Format pattern for access logs.
*/
private String pattern = "common";
/**
* Undertow access log directory.
*/
private File dir = new File("logs");
public boolean isEnabled() {
return this.enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public String getPattern() {
return this.pattern;
}
public void setPattern(String pattern) {
this.pattern = pattern;
}
public File getDir() {
return this.dir;
}
public void setDir(File dir) {
this.dir = dir;
}
}
}
/**
* {@link ServletContextInitializer} to apply appropriate parts of the {@link Session}
* configuration.
*/
private static class SessionConfiguringInitializer
        implements ServletContextInitializer {

    // Snapshot of the "server.session.*" configuration to apply at startup.
    private final Session session;

    SessionConfiguringInitializer(Session session) {
        this.session = session;
    }

    @Override
    public void onStartup(ServletContext servletContext) throws ServletException {
        // Tracking modes are only overridden when explicitly configured.
        if (this.session.getTrackingModes() != null) {
            servletContext.setSessionTrackingModes(this.session.getTrackingModes());
        }
        configureSessionCookie(servletContext.getSessionCookieConfig());
    }

    // Every cookie attribute is optional; only non-null values override the
    // servlet container's defaults.
    private void configureSessionCookie(SessionCookieConfig config) {
        Cookie cookie = this.session.getCookie();
        if (cookie.getName() != null) {
            config.setName(cookie.getName());
        }
        if (cookie.getDomain() != null) {
            config.setDomain(cookie.getDomain());
        }
        if (cookie.getPath() != null) {
            config.setPath(cookie.getPath());
        }
        if (cookie.getComment() != null) {
            config.setComment(cookie.getComment());
        }
        if (cookie.getHttpOnly() != null) {
            config.setHttpOnly(cookie.getHttpOnly());
        }
        if (cookie.getSecure() != null) {
            config.setSecure(cookie.getSecure());
        }
        if (cookie.getMaxAge() != null) {
            config.setMaxAge(cookie.getMaxAge());
        }
    }
}
}
| apache-2.0 |
nezirus/elasticsearch | core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java | 16494 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.index.query.MultiMatchQueryBuilder.Type;
import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertDisjunctionSubQuery;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatchQueryBuilder> {
private static final String MISSING_WILDCARD_FIELD_NAME = "missing_*";
private static final String MISSING_FIELD_NAME = "missing";
@Override
protected MultiMatchQueryBuilder doCreateTestQueryBuilder() {
    // Builds a randomized multi_match query for the round-trip test
    // framework. NOTE(review): the sequence of random*() calls determines
    // what a given test seed produces — do not reorder these blocks, or
    // seeded failures become unreproducible.
    String fieldName = randomFrom(STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME,
            MISSING_FIELD_NAME, MISSING_WILDCARD_FIELD_NAME);
    if (fieldName.equals(DATE_FIELD_NAME)) {
        assumeTrue("test with date fields runs only when at least a type is registered", getCurrentTypes().length > 0);
    }
    // creates the query with random value and field name
    Object value;
    if (fieldName.equals(STRING_FIELD_NAME)) {
        value = getRandomQueryText();
    } else {
        value = getRandomValueForFieldName(fieldName);
    }
    MultiMatchQueryBuilder query = new MultiMatchQueryBuilder(value, fieldName);
    // field with random boost
    if (randomBoolean()) {
        query.field(fieldName, randomFloat() * 10);
    }
    // sets other parameters of the multi match query
    if (randomBoolean()) {
        query.type(randomFrom(MultiMatchQueryBuilder.Type.values()));
    }
    if (randomBoolean()) {
        query.operator(randomFrom(Operator.values()));
    }
    // analyzer only makes sense for the analyzed string field
    if (randomBoolean() && fieldName.equals(STRING_FIELD_NAME)) {
        query.analyzer(randomAnalyzer());
    }
    if (randomBoolean()) {
        query.slop(randomIntBetween(0, 5));
    }
    // fuzziness is only applied for BEST_FIELDS / MOST_FIELDS on the text field
    if (fieldName.equals(STRING_FIELD_NAME) && randomBoolean() && (query.type() == Type.BEST_FIELDS || query.type() == Type.MOST_FIELDS)) {
        query.fuzziness(randomFuzziness(fieldName));
    }
    if (randomBoolean()) {
        query.prefixLength(randomIntBetween(0, 5));
    }
    if (randomBoolean()) {
        query.maxExpansions(randomIntBetween(1, 5));
    }
    if (randomBoolean()) {
        query.minimumShouldMatch(randomMinimumShouldMatch());
    }
    if (randomBoolean()) {
        query.fuzzyRewrite(getRandomRewriteMethod());
    }
    if (randomBoolean()) {
        query.useDisMax(randomBoolean());
    }
    if (randomBoolean()) {
        query.tieBreaker(randomFloat());
    }
    if (randomBoolean()) {
        query.lenient(randomBoolean());
    }
    if (randomBoolean()) {
        query.cutoffFrequency((float) 10 / randomIntBetween(1, 100));
    }
    if (randomBoolean()) {
        query.zeroTermsQuery(randomFrom(MatchQuery.ZeroTermsQuery.values()));
    }
    // test with fields with boost and patterns delegated to the tests further below
    return query;
}
/**
 * The "fields" key may be written as a single string instead of an array;
 * this alternate JSON form must parse to the same query builder.
 */
@Override
protected Map<String, MultiMatchQueryBuilder> getAlternateVersions() {
    Map<String, MultiMatchQueryBuilder> alternateVersions = new HashMap<>();
    String singleFieldJson = "{\n" +
            " \"multi_match\": {\n" +
            " \"query\": \"foo bar\",\n" +
            " \"fields\": \"myField\"\n" +
            " }\n" +
            "}";
    MultiMatchQueryBuilder expected = new MultiMatchQueryBuilder("foo bar", "myField");
    alternateVersions.put(singleFieldJson, expected);
    return alternateVersions;
}
@Override
protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
    // Only a shallow type check: multi_match rewrites to many different
    // Lucene query shapes depending on type/fields/analysis, so this just
    // asserts the result is one of the known possibilities.
    // we rely on integration tests for deeper checks here
    assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(TermQuery.class)).or(instanceOf(AllTermQuery.class))
            .or(instanceOf(BooleanQuery.class)).or(instanceOf(DisjunctionMaxQuery.class))
            .or(instanceOf(FuzzyQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class))
            .or(instanceOf(MatchAllDocsQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class))
            .or(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(PhraseQuery.class))
            .or(instanceOf(PointRangeQuery.class)).or(instanceOf(IndexOrDocValuesQuery.class)));
}
// NOTE(review): method name has a typo ("Illega"); kept as-is so the test's identity
// (e.g. in CI history and muted-test lists) does not change.
public void testIllegaArguments() {
    Class<IllegalArgumentException> rejected = IllegalArgumentException.class;
    // a null query value is rejected
    expectThrows(rejected, () -> new MultiMatchQueryBuilder(null, "field"));
    // the field array must not be null ...
    expectThrows(rejected, () -> new MultiMatchQueryBuilder("value", (String[]) null));
    // ... and must not contain empty field names
    expectThrows(rejected, () -> new MultiMatchQueryBuilder("value", new String[]{""}));
    // the match type must not be null either
    expectThrows(rejected, () -> new MultiMatchQueryBuilder("value", "field").type(null));
}
public void testToQueryBoost() throws IOException {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    QueryShardContext context = createShardContext();

    // A per-field boost alone is applied directly to the rewritten term query.
    MultiMatchQueryBuilder fieldBoostOnly = new MultiMatchQueryBuilder("test");
    fieldBoostOnly.field(STRING_FIELD_NAME, 5f);
    assertTermOrBoostQuery(fieldBoostOnly.toQuery(context), STRING_FIELD_NAME, "test", 5f);

    // Adding a top-level boost wraps the result in a BoostQuery while the
    // per-field boost still applies to the wrapped query.
    MultiMatchQueryBuilder withTopLevelBoost = new MultiMatchQueryBuilder("test");
    withTopLevelBoost.field(STRING_FIELD_NAME, 5f);
    withTopLevelBoost.boost(2f);
    Query rewritten = withTopLevelBoost.toQuery(context);
    assertThat(rewritten, instanceOf(BoostQuery.class));
    BoostQuery outer = (BoostQuery) rewritten;
    assertThat(outer.getBoost(), equalTo(2f));
    assertTermOrBoostQuery(outer.getQuery(), STRING_FIELD_NAME, "test", 5f);
}
public void testToQueryMultipleTermsBooleanQuery() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    // A multi-term query on a single field with dis-max disabled becomes a
    // boolean query containing one term clause per token.
    Query rewritten = multiMatchQuery("test1 test2").field(STRING_FIELD_NAME).useDisMax(false).toQuery(createShardContext());
    assertThat(rewritten, instanceOf(BooleanQuery.class));
    BooleanQuery booleanQuery = (BooleanQuery) rewritten;
    assertThat(booleanQuery.clauses().size(), equalTo(2));
    assertThat(assertBooleanSubQuery(rewritten, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test1")));
    assertThat(assertBooleanSubQuery(rewritten, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test2")));
}
public void testToQueryMultipleFieldsDisableDismax() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    Query rewritten = multiMatchQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(false).toQuery(createShardContext());
    // useDisMax(false) over multiple fields no longer produces a BooleanQuery:
    // it is emulated by a DisjunctionMaxQuery whose tie-breaker of 1.0 sums
    // the sub-scores, which is score-equivalent.
    assertThat(rewritten, instanceOf(DisjunctionMaxQuery.class));
    DisjunctionMaxQuery disMax = (DisjunctionMaxQuery) rewritten;
    assertThat(disMax.getTieBreakerMultiplier(), equalTo(1.0f));
    assertThat(disMax.getDisjuncts().size(), equalTo(2));
    assertThat(assertDisjunctionSubQuery(rewritten, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test")));
    assertThat(assertDisjunctionSubQuery(rewritten, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test")));
}
public void testToQueryMultipleFieldsDisMaxQuery() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    Query rewritten = multiMatchQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(true).toQuery(createShardContext());
    assertThat(rewritten, instanceOf(DisjunctionMaxQuery.class));
    DisjunctionMaxQuery disMax = (DisjunctionMaxQuery) rewritten;
    // Plain dis-max: tie-breaker 0.0, i.e. only the best field's score counts.
    assertThat(disMax.getTieBreakerMultiplier(), equalTo(0.0f));
    List<Query> clauses = disMax.getDisjuncts();
    assertThat(clauses.get(0), instanceOf(TermQuery.class));
    assertThat(((TermQuery) clauses.get(0)).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test")));
    assertThat(clauses.get(1), instanceOf(TermQuery.class));
    assertThat(((TermQuery) clauses.get(1)).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test")));
}
public void testToQueryFieldsWildcard() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    // The wildcard pattern expands to both mapped string fields; with dis-max
    // disabled the sum-of-scores dis-max form (tie-breaker 1.0) is produced.
    Query rewritten = multiMatchQuery("test").field("mapped_str*").useDisMax(false).toQuery(createShardContext());
    assertThat(rewritten, instanceOf(DisjunctionMaxQuery.class));
    DisjunctionMaxQuery disMax = (DisjunctionMaxQuery) rewritten;
    assertThat(disMax.getTieBreakerMultiplier(), equalTo(1.0f));
    assertThat(disMax.getDisjuncts().size(), equalTo(2));
    assertThat(assertDisjunctionSubQuery(rewritten, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test")));
    assertThat(assertDisjunctionSubQuery(rewritten, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test")));
}
public void testToQueryFieldMissing() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    // A wildcard pattern that matches no mapped field rewrites to match-nothing...
    Query wildcardQuery = multiMatchQuery("test").field(MISSING_WILDCARD_FIELD_NAME).toQuery(createShardContext());
    assertThat(wildcardQuery, instanceOf(MatchNoDocsQuery.class));
    // ...while an unmapped concrete field name still rewrites to a term query.
    Query unmappedQuery = multiMatchQuery("test").field(MISSING_FIELD_NAME).toQuery(createShardContext());
    assertThat(unmappedQuery, instanceOf(TermQuery.class));
}
/**
 * Parses a canonical multi_match JSON query, round-trips it through
 * checkGeneratedJson (which presumably compares the builder's regenerated JSON
 * against the input), and spot-checks a few of the parsed properties.
 */
public void testFromJson() throws IOException {
    String json =
        "{\n" +
        " \"multi_match\" : {\n" +
        " \"query\" : \"quick brown fox\",\n" +
        " \"fields\" : [ \"title^1.0\", \"title.original^1.0\", \"title.shingles^1.0\" ],\n" +
        " \"type\" : \"most_fields\",\n" +
        " \"operator\" : \"OR\",\n" +
        " \"slop\" : 0,\n" +
        " \"prefix_length\" : 0,\n" +
        " \"max_expansions\" : 50,\n" +
        " \"lenient\" : false,\n" +
        " \"zero_terms_query\" : \"NONE\",\n" +
        " \"boost\" : 1.0\n" +
        " }\n" +
        "}";
    MultiMatchQueryBuilder parsed = (MultiMatchQueryBuilder) parseQuery(json);
    checkGeneratedJson(json, parsed);
    // The json is passed as the assertion message so failures show the input.
    assertEquals(json, "quick brown fox", parsed.value());
    assertEquals(json, 3, parsed.fields().size());
    assertEquals(json, MultiMatchQueryBuilder.Type.MOST_FIELDS, parsed.type());
    assertEquals(json, Operator.OR, parsed.operator());
}
/**
 * `fuzziness` is not allowed for `cross_fields`, `phrase` and `phrase_prefix` and should throw an error.
 * Each disallowed type is exercised with the same query, varying only the "type" field.
 */
public void testFuzzinessNotAllowedTypes() throws IOException {
    String[] notAllowedTypes = new String[]{ Type.CROSS_FIELDS.parseField().getPreferredName(),
        Type.PHRASE.parseField().getPreferredName(), Type.PHRASE_PREFIX.parseField().getPreferredName()};
    for (String type : notAllowedTypes) {
        String json =
            "{\n" +
            " \"multi_match\" : {\n" +
            " \"query\" : \"quick brown fox\",\n" +
            " \"fields\" : [ \"title^1.0\", \"title.original^1.0\", \"title.shingles^1.0\" ],\n" +
            " \"type\" : \"" + type + "\",\n" +
            " \"fuzziness\" : 1" +
            " }\n" +
            "}";
        // Rejection must happen at parse time, with the offending type in the message.
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("Fuzziness not allowed for type [" + type +"]", e.getMessage());
    }
}
/**
 * An array is not a legal value for "query"; the parser must reject it.
 * (The JSON below also lacks a comma after the array, but the asserted message
 * shows the START_ARRAY token itself is what triggers the failure.)
 */
public void testQueryParameterArrayException() {
    String json =
        "{\n" +
        " \"multi_match\" : {\n" +
        " \"query\" : [\"quick\", \"brown\", \"fox\"]\n" +
        " \"fields\" : [ \"title^1.0\", \"title.original^1.0\", \"title.shingles^1.0\" ]" +
        " }\n" +
        "}";
    ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
    assertEquals("[multi_match] unknown token [START_ARRAY] after [query]", e.getMessage());
}
public void testExceptionUsingAnalyzerOnNumericField() {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    QueryShardContext context = createShardContext();
    // Forcing a text analyzer onto a double-valued query: the asserted message
    // shows that a produced token ("e", from the scientific-notation value)
    // fails numeric parsing during rewrite.
    MultiMatchQueryBuilder builder = new MultiMatchQueryBuilder(6.075210893508043E-4);
    builder.field(DOUBLE_FIELD_NAME);
    builder.analyzer("simple");
    NumberFormatException e = expectThrows(NumberFormatException.class, () -> builder.toQuery(context));
    assertEquals("For input string: \"e\"", e.getMessage());
}
/**
 * Fuzziness on non-text fields (int/boolean here) must be rejected at rewrite
 * time, both with and without an explicit "keyword" analyzer, while lenient
 * mode suppresses the error. Note the builder is mutated between rewrites, so
 * the statement order below is significant.
 */
public void testFuzzinessOnNonStringField() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    MultiMatchQueryBuilder query = new MultiMatchQueryBuilder(42).field(INT_FIELD_NAME).field(BOOLEAN_FIELD_NAME);
    query.fuzziness(randomFuzziness(INT_FIELD_NAME));
    QueryShardContext context = createShardContext();
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
        () -> query.toQuery(context));
    assertThat(e.getMessage(), containsString("Can only use fuzzy queries on keyword and text fields"));
    query.analyzer("keyword"); // triggers a different code path
    e = expectThrows(IllegalArgumentException.class,
        () -> query.toQuery(context));
    assertThat(e.getMessage(), containsString("Can only use fuzzy queries on keyword and text fields"));
    // with lenient(true) the same rewrites no longer throw
    query.lenient(true);
    query.toQuery(context); // no exception
    query.analyzer(null);
    query.toQuery(context); // no exception
}
}
| apache-2.0 |
ThiagoGarciaAlves/intellij-community | java/debugger/impl/src/com/intellij/debugger/settings/DebuggerDataViewsConfigurable.java | 16248 | // Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.debugger.settings;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.debugger.ui.JavaDebuggerSupport;
import com.intellij.debugger.ui.tree.render.ClassRenderer;
import com.intellij.debugger.ui.tree.render.PrimitiveRenderer;
import com.intellij.debugger.ui.tree.render.ToStringRenderer;
import com.intellij.openapi.options.OptionsBundle;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.project.Project;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.StateRestoringCheckBox;
import com.intellij.ui.classFilter.ClassFilterEditor;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import static java.awt.GridBagConstraints.*;
/**
 * The "Data Views | Java" debugger settings page. Reads and writes the view
 * options in {@link ViewsGeneralSettings} and the default renderers (class,
 * toString and primitive) held by {@link NodeRendererSettings}.
 *
 * @author Eugene Belyaev
 */
public class DebuggerDataViewsConfigurable implements SearchableConfigurable {
  // General tree options.
  private JCheckBox myCbAutoscroll;
  private JCheckBox myCbShowSyntheticFields;
  // Dependent checkbox: selectable only while synthetic fields are shown
  // (see the ChangeListener wired up in createComponent()).
  private StateRestoringCheckBox myCbShowValFieldsAsLocalVariables;
  private JCheckBox myCbHideNullArrayElements;
  private JCheckBox myCbShowStatic;
  private JCheckBox myCbShowDeclaredType;
  private JCheckBox myCbShowFQNames;
  private JCheckBox myCbShowObjectId;
  private JCheckBox myCbShowStringsType;
  private JCheckBox myCbHexValue;
  private JCheckBox myCbPopulateThrowableStack;
  // Dependent checkbox: selectable only while static fields are shown.
  private StateRestoringCheckBox myCbShowStaticFinalFields;
  //private final ArrayRendererConfigurable myArrayRendererConfigurable;
  private JCheckBox myCbEnableAlternateViews;
  // toString() rendering: either every class that overrides toString(), or only
  // classes matching the filter list edited below.
  private JCheckBox myCbEnableToString;
  private JRadioButton myRbAllThatOverride;
  private JRadioButton myRbFromList;
  private ClassFilterEditor myToStringFilterEditor;
  private Project myProject;

  public DebuggerDataViewsConfigurable(@Nullable Project project) {
    myProject = project;
    //myArrayRendererConfigurable = new ArrayRendererConfigurable(NodeRendererSettings.getInstance().getArrayRenderer());
  }

  @Override
  public void disposeUIResources() {
    //myArrayRendererConfigurable.disposeUIResources();
    // Drop references so the editor and the (possibly project-scoped) project
    // can be garbage collected once the settings dialog closes.
    myToStringFilterEditor = null;
    myProject = null;
  }

  @Override
  public String getDisplayName() {
    return OptionsBundle.message("options.java.display.name");
  }

  /** Builds the whole Swing form; called by the platform when the page is first shown. */
  @Override
  public JComponent createComponent() {
    if (myProject == null) {
      // No project was passed in: fall back to the project currently shown in
      // the debugger configurables.
      myProject = JavaDebuggerSupport.getContextProjectForEditorFieldsInDebuggerConfigurables();
    }
    final JPanel panel = new JPanel(new GridBagLayout());
    myCbAutoscroll = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.autoscroll"));
    myCbShowSyntheticFields = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.synthetic.fields"));
    myCbShowValFieldsAsLocalVariables = new StateRestoringCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.val.fields.as.locals"));
    myCbHideNullArrayElements = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.hide.null.array.elements"));
    myCbShowStatic = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.static.fields"));
    myCbShowStaticFinalFields = new StateRestoringCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.static.final.fields"));
    myCbEnableAlternateViews = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.alternate.view"));
    // "Show static final" is only meaningful while static fields are shown at all;
    // makeUnselectable(false) greys it out while remembering its previous state.
    myCbShowStatic.addChangeListener(new ChangeListener(){
      @Override
      public void stateChanged(ChangeEvent e) {
        if(myCbShowStatic.isSelected()) {
          myCbShowStaticFinalFields.makeSelectable();
        }
        else {
          myCbShowStaticFinalFields.makeUnselectable(false);
        }
      }
    });
    // Same dependency pattern: "show val fields as locals" requires synthetics shown.
    myCbShowSyntheticFields.addChangeListener(new ChangeListener() {
      @Override
      public void stateChanged(ChangeEvent e) {
        if(myCbShowSyntheticFields.isSelected()) {
          myCbShowValFieldsAsLocalVariables.makeSelectable();
        }
        else {
          myCbShowValFieldsAsLocalVariables.makeUnselectable(false);
        }
      }
    });
    myCbShowDeclaredType = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.declared.type"));
    myCbShowFQNames = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.fq.names"));
    myCbShowObjectId = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.object.id"));
    myCbHexValue = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.hex.value"));
    myCbShowStringsType = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.strings.type"));
    myCbPopulateThrowableStack = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.populate.throwable.stack"));
    myCbEnableToString = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.enable.toString"));
    myRbAllThatOverride = new JRadioButton(DebuggerBundle.message("label.base.renderer.configurable.all.overriding"));
    myRbFromList = new JRadioButton(DebuggerBundle.message("label.base.renderer.configurable.classes.from.list"));
    ButtonGroup group = new ButtonGroup();
    group.add(myRbAllThatOverride);
    group.add(myRbFromList);
    myToStringFilterEditor = new ClassFilterEditor(myProject, null, "reference.viewBreakpoints.classFilters.newPattern");
    // The radio buttons are enabled only while toString() rendering is on; the
    // filter editor additionally requires the "from list" mode to be chosen.
    myCbEnableToString.addItemListener(new ItemListener() {
      @Override
      public void itemStateChanged(ItemEvent e) {
        final boolean enabled = myCbEnableToString.isSelected();
        myRbAllThatOverride.setEnabled(enabled);
        myRbFromList.setEnabled(enabled);
        myToStringFilterEditor.setEnabled(enabled && myRbFromList.isSelected());
      }
    });
    myRbFromList.addItemListener(new ItemListener() {
      @Override
      public void itemStateChanged(ItemEvent e) {
        myToStringFilterEditor.setEnabled(myCbEnableToString.isSelected() && myRbFromList.isSelected());
      }
    });
    // Layout: a single GridBag column; the "Show" group gets its own titled sub-panel.
    panel.add(myCbAutoscroll, new GridBagConstraints(0, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insetsTop(4), 0, 0));
    final JPanel showPanel = new JPanel(new GridBagLayout());
    showPanel.setBorder(IdeBorderFactory.createTitledBorder("Show", true));
    showPanel.add(myCbShowDeclaredType, new GridBagConstraints(0, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.emptyInsets(), 0, 0));
    showPanel.add(myCbShowObjectId, new GridBagConstraints(0, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insetsTop(4), 0, 0));
    showPanel.add(myCbShowSyntheticFields, new GridBagConstraints(1, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insetsLeft(10), 0, 0));
    showPanel.add(myCbShowStatic, new GridBagConstraints(1, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insets(4, 10, 0, 0), 0, 0));
    showPanel.add(myCbShowValFieldsAsLocalVariables, new GridBagConstraints(2, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insets(4, 10, 0, 0), 0, 0));
    showPanel.add(myCbShowStaticFinalFields, new GridBagConstraints(2, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insets(4, 10, 0, 0), 0, 0));
    showPanel.add(myCbShowFQNames, new GridBagConstraints(3, RELATIVE, 1, 1, 1.0, 0.0, WEST, NONE, JBUI.insetsLeft(10), 0, 0));
    panel.add(showPanel, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, WEST, HORIZONTAL, JBUI.insetsTop(4), 0, 0));
    //final JPanel arraysPanel = new JPanel(new BorderLayout(0, UIUtil.DEFAULT_VGAP));
    //final JComponent arraysComponent = myArrayRendererConfigurable.createComponent();
    //assert arraysComponent != null;
    //arraysPanel.add(arraysComponent, BorderLayout.CENTER);
    //arraysPanel.add(myCbHideNullArrayElements, BorderLayout.SOUTH);
    //arraysPanel.setBorder(IdeBorderFactory.createTitledBorder("Arrays", true));
    //panel.add(arraysPanel, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 3, 1, 1.0, 0.0, GridBagConstraints.NORTH, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    panel.add(myCbShowStringsType, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, NORTH, HORIZONTAL, JBUI.emptyInsets(), 0, 0));
    panel.add(myCbHexValue, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, NORTH, HORIZONTAL, JBUI.insetsTop(4), 0, 0));
    panel.add(myCbHideNullArrayElements, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, NORTH, HORIZONTAL, JBUI.insetsTop(4), 0, 0));
    panel.add(myCbPopulateThrowableStack, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, NORTH, HORIZONTAL, JBUI.insetsTop(4), 0, 0));
    panel.add(myCbEnableAlternateViews, new GridBagConstraints(0, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insets(4, 0, 0, 10), 0, 0));
    // starting 4-th row
    panel.add(myCbEnableToString, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, WEST, NONE, JBUI.insetsTop(4), 0, 0));
    panel.add(myRbAllThatOverride, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, WEST, NONE, JBUI.insetsLeft(12), 0, 0));
    panel.add(myRbFromList, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, WEST, NONE, JBUI.insetsLeft(12), 0, 0));
    myToStringFilterEditor.setMinimumSize(JBUI.size(50, 100));
    panel.add(myToStringFilterEditor, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 1.0, CENTER, BOTH, JBUI.insetsLeft(12), 0, 0));
    return panel;
  }

  /** Writes the UI state back into the settings beans and notifies renderer listeners. */
  @Override
  public void apply() {
    final ViewsGeneralSettings generalSettings = ViewsGeneralSettings.getInstance();
    final NodeRendererSettings rendererSettings = NodeRendererSettings.getInstance();
    generalSettings.AUTOSCROLL_TO_NEW_LOCALS = myCbAutoscroll.isSelected();
    rendererSettings.setAlternateCollectionViewsEnabled(myCbEnableAlternateViews.isSelected());
    generalSettings.HIDE_NULL_ARRAY_ELEMENTS = myCbHideNullArrayElements.isSelected();
    generalSettings.POPULATE_THROWABLE_STACKTRACE = myCbPopulateThrowableStack.isSelected();
    final ClassRenderer classRenderer = rendererSettings.getClassRenderer();
    classRenderer.SHOW_STATIC = myCbShowStatic.isSelected();
    // isSelectedWhenSelectable(): per StateRestoringCheckBox, the value only
    // counts while the dependent checkbox is actually selectable.
    classRenderer.SHOW_STATIC_FINAL = myCbShowStaticFinalFields.isSelectedWhenSelectable();
    classRenderer.SHOW_SYNTHETICS = myCbShowSyntheticFields.isSelected();
    classRenderer.SHOW_VAL_FIELDS_AS_LOCAL_VARIABLES = myCbShowValFieldsAsLocalVariables.isSelectedWhenSelectable();
    classRenderer.SHOW_DECLARED_TYPE = myCbShowDeclaredType.isSelected();
    classRenderer.SHOW_FQ_TYPE_NAMES = myCbShowFQNames.isSelected();
    classRenderer.SHOW_OBJECT_ID = myCbShowObjectId.isSelected();
    classRenderer.SHOW_STRINGS_TYPE = myCbShowStringsType.isSelected();
    final ToStringRenderer toStringRenderer = rendererSettings.getToStringRenderer();
    // The UI exposes "enabled"; the renderer stores the inverse as "on demand".
    toStringRenderer.setOnDemand(!myCbEnableToString.isSelected());
    toStringRenderer.setUseClassFilters(myRbFromList.isSelected());
    toStringRenderer.setClassFilters(myToStringFilterEditor.getFilters());
    PrimitiveRenderer primitiveRenderer = rendererSettings.getPrimitiveRenderer();
    primitiveRenderer.setShowHexValue(myCbHexValue.isSelected());
    rendererSettings.fireRenderersChanged();
  }

  /** Loads the stored settings into the UI, restoring the dependent enabled/selectable states. */
  @Override
  public void reset() {
    final ViewsGeneralSettings generalSettings = ViewsGeneralSettings.getInstance();
    final NodeRendererSettings rendererSettings = NodeRendererSettings.getInstance();
    myCbAutoscroll.setSelected(generalSettings.AUTOSCROLL_TO_NEW_LOCALS);
    myCbHideNullArrayElements.setSelected(generalSettings.HIDE_NULL_ARRAY_ELEMENTS);
    myCbEnableAlternateViews.setSelected(rendererSettings.areAlternateCollectionViewsEnabled());
    myCbPopulateThrowableStack.setSelected(generalSettings.POPULATE_THROWABLE_STACKTRACE);
    ClassRenderer classRenderer = rendererSettings.getClassRenderer();
    myCbShowSyntheticFields.setSelected(classRenderer.SHOW_SYNTHETICS);
    myCbShowValFieldsAsLocalVariables.setSelected(classRenderer.SHOW_VAL_FIELDS_AS_LOCAL_VARIABLES);
    if (!classRenderer.SHOW_SYNTHETICS) {
      // keep the dependent checkbox consistent with its parent
      myCbShowValFieldsAsLocalVariables.makeUnselectable(false);
    }
    myCbShowStatic.setSelected(classRenderer.SHOW_STATIC);
    myCbShowStaticFinalFields.setSelected(classRenderer.SHOW_STATIC_FINAL);
    if(!classRenderer.SHOW_STATIC) {
      myCbShowStaticFinalFields.makeUnselectable(false);
    }
    myCbShowDeclaredType.setSelected(classRenderer.SHOW_DECLARED_TYPE);
    myCbShowFQNames.setSelected(classRenderer.SHOW_FQ_TYPE_NAMES);
    myCbShowObjectId.setSelected(classRenderer.SHOW_OBJECT_ID);
    myCbShowStringsType.setSelected(classRenderer.SHOW_STRINGS_TYPE);
    final ToStringRenderer toStringRenderer = rendererSettings.getToStringRenderer();
    final boolean toStringEnabled = !toStringRenderer.isOnDemand();
    final boolean useClassFilters = toStringRenderer.isUseClassFilters();
    myCbEnableToString.setSelected(toStringEnabled);
    myRbAllThatOverride.setSelected(!useClassFilters);
    myRbFromList.setSelected(useClassFilters);
    myToStringFilterEditor.setFilters(toStringRenderer.getClassFilters());
    myToStringFilterEditor.setEnabled(toStringEnabled && useClassFilters);
    myRbFromList.setEnabled(toStringEnabled);
    myRbAllThatOverride.setEnabled(toStringEnabled);
    PrimitiveRenderer primitiveRenderer = rendererSettings.getPrimitiveRenderer();
    myCbHexValue.setSelected(primitiveRenderer.isShowHexValue());
  }

  @Override
  public boolean isModified() {
    return areGeneralSettingsModified() || areDefaultRenderersModified();
  }

  /** True when any general (non-renderer) option in the UI differs from the stored value. */
  private boolean areGeneralSettingsModified() {
    ViewsGeneralSettings generalSettings = ViewsGeneralSettings.getInstance();
    return generalSettings.AUTOSCROLL_TO_NEW_LOCALS != myCbAutoscroll.isSelected() ||
      generalSettings.HIDE_NULL_ARRAY_ELEMENTS != myCbHideNullArrayElements.isSelected() ||
      generalSettings.POPULATE_THROWABLE_STACKTRACE != myCbPopulateThrowableStack.isSelected();
  }

  /** True when any renderer-backed option in the UI differs from the stored renderer settings. */
  private boolean areDefaultRenderersModified() {
    //if (myArrayRendererConfigurable.isModified()) {
    //  return true;
    //}
    final NodeRendererSettings rendererSettings = NodeRendererSettings.getInstance();
    final ClassRenderer classRenderer = rendererSettings.getClassRenderer();
    final boolean isClassRendererModified=
      (classRenderer.SHOW_STATIC != myCbShowStatic.isSelected()) ||
      (classRenderer.SHOW_STATIC_FINAL != myCbShowStaticFinalFields.isSelectedWhenSelectable()) ||
      (classRenderer.SHOW_SYNTHETICS != myCbShowSyntheticFields.isSelected()) ||
      (classRenderer.SHOW_VAL_FIELDS_AS_LOCAL_VARIABLES != myCbShowValFieldsAsLocalVariables.isSelectedWhenSelectable()) ||
      (classRenderer.SHOW_DECLARED_TYPE != myCbShowDeclaredType.isSelected()) ||
      (classRenderer.SHOW_FQ_TYPE_NAMES != myCbShowFQNames.isSelected()) ||
      (classRenderer.SHOW_OBJECT_ID != myCbShowObjectId.isSelected()) ||
      (classRenderer.SHOW_STRINGS_TYPE != myCbShowStringsType.isSelected());
    if (isClassRendererModified) {
      return true;
    }
    final ToStringRenderer toStringRenderer = rendererSettings.getToStringRenderer();
    final boolean isToStringRendererModified =
      // Note the '==' here: the checkbox holds the INVERSE of isOnDemand()
      // (see apply()/reset()), so equal values mean the option was changed.
      (toStringRenderer.isOnDemand() == myCbEnableToString.isSelected()) ||
      (toStringRenderer.isUseClassFilters() != myRbFromList.isSelected()) ||
      (!DebuggerUtilsEx.filterEquals(toStringRenderer.getClassFilters(), myToStringFilterEditor.getFilters()));
    if (isToStringRendererModified) {
      return true;
    }
    if (rendererSettings.areAlternateCollectionViewsEnabled() != myCbEnableAlternateViews.isSelected()) {
      return true;
    }
    PrimitiveRenderer primitiveRenderer = rendererSettings.getPrimitiveRenderer();
    if (primitiveRenderer.isShowHexValue() != myCbHexValue.isSelected()) {
      return true;
    }
    return false;
  }

  @SuppressWarnings("SpellCheckingInspection")
  @Override
  @NotNull
  public String getHelpTopic() {
    return "Debugger_Data_Views_Java";
  }

  // The searchable-configurable id doubles as the help topic id.
  @Override
  @NotNull
  public String getId() {
    return getHelpTopic();
  }
}
| apache-2.0 |
alihalabyah/stashbot | src/main/java/com/palantir/stash/stashbot/urlbuilder/StashbotUrlBuilder.java | 4267 | // Copyright 2014 Palantir Technologies
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.palantir.stash.stashbot.urlbuilder;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.sql.SQLException;
import com.atlassian.stash.nav.NavBuilder;
import com.atlassian.stash.pull.PullRequest;
import com.atlassian.stash.repository.Repository;
import com.atlassian.stash.repository.RepositoryCloneLinksRequest;
import com.atlassian.stash.repository.RepositoryService;
import com.palantir.stash.stashbot.jobtemplate.JobType;
import com.palantir.stash.stashbot.persistence.JenkinsServerConfiguration;
/**
 * Builds the URLs Stashbot exchanges with Jenkins and Stash: trigger and
 * reporting servlet URLs, clone URLs (with embedded credentials when the
 * server configuration requires them) and links back to Stash commits.
 */
public class StashbotUrlBuilder {

    private final NavBuilder nb;
    private final RepositoryService rs;

    public StashbotUrlBuilder(NavBuilder nb, RepositoryService rs) {
        this.nb = nb;
        this.rs = rs;
    }

    /**
     * URL of the build-trigger servlet for the given repository, job type and
     * commit. Layout:
     * BASE/plugins/servlet/stashbot/build-trigger/REPO_ID/JOB_TYPE/BUILD_HEAD
     * [/MERGE_HEAD/PULL_REQUEST_ID] — the last two segments are appended only
     * when a pull request is supplied.
     */
    public String getJenkinsTriggerUrl(Repository repo, JobType jt,
        String buildHead, PullRequest pullRequest) throws SQLException {
        // StringBuilder instead of StringBuffer: the builder is method-local,
        // so the synchronization StringBuffer provides is pure overhead.
        StringBuilder url = new StringBuilder(nb.buildAbsolute());
        url.append("/plugins/servlet/stashbot/build-trigger/");
        url.append(repo.getId().toString()).append("/");
        url.append(jt.toString()).append("/");
        url.append(buildHead);
        if (pullRequest != null) {
            url.append("/");
            url.append(pullRequest.getToRef().getLatestChangeset());
            url.append("/");
            url.append(pullRequest.getId().toString());
        }
        return url.toString();
    }

    /**
     * Template URL Jenkins uses to report build status back to Stash. The
     * $-placeholders ($repoId, $BUILD_NUMBER, $buildHead, $mergeHead,
     * $pullRequestId) are substituted on the Jenkins side, not here.
     */
    public String buildReportingUrl(Repository repo, JobType jobType,
        JenkinsServerConfiguration jsc, String status) {
        // Look at the BuildSuccessReportinServlet if you change this:
        // "BASE_URL/REPO_ID/JOB_NAME/STATE/BUILD_NUMBER/BUILD_HEAD[/MERGE_HEAD/PULLREQUEST_ID]";
        // SEE ALSO:
        // https://wiki.jenkins-ci.org/display/JENKINS/Building+a+software+project#Buildingasoftwareproject-JenkinsSetEnvironmentVariables
        // TODO: Remove $repoId, hardcode ID?
        return nb
            .buildAbsolute()
            .concat("/plugins/servlet/stashbot/build-reporting/$repoId/"
                + jobType.toString() + "/" + status
                + "/$BUILD_NUMBER/$buildHead/$mergeHead/$pullRequestId");
    }

    /**
     * HTTP clone URL for the repository, with the configured Stash credentials
     * embedded (percent-encoded) when the Jenkins server authenticates with
     * username/password.
     */
    public String buildCloneUrl(Repository repo, JenkinsServerConfiguration jsc) {
        RepositoryCloneLinksRequest rclr =
            new RepositoryCloneLinksRequest.Builder().repository(repo).protocol("http").user(null).build();
        String url = rs.getCloneLinks(rclr).iterator().next().getHref();
        // we build without username because we insert username AND password, and need both, in the case where we are using USERNAME_AND_PASSWORD.
        switch (jsc.getAuthenticationMode()) {
        case USERNAME_AND_PASSWORD:
            url = url.replace("://",
                "://" + mask(jsc.getStashUsername()) + ":" + mask(jsc.getStashPassword())
                    + "@");
            break;
        case CREDENTIAL_MANUALLY_CONFIGURED:
            // do nothing
            // XXX: do we need to get the git/ssh link instead of the http link here? maybe that's a new mode?
            break;
        default:
            throw new IllegalStateException("Invalid value - update this code after adding an authentication mode");
        }
        return url;
    }

    /** Absolute Stash URL of the given changeset in the given repository. */
    public String buildStashCommitUrl(Repository repo, String changeset) {
        return nb.repo(repo).changeset(changeset).buildAbsolute();
    }

    /**
     * Percent-encodes a credential for embedding in a URL, falling back to the
     * raw string if UTF-8 were ever unavailable (it never is on a compliant
     * JVM, so the catch is effectively dead code kept for safety).
     * NOTE(review): URLEncoder emits application/x-www-form-urlencoded
     * (' ' becomes '+'), which is not identical to URI userinfo encoding —
     * confirm the consumer decodes it the same way before changing this.
     */
    private String mask(String str) {
        try {
            return URLEncoder.encode(str, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            return str;
        }
    }
}
| apache-2.0 |
nguyentruongtho/buck | test/com/facebook/buck/features/apple/project/SchemeGeneratorTest.java | 56208 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.features.apple.project;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import com.facebook.buck.apple.xcode.AbstractPBXObjectFactory;
import com.facebook.buck.apple.xcode.XCScheme;
import com.facebook.buck.apple.xcode.xcodeproj.PBXFileReference;
import com.facebook.buck.apple.xcode.xcodeproj.PBXNativeTarget;
import com.facebook.buck.apple.xcode.xcodeproj.PBXReference;
import com.facebook.buck.apple.xcode.xcodeproj.PBXTarget;
import com.facebook.buck.apple.xcode.xcodeproj.ProductTypes;
import com.facebook.buck.features.apple.common.SchemeActionType;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import com.facebook.buck.util.timing.SettableFakeClock;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
public class SchemeGeneratorTest {
private SettableFakeClock clock;
private ProjectFilesystem projectFilesystem;
/**
 * Creates a fresh fake project filesystem for each test, driven by a settable
 * fake clock (timestamps are irrelevant to these tests, hence DO_NOT_CARE).
 */
@Before
public void setUp() {
    clock = SettableFakeClock.DO_NOT_CARE;
    projectFilesystem = new FakeProjectFilesystem(clock);
}
@Test
public void schemeWithMultipleTargetsBuildsInCorrectOrder() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget rootTarget =
new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
PBXTarget leftTarget =
new PBXNativeTarget("leftRule", AbstractPBXObjectFactory.DefaultFactory());
leftTarget.setGlobalID("leftGID");
leftTarget.setProductReference(
new PBXFileReference(
"left.a", "left.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
leftTarget.setProductType(ProductTypes.STATIC_LIBRARY);
PBXTarget rightTarget =
new PBXNativeTarget("rightRule", AbstractPBXObjectFactory.DefaultFactory());
rightTarget.setGlobalID("rightGID");
rightTarget.setProductReference(
new PBXFileReference(
"right.a", "right.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rightTarget.setProductType(ProductTypes.STATIC_LIBRARY);
PBXTarget childTarget =
new PBXNativeTarget("childRule", AbstractPBXObjectFactory.DefaultFactory());
childTarget.setGlobalID("childGID");
childTarget.setProductReference(
new PBXFileReference(
"child.a", "child.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
childTarget.setProductType(ProductTypes.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
targetToProjectPathMapBuilder.put(leftTarget, pbxprojectPath);
targetToProjectPathMapBuilder.put(rightTarget, pbxprojectPath);
targetToProjectPathMapBuilder.put(childTarget, pbxprojectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(childTarget),
ImmutableSet.of(rootTarget, leftTarget, rightTarget, childTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* wasCreatedForAppExtension */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
Optional.empty(),
Optional.empty(),
Optional.empty(),
XCScheme.LaunchAction.LaunchStyle.AUTO,
Optional.empty(), /* watchAdapter */
Optional.empty() /* notificationPayloadFile */);
Path schemePath = schemeGenerator.writeScheme();
String schemeXml = projectFilesystem.readFileIfItExists(schemePath).get();
System.out.println(schemeXml);
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath xpath = xpathFactory.newXPath();
XPathExpression expr = xpath.compile("//BuildAction//BuildableReference/@BlueprintIdentifier");
NodeList nodes = (NodeList) expr.evaluate(scheme, XPathConstants.NODESET);
List<String> expectedOrdering = ImmutableList.of("rootGID", "leftGID", "rightGID", "childGID");
List<String> actualOrdering = new ArrayList<>();
for (int i = 0; i < nodes.getLength(); i++) {
actualOrdering.add(nodes.item(i).getNodeValue());
}
assertThat(actualOrdering, equalTo(expectedOrdering));
}
@Test
public void schemeBuildsAndTestsAppleTestTargets() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget testDepTarget =
new PBXNativeTarget("testDep", AbstractPBXObjectFactory.DefaultFactory());
testDepTarget.setGlobalID("testDepGID");
testDepTarget.setProductReference(
new PBXFileReference(
"libDep.a", "libDep.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
testDepTarget.setProductType(ProductTypes.STATIC_LIBRARY);
PBXTarget testLibraryTarget =
new PBXNativeTarget("testLibrary", AbstractPBXObjectFactory.DefaultFactory());
testLibraryTarget.setGlobalID("testLibraryGID");
testLibraryTarget.setProductReference(
new PBXFileReference(
"lib.a", "lib.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
testLibraryTarget.setProductType(ProductTypes.STATIC_LIBRARY);
PBXTarget testTarget = new PBXNativeTarget("test", AbstractPBXObjectFactory.DefaultFactory());
testTarget.setGlobalID("testGID");
testTarget.setProductReference(
new PBXFileReference(
"test.xctest",
"test.xctest",
PBXReference.SourceTree.BUILT_PRODUCTS_DIR,
Optional.empty()));
testTarget.setProductType(ProductTypes.UNIT_TEST);
PBXTarget rootTarget = new PBXNativeTarget("root", AbstractPBXObjectFactory.DefaultFactory());
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
Path projectPath = Paths.get("foo/test.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(testTarget, projectPath);
targetToProjectPathMapBuilder.put(testDepTarget, projectPath);
targetToProjectPathMapBuilder.put(testLibraryTarget, projectPath);
targetToProjectPathMapBuilder.put(rootTarget, projectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(testDepTarget, testTarget),
ImmutableSet.of(testTarget),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* wasCreatedForAppExtension */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
Optional.empty(),
Optional.empty(),
Optional.empty(),
XCScheme.LaunchAction.LaunchStyle.AUTO,
Optional.empty(), /* watchAdapter */
Optional.empty() /* notificationPayloadFile */);
Path schemePath = schemeGenerator.writeScheme();
String schemeXml = projectFilesystem.readFileIfItExists(schemePath).get();
System.out.println(schemeXml);
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath buildXpath = xpathFactory.newXPath();
XPathExpression buildExpr =
buildXpath.compile("//BuildAction//BuildableReference/@BlueprintIdentifier");
NodeList buildNodes = (NodeList) buildExpr.evaluate(scheme, XPathConstants.NODESET);
List<String> expectedBuildOrdering = ImmutableList.of("rootGID", "testDepGID", "testGID");
List<String> actualBuildOrdering = new ArrayList<>();
for (int i = 0; i < buildNodes.getLength(); i++) {
actualBuildOrdering.add(buildNodes.item(i).getNodeValue());
}
assertThat(actualBuildOrdering, equalTo(expectedBuildOrdering));
XPath textXpath = xpathFactory.newXPath();
XPathExpression testExpr =
textXpath.compile(
"//TestAction//TestableReference/BuildableReference/@BlueprintIdentifier");
NodeList testNodes = (NodeList) testExpr.evaluate(scheme, XPathConstants.NODESET);
List<String> expectedTestOrdering = ImmutableList.of("testGID");
List<String> actualTestOrdering = new ArrayList<>();
for (int i = 0; i < testNodes.getLength(); i++) {
actualTestOrdering.add(testNodes.item(i).getNodeValue());
}
assertThat(actualTestOrdering, equalTo(expectedTestOrdering));
}
@Test
public void schemeIncludesAllExpectedActions() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget rootTarget =
new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
PBXTarget testTarget =
new PBXNativeTarget("testRule", AbstractPBXObjectFactory.DefaultFactory());
testTarget.setGlobalID("testGID");
testTarget.setProductReference(
new PBXFileReference(
"test.a", "test.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
testTarget.setProductType(ProductTypes.STATIC_LIBRARY);
PBXTarget testBundleTarget =
new PBXNativeTarget("testBundleRule", AbstractPBXObjectFactory.DefaultFactory());
testBundleTarget.setGlobalID("testBundleGID");
testBundleTarget.setProductReference(
new PBXFileReference(
"test.xctest",
"test.xctest",
PBXReference.SourceTree.BUILT_PRODUCTS_DIR,
Optional.empty()));
testBundleTarget.setProductType(ProductTypes.UNIT_TEST);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
targetToProjectPathMapBuilder.put(testTarget, pbxprojectPath);
targetToProjectPathMapBuilder.put(testBundleTarget, pbxprojectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(testBundleTarget),
ImmutableSet.of(testBundleTarget),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* wasCreatedForAppExtension */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
Optional.empty(),
Optional.empty(),
Optional.empty(),
XCScheme.LaunchAction.LaunchStyle.AUTO,
Optional.empty(), /* watchAdapter */
Optional.empty() /* notificationPayloadFile */);
Path schemePath = schemeGenerator.writeScheme();
String schemeXml = projectFilesystem.readFileIfItExists(schemePath).get();
System.out.println(schemeXml);
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath buildActionXpath = xpathFactory.newXPath();
XPathExpression buildActionExpr =
buildActionXpath.compile("//BuildAction//BuildableReference/@BlueprintIdentifier");
NodeList buildActionNodes = (NodeList) buildActionExpr.evaluate(scheme, XPathConstants.NODESET);
List<String> expectedOrdering = ImmutableList.of("rootGID", "testBundleGID");
List<String> actualOrdering = new ArrayList<>();
for (int i = 0; i < buildActionNodes.getLength(); i++) {
actualOrdering.add(buildActionNodes.item(i).getNodeValue());
}
assertThat(actualOrdering, equalTo(expectedOrdering));
XPath testActionXpath = xpathFactory.newXPath();
XPathExpression testActionExpr =
testActionXpath.compile("//TestAction//BuildableReference/@BlueprintIdentifier");
String testActionBlueprintIdentifier =
(String) testActionExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(testActionBlueprintIdentifier, equalTo("testBundleGID"));
XPath launchActionXpath = xpathFactory.newXPath();
XPathExpression launchActionExpr =
launchActionXpath.compile("//LaunchAction//BuildableReference/@BlueprintIdentifier");
String launchActionBlueprintIdentifier =
(String) launchActionExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(launchActionBlueprintIdentifier, equalTo("rootGID"));
XPath profileActionXpath = xpathFactory.newXPath();
XPathExpression profileActionExpr =
profileActionXpath.compile("//ProfileAction//BuildableReference/@BlueprintIdentifier");
String profileActionBlueprintIdentifier =
(String) profileActionExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(profileActionBlueprintIdentifier, equalTo("rootGID"));
}
  /**
   * Every BuildableReference anywhere in the generated scheme must carry the full attribute set
   * Xcode expects: BlueprintIdentifier, BuildableIdentifier, ReferencedContainer, BlueprintName,
   * and BuildableName.
   */
  @Test
  public void buildableReferenceShouldHaveExpectedProperties() throws Exception {
    ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
    PBXTarget rootTarget =
        new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
    rootTarget.setGlobalID("rootGID");
    rootTarget.setProductReference(
        new PBXFileReference(
            "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
    Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
    targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
    SchemeGenerator schemeGenerator =
        new SchemeGenerator(
            projectFilesystem,
            Optional.of(rootTarget),
            ImmutableSet.of(rootTarget),
            ImmutableSet.of(),
            ImmutableSet.of(),
            "TestScheme",
            Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
            false /* parallelizeBuild */,
            Optional.empty() /* wasCreatedForAppExtension */,
            Optional.empty() /* runnablePath */,
            Optional.empty() /* remoteRunnablePath */,
            SchemeActionType.DEFAULT_CONFIG_NAMES,
            targetToProjectPathMapBuilder.build(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            XCScheme.LaunchAction.LaunchStyle.AUTO,
            Optional.empty(), /* watchAdapter */
            Optional.empty() /* notificationPayloadFile */);
    Path schemePath = schemeGenerator.writeScheme();
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
    XPathFactory xpathFactory = XPathFactory.newInstance();
    XPath buildableReferenceXPath = xpathFactory.newXPath();
    XPathExpression buildableReferenceExpr =
        buildableReferenceXPath.compile("//BuildableReference");
    NodeList buildableReferences =
        (NodeList) buildableReferenceExpr.evaluate(scheme, XPathConstants.NODESET);
    // Guard against a vacuous pass: the scheme must contain at least one BuildableReference.
    assertThat(buildableReferences.getLength(), greaterThan(0));
    for (int i = 0; i < buildableReferences.getLength(); i++) {
      NamedNodeMap attributes = buildableReferences.item(i).getAttributes();
      assertThat(attributes, notNullValue());
      assertThat(attributes.getNamedItem("BlueprintIdentifier"), notNullValue());
      assertThat(attributes.getNamedItem("BuildableIdentifier"), notNullValue());
      assertThat(attributes.getNamedItem("ReferencedContainer"), notNullValue());
      assertThat(attributes.getNamedItem("BlueprintName"), notNullValue());
      assertThat(attributes.getNamedItem("BuildableName"), notNullValue());
    }
  }
  /**
   * With the default config names, the scheme must contain exactly six actions in Xcode's
   * canonical order (Build, Test, Launch, Profile, Analyze, Archive), with Test/Launch/Analyze on
   * the Debug configuration and Profile/Archive on Release. BuildAction itself carries no
   * buildConfiguration attribute.
   */
  @Test
  public void allActionsShouldBePresentInSchemeWithDefaultBuildConfigurations() throws Exception {
    ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
    PBXTarget rootTarget =
        new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
    rootTarget.setGlobalID("rootGID");
    rootTarget.setProductReference(
        new PBXFileReference(
            "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
    Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
    targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
    SchemeGenerator schemeGenerator =
        new SchemeGenerator(
            projectFilesystem,
            Optional.of(rootTarget),
            ImmutableSet.of(rootTarget),
            ImmutableSet.of(),
            ImmutableSet.of(),
            "TestScheme",
            Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
            false /* parallelizeBuild */,
            Optional.empty() /* wasCreatedForAppExtension */,
            Optional.empty() /* runnablePath */,
            Optional.empty() /* remoteRunnablePath */,
            SchemeActionType.DEFAULT_CONFIG_NAMES,
            targetToProjectPathMapBuilder.build(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            XCScheme.LaunchAction.LaunchStyle.AUTO,
            Optional.empty(), /* watchAdapter */
            Optional.empty() /* notificationPayloadFile */);
    Path schemePath = schemeGenerator.writeScheme();
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
    XPathFactory xpathFactory = XPathFactory.newInstance();
    XPath schemeChildrenXPath = xpathFactory.newXPath();
    // Grab the direct children of <Scheme>; the assertions below depend on their exact order.
    XPathExpression schemeChildrenExpr = schemeChildrenXPath.compile("/Scheme/node()")
    NodeList actions = (NodeList) schemeChildrenExpr.evaluate(scheme, XPathConstants.NODESET);
    assertThat(actions.getLength(), equalTo(6));
    Node buildAction = actions.item(0);
    assertThat(buildAction.getNodeName(), equalTo("BuildAction"));
    assertThat(buildAction.getAttributes().getNamedItem("buildConfiguration"), nullValue());
    Node testAction = actions.item(1);
    assertThat(testAction.getNodeName(), equalTo("TestAction"));
    assertThat(
        testAction.getAttributes().getNamedItem("buildConfiguration").getNodeValue(),
        equalTo("Debug"));
    Node launchAction = actions.item(2);
    assertThat(launchAction.getNodeName(), equalTo("LaunchAction"));
    assertThat(
        launchAction.getAttributes().getNamedItem("buildConfiguration").getNodeValue(),
        equalTo("Debug"));
    Node profileAction = actions.item(3);
    assertThat(profileAction.getNodeName(), equalTo("ProfileAction"));
    assertThat(
        profileAction.getAttributes().getNamedItem("buildConfiguration").getNodeValue(),
        equalTo("Release"));
    Node analyzeAction = actions.item(4);
    assertThat(analyzeAction.getNodeName(), equalTo("AnalyzeAction"));
    assertThat(
        analyzeAction.getAttributes().getNamedItem("buildConfiguration").getNodeValue(),
        equalTo("Debug"));
    Node archiveAction = actions.item(5);
    assertThat(archiveAction.getNodeName(), equalTo("ArchiveAction"));
    assertThat(
        archiveAction.getAttributes().getNamedItem("buildConfiguration").getNodeValue(),
        equalTo("Release"));
  }
  /**
   * When the scheme contents change between two generations, the file must be rewritten: the
   * second write (with a different root target, at clock time 64738) must update the file's
   * modification time from the first write's 49152.
   */
  @Test
  public void schemeIsRewrittenIfContentsHaveChanged() throws IOException {
    // First generation: write the initial scheme at t=49152.
    {
      ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
      PBXTarget rootTarget =
          new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
      rootTarget.setGlobalID("rootGID");
      rootTarget.setProductReference(
          new PBXFileReference(
              "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
      rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
      Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
      targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
      clock.setCurrentTimeMillis(49152);
      SchemeGenerator schemeGenerator =
          new SchemeGenerator(
              projectFilesystem,
              Optional.of(rootTarget),
              ImmutableSet.of(rootTarget),
              ImmutableSet.of(),
              ImmutableSet.of(),
              "TestScheme",
              Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
              false /* parallelizeBuild */,
              Optional.empty() /* wasCreatedForAppExtension */,
              Optional.empty() /* runnablePath */,
              Optional.empty() /* remoteRunnablePath */,
              SchemeActionType.DEFAULT_CONFIG_NAMES,
              targetToProjectPathMapBuilder.build(),
              Optional.empty(),
              Optional.empty(),
              Optional.empty(),
              XCScheme.LaunchAction.LaunchStyle.AUTO,
              Optional.empty(), /* watchAdapter */
              Optional.empty() /* notificationPayloadFile */);
      Path schemePath = schemeGenerator.writeScheme();
      assertThat(
          projectFilesystem.getLastModifiedTime(schemePath), equalTo(FileTime.fromMillis(49152L)));
    }
    // Second generation: different target ("rootRule2"), so contents differ and the file
    // must be rewritten at the advanced clock time.
    {
      PBXTarget rootTarget =
          new PBXNativeTarget("rootRule2", AbstractPBXObjectFactory.DefaultFactory());
      rootTarget.setGlobalID("root2GID");
      rootTarget.setProductReference(
          new PBXFileReference(
              "root2.a", "root2.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
      rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
      Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
      clock.setCurrentTimeMillis(64738);
      SchemeGenerator schemeGenerator =
          new SchemeGenerator(
              projectFilesystem,
              Optional.of(rootTarget),
              ImmutableSet.of(rootTarget),
              ImmutableSet.of(),
              ImmutableSet.of(),
              "TestScheme",
              Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
              false /* parallelizeBuild */,
              Optional.empty() /* wasCreatedForAppExtension */,
              Optional.empty() /* runnablePath */,
              Optional.empty() /* remoteRunnablePath */,
              SchemeActionType.DEFAULT_CONFIG_NAMES,
              ImmutableMap.of(rootTarget, pbxprojectPath),
              Optional.empty(),
              Optional.empty(),
              Optional.empty(),
              XCScheme.LaunchAction.LaunchStyle.AUTO,
              Optional.empty(), /* watchAdapter */
              Optional.empty() /* notificationPayloadFile */);
      Path schemePath = schemeGenerator.writeScheme();
      assertThat(
          projectFilesystem.getLastModifiedTime(schemePath), equalTo(FileTime.fromMillis(64738L)));
    }
  }
  /**
   * When a second generation produces byte-identical scheme contents, the file must NOT be
   * rewritten: after the clock advances to 64738 the modification time still reads the original
   * 49152.
   */
  @Test
  public void schemeIsNotRewrittenIfContentsHaveNotChanged() throws IOException {
    // First generation: write the scheme at t=49152.
    {
      PBXTarget rootTarget =
          new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
      rootTarget.setGlobalID("rootGID");
      rootTarget.setProductReference(
          new PBXFileReference(
              "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
      rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
      Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
      clock.setCurrentTimeMillis(49152);
      SchemeGenerator schemeGenerator =
          new SchemeGenerator(
              projectFilesystem,
              Optional.of(rootTarget),
              ImmutableSet.of(rootTarget),
              ImmutableSet.of(),
              ImmutableSet.of(),
              "TestScheme",
              Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
              false /* parallelizeBuild */,
              Optional.empty() /* wasCreatedForAppExtension */,
              Optional.empty() /* runnablePath */,
              Optional.empty() /* remoteRunnablePath */,
              SchemeActionType.DEFAULT_CONFIG_NAMES,
              ImmutableMap.of(rootTarget, pbxprojectPath),
              Optional.empty(),
              Optional.empty(),
              Optional.empty(),
              XCScheme.LaunchAction.LaunchStyle.AUTO,
              Optional.empty(), /* watchAdapter */
              Optional.empty() /* notificationPayloadFile */);
      Path schemePath = schemeGenerator.writeScheme();
      assertThat(
          projectFilesystem.getLastModifiedTime(schemePath), equalTo(FileTime.fromMillis(49152L)));
    }
    // Second generation: identical inputs, so identical contents; even though the clock has
    // advanced to 64738 the mtime must remain the original 49152.
    {
      PBXTarget rootTarget =
          new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
      rootTarget.setGlobalID("rootGID");
      rootTarget.setProductReference(
          new PBXFileReference(
              "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
      rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
      Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
      clock.setCurrentTimeMillis(64738);
      SchemeGenerator schemeGenerator =
          new SchemeGenerator(
              projectFilesystem,
              Optional.of(rootTarget),
              ImmutableSet.of(rootTarget),
              ImmutableSet.of(),
              ImmutableSet.of(),
              "TestScheme",
              Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
              false /* parallelizeBuild */,
              Optional.empty() /* wasCreatedForAppExtension */,
              Optional.empty() /* runnablePath */,
              Optional.empty() /* remoteRunnablePath */,
              SchemeActionType.DEFAULT_CONFIG_NAMES,
              ImmutableMap.of(rootTarget, pbxprojectPath),
              Optional.empty(),
              Optional.empty(),
              Optional.empty(),
              XCScheme.LaunchAction.LaunchStyle.AUTO,
              Optional.empty(), /* watchAdapter */
              Optional.empty() /* notificationPayloadFile */);
      Path schemePath = schemeGenerator.writeScheme();
      assertThat(
          projectFilesystem.getLastModifiedTime(schemePath), equalTo(FileTime.fromMillis(49152L)));
    }
  }
@Test
public void schemeWithNoPrimaryRuleCanIncludeTests() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget testLibraryTarget =
new PBXNativeTarget("testLibrary", AbstractPBXObjectFactory.DefaultFactory());
testLibraryTarget.setGlobalID("testLibraryGID");
testLibraryTarget.setProductReference(
new PBXFileReference(
"lib.a", "lib.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
testLibraryTarget.setProductType(ProductTypes.STATIC_LIBRARY);
PBXTarget testTarget =
new PBXNativeTarget("testRule", AbstractPBXObjectFactory.DefaultFactory());
testTarget.setGlobalID("testGID");
testTarget.setProductReference(
new PBXFileReference(
"test.a", "test.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
testTarget.setProductType(ProductTypes.STATIC_LIBRARY);
PBXTarget testBundleTarget =
new PBXNativeTarget("testBundleRule", AbstractPBXObjectFactory.DefaultFactory());
testBundleTarget.setGlobalID("testBundleGID");
testBundleTarget.setProductReference(
new PBXFileReference(
"test.xctest",
"test.xctest",
PBXReference.SourceTree.BUILT_PRODUCTS_DIR,
Optional.empty()));
testBundleTarget.setProductType(ProductTypes.UNIT_TEST);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(testLibraryTarget, pbxprojectPath);
targetToProjectPathMapBuilder.put(testTarget, pbxprojectPath);
targetToProjectPathMapBuilder.put(testBundleTarget, pbxprojectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.empty(),
ImmutableSet.of(),
ImmutableSet.of(testBundleTarget),
ImmutableSet.of(testBundleTarget),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* wasCreatedForAppExtension */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
Optional.empty(),
Optional.empty(),
Optional.empty(),
XCScheme.LaunchAction.LaunchStyle.AUTO,
Optional.empty(), /* watchAdapter */
Optional.empty() /* notificationPayloadFile */);
Path schemePath = schemeGenerator.writeScheme();
String schemeXml = projectFilesystem.readFileIfItExists(schemePath).get();
System.out.println(schemeXml);
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath buildActionXpath = xpathFactory.newXPath();
XPathExpression buildActionExpr =
buildActionXpath.compile("//BuildAction//BuildableReference/@BlueprintIdentifier");
NodeList buildActionNodes = (NodeList) buildActionExpr.evaluate(scheme, XPathConstants.NODESET);
List<String> expectedOrdering = ImmutableList.of("testBundleGID");
List<String> actualOrdering = new ArrayList<>();
for (int i = 0; i < buildActionNodes.getLength(); i++) {
actualOrdering.add(buildActionNodes.item(i).getNodeValue());
}
assertThat(actualOrdering, equalTo(expectedOrdering));
XPath testActionXpath = xpathFactory.newXPath();
XPathExpression testActionExpr =
testActionXpath.compile("//TestAction//BuildableReference/@BlueprintIdentifier");
String testActionBlueprintIdentifier =
(String) testActionExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(testActionBlueprintIdentifier, equalTo("testBundleGID"));
XPath launchActionXpath = xpathFactory.newXPath();
XPathExpression launchActionExpr =
launchActionXpath.compile("//LaunchAction//BuildableReference/@BlueprintIdentifier");
String launchActionBlueprintIdentifier =
(String) launchActionExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(launchActionBlueprintIdentifier, equalTo(""));
XPath launchActionBuildConfigurationXpath = xpathFactory.newXPath();
XPathExpression launchActionBuildConfigurationExpr =
launchActionBuildConfigurationXpath.compile("//LaunchAction//@buildConfiguration");
String launchActionBuildConfigurationBlueprintIdentifier =
(String) launchActionBuildConfigurationExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(launchActionBuildConfigurationBlueprintIdentifier, equalTo("Debug"));
XPath profileActionXpath = xpathFactory.newXPath();
XPathExpression profileActionExpr =
profileActionXpath.compile("//ProfileAction//BuildableReference/@BlueprintIdentifier");
String profileActionBlueprintIdentifier =
(String) profileActionExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(profileActionBlueprintIdentifier, equalTo(""));
XPath profileActionBuildConfigurationXpath = xpathFactory.newXPath();
XPathExpression profileActionBuildConfigurationExpr =
profileActionBuildConfigurationXpath.compile("//ProfileAction//@buildConfiguration");
String profileActionBuildConfigurationBlueprintIdentifier =
(String) profileActionBuildConfigurationExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(profileActionBuildConfigurationBlueprintIdentifier, equalTo("Release"));
XPath analyzeActionBuildConfigurationXpath = xpathFactory.newXPath();
XPathExpression analyzeActionBuildConfigurationExpr =
analyzeActionBuildConfigurationXpath.compile("//AnalyzeAction//@buildConfiguration");
String analyzeActionBuildConfigurationBlueprintIdentifier =
(String) analyzeActionBuildConfigurationExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(analyzeActionBuildConfigurationBlueprintIdentifier, equalTo("Debug"));
XPath archiveActionBuildConfigurationXpath = xpathFactory.newXPath();
XPathExpression archiveActionBuildConfigurationExpr =
archiveActionBuildConfigurationXpath.compile("//ArchiveAction//@buildConfiguration");
String archiveActionBuildConfigurationBlueprintIdentifier =
(String) archiveActionBuildConfigurationExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(archiveActionBuildConfigurationBlueprintIdentifier, equalTo("Release"));
}
@Test
public void launchActionShouldNotContainRemoteRunnableWhenNotProvided() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget rootTarget =
new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* wasCreatedForAppExtension */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
Optional.empty(),
Optional.empty(),
Optional.empty(),
XCScheme.LaunchAction.LaunchStyle.AUTO,
Optional.empty(), /* watchAdapter */
Optional.empty() /* notificationPayloadFile */);
Path schemePath = schemeGenerator.writeScheme();
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath remoteRunnableLaunchActionXPath = xpathFactory.newXPath();
XPathExpression remoteRunnableLaunchActionExpr =
remoteRunnableLaunchActionXPath.compile("//LaunchAction/RemoteRunnable");
NodeList remoteRunnables =
(NodeList) remoteRunnableLaunchActionExpr.evaluate(scheme, XPathConstants.NODESET);
assertThat(remoteRunnables.getLength(), equalTo(0));
}
  /**
   * When a remote runnable path is supplied, the LaunchAction must contain a RemoteRunnable
   * element with runnableDebuggingMode 2, the SpringBoard bundle identifier, and the provided
   * remote path — and the buildable reference for the root target must be duplicated (one copy
   * inside the RemoteRunnable, one directly under the LaunchAction).
   */
  @Test
  public void launchActionShouldContainRemoteRunnableWhenProvided() throws Exception {
    ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
    PBXTarget rootTarget =
        new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
    rootTarget.setGlobalID("rootGID");
    rootTarget.setProductReference(
        new PBXFileReference(
            "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
    Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
    targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
    SchemeGenerator schemeGenerator =
        new SchemeGenerator(
            projectFilesystem,
            Optional.of(rootTarget),
            ImmutableSet.of(rootTarget),
            ImmutableSet.of(),
            ImmutableSet.of(),
            "TestScheme",
            Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
            false /* parallelizeBuild */,
            Optional.empty() /* wasCreatedForAppExtension */,
            Optional.empty() /* runnablePath */,
            Optional.of("/RemoteApp") /* remoteRunnablePath */,
            SchemeActionType.DEFAULT_CONFIG_NAMES,
            targetToProjectPathMapBuilder.build(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            XCScheme.LaunchAction.LaunchStyle.AUTO,
            Optional.empty(), /* watchAdapter */
            Optional.empty() /* notificationPayloadFile */);
    Path schemePath = schemeGenerator.writeScheme();
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
    XPathFactory xpathFactory = XPathFactory.newInstance();
    XPath remoteRunnableLaunchActionXPath = xpathFactory.newXPath();
    XPathExpression remoteRunnableLaunchActionExpr =
        remoteRunnableLaunchActionXPath.compile("//LaunchAction/RemoteRunnable");
    NodeList remoteRunnables =
        (NodeList) remoteRunnableLaunchActionExpr.evaluate(scheme, XPathConstants.NODESET);
    assertThat(remoteRunnables.getLength(), equalTo(1));
    Node remoteRunnable = remoteRunnables.item(0);
    assertThat(
        remoteRunnable.getAttributes().getNamedItem("runnableDebuggingMode").getNodeValue(),
        equalTo("2"));
    assertThat(
        remoteRunnable.getAttributes().getNamedItem("BundleIdentifier").getNodeValue(),
        equalTo("com.apple.springboard"));
    assertThat(
        remoteRunnable.getAttributes().getNamedItem("RemotePath").getNodeValue(),
        equalTo("/RemoteApp"));
    XPath buildXpath = xpathFactory.newXPath();
    XPathExpression buildExpr =
        buildXpath.compile("//LaunchAction//BuildableReference/@BlueprintIdentifier");
    NodeList buildNodes = (NodeList) buildExpr.evaluate(scheme, XPathConstants.NODESET);
    // Make sure both copies of the BuildableReference are present.
    assertThat(buildNodes.getLength(), equalTo(2));
    assertThat(buildNodes.item(0).getNodeValue(), equalTo("rootGID"));
    assertThat(buildNodes.item(1).getNodeValue(), equalTo("rootGID"));
  }
  /**
   * Verifies that a pre-scheme shell-script action configured for the LAUNCH action is
   * serialized under {@code LaunchAction/PreActions}, and that the generated
   * ExecutionAction references the root target's BuildableReference.
   */
  @Test
  public void prePostActionsSerializedWithRootBuildable() throws Exception {
    // A single static-library root target mapped to a fake .pbxproj path.
    ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
    PBXTarget rootTarget =
        new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
    rootTarget.setGlobalID("rootGID");
    rootTarget.setProductReference(
        new PBXFileReference(
            "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
    Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
    targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
    // One pre-action shell script attached to the LAUNCH scheme action.
    ImmutableMap<SchemeActionType, ImmutableMap<XCScheme.AdditionalActions, ImmutableList<String>>>
        schemeActions =
            ImmutableMap.of(
                SchemeActionType.LAUNCH,
                ImmutableMap.of(
                    XCScheme.AdditionalActions.PRE_SCHEME_ACTIONS,
                    ImmutableList.of("echo takeoff")));
    SchemeGenerator schemeGenerator =
        new SchemeGenerator(
            projectFilesystem,
            Optional.of(rootTarget),
            ImmutableSet.of(rootTarget),
            ImmutableSet.of(),
            ImmutableSet.of(),
            "TestScheme",
            // NOTE(review): "scshareddata/xcshemes" looks typo'd (other tests use
            // "xcshareddata"); harmless here since the scheme is read back from the
            // same path -- verify against the generator's expectations.
            Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
            false /* parallelizeBuild */,
            Optional.empty() /* wasCreatedForAppExtension */,
            Optional.empty() /* runnablePath */,
            Optional.empty() /* remoteRunnablePath */,
            SchemeActionType.DEFAULT_CONFIG_NAMES,
            targetToProjectPathMapBuilder.build(),
            Optional.empty(),
            Optional.empty(),
            Optional.of(schemeActions),
            XCScheme.LaunchAction.LaunchStyle.AUTO,
            Optional.empty(), /* watchAdapter */
            Optional.empty() /* notificationPayloadFile */);
    Path schemePath = schemeGenerator.writeScheme();
    // Parse the generated scheme XML back from the fake project filesystem.
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
    XPathFactory xpathFactory = XPathFactory.newInstance();
    XPath preLaunchActionXPath = xpathFactory.newXPath();
    XPathExpression preLaunchActionExpr = preLaunchActionXPath.compile("//LaunchAction/PreActions");
    NodeList preActions = (NodeList) preLaunchActionExpr.evaluate(scheme, XPathConstants.NODESET);
    assertThat(preActions.getLength(), equalTo(1));
    // The single pre-action must be a shell-script ExecutionAction carrying the
    // configured script text and the default title/shell.
    Node executionAction = preActions.item(0).getFirstChild();
    assertThat(
        executionAction.getAttributes().getNamedItem("ActionType").getNodeValue(),
        equalTo("Xcode.IDEStandardExecutionActionsCore.ExecutionActionType.ShellScriptAction"));
    Node actionContent = executionAction.getFirstChild();
    assertThat(
        actionContent.getAttributes().getNamedItem("title").getNodeValue(), equalTo("Run Script"));
    assertThat(
        actionContent.getAttributes().getNamedItem("scriptText").getNodeValue(),
        equalTo("echo takeoff"));
    assertThat(
        actionContent.getAttributes().getNamedItem("shellToInvoke").getNodeValue(),
        equalTo("/bin/bash"));
    XPath buildXpath = xpathFactory.newXPath();
    XPathExpression buildableExpr =
        buildXpath.compile(
            "//LaunchAction//PreActions//ExecutionAction//EnvironmentBuildable//BuildableReference/@BlueprintIdentifier");
    NodeList buildableNodes = (NodeList) buildableExpr.evaluate(scheme, XPathConstants.NODESET);
    // Exactly one BuildableReference is expected inside the pre-action's
    // EnvironmentBuildable, pointing at the root target.
    assertThat(buildableNodes.getLength(), equalTo(1));
    assertThat(buildableNodes.item(0).getNodeValue(), equalTo("rootGID"));
  }
@Test
public void enablingParallelizeBuild() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget rootTarget =
new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
true /* parallelizeBuild */,
Optional.empty() /* wasCreatedForAppExtension */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
Optional.empty(),
Optional.empty(),
Optional.empty(),
XCScheme.LaunchAction.LaunchStyle.AUTO,
Optional.empty(), /* watchAdapter */
Optional.empty() /* notifcationPayloadFile */);
Path schemePath = schemeGenerator.writeScheme();
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath buildActionXpath = xpathFactory.newXPath();
XPathExpression buildActionExpr = buildActionXpath.compile("//BuildAction");
NodeList buildActionNodes = (NodeList) buildActionExpr.evaluate(scheme, XPathConstants.NODESET);
assertThat(buildActionNodes.getLength(), is(1));
Node buildActionNode = buildActionNodes.item(0);
assertThat(
buildActionNode.getAttributes().getNamedItem("buildImplicitDependencies").getNodeValue(),
equalTo("YES"));
assertThat(
buildActionNode.getAttributes().getNamedItem("parallelizeBuildables").getNodeValue(),
equalTo("YES"));
}
  /**
   * Verifies that per-action environment variables configured for LAUNCH are serialized as
   * {@code LaunchAction/EnvironmentVariables/EnvironmentVariable} entries with the expected
   * key/value attributes.
   */
  @Test
  public void serializesEnvironmentVariables() throws Exception {
    // Single static-library root target mapped to a fake .pbxproj path.
    ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
    PBXTarget rootTarget =
        new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
    rootTarget.setGlobalID("rootGID");
    rootTarget.setProductReference(
        new PBXFileReference(
            "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
    Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
    targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
    // One environment variable attached to the LAUNCH scheme action.
    ImmutableMap<SchemeActionType, ImmutableMap<String, String>> environmentVariables =
        ImmutableMap.of(SchemeActionType.LAUNCH, ImmutableMap.of("ENV_VARIABLE", "IS_SET"));
    SchemeGenerator schemeGenerator =
        new SchemeGenerator(
            projectFilesystem,
            Optional.of(rootTarget),
            ImmutableSet.of(rootTarget),
            ImmutableSet.of(),
            ImmutableSet.of(),
            "TestScheme",
            Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
            true /* parallelizeBuild */,
            Optional.empty() /* wasCreatedForAppExtension */,
            Optional.empty() /* runnablePath */,
            Optional.empty() /* remoteRunnablePath */,
            SchemeActionType.DEFAULT_CONFIG_NAMES,
            targetToProjectPathMapBuilder.build(),
            Optional.of(environmentVariables),
            Optional.empty(),
            Optional.empty(),
            XCScheme.LaunchAction.LaunchStyle.AUTO,
            Optional.empty(), /* watchAdapter */
            Optional.empty() /* notificationPayloadFile */);
    Path schemePath = schemeGenerator.writeScheme();
    // Parse the written scheme back from the fake project filesystem.
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
    XPathFactory xpathFactory = XPathFactory.newInstance();
    // NOTE(review): locals are named "buildAction*" but the query actually targets the
    // LaunchAction's environment variables.
    XPath buildActionXpath = xpathFactory.newXPath();
    XPathExpression buildActionExpr =
        buildActionXpath.compile("//LaunchAction/EnvironmentVariables/EnvironmentVariable");
    NodeList envVariableList = (NodeList) buildActionExpr.evaluate(scheme, XPathConstants.NODESET);
    assertThat(envVariableList.getLength(), is(1));
    Node envVar = envVariableList.item(0);
    assertThat(envVar.getAttributes().getNamedItem("key").getNodeValue(), equalTo("ENV_VARIABLE"));
    assertThat(envVar.getAttributes().getNamedItem("value").getNodeValue(), equalTo("IS_SET"));
  }
  /**
   * The {@code wasCreatedForAppExtension="YES"} attribute must be emitted on the Scheme
   * element when the generator is constructed with {@code Optional.of(true)}.
   *
   * @throws Exception if scheme generation or XML parsing/XPath evaluation fails
   */
  @Test
  public void serializesWasCreatedForAppExtension() throws Exception {
    // Single static-library root target mapped to a fake .pbxproj path.
    ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
    PBXTarget rootTarget =
        new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
    rootTarget.setGlobalID("rootGID");
    rootTarget.setProductReference(
        new PBXFileReference(
            "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
    Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
    targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
    SchemeGenerator schemeGenerator =
        new SchemeGenerator(
            projectFilesystem,
            Optional.of(rootTarget),
            ImmutableSet.of(rootTarget),
            ImmutableSet.of(),
            ImmutableSet.of(),
            "TestScheme",
            Paths.get("_gen/Foo.xcworkspace/xcshareddata/xcshemes"),
            true /* parallelizeBuild */,
            Optional.of(true) /* wasCreatedForAppExtension */,
            Optional.empty() /* runnablePath */,
            Optional.empty() /* remoteRunnablePath */,
            SchemeActionType.DEFAULT_CONFIG_NAMES,
            targetToProjectPathMapBuilder.build(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            XCScheme.LaunchAction.LaunchStyle.AUTO,
            Optional.empty(), /* watchAdapter */
            Optional.empty() /* notificationPayloadFile */);
    Path schemePath = schemeGenerator.writeScheme();
    // Parse the written scheme and inspect the root Scheme element's attributes.
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
    XPathFactory xpathFactory = XPathFactory.newInstance();
    XPath buildActionXpath = xpathFactory.newXPath();
    XPathExpression buildActionExpr = buildActionXpath.compile("//Scheme");
    NodeList schemeElements = (NodeList) buildActionExpr.evaluate(scheme, XPathConstants.NODESET);
    assertThat(schemeElements.getLength(), is(1));
    Node schemeNode = schemeElements.item(0);
    assertThat(
        schemeNode.getAttributes().getNamedItem("wasCreatedForAppExtension").getNodeValue(),
        equalTo("YES"));
  }
  /**
   * The {@code wasCreatedForAppExtension} attribute must be omitted from the Scheme element
   * when the flag is absent or explicitly {@code false}.
   *
   * @throws Exception if scheme generation or XML parsing/XPath evaluation fails
   */
  @Test
  public void excludesWasCreatedForAppExtension() throws Exception {
    // Single static-library root target mapped to a fake .pbxproj path.
    ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
    PBXTarget rootTarget =
        new PBXNativeTarget("rootRule", AbstractPBXObjectFactory.DefaultFactory());
    rootTarget.setGlobalID("rootGID");
    rootTarget.setProductReference(
        new PBXFileReference(
            "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    rootTarget.setProductType(ProductTypes.STATIC_LIBRARY);
    Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
    targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
    // Both "absent" and "explicitly false" must behave identically.
    ImmutableList<Optional<Boolean>> testValues =
        ImmutableList.of(Optional.empty(), Optional.of(false));
    for (Optional<Boolean> wasCreatedForAppExtension : testValues) {
      SchemeGenerator schemeGenerator =
          new SchemeGenerator(
              projectFilesystem,
              Optional.of(rootTarget),
              ImmutableSet.of(rootTarget),
              ImmutableSet.of(),
              ImmutableSet.of(),
              "TestScheme",
              Paths.get("_gen/Foo.xcworkspace/xcshareddata/xcshemes"),
              true /* parallelizeBuild */,
              wasCreatedForAppExtension,
              Optional.empty() /* runnablePath */,
              Optional.empty() /* remoteRunnablePath */,
              SchemeActionType.DEFAULT_CONFIG_NAMES,
              targetToProjectPathMapBuilder.build(),
              Optional.empty(),
              Optional.empty(),
              Optional.empty(),
              XCScheme.LaunchAction.LaunchStyle.AUTO,
              Optional.empty(), /* watchAdapter */
              Optional.empty() /* notificationPayloadFile */);
      Path schemePath = schemeGenerator.writeScheme();
      // Parse the written scheme and verify the attribute is not present at all.
      DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
      DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
      Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
      XPathFactory xpathFactory = XPathFactory.newInstance();
      XPath buildActionXpath = xpathFactory.newXPath();
      XPathExpression buildActionExpr = buildActionXpath.compile("//Scheme");
      NodeList schemeElements = (NodeList) buildActionExpr.evaluate(scheme, XPathConstants.NODESET);
      assertThat(schemeElements.getLength(), is(1));
      Node schemeNode = schemeElements.item(0);
      assertNull(schemeNode.getAttributes().getNamedItem("wasCreatedForAppExtension"));
    }
  }
}
}
| apache-2.0 |
ollie314/spring-security | samples/javaconfig/chat/src/main/java/sample/data/ActiveWebSocketUser.java | 1385 | /*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sample.data;
import java.util.Calendar;
import javax.persistence.Entity;
import javax.persistence.Id;
/**
 * JPA entity recording a user with an active WebSocket session: the session id, the
 * user name, and the moment the connection was established.
 */
@Entity
public class ActiveWebSocketUser {

	/** WebSocket session id; primary key. */
	@Id
	private String id;

	private String username;

	private Calendar connectionTime;

	/** Default constructor required by JPA. */
	public ActiveWebSocketUser() {
	}

	public ActiveWebSocketUser(String id, String username, Calendar connectionTime) {
		this.id = id;
		this.username = username;
		this.connectionTime = connectionTime;
	}

	public String getUsername() {
		return username;
	}

	public void setUsername(String username) {
		this.username = username;
	}

	public Calendar getConnectionTime() {
		return connectionTime;
	}

	public void setConnectionTime(Calendar connectionTime) {
		this.connectionTime = connectionTime;
	}
}
| apache-2.0 |
dell-oss/Doradus | doradus-server/src/main/java/com/dell/doradus/olap/search/SearchResultComparer.java | 2592 | /*
* Copyright (C) 2014 Dell, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dell.doradus.olap.search;
import java.util.ArrayList;
import java.util.List;
import com.dell.doradus.olap.aggregate.mr.MFCollectorSet;
import com.dell.doradus.olap.collections.BdLongSet;
import com.dell.doradus.olap.store.CubeSearcher;
import com.dell.doradus.olap.store.IntIterator;
import com.dell.doradus.search.aggregate.AggregationGroup;
import com.dell.doradus.search.aggregate.SortOrder;
import com.dell.doradus.search.util.HeapList;
public class SearchResultComparer {
	/**
	 * Returns an iterator over up to {@code size} document numbers from {@code result},
	 * ordered according to {@code orders}.
	 *
	 * <p>Fast path: when no sort orders are given, or the whole result set already fits in
	 * {@code size}, documents are emitted in ascending document order without sorting.
	 * Otherwise the sort-field values of every matching document are collected and a
	 * bounded heap keeps only the best {@code size} entries.
	 */
	public static IntIterator sort(CubeSearcher searcher, Result result, SortOrder[] orders, int size) {
		if(orders == null || orders.length == 0 || size >= result.countSet()) {
			// No ordering needed: emit the first min(size, countSet) set positions as-is.
			int[] res = new int[Math.min(size, result.countSet())];
			int num = 0;
			for(int i = 0; i < result.size(); i++) {
				if(num >= res.length) break;
				if(!result.get(i)) continue;
				res[num++] = i;
			}
			return new IntIterator(res, 0, res.length);
		}
		// One value set per sort order; reused (cleared) for every document.
		BdLongSet[] sets = new BdLongSet[orders.length];
		for(int i = 0; i < orders.length; i++) {
			sets[i] = new BdLongSet(1024);
			sets[i].enableClearBuffer();
		}
		// The collectors extract each order's aggregation-group values for a document.
		List<AggregationGroup> aggGroups = new ArrayList<AggregationGroup>(orders.length);
		for(SortOrder order: orders) { aggGroups.add(order.getAggregationGroup()); }
		MFCollectorSet collectorSet = new MFCollectorSet(searcher, aggGroups, false);
		// Bounded heap keeping only the top `size` sort keys.
		HeapList<SortKey> heap = new HeapList<SortKey>(size);
		SortKey cur = null;
		for(int doc = 0; doc < result.size(); doc++) {
			if(!result.get(doc)) continue;
			collectorSet.collect(doc, sets);
			if(cur == null) cur = new SortKey(orders);
			cur.set(doc, sets);
			// AddEx presumably hands back a spare key (the rejected or evicted entry)
			// for reuse on the next document, avoiding per-doc allocation -- TODO confirm
			// against HeapList.AddEx.
			cur = heap.AddEx(cur);
			for(int i = 0; i < sets.length; i++) sets[i].clear();
		}
		// Heap yields the surviving keys; map them back to document numbers.
		SortKey[] keys = heap.GetValues(SortKey.class);
		int[] res = new int[keys.length];
		for(int i = 0; i < keys.length; i++) {
			res[i] = keys[i].doc();
		}
		return new IntIterator(res, 0, res.length);
	}
}
| apache-2.0 |
shyTNT/googleads-java-lib | modules/adwords_axis_utility_extension/src/main/java/com/google/api/ads/adwords/axis/utility/extension/delegates/AdParamDelegate.java | 3754 | // Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.adwords.axis.utility.extension.delegates;
import com.google.api.ads.adwords.axis.utility.extension.util.SelectorFields;
import com.google.api.ads.adwords.axis.utils.v201506.SelectorBuilder;
import com.google.api.ads.adwords.axis.v201506.cm.AdParam;
import com.google.api.ads.adwords.axis.v201506.cm.AdParamOperation;
import com.google.api.ads.adwords.axis.v201506.cm.AdParamServiceInterface;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.common.annotations.VisibleForTesting;
import java.rmi.RemoteException;
import java.util.List;
/**
* Specific AbstractGetMutateDelegate for {@link AdParam}.
*
* <p>
* Implementation is not thread-safe,
* because AdWordsSession and Apache Axis service objects are not thread-safe.
* </p>
*/
public final class AdParamDelegate extends
    AbstractGetMutateDelegate<AdParam, AdParamOperation, AdParamServiceInterface> {

  /**
   * Default Constructor.
   *
   * <p>Retrieves all available {@link SelectorFields.AdParam} fields.
   *
   * @param adWordsSession the {@code adWordsSession} to use with the delegate/service
   */
  public AdParamDelegate(AdWordsSession adWordsSession) {
    super(adWordsSession, SelectorFields.AdParam.all(), AdParam.class, AdParamOperation.class,
        AdParamServiceInterface.class);
  }

  /**
   * Default Constructor with custom service.
   *
   * <p>Visible for testing only: allows injecting a mock SOAP service.
   *
   * @param adWordsSession the {@code adWordsSession} to use with the delegate/service
   * @param service the custom service class for the SOAP service
   */
  @VisibleForTesting
  AdParamDelegate(AdWordsSession adWordsSession, AdParamServiceInterface service) {
    super(adWordsSession, SelectorFields.AdParam.all(), AdParam.class, AdParamOperation.class,
        service);
  }

  /**
   * Constructor with custom fields.
   *
   * @param adWordsSession the {@code adWordsSession} to use with the delegate/service
   * @param selectorFields for the Generic Selectors using the SelectorField class
   */
  public AdParamDelegate(AdWordsSession adWordsSession,
      List<SelectorFields.AdParam> selectorFields) {
    super(adWordsSession, selectorFields, AdParam.class, AdParamOperation.class,
        AdParamServiceInterface.class);
  }

  /**
   * Retrieves AdParams by adGroupId.
   *
   * @param adGroupId the ad group to filter on
   * @return a list of AdParams matching the adGroupId
   * @throws RemoteException for communication-related exceptions
   */
  public List<AdParam> getByAdGroupId(Long adGroupId) throws RemoteException {
    return getByField(SelectorFields.AdParam.ADGROUP_ID, adGroupId);
  }

  /**
   * Retrieves AdParams by adGroupId and criterionId.
   *
   * @param adGroupId the ad group to filter on
   * @param criterionId the criterion to filter on
   * @return a list of AdParams matching both the adGroupId and criterionId
   * @throws RemoteException for communication-related exceptions
   */
  public List<AdParam> getByAdGroupIdCriterionId(Long adGroupId, Long criterionId)
      throws RemoteException {
    // Both equals predicates are combined conjunctively by the selector.
    SelectorBuilder builder = createSelectorBuilder()
        .equals(SelectorFields.AdParam.ADGROUP_ID.getField(), String.valueOf(adGroupId))
        .equals(SelectorFields.AdParam.CRITERION_ID.getField(), String.valueOf(criterionId));
    return get(builder.build());
  }
}
| apache-2.0 |
pgfox/activemq-artemis | tests/extra-tests/src/test/java/org/apache/activemq/artemis/tests/extras/jms/ra/MDBMultipleHandlersServerDisconnectTest.java | 18149 | /*
* Copyright 2005-2014 Red Hat, Inc.
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.apache.activemq.artemis.tests.extras.jms.ra;
import javax.jms.Message;
import javax.resource.ResourceException;
import javax.resource.spi.LocalTransactionException;
import javax.resource.spi.UnavailableException;
import javax.resource.spi.endpoint.MessageEndpoint;
import javax.resource.spi.endpoint.MessageEndpointFactory;
import javax.transaction.HeuristicMixedException;
import javax.transaction.HeuristicRollbackException;
import javax.transaction.RollbackException;
import javax.transaction.Status;
import javax.transaction.SystemException;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
import javax.transaction.xa.XAResource;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.Method;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import com.arjuna.ats.arjuna.coordinator.TransactionReaper;
import com.arjuna.ats.arjuna.coordinator.TxControl;
import com.arjuna.ats.internal.jta.transaction.arjunacore.TransactionManagerImple;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.client.ClientConsumer;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientProducer;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.core.server.ServerConsumer;
import org.apache.activemq.artemis.core.server.ServerSession;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.ra.ActiveMQResourceAdapter;
import org.apache.activemq.artemis.ra.inflow.ActiveMQActivation;
import org.apache.activemq.artemis.ra.inflow.ActiveMQActivationSpec;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.tests.integration.ra.ActiveMQRATestBase;
import org.apache.activemq.artemis.utils.RandomUtil;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Simulates several messages being received over multiple instances with reconnects during the process.
 */
public class MDBMultipleHandlersServerDisconnectTest extends ActiveMQRATestBase {

   // Counts how many times the message with each "i" property value was consumed;
   // a count > 1 afterwards means a duplicate delivery, a missing key means a lost message.
   final ConcurrentHashMap<Integer, AtomicInteger> mapCounter = new ConcurrentHashMap<>();

   // Resource adapter under test; volatile because it is touched from the test thread
   // and from the failure-injection thread.
   volatile ActiveMQResourceAdapter resourceAdapter;

   // Netty locator used by the test endpoints; configured with fast retries in setUp().
   ServerLocator nettyLocator;

   // This thread will keep bugging the handlers.
   // if they behave well with XA, the test pass!
   final AtomicBoolean running = new AtomicBoolean(true);

   // Flags read by the failure-injection code; flipped off near the end of the test
   // so the system can drain cleanly.
   private volatile boolean playTXTimeouts = true;

   private volatile boolean playServerClosingSession = true;

   private volatile boolean playServerClosingConsumer = true;
   @Override
   @Before
   public void setUp() throws Exception {
      // Endpoints reconnect over Netty with short, frequent retries so connection
      // failures injected by the test are recovered quickly.
      nettyLocator = createNettyNonHALocator();
      nettyLocator.setRetryInterval(100);
      nettyLocator.setReconnectAttempts(300);
      mapCounter.clear();
      resourceAdapter = null;
      super.setUp();
      // Destination the test endpoints forward into; consumed by the test thread.
      createQueue(true, "outQueue");
      DummyTMLocator.startTM();
      running.set(true);
   }
   @Override
   @After
   public void tearDown() throws Exception {
      // Stop the standalone transaction manager started in setUp() before tearing
      // down the broker.
      DummyTMLocator.stopTM();
      super.tearDown();
   }
   /** Run the broker with persistence enabled for this test. */
   @Override
   protected boolean usePersistence() {
      return true;
   }

   /** Security is not exercised by this test. */
   @Override
   public boolean useSecurity() {
      return false;
   }
@Test
public void testReconnectMDBNoMessageLoss() throws Exception {
AddressSettings settings = new AddressSettings();
settings.setRedeliveryDelay(100);
settings.setMaxDeliveryAttempts(-1);
server.getAddressSettingsRepository().clear();
server.getAddressSettingsRepository().addMatch("#", settings);
ActiveMQResourceAdapter qResourceAdapter = newResourceAdapter();
resourceAdapter = qResourceAdapter;
resourceAdapter.setConfirmationWindowSize(-1);
resourceAdapter.setCallTimeout(1000L);
resourceAdapter.setConsumerWindowSize(1024 * 1024);
resourceAdapter.setReconnectAttempts(-1);
resourceAdapter.setRetryInterval(100L);
// qResourceAdapter.setTransactionManagerLocatorClass(DummyTMLocator.class.getName());
// qResourceAdapter.setTransactionManagerLocatorMethod("getTM");
MyBootstrapContext ctx = new MyBootstrapContext();
qResourceAdapter.setConnectorClassName(NETTY_CONNECTOR_FACTORY);
qResourceAdapter.start(ctx);
final int NUMBER_OF_SESSIONS = 10;
ActiveMQActivationSpec spec = new ActiveMQActivationSpec();
spec.setTransactionTimeout(1);
spec.setMaxSession(NUMBER_OF_SESSIONS);
spec.setSetupAttempts(-1);
spec.setSetupInterval(100L);
spec.setResourceAdapter(qResourceAdapter);
spec.setUseJNDI(false);
spec.setDestinationType("javax.jms.Queue");
spec.setDestination(MDBQUEUE);
// Some the routines would be screwed up if using the default one
Assert.assertFalse(spec.isHasBeenUpdated());
TestEndpointFactory endpointFactory = new TestEndpointFactory(true);
qResourceAdapter.endpointActivation(endpointFactory, spec);
Assert.assertEquals(1, resourceAdapter.getActivations().values().size());
ActiveMQActivation activation = resourceAdapter.getActivations().values().toArray(new ActiveMQActivation[1])[0];
final int NUMBER_OF_MESSAGES = 1000;
Thread producer = new Thread() {
@Override
public void run() {
try {
ServerLocator locator = createInVMLocator(0);
ClientSessionFactory factory = locator.createSessionFactory();
ClientSession session = factory.createSession(false, false);
ClientProducer clientProducer = session.createProducer(MDBQUEUEPREFIXED);
StringBuffer buffer = new StringBuffer();
for (int b = 0; b < 500; b++) {
buffer.append("ab");
}
for (int i = 0; i < NUMBER_OF_MESSAGES; i++) {
ClientMessage message = session.createMessage(true);
message.getBodyBuffer().writeString(buffer.toString() + i);
message.putIntProperty("i", i);
clientProducer.send(message);
if (i % 100 == 0) {
session.commit();
}
}
session.commit();
} catch (Exception e) {
e.printStackTrace();
}
}
};
producer.start();
final AtomicBoolean metaDataFailed = new AtomicBoolean(false);
Thread buggerThread = new Thread() {
@Override
public void run() {
while (running.get()) {
try {
Thread.sleep(RandomUtil.randomInterval(100, 200));
} catch (InterruptedException intex) {
intex.printStackTrace();
return;
}
List<ServerSession> serverSessions = lookupServerSessions("resource-adapter", NUMBER_OF_SESSIONS);
System.err.println("Contains " + serverSessions.size() + " RA sessions");
if (serverSessions.size() != NUMBER_OF_SESSIONS) {
System.err.println("the server was supposed to have " + NUMBER_OF_MESSAGES + " RA Sessions but it only contained accordingly to the meta-data");
metaDataFailed.set(true);
} else if (serverSessions.size() == NUMBER_OF_SESSIONS) {
// it became the same after some reconnect? which would be acceptable
metaDataFailed.set(false);
}
if (playServerClosingSession && serverSessions.size() > 0) {
int randomBother = RandomUtil.randomInterval(0, serverSessions.size() - 1);
System.out.println("bugging session " + randomBother);
ServerSession serverSession = serverSessions.get(randomBother);
if (playServerClosingConsumer && RandomUtil.randomBoolean()) {
// will play this randomly, only half of the times
for (ServerConsumer consumer : serverSession.getServerConsumers()) {
try {
// Simulating a rare race that could happen in production
// where the consumer is closed while things are still happening
consumer.close(true);
Thread.sleep(100);
} catch (Exception e) {
e.printStackTrace();
}
}
}
RemotingConnection connection = serverSession.getRemotingConnection();
connection.fail(new ActiveMQException("failed at random " + randomBother));
}
}
}
};
buggerThread.start();
ServerLocator locator = createInVMLocator(0);
ClientSessionFactory factory = locator.createSessionFactory();
ClientSession session = factory.createSession(false, false);
session.start();
ClientConsumer consumer = session.createConsumer("outQueue");
for (int i = 0; i < NUMBER_OF_MESSAGES; i++) {
ClientMessage message = consumer.receive(60000);
if (message == null) {
break;
}
if (i == NUMBER_OF_MESSAGES * 0.50) {
// This is to make sure the MDBs will survive a reboot
// and no duplications or message loss will happen because of this
System.err.println("Rebooting the MDBs at least once!");
activation.startReconnectThread("I");
}
if (i == NUMBER_OF_MESSAGES * 0.90) {
System.out.println("Disabled failures at " + i);
playTXTimeouts = false;
playServerClosingSession = false;
playServerClosingConsumer = false;
}
System.out.println("Received " + i + " messages");
doReceiveMessage(message);
if (i % 200 == 0) {
System.out.println("received " + i);
session.commit();
}
}
session.commit();
while (true) {
ClientMessage message = consumer.receiveImmediate();
if (message == null) {
break;
}
System.out.println("Received extra message " + message);
doReceiveMessage(message);
}
session.commit();
Assert.assertNull(consumer.receiveImmediate());
StringWriter writer = new StringWriter();
PrintWriter out = new PrintWriter(writer);
boolean failed = false;
for (int i = 0; i < NUMBER_OF_MESSAGES; i++) {
AtomicInteger atomicInteger = mapCounter.get(Integer.valueOf(i));
if (atomicInteger == null) {
out.println("didn't receive message with i=" + i);
failed = true;
} else if (atomicInteger.get() > 1) {
out.println("message with i=" + i + " received " + atomicInteger.get() + " times");
failed = true;
}
}
running.set(false);
buggerThread.join();
producer.join();
qResourceAdapter.stop();
session.close();
if (failed) {
for (int i = 0; i < 10; i++) {
System.out.println("----------------------------------------------------");
}
System.out.println(writer.toString());
}
Assert.assertFalse(writer.toString(), failed);
System.out.println("Received " + NUMBER_OF_MESSAGES + " messages");
Assert.assertFalse("There was meta-data failures, some sessions didn't reconnect properly", metaDataFailed.get());
}
private void doReceiveMessage(ClientMessage message) throws Exception {
Assert.assertNotNull(message);
message.acknowledge();
Integer value = message.getIntProperty("i");
AtomicInteger mapCount = new AtomicInteger(1);
mapCount = mapCounter.putIfAbsent(value, mapCount);
if (mapCount != null) {
mapCount.incrementAndGet();
}
}
   /**
    * Collects every {@link ServerSession} carrying the given meta-data key, retrying until
    * exactly {@code numberOfSessions} are present, the test is shutting down
    * ({@link #running} becomes false), or a 50 second deadline expires. The last sampled
    * list is returned even if it has the wrong size; the caller reports that as a
    * meta-data failure.
    */
   private List<ServerSession> lookupServerSessions(String parameter, int numberOfSessions) {
      long timeout = System.currentTimeMillis() + 50000;
      List<ServerSession> serverSessions = new LinkedList<>();
      do {
         // Skipped on the first pass (list starts empty). On retries, pause briefly so
         // reconnecting RA sessions get a chance to re-register their meta-data.
         if (!serverSessions.isEmpty()) {
            System.err.println("Retry on serverSessions!!! currently with " + serverSessions.size());
            serverSessions.clear();
            try {
               Thread.sleep(100);
            } catch (Exception e) {
               break;
            }
         }
         serverSessions.clear();
         // Sample the broker's current sessions, keeping those tagged with the key.
         for (ServerSession session : server.getSessions()) {
            if (session.getMetaData(parameter) != null) {
               serverSessions.add(session);
            }
         }
      }
      while (running.get() && serverSessions.size() != numberOfSessions && timeout > System.currentTimeMillis());
      System.err.println("Returning " + serverSessions.size() + " sessions");
      return serverSessions;
   }
/**
 * Endpoint factory handing out {@link TestEndpoint} instances, configured
 * with a fixed answer for {@code isDeliveryTransacted}.
 */
protected class TestEndpointFactory implements MessageEndpointFactory {

   private final boolean deliveryTransacted;

   public TestEndpointFactory(boolean deliveryTransacted) {
      this.deliveryTransacted = deliveryTransacted;
   }

   @Override
   public MessageEndpoint createEndpoint(XAResource xaResource) throws UnavailableException {
      TestEndpoint endpoint = new TestEndpoint();
      if (xaResource != null) {
         endpoint.setXAResource(xaResource);
      }
      return endpoint;
   }

   @Override
   public boolean isDeliveryTransacted(Method method) throws NoSuchMethodException {
      return deliveryTransacted;
   }
}
/**
 * Message endpoint used by the test: for every inbound message it forwards a
 * copy (carrying the same "i" property) to "outQueue", inside the transaction
 * managed around delivery by beforeDelivery()/afterDelivery().
 */
public class TestEndpoint extends DummyMessageEndpoint {

   ClientSessionFactory factory;
   // XA-capable session used to produce the outgoing copy of each message.
   ClientSession endpointSession;
   ClientProducer producer;
   // Transaction begun in beforeDelivery() and completed in afterDelivery().
   Transaction currentTX;

   public TestEndpoint() {
      super(null);
      try {
         factory = nettyLocator.createSessionFactory();
         // buggingList.add(factory);
         // createSession(xa=true, autoCommitSends=false, autoCommitAcks=false)
         // so sends only become visible when the enclosing TX commits.
         endpointSession = factory.createSession(true, false, false);
         producer = endpointSession.createProducer("outQueue");
      } catch (Throwable e) {
         throw new RuntimeException(e);
      }
   }

   @Override
   public void beforeDelivery(Method method) throws NoSuchMethodException, ResourceException {
      super.beforeDelivery(method);
      try {
         // Begin a global transaction and enlist the inbound XA resource,
         // mimicking what an application server does before dispatching.
         DummyTMLocator.tm.begin();
         currentTX = DummyTMLocator.tm.getTransaction();
         currentTX.enlistResource(xaResource);
      } catch (Throwable e) {
         throw new RuntimeException(e.getMessage(), e);
      }
   }

   @Override
   public void onMessage(Message message) {
      Integer value = 0;
      try {
         value = message.getIntProperty("i");
      } catch (Exception e) {
         // best-effort: fall back to 0 when the property is missing/unreadable
      }
      super.onMessage(message);
      try {
         // Enlist the outgoing session in the same TX, forward a copy of the
         // message, then delist so the send commits/rolls back with the TX.
         currentTX.enlistResource(endpointSession);
         ClientMessage message1 = endpointSession.createMessage(true);
         message1.putIntProperty("i", value);
         producer.send(message1);
         currentTX.delistResource(endpointSession, XAResource.TMSUCCESS);
         if (playTXTimeouts) {
            // Occasionally sleep past the TX timeout to exercise timeout handling.
            if (RandomUtil.randomInterval(0, 5) == 3) {
               Thread.sleep(2000);
            }
         }
      } catch (Exception e) {
         e.printStackTrace();
         try {
            // Force rollback so the inbound delivery is not committed half-done.
            currentTX.setRollbackOnly();
         } catch (Exception ex) {
            // ignored: the TX may already be gone; the original failure is printed above
         }
         e.printStackTrace();
         // throw new RuntimeException(e);
      }
   }

   @Override
   public void afterDelivery() throws ResourceException {
      // This is a copy & paste of what the Application server would do here
      try {
         if (currentTX.getStatus() == Status.STATUS_MARKED_ROLLBACK) {
            DummyTMLocator.tm.rollback();
         } else {
            DummyTMLocator.tm.commit();
         }
      } catch (HeuristicMixedException e) {
         throw new LocalTransactionException(e);
      } catch (SystemException e) {
         throw new LocalTransactionException(e);
      } catch (HeuristicRollbackException e) {
         throw new LocalTransactionException(e);
      } catch (RollbackException e) {
         throw new LocalTransactionException(e);
      }
      super.afterDelivery();
   }
}
/**
 * Test-local holder for a standalone transaction manager (the
 * TransactionManagerImple / TxControl / TransactionReaper classes used here),
 * letting the test start and stop a TM without a full application server.
 */
public static class DummyTMLocator {

   // Shared TM instance; null while stopped.
   public static TransactionManagerImple tm;

   /** Terminates the reaper and disables transaction control; failures are only logged. */
   public static void stopTM() {
      try {
         TransactionReaper.terminate(true);
         TxControl.disable(true);
      } catch (Exception e) {
         e.printStackTrace();
      }
      tm = null;
   }

   /** Creates a fresh TM instance and enables transaction control. */
   public static void startTM() {
      tm = new TransactionManagerImple();
      TxControl.enable();
   }

   public TransactionManager getTM() {
      return tm;
   }
}
}
| apache-2.0 |
mosoft521/spring-boot | spring-boot-actuator/src/test/java/org/springframework/boot/actuate/autoconfigure/ManagementContextConfigurationImportSelectorTests.java | 3362 | /*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.autoconfigure;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.Test;
import org.springframework.core.annotation.Order;
import org.springframework.core.type.StandardAnnotationMetadata;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Tests for {@link ManagementContextConfigurationImportSelector}.
 *
 * @author Phillip Webb
 * @author Andy Wilkinson
 */
public class ManagementContextConfigurationImportSelectorTests {

	// @Order-annotated configurations (A..C) must be sorted by order value and
	// come before the un-annotated D, regardless of registration order.
	@Test
	public void selectImportsShouldOrderResult() throws Exception {
		String[] imports = new TestManagementContextConfigurationsImportSelector(C.class,
				A.class, D.class, B.class).selectImports(
						new StandardAnnotationMetadata(EnableChildContext.class));
		assertThat(imports).containsExactly(A.class.getName(), B.class.getName(),
				C.class.getName(), D.class.getName());
	}

	// When the management context is the SAME context, CHILD-only
	// configurations must be filtered out.
	@Test
	public void selectImportsFiltersChildOnlyConfigurationWhenUsingSameContext()
			throws Exception {
		String[] imports = new TestManagementContextConfigurationsImportSelector(
				ChildOnly.class, SameOnly.class, A.class).selectImports(
						new StandardAnnotationMetadata(EnableSameContext.class));
		assertThat(imports).containsExactlyInAnyOrder(SameOnly.class.getName(),
				A.class.getName());
	}

	// Conversely, a CHILD management context must drop SAME-only configurations.
	@Test
	public void selectImportsFiltersSameOnlyConfigurationWhenUsingChildContext()
			throws Exception {
		String[] imports = new TestManagementContextConfigurationsImportSelector(
				ChildOnly.class, SameOnly.class, A.class).selectImports(
						new StandardAnnotationMetadata(EnableChildContext.class));
		assertThat(imports).containsExactlyInAnyOrder(ChildOnly.class.getName(),
				A.class.getName());
	}

	// Selector under test, with factory loading stubbed to a fixed class list
	// instead of reading spring.factories.
	private static final class TestManagementContextConfigurationsImportSelector
			extends ManagementContextConfigurationImportSelector {

		private final List<String> factoryNames;

		private TestManagementContextConfigurationsImportSelector(Class<?>... classes) {
			this.factoryNames = Stream.of(classes).map(Class::getName)
					.collect(Collectors.toList());
		}

		@Override
		protected List<String> loadFactoryNames() {
			return this.factoryNames;
		}

	}

	// Fixture configuration classes: A..C carry explicit @Order values, D has none.
	@Order(1)
	private static class A {

	}

	@Order(2)
	private static class B {

	}

	@Order(3)
	private static class C {

	}

	static class D {

	}

	// Fixtures restricted to a single management context type.
	@ManagementContextConfiguration(ManagementContextType.CHILD)
	static class ChildOnly {

	}

	@ManagementContextConfiguration(ManagementContextType.SAME)
	static class SameOnly {

	}

	// Annotation metadata sources used as selectImports() input.
	@EnableManagementContext(ManagementContextType.CHILD)
	static class EnableChildContext {

	}

	@EnableManagementContext(ManagementContextType.SAME)
	static class EnableSameContext {

	}

}
| apache-2.0 |
mdeinum/spring-boot | spring-boot-project/spring-boot-tools/spring-boot-loader/src/main/java/org/springframework/boot/loader/archive/Archive.java | 5466 | /*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.loader.archive;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.Consumer;
import java.util.jar.Manifest;
import org.springframework.boot.loader.Launcher;
/**
 * Abstraction over an archive (typically a jar file) that the Boot
 * {@code Launcher} can start. An archive exposes a URL it can be loaded from,
 * its manifest, and the archives nested inside it.
 *
 * @author Phillip Webb
 * @since 1.0.0
 * @see JarFileArchive
 */
public interface Archive extends Iterable<Archive.Entry>, AutoCloseable {

	/**
	 * Return a URL from which this archive can be loaded.
	 * @return the archive URL
	 * @throws MalformedURLException if the URL is malformed
	 */
	URL getUrl() throws MalformedURLException;

	/**
	 * Return this archive's manifest.
	 * @return the manifest
	 * @throws IOException if the manifest cannot be read
	 */
	Manifest getManifest() throws IOException;

	/**
	 * Return nested {@link Archive}s for the entries accepted by both filters. A
	 * {@code null} filter accepts every entry.
	 * @param searchFilter filter used to limit when additional sub-entry searching is
	 * required or {@code null} if all entries should be considered.
	 * @param includeFilter filter used to determine which entries should be included in
	 * the result or {@code null} if all entries should be included
	 * @return the nested archives
	 * @throws IOException on IO error
	 * @since 2.3.0
	 */
	default Iterator<Archive> getNestedArchives(EntryFilter searchFilter, EntryFilter includeFilter)
			throws IOException {
		// An entry survives only if it passes both filters (null means "match all").
		EntryFilter combinedFilter = (entry) -> {
			boolean searched = (searchFilter == null) || searchFilter.matches(entry);
			boolean included = (includeFilter == null) || includeFilter.matches(entry);
			return searched && included;
		};
		return getNestedArchives(combinedFilter).iterator();
	}

	/**
	 * Return nested {@link Archive}s for the entries accepted by the given filter.
	 * @param filter the filter used to limit entries
	 * @return nested archives
	 * @throws IOException if nested archives cannot be read
	 * @deprecated since 2.3.0 for removal in 2.5.0 in favor of
	 * {@link #getNestedArchives(EntryFilter, EntryFilter)}
	 */
	@Deprecated
	default List<Archive> getNestedArchives(EntryFilter filter) throws IOException {
		throw new IllegalStateException("Unexpected call to getNestedArchives(filter)");
	}

	/**
	 * Return a fresh iterator over this archive's entries.
	 * @deprecated since 2.3.0 for removal in 2.5.0 in favor of using
	 * {@link org.springframework.boot.loader.jar.JarFile} to access entries and
	 * {@link #getNestedArchives(EntryFilter, EntryFilter)} for accessing nested archives.
	 * @see java.lang.Iterable#iterator()
	 */
	@Deprecated
	@Override
	Iterator<Entry> iterator();

	/**
	 * Apply {@code action} to each entry in turn, stopping early only if the
	 * action throws.
	 * @deprecated since 2.3.0 for removal in 2.5.0 in favor of using
	 * {@link org.springframework.boot.loader.jar.JarFile} to access entries and
	 * {@link #getNestedArchives(EntryFilter, EntryFilter)} for accessing nested archives.
	 * @see Iterable#forEach
	 */
	@Deprecated
	@Override
	default void forEach(Consumer<? super Entry> action) {
		Objects.requireNonNull(action);
		Iterator<Entry> entries = iterator();
		while (entries.hasNext()) {
			action.accept(entries.next());
		}
	}

	/**
	 * Return a {@link Spliterator} over this archive's entries.
	 * @deprecated since 2.3.0 for removal in 2.5.0 in favor of using
	 * {@link org.springframework.boot.loader.jar.JarFile} to access entries and
	 * {@link #getNestedArchives(EntryFilter, EntryFilter)} for accessing nested archives.
	 * @see Iterable#spliterator
	 */
	@Deprecated
	@Override
	default Spliterator<Entry> spliterator() {
		return Spliterators.spliteratorUnknownSize(iterator(), 0);
	}

	/**
	 * Whether this archive has already been unpacked to the filesystem.
	 * @return if the archive is exploded
	 * @since 2.3.0
	 */
	default boolean isExploded() {
		return false;
	}

	/**
	 * Release any resources held by this {@code Archive}. The default
	 * implementation does nothing.
	 * @throws Exception if an error occurs during close processing
	 * @since 2.2.0
	 */
	@Override
	default void close() throws Exception {

	}

	/**
	 * A single entry within the archive.
	 */
	interface Entry {

		/**
		 * Whether this entry is a directory.
		 * @return if the entry is a directory
		 */
		boolean isDirectory();

		/**
		 * The entry name.
		 * @return the name of the entry
		 */
		String getName();

	}

	/**
	 * Predicate used to select {@link Entry Entries}.
	 */
	@FunctionalInterface
	interface EntryFilter {

		/**
		 * Test whether the given entry is accepted.
		 * @param entry the entry to filter
		 * @return {@code true} if the filter matches
		 */
		boolean matches(Entry entry);

	}

}
| apache-2.0 |
linkedin/cleo | src/main/java/cleo/search/SimpleTypeaheadElement.java | 2371 | /*
* Copyright (c) 2011 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package cleo.search;
/**
* SimpleTypeaheadElement
*
* @author jwu
* @since 02/05, 2011
*/
public class SimpleTypeaheadElement extends SimpleElement implements TypeaheadElement, Cloneable {
private static final long serialVersionUID = 1L;
private String line1;
private String line2;
private String line3;
private String media;
public SimpleTypeaheadElement(int id) {
super(id);
}
@Override
public void setLine1(String line) {
this.line1 = line;
}
@Override
public String getLine1() {
return line1;
}
@Override
public void setLine2(String line) {
this.line2 = line;
}
@Override
public String getLine2() {
return line2;
}
@Override
public void setLine3(String line) {
this.line3 = line;
}
@Override
public String getLine3() {
return line3;
}
@Override
public void setMedia(String media) {
this.media = media;
}
@Override
public String getMedia() {
return media;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder(super.toString());
sb.append(" line1=\"").append(getLine1()).append("\"");
sb.append(" line2=\"").append(getLine2()).append("\"");
sb.append(" line3=\"").append(getLine3()).append("\"");
sb.append(" media=\"").append(getMedia()).append("\"");
return sb.toString();
}
@Override
public Object clone() {
SimpleTypeaheadElement elem = new SimpleTypeaheadElement(getElementId());
elem.setScore(getScore());
elem.setTimestamp(getTimestamp());
elem.setTerms((String[])getTerms().clone());
elem.setLine1(getLine1());
elem.setLine2(getLine2());
elem.setLine3(getLine3());
elem.setMedia(getMedia());
return elem;
}
}
| apache-2.0 |
naver/pinpoint | hbase/hbase-schema/src/main/java/com/navercorp/pinpoint/hbase/schema/core/command/HbaseSchemaCommandManager.java | 6290 | /*
* Copyright 2018 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.hbase.schema.core.command;
import com.navercorp.pinpoint.hbase.schema.reader.InvalidHbaseSchemaException;
import com.navercorp.pinpoint.hbase.schema.reader.core.ChangeSet;
import com.navercorp.pinpoint.hbase.schema.reader.core.ChangeType;
import com.navercorp.pinpoint.hbase.schema.reader.core.TableChange;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Collects HBase schema change sets into per-table commands. Existing tables
 * (restricted to one namespace) start out as modify commands; applied change
 * sets then create new tables or mutate existing ones. Only tables actually
 * touched by a change set are reported by {@link #getCommands()} and
 * {@link #getSchemaSnapshot()}.
 *
 * @author HyunGil Jeong
 */
public class HbaseSchemaCommandManager {

    // Namespace all managed tables belong to (defaults to HBase's default namespace).
    private final String namespace;
    private final Compression.Algorithm compressionAlgorithm;
    // Tables touched by at least one applied change set.
    private final Set<TableName> affectedTables = new HashSet<>();
    // Per-table pending command; LinkedHashMap preserves registration order.
    private final Map<TableName, TableCommand> tableCommandMap = new LinkedHashMap<>();

    public HbaseSchemaCommandManager(String namespace, String compression) {
        this(namespace, compression, Collections.emptyList());
    }

    /**
     * @param namespace target namespace, or empty for the HBase default namespace
     * @param compression compression algorithm name, or empty for none
     * @param currentHtds descriptors of tables that already exist; those in the
     *        target namespace are seeded as modify commands
     */
    public HbaseSchemaCommandManager(String namespace, String compression, List<HTableDescriptor> currentHtds) {
        if (StringUtils.isEmpty(namespace)) {
            this.namespace = NamespaceDescriptor.DEFAULT_NAMESPACE_NAME_STR;
        } else {
            this.namespace = namespace;
        }
        this.compressionAlgorithm = getCompressionAlgorithm(compression);
        for (HTableDescriptor htd : filterTablesByNamespace(currentHtds)) {
            tableCommandMap.put(htd.getTableName(), new ModifyTableCommand(htd, this.compressionAlgorithm));
        }
    }

    /**
     * Resolves a compression algorithm by (case-insensitive) name.
     * Empty input means no compression.
     *
     * @throws IllegalArgumentException if the name matches no known algorithm
     */
    private Compression.Algorithm getCompressionAlgorithm(String compression) {
        if (StringUtils.isEmpty(compression)) {
            return Compression.Algorithm.NONE;
        }
        for (Compression.Algorithm compressionAlgorithm : Compression.Algorithm.values()) {
            if (compressionAlgorithm.getName().equalsIgnoreCase(compression)) {
                return compressionAlgorithm;
            }
        }
        throw new IllegalArgumentException("Unknown compression option : " + compression);
    }

    // Keeps only descriptors whose table namespace matches this manager's namespace.
    private List<HTableDescriptor> filterTablesByNamespace(List<HTableDescriptor> htds) {
        if (CollectionUtils.isEmpty(htds)) {
            return Collections.emptyList();
        }
        List<HTableDescriptor> filteredHtds = new ArrayList<>();
        for (HTableDescriptor htd : htds) {
            TableName tableName = htd.getTableName();
            String namespace = tableName.getNamespaceAsString();
            if (this.namespace.equalsIgnoreCase(namespace)) {
                filteredHtds.add(htd);
            }
        }
        return filteredHtds;
    }

    public String getNamespace() {
        return namespace;
    }

    /**
     * Applies every table change in the change set, wrapping any failure in an
     * {@link InvalidHbaseSchemaException} that names the offending change set.
     */
    public void applyChangeSet(ChangeSet changeSet) {
        Objects.requireNonNull(changeSet, "changeSet");
        List<TableChange> tableChanges = changeSet.getTableChanges();
        try {
            for (TableChange tableChange : tableChanges) {
                applyTableChange(tableChange);
            }
        } catch (Exception e) {
            throw new InvalidHbaseSchemaException("Error applying changeSet : " + changeSet.getId(), e);
        }
    }

    /**
     * Applies one table change: CREATE registers a new create command (the table
     * must not already have a command), MODIFY amends the existing command (the
     * table must be known). The table is then marked as affected.
     */
    private void applyTableChange(TableChange tableChange) {
        ChangeType changeType = tableChange.getType();
        TableName tableName = TableName.valueOf(namespace, tableChange.getName());
        switch (changeType) {
            case CREATE:
                if (tableCommandMap.containsKey(tableName)) {
                    throw new IllegalArgumentException("Cannot create an existing table : " + tableName);
                }
                TableCommand createTableCommand = new CreateTableCommand(tableName, compressionAlgorithm, tableChange.getSplitKeys());
                createTableCommand.applyConfiguration(tableChange.getTableConfiguration());
                createTableCommand.applyColumnFamilyChanges(tableChange.getColumnFamilyChanges());
                tableCommandMap.put(tableName, createTableCommand);
                break;
            case MODIFY:
                TableCommand tableCommand = tableCommandMap.get(tableName);
                if (tableCommand == null) {
                    throw new IllegalArgumentException("Cannot modify a non-existent table : " + tableName);
                }
                tableCommand.applyConfiguration(tableChange.getTableConfiguration());
                tableCommand.applyColumnFamilyChanges(tableChange.getColumnFamilyChanges());
                break;
            default:
                throw new UnsupportedOperationException("Invalid change type : " + changeType);
        }
        affectedTables.add(tableName);
    }

    /** Returns the commands for tables touched by an applied change set, in registration order. */
    public List<TableCommand> getCommands() {
        return tableCommandMap.entrySet().stream()
                .filter(e -> affectedTables.contains(e.getKey()))
                .map(Map.Entry::getValue)
                .collect(Collectors.toList());
    }

    /** Returns defensive copies of the descriptors for tables touched by an applied change set. */
    public List<HTableDescriptor> getSchemaSnapshot() {
        return tableCommandMap.entrySet().stream()
                .filter(e -> affectedTables.contains(e.getKey()))
                .map(Map.Entry::getValue)
                .map(TableCommand::getHtd)
                .map(HTableDescriptor::new)
                .collect(Collectors.toList());
    }
}
| apache-2.0 |
alexgarciac/testbiotea | src/ws/biotea/ld2rdf/rdf/model/bibo/PersonalCommunication.java | 7459 | /**
 * generated by http://RDFReactor.semweb4j.org ($Id: CodeGenerator.java 1535 2008-09-09 15:44:46Z max.at.xam.de $) on 13/01/11 08:05 PM
*/
package ws.biotea.ld2rdf.rdf.model.bibo;
import org.ontoware.aifbcommons.collection.ClosableIterator;
import org.ontoware.rdf2go.exception.ModelRuntimeException;
import org.ontoware.rdf2go.model.Model;
import org.ontoware.rdf2go.model.node.BlankNode;
import org.ontoware.rdf2go.model.node.URI;
import org.ontoware.rdf2go.model.node.impl.URIImpl;
import org.ontoware.rdfreactor.runtime.Base;
import org.ontoware.rdfreactor.runtime.ReactorResult;
/**
 * This class was generated by <a href="http://RDFReactor.semweb4j.org">RDFReactor</a> on 13/01/11 08:05 PM
 */
public class PersonalCommunication extends Event {

    /** http://purl.org/ontology/bibo/PersonalCommunication */
    @SuppressWarnings("hiding")
    public static final URI RDFS_CLASS = new URIImpl("http://purl.org/ontology/bibo/PersonalCommunication", false);

    /**
     * All property-URIs with this class as domain.
     * All properties of all super-classes are also available.
     */
    @SuppressWarnings("hiding")
    public static final URI[] MANAGED_URIS = {
    };

    // protected constructors needed for inheritance

    /**
     * Returns a Java wrapper over an RDF object, identified by URI.
     * Creating two wrappers for the same instanceURI is legal.
     * @param model RDF2GO Model implementation, see http://rdf2go.semweb4j.org
     * @param classURI URI of RDFS class
     * @param instanceIdentifier Resource that identifies this instance
     * @param write if true, the statement (this, rdf:type, TYPE) is written to the model
     *
     * [Generated from RDFReactor template rule #c1]
     */
    protected PersonalCommunication ( Model model, URI classURI, org.ontoware.rdf2go.model.node.Resource instanceIdentifier, boolean write ) {
        super(model, classURI, instanceIdentifier, write);
    }

    // public constructors

    /**
     * Returns a Java wrapper over an RDF object, identified by URI.
     * Creating two wrappers for the same instanceURI is legal.
     * @param model RDF2GO Model implementation, see http://rdf2go.ontoware.org
     * @param instanceIdentifier an RDF2Go Resource identifying this instance
     * @param write if true, the statement (this, rdf:type, TYPE) is written to the model
     *
     * [Generated from RDFReactor template rule #c2]
     */
    public PersonalCommunication ( Model model, org.ontoware.rdf2go.model.node.Resource instanceIdentifier, boolean write ) {
        super(model, RDFS_CLASS, instanceIdentifier, write);
    }

    /**
     * Returns a Java wrapper over an RDF object, identified by a URI, given as a String.
     * Creating two wrappers for the same URI is legal.
     * @param model RDF2GO Model implementation, see http://rdf2go.ontoware.org
     * @param uriString a URI given as a String
     * @param write if true, the statement (this, rdf:type, TYPE) is written to the model
     * @throws ModelRuntimeException if URI syntax is wrong
     *
     * [Generated from RDFReactor template rule #c7]
     */
    public PersonalCommunication ( Model model, String uriString, boolean write) throws ModelRuntimeException {
        super(model, RDFS_CLASS, new URIImpl(uriString,false), write);
    }

    /**
     * Returns a Java wrapper over an RDF object, identified by a blank node.
     * Creating two wrappers for the same blank node is legal.
     * @param model RDF2GO Model implementation, see http://rdf2go.ontoware.org
     * @param bnode BlankNode of this instance
     * @param write if true, the statement (this, rdf:type, TYPE) is written to the model
     *
     * [Generated from RDFReactor template rule #c8]
     */
    public PersonalCommunication ( Model model, BlankNode bnode, boolean write ) {
        super(model, RDFS_CLASS, bnode, write);
    }

    /**
     * Returns a Java wrapper over an RDF object, identified by
     * a randomly generated URI.
     * Creating two wrappers results in different URIs.
     * @param model RDF2GO Model implementation, see http://rdf2go.ontoware.org
     * @param write if true, the statement (this, rdf:type, TYPE) is written to the model
     *
     * [Generated from RDFReactor template rule #c9]
     */
    public PersonalCommunication ( Model model, boolean write ) {
        super(model, RDFS_CLASS, model.newRandomUniqueURI(), write);
    }

    ///////////////////////////////////////////////////////////////////
    // typing

    /**
     * Return an existing instance of this class in the model. No statements are written.
     * @param model an RDF2Go model
     * @param instanceResource an RDF2Go resource
     * @return an instance of PersonalCommunication or null if none exists
     *
     * [Generated from RDFReactor template rule #class0]
     */
    public static PersonalCommunication getInstance(Model model, org.ontoware.rdf2go.model.node.Resource instanceResource) {
        return Base.getInstance(model, instanceResource, PersonalCommunication.class);
    }

    /**
     * Create a new instance of this class in the model.
     * That is, create the statement (instanceResource, RDF.type, http://purl.org/ontology/bibo/PersonalCommunication).
     * @param model an RDF2Go model
     * @param instanceResource an RDF2Go resource
     *
     * [Generated from RDFReactor template rule #class1]
     */
    public static void createInstance(Model model, org.ontoware.rdf2go.model.node.Resource instanceResource) {
        Base.createInstance(model, RDFS_CLASS, instanceResource);
    }

    /**
     * @param model an RDF2Go model
     * @param instanceResource an RDF2Go resource
     * @return true if instanceResource is an instance of this class in the model
     *
     * [Generated from RDFReactor template rule #class2]
     */
    public static boolean hasInstance(Model model, org.ontoware.rdf2go.model.node.Resource instanceResource) {
        return Base.hasInstance(model, RDFS_CLASS, instanceResource);
    }

    /**
     * @param model an RDF2Go model
     * @return all instances of this class in Model 'model' as RDF resources
     *
     * [Generated from RDFReactor template rule #class3]
     */
    public static ClosableIterator<org.ontoware.rdf2go.model.node.Resource> getAllInstances(Model model) {
        return Base.getAllInstances(model, RDFS_CLASS, org.ontoware.rdf2go.model.node.Resource.class);
    }

    /**
     * @param model an RDF2Go model
     * @return all instances of this class in Model 'model' as a ReactorResult,
     * which can conveniently be converted to iterator, list or array.
     *
     * [Generated from RDFReactor template rule #class3-as]
     */
    public static ReactorResult<? extends PersonalCommunication> getAllInstances_as(Model model) {
        return Base.getAllInstances_as(model, RDFS_CLASS, PersonalCommunication.class );
    }

    /**
     * Remove rdf:type PersonalCommunication from this instance. Other triples are not affected.
     * To delete more, use deleteAllProperties
     * @param model an RDF2Go model
     * @param instanceResource an RDF2Go resource
     *
     * [Generated from RDFReactor template rule #class4]
     */
    public static void deleteInstance(Model model, org.ontoware.rdf2go.model.node.Resource instanceResource) {
        Base.deleteInstance(model, RDFS_CLASS, instanceResource);
    }

    /**
     * Delete all (this, *, *), i.e. including rdf:type
     * @param model an RDF2Go model
     * @param instanceResource an RDF2Go resource
     */
    public static void deleteAllProperties(Model model, org.ontoware.rdf2go.model.node.Resource instanceResource) {
        Base.deleteAllProperties(model, instanceResource);
    }

    ///////////////////////////////////////////////////////////////////
    // property access methods

}
skylot/jadx | jadx-gui/src/main/java/jadx/gui/ui/panel/IViewStateSupport.java | 211 | package jadx.gui.ui.panel;
import jadx.gui.ui.codearea.EditorViewState;
/**
 * Implemented by panels that can save and later restore their editor view
 * state (see {@link EditorViewState}).
 */
public interface IViewStateSupport {

	/** Captures the current state of the editor view. */
	EditorViewState getEditorViewState();

	/** Re-applies a previously captured editor view state. */
	void restoreEditorViewState(EditorViewState viewState);
}
| apache-2.0 |
grzesuav/jpf-core | src/peers/gov/nasa/jpf/vm/JPF_java_util_concurrent_atomic_AtomicLongFieldUpdater.java | 5324 | /*
* Copyright (C) 2014, United States Government, as represented by the
* Administrator of the National Aeronautics and Space Administration.
* All rights reserved.
*
* The Java Pathfinder core (jpf-core) platform is licensed under the
* Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.nasa.jpf.vm;
import gov.nasa.jpf.annotation.MJI;
import gov.nasa.jpf.vm.ClassInfo;
import gov.nasa.jpf.vm.ElementInfo;
import gov.nasa.jpf.vm.FieldInfo;
import gov.nasa.jpf.vm.MJIEnv;
/**
 * A full native peer for {@code java.util.concurrent.atomic.AtomicLongFieldUpdater}.
 *
 * Method names follow the JPF peer convention of encoding the model method's
 * JNI-mangled signature. Each accessor follows the same pattern: reject null
 * targets, then let the scheduler decide whether this shared-field access is a
 * transition boundary (reschedulesAccess/repeatInvocation) before touching the
 * field.
 */
public class JPF_java_util_concurrent_atomic_AtomicLongFieldUpdater extends AtomicFieldUpdater {

  /**
   * Peer for the updater constructor: resolves the named instance field on the
   * target class, checks it is of type long, and stores its field index in the
   * updater's 'fieldId' for later accessor calls.
   */
  @MJI
  public void $init__Ljava_lang_Class_2Ljava_lang_String_2__V (MJIEnv env, int objRef,
                                                               int tClsObjRef, int fNameRef) {
    // direct Object subclass, so we don't have to call a super ctor
    ClassInfo ci = env.getReferredClassInfo(tClsObjRef);
    String fname = env.getStringObject(fNameRef);
    FieldInfo fi = ci.getInstanceField(fname);

    ClassInfo fci = fi.getTypeClassInfo();
    if (!fci.isPrimitive() || !fci.getName().equals("long")) {
      // that's also just an approximation, but we need to check
      env.throwException("java.lang.RuntimeException", "wrong field type");
      // NOTE(review): execution continues after throwException; presumably the
      // pending exception makes the stored fieldId moot - confirm
    }

    int fidx = fi.getFieldIndex();
    env.setIntField(objRef, "fieldId", fidx);
  }

  /** Atomically sets the field to fUpdate iff it currently equals fExpect. */
  @MJI
  public boolean compareAndSet__Ljava_lang_Object_2JJ__Z (MJIEnv env, int objRef, int tRef, long fExpect, long fUpdate){
    if (tRef == MJIEnv.NULL){
      env.throwException("java.lang.NullPointerException", "AtomicFieldUpdater called on null object");
      return false;
    }

    ThreadInfo ti = env.getThreadInfo();
    ElementInfo ei = ti.getModifiableElementInfo(tRef);
    FieldInfo fi = getFieldInfo( ti.getElementInfo(objRef), ei);

    if (reschedulesAccess(ti, ei, fi)){
      // re-execute this native call after the scheduling point
      env.repeatInvocation();
      return false;
    }

    long v = ei.getLongField(fi);
    if (v == fExpect) {
      ei.setLongField(fi, fUpdate);
      return true;
    } else {
      return false;
    }
  }

  /** Same semantics as compareAndSet under the JPF model (no spurious failure). */
  @MJI
  public boolean weakCompareAndSet__Ljava_lang_Object_2JJ__Z (MJIEnv env, int objRef, int tRef, long fExpect, long fUpdate){
    return(compareAndSet__Ljava_lang_Object_2JJ__Z(env, objRef, tRef, fExpect, fUpdate));
  }

  /** Unconditionally stores fNewValue into the tracked field. */
  @MJI
  public void set__Ljava_lang_Object_2J__V (MJIEnv env, int objRef, int tRef, long fNewValue){
    if (tRef == MJIEnv.NULL){
      env.throwException("java.lang.NullPointerException", "AtomicFieldUpdater called on null object");
      return;
    }

    ThreadInfo ti = env.getThreadInfo();
    ElementInfo ei = ti.getModifiableElementInfo(tRef);
    FieldInfo fi = getFieldInfo( ti.getElementInfo(objRef), ei);

    if (reschedulesAccess(ti, ei, fi)){
      env.repeatInvocation();
      return;
    }

    ei.setLongField(fi, fNewValue);
  }

  /** Modeled identically to set(): lazy publication is not distinguished here. */
  @MJI
  public void lazySet__Ljava_lang_Object_2J__V (MJIEnv env, int objRef, int tRef, long fNewValue){
    set__Ljava_lang_Object_2J__V(env, objRef, tRef, fNewValue);
  }

  /** Reads the current value of the tracked field. */
  @MJI
  public long get__Ljava_lang_Object_2__J (MJIEnv env, int objRef, int tRef){
    if (tRef == MJIEnv.NULL){
      env.throwException("java.lang.NullPointerException", "AtomicFieldUpdater called on null object");
      return 0;
    }

    ThreadInfo ti = env.getThreadInfo();
    ElementInfo ei = ti.getElementInfo(tRef);
    FieldInfo fi = getFieldInfo( ti.getElementInfo(objRef), ei);

    if (reschedulesAccess(ti, ei, fi)){
      env.repeatInvocation();
      return 0;
    }

    return ei.getLongField(fi);
  }

  /** Stores fNewValue and returns the previous value. */
  @MJI
  public long getAndSet__Ljava_lang_Object_2J__J (MJIEnv env, int objRef, int tRef, long fNewValue){
    if (tRef == MJIEnv.NULL){
      env.throwException("java.lang.NullPointerException", "AtomicFieldUpdater called on null object");
      return 0;
    }

    ThreadInfo ti = env.getThreadInfo();
    ElementInfo ei = ti.getModifiableElementInfo(tRef);
    FieldInfo fi = getFieldInfo( ti.getElementInfo(objRef), ei);

    if (reschedulesAccess(ti, ei, fi)){
      env.repeatInvocation();
      return 0;
    }

    long result = ei.getLongField(fi);
    ei.setLongField(fi, fNewValue);
    return result;
  }

  /** Adds fDelta to the field and returns the previous value. */
  @MJI
  public long getAndAdd__Ljava_lang_Object_2J__J (MJIEnv env, int objRef, int tRef, long fDelta){
    if (tRef == MJIEnv.NULL){
      env.throwException("java.lang.NullPointerException", "AtomicFieldUpdater called on null object");
      return 0;
    }

    ThreadInfo ti = env.getThreadInfo();
    ElementInfo ei = ti.getModifiableElementInfo(tRef);
    FieldInfo fi = getFieldInfo( ti.getElementInfo(objRef), ei);

    if (reschedulesAccess(ti, ei, fi)){
      env.repeatInvocation();
      return 0;
    }

    long result = ei.getLongField(fi);
    ei.setLongField(fi, result + fDelta);
    return result;
  }
}
| apache-2.0 |
Qi4j/qi4j-sdk | extensions/cache-ehcache/src/main/java/org/apache/polygene/cache/ehcache/EhCachePoolService.java | 1137 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.cache.ehcache;
import org.apache.polygene.api.mixin.Mixins;
import org.apache.polygene.api.service.ServiceActivation;
import org.apache.polygene.spi.cache.CachePool;
/**
 * {@link CachePool} service backed by Ehcache.
 * <p>
 * This is a pure composition interface: all behavior is provided by
 * {@link EhCachePoolMixin}; {@link ServiceActivation} lets the mixin hook the
 * service activation/passivation lifecycle.
 */
@Mixins( EhCachePoolMixin.class )
public interface EhCachePoolService
    extends CachePool, ServiceActivation
{
}
| apache-2.0 |
msebire/intellij-community | java/java-psi-impl/src/com/intellij/psi/impl/source/tree/java/AnnotationParamListElement.java | 5047 | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.source.tree.java;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.tree.*;
import com.intellij.psi.tree.ChildRoleBase;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.intellij.util.CharTable;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
/**
* @author ven
*/
public class AnnotationParamListElement extends CompositeElement {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.tree.java.AnnotationParamListElement");
  // Matches only NAME_VALUE_PAIR children; used to find existing attributes
  // and to place separating commas between them.
  private static final TokenSet NAME_VALUE_PAIR_BIT_SET = TokenSet.create(JavaElementType.NAME_VALUE_PAIR);

  public AnnotationParamListElement() {
    super(JavaElementType.ANNOTATION_PARAMETER_LIST);
  }

  /**
   * Classifies a direct child of this parameter list: comma, parenthesis,
   * annotation value, or none.
   */
  @Override
  public int getChildRole(@NotNull ASTNode child) {
    IElementType i = child.getElementType();
    if (i == JavaTokenType.COMMA) {
      return ChildRole.COMMA;
    }
    else if (i == JavaTokenType.LPARENTH) {
      return ChildRole.LPARENTH;
    }
    else if (i == JavaTokenType.RPARENTH) {
      return ChildRole.RPARENTH;
    }
    // A plain member value, or a name-value pair whose first child is an
    // array initializer (i.e. the shorthand @Foo({...}) form), both count as
    // an annotation value.
    else if (ElementType.ANNOTATION_MEMBER_VALUE_BIT_SET.contains(i) ||
             (i == JavaElementType.NAME_VALUE_PAIR && child.getFirstChildNode() != null &&
              child.getFirstChildNode().getElementType() == JavaElementType.ANNOTATION_ARRAY_INITIALIZER)) {
      return ChildRole.ANNOTATION_VALUE;
    }
    else {
      return ChildRoleBase.NONE;
    }
  }

  /**
   * Finds the child playing the given role; only LPARENTH and RPARENTH are
   * supported, any other role is a programming error.
   */
  @Override
  public ASTNode findChildByRole(int role) {
    switch (role) {
      default:
        LOG.assertTrue(false);
        return null;
      case ChildRole.LPARENTH:
        return findChildByType(JavaTokenType.LPARENTH);
      case ChildRole.RPARENTH:
        return findChildByType(JavaTokenType.RPARENTH);
    }
  }

  /**
   * Inserts children; when a range of name-value pairs is added, also ensures
   * the surrounding parentheses exist, converts a sole unnamed value into the
   * explicit {@code value = ...} form, and inserts separating commas.
   */
  @Override
  public TreeElement addInternal(TreeElement first, ASTNode last, ASTNode anchor, Boolean before) {
    if (first.getElementType() == JavaElementType.NAME_VALUE_PAIR && last.getElementType() == JavaElementType.NAME_VALUE_PAIR) {
      // Create "(" if the list had none (e.g. marker annotation @Foo).
      ASTNode lparenth = findChildByType(JavaTokenType.LPARENTH);
      if (lparenth == null) {
        CharTable treeCharTab = SharedImplUtil.findCharTableByTree(this);
        LeafElement created = Factory.createSingleLeafElement(JavaTokenType.LPARENTH, "(", 0, 1, treeCharTab, getManager());
        super.addInternal(created, created, getFirstChildNode(), true);
      }
      // Create ")" symmetrically if missing.
      ASTNode rparenth = findChildByType(JavaTokenType.RPARENTH);
      if (rparenth == null) {
        CharTable treeCharTab = SharedImplUtil.findCharTableByTree(this);
        LeafElement created = Factory.createSingleLeafElement(JavaTokenType.RPARENTH, ")", 0, 1, treeCharTab, getManager());
        super.addInternal(created, created, getLastChildNode(), false);
      }
      ASTNode[] nodes = getChildren(NAME_VALUE_PAIR_BIT_SET);
      if (nodes.length == 1) {
        ASTNode node = nodes[0];
        if (node instanceof PsiNameValuePair) {
          PsiNameValuePair pair = (PsiNameValuePair)node;
          // A single unnamed attribute (@Foo(x)) must become @Foo(value = x)
          // before a second attribute can be added next to it.
          if (pair.getName() == null) {
            PsiAnnotationMemberValue value = pair.getValue();
            if (value != null) {
              try {
                PsiElementFactory factory = JavaPsiFacade.getElementFactory(getPsi().getProject());
                PsiAnnotation annotation = factory.createAnnotationFromText("@AAA(value = " + value.getText() + ")", null);
                replaceChild(node, annotation.getParameterList().getAttributes()[0].getNode());
              }
              catch (IncorrectOperationException e) {
                LOG.error(e);
              }
            }
          }
        }
      }
      // No explicit anchor: append just inside ")" (before==true) or right
      // after "(" (before==false).
      if (anchor == null && before != null) {
        anchor = findChildByType(before ? JavaTokenType.RPARENTH : JavaTokenType.LPARENTH);
      }
      TreeElement firstAdded = super.addInternal(first, last, anchor, before);
      JavaSourceUtil.addSeparatingComma(this, first, NAME_VALUE_PAIR_BIT_SET);
      return firstAdded;
    }
    return super.addInternal(first, last, anchor, before);
  }

  /**
   * Deletes a child; removing a name-value pair also removes the comma that
   * separated it from its neighbor.
   */
  @Override
  public void deleteChildInternal(@NotNull ASTNode child) {
    if (child.getElementType() == JavaElementType.NAME_VALUE_PAIR) {
      JavaSourceUtil.deleteSeparatingComma(this, child);
    }
    super.deleteChildInternal(child);
  }
}
| apache-2.0 |
rgoldberg/guava | guava-gwt/test/com/google/common/collect/TreeTraverserTest_gwt.java | 1555 | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
// GWT client-side harness: each test method instantiates the JVM test class
// TreeTraverserTest and delegates to the method of the same name, so the same
// assertions run compiled to JavaScript. NOTE(review): this file looks
// machine-generated from TreeTraverserTest — edit the source test, not this
// wrapper.
public class TreeTraverserTest_gwt extends com.google.gwt.junit.client.GWTTestCase {
  // GWT module whose compiled output hosts these tests.
  @Override public String getModuleName() {
    return "com.google.common.collect.testModule";
  }

  public void testBreadthOrder() throws Exception {
    com.google.common.collect.TreeTraverserTest testCase = new com.google.common.collect.TreeTraverserTest();
    testCase.testBreadthOrder();
  }

  public void testPostOrder() throws Exception {
    com.google.common.collect.TreeTraverserTest testCase = new com.google.common.collect.TreeTraverserTest();
    testCase.testPostOrder();
  }

  public void testPreOrder() throws Exception {
    com.google.common.collect.TreeTraverserTest testCase = new com.google.common.collect.TreeTraverserTest();
    testCase.testPreOrder();
  }

  public void testUsing() throws Exception {
    com.google.common.collect.TreeTraverserTest testCase = new com.google.common.collect.TreeTraverserTest();
    testCase.testUsing();
  }
}
| apache-2.0 |
goodwinnk/intellij-community | java/java-impl/src/com/intellij/codeInsight/intention/impl/InlineStreamMapAction.java | 12804 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.intention.impl;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.util.LambdaRefactoringUtil;
import com.intellij.util.IncorrectOperationException;
import com.siyeh.ig.psiutils.CommentTracker;
import com.siyeh.ig.psiutils.ExpressionUtils;
import com.siyeh.ig.psiutils.ParenthesesUtils;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Set;
/**
 * Intention action that merges a stream {@code map}-style call into the next
 * call in the chain, e.g. {@code .map(f).map(g)} -> {@code .map(f.andThen(g))}
 * expressed by inlining {@code f}'s body into {@code g}'s lambda.
 */
public class InlineStreamMapAction extends PsiElementBaseIntentionAction {
  private static final Logger LOG = Logger.getInstance(InlineStreamMapAction.class.getName());
  // Calls that can be inlined into their successor (the "previous" call).
  private static final Set<String> MAP_METHODS =
    StreamEx.of("map", "mapToInt", "mapToLong", "mapToDouble", "mapToObj", "boxed", "asLongStream", "asDoubleStream").toSet();
  // Calls that can absorb a preceding map-style call (the "next" call).
  public static final Set<String> NEXT_METHODS = StreamEx
    .of("flatMap", "flatMapToInt", "flatMapToLong", "flatMapToDouble", "forEach", "forEachOrdered", "anyMatch", "noneMatch", "allMatch")
    .append(MAP_METHODS).toSet();

  /**
   * Available when the caret is on the name of a map-style call that is
   * directly followed by a mergeable call; also sets the intention text.
   */
  @Override
  public boolean isAvailable(@NotNull Project project, Editor editor, @NotNull final PsiElement element) {
    if (!(element instanceof PsiIdentifier)) return false;
    final PsiElement parent = element.getParent();
    if (!(parent instanceof PsiReferenceExpression)) return false;
    final PsiElement gParent = parent.getParent();
    if (!(gParent instanceof PsiMethodCallExpression)) return false;
    PsiMethodCallExpression curCall = (PsiMethodCallExpression)gParent;
    if (!isMapCall(curCall)) return false;
    PsiMethodCallExpression nextCall = getNextExpressionToMerge(curCall);
    if(nextCall == null) return false;
    // Argument-less calls (boxed/asLongStream/...) read as "merge", calls
    // with lambdas read as "inline".
    String key = curCall.getArgumentList().isEmpty() || nextCall.getArgumentList().isEmpty() ?
                 "intention.inline.map.merge.text" : "intention.inline.map.inline.text";
    setText(CodeInsightBundle.message(key, element.getText(), nextCall.getMethodExpression().getReferenceName()));
    return true;
  }

  /**
   * True if the call is a MAP_METHODS member on a java.util.stream BaseStream,
   * with either no argument (boxed/as*Stream) or a single lambda/method-ref
   * argument suitable for inlining.
   */
  private static boolean isMapCall(@NotNull PsiMethodCallExpression methodCallExpression) {
    String name = methodCallExpression.getMethodExpression().getReferenceName();
    if (name == null || !MAP_METHODS.contains(name)) return false;
    final PsiExpressionList argumentList = methodCallExpression.getArgumentList();
    final PsiExpression[] expressions = argumentList.getExpressions();
    if (!name.startsWith("map") && expressions.length == 0) return true;
    if (expressions.length != 1) return false;
    if (!StreamRefactoringUtil.isRefactoringCandidate(expressions[0], true)) return false;
    final PsiMethod method = methodCallExpression.resolveMethod();
    if (method == null) return false;
    final PsiClass containingClass = method.getContainingClass();
    return InheritanceUtil.isInheritor(containingClass, CommonClassNames.JAVA_UTIL_STREAM_BASE_STREAM);
  }

  /**
   * Returns the call chained directly after the given map call if the two can
   * be merged into a single call (see {@link #translateName}), otherwise null.
   */
  @Nullable
  private static PsiMethodCallExpression getNextExpressionToMerge(PsiMethodCallExpression methodCallExpression) {
    PsiMethodCallExpression nextCall = ExpressionUtils.getCallForQualifier(methodCallExpression);
    if (nextCall == null) return null;
    String nextName = nextCall.getMethodExpression().getReferenceName();
    if (nextName == null || !NEXT_METHODS.contains(nextName) || translateName(methodCallExpression, nextCall) == null) return null;
    PsiExpressionList argumentList = (nextCall).getArgumentList();
    PsiExpression[] expressions = argumentList.getExpressions();
    if(expressions.length == 0) {
      // Only the no-arg conversion calls can act as the absorbing call.
      if (!nextName.equals("boxed") && !nextName.equals("asLongStream") && !nextName.equals("asDoubleStream")) return null;
      return nextCall;
    }
    if (expressions.length != 1 || !StreamRefactoringUtil.isRefactoringCandidate(expressions[0], false)) return null;
    return nextCall;
  }

  /**
   * Generate name of joint method call which combines two given calls
   *
   * @param prevCall previous call (assumed to be in MAP_METHODS)
   * @param nextCall next call (assumed to be in NEXT_METHODS)
   * @return a name of the resulting method
   */
  @Nullable
  private static String translateName(@NotNull PsiMethodCallExpression prevCall, @NotNull PsiMethodCallExpression nextCall) {
    PsiMethod nextMethod = nextCall.resolveMethod();
    if (nextMethod == null) return null;
    String nextName = nextMethod.getName();
    PsiMethod method = prevCall.resolveMethod();
    if (method == null) return null;
    PsiClass prevClass = method.getContainingClass();
    if (prevClass == null) return null;
    String prevClassName = prevClass.getQualifiedName();
    if (prevClassName == null) return null;
    String prevName = method.getName();
    // Terminal operations keep their own name.
    if (nextName.endsWith("Match") || nextName.startsWith("forEach")) return nextName;
    if (nextName.equals("map")) {
      return translateMap(prevName);
    }
    if (prevName.equals("map")) {
      return translateMap(nextName);
    }
    if(MAP_METHODS.contains(nextName)) {
      // Name is determined by the stream type the combined call must return.
      PsiType type = nextMethod.getReturnType();
      if(!(type instanceof PsiClassType)) return null;
      PsiClass nextClass = ((PsiClassType)type).resolve();
      if(nextClass == null) return null;
      String nextClassName = nextClass.getQualifiedName();
      if(nextClassName == null) return null;
      if(prevClassName.equals(nextClassName)) return "map";
      switch(nextClassName) {
        case CommonClassNames.JAVA_UTIL_STREAM_INT_STREAM:
          return "mapToInt";
        case CommonClassNames.JAVA_UTIL_STREAM_LONG_STREAM:
          return "mapToLong";
        case CommonClassNames.JAVA_UTIL_STREAM_DOUBLE_STREAM:
          return "mapToDouble";
        case CommonClassNames.JAVA_UTIL_STREAM_STREAM:
          return "mapToObj";
        default:
          return null;
      }
    }
    if(nextName.equals("flatMap") && prevClassName.equals(CommonClassNames.JAVA_UTIL_STREAM_STREAM)) {
      return mapToFlatMap(prevName);
    }
    return null;
  }

  /** Maps a map* method name to the flatMap* variant of the same element type. */
  @Contract(pure = true)
  @Nullable
  private static String mapToFlatMap(String mapMethod) {
    switch (mapMethod) {
      case "map":
        return "flatMap";
      case "mapToInt":
        return "flatMapToInt";
      case "mapToLong":
        return "flatMapToLong";
      case "mapToDouble":
        return "flatMapToDouble";
    }
    // Something unsupported passed: ignore
    return null;
  }

  /** Maps the no-arg conversion calls to their explicit map* equivalent. */
  @Contract(pure = true)
  @NotNull
  private static String translateMap(String nextMethod) {
    switch (nextMethod) {
      case "boxed":
        return "mapToObj";
      case "asLongStream":
        return "mapToLong";
      case "asDoubleStream":
        return "mapToDouble";
      default:
        return nextMethod;
    }
  }

  @Override
  @NotNull
  public String getFamilyName() {
    return CodeInsightBundle.message("intention.inline.map.family");
  }

  /**
   * Performs the merge: substitutes the previous lambda's body for every use
   * of the next lambda's parameter, renames the next call to the combined
   * name, and drops the previous call from the chain.
   */
  @Override
  public void invoke(@NotNull Project project, Editor editor, @NotNull PsiElement element) throws IncorrectOperationException {
    PsiMethodCallExpression mapCall = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class);
    if(mapCall == null) return;
    PsiMethodCallExpression nextCall = getNextExpressionToMerge(mapCall);
    if(nextCall == null) return;
    PsiReferenceExpression nextRef = nextCall.getMethodExpression();
    PsiExpression nextQualifier = nextRef.getQualifierExpression();
    if(nextQualifier == null) return;
    String newName = translateName(mapCall, nextCall);
    if(newName == null) return;
    PsiLambdaExpression previousLambda = getLambda(mapCall);
    LOG.assertTrue(previousLambda != null);
    PsiExpression previousBody = LambdaUtil.extractSingleExpressionFromBody(previousLambda.getBody());
    LOG.assertTrue(previousBody != null);
    PsiLambdaExpression lambda = getLambda(nextCall);
    LOG.assertTrue(lambda != null);
    CommentTracker ct = new CommentTracker();
    // getLambda may have synthesized a lambda (for boxed/as*Stream or a
    // method reference); attach it to the call before editing it.
    if(!lambda.isPhysical()) {
      lambda = (PsiLambdaExpression)nextCall.getArgumentList().add(lambda);
    }
    PsiElement body = lambda.getBody();
    LOG.assertTrue(body != null);
    ct.markUnchanged(body);
    PsiParameter[] nextParameters = lambda.getParameterList().getParameters();
    LOG.assertTrue(nextParameters.length == 1);
    PsiParameter[] prevParameters = previousLambda.getParameterList().getParameters();
    LOG.assertTrue(prevParameters.length == 1);
    PsiElementFactory factory = JavaPsiFacade.getElementFactory(project);
    // Replace every use of the next lambda's parameter with the previous
    // lambda's body, parenthesizing where operator precedence requires it.
    for(PsiReference ref : ReferencesSearch.search(nextParameters[0], new LocalSearchScope(body)).findAll()) {
      PsiElement e = ref.getElement();
      PsiExpression replacement = ct.markUnchanged(previousBody);
      if (e.getParent() instanceof PsiExpression &&
          ParenthesesUtils.areParenthesesNeeded(previousBody, (PsiExpression)e.getParent(), false)) {
        replacement = factory.createExpressionFromText("(a)", e);
        PsiExpression parenthesized = ((PsiParenthesizedExpression)replacement).getExpression();
        LOG.assertTrue(parenthesized != null);
        parenthesized.replace(previousBody);
      }
      ct.replace(e, replacement);
    }
    // The merged lambda takes the previous lambda's parameter.
    ct.replace(nextParameters[0], prevParameters[0]);
    ExpressionUtils.bindReferenceTo(nextRef, newName);
    // Re-qualify the next call with whatever the previous call was called on,
    // removing the previous call from the chain.
    PsiExpression prevQualifier = mapCall.getMethodExpression().getQualifierExpression();
    if(prevQualifier == null) {
      ct.deleteAndRestoreComments(nextQualifier);
    } else {
      ct.replaceAndRestoreComments(nextQualifier, prevQualifier);
    }
    CodeStyleManager.getInstance(project).reformat(lambda);
  }

  /**
   * Returns the lambda argument of the call, converting a method reference to
   * a lambda, or synthesizing an equivalent lambda for the argument-less
   * conversion calls (boxed, asLongStream, asDoubleStream).
   */
  @Nullable
  private static PsiLambdaExpression getLambda(PsiMethodCallExpression call) {
    PsiExpression[] expressions = call.getArgumentList().getExpressions();
    if(expressions.length == 1) {
      PsiExpression expression = expressions[0];
      if(expression instanceof PsiLambdaExpression) return (PsiLambdaExpression)expression;
      if(expression instanceof PsiMethodReferenceExpression) {
        return LambdaRefactoringUtil.convertMethodReferenceToLambda((PsiMethodReferenceExpression)expression, false, true);
      }
      return null;
    }
    if(expressions.length != 0) return null;
    PsiMethod method = call.resolveMethod();
    if(method == null) return null;
    PsiClass containingClass = method.getContainingClass();
    if(containingClass == null) return null;
    String className = containingClass.getQualifiedName();
    if(className == null) return null;
    String varName;
    String type;
    // Pick a parameter name/boxed type from the primitive stream the no-arg
    // call belongs to.
    switch (className) {
      case CommonClassNames.JAVA_UTIL_STREAM_INT_STREAM:
        varName = "i";
        type = CommonClassNames.JAVA_LANG_INTEGER;
        break;
      case CommonClassNames.JAVA_UTIL_STREAM_LONG_STREAM:
        varName = "l";
        type = CommonClassNames.JAVA_LANG_LONG;
        break;
      case CommonClassNames.JAVA_UTIL_STREAM_DOUBLE_STREAM:
        varName = "d";
        type = CommonClassNames.JAVA_LANG_DOUBLE;
        break;
      default:
        return null;
    }
    varName = JavaCodeStyleManager.getInstance(call.getProject()).suggestUniqueVariableName(varName, call, true);
    String expression;
    if("boxed".equals(method.getName())) {
      expression = varName+" -> ("+type+")"+varName;
    } else if("asLongStream".equals(method.getName())) {
      expression = varName+" -> (long)"+varName;
    } else if("asDoubleStream".equals(method.getName())) {
      expression = varName+" -> (double)"+varName;
    } else return null;
    PsiElementFactory factory = JavaPsiFacade.getElementFactory(call.getProject());
    return (PsiLambdaExpression)factory.createExpressionFromText(expression, call);
  }
}
| apache-2.0 |
AndrewKhitrin/dbeaver | plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/actions/datasource/DataSourceTransactionModeContributor.java | 4917 | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.actions.datasource;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IContributionItem;
import org.eclipse.jface.action.Separator;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.menus.CommandContributionItem;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.core.CoreCommands;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.exec.DBCException;
import org.jkiss.dbeaver.model.exec.DBCTransactionManager;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.ui.ActionUtils;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.actions.AbstractDataSourceHandler;
import org.jkiss.utils.CommonUtils;
import java.util.List;
/**
 * Populates the data-source menu with transaction-mode items: an auto-commit
 * toggle plus one radio item per transaction isolation level supported by the
 * active editor's data source.
 */
public class DataSourceTransactionModeContributor extends DataSourceMenuContributor
{
    private static final Log log = Log.getLog(DataSourceTransactionModeContributor.class);

    @Override
    protected void fillContributionItems(final List<IContributionItem> menuItems)
    {
        IWorkbenchWindow window = UIUtils.getActiveWorkbenchWindow();
        if (window == null) {
            return;
        }
        // Resolve the data source of the currently active editor; nothing to
        // contribute if there is none (or it is not connected).
        IEditorPart activePart = window.getActivePage().getActiveEditor();
        DBPDataSourceContainer container = AbstractDataSourceHandler.getDataSourceContainer(activePart);
        DBPDataSource dataSource = null;
        if (container != null) {
            dataSource = container.getDataSource();
        }
        if (dataSource == null) {
            return;
        }
        final DBPDataSourceInfo dsInfo = dataSource.getInfo();
        DBCTransactionManager txnManager = DBUtils.getTransactionManager(dataSource.getDefaultInstance().getDefaultContext(false));
        if (txnManager != null) {
            // Auto-commit toggle (checkbox style).
            menuItems.add(ActionUtils.makeCommandContribution(
                window,
                CoreCommands.CMD_TOGGLE_AUTOCOMMIT,
                CommandContributionItem.STYLE_CHECK));
            menuItems.add(new Separator());
            // Transactions
            DBPTransactionIsolation txnLevelCurrent = null;
            try {
                txnLevelCurrent = txnManager.getTransactionIsolation();
            } catch (DBCException ex) {
                // Non-fatal: items are still shown, just without a checked one.
                log.warn("Can't determine current transaction isolation level", ex);
            }
            for (DBPTransactionIsolation txi : CommonUtils.safeCollection(dsInfo.getSupportedTransactionsIsolation())) {
                if (!txi.isEnabled()) {
                    continue;
                }
                menuItems.add(ActionUtils.makeActionContribution(
                    new TransactionIsolationAction(dataSource, txi, txi.equals(txnLevelCurrent)),
                    true));
            }
        }
    }

    /**
     * Radio menu action that switches the data source's default transaction
     * isolation level and persists the choice in the connection configuration.
     */
    private static class TransactionIsolationAction extends Action
    {
        private final DBPDataSource dataSource;
        private final DBPTransactionIsolation level;
        // Whether this level is the data source's current one at menu-build time.
        private final boolean checked;

        public TransactionIsolationAction(DBPDataSource dataSource, DBPTransactionIsolation level, boolean checked)
        {
            this.dataSource = dataSource;
            this.level = level;
            this.checked = checked;
        }

        @Override
        public int getStyle()
        {
            return AS_RADIO_BUTTON;
        }

        @Override
        public boolean isChecked()
        {
            return checked;
        }

        @Override
        public String getText()
        {
            return level.getTitle();
        }

        @Override
        public void run()
        {
            try {
                dataSource.getContainer().setDefaultTransactionsIsolation(level);
            } catch (DBException e) {
                DBWorkbench.getPlatformUI().showError(
                    "Transactions Isolation",
                    "Can't set transaction isolation level to '" + level + "'",
                    e);
                return;
            }
            // Persist only after the level was applied successfully.
            dataSource.getContainer().persistConfiguration();
        }
    }
}
igniterealtime/Smack | smack-experimental/src/main/java/org/jivesoftware/smackx/hoxt/packet/HttpOverXmppReq.java | 5953 | /**
*
* Copyright 2014 Andriy Tsykholyas, 2015 Florian Schmaus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.hoxt.packet;
/**
* Represents Req IQ packet.
*
* @author Andriy Tsykholyas
* @see <a href="http://xmpp.org/extensions/xep-0332.html">XEP-0332: HTTP over XMPP transport</a>
*/
public final class HttpOverXmppReq extends AbstractHttpOverXmpp {

    public static final String ELEMENT = "req";

    // Instances are created only through the Builder, which validates the
    // mandatory method and resource attributes.
    private HttpOverXmppReq(Builder builder) {
        super(ELEMENT, builder);
        this.method = builder.method;
        this.resource = builder.resource;
        this.maxChunkSize = builder.maxChunkSize;
        this.ibb = builder.ibb;
        this.jingle = builder.jingle;
        this.sipub = builder.sipub;
        // Per XEP-0332 a <req/> is carried in an IQ of type "set".
        setType(Type.set);
    }

    private final HttpMethod method;     // HTTP method (GET, POST, ...); required
    private final String resource;       // HTTP resource path; required
    private final int maxChunkSize;      // -1 means "not set" (attribute omitted)
    private final boolean sipub;         // transport hints; serialized only when false
    private final boolean ibb;
    private final boolean jingle;

    // Serializes the <req/> attributes; boolean transport flags default to
    // true and are therefore only emitted when false, maxChunkSize only when
    // it was explicitly set.
    @Override
    protected IQChildElementXmlStringBuilder getIQHoxtChildElementBuilder(IQChildElementXmlStringBuilder builder) {
        builder.attribute("method", method);
        builder.attribute("resource", resource);
        builder.attribute("version", getVersion());
        builder.optIntAttribute("maxChunkSize", maxChunkSize);
        builder.optBooleanAttributeDefaultTrue("sipub", sipub);
        builder.optBooleanAttributeDefaultTrue("ibb", ibb);
        builder.optBooleanAttributeDefaultTrue("jingle", jingle);
        builder.rightAngleBracket();
        return builder;
    }

    /**
     * Returns method attribute.
     *
     * @return method attribute
     */
    public HttpMethod getMethod() {
        return method;
    }

    /**
     * Returns resource attribute.
     *
     * @return resource attribute
     */
    public String getResource() {
        return resource;
    }

    /**
     * Returns maxChunkSize attribute, or -1 if it was never set.
     *
     * @return maxChunkSize attribute
     */
    public int getMaxChunkSize() {
        return maxChunkSize;
    }

    /**
     * Returns sipub attribute.
     *
     * @return sipub attribute
     */
    public boolean isSipub() {
        return sipub;
    }

    /**
     * Returns ibb attribute.
     *
     * @return ibb attribute
     */
    public boolean isIbb() {
        return ibb;
    }

    /**
     * Returns jingle attribute.
     *
     * @return jingle attribute
     */
    public boolean isJingle() {
        return jingle;
    }

    public static Builder builder() {
        return new Builder();
    }

    /**
     * A configuration builder for HttpOverXmppReq. Use {@link HttpOverXmppReq#builder()} to obtain a new instance and
     * {@link #build} to build the configuration.
     */
    public static final class Builder extends AbstractHttpOverXmpp.Builder<Builder, HttpOverXmppReq> {
        private HttpMethod method;
        private String resource;
        // Defaults mirror the XEP-0332 schema: transport flags default to
        // true, maxChunkSize is unset (-1).
        private int maxChunkSize = -1;
        private boolean sipub = true;
        private boolean ibb = true;
        private boolean jingle = true;

        private Builder() {
        }

        /**
         * Sets method attribute.
         *
         * @param method attribute
         *
         * @return the builder
         */
        public Builder setMethod(HttpMethod method) {
            this.method = method;
            return this;
        }

        /**
         * Sets resource attribute.
         *
         * @param resource attribute
         *
         * @return the builder
         */
        public Builder setResource(String resource) {
            this.resource = resource;
            return this;
        }

        /**
         * Sets jingle attribute.
         *
         * @param jingle jingle attribute
         *
         * @return the builder
         */
        public Builder setJingle(boolean jingle) {
            this.jingle = jingle;
            return this;
        }

        /**
         * Sets ibb attribute.
         *
         * @param ibb ibb attribute
         *
         * @return the builder
         */
        public Builder setIbb(boolean ibb) {
            this.ibb = ibb;
            return this;
        }

        /**
         * Sets sipub attribute.
         *
         * @param sipub sipub attribute
         *
         * @return the builder
         */
        public Builder setSipub(boolean sipub) {
            this.sipub = sipub;
            return this;
        }

        /**
         * Sets maxChunkSize attribute.
         *
         * @param maxChunkSize maxChunkSize attribute, must be within [256, 65536]
         *
         * @return the builder
         */
        public Builder setMaxChunkSize(int maxChunkSize) {
            if (maxChunkSize < 256 || maxChunkSize > 65536) {
                throw new IllegalArgumentException("maxChunkSize must be within [256, 65536]");
            }
            this.maxChunkSize = maxChunkSize;
            return this;
        }

        @Override
        public HttpOverXmppReq build() {
            // method and resource are mandatory attributes of <req/>.
            if (method == null) {
                throw new IllegalArgumentException("Method cannot be null");
            }
            if (resource == null) {
                throw new IllegalArgumentException("Resource cannot be null");
            }
            return new HttpOverXmppReq(this);
        }

        @Override
        protected Builder getThis() {
            return this;
        }
    }
}
| apache-2.0 |
gfyoung/elasticsearch | server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java | 11933 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing.allocation.decider;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.node.DiscoveryNodeFilters;
import org.elasticsearch.cluster.routing.RecoverySource;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import java.util.EnumSet;
import java.util.Map;
import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.IP_VALIDATOR;
import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND;
import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR;
/**
* This {@link AllocationDecider} control shard allocation by include and
* exclude filters via dynamic cluster and index routing settings.
* <p>
* This filter is used to make explicit decision on which nodes certain shard
* can / should be allocated. The decision if a shard can be allocated, must not
* be allocated or should be allocated is based on either cluster wide dynamic
* settings ({@code cluster.routing.allocation.*}) or index specific dynamic
* settings ({@code index.routing.allocation.*}). All of those settings can be
* changed at runtime via the cluster or the index update settings API.
* </p>
* Note: Cluster settings are applied first and will override index specific
* settings such that if a shard can be allocated according to the index routing
* settings it wont be allocated on a node if the cluster specific settings
* would disallow the allocation. Filters are applied in the following order:
* <ol>
* <li>{@code required} - filters required allocations.
* If any {@code required} filters are set the allocation is denied if the index is <b>not</b> in the set of {@code required} to allocate
* on the filtered node</li>
* <li>{@code include} - filters "allowed" allocations.
* If any {@code include} filters are set the allocation is denied if the index is <b>not</b> in the set of {@code include} filters for
* the filtered node</li>
* <li>{@code exclude} - filters "prohibited" allocations.
* If any {@code exclude} filters are set the allocation is denied if the index is in the set of {@code exclude} filters for the
* filtered node</li>
* </ol>
*/
public class FilterAllocationDecider extends AllocationDecider {
    public static final String NAME = "filter";

    // Prefixes for the cluster-wide dynamic filter settings; the attribute
    // name follows the prefix (e.g. cluster.routing.allocation.require._ip).
    private static final String CLUSTER_ROUTING_REQUIRE_GROUP_PREFIX = "cluster.routing.allocation.require";
    private static final String CLUSTER_ROUTING_INCLUDE_GROUP_PREFIX = "cluster.routing.allocation.include";
    private static final String CLUSTER_ROUTING_EXCLUDE_GROUP_PREFIX = "cluster.routing.allocation.exclude";
    // Dynamic affix settings; IP_VALIDATOR rejects malformed values for the
    // IP-based attribute keys.
    public static final Setting.AffixSetting<String> CLUSTER_ROUTING_REQUIRE_GROUP_SETTING =
        Setting.prefixKeySetting(CLUSTER_ROUTING_REQUIRE_GROUP_PREFIX + ".", (key) ->
            Setting.simpleString(key, (value, map) -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.NodeScope));
    public static final Setting.AffixSetting<String> CLUSTER_ROUTING_INCLUDE_GROUP_SETTING =
        Setting.prefixKeySetting(CLUSTER_ROUTING_INCLUDE_GROUP_PREFIX + ".", (key) ->
            Setting.simpleString(key, (value, map) -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.NodeScope));
    public static final Setting.AffixSetting<String>CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING =
        Setting.prefixKeySetting(CLUSTER_ROUTING_EXCLUDE_GROUP_PREFIX + ".", (key) ->
            Setting.simpleString(key, (value, map) -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.NodeScope));

    /**
     * The set of {@link RecoverySource.Type} values for which the
     * {@link IndexMetaData#INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING} should apply.
     * Note that we do not include the {@link RecoverySource.Type#SNAPSHOT} type here
     * because if the snapshot is restored to a different cluster that does not contain
     * the initial recovery node id, or to the same cluster where the initial recovery node
     * id has been decommissioned, then the primary shards will never be allocated.
     */
    static EnumSet<RecoverySource.Type> INITIAL_RECOVERY_TYPES =
        EnumSet.of(RecoverySource.Type.EMPTY_STORE, RecoverySource.Type.LOCAL_SHARDS);

    // Current cluster-level filters; volatile because they are replaced by
    // settings-update callbacks and read on allocation threads.
    private volatile DiscoveryNodeFilters clusterRequireFilters;
    private volatile DiscoveryNodeFilters clusterIncludeFilters;
    private volatile DiscoveryNodeFilters clusterExcludeFilters;
public FilterAllocationDecider(Settings settings, ClusterSettings clusterSettings) {
super(settings);
setClusterRequireFilters(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING.getAsMap(settings));
setClusterExcludeFilters(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getAsMap(settings));
setClusterIncludeFilters(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.getAsMap(settings));
clusterSettings.addAffixMapUpdateConsumer(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING, this::setClusterRequireFilters, (a,b)-> {}, true);
clusterSettings.addAffixMapUpdateConsumer(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING, this::setClusterExcludeFilters, (a,b)-> {}, true);
clusterSettings.addAffixMapUpdateConsumer(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, this::setClusterIncludeFilters, (a,b)-> {}, true);
}
@Override
public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
if (shardRouting.unassigned()) {
// only for unassigned - we filter allocation right after the index creation ie. for shard shrinking etc. to ensure
// that once it has been allocated post API the replicas can be allocated elsewhere without user interaction
// this is a setting that can only be set within the system!
IndexMetaData indexMd = allocation.metaData().getIndexSafe(shardRouting.index());
DiscoveryNodeFilters initialRecoveryFilters = indexMd.getInitialRecoveryFilters();
if (initialRecoveryFilters != null &&
INITIAL_RECOVERY_TYPES.contains(shardRouting.recoverySource().getType()) &&
initialRecoveryFilters.match(node.node()) == false) {
String explanation = (shardRouting.recoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) ?
"initial allocation of the shrunken index is only allowed on nodes [%s] that hold a copy of every shard in the index" :
"initial allocation of the index is only allowed on nodes [%s]";
return allocation.decision(Decision.NO, NAME, explanation, initialRecoveryFilters);
}
}
return shouldFilter(shardRouting, node, allocation);
}
@Override
public Decision canAllocate(IndexMetaData indexMetaData, RoutingNode node, RoutingAllocation allocation) {
return shouldFilter(indexMetaData, node, allocation);
}
@Override
public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
return shouldFilter(shardRouting, node, allocation);
}
private Decision shouldFilter(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
Decision decision = shouldClusterFilter(node, allocation);
if (decision != null) return decision;
decision = shouldIndexFilter(allocation.metaData().getIndexSafe(shardRouting.index()), node, allocation);
if (decision != null) return decision;
return allocation.decision(Decision.YES, NAME, "node passes include/exclude/require filters");
}
private Decision shouldFilter(IndexMetaData indexMd, RoutingNode node, RoutingAllocation allocation) {
Decision decision = shouldClusterFilter(node, allocation);
if (decision != null) return decision;
decision = shouldIndexFilter(indexMd, node, allocation);
if (decision != null) return decision;
return allocation.decision(Decision.YES, NAME, "node passes include/exclude/require filters");
}
private Decision shouldIndexFilter(IndexMetaData indexMd, RoutingNode node, RoutingAllocation allocation) {
if (indexMd.requireFilters() != null) {
if (indexMd.requireFilters().match(node.node()) == false) {
return allocation.decision(Decision.NO, NAME, "node does not match index setting [%s] filters [%s]",
IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_PREFIX, indexMd.requireFilters());
}
}
if (indexMd.includeFilters() != null) {
if (indexMd.includeFilters().match(node.node()) == false) {
return allocation.decision(Decision.NO, NAME, "node does not match index setting [%s] filters [%s]",
IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_PREFIX, indexMd.includeFilters());
}
}
if (indexMd.excludeFilters() != null) {
if (indexMd.excludeFilters().match(node.node())) {
return allocation.decision(Decision.NO, NAME, "node matches index setting [%s] filters [%s]",
IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey(), indexMd.excludeFilters());
}
}
return null;
}
private Decision shouldClusterFilter(RoutingNode node, RoutingAllocation allocation) {
if (clusterRequireFilters != null) {
if (clusterRequireFilters.match(node.node()) == false) {
return allocation.decision(Decision.NO, NAME, "node does not match cluster setting [%s] filters [%s]",
CLUSTER_ROUTING_REQUIRE_GROUP_PREFIX, clusterRequireFilters);
}
}
if (clusterIncludeFilters != null) {
if (clusterIncludeFilters.match(node.node()) == false) {
return allocation.decision(Decision.NO, NAME, "node does not cluster setting [%s] filters [%s]",
CLUSTER_ROUTING_INCLUDE_GROUP_PREFIX, clusterIncludeFilters);
}
}
if (clusterExcludeFilters != null) {
if (clusterExcludeFilters.match(node.node())) {
return allocation.decision(Decision.NO, NAME, "node matches cluster setting [%s] filters [%s]",
CLUSTER_ROUTING_EXCLUDE_GROUP_PREFIX, clusterExcludeFilters);
}
}
return null;
}
private void setClusterRequireFilters(Map<String, String> filters) {
clusterRequireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, filters);
}
private void setClusterIncludeFilters(Map<String, String> filters) {
clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, filters);
}
private void setClusterExcludeFilters(Map<String, String> filters) {
clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, filters);
}
}
| apache-2.0 |
thomaswp/playn-xna-samples | noise/src/playn/sample/noise/Main.java | 3149 | /**
* Copyright 2011 The PlayN Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package playn.sample.noise;
import java.io.InputStream;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.LineEvent;
import javax.sound.sampled.LineListener;
import javax.sound.sampled.Mixer;
import javax.sound.sampled.Mixer.Info;
import javazoom.jl.decoder.JavaLayerException;
import javazoom.jl.player.Player;
public class Main {

  /** When true, MP3s are decoded with JLayer on a background thread instead of the Java Sound Clip API. */
  private static final boolean USE_JLAYER = false;

  /**
   * Diagnostic entry point: prints the supported audio file types and the
   * available mixers, then plays two bundled sample sounds (a WAV and an MP3).
   */
  public static void main(String[] args) throws InterruptedException {
    for (AudioFileFormat.Type fileFormat : AudioSystem.getAudioFileTypes()) {
      System.out.print(fileFormat + ", ");
    }
    System.out.println();
    System.out.println();

    for (Info info : AudioSystem.getMixerInfo()) {
      System.out.println(info);
    }
    System.out.println();

    Mixer mixer = AudioSystem.getMixer(null);
    int maxLines = mixer.getMaxLines(mixer.getLineInfo());
    System.out.println("maxlines=" + maxLines);

    Thread.sleep(100);
    play("freesoundproject_22740__FranciscoPadilla__37_Click_Finger.wav");
    play("freesoundproject_28917__junggle__btn107.mp3");

    // Give asynchronous playback a chance to finish before the JVM exits.
    Thread.sleep(1000);
    System.out.println("Done");
  }

  /**
   * Plays the named sound from the {@code resources/} classpath directory.
   * Playback is asynchronous; errors are printed rather than propagated
   * because this is a throwaway diagnostic sample.
   *
   * @param filename resource name relative to {@code resources/}
   */
  private static void play(String filename) {
    try {
      final InputStream fis = Main.class.getResourceAsStream("resources/" + filename);
      if (fis == null) {
        // Previously this fell through to an NPE inside the sound stack.
        System.err.println("Resource not found: resources/" + filename);
        return;
      }
      if (USE_JLAYER) {
        Runnable r = new Runnable() {
          @Override
          public void run() {
            Player player;
            try {
              player = new Player(fis);
              player.play();
            } catch (JavaLayerException e) {
              e.printStackTrace();
            }
          }
        };
        new Thread(r).start();
      } else {
        System.out.println(filename);
        System.out.println(AudioSystem.getAudioFileFormat(fis).getFormat().toString());
        AudioInputStream ais = AudioSystem.getAudioInputStream(fis);
        Clip clip = AudioSystem.getClip();
        clip.addLineListener(new LineListener() {
          @Override
          public void update(LineEvent evt) {
            // Uncomment for line-event tracing:
            // System.out.println(evt.getType() + ":" + evt.getLine());
          }
        });
        clip.open(ais);
        clip.start();
        // Clip.open() loads the audio data, so the stream can be released now;
        // closing the AudioInputStream also closes the underlying resource stream.
        // Previously neither stream was ever closed.
        ais.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}
| apache-2.0 |
springrichclient/springrcp | spring-richclient-samples/spring-richclient-samples-simple/src/main/java/org/springframework/richclient/samples/simple/ui/ContactForm.java | 5981 | /*
* Copyright 2002-2006 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.springframework.richclient.samples.simple.ui;
import com.jgoodies.forms.layout.FormLayout;
import org.springframework.richclient.form.AbstractFocussableForm;
import org.springframework.richclient.form.FormModelHelper;
import org.springframework.richclient.form.binding.swing.NumberBinder;
import org.springframework.richclient.form.builder.FormLayoutFormBuilder;
import org.springframework.richclient.form.builder.TableFormBuilder;
import org.springframework.richclient.samples.simple.domain.Contact;
import org.springframework.richclient.samples.simple.ui.binding.TodoItemListBinding;
import javax.swing.*;
import java.util.HashMap;
/**
* Form to handle the properties of a Contact object. It uses a {@link TableFormBuilder} to construct the layout of the
* form. Contact object properties are easily bound to UI controls using the form builder's
* {@link TableFormBuilder#add(String)} method. The platform takes care of determining which kind of control to create
* based on the type of the property in question.
* @author Larry Streepy
*/
public class ContactForm extends AbstractFocussableForm
{
public ContactForm(Contact contact) {
super(FormModelHelper.createFormModel(contact, "contactForm"));
}
protected JComponent createFormControl()
{
FormLayout layout = new FormLayout("right:pref, 4dlu, fill:pref:grow, 6dlu, right:pref, 4dlu, fill:pref:grow", "default");
FormLayoutFormBuilder formBuilder = new FormLayoutFormBuilder(getBindingFactory(), layout);
formBuilder.setLabelAttributes("r, c");
formBuilder.addHorizontalSeparator("General", 7);
formBuilder.nextRow();
formBuilder.addPropertyAndLabel("lastName");
setFocusControl(formBuilder.addPropertyAndLabel("firstName", 5)[1]);
formBuilder.nextRow();
formBuilder.addPropertyAndLabel("dateOfBirth");
formBuilder.nextRow();
formBuilder.addPropertyAndLabel("homePhone");
formBuilder.addPropertyAndLabel("workPhone", 5);
formBuilder.nextRow();
formBuilder.addPropertyAndLabel("emailAddress");
formBuilder.nextRow();
formBuilder.addPropertyAndLabel("contactType");
formBuilder.nextRow();
NumberBinder binder = new NumberBinder();
binder.setLeftDecoration("€");
formBuilder.addLabel("monthlyIncome");
formBuilder.addBinding(binder.bind(getFormModel(), "monthlyIncome", new HashMap()), 3);
formBuilder.nextRow();
formBuilder.addHorizontalSeparator("Address", 7);
formBuilder.nextRow();
formBuilder.addPropertyAndLabel("address.address1");
formBuilder.nextRow();
formBuilder.addPropertyAndLabel("address.address2");
formBuilder.nextRow();
formBuilder.addPropertyAndLabel("address.address3");
formBuilder.nextRow();
formBuilder.addPropertyAndLabel("address.city");
formBuilder.nextRow();
// formBuilder.add(getBindingFactory().createBoundComboBox( "address.state", MasterLists.STATE_CODE), "colSpan=1 align=left" );
formBuilder.addPropertyAndLabel("address.state");
formBuilder.nextRow();
JComponent zipField = formBuilder.addPropertyAndLabel("address.zip")[1];
((JTextField) zipField).setColumns(8);
formBuilder.nextRow();
formBuilder.addHorizontalSeparator("Memo", 7);
formBuilder.nextRow("fill:default:grow");
formBuilder.addTextArea("memo", 1, formBuilder.getRow(), 7, 1);
formBuilder.nextRow();
formBuilder.addHorizontalSeparator("Todo items", 7);
formBuilder.nextRow("fill:default:grow");
TodoItemListBinding todoItemListBinding = new TodoItemListBinding(getFormModel(), "todoItems");
formBuilder.addBinding(todoItemListBinding, 1, formBuilder.getRow(), 7, 1);
/*
TableFormBuilder formBuilder = new TableFormBuilder(getBindingFactory());
formBuilder.setLabelAttributes("colGrId=label colSpec=right:pref");
formBuilder.addSeparator("General");
formBuilder.row();
firstNameField = formBuilder.add("firstName")[1];
formBuilder.add("lastName");
formBuilder.row();
formBuilder.add("dateOfBirth", "colSpan=1");
formBuilder.row();
formBuilder.add("homePhone");
formBuilder.add("workPhone");
formBuilder.row();
formBuilder.add("emailAddress");
formBuilder.row();
formBuilder.row();
formBuilder.add("contactType", "colSpan=1 align=left");
formBuilder.row();
formBuilder.addSeparator("Address");
formBuilder.row();
formBuilder.add("address.address1");
formBuilder.row();
formBuilder.add("address.address2");
formBuilder.row();
formBuilder.add("address.address3");
formBuilder.row();
formBuilder.add("address.city", "colSpan=1 align=left");
formBuilder.row();
// formBuilder.add(getBindingFactory().createBoundComboBox( "address.state", MasterLists.STATE_CODE), "colSpan=1 align=left" );
formBuilder.add("address.state", "colSpan=1 align=left");
formBuilder.row();
// We want to make the zip code UI field smaller than the default. The add method
// returns an array of two components, the field label and the component bound to
// the property.
JComponent zipField = formBuilder.add("address.zip", "colSpan=1 align=left")[1];
((JTextField) zipField).setColumns(8);
formBuilder.row();
*/
return formBuilder.getPanel();
}
} | apache-2.0 |
pperboires/PocDrools | drools-core/src/main/java/org/drools/reteoo/ObjectTypeNode.java | 31600 | /*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.reteoo;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.List;
import org.drools.RuleBaseConfiguration;
import org.drools.base.ClassObjectType;
import org.drools.base.DroolsQuery;
import org.drools.base.ValueType;
import org.drools.builder.conf.LRUnlinkingOption;
import org.drools.common.AbstractRuleBase;
import org.drools.common.BaseNode;
import org.drools.common.DroolsObjectInputStream;
import org.drools.common.EventFactHandle;
import org.drools.common.InternalFactHandle;
import org.drools.common.InternalWorkingMemory;
import org.drools.common.Memory;
import org.drools.common.NodeMemory;
import org.drools.common.PropagationContextImpl;
import org.drools.common.UpdateContext;
import org.drools.core.util.Iterator;
import org.drools.core.util.ObjectHashSet;
import org.drools.core.util.ObjectHashSet.ObjectEntry;
import org.drools.marshalling.impl.MarshallerReaderContext;
import org.drools.marshalling.impl.MarshallerWriteContext;
import org.drools.marshalling.impl.PersisterEnums;
import org.drools.marshalling.impl.ProtobufMessages;
import org.drools.marshalling.impl.ProtobufMessages.Timers.ExpireTimer;
import org.drools.marshalling.impl.ProtobufMessages.Timers.Timer;
import org.drools.marshalling.impl.TimersInputMarshaller;
import org.drools.marshalling.impl.TimersOutputMarshaller;
import org.drools.reteoo.ReteooWorkingMemory.WorkingMemoryReteExpireAction;
import org.drools.reteoo.RuleRemovalContext.CleanupAdapter;
import org.drools.reteoo.builder.BuildContext;
import org.drools.reteoo.compiled.CompiledNetwork;
import org.drools.rule.Declaration;
import org.drools.rule.EntryPoint;
import org.drools.rule.EvalCondition;
import org.drools.spi.Constraint;
import org.drools.spi.ObjectType;
import org.drools.spi.PropagationContext;
import org.drools.time.Job;
import org.drools.time.JobContext;
import org.drools.time.JobHandle;
import org.drools.time.TimerService;
import org.drools.time.impl.DefaultJobHandle;
import org.drools.time.impl.PointInTimeTrigger;
/**
* <code>ObjectTypeNodes<code> are responsible for filtering and propagating the matching
* fact assertions propagated from the <code>Rete</code> node using <code>ObjectType</code> interface.
* <p/>
* The assert and retract methods do not attempt to filter as this is the role of the <code>Rete</code>
* node which builds up a cache of matching <code>ObjectTypdeNodes</code>s for each asserted object, using
* the <code>matches(Object object)</code> method. Incorrect propagation in these methods is not checked and
* will result in <code>ClassCastExpcections</code> later on in the network.
* <p/>
* Filters <code>Objects</code> coming from the <code>Rete</code> using a
* <code>ObjectType</code> semantic module.
*
* @see Rete
*/
public class ObjectTypeNode extends ObjectSource
implements
ObjectSink,
Externalizable,
NodeMemory
{
    // ------------------------------------------------------------
    // Instance members
    // ------------------------------------------------------------
    private static final long serialVersionUID = 510l;
    /**
     * The <code>ObjectType</code> semantic module.
     */
    private ObjectType objectType;
    // When true the set of matching fact handles is kept in working-memory node
    // memory so later rule attachments can be brought up to date (see updateSink).
    private boolean objectMemoryEnabled;
    // Event expiration offset in ms; -1 means no expiration configured.
    private long expirationOffset = -1;
    // Shared stateless timer job used to queue event-expiration actions.
    public static final transient ExpireJob job = new ExpireJob();
    // True when this node's type is DroolsQuery (set in the main constructor).
    private boolean queryNode;
    // Non-null when propagation is delegated to a generated/compiled network.
    private CompiledNetwork compiledNetwork;
    /* always dirty after serialisation */
    private transient boolean dirty;
    /* reset counter when dirty */
    private transient int otnIdCounter;
    // Next OTN-relative sink id that nextOtnId() would hand out.
    public int getOtnIdCounter() {
        return otnIdCounter;
    }
    /** @see LRUnlinkingOption */
    private boolean lrUnlinkingEnabled = false;
    // No-arg constructor required by Externalizable; state is restored in readExternal().
    public ObjectTypeNode() {
    }
    /**
     * Construct given a semantic <code>ObjectType</code> and the provided
     * unique id. All <code>ObjectTypeNode</code>s have node memory.
     *
     * @param id The unique id for the node.
     * @param source The entry point node this OTN is attached under.
     * @param objectType The semantic object-type differentiator.
     * @param context Build-time context supplying rulebase configuration.
     */
    public ObjectTypeNode(final int id,
                          final EntryPointNode source,
                          final ObjectType objectType,
                          final BuildContext context) {
        super( id,
               context.getPartitionId(),
               context.getRuleBase().getConfiguration().isMultithreadEvaluation(),
               source,
               context.getRuleBase().getConfiguration().getAlphaNodeHashingThreshold() );
        this.objectType = objectType;
        this.lrUnlinkingEnabled = context.getRuleBase().getConfiguration().isLRUnlinkingEnabled();
        setObjectMemoryEnabled( context.isObjectTypeNodeMemoryEnabled() );
        // Query facts get special treatment in assert/retract (see queryNode checks there).
        if ( ClassObjectType.DroolsQuery_ObjectType.isAssignableFrom( objectType ) ) {
            queryNode = true;
        }
        // New nodes start dirty so OTN sink ids are (re)assigned on first propagation.
        this.dirty = true;
    }
    // Restores state written by writeExternal(); fields MUST be read in the same
    // order they are written there. Forces dirty=true so OTN sink ids are
    // reassigned after deserialization.
    public void readExternal(ObjectInput in) throws IOException,
                                            ClassNotFoundException {
        super.readExternal( in );
        objectType = (ObjectType) in.readObject();
        // this is here as not all objectTypeNodes used ClassObjectTypes in packages (i.e. rules with those nodes did not exist yet)
        // and thus have no wiring targets
        if ( objectType instanceof ClassObjectType ) {
            objectType = ((AbstractRuleBase) ((DroolsObjectInputStream) in).getRuleBase()).getClassFieldAccessorCache().getClassObjectType( (ClassObjectType) objectType );
        }
        objectMemoryEnabled = in.readBoolean();
        expirationOffset = in.readLong();
        lrUnlinkingEnabled = in.readBoolean();
        queryNode = in.readBoolean();
        dirty = true;
    }
    // Serializes state; the write order here must stay in sync with the read
    // order in readExternal(). Note: dirty/otnIdCounter are transient by design.
    public void writeExternal(ObjectOutput out) throws IOException {
        super.writeExternal( out );
        out.writeObject( objectType );
        out.writeBoolean( objectMemoryEnabled );
        out.writeLong( expirationOffset );
        out.writeBoolean( lrUnlinkingEnabled );
        out.writeBoolean( queryNode );
    }
/**
* Retrieve the semantic <code>ObjectType</code> differentiator.
*
* @return The semantic <code>ObjectType</code> differentiator.
*/
public ObjectType getObjectType() {
return this.objectType;
}
@Override
public long calculateDeclaredMask(List<String> settableProperties) {
return 0;
}
public boolean isAssignableFrom(final ObjectType objectType) {
return this.objectType.isAssignableFrom( objectType );
}
public void setCompiledNetwork(CompiledNetwork compiledNetwork) {
this.compiledNetwork = compiledNetwork;
this.compiledNetwork.setObjectTypeNode( this );
}
    /**
     * Propagate the <code>FactHandleImpl</code> through the <code>Rete</code> network. All
     * <code>FactHandleImpl</code> should be remembered in the node memory, so that later runtime rule attachments
     * can have the matched facts propagated to them.
     *
     * @param factHandle The fact handle being asserted.
     * @param context The propagation context for this assertion.
     * @param workingMemory The working memory session.
     */
    public void assertObject(final InternalFactHandle factHandle,
                             final PropagationContext context,
                             final InternalWorkingMemory workingMemory) {
        // Lazily (re)assign OTN-relative sink ids after a network change or deserialization.
        if ( dirty ) {
            otnIdCounter = 0;
            updateTupleSinkId( this, this );
            dirty = false;
        }
        // Remember the handle in node memory, except for closed query facts.
        if ( objectMemoryEnabled && !(queryNode && !((DroolsQuery) factHandle.getObject()).isOpen()) ) {
            final ObjectTypeNodeMemory memory = (ObjectTypeNodeMemory) workingMemory.getNodeMemory( this );
            memory.memory.add( factHandle,
                               false );
        }
        // Propagate through the compiled network when available, otherwise via the sink chain.
        if ( compiledNetwork != null ) {
            compiledNetwork.assertObject( factHandle,
                                          context,
                                          workingMemory );
        } else {
            context.setCurrentPropagatingOTN( this );
            this.sink.propagateAssertObject( factHandle,
                                             context,
                                             workingMemory );
        }
        // For event types with a finite expiration offset (and not during session
        // unmarshalling, i.e. readerContext == null), schedule automatic expiry.
        if ( context.getReaderContext() == null && this.objectType.isEvent() && this.expirationOffset >= 0 && this.expirationOffset != Long.MAX_VALUE ) {
            // schedule expiration
            WorkingMemoryReteExpireAction expire = new WorkingMemoryReteExpireAction( factHandle,
                                                                                     this );
            TimerService clock = workingMemory.getTimerService();
            // Never expire before (now + offset), even for events with older start timestamps.
            long nextTimestamp = Math.max( clock.getCurrentTime() + this.expirationOffset,
                                           ((EventFactHandle) factHandle).getStartTimestamp() + this.expirationOffset );
            JobContext jobctx = new ExpireJobContext( expire,
                                                      workingMemory );
            JobHandle handle = clock.scheduleJob( job,
                                                  jobctx,
                                                  new PointInTimeTrigger( nextTimestamp,
                                                                          null,
                                                                          null ) );
            jobctx.setJobHandle( handle );
        }
    }
    /**
     * Retract the <code>FactHandleImpl</code> from the <code>Rete</code> network. Also remove the
     * <code>FactHandleImpl</code> from the node memory.
     *
     * @param factHandle The fact handle being retracted.
     * @param context The propagation context for this retraction.
     * @param workingMemory The working memory session.
     */
    public void retractObject(final InternalFactHandle factHandle,
                              final PropagationContext context,
                              final InternalWorkingMemory workingMemory) {
        // Lazily (re)assign OTN-relative sink ids after a network change or deserialization.
        if ( dirty ) {
            otnIdCounter = 0;
            updateTupleSinkId( this, this );
            dirty = false;
        }
        // Forget the handle in node memory, except for closed query facts.
        if ( objectMemoryEnabled && !(queryNode && !((DroolsQuery) factHandle.getObject()).isOpen()) ) {
            final ObjectTypeNodeMemory memory = (ObjectTypeNodeMemory) workingMemory.getNodeMemory( this );
            memory.memory.remove( factHandle );
        }
        // Retract every right tuple created from this handle, then clear the list.
        for ( RightTuple rightTuple = factHandle.getFirstRightTuple(); rightTuple != null; rightTuple = rightTuple.getHandleNext() ) {
            rightTuple.getRightTupleSink().retractRightTuple( rightTuple,
                                                              context,
                                                              workingMemory );
        }
        factHandle.clearRightTuples();
        // Likewise for the left tuples rooted at this handle.
        for ( LeftTuple leftTuple = factHandle.getFirstLeftTuple(); leftTuple != null; leftTuple = leftTuple.getLeftParentNext() ) {
            leftTuple.getLeftTupleSink().retractLeftTuple( leftTuple,
                                                           context,
                                                           workingMemory );
        }
        factHandle.clearLeftTuples();
    }
    /**
     * Propagates a fact modification through the network, delegating to the
     * compiled network when one is installed.
     *
     * @param factHandle The modified fact's handle.
     * @param modifyPreviousTuples The tuples created by the previous propagation of this fact.
     * @param context The propagation context for this modification.
     * @param workingMemory The working memory session.
     */
    public void modifyObject(InternalFactHandle factHandle,
                             ModifyPreviousTuples modifyPreviousTuples,
                             PropagationContext context,
                             InternalWorkingMemory workingMemory) {
        // Lazily (re)assign OTN-relative sink ids after a network change or deserialization.
        if ( dirty ) {
            otnIdCounter = 0;
            updateTupleSinkId( this, this );
            dirty = false;
        }
        context.setObjectType( objectType );
        if ( compiledNetwork != null ) {
            compiledNetwork.modifyObject( factHandle,
                                          modifyPreviousTuples,
                                          context,
                                          workingMemory );
        } else {
            this.sink.propagateModifyObject( factHandle,
                                             modifyPreviousTuples,
                                             context,
                                             workingMemory );
        }
    }
    /**
     * Re-propagates every fact handle remembered in this node's memory to the
     * given sink, so a newly attached sink catches up with already-asserted facts.
     *
     * @param sink The sink to bring up to date.
     * @param context The propagation context.
     * @param workingMemory The working memory session.
     */
    public void updateSink(final ObjectSink sink,
                           final PropagationContext context,
                           final InternalWorkingMemory workingMemory) {
        if ( lrUnlinkingEnabled ) {
            // Update sink taking into account L&R unlinking peculiarities
            updateLRUnlinking( sink, context, workingMemory );
        } else {
            // Regular updateSink
            final ObjectTypeNodeMemory memory = (ObjectTypeNodeMemory) workingMemory.getNodeMemory( this );
            Iterator it = memory.memory.iterator();
            for ( ObjectEntry entry = (ObjectEntry) it.next(); entry != null; entry = (ObjectEntry) it.next() ) {
                sink.assertObject( (InternalFactHandle) entry.getValue(),
                                   context,
                                   workingMemory );
            }
        }
    }
    /**
     * When L&R Unlinking is enabled, updateSink() is used to populate
     * a node's memory, but it has to take into account if it's propagating.
     */
    private void updateLRUnlinking(final ObjectSink sink,
                                   final PropagationContext context,
                                   final InternalWorkingMemory workingMemory) {
        final ObjectTypeNodeMemory memory = (ObjectTypeNodeMemory) workingMemory.getNodeMemory( this );
        Iterator it = memory.memory.iterator();
        InternalFactHandle ctxHandle = (InternalFactHandle) context.getFactHandle();
        if ( !context.isPropagating( this ) ||
             (context.isPropagating( this ) && context.shouldPropagateAll()) ) {
            for ( ObjectEntry entry = (ObjectEntry) it.next(); entry != null; entry = (ObjectEntry) it.next() ) {
                // Assert everything
                sink.assertObject( (InternalFactHandle) entry.getValue(),
                                   context,
                                   workingMemory );
            }
        } else {
            // This OTN is mid-propagation: re-assert everything except the fact
            // currently being propagated, to avoid a duplicate assertion of it.
            for ( ObjectEntry entry = (ObjectEntry) it.next(); entry != null; entry = (ObjectEntry) it.next() ) {
                InternalFactHandle handle = (InternalFactHandle) entry.getValue();
                // Exclude the current fact propagation
                if ( handle.getId() != ctxHandle.getId() ) {
                    sink.assertObject( handle,
                                       context,
                                       workingMemory );
                }
            }
        }
    }
    /**
     * Rete needs to know that this ObjectTypeNode has been added: registers this
     * node as a sink of its source and, when a build context is available,
     * replays matching facts from every existing working memory into this node.
     */
    public void attach( BuildContext context ) {
        this.source.addObjectSink( this );
        if (context == null) {
            return;
        }
        // we need to call updateSink on Rete, because someone
        // might have already added facts matching this ObjectTypeNode
        // to working memories
        for ( InternalWorkingMemory workingMemory : context.getWorkingMemories() ) {
            final PropagationContextImpl propagationContext = new PropagationContextImpl( workingMemory.getNextPropagationIdCounter(),
                                                                                          PropagationContext.RULE_ADDITION,
                                                                                          null,
                                                                                          null,
                                                                                          null );
            propagationContext.setEntryPoint( ((EntryPointNode) this.source).getEntryPoint() );
            this.source.updateSink( this,
                                    propagationContext,
                                    workingMemory );
        }
    }
public void networkUpdated(UpdateContext updateContext) {
this.dirty = true;
}
    // Walks the subnetwork below 'source' depth-first and assigns each tuple sink a
    // fresh OTN-relative id from 'otn', recursing through intermediate alpha nodes.
    private static void updateTupleSinkId( ObjectTypeNode otn,
                                           ObjectSource source ) {
        for ( ObjectSink sink : source.sink.getSinks() ) {
            if ( sink instanceof BetaNode ) {
                ((BetaNode) sink).setRightInputOtnId( otn.nextOtnId() );
            } else if ( sink instanceof LeftInputAdapterNode ) {
                // LIA nodes don't get an id themselves; their left-tuple children do.
                for ( LeftTupleSink liaChildSink : ((LeftInputAdapterNode) sink).getSinkPropagator().getSinks() ) {
                    liaChildSink.setLeftInputOtnId( otn.nextOtnId() );
                }
            } else if ( sink instanceof AlphaNode ) {
                updateTupleSinkId( otn, (AlphaNode) sink );
            }
        }
    }
public int nextOtnId() {
return otnIdCounter++;
}
/**
* OTN needs to override remove to avoid releasing the node ID, since OTN are
* never removed from the rulebase in the current implementation
*
* @inheritDoc
* @see org.drools.common.BaseNode#remove(org.drools.reteoo.RuleRemovalContext, org.drools.reteoo.ReteooBuilder, org.drools.common.BaseNode, org.drools.common.InternalWorkingMemory[])
*/
public void remove(RuleRemovalContext context,
ReteooBuilder builder,
BaseNode node,
InternalWorkingMemory[] workingMemories) {
doRemove( context,
builder,
node,
workingMemories );
}
    /**
     * OTN needs to override remove to avoid releasing the node ID, since OTN are
     * never removed from the rulebase in the current implementation.
     * Runs the removal-context cleanup adapter over every left tuple of every
     * remembered fact handle, then detaches the child sink if it is unused.
     */
    protected void doRemove(final RuleRemovalContext context,
                            final ReteooBuilder builder,
                            final BaseNode node,
                            final InternalWorkingMemory[] workingMemories) {
        if ( context.getCleanupAdapter() != null ) {
            for ( InternalWorkingMemory workingMemory : workingMemories ) {
                CleanupAdapter adapter = context.getCleanupAdapter();
                final ObjectTypeNodeMemory memory = (ObjectTypeNodeMemory) workingMemory.getNodeMemory( this );
                Iterator it = memory.memory.iterator();
                for ( ObjectEntry entry = (ObjectEntry) it.next(); entry != null; entry = (ObjectEntry) it.next() ) {
                    InternalFactHandle handle = (InternalFactHandle) entry.getValue();
                    for ( LeftTuple leftTuple = handle.getFirstLeftTuple(); leftTuple != null; leftTuple = leftTuple.getLeftParentNext() ) {
                        adapter.cleanUp( leftTuple,
                                         workingMemory );
                    }
                }
            }
            // Adapter has run; clear it so it is not applied again further up the chain.
            context.setCleanupAdapter( null );
        }
        if ( !node.isInUse() ) {
            removeObjectSink( (ObjectSink) node );
        }
    }
    /**
     * Creates this node's per-session memory, which tracks the fact handles
     * currently matching this object type (iterated as {@link ObjectEntry}s in
     * updateSink/doRemove). NOTE(review): the previous javadoc referenced
     * PrimitiveLongMap, which this implementation does not use.
     */
    public Memory createMemory(final RuleBaseConfiguration config) {
        return new ObjectTypeNodeMemory();
    }
public boolean isObjectMemoryEnabled() {
return this.objectMemoryEnabled;
}
public void setObjectMemoryEnabled(boolean objectMemoryEnabled) {
this.objectMemoryEnabled = objectMemoryEnabled;
}
public String toString() {
return "[ObjectTypeNode(" + this.id + ")::" + ((EntryPointNode) this.source).getEntryPoint() + " objectType=" + this.objectType + " expiration=" + this.expirationOffset + "ms ]";
}
/**
* Uses he hashCode() of the underlying ObjectType implementation.
*/
public int hashCode() {
return this.objectType.hashCode() ^ this.source.hashCode();
}
public boolean equals(final Object object) {
if ( this == object ) {
return true;
}
if ( object == null || !(object instanceof ObjectTypeNode) ) {
return false;
}
final ObjectTypeNode other = (ObjectTypeNode) object;
return this.objectType.equals( other.objectType ) && this.source.equals( other.source );
}
private boolean usesDeclaration(final Constraint[] constraints) {
boolean usesDecl = false;
for ( int i = 0; !usesDecl && i < constraints.length; i++ ) {
usesDecl = this.usesDeclaration( constraints[i] );
}
return usesDecl;
}
private boolean usesDeclaration(final Constraint constraint) {
boolean usesDecl = false;
final Declaration[] declarations = constraint.getRequiredDeclarations();
for ( int j = 0; !usesDecl && j < declarations.length; j++ ) {
usesDecl = (declarations[j].getPattern().getObjectType() == this.objectType);
}
return usesDecl;
}
private boolean usesDeclaration(final EvalCondition condition) {
boolean usesDecl = false;
final Declaration[] declarations = condition.getRequiredDeclarations();
for ( int j = 0; !usesDecl && j < declarations.length; j++ ) {
usesDecl = (declarations[j].getPattern().getObjectType() == this.objectType);
}
return usesDecl;
}
/**
* @return the entryPoint
*/
public EntryPoint getEntryPoint() {
return ((EntryPointNode) this.source).getEntryPoint();
}
public long getExpirationOffset() {
return expirationOffset;
}
    /**
     * Sets the event expiration offset (ms). For non-query types this also
     * forces node memory on (offset > 0, expiry needs the handle set) or off
     * (offset == 0, facts expire immediately); a negative offset leaves the
     * memory setting untouched.
     */
    public void setExpirationOffset(long expirationOffset) {
        this.expirationOffset = expirationOffset;
        if ( !this.objectType.getValueType().equals( ValueType.QUERY_TYPE ) ) {
            if ( this.expirationOffset > 0 ) {
                // override memory enabled settings
                this.setObjectMemoryEnabled( true );
            } else if ( this.expirationOffset == 0 ) {
                // disable memory
                this.setObjectMemoryEnabled( false );
            }
        }
    }
public static class ExpireJob
implements
Job {
public void execute(JobContext ctx) {
ExpireJobContext context = (ExpireJobContext) ctx;
context.workingMemory.queueWorkingMemoryAction( context.expireAction );
}
}
    /**
     * Context carried by an {@link ExpireJob}: the expire action to queue, the
     * working memory to queue it on, and the scheduler's job handle.
     *
     * <p>Note: the {@link Externalizable} methods below are intentionally
     * no-ops; persistence of expire timers is handled by the dedicated
     * marshaller classes further down, not by Java serialization.
     */
    public static class ExpireJobContext
        implements
        JobContext,
        Externalizable {
        public WorkingMemoryReteExpireAction expireAction;
        public InternalWorkingMemory workingMemory;
        public JobHandle handle;
        /**
         * @param expireAction the retraction/expire action to queue when the job fires
         * @param workingMemory the working memory the action is queued on
         */
        public ExpireJobContext(WorkingMemoryReteExpireAction expireAction,
                                InternalWorkingMemory workingMemory) {
            super();
            this.expireAction = expireAction;
            this.workingMemory = workingMemory;
        }
        public JobHandle getJobHandle() {
            return this.handle;
        }
        public void setJobHandle(JobHandle jobHandle) {
            this.handle = jobHandle;
        }
        public WorkingMemoryReteExpireAction getExpireAction() {
            return expireAction;
        }
        public void setExpireAction(WorkingMemoryReteExpireAction expireAction) {
            this.expireAction = expireAction;
        }
        public InternalWorkingMemory getWorkingMemory() {
            return workingMemory;
        }
        public void setWorkingMemory(InternalWorkingMemory workingMemory) {
            this.workingMemory = workingMemory;
        }
        // NOTE(review): getHandle/setHandle duplicate getJobHandle/setJobHandle;
        // presumably kept for a second caller convention — confirm before removing.
        public JobHandle getHandle() {
            return handle;
        }
        public void setHandle(JobHandle handle) {
            this.handle = handle;
        }
        public void readExternal(ObjectInput in) throws IOException,
                                                ClassNotFoundException {
            // intentionally empty: state is restored by ExpireJobContextTimerInputMarshaller
        }
        public void writeExternal(ObjectOutput out) throws IOException {
            // intentionally empty: state is written by ExpireJobContextTimerOutputMarshaller
        }
    }
    /**
     * Serializes a scheduled expire timer so it can be re-scheduled after a
     * session is unmarshalled. The legacy {@link #write} wire format is, in
     * order: a short marker ({@code PersisterEnums.EXPIRE_TIMER}), the fact
     * handle id (int), the entry point id (UTF), the object type's class name
     * (UTF), and the trigger's next fire timestamp (long). The reader in
     * ExpireJobContextTimerInputMarshaller must consume fields in exactly this
     * order.
     */
    public static class ExpireJobContextTimerOutputMarshaller
        implements
        TimersOutputMarshaller {
        public void write(JobContext jobCtx,
                          MarshallerWriteContext outputCtx) throws IOException {
            outputCtx.writeShort( PersisterEnums.EXPIRE_TIMER );
            // ExpireJob, no state
            ExpireJobContext ejobCtx = (ExpireJobContext) jobCtx;
            WorkingMemoryReteExpireAction expireAction = ejobCtx.getExpireAction();
            outputCtx.writeInt( expireAction.getFactHandle().getId() );
            outputCtx.writeUTF( expireAction.getNode().getEntryPoint().getEntryPointId() );
            outputCtx.writeUTF( ((ClassObjectType) expireAction.getNode().getObjectType()).getClassType().getName() );
            DefaultJobHandle jobHandle = (DefaultJobHandle) ejobCtx.getJobHandle();
            PointInTimeTrigger trigger = (PointInTimeTrigger) jobHandle.getTimerJobInstance().getTrigger();
            outputCtx.writeLong( trigger.hasNextFireTime().getTime() );
        }
        /**
         * Protobuf variant of {@link #write}: emits the same four fields
         * (handle id, entry point id, class name, next fire timestamp) as an
         * EXPIRE-typed timer message.
         */
        public ProtobufMessages.Timers.Timer serialize(JobContext jobCtx,
                                                       MarshallerWriteContext outputCtx) {
            // ExpireJob, no state
            ExpireJobContext ejobCtx = ( ExpireJobContext ) jobCtx;
            WorkingMemoryReteExpireAction expireAction = ejobCtx.getExpireAction();
            DefaultJobHandle jobHandle = ( DefaultJobHandle ) ejobCtx.getJobHandle();
            PointInTimeTrigger trigger = ( PointInTimeTrigger ) jobHandle.getTimerJobInstance().getTrigger();
            return ProtobufMessages.Timers.Timer.newBuilder()
                    .setType( ProtobufMessages.Timers.TimerType.EXPIRE )
                    .setExpire( ProtobufMessages.Timers.ExpireTimer.newBuilder()
                                .setHandleId( expireAction.getFactHandle().getId() )
                                .setEntryPointId( expireAction.getNode().getEntryPoint().getEntryPointId() )
                                .setClassName( ((ClassObjectType)expireAction.getNode().getObjectType()).getClassType().getName() )
                                .setNextFireTimestamp( trigger.hasNextFireTime().getTime() )
                                .build() )
                    .build();
        }
    }
    /**
     * Restores a serialized expire timer by resolving the fact handle, entry
     * point and object type node from the rule base, then re-scheduling the
     * expire job on the session's timer service. {@link #read} consumes the
     * legacy stream format written by ExpireJobContextTimerOutputMarshaller,
     * in the same field order; {@link #deserialize} handles the protobuf form.
     *
     * <p>NOTE(review): {@code job} is a field declared elsewhere in the
     * enclosing class — presumably a shared stateless ExpireJob instance.
     */
    public static class ExpireJobContextTimerInputMarshaller
        implements
        TimersInputMarshaller {
        public void read(MarshallerReaderContext inCtx) throws IOException,
                                                       ClassNotFoundException {
            // field order must mirror ExpireJobContextTimerOutputMarshaller.write
            InternalFactHandle factHandle = inCtx.handles.get( inCtx.readInt() );
            String entryPointId = inCtx.readUTF();
            EntryPointNode epn = ((ReteooRuleBase) inCtx.wm.getRuleBase()).getRete().getEntryPointNode( new EntryPoint( entryPointId ) );
            String className = inCtx.readUTF();
            Class< ? > cls = ((ReteooRuleBase) inCtx.wm.getRuleBase()).getRootClassLoader().loadClass( className );
            ObjectTypeNode otn = epn.getObjectTypeNodes().get( new ClassObjectType( cls ) );
            long nextTimeStamp = inCtx.readLong();
            TimerService clock = inCtx.wm.getTimerService();
            JobContext jobctx = new ExpireJobContext( new WorkingMemoryReteExpireAction( factHandle, otn ),
                                                      inCtx.wm );
            // re-schedule at the originally recorded fire time
            JobHandle handle = clock.scheduleJob( job,
                                                  jobctx,
                                                  new PointInTimeTrigger( nextTimeStamp,
                                                                          null,
                                                                          null ) );
            jobctx.setJobHandle( handle );
        }
        public void deserialize(MarshallerReaderContext inCtx,
                                Timer _timer) throws ClassNotFoundException {
            ExpireTimer _expire = _timer.getExpire();
            InternalFactHandle factHandle = inCtx.handles.get( _expire.getHandleId() );
            EntryPointNode epn = ((ReteooRuleBase)inCtx.wm.getRuleBase()).getRete().getEntryPointNode( new EntryPoint( _expire.getEntryPointId() ) );
            Class<?> cls = ((ReteooRuleBase)inCtx.wm.getRuleBase()).getRootClassLoader().loadClass( _expire.getClassName() );
            ObjectTypeNode otn = epn.getObjectTypeNodes().get( new ClassObjectType( cls ) );
            TimerService clock = inCtx.wm.getTimerService();
            JobContext jobctx = new ExpireJobContext( new WorkingMemoryReteExpireAction(factHandle, otn),
                                                      inCtx.wm );
            JobHandle handle = clock.scheduleJob( job,
                                                  jobctx,
                                                  new PointInTimeTrigger( _expire.getNextFireTimestamp(),
                                                                          null,
                                                                          null ) );
            jobctx.setJobHandle( handle );
        }
    }
    /**
     * Not supported on this node type: modify by-pass is resolved at the
     * AlphaNode level first, so this method must never be reached here.
     *
     * @throws UnsupportedOperationException always
     */
    public void byPassModifyToBetaNode(InternalFactHandle factHandle,
                                       ModifyPreviousTuples modifyPreviousTuples,
                                       PropagationContext context,
                                       InternalWorkingMemory workingMemory) {
        throw new UnsupportedOperationException( "This should never get called, as the PropertyReactive first happens at the AlphaNode" );
    }
    /**
     * Per-session memory for an ObjectTypeNode: a hash set of the facts
     * currently propagated through the node. Serialized via Externalizable by
     * writing/reading the backing set as a single object.
     */
    public static class ObjectTypeNodeMemory implements Memory, Externalizable {
        public ObjectHashSet memory = new ObjectHashSet();
        public short getNodeType() {
            return NodeTypeEnums.ObjectTypeNode;
        }
        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeObject( memory );
        }
        public void readExternal(ObjectInput in) throws IOException,
                                                ClassNotFoundException {
            memory = (ObjectHashSet) in.readObject();
        }
    }
}
| apache-2.0 |
janicduplessis/buck | src/com/facebook/buck/rules/coercer/SortedSetTypeCoercer.java | 2913 | /*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rules.coercer;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.rules.CellPathResolver;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Sets;
import java.nio.file.Path;
import java.util.Collection;
import java.util.SortedSet;
/**
 * Coerces a raw collection into an {@link ImmutableSortedSet}, coercing each
 * element with the supplied element coercer and rejecting duplicates.
 */
public class SortedSetTypeCoercer<T extends Comparable<? super T>>
    extends CollectionTypeCoercer<ImmutableSortedSet<T>, T> {

  private final TypeCoercer<T> elementTypeCoercer;

  SortedSetTypeCoercer(TypeCoercer<T> elementTypeCoercer) {
    super(elementTypeCoercer);
    this.elementTypeCoercer = elementTypeCoercer;
  }

  @SuppressWarnings("unchecked")
  @Override
  public Class<ImmutableSortedSet<T>> getOutputClass() {
    return (Class<ImmutableSortedSet<T>>) (Class<?>) ImmutableSortedSet.class;
  }

  @Override
  public Optional<ImmutableSortedSet<T>> getOptionalValue() {
    return Optional.of(ImmutableSortedSet.<T>of());
  }

  /**
   * Coerces every element of {@code object} (which must be a Collection) into
   * {@code builder}.
   *
   * @throws CoerceFailedException if the input is not a collection, if any
   *     element fails to coerce, or if a coerced element is a duplicate
   */
  protected void fillSortedSet(
      CellPathResolver cellRoots,
      ProjectFilesystem filesystem,
      Path pathRelativeToProjectRoot,
      SortedSet<T> builder,
      Object object) throws CoerceFailedException {
    if (!(object instanceof Collection)) {
      throw CoerceFailedException.simple(object, getOutputClass());
    }
    for (Object rawElement : (Iterable<?>) object) {
      // a single failing element fails the whole collection
      T coerced = elementTypeCoercer.coerce(
          cellRoots,
          filesystem,
          pathRelativeToProjectRoot,
          rawElement);
      // TreeSet.add returns false when the element is already present
      if (!builder.add(coerced)) {
        throw new CoerceFailedException(
            String.format("duplicate element \"%s\"", coerced));
      }
    }
  }

  @Override
  public ImmutableSortedSet<T> coerce(
      CellPathResolver cellRoots,
      ProjectFilesystem filesystem,
      Path pathRelativeToProjectRoot,
      Object object)
      throws CoerceFailedException {
    // Accumulate into a TreeSet so duplicates are detected during insertion,
    // then snapshot into the immutable result.
    final SortedSet<T> accumulated = Sets.newTreeSet();
    fillSortedSet(
        cellRoots,
        filesystem,
        pathRelativeToProjectRoot,
        accumulated,
        object);
    return ImmutableSortedSet.copyOf(accumulated);
  }
}
| apache-2.0 |
gfyoung/elasticsearch | server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java | 62076 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.builder;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.Rewriteable;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.collapse.CollapseBuilder;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.rescore.RescorerBuilder;
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.slice.SliceBuilder;
import org.elasticsearch.search.sort.ScoreSortBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.SuggestBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
/**
* A search source builder allowing to easily build search source. Simple
* construction using
* {@link org.elasticsearch.search.builder.SearchSourceBuilder#searchSource()}.
*
* @see org.elasticsearch.action.search.SearchRequest#source(SearchSourceBuilder)
*/
public final class SearchSourceBuilder implements Writeable, ToXContentObject, Rewriteable<SearchSourceBuilder> {
private static final DeprecationLogger DEPRECATION_LOGGER =
new DeprecationLogger(Loggers.getLogger(SearchSourceBuilder.class));
public static final ParseField FROM_FIELD = new ParseField("from");
public static final ParseField SIZE_FIELD = new ParseField("size");
public static final ParseField TIMEOUT_FIELD = new ParseField("timeout");
public static final ParseField TERMINATE_AFTER_FIELD = new ParseField("terminate_after");
public static final ParseField QUERY_FIELD = new ParseField("query");
public static final ParseField POST_FILTER_FIELD = new ParseField("post_filter");
public static final ParseField MIN_SCORE_FIELD = new ParseField("min_score");
public static final ParseField VERSION_FIELD = new ParseField("version");
public static final ParseField EXPLAIN_FIELD = new ParseField("explain");
public static final ParseField _SOURCE_FIELD = new ParseField("_source");
public static final ParseField STORED_FIELDS_FIELD = new ParseField("stored_fields");
public static final ParseField DOCVALUE_FIELDS_FIELD = new ParseField("docvalue_fields");
public static final ParseField SCRIPT_FIELDS_FIELD = new ParseField("script_fields");
public static final ParseField SCRIPT_FIELD = new ParseField("script");
public static final ParseField IGNORE_FAILURE_FIELD = new ParseField("ignore_failure");
public static final ParseField SORT_FIELD = new ParseField("sort");
public static final ParseField TRACK_SCORES_FIELD = new ParseField("track_scores");
public static final ParseField TRACK_TOTAL_HITS_FIELD = new ParseField("track_total_hits");
public static final ParseField INDICES_BOOST_FIELD = new ParseField("indices_boost");
public static final ParseField AGGREGATIONS_FIELD = new ParseField("aggregations");
public static final ParseField AGGS_FIELD = new ParseField("aggs");
public static final ParseField HIGHLIGHT_FIELD = new ParseField("highlight");
public static final ParseField SUGGEST_FIELD = new ParseField("suggest");
public static final ParseField RESCORE_FIELD = new ParseField("rescore");
public static final ParseField STATS_FIELD = new ParseField("stats");
public static final ParseField EXT_FIELD = new ParseField("ext");
public static final ParseField PROFILE_FIELD = new ParseField("profile");
public static final ParseField SEARCH_AFTER = new ParseField("search_after");
public static final ParseField COLLAPSE = new ParseField("collapse");
public static final ParseField SLICE = new ParseField("slice");
public static final ParseField ALL_FIELDS_FIELDS = new ParseField("all_fields");
    /**
     * Parses a search source from the given parser, rejecting trailing tokens
     * after the search body (delegates with {@code checkTrailingTokens=true}).
     */
    public static SearchSourceBuilder fromXContent(XContentParser parser) throws IOException {
        return fromXContent(parser, true);
    }
    /**
     * Parses a search source from the given parser.
     *
     * @param checkTrailingTokens whether tokens after the search body should
     *        cause the parse to fail
     */
    public static SearchSourceBuilder fromXContent(XContentParser parser, boolean checkTrailingTokens) throws IOException {
        SearchSourceBuilder builder = new SearchSourceBuilder();
        builder.parseXContent(parser, checkTrailingTokens);
        return builder;
    }
    /**
     * A static factory method to construct a new, empty search source.
     */
    public static SearchSourceBuilder searchSource() {
        return new SearchSourceBuilder();
    }
    /**
     * A static factory method to construct a new, empty highlight builder.
     */
    public static HighlightBuilder highlight() {
        return new HighlightBuilder();
    }
private QueryBuilder queryBuilder;
private QueryBuilder postQueryBuilder;
private int from = -1;
private int size = -1;
private Boolean explain;
private Boolean version;
private List<SortBuilder<?>> sorts;
private boolean trackScores = false;
private boolean trackTotalHits = true;
private SearchAfterBuilder searchAfterBuilder;
private SliceBuilder sliceBuilder;
private Float minScore;
private TimeValue timeout = null;
private int terminateAfter = SearchContext.DEFAULT_TERMINATE_AFTER;
private StoredFieldsContext storedFieldsContext;
private List<FieldAndFormat> docValueFields;
private List<ScriptField> scriptFields;
private FetchSourceContext fetchSourceContext;
private AggregatorFactories.Builder aggregations;
private HighlightBuilder highlightBuilder;
private SuggestBuilder suggestBuilder;
private List<RescorerBuilder> rescoreBuilders;
private List<IndexBoost> indexBoosts = new ArrayList<>();
private List<String> stats;
private List<SearchExtBuilder> extBuilders = Collections.emptyList();
private boolean profile = false;
private CollapseBuilder collapse = null;
/**
* Constructs a new search source builder.
*/
public SearchSourceBuilder() {
}
    /**
     * Read from a stream.
     *
     * <p>The field order here is the wire format and must stay in lock-step
     * with {@link #writeTo(StreamOutput)}. Version checks guard fields whose
     * encoding changed across releases.
     */
    public SearchSourceBuilder(StreamInput in) throws IOException {
        aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new);
        explain = in.readOptionalBoolean();
        fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
        if (in.getVersion().before(Version.V_6_4_0)) {
            // pre-6.4 nodes send docvalue fields as a plain list of names (no format)
            List<String> dvFields = (List<String>) in.readGenericValue();
            if (dvFields == null) {
                docValueFields = null;
            } else {
                docValueFields = dvFields.stream()
                        .map(field -> new FieldAndFormat(field, null))
                        .collect(Collectors.toList());
            }
        } else {
            // 6.4+ sends a presence flag followed by field/format pairs
            if (in.readBoolean()) {
                docValueFields = in.readList(FieldAndFormat::new);
            } else {
                docValueFields = null;
            }
        }
        storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
        from = in.readVInt();
        highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
        indexBoosts = in.readList(IndexBoost::new);
        minScore = in.readOptionalFloat();
        postQueryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
        queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
        // optional lists below are encoded as a boolean presence flag + payload
        if (in.readBoolean()) {
            rescoreBuilders = in.readNamedWriteableList(RescorerBuilder.class);
        }
        if (in.readBoolean()) {
            scriptFields = in.readList(ScriptField::new);
        }
        size = in.readVInt();
        if (in.readBoolean()) {
            // sorts are length-prefixed named writeables
            int size = in.readVInt();
            sorts = new ArrayList<>();
            for (int i = 0; i < size; i++) {
                sorts.add(in.readNamedWriteable(SortBuilder.class));
            }
        }
        if (in.readBoolean()) {
            stats = in.readList(StreamInput::readString);
        }
        suggestBuilder = in.readOptionalWriteable(SuggestBuilder::new);
        terminateAfter = in.readVInt();
        timeout = in.readOptionalTimeValue();
        trackScores = in.readBoolean();
        version = in.readOptionalBoolean();
        extBuilders = in.readNamedWriteableList(SearchExtBuilder.class);
        profile = in.readBoolean();
        searchAfterBuilder = in.readOptionalWriteable(SearchAfterBuilder::new);
        sliceBuilder = in.readOptionalWriteable(SliceBuilder::new);
        collapse = in.readOptionalWriteable(CollapseBuilder::new);
        if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
            trackTotalHits = in.readBoolean();
        } else {
            // field did not exist before 6.0.0-beta1; default to tracking
            trackTotalHits = true;
        }
    }
    /**
     * Serializes this builder to the stream. Field order is the wire format
     * and must stay in lock-step with the {@link StreamInput} constructor;
     * version checks mirror the reader's backwards-compatibility branches.
     */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeOptionalWriteable(aggregations);
        out.writeOptionalBoolean(explain);
        out.writeOptionalWriteable(fetchSourceContext);
        if (out.getVersion().before(Version.V_6_4_0)) {
            // pre-6.4 nodes only understand a plain list of field names
            out.writeGenericValue(docValueFields == null
                    ? null
                    : docValueFields.stream().map(ff -> ff.field).collect(Collectors.toList()));
        } else {
            // 6.4+ encoding: presence flag + field/format pairs
            out.writeBoolean(docValueFields != null);
            if (docValueFields != null) {
                out.writeList(docValueFields);
            }
        }
        out.writeOptionalWriteable(storedFieldsContext);
        out.writeVInt(from);
        out.writeOptionalWriteable(highlightBuilder);
        out.writeList(indexBoosts);
        out.writeOptionalFloat(minScore);
        out.writeOptionalNamedWriteable(postQueryBuilder);
        out.writeOptionalNamedWriteable(queryBuilder);
        // optional lists are encoded as a boolean presence flag + payload
        boolean hasRescoreBuilders = rescoreBuilders != null;
        out.writeBoolean(hasRescoreBuilders);
        if (hasRescoreBuilders) {
            out.writeNamedWriteableList(rescoreBuilders);
        }
        boolean hasScriptFields = scriptFields != null;
        out.writeBoolean(hasScriptFields);
        if (hasScriptFields) {
            out.writeList(scriptFields);
        }
        out.writeVInt(size);
        boolean hasSorts = sorts != null;
        out.writeBoolean(hasSorts);
        if (hasSorts) {
            // length-prefixed list of named writeables
            out.writeVInt(sorts.size());
            for (SortBuilder<?> sort : sorts) {
                out.writeNamedWriteable(sort);
            }
        }
        boolean hasStats = stats != null;
        out.writeBoolean(hasStats);
        if (hasStats) {
            out.writeStringList(stats);
        }
        out.writeOptionalWriteable(suggestBuilder);
        out.writeVInt(terminateAfter);
        out.writeOptionalTimeValue(timeout);
        out.writeBoolean(trackScores);
        out.writeOptionalBoolean(version);
        out.writeNamedWriteableList(extBuilders);
        out.writeBoolean(profile);
        out.writeOptionalWriteable(searchAfterBuilder);
        out.writeOptionalWriteable(sliceBuilder);
        out.writeOptionalWriteable(collapse);
        if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
            // field only exists on the wire from 6.0.0-beta1 onwards
            out.writeBoolean(trackTotalHits);
        }
    }
/**
* Sets the search query for this request.
*
* @see org.elasticsearch.index.query.QueryBuilders
*/
public SearchSourceBuilder query(QueryBuilder query) {
this.queryBuilder = query;
return this;
}
/**
* Gets the query for this request
*/
public QueryBuilder query() {
return queryBuilder;
}
/**
* Sets a filter that will be executed after the query has been executed and
* only has affect on the search hits (not aggregations). This filter is
* always executed as last filtering mechanism.
*/
public SearchSourceBuilder postFilter(QueryBuilder postFilter) {
this.postQueryBuilder = postFilter;
return this;
}
/**
* Gets the post filter for this request
*/
public QueryBuilder postFilter() {
return postQueryBuilder;
}
/**
* From index to start the search from. Defaults to {@code 0}.
*/
public SearchSourceBuilder from(int from) {
if (from < 0) {
throw new IllegalArgumentException("[from] parameter cannot be negative");
}
this.from = from;
return this;
}
/**
* Gets the from index to start the search from.
**/
public int from() {
return from;
}
/**
* The number of search hits to return. Defaults to {@code 10}.
*/
public SearchSourceBuilder size(int size) {
if (size < 0) {
throw new IllegalArgumentException("[size] parameter cannot be negative, found [" + size + "]");
}
this.size = size;
return this;
}
/**
* Gets the number of search hits to return.
*/
public int size() {
return size;
}
/**
* Sets the minimum score below which docs will be filtered out.
*/
public SearchSourceBuilder minScore(float minScore) {
this.minScore = minScore;
return this;
}
/**
* Gets the minimum score below which docs will be filtered out.
*/
public Float minScore() {
return minScore;
}
/**
* Should each {@link org.elasticsearch.search.SearchHit} be returned with
* an explanation of the hit (ranking).
*/
public SearchSourceBuilder explain(Boolean explain) {
this.explain = explain;
return this;
}
/**
* Indicates whether each search hit will be returned with an explanation of
* the hit (ranking)
*/
public Boolean explain() {
return explain;
}
/**
* Should each {@link org.elasticsearch.search.SearchHit} be returned with a
* version associated with it.
*/
public SearchSourceBuilder version(Boolean version) {
this.version = version;
return this;
}
/**
* Indicates whether the document's version will be included in the search
* hits.
*/
public Boolean version() {
return version;
}
/**
* An optional timeout to control how long search is allowed to take.
*/
public SearchSourceBuilder timeout(TimeValue timeout) {
this.timeout = timeout;
return this;
}
/**
* Gets the timeout to control how long search is allowed to take.
*/
public TimeValue timeout() {
return timeout;
}
/**
* An optional terminate_after to terminate the search after collecting
* <code>terminateAfter</code> documents
*/
public SearchSourceBuilder terminateAfter(int terminateAfter) {
if (terminateAfter < 0) {
throw new IllegalArgumentException("terminateAfter must be > 0");
}
this.terminateAfter = terminateAfter;
return this;
}
/**
* Gets the number of documents to terminate after collecting.
*/
public int terminateAfter() {
return terminateAfter;
}
/**
* Adds a sort against the given field name and the sort ordering.
*
* @param name
* The name of the field
* @param order
* The sort ordering
*/
public SearchSourceBuilder sort(String name, SortOrder order) {
if (name.equals(ScoreSortBuilder.NAME)) {
return sort(SortBuilders.scoreSort().order(order));
}
return sort(SortBuilders.fieldSort(name).order(order));
}
/**
* Add a sort against the given field name.
*
* @param name
* The name of the field to sort by
*/
public SearchSourceBuilder sort(String name) {
if (name.equals(ScoreSortBuilder.NAME)) {
return sort(SortBuilders.scoreSort());
}
return sort(SortBuilders.fieldSort(name));
}
/**
* Adds a sort builder.
*/
public SearchSourceBuilder sort(SortBuilder<?> sort) {
if (sorts == null) {
sorts = new ArrayList<>();
}
sorts.add(sort);
return this;
}
/**
* Gets the bytes representing the sort builders for this request.
*/
public List<SortBuilder<?>> sorts() {
return sorts;
}
/**
* Applies when sorting, and controls if scores will be tracked as well.
* Defaults to {@code false}.
*/
public SearchSourceBuilder trackScores(boolean trackScores) {
this.trackScores = trackScores;
return this;
}
/**
* Indicates whether scores will be tracked for this request.
*/
public boolean trackScores() {
return trackScores;
}
/**
* Indicates if the total hit count for the query should be tracked.
*/
public boolean trackTotalHits() {
return trackTotalHits;
}
public SearchSourceBuilder trackTotalHits(boolean trackTotalHits) {
this.trackTotalHits = trackTotalHits;
return this;
}
/**
* The sort values that indicates which docs this request should "search after".
* The sort values of the search_after must be equal to the number of sort fields in the query and they should be
* of the same type (or parsable as such).
* Defaults to {@code null}.
*/
public Object[] searchAfter() {
if (searchAfterBuilder == null) {
return null;
}
return searchAfterBuilder.getSortValues();
}
/**
* Set the sort values that indicates which docs this request should "search after".
*/
public SearchSourceBuilder searchAfter(Object[] values) {
this.searchAfterBuilder = new SearchAfterBuilder().setSortValues(values);
return this;
}
/**
* Sets a filter that will restrict the search hits, the top hits and the aggregations to a slice of the results
* of the main query.
*/
public SearchSourceBuilder slice(SliceBuilder builder) {
this.sliceBuilder = builder;
return this;
}
/**
* Gets the slice used to filter the search hits, the top hits and the aggregations.
*/
public SliceBuilder slice() {
return sliceBuilder;
}
public CollapseBuilder collapse() {
return collapse;
}
public SearchSourceBuilder collapse(CollapseBuilder collapse) {
this.collapse = collapse;
return this;
}
/**
* Add an aggregation to perform as part of the search.
*/
public SearchSourceBuilder aggregation(AggregationBuilder aggregation) {
if (aggregations == null) {
aggregations = AggregatorFactories.builder();
}
aggregations.addAggregator(aggregation);
return this;
}
/**
* Add an aggregation to perform as part of the search.
*/
public SearchSourceBuilder aggregation(PipelineAggregationBuilder aggregation) {
if (aggregations == null) {
aggregations = AggregatorFactories.builder();
}
aggregations.addPipelineAggregator(aggregation);
return this;
}
/**
* Gets the bytes representing the aggregation builders for this request.
*/
public AggregatorFactories.Builder aggregations() {
return aggregations;
}
/**
* Adds highlight to perform as part of the search.
*/
public SearchSourceBuilder highlighter(HighlightBuilder highlightBuilder) {
this.highlightBuilder = highlightBuilder;
return this;
}
/**
* Gets the highlighter builder for this request.
*/
public HighlightBuilder highlighter() {
return highlightBuilder;
}
public SearchSourceBuilder suggest(SuggestBuilder suggestBuilder) {
this.suggestBuilder = suggestBuilder;
return this;
}
/**
* Gets the suggester builder for this request.
*/
public SuggestBuilder suggest() {
return suggestBuilder;
}
public SearchSourceBuilder addRescorer(RescorerBuilder<?> rescoreBuilder) {
if (rescoreBuilders == null) {
rescoreBuilders = new ArrayList<>();
}
rescoreBuilders.add(rescoreBuilder);
return this;
}
public SearchSourceBuilder clearRescorers() {
rescoreBuilders = null;
return this;
}
/**
* Should the query be profiled. Defaults to {@code false}
*/
public SearchSourceBuilder profile(boolean profile) {
this.profile = profile;
return this;
}
/**
* Return whether to profile query execution, or {@code null} if
* unspecified.
*/
public boolean profile() {
return profile;
}
/**
* Gets the bytes representing the rescore builders for this request.
*/
public List<RescorerBuilder> rescores() {
return rescoreBuilders;
}
    /**
     * Indicates whether the response should contain the stored _source for
     * every hit.
     *
     * @param fetch whether to fetch _source; any previously configured
     *        include/exclude patterns are preserved
     */
    public SearchSourceBuilder fetchSource(boolean fetch) {
        // keep existing include/exclude filters, only toggling the fetch flag
        FetchSourceContext fetchSourceContext = this.fetchSourceContext != null ? this.fetchSourceContext
            : FetchSourceContext.FETCH_SOURCE;
        this.fetchSourceContext = new FetchSourceContext(fetch, fetchSourceContext.includes(), fetchSourceContext.excludes());
        return this;
    }
/**
* Indicate that _source should be returned with every hit, with an
* "include" and/or "exclude" set which can include simple wildcard
* elements.
*
* @param include
* An optional include (optionally wildcarded) pattern to filter
* the returned _source
* @param exclude
* An optional exclude (optionally wildcarded) pattern to filter
* the returned _source
*/
public SearchSourceBuilder fetchSource(@Nullable String include, @Nullable String exclude) {
return fetchSource(include == null ? Strings.EMPTY_ARRAY : new String[] { include }, exclude == null ? Strings.EMPTY_ARRAY
: new String[] { exclude });
}
/**
* Indicate that _source should be returned with every hit, with an
* "include" and/or "exclude" set which can include simple wildcard
* elements.
*
* @param includes
* An optional list of include (optionally wildcarded) pattern to
* filter the returned _source
* @param excludes
* An optional list of exclude (optionally wildcarded) pattern to
* filter the returned _source
*/
public SearchSourceBuilder fetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
FetchSourceContext fetchSourceContext = this.fetchSourceContext != null ? this.fetchSourceContext
: FetchSourceContext.FETCH_SOURCE;
this.fetchSourceContext = new FetchSourceContext(fetchSourceContext.fetchSource(), includes, excludes);
return this;
}
/**
* Indicate how the _source should be fetched.
*/
public SearchSourceBuilder fetchSource(@Nullable FetchSourceContext fetchSourceContext) {
this.fetchSourceContext = fetchSourceContext;
return this;
}
/**
* Gets the {@link FetchSourceContext} which defines how the _source should
* be fetched.
*/
public FetchSourceContext fetchSource() {
return fetchSourceContext;
}
/**
* Adds a stored field to load and return as part of the
* search request. If none are specified, the source of the document will be
* return.
*/
public SearchSourceBuilder storedField(String name) {
return storedFields(Collections.singletonList(name));
}
/**
* Sets the stored fields to load and return as part of the search request. If none
* are specified, the source of the document will be returned.
*/
public SearchSourceBuilder storedFields(List<String> fields) {
if (storedFieldsContext == null) {
storedFieldsContext = StoredFieldsContext.fromList(fields);
} else {
storedFieldsContext.addFieldNames(fields);
}
return this;
}
/**
* Indicates how the stored fields should be fetched.
*/
public SearchSourceBuilder storedFields(StoredFieldsContext context) {
storedFieldsContext = context;
return this;
}
/**
* Gets the stored fields context.
*/
public StoredFieldsContext storedFields() {
return storedFieldsContext;
}
/**
* Gets the docvalue fields.
*/
public List<FieldAndFormat> docValueFields() {
return docValueFields;
}
/**
* Adds a field to load from the doc values and return as part of the
* search request.
*/
public SearchSourceBuilder docValueField(String name, @Nullable String format) {
if (docValueFields == null) {
docValueFields = new ArrayList<>();
}
docValueFields.add(new FieldAndFormat(name, format));
return this;
}
/**
* Adds a field to load from the doc values and return as part of the
* search request.
*/
public SearchSourceBuilder docValueField(String name) {
return docValueField(name, null);
}
    /**
     * Adds a script field under the given name with the provided script.
     * Script failures are not ignored: an error in the script fails the request.
     *
     * @param name
     *            The name of the field
     * @param script
     *            The script
     */
    public SearchSourceBuilder scriptField(String name, Script script) {
        scriptField(name, script, false);
        return this;
    }
    /**
     * Adds a script field under the given name with the provided script.
     *
     * @param name
     *            The name of the field
     * @param script
     *            The script
     * @param ignoreFailure
     *            whether a script error for this field should be silently skipped
     */
    public SearchSourceBuilder scriptField(String name, Script script, boolean ignoreFailure) {
        if (scriptFields == null) {
            // Lazily initialized: stays null (and is omitted from output) until first use.
            scriptFields = new ArrayList<>();
        }
        scriptFields.add(new ScriptField(name, script, ignoreFailure));
        return this;
    }
    /**
     * Gets the script fields. May be {@code null} if none were added.
     */
    public List<ScriptField> scriptFields() {
        return scriptFields;
    }
    /**
     * Sets the boost a specific index or alias will receive when the query is executed
     * against it.
     *
     * @param index
     *            The index or alias to apply the boost against
     * @param indexBoost
     *            The boost to apply to the index
     */
    public SearchSourceBuilder indexBoost(String index, float indexBoost) {
        Objects.requireNonNull(index, "index must not be null");
        // Boosts accumulate: the same index may appear more than once in the list.
        this.indexBoosts.add(new IndexBoost(index, indexBoost));
        return this;
    }
    /**
     * Gets the boost a specific indices or aliases will receive when the query is
     * executed against them.
     */
    public List<IndexBoost> indexBoosts() {
        return indexBoosts;
    }
    /**
     * The stats groups this request will be aggregated under.
     */
    public SearchSourceBuilder stats(List<String> statsGroups) {
        this.stats = statsGroups;
        return this;
    }
    /**
     * The stats groups this request will be aggregated under.
     */
    public List<String> stats() {
        return stats;
    }
    /**
     * Sets the plugin-provided search extension sections for this request.
     * The list itself must not be {@code null}, though it may be empty.
     */
    public SearchSourceBuilder ext(List<SearchExtBuilder> searchExtBuilders) {
        this.extBuilders = Objects.requireNonNull(searchExtBuilders, "searchExtBuilders must not be null");
        return this;
    }
    /**
     * Gets the plugin-provided search extension sections for this request.
     */
    public List<SearchExtBuilder> ext() {
        return extBuilders;
    }
/**
* @return true if the source only has suggest
*/
public boolean isSuggestOnly() {
return suggestBuilder != null
&& queryBuilder == null && aggregations == null;
}
    /**
     * Rewrites this search source builder into its primitive form. e.g. by
     * rewriting the QueryBuilder. If the builder did not change the identity
     * reference must be returned otherwise the builder will be rewritten
     * infinitely.
     */
    @Override
    public SearchSourceBuilder rewrite(QueryRewriteContext context) throws IOException {
        // Sanity check: a shallow copy with the current parts must equal this builder,
        // i.e. shallowCopy() copies every field that participates in equals().
        assert (this.equals(shallowCopy(queryBuilder, postQueryBuilder, aggregations, sliceBuilder, sorts, rescoreBuilders,
            highlightBuilder)));
        // The locals below intentionally shadow the fields of the same name:
        // they hold the (possibly) rewritten versions of each part.
        QueryBuilder queryBuilder = null;
        if (this.queryBuilder != null) {
            queryBuilder = this.queryBuilder.rewrite(context);
        }
        QueryBuilder postQueryBuilder = null;
        if (this.postQueryBuilder != null) {
            postQueryBuilder = this.postQueryBuilder.rewrite(context);
        }
        AggregatorFactories.Builder aggregations = null;
        if (this.aggregations != null) {
            aggregations = this.aggregations.rewrite(context);
        }
        List<SortBuilder<?>> sorts = Rewriteable.rewrite(this.sorts, context);
        List<RescorerBuilder> rescoreBuilders = Rewriteable.rewrite(this.rescoreBuilders, context);
        HighlightBuilder highlightBuilder = this.highlightBuilder;
        if (highlightBuilder != null) {
            highlightBuilder = this.highlightBuilder.rewrite(context);
        }
        // Identity (==) comparisons are deliberate: each rewrite() is contracted to
        // return the very same instance when nothing changed, and we must return
        // `this` in that case to terminate the outer rewrite loop.
        boolean rewritten = queryBuilder != this.queryBuilder || postQueryBuilder != this.postQueryBuilder
                || aggregations != this.aggregations || rescoreBuilders != this.rescoreBuilders || sorts != this.sorts ||
                this.highlightBuilder != highlightBuilder;
        if (rewritten) {
            return shallowCopy(queryBuilder, postQueryBuilder, aggregations, this.sliceBuilder, sorts, rescoreBuilders, highlightBuilder);
        }
        return this;
    }
    /**
     * Create a shallow copy of this builder with a new slice configuration.
     */
    public SearchSourceBuilder copyWithNewSlice(SliceBuilder slice) {
        // All other state is shared with this builder; only the slice differs.
        return shallowCopy(queryBuilder, postQueryBuilder, aggregations, slice, sorts, rescoreBuilders, highlightBuilder);
    }
    /**
     * Create a shallow copy of this source replaced {@link #queryBuilder}, {@link #postQueryBuilder}, and {@link #sliceBuilder}. Used by
     * {@link #rewrite(QueryRewriteContext)} and {@link #copyWithNewSlice(SliceBuilder)}.
     *
     * NOTE: every field of this class must be copied here (the rewritable parts come
     * from the arguments, everything else from {@code this}); the assertion in
     * {@link #rewrite(QueryRewriteContext)} catches a field that was forgotten.
     */
    private SearchSourceBuilder shallowCopy(QueryBuilder queryBuilder, QueryBuilder postQueryBuilder,
            AggregatorFactories.Builder aggregations, SliceBuilder slice, List<SortBuilder<?>> sorts,
            List<RescorerBuilder> rescoreBuilders, HighlightBuilder highlightBuilder) {
        SearchSourceBuilder rewrittenBuilder = new SearchSourceBuilder();
        rewrittenBuilder.aggregations = aggregations;
        rewrittenBuilder.explain = explain;
        rewrittenBuilder.extBuilders = extBuilders;
        rewrittenBuilder.fetchSourceContext = fetchSourceContext;
        rewrittenBuilder.docValueFields = docValueFields;
        rewrittenBuilder.storedFieldsContext = storedFieldsContext;
        rewrittenBuilder.from = from;
        rewrittenBuilder.highlightBuilder = highlightBuilder;
        rewrittenBuilder.indexBoosts = indexBoosts;
        rewrittenBuilder.minScore = minScore;
        rewrittenBuilder.postQueryBuilder = postQueryBuilder;
        rewrittenBuilder.profile = profile;
        rewrittenBuilder.queryBuilder = queryBuilder;
        rewrittenBuilder.rescoreBuilders = rescoreBuilders;
        rewrittenBuilder.scriptFields = scriptFields;
        rewrittenBuilder.searchAfterBuilder = searchAfterBuilder;
        rewrittenBuilder.sliceBuilder = slice;
        rewrittenBuilder.size = size;
        rewrittenBuilder.sorts = sorts;
        rewrittenBuilder.stats = stats;
        rewrittenBuilder.suggestBuilder = suggestBuilder;
        rewrittenBuilder.terminateAfter = terminateAfter;
        rewrittenBuilder.timeout = timeout;
        rewrittenBuilder.trackScores = trackScores;
        rewrittenBuilder.trackTotalHits = trackTotalHits;
        rewrittenBuilder.version = version;
        rewrittenBuilder.collapse = collapse;
        return rewrittenBuilder;
    }
    /**
     * Parses xContent into this builder, rejecting any trailing tokens after the
     * main object. See {@link #parseXContent(XContentParser, boolean)}.
     */
    public void parseXContent(XContentParser parser) throws IOException {
        parseXContent(parser, true);
    }
    /**
     * Parse some xContent into this SearchSourceBuilder, overwriting any values specified in the xContent. Use this if you need to set up
     * different defaults than a regular SearchSourceBuilder would have and use {@link #fromXContent(XContentParser, boolean)} if you have
     * normal defaults.
     *
     * @param parser The xContent parser.
     * @param checkTrailingTokens If true throws a parsing exception when extra tokens are found after the main object.
     */
    public void parseXContent(XContentParser parser, boolean checkTrailingTokens) throws IOException {
        XContentParser.Token token = parser.currentToken();
        String currentFieldName = null;
        // Accept being positioned either on the START_OBJECT already or just before it.
        if (token != XContentParser.Token.START_OBJECT && (token = parser.nextToken()) != XContentParser.Token.START_OBJECT) {
            throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT +
                    "] but found [" + token + "]", parser.getTokenLocation());
        }
        // Dispatch each top-level key by its value's token type: scalar, object or array.
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                // Scalar-valued keys: from, size, timeout, flags, shorthand sort, ...
                if (FROM_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    from = parser.intValue();
                } else if (SIZE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    size = parser.intValue();
                } else if (TIMEOUT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    timeout = TimeValue.parseTimeValue(parser.text(), null, TIMEOUT_FIELD.getPreferredName());
                } else if (TERMINATE_AFTER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    terminateAfter = parser.intValue();
                } else if (MIN_SCORE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    minScore = parser.floatValue();
                } else if (VERSION_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    version = parser.booleanValue();
                } else if (EXPLAIN_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    explain = parser.booleanValue();
                } else if (TRACK_SCORES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    trackScores = parser.booleanValue();
                } else if (TRACK_TOTAL_HITS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    trackTotalHits = parser.booleanValue();
                } else if (_SOURCE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    fetchSourceContext = FetchSourceContext.fromXContent(parser);
                } else if (STORED_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    storedFieldsContext =
                        StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), parser);
                } else if (SORT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    sort(parser.text());
                } else if (PROFILE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    profile = parser.booleanValue();
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                            parser.getTokenLocation());
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                // Object-valued keys: query, post_filter, script_fields, aggs, highlight, ...
                if (QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    queryBuilder = parseInnerQueryBuilder(parser);
                } else if (POST_FILTER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    postQueryBuilder = parseInnerQueryBuilder(parser);
                } else if (_SOURCE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    fetchSourceContext = FetchSourceContext.fromXContent(parser);
                } else if (SCRIPT_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    scriptFields = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        scriptFields.add(new ScriptField(parser));
                    }
                } else if (INDICES_BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    // Legacy object syntax {"index": boost, ...}; the array syntax below is preferred.
                    DEPRECATION_LOGGER.deprecated(
                            "Object format in indices_boost is deprecated, please use array format instead");
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            currentFieldName = parser.currentName();
                        } else if (token.isValue()) {
                            indexBoosts.add(new IndexBoost(currentFieldName, parser.floatValue()));
                        } else {
                            throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token +
                                    " in [" + currentFieldName + "].", parser.getTokenLocation());
                        }
                    }
                } else if (AGGREGATIONS_FIELD.match(currentFieldName, parser.getDeprecationHandler())
                        || AGGS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    aggregations = AggregatorFactories.parseAggregators(parser);
                } else if (HIGHLIGHT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    highlightBuilder = HighlightBuilder.fromXContent(parser);
                } else if (SUGGEST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    suggestBuilder = SuggestBuilder.fromXContent(parser);
                } else if (SORT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    sorts = new ArrayList<>(SortBuilder.fromXContent(parser));
                } else if (RESCORE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    rescoreBuilders = new ArrayList<>();
                    rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser));
                } else if (EXT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    // Plugin-provided sections, resolved by name via the named-object registry.
                    extBuilders = new ArrayList<>();
                    String extSectionName = null;
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            extSectionName = parser.currentName();
                        } else {
                            SearchExtBuilder searchExtBuilder = parser.namedObject(SearchExtBuilder.class, extSectionName, null);
                            if (searchExtBuilder.getWriteableName().equals(extSectionName) == false) {
                                throw new IllegalStateException("The parsed [" + searchExtBuilder.getClass().getName() + "] object has a "
                                        + "different writeable name compared to the name of the section that it was parsed from: found ["
                                        + searchExtBuilder.getWriteableName() + "] expected [" + extSectionName + "]");
                            }
                            extBuilders.add(searchExtBuilder);
                        }
                    }
                } else if (SLICE.match(currentFieldName, parser.getDeprecationHandler())) {
                    sliceBuilder = SliceBuilder.fromXContent(parser);
                } else if (COLLAPSE.match(currentFieldName, parser.getDeprecationHandler())) {
                    collapse = CollapseBuilder.fromXContent(parser);
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                            parser.getTokenLocation());
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                // Array-valued keys: stored_fields, docvalue_fields, sort, rescore, stats, ...
                if (STORED_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    storedFieldsContext = StoredFieldsContext.fromXContent(STORED_FIELDS_FIELD.getPreferredName(), parser);
                } else if (DOCVALUE_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    docValueFields = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        docValueFields.add(FieldAndFormat.fromXContent(parser));
                    }
                } else if (INDICES_BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        indexBoosts.add(new IndexBoost(parser));
                    }
                } else if (SORT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    sorts = new ArrayList<>(SortBuilder.fromXContent(parser));
                } else if (RESCORE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    rescoreBuilders = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser));
                    }
                } else if (STATS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    stats = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        if (token == XContentParser.Token.VALUE_STRING) {
                            stats.add(parser.text());
                        } else {
                            throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING +
                                    "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
                        }
                    }
                } else if (_SOURCE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    fetchSourceContext = FetchSourceContext.fromXContent(parser);
                } else if (SEARCH_AFTER.match(currentFieldName, parser.getDeprecationHandler())) {
                    searchAfterBuilder = SearchAfterBuilder.fromXContent(parser);
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                            parser.getTokenLocation());
                }
            } else {
                throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                        parser.getTokenLocation());
            }
        }
        if (checkTrailingTokens) {
            // Anything after the closing brace of the main object is a client error.
            token = parser.nextToken();
            if (token != null) {
                throw new ParsingException(parser.getTokenLocation(), "Unexpected token [" + token + "] found after the main object.");
            }
        }
    }
public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
if (from != -1) {
builder.field(FROM_FIELD.getPreferredName(), from);
}
if (size != -1) {
builder.field(SIZE_FIELD.getPreferredName(), size);
}
if (timeout != null && !timeout.equals(TimeValue.MINUS_ONE)) {
builder.field(TIMEOUT_FIELD.getPreferredName(), timeout.getStringRep());
}
if (terminateAfter != SearchContext.DEFAULT_TERMINATE_AFTER) {
builder.field(TERMINATE_AFTER_FIELD.getPreferredName(), terminateAfter);
}
if (queryBuilder != null) {
builder.field(QUERY_FIELD.getPreferredName(), queryBuilder);
}
if (postQueryBuilder != null) {
builder.field(POST_FILTER_FIELD.getPreferredName(), postQueryBuilder);
}
if (minScore != null) {
builder.field(MIN_SCORE_FIELD.getPreferredName(), minScore);
}
if (version != null) {
builder.field(VERSION_FIELD.getPreferredName(), version);
}
if (explain != null) {
builder.field(EXPLAIN_FIELD.getPreferredName(), explain);
}
if (profile) {
builder.field("profile", true);
}
if (fetchSourceContext != null) {
builder.field(_SOURCE_FIELD.getPreferredName(), fetchSourceContext);
}
if (storedFieldsContext != null) {
storedFieldsContext.toXContent(STORED_FIELDS_FIELD.getPreferredName(), builder);
}
if (docValueFields != null) {
builder.startArray(DOCVALUE_FIELDS_FIELD.getPreferredName());
for (FieldAndFormat docValueField : docValueFields) {
builder.startObject()
.field("field", docValueField.field);
if (docValueField.format != null) {
builder.field("format", docValueField.format);
}
builder.endObject();
}
builder.endArray();
}
if (scriptFields != null) {
builder.startObject(SCRIPT_FIELDS_FIELD.getPreferredName());
for (ScriptField scriptField : scriptFields) {
scriptField.toXContent(builder, params);
}
builder.endObject();
}
if (sorts != null) {
builder.startArray(SORT_FIELD.getPreferredName());
for (SortBuilder<?> sort : sorts) {
sort.toXContent(builder, params);
}
builder.endArray();
}
if (trackScores) {
builder.field(TRACK_SCORES_FIELD.getPreferredName(), true);
}
if (trackTotalHits == false) {
builder.field(TRACK_TOTAL_HITS_FIELD.getPreferredName(), false);
}
if (searchAfterBuilder != null) {
builder.array(SEARCH_AFTER.getPreferredName(), searchAfterBuilder.getSortValues());
}
if (sliceBuilder != null) {
builder.field(SLICE.getPreferredName(), sliceBuilder);
}
if (!indexBoosts.isEmpty()) {
builder.startArray(INDICES_BOOST_FIELD.getPreferredName());
for (IndexBoost ib : indexBoosts) {
builder.startObject();
builder.field(ib.index, ib.boost);
builder.endObject();
}
builder.endArray();
}
if (aggregations != null) {
builder.field(AGGREGATIONS_FIELD.getPreferredName(), aggregations);
}
if (highlightBuilder != null) {
builder.field(HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder);
}
if (suggestBuilder != null) {
builder.field(SUGGEST_FIELD.getPreferredName(), suggestBuilder);
}
if (rescoreBuilders != null) {
builder.startArray(RESCORE_FIELD.getPreferredName());
for (RescorerBuilder<?> rescoreBuilder : rescoreBuilders) {
rescoreBuilder.toXContent(builder, params);
}
builder.endArray();
}
if (stats != null) {
builder.field(STATS_FIELD.getPreferredName(), stats);
}
if (extBuilders != null && extBuilders.isEmpty() == false) {
builder.startObject(EXT_FIELD.getPreferredName());
for (SearchExtBuilder extBuilder : extBuilders) {
extBuilder.toXContent(builder, params);
}
builder.endObject();
}
if (collapse != null) {
builder.field(COLLAPSE.getPreferredName(), collapse);
}
return builder;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
innerToXContent(builder, params);
builder.endObject();
return builder;
}
public static class IndexBoost implements Writeable, ToXContentObject {
private final String index;
private final float boost;
IndexBoost(String index, float boost) {
this.index = index;
this.boost = boost;
}
IndexBoost(StreamInput in) throws IOException {
index = in.readString();
boost = in.readFloat();
}
IndexBoost(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.START_OBJECT) {
token = parser.nextToken();
if (token == XContentParser.Token.FIELD_NAME) {
index = parser.currentName();
} else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.FIELD_NAME +
"] in [" + INDICES_BOOST_FIELD + "] but found [" + token + "]", parser.getTokenLocation());
}
token = parser.nextToken();
if (token == XContentParser.Token.VALUE_NUMBER) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_NUMBER +
"] in [" + INDICES_BOOST_FIELD + "] but found [" + token + "]", parser.getTokenLocation());
}
token = parser.nextToken();
if (token != XContentParser.Token.END_OBJECT) {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.END_OBJECT +
"] in [" + INDICES_BOOST_FIELD + "] but found [" + token + "]", parser.getTokenLocation());
}
} else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT +
"] in [" + parser.currentName() + "] but found [" + token + "]", parser.getTokenLocation());
}
}
public String getIndex() {
return index;
}
public float getBoost() {
return boost;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(index);
out.writeFloat(boost);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(index, boost);
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(index, boost);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
IndexBoost other = (IndexBoost) obj;
return Objects.equals(index, other.index)
&& Objects.equals(boost, other.boost);
}
}
    /**
     * A named script-computed field of the search request. Immutable value type;
     * serializable over the wire and as the xContent fragment
     * {@code "name": {"script": ..., "ignore_failure": bool}}.
     */
    public static class ScriptField implements Writeable, ToXContentFragment {
        // Whether a script error for this field is silently skipped instead of
        // failing the request.
        private final boolean ignoreFailure;
        private final String fieldName;
        private final Script script;
        public ScriptField(String fieldName, Script script, boolean ignoreFailure) {
            this.fieldName = fieldName;
            this.script = script;
            this.ignoreFailure = ignoreFailure;
        }
        /**
         * Read from a stream.
         */
        public ScriptField(StreamInput in) throws IOException {
            fieldName = in.readString();
            script = new Script(in);
            ignoreFailure = in.readBoolean();
        }
        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeString(fieldName);
            script.writeTo(out);
            out.writeBoolean(ignoreFailure);
        }
        /**
         * Parses a single script field entry. The parser must be positioned on
         * the field name; the value must be an object containing a "script"
         * (scalar shorthand or object form) and an optional "ignore_failure".
         */
        public ScriptField(XContentParser parser) throws IOException {
            boolean ignoreFailure = false;
            // The enclosing field name becomes the name of the script field.
            String scriptFieldName = parser.currentName();
            Script script = null;
            XContentParser.Token token;
            token = parser.nextToken();
            if (token == XContentParser.Token.START_OBJECT) {
                String currentFieldName = null;
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else if (token.isValue()) {
                        // Scalar form, e.g. "script": "doc['x'].value * 2".
                        if (SCRIPT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                            script = Script.parse(parser);
                        } else if (IGNORE_FAILURE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                            ignoreFailure = parser.booleanValue();
                        } else {
                            throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName
                                    + "].", parser.getTokenLocation());
                        }
                    } else if (token == XContentParser.Token.START_OBJECT) {
                        // Object form, e.g. "script": {"source": ..., "params": ...}.
                        if (SCRIPT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                            script = Script.parse(parser);
                        } else {
                            throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName
                                    + "].", parser.getTokenLocation());
                        }
                    } else {
                        throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName
                                + "].", parser.getTokenLocation());
                    }
                }
                this.ignoreFailure = ignoreFailure;
                this.fieldName = scriptFieldName;
                this.script = script;
            } else {
                throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] in ["
                        + parser.currentName() + "] but found [" + token + "]", parser.getTokenLocation());
            }
        }
        public String fieldName() {
            return fieldName;
        }
        public Script script() {
            return script;
        }
        public boolean ignoreFailure() {
            return ignoreFailure;
        }
        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject(fieldName);
            builder.field(SCRIPT_FIELD.getPreferredName(), script);
            builder.field(IGNORE_FAILURE_FIELD.getPreferredName(), ignoreFailure);
            builder.endObject();
            return builder;
        }
        @Override
        public int hashCode() {
            return Objects.hash(fieldName, script, ignoreFailure);
        }
        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            ScriptField other = (ScriptField) obj;
            return Objects.equals(fieldName, other.fieldName)
                && Objects.equals(script, other.script)
                && Objects.equals(ignoreFailure, other.ignoreFailure);
        }
    }
    // NOTE: the field list here must stay in sync with equals() below and with
    // shallowCopy() above; a field present in one but not the others breaks the
    // equals/hashCode contract or the rewrite() assertion.
    @Override
    public int hashCode() {
        return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder,
                indexBoosts, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size,
                sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version,
                profile, extBuilders, collapse, trackTotalHits);
    }
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj.getClass() != getClass()) {
return false;
}
SearchSourceBuilder other = (SearchSourceBuilder) obj;
return Objects.equals(aggregations, other.aggregations)
&& Objects.equals(explain, other.explain)
&& Objects.equals(fetchSourceContext, other.fetchSourceContext)
&& Objects.equals(docValueFields, other.docValueFields)
&& Objects.equals(storedFieldsContext, other.storedFieldsContext)
&& Objects.equals(from, other.from)
&& Objects.equals(highlightBuilder, other.highlightBuilder)
&& Objects.equals(indexBoosts, other.indexBoosts)
&& Objects.equals(minScore, other.minScore)
&& Objects.equals(postQueryBuilder, other.postQueryBuilder)
&& Objects.equals(queryBuilder, other.queryBuilder)
&& Objects.equals(rescoreBuilders, other.rescoreBuilders)
&& Objects.equals(scriptFields, other.scriptFields)
&& Objects.equals(size, other.size)
&& Objects.equals(sorts, other.sorts)
&& Objects.equals(searchAfterBuilder, other.searchAfterBuilder)
&& Objects.equals(sliceBuilder, other.sliceBuilder)
&& Objects.equals(stats, other.stats)
&& Objects.equals(suggestBuilder, other.suggestBuilder)
&& Objects.equals(terminateAfter, other.terminateAfter)
&& Objects.equals(timeout, other.timeout)
&& Objects.equals(trackScores, other.trackScores)
&& Objects.equals(version, other.version)
&& Objects.equals(profile, other.profile)
&& Objects.equals(extBuilders, other.extBuilders)
&& Objects.equals(collapse, other.collapse)
&& Objects.equals(trackTotalHits, other.trackTotalHits);
}
    @Override
    public String toString() {
        return toString(EMPTY_PARAMS);
    }
    /**
     * Renders this builder as a pretty-printed JSON string using the given
     * serialization params. IO failures are rethrown unchecked since an
     * in-memory render is not expected to fail.
     */
    public String toString(Params params) {
        try {
            return XContentHelper.toXContent(this, XContentType.JSON, params, true).utf8ToString();
        } catch (IOException e) {
            throw new ElasticsearchException(e);
        }
    }
}
| apache-2.0 |
chenc10/Spark-PAF | examples/src/main/java/org/apache/spark/examples/ml/JavaStopWordsRemoverExample.java | 2353 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.examples.ml;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SQLContext;
// $example on$
import java.util.Arrays;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.ml.feature.StopWordsRemover;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
// $example off$
/**
 * Example showing how {@code StopWordsRemover} filters common stop words
 * out of tokenized text columns.
 */
public class JavaStopWordsRemoverExample {
  public static void main(String[] args) {
    SparkConf sparkConf = new SparkConf().setAppName("JavaStopWordsRemoverExample");
    JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);
    SQLContext sqlContext = new SQLContext(sparkContext);
    // $example on$
    StopWordsRemover remover = new StopWordsRemover()
      .setInputCol("raw")
      .setOutputCol("filtered");
    JavaRDD<Row> data = sparkContext.parallelize(Arrays.asList(
      RowFactory.create(Arrays.asList("I", "saw", "the", "red", "baloon")),
      RowFactory.create(Arrays.asList("Mary", "had", "a", "little", "lamb"))
    ));
    StructType schema = new StructType(new StructField[]{
      new StructField(
        "raw", DataTypes.createArrayType(DataTypes.StringType), false, Metadata.empty())
    });
    DataFrame sentences = sqlContext.createDataFrame(data, schema);
    remover.transform(sentences).show();
    // $example off$
    sparkContext.stop();
  }
}
| apache-2.0 |
nmldiegues/stibt | infinispan/core/src/test/java/org/infinispan/api/CacheAPITest.java | 9493 | /*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.api;
import org.infinispan.configuration.cache.CacheMode;
import org.infinispan.configuration.cache.Configuration;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.test.TestingUtil;
import org.infinispan.test.fwk.TestCacheManagerFactory;
import org.infinispan.util.concurrent.IsolationLevel;
import org.testng.annotations.Test;
import javax.transaction.NotSupportedException;
import javax.transaction.SystemException;
import javax.transaction.TransactionManager;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import static org.infinispan.test.TestingUtil.v;
import static org.testng.AssertJUnit.assertEquals;
/**
* Tests the {@link org.infinispan.Cache} public API at a high level
*
* @author <a href="mailto:manik@jboss.org">Manik Surtani</a>
*/
@Test(groups = "functional")
public abstract class CacheAPITest extends APINonTxTest {
    /**
     * Creates a local cache manager whose "test" cache is configured with the
     * subclass-provided isolation level, optional eviction and any extra
     * amendments, and assigns that cache to the inherited {@code cache} field.
     */
    @Override
    protected EmbeddedCacheManager createCacheManager() throws Exception {
        // start a single cache instance
        ConfigurationBuilder cb = getDefaultStandaloneCacheConfig(true);
        cb.locking().isolationLevel(getIsolationLevel());
        addEviction(cb);
        amend(cb);
        EmbeddedCacheManager cm = TestCacheManagerFactory.createLocalCacheManager(false);
        cm.defineConfiguration("test", cb.build());
        cache = cm.getCache("test");
        return cm;
    }
    /**
     * Hook for subclasses to tweak the cache configuration before the cache is
     * created; the default implementation does nothing.
     */
    protected void amend(ConfigurationBuilder cb) {
    }
    /** Isolation level the concrete test subclass runs its cache with. */
    protected abstract IsolationLevel getIsolationLevel();
    /**
     * Hook for subclasses to enable eviction; the default implementation
     * leaves the configuration untouched.
     */
    protected ConfigurationBuilder addEviction(ConfigurationBuilder cb) {
        return cb;
    }
    /**
     * Tests that the configuration contains the values expected, as well as immutability of certain elements
     */
    public void testConfiguration() {
        Configuration c = cache.getCacheConfiguration();
        // Cache was created via createLocalCacheManager, so it must be LOCAL.
        assert CacheMode.LOCAL.equals(c.clustering().cacheMode());
        // Transactional config (see createCacheManager) implies a TM lookup is set.
        assert null != c.transaction().transactionManagerLookup();
    }
    public void testGetMembersInLocalMode() {
        assert manager(cache).getAddress() == null : "Cache members should be null if running in LOCAL mode";
    }
    /**
     * Verifies that overwriting a committed entry inside a transaction and then
     * rolling back restores the original value and leaves the cache size,
     * key set and value collection unchanged.
     */
    public void testRollbackAfterOverwrite() throws Exception {
        String key = "key", value = "value", value2 = "value2";
        int size = 0;
        // Baseline: a committed put is visible through all read views.
        cache.put(key, value);
        assert cache.get(key).equals(value);
        size = 1;
        assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
        assert cache.keySet().contains(key);
        assert cache.values().contains(value);
        // Inside a transaction the overwrite is visible to this thread.
        TestingUtil.getTransactionManager(cache).begin();
        cache.put(key, value2);
        assert cache.get(key).equals(value2);
        size = 1;
        assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
        assert cache.keySet().contains(key);
        assert cache.values().contains(value2);
        // Rollback must restore the pre-transaction state.
        TestingUtil.getTransactionManager(cache).rollback();
        assert cache.get(key).equals(value);
        size = 1;
        assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
        assert cache.keySet().contains(key);
        assert cache.values().contains(value);
    }
/**
 * Removes an entry inside a transaction and rolls back, asserting that the
 * entry (and the original cache size) reappears.
 */
public void testRollbackAfterRemove() throws Exception {
    String key = "key", value = "value";
    int size = 0;
    // Seed the cache outside any transaction.
    cache.put(key, value);
    assert cache.get(key).equals(value);
    size = 1;
    assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
    assert cache.keySet().contains(key);
    assert cache.values().contains(value);
    // Remove within a transaction: the removal is visible to this tx only.
    TestingUtil.getTransactionManager(cache).begin();
    cache.remove(key);
    assert cache.get(key) == null;
    size = 0;
    assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
    // Roll back: the entry must be restored.
    TestingUtil.getTransactionManager(cache).rollback();
    assert cache.get(key).equals(value);
    size = 1;
    assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
    assert cache.keySet().contains(key);
    assert cache.values().contains(value);
}
/**
 * Clears the cache inside a transaction and rolls back, asserting that the
 * pre-transaction contents are restored.
 */
public void testRollbackAfterClear() throws Exception {
    String key = "key", value = "value";
    int size = 0;
    // Seed the cache outside any transaction.
    cache.put(key, value);
    assert cache.get(key).equals(value);
    size = 1;
    assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
    assert cache.keySet().contains(key);
    assert cache.values().contains(value);
    final TransactionManager transactionManager = cache.getAdvancedCache().getTransactionManager();
    transactionManager.begin();
    log.trace("Here is where it begins: " + transactionManager.getTransaction());
    // size() is called before clear() — presumably to touch the tx context first; TODO confirm intent.
    cache.size();
    cache.clear();
    assert cache.get(key) == null;
    size = 0;
    assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
    // Roll back: the cleared entry must be restored.
    TestingUtil.getTransactionManager(cache).rollback();
    assert cache.get(key).equals(value);
    size = 1;
    assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
    assert cache.keySet().contains(key);
    assert cache.values().contains(value);
}
/**
 * Asserts that {@code cache.entrySet()} reflects both previously committed
 * entries and entries added inside the current transaction.
 *
 * @param m the test method, used to derive per-test values via {@code v(m, i)}
 */
public void testEntrySetEqualityInTx(Method m) throws Exception {
    // NOTE(review): raw Map/Set types are used throughout this method; consider
    // adding generics once the key/value types of 'cache' are confirmed.
    Map dataIn = new HashMap();
    dataIn.put(1, v(m, 1));
    dataIn.put(2, v(m, 2));
    // Committed baseline data.
    cache.putAll(dataIn);
    TransactionManager tm = cache.getAdvancedCache().getTransactionManager();
    tm.begin();
    try {
        Map txDataIn = new HashMap();
        txDataIn.put(3, v(m, 3));
        Map allEntriesIn = new HashMap(dataIn);
        // Modify expectations to include data to be included
        allEntriesIn.putAll(txDataIn);
        // Add an entry within tx
        cache.putAll(txDataIn);
        // The tx-local view must contain both committed and uncommitted entries.
        Set entries = cache.entrySet();
        assertEquals(allEntriesIn.entrySet(), entries);
    } finally {
        tm.commit();
    }
}
/**
 * Adds a second entry inside a transaction and rolls back, asserting that only
 * the pre-transaction entry survives.
 */
public void testRollbackAfterPut() throws Exception {
    String key = "key", value = "value", key2 = "keyTwo", value2 = "value2";
    int size = 0;
    // Seed the cache outside any transaction.
    cache.put(key, value);
    assert cache.get(key).equals(value);
    size = 1;
    assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
    assert cache.keySet().contains(key);
    assert cache.values().contains(value);
    // Add a second entry within a transaction: it must be visible to this tx.
    TestingUtil.getTransactionManager(cache).begin();
    cache.put(key2, value2);
    assert cache.get(key2).equals(value2);
    assert cache.keySet().contains(key2);
    size = 2;
    // Leftover debug System.out.println removed; sizes checked in the same
    // compound form used by the sibling rollback tests.
    assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
    assert cache.values().contains(value2);
    // Roll back: only the original entry must remain.
    TestingUtil.getTransactionManager(cache).rollback();
    assert cache.get(key).equals(value);
    size = 1;
    assert size == cache.size() && size == cache.keySet().size() && size == cache.values().size() && size == cache.entrySet().size();
    assert cache.keySet().contains(key);
    assert cache.values().contains(value);
}
/** Clearing a populated cache must leave it empty. */
public void testSizeAfterClear() {
    final int entryCount = 10;
    for (int n = 0; n < entryCount; n++) {
        cache.put(n, "value" + n);
    }
    cache.clear();
    assert cache.isEmpty();
}
/**
 * Removes an entry inside a transaction (making it invisible to the tx) and
 * rolls back, asserting the original value is restored.
 */
public void testPutIfAbsentAfterRemoveInTx() throws SystemException, NotSupportedException {
    String key = "key_1", old_value = "old_value";
    cache.put(key, old_value);
    assert cache.get(key).equals(old_value);
    TestingUtil.getTransactionManager(cache).begin();
    // Within the tx the removal is immediately visible...
    assert cache.remove(key).equals(old_value);
    assert cache.get(key) == null;
    // assertEquals(cache.putIfAbsent(key, new_value), null);
    TestingUtil.getTransactionManager(cache).rollback();
    // ...but rollback restores the pre-tx state.
    assertEquals(old_value, cache.get(key));
}
}
| apache-2.0 |
wpstudio/blazecoinj | tools/src/main/java/com/google/bitcoin/tools/BuildCheckpoints.java | 5898 | /*
* Copyright 2013 Google Inc.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.tools;
import com.google.bitcoin.core.*;
import com.google.bitcoin.params.MainNetParams;
import com.google.bitcoin.store.BlockStore;
import com.google.bitcoin.store.MemoryBlockStore;
import com.google.bitcoin.utils.BriefLogFormatter;
import com.google.bitcoin.utils.Threading;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.util.Date;
import java.util.TreeMap;
import static com.google.common.base.Preconditions.checkState;
/**
* Downloads and verifies a full chain from your local peer, emitting checkpoints at each difficulty transition period
* to a file which is then signed with your key.
*/
public class BuildCheckpoints {
private static final NetworkParameters PARAMS = MainNetParams.get();
private static final File CHECKPOINTS_FILE = new File("checkpoints");
public static void main(String[] args) throws Exception {
BriefLogFormatter.init();
// Sorted map of UNIX time of block to StoredBlock object.
final TreeMap<Integer, StoredBlock> checkpoints = new TreeMap<Integer, StoredBlock>();
// Configure bitcoinj to fetch only headers, not save them to disk, connect to a local fully synced/validated
// node and to save block headers that are on interval boundaries, as long as they are <1 month old.
final BlockStore store = new MemoryBlockStore(PARAMS);
final BlockChain chain = new BlockChain(PARAMS, store);
final PeerGroup peerGroup = new PeerGroup(PARAMS, chain);
peerGroup.addAddress(InetAddress.getLocalHost());
long now = new Date().getTime() / 1000;
peerGroup.setFastCatchupTimeSecs(now);
final long oneMonthAgo = now - (86400 * CoinDefinition.checkpointDaysBack);
chain.addListener(new AbstractBlockChainListener() {
@Override
public void notifyNewBestBlock(StoredBlock block) throws VerificationException {
int height = block.getHeight();
if (height % CoinDefinition.getIntervalCheckpoints() == 0 && block.getHeader().getTimeSeconds() <= oneMonthAgo) {
// if (height % PARAMS.getInterval() == 0 && block.getHeader().getTimeSeconds() <= oneMonthAgo) {
System.out.println(String.format("Checkpointing block %s at height %d",
block.getHeader().getHash(), block.getHeight()));
checkpoints.put(height, block);
}
}
}, Threading.SAME_THREAD);
peerGroup.startAsync();
peerGroup.awaitRunning();
peerGroup.downloadBlockChain();
checkState(checkpoints.size() > 0);
// Write checkpoint data out.
final FileOutputStream fileOutputStream = new FileOutputStream(CHECKPOINTS_FILE, false);
MessageDigest digest = MessageDigest.getInstance("SHA-256");
final DigestOutputStream digestOutputStream = new DigestOutputStream(fileOutputStream, digest);
digestOutputStream.on(false);
final DataOutputStream dataOutputStream = new DataOutputStream(digestOutputStream);
dataOutputStream.writeBytes("CHECKPOINTS 1");
dataOutputStream.writeInt(0); // Number of signatures to read. Do this later.
digestOutputStream.on(true);
dataOutputStream.writeInt(checkpoints.size());
ByteBuffer buffer = ByteBuffer.allocate(StoredBlock.COMPACT_SERIALIZED_SIZE);
for (StoredBlock block : checkpoints.values()) {
block.serializeCompact(buffer);
dataOutputStream.write(buffer.array());
buffer.position(0);
}
dataOutputStream.close();
Sha256Hash checkpointsHash = new Sha256Hash(digest.digest());
System.out.println("Hash of checkpoints data is " + checkpointsHash);
digestOutputStream.close();
fileOutputStream.close();
peerGroup.stopAsync();
peerGroup.awaitTerminated();
store.close();
// Sanity check the created file.
CheckpointManager manager = new CheckpointManager(PARAMS, new FileInputStream(CHECKPOINTS_FILE));
checkState(manager.numCheckpoints() == checkpoints.size());
if (PARAMS.getId() == NetworkParameters.ID_MAINNET) {
//StoredBlock test = manager.getCheckpointBefore(1390500000); // Thu Jan 23 19:00:00 CET 2014
//checkState(test.getHeight() == 280224);
//checkState(test.getHeader().getHashAsString()
// .equals("5a4e378e1fd0cc77d9e4cfe84216366908e9352b3b5a661c7f0b590e4b077e27"));
} else if (PARAMS.getId() == NetworkParameters.ID_TESTNET) {
//StoredBlock test = manager.getCheckpointBefore(1390500000); // Thu Jan 23 19:00:00 CET 2014
//checkState(test.getHeight() == 167328);
//checkState(test.getHeader().getHashAsString()
// .equals("0000000000035ae7d5025c2538067fe7adb1cf5d5d9c31b024137d9090ed13a9"));
}
System.out.println("Checkpoints written to '" + CHECKPOINTS_FILE.getCanonicalPath() + "'.");
}
}
| apache-2.0 |
djechelon/spring-security | cas/src/main/java/org/springframework/security/cas/jackson2/CasJackson2Module.java | 2405 | /*
* Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.cas.jackson2;
import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.databind.module.SimpleModule;
import org.jasig.cas.client.authentication.AttributePrincipalImpl;
import org.jasig.cas.client.validation.AssertionImpl;
import org.springframework.security.cas.authentication.CasAuthenticationToken;
import org.springframework.security.jackson2.SecurityJackson2Modules;
/**
* Jackson module for spring-security-cas. This module register
* {@link AssertionImplMixin}, {@link AttributePrincipalImplMixin} and
* {@link CasAuthenticationTokenMixin}. If no default typing enabled by default then it'll
* enable it because typing info is needed to properly serialize/deserialize objects. In
* order to use this module just add this module into your ObjectMapper configuration.
*
* <pre>
* ObjectMapper mapper = new ObjectMapper();
* mapper.registerModule(new CasJackson2Module());
* </pre> <b>Note: use {@link SecurityJackson2Modules#getModules(ClassLoader)} to get list
* of all security modules on the classpath.</b>
*
* @author Jitendra Singh.
* @since 4.2
* @see org.springframework.security.jackson2.SecurityJackson2Modules
*/
/**
 * Jackson module registering the spring-security-cas mixins
 * ({@link CasAuthenticationTokenMixin}, {@link AssertionImplMixin} and
 * {@link AttributePrincipalImplMixin}) and enabling default typing, which the
 * mixins need for correct serialization/deserialization. Register it via
 * {@code mapper.registerModule(new CasJackson2Module())}, or obtain all
 * security modules at once through
 * {@link SecurityJackson2Modules#getModules(ClassLoader)}.
 *
 * @author Jitendra Singh.
 * @since 4.2
 * @see org.springframework.security.jackson2.SecurityJackson2Modules
 */
public class CasJackson2Module extends SimpleModule {

    public CasJackson2Module() {
        super(CasJackson2Module.class.getName(), new Version(1, 0, 0, null, null, null));
    }

    @Override
    public void setupModule(SetupContext context) {
        // Typing info must be present for these classes to round-trip correctly.
        SecurityJackson2Modules.enableDefaultTyping(context.getOwner());
        // Mixin registrations are independent; order is irrelevant.
        context.setMixInAnnotations(CasAuthenticationToken.class, CasAuthenticationTokenMixin.class);
        context.setMixInAnnotations(AssertionImpl.class, AssertionImplMixin.class);
        context.setMixInAnnotations(AttributePrincipalImpl.class, AttributePrincipalImplMixin.class);
    }
}
| apache-2.0 |
mesosphere/usergrid | stack/services/src/main/java/org/apache/usergrid/management/exceptions/UnactivatedAppUserException.java | 1337 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.management.exceptions;
/**
 * Thrown when an operation requires an activated application user but the
 * account in question has not yet been activated.
 */
public class UnactivatedAppUserException extends ManagementException {

    private static final long serialVersionUID = 1L;

    public UnactivatedAppUserException() {
        super();
    }

    public UnactivatedAppUserException(String message) {
        super(message);
    }

    public UnactivatedAppUserException(Throwable cause) {
        super(cause);
    }

    public UnactivatedAppUserException(String message, Throwable cause) {
        super(message, cause);
    }
}
| apache-2.0 |
zhuyuanyan/PCCREDIT | src/java/com/cardpay/pccredit/customer/model/MarketingPlanWeb.java | 2252 | package com.cardpay.pccredit.customer.model;
import java.math.BigDecimal;
import java.util.Date;
import com.wicresoft.jrad.base.database.model.BusinessModel;
/**
 * Web/view model for a customer marketing plan row. Plain data holder: the
 * plan's own attributes plus display fields joined from related entities.
 */
public class MarketingPlanWeb extends BusinessModel {

    private static final long serialVersionUID = 1L;

    // Plan identity and descriptive fields.
    private String id;
    private String chineseName;
    private String productName;

    // Marketing execution details.
    private String marketingTime;
    private String marketingMethod;
    private Date marketingEndtime;
    private String endResult;
    private String createWay;

    // Display / aggregate fields.
    private String userName;
    private String displayName;
    private BigDecimal countAction;

    public String getId() {
        return id;
    }

    public void setId(String value) {
        this.id = value;
    }

    public String getChineseName() {
        return chineseName;
    }

    public void setChineseName(String value) {
        this.chineseName = value;
    }

    public String getProductName() {
        return productName;
    }

    public void setProductName(String value) {
        this.productName = value;
    }

    public String getMarketingTime() {
        return marketingTime;
    }

    public void setMarketingTime(String value) {
        this.marketingTime = value;
    }

    public String getMarketingMethod() {
        return marketingMethod;
    }

    public void setMarketingMethod(String value) {
        this.marketingMethod = value;
    }

    public Date getMarketingEndtime() {
        return marketingEndtime;
    }

    public void setMarketingEndtime(Date value) {
        this.marketingEndtime = value;
    }

    public String getEndResult() {
        return endResult;
    }

    public void setEndResult(String value) {
        this.endResult = value;
    }

    public String getCreateWay() {
        return createWay;
    }

    public void setCreateWay(String value) {
        this.createWay = value;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String value) {
        this.userName = value;
    }

    public String getDisplayName() {
        return displayName;
    }

    public void setDisplayName(String value) {
        this.displayName = value;
    }

    public BigDecimal getCountAction() {
        return countAction;
    }

    public void setCountAction(BigDecimal value) {
        this.countAction = value;
    }
}
| apache-2.0 |
sankarh/hive | itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestDBTokenStore.java | 4499 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.metastore.security;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hive.metastore.HMSHandler;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.security.DBTokenStore;
import org.apache.hadoop.hive.metastore.security.DelegationTokenStore.TokenStoreException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
import org.apache.hadoop.security.token.delegation.HiveDelegationTokenSupport;
import org.junit.Assert;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotSame;
import org.junit.Test;
/**
* TestDBTokenStore.
*/
/**
 * Unit test for {@link DBTokenStore}: exercises master-key CRUD and
 * delegation-token CRUD against a metastore-backed token store.
 *
 * All bare Java {@code assert} statements were converted to JUnit
 * {@code Assert} calls (already statically imported): bare asserts are silent
 * no-ops unless the JVM runs with {@code -ea}, so they could never fail.
 */
public class TestDBTokenStore {

  @Test
  public void testDBTokenStore() throws TokenStoreException, MetaException, IOException {

    DelegationTokenStore ts = new DBTokenStore();
    ts.init(new HMSHandler("Test handler"), HadoopThriftAuthBridge.Server.ServerMode.METASTORE);

    // Master keys: store starts empty; removing/updating unknown keys must fail cleanly.
    assertEquals(0, ts.getMasterKeys().length);
    assertFalse(ts.removeMasterKey(-1));
    try {
      ts.updateMasterKey(-1, "non-existent-key");
      fail("Updated non-existent key.");
    } catch (TokenStoreException e) {
      assertTrue(e.getCause() instanceof NoSuchObjectException);
    }
    int keySeq = ts.addMasterKey("key1Data");
    int keySeq2 = ts.addMasterKey("key2Data");
    int keySeq2same = ts.addMasterKey("key2Data");
    // Duplicate key data still gets a fresh, sequential sequence number.
    assertEquals("keys sequential", keySeq + 1, keySeq2);
    assertEquals("keys sequential", keySeq + 2, keySeq2same);
    assertEquals("expected number of keys", 3, ts.getMasterKeys().length);
    assertTrue(ts.removeMasterKey(keySeq));
    assertTrue(ts.removeMasterKey(keySeq2same));
    assertEquals("expected number of keys", 1, ts.getMasterKeys().length);
    assertEquals("key2Data", ts.getMasterKeys()[0]);
    ts.updateMasterKey(keySeq2, "updatedData");
    assertEquals("updatedData", ts.getMasterKeys()[0]);
    assertTrue(ts.removeMasterKey(keySeq2));

    // Tokens: absent token reads as null, add is idempotent-false on repeat,
    // and the stored information round-trips as an equal but distinct object.
    assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
    DelegationTokenIdentifier tokenId = new DelegationTokenIdentifier(
        new Text("owner"), new Text("renewer"), new Text("realUser"));
    assertNull(ts.getToken(tokenId));
    assertFalse(ts.removeToken(tokenId));
    DelegationTokenInformation tokenInfo = new DelegationTokenInformation(
        99, "password".getBytes());
    assertTrue(ts.addToken(tokenId, tokenInfo));
    assertFalse(ts.addToken(tokenId, tokenInfo));
    DelegationTokenInformation tokenInfoRead = ts.getToken(tokenId);
    assertEquals(tokenInfo.getRenewDate(), tokenInfoRead.getRenewDate());
    assertNotSame(tokenInfo, tokenInfoRead);
    Assert.assertArrayEquals(HiveDelegationTokenSupport
        .encodeDelegationTokenInformation(tokenInfo),
        HiveDelegationTokenSupport
            .encodeDelegationTokenInformation(tokenInfoRead));

    List<DelegationTokenIdentifier> allIds = ts
        .getAllDelegationTokenIdentifiers();
    assertEquals(1, allIds.size());
    Assert.assertEquals(TokenStoreDelegationTokenSecretManager
        .encodeWritable(tokenId),
        TokenStoreDelegationTokenSecretManager.encodeWritable(allIds
            .get(0)));

    assertTrue(ts.removeToken(tokenId));
    assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
    assertNull(ts.getToken(tokenId));
    ts.close();
  }
}
| apache-2.0 |
grzesuav/jpf-core | src/peers/gov/nasa/jpf/vm/JPF_java_lang_reflect_Constructor.java | 7663 | /*
* Copyright (C) 2014, United States Government, as represented by the
* Administrator of the National Aeronautics and Space Administration.
* All rights reserved.
*
* The Java Pathfinder core (jpf-core) platform is licensed under the
* Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.nasa.jpf.vm;
import java.lang.reflect.Modifier;
import gov.nasa.jpf.Config;
import gov.nasa.jpf.annotation.MJI;
import gov.nasa.jpf.util.MethodInfoRegistry;
import gov.nasa.jpf.util.RunListener;
import gov.nasa.jpf.util.RunRegistry;
/**
* native peer for rudimentary constructor reflection.
*
* Unfortunately, this is quite redundant to the Method peer, but Constructor
* is not a Method subclass, and hence we can't rely on it's initialization
*/
public class JPF_java_lang_reflect_Constructor extends NativePeer {

  // Maps the "regIdx" handle stored in each heap Constructor object to its
  // MethodInfo; reset between JPF runs via the RunListener registered in init().
  static MethodInfoRegistry registry;

  public static boolean init (Config conf) {
    // this is an example of how to handle cross-initialization between
    // native peers - this might also get explicitly called by the java.lang.Class
    // peer, since it creates Constructor objects. Here we have to make sure
    // we only reset between JPF runs
    if (registry == null){
      registry = new MethodInfoRegistry();

      RunRegistry.getDefaultRegistry().addListener( new RunListener() {
        @Override
        public void reset (RunRegistry reg){
          registry = null;
        }
      });
    }
    return true;
  }

  // Creates a heap Constructor object for 'mi' and stores the registry handle
  // in its "regIdx" field so later native calls can recover the MethodInfo.
  static int createConstructorObject (MJIEnv env, ClassInfo ciCtor, MethodInfo mi){
    // note - it is the callers responsibility to ensure Constructor is properly initialized
    int regIdx = registry.registerMethodInfo(mi);
    int eidx = env.newObject(ciCtor);
    ElementInfo ei = env.getModifiableElementInfo(eidx);

    ei.setIntField("regIdx", regIdx);
    return eidx;
  }

  // Recovers the MethodInfo behind a heap Constructor object via its "regIdx" field.
  static MethodInfo getMethodInfo (MJIEnv env, int objRef){
    return registry.getMethodInfo(env,objRef, "regIdx");
  }

  // Constructor.getName(): lazily caches the name String in the "name" field.
  @MJI
  public int getName____Ljava_lang_String_2 (MJIEnv env, int objRef) {
    MethodInfo mi = getMethodInfo(env, objRef);

    int nameRef = env.getReferenceField( objRef, "name");
    if (nameRef == MJIEnv.NULL) {
      nameRef = env.newString(mi.getName());
      env.setReferenceField(objRef, "name", nameRef);
    }

    return nameRef;
  }

  // <2do> .. and some more delegations to JPF_java_lang_Method

  // Constructor.newInstance(Object[]): executed twice - once to set up a direct
  // call frame that runs the constructor, and once more after that frame returns
  // to deliver the newly created object.
  @MJI
  public int newInstance___3Ljava_lang_Object_2__Ljava_lang_Object_2 (MJIEnv env, int mthRef, int argsRef) {
    ThreadInfo ti = env.getThreadInfo();
    DirectCallStackFrame frame = ti.getReturnedDirectCall();
    MethodInfo miCallee = getMethodInfo(env,mthRef);

    if (frame == null) { // first time
      ClassInfo ci = miCallee.getClassInfo();
      if (ci.isAbstract()){
        env.throwException("java.lang.InstantiationException");
        return MJIEnv.NULL;
      }

      int objRef = env.newObjectOfUncheckedClass( ci);
      frame = miCallee.createDirectCallStackFrame( ti, 1);
      frame.setReflection();

      frame.setLocalReferenceVariable(0, objRef);        // (1) store the objRef for retrieval during re-exec

      int argOffset = frame.setReferenceArgument(0, objRef, null);
      if (!JPF_java_lang_reflect_Method.pushUnboxedArguments( env, miCallee, frame, argOffset, argsRef)) {
        // we've got a IllegalArgumentException
        return MJIEnv.NULL;
      }
      ti.pushFrame(frame);

      ci.initializeClass(ti);

      env.repeatInvocation();
      return MJIEnv.NULL;

    } else { // reflection call returned
      int objRef = frame.getLocalVariable(0); // that's the object ref we stored in (1)
      return objRef;
    }
  }

  @MJI
  public int getParameterTypes_____3Ljava_lang_Class_2 (MJIEnv env, int objRef){
    // kind of dangerous, but we don't refer to any fields and the underlying JPF construct
    // (MethodInfo) is the same, so we just delegate to avoid copying non-trivial code
    return JPF_java_lang_reflect_Method.getParameterTypes (env, getMethodInfo(env,objRef));
  }

  @MJI
  public int getAnnotations_____3Ljava_lang_annotation_Annotation_2 (MJIEnv env, int objRef){
    // <2do> check if ctor annotations are inherited, which is a bit off
    return JPF_java_lang_reflect_Method.getAnnotations( env, getMethodInfo(env,objRef));
  }

  @MJI
  public int getDeclaredAnnotations_____3Ljava_lang_annotation_Annotation_2 (MJIEnv env, int objRef){
    return JPF_java_lang_reflect_Method.getDeclaredAnnotations( env, getMethodInfo(env,objRef));
  }

  @MJI
  public int getAnnotation__Ljava_lang_Class_2__Ljava_lang_annotation_Annotation_2 (MJIEnv env, int objRef, int annotationClsRef) {
    return JPF_java_lang_reflect_Method.getAnnotation( env, getMethodInfo(env,objRef), annotationClsRef);
  }

  @MJI
  public int getParameterAnnotations_____3_3Ljava_lang_annotation_Annotation_2 (MJIEnv env, int objRef){
    return JPF_java_lang_reflect_Method.getParameterAnnotations( env, getMethodInfo(env,objRef));
  }

  @MJI
  public int getModifiers____I (MJIEnv env, int objRef){
    MethodInfo mi = getMethodInfo(env, objRef);
    return mi.getModifiers();
  }

  @MJI
  public int getDeclaringClass____Ljava_lang_Class_2 (MJIEnv env, int objRef){
    MethodInfo mi = getMethodInfo(env, objRef);
    ClassInfo ci = mi.getClassInfo();
    // can't get a Constructor object w/o having initialized it's declaring class first
    return ci.getClassObjectRef();
  }

  // Constructor.toString(): "<modifiers> <declaring class>(<arg types>)".
  @MJI
  public int toString____Ljava_lang_String_2 (MJIEnv env, int objRef){
    StringBuilder sb = new StringBuilder();

    MethodInfo mi = getMethodInfo(env, objRef);

    sb.append(Modifier.toString(mi.getModifiers()));
    sb.append(' ');

    sb.append(mi.getClassInfo().getName());
    sb.append('(');

    String[] at = mi.getArgumentTypeNames();
    for (int i=0; i<at.length; i++){
      if (i>0) sb.append(',');
      sb.append(at[i]);
    }

    sb.append(')');

    int sref = env.newString(sb.toString());
    return sref;
  }

  // Constructor.equals(Object): true iff the other object is a Constructor for
  // the same class with the same name, return type and parameter types.
  // NOTE(review): assumes mthRef is a non-null heap object — verify callers
  // never pass MJIEnv.NULL here.
  @MJI
  public boolean equals__Ljava_lang_Object_2__Z (MJIEnv env, int objRef, int mthRef){
    ElementInfo ei = env.getElementInfo(mthRef);
    ClassInfo ci = ClassLoaderInfo.getSystemResolvedClassInfo(JPF_java_lang_Class.CONSTRUCTOR_CLASSNAME);

    if (ei.getClassInfo() == ci){
      MethodInfo mi1 = getMethodInfo(env, objRef);
      MethodInfo mi2 = getMethodInfo(env, mthRef);
      if (mi1.getClassInfo() == mi2.getClassInfo()){
        if (mi1.getName().equals(mi2.getName())){
          if (mi1.getReturnType().equals(mi2.getReturnType())){
            byte[] params1 = mi1.getArgumentTypes();
            byte[] params2 = mi2.getArgumentTypes();
            if (params1.length == params2.length){
              for (int i = 0; i < params1.length; i++){
                if (params1[i] != params2[i])
                  return false;
              }
              return true;
            }
          }
        }
      }
    }
    return false;
  }

  // Constructor.hashCode(): based on the declaring class name only — coarse,
  // but consistent with equals() (equal ctors share a class, hence a hash).
  @MJI
  public int hashCode____I (MJIEnv env, int objRef){
    MethodInfo ctor = getMethodInfo(env, objRef);
    return ctor.getClassName().hashCode();
  }
}
| apache-2.0 |
jonathanchristison/fabric8 | fabric/fabric-core/src/main/java/io/fabric8/service/child/ChildAutoScaler.java | 6087 | /**
* Copyright 2005-2015 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.service.child;
import io.fabric8.api.AutoScaleRequest;
import io.fabric8.api.ChildScalingRequirements;
import io.fabric8.api.Container;
import io.fabric8.api.ContainerAutoScaler;
import io.fabric8.api.Containers;
import io.fabric8.api.CreateChildContainerOptions;
import io.fabric8.api.FabricService;
import io.fabric8.api.NameValidator;
import io.fabric8.api.ProfileRequirements;
import io.fabric8.api.scr.support.Strings;
import io.fabric8.common.util.Filter;
import io.fabric8.common.util.Filters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
*/
public class ChildAutoScaler implements ContainerAutoScaler {
private static final transient Logger LOG = LoggerFactory.getLogger(ChildAutoScaler.class);
private final ChildContainerProvider containerProvider;
public ChildAutoScaler(ChildContainerProvider containerProvider) {
this.containerProvider = containerProvider;
}
@Override
public int getWeight() {
return 0;
}
@Override
public void createContainers(AutoScaleRequest request) throws Exception {
int count = request.getDelta();
String profile = request.getProfile();
String version = request.getVersion();
FabricService fabricService = request.getFabricService();
CreateChildContainerOptions.Builder builder = null;
if (fabricService != null) {
builder = createAutoScaleOptions(request, fabricService);
}
if (builder != null) {
Set<String> ignoreContainerNames = new HashSet<>();
for (int i = 0; i < count; i++) {
final CreateChildContainerOptions.Builder configuredBuilder = builder.number(1).version(version).profiles(profile);
Container[] containers = fabricService.getContainers();
NameValidator nameValidator = Containers.createNameValidator(containers);
String name = Containers.createContainerName(containers, profile, containerProvider.getScheme(), nameValidator);
ignoreContainerNames.add(name);
CreateChildContainerOptions options = configuredBuilder.name(name).build();
LOG.info("Creating container name " + name + " version " + version + " profile " + profile + " " + count + " container(s)");
fabricService.createContainers(options);
}
} else {
LOG.warn("Could not create version " + version + " profile " + profile + " due to missing autoscale configuration");
}
}
protected CreateChildContainerOptions.Builder createAutoScaleOptions(AutoScaleRequest request, FabricService fabricService) {
CreateChildContainerOptions.Builder builder = CreateChildContainerOptions.builder();
Container[] containers = fabricService.getContainers();
if (containers != null) {
List<String> containerIds = Containers.rootContainerIds(containers);
// allow the requirements to customise which root to use...
if (containerIds.isEmpty()) {
throw new IllegalStateException("No root containers are available!");
}
String rootContainer = null;
if (containerIds.size() == 1) {
rootContainer = containerIds.get(0);
} else {
rootContainer = chooseRootContainer(request, containerIds);
}
if (Strings.isNullOrBlank(rootContainer)) {
throw new IllegalStateException("Could not choose a root container from the possible IDs: " + containerIds + " with requirements: " + getChildScalingRequirements(request));
} else {
builder = builder.parent(rootContainer);
}
}
String zookeeperUrl = fabricService.getZookeeperUrl();
String zookeeperPassword = fabricService.getZookeeperPassword();
return builder.jmxUser("admin").jmxPassword(zookeeperPassword).
zookeeperUrl(zookeeperUrl).zookeeperPassword(zookeeperPassword);
}
protected String chooseRootContainer(AutoScaleRequest request, List<String> containerIds) {
ChildScalingRequirements scalingRequirements = getChildScalingRequirements(request);
if (scalingRequirements != null) {
List<String> rootContainerPatterns = scalingRequirements.getRootContainerPatterns();
if (rootContainerPatterns != null && !rootContainerPatterns.isEmpty()) {
Filter<String> filter = Filters.createStringFilters(rootContainerPatterns);
List<String> matchingRootContainers = Filters.filter(containerIds, filter);
return Filters.matchRandomElement(matchingRootContainers);
}
}
return Filters.matchRandomElement(containerIds);
}
/**
 * Extracts the child scaling requirements from the request's profile requirements.
 *
 * @param request the auto scale request to inspect
 * @return the child scaling requirements, or null when the request has no profile
 *         requirements or the profile declares none
 */
protected ChildScalingRequirements getChildScalingRequirements(AutoScaleRequest request) {
    ProfileRequirements profileRequirements = request.getProfileRequirements();
    return profileRequirements == null ? null : profileRequirements.getChildScalingRequirements();
}
/**
 * Scales the profile down by destroying {@code count} of the given containers.
 * Not implemented yet for child containers — currently a no-op.
 */
@Override
public void destroyContainers(String profile, int count, List<Container> containers) {
    // TODO
}
}
| apache-2.0 |
fhanik/spring-security | saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/authentication/AbstractSaml2AuthenticationRequest.java | 5211 | /*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.saml2.provider.service.authentication;
import java.nio.charset.Charset;
import org.springframework.security.saml2.provider.service.registration.Saml2MessageBinding;
import org.springframework.util.Assert;
/**
 * Holder for the {@code AuthNRequest} parameters sent with either the
 * {@link Saml2MessageBinding#POST} or the {@link Saml2MessageBinding#REDIRECT}
 * binding. The data is encoded (and, for REDIRECT, deflated) but not escaped for
 * transport — i.e. no URL encoding via
 * {@link org.springframework.web.util.UriUtils#encode(String, Charset)} and no HTML
 * encoding via {@link org.springframework.web.util.HtmlUtils#htmlEscape(String)}.
 * https://www.oasis-open.org/committees/download.php/35711/sstc-saml-core-errata-2.0-wd-06-diff.pdf
 * (line 2031)
 *
 * @since 5.3
 * @see Saml2AuthenticationRequestFactory#createPostAuthenticationRequest(Saml2AuthenticationRequestContext)
 * @see Saml2AuthenticationRequestFactory#createRedirectAuthenticationRequest(Saml2AuthenticationRequestContext)
 */
public abstract class AbstractSaml2AuthenticationRequest {

	private final String samlRequest;

	private final String relayState;

	private final String authenticationRequestUri;

	/**
	 * Creates a request holder after validating the mandatory values.
	 * @param samlRequest the SAML-encoded {@code SAMLRequest} XML; must not be null or
	 * empty
	 * @param relayState the {@code RelayState} value accompanying the request; may be
	 * null
	 * @param authenticationRequestUri the URL the XML message will be sent to; must not
	 * be null or empty
	 */
	AbstractSaml2AuthenticationRequest(String samlRequest, String relayState, String authenticationRequestUri) {
		Assert.hasText(samlRequest, "samlRequest cannot be null or empty");
		Assert.hasText(authenticationRequestUri, "authenticationRequestUri cannot be null or empty");
		this.samlRequest = samlRequest;
		this.relayState = relayState;
		this.authenticationRequestUri = authenticationRequestUri;
	}

	/**
	 * Returns the {@code SAMLRequest} parameter value, already encoded for transport:
	 * deflated and SAML encoded when {@link #getBinding()} is
	 * {@link Saml2MessageBinding#REDIRECT}, SAML encoded only when it is
	 * {@link Saml2MessageBinding#POST}.
	 * @return the SAMLRequest parameter value
	 */
	public String getSamlRequest() {
		return this.samlRequest;
	}

	/**
	 * Returns the {@code RelayState} value, when one accompanies this request.
	 * @return the RelayState value, or null if not available
	 */
	public String getRelayState() {
		return this.relayState;
	}

	/**
	 * Returns the endpoint URI this AuthNRequest should be delivered to.
	 * @return the URI endpoint for this message
	 */
	public String getAuthenticationRequestUri() {
		return this.authenticationRequestUri;
	}

	/**
	 * Returns the binding used to send and encode this AuthNRequest. With
	 * {@link Saml2MessageBinding#REDIRECT}, DEFLATE encoding is applied automatically.
	 * @return the binding this message will be sent with.
	 */
	public abstract Saml2MessageBinding getBinding();

	/**
	 * A builder for {@link AbstractSaml2AuthenticationRequest} and its subclasses.
	 */
	public static class Builder<T extends Builder<T>> {

		String authenticationRequestUri;

		String samlRequest;

		String relayState;

		protected Builder() {
		}

		/**
		 * Returns this builder cast to the generic subtype, so fluent calls on a
		 * subclass builder keep the subclass type.
		 * @return this object
		 */
		@SuppressWarnings("unchecked")
		protected final T _this() {
			return (T) this;
		}

		/**
		 * Sets the {@code RelayState} parameter accompanying this AuthNRequest.
		 * @param relayState the relay state value, unencoded. if null or empty, the
		 * parameter will be removed from the map.
		 * @return this object
		 */
		public T relayState(String relayState) {
			this.relayState = relayState;
			return _this();
		}

		/**
		 * Sets the {@code SAMLRequest} parameter accompanying this AuthNRequest.
		 * @param samlRequest the SAMLRequest parameter.
		 * @return this object
		 */
		public T samlRequest(String samlRequest) {
			this.samlRequest = samlRequest;
			return _this();
		}

		/**
		 * Sets the {@code authenticationRequestUri}, the URL that will receive the
		 * AuthNRequest message.
		 * @param authenticationRequestUri the relay state value, unencoded.
		 * @return this object
		 */
		public T authenticationRequestUri(String authenticationRequestUri) {
			this.authenticationRequestUri = authenticationRequestUri;
			return _this();
		}

	}

}
| apache-2.0 |
nezirus/elasticsearch | core/src/main/java/org/elasticsearch/index/IndexService.java | 33997 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Sort;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.env.ShardLockObtainFailedException;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexSearcherWrapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardClosedException;
import org.elasticsearch.index.shard.IndexingOperationListener;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.cluster.IndicesClusterStateService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.LongSupplier;
import java.util.function.Supplier;
import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
public class IndexService extends AbstractIndexComponent implements IndicesClusterStateService.AllocatedIndex<IndexShard> {
private final IndexEventListener eventListener;
private final IndexFieldDataService indexFieldData;
private final BitsetFilterCache bitsetFilterCache;
private final NodeEnvironment nodeEnv;
private final ShardStoreDeleter shardStoreDeleter;
private final IndexStore indexStore;
private final IndexSearcherWrapper searcherWrapper;
private final IndexCache indexCache;
private final MapperService mapperService;
private final NamedXContentRegistry xContentRegistry;
private final SimilarityService similarityService;
private final EngineFactory engineFactory;
private final IndexWarmer warmer;
private volatile Map<Integer, IndexShard> shards = emptyMap();
private final AtomicBoolean closed = new AtomicBoolean(false);
private final AtomicBoolean deleted = new AtomicBoolean(false);
private final IndexSettings indexSettings;
private final List<IndexingOperationListener> indexingOperationListeners;
private final List<SearchOperationListener> searchOperationListeners;
private volatile AsyncRefreshTask refreshTask;
private volatile AsyncTranslogFSync fsyncTask;
private final ThreadPool threadPool;
private final BigArrays bigArrays;
private final ScriptService scriptService;
private final Client client;
private Supplier<Sort> indexSortSupplier;
public IndexService(
IndexSettings indexSettings,
NodeEnvironment nodeEnv,
NamedXContentRegistry xContentRegistry,
SimilarityService similarityService,
ShardStoreDeleter shardStoreDeleter,
AnalysisRegistry registry,
@Nullable EngineFactory engineFactory,
CircuitBreakerService circuitBreakerService,
BigArrays bigArrays,
ThreadPool threadPool,
ScriptService scriptService,
Client client,
QueryCache queryCache,
IndexStore indexStore,
IndexEventListener eventListener,
IndexModule.IndexSearcherWrapperFactory wrapperFactory,
MapperRegistry mapperRegistry,
IndicesFieldDataCache indicesFieldDataCache,
List<SearchOperationListener> searchOperationListeners,
List<IndexingOperationListener> indexingOperationListeners) throws IOException {
super(indexSettings);
this.indexSettings = indexSettings;
this.xContentRegistry = xContentRegistry;
this.similarityService = similarityService;
this.mapperService = new MapperService(indexSettings, registry.build(indexSettings), xContentRegistry, similarityService,
mapperRegistry,
// we parse all percolator queries as they would be parsed on shard 0
() -> newQueryShardContext(0, null, System::currentTimeMillis));
this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, circuitBreakerService, mapperService);
if (indexSettings.getIndexSortConfig().hasIndexSort()) {
// we delay the actual creation of the sort order for this index because the mapping has not been merged yet.
// The sort order is validated right after the merge of the mapping later in the process.
this.indexSortSupplier = () -> indexSettings.getIndexSortConfig().buildIndexSort(
mapperService::fullName,
indexFieldData::getForField
);
} else {
this.indexSortSupplier = () -> null;
}
this.shardStoreDeleter = shardStoreDeleter;
this.bigArrays = bigArrays;
this.threadPool = threadPool;
this.scriptService = scriptService;
this.client = client;
this.eventListener = eventListener;
this.nodeEnv = nodeEnv;
this.indexStore = indexStore;
indexFieldData.setListener(new FieldDataCacheListener(this));
this.bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetCacheListener(this));
this.warmer = new IndexWarmer(indexSettings.getSettings(), threadPool,
bitsetFilterCache.createListener(threadPool));
this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache);
this.engineFactory = engineFactory;
// initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE
this.searcherWrapper = wrapperFactory.newWrapper(this);
this.indexingOperationListeners = Collections.unmodifiableList(indexingOperationListeners);
this.searchOperationListeners = Collections.unmodifiableList(searchOperationListeners);
// kick off async ops for the first shard in this index
this.refreshTask = new AsyncRefreshTask(this);
rescheduleFsyncTask(indexSettings.getTranslogDurability());
}
public int numberOfShards() {
return shards.size();
}
public IndexEventListener getIndexEventListener() {
return this.eventListener;
}
@Override
public Iterator<IndexShard> iterator() {
return shards.values().iterator();
}
public boolean hasShard(int shardId) {
return shards.containsKey(shardId);
}
/**
* Return the shard with the provided id, or null if there is no such shard.
*/
@Override
@Nullable
public IndexShard getShardOrNull(int shardId) {
return shards.get(shardId);
}
/**
* Return the shard with the provided id, or throw an exception if it doesn't exist.
*/
public IndexShard getShard(int shardId) {
IndexShard indexShard = getShardOrNull(shardId);
if (indexShard == null) {
throw new ShardNotFoundException(new ShardId(index(), shardId));
}
return indexShard;
}
public Set<Integer> shardIds() {
return shards.keySet();
}
public IndexCache cache() {
return indexCache;
}
public IndexFieldDataService fieldData() {
return indexFieldData;
}
public IndexAnalyzers getIndexAnalyzers() {
return this.mapperService.getIndexAnalyzers();
}
public MapperService mapperService() {
return mapperService;
}
public NamedXContentRegistry xContentRegistry() {
return xContentRegistry;
}
public SimilarityService similarityService() {
return similarityService;
}
public Supplier<Sort> getIndexSortSupplier() {
return indexSortSupplier;
}
public synchronized void close(final String reason, boolean delete) throws IOException {
if (closed.compareAndSet(false, true)) {
deleted.compareAndSet(false, delete);
try {
final Set<Integer> shardIds = shardIds();
for (final int shardId : shardIds) {
try {
removeShard(shardId, reason);
} catch (Exception e) {
logger.warn("failed to close shard", e);
}
}
} finally {
IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, mapperService, refreshTask, fsyncTask);
}
}
}
public String indexUUID() {
return indexSettings.getUUID();
}
// NOTE: O(numShards) cost, but numShards should be smallish?
private long getAvgShardSizeInBytes() throws IOException {
long sum = 0;
int count = 0;
for (IndexShard indexShard : this) {
sum += indexShard.store().stats().sizeInBytes();
count++;
}
if (count == 0) {
return -1L;
} else {
return sum / count;
}
}
public synchronized IndexShard createShard(ShardRouting routing) throws IOException {
final boolean primary = routing.primary();
/*
* TODO: we execute this in parallel but it's a synced method. Yet, we might
* be able to serialize the execution via the cluster state in the future. for now we just
* keep it synced.
*/
if (closed.get()) {
throw new IllegalStateException("Can't create shard " + routing.shardId() + ", closed");
}
final Settings indexSettings = this.indexSettings.getSettings();
final ShardId shardId = routing.shardId();
boolean success = false;
Store store = null;
IndexShard indexShard = null;
ShardLock lock = null;
try {
lock = nodeEnv.shardLock(shardId, TimeUnit.SECONDS.toMillis(5));
eventListener.beforeIndexShardCreated(shardId, indexSettings);
ShardPath path;
try {
path = ShardPath.loadShardPath(logger, nodeEnv, shardId, this.indexSettings);
} catch (IllegalStateException ex) {
logger.warn("{} failed to load shard path, trying to remove leftover", shardId);
try {
ShardPath.deleteLeftoverShardDirectory(logger, nodeEnv, lock, this.indexSettings);
path = ShardPath.loadShardPath(logger, nodeEnv, shardId, this.indexSettings);
} catch (Exception inner) {
ex.addSuppressed(inner);
throw ex;
}
}
if (path == null) {
// TODO: we should, instead, hold a "bytes reserved" of how large we anticipate this shard will be, e.g. for a shard
// that's being relocated/replicated we know how large it will become once it's done copying:
// Count up how many shards are currently on each data path:
Map<Path, Integer> dataPathToShardCount = new HashMap<>();
for (IndexShard shard : this) {
Path dataPath = shard.shardPath().getRootStatePath();
Integer curCount = dataPathToShardCount.get(dataPath);
if (curCount == null) {
curCount = 0;
}
dataPathToShardCount.put(dataPath, curCount + 1);
}
path = ShardPath.selectNewPathForShard(nodeEnv, shardId, this.indexSettings,
routing.getExpectedShardSize() == ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE
? getAvgShardSizeInBytes() : routing.getExpectedShardSize(),
dataPathToShardCount);
logger.debug("{} creating using a new path [{}]", shardId, path);
} else {
logger.debug("{} creating using an existing path [{}]", shardId, path);
}
if (shards.containsKey(shardId.id())) {
throw new IllegalStateException(shardId + " already exists");
}
logger.debug("creating shard_id {}", shardId);
// if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary.
final Engine.Warmer engineWarmer = (searcher) -> {
IndexShard shard = getShardOrNull(shardId.getId());
if (shard != null) {
warmer.warm(searcher, shard, IndexService.this.indexSettings);
}
};
store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock,
new StoreCloseListener(shardId, () -> eventListener.onStoreClosed(shardId)));
indexShard = new IndexShard(routing, this.indexSettings, path, store, indexSortSupplier,
indexCache, mapperService, similarityService, indexFieldData, engineFactory,
eventListener, searcherWrapper, threadPool, bigArrays, engineWarmer,
searchOperationListeners, indexingOperationListeners);
eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created");
eventListener.afterIndexShardCreated(indexShard);
shards = newMapBuilder(shards).put(shardId.id(), indexShard).immutableMap();
success = true;
return indexShard;
} catch (ShardLockObtainFailedException e) {
throw new IOException("failed to obtain in-memory shard lock", e);
} finally {
if (success == false) {
if (lock != null) {
IOUtils.closeWhileHandlingException(lock);
}
closeShard("initialization failed", shardId, indexShard, store, eventListener);
}
}
}
@Override
public synchronized void removeShard(int shardId, String reason) {
final ShardId sId = new ShardId(index(), shardId);
final IndexShard indexShard;
if (shards.containsKey(shardId) == false) {
return;
}
logger.debug("[{}] closing... (reason: [{}])", shardId, reason);
HashMap<Integer, IndexShard> newShards = new HashMap<>(shards);
indexShard = newShards.remove(shardId);
shards = unmodifiableMap(newShards);
closeShard(reason, sId, indexShard, indexShard.store(), indexShard.getIndexEventListener());
logger.debug("[{}] closed (reason: [{}])", shardId, reason);
}
private void closeShard(String reason, ShardId sId, IndexShard indexShard, Store store, IndexEventListener listener) {
final int shardId = sId.id();
final Settings indexSettings = this.getIndexSettings().getSettings();
try {
try {
listener.beforeIndexShardClosed(sId, indexShard, indexSettings);
} finally {
// this logic is tricky, we want to close the engine so we rollback the changes done to it
// and close the shard so no operations are allowed to it
if (indexShard != null) {
try {
// only flush we are we closed (closed index or shutdown) and if we are not deleted
final boolean flushEngine = deleted.get() == false && closed.get();
indexShard.close(reason, flushEngine);
} catch (Exception e) {
logger.debug((org.apache.logging.log4j.util.Supplier<?>)
() -> new ParameterizedMessage("[{}] failed to close index shard", shardId), e);
// ignore
}
}
// call this before we close the store, so we can release resources for it
listener.afterIndexShardClosed(sId, indexShard, indexSettings);
}
} finally {
try {
if (store != null) {
store.close();
} else {
logger.trace("[{}] store not initialized prior to closing shard, nothing to close", shardId);
}
} catch (Exception e) {
logger.warn(
(Supplier<?>) () -> new ParameterizedMessage(
"[{}] failed to close store on shard removal (reason: [{}])", shardId, reason), e);
}
}
}
private void onShardClose(ShardLock lock) {
if (deleted.get()) { // we remove that shards content if this index has been deleted
try {
try {
eventListener.beforeIndexShardDeleted(lock.getShardId(), indexSettings.getSettings());
} finally {
shardStoreDeleter.deleteShardStore("delete index", lock, indexSettings);
eventListener.afterIndexShardDeleted(lock.getShardId(), indexSettings.getSettings());
}
} catch (IOException e) {
shardStoreDeleter.addPendingDelete(lock.getShardId(), indexSettings);
logger.debug(
(Supplier<?>) () -> new ParameterizedMessage(
"[{}] failed to delete shard content - scheduled a retry", lock.getShardId().id()), e);
}
}
}
@Override
public IndexSettings getIndexSettings() {
return indexSettings;
}
/**
* Creates a new QueryShardContext. The context has not types set yet, if types are required set them via
* {@link QueryShardContext#setTypes(String...)}.
*
* Passing a {@code null} {@link IndexReader} will return a valid context, however it won't be able to make
* {@link IndexReader}-specific optimizations, such as rewriting containing range queries.
*/
public QueryShardContext newQueryShardContext(int shardId, IndexReader indexReader, LongSupplier nowInMillis) {
return new QueryShardContext(
shardId, indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(),
similarityService(), scriptService, xContentRegistry,
client, indexReader,
nowInMillis);
}
/**
* The {@link ThreadPool} to use for this index.
*/
public ThreadPool getThreadPool() {
return threadPool;
}
/**
* The {@link BigArrays} to use for this index.
*/
public BigArrays getBigArrays() {
return bigArrays;
}
/**
* The {@link ScriptService} to use for this index.
*/
public ScriptService getScriptService() {
return scriptService;
}
List<IndexingOperationListener> getIndexOperationListeners() { // pkg private for testing
return indexingOperationListeners;
}
List<SearchOperationListener> getSearchOperationListener() { // pkg private for testing
return searchOperationListeners;
}
@Override
public boolean updateMapping(IndexMetaData indexMetaData) throws IOException {
return mapperService().updateMapping(indexMetaData);
}
private class StoreCloseListener implements Store.OnClose {
private final ShardId shardId;
private final Closeable[] toClose;
StoreCloseListener(ShardId shardId, Closeable... toClose) {
this.shardId = shardId;
this.toClose = toClose;
}
@Override
public void accept(ShardLock lock) {
try {
assert lock.getShardId().equals(shardId) : "shard id mismatch, expected: " + shardId + " but got: " + lock.getShardId();
onShardClose(lock);
} finally {
try {
IOUtils.close(toClose);
} catch (IOException ex) {
logger.debug("failed to close resource", ex);
}
}
}
}
private static final class BitsetCacheListener implements BitsetFilterCache.Listener {
final IndexService indexService;
private BitsetCacheListener(IndexService indexService) {
this.indexService = indexService;
}
@Override
public void onCache(ShardId shardId, Accountable accountable) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
long ramBytesUsed = accountable != null ? accountable.ramBytesUsed() : 0L;
shard.shardBitsetFilterCache().onCached(ramBytesUsed);
}
}
}
@Override
public void onRemoval(ShardId shardId, Accountable accountable) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
long ramBytesUsed = accountable != null ? accountable.ramBytesUsed() : 0L;
shard.shardBitsetFilterCache().onRemoval(ramBytesUsed);
}
}
}
}
private final class FieldDataCacheListener implements IndexFieldDataCache.Listener {
final IndexService indexService;
FieldDataCacheListener(IndexService indexService) {
this.indexService = indexService;
}
@Override
public void onCache(ShardId shardId, String fieldName, Accountable ramUsage) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
shard.fieldData().onCache(shardId, fieldName, ramUsage);
}
}
}
@Override
public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, long sizeInBytes) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
shard.fieldData().onRemoval(shardId, fieldName, wasEvicted, sizeInBytes);
}
}
}
}
public IndexMetaData getMetaData() {
return indexSettings.getIndexMetaData();
}
@Override
public synchronized void updateMetaData(final IndexMetaData metadata) {
final Translog.Durability oldTranslogDurability = indexSettings.getTranslogDurability();
if (indexSettings.updateIndexMetaData(metadata)) {
for (final IndexShard shard : this.shards.values()) {
try {
shard.onSettingsChanged();
} catch (Exception e) {
logger.warn(
(Supplier<?>) () -> new ParameterizedMessage(
"[{}] failed to notify shard about setting change", shard.shardId().id()), e);
}
}
if (refreshTask.getInterval().equals(indexSettings.getRefreshInterval()) == false) {
rescheduleRefreshTasks();
}
final Translog.Durability durability = indexSettings.getTranslogDurability();
if (durability != oldTranslogDurability) {
rescheduleFsyncTask(durability);
}
}
}
private void rescheduleFsyncTask(Translog.Durability durability) {
try {
if (fsyncTask != null) {
fsyncTask.close();
}
} finally {
fsyncTask = durability == Translog.Durability.REQUEST ? null : new AsyncTranslogFSync(this);
}
}
private void rescheduleRefreshTasks() {
try {
refreshTask.close();
} finally {
refreshTask = new AsyncRefreshTask(this);
}
}
public interface ShardStoreDeleter {
void deleteShardStore(String reason, ShardLock lock, IndexSettings indexSettings) throws IOException;
void addPendingDelete(ShardId shardId, IndexSettings indexSettings);
}
final EngineFactory getEngineFactory() {
return engineFactory;
} // pkg private for testing
final IndexSearcherWrapper getSearcherWrapper() {
return searcherWrapper;
} // pkg private for testing
final IndexStore getIndexStore() {
return indexStore;
} // pkg private for testing
private void maybeFSyncTranslogs() {
if (indexSettings.getTranslogDurability() == Translog.Durability.ASYNC) {
for (IndexShard shard : this.shards.values()) {
try {
Translog translog = shard.getTranslog();
if (translog.syncNeeded()) {
translog.sync();
}
} catch (AlreadyClosedException ex) {
// fine - continue;
} catch (IOException e) {
logger.warn("failed to sync translog", e);
}
}
}
}
private void maybeRefreshEngine() {
if (indexSettings.getRefreshInterval().millis() > 0) {
for (IndexShard shard : this.shards.values()) {
switch (shard.state()) {
case CREATED:
case RECOVERING:
case CLOSED:
continue;
case POST_RECOVERY:
case STARTED:
case RELOCATED:
try {
if (shard.isRefreshNeeded()) {
shard.refresh("schedule");
}
} catch (IndexShardClosedException | AlreadyClosedException ex) {
// fine - continue;
}
continue;
default:
throw new IllegalStateException("unknown state: " + shard.state());
}
}
}
}
abstract static class BaseAsyncTask implements Runnable, Closeable {
protected final IndexService indexService;
protected final ThreadPool threadPool;
private final TimeValue interval;
private ScheduledFuture<?> scheduledFuture;
private final AtomicBoolean closed = new AtomicBoolean(false);
private volatile Exception lastThrownException;
BaseAsyncTask(IndexService indexService, TimeValue interval) {
this.indexService = indexService;
this.threadPool = indexService.getThreadPool();
this.interval = interval;
onTaskCompletion();
}
boolean mustReschedule() {
// don't re-schedule if its closed or if we don't have a single shard here..., we are done
return indexService.closed.get() == false
&& closed.get() == false && interval.millis() > 0;
}
private synchronized void onTaskCompletion() {
if (mustReschedule()) {
if (indexService.logger.isTraceEnabled()) {
indexService.logger.trace("scheduling {} every {}", toString(), interval);
}
this.scheduledFuture = threadPool.schedule(interval, getThreadPool(), BaseAsyncTask.this);
} else {
indexService.logger.trace("scheduled {} disabled", toString());
this.scheduledFuture = null;
}
}
boolean isScheduled() {
return scheduledFuture != null;
}
@Override
public final void run() {
try {
runInternal();
} catch (Exception ex) {
if (lastThrownException == null || sameException(lastThrownException, ex) == false) {
// prevent the annoying fact of logging the same stuff all the time with an interval of 1 sec will spam all your logs
indexService.logger.warn(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to run task {} - suppressing re-occurring exceptions unless the exception changes",
toString()),
ex);
lastThrownException = ex;
}
} finally {
onTaskCompletion();
}
}
private static boolean sameException(Exception left, Exception right) {
if (left.getClass() == right.getClass()) {
if (Objects.equals(left.getMessage(), right.getMessage())) {
StackTraceElement[] stackTraceLeft = left.getStackTrace();
StackTraceElement[] stackTraceRight = right.getStackTrace();
if (stackTraceLeft.length == stackTraceRight.length) {
for (int i = 0; i < stackTraceLeft.length; i++) {
if (stackTraceLeft[i].equals(stackTraceRight[i]) == false) {
return false;
}
}
return true;
}
}
}
return false;
}
protected abstract void runInternal();
protected String getThreadPool() {
return ThreadPool.Names.SAME;
}
@Override
public synchronized void close() {
if (closed.compareAndSet(false, true)) {
FutureUtils.cancel(scheduledFuture);
scheduledFuture = null;
}
}
TimeValue getInterval() {
return interval;
}
boolean isClosed() {
return this.closed.get();
}
}
/**
* FSyncs the translog for all shards of this index in a defined interval.
*/
static final class AsyncTranslogFSync extends BaseAsyncTask {
AsyncTranslogFSync(IndexService indexService) {
super(indexService, indexService.getIndexSettings().getTranslogSyncInterval());
}
@Override
protected String getThreadPool() {
return ThreadPool.Names.FLUSH;
}
@Override
protected void runInternal() {
indexService.maybeFSyncTranslogs();
}
@Override
public String toString() {
return "translog_sync";
}
}
    /** Periodically triggers {@link IndexService#maybeRefreshEngine()} at the index's refresh interval. */
    final class AsyncRefreshTask extends BaseAsyncTask {

        AsyncRefreshTask(IndexService indexService) {
            super(indexService, indexService.getIndexSettings().getRefreshInterval());
        }

        @Override
        protected void runInternal() {
            indexService.maybeRefreshEngine();
        }

        @Override
        protected String getThreadPool() {
            // Refresh work belongs on the dedicated REFRESH pool.
            return ThreadPool.Names.REFRESH;
        }

        @Override
        public String toString() {
            return "refresh";
        }
    }
    // Exposed for tests: the periodic refresh task.
    AsyncRefreshTask getRefreshTask() { // for tests
        return refreshTask;
    }

    // Exposed for tests: the periodic translog fsync task.
    AsyncTranslogFSync getFsyncTask() { // for tests
        return fsyncTask;
    }
}
| apache-2.0 |
kchilton2/incubator-rya | extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/ProjectionResultUpdater.java | 4313 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.rya.indexing.pcj.fluo.app;
import static com.google.common.base.Preconditions.checkNotNull;
import org.apache.fluo.api.client.TransactionBase;
import org.apache.fluo.api.data.Bytes;
import org.apache.log4j.Logger;
import org.apache.rya.api.model.VisibilityBindingSet;
import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
import org.apache.rya.indexing.pcj.fluo.app.query.ProjectionMetadata;
import org.apache.rya.indexing.pcj.fluo.app.util.BindingSetUtil;
import org.apache.rya.indexing.pcj.storage.accumulo.VariableOrder;
import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSetSerDe;
import org.eclipse.rdf4j.query.BindingSet;
import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
import edu.umd.cs.findbugs.annotations.NonNull;
/**
* Updates the results of a Projection node when one of its children has added a
* new Binding Set to its results.
*/
@DefaultAnnotation(NonNull.class)
public class ProjectionResultUpdater extends AbstractNodeUpdater {

    // Fixed: this logger was previously created with QueryResultUpdater.class,
    // which mis-attributed this class's log output to a different updater.
    private static final Logger log = Logger.getLogger(ProjectionResultUpdater.class);
    // Serializer for writing VisibilityBindingSets into Fluo cell values.
    private static final VisibilityBindingSetSerDe BS_SERDE = new VisibilityBindingSetSerDe();

    /**
     * Updates the results of a Projection node when one of its children has added a
     * new Binding Set to its results.
     *
     * @param tx - The transaction all Fluo queries will use. (not null)
     * @param childBindingSet - A binding set that the query's child node has emitted. (not null)
     * @param projectionMetadata - The metadata of the Query whose results will be updated. (not null)
     * @throws Exception A problem caused the update to fail.
     */
    public void updateProjectionResults(
            final TransactionBase tx,
            final VisibilityBindingSet childBindingSet,
            final ProjectionMetadata projectionMetadata) throws Exception {
        checkNotNull(tx);
        checkNotNull(childBindingSet);
        checkNotNull(projectionMetadata);

        log.trace(
                "Transaction ID: " + tx.getStartTimestamp() + "\n" +
                "Node ID: " + projectionMetadata.getNodeId() + "\n" +
                "Parent Node ID: " + projectionMetadata.getParentNodeId() + "\n" +
                "Child Node ID: " + projectionMetadata.getChildNodeId() + "\n" +
                "Child Binding Set:\n" + childBindingSet + "\n");

        // Create the query's Binding Set from the child node's binding set, keeping
        // only the projected variables. The visibility is carried over unchanged.
        final VariableOrder queryVarOrder = projectionMetadata.getVariableOrder();
        final VariableOrder projectionVarOrder = projectionMetadata.getProjectedVars();
        final BindingSet queryBindingSet = BindingSetUtil.keepBindings(projectionVarOrder, childBindingSet);
        VisibilityBindingSet projectedBs = new VisibilityBindingSet(queryBindingSet, childBindingSet.getVisibility());

        // Create the Row Key for the result. If the child node groups results, then the key must only contain the Group By variables.
        Bytes resultRow = makeRowKey(projectionMetadata.getNodeId(), queryVarOrder, projectedBs);

        // Create the Binding Set that goes in the Node Value. It does contain visibilities.
        final Bytes nodeValueBytes = BS_SERDE.serialize(projectedBs);

        log.trace(
                "Transaction ID: " + tx.getStartTimestamp() + "\n" +
                "New Binding Set: " + childBindingSet + "\n");

        tx.set(resultRow, FluoQueryColumns.PROJECTION_BINDING_SET, nodeValueBytes);
    }
}
| apache-2.0 |
timveil/iot-truck-streaming | stream-simulator/src/main/java/com/hortonworks/streaming/interfaces/DomainObject.java | 82 | package com.hortonworks.streaming.interfaces;
/**
 * Marker interface declaring no methods. Implementations opt in purely so they
 * can be referred to through this common type.
 */
public interface DomainObject {
}
| apache-2.0 |
jimczi/elasticsearch | modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java | 19649 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.queries.BlendedTermQuery;
import org.apache.lucene.queries.CommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FilterScorer;
import org.apache.lucene.search.FilteredDocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanNotQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import static org.hamcrest.Matchers.equalTo;
/**
 * "Duel" tests for the percolator: every candidate document is percolated both
 * through the optimized percolate query produced by the percolator field type
 * and through the brute-force {@link ControlQuery}, and the two runs must
 * return identical hits, scores and (when scoring) explanations.
 */
public class CandidateQueryTests extends ESSingleNodeTestCase {

    // Lucene index that holds the serialized percolator queries.
    private Directory directory;
    private IndexWriter indexWriter;
    private DocumentMapper documentMapper;
    private DirectoryReader directoryReader;
    private MapperService mapperService;

    private PercolatorFieldMapper fieldMapper;
    private PercolatorFieldMapper.FieldType fieldType;

    // Queries in Lucene doc-id order; queryStore resolves a doc id back to its query.
    private List<Query> queries;
    private PercolateQuery.QueryStore queryStore;

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return Collections.singleton(PercolatorPlugin.class);
    }

    /** Sets up a fresh Lucene directory plus a mapping with a percolator-typed query field. */
    @Before
    public void init() throws Exception {
        directory = newDirectory();
        IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer());
        // Disable merging so doc ids keep matching the order in which queries were added.
        config.setMergePolicy(NoMergePolicy.INSTANCE);
        indexWriter = new IndexWriter(directory, config);
        String indexName = "test";
        IndexService indexService = createIndex(indexName, Settings.EMPTY);
        mapperService = indexService.mapperService();
        String mapper = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("int_field").field("type", "integer").endObject()
                .startObject("long_field").field("type", "long").endObject()
                .startObject("half_float_field").field("type", "half_float").endObject()
                .startObject("float_field").field("type", "float").endObject()
                .startObject("double_field").field("type", "double").endObject()
                .startObject("ip_field").field("type", "ip").endObject()
                .startObject("field").field("type", "keyword").endObject()
                .endObject().endObject().endObject().string();
        documentMapper = mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, true);
        String queryField = "query_field";
        String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject(queryField).field("type", "percolator").endObject().endObject()
                .endObject().endObject().string();
        mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true);
        fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper("type").mappers().getMapper(queryField);
        fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType();
        queries = new ArrayList<>();
        queryStore = ctx -> docId -> this.queries.get(docId);
    }

    @After
    public void deinit() throws Exception {
        directoryReader.close();
        directory.close();
    }

    /**
     * Indexes a randomized mix of query shapes (prefix, wildcard, span, term,
     * boolean combinations, match-all/none and a custom query that is only
     * understood after rewrite) and duels each candidate document, plus a
     * non-matching document and an empty document.
     */
    public void testDuel() throws Exception {
        List<Function<String, Query>> queryFunctions = new ArrayList<>();
        queryFunctions.add((id) -> new PrefixQuery(new Term("field", id)));
        queryFunctions.add((id) -> new WildcardQuery(new Term("field", id + "*")));
        queryFunctions.add((id) -> new CustomQuery(new Term("field", id)));
        queryFunctions.add((id) -> new SpanTermQuery(new Term("field", id)));
        queryFunctions.add((id) -> new TermQuery(new Term("field", id)));
        queryFunctions.add((id) -> {
            // Empty boolean query.
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            return builder.build();
        });
        queryFunctions.add((id) -> {
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.MUST);
            if (randomBoolean()) {
                builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT);
            }
            if (randomBoolean()) {
                builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.MUST);
            }
            return builder.build();
        });
        queryFunctions.add((id) -> {
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.SHOULD);
            if (randomBoolean()) {
                builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT);
            }
            if (randomBoolean()) {
                builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.SHOULD);
            }
            return builder.build();
        });
        queryFunctions.add((id) -> {
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
            builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
            if (randomBoolean()) {
                builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT);
            }
            return builder.build();
        });
        queryFunctions.add((id) -> {
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
            builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
            if (randomBoolean()) {
                builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT);
            }
            return builder.build();
        });
        queryFunctions.add((id) -> {
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.setMinimumNumberShouldMatch(randomIntBetween(0, 4));
            builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.SHOULD);
            builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.SHOULD);
            return builder.build();
        });
        queryFunctions.add((id) -> new MatchAllDocsQuery());
        queryFunctions.add((id) -> new MatchNoDocsQuery("no reason at all"));

        // Cycle through the query shapes so every shape is indexed at least once.
        int numDocs = randomIntBetween(queryFunctions.size(), queryFunctions.size() * 3);
        List<ParseContext.Document> documents = new ArrayList<>();
        for (int i = 0; i < numDocs; i++) {
            String id = Integer.toString(i);
            Query query = queryFunctions.get(i % queryFunctions.size()).apply(id);
            addQuery(query, documents);
        }

        indexWriter.addDocuments(documents);
        indexWriter.close();
        directoryReader = DirectoryReader.open(directory);
        IndexSearcher shardSearcher = newSearcher(directoryReader);
        // Disable query cache, because ControlQuery cannot be cached...
        shardSearcher.setQueryCache(null);

        // Duel one candidate document per indexed query id.
        for (int i = 0; i < numDocs; i++) {
            String id = Integer.toString(i);
            Iterable<? extends IndexableField> doc = Collections.singleton(new StringField("field", id, Field.Store.NO));
            MemoryIndex memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
            duelRun(queryStore, memoryIndex, shardSearcher);
        }

        // A document whose value matches no indexed id.
        Iterable<? extends IndexableField> doc = Collections.singleton(new StringField("field", "value", Field.Store.NO));
        MemoryIndex memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
        duelRun(queryStore, memoryIndex, shardSearcher);
        // Empty percolator doc:
        memoryIndex = new MemoryIndex();
        duelRun(queryStore, memoryIndex, shardSearcher);
    }

    /**
     * Duels query types with specialized extraction behavior: common-terms,
     * blended-term, span-near/or/not and a numeric range query.
     */
    public void testDuelSpecificQueries() throws Exception {
        List<ParseContext.Document> documents = new ArrayList<>();

        CommonTermsQuery commonTermsQuery = new CommonTermsQuery(BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD, 128);
        commonTermsQuery.add(new Term("field", "quick"));
        commonTermsQuery.add(new Term("field", "brown"));
        commonTermsQuery.add(new Term("field", "fox"));
        addQuery(commonTermsQuery, documents);

        BlendedTermQuery blendedTermQuery = BlendedTermQuery.dismaxBlendedQuery(new Term[]{new Term("field", "quick"),
                new Term("field", "brown"), new Term("field", "fox")}, 1.0f);
        addQuery(blendedTermQuery, documents);

        SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("field", true)
                .addClause(new SpanTermQuery(new Term("field", "quick")))
                .addClause(new SpanTermQuery(new Term("field", "brown")))
                .addClause(new SpanTermQuery(new Term("field", "fox")))
                .build();
        addQuery(spanNearQuery, documents);

        SpanNearQuery spanNearQuery2 = new SpanNearQuery.Builder("field", true)
                .addClause(new SpanTermQuery(new Term("field", "the")))
                .addClause(new SpanTermQuery(new Term("field", "lazy")))
                .addClause(new SpanTermQuery(new Term("field", "doc")))
                .build();
        SpanOrQuery spanOrQuery = new SpanOrQuery(
                spanNearQuery,
                spanNearQuery2
        );
        addQuery(spanOrQuery, documents);

        SpanNotQuery spanNotQuery = new SpanNotQuery(spanNearQuery, spanNearQuery);
        addQuery(spanNotQuery, documents);

        // Random long range; the candidate document carries a value inside it.
        long lowerLong = randomIntBetween(0, 256);
        long upperLong = lowerLong + randomIntBetween(0, 32);
        addQuery(LongPoint.newRangeQuery("long_field", lowerLong, upperLong), documents);

        indexWriter.addDocuments(documents);
        indexWriter.close();
        directoryReader = DirectoryReader.open(directory);
        IndexSearcher shardSearcher = newSearcher(directoryReader);
        // Disable query cache, because ControlQuery cannot be cached...
        shardSearcher.setQueryCache(null);

        Document document = new Document();
        document.add(new TextField("field", "the quick brown fox jumps over the lazy dog", Field.Store.NO));
        long randomLong = randomIntBetween((int) lowerLong, (int) upperLong);
        document.add(new LongPoint("long_field", randomLong));
        MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
        duelRun(queryStore, memoryIndex, shardSearcher);
    }

    /**
     * Percolates the given in-memory document through both the optimized
     * percolate query and the brute-force control query, asserting that hit
     * count, doc ids, scores and (when scoring) explanations are identical.
     */
    private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException {
        boolean requireScore = randomBoolean();
        IndexSearcher percolateSearcher = memoryIndex.createSearcher();
        Query percolateQuery = fieldType.percolateQuery(queryStore, new BytesArray("{}"), percolateSearcher);
        Query query = requireScore ? percolateQuery : new ConstantScoreQuery(percolateQuery);
        TopDocs topDocs = shardSearcher.search(query, 10);

        Query controlQuery = new ControlQuery(memoryIndex, queryStore);
        controlQuery = requireScore ? controlQuery : new ConstantScoreQuery(controlQuery);
        TopDocs controlTopDocs = shardSearcher.search(controlQuery, 10);
        assertThat(topDocs.totalHits, equalTo(controlTopDocs.totalHits));
        assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length));
        for (int j = 0; j < topDocs.scoreDocs.length; j++) {
            assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc));
            assertThat(topDocs.scoreDocs[j].score, equalTo(controlTopDocs.scoreDocs[j].score));
            if (requireScore) {
                Explanation explain1 = shardSearcher.explain(query, topDocs.scoreDocs[j].doc);
                Explanation explain2 = shardSearcher.explain(controlQuery, controlTopDocs.scoreDocs[j].doc);
                assertThat(explain1.isMatch(), equalTo(explain2.isMatch()));
                assertThat(explain1.getValue(), equalTo(explain2.getValue()));
            }
        }
    }

    /**
     * Serializes the query through the percolator field mapper into a Lucene
     * document (added to {@code docs}) and records it in {@link #queries} so the
     * query store can resolve it by doc id later.
     */
    private void addQuery(Query query, List<ParseContext.Document> docs) throws IOException {
        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
                mapperService.documentMapperParser(), documentMapper, null, null);
        fieldMapper.processQuery(query, parseContext);
        docs.add(parseContext.doc());
        queries.add(query);
    }

    /** Test query that only becomes a plain {@link TermQuery} after rewrite. */
    private static final class CustomQuery extends Query {

        private final Term term;

        private CustomQuery(Term term) {
            this.term = term;
        }

        @Override
        public Query rewrite(IndexReader reader) throws IOException {
            return new TermQuery(term);
        }

        @Override
        public String toString(String field) {
            return "custom{" + field + "}";
        }

        @Override
        public boolean equals(Object obj) {
            return sameClassAs(obj);
        }

        @Override
        public int hashCode() {
            return classHash();
        }
    }

    /**
     * Brute-force oracle: iterates over every indexed percolator query and
     * matches a doc iff evaluating its stored query against the in-memory
     * candidate document produces a non-zero score.
     */
    private static final class ControlQuery extends Query {

        private final MemoryIndex memoryIndex;
        private final PercolateQuery.QueryStore queryStore;

        private ControlQuery(MemoryIndex memoryIndex, PercolateQuery.QueryStore queryStore) {
            this.memoryIndex = memoryIndex;
            this.queryStore = queryStore;
        }

        @Override
        public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) {
            return new Weight(this) {

                // Score of the most recently matched doc, captured inside match().
                float _score;

                @Override
                public void extractTerms(Set<Term> terms) {}

                @Override
                public Explanation explain(LeafReaderContext context, int doc) throws IOException {
                    Scorer scorer = scorer(context);
                    if (scorer != null) {
                        int result = scorer.iterator().advance(doc);
                        if (result == doc) {
                            return Explanation.match(scorer.score(), "ControlQuery");
                        }
                    }
                    return Explanation.noMatch("ControlQuery");
                }

                @Override
                public String toString() {
                    return "weight(" + ControlQuery.this + ")";
                }

                @Override
                public Scorer scorer(LeafReaderContext context) throws IOException {
                    DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc());
                    CheckedFunction<Integer, Query, IOException> leaf = queryStore.getQueries(context);
                    FilteredDocIdSetIterator memoryIndexIterator = new FilteredDocIdSetIterator(allDocs) {

                        @Override
                        protected boolean match(int doc) {
                            try {
                                Query query = leaf.apply(doc);
                                float score = memoryIndex.search(query);
                                if (score != 0f) {
                                    if (needsScores) {
                                        _score = score;
                                    }
                                    return true;
                                } else {
                                    return false;
                                }
                            } catch (IOException e) {
                                throw new RuntimeException(e);
                            }
                        }
                    };
                    return new FilterScorer(new ConstantScoreScorer(this, 1f, memoryIndexIterator)) {

                        @Override
                        public float score() throws IOException {
                            return _score;
                        }
                    };
                }
            };
        }

        @Override
        public String toString(String field) {
            return "control{" + field + "}";
        }

        @Override
        public boolean equals(Object obj) {
            return sameClassAs(obj);
        }

        @Override
        public int hashCode() {
            return classHash();
        }
    }
}
| apache-2.0 |
apache/zest-qi4j | libraries/rest-client/src/main/java/org/apache/polygene/library/rest/client/api/HandlerCommand.java | 6730 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.library.rest.client.api;
import java.lang.reflect.ParameterizedType;
import org.apache.polygene.api.util.Classes;
import org.apache.polygene.library.rest.client.spi.ResponseHandler;
import org.apache.polygene.library.rest.client.spi.ResultHandler;
import org.apache.polygene.library.rest.common.link.Link;
import org.restlet.Response;
/**
 * A deferred client interaction (refresh, query, command or delete) together
 * with optional handlers for the success and processing-error responses.
 * Instances are created through the static factory methods and executed
 * against a {@link ContextResourceClient}.
 */
public abstract class HandlerCommand
{
    /** Creates a command that re-reads the current resource. */
    public static HandlerCommand refresh()
    {
        return new RefreshCommand();
    }

    /** Creates a command that follows the query link with the given relation, without a request object. */
    public static HandlerCommand query(String relation)
    {
        return new QueryCommand( relation, null);
    }

    /** Creates a command that follows the query link with the given relation, sending the given request object. */
    public static HandlerCommand query(String relation, Object requestObject)
    {
        return new QueryCommand( relation, requestObject);
    }

    /** Creates a command that performs a query via the given link directly. */
    public static HandlerCommand query(Link relation)
    {
        return new QueryLinkCommand( relation );
    }

    /** Creates a command that invokes the command link with the given relation, without a request object. */
    public static HandlerCommand command(String relation)
    {
        return new CommandRelationCommand( relation, null );
    }

    /** Creates a command that invokes the command link with the given relation, sending the given request object. */
    public static HandlerCommand command(String relation, Object requestObject)
    {
        return new CommandRelationCommand( relation, requestObject );
    }

    /** Creates a command that invokes the given command link directly, without a request object. */
    public static HandlerCommand command(Link link)
    {
        return new CommandLinkCommand( link, null );
    }

    /** Creates a command that invokes the given command link directly, sending the given request object. */
    public static HandlerCommand command(Link link, Object requestObject)
    {
        return new CommandLinkCommand( link, requestObject );
    }

    /** Creates a command that deletes the current resource. */
    public static HandlerCommand delete()
    {
        return new DeleteCommand();
    }

    // Handler invoked on a successful response (may be null).
    protected ResponseHandler responseHandler;
    // Handler invoked when the server reports a processing error (may be null).
    protected ResponseHandler processingErrorHandler;

    /** Registers a raw response handler to run on success; returns this for chaining. */
    public HandlerCommand onSuccess(ResponseHandler responseHandler)
    {
        this.responseHandler = responseHandler;
        return this;
    }

    /**
     * Registers a typed result handler to run on success. The response body is
     * deserialized into the handler's declared result type before being passed on.
     */
    public <T> HandlerCommand onSuccess(final ResultHandler<T> resultHandler)
    {
        // Determine T reflectively from the handler's generic interface declaration.
        // NOTE(review): assumes ResultHandler is the first interface the handler
        // class implements directly - lambdas or indirect implementations may break this.
        final Class<T> resultType = (Class<T>) Classes.RAW_CLASS.apply(( (ParameterizedType) resultHandler.getClass().getGenericInterfaces()[ 0 ] ).getActualTypeArguments()[0]);
        this.responseHandler = new ResponseHandler()
        {
            @Override
            public HandlerCommand handleResponse( Response response, ContextResourceClient client )
            {
                T result = client.getContextResourceClientFactory().readResponse( response, resultType );
                return resultHandler.handleResult( result, client );
            }
        };
        return this;
    }

    /** Registers a raw response handler to run on a processing error; returns this for chaining. */
    public HandlerCommand onProcessingError(ResponseHandler processingErrorHandler)
    {
        this.processingErrorHandler = processingErrorHandler;
        return this;
    }

    /**
     * Registers a typed result handler to run on a processing error. The error
     * response body is deserialized into the handler's declared result type.
     */
    public <T> HandlerCommand onProcessingError(final ResultHandler<T> resultHandler)
    {
        // Same reflective result-type extraction as onSuccess(ResultHandler).
        final Class<T> resultType = (Class<T>) Classes.RAW_CLASS.apply(( (ParameterizedType) resultHandler.getClass().getGenericInterfaces()[ 0 ] ).getActualTypeArguments()[0]);
        this.processingErrorHandler = ( response, client ) ->
        {
            T result = client.getContextResourceClientFactory().readResponse( response, resultType );
            return resultHandler.handleResult( result, client );
        };
        return this;
    }

    /** Performs this command's interaction against the given client. */
    abstract HandlerCommand execute( ContextResourceClient client);

    /** Re-fetches the current resource. */
    private static class RefreshCommand
        extends HandlerCommand
    {
        @Override
        HandlerCommand execute( ContextResourceClient client )
        {
            return client.refresh();
        }
    }

    /** Resolves the query link by relation name on the current resource, then queries it. */
    private static class QueryCommand
        extends HandlerCommand
    {
        private String relation;
        private Object requestObject;

        private QueryCommand( String relation, Object requestObject)
        {
            this.relation = relation;
            this.requestObject = requestObject;
        }

        @Override
        HandlerCommand execute( ContextResourceClient client )
        {
            Link link = client.getResource().query( relation );
            return client.query( link, requestObject, responseHandler, processingErrorHandler );
        }
    }

    /** Queries a pre-resolved link; never sends a request object. */
    private static class QueryLinkCommand
        extends HandlerCommand
    {
        private Link link;

        private QueryLinkCommand( Link link)
        {
            this.link = link;
        }

        @Override
        HandlerCommand execute( ContextResourceClient client )
        {
            return client.query( link, null, responseHandler, processingErrorHandler );
        }
    }

    /** Resolves the command link by relation name on the current resource, then invokes it. */
    private static class CommandRelationCommand
        extends HandlerCommand
    {
        private String relation;
        private Object requestObject;

        private CommandRelationCommand( String relation, Object requestObject )
        {
            this.relation = relation;
            this.requestObject = requestObject;
        }

        @Override
        HandlerCommand execute( ContextResourceClient client )
        {
            Link link = client.getResource().command( relation );
            return client.command( link, requestObject, responseHandler, processingErrorHandler );
        }
    }

    /** Invokes a pre-resolved command link with an optional request object. */
    private static class CommandLinkCommand
        extends HandlerCommand
    {
        private Link link;
        private Object requestObject;

        private CommandLinkCommand( Link link, Object requestObject )
        {
            this.link = link;
            this.requestObject = requestObject;
        }

        @Override
        HandlerCommand execute( ContextResourceClient client )
        {
            return client.command( link, requestObject, responseHandler, processingErrorHandler );
        }
    }

    /** Deletes the current resource. */
    private static class DeleteCommand
        extends HandlerCommand
    {
        @Override
        HandlerCommand execute( ContextResourceClient client )
        {
            return client.delete(responseHandler, processingErrorHandler);
        }
    }
}
| apache-2.0 |
cushon/error-prone | core/src/test/java/com/google/errorprone/testdata/CommandLineFlagTestFile.java | 729 | /*
* Copyright 2014 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.testdata;
/** Test-data fixture class; the method body is intentionally a no-op. */
public class CommandLineFlagTestFile {

  /** Does nothing. Only the method's presence matters. */
  public void foo() {}
}
| apache-2.0 |
interledger/java-ilp-core | src/test/java/org/interledger/ilqp/QuoteByDestinationAmountResponseTest.java | 4010 | package org.interledger.ilqp;
import static junit.framework.TestCase.assertTrue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import org.junit.Test;
import java.math.BigInteger;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
/**
* Unit tests for {@link QuoteByDestinationAmountResponse}.
*/
public class QuoteByDestinationAmountResponseTest {

    // Fixture values shared by all tests.
    private static final BigInteger sourceAmount = BigInteger.TEN;
    private static final Duration sourceHoldDuration = Duration.ZERO;

    /** Building with valid values exposes them unchanged through the accessors. */
    @Test
    public void testBuild() throws Exception {
        final QuoteByDestinationAmountResponse quoteResponse =
            QuoteByDestinationAmountResponse.builder()
                .sourceAmount(sourceAmount)
                .sourceHoldDuration(sourceHoldDuration).build();

        assertThat(quoteResponse.getSourceAmount(), is(sourceAmount));
        assertThat(quoteResponse.getSourceHoldDuration(), is(sourceHoldDuration));
    }

    /** Zero is the boundary amount and must be accepted. */
    @Test
    public void testZeroAmount() throws Exception {
        final QuoteByDestinationAmountResponse quoteRequest =
            QuoteByDestinationAmountResponse.builder()
                .sourceAmount(BigInteger.ZERO)
                .sourceHoldDuration(sourceHoldDuration).build();

        assertThat(quoteRequest.getSourceAmount(), is(BigInteger.ZERO));
        assertThat(quoteRequest.getSourceHoldDuration(), is(sourceHoldDuration));
    }

    /** Negative amounts are rejected with an IllegalArgumentException. */
    @Test(expected = IllegalArgumentException.class)
    public void testNegativeAmount() throws Exception {
        try {
            QuoteByDestinationAmountResponse.builder()
                .sourceAmount(BigInteger.valueOf(-11L))
                .sourceHoldDuration(sourceHoldDuration).build();
            fail();
        } catch (IllegalArgumentException e) {
            // NOTE(review): the builder's message says "destinationAmount" even though the
            // field being validated here is the source amount - confirm the message upstream.
            assertThat(e.getMessage(), is("destinationAmount must be at least 0!"));
            throw e;
        }
    }

    /** Each required builder property reports a descriptive NPE when omitted. */
    @Test
    public void testBuildWithNullValues() throws Exception {
        try {
            QuoteByDestinationAmountResponse.builder().build();
            fail();
        } catch (NullPointerException e) {
            assertThat(e.getMessage(), is("sourceAmount must not be null!"));
        }

        try {
            QuoteByDestinationAmountResponse.builder()
                .sourceAmount(sourceAmount)
                .build();
            fail();
        } catch (NullPointerException e) {
            assertThat(e.getMessage(), is("sourceHoldDuration must not be null!"));
        }
    }

    /** equals/hashCode agree for identical values and differ when any field differs. */
    @Test
    public void testEqualsHashCode() throws Exception {
        final QuoteByDestinationAmountResponse quoteResponse1 =
            QuoteByDestinationAmountResponse.builder()
                .sourceAmount(sourceAmount)
                .sourceHoldDuration(sourceHoldDuration)
                .build();

        final QuoteByDestinationAmountResponse quoteResponse2 =
            QuoteByDestinationAmountResponse.builder()
                .sourceAmount(sourceAmount)
                .sourceHoldDuration(sourceHoldDuration)
                .build();

        assertTrue(quoteResponse1.equals(quoteResponse2));
        assertTrue(quoteResponse2.equals(quoteResponse1));
        assertTrue(quoteResponse1.hashCode() == quoteResponse2.hashCode());

        {
            // Differs only in the hold duration.
            final QuoteByDestinationAmountResponse quoteResponse3 = QuoteByDestinationAmountResponse
                .builder()
                .sourceAmount(sourceAmount)
                .sourceHoldDuration(Duration.of(1L, ChronoUnit.SECONDS))
                .build();

            assertFalse(quoteResponse1.equals(quoteResponse3));
            assertFalse(quoteResponse3.equals(quoteResponse1));
            assertFalse(quoteResponse1.hashCode() == quoteResponse3.hashCode());
        }

        {
            // Differs only in the source amount.
            final QuoteByDestinationAmountResponse quoteResponse4 = QuoteByDestinationAmountResponse
                .builder()
                .sourceAmount(BigInteger.ONE)
                .sourceHoldDuration(sourceHoldDuration)
                .build();

            assertFalse(quoteResponse1.equals(quoteResponse4));
            assertFalse(quoteResponse4.equals(quoteResponse1));
            assertFalse(quoteResponse1.hashCode() == quoteResponse4.hashCode());
        }
    }
}
jelacote/product-mdm | modules/mobile-agents/windows/jax-rs/src/main/java/org/wso2/carbon/mdm/mobileservices/windows/common/exceptions/CertificateGenerationException.java | 1594 | /*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.mdm.mobileservices.windows.common.exceptions;
/**
* Exception class for Certificate generation failures in WSTEP stage.
*/
public class CertificateGenerationException extends Exception {

    // Human-readable description of the failure; populated only by the
    // String-accepting constructors (mirrors the exception message).
    private String errorMessage;

    public CertificateGenerationException() {
        super();
    }

    public CertificateGenerationException(String message) {
        super(message);
        setErrorMessage(message);
    }

    public CertificateGenerationException(String message, Throwable cause) {
        super(message, cause);
        setErrorMessage(message);
    }

    public CertificateGenerationException(String message, Exception nestedEx) {
        super(message, nestedEx);
        setErrorMessage(message);
    }

    public CertificateGenerationException(Throwable cause) {
        super(cause);
    }

    /** Returns the stored error message, or null if none was provided. */
    public String getErrorMessage() {
        return this.errorMessage;
    }

    /** Replaces the stored error message. */
    public void setErrorMessage(String errorMessage) {
        this.errorMessage = errorMessage;
    }
}
| apache-2.0 |
mpgerstl/tEFMA | ch/javasoft/math/operator/compose/NullaryQuaternaryOperator.java | 3924 | /*
* =============================================================================
* Simplified BSD License, see http://www.opensource.org/licenses/
* -----------------------------------------------------------------------------
* Copyright (c) 2008-2009, Marco Terzer, Zurich, Switzerland
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Swiss Federal Institute of Technology Zurich
* nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* =============================================================================
*/
package ch.javasoft.math.operator.compose;
import ch.javasoft.math.array.ArrayOperations;
import ch.javasoft.math.operator.AbstractQuaternaryOperator;
import ch.javasoft.math.operator.BinaryOperator;
import ch.javasoft.math.operator.NullaryOperator;
import ch.javasoft.math.operator.QuaternaryOperator;
/**
 * A {@link QuaternaryOperator} built by composition: a {@link BinaryOperator}
 * ⊕ joins the value produced by a nested {@link NullaryOperator} with the
 * value produced by a nested {@link QuaternaryOperator}:
 * <pre>
 * this.operate(x1, x2, x3, x4) = ( operand1.operate() ⊕ operand2.operate(x1, x2, x3, x4) )
 * </pre>
 *
 * @type T result type of the operation
 * @type A array type of result
 */
public class NullaryQuaternaryOperator<T extends Number, A> extends AbstractQuaternaryOperator<T, A> {

    /** Joins the two nested results into the final value. */
    private final BinaryOperator<T, A> combiner;
    /** Left operand: produces a value from no arguments. */
    private final NullaryOperator<T, A> nullarySource;
    /** Right operand: produces a value from the four arguments. */
    private final QuaternaryOperator<T, A> quaternarySource;
    /** Reusable two-slot scratch buffer for the array-based variant. */
    private final TempArray<A> temp;

    public NullaryQuaternaryOperator(ArrayOperations<A> ops, BinaryOperator<T, A> operator, NullaryOperator<T, A> operand1, QuaternaryOperator<T, A> operand2) {
        this.combiner = operator;
        this.nullarySource = operand1;
        this.quaternarySource = operand2;
        this.temp = new TempArray<A>(ops, 2);
    }

    public T operate(T operand1, T operand2, T operand3, T operand4) {
        // Evaluate left (nullary) side first, then the argument-dependent side,
        // and combine — same order as the array-based variant below.
        final T leftValue = nullarySource.operate();
        final T rightValue = quaternarySource.operate(operand1, operand2, operand3, operand4);
        return combiner.operate(leftValue, rightValue);
    }

    public void operate(A operand1, int index1, A operand2, int index2, A operand3, int index3, A operand4, int index4, A dst, int dstIndex) {
        // Stage both intermediate results in the scratch buffer, then combine
        // them into the destination slot.
        final A buf = temp.get();
        nullarySource.operate(buf, 0);
        quaternarySource.operate(operand1, index1, operand2, index2, operand3, index3, operand4, index4, buf, 1);
        combiner.operate(buf, 0, buf, 1, dst, dstIndex);
    }
}
| bsd-2-clause |
javachengwc/jodd | jodd-bean/src/main/java/jodd/typeconverter/impl/DoubleArrayConverter.java | 6359 | // Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.typeconverter.impl;
import jodd.typeconverter.TypeConverter;
import jodd.typeconverter.TypeConverterManagerBean;
import jodd.util.StringUtil;
import jodd.util.collection.DoubleArrayList;
import java.util.Collection;
import java.util.List;
/**
* Converts given object to <code>double[]</code>.
*/
public class DoubleArrayConverter implements TypeConverter<double[]> {
protected final TypeConverterManagerBean typeConverterManagerBean;
public DoubleArrayConverter(TypeConverterManagerBean typeConverterManagerBean) {
this.typeConverterManagerBean = typeConverterManagerBean;
}
public double[] convert(Object value) {
if (value == null) {
return null;
}
Class valueClass = value.getClass();
if (valueClass.isArray() == false) {
// source is not an array
return convertValueToArray(value);
}
// source is an array
return convertArrayToArray(value);
}
/**
* Converts type using type converter manager.
*/
protected double convertType(Object value) {
return typeConverterManagerBean.convertType(value, double.class).doubleValue();
}
/**
* Creates an array with single element.
*/
protected double[] convertToSingleElementArray(Object value) {
return new double[] {convertType(value)};
}
/**
* Converts non-array value to array. Detects various
* collection types and iterates them to make conversion
* and to create target array.
*/
protected double[] convertValueToArray(Object value) {
if (value instanceof List) {
List list = (List) value;
double[] target = new double[list.size()];
for (int i = 0; i < list.size(); i++) {
Object element = list.get(i);
target[i] = convertType(element);
}
return target;
}
if (value instanceof Collection) {
Collection collection = (Collection) value;
double[] target = new double[collection.size()];
int i = 0;
for (Object element : collection) {
target[i] = convertType(element);
i++;
}
return target;
}
if (value instanceof Iterable) {
Iterable iterable = (Iterable) value;
DoubleArrayList doubleArrayList = new DoubleArrayList();
for (Object element : iterable) {
double convertedValue = convertType(element);
doubleArrayList.add(convertedValue);
}
return doubleArrayList.toArray();
}
if (value instanceof CharSequence) {
String[] strings = StringUtil.splitc(value.toString(), ArrayConverter.NUMBER_DELIMITERS);
return convertArrayToArray(strings);
}
// everything else:
return convertToSingleElementArray(value);
}
/**
* Converts array value to array.
*/
protected double[] convertArrayToArray(Object value) {
Class valueComponentType = value.getClass().getComponentType();
if (valueComponentType == double.class) {
// equal types, no conversion needed
return (double[]) value;
}
double[] result;
if (valueComponentType.isPrimitive()) {
// convert primitive array to target array
result = convertPrimitiveArrayToArray(value, valueComponentType);
} else {
// convert object array to target array
Object[] array = (Object[]) value;
result = new double[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = convertType(array[i]);
}
}
return result;
}
/**
* Converts primitive array to target array.
*/
protected double[] convertPrimitiveArrayToArray(Object value, Class primitiveComponentType) {
double[] result = null;
if (primitiveComponentType == double[].class) {
return (double[]) value;
}
if (primitiveComponentType == int.class) {
int[] array = (int[]) value;
result = new double[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i];
}
}
else if (primitiveComponentType == long.class) {
long[] array = (long[]) value;
result = new double[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i];
}
}
else if (primitiveComponentType == float.class) {
float[] array = (float[]) value;
result = new double[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i];
}
}
else if (primitiveComponentType == short.class) {
short[] array = (short[]) value;
result = new double[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i];
}
}
else if (primitiveComponentType == byte.class) {
byte[] array = (byte[]) value;
result = new double[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i];
}
}
else if (primitiveComponentType == char.class) {
char[] array = (char[]) value;
result = new double[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i];
}
}
else if (primitiveComponentType == boolean.class) {
boolean[] array = (boolean[]) value;
result = new double[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i] ? 1 : 0;
}
}
return result;
}
} | bsd-2-clause |
mortenoh/dhis2-core | dhis-2/dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/sms/config/SmsGateway.java | 2101 | package org.hisp.dhis.sms.config;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.List;
import java.util.Set;
import org.hisp.dhis.sms.MessageResponseStatus;
import org.hisp.dhis.sms.outbound.MessageBatch;
/**
 * Contract for a concrete SMS gateway transport. Implementations deliver
 * messages through one provider; {@link #accept(SmsGatewayConfig)} lets the
 * dispatcher pick the implementation that matches a given configuration.
 *
 * @author Zubair <rajazubair.asghar@gmail.com>
 */
public interface SmsGateway
{
    /**
     * Sends every message in the batch through this gateway and returns one
     * status per delivery attempt.
     */
    List<MessageResponseStatus> sendBatch( MessageBatch batch, SmsGatewayConfig gatewayConfig );

    /**
     * Returns true if this gateway implementation can handle the given
     * configuration.
     */
    boolean accept( SmsGatewayConfig gatewayConfig );

    /**
     * Sends a single message with the given subject and text to all
     * recipients, returning the aggregate delivery status.
     */
    MessageResponseStatus send( String subject, String text, Set<String> recipients, SmsGatewayConfig gatewayConfig );
}
| bsd-3-clause |
dhimmel/owltools | OWLTools-Solr/src/main/java/owltools/yaml/golrconfig/SolrSchemaXMLWriter.java | 9582 | package owltools.yaml.golrconfig;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import owltools.gaf.io.AbstractXmlWriter;
/**
 * Serializes a GOlr YAML configuration ({@link ConfigManager}) into a Solr
 * <code>schema.xml</code> document: field types, field declarations (plus
 * auto-generated "_searchable" and "_map" companions), copyField rules, and
 * the closing boilerplate.
 */
public class SolrSchemaXMLWriter extends AbstractXmlWriter {

	//private static Logger LOG = Logger.getLogger(ConfigManager.class);

	// Source of the field definitions to serialize.
	private ConfigManager config = null;

	/**
	 * @param aconfig loaded GOlr configuration to render as a Solr schema
	 */
	public SolrSchemaXMLWriter(ConfigManager aconfig) {
		super(" "); // like emacs nXML
		config = aconfig;
	}

	/**
	 * Automatically add fields to the schema depending on qualities in the current GOlrField.
	 * For ids ending in "_closure_label" or "_list_label", emits a stored-only
	 * companion field named "<base>_closure_map" / "<base>_list_map".
	 *
	 * @param field GOlrField whose id is inspected
	 * @param xml open stream writer positioned inside the &lt;fields&gt; element
	 * @throws XMLStreamException on any underlying write failure
	 */
	private void generateAutomaticFields(GOlrField field, XMLStreamWriter xml) throws XMLStreamException{

		// Detect whether we need to automatically add _*_map mapping information.
		// Work on either "list" or "closure".
		Pattern pcl = Pattern.compile("(.*)_closure_label$");
		Pattern pll = Pattern.compile("(.*)_list_label$");
		Matcher clmatch = pcl.matcher(field.id);
		Matcher llmatch = pll.matcher(field.id);

		// See if it's one of the above; capture the base name and map suffix.
		String baseName = null;
		String mtype = null;
		if( clmatch.matches() ){
			baseName = clmatch.group(1);
			mtype = "_closure_map";
		}else if( llmatch.matches() ){
			baseName = llmatch.group(1);
			mtype = "_list_map";
		}

		if( mtype != null ){

			// NOTE: See comments below.
			xml.writeComment(" Automatically created to capture mapping information ");
			xml.writeComment(" between " + baseName + "_(list|closure) and " + field.id + ".");
			xml.writeComment(" It is not indexed for searching (JSON blob), but may be useful to the client. ");

			// Stored-only single-valued string field; not indexed.
			xml.writeStartElement("field"); // <field>
			xml.writeAttribute("name", baseName + mtype);
			xml.writeAttribute("type", "string");
			xml.writeAttribute("required", "false");
			xml.writeAttribute("multiValued", "false");
			xml.writeAttribute("indexed", "false");
			xml.writeAttribute("stored", "true");
			xml.writeEndElement(); // </field>
		}
	}

	/**
	 * Just dump out the fields of our various lists.
	 * Emits one &lt;field&gt; per configured GOlrField, plus a tokenized
	 * "_searchable" twin (and its &lt;copyField&gt;) when searchable, plus any
	 * automatic "_map" companions.
	 *
	 * @param config source of the field list and per-field comments
	 * @param xml open stream writer positioned inside the &lt;fields&gt; element
	 * @throws XMLStreamException on any underlying write failure
	 */
	//private void outFields(List<? extends GOlrCoreField> fieldList, XMLStreamWriter xml) throws XMLStreamException{
	private void outFields(ConfigManager config, XMLStreamWriter xml) throws XMLStreamException{

		ArrayList<GOlrField> fieldList = config.getFields();
		for( GOlrField field : fieldList ){

			// Output any comments we found as a bunch at the top;
			// this should help clarify things when fields are overloaded.
			ArrayList<String> comments = config.getFieldComments(field.id);
			for( String comment : comments ){
				xml.writeComment(comment);
			}

			// Gather things up first.
			String f_id = field.id;
			String f_type = field.type;

			// ID is the only required field.
			String f_required = "false";
			if( field.id.equals("id") ){
				f_required = "true";
			}

			// Cardinality maps to multivalued.
			String f_multi = "true";
			if( field.cardinality.equals("single") ){
				f_multi = "false";
			}

			String f_indexed = field.indexed;

			// Write out the "main" field declaration.
			xml.writeStartElement("field");

			// The main variants.
			xml.writeAttribute("name", f_id);
			xml.writeAttribute("type", f_type);
			xml.writeAttribute("required", f_required);
			xml.writeAttribute("multiValued", f_multi);
			xml.writeAttribute("indexed", f_indexed);

			// Invariants: we'll always store.
			xml.writeAttribute("stored", "true");

			// Done.
			xml.writeEndElement(); // </field>

			// If searchable is true, create an additional field that mirrors
			// the main one, but using the tokenizer (needed for edismax, etc.).
			String f_searchable = field.searchable;
			//LOG.info("field.searchable: " + f_searchable);
			if( f_searchable.equals("true") ){

				//String munged_id = f_id + config.getSearchableExtension();
				String munged_id = f_id + "_searchable";

				xml.writeComment("An easily searchable (TextField tokenized) version of " + f_id + ".");

				xml.writeStartElement("field");

				// The main variants.
				xml.writeAttribute("name", munged_id);
				xml.writeAttribute("type", "text_searchable");
				xml.writeAttribute("required", f_required);
				xml.writeAttribute("multiValued", f_multi);

				// Invariants: we'll always store and index.
				xml.writeAttribute("indexed", "true");
				xml.writeAttribute("stored", "true");

				// Done.
				xml.writeEndElement(); // </field>

				// Also, add the field copy so we don't have to worry about manually loading it.
				xml.writeStartElement("copyField");
				// <copyField source="body" dest="teaser" maxChars="300"/>
				xml.writeAttribute("source", f_id);
				xml.writeAttribute("dest", munged_id);
				xml.writeEndElement(); // </copyField>
			}

			// Add any automatically generated fields if necessary.
			generateAutomaticFields(field, xml);
		}
	}

	/**
	 * Dump the necessary Solr schema as a String.
	 *
	 * @return schema the complete schema.xml document text
	 * @throws XMLStreamException on any underlying write failure
	 */
	public String schema() throws XMLStreamException{

		//OutputStream outputStream = System.out;
		OutputStream outputStream = new ByteArrayOutputStream();
		XMLStreamWriter xml = this.createWriter(outputStream);

		xml.writeStartDocument();

		///
		/// Opening cruft.
		///

		xml.writeStartElement("schema");
		xml.writeAttribute("name", "golr");
		xml.writeAttribute("version", "3.6");

		xml.writeStartElement("types");

		// NOTE: See comments below.
		xml.writeComment("Unsplit string for when text needs to be dealt with atomically.");
		xml.writeComment("For example, faceted querying.");
		xml.writeStartElement("fieldType");
		xml.writeAttribute("name", "string");
		xml.writeAttribute("class", "solr.StrField");
		xml.writeAttribute("sortMissingLast", "true");
		xml.writeEndElement(); // </fieldType>

		// NOTE: See comments below.
		xml.writeComment("Any string with spaces that needs to be treated for searching purposes.");
		xml.writeComment("This will be automatically used in cases where \"searchable: true\" has been");
		xml.writeComment("specified in the YAML.");
		xml.writeStartElement("fieldType");
		xml.writeAttribute("name", "text_searchable");
		xml.writeAttribute("class", "solr.TextField");
		xml.writeAttribute("positionIncrementGap", "100");
		xml.writeAttribute("sortMissingLast", "true");
		// Analyzer chain: standard tokenizer + lowercasing.
		xml.writeStartElement("analyzer");
		xml.writeStartElement("tokenizer");
		xml.writeAttribute("class", "solr.StandardTokenizerFactory");
		xml.writeEndElement(); // </tokenizer>
		xml.writeStartElement("filter");
		xml.writeAttribute("class", "solr.LowerCaseFilterFactory");
		xml.writeEndElement(); // </filter>
		xml.writeEndElement(); // </analyzer>
		xml.writeEndElement(); // </fieldType>

		// Integer.
		xml.writeStartElement("fieldType");
		xml.writeAttribute("name", "integer");
		xml.writeAttribute("class", "solr.TrieIntField");
		xml.writeAttribute("precisionStep", "0");
		xml.writeAttribute("positionIncrementGap", "0");
		xml.writeAttribute("sortMissingLast", "true");
		xml.writeEndElement(); // </fieldType>

		// True boolean.
		xml.writeStartElement("fieldType");
		xml.writeAttribute("name", "boolean");
		xml.writeAttribute("class", "solr.BoolField");
		xml.writeAttribute("sortMissingLast", "true");
		xml.writeEndElement(); // </fieldType>

		xml.writeEndElement(); // </types>

		///
		/// Fields
		///

		xml.writeStartElement("fields");
		//xml.writeDefaultNamespace("http://www.w3.org/1999/xhtml");

		// Instructions.
		//xml.writeComment("START");
		//xml.writeComment(" Add this and below to your schema.xml file as your schema and restart Jetty. ");
		//xml.writeComment(" After this schema has been applied for the given config file, purge the index and rerun the loader (with said config file). ");

		// Write out the special required "document_category" field declaration.
		xml.writeComment(" A special static/fixed (by YAML conf file) field all documents have. ");
		xml.writeStartElement("field"); // <field>
		xml.writeAttribute("name", "document_category");
		xml.writeAttribute("type", "string");
		xml.writeAttribute("required", "false");
		xml.writeAttribute("multiValued", "false");
		xml.writeAttribute("indexed", "true");
		xml.writeAttribute("stored", "true");
		xml.writeEndElement(); // </field>

		//		// Single fixed fields--the same every time.
		//		outFields(config.getFixedFields(), xml);
		//
		//		// Dynamic fields.
		//		outFields(config.getDynamicFields(), xml);

		// Dynamic fields.
		//outFields(config.getFields(), xml);
		outFields(config, xml);

		xml.writeEndElement(); // </fields>

		///
		/// Closing cruft.
		///

		xml.writeStartElement("uniqueKey");
		xml.writeCharacters("id");
		xml.writeEndElement(); // </uniqueKey>

		// These are now declared in the search string:
		// defaultSearchOperator defaults to OR and can be changed with "q.op".
		// defaultSearchField is deprecated for using "df" in request handler.
		//xml.writeStartElement("defaultSearchField");
		//xml.writeCharacters("label");
		//xml.writeEndElement(); // </defaultSearchField>
		//xml.writeStartElement("solrQueryParser");
		//xml.writeAttribute("defaultOperator", "OR");
		//xml.writeEndElement(); // </solrQueryParser>

		// Special STOP and wrap up.
		//xml.writeComment("STOP");
		xml.writeEndElement(); // </schema>
		xml.writeEndDocument();
		xml.close();

		// NOTE(review): ByteArrayOutputStream.toString() uses the platform
		// default charset; confirm it matches the encoding the writer from
		// createWriter(..) emits (typically UTF-8).
		return outputStream.toString();
	}
}
| bsd-3-clause |
hzhao/galago-git | tupleflow/src/main/java/org/lemurproject/galago/tupleflow/Order.java | 2978 | // BSD License (http://lemurproject.org)
package org.lemurproject.galago.tupleflow;
import java.util.Collection;
import java.util.Comparator;
/**
 * An Order is a class that represents an ordering of a Galago Type. You won't usually
 * implement this interface directly; instead, let Galago make the class for you
 * with the TemplateTypeBuilder/TypeBuilderMojo tools.
 *
 * @author trevor
 * @param <T> The ordered class.
 */
public interface Order<T> {
    /** Returns the class ordered by this Order. */
    public Class<T> getOrderedClass();

    /**
     * Returns a string representation of the fields ordered by this class. For example:
     * <pre>{ "+document", "-score" }</pre>
     * means that this order orders first by the document number in ascending order, but
     * breaks ties by the score in descending order.
     */
    public String[] getOrderSpec();

    /**
     * Returns a comparator that applies this order to objects of type T.
     * For example, if <pre>getOrderSpec() == { "+document" }</pre> and
     * a.document = 5 and b.document = 7, then:
     * <pre>lessThan().compare(a, b) < 0</pre>.
     */
    public Comparator<T> lessThan();

    /**
     * Returns the reversed comparator:
     * lessThan().compare(a,b) = greaterThan().compare(b,a);
     */
    public Comparator<T> greaterThan();

    /**
     * This is a hash function over an object that only uses ordered fields.
     * For example, if the order is <pre>{ "+document", "-score" }</pre>, this
     * hash function incorporates data from the document and score fields, but
     * no other fields.
     *
     * @param object the instance to hash over its ordered fields
     * @return a hash code derived only from the ordered fields
     */
    public int hash(T object);

    /**
     * Produces an OrderedWriter object that can write objects of class T in this
     * order. This object assumes that its input is already correctly ordered.
     * The OrderedWriter uses the ordering property to write the data in
     * compressed form.
     *
     * @param output destination for the compressed, ordered records
     * @return a processor that writes each object it receives
     */
    public Processor<T> orderedWriter(ArrayOutput output);

    /**
     * Produces an OrderedReader object. This object can read objects that were
     * written with an OrderedWriter object produced by the Order.orderedWriter method.
     *
     * @param input source of previously ordered, compressed records
     * @return a reader producing objects in this order
     */
    public TypeReader<T> orderedReader(ArrayInput input);

    /**
     * Produces an OrderedReader object. This is just like the previous orderedReader
     * method, except you can explicitly set a buffer size.
     *
     * @param input source of previously ordered, compressed records
     * @param bufferSize read-buffer size to use
     * @return a reader producing objects in this order
     */
    public TypeReader<T> orderedReader(ArrayInput input, int bufferSize);

    /**
     * Produces an OrderedCombiner object. An ordered combiner merges objects
     * from many OrderedReaders into a single ordered stream of objects.
     *
     * @param readers already-ordered input streams to merge
     * @param closeOnExit whether the readers are closed when the merge finishes
     * @return a single merged, ordered source of objects
     */
    public ReaderSource<T> orderedCombiner(Collection<TypeReader<T>> readers, boolean closeOnExit);
}
| bsd-3-clause |
broadinstitute/hellbender | src/main/java/org/broadinstitute/hellbender/tools/copynumber/arguments/CopyNumberStandardArgument.java | 977 | package org.broadinstitute.hellbender.tools.copynumber.arguments;
/**
 * Standard long-name command-line argument constants shared by the
 * copy-number tools. Pure constant holder; never instantiated.
 */
public final class CopyNumberStandardArgument {
    public static final String ANNOTATED_INTERVALS_FILE_LONG_NAME = "annotated-intervals";
    public static final String COUNT_PANEL_OF_NORMALS_FILE_LONG_NAME = "count-panel-of-normals";
    public static final String NUMBER_OF_EIGENSAMPLES_LONG_NAME = "number-of-eigensamples";
    public static final String STANDARDIZED_COPY_RATIOS_FILE_LONG_NAME = "standardized-copy-ratios";
    public static final String DENOISED_COPY_RATIOS_FILE_LONG_NAME = "denoised-copy-ratios";
    public static final String ALLELIC_COUNTS_FILE_LONG_NAME = "allelic-counts";
    public static final String NORMAL_ALLELIC_COUNTS_FILE_LONG_NAME = "normal-allelic-counts";
    public static final String SEGMENTS_FILE_LONG_NAME = "segments";
    public static final String OUTPUT_PREFIX_LONG_NAME = "output-prefix";
    public static final String MODEL_LONG_NAME = "model";

    // Utility class: suppress the implicit public constructor.
    private CopyNumberStandardArgument() {
    }
}
| bsd-3-clause |
dhis2/dhis2-core | dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/webmessage/WebMessageException.java | 1931 | /*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.dxf2.webmessage;
/**
 * Exception that carries a {@link WebMessage} payload describing the failure,
 * to be returned to the API client.
 *
 * @author Morten Olav Hansen <mortenoh@gmail.com>
 */
public class WebMessageException extends Exception
{
    // Immutable payload; set once at construction (was non-final before).
    private final WebMessage webMessage;

    public WebMessageException( WebMessage webMessage )
    {
        this.webMessage = webMessage;
    }

    /**
     * @return the message payload supplied at construction
     */
    public WebMessage getWebMessage()
    {
        return webMessage;
    }
}
| bsd-3-clause |
ric2b/Vivaldi-browser | chromium/chrome/browser/ui/android/default_browser_promo/java/src/org/chromium/chrome/browser/ui/default_browser_promo/DefaultBrowserPromoUtilsTest.java | 10718 | // Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.ui.default_browser_promo;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
import android.content.pm.ActivityInfo;
import android.content.pm.ResolveInfo;
import android.os.Build;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.Shadows;
import org.robolectric.annotation.Config;
import org.robolectric.shadows.ShadowPackageManager;
import org.chromium.base.ContextUtils;
import org.chromium.base.PackageManagerUtils;
import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.chrome.browser.ui.default_browser_promo.DefaultBrowserPromoUtils.DefaultBrowserState;
import java.util.ArrayList;
import java.util.List;
/**
* Unit test for {@link DefaultBrowserPromoUtils}.
*/
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class DefaultBrowserPromoUtilsTest {
    // Dependency wrapper mocked per-test to control promo preconditions.
    @Mock
    DefaultBrowserPromoDeps mDeps;

    @Before
    public void setUp() {
        // Initialize the @Mock-annotated fields (mDeps) before each test.
        MockitoAnnotations.initMocks(this);
    }
    @Test
    public void testGetCurrentDefaultStateForNoDefault() {
        // A resolve info for package "android" (second arg presumably a match
        // flag from createResolveInfo — defined outside this view) maps to
        // NO_DEFAULT.
        Assert.assertEquals("Should be no default when resolve info matches no browser.",
                DefaultBrowserState.NO_DEFAULT,
                DefaultBrowserPromoDeps.getInstance().getCurrentDefaultBrowserState(
                        createResolveInfo("android", 0)));
    }
    @Test
    public void testGetCurrentDefaultStateForOtherDefault() {
        // Same "android" package but with the second createResolveInfo arg set
        // to 1 maps to OTHER_DEFAULT (a non-Chrome default browser).
        Assert.assertEquals("Should be other default when resolve info matches another browser.",
                DefaultBrowserPromoUtils.DefaultBrowserState.OTHER_DEFAULT,
                DefaultBrowserPromoDeps.getInstance().getCurrentDefaultBrowserState(
                        createResolveInfo("android", 1)));
    }
    @Test
    public void testGetCurrentDefaultStateForChromeDefault() {
        // A resolve info whose package equals the current application package
        // maps to CHROME_DEFAULT.
        Assert.assertEquals(
                "Should be chrome default when resolve info matches current package name.",
                DefaultBrowserPromoUtils.DefaultBrowserState.CHROME_DEFAULT,
                DefaultBrowserPromoDeps.getInstance().getCurrentDefaultBrowserState(
                        createResolveInfo(
                                ContextUtils.getApplicationContext().getPackageName(), 1)));
    }
    @Test
    public void testIsChromePreStableInstalled() {
        // Registers browser packages one by one against the shadow
        // PackageManager and verifies isChromePreStableInstalled() only
        // becomes true for Chrome pre-stable channel packages.
        List<ResolveInfo> infoList = new ArrayList<>();
        ShadowPackageManager packageManager =
                Shadows.shadowOf(RuntimeEnvironment.application.getPackageManager());
        // Setting android_manifest in the junit_binary build rule causes the current package to
        // appear in the PackageManager.
        packageManager.deletePackage(RuntimeEnvironment.application.getPackageName());
        DefaultBrowserPromoDeps deps = DefaultBrowserPromoDeps.getInstance();

        // Stable channel alone must not count as pre-stable.
        infoList.add(createResolveInfo(DefaultBrowserPromoDeps.CHROME_STABLE_PACKAGE_NAME, 1));
        packageManager.addResolveInfoForIntent(PackageManagerUtils.BROWSER_INTENT, infoList);
        Assert.assertFalse("Chrome stable should not be counted as a pre-stable channel",
                deps.isChromePreStableInstalled());

        // A non-Chrome package must not count either.
        infoList.add(createResolveInfo("com.android.chrome.123", 1));
        packageManager.addResolveInfoForIntent(PackageManagerUtils.BROWSER_INTENT, infoList);
        Assert.assertFalse("A random package should not be counted as a pre-stable channel",
                deps.isChromePreStableInstalled());

        // Every known Chrome package other than stable must count.
        for (String name : DefaultBrowserPromoDeps.CHROME_PACKAGE_NAMES) {
            if (name.equals(DefaultBrowserPromoDeps.CHROME_STABLE_PACKAGE_NAME)) continue;
            List<ResolveInfo> list = new ArrayList<>(infoList);
            list.add(createResolveInfo(name, 1));
            packageManager.addResolveInfoForIntent(PackageManagerUtils.BROWSER_INTENT, list);
            Assert.assertTrue(name + " should be considered as a pre-stable channel",
                    deps.isChromePreStableInstalled());
        }
    }
    @Test
    public void testIsCurrentDefaultBrowserChrome() {
        // Every listed Chrome package name is recognized as Chrome; an
        // arbitrary package name is not.
        DefaultBrowserPromoDeps deps = DefaultBrowserPromoDeps.getInstance();
        for (String name : DefaultBrowserPromoDeps.CHROME_PACKAGE_NAMES) {
            Assert.assertTrue(name + " should be considered as a chrome channel",
                    deps.isCurrentDefaultBrowserChrome(createResolveInfo(name, 1)));
        }
        Assert.assertFalse("A random string should not be considered as a chrome channel",
                deps.isCurrentDefaultBrowserChrome(
                        createResolveInfo("com.android.chrome.random.string", 1)));
    }
/** Baseline: with all preconditions stubbed to pass, the promo should be shown. */
@Test
public void testBasicPromo() {
    setDepsMockWithDefaultValues();
    Assert.assertTrue("Should promo disambiguation sheet on Q.",
            DefaultBrowserPromoUtils.shouldShowPromo(mDeps, null));
}
// --- Q above ---
/** On Q+ with the role available and no default browser, the promo should be shown. */
@Test
public void testPromo_Q_No_Default() {
    setDepsMockWithDefaultValues();
    when(mDeps.isRoleAvailable(any())).thenReturn(true);
    Assert.assertTrue("Should promo role manager when there is no default browser on Q+.",
            DefaultBrowserPromoUtils.shouldShowPromo(mDeps, null));
}
/** On Q+ the promo should still be shown when a non-Chrome browser is the current default. */
@Test
public void testPromo_Q_Other_Default() {
    setDepsMockWithDefaultValues();
    when(mDeps.isRoleAvailable(any())).thenReturn(true);
    // match == 1 marks a concrete (non-placeholder) default; see createResolveInfo().
    when(mDeps.getDefaultWebBrowserActivityResolveInfo())
            .thenReturn(createResolveInfo("android", 1));
    Assert.assertTrue("Should promo role manager when there is another default browser on Q+.",
            DefaultBrowserPromoUtils.shouldShowPromo(mDeps, null));
}
// --- P below ---
/** The promo relies on the RoleManager flow, so it must never be shown on P and below. */
@Test
public void testNoPromo_P() {
    setDepsMockWithDefaultValues();
    when(mDeps.getSDKInt()).thenReturn(Build.VERSION_CODES.P);
    // Use the real role check so the P code path is exercised.
    when(mDeps.isRoleAvailable(any())).thenCallRealMethod();
    Assert.assertFalse(
            "Should not promo on P-.", DefaultBrowserPromoUtils.shouldShowPromo(mDeps, null));
}
// --- prerequisites ---
/** Promo may be shown as long as the promo count stays below its maximum. */
@Test
public void testPromo_increasedPromoCount() {
    setDepsMockWithDefaultValues();
    when(mDeps.getMaxPromoCount()).thenReturn(100);
    when(mDeps.getPromoCount()).thenReturn(99);
    Assert.assertTrue("Should promo when promo count does not reach the upper limit.",
            DefaultBrowserPromoUtils.shouldShowPromo(mDeps, null));
}
/** Once the promo count reaches the maximum, the promo must be suppressed. */
@Test
public void testNoPromo_greaterThanMaxPromoCount() {
    setDepsMockWithDefaultValues();
    when(mDeps.getPromoCount()).thenReturn(1);
    when(mDeps.getMaxPromoCount()).thenReturn(1);
    Assert.assertFalse("Should not promo when promo count reaches the upper limit.",
            DefaultBrowserPromoUtils.shouldShowPromo(mDeps, null));
}
/** The feature flag gates everything: no promo while it is disabled. */
@Test
public void testNoPromo_featureDisabled() {
    setDepsMockWithDefaultValues();
    when(mDeps.isFeatureEnabled()).thenReturn(false);
    Assert.assertFalse("Should not promo when the feature is disabled.",
            DefaultBrowserPromoUtils.shouldShowPromo(mDeps, null));
}
/** No promo until the user has accumulated the minimum number of sessions. */
@Test
public void testNoPromo_lessThanMinSessionCount() {
    setDepsMockWithDefaultValues();
    when(mDeps.getSessionCount()).thenReturn(1);
    when(mDeps.getMinSessionCount()).thenReturn(3);
    Assert.assertFalse(
            "Should not promo when session count has not reached the required amount.",
            DefaultBrowserPromoUtils.shouldShowPromo(mDeps, null));
}
/** No promo when another Chrome channel (e.g. stable) is already the default browser. */
@Test
public void testNoPromo_isOtherChromeDefault() {
    setDepsMockWithDefaultValues();
    when(mDeps.getDefaultWebBrowserActivityResolveInfo())
            .thenReturn(
                    createResolveInfo(DefaultBrowserPromoDeps.CHROME_STABLE_PACKAGE_NAME, 1));
    // Use the real channel check so the stable package is actually classified as Chrome.
    when(mDeps.isCurrentDefaultBrowserChrome(any())).thenCallRealMethod();
    Assert.assertFalse("Should not promo when another chrome channel browser has been default.",
            DefaultBrowserPromoUtils.shouldShowPromo(mDeps, null));
}
/** No promo when this very Chrome build is already the default browser. */
@Test
public void testNoPromo_isCurrentChromeDefault() {
    setDepsMockWithDefaultValues();
    when(mDeps.getDefaultWebBrowserActivityResolveInfo())
            .thenReturn(createResolveInfo(
                    ContextUtils.getApplicationContext().getPackageName(), 1));
    Assert.assertFalse("Should not promo when chrome has been default.",
            DefaultBrowserPromoUtils.shouldShowPromo(mDeps, null));
}
/** No promo when no activity resolves the browser intent at all. */
@Test
public void testNoPromo_webBrowserActivityNotExist() {
    setDepsMockWithDefaultValues();
    when(mDeps.getDefaultWebBrowserActivityResolveInfo()).thenReturn(null);
    Assert.assertFalse("Should not promo when web browser activity does not exist.",
            DefaultBrowserPromoUtils.shouldShowPromo(mDeps, null));
}
/**
 * Stubs every DefaultBrowserPromoDeps precondition to a promo-eligible baseline
 * (feature on, Q SDK, counts/intervals within limits, no default browser set).
 * Individual tests override a single stub to probe one condition at a time.
 */
private void setDepsMockWithDefaultValues() {
    when(mDeps.isFeatureEnabled()).thenReturn(true);
    when(mDeps.getMinSessionCount()).thenReturn(3);
    when(mDeps.getSessionCount()).thenReturn(10);
    when(mDeps.doesManageDefaultAppsSettingsActivityExist()).thenReturn(true);
    when(mDeps.getSDKInt()).thenReturn(Build.VERSION_CODES.Q);
    when(mDeps.isChromeStable()).thenReturn(false);
    when(mDeps.getPromoCount()).thenReturn(0);
    when(mDeps.getMaxPromoCount()).thenReturn(1);
    when(mDeps.getLastPromoInterval()).thenReturn(1000);
    when(mDeps.getMinPromoInterval()).thenReturn(10);
    when(mDeps.isChromePreStableInstalled()).thenReturn(false);
    when(mDeps.isCurrentDefaultBrowserChrome(any())).thenReturn(false);
    when(mDeps.isRoleAvailable(any())).thenReturn(true);
    // No Default
    when(mDeps.getDefaultWebBrowserActivityResolveInfo())
            .thenReturn(createResolveInfo("android", 0));
    when(mDeps.getCurrentDefaultBrowserState(any())).thenCallRealMethod();
}
/**
 * Builds a minimal ResolveInfo for the given package with the given match value
 * (tests use match == 0 for "no concrete default" and 1 for a real default).
 */
private ResolveInfo createResolveInfo(String packageName, int match) {
    ResolveInfo resolveInfo = new ResolveInfo();
    ActivityInfo activityInfo = new ActivityInfo();
    activityInfo.packageName = packageName;
    resolveInfo.activityInfo = activityInfo;
    resolveInfo.match = match;
    return resolveInfo;
}
}
| bsd-3-clause |
jdgarrett/geogig | src/core/src/main/java/org/locationtech/geogig/storage/datastream/v2_3/FloatPackedCoordinateSequence.java | 3554 | /* Copyright (c) 2017 Boundless and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Distribution License v1.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/org/documents/edl-v10.html
*
* Contributors:
* David Blasby (Boundless) - initial implementation
*/
package org.locationtech.geogig.storage.datastream.v2_3;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.CoordinateSequence;
import com.vividsolutions.jts.geom.impl.PackedCoordinateSequence.Float;
import java.util.List;
/**
 * A float-packed coordinate sequence whose serialized form is an
 * {@code int[dims][nCoords]} array of delta-encoded ordinate channels:
 *
 * <pre>
 * serializedForm[0] -- X channel, serializedForm[1] -- Y channel,
 * serializedForm[2] -- Z channel (only when the sequence is 3-dimensional).
 *
 * serializedForm[0][0] is the raw IEEE-754 int bit pattern of the first X
 * (Float.intBitsToFloat); each subsequent entry is the delta from the previous
 * ordinate's bit pattern.
 * </pre>
 *
 * Delta-encoding the Int32 bit patterns round-trips float values exactly while
 * keeping the integers small for good VarInt encoding.
 */
public class FloatPackedCoordinateSequence extends Float {

    /** Shared immutable empty 2D sequence. */
    public static final CoordinateSequence EMPTY_2D = new FloatPackedCoordinateSequence(2, 0);

    public FloatPackedCoordinateSequence(final int dimensions, List<Coordinate> coords) {
        super(coords.toArray(new Coordinate[coords.size()]), dimensions);
    }

    public FloatPackedCoordinateSequence(final int dimensions, final int initialSize) {
        super(initialSize, dimensions);
    }

    /** Rebuilds a sequence from its serialized form; dimension = number of channels. */
    public FloatPackedCoordinateSequence(int[][] serializedForm) {
        super(deserializeCoords(serializedForm), serializedForm.length);
    }

    /**
     * Serializes this sequence as delta-encoded int bit patterns, one channel
     * per dimension (see class javadoc).
     */
    public int[][] toSerializedForm() {
        final int dims = this.getDimension();
        final boolean hasZ = dims > 2;
        final int nCoords = size();
        int[] xs = new int[nCoords];
        int[] ys = new int[nCoords];
        int[] zs = hasZ ? new int[nCoords] : null;
        int prevX = 0;
        int prevY = 0;
        int prevZ = 0;
        float[] allOrdinates = getRawCoordinates();
        for (int t = 0; t < nCoords; t++) {
            int currentX = java.lang.Float.floatToRawIntBits(allOrdinates[t * dims]);
            int currentY = java.lang.Float.floatToRawIntBits(allOrdinates[t * dims + 1]);
            xs[t] = currentX - prevX;
            ys[t] = currentY - prevY;
            prevX = currentX;
            prevY = currentY;
            if (hasZ) {
                // BUG FIX: read the Z ordinate at offset +2; previously this
                // re-read the Y ordinate (+1), corrupting serialized Z values.
                int currentZ = java.lang.Float.floatToRawIntBits(allOrdinates[t * dims + 2]);
                zs[t] = currentZ - prevZ;
                prevZ = currentZ;
            }
        }
        if (hasZ) {
            return new int[][] { xs, ys, zs };
        }
        return new int[][] { xs, ys };
    }

    /** Inverse of {@link #toSerializedForm()}: expands deltas back to raw ordinates. */
    private static float[] deserializeCoords(int[][] serializedForm) {
        final int nCoords = serializedForm[0].length;
        final int dims = serializedForm.length;
        final boolean hasZ = dims > 2;
        // BUG FIX: allocate one slot per ordinate per dimension; the previous
        // "nCoords * 2" under-allocated for 3D input (ArrayIndexOutOfBounds).
        float[] result = new float[nCoords * dims];
        if (nCoords == 0) {
            return result; // empty
        }
        int x = 0;
        int y = 0;
        int z = 0;
        for (int t = 0; t < nCoords; t++) {
            x += serializedForm[0][t];
            y += serializedForm[1][t];
            result[t * dims] = java.lang.Float.intBitsToFloat(x);
            result[t * dims + 1] = java.lang.Float.intBitsToFloat(y);
            if (hasZ) {
                z += serializedForm[2][t];
                // BUG FIX: decode from the accumulated Z bits; previously the
                // Y accumulator was written into the Z slot.
                result[t * dims + 2] = java.lang.Float.intBitsToFloat(z);
            }
        }
        return result;
    }
}
| bsd-3-clause |
codeaudit/Foundry | Components/LearningCore/Test/gov/sandia/cognition/statistics/bayesian/BayesianUtilTest.java | 9863 | /*
* File: BayesianUtilTest.java
* Authors: Kevin R. Dixon
* Company: Sandia National Laboratories
* Project: Cognitive Foundry
*
* Copyright Apr 7, 2010, Sandia Corporation.
* Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
* license for use of this work by or on behalf of the U.S. Government.
* Export of this program may require a license from the United States
* Government. See CopyrightHistory.txt for complete details.
*
*/
package gov.sandia.cognition.statistics.bayesian;
import gov.sandia.cognition.statistics.bayesian.conjugate.UnivariateGaussianMeanBayesianEstimator;
import gov.sandia.cognition.statistics.bayesian.conjugate.UnivariateGaussianMeanVarianceBayesianEstimator;
import gov.sandia.cognition.statistics.distribution.NormalInverseGammaDistribution;
import gov.sandia.cognition.statistics.distribution.StudentTDistribution;
import gov.sandia.cognition.statistics.distribution.UnivariateGaussian;
import gov.sandia.cognition.statistics.method.ConfidenceInterval;
import gov.sandia.cognition.statistics.method.KolmogorovSmirnovConfidence;
import gov.sandia.cognition.statistics.method.StudentTConfidence;
import java.util.ArrayList;
import junit.framework.TestCase;
import java.util.Random;
/**
* Unit tests for BayesianUtilTest.
*
* @author krdixon
*/
public class BayesianUtilTest
    extends TestCase
{

    /**
     * Random number generator to use for a fixed random seed.
     */
    public final Random RANDOM = new Random( 1 );

    /**
     * Default tolerance of the regression tests, {@value}.
     */
    public final double TOLERANCE = 1e-5;

    /**
     * Number of samples, {@value}.
     */
    public final int NUM_SAMPLES = 1000;

    /**
     * Default confidence, {@value}.
     */
    public final double CONFIDENCE = 0.95;

    /**
     * Tests for class BayesianUtilTest.
     * @param testName Name of the test.
     */
    public BayesianUtilTest(
        String testName)
    {
        super(testName);
    }

    /**
     * Tests the constructors of class BayesianUtilTest.
     */
    public void testConstructors()
    {
        System.out.println( "Constructors" );
        BayesianUtil instance = new BayesianUtil();
        assertNotNull( instance );
    }

    /**
     * Test of logLikelihood method, of class BayesianUtil.
     * The utility result must equal the direct sum of per-sample log densities.
     */
    public void testLogLikelihood()
    {
        System.out.println("logLikelihood");
        UnivariateGaussian.PDF f = new UnivariateGaussian.PDF();
        ArrayList<Double> observations = f.sample(RANDOM, NUM_SAMPLES);
        double result = BayesianUtil.logLikelihood(f, observations);
        double logSum = 0.0;
        for( Double observation : observations )
        {
            logSum += f.logEvaluate(observation);
        }
        assertEquals(logSum, result, TOLERANCE);
    }

    /**
     * Samples from a conditional/posterior parameter pair and checks the draws
     * against the analytic predictive distribution with a K-S test.
     */
    public void testSampleParameter()
    {
        System.out.println( "sample" );

        double mean = RANDOM.nextGaussian();
        double variance = 1.0/RANDOM.nextDouble();
        UnivariateGaussian conditional = new UnivariateGaussian( mean, variance );
        UnivariateGaussianMeanVarianceBayesianEstimator estimator =
            new UnivariateGaussianMeanVarianceBayesianEstimator();
        ArrayList<Double> observations = conditional.sample(RANDOM, NUM_SAMPLES);
        NormalInverseGammaDistribution posterior = estimator.learn(observations);
        StudentTDistribution.CDF predictive =
            estimator.createPredictiveDistribution(posterior).getCDF();
        UnivariateGaussianMeanVarianceBayesianEstimator.Parameter parameter =
            new UnivariateGaussianMeanVarianceBayesianEstimator.Parameter(
                conditional, posterior);

        ArrayList<? extends Double> samples = BayesianUtil.sample(
            parameter, RANDOM, NUM_SAMPLES );
        KolmogorovSmirnovConfidence.Statistic kstest =
            KolmogorovSmirnovConfidence.evaluateNullHypothesis(samples, predictive);
        System.out.println( "K-S test:\n" + kstest );
        assertEquals( 1.0, kstest.getNullHypothesisProbability(), CONFIDENCE );
    }

    /**
     * Samples by name ("mean") from a conditional with a posterior over that
     * parameter and checks the draws against the predictive distribution.
     */
    public void testSample3()
    {
        System.out.println( "Sample3" );
        double mean = RANDOM.nextGaussian();
        double variance = 1.0/RANDOM.nextDouble();
        UnivariateGaussian conditional = new UnivariateGaussian( mean, variance );
        ArrayList<Double> observations = conditional.sample(RANDOM, NUM_SAMPLES);
        UnivariateGaussianMeanBayesianEstimator instance =
            new UnivariateGaussianMeanBayesianEstimator(variance);
        UnivariateGaussian posterior = instance.learn(observations);
        UnivariateGaussian.CDF predictive =
            instance.createPredictiveDistribution(posterior).getCDF();
        ArrayList<? extends Double> samples = BayesianUtil.sample(
            conditional, "mean", posterior, RANDOM, NUM_SAMPLES );
        KolmogorovSmirnovConfidence.Statistic kstest =
            KolmogorovSmirnovConfidence.evaluateNullHypothesis(samples, predictive);
        System.out.println( "K-S test:\n" + kstest );
        assertEquals( 1.0, kstest.getNullHypothesisProbability(), CONFIDENCE );
    }

    /**
     * Test of deviance method, of class BayesianUtil.
     */
    public void testDeviance()
    {
        System.out.println("deviance");

        double mean = RANDOM.nextGaussian();
        double variance = 1.0/RANDOM.nextDouble();
        UnivariateGaussian conditional = new UnivariateGaussian( mean, variance );
        UnivariateGaussianMeanVarianceBayesianEstimator estimator =
            new UnivariateGaussianMeanVarianceBayesianEstimator();
        ArrayList<Double> observations = conditional.sample(RANDOM, NUM_SAMPLES);
        NormalInverseGammaDistribution posterior = estimator.learn(observations);
        StudentTDistribution.CDF predictive =
            estimator.createPredictiveDistribution(posterior).getCDF();
        // BUG FIX: this test previously built its fixtures and made no
        // assertions at all, so it could never fail. Deviance of data drawn
        // from the fitted predictive distribution must at least be a finite
        // number (it is a sum of log-densities scaled by -2).
        double deviance = BayesianUtil.deviance(predictive, observations);
        assertFalse(Double.isNaN(deviance));
        assertFalse(Double.isInfinite(deviance));
    }

    /**
     * Test of expectedDeviance method, of class BayesianUtil: the Monte Carlo
     * credible interval should cover the analytic deviance.
     */
    public void testExpectedDeviance()
    {
        System.out.println("expectedDeviance");

        double mean = RANDOM.nextGaussian();
        double variance = 1.0/RANDOM.nextDouble();
        UnivariateGaussian conditional = new UnivariateGaussian( mean, variance );
        UnivariateGaussianMeanVarianceBayesianEstimator estimator =
            new UnivariateGaussianMeanVarianceBayesianEstimator();
        ArrayList<Double> observations = conditional.sample(RANDOM, NUM_SAMPLES);
        NormalInverseGammaDistribution posterior = estimator.learn(observations);
        StudentTDistribution predictive =
            estimator.createPredictiveDistribution(posterior);
        UnivariateGaussianMeanVarianceBayesianEstimator.Parameter parameter =
            new UnivariateGaussianMeanVarianceBayesianEstimator.Parameter(
                conditional, posterior);
        UnivariateGaussian expected = BayesianUtil.expectedDeviance(
            parameter, observations, RANDOM,NUM_SAMPLES);
        double result = BayesianUtil.deviance(predictive, observations);
        BayesianCredibleInterval vi =
            BayesianCredibleInterval.compute(expected,CONFIDENCE);
        System.out.println( "Result: " + result );
        System.out.println( "Interval: " + vi );
        assertTrue( vi.withinInterval(result) );
    }

    /**
     * Test of getMean method, of class UnivariateMonteCarloIntegrator: sample
     * means/variances over many repetitions should bracket the analytic values.
     */
    public void testGetMean()
    {
        System.out.println("getMean");
        double mean = RANDOM.nextGaussian();
        double precision = RANDOM.nextDouble();
        StudentTDistribution targetDistribution =
            new StudentTDistribution( 4.0, mean, precision );
        System.out.println( "Target = " + targetDistribution );
        int num = 100;
        ArrayList<Double> means = new ArrayList<Double>( num );
        ArrayList<Double> variances = new ArrayList<Double>( num );
        for( int n = 0; n < num; n++ )
        {
            UnivariateGaussian g = BayesianUtil.getMean(
                targetDistribution.sample(RANDOM, NUM_SAMPLES ) );
            means.add( g.getMean() );
            variances.add( g.getVariance() );
        }

        UnivariateGaussian sampleMeanDistribution =
            UnivariateGaussian.MaximumLikelihoodEstimator.learn(means, 0.0);
        UnivariateGaussian sampleVarianceDistribution =
            UnivariateGaussian.MaximumLikelihoodEstimator.learn(variances, 0.0);

        ConfidenceInterval ci = StudentTConfidence.INSTANCE.computeConfidenceInterval(
            sampleMeanDistribution.getMean(), sampleMeanDistribution.getVariance(), num, CONFIDENCE );
        System.out.println( "====== Mean ========" );
        System.out.println( "Target = " + targetDistribution );
        System.out.println( "Sample = " + sampleMeanDistribution );
        System.out.println( "Interval = " + ci );
        assertTrue( ci.withinInterval( targetDistribution.getMean() ) );

        ConfidenceInterval vi = StudentTConfidence.INSTANCE.computeConfidenceInterval(
            sampleVarianceDistribution.getMean(), sampleVarianceDistribution.getVariance(), num, CONFIDENCE );
        double varianceTarget = targetDistribution.getVariance() / NUM_SAMPLES;
        System.out.println( "====== Variance =======" );
        System.out.println( "Target = " + varianceTarget );
        System.out.println( "Sample = " + sampleVarianceDistribution );
        System.out.println( "Interval = " + vi );
        assertTrue( vi.withinInterval(varianceTarget) );
    }

}
| bsd-3-clause |
MichelleAppel/WhaThisApp | app/src/androidTest/java/mobapptut/com/camera2videoimage/ApplicationTest.java | 362 | package mobapptut.com.camera2videoimage;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
    /** Runs the (now-deprecated) instrumentation harness against the default Application class. */
    public ApplicationTest() {
        super(Application.class);
    }
}
selvasingh/azure-sdk-for-java | sdk/cognitiveservices/ms-azure-cs-newssearch/src/main/java/com/microsoft/azure/cognitiveservices/search/newssearch/models/ErrorSubCode.java | 2688 | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.cognitiveservices.search.newssearch.models;
import java.util.Collection;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.microsoft.rest.ExpandableStringEnum;
/**
 * Defines values for ErrorSubCode.
 * <p>
 * Expandable string enum: {@link #fromString(String)} accepts and interns any
 * value the service returns, so new sub-codes do not break deserialization.
 */
public final class ErrorSubCode extends ExpandableStringEnum<ErrorSubCode> {
    /** Static value UnexpectedError for ErrorSubCode. */
    public static final ErrorSubCode UNEXPECTED_ERROR = fromString("UnexpectedError");

    /** Static value ResourceError for ErrorSubCode. */
    public static final ErrorSubCode RESOURCE_ERROR = fromString("ResourceError");

    /** Static value NotImplemented for ErrorSubCode. */
    public static final ErrorSubCode NOT_IMPLEMENTED = fromString("NotImplemented");

    /** Static value ParameterMissing for ErrorSubCode. */
    public static final ErrorSubCode PARAMETER_MISSING = fromString("ParameterMissing");

    /** Static value ParameterInvalidValue for ErrorSubCode. */
    public static final ErrorSubCode PARAMETER_INVALID_VALUE = fromString("ParameterInvalidValue");

    /** Static value HttpNotAllowed for ErrorSubCode. */
    public static final ErrorSubCode HTTP_NOT_ALLOWED = fromString("HttpNotAllowed");

    /** Static value Blocked for ErrorSubCode. */
    public static final ErrorSubCode BLOCKED = fromString("Blocked");

    /** Static value AuthorizationMissing for ErrorSubCode. */
    public static final ErrorSubCode AUTHORIZATION_MISSING = fromString("AuthorizationMissing");

    /** Static value AuthorizationRedundancy for ErrorSubCode. */
    public static final ErrorSubCode AUTHORIZATION_REDUNDANCY = fromString("AuthorizationRedundancy");

    /** Static value AuthorizationDisabled for ErrorSubCode. */
    public static final ErrorSubCode AUTHORIZATION_DISABLED = fromString("AuthorizationDisabled");

    /** Static value AuthorizationExpired for ErrorSubCode. */
    public static final ErrorSubCode AUTHORIZATION_EXPIRED = fromString("AuthorizationExpired");

    /**
     * Creates or finds a ErrorSubCode from its string representation.
     * @param name a name to look for
     * @return the corresponding ErrorSubCode
     */
    @JsonCreator
    public static ErrorSubCode fromString(String name) {
        return fromString(name, ErrorSubCode.class);
    }

    /**
     * @return known ErrorSubCode values
     */
    public static Collection<ErrorSubCode> values() {
        return values(ErrorSubCode.class);
    }
}
| mit |
aspose-cells/Aspose.Cells-for-Cloud | SDKs/Aspose.Cells-Cloud-SDK-for-Java/src/main/java/com/aspose/cells/model/ValidationResponse.java | 1325 | package com.aspose.cells.model;
/**
 * Response wrapper holding a {@link Validation} payload together with the
 * service call's status code and status text.
 */
public class ValidationResponse {

    // NOTE: fields were renamed from PascalCase identifiers that shadowed
    // their own types ("Validation Validation", "String Code", ...) to
    // standard camelCase. They are private, so the public getter/setter
    // interface and the toString() output are unchanged.
    private Validation validation = null;

    private String code = null;

    private String status = null;

    /**
     * getValidation
     * Gets Validation
     * @return Validation
     */
    public Validation getValidation() {
        return validation;
    }

    /**
     * setValidation
     * Sets Validation
     * @param Validation Validation
     */
    public void setValidation(Validation Validation) {
        this.validation = Validation;
    }

    /**
     * getCode
     * Gets String
     * @return Code
     */
    public String getCode() {
        return code;
    }

    /**
     * setCode
     * Sets String
     * @param Code String
     */
    public void setCode(String Code) {
        this.code = Code;
    }

    /**
     * getStatus
     * Gets String
     * @return Status
     */
    public String getStatus() {
        return status;
    }

    /**
     * setStatus
     * Sets String
     * @param Status String
     */
    public void setStatus(String Status) {
        this.status = Status;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class ValidationResponse {\n");
        sb.append(" Validation: ").append(validation).append("\n");
        sb.append(" Code: ").append(code).append("\n");
        sb.append(" Status: ").append(status).append("\n");
        sb.append("}\n");
        return sb.toString();
    }
}
| mit |
ahmedvc/umple | Umplificator/UmplifiedProjects/weka-umplified-0/src/test/java/weka/filters/unsupervised/attribute/RenameNominalValuesTest.java | 5676 | /*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Copyright (C) 2002 University of Waikato
*/
package weka.filters.unsupervised.attribute;
import junit.framework.Test;
import junit.framework.TestSuite;
import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;
import weka.filters.AbstractFilterTest;
import weka.filters.Filter;
/**
* Tests RenameNominalValues. Run from the command line with:
* <p>
* java weka.filters.unsupervised.attribute.RemoveTest
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: 9982 $
*/
public class RenameNominalValuesTest extends AbstractFilterTest {

    public RenameNominalValuesTest(String name) {
        super(name);
    }

    /** Creates a default RenameNominalValues: rename value "b" to "bob" in attributes 2 and 5. */
    @Override
    public Filter getFilter() {
        return getFilter("2,5", "b:bob");
    }

    /**
     * Creates a RenameNominalValues configured with the given attribute range
     * and value-replacement specification; empty strings leave the defaults.
     */
    public Filter getFilter(String rangelist, String renameSpec) {
        RenameNominalValues af = new RenameNominalValues();

        if (rangelist.length() > 0) {
            af.setSelectedAttributes(rangelist);
        }

        if (renameSpec.length() > 0) {
            af.setValueReplacements(renameSpec);
        }

        return af;
    }

    /** With no selection and no replacement spec the filter must be a no-op. */
    public void testNoSelectedAttsNoReplaceSpec() {
        m_Filter = getFilter();
        ((RenameNominalValues) m_Filter).setSelectedAttributes("");
        ((RenameNominalValues) m_Filter).setValueReplacements("");
        Instances result = useFilter();

        assertEquals(m_Instances.numInstances(), result.numInstances());
        assertEquals(m_Instances.numAttributes(), result.numAttributes());

        // all instances should be unchanged
        for (int i = 0; i < result.numInstances(); i++) {
            Instance orig = m_Instances.instance(i);
            Instance filtered = result.instance(i);

            for (int j = 0; j < orig.numAttributes(); j++) {
                assertEquals(orig.value(j), filtered.value(j));
            }
        }
    }

    // NOTE: assertEquals argument order below was fixed throughout to the
    // JUnit convention (expected first, actual second) so that failure
    // messages report the values correctly. The checked behavior is unchanged.
    public void testTypical() {
        m_Filter = getFilter();
        Instances result = useFilter();
        assertEquals(m_Instances.numAttributes(), result.numAttributes());
        assertEquals(m_Instances.numInstances(), result.numInstances());

        // shouldn't be any 'b' values in the header - they should now
        // be 'bob'
        Attribute first = result.attribute(1);
        Attribute second = result.attribute(4);

        assertEquals("bob", first.value(2));
        assertEquals("bob", second.value(1));

        // check an instance
        Instance inst = result.instance(1);
        assertEquals("bob", inst.stringValue(1));
        assertEquals("bob", inst.stringValue(4));
    }

    public void testTypical2() {
        m_Filter = getFilter("2", "b:bob");
        Instances result = useFilter();
        assertEquals(m_Instances.numAttributes(), result.numAttributes());
        assertEquals(m_Instances.numInstances(), result.numInstances());

        // shouldn't be any 'b' values in the header for attribute 2
        // - they should now be 'bob'
        Attribute first = result.attribute(1);
        Attribute second = result.attribute(4);

        assertEquals("bob", first.value(2));

        // check that the other nominal attribute is unchanged
        assertEquals("b", second.value(1));

        // check an instance
        Instance inst = result.instance(1);
        assertEquals("bob", inst.stringValue(1));
        assertEquals("b", inst.stringValue(4));
    }

    /** Inverting an empty selection selects every attribute. */
    public void testInverted1() {
        m_Filter = getFilter("", "b:bob");
        ((RenameNominalValues) m_Filter).setInvertSelection(true);
        Instances result = useFilter();
        assertEquals(m_Instances.numAttributes(), result.numAttributes());
        assertEquals(m_Instances.numInstances(), result.numInstances());

        // shouldn't be any 'b' values in the header - they should now
        // be 'bob'
        Attribute first = result.attribute(1);
        Attribute second = result.attribute(4);

        assertEquals("bob", first.value(2));
        assertEquals("bob", second.value(1));

        // check an instance
        Instance inst = result.instance(1);
        assertEquals("bob", inst.stringValue(1));
        assertEquals("bob", inst.stringValue(4));
    }

    /**
     * tests the filter in conjunction with the FilteredClassifier
     */
    @Override
    public void testFilteredClassifier() {
        try {
            Instances data = getFilteredClassifierData();

            // point the filter at the first non-class nominal attribute
            for (int i = 0; i < data.numAttributes(); i++) {
                if (data.classIndex() == i)
                    continue;
                if (data.attribute(i).isNominal()) {
                    ((RenameNominalValues) m_FilteredClassifier.getFilter())
                        .setSelectedAttributes("" + (i + 1));
                    break;
                }
            }
        } catch (Exception e) {
            fail("Problem setting up test for FilteredClassifier: " + e.toString());
        }

        super.testFilteredClassifier();
    }

    public static Test suite() {
        return new TestSuite(RenameNominalValuesTest.class);
    }

    public static void main(String[] args) {
        junit.textui.TestRunner.run(suite());
    }
}
| mit |
Jpoliachik/react-native-navigation | lib/android/app/src/main/java/com/reactnativenavigation/views/Fab.java | 2363 | package com.reactnativenavigation.views;
import android.content.Context;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffColorFilter;
import android.graphics.drawable.Drawable;
import android.support.annotation.NonNull;
import com.github.clans.fab.FloatingActionButton;
import com.reactnativenavigation.anim.FabAnimator;
import com.reactnativenavigation.anim.FabCollapseBehaviour;
import com.reactnativenavigation.interfaces.ScrollEventListener;
import com.reactnativenavigation.parse.params.Colour;
import com.reactnativenavigation.utils.ImageLoader;
import com.reactnativenavigation.utils.ImageLoadingListenerAdapter;
import java.util.Collections;
import java.util.List;
public class Fab extends FloatingActionButton implements FabAnimator {
    // Identifier distinguishing this FAB; also the sole basis for equals()/hashCode().
    private String id = "";
    // Drives show/hide of the button in response to scroll events.
    private FabCollapseBehaviour collapseBehaviour;

    public Fab(Context context, String id) {
        super(context);
        collapseBehaviour = new FabCollapseBehaviour(this);
        this.id = id;
    }

    /**
     * Asynchronously loads {@code icon} and sets it as this button's image,
     * tinting it with {@code color} when a color value is supplied.
     */
    public void applyIcon(String icon, Colour color) {
        new ImageLoader().loadIcons(getContext(), Collections.singletonList(icon), new ImageLoadingListenerAdapter() {
            @Override
            public void onComplete(@NonNull List<Drawable> drawables) {
                // SRC_IN keeps the icon's alpha shape but replaces its color.
                if (color.hasValue()) drawables.get(0).setColorFilter(new PorterDuffColorFilter(color.get(), PorterDuff.Mode.SRC_IN));
                setImageDrawable(drawables.get(0));
            }

            @Override
            public void onError(Throwable error) {
                // NOTE(review): load failures are only printed and the FAB is
                // left without an icon — consider routing through a logger.
                error.printStackTrace();
            }
        });
    }

    // Equality is by id only, so options updates can match FABs across recreations.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        Fab fab = (Fab) o;

        return id.equals(fab.id);
    }

    @Override
    public int hashCode() {
        return id.hashCode();
    }

    // show/hide with the animated variants of the superclass methods.
    @Override
    public void show() {
        show(true);
    }

    @Override
    public void hide() {
        hide(true);
    }

    /** Starts collapsing/expanding this FAB in response to the given scroll events. */
    public void enableCollapse(@NonNull ScrollEventListener scrollEventListener) {
        collapseBehaviour.enableCollapse(scrollEventListener);
    }

    public void disableCollapse() {
        collapseBehaviour.disableCollapse();
    }

    public String getFabId() {
        return id;
    }
}
| mit |
phil-lopreiato/the-blue-alliance-android | android/src/main/java/com/thebluealliance/androidclient/comparators/MatchSortByDisplayOrderComparator.java | 451 | package com.thebluealliance.androidclient.comparators;
import com.thebluealliance.androidclient.models.Match;
import java.util.Comparator;
/**
 * Orders {@link Match} instances by their display-order value, ascending.
 */
public class MatchSortByDisplayOrderComparator implements Comparator<Match> {

    @Override
    public int compare(Match match, Match match2) {
        return match.getDisplayOrder().compareTo(match2.getDisplayOrder());
    }

    /**
     * BUG FIX: the previous override returned {@code false} unconditionally,
     * which violates the reflexivity requirement of the {@link Object#equals}
     * contract (an instance did not even equal itself). This comparator is
     * stateless, so all instances are interchangeable.
     */
    @Override
    public boolean equals(Object o) {
        return o instanceof MatchSortByDisplayOrderComparator;
    }

    /** Kept consistent with {@link #equals(Object)}: all instances share one hash. */
    @Override
    public int hashCode() {
        return MatchSortByDisplayOrderComparator.class.hashCode();
    }
}
| mit |
TypeFox/che | plugins/plugin-java/che-plugin-java-ext-lang-client/src/main/java/org/eclipse/che/ide/ext/java/client/tree/JavaNodeFactory.java | 1595 | /*
* Copyright (c) 2012-2017 Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
package org.eclipse.che.ide.ext.java.client.tree;
import com.google.common.annotations.Beta;
import org.eclipse.che.ide.api.data.tree.settings.NodeSettings;
import org.eclipse.che.ide.api.resources.Container;
import org.eclipse.che.ide.ext.java.client.tree.library.JarFileNode;
import org.eclipse.che.ide.ext.java.client.tree.library.JarFolderNode;
import org.eclipse.che.ide.ext.java.client.tree.library.JarNode;
import org.eclipse.che.ide.ext.java.client.tree.library.LibrariesNode;
import org.eclipse.che.ide.ext.java.shared.Jar;
import org.eclipse.che.ide.ext.java.shared.JarEntry;
import org.eclipse.che.ide.resource.Path;
import org.eclipse.che.ide.resources.tree.ResourceNode.NodeFactory;
/**
 * Assisted-injection factory for the Java-specific project tree nodes
 * (packages, libraries and jar contents).
 *
 * @author Vlad Zhukovskiy
 */
@Beta
public interface JavaNodeFactory extends NodeFactory {
    /** Creates a node representing a Java package backed by the given container. */
    PackageNode newPackage(Container resource, NodeSettings nodeSettings);

    /** Creates the "External Libraries" node for the project at {@code project}. */
    LibrariesNode newLibrariesNode(Path project, NodeSettings nodeSettings);

    /** Creates a node for a single jar dependency of the given project. */
    JarNode newJarNode(Jar jar, Path project, NodeSettings nodeSettings);

    /** Creates a node for a folder entry inside the jar identified by {@code libId}. */
    JarFolderNode newJarFolderNode(
        JarEntry jarEntry, int libId, Path project, NodeSettings nodeSettings);

    /** Creates a node for a file entry inside the jar identified by {@code libId}. */
    JarFileNode newJarFileNode(JarEntry jarEntry, int libId, Path project, NodeSettings nodeSettings);
}
| epl-1.0 |
TypeFox/che | ide/che-core-ide-api/src/main/java/org/eclipse/che/ide/api/editor/document/DocumentEventBus.java | 651 | /*
* Copyright (c) 2012-2017 Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
package org.eclipse.che.ide.api.editor.document;
import com.google.web.bindery.event.shared.SimpleEventBus;
/**
 * EventBus dedicated to a single document.<br>
 * Sub-classed purely to provide strong typing: events for one document travel
 * on that document's own channel and cannot leak to other documents' listeners.
 */
public class DocumentEventBus extends SimpleEventBus {}
| epl-1.0 |
jboss-reddeer/reddeer | plugins/org.eclipse.reddeer.eclipse/src/org/eclipse/reddeer/eclipse/ui/perspectives/DatabaseDebugPerspective.java | 838 | /*******************************************************************************
* Copyright (c) 2017 Red Hat, Inc and others.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0.
*
* SPDX-License-Identifier: EPL-2.0
*
* Contributors:
* Red Hat, Inc - initial API and implementation
*******************************************************************************/
package org.eclipse.reddeer.eclipse.ui.perspectives;
/**
 * Database Debug perspective implementation — targets the Eclipse perspective
 * whose label is "Database Debug".
 *
 * @author rhopp
 */
public class DatabaseDebugPerspective extends AbstractPerspective {

    /**
     * Constructs the perspective with "Database Debug".
     */
    public DatabaseDebugPerspective() {
        super("Database Debug");
    }
}
| epl-1.0 |
ibh-systems/packagedrone | bundles/org.eclipse.packagedrone.utils/src/org/eclipse/packagedrone/utils/io/IOConsumer.java | 709 | /*******************************************************************************
* Copyright (c) 2015 IBH SYSTEMS GmbH.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBH SYSTEMS GmbH - initial API and implementation
*******************************************************************************/
package org.eclipse.packagedrone.utils.io;
import java.io.IOException;
/**
 * A {@code Consumer}-like functional interface whose single abstract method
 * is allowed to throw an {@link IOException}, so it can be used where
 * {@code java.util.function.Consumer} cannot (lambdas performing I/O).
 *
 * @param <T> the type of value accepted
 */
@FunctionalInterface
public interface IOConsumer<T>
{
    /**
     * Consumes the given value, possibly performing I/O.
     *
     * @param data the value to consume
     * @throws IOException if the underlying I/O operation fails
     */
    public void accept ( T data ) throws IOException;
}
| epl-1.0 |
eclipse/packagedrone | bundles/org.eclipse.packagedrone.job/src/org/eclipse/packagedrone/job/ErrorInformation.java | 1548 | /*******************************************************************************
* Copyright (c) 2015 IBH SYSTEMS GmbH.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBH SYSTEMS GmbH - initial API and implementation
*******************************************************************************/
package org.eclipse.packagedrone.job;
import org.eclipse.scada.utils.ExceptionHelper;
/**
 * Immutable holder for the different renderings of a {@link Throwable}:
 * its message, its fully formatted stack trace, and the formatted trace of
 * its root cause.
 */
public class ErrorInformation
{
    private final String message;

    private final String formatted;

    private final String rootFormatted;

    ErrorInformation ( final String message, final String formatted, final String rootFormatted )
    {
        this.message = message;
        this.formatted = formatted;
        this.rootFormatted = rootFormatted;
    }

    /**
     * Builds an {@link ErrorInformation} from a throwable.
     *
     * @param e the throwable to capture, may be {@code null}
     * @return the captured information, or {@code null} when {@code e} is {@code null}
     */
    public static ErrorInformation createFrom ( final Throwable e )
    {
        if ( e == null )
        {
            return null;
        }
        final String message = ExceptionHelper.getMessage ( e );
        final String formatted = ExceptionHelper.formatted ( e );
        final String rootFormatted = ExceptionHelper.formatted ( ExceptionHelper.getRootCause ( e ) );
        return new ErrorInformation ( message, formatted, rootFormatted );
    }

    /** @return the throwable's message */
    public String getMessage ()
    {
        return this.message;
    }

    /** @return the formatted representation of the full throwable */
    public String getFormatted ()
    {
        return this.formatted;
    }

    /** @return the formatted representation of the root cause */
    public String getRootFormatted ()
    {
        return this.rootFormatted;
    }
}
| epl-1.0 |
purestorage-partnerconnect/ucs-director-plugin | UCSD 5.5 Connector/FlashArray UCSD Adapter and Source Code v1.2 (UCSD 5.5)/src/com/cloupia/feature/purestorage/tasks/DeleteHostTask.java | 4846 | package com.cloupia.feature.purestorage.tasks;
import com.cisco.cuic.api.client.WorkflowInputFieldTypeDeclaration;
import com.cloupia.feature.purestorage.constants.PureConstants;
import com.cloupia.service.cIM.inframgr.TaskOutputDefinition;
import com.cloupia.service.cIM.inframgr.customactions.CustomActionLogger;
import com.cloupia.service.cIM.inframgr.customactions.CustomActionTriggerContext;
import com.purestorage.rest.host.PureHostConnection;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
/**
 * UCS Director custom task that deletes a host object on a Pure Storage
 * FlashArray, after disconnecting its private volumes and removing it from
 * its host group. Also serves as the rollback task for {@code NewHostTask}
 * (see the change-tracker registration below).
 */
public class DeleteHostTask extends GeneralTask
{
    /** Registers the task name and its config class with the framework. */
    public DeleteHostTask()
    {
        super(PureConstants.TASK_NAME_DELETE_HOST, "com.cloupia.feature.purestorage.tasks.DeleteHostTaskConfig");
    }
    /**
     * Executes the deletion:
     * snapshots the host's WWNs/IQNs/private volumes/host group into the config
     * (so a rollback re-create is possible), then — unless the host pre-existed —
     * disconnects private volumes, removes the host from its host group and
     * deletes it, saving the host identity ("account@host") as task output.
     *
     * @param context   workflow trigger context; supplies the task config and change tracker
     * @param actionlogger logger for user-visible task progress
     * @throws Exception on any REST/API failure (propagated to the framework)
     */
    @Override
    public void executeCustomAction(CustomActionTriggerContext context, CustomActionLogger actionlogger) throws Exception
    {
        DeleteHostTaskConfig config = (DeleteHostTaskConfig) context.loadConfigObject();
        // GeneralTask resolves the account/CLIENT connection from accountName.
        super.accountName = config.getAccountName();
        super.executeCustomAction(context, actionlogger);
        actionlogger.addInfo("finished checking DeleteHostTask accountname");
        String hostName = config.getHostName();
        Boolean newHostTaskFlag = config.getNewHostFlag();
        Boolean existHost = config.getExistHost();
        String hostGroupName = "";
        String wwns = "";
        String iqns = "";
        StringBuilder privateVolumes = new StringBuilder();
        // Capture the host's current state before touching anything, so the
        // rollback (NewHostTask) can restore it.
        List<String> wwnsList = CLIENT.hosts().get(hostName).getWwnList();
        List<String> iqnsList = CLIENT.hosts().get(hostName).getIqnList();
        List<PureHostConnection> privateConnectedVolumes = CLIENT.hosts().getPrivateConnections(hostName);
        List<PureHostConnection> sharedConnectedVolumes = CLIENT.hosts().getSharedConnections(hostName);
        config.setDeleteHostFlag(true);
        if(newHostTaskFlag != null)
        {
            actionlogger.addInfo("This is a rollback task to delete a new host " + hostName);
        }
        else
        {
            // NOTE(review): when this is not a rollback, existHost is forced to
            // false, which makes the deletion branch below run regardless of the
            // configured value — confirm this is intended.
            existHost=false;
        }
        actionlogger.addInfo("Deleting host " + hostName + " on Pure FlashArray [" + flashArrayAccount.getManagementAddress() + "]");
        wwns = StringUtils.join(wwnsList, ",");
        iqns = StringUtils.join(iqnsList, ",");
        for(PureHostConnection volumeConnection : privateConnectedVolumes)
        {
            String volumeName = volumeConnection.getVolumeName();
            // Trailing comma is left in place; presumably consumed as a CSV list.
            privateVolumes.append(volumeName + ",");
        }
        actionlogger.addInfo("private volume is " + privateVolumes);
        if(sharedConnectedVolumes.size()>0)
        {
            // All shared connections belong to the same host group, so the first
            // entry's group name is representative.
            hostGroupName = sharedConnectedVolumes.get(0).getHostGroupName();
            actionlogger.addInfo("hostgroup name is " + hostGroupName);
        }
        // Persist the captured state into the config for the rollback task.
        config.setWwns(wwns);
        config.setIqns(iqns);
        config.setPrivateVolumes(privateVolumes.toString());
        config.setHostGroupName(hostGroupName);
        //config.setExistHost(existHost);
        // actionlogger.addInfo("Exist Host "+existHost);
        if(existHost != null && !existHost)
        {
            // Order matters: volumes must be disconnected and the host removed
            // from its group before the host object itself can be deleted.
            for(PureHostConnection volumeConnection : privateConnectedVolumes)
            {
                String volumeName = volumeConnection.getVolumeName();
                CLIENT.hosts().disconnectVolume(hostName, volumeName);
            }
            if(!hostGroupName.equals(""))
            {
                List<String> tempHostList = new ArrayList<String>();
                tempHostList.add(hostName);
                CLIENT.hostGroups().removeHosts(hostGroupName, tempHostList);
            }
            CLIENT.hosts().delete(hostName);
            actionlogger.addInfo("Successfully deleted host " + hostName + "on Pure FlashArray [" + flashArrayAccount.getManagementAddress() + "]");
            // Register NewHostTask as the undo action for this deletion.
            context.getChangeTracker().undoableResourceModified("AssetType", "idstring", "DeleteHost",
                    "Host has been deleted on " + config.getAccountName(),
                    new NewHostTask().getTaskName(), new NewHostTaskConfig(config));
            String hostIdentity =accountName+"@"+hostName;
            context.saveOutputValue(PureConstants.TASK_OUTPUT_NAME_HOST_IDENTITY, hostIdentity);
            actionlogger.addInfo("Host Identity as Output is saved");
        }
        else
        {
            actionlogger.addInfo("This host cannot be deleted!");
        }
    }
    /**
     * Declares the single task output: the host identity string
     * ("account@host") under {@code TASK_OUTPUT_NAME_HOST_IDENTITY}.
     */
    @Override
    public TaskOutputDefinition[] getTaskOutputDefinitions()
    {
        TaskOutputDefinition[] ops = new TaskOutputDefinition[1];
        ops[0] = new TaskOutputDefinition(
                PureConstants.TASK_OUTPUT_NAME_HOST_IDENTITY,
                WorkflowInputFieldTypeDeclaration.GENERIC_TEXT,
                "Host Identity");
        return ops;
    }
}
| gpl-2.0 |
knabar/openmicroscopy | components/blitz/src/ome/formats/importer/ImportSize.java | 2895 | /*
* Copyright 2009 Glencoe Software, Inc. All rights reserved.
* Use is subject to license terms supplied in LICENSE.txt
*/
package ome.formats.importer;
import omero.model.Pixels;
/**
 * Calculates the various dimensions of an image from a {@link Pixels} instance.
 * Besides the raw sizes, it derives the linearized plane-index strides
 * ({@code zSize}, {@code wSize}, {@code tSize}) implied by the dimension
 * order string ("W" denotes wavelength, i.e. channel).
 *
 * @since Beta4.1
 */
public class ImportSize {
    public final String fileName;
    public final Pixels pixels;
    public final String dimOrder;
    public final int sizeX, sizeY, sizeZ, sizeC, sizeT, imageCount;
    // Plane-index strides: how far the linear plane index advances per unit
    // step in Z, channel (wavelength) and time, respectively.
    public final int zSize, wSize, tSize;
    public ImportSize(String fileName, Pixels pixels, String dimOrder) {
        this.fileName = fileName;
        this.pixels = pixels;
        this.dimOrder = dimOrder;
        sizeZ = pixels.getSizeZ().getValue();
        sizeC = pixels.getSizeC().getValue();
        sizeT = pixels.getSizeT().getValue();
        sizeX = pixels.getSizeX().getValue();
        sizeY = pixels.getSizeY().getValue();
        // Total number of 2D planes in the image.
        imageCount = sizeZ * sizeC * sizeT;
        // Map the XY-prefixed dimension order string onto one of six cases.
        final int order = getSequenceNumber(dimOrder);
        // Stride of the fastest-varying (innermost) post-XY dimension.
        int smallOffset = 1;
        switch (order) {
        // ZTW sequence
        case 0:
            zSize = smallOffset;
            tSize = zSize * sizeZ;
            wSize = tSize * sizeT;
            break;
        // WZT sequence
        case 1:
            wSize = smallOffset;
            zSize = wSize * sizeC;
            tSize = zSize * sizeZ;
            break;
        // ZWT sequence
        case 2:
            zSize = smallOffset;
            wSize = zSize * sizeZ;
            tSize = wSize * sizeC;
            break;
        // TWZ sequence
        case 3:
            tSize = smallOffset;
            wSize = tSize * sizeT;
            zSize = wSize * sizeC;
            break;
        // WTZ sequence
        case 4:
            wSize = smallOffset;
            tSize = wSize * sizeC;
            zSize = tSize * sizeT;
            break;
        // TZW
        case 5:
            tSize = smallOffset;
            zSize = tSize * sizeT;
            wSize = zSize * sizeZ;
            break;
        default:
            throw new RuntimeException("Bad order");
        }
    }
    // Translates an OMERO DimensionOrder value (e.g. "XYZTC") into the case
    // index used by the constructor's switch; throws for unknown orders.
    private int getSequenceNumber(String dimOrder) {
        if (omero.model.enums.DimensionOrderXYZTC.value.equals(dimOrder))
            return 0;
        if (omero.model.enums.DimensionOrderXYCZT.value.equals(dimOrder))
            return 1;
        if (omero.model.enums.DimensionOrderXYZCT.value.equals(dimOrder))
            return 2;
        if (omero.model.enums.DimensionOrderXYTCZ.value.equals(dimOrder))
            return 3;
        if (omero.model.enums.DimensionOrderXYCTZ.value.equals(dimOrder))
            return 4;
        if (omero.model.enums.DimensionOrderXYTZC.value.equals(dimOrder))
            return 5;
        throw new RuntimeException(dimOrder + " not represented in "
                + "getSequenceNumber");
    }
}
| gpl-2.0 |
Distrotech/icedtea7 | test/jtreg/com/sun/javatest/TestFinderQueue.java | 24262 | /*
* $Id$
*
* Copyright 1996-2008 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
package com.sun.javatest;
import java.io.File;
import java.util.Hashtable;
import java.util.Vector;
import com.sun.javatest.util.DynamicArray;
import com.sun.javatest.util.Fifo;
import com.sun.javatest.util.I18NResourceBundle;
/**
 * An iterator-based interface to the tests in a test suite, as read by a test finder.
 * Files are read lazily (depth-first) as tests are requested via {@link #next},
 * with optional background read-ahead controlled by {@link #setReadAheadMode}.
 */
public class TestFinderQueue {
    /**
     * This interface provides a means for TestFinder to report on events that
     * might be of interest as it executes.
     */
    public static interface Observer
    {
        /**
         * Another file which needs to be read has been found.
         * @param file the file which was found
         */
        void found(File file);
        /**
         * A file is being read.
         * @param file the file being read
         */
        void reading(File file);
        /**
         * A file has been read.
         * @param file the file which was read
         */
        void done(File file);
        /**
         * A test description has been found.
         * @param td the test description which was found
         */
        void found(TestDescription td);
        /**
         * A test description which was previously found, has been rejected by
         * a test filter, and so has not been put in the queue of tests to be executed.
         * @param td the test description which was rejected by the filter
         * @param f the filter which rejected the test
         */
        void ignored(TestDescription td, TestFilter f);
        /**
         * A test description that was previously put in the test finder queue
         * has been taken from the queue and passed back to the client caller.
         * @param td the test description which was taken from the queue
         */
        void done(TestDescription td);
        /**
         * The queue of tests has been flushed.
         */
        void flushed();
        /**
         * An error was reported by the test finder while reading a file.
         * @param msg a detail message describing the error
         */
        void error(String msg);
        /**
         * An error was reported by the test finder while reading a file.
         * @param td the test description to which the error applies
         * @param msg a detail message describing the error
         */
        void error(TestDescription td, String msg);
    }
    /**
     * Create a test finder queue.
     */
    public TestFinderQueue() {
    }
    /**
     * Create a test finder queue, using a specified test finder.
     * @param finder the test finder to be used to read the tests
     */
    public TestFinderQueue(TestFinder finder) {
        setTestFinder(finder);
    }
    /**
     * Get the test finder being used by this object.
     * @return the test finder being used by this object
     * @see #setTestFinder
     */
    public TestFinder getTestFinder() {
        return testFinder;
    }
    /**
     * Set the test finder to be used by this object.
     * It may only be set once.
     * @param finder the test finder to be used by this object
     * @throws NullPointerException if the finder is null
     * @throws IllegalStateException if the finder has already been set
     * @see #getTestFinder
     */
    public void setTestFinder(TestFinder finder) {
        if (finder == null)
            throw new NullPointerException();
        if (testFinder != null && testFinder != finder)
            throw new IllegalStateException();
        this.testFinder = finder;
        // Route finder errors through this queue's error count and observers.
        testFinder.setErrorHandler(new TestFinder.ErrorHandler() {
                public void error(String msg) {
                    errorCount++;
                    notifier.error(msg);
                }
            });
    }
    /**
     * Set an array of filters that will be used to filter the tests read by the
     * test finder. Each test must be accepted by all the filters to be put in
     * the queue.
     * @param filters the filters to be used.
     */
    public void setFilters(TestFilter[] filters) {
        this.filters = filters;
    }
    /**
     * Set the initial set of files to be read by the test finder.
     * Additional files may be read as a result of reading these and subsequent files.
     * @param initTests the initial set of files to be read by the test finder
     */
    public synchronized void setTests(String[] initTests) {
        File testSuiteRoot = testFinder.getRoot();
        // make canonical copy of tests
        // really ought not to be using File, since the tests may contain a trailing #xxx
        File[] files;
        if (initTests == null)
            // ensure not null
            files = new File[] {testSuiteRoot};
        else {
            files = new File[initTests.length];
            for (int i = 0; i < initTests.length; i++) {
                files[i] = new File(initTests[i]);
            }
        }
        rootDir = (testSuiteRoot.isDirectory() ?
                   testSuiteRoot : new File(testSuiteRoot.getParent()));
        // build up the fifo of tests to be used by readNextFile
        tests = new Fifo();
        currInitialFile = null;
        // Two passes so that plain files are queued before "file#id" entries.
        for (int pass = 0; pass < 2; pass++) {
            for (int i = 0; i < files.length; i++) {
                File f = files[i];
                String n = f.getName();
                // in pass 0, only select initial files without #
                // in pass 1, only select initial files with #
                if ((n.indexOf("#") != -1) == (pass == 0))
                    continue;
                // ensure all absolute, or if relative, make them relative
                // to rootDir
                if (!f.isAbsolute())
                    f = new File(rootDir, f.getPath());
                // ensure no trailing file separator
                String p = f.getPath();
                if (p.endsWith(File.separator))
                    f = new File(p.substring(0, p.length() - 1));
                tests.insert(f);
            }
        }
        filesRemainingCount = filesToRead.size() + tests.size();
    }
    /**
     * Set a flag indicating whether it is OK to find no tests in the
     * specified set of files. If set to false, and if no tests have
     * been found by the time the last file has been read, an error
     * will be notified to any observers.
     * @param zeroTestsOK set to true to suppress an error being generated
     * if no tests are found by the time that all files have been read
     */
    public void setZeroTestsOK(boolean zeroTestsOK) {
        this.zeroTestsOK = zeroTestsOK;
    }
    /**
     * Set the queue to "repeat" a set of test descriptions by putting
     * them in the test found queue again.
     * @param tds the test descriptions to be "found again".
     * @deprecated retained for historical purposes
     */
    public void repeat(TestDescription[] tds) {
        if (tests == null)
            tests = new Fifo(); // for now
        for (int i = 0; i < tds.length; i++) {
            TestDescription td = tds[i];
            testDescsFound.insert(td);
            testsFoundCount++;
            notifier.found(td);
        }
    }
    /**
     * Get the next test description if one is available, or null when all have
     * been returned.
     *
     * @return A test description or null.
     */
    public TestDescription next() {
        TestDescription td;
        synchronized (this) {
            // Pump read-ahead inline if the current mode calls for it.
            while (needReadAhead() && readNextFile()) /*NO-OP*/;
            // read files until there is a test description available or there
            // are no more files.
            while ((td = (TestDescription)(testDescsFound.remove())) == null) {
                boolean ok = readNextFile();
                if (!ok)
                    return null;
            }
            // note testsDone, for readAhead
            testsDoneCount++;
        }
        // Notify outside the lock to avoid holding the monitor during callbacks.
        notifier.done(td);
        return td;
    }
    //--------------------------------------------------------------------------
    /**
     * Get the root directory for the test finder.
     * @return the root directory, as set in the test finder
     */
    public File getRoot() {
        return rootDir;
    }
    //--------------------------------------------------------------------------
    //
    // these are all carefully arranges to not need to be synchronized
    /**
     * Get the number of files that have been found so far.
     * @return the number of files that have been found so far
     */
    public int getFilesFoundCount() {
        return filesFound.size();
    }
    /**
     * Get the number of files that have been found and read so far.
     * @return the number of files that have been found and read so far
     */
    public int getFilesDoneCount() {
        return filesDoneCount;
    }
    /**
     * Get the number of files that have been found but not yet read so far.
     * @return the number of files that have been found but not yet read so far
     */
    public int getFilesRemainingCount() {
        return filesRemainingCount;
    }
    /**
     * Get the number of tests that have been found so far.
     * @return the number of tests that have been found so far
     */
    public int getTestsFoundCount() {
        return testsFoundCount;
    }
    /**
     * Get the number of tests that have been read from this object so far.
     * @return the number of tests that have been read from this object so far
     */
    public int getTestsDoneCount() {
        return testsDoneCount;
    }
    /**
     * Get the number of tests which have been found but not yet from this
     * object so far.
     * @return the number of tests which have been found but not yet read
     * from this object so far
     */
    public int getTestsRemainingCount() {
        return testDescsFound.size();
    }
    /**
     * Get the number of errors that have been found so far by the test finder
     * while reading the tests.
     * @return the number of errors that have been found so far by the test finder
     * while reading the tests.
     */
    public int getErrorCount() {
        return errorCount;
    }
    //--------------------------------------------------------------------------
    /**
     * Add an observer to monitor the progress of the TestFinder.
     * @param o the observer
     */
    public void addObserver(Observer o) {
        notifier.addObserver(o);
    }
    /**
     * Remove an observer form the set currently monitoring the progress
     * of the TestFinder.
     * @param o the observer
     */
    public void removeObserver(Observer o) {
        notifier.removeObserver(o);
    }
    //--------------------------------------------------------------------------
    /**
     * Set the amount of read-ahead done by the finder.
     * @param mode acceptable values are as follows:
     * <dl> <dt> 0: no read ahead
     * <dd> Files are not read ahead more than necessary
     * <dt> 1: low read ahead
     * <dd> A low priority thread is created to read the test files
     * when the system is otherwise idle
     * <dt> 2: medium read ahead
     * <dd> A low priority thread is created to read the test files
     * when the system is otherwise idle. In addition, if the number
     * of tests done approaches the number of tests read, then more
     * tests will be read.
     * <dt> 3: full and immediate read ahead
     * <dd> All the tests will be read now
     * </dl>
     */
    public synchronized void setReadAheadMode(byte mode) {
        switch (mode) {
        case NO_READ_AHEAD:
        case FULL_READ_AHEAD:
            readAheadMode = mode;
            readAheadWorker = null; // worker will note this and go away
            break;
        case LOW_READ_AHEAD:
        case MEDIUM_READ_AHEAD:
            readAheadMode = mode;
            if (readAheadWorker == null) {
                readAheadWorker = new Thread() {
                    public void run() {
                        // This is intended to be interruptible and
                        // relies on safe atomic access to worker
                        while ((readAheadWorker == this) && readNextFile()) /*NO-OP*/;
                        // when thread exits; flatten pointer if still current
                        synchronized (TestFinderQueue.this) {
                            if (readAheadWorker == this)
                                readAheadWorker = null;
                        }
                    }
                };
                readAheadWorker.setName("TestFinderQueue:Worker:" + workerIndex++);
                readAheadWorker.setPriority(Thread.MIN_PRIORITY);
                readAheadWorker.start();
            }
            break;
        default:
            throw new IllegalArgumentException("invalid value for mode");
        }
    }
    /**
     * A constant specifying that the test finder queue should not perform
     * any read ahead.
     */
    public static final byte NO_READ_AHEAD = 0;
    /**
     * A constant specifying the test finder queue should perform minimal
     * read ahead.
     */
    public static final byte LOW_READ_AHEAD = 1;
    /**
     * A constant specifying the test finder queue should perform medium
     * (typical) read ahead.
     */
    public static final byte MEDIUM_READ_AHEAD = 2;
    /**
     * A constant specifying the test finder queue should perform complete
     * read ahead, reading all tests from the test finder before returning any
     * from this object.
     */
    public static final byte FULL_READ_AHEAD = 3;
    /**
     * Flush all readahead.
     */
    public void flush() {
        synchronized (this) {
            filesToRead.setSize(0);
            tests.flush();
            testDescsFound.flush();
            filesRemainingCount = 0;
        }
        notifier.flushed();
    }
    /**
     * This method is called from next() to determine if any readAhead
     * should be done there and then, before getting the next TestDescription
     * for the client.
     */
    private boolean needReadAhead() {
        switch (readAheadMode) {
        case FULL_READ_AHEAD:
            return true;
        case MEDIUM_READ_AHEAD:
            // return true if not many tests read yet, or if testsDoneCount
            // is greater than a certain percentage of testsFoundCount.
            // This percentage increases inverse exponentially on the
            // number of tests found. The intent is to try and keep
            // progress meters based on testsDoneCount and testsFoundCount helpful,
            // while permitting readAhead to be done.
            // The formula has the following datapoints:
            //   testsFoundCount: 1000     percent: 18%
            //   testsFoundCount: 10000    percent: 86%
            if (testsFoundCount < 100)
                // don't let tests start until at least 100 have been read
                return true;
            else {
                double percent = 1 - Math.exp(-0.0002 * testsFoundCount);
                return (testsDoneCount > (testsFoundCount * percent));
            }
        default:
            return false;
        }
    }
    //---------------------------------------------------------------
    // Reads one file from filesToRead (refilling it from the initial-test fifo
    // when empty); returns false when there is nothing left to read.
    private synchronized boolean readNextFile() {
        if (filesToRead.isEmpty()) {
            // have we finished reading an initial file and found no test descriptions in it?
            // if so, inform the caller
            if (currInitialFile != null
                && testsFoundCountBeforeCurrInitialFile == testsFoundCount
                && !zeroTestsOK) {
                errorCount++;
                notifier.error(i18n.getString("finder.noTests", currInitialFile));
            }
            // are there any more tests that have not been read?
            // check until we find one (just one).
            while (filesToRead.isEmpty() && !tests.isEmpty()) {
                currInitialFile = (File)tests.remove();
                foundFile(currInitialFile);
            }
            // if we didn't find any more initial files, there is nothing more to do
            if (filesToRead.isEmpty()) {
                currInitialFile = null;
                return false;
            }
            else
                testsFoundCountBeforeCurrInitialFile = testsFoundCount;
        }
        File f = (File)(filesToRead.lastElement());
        filesToRead.setSize(filesToRead.size() - 1);
        filesRemainingCount = filesToRead.size() + tests.size();
        String path = f.getPath();
        int index = path.indexOf('#');
        // a path of the form "file#id" restricts reading to the single test
        // with that id within the file
        if (index != -1) {
            selectedId = path.substring(index + 1);
            f = new File(path.substring(0, index));
        }
        // The filesToRead are maintained in a vector that approximates a stack:
        // new entries are added at or near the end, and entries are taken from
        // the end. The subtlety is to add the new files found in file in reverse
        // order, so that when removed from the end, they are used in the
        // correct order. This is done by inserting each new file found at a fixed
        // place, corresponding to the end of the vector as it is when scan() starts.
        // The net effect is to push up files added earlier in this scan, so they
        // they will be used first. The overall net effect is that of a depth-first
        // search for test descriptions.
        fileInsertPosn = filesToRead.size();
        notifier.reading(f);
        try {
            testFinder.read(f);
        }
        finally {
            TestDescription[] tds = testFinder.getTests();
            for (int i = 0; i < tds.length; i++) {
                foundTestDescription(tds[i]);
            }
            File[] files = testFinder.getFiles();
            for (int i = 0; i < files.length; i++) {
                foundFile(files[i]);
            }
            // done limiting tests to this id
            selectedId = null;
            filesDoneCount++;
            notifier.done(f);
            /*
            if (filesToRead.isEmpty()) {
                // we have read all the files we can
                // flush various tables to free up any space used
                filesFound.clear();
                testsInFile.clear();
            }
            */
        }
        // read a file OK
        return true;
    }
    /**
     * Add a file to the queue of files waiting to be read.
     * It will be added to the queue if it has not already been read or is
     * on the queue waiting to be read, and if the finder is not looking for
     * a specific test in the current file.
     * @param newFile The file to be queued
     */
    private void foundFile(File newFile) {
        // only accept new files if not looking for a specific test in the
        // current file
        if (selectedId == null) {
            Object prev = filesFound.put(newFile.getPath(), newFile);
            if (prev == null) {
                filesToRead.insertElementAt(newFile, fileInsertPosn);
                notifier.found(newFile);
            }
        }
    }
    // Accepts a test description if it matches the selected id (when any) and
    // passes every filter; otherwise notifies observers that it was ignored.
    private void foundTestDescription(TestDescription td) {
        // if we are not searching for a specific test, or if we are and we have
        // found it, then add the test to the list of tests we have found
        if (selectedId == null || selectedId.equals(td.getId())) {
            if (filters != null) {
                for (int i = 0; i < filters.length; i++) {
                    TestFilter filter = filters[i];
                    try {
                        if (!filter.accepts(td)) {
                            notifier.ignored(td, filter);
                            return;
                        }
                    }
                    catch (TestFilter.Fault e) {
                        errorCount++;
                        notifier.error(td, e.getMessage());
                        return;
                    }
                }
            }
            testDescsFound.insert(td);
            testsFoundCount++;
            notifier.found(td);
        }
    }
    //---------------------------------------------------------------
    // Fans each Observer callback out to all registered observers.
    private static class Notifier implements Observer {
        public synchronized void addObserver(Observer o) {
            observers = (Observer[])DynamicArray.append(observers, o);
        }
        public synchronized void removeObserver(Observer o) {
            observers = (Observer[])DynamicArray.remove(observers, o);
        }
        public synchronized void found(File file) {
            for (int i = 0; i < observers.length; i++)
                observers[i].found(file);
        }
        public synchronized void reading(File file) {
            for (int i = 0; i < observers.length; i++)
                observers[i].reading(file);
        }
        public synchronized void done(File file) {
            for (int i = 0; i < observers.length; i++)
                observers[i].done(file);
        }
        public synchronized void found(TestDescription td) {
            for (int i = 0; i < observers.length; i++)
                observers[i].found(td);
        }
        public synchronized void ignored(TestDescription td, TestFilter f) {
            for (int i = 0; i < observers.length; i++)
                observers[i].ignored(td, f);
        }
        public synchronized void done(TestDescription td) {
            for (int i = 0; i < observers.length; i++)
                observers[i].done(td);
        }
        public synchronized void flushed() {
            for (int i = 0; i < observers.length; i++)
                observers[i].flushed();
        }
        public synchronized void error(String msg) {
            for (int i = 0; i < observers.length; i++)
                observers[i].error(msg);
        }
        public synchronized void error(TestDescription td, String msg) {
            for (int i = 0; i < observers.length; i++)
                observers[i].error(td, msg);
        }
        private Observer[] observers = new Observer[0];
    }
    //----------member variables------------------------------------------------
    private TestFinder testFinder;
    private Fifo tests;                 // initial files/tests, set by setTests
    private TestFilter[] filters;
    private String selectedId;          // non-null while reading a "file#id" entry
    private File rootDir;
    private File currInitialFile;
    private int testsFoundCountBeforeCurrInitialFile;
    private boolean zeroTestsOK;
    private Vector filesToRead  = new Vector(32, 8);   // used as a stack (depth-first)
    private int fileInsertPosn;
    private Fifo testDescsFound = new Fifo();
    private int filesRemainingCount;
    private int filesDoneCount;
    private int testsDoneCount;
    private int testsFoundCount;
    private int errorCount;
    private Hashtable filesFound = new Hashtable();    // keyed by path; dedupes files
    private byte readAheadMode;
    private Thread readAheadWorker;
    private static int workerIndex;
    private Notifier notifier = new Notifier();
    private static I18NResourceBundle i18n = I18NResourceBundle.getBundleForClass(TestFinder.class);
}
| gpl-2.0 |
jeffgdotorg/opennms | core/snmp/impl-mock/src/main/java/org/opennms/netmgt/snmp/mock/MockSnmpStrategy.java | 10693 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2011-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.netmgt.snmp.mock;
import java.io.IOException;
import java.math.BigInteger;
import java.net.InetAddress;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import org.opennms.netmgt.snmp.CollectionTracker;
import org.opennms.netmgt.snmp.InetAddrUtils;
import org.opennms.netmgt.snmp.SnmpAgentAddress;
import org.opennms.netmgt.snmp.SnmpAgentConfig;
import org.opennms.netmgt.snmp.SnmpObjId;
import org.opennms.netmgt.snmp.SnmpStrategy;
import org.opennms.netmgt.snmp.SnmpTrapBuilder;
import org.opennms.netmgt.snmp.SnmpV1TrapBuilder;
import org.opennms.netmgt.snmp.SnmpV2TrapBuilder;
import org.opennms.netmgt.snmp.SnmpV3TrapBuilder;
import org.opennms.netmgt.snmp.SnmpV3User;
import org.opennms.netmgt.snmp.SnmpValue;
import org.opennms.netmgt.snmp.SnmpValueFactory;
import org.opennms.netmgt.snmp.SnmpWalker;
import org.opennms.netmgt.snmp.TrapNotificationListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.Resource;
public class MockSnmpStrategy implements SnmpStrategy {
private static final transient Logger LOG = LoggerFactory.getLogger(MockSnmpStrategy.class);
public static final SnmpAgentAddress ALL_AGENTS = new SnmpAgentAddress(InetAddrUtils.addr("0.0.0.0"), 161);
private static final SnmpValue[] EMPTY_SNMP_VALUE_ARRAY = new SnmpValue[0];
// TOG's enterprise ID
private static int s_enterpriseId = 5813;
private static Map<SnmpAgentAddress,PropertyOidContainer> m_loaders = new HashMap<SnmpAgentAddress,PropertyOidContainer>();
    /** Default constructor; backing OID data is held in the static m_loaders map. */
    public MockSnmpStrategy() {
    }
    /**
     * Resolves the OID container for an agent config by converting it to an
     * address/port pair and delegating to {@link #getOidContainer(SnmpAgentAddress)}.
     */
    protected PropertyOidContainer getOidContainer(final SnmpAgentConfig agentConfig) {
        return getOidContainer(new SnmpAgentAddress(agentConfig.getAddress(), agentConfig.getPort()));
    }
    /**
     * Looks up the OID container registered for the exact agent address,
     * falling back to the ALL_AGENTS (0.0.0.0:161) wildcard entry; may return
     * null when neither is registered.
     */
    protected PropertyOidContainer getOidContainer(final SnmpAgentAddress aa) {
        if (m_loaders.containsKey(aa)) {
            return m_loaders.get(aa);
        } else {
            return m_loaders.get(ALL_AGENTS);
        }
    }
    /**
     * Creates a {@link MockSnmpWalker} over the container registered for the
     * agent's address; the container may be null if no data was loaded for it.
     */
    @Override
    public SnmpWalker createWalker(final SnmpAgentConfig agentConfig, final String name, final CollectionTracker tracker) {
        LOG.debug("createWalker({}/{}, {}, {})", InetAddrUtils.str(agentConfig.getAddress()), agentConfig.getPort(), name, tracker.getClass().getName());
        final SnmpAgentAddress aa = new SnmpAgentAddress(agentConfig.getAddress(), agentConfig.getPort());
        final PropertyOidContainer oidContainer = getOidContainer(aa);
        return new MockSnmpWalker(aa, agentConfig.getVersion(), oidContainer, name, tracker, agentConfig.getMaxVarsPerPdu(), agentConfig.getRetries());
    }
    /**
     * Sets a single OID's value in the mock container; returns null when no
     * container is registered for the agent.
     */
    @Override
    public SnmpValue set(final SnmpAgentConfig agentConfig, final SnmpObjId oid, final SnmpValue value) {
        final PropertyOidContainer oidContainer = getOidContainer(agentConfig);
        if (oidContainer == null) return null;
        return oidContainer.set(oid, value);
    }
@Override
public SnmpValue[] set(final SnmpAgentConfig agentConfig, final SnmpObjId[] oids, final SnmpValue[] values) {
final PropertyOidContainer oidContainer = getOidContainer(agentConfig);
if (oidContainer == null) return new SnmpValue[values.length];
return oidContainer.set(oids, values);
}
@Override
public SnmpValue get(final SnmpAgentConfig agentConfig, final SnmpObjId oid) {
final PropertyOidContainer oidContainer = getOidContainer(agentConfig);
if (oidContainer == null) return null;
SnmpValue val = oidContainer.findValueForOid(oid);
if (val.isNull()) {
return null;
}
return val;
}
@Override
public SnmpValue[] get(final SnmpAgentConfig agentConfig, final SnmpObjId[] oids) {
final PropertyOidContainer container = getOidContainer(agentConfig);
if (container == null) return new SnmpValue[oids.length];
final List<SnmpValue> values = new ArrayList<>();
for (final SnmpObjId oid : oids) {
values.add(container.findValueForOid(oid));
}
return values.toArray(EMPTY_SNMP_VALUE_ARRAY);
}
@Override
public CompletableFuture<SnmpValue[]> getAsync(SnmpAgentConfig agentConfig, SnmpObjId[] oids) {
return CompletableFuture.completedFuture(get(agentConfig, oids));
}
@Override
public SnmpValue getNext(final SnmpAgentConfig agentConfig, final SnmpObjId oid) {
final PropertyOidContainer oidContainer = getOidContainer(agentConfig);
if (oidContainer == null) return null;
return oidContainer.findNextValueForOid(oid);
}
@Override
public SnmpValue[] getNext(final SnmpAgentConfig agentConfig, final SnmpObjId[] oids) {
final PropertyOidContainer oidContainer = getOidContainer(agentConfig);
if (oidContainer == null) return null;
final List<SnmpValue> values = new ArrayList<>();
for (final SnmpObjId oid : oids) {
values.add(oidContainer.findNextValueForOid(oid));
}
return values.toArray(EMPTY_SNMP_VALUE_ARRAY);
}
@Override
public SnmpValue[] getBulk(final SnmpAgentConfig agentConfig, final SnmpObjId[] oids) {
return getNext(agentConfig, oids);
}
@Override
public void registerForTraps(final TrapNotificationListener listener, final InetAddress address, final int snmpTrapPort) throws IOException {
LOG.warn("Can't register for traps. No network in the MockSnmpStrategy!");
}
@Override
public void registerForTraps(final TrapNotificationListener listener, final int snmpTrapPort) throws IOException {
LOG.warn("Can't register for traps. No network in the MockSnmpStrategy!");
}
@Override
public void registerForTraps(TrapNotificationListener listener, InetAddress address, int snmpTrapPort, List<SnmpV3User> snmpv3Users) throws IOException {
LOG.warn("Can't register for traps. No network in the MockSnmpStrategy!");
}
@Override
public void unregisterForTraps(final TrapNotificationListener listener, final InetAddress address, final int snmpTrapPort) throws IOException {
}
@Override
public void unregisterForTraps(final TrapNotificationListener listener, final int snmpTrapPort) throws IOException {
}
@Override
public SnmpValueFactory getValueFactory() {
return new MockSnmpValueFactory();
}
@Override
public SnmpV1TrapBuilder getV1TrapBuilder() {
throw new UnsupportedOperationException("Not yet implemented!");
}
@Override
public SnmpTrapBuilder getV2TrapBuilder() {
throw new UnsupportedOperationException("Not yet implemented!");
}
@Override
public SnmpV3TrapBuilder getV3TrapBuilder() {
throw new UnsupportedOperationException("Not yet implemented!");
}
@Override
public SnmpV2TrapBuilder getV2InformBuilder() {
throw new UnsupportedOperationException("Not yet implemented!");
}
@Override
public SnmpV3TrapBuilder getV3InformBuilder() {
throw new UnsupportedOperationException("Not yet implemented!");
}
@Override
public byte[] getLocalEngineID() {
// lovingly stolen from SNMP4J
final byte[] engineID = new byte[5];
engineID[0] = (byte) (0x80 | ((s_enterpriseId >> 24) & 0xFF));
engineID[1] = (byte) ((s_enterpriseId >> 16) & 0xFF);
engineID[2] = (byte) ((s_enterpriseId >> 8) & 0xFF);
engineID[3] = (byte) (s_enterpriseId & 0xFF);
final byte[] ip = InetAddrUtils.getLocalHostAddress().getAddress();
if (ip.length == 4) {
// IPv4
engineID[4] = 1;
} else if (ip.length == 16) {
// IPv6
engineID[4] = 2;
} else {
// Text
engineID[4] = 4;
}
final byte[] bytes = new byte[engineID.length+ip.length];
System.arraycopy(engineID, 0, bytes, 0, engineID.length);
System.arraycopy(ip, 0, bytes, engineID.length, ip.length);
return bytes;
}
public static void setDataForAddress(final SnmpAgentAddress agentAddress, final Resource resource) throws IOException {
m_loaders.put(agentAddress, new PropertyOidContainer(resource));
}
public static void updateIntValue(final SnmpAgentAddress agentAddress, String oid, int value) {
m_loaders.get(agentAddress).set(SnmpObjId.get(oid), new MockSnmpValueFactory().getInt32(value));
}
public static void updateStringValue(final SnmpAgentAddress agentAddress, String oid, String value) {
m_loaders.get(agentAddress).set(SnmpObjId.get(oid), new MockSnmpValueFactory().getOctetString(value.getBytes(StandardCharsets.UTF_8)));
}
public static void updateCounter32Value(final SnmpAgentAddress agentAddress, String oid, long value) {
m_loaders.get(agentAddress).set(SnmpObjId.get(oid), new MockSnmpValueFactory().getCounter32(value));
}
public static void updateCounter64Value(final SnmpAgentAddress agentAddress, String oid, BigInteger value) {
m_loaders.get(agentAddress).set(SnmpObjId.get(oid), new MockSnmpValueFactory().getCounter64(value));
}
public static void removeHost(final SnmpAgentAddress agentAddr) {
m_loaders.remove(agentAddr);
}
public static void resetData() {
m_loaders.clear();
}
}
| gpl-2.0 |
SpoonLabs/astor | examples/quixbugscompiled/quicksort/src/java_programs_test/QuicksortTest.java | 5668 | package java_programs_test;
/**
 * Oracle tests for {@code java_programs.QUICKSORT#quicksort}: each case sorts
 * a fixed input list and compares the formatted result against the expected
 * ascending ordering. The repeated body of the generated tests has been
 * factored into {@link #assertQuicksort}.
 */
public class QuicksortTest {
    /**
     * Sorts {@code input} via QUICKSORT and asserts the formatted output
     * equals {@code expected}.
     */
    private static void assertQuicksort(final String expected, final Integer... input) throws java.lang.Exception {
        java.util.ArrayList result = java_programs.QUICKSORT.quicksort(new java.util.ArrayList(java.util.Arrays.asList(input)));
        String resultFormatted = java_programs_test.QuixFixOracleHelper.format(result, true);
        org.junit.Assert.assertEquals(expected, resultFormatted);
    }
    @org.junit.Test(timeout = 2000)
    public void test_0() throws java.lang.Exception {
        assertQuicksort("[1,2,4,6,7,33,72]", 1, 2, 6, 72, 7, 33, 4);
    }
    @org.junit.Test(timeout = 2000)
    public void test_1() throws java.lang.Exception {
        assertQuicksort("[1,1,2,3,3,3,4,5,5,5,6,7,8,9,9,9]", 3, 1, 4, 1, 5, 9, 2, 6, 5, 3, 5, 8, 9, 7, 9, 3);
    }
    @org.junit.Test(timeout = 2000)
    public void test_2() throws java.lang.Exception {
        assertQuicksort("[1,2,3,4,5]", 5, 4, 3, 2, 1);
    }
    @org.junit.Test(timeout = 2000)
    public void test_3() throws java.lang.Exception {
        assertQuicksort("[1,2,3,4,5]", 5, 4, 3, 1, 2);
    }
    @org.junit.Test(timeout = 2000)
    public void test_4() throws java.lang.Exception {
        assertQuicksort("[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18]", 8, 1, 14, 9, 15, 5, 4, 3, 7, 17, 11, 18, 2, 12, 16, 13, 6, 10);
    }
    @org.junit.Test(timeout = 2000)
    public void test_5() throws java.lang.Exception {
        assertQuicksort("[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17]", 9, 4, 5, 2, 17, 14, 10, 6, 15, 8, 12, 13, 16, 3, 1, 7, 11);
    }
    @org.junit.Test(timeout = 2000)
    public void test_6() throws java.lang.Exception {
        assertQuicksort("[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]", 13, 14, 7, 16, 9, 5, 24, 21, 19, 17, 12, 10, 1, 15, 23, 25, 11, 3, 2, 6, 22, 8, 20, 4, 18);
    }
    @org.junit.Test(timeout = 2000)
    public void test_7() throws java.lang.Exception {
        assertQuicksort("[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]", 8, 5, 15, 7, 9, 14, 11, 12, 10, 6, 2, 4, 13, 1, 3);
    }
    @org.junit.Test(timeout = 2000)
    public void test_8() throws java.lang.Exception {
        assertQuicksort("[1,2,3,4,5,6,7]", 4, 3, 7, 6, 5, 2, 1);
    }
    @org.junit.Test(timeout = 2000)
    public void test_9() throws java.lang.Exception {
        assertQuicksort("[1,2,3,4,5]", 4, 3, 1, 5, 2);
    }
    @org.junit.Test(timeout = 2000)
    public void test_10() throws java.lang.Exception {
        assertQuicksort("[1,2,3,4,5,6,7]", 5, 4, 2, 3, 6, 7, 1);
    }
    @org.junit.Test(timeout = 2000)
    public void test_11() throws java.lang.Exception {
        assertQuicksort("[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19]", 10, 16, 6, 1, 14, 19, 15, 2, 9, 4, 18, 17, 12, 3, 11, 8, 13, 5, 7);
    }
    @org.junit.Test(timeout = 2000)
    public void test_12() throws java.lang.Exception {
        assertQuicksort("[1,2,4,6,9,10,14,15,16,18,19]", 10, 16, 6, 1, 14, 19, 15, 2, 9, 4, 18);
    }
}
| gpl-2.0 |
tkpb/Telegram | TMessagesProj/src/main/java/org/telegram/ui/Components/ColoredImageSpan.java | 1502 | package org.telegram.ui.Components;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffColorFilter;
import android.graphics.drawable.Drawable;
import android.text.style.ReplacementSpan;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
/**
 * A {@link ReplacementSpan} that renders a drawable inline with text,
 * tinting it (MULTIPLY) with the current paint color and centering it
 * vertically within the line box.
 */
public class ColoredImageSpan extends ReplacementSpan {
    // Last paint color applied to the drawable's color filter; lets us skip
    // rebuilding the filter when the surrounding text color has not changed.
    int drawableColor;
    Drawable drawable;
    public ColoredImageSpan(@NonNull Drawable drawable) {
        this.drawable = drawable;
        final int w = drawable.getIntrinsicWidth();
        final int h = drawable.getIntrinsicHeight();
        drawable.setBounds(0, 0, w, h);
    }
    @Override
    public int getSize(@NonNull Paint paint, CharSequence charSequence, int i, int i1, @Nullable Paint.FontMetricsInt fontMetricsInt) {
        // NOTE(review): fontMetricsInt is intentionally untouched; the line is
        // assumed tall enough for the drawable -- confirm if tall icons clip.
        return drawable.getIntrinsicWidth();
    }
    @Override
    public void draw(@NonNull Canvas canvas, CharSequence text, int start, int end, float x, int top, int y, int bottom, @NonNull Paint paint) {
        final int paintColor = paint.getColor();
        if (paintColor != drawableColor) {
            // Re-tint only when the text color actually changed.
            drawableColor = paintColor;
            drawable.setColorFilter(new PorterDuffColorFilter(drawableColor, PorterDuff.Mode.MULTIPLY));
        }
        // Integer division matches the original pixel placement exactly.
        final int verticalInset = ((bottom - top) - drawable.getIntrinsicHeight()) / 2;
        canvas.save();
        canvas.translate(x, top + verticalInset);
        drawable.draw(canvas);
        canvas.restore();
    }
}
| gpl-2.0 |
rac021/blazegraph_1_5_3_cluster_2_nodes | bigdata-gas/src/main/java/com/bigdata/rdf/graph/impl/ram/RAMGASRunner.java | 3365 | package com.bigdata.rdf.graph.impl.ram;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.log4j.Logger;
import com.bigdata.rdf.graph.IGASEngine;
import com.bigdata.rdf.graph.IGraphAccessor;
import com.bigdata.rdf.graph.impl.ram.RAMGASEngine.RAMGraph;
import com.bigdata.rdf.graph.impl.ram.RAMGASEngine.RAMGraphAccessor;
import com.bigdata.rdf.graph.impl.util.GASRunnerBase;
/**
* Class for running GAS performance tests against the SAIL.
*
* @author <a href="mailto:thompsonbry@users.sourceforge.net">Bryan Thompson</a>
*/
public class RAMGASRunner<VS, ES, ST> extends GASRunnerBase<VS, ES, ST> {
    private static final Logger log = Logger.getLogger(RAMGASRunner.class);
    public RAMGASRunner(String[] args) throws ClassNotFoundException {
        super(args);
    }
    /** Option data specialized for the RAM backend; owns the in-memory graph. */
    protected class RAMOptionData extends GASRunnerBase<VS, ES, ST>.OptionData {
        final private RAMGraph g = new RAMGraph();
        /**
         * Returns the in-memory graph. Always access the graph through this
         * accessor rather than the field directly.
         */
        public RAMGraph getGraph() {
            synchronized(g) {
                /*
                 * Note: Synchronization pattern is intended to provide
                 * visibility for graph traversal following a load of data into
                 * the graph.
                 */
                return g;
            }
        }
        @Override
        public void init() throws Exception {
            super.init();
        }
        @Override
        public void shutdown() {
            // Nothing to release: the graph lives entirely on the heap.
        }
        @Override
        public boolean handleArg(final AtomicInteger i, final String[] args) {
            // No RAM-specific command-line options; defer entirely to the base class.
            return super.handleArg(i, args);
        }
        @Override
        public void report(final StringBuilder sb) {
            // NOP
        }
    } // class RAMOptionData
    @Override
    protected RAMOptionData newOptionData() {
        return new RAMOptionData();
    }
    @Override
    protected IGASEngine newGASEngine() {
        return new RAMGASEngine(getOptionData().nthreads);
    }
    @Override
    protected void loadFiles() throws Exception {
        final RAMOptionData opt = getOptionData();
        final String[] resources = opt.loadSet.toArray(new String[0]);
        new RAMGraphLoader(opt.getGraph()).loadGraph(null/* fallback */,
                resources);
    }
    @SuppressWarnings("unchecked")
    @Override
    protected RAMOptionData getOptionData() {
        return (RAMOptionData) super.getOptionData();
    }
    @Override
    protected IGraphAccessor newGraphAccessor() {
        /*
         * Fix: go through getGraph() rather than the raw field so the
         * synchronized accessor provides visibility for data previously loaded
         * into the graph (see RAMOptionData.getGraph()).
         */
        return new RAMGraphAccessor(getOptionData().getGraph());
    }
    /**
     * Performance testing harness.
     *
     * @see #GASRunner(String[])
     */
    @SuppressWarnings("rawtypes")
    public static void main(final String[] args) throws Exception {
        new RAMGASRunner(args).call();
    }
}
| gpl-2.0 |
rschatz/graal-core | graal/com.oracle.graal.debug/src/com/oracle/graal/debug/DebugMetric.java | 2208 | /*
* Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.debug;
/**
 * A counter for some value of interest.
 *
 * <p>A metric may be <em>conditional</em>: a conditional metric only records
 * updates while metering is enabled via {@link Debug#isMeterEnabled()}; an
 * unconditional metric always records them.
 */
public interface DebugMetric {
    /**
     * Adds 1 to this counter if metering is {@link Debug#isMeterEnabled() enabled} or this is an
     * {@linkplain #isConditional() unconditional} metric.
     */
    void increment();
    /**
     * Adds {@code value} to this counter if metering is {@link Debug#isMeterEnabled() enabled} or
     * this is an {@linkplain #isConditional() unconditional} metric.
     *
     * @param value the amount to add to the counter
     */
    void add(long value);
    /**
     * Sets a flag determining if this counter is only enabled if metering is
     * {@link Debug#isMeterEnabled() enabled}.
     *
     * @param flag {@code true} to make this metric conditional on metering
     */
    void setConditional(boolean flag);
    /**
     * Determines if this counter is only enabled if metering is {@link Debug#isMeterEnabled()
     * enabled}.
     */
    boolean isConditional();
    /**
     * Gets the current value of this metric.
     */
    long getCurrentValue();
    /**
     * Determines if this counter is enabled (either conditionally or unconditionally).
     * An unconditional metric is always enabled; a conditional one is enabled
     * only while metering is on.
     */
    default boolean isEnabled() {
        return !isConditional() || Debug.isMeterEnabled();
    }
}
| gpl-2.0 |
AKSW/DL-Learner | components-ext/src/main/java/org/dllearner/algorithms/isle/index/SemanticAnnotation.java | 1197 | /**
*
*/
package org.dllearner.algorithms.isle.index;
import java.util.Objects;

import org.semanticweb.owlapi.model.OWLEntity;
/**
* @author Lorenz Buehmann
*
*/
/**
 * An {@link Annotation} that has been linked to a concrete ontology entity:
 * it pairs the annotated token span with the {@link OWLEntity} it refers to.
 */
public class SemanticAnnotation extends Annotation {
    /** The ontology entity this text annotation was resolved to. */
    private OWLEntity entity;
    public SemanticAnnotation(Annotation annotation, OWLEntity entity) {
        super(annotation.getReferencedDocument(), annotation.getTokens());
        this.entity = entity;
    }
    public OWLEntity getEntity() {
        return entity;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        // Objects.hashCode(entity) yields 0 for null, matching the previous
        // hand-rolled null check; super.hashCode() folds in the span identity.
        return prime * super.hashCode() + Objects.hashCode(entity);
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // Span equality and exact-class check first, then the linked entity.
        if (!super.equals(obj))
            return false;
        if (getClass() != obj.getClass())
            return false;
        final SemanticAnnotation other = (SemanticAnnotation) obj;
        return Objects.equals(entity, other.entity);
    }
    /* (non-Javadoc)
     * @see org.dllearner.algorithms.isle.index.Annotation#toString()
     */
    @Override
    public String toString() {
        return super.toString() + "->" + entity;
    }
}
| gpl-3.0 |
UnlimitedFreedom/UF-EchoPet | modules/v1_6_R3/src/main/java/com/dsh105/echopet/compat/nms/v1_6_R3/entity/EntityPet.java | 17910 | /*
* This file is part of EchoPet.
*
* EchoPet is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* EchoPet is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with EchoPet. If not, see <http://www.gnu.org/licenses/>.
*/
package com.dsh105.echopet.compat.nms.v1_6_R3.entity;
import com.dsh105.commodus.IdentUtil;
import com.dsh105.echopet.compat.api.ai.PetGoalSelector;
import com.dsh105.echopet.compat.api.entity.*;
import com.dsh105.echopet.compat.api.event.PetAttackEvent;
import com.dsh105.echopet.compat.api.event.PetRideJumpEvent;
import com.dsh105.echopet.compat.api.event.PetRideMoveEvent;
import com.dsh105.echopet.compat.api.plugin.EchoPet;
import com.dsh105.echopet.compat.api.util.Logger;
import com.dsh105.echopet.compat.api.util.MenuUtil;
import com.dsh105.echopet.compat.api.util.Perm;
import com.dsh105.echopet.compat.api.util.menu.MenuOption;
import com.dsh105.echopet.compat.api.util.menu.PetMenu;
import com.dsh105.echopet.compat.nms.v1_6_R3.entity.ai.PetGoalFloat;
import com.dsh105.echopet.compat.nms.v1_6_R3.entity.ai.PetGoalFollowOwner;
import com.dsh105.echopet.compat.nms.v1_6_R3.entity.ai.PetGoalLookAtPlayer;
import net.minecraft.server.v1_6_R3.*;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.craftbukkit.v1_6_R3.CraftWorld;
import org.bukkit.craftbukkit.v1_6_R3.entity.CraftCreature;
import org.bukkit.craftbukkit.v1_6_R3.entity.CraftLivingEntity;
import org.bukkit.craftbukkit.v1_6_R3.entity.CraftPlayer;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.util.Vector;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Random;
public abstract class EntityPet extends EntityCreature implements IAnimal, IEntityPet {
protected IPet pet;
public PetGoalSelector petGoalSelector;
protected static Field FIELD_JUMP = null;
protected double jumpHeight;
protected float rideSpeed;
public EntityLiving goalTarget = null;
public boolean shouldVanish;
public EntityPet(World world) {
super(world);
}
public EntityPet(World world, IPet pet) {
super(world);
this.pet = pet;
this.initiateEntityPet();
}
private void initiateEntityPet() {
this.resetEntitySize();
this.fireProof = true;
if (this.FIELD_JUMP == null) {
try {
this.FIELD_JUMP = EntityLiving.class.getDeclaredField("bd");
this.FIELD_JUMP.setAccessible(true);
} catch (NoSuchFieldException e) {
e.printStackTrace();
}
}
this.getBukkitEntity().setMaxHealth(pet.getPetType().getMaxHealth());
this.setHealth((float) pet.getPetType().getMaxHealth());
this.jumpHeight = EchoPet.getOptions().getRideJumpHeight(this.getPet().getPetType());
this.rideSpeed = EchoPet.getOptions().getRideSpeed(this.getPet().getPetType());
this.setPathfinding();
}
public PetType getEntityPetType() {
EntityPetType entityPetType = this.getClass().getAnnotation(EntityPetType.class);
if (entityPetType != null) {
return entityPetType.petType();
}
return null;
}
@Override
public void resizeBoundingBox(boolean flag) {
EntitySize es = this.getClass().getAnnotation(EntitySize.class);
if (es != null) {
this.setSize(flag ? (es.width() / 2) : es.width(), flag ? (es.height() / 2) : es.height());
}
}
@Override
public void resetEntitySize() {
EntitySize es = this.getClass().getAnnotation(EntitySize.class);
if (es != null) {
this.setSize(es.width(), es.height());
}
}
@Override
public void setEntitySize(float width, float height) {
this.setSize(width, height);
}
protected void setSize(float width, float height) {
this.a(width, height);
}
@Override
public boolean isPersistent() {
return true;
}
public IPet getPet() {
return this.pet;
}
public Player getPlayerOwner() {
return pet.getOwner();
}
public Location getLocation() {
return this.pet.getLocation();
}
public void setVelocity(Vector vel) {
this.motX = vel.getX();
this.motY = vel.getY();
this.motZ = vel.getZ();
this.velocityChanged = true;
}
public Random random() {
return this.random;
}
@Override
public PetGoalSelector getPetGoalSelector() {
return petGoalSelector;
}
@Override
public boolean isDead() {
return dead;
}
@Override
public void setShouldVanish(boolean flag) {
this.shouldVanish = flag;
}
@Override
public void setTarget(LivingEntity livingEntity) {
this.setGoalTarget(((CraftLivingEntity) livingEntity).getHandle());
}
@Override
public LivingEntity getTarget() {
return (LivingEntity) this.getGoalTarget().getBukkitEntity();
}
public boolean attack(Entity entity) {
return this.attack(entity, (float) this.getPet().getPetType().getAttackDamage());
}
public boolean attack(Entity entity, float damage) {
return this.attack(entity, DamageSource.mobAttack(this), damage);
}
public boolean attack(Entity entity, DamageSource damageSource, float damage) {
PetAttackEvent attackEvent = new PetAttackEvent(this.getPet(), entity.getBukkitEntity(), damage);
EchoPet.getPlugin().getServer().getPluginManager().callEvent(attackEvent);
if (!attackEvent.isCancelled()) {
if (entity instanceof EntityPlayer) {
if (!(EchoPet.getConfig().getBoolean("canAttackPlayers", false))) {
return false;
}
}
return entity.damageEntity(damageSource, (float) attackEvent.getDamage());
}
return false;
}
public void setPathfinding() {
try {
this.petGoalSelector = new PetGoalSelector();
this.getNavigation().b(true);
petGoalSelector.addGoal(new PetGoalFloat(this), 0);
petGoalSelector.addGoal(new PetGoalFollowOwner(this, this.getSizeCategory().getStartWalk(getPet().getPetType()), this.getSizeCategory().getStopWalk(getPet().getPetType()), this.getSizeCategory().getTeleport(getPet().getPetType())), 1);
petGoalSelector.addGoal(new PetGoalLookAtPlayer(this, EntityHuman.class), 2);
} catch (Exception e) {
Logger.log(Logger.LogLevel.WARNING, "Could not add PetGoals to Pet AI.", e, true);
}
}
// EntityInsentient
@Override
public boolean bf() {
return true;
}
@Override
public CraftCreature getBukkitEntity() {
return (CraftCreature) super.getBukkitEntity();
}
// Overriden from EntityInsentient - Most importantly overrides pathfinding selectors
@Override
protected void bi() {
++this.aV;
this.u();
this.getEntitySenses().a();
// If this ever happens...
if (this.petGoalSelector == null) {
this.remove(false);
return;
}
this.petGoalSelector.updateGoals();
this.getNavigation().f();
this.bk();
this.getControllerMove().c();
this.getControllerLook().a();
this.getControllerJump().b();
}
@Override
public boolean onInteract(Player p) {
if (IdentUtil.areIdentical(p, getPlayerOwner())) {
if (EchoPet.getConfig().getBoolean("pets." + this.getPet().getPetType().toString().toLowerCase().replace("_", " ") + ".interactMenu", true) && Perm.BASE_MENU.hasPerm(this.getPlayerOwner(), false, false)) {
ArrayList<MenuOption> options = MenuUtil.createOptionList(getPet().getPetType());
int size = this.getPet().getPetType() == PetType.HORSE ? 18 : 9;
PetMenu menu = new PetMenu(getPet(), options, size);
menu.open(false);
}
return true;
}
return false;
}
@Override
public boolean a(EntityHuman human) {
return onInteract((Player) human.getBukkitEntity());
}
@Override
public void setPositionRotation(double d0, double d1, double d2, float f, float f1) {
super.setPositionRotation(d0, d1, d2, f, f1);
}
public void setLocation(Location l) {
this.setLocation(l.getX(), l.getY(), l.getZ(), l.getYaw(), l.getPitch());
this.world = ((CraftWorld) l.getWorld()).getHandle();
}
public void teleport(Location l) {
this.getPet().getCraftPet().teleport(l);
}
@Override
public void remove(boolean makeSound) {
if (this.bukkitEntity != null) {
bukkitEntity.remove();
}
if (makeSound) {
String sound = this.getDeathSound();
if (sound != null) {
makeSound(this.getDeathSound(), 1.0F, 1.0F);
}
}
}
public void onLive() {
if (this.pet == null) {
this.remove(false);
return;
}
if (this.getPlayerOwner() == null || !this.getPlayerOwner().isOnline() || Bukkit.getPlayerExact(this.getPlayerOwner().getName()) == null) {
EchoPet.getManager().removePet(this.getPet(), true);
return;
}
if (pet.isOwnerRiding() && this.passenger == null && !pet.isOwnerInMountingProcess()) {
pet.ownerRidePet(false);
}
if (((CraftPlayer) this.getPlayerOwner()).getHandle().isInvisible() != this.isInvisible() && !this.shouldVanish) {
this.setInvisible(!this.isInvisible());
}
if (((CraftPlayer) this.getPlayerOwner()).getHandle().isSneaking() != this.isSneaking()) {
this.setSneaking(!this.isSneaking());
}
if (((CraftPlayer) this.getPlayerOwner()).getHandle().isSprinting() != this.isSprinting()) {
this.setSprinting(!this.isSprinting());
}
if (this.getPet().isHat()) {
this.lastYaw = this.yaw = (this.getPet().getPetType() == PetType.ENDERDRAGON ? this.getPlayerOwner().getLocation().getYaw() - 180 : this.getPlayerOwner().getLocation().getYaw());
}
if (this.getPlayerOwner().isFlying() && EchoPet.getOptions().canFly(this.getPet().getPetType())) {
Location petLoc = this.getLocation();
Location ownerLoc = this.getPlayerOwner().getLocation();
Vector v = ownerLoc.toVector().subtract(petLoc.toVector());
double x = v.getX();
double y = v.getY();
double z = v.getZ();
Vector vo = this.getPlayerOwner().getLocation().getDirection();
if (vo.getX() > 0) {
x -= 1.5;
} else if (vo.getX() < 0) {
x += 1.5;
}
if (vo.getZ() > 0) {
z -= 1.5;
} else if (vo.getZ() < 0) {
z += 1.5;
}
this.setVelocity(new Vector(x, y, z).normalize().multiply(0.3F));
}
}
// EntityInsentient
@Override
public void e(float sideMot, float forwMot) {
if (this.passenger == null || !(this.passenger instanceof EntityHuman)) {
super.e(sideMot, forwMot);
// https://github.com/Bukkit/mc-dev/blob/master/net/minecraft/server/EntityHorse.java#L914
this.Y = 0.5F;
return;
}
EntityHuman human = (EntityHuman) this.passenger;
if (human.getBukkitEntity() != this.getPlayerOwner().getPlayer()) {
super.e(sideMot, forwMot);
this.Y = 0.5F;
return;
}
this.Y = 1.0F;
this.lastYaw = this.yaw = this.passenger.yaw;
this.pitch = this.passenger.pitch * 0.5F;
this.b(this.yaw, this.pitch);
this.aP = this.aN = this.yaw;
sideMot = ((EntityLiving) this.passenger).be * 0.5F;
forwMot = ((EntityLiving) this.passenger).bf;
if (forwMot <= 0.0F) {
forwMot *= 0.25F;
}
sideMot *= 0.75F;
PetRideMoveEvent moveEvent = new PetRideMoveEvent(this.getPet(), forwMot, sideMot);
EchoPet.getPlugin().getServer().getPluginManager().callEvent(moveEvent);
if (moveEvent.isCancelled()) {
return;
}
this.i(this.rideSpeed);
super.e(moveEvent.getSidewardMotionSpeed(), moveEvent.getForwardMotionSpeed());
PetType pt = this.getPet().getPetType();
if (FIELD_JUMP != null && this.passenger != null) {
if (EchoPet.getOptions().canFly(pt)) {
try {
if (((Player) (human.getBukkitEntity())).isFlying()) {
((Player) (human.getBukkitEntity())).setFlying(false);
}
if (FIELD_JUMP.getBoolean(this.passenger)) {
PetRideJumpEvent rideEvent = new PetRideJumpEvent(this.getPet(), this.jumpHeight);
EchoPet.getPlugin().getServer().getPluginManager().callEvent(rideEvent);
if (!rideEvent.isCancelled()) {
this.motY = 0.5F;
}
}
} catch (IllegalArgumentException e) {
Logger.log(Logger.LogLevel.WARNING, "Failed to initiate Pet Flying Motion for " + this.getPlayerOwner().getName() + "'s Pet.", e, true);
} catch (IllegalAccessException e) {
Logger.log(Logger.LogLevel.WARNING, "Failed to initiate Pet Flying Motion for " + this.getPlayerOwner().getName() + "'s Pet.", e, true);
} catch (IllegalStateException e) {
Logger.log(Logger.LogLevel.WARNING, "Failed to initiate Pet Flying Motion for " + this.getPlayerOwner().getName() + "'s Pet.", e, true);
}
} else if (this.onGround) {
try {
if (FIELD_JUMP.getBoolean(this.passenger)) {
PetRideJumpEvent rideEvent = new PetRideJumpEvent(this.getPet(), this.jumpHeight);
EchoPet.getPlugin().getServer().getPluginManager().callEvent(rideEvent);
if (!rideEvent.isCancelled()) {
this.motY = rideEvent.getJumpHeight();
doJumpAnimation();
}
}
} catch (IllegalArgumentException e) {
Logger.log(Logger.LogLevel.WARNING, "Failed to initiate Pet Jumping Motion for " + this.getPlayerOwner().getName() + "'s Pet.", e, true);
} catch (IllegalAccessException e) {
Logger.log(Logger.LogLevel.WARNING, "Failed to initiate Pet Jumping Motion for " + this.getPlayerOwner().getName() + "'s Pet.", e, true);
} catch (IllegalStateException e) {
Logger.log(Logger.LogLevel.WARNING, "Failed to initiate Pet Jumping Motion for " + this.getPlayerOwner().getName() + "'s Pet.", e, true);
}
}
}
}
// EntityInsentient
@Override
protected String r() {
return this.getIdleSound();
}
// EntityInsentient
@Override
protected String aP() {
return this.getDeathSound();
}
protected abstract String getIdleSound(); //idle sound
protected abstract String getDeathSound(); //death sound
@Override
public abstract SizeCategory getSizeCategory();
// Entity
@Override
public void l_() {
super.l_();
onLive();
}
// EntityLiving
@Override
protected void a() {
super.a();
initDatawatcher();
}
// Entity
@Override
protected void a(int i, int j, int k, int l) {
super.a(i, j, k, l);
makeStepSound(i, j, k, Block.byId[l]);
}
    // Default step-sound handler: ignores the position/block and delegates to the
    // no-argument variant, which subclasses may override.
    protected void makeStepSound(int i, int j, int k, Block block) {
        this.makeStepSound();
    }
    // Hook for subclasses to register extra datawatcher entries; no-op by default.
    protected void initDatawatcher() {
    }
    // Hook for subclasses to play their step sound; no-op by default.
    protected void makeStepSound() {
    }
    // Hook for subclasses to play a jump animation (invoked from the ride-jump
    // handling above); no-op by default.
    protected void doJumpAnimation() {
    }
    // NBT write hook (obfuscated NMS name): intentionally a no-op so the pet entity
    // is never serialised into chunk/world save data.
    @Override
    public void b(NBTTagCompound nbttagcompound) {
        // Do nothing with NBT
        // Pets should not be stored to world save files
    }
    // NBT save check (obfuscated NMS name): returning false tells the engine not to
    // persist this entity, so pets never survive into world save files.
    @Override
    public boolean c(NBTTagCompound nbttagcompound) {
        // Do nothing with NBT
        // Pets should not be stored to world save files
        return false;
    }
    // NBT read hook (obfuscated NMS name): intentionally a no-op — pets are
    // recreated by the plugin instead of being loaded from world saves. The
    // commented-out code below is the abandoned load-from-NBT path, kept for reference.
    @Override
    public void a(NBTTagCompound nbttagcompound) {
        // Do nothing with NBT
        // Pets should not be stored to world save files
        /*super.a(nbttagcompound);
        String owner = nbttagcompound.getString("EchoPet_OwnerName");
        PetType pt = this.getEntityPetType();
        if (pt != null) {
            this.pet = pt.getNewPetInstance(owner, this);
            if (this.pet != null) {
                EchoPet.getManager().loadRiderFromFile(this.getPet());
                this.initiateEntityPet();
            }
        }*/
    }
    // Additional NBT save check (obfuscated NMS name): also returns false so the
    // entity is excluded from persistence by every save path.
    @Override
    public boolean d(NBTTagCompound nbttagcompound) {
        // Do nothing with NBT
        // Pets should not be stored to world save files
        return false;
    }
    // NBT save hook (obfuscated NMS name): intentionally a no-op for the same
    // reason as the other NBT overrides — pets must not be written to save files.
    @Override
    public void e(NBTTagCompound nbttagcompound) {
        // Do nothing with NBT
        // Pets should not be stored to world save files
    }
} | gpl-3.0 |
SKCraft/Applied-Energistics-2 | src/main/java/appeng/block/misc/BlockTinyTNT.java | 5795 | /*
* This file is part of Applied Energistics 2.
* Copyright (c) 2013 - 2015, AlgorithmX2, All rights reserved.
*
* Applied Energistics 2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Applied Energistics 2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Applied Energistics 2. If not, see <http://www.gnu.org/licenses/lgpl>.
*/
package appeng.block.misc;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import net.minecraft.block.Block;
import net.minecraft.block.BlockDispenser;
import net.minecraft.block.material.Material;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.projectile.EntityArrow;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.util.IIcon;
import net.minecraft.world.Explosion;
import net.minecraft.world.World;
import cpw.mods.fml.common.registry.EntityRegistry;
import appeng.block.AEBaseBlock;
import appeng.client.render.BaseBlockRender;
import appeng.client.render.blocks.RenderTinyTNT;
import appeng.client.texture.FullIcon;
import appeng.core.AppEng;
import appeng.core.features.AEFeature;
import appeng.entity.EntityIds;
import appeng.entity.EntityTinyTNTPrimed;
import appeng.helpers.ICustomCollision;
import appeng.hooks.DispenserBehaviorTinyTNT;
/**
 * Tiny TNT: a quarter-footprint, half-height TNT block (bounds 0.25..0.75 on X/Z,
 * 0..0.5 on Y) that, when ignited, spawns an {@link EntityTinyTNTPrimed} instead of
 * vanilla primed TNT. Implements {@link ICustomCollision} to expose the reduced
 * bounding box for both selection and collision.
 */
public class BlockTinyTNT extends AEBaseBlock implements ICustomCollision
{
	/**
	 * Configures the reduced bounds, zero hardness and grass step sound, and
	 * registers the primed-tiny-TNT entity with Forge's entity registry.
	 */
	public BlockTinyTNT()
	{
		super( Material.tnt );
		this.setLightOpacity( 1 );
		this.setBlockBounds( 0.25f, 0.0f, 0.25f, 0.75f, 0.5f, 0.75f );
		// not a full cube: needs custom render + lets light/selection behave correctly
		this.isFullSize = this.isOpaque = false;
		this.setStepSound( soundTypeGrass );
		this.setHardness( 0F );
		this.setFeature( EnumSet.of( AEFeature.TinyTNT ) );
		EntityRegistry.registerModEntity( EntityTinyTNTPrimed.class, "EntityTinyTNTPrimed", EntityIds.TINY_TNT, AppEng.instance(), 16, 4, true );
	}
	/** @return the custom renderer used to draw the shrunken TNT model. */
	@Override
	protected Class<? extends BaseBlockRender> getRenderer()
	{
		return RenderTinyTNT.class;
	}
	/**
	 * Registers a dispenser behavior so dispensers prime tiny TNT instead of
	 * dropping it as an item.
	 */
	@Override
	public void postInit()
	{
		super.postInit();
		BlockDispenser.dispenseBehaviorRegistry.putObject( Item.getItemFromBlock( this ), new DispenserBehaviorTinyTNT() );
	}
	/** Reuses the vanilla TNT texture, wrapped so it renders on the small model. */
	@Override
	public IIcon getIcon( int direction, int metadata )
	{
		return new FullIcon( Blocks.tnt.getIcon( direction, metadata ) );
	}
	/**
	 * Right-click handling: flint &amp; steel ignites the block — the fuse is
	 * started, the block is removed, and the tool takes one point of damage.
	 * Any other interaction falls through to the default behavior.
	 */
	@Override
	public boolean onActivated( World w, int x, int y, int z, EntityPlayer player, int side, float hitX, float hitY, float hitZ )
	{
		if( player.getCurrentEquippedItem() != null && player.getCurrentEquippedItem().getItem() == Items.flint_and_steel )
		{
			this.startFuse( w, x, y, z, player );
			w.setBlockToAir( x, y, z );
			player.getCurrentEquippedItem().damageItem( 1, player );
			return true;
		}
		else
		{
			return super.onActivated( w, x, y, z, player, side, hitX, hitY, hitZ );
		}
	}
	/** No block-specific icons: the vanilla TNT texture is reused via getIcon(). */
	@Override
	public void registerBlockIcons( IIconRegister iconRegistry )
	{
		// no images required.
	}
	/**
	 * Spawns the primed tiny-TNT entity at the block's center and plays the
	 * prime sound. Server side only; the {@code igniter} may be null (redstone).
	 */
	public void startFuse( World w, int x, int y, int z, EntityLivingBase igniter )
	{
		if( !w.isRemote )
		{
			EntityTinyTNTPrimed primedTinyTNTEntity = new EntityTinyTNTPrimed( w, x + 0.5F, y + 0.5F, z + 0.5F, igniter );
			w.spawnEntityInWorld( primedTinyTNTEntity );
			w.playSoundAtEntity( primedTinyTNTEntity, "game.tnt.primed", 1.0F, 1.0F );
		}
	}
	/** Ignites immediately if the block is placed into an already-powered position. */
	@Override
	public void onBlockAdded( World w, int x, int y, int z )
	{
		super.onBlockAdded( w, x, y, z );
		if( w.isBlockIndirectlyGettingPowered( x, y, z ) )
		{
			this.startFuse( w, x, y, z, null );
			w.setBlockToAir( x, y, z );
		}
	}
	/** Ignites when a neighbor change brings redstone power to this position. */
	@Override
	public void onNeighborBlockChange( World w, int x, int y, int z, Block id )
	{
		if( w.isBlockIndirectlyGettingPowered( x, y, z ) )
		{
			this.startFuse( w, x, y, z, null );
			w.setBlockToAir( x, y, z );
		}
	}
	/**
	 * Chain-explosion handling: spawns a primed entity with a randomized,
	 * shortened fuse (like vanilla TNT) attributed to the explosion's source.
	 */
	@Override
	public void onBlockDestroyedByExplosion( World w, int x, int y, int z, Explosion exp )
	{
		if( !w.isRemote )
		{
			EntityTinyTNTPrimed primedTinyTNTEntity = new EntityTinyTNTPrimed( w, x + 0.5F, y + 0.5F, z + 0.5F, exp.getExplosivePlacedBy() );
			primedTinyTNTEntity.fuse = w.rand.nextInt( primedTinyTNTEntity.fuse / 4 ) + primedTinyTNTEntity.fuse / 8;
			w.spawnEntityInWorld( primedTinyTNTEntity );
		}
	}
	/** A burning arrow hitting the block ignites it, credited to the shooter if living. */
	@Override
	public void onEntityCollidedWithBlock( World w, int x, int y, int z, Entity entity )
	{
		if( entity instanceof EntityArrow && !w.isRemote )
		{
			EntityArrow entityarrow = (EntityArrow) entity;
			if( entityarrow.isBurning() )
			{
				this.startFuse( w, x, y, z, entityarrow.shootingEntity instanceof EntityLivingBase ? (EntityLivingBase) entityarrow.shootingEntity : null );
				w.setBlockToAir( x, y, z );
			}
		}
	}
	/** Never drops itself when destroyed by an explosion (it detonates instead). */
	@Override
	public boolean canDropFromExplosion( Explosion exp )
	{
		return false;
	}
	/** Selection box matching the visual quarter-size model. */
	@Override
	public Iterable<AxisAlignedBB> getSelectedBoundingBoxesFromPool( World w, int x, int y, int z, Entity e, boolean isVisual )
	{
		return Collections.singletonList( AxisAlignedBB.getBoundingBox( 0.25, 0, 0.25, 0.75, 0.5, 0.75 ) );
	}
	/** Collision box matching the visual quarter-size model. */
	@Override
	public void addCollidingBlockToList( World w, int x, int y, int z, AxisAlignedBB bb, List<AxisAlignedBB> out, Entity e )
	{
		out.add( AxisAlignedBB.getBoundingBox( 0.25, 0, 0.25, 0.75, 0.5, 0.75 ) );
	}
}
| gpl-3.0 |
andrew-hoffman/halfnes | src/main/java/com/grapeshot/halfnes/mappers/TengenRamboMapper.java | 9024 | /*
* HalfNES by Andrew Hoffman
* Licensed under the GNU GPL Version 3. See LICENSE file
*/
package com.grapeshot.halfnes.mappers;
import com.grapeshot.halfnes.*;
/**
 * Tengen RAMBO-1 (iNES mapper 64): an MMC3-like mapper with three switchable
 * 8k PRG banks, two extra 1k CHR registers, and an IRQ counter that can be
 * clocked either per scanline (MMC3 style) or once per 4 CPU cycles.
 *
 * @author Andrew
 */
public class TengenRamboMapper extends Mapper {

    // register selected by the last even $8000-$9FFF write; odd writes target it
    private int whichbank = 0;
    // prgconfig: swaps which 8k PRG slot each register maps to
    // chrconfig: swaps the 2k/1k halves of the CHR address space
    // chrmode1k: all-1k CHR banking using the two extra registers (8/9)
    // irqmode:   false = scanline-clocked IRQ counter, true = CPU-cycle-clocked
    private boolean prgconfig = false, chrconfig = false,
            chrmode1k = false, irqmode = false;
    private int irqctrreload = 0;
    private int irqctr = 0;
    private boolean irqenable = false;
    private boolean irqreload = false;
    private int prgreg0 = 0, prgreg1 = 0, prgreg2 = 0;
    private int[] chrreg = new int[8];
    // true while we are holding the CPU's IRQ line asserted
    private boolean interrupted = false;

    /**
     * Sets up the initial PRG/CHR mapping: first the identity mapping used by
     * games that rely on power-on banks, then the last banks fixed at the top.
     */
    @Override
    public void loadrom() throws BadMapperException {
        //needs to be in every mapper. Fill with initial cfg
        super.loadrom();
        //on startup:
        for (int i = 0; i < 8; ++i) {
            prg_map[i] = (1024 * i);
            prg_map[i + 8] = (1024 * i);
            //yes this actually matters; MMC3 does NOT start up in a random state
            //(at least Smash TV and TMNT3 expect certain banks w/o even setting up mapper)
        }
        for (int i = 1; i <= 32; ++i) {
            prg_map[32 - i] = prgsize - (1024 * i);
        }
        for (int i = 0; i < 8; ++i) {
            chr_map[i] = 0;
        }
        setprgregs();
        //cpuram.setPrgRAMEnable(false);
    }

    /**
     * Register writes. $8000-$FFFF pairs of (even, odd) registers:
     * even selects bank register / mirroring / IRQ reload-acknowledge,
     * odd writes bank data / IRQ reload value / IRQ enable.
     */
    @Override
    public final void cartWrite(int addr, int data) {
        if (addr < 0x8000 || addr > 0xffff) {
            super.cartWrite(addr, data);
            return;
        }
        //bankswitches here
        //different register for even/odd writes
        if (((addr & (utils.BIT0)) != 0)) {
            //odd registers
            if ((addr >= 0x8000) && (addr <= 0x9fff)) {
                //bank change: route the data to whichever register was selected
                switch (whichbank) {
                    case 0:
                    case 1:
                    case 2:
                    case 3:
                    case 4:
                    case 5:
                        chrreg[whichbank] = data;
                        setupchr();
                        break;
                    case 6:
                        prgreg0 = data;
                        setprgregs();
                        break;
                    case 7:
                        //bank 7 always swappable, always in same place
                        prgreg1 = data;
                        setprgregs();
                        break;
                    case 8:
                    case 9:
                        //extra 1k chr banks (used only in 1k chr mode).
                        //remap immediately, like registers 0-5 do; previously the
                        //new value only took effect after some unrelated write
                        //happened to call setupchr() again.
                        chrreg[whichbank - 2] = data;
                        setupchr();
                        break;
                    case 0xf:
                        prgreg2 = data;
                        setprgregs();
                        break;
                }
            } else if ((addr >= 0xA000) && (addr <= 0xbfff)) {
                //prg ram write protect
                //cpuram.setPrgRAMEnable(!utils.getbit(data, 7));
            } else if ((addr >= 0xc000) && (addr <= 0xdfff)) {
                //any value here reloads irq counter; bit 0 picks the clock source
                irqreload = true;
                irqmode = ((data & (utils.BIT0)) != 0);
            } else if ((addr >= 0xe000) && (addr <= 0xffff)) {
                //any value here enables interrupts
                irqenable = true;
            }
        } else {
            //even registers
            if ((addr >= 0x8000) && (addr <= 0x9fff)) {
                //bank select
                whichbank = data & 0xf;
                chrmode1k = ((data & (utils.BIT5)) != 0);
                prgconfig = ((data & (utils.BIT6)) != 0);
                //if bit is false, 8000-9fff swappable and c000-dfff fixed to 2nd to last bank
                //if bit is true, c000-dfff swappable and 8000-9fff fixed to 2nd to last bank
                chrconfig = ((data & (utils.BIT7)) != 0);
                //if false: 2 2k banks @ 0000-0fff, 4 1k banks in 1000-1fff
                //if true: 4 1k banks @ 0000-0fff, 2 2k banks @ 1000-1fff
                setupchr();
                setprgregs();
            } else if ((addr >= 0xA000) && (addr <= 0xbfff)) {
                //mirroring setup (ignored for four-screen carts)
                if (scrolltype != MirrorType.FOUR_SCREEN_MIRROR) {
                    setmirroring(((data & (utils.BIT0)) != 0) ? MirrorType.H_MIRROR : MirrorType.V_MIRROR);
                }
            } else if ((addr >= 0xc000) && (addr <= 0xdfff)) {
                //value written here used to reload irq counter _@ end of scanline_
                irqctrreload = data;
                irqreload = true;
            } else if ((addr >= 0xe000) && (addr <= 0xffff)) {
                //any value here disables IRQ and acknowledges a pending one
                if (interrupted) {
                    --cpu.interrupt;
                }
                interrupted = false;
                irqenable = false;
                irqctr = irqctrreload;
            }
        }
    }

    /** Rebuilds the 8x1k CHR map from chrreg[] per the current chrconfig/chrmode1k. */
    private void setupchr() {
        if (chrconfig) {
            if (chrmode1k) {
                setppubank(1, 0, chrreg[2]);
                setppubank(1, 1, chrreg[3]);
                setppubank(1, 2, chrreg[4]);
                setppubank(1, 3, chrreg[5]);
                setppubank(1, 4, chrreg[0]);
                setppubank(1, 5, chrreg[6]);
                setppubank(1, 6, chrreg[1]);
                setppubank(1, 7, chrreg[7]);
            } else {
                setppubank(1, 0, chrreg[2]);
                setppubank(1, 1, chrreg[3]);
                setppubank(1, 2, chrreg[4]);
                setppubank(1, 3, chrreg[5]);
                //Lowest bit of bank number IS IGNORED for the 2k banks
                setppubank(2, 4, (chrreg[0] >> 1) << 1);
                setppubank(2, 6, (chrreg[1] >> 1) << 1);
            }
        } else {
            if (chrmode1k) {
                setppubank(1, 0, chrreg[0]);
                setppubank(1, 1, chrreg[6]);
                setppubank(1, 2, chrreg[1]);
                setppubank(1, 3, chrreg[7]);
                setppubank(1, 4, chrreg[2]);
                setppubank(1, 5, chrreg[3]);
                setppubank(1, 6, chrreg[4]);
                setppubank(1, 7, chrreg[5]);
            } else {
                setppubank(1, 4, chrreg[2]);
                setppubank(1, 5, chrreg[3]);
                setppubank(1, 6, chrreg[4]);
                setppubank(1, 7, chrreg[5]);
                setppubank(2, 0, (chrreg[0] >> 1) << 1);
                setppubank(2, 2, (chrreg[1] >> 1) << 1);
            }
        }
    }

    /**
     * Maps the three 8k PRG registers into CPU space; c000-dfff always holds
     * prgreg2, and prgconfig decides which of the other two slots is which.
     */
    private void setprgregs() {
        if (!prgconfig) {
            //map r6 to first 8k, r7 to 2nd, rf to 3rd
            for (int i = 0; i < 8; ++i) {
                prg_map[i] = (1024 * (i + (prgreg0 * 8))) % prgsize;
                prg_map[i + 8] = (1024 * (i + (prgreg1 * 8))) % prgsize;
                prg_map[i + 16] = (1024 * (i + (prgreg2 * 8))) % prgsize;
            }
        } else {
            //map rf to 1st 8k, r6 to 2nd, r7 to 3rd
            for (int i = 0; i < 8; ++i) {
                prg_map[i] = (1024 * (i + (prgreg2 * 8))) % prgsize;
                prg_map[i + 8] = (1024 * (i + (prgreg0 * 8))) % prgsize;
                prg_map[i + 16] = (1024 * (i + (prgreg1 * 8))) % prgsize;
            }
        }
    }

    /** Scanline-mode IRQ clocking: one counter tick per rendered scanline. */
    @Override
    public void notifyscanline(int scanline) {
        if (irqmode) {
            //counter is CPU-cycle driven instead; see cpucycle()
            return;
        }
        //Scanline counter
        if (scanline > 239 && scanline != 261) {
            //clocked on LAST line of vblank and all lines of frame. Not on 240.
            return;
        }
        if (!ppu.mmc3CounterClocking()) {
            return;
        }
        clockscanlinecounter();
    }

    // CPU cycles carried over between cpucycle() calls (always 0..3 after a call)
    int remainder;
    // set when the counter expired; the IRQ line is asserted one cycle later
    boolean intnextcycle = false;

    /**
     * Per-CPU-cycle work: raises a pending IRQ, and in cycle mode clocks the
     * counter once per 4 CPU cycles.
     */
    @Override
    public void cpucycle(int cycles) {
        if (intnextcycle) {
            intnextcycle = false;
            if (!interrupted) {
                ++cpu.interrupt;
                interrupted = true;
            }
        }
        if (!irqmode) {
            return;
        }
        //cycle IRQ mode clocks the counter once every 4 CPU cycles. Consume
        //whole groups of 4 and carry the rest forward; the previous loop
        //re-scanned the carried remainder from i = 0 on every call, which
        //clocked the counter more often than once per 4 cycles.
        remainder += cycles;
        while (remainder >= 4) {
            remainder -= 4;
            clockscanlinecounter();
        }
    }

    /**
     * One IRQ-counter tick. A pending reload loads irqctrreload + 1 (the
     * RAMBO-1 reload is delayed by one clock relative to MMC3 — presumably the
     * reason for the +1; verify against hardware notes). Reaching zero with
     * interrupts enabled schedules an IRQ for the next CPU cycle.
     */
    public void clockscanlinecounter() {
        if (irqreload) {
            irqreload = false;
            irqctr = irqctrreload + 1;
        } else if (irqctr == 0) {
            irqctr = irqctrreload;
        } else {
            if (--irqctr == 0 && irqenable) {
                intnextcycle = true;
            }
        }
    }

    /** Maps banksize x 1k CHR pages starting at PPU page bankpos to ROM bank banknum. */
    private void setppubank(int banksize, int bankpos, int banknum) {
        for (int i = 0; i < banksize; ++i) {
            chr_map[i + bankpos] = (1024 * ((banknum) + i)) % chrsize;
        }
    }
}
| gpl-3.0 |
kevinwang/minecarft | lwjgl-source-2.8.2/src/java/org/lwjgl/util/ReadableColor.java | 4020 | /*
* Copyright (c) 2002-2008 LWJGL Project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'LWJGL' nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.lwjgl.util;
import java.nio.ByteBuffer;
/**
 * Read-only interface for Colors. Component getters return values in the
 * 0..255 range; byte getters return the raw (signed) byte representation.
 * @author $Author: spasi $
 * @version $Revision: 3418 $
 * $Id: ReadableColor.java 3418 2010-09-28 21:11:35Z spasi $
 */
public interface ReadableColor {

	/**
	 * Return the red component (0..255)
	 * @return int
	 */
	int getRed();

	/**
	 * Return the green component (0..255)
	 * @return int
	 */
	int getGreen();

	/**
	 * Return the blue component (0..255)
	 * @return int
	 */
	int getBlue();

	/**
	 * Return the alpha component (0..255)
	 * @return int
	 */
	int getAlpha();

	/**
	 * Return the red component as a signed byte
	 * @return byte
	 */
	byte getRedByte();

	/**
	 * Return the green component as a signed byte
	 * @return byte
	 */
	byte getGreenByte();

	/**
	 * Return the blue component as a signed byte
	 * @return byte
	 */
	byte getBlueByte();

	/**
	 * Return the alpha component as a signed byte
	 * @return byte
	 */
	byte getAlphaByte();

	/**
	 * Write the RGBA color directly out to a ByteBuffer
	 * @param dest the buffer to write to
	 */
	void writeRGBA(ByteBuffer dest);

	/**
	 * Write the RGB color directly out to a ByteBuffer
	 * @param dest the buffer to write to
	 */
	void writeRGB(ByteBuffer dest);

	/**
	 * Write the ABGR color directly out to a ByteBuffer
	 * @param dest the buffer to write to
	 */
	void writeABGR(ByteBuffer dest);

	/**
	 * Write the BGR color directly out to a ByteBuffer
	 * @param dest the buffer to write to
	 */
	void writeBGR(ByteBuffer dest);

	/**
	 * Write the BGRA color directly out to a ByteBuffer
	 * @param dest the buffer to write to
	 */
	void writeBGRA(ByteBuffer dest);

	/**
	 * Write the ARGB color directly out to a ByteBuffer
	 * @param dest the buffer to write to
	 */
	void writeARGB(ByteBuffer dest);

	/*
	 * Some standard colors
	 */
	ReadableColor RED = new Color(255, 0, 0);
	ReadableColor ORANGE = new Color(255, 128, 0);
	ReadableColor YELLOW = new Color(255, 255, 0);
	ReadableColor GREEN = new Color(0, 255, 0);
	ReadableColor CYAN = new Color(0, 255, 255);
	ReadableColor BLUE = new Color(0, 0, 255);
	ReadableColor PURPLE = new Color(255, 0, 255);
	ReadableColor WHITE = new Color(255, 255, 255);
	ReadableColor BLACK = new Color(0, 0, 0);
	ReadableColor LTGREY = new Color(192, 192, 192);
	ReadableColor DKGREY = new Color(64, 64, 64);
	ReadableColor GREY = new Color(128, 128, 128);

}
| gpl-3.0 |