repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
bowring/ET_Redux | src/main/java/org/earthtime/dataDictionaries/TemplatesForCsvImport.java | 4169 | /*
* TemplatesForCsvImport.java
*
* Copyright 2006-2018 James F. Bowring, CIRDLES.org, and Earth-Time.org
*
* Licensed under the Apache License,Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.dataDictionaries;
/**
*
* @author James F. Bowring
*/
/**
 * Column-header templates describing the expected field order of the various
 * legacy CSV sample-file formats that ET_Redux can import. Each constant is a
 * single comma-separated header line; importers match incoming files against
 * these exact strings, so the values must not change.
 */
public class TemplatesForCsvImport {

    /**
     * ID-TIMS legacy sample format used by MIT (added September 2009 for
     * batch upload of CSV sample files).
     */
    public static final String IDTIMSLegacyDataSampleFieldNames_MIT =
            "Fraction,206/238,2-sig%,207/235,2-sig%,207/206,2-sig%,"
            + "rho 6/8-7/5,206/204,208/206,Pb*/Pbc,Pb* pg,Pbc pg,conc U,conc Pb,Th/U samp,"
            + "frac mass,age206/238,2-sig,age207/235,2-sig,age207/206,2-sig,age206/238xTh,2-sig,"
            + "age207/235xPa,2-sig,age207/206xTh,2-sig,207/206xPa,2-sig";

    /**
     * Multi-collector LA-ICP-MS legacy sample format from the University of
     * Arizona / George Gehrels (added April 2010).
     */
    public static final String LAICPMSLegacyDataSampleFieldNames_MC_UA =
            "Analysis,U(ppm),206/204,U/Th,206/207,1-sig%,207/235,1-sig%,206/238,1-sig%,"
            + "rho 6/8-7/5,"
            + "age206/238,1-sig,age207/235,1-sig,age207/206,1-sig,Best age,1-sig";

    /**
     * Single-collector LA-ICP-MS legacy format, variant A, for Jeff VerVoort
     * at Washington State (added May 2010).
     */
    public static final String LAICPMSLegacyDataSampleFieldNames_SC_WSU_vA =
            "Analysis,207Pb/235U,1-sig abs,206Pb/238U,1-sig abs,207Pb/206Pb,1-sig abs,"
            + "Th/U,207Pb/235U,1-sig abs,206Pb/238U,1-sig abs,ignored,ignored,207Pb/206Pb,1-sig abs,"
            + "ignored,ignored,ignored,ignored,ignored,rho 6/8-7/5";

    /**
     * Single-collector LA-ICP-MS legacy format, variant B, for Jeff VerVoort
     * at Washington State (added September 2010).
     */
    public static final String LAICPMSLegacyDataSampleFieldNames_SC_WSU_vB =
            "Analysis,207Pb/235U,1-sig abs,206Pb/238U,1-sig abs,207Pb/206Pb,1-sig abs,"
            + "Th/U,207Pb/235U,1-sig abs,206Pb/238U,1-sig abs,207Pb/206Pb,1-sig abs,"
            + "ignored,ignored,ignored,ignored,ignored,ignored,ignored,ignored,ignored,rho 6/8-7/5";

    /**
     * LA-ICP-MS legacy format used by Matt Horstwood at NIGL (added June 2010).
     */
    public static final String LAICPMSLegacyDataSampleFieldNames_NIGL =
            "Analysis,Pb(ppm),U(ppm),206/204,1-sig%,207/206,1-sig%,207/235,1-sig%,206/238,1-sig%,"
            + "rho 6/8-7/5,"
            + "age207/206,2-sig-abs,age206/238,2-sig-abs,age207/235,2-sig-abs";

    /**
     * Generic U-Pb isotopic legacy format, variant A (added July 2012 for
     * Blair, Urs, etc.). Note the intentional trailing space after the final
     * "2-sig " column.
     */
    public static final String GenericUPbIsotopicLegacyDataSampleFieldNames_A =
            "Sample and fractions,mass(g),# of grains,Pb conc (ppm),U conc (ppm),Th/U,Pb*/Pbc,Pbc (pg),"
            + "206Pb/204Pb,208Pb/206Pb,207Pb/206Pb,2-sig%,207Pb/235Pb,2-sig%,206Pb/238U,2-sig%,rho 6/8-7/5,"
            + "age 207Pb/206Pb,2-sig,age 207Pb/235U,2-sig,age 206Pb/238U,2-sig ";

    /**
     * Project-of-legacy-samples format for UCSB LASS, variant A, including
     * trace-element concentration columns.
     */
    public static final String ProjectOfLegacySamplesFieldNames_UCSB_LASS_A =
            "sample name,mineral type,grain number,spot number,position,Xpos.,Y pos.,"
            + "Pb ppm,U ppm,Th ppm,Th/U,206Pb/204Pb,2s. Abs.,207Pb/206Pb,2s. Abs.,238U/206Pb,2s. Abs.,"
            + "207Pb/235U,2s. Abs.,206Pb/238U,2s. Abs.,7/35 vs. 6/38 Rho,208Pb/232Th,2s. Abs.,"
            + "207Pb/206Pb (Ma),2s abs.,207Pb/235U (Ma),2s abs.,206Pb/238U (Ma),2s abs.,206Pb/238U <Th> (Ma),"
            + "2s abs.,208Pb/232Th (Ma),2s abs.,Si ppm,P ppm,Ca ppm,Ti ppm,Rb ppm,Sr ppm,Y ppm,Zr ppm,"
            + "La ppm,Ce ppm,Pr ppm,Nd ppm,Sm ppm,Eu ppm,Gd ppm,Tb ppm,Dy ppm,Ho ppm,Er ppm,Tm ppm,Yb ppm,Lu ppm,Hf ppm";
}
| apache-2.0 |
joachimhs/Montric | Montric.Api/src/main/java/org/eurekaj/api/datatypes/PluginConfiguration.java | 310 | package org.eurekaj.api.datatypes;
/**
 * Datatype describing a stored plugin configuration and the account it
 * belongs to.
 *
 * @author joahaa (created 2/3/13)
 */
public interface PluginConfiguration {

    /**
     * Returns the raw plugin configuration payload as a string.
     */
    String getPluginConfiguration();

    /**
     * Returns the name of the account this configuration belongs to.
     */
    String getAccountName();
}
| apache-2.0 |
mayl8822/binnavi | src/main/java/com/google/security/zynamics/binnavi/Gui/GraphSettings/CDisassemblyPanel.java | 2119 | // Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.security.zynamics.binnavi.Gui.GraphSettings;
import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.ZyGraph.ZyGraphViewSettings;
import java.awt.GridLayout;
import javax.swing.JComboBox;
import javax.swing.border.TitledBorder;
/**
* Settings panel where disassembly settings can be configured.
*/
/**
 * Settings panel where disassembly settings can be configured.
 */
public final class CDisassemblyPanel extends CAbstractSettingsPanel {
  /**
   * Combo box used to switch simplified memory access on or off; index 0 means
   * "on".
   */
  private final JComboBox<String> simplifiedMemoryAccessBox = new JComboBox<String>();

  /**
   * Creates a new disassembly settings panel.
   *
   * @param settings Settings object that provides the graph settings to display.
   */
  public CDisassemblyPanel(final ZyGraphViewSettings settings) {
    super(new GridLayout(1, 1));
    setBorder(new TitledBorder("Disassembly Settings"));
    Preconditions.checkNotNull(settings, "IE00666: Settings argument can not be null");
    CSettingsPanelBuilder.addComboBox(this, simplifiedMemoryAccessBox,
        "Simplified Variable Access" + ":",
        "Simplifies variable access instructions (example: 'mov eax, [esp + var_4]' "
            + "is turned into 'mov eax, var_4)",
        settings.getDisplaySettings().getSimplifiedVariableAccess());
  }

  @Override
  public boolean updateSettings(final ZyGraphViewSettings settings) {
    // Index 0 of the combo box corresponds to "enabled".
    final boolean simplifiedAccess = simplifiedMemoryAccessBox.getSelectedIndex() == 0;
    settings.getDisplaySettings().setSimplifiedVariableAccess(simplifiedAccess);
    return false;
  }
}
| apache-2.0 |
lsmall/flowable-engine | modules/flowable-content-rest/src/test/java/org/flowable/content/rest/conf/common/RestConfiguration.java | 1386 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.content.rest.conf.common;
import org.flowable.common.rest.resolver.ContentTypeResolver;
import org.flowable.common.rest.resolver.DefaultContentTypeResolver;
import org.flowable.content.rest.ContentRestResponseFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* @author Tijs Rademakers
*/
/**
 * Common Spring configuration for the content REST tests: exposes the
 * response factory and content-type resolver beans.
 *
 * @author Tijs Rademakers
 */
@Configuration
public class RestConfiguration {

    /** Factory that builds REST representations of content engine objects. */
    @Bean
    public ContentRestResponseFactory restContentResponseFactory() {
        return new ContentRestResponseFactory();
    }

    /** Resolver mapping file extensions / formats to HTTP content types. */
    @Bean
    public ContentTypeResolver contentTypeResolver() {
        return new DefaultContentTypeResolver();
    }
}
| apache-2.0 |
OuZhencong/log4j2 | log4j-core/src/test/java/org/apache/logging/log4j/core/FileConfigTest.java | 3047 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.util.Map;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.ConfigurationFactory;
import org.apache.logging.log4j.status.StatusLogger;
import org.apache.logging.log4j.test.appender.ListAppender;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
*/
/**
 * Verifies that log4j2 reconfigures itself when the configuration file's
 * modification time changes. Timing-sensitive: relies on the monitor interval
 * configured in log4j-test2.xml.
 */
public class FileConfigTest {

    // Path of the XML configuration under test (copied into target/ by the build).
    private static final String CONFIG = "target/test-classes/log4j-test2.xml";
    // Configuration captured before the test runs; compared by identity afterwards.
    private static Configuration config;
    private static ListAppender app;
    private static LoggerContext ctx;

    private final org.apache.logging.log4j.Logger logger = LogManager.getLogger("LoggerTest");

    @BeforeClass
    public static void setupClass() {
        // Point log4j at the test configuration before obtaining the context.
        System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY, CONFIG);
        ctx = (LoggerContext) LogManager.getContext(false);
    }

    @AfterClass
    public static void cleanupClass() {
        // Restore global state so later test classes are unaffected.
        System.clearProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY);
        ctx.reconfigure();
        StatusLogger.getLogger().reset();
    }

    @Before
    public void before() {
        // Snapshot the current configuration and locate the "List" appender.
        config = ctx.getConfiguration();
        for (final Map.Entry<String, Appender> entry : config.getAppenders().entrySet()) {
            if (entry.getKey().equals("List")) {
                app = (ListAppender) entry.getValue();
                break;
            }
        }
        assertNotNull("No Appender", app);
        app.clear();
    }

    @Test
    public void testReconfiguration() throws Exception {
        final File file = new File(CONFIG);
        final long orig = file.lastModified();
        // Bump the mtime 10s forward so the file monitor sees a change.
        final long newTime = orig + 10000;
        file.setLastModified(newTime);
        // Wait past the monitor interval, then log enough events to trigger
        // the reconfiguration check. NOTE(review): the 6s sleep and 17
        // iterations presumably match the monitorInterval in log4j-test2.xml —
        // confirm before changing either value.
        Thread.sleep(6000);
        for (int i = 0; i < 17; ++i) {
            logger.debug("Reconfigure");
        }
        final Configuration cfg = ctx.getConfiguration();
        assertNotNull("No configuration", cfg);
        // Identity comparison on purpose: a reconfiguration must produce a
        // *new* Configuration instance.
        assertTrue("Reconfiguration failed", cfg != config);
    }
}
| apache-2.0 |
ilinum/intellij-scala | SDK/yourkitProbes/src/org/jetbrains/plugins/scala/probes/Utilities.java | 5397 | package org.jetbrains.plugins.scala.probes;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
* @author Nikolay.Tropin
*/
public class Utilities {
public static String currentStackTrace() {
StackTraceElement[] stackTrace = new Exception().getStackTrace();
StringBuilder buffer = new StringBuilder();
for (int i = 2; i < stackTrace.length; i++) {
StackTraceElement element = stackTrace[i];
buffer.append(element.toString()).append("\n");
}
return buffer.toString();
}
public static String getContainingFileName(Object psiElem) {
Object file = invokeMethod("getContainingFile", null, psiElem);
return file != null ? file.toString() : "null";
}
public static String refName(Object ref) {
return invokeMethod("refName", "not found", ref).toString();
}
public static String getName(Object namedPsiElem) {
return (String) invokeMethod("getName", "not found", namedPsiElem);
}
public static String getTargetName(Object resolveResult) {
Object element = invokeMethod("getElement", null, resolveResult);
String fqn = getFQN(element);
return element != null ? element.getClass().getSimpleName() + ": " + fqn : "not found";
}
public static String getFQN(Object namedPsiElem) {
Class<?> aClass = namedPsiElem.getClass();
Method getQN = findMethodByName(aClass, "getQualifiedName");
if (getQN != null) {
Object result = invokeMethod(getQN, "not found", namedPsiElem);
return result != null ? (String) result : getName(namedPsiElem);
}
Method getContainingClass = findMethodByName(aClass, "getContainingClass");
String name = invokeMethod("getName", "not found", namedPsiElem).toString();
Object containingClass = getContainingClass != null ? invokeMethod(getContainingClass, null, namedPsiElem): null;
String qualifier = containingClass != null ? invokeMethod("getQualifiedName", "", containingClass) + "." : "";
return qualifier + "." + name;
}
public static int getOffset(Object psiElem) {
Object textRange = invokeMethod("getTextRange", null, psiElem);
if (textRange != null) {
return (Integer) invokeMethod("getStartOffset", -1, textRange);
}
else {
return -1;
}
}
public static String getText(Object psiElem) {
return (String) invokeMethod("getText", "not found", psiElem);
}
public static String toString(Object o) {
return (String) invokeMethod("toString", "toString failed", o);
}
public static String name(Object psiElem) {
return invokeMethod("name", "not found", psiElem).toString();
}
public static String presentableTextFromTypeResult(Object typeResult) {
Class<?> aClass = typeResult.getClass();
boolean success = aClass.getSimpleName().equals("Success");
boolean some = aClass.getSimpleName().equals("Some");
if (success || some) {
Object scType = invokeMethod("get", null, typeResult);
if (scType != null) return invokeMethod("presentableText", "not found", scType).toString();
}
return "not found";
}
public static String presentableText(Object scType) {
return (String) invokeMethod("presentableText", "not found", scType);
}
public static String firstComponentText(Object tuple) {
return invokeMethod("_1", "not found", tuple).toString();
}
public static boolean isStub(Object psiElement) {
return invokeMethod("getStub", null, psiElement) != null;
}
private static Object invokeMethod(String methodName, Object dflt, Object obj, Object... args) {
Class<?> aClass = obj.getClass();
Method method = findMethodByName(aClass, methodName);
if (method == null) throw new NoSuchMethodError("No method " + methodName + " found in " + obj.toString());
return invokeMethod(method, dflt, obj, args);
}
private static Object invokeMethod(Method method, Object dflt, Object obj, Object... args) {
try {
return method.invoke(obj, args);
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
printException(e);
}
return dflt;
}
private static Method findMethodByName(Class<?> aClass, String methodName) {
for (Method method : aClass.getMethods()) {
if (method.getName().equals(methodName)) {
return method;
}
}
return null;
}
private static void printException(Throwable e) {
Throwable cause = e.getCause();
if (cause != null) System.out.println(e.toString() + " caused by " + cause.toString());
else System.out.println(e.toString());
}
public static long gcTime() {
long res = 0;
for (GarbageCollectorMXBean bean : ManagementFactory.getGarbageCollectorMXBeans()) {
long collectionTime = bean.getCollectionTime();
if (collectionTime > 0)
res += collectionTime;
}
return res;
}
}
| apache-2.0 |
Andproject/tools_gerrit | gerrit-httpd/src/main/java/com/google/gerrit/httpd/rpc/patch/AddReviewer.java | 5929 | // Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.httpd.rpc.patch;
import com.google.gerrit.common.data.ReviewerResult;
import com.google.gerrit.common.data.ApprovalType;
import com.google.gerrit.common.data.ApprovalTypes;
import com.google.gerrit.httpd.rpc.Handler;
import com.google.gerrit.httpd.rpc.changedetail.ChangeDetailFactory;
import com.google.gerrit.reviewdb.Account;
import com.google.gerrit.reviewdb.ApprovalCategory;
import com.google.gerrit.reviewdb.Change;
import com.google.gerrit.reviewdb.PatchSet;
import com.google.gerrit.reviewdb.PatchSetApproval;
import com.google.gerrit.reviewdb.ReviewDb;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.account.AccountResolver;
import com.google.gerrit.server.mail.AddReviewerSender;
import com.google.gerrit.server.project.ChangeControl;
import com.google.gwtorm.client.OrmException;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * Handler that adds a set of reviewers (given as names or e-mail addresses)
 * to a change: resolves each input to an account, validates visibility,
 * records a zero-value "dummy" approval for each new reviewer, and e-mails
 * them about the change.
 */
class AddReviewer extends Handler<ReviewerResult> {
    interface Factory {
        AddReviewer create(Change.Id changeId, Collection<String> nameOrEmails);
    }

    private final AddReviewerSender.Factory addReviewerSenderFactory;
    private final AccountResolver accountResolver;
    private final ChangeControl.Factory changeControlFactory;
    private final ChangeDetailFactory.Factory changeDetailFactory;
    private final ReviewDb db;
    private final IdentifiedUser currentUser;
    private final IdentifiedUser.GenericFactory identifiedUserFactory;
    // Approval category used for the placeholder approval rows; see the
    // constructor for how it is chosen.
    private final ApprovalCategory.Id addReviewerCategoryId;
    private final Change.Id changeId;
    private final Collection<String> reviewers;

    @Inject
    AddReviewer(final AddReviewerSender.Factory addReviewerSenderFactory,
        final AccountResolver accountResolver,
        final ChangeControl.Factory changeControlFactory, final ReviewDb db,
        final IdentifiedUser.GenericFactory identifiedUserFactory,
        final IdentifiedUser currentUser, final ApprovalTypes approvalTypes,
        final ChangeDetailFactory.Factory changeDetailFactory,
        @Assisted final Change.Id changeId,
        @Assisted final Collection<String> nameOrEmails) {
        this.addReviewerSenderFactory = addReviewerSenderFactory;
        this.accountResolver = accountResolver;
        this.db = db;
        this.changeControlFactory = changeControlFactory;
        this.identifiedUserFactory = identifiedUserFactory;
        this.currentUser = currentUser;
        this.changeDetailFactory = changeDetailFactory;

        // Use the last configured approval type's category for the dummy
        // approvals. NOTE(review): presumably the last category is the
        // conventional "review" category in this schema — confirm against
        // the ApprovalTypes ordering before changing.
        final List<ApprovalType> allTypes = approvalTypes.getApprovalTypes();
        addReviewerCategoryId =
            allTypes.get(allTypes.size() - 1).getCategory().getId();

        this.changeId = changeId;
        this.reviewers = nameOrEmails;
    }

    /**
     * Resolves, validates and adds the reviewers. Per-input failures
     * (unknown account, inactive account, change not visible) are collected
     * as errors on the returned {@link ReviewerResult} rather than aborting
     * the whole operation.
     */
    @Override
    public ReviewerResult call() throws Exception {
        final Set<Account.Id> reviewerIds = new HashSet<Account.Id>();
        final ChangeControl control = changeControlFactory.validateFor(changeId);

        final ReviewerResult result = new ReviewerResult();
        for (final String nameOrEmail : reviewers) {
            final Account account = accountResolver.find(nameOrEmail);
            if (account == null) {
                result.addError(new ReviewerResult.Error(
                    ReviewerResult.Error.Type.ACCOUNT_NOT_FOUND, nameOrEmail));
                continue;
            }
            if (!account.isActive()) {
                result.addError(new ReviewerResult.Error(
                    ReviewerResult.Error.Type.ACCOUNT_INACTIVE, nameOrEmail));
                continue;
            }

            // The reviewer must themselves be able to see the change.
            final IdentifiedUser user = identifiedUserFactory.create(account.getId());
            if (!control.forUser(user).isVisible()) {
                result.addError(new ReviewerResult.Error(
                    ReviewerResult.Error.Type.CHANGE_NOT_VISIBLE, nameOrEmail));
                continue;
            }

            reviewerIds.add(account.getId());
        }

        if (reviewerIds.isEmpty()) {
            return result;
        }

        // Add the reviewers to the database
        //
        final Set<Account.Id> added = new HashSet<Account.Id>();
        final List<PatchSetApproval> toInsert = new ArrayList<PatchSetApproval>();
        final PatchSet.Id psid = control.getChange().currentPatchSetId();
        for (final Account.Id reviewer : reviewerIds) {
            if (!exists(psid, reviewer)) {
                // This reviewer has not entered an approval for this change yet.
                //
                final PatchSetApproval myca = dummyApproval(psid, reviewer);
                toInsert.add(myca);
                added.add(reviewer);
            }
        }
        db.patchSetApprovals().insert(toInsert);

        // Email the reviewers
        //
        final AddReviewerSender cm;
        cm = addReviewerSenderFactory.create(control.getChange());
        cm.setFrom(currentUser.getAccountId());
        cm.addReviewers(added);
        cm.send();

        result.setChange(changeDetailFactory.create(changeId).call());
        return result;
    }

    /** True if the reviewer already has an approval row on this patch set. */
    private boolean exists(final PatchSet.Id patchSetId,
        final Account.Id reviewerId) throws OrmException {
        return db.patchSetApprovals().byPatchSetUser(patchSetId, reviewerId)
            .iterator().hasNext();
    }

    /**
     * Builds a zero-value approval that merely marks the account as a
     * reviewer of the patch set.
     */
    private PatchSetApproval dummyApproval(final PatchSet.Id patchSetId,
        final Account.Id reviewerId) {
        return new PatchSetApproval(new PatchSetApproval.Key(patchSetId,
            reviewerId, addReviewerCategoryId), (short) 0);
    }
}
| apache-2.0 |
williamchengit/TestRepo | solr/core/src/test/org/apache/solr/cloud/AsyncMigrateRouteKeyTest.java | 4596 | package org.apache.solr.cloud;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.params.CollectionParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.junit.Before;
import java.io.IOException;
/**
 * Variant of {@link MigrateRouteKeyTest} that drives the MIGRATE collection
 * API asynchronously (async=...) and polls the REQUESTSTATUS API until the
 * task completes.
 */
public class AsyncMigrateRouteKeyTest extends MigrateRouteKeyTest {

    public AsyncMigrateRouteKeyTest() {
        schemaString = "schema15.xml"; // we need a string id
    }

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
    }

    public void doTest() throws Exception {
        waitForThingsToLevelOut(15);

        multipleShardMigrateTest();
        printLayout();
    }

    /**
     * Polls REQUESTSTATUS for the given async task id, first expecting it to
     * show up as running, then waiting until it is reported completed.
     */
    protected void checkAsyncRequestForCompletion(String asyncId) throws SolrServerException, IOException {
        ModifiableSolrParams params;
        String message;

        params = new ModifiableSolrParams();
        params.set("action", CollectionParams.CollectionAction.REQUESTSTATUS.toString());
        params.set(OverseerCollectionProcessor.REQUESTID, asyncId);

        // This task takes long enough to run. Also check for the current state of the task to be running.
        message = sendStatusRequestWithRetry(params, 2);
        assertEquals("found " + asyncId + " in running tasks", message);

        // Now wait until the task actually completes successfully/fails.
        message = sendStatusRequestWithRetry(params, 20);
        assertEquals("Task " + asyncId + " not found in completed tasks.",
            "found " + asyncId + " in completed tasks", message);
    }

    @Override
    protected void invokeMigrateApi(String sourceCollection, String splitKey, String targetCollection) throws SolrServerException, IOException {
        ModifiableSolrParams params = new ModifiableSolrParams();
        String asyncId = "20140128";
        params.set(CollectionParams.ACTION, CollectionParams.CollectionAction.MIGRATE.toString());
        params.set("collection", sourceCollection);
        params.set("target.collection", targetCollection);
        params.set("split.key", splitKey);
        params.set("forward.timeout", 45);
        params.set("async", asyncId);

        invoke(params);

        checkAsyncRequestForCompletion(asyncId);
    }

    /**
     * Helper method to send a status request with specific retry limit and return
     * the message/null from the success response. Sleeps 1s between retries.
     */
    private String sendStatusRequestWithRetry(ModifiableSolrParams params, int maxCounter)
        throws SolrServerException, IOException {
        NamedList status = null;
        String state = null;
        String message = null;
        NamedList r;
        while (maxCounter-- > 0) {
            r = sendRequest(params);
            status = (NamedList) r.get("status");
            state = (String) status.get("state");
            message = (String) status.get("msg");

            if (state.equals("completed") || state.equals("failed"))
                return (String) status.get("msg");

            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // FIX: restore the interrupt flag instead of swallowing it,
                // so callers / the test framework can observe cancellation.
                Thread.currentThread().interrupt();
            }
        }
        // Retries exhausted: return the last message seen (may be null).
        return message;
    }

    protected NamedList sendRequest(ModifiableSolrParams params) throws SolrServerException, IOException {
        SolrRequest request = new QueryRequest(params);
        request.setPath("/admin/collections");

        String baseUrl = ((HttpSolrServer) shardToJetty.get(SHARD1).get(0).client.solrClient)
            .getBaseURL();
        baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());

        HttpSolrServer baseServer = null;
        try {
            baseServer = new HttpSolrServer(baseUrl);
            baseServer.setConnectionTimeout(15000);
            return baseServer.request(request);
        } finally {
            // FIX: guard against NPE when the server construction itself
            // threw and baseServer was never assigned.
            if (baseServer != null) {
                baseServer.shutdown();
            }
        }
    }
}
| apache-2.0 |
barnyard/pi | freepastry/src/rice/pastry/direct/NodeRecord.java | 2087 | /*******************************************************************************
"FreePastry" Peer-to-Peer Application Development Substrate
Copyright 2002-2007, Rice University. Copyright 2006-2007, Max Planck Institute
for Software Systems. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of Rice University (RICE), Max Planck Institute for Software
Systems (MPI-SWS) nor the names of its contributors may be used to endorse or
promote products derived from this software without specific prior written
permission.
This software is provided by RICE, MPI-SWS and the contributors on an "as is"
basis, without any representations or warranties of any kind, express or implied
including, but not limited to, representations or warranties of
non-infringement, merchantability or fitness for a particular purpose. In no
event shall RICE, MPI-SWS or contributors be liable for any direct, indirect,
incidental, special, exemplary, or consequential damages (including, but not
limited to, procurement of substitute goods or services; loss of use, data, or
profits; or business interruption) however caused and on any theory of
liability, whether in contract, strict liability, or tort (including negligence
or otherwise) arising in any way out of the use of this software, even if
advised of the possibility of such damage.
*******************************************************************************/
/*
* Created on Nov 8, 2005
*/
package rice.pastry.direct;
/**
 * A record describing a node's position in FreePastry's simulated network
 * topology, used by the direct (in-process) transport simulator.
 */
public interface NodeRecord {
  // Simulated network latency between this record and nrb.
  // NOTE(review): units are presumably milliseconds — confirm against the
  // simulator implementation.
  float networkDelay(NodeRecord nrb);

  // Proximity metric between this record and nrb; smaller presumably means
  // closer — confirm against callers.
  float proximity(NodeRecord nrb);

  // Marks the node represented by this record as dead in the simulation.
  void markDead();
}
| apache-2.0 |
apache/maven | maven-core/src/main/java/org/apache/maven/lifecycle/internal/BuildListCalculator.java | 2740 | package org.apache.maven.lifecycle.internal;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.inject.Named;
import javax.inject.Singleton;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.lifecycle.internal.builder.BuilderCommon;
import org.apache.maven.project.MavenProject;
/**
* <strong>NOTE:</strong> This class is not part of any public api and can be changed or deleted without prior notice.
*
* @since 3.0
* @author Kristian Rosenvold
*/
@Named
@Singleton
public class BuildListCalculator
{
public ProjectBuildList calculateProjectBuilds( MavenSession session, List<TaskSegment> taskSegments )
{
List<ProjectSegment> projectBuilds = new ArrayList<>();
MavenProject rootProject = session.getTopLevelProject();
for ( TaskSegment taskSegment : taskSegments )
{
List<MavenProject> projects;
if ( taskSegment.isAggregating() )
{
projects = Collections.singletonList( rootProject );
}
else
{
projects = session.getProjects();
}
for ( MavenProject project : projects )
{
ClassLoader tccl = Thread.currentThread().getContextClassLoader();
try
{
BuilderCommon.attachToThread( project ); // Not totally sure if this is needed for anything
MavenSession copiedSession = session.clone();
copiedSession.setCurrentProject( project );
projectBuilds.add( new ProjectSegment( project, taskSegment, copiedSession ) );
}
finally
{
Thread.currentThread().setContextClassLoader( tccl );
}
}
}
return new ProjectBuildList( projectBuilds );
}
}
| apache-2.0 |
rowillia/buck | src/com/facebook/buck/jvm/groovy/GroovycStep.java | 6134 | /*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.groovy;
import static com.google.common.base.Functions.toStringFunction;
import static com.google.common.collect.Iterables.any;
import static com.google.common.collect.Iterables.transform;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.jvm.java.Javac;
import com.facebook.buck.jvm.java.JavacOptions;
import com.facebook.buck.jvm.java.OptionsConsumer;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.util.ProcessExecutor;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Map;
/**
 * A build {@link Step} that shells out to the {@code groovyc} compiler to compile the given
 * Groovy sources, optionally cross-compiling Java sources in the same invocation (groovyc's
 * joint compilation, enabled via {@code -j}).
 */
class GroovycStep implements Step {

  /** Tool that resolves the groovyc executable and its command prefix. */
  private final Tool groovyc;
  /** Extra command-line arguments appended after the cross-compilation options. */
  private final Optional<ImmutableList<String>> extraArguments;
  /** javac options forwarded to groovyc's embedded javac when cross-compiling. */
  private final JavacOptions javacOptions;
  private final SourcePathResolver resolver;
  /** Directory the compiled class files are written to ({@code -d}). */
  private final Path outputDirectory;
  /** Sources to compile; may contain both .groovy and .java files. */
  private final ImmutableSortedSet<Path> sourceFilePaths;
  /** File the expanded source list is written to and passed as {@code @<file>}. */
  private final Path pathToSrcsList;
  /** Compile-time classpath entries joined into the {@code -cp} argument. */
  private final ImmutableSortedSet<Path> declaredClasspathEntries;
  private final ProjectFilesystem filesystem;

  GroovycStep(
      Tool groovyc,
      Optional<ImmutableList<String>> extraArguments,
      JavacOptions javacOptions,
      SourcePathResolver resolver,
      Path outputDirectory,
      ImmutableSortedSet<Path> sourceFilePaths,
      Path pathToSrcsList,
      ImmutableSortedSet<Path> declaredClasspathEntries,
      ProjectFilesystem filesystem) {
    this.groovyc = groovyc;
    this.extraArguments = extraArguments;
    this.javacOptions = javacOptions;
    this.resolver = resolver;
    this.outputDirectory = outputDirectory;
    this.sourceFilePaths = sourceFilePaths;
    this.pathToSrcsList = pathToSrcsList;
    this.declaredClasspathEntries = declaredClasspathEntries;
    this.filesystem = filesystem;
  }

  /**
   * Launches groovyc as an external process rooted at the project filesystem, with the
   * environment taken from the execution context.
   *
   * @return the compiler's exit code, or -1 if the process could not be started or the
   *     sources list could not be written (the IOException is printed to stderr rather
   *     than propagated, matching the original best-effort behavior).
   */
  @Override
  public int execute(ExecutionContext context) throws IOException, InterruptedException {
    ProcessBuilder processBuilder = new ProcessBuilder(createCommand());

    // Run with exactly the context's environment, not an augmented copy of ours.
    Map<String, String> env = processBuilder.environment();
    env.clear();
    env.putAll(context.getEnvironment());

    processBuilder.directory(filesystem.getRootPath().toAbsolutePath().toFile());

    int exitCode = -1;
    try {
      writePathToSourcesList(sourceFilePaths);
      ProcessExecutor processExecutor = context.getProcessExecutor();
      exitCode = processExecutor.execute(processBuilder.start()).getExitCode();
    } catch (IOException e) {
      // Best-effort: report the failure and fall through with the -1 exit code.
      e.printStackTrace(context.getStdErr());
    }
    return exitCode;
  }

  @Override
  public String getShortName() {
    return Joiner.on(" ").join(groovyc.getCommandPrefix(resolver));
  }

  @Override
  public String getDescription(ExecutionContext context) {
    return Joiner.on(" ").join(createCommand());
  }

  /**
   * Builds the full groovyc command line: command prefix, classpath, output directory,
   * cross-compilation options, extra arguments, and finally the {@code @srcs} argfile.
   */
  private ImmutableList<String> createCommand() {
    final ImmutableList.Builder<String> command = ImmutableList.builder();

    command.addAll(groovyc.getCommandPrefix(resolver));

    String classpath =
        Joiner.on(File.pathSeparator).join(transform(declaredClasspathEntries, toStringFunction()));
    command
        .add("-cp")
        // groovyc rejects an empty -cp argument, so pass a quoted empty string instead.
        .add(classpath.isEmpty() ? "''" : classpath)
        .add("-d")
        .add(outputDirectory.toString());
    addCrossCompilationOptions(command);

    command.addAll(extraArguments.or(ImmutableList.<String>of()));

    command.add("@" + pathToSrcsList);

    return command.build();
  }

  /** Writes the expanded, argfile-escaped source paths to {@link #pathToSrcsList}. */
  private void writePathToSourcesList(Iterable<Path> expandedSources) throws IOException {
    filesystem.writeLinesToPath(
        FluentIterable.from(expandedSources)
            .transform(toStringFunction())
            .transform(Javac.ARGFILES_ESCAPER),
        pathToSrcsList);
  }

  /**
   * When any .java source is present, enables joint compilation ({@code -j}) and forwards
   * the configured javac options using groovyc's -J (key=value) / -F (flag) pass-through.
   */
  private void addCrossCompilationOptions(final ImmutableList.Builder<String> command) {
    if (shouldCrossCompile()) {
      command.add("-j");
      javacOptions.appendOptionsTo(new OptionsConsumer() {
        @Override
        public void addOptionValue(String option, String value) {
          // Explicitly disallow the setting of sourcepath in a cross compilation context.
          // The implementation of `appendOptionsTo` provides a blank default, which
          // confuses the cross compilation step's javac (it won't find any class files
          // compiled by groovyc).
          if (option.equals("sourcepath")) {
            return;
          }
          command.add("-J" + String.format("%s=%s", option, value));
        }

        @Override
        public void addFlag(String flagName) {
          command.add("-F" + flagName);
        }

        @Override
        public void addExtras(Collection<String> extras) {
          for (String extra : extras) {
            // Both branches of the original if/else delegated to addFlag; the only
            // difference is stripping a single leading '-'. Collapse the duplication.
            addFlag(extra.startsWith("-") ? extra.substring(1) : extra);
          }
        }
      }, filesystem.getAbsolutifier());
    }
  }

  /** True when at least one source file is a .java file (triggers joint compilation). */
  private boolean shouldCrossCompile() {
    return any(sourceFilePaths, new Predicate<Path>() {
      @Override
      public boolean apply(Path input) {
        return input.toString().endsWith(".java");
      }
    });
  }
}
| apache-2.0 |
lewis-ing/stormpath-spring-security | core/src/main/java/com/stormpath/spring/security/authz/permission/evaluator/WildcardPermissionEvaluator.java | 3839 | /*
* Copyright 2014 Stormpath, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stormpath.spring.security.authz.permission.evaluator;
import com.stormpath.spring.security.authz.permission.Permission;
import com.stormpath.spring.security.authz.permission.WildcardPermission;
import org.springframework.security.access.PermissionEvaluator;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import java.io.Serializable;
import java.util.Collection;
/**
* A {@link PermissionEvaluator} that determines if a {@link WildcardPermission} matches a given permission.
* <h3>Usage</h3>
* In order to use it you need to configure Spring this way:
* <pre>
* <bean id="permissionEvaluator" class="com.stormpath.spring.security.authz.permission.evaluator.WildcardPermissionEvaluator"/>
* <bean id="methodExpressionHandler" class="org.springframework.security.access.expression.method.DefaultMethodSecurityExpressionHandler">
* <property name="permissionEvaluator" ref="permissionEvaluator"/>
* </bean>
* <bean id="webExpressionHandler" class="org.springframework.security.web.access.expression.DefaultWebSecurityExpressionHandler">
* <property name="permissionEvaluator" ref="permissionEvaluator"/>
* </bean>
* </pre>
* And then you can simply evaluate permissions this way using <a href="http://docs.spring.io/spring-security/site/docs/3.0.x/reference/el-access.html">Method Security Expressions</a>:
* <pre>
@PreAuthorize("hasPermission(...)")
* </pre>
* or using <a href="http://docs.spring.io/spring-security/site/docs/3.0.x/reference/taglibs.html">JSP taglibs</a>
* <pre>
* <sec:authorize access="hasPermission(...)" />
* </pre>
*
* @since 0.2.0
*/
public class WildcardPermissionEvaluator implements PermissionEvaluator {

    /**
     * Builds a {@link WildcardPermission} from the given domain object and permission
     * (joined with {@link WildcardPermission#PART_DIVIDER_TOKEN}) and returns {@code true}
     * if any of the authentication's granted authorities is a {@link WildcardPermission}
     * that implies it.
     *
     * @param authentication the current authentication whose authorities are checked
     * @param targetDomainObject optional domain object prefix; skipped when {@code null}
     * @param permission the permission to check for
     * @return {@code true} if a granted WildcardPermission implies the requested one
     */
    @Override
    public boolean hasPermission(Authentication authentication, Object targetDomainObject, Object permission) {
        String domainObjectString = "";
        if (targetDomainObject != null) {
            domainObjectString = targetDomainObject + WildcardPermission.PART_DIVIDER_TOKEN;
        }
        //Let's construct a WildcardPermission out of the given parameters
        Permission toMatch = new WildcardPermission(domainObjectString + permission);
        Collection<? extends GrantedAuthority> authorities = authentication.getAuthorities();
        for (GrantedAuthority authority : authorities) {
            //This evaluator only compares WildcardPermissions
            if (authority instanceof WildcardPermission) {
                WildcardPermission wp = (WildcardPermission) authority;
                //Let's delegate the actual comparison to the WildcardPermission
                if (wp.implies(toMatch)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Variant taking a target id and type; delegates to
     * {@link #hasPermission(Authentication, Object, Object)} with
     * {@code targetType + PART_DIVIDER_TOKEN + targetId}.
     *
     * @param targetId optional identifier; skipped when {@code null}
     */
    @Override
    public boolean hasPermission(Authentication authentication, Serializable targetId, String targetType, Object permission) {
        String targetIdString = "";
        // BUG FIX: the original tested targetIdString != null, but targetIdString had just
        // been assigned "" and could never be null. The intended check is on targetId;
        // previously a null targetId produced a literal "...:null" suffix.
        if (targetId != null) {
            targetIdString = WildcardPermission.PART_DIVIDER_TOKEN + targetId;
        }
        return hasPermission(authentication, targetType + targetIdString, permission);
    }
}
| apache-2.0 |
wsldl123292/testeveryting | uframework/src/main/java/org/smart4j/framework/ds/DataSourceFactory.java | 270 | package org.smart4j.framework.ds;
import javax.sql.DataSource;
/**
 * Purpose: factory abstraction for obtaining a JDBC {@link DataSource}.
 * Author: ldl
 * Date: 2017-08-21 22:08
 */
public interface DataSourceFactory {

    /**
     * Obtain the data source.
     *
     * @return the data source
     */
    DataSource getDataSource();
}
| apache-2.0 |
punkhorn/camel-upstream | core/camel-core/src/test/java/org/apache/camel/processor/async/AsyncEndpointRecipientListParallelTest.java | 2857 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor.async;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.junit.Test;
public class AsyncEndpointRecipientListParallelTest extends ContextTestSupport {

    // Thread names captured before and after the async recipient list; used to verify
    // that the exchange was handed off to a different thread by the async endpoint.
    private static String beforeThreadName;
    private static String afterThreadName;

    /**
     * Sends a message through a route whose recipient list targets an async endpoint with
     * parallel processing, and asserts the reply plus the before/after thread handoff.
     */
    @Test
    public void testAsyncEndpoint() throws Exception {
        getMockEndpoint("mock:before").expectedBodiesReceived("Hello Camel");
        getMockEndpoint("mock:after").expectedBodiesReceived("Bye Camel");
        getMockEndpoint("mock:result").expectedBodiesReceived("Bye Camel");

        String reply = template.requestBody("direct:start", "Hello Camel", String.class);
        assertEquals("Bye Camel", reply);

        // BUG FIX: the mock expectations above were never verified in the original test;
        // without this call a failed expectation would silently pass.
        assertMockEndpointsSatisfied();

        assertNotEquals("Should use different threads", beforeThreadName, afterThreadName);
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // "async:" endpoints complete asynchronously on a separate thread.
                context.addComponent("async", new MyAsyncComponent());

                from("direct:start")
                    .to("mock:before")
                    .to("log:before")
                    .process(new Processor() {
                        public void process(Exchange exchange) throws Exception {
                            beforeThreadName = Thread.currentThread().getName();
                        }
                    })
                    .recipientList(constant("async:bye:camel")).parallelProcessing()
                    .process(new Processor() {
                        public void process(Exchange exchange) throws Exception {
                            afterThreadName = Thread.currentThread().getName();
                        }
                    })
                    .to("log:after")
                    .to("mock:after")
                    .to("mock:result");
            }
        };
    }
} | apache-2.0 |
tupilabs/nebular | src/main/java/org/apache/commons/functor/aggregator/ArrayListBackedAggregator.java | 3634 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.functor.aggregator;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.functor.UnaryFunction;
/**
* Implementation of an aggregator which stores the data series in an
* <code>ArrayList</code>.
*
* @param <T>
* Type of object stored in the data series.
*/
/**
 * An aggregator whose data series is kept in an {@link ArrayList}.
 *
 * @param <T> type of the elements held in the data series
 */
public class ArrayListBackedAggregator<T> extends AbstractListBackedAggregator<T> {

    /**
     * Equivalent to {@link #ArrayListBackedAggregator(UnaryFunction, long)
     * ArrayListBackedAggregator(aggregationFunction, 0L)} -- no timed reset.
     *
     * @param aggregationFunction
     *            function applied in {@link #evaluate()}; a
     *            <code>NullPointerException</code> is thrown if <code>null</code>
     */
    public ArrayListBackedAggregator(UnaryFunction<List<T>, T> aggregationFunction) {
        this(aggregationFunction, 0L);
    }

    /**
     * Equivalent to
     * {@link #ArrayListBackedAggregator(UnaryFunction, long, boolean)
     * ArrayListBackedAggregator(aggregationFunction, interval, false)} -- a
     * private, non-shared timer.
     *
     * @param aggregationFunction
     *            function applied in {@link #evaluate()}; a
     *            <code>NullPointerException</code> is thrown if <code>null</code>
     * @param interval
     *            reset interval for this aggregator, in milliseconds
     */
    public ArrayListBackedAggregator(UnaryFunction<List<T>, T> aggregationFunction, long interval) {
        this(aggregationFunction, interval, false);
    }

    /**
     * Fully-specified constructor: aggregation function, reset interval, and
     * whether the timer is shared across instances.
     *
     * @param aggregationFunction
     *            function applied in {@link #evaluate()}; a
     *            <code>NullPointerException</code> is thrown if <code>null</code>
     * @param interval
     *            reset interval for this aggregator, in milliseconds
     * @param useSharedTimer
     *            when true, the timer is shared across instances as described in
     *            {@link AbstractTimedAggregator#AbstractTimedAggregator(long,boolean)};
     *            otherwise this instance owns a private timer
     */
    public ArrayListBackedAggregator(UnaryFunction<List<T>, T> aggregationFunction, long interval,
            boolean useSharedTimer) {
        super(aggregationFunction, interval, useSharedTimer);
    }

    /**
     * Supplies the backing list for the data series.
     *
     * @return a freshly created <code>ArrayList</code> with the JDK's default
     *         initial capacity
     */
    @Override
    protected List<T> createList() {
        return new ArrayList<>();
    }

    @Override
    public String toString() {
        return ArrayListBackedAggregator.class.getName();
    }
}
| apache-2.0 |
DanielSerdyukov/droidkit-4.x | library/src/androidTest/java/droidkit/app/AlertFragment.java | 668 | package droidkit.app;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.os.Bundle;
import droidkit.annotation.OnClick;
/**
* @author Daniel Serdyukov
*/
/**
 * Simple OK/Cancel alert dialog fragment used by the droidkit tests.
 *
 * @author Daniel Serdyukov
 */
public class AlertFragment extends DialogFragment {

    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        final AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        builder.setMessage("Test");
        builder.setPositiveButton(android.R.string.ok, null);
        builder.setNegativeButton(android.R.string.cancel, null);
        return builder.create();
    }

    // Click handler wired via droidkit's @OnClick annotation; intentionally empty.
    @OnClick(android.R.id.button1)
    void onTestClick() {
    }
}
| apache-2.0 |
grs/activemq-artemis | artemis-protocols/artemis-openwire-protocol/src/main/java/org/apache/activemq/artemis/core/protocol/openwire/OpenWireProtocolManager.java | 21680 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.protocol.openwire;
import javax.jms.InvalidClientIDException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ScheduledExecutorService;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
import org.apache.activemq.advisory.AdvisorySupport;
import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.BaseInterceptor;
import org.apache.activemq.artemis.api.core.Interceptor;
import org.apache.activemq.artemis.api.core.RoutingType;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClusterTopologyListener;
import org.apache.activemq.artemis.api.core.client.TopologyMember;
import org.apache.activemq.artemis.core.protocol.openwire.amq.AMQConnectionContext;
import org.apache.activemq.artemis.core.protocol.openwire.amq.AMQProducerBrokerExchange;
import org.apache.activemq.artemis.core.protocol.openwire.amq.AMQSession;
import org.apache.activemq.artemis.core.remoting.impl.netty.NettyServerConnection;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.ActiveMQServerLogger;
import org.apache.activemq.artemis.core.server.cluster.ClusterConnection;
import org.apache.activemq.artemis.core.server.cluster.ClusterManager;
import org.apache.activemq.artemis.reader.MessageUtil;
import org.apache.activemq.artemis.spi.core.protocol.ConnectionEntry;
import org.apache.activemq.artemis.spi.core.protocol.ProtocolManager;
import org.apache.activemq.artemis.spi.core.protocol.ProtocolManagerFactory;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.spi.core.remoting.Acceptor;
import org.apache.activemq.artemis.spi.core.remoting.Connection;
import org.apache.activemq.artemis.spi.core.security.ActiveMQSecurityManager;
import org.apache.activemq.artemis.spi.core.security.ActiveMQSecurityManager3;
import org.apache.activemq.artemis.utils.DataConstants;
import org.apache.activemq.command.ActiveMQMessage;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.command.BrokerId;
import org.apache.activemq.command.BrokerInfo;
import org.apache.activemq.command.Command;
import org.apache.activemq.command.ConnectionControl;
import org.apache.activemq.command.ConnectionInfo;
import org.apache.activemq.command.ConsumerId;
import org.apache.activemq.command.DestinationInfo;
import org.apache.activemq.command.MessageDispatch;
import org.apache.activemq.command.MessageId;
import org.apache.activemq.command.ProducerId;
import org.apache.activemq.command.ProducerInfo;
import org.apache.activemq.command.WireFormatInfo;
import org.apache.activemq.openwire.OpenWireFormat;
import org.apache.activemq.openwire.OpenWireFormatFactory;
import org.apache.activemq.state.ProducerState;
import org.apache.activemq.util.IdGenerator;
import org.apache.activemq.util.InetAddressUtil;
import org.apache.activemq.util.LongSequenceGenerator;
public class OpenWireProtocolManager implements ProtocolManager<Interceptor>, ClusterTopologyListener {
private static final List<String> websocketRegistryNames = Collections.EMPTY_LIST;
private static final IdGenerator BROKER_ID_GENERATOR = new IdGenerator();
private static final IdGenerator ID_GENERATOR = new IdGenerator();
private final LongSequenceGenerator messageIdGenerator = new LongSequenceGenerator();
private final ActiveMQServer server;
private final OpenWireProtocolManagerFactory factory;
private OpenWireFormatFactory wireFactory;
private boolean prefixPacketSize = true;
private BrokerId brokerId;
protected final ProducerId advisoryProducerId = new ProducerId();
private final CopyOnWriteArrayList<OpenWireConnection> connections = new CopyOnWriteArrayList<>();
private final Map<String, AMQConnectionContext> clientIdSet = new HashMap<>();
private String brokerName;
private final Map<String, TopologyMember> topologyMap = new ConcurrentHashMap<>();
private final LinkedList<TopologyMember> members = new LinkedList<>();
private final ScheduledExecutorService scheduledPool;
//bean properties
//http://activemq.apache.org/failover-transport-reference.html
private boolean rebalanceClusterClients = false;
private boolean updateClusterClients = false;
private boolean updateClusterClientsOnRemove = false;
//http://activemq.apache.org/activemq-inactivitymonitor.html
private long maxInactivityDuration = 30 * 1000L;
private long maxInactivityDurationInitalDelay = 10 * 1000L;
private boolean useKeepAlive = true;
private final OpenWireMessageConverter messageConverter;
private final Map<SimpleString, RoutingType> prefixes = new HashMap<>();
public OpenWireProtocolManager(OpenWireProtocolManagerFactory factory, ActiveMQServer server) {
this.factory = factory;
this.server = server;
this.wireFactory = new OpenWireFormatFactory();
// preferred prop, should be done via config
wireFactory.setCacheEnabled(false);
advisoryProducerId.setConnectionId(ID_GENERATOR.generateId());
scheduledPool = server.getScheduledPool();
this.messageConverter = new OpenWireMessageConverter(wireFactory.createWireFormat());
final ClusterManager clusterManager = this.server.getClusterManager();
ClusterConnection cc = clusterManager.getDefaultConnection(null);
if (cc != null) {
cc.addClusterTopologyListener(this);
}
}
public OpenWireFormat getNewWireFormat() {
return (OpenWireFormat) wireFactory.createWireFormat();
}
@Override
public void nodeUP(TopologyMember member, boolean last) {
if (topologyMap.put(member.getNodeId(), member) == null) {
updateClientClusterInfo();
}
}
@Override
public void nodeDown(long eventUID, String nodeID) {
if (topologyMap.remove(nodeID) != null) {
updateClientClusterInfo();
}
}
public void removeConnection(ConnectionInfo info, Throwable error) throws InvalidClientIDException {
synchronized (clientIdSet) {
String clientId = info.getClientId();
if (clientId != null) {
AMQConnectionContext context = this.clientIdSet.get(clientId);
if (context != null && context.decRefCount() == 0) {
//connection is still there and need to close
context.getConnection().disconnect(error != null);
this.connections.remove(context.getConnection());
this.clientIdSet.remove(clientId);
}
} else {
throw new InvalidClientIDException("No clientID specified for connection disconnect request");
}
}
}
public ScheduledExecutorService getScheduledPool() {
return scheduledPool;
}
public ActiveMQServer getServer() {
return server;
}
private void updateClientClusterInfo() {
synchronized (members) {
members.clear();
members.addAll(topologyMap.values());
}
for (OpenWireConnection c : this.connections) {
ConnectionControl control = newConnectionControl();
try {
c.updateClient(control);
} catch (Exception e) {
ActiveMQServerLogger.LOGGER.warn(e.getMessage(), e);
c.sendException(e);
}
}
}
@Override
public boolean acceptsNoHandshake() {
return false;
}
@Override
public ProtocolManagerFactory<Interceptor> getFactory() {
return factory;
}
@Override
public void updateInterceptors(List<BaseInterceptor> incomingInterceptors,
List<BaseInterceptor> outgoingInterceptors) {
// NO-OP
}
@Override
public ConnectionEntry createConnectionEntry(Acceptor acceptorUsed, Connection connection) {
OpenWireFormat wf = (OpenWireFormat) wireFactory.createWireFormat();
OpenWireConnection owConn = new OpenWireConnection(connection, server, server.getExecutorFactory().getExecutor(), this, wf);
owConn.sendHandshake();
//first we setup ttl to -1
//then when negotiation, we handle real ttl and delay
ConnectionEntry entry = new ConnectionEntry(owConn, null, System.currentTimeMillis(), -1);
owConn.setConnectionEntry(entry);
return entry;
}
@Override
public void removeHandler(String name) {
}
@Override
public void handleBuffer(RemotingConnection connection, ActiveMQBuffer buffer) {
}
@Override
public void addChannelHandlers(ChannelPipeline pipeline) {
// each read will have a full packet with this
pipeline.addLast("packet-decipher", new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, DataConstants.SIZE_INT));
}
@Override
public boolean isProtocol(byte[] array) {
if (array.length < 8) {
throw new IllegalArgumentException("Protocol header length changed " + array.length);
}
int start = this.prefixPacketSize ? 4 : 0;
int j = 0;
// type
if (array[start] != WireFormatInfo.DATA_STRUCTURE_TYPE) {
return false;
}
start++;
WireFormatInfo info = new WireFormatInfo();
final byte[] magic = info.getMagic();
int remainingLen = array.length - start;
int useLen = remainingLen > magic.length ? magic.length : remainingLen;
useLen += start;
// magic
for (int i = start; i < useLen; i++) {
if (array[i] != magic[j]) {
return false;
}
j++;
}
return true;
}
@Override
public void handshake(NettyServerConnection connection, ActiveMQBuffer buffer) {
}
@Override
public List<String> websocketSubprotocolIdentifiers() {
return websocketRegistryNames;
}
public void addConnection(OpenWireConnection connection, ConnectionInfo info) throws Exception {
String username = info.getUserName();
String password = info.getPassword();
if (!this.validateUser(username, password)) {
throw new SecurityException("User name [" + username + "] or password is invalid.");
}
String clientId = info.getClientId();
if (clientId == null) {
throw new InvalidClientIDException("No clientID specified for connection request");
}
synchronized (clientIdSet) {
AMQConnectionContext context;
context = clientIdSet.get(clientId);
if (context != null) {
if (info.isFailoverReconnect()) {
OpenWireConnection oldConnection = context.getConnection();
oldConnection.disconnect(true);
connections.remove(oldConnection);
connection.reconnect(context, info);
} else {
throw new InvalidClientIDException("Broker: " + getBrokerName() + " - Client: " + clientId + " already connected from " + context.getConnection().getRemoteAddress());
}
} else {
//new connection
context = connection.initContext(info);
clientIdSet.put(clientId, context);
}
connections.add(connection);
ActiveMQTopic topic = AdvisorySupport.getConnectionAdvisoryTopic();
// do not distribute passwords in advisory messages. usernames okay
ConnectionInfo copy = info.copy();
copy.setPassword("");
fireAdvisory(context, topic, copy);
// init the conn
context.getConnection().addSessions(context.getConnectionState().getSessionIds());
}
}
public void fireAdvisory(AMQConnectionContext context, ActiveMQTopic topic, Command copy) throws Exception {
this.fireAdvisory(context, topic, copy, null, null);
}
public BrokerId getBrokerId() {
// TODO: Use the Storage ID here...
if (brokerId == null) {
brokerId = new BrokerId(BROKER_ID_GENERATOR.generateId());
}
return brokerId;
}
/*
* See AdvisoryBroker.fireAdvisory()
*/
public void fireAdvisory(AMQConnectionContext context,
ActiveMQTopic topic,
Command command,
ConsumerId targetConsumerId,
String originalConnectionId) throws Exception {
ActiveMQMessage advisoryMessage = new ActiveMQMessage();
if (originalConnectionId == null) {
originalConnectionId = context.getConnectionId().getValue();
}
advisoryMessage.setStringProperty(MessageUtil.CONNECTION_ID_PROPERTY_NAME.toString(), originalConnectionId);
advisoryMessage.setStringProperty(AdvisorySupport.MSG_PROPERTY_ORIGIN_BROKER_NAME, getBrokerName());
String id = getBrokerId() != null ? getBrokerId().getValue() : "NOT_SET";
advisoryMessage.setStringProperty(AdvisorySupport.MSG_PROPERTY_ORIGIN_BROKER_ID, id);
String url = context.getConnection().getLocalAddress();
advisoryMessage.setStringProperty(AdvisorySupport.MSG_PROPERTY_ORIGIN_BROKER_URL, url);
// set the data structure
advisoryMessage.setDataStructure(command);
advisoryMessage.setPersistent(false);
advisoryMessage.setType(AdvisorySupport.ADIVSORY_MESSAGE_TYPE);
advisoryMessage.setMessageId(new MessageId(advisoryProducerId, messageIdGenerator.getNextSequenceId()));
advisoryMessage.setTargetConsumerId(targetConsumerId);
advisoryMessage.setDestination(topic);
advisoryMessage.setResponseRequired(false);
advisoryMessage.setProducerId(advisoryProducerId);
boolean originalFlowControl = context.isProducerFlowControl();
final AMQProducerBrokerExchange producerExchange = new AMQProducerBrokerExchange();
producerExchange.setConnectionContext(context);
producerExchange.setProducerState(new ProducerState(new ProducerInfo()));
try {
context.setProducerFlowControl(false);
AMQSession sess = context.getConnection().getAdvisorySession();
if (sess != null) {
sess.send(producerExchange.getProducerState().getInfo(), advisoryMessage, false);
}
} finally {
context.setProducerFlowControl(originalFlowControl);
}
}
public String getBrokerName() {
if (brokerName == null) {
try {
brokerName = InetAddressUtil.getLocalHostName().toLowerCase(Locale.ENGLISH);
} catch (Exception e) {
brokerName = server.getNodeID().toString();
}
}
return brokerName;
}
protected ConnectionControl newConnectionControl() {
ConnectionControl control = new ConnectionControl();
String uri = generateMembersURI(rebalanceClusterClients);
control.setConnectedBrokers(uri);
control.setRebalanceConnection(rebalanceClusterClients);
return control;
}
private String generateMembersURI(boolean flip) {
String uri;
StringBuffer connectedBrokers = new StringBuffer();
String separator = "";
synchronized (members) {
if (members.size() > 0) {
for (TopologyMember member : members) {
connectedBrokers.append(separator).append(member.toURI());
separator = ",";
}
// The flip exists to guarantee even distribution of URIs when sent to the client
// in case of failures you won't get all the connections failing to a single server.
if (flip && members.size() > 1) {
members.addLast(members.removeFirst());
}
}
}
uri = connectedBrokers.toString();
return uri;
}
public boolean isFaultTolerantConfiguration() {
return false;
}
public void postProcessDispatch(MessageDispatch md) {
// TODO Auto-generated method stub
}
public boolean isStopped() {
// TODO Auto-generated method stub
return false;
}
public void preProcessDispatch(MessageDispatch messageDispatch) {
// TODO Auto-generated method stub
}
public boolean isStopping() {
return false;
}
public boolean validateUser(String login, String passcode) {
boolean validated = true;
ActiveMQSecurityManager sm = server.getSecurityManager();
if (sm != null && server.getConfiguration().isSecurityEnabled()) {
if (sm instanceof ActiveMQSecurityManager3) {
validated = ((ActiveMQSecurityManager3) sm).validateUser(login, passcode, null) != null;
} else {
validated = sm.validateUser(login, passcode);
}
}
return validated;
}
public void sendBrokerInfo(OpenWireConnection connection) throws Exception {
BrokerInfo brokerInfo = new BrokerInfo();
brokerInfo.setBrokerName(getBrokerName());
brokerInfo.setBrokerId(new BrokerId("" + server.getNodeID()));
brokerInfo.setPeerBrokerInfos(null);
brokerInfo.setFaultTolerantConfiguration(false);
brokerInfo.setBrokerURL(connection.getLocalAddress());
//cluster support yet to support
brokerInfo.setPeerBrokerInfos(null);
connection.dispatch(brokerInfo);
}
public void setUpInactivityParams(OpenWireConnection connection, WireFormatInfo command) throws IOException {
long inactivityDurationToUse = command.getMaxInactivityDuration() > this.maxInactivityDuration ? this.maxInactivityDuration : command.getMaxInactivityDuration();
long inactivityDurationInitialDelayToUse = command.getMaxInactivityDurationInitalDelay() > this.maxInactivityDurationInitalDelay ? this.maxInactivityDurationInitalDelay : command.getMaxInactivityDurationInitalDelay();
boolean useKeepAliveToUse = this.maxInactivityDuration == 0L ? false : this.useKeepAlive;
connection.setUpTtl(inactivityDurationToUse, inactivityDurationInitialDelayToUse, useKeepAliveToUse);
}
/**
* URI property
*/
@SuppressWarnings("unused")
public void setRebalanceClusterClients(boolean rebalance) {
this.rebalanceClusterClients = rebalance;
}
/**
* URI property
*/
@SuppressWarnings("unused")
public boolean isRebalanceClusterClients() {
return this.rebalanceClusterClients;
}
/**
* URI property
*/
@SuppressWarnings("unused")
public void setUpdateClusterClients(boolean updateClusterClients) {
this.updateClusterClients = updateClusterClients;
}
public boolean isUpdateClusterClients() {
return this.updateClusterClients;
}
/**
 * URI property: whether connected clients are sent an updated broker list
 * when a broker is removed from the cluster.
 */
@SuppressWarnings("unused")
public void setUpdateClusterClientsOnRemove(boolean updateClusterClientsOnRemove) {
   this.updateClusterClientsOnRemove = updateClusterClientsOnRemove;
}

/**
 * URI property
 *
 * @return whether clients are updated when a broker leaves the cluster
 */
@SuppressWarnings("unused")
public boolean isUpdateClusterClientsOnRemove() {
   return this.updateClusterClientsOnRemove;
}

// Sets the broker name reported to OpenWire clients.
public void setBrokerName(String name) {
   this.brokerName = name;
}

// Whether keep-alive probes are sent on otherwise idle connections.
public boolean isUseKeepAlive() {
   return useKeepAlive;
}

@SuppressWarnings("unused")
public void setUseKeepAlive(boolean useKeepAlive) {
   this.useKeepAlive = useKeepAlive;
}

// Maximum time (ms) a connection may stay silent before being considered
// dead; 0 disables inactivity monitoring (see setUpInactivityParams).
public long getMaxInactivityDuration() {
   return maxInactivityDuration;
}

public void setMaxInactivityDuration(long maxInactivityDuration) {
   this.maxInactivityDuration = maxInactivityDuration;
}

// The "Inital" spelling is kept deliberately: it mirrors the OpenWire
// wire-format property name, so renaming would break URI-configured options.
@SuppressWarnings("unused")
public long getMaxInactivityDurationInitalDelay() {
   return maxInactivityDurationInitalDelay;
}

@SuppressWarnings("unused")
public void setMaxInactivityDurationInitalDelay(long maxInactivityDurationInitalDelay) {
   this.maxInactivityDurationInitalDelay = maxInactivityDurationInitalDelay;
}
/**
 * Registers every comma-separated prefix in the given string as an ANYCAST
 * routing-type mapping.
 *
 * @param anycastPrefix comma-separated list of destination-name prefixes
 */
@Override
public void setAnycastPrefix(String anycastPrefix) {
   final String[] parts = anycastPrefix.split(",");
   for (int i = 0; i < parts.length; i++) {
      prefixes.put(SimpleString.toSimpleString(parts[i]), RoutingType.ANYCAST);
   }
}
/**
 * Registers every comma-separated prefix in the given string as a MULTICAST
 * routing-type mapping.
 *
 * @param multicastPrefix comma-separated list of destination-name prefixes
 */
@Override
public void setMulticastPrefix(String multicastPrefix) {
   final String[] parts = multicastPrefix.split(",");
   for (int i = 0; i < parts.length; i++) {
      prefixes.put(SimpleString.toSimpleString(parts[i]), RoutingType.MULTICAST);
   }
}
/**
 * @return the live (not copied) mapping from destination-name prefix to the
 *         routing type (ANYCAST/MULTICAST) that prefix selects
 */
@Override
public Map<SimpleString, RoutingType> getPrefixes() {
   return prefixes;
}
/**
 * Collects the temporary destinations created by every currently connected
 * OpenWire connection into a single list.
 *
 * @return a freshly allocated list; never {@code null}
 */
public List<DestinationInfo> getTemporaryDestinations() {
   final List<DestinationInfo> collected = new ArrayList<>();
   for (final OpenWireConnection c : connections) {
      collected.addAll(c.getTemporaryDestinations());
   }
   return collected;
}
}
| apache-2.0 |
viggyprabhu/geowave | geowave-accumulo/src/main/java/mil/nga/giat/geowave/accumulo/MergingCombiner.java | 1275 | package mil.nga.giat.geowave.accumulo;
import java.util.Iterator;
import mil.nga.giat.geowave.index.Mergeable;
import mil.nga.giat.geowave.index.PersistenceUtils;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.Combiner;
/**
 * Accumulo {@link Combiner} that collapses multiple {@link Mergeable} values
 * stored under the same key into a single merged value (per the in-code
 * comment, used for rows such as statistics written by separate processes).
 */
public class MergingCombiner extends
		Combiner
{
	/**
	 * Deserializes each value seen for the key, folds them together with
	 * {@link Mergeable#merge}, and returns the re-serialized result.
	 */
	@Override
	public Value reduce(
			final Key key,
			final Iterator<Value> iter ) {
		Mergeable currentMergeable = null;
		while (iter.hasNext()) {
			final Value val = iter.next();
			// hopefully it is never the case that null statistics are stored,
			// but just in case, check
			final Mergeable mergeable = getMergeable(
					key,
					val.get());
			if (mergeable != null) {
				if (currentMergeable == null) {
					// first non-null value becomes the accumulator
					currentMergeable = mergeable;
				}
				else {
					currentMergeable.merge(mergeable);
				}
			}
		}
		if (currentMergeable != null) {
			return new Value(
					getBinary(currentMergeable));
		}
		// nothing deserialized cleanly; fall back to the iterator's raw top value
		return super.getTopValue();
	}

	/**
	 * Deserialization hook; subclasses may override to control how stored
	 * bytes are turned back into a {@link Mergeable}.
	 */
	protected Mergeable getMergeable(
			final Key key,
			final byte[] binary ) {
		return PersistenceUtils.fromBinary(
				binary,
				Mergeable.class);
	}

	/**
	 * Serialization hook; inverse of {@link #getMergeable}.
	 */
	protected byte[] getBinary(
			final Mergeable mergeable ) {
		return PersistenceUtils.toBinary(mergeable);
	}
}
| apache-2.0 |
kite-sdk/kite-spring-hbase-example | src/main/java/org/kitesdk/spring/hbase/example/service/WebPageSnapshotService.java | 14580 | /**
* Copyright 2014 Cloudera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kitesdk.spring.hbase.example.service;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.jsoup.Connection;
import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.kitesdk.data.DatasetReader;
import org.kitesdk.data.Key;
import org.kitesdk.data.RandomAccessDataset;
import org.kitesdk.spring.hbase.example.model.WebPageRedirectModel;
import org.kitesdk.spring.hbase.example.model.WebPageSnapshotModel;
import org.kitesdk.spring.hbase.example.model.frontend.WebPageSnapshotContent;
import org.kitesdk.spring.hbase.example.model.frontend.WebPageSnapshotMeta;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.convert.ConversionService;
import org.springframework.stereotype.Component;
/**
 * Service for WebPageSnapshot operations.
 *
 * <p>Snapshots are stored in HBase keyed by (url, fetchedAtRevTs), where
 * fetchedAtRevTs = Long.MAX_VALUE - fetchedAt. Rows for an URL therefore sort
 * from most recent to oldest fetch.</p>
 */
@Component
public class WebPageSnapshotService {

  @Autowired
  private RandomAccessDataset<WebPageSnapshotModel> webPageSnapshotModels;

  @Autowired
  private RandomAccessDataset<WebPageRedirectModel> webPageRedirectModels;

  @Autowired
  private ConversionService conversionService;

  /**
   * Take a snapshot of an URL. This WebPageSnapshot is stored in HBase. Returns
   * the WebPageSnapshotMeta
   *
   * If the URL is a redirect, the snapshot is stored under the final URL
   * destination. A WebPageRedirectModel is stored in the redirect table so when
   * fetching snapshots, we can follow the proper redirect path.
   *
   * @param url
   *          The URL to take a snapshot of
   * @return The WebPageSnapshotMeta for the page that we snapshotted.
   * @throws IOException
   */
  public WebPageSnapshotMeta takeSnapshot(String url) throws IOException {
    WebPageSnapshotModel webPageSnapshotModel = fetchWebPage(url);
    if (!webPageSnapshotModel.getUrl().equals(url)) {
      // Url is different, so must have redirected. Store the redirect model
      WebPageRedirectModel redirectModel = WebPageRedirectModel.newBuilder()
          .setUrl(url).setDestinationUrl(webPageSnapshotModel.getUrl()).build();
      webPageRedirectModels.put(redirectModel);
    } else {
      // If redirect exists, remove it since this URL no longer redirects
      Key key = new Key.Builder(webPageRedirectModels).add("url", url).build();
      WebPageRedirectModel redirectModel = webPageRedirectModels.get(key);
      if (redirectModel != null) {
        webPageRedirectModels.delete(key);
      }
    }
    webPageSnapshotModels.put(webPageSnapshotModel);
    return conversionService.convert(webPageSnapshotModel,
        WebPageSnapshotMeta.class);
  }

  /**
   * Get the most recent WebPageSnapshotMeta from HBase
   *
   * @param url
   *          The URL of the WebPageSnapshotMeta to get from HBase.
   * @return The WebPageSnapshotMeta, or null if one doesn't exist for this URL.
   */
  public WebPageSnapshotMeta getWebPageSnapshotMeta(String url) {
    WebPageSnapshotModel model = getMostRecentWebPageSnapshot(url);
    if (model != null) {
      return conversionService.convert(model, WebPageSnapshotMeta.class);
    } else {
      return null;
    }
  }

  /**
   * Get the WebPageSnapshotMeta that was fetched at a particular timestamp from
   * HBase
   *
   * @param url
   *          The URL of the WebPageSnapshotMeta to get from HBase.
   * @param ts
   *          The snapshot timestamp of the WebPageSnapshotMeta to get from
   *          HBase.
   * @return The WebPageSnapshotMeta, or null if one doesn't exist for this URL
   *         at this timestamp.
   */
  public WebPageSnapshotMeta getWebPageSnapshotMeta(String url, long ts) {
    WebPageSnapshotModel model = this.getWebPageSnapshot(url, ts);
    if (model != null) {
      return conversionService.convert(model, WebPageSnapshotMeta.class);
    } else {
      return null;
    }
  }

  /**
   * Get all WebPageSnapshotMeta from an URL that have been snapshotted since
   * the "since" param.
   *
   * @param url
   *          The URL to get WebPageSnapshotMeta instances from
   * @param since
   *          The epoch timestamp
   * @return The list of WebPageSnapshotMeta instances.
   */
  public List<WebPageSnapshotMeta> getWebPageSnapshotMetaSince(String url,
      long since) {
    return convertList(getWebPageSnapshotsSince(url, since),
        WebPageSnapshotMeta.class);
  }

  /**
   * Get the most recent WebPageSnapshotContent from HBase
   *
   * @param url
   *          The URL to fetch the most recent WebPageSnapshotContent from
   * @return The WebPageSnapshotContent, or null if one doesn't exists for this
   *         URL.
   */
  public WebPageSnapshotContent getWebPageSnapshotContent(String url) {
    WebPageSnapshotModel model = getMostRecentWebPageSnapshot(url);
    if (model != null) {
      return conversionService.convert(model, WebPageSnapshotContent.class);
    } else {
      return null;
    }
  }

  /**
   * Get the WebPageSnapshotContent that was fetched at a particular timestamp
   * from HBase
   *
   * @param url
   *          The URL of the WebPageSnapshotContent to get from HBase.
   * @param ts
   *          The snapshot timestamp of the WebPageSnapshotContent to get from
   *          HBase.
   * @return The WebPageSnapshotContent, or null if one doesn't exist for this
   *         URL at this timestamp.
   */
  public WebPageSnapshotContent getWebPageSnapshotContent(String url, long ts) {
    WebPageSnapshotModel model = getWebPageSnapshot(url, ts);
    if (model != null) {
      return conversionService.convert(model, WebPageSnapshotContent.class);
    } else {
      return null;
    }
  }

  /**
   * Get all WebPageSnapshotContent from an URL that have been snapshotted since
   * the "since" param.
   *
   * @param url
   *          The URL to get WebPageSnapshotContent instances from
   * @param since
   *          The epoch timestamp
   * @return The list of WebPageSnapshotContent instances.
   */
  public List<WebPageSnapshotContent> getWebPageSnapshotContentSince(
      String url, long since) {
    return convertList(getWebPageSnapshotsSince(url, since),
        WebPageSnapshotContent.class);
  }

  /**
   * Get the epoch timestamps for every snapshot time of an URL in HBase.
   *
   * @param url
   *          The URL of the page to get snapshot timestamps for
   * @return The list of timestamps
   */
  public List<Long> getSnapshotTimestamps(String url) {
    url = normalizeUrl(url);
    List<Long> snapshotTimestamps = new ArrayList<Long>();
    DatasetReader<WebPageSnapshotModel> reader = null;
    try {
      // Full reverse-timestamp range: every snapshot row for this URL.
      reader = webPageSnapshotModels.from("url", url)
          .from("fetchedAtRevTs", 0L).to("url", url)
          .to("fetchedAtRevTs", Long.MAX_VALUE).newReader();
      while (reader.hasNext()) {
        snapshotTimestamps.add(reader.next().getFetchedAt());
      }
    } finally {
      if (reader != null) {
        reader.close();
      }
    }
    return snapshotTimestamps;
  }

  /**
   * Get the most recent WebPageSnapshotModel from HBase
   *
   * @param url
   *          The URL to get the snapshotted page from HBase
   * @return The WebPageSnapshotModel, or null if there are no fetches for this
   *         URL
   */
  private WebPageSnapshotModel getMostRecentWebPageSnapshot(String url) {
    url = normalizeUrl(url);
    DatasetReader<WebPageSnapshotModel> reader = null;
    try {
      // we don't know the exact timestamp in the key, but we know since keys
      // are in timestamp descending order that the first row for an URL will be
      // the most recent.
      reader = webPageSnapshotModels.from("url", url)
          .from("fetchedAtRevTs", 0L).to("url", url)
          .to("fetchedAtRevTs", Long.MAX_VALUE).newReader();
      if (reader.hasNext()) {
        return reader.next();
      } else {
        return null;
      }
    } finally {
      if (reader != null) {
        reader.close();
      }
    }
  }

  /**
   * Get the WebPageSnapshotModel from HBase
   *
   * @param url
   *          The URL of the WebPageSnapshotModel
   * @param ts
   *          The snapshot timestamp of the WebPageSnapshotModel
   * @return The WebPageSnapshotModel, or null if there is no snapshot for the
   *         URL at this timestamp.
   */
  private WebPageSnapshotModel getWebPageSnapshot(String url, long ts) {
    url = normalizeUrl(url);
    Key key = new Key.Builder(webPageSnapshotModels).add("url", url)
        .add("fetchedAtRevTs", Long.MAX_VALUE - ts).build();
    return webPageSnapshotModels.get(key);
  }

  /**
   * Get WebPageSnapshotModels for an URL from HBase since the since param.
   *
   * @param url
   *          The URL of the page to fetch
   * @param since
   *          Epoch timestamp; only snapshots fetched at or after this time are
   *          returned.
   * @return The list of models that have been fetched for an URL since the
   *         since param.
   */
  private List<WebPageSnapshotModel> getWebPageSnapshotsSince(String url,
      long since) {
    url = normalizeUrl(url);
    List<WebPageSnapshotModel> models = new ArrayList<WebPageSnapshotModel>();
    DatasetReader<WebPageSnapshotModel> reader = null;
    try {
      // Keys store fetchedAtRevTs = Long.MAX_VALUE - fetchedAt, so
      // "fetchedAt >= since" corresponds to "revTs <= Long.MAX_VALUE - since".
      // BUG FIX: the upper bound was previously the raw "since" epoch value,
      // which compared against reversed timestamps and matched no real rows
      // (cf. getWebPageSnapshot, which converts correctly).
      reader = webPageSnapshotModels.from("url", url)
          .from("fetchedAtRevTs", 0L).to("url", url)
          .to("fetchedAtRevTs", Long.MAX_VALUE - since).newReader();
      while (reader.hasNext()) {
        models.add(reader.next());
      }
    } finally {
      if (reader != null) {
        reader.close();
      }
    }
    return models;
  }

  /**
   * Normalize an URL, which currently only consists of returning a redirect
   * destination if an URL is a redirect, or otherwise the passed in url.
   *
   * @param url
   *          The url to normalize
   * @return The normalized URL;
   */
  private String normalizeUrl(String url) {
    // If this url is a redirect, get it's destination URL to fetch from our
    // HBase store since we store all snapshots under the final destination the
    // page lives at.
    WebPageRedirectModel redirectModel = getRedirect(url);
    if (redirectModel != null) {
      return redirectModel.getDestinationUrl();
    } else {
      return url;
    }
  }

  /**
   * Return a WebPageRedirectModel if an URL is one that redirects to a
   * different source. Otherwise, returns null.
   *
   * @return The WebPageRedirectModel
   */
  private WebPageRedirectModel getRedirect(String url) {
    Key key = new Key.Builder(webPageRedirectModels).add("url", url).build();
    return webPageRedirectModels.get(key);
  }

  /**
   * Fetch the web page from the URL, parse the HTML to populate the metadata
   * required by WebPageSnapshotModel, and return the constructed
   * WebPageSnapshotModel.
   *
   * @param url
   *          The URL to fetch the web page from
   * @return The WebPageSnapshotModel
   * @throws IOException
   *           Thrown if there's an issue fetching the web page.
   */
  private WebPageSnapshotModel fetchWebPage(String url) throws IOException {
    long fetchTime = System.currentTimeMillis();
    Connection connection = Jsoup.connect(url);
    Response response = connection.execute();
    long postFetchTime = System.currentTimeMillis();
    int timeToFetch = (int) (postFetchTime - fetchTime);
    Document doc = response.parse();
    // response.url() is the final destination after any redirects.
    String destinationUrl = response.url().toString();
    String title = doc.title();
    String description = getDescriptionFromDocument(doc);
    List<String> keywords = getKeywordsFromDocument(doc);
    List<String> outlinks = getOutlinksFromDocument(doc);
    return WebPageSnapshotModel.newBuilder().setUrl(destinationUrl)
        .setFetchedAtRevTs(Long.MAX_VALUE - fetchTime)
        .setSize(doc.html().length()).setFetchedAt(fetchTime)
        .setFetchTimeMs(timeToFetch).setTitle(title)
        .setDescription(description).setKeywords(keywords)
        .setOutlinks(outlinks).setContent(doc.html()).build();
  }

  /**
   * Parse the description out of the meta tag if one exists.
   *
   * @param doc
   *          The Document to parse
   * @return The description if it exists in the HTML, otherwise the empty
   *         string (not null).
   */
  private String getDescriptionFromDocument(Document doc) {
    Elements metaDescriptionElements = doc.select("meta[name=description]");
    return metaDescriptionElements.size() > 0 ? metaDescriptionElements
        .attr("content") : "";
  }

  /**
   * Parse the keywords out of the meta tag if one exists. Otherwise, return an
   * empty list.
   *
   * @param doc
   *          The Document to parse
   * @return The list of keywords.
   */
  private List<String> getKeywordsFromDocument(Document doc) {
    List<String> keywords = new ArrayList<String>();
    Elements keywordsElements = doc.select("meta[name=keywords]");
    for (Element keywordsElement : keywordsElements) {
      for (String keyword : keywordsElement.attr("content").split(",")) {
        keywords.add(keyword.trim());
      }
    }
    return keywords;
  }

  /**
   * Parse the outlinks from a href tags in the document, and return them as a
   * list
   *
   * @param doc
   *          The document to parse
   * @return The list of outlinks as URL strings.
   */
  private List<String> getOutlinksFromDocument(Document doc) {
    List<String> outlinks = new ArrayList<String>();
    Elements linkElements = doc.select("a[href]");
    for (Element linkElement : linkElements) {
      outlinks.add(linkElement.attr("href").trim());
    }
    return outlinks;
  }

  /**
   * Use the conversionService to convert a list of objects to clazz
   *
   * @param list
   *          The list of objects to convert
   * @param clazz
   *          The class to convert those objects to
   * @return The list of converted objects.
   */
  private <T> List<T> convertList(List<?> list, Class<T> clazz) {
    List<T> returnList = new ArrayList<T>();
    for (Object o : list) {
      returnList.add(conversionService.convert(o, clazz));
    }
    return returnList;
  }
}
| apache-2.0 |
avranju/qpid-jms | qpid-jms-client/src/test/java/org/apache/qpid/jms/provider/mock/MockRemotePeer.java | 4497 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.jms.provider.mock;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.qpid.jms.meta.JmsResource;
/**
 * Context shared between all MockProvider instances. Simulates the remote end
 * of a provider connection so tests can observe connections and steer resource
 * lifecycle events via the configurable filters.
 */
public class MockRemotePeer {

    /** Singleton set by {@link #start()} and cleared on shutdown. */
    public static MockRemotePeer INSTANCE;

    private final Map<String, MockProvider> activeProviders = new ConcurrentHashMap<String, MockProvider>();
    private final MockProviderStats contextStats = new MockProviderStats();

    // Most recently connected provider; cleared on start/shutdown.
    private MockProvider lastRegistered;
    // When true, new connection attempts are rejected with an IOException.
    private boolean offline;

    // Optional hooks invoked on the corresponding resource lifecycle events;
    // a hook may throw to simulate a remote failure.
    private ResourceLifecycleFilter createFilter;
    private ResourceLifecycleFilter startFilter;
    private ResourceLifecycleFilter stopFilter;
    private ResourceLifecycleFilter destroyFilter;

    /**
     * Registers a provider as connected to this peer.
     *
     * @param provider the connecting provider; ignored when {@code null}
     * @throws IOException if the peer has been shut down and is offline
     */
    public void connect(MockProvider provider) throws IOException {
        if (offline) {
            // A message makes test failures far easier to diagnose than the
            // previous bare IOException.
            throw new IOException("Mock remote peer is offline; connect refused");
        }

        if (provider != null) {
            activeProviders.put(provider.getProviderId(), provider);
            lastRegistered = provider;
        }
    }

    /**
     * Removes a provider from the set of active connections.
     *
     * @param provider the disconnecting provider; ignored when {@code null}
     */
    public void disconnect(MockProvider provider) {
        if (provider != null) {
            activeProviders.remove(provider.getProviderId());
        }
    }

    /** Runs the create filter, if any, for the given resource. */
    public void createResource(JmsResource resource) throws Exception {
        if (createFilter != null) {
            createFilter.onLifecycleEvent(resource);
        }
    }

    /** Runs the start filter, if any, for the given resource. */
    public void startResource(JmsResource resource) throws Exception {
        if (startFilter != null) {
            startFilter.onLifecycleEvent(resource);
        }
    }

    /** Runs the stop filter, if any, for the given resource. */
    public void stopResource(JmsResource resource) throws Exception {
        if (stopFilter != null) {
            stopFilter.onLifecycleEvent(resource);
        }
    }

    /** Runs the destroy filter, if any, for the given resource. */
    public void destroyResource(JmsResource resource) throws Exception {
        if (destroyFilter != null) {
            destroyFilter.onLifecycleEvent(resource);
        }
    }

    /**
     * Resets all state, brings the peer online, and installs it as the
     * shared singleton.
     */
    public void start() {
        contextStats.reset();
        activeProviders.clear();
        lastRegistered = null;
        offline = false;

        MockRemotePeer.INSTANCE = this;
    }

    /**
     * Takes the peer offline, signaling a connection failure to every active
     * provider before clearing all registrations.
     */
    public void shutdown() {
        offline = true;

        // Copy to avoid concurrent modification while providers deregister
        // in reaction to the failure signal.
        List<MockProvider> active = new ArrayList<MockProvider>(activeProviders.values());
        for (MockProvider provider : active) {
            provider.signalConnectionFailed();
        }

        clearRegistrations();
    }

    /**
     * Takes the peer offline and clears all registrations without notifying
     * the connected providers.
     */
    public void shutdownQuietly() {
        offline = true;
        clearRegistrations();
    }

    /** Shared teardown: forgets all providers and detaches the singleton. */
    private void clearRegistrations() {
        activeProviders.clear();
        lastRegistered = null;

        MockRemotePeer.INSTANCE = null;
    }

    /**
     * Silently closes every connected provider without marking the peer
     * offline or clearing the registrations.
     */
    public void silentlyCloseConnectedProviders() {
        List<MockProvider> active = new ArrayList<MockProvider>(activeProviders.values());
        for (MockProvider provider : active) {
            provider.silentlyClose();
        }
    }

    public MockProvider getProvider(String providerId) {
        return activeProviders.get(providerId);
    }

    public MockProvider getLastRegistered() {
        return lastRegistered;
    }

    public MockProviderStats getContextStats() {
        return contextStats;
    }

    public void setResourceCreateFilter(ResourceLifecycleFilter filter) {
        createFilter = filter;
    }

    public void setResourceStartFilter(ResourceLifecycleFilter filter) {
        startFilter = filter;
    }

    public void setResourceStopFilter(ResourceLifecycleFilter filter) {
        stopFilter = filter;
    }

    public void setResourceDestroyFilter(ResourceLifecycleFilter filter) {
        destroyFilter = filter;
    }
}
| apache-2.0 |
aglne/dubbo | dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/context/annotation/DubboConfigConfigurationTest.java | 3720 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.context.annotation;
import org.apache.dubbo.config.ApplicationConfig;
import org.apache.dubbo.config.ModuleConfig;
import org.apache.dubbo.config.ProtocolConfig;
import org.apache.dubbo.config.RegistryConfig;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.core.io.support.ResourcePropertySource;
import java.io.IOException;
/**
 * {@link DubboConfigConfiguration} Test
 *
 * @since 2.5.8
 */
public class DubboConfigConfigurationTest {

    private AnnotationConfigApplicationContext context;

    @BeforeEach
    public void before() throws IOException {
        context = new AnnotationConfigApplicationContext();
        // Load the test properties ahead of any other property source so they
        // win during config binding.
        ResourcePropertySource propertySource = new ResourcePropertySource("META-INF/config.properties");
        context.getEnvironment().getPropertySources().addFirst(propertySource);
    }

    @AfterEach
    public void after() {
        context.close();
    }

    /**
     * Verifies that the Single configuration binds one bean per config type
     * from the property source. (The spurious {@code throws IOException}
     * declaration was removed; nothing in this method throws it.)
     */
    @Test
    public void testSingle() {
        context.register(DubboConfigConfiguration.Single.class);
        context.refresh();

        // application
        ApplicationConfig applicationConfig = context.getBean("applicationBean", ApplicationConfig.class);
        Assertions.assertEquals("dubbo-demo-application", applicationConfig.getName());

        // module
        ModuleConfig moduleConfig = context.getBean("moduleBean", ModuleConfig.class);
        Assertions.assertEquals("dubbo-demo-module", moduleConfig.getName());

        // registry
        RegistryConfig registryConfig = context.getBean(RegistryConfig.class);
        Assertions.assertEquals("zookeeper://192.168.99.100:32770", registryConfig.getAddress());

        // protocol
        ProtocolConfig protocolConfig = context.getBean(ProtocolConfig.class);
        Assertions.assertEquals("dubbo", protocolConfig.getName());
        Assertions.assertEquals(Integer.valueOf(20880), protocolConfig.getPort());
    }

    /**
     * Verifies that the Multiple configuration exposes one ApplicationConfig
     * bean per prefixed property group.
     */
    @Test
    public void testMultiple() {
        context.register(DubboConfigConfiguration.Multiple.class);
        context.refresh();

        // application
        ApplicationConfig applicationConfig = context.getBean("applicationBean", ApplicationConfig.class);
        Assertions.assertEquals("dubbo-demo-application", applicationConfig.getName());

        ApplicationConfig applicationBean2 = context.getBean("applicationBean2", ApplicationConfig.class);
        Assertions.assertEquals("dubbo-demo-application2", applicationBean2.getName());

        ApplicationConfig applicationBean3 = context.getBean("applicationBean3", ApplicationConfig.class);
        Assertions.assertEquals("dubbo-demo-application3", applicationBean3.getName());
    }
}
| apache-2.0 |
mayl8822/binnavi | src/main/java/com/google/security/zynamics/binnavi/Gui/MainWindow/ProjectTree/Nodes/Module/Component/CModuleNodeComponent.java | 9352 | // Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Nodes.Module.Component;
import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.CMain;
import com.google.security.zynamics.binnavi.Database.Interfaces.IDatabase;
import com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Nodes.CAbstractNodeComponent;
import com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Nodes.Data.Component.CDataNodeComponent;
import com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Nodes.Traces.Component.CTracesNodeComponent;
import com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Nodes.Views.Module.Component.CModuleViewsContainerComponent;
import com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Nodes.Views.Module.Component.CNativeFunctionViewsNodeComponent;
import com.google.security.zynamics.binnavi.disassembly.INaviAddressSpace;
import com.google.security.zynamics.binnavi.disassembly.INaviModule;
import com.google.security.zynamics.binnavi.disassembly.Modules.CModuleListenerAdapter;
import com.google.security.zynamics.binnavi.disassembly.Modules.IModuleListener;
import com.google.security.zynamics.binnavi.disassembly.types.Section;
import com.google.security.zynamics.binnavi.disassembly.types.TypeInstance;
import com.google.security.zynamics.binnavi.disassembly.views.INaviView;
import com.google.security.zynamics.binnavi.disassembly.views.IViewContainer;
import java.awt.BorderLayout;
import java.awt.EventQueue;
import java.awt.Window;
import java.util.HashMap;
import java.util.Map;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JTabbedPane;
import javax.swing.JTree;
import javax.swing.SwingUtilities;
/**
 * Component that is shown when module nodes are selected in the project tree.
 */
public final class CModuleNodeComponent extends CAbstractNodeComponent {
  /**
   * Used for serialization.
   */
  private static final long serialVersionUID = 7941100284678927190L;

  /**
   * Index of the "Views" tab inside the tabbed pane; the tab creation order in
   * the constructor must keep this valid.
   */
  private static final int VIEWS_TAB_INDEX = 2;

  /**
   * Index of the "Sections and Types" (binary data) tab inside the tabbed
   * pane; the tab creation order in the constructor must keep this valid.
   */
  private static final int DATA_TAB_INDEX = 4;

  /**
   * Icon shown in the Overview tab.
   */
  private static final Icon OVERVIEW_ICON =
      new ImageIcon(CMain.class.getResource("data/projecttreeicons/project_module.png"));

  /**
   * Icon shown in the Functions tab.
   */
  private static final Icon FUNCTIONS_ICON = new ImageIcon(
      CMain.class.getResource("data/projecttreeicons/native_flowgraph_views_container.png"));

  /**
   * Icon shown in the Views tab.
   */
  private static final Icon VIEWS_ICON =
      new ImageIcon(CMain.class.getResource("data/projecttreeicons/view4.png"));

  /**
   * Icon shown in the Traces tab.
   */
  private static final Icon TRACES_ICON =
      new ImageIcon(CMain.class.getResource("data/projecttreeicons/debug_traces.png"));

  /**
   * Icon shown in the Data tab.
   */
  private static final Icon DATA_ICON =
      new ImageIcon(CMain.class.getResource("data/projecttreeicons/chart_curve.png"));

  /**
   * Module whose information is shown in the panel.
   */
  private final INaviModule module;

  /**
   * Shows the individual tabs for the module.
   */
  private final JTabbedPane tabbedPane = new JTabbedPane();

  /**
   * Shows Overview information.
   */
  private final CModuleOverviewPanel overviewPanel;

  /**
   * Shows Functions information.
   */
  private final CNativeFunctionViewsNodeComponent functionsPanel;

  /**
   * Shows Views information.
   */
  private final CModuleViewsContainerComponent viewsPanel;

  /**
   * Shows Traces information.
   */
  private final CTracesNodeComponent tracesPanel;

  /**
   * Shows Data information.
   */
  private final CDataNodeComponent dataPanel;

  /**
   * Updates the pane on changes in the module.
   */
  private final IModuleListener internalModuleListener = new InternalModuleListener();

  /**
   * Maps each module to its component so the static focus helpers can locate
   * the right instance; entries are removed again in {@link #dispose()}.
   */
  private static final Map<INaviModule, CModuleNodeComponent> lookup =
      new HashMap<>();

  /**
   * Creates a new component object.
   *
   * @param projectTree Project tree that is updated when certain events happen.
   * @param database Database where the module is stored.
   * @param addressSpace Address space the module belongs to (this argument can be null in case of a
   *        global module).
   * @param module Module that provides the information displayed in the component.
   * @param container Container that represents the context in which the module is displayed.
   */
  public CModuleNodeComponent(final JTree projectTree, final IDatabase database,
      final INaviAddressSpace addressSpace, final INaviModule module,
      final IViewContainer container) {
    super(new BorderLayout());

    Preconditions.checkNotNull(database, "IE01977: Database argument can't be null");
    this.module = Preconditions.checkNotNull(module, "IE01978: Module argument can't be null");

    // Note: the order of creating tabs must not be changed since the tab index
    // constants (VIEWS_TAB_INDEX, DATA_TAB_INDEX) rely upon it.
    tabbedPane.addTab("Overview", OVERVIEW_ICON, overviewPanel =
        new CModuleOverviewPanel(projectTree, database, addressSpace, module, container));
    tabbedPane.addTab("Functions" + String.format(" (%d)", module.getFunctionCount()),
        FUNCTIONS_ICON, functionsPanel =
            new CNativeFunctionViewsNodeComponent(projectTree, database, module, container));
    tabbedPane.addTab("Views" + String.format(" (%d)", module.getCustomViewCount()), VIEWS_ICON,
        viewsPanel = new CModuleViewsContainerComponent(projectTree, container));
    tabbedPane.addTab("Debug Traces", TRACES_ICON,
        tracesPanel = new CTracesNodeComponent(projectTree, container));
    tabbedPane.addTab(
        "Sections and Types", DATA_ICON, dataPanel = new CDataNodeComponent(module, container));

    // All tabs except Overview require a loaded module.
    setModuleTabsEnabled(module.isLoaded());

    module.addListener(internalModuleListener);

    add(tabbedPane);

    lookup.put(module, this);
  }

  /**
   * Enables or disables every tab that requires the module to be loaded
   * (all tabs except Overview at index 0).
   *
   * @param enabled true to enable the tabs, false to disable them.
   */
  private void setModuleTabsEnabled(final boolean enabled) {
    for (int index = 1; index <= DATA_TAB_INDEX; index++) {
      tabbedPane.setEnabledAt(index, enabled);
    }
  }

  public static void focusSectionAddress(
      final INaviModule module, final Section section, final long address) {
    // Consistency fix: focusTypeInstance validates the lookup result and the
    // window ancestor; do the same here instead of risking a bare NPE.
    final CModuleNodeComponent component = Preconditions.checkNotNull(lookup.get(module),
        "Attempting to focus a section address, but the CModuleNodeComponent is null.");
    final JTabbedPane tabbedPane = component.tabbedPane;
    final Window window = Preconditions.checkNotNull(SwingUtilities.getWindowAncestor(tabbedPane),
        "Attempting to focus a section address, but the tabbedPane has no Window ancestor.");
    EventQueue.invokeLater(new Runnable() {
      @Override
      public void run() {
        window.toFront();
        window.repaint();
        tabbedPane.setSelectedIndex(DATA_TAB_INDEX);
        component.dataPanel.getDataSectionComponent().scrollToSectionAddress(section, address);
      }
    });
  }

  public static void focusTypeInstance(final INaviModule module, final TypeInstance instance) {
    final CModuleNodeComponent component =
        Preconditions.checkNotNull(lookup.get(module),
            "Attempting to give focus to the type window, but the CModuleNodeComponent is null.");
    final JTabbedPane tabbedPane = Preconditions.checkNotNull(component.tabbedPane,
        "Attempting to give focus to the type window, but the corresponding tabbed pane is null.");
    final Window window = Preconditions.checkNotNull(SwingUtilities.getWindowAncestor(tabbedPane),
        "Attempting to give focus to the type window, but the tabbedPane has no Window ancestor.");
    EventQueue.invokeLater(new Runnable() {
      @Override
      public void run() {
        window.toFront();
        window.repaint();
        tabbedPane.setSelectedIndex(DATA_TAB_INDEX);
        component.dataPanel.getDataSectionComponent().scrollToInstance(instance);
      }
    });
  }

  @Override
  public void dispose() {
    module.removeListener(internalModuleListener);

    overviewPanel.dispose();
    functionsPanel.dispose();
    viewsPanel.dispose();
    tracesPanel.dispose();
    dataPanel.dispose();

    lookup.remove(module);
  }

  /**
   * Updates the pane on changes in the module.
   */
  private class InternalModuleListener extends CModuleListenerAdapter {
    @Override
    public void addedView(final INaviModule module, final INaviView view) {
      tabbedPane.setTitleAt(VIEWS_TAB_INDEX,
          "Views" + String.format(" (%d)", module.getCustomViewCount()));
    }

    @Override
    public void deletedView(final INaviModule module, final INaviView view) {
      tabbedPane.setTitleAt(VIEWS_TAB_INDEX,
          "Views" + String.format(" (%d)", module.getCustomViewCount()));
    }

    @Override
    public void loadedModule(final INaviModule module) {
      setModuleTabsEnabled(module.isLoaded());
    }
  }
}
| apache-2.0 |
mythguided/hydra | hydra-filters/src/main/java/com/addthis/hydra/data/filter/bundle/BundleFilterRandomField.java | 2592 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.addthis.hydra.data.filter.bundle;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import com.addthis.bundle.core.Bundle;
import com.addthis.bundle.core.BundleField;
import com.addthis.bundle.util.AutoField;
import com.addthis.codec.annotations.FieldConfig;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * This {@link BundleFilter BundleFilter} <span class="hydra-summary">randomly selects an input field and copies it to an output field</span>.
 * <p/>
 * <p>The possible input fields are specified with {@link #inFields inFields}. Of the possible
 * input fields only those with non-null values are considered. One of the eligible values
 * is randomly selected and copied into the output field.</p>
 * <p/>
 * <p>Example:</p>
 * <pre>
 * {random-field {inFields: ["INPUT1", "INPUT2", "INPUT3"], out: "OUTPUT"}}
 * </pre>
 *
 * @user-reference
 */
public class BundleFilterRandomField implements BundleFilter {

    /**
     * The possible input bundle fields from which one will be selected. This field is required.
     */
    private final AutoField[] inFields;

    /**
     * The name of the output bundle field. This field is required.
     */
    private final AutoField out;

    @JsonCreator
    public BundleFilterRandomField(@JsonProperty(value = "inFields", required = true) AutoField[] inFields,
                                   @JsonProperty(value = "out", required = true) AutoField out) {
        this.inFields = inFields;
        this.out = out;
    }

    /**
     * Copies the value of one randomly chosen non-null input field into the
     * output field. Always returns {@code true}, so bundles are never dropped.
     */
    @Override
    public boolean filter(Bundle bundle) {
        // Shuffle a copy of the configured fields so each invocation considers
        // them in a fresh random order; the first non-null value wins.
        List<AutoField> inFieldsShuffle = new ArrayList<>(Arrays.asList(inFields));
        Collections.shuffle(inFieldsShuffle);
        for (AutoField bf : inFieldsShuffle) {
            if (bf != null && bf.getValue(bundle) != null) {
                out.setValue(bundle, bf.getValue(bundle));
                break;
            }
        }
        return true;
    }
}
| apache-2.0 |
kidaa/isis | core/metamodel/src/main/java/org/apache/isis/objectstore/jdo/metamodel/facets/object/embeddedonly/JdoEmbeddedOnlyFacet.java | 1297 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.objectstore.jdo.metamodel.facets.object.embeddedonly;
import org.apache.isis.core.metamodel.facets.MarkerFacet;
/**
 * Marker facet corresponding to annotating the class with the JDO
 * {@code @EmbeddedOnly} annotation (presumably
 * {@code javax.jdo.annotations.EmbeddedOnly}, given this package's name —
 * TODO confirm; the previous javadoc referenced the JPA {@code Embeddable}
 * annotation instead, which looks like copy-paste residue).
 * <p>
 * The annotation has no attributes. However, in addition to this facet it
 * does also implicitly map to
 * {@link ParentedFacetDerivedFromJdoEmbeddedOnlyAnnotation}.
 */
public interface JdoEmbeddedOnlyFacet extends MarkerFacet {
}
| apache-2.0 |
ray-project/ray | java/api/src/main/java/io/ray/api/WaitResult.java | 664 | package io.ray.api;
import java.util.List;
/**
 * Holds the outcome of a {@code Ray.wait} call: the object references whose
 * results are already available locally, and those that are still pending.
 */
public final class WaitResult<T> {

  private final List<ObjectRef<T>> readyRefs;
  private final List<ObjectRef<T>> pendingRefs;

  public WaitResult(List<ObjectRef<T>> ready, List<ObjectRef<T>> unready) {
    this.readyRefs = ready;
    this.pendingRefs = unready;
  }

  /** Returns the references whose results are locally available. */
  public List<ObjectRef<T>> getReady() {
    return readyRefs;
  }

  /** Returns the references whose results are not yet available. */
  public List<ObjectRef<T>> getUnready() {
    return pendingRefs;
  }
}
| apache-2.0 |
mehdi149/OF_COMPILER_0.1 | gen-src/main/java/org/projectfloodlight/openflow/protocol/ver13/OFBsnGentableDescStatsEntryVer13.java | 13193 | // Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver13;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFBsnGentableDescStatsEntryVer13 implements OFBsnGentableDescStatsEntry {
private static final Logger logger = LoggerFactory.getLogger(OFBsnGentableDescStatsEntryVer13.class);
// version: 1.3
final static byte WIRE_VERSION = 4;
final static int LENGTH = 48;
private final static String DEFAULT_NAME = "";
private final static long DEFAULT_BUCKETS_SIZE = 0x0L;
private final static long DEFAULT_MAX_ENTRIES = 0x0L;
// OF message fields
private final GenTableId tableId;
private final String name;
private final long bucketsSize;
private final long maxEntries;
//
// package private constructor - used by readers, builders, and factory
OFBsnGentableDescStatsEntryVer13(GenTableId tableId, String name, long bucketsSize, long maxEntries) {
if(tableId == null) {
throw new NullPointerException("OFBsnGentableDescStatsEntryVer13: property tableId cannot be null");
}
if(name == null) {
throw new NullPointerException("OFBsnGentableDescStatsEntryVer13: property name cannot be null");
}
this.tableId = tableId;
this.name = name;
this.bucketsSize = bucketsSize;
this.maxEntries = maxEntries;
}
// Accessors for OF message fields
@Override
public GenTableId getTableId() {
return tableId;
}
@Override
public String getName() {
return name;
}
@Override
public long getBucketsSize() {
return bucketsSize;
}
@Override
public long getMaxEntries() {
return maxEntries;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
public OFBsnGentableDescStatsEntry.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFBsnGentableDescStatsEntry.Builder {
final OFBsnGentableDescStatsEntryVer13 parentMessage;
// OF message fields
private boolean tableIdSet;
private GenTableId tableId;
private boolean nameSet;
private String name;
private boolean bucketsSizeSet;
private long bucketsSize;
private boolean maxEntriesSet;
private long maxEntries;
BuilderWithParent(OFBsnGentableDescStatsEntryVer13 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public GenTableId getTableId() {
return tableId;
}
@Override
public OFBsnGentableDescStatsEntry.Builder setTableId(GenTableId tableId) {
this.tableId = tableId;
this.tableIdSet = true;
return this;
}
@Override
public String getName() {
return name;
}
@Override
public OFBsnGentableDescStatsEntry.Builder setName(String name) {
this.name = name;
this.nameSet = true;
return this;
}
@Override
public long getBucketsSize() {
return bucketsSize;
}
@Override
public OFBsnGentableDescStatsEntry.Builder setBucketsSize(long bucketsSize) {
this.bucketsSize = bucketsSize;
this.bucketsSizeSet = true;
return this;
}
@Override
public long getMaxEntries() {
return maxEntries;
}
@Override
public OFBsnGentableDescStatsEntry.Builder setMaxEntries(long maxEntries) {
this.maxEntries = maxEntries;
this.maxEntriesSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
@Override
public OFBsnGentableDescStatsEntry build() {
GenTableId tableId = this.tableIdSet ? this.tableId : parentMessage.tableId;
if(tableId == null)
throw new NullPointerException("Property tableId must not be null");
String name = this.nameSet ? this.name : parentMessage.name;
if(name == null)
throw new NullPointerException("Property name must not be null");
long bucketsSize = this.bucketsSizeSet ? this.bucketsSize : parentMessage.bucketsSize;
long maxEntries = this.maxEntriesSet ? this.maxEntries : parentMessage.maxEntries;
//
return new OFBsnGentableDescStatsEntryVer13(
tableId,
name,
bucketsSize,
maxEntries
);
}
}
static class Builder implements OFBsnGentableDescStatsEntry.Builder {
// OF message fields
private boolean tableIdSet;
private GenTableId tableId;
private boolean nameSet;
private String name;
private boolean bucketsSizeSet;
private long bucketsSize;
private boolean maxEntriesSet;
private long maxEntries;
@Override
public GenTableId getTableId() {
return tableId;
}
@Override
public OFBsnGentableDescStatsEntry.Builder setTableId(GenTableId tableId) {
this.tableId = tableId;
this.tableIdSet = true;
return this;
}
@Override
public String getName() {
return name;
}
@Override
public OFBsnGentableDescStatsEntry.Builder setName(String name) {
this.name = name;
this.nameSet = true;
return this;
}
@Override
public long getBucketsSize() {
return bucketsSize;
}
@Override
public OFBsnGentableDescStatsEntry.Builder setBucketsSize(long bucketsSize) {
this.bucketsSize = bucketsSize;
this.bucketsSizeSet = true;
return this;
}
@Override
public long getMaxEntries() {
return maxEntries;
}
@Override
public OFBsnGentableDescStatsEntry.Builder setMaxEntries(long maxEntries) {
this.maxEntries = maxEntries;
this.maxEntriesSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
//
@Override
public OFBsnGentableDescStatsEntry build() {
if(!this.tableIdSet)
throw new IllegalStateException("Property tableId doesn't have default value -- must be set");
if(tableId == null)
throw new NullPointerException("Property tableId must not be null");
String name = this.nameSet ? this.name : DEFAULT_NAME;
if(name == null)
throw new NullPointerException("Property name must not be null");
long bucketsSize = this.bucketsSizeSet ? this.bucketsSize : DEFAULT_BUCKETS_SIZE;
long maxEntries = this.maxEntriesSet ? this.maxEntries : DEFAULT_MAX_ENTRIES;
return new OFBsnGentableDescStatsEntryVer13(
tableId,
name,
bucketsSize,
maxEntries
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFBsnGentableDescStatsEntry> {
@Override
public OFBsnGentableDescStatsEntry readFrom(ByteBuf bb) throws OFParseError {
int start = bb.readerIndex();
int length = U16.f(bb.readShort());
if(length != 48)
throw new OFParseError("Wrong length: Expected=48(48), got="+length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
GenTableId tableId = GenTableId.read2Bytes(bb);
String name = ChannelUtils.readFixedLengthString(bb, 32);
long bucketsSize = U32.f(bb.readInt());
long maxEntries = U32.f(bb.readInt());
// pad: 4 bytes
bb.skipBytes(4);
OFBsnGentableDescStatsEntryVer13 bsnGentableDescStatsEntryVer13 = new OFBsnGentableDescStatsEntryVer13(
tableId,
name,
bucketsSize,
maxEntries
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", bsnGentableDescStatsEntryVer13);
return bsnGentableDescStatsEntryVer13;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFBsnGentableDescStatsEntryVer13Funnel FUNNEL = new OFBsnGentableDescStatsEntryVer13Funnel();
static class OFBsnGentableDescStatsEntryVer13Funnel implements Funnel<OFBsnGentableDescStatsEntryVer13> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFBsnGentableDescStatsEntryVer13 message, PrimitiveSink sink) {
// fixed value property length = 48
sink.putShort((short) 0x30);
message.tableId.putTo(sink);
sink.putUnencodedChars(message.name);
sink.putLong(message.bucketsSize);
sink.putLong(message.maxEntries);
// skip pad (4 bytes)
}
}
    /** Serializes this entry onto the supplied buffer via the shared WRITER. */
    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }
    // Stateless singleton writer shared by all instances.
    final static Writer WRITER = new Writer();
    /** Encodes an entry using the fixed 48-byte OpenFlow 1.3 wire layout. */
    static class Writer implements OFMessageWriter<OFBsnGentableDescStatsEntryVer13> {
        @Override
        public void write(ByteBuf bb, OFBsnGentableDescStatsEntryVer13 message) {
            // fixed value property length = 48
            bb.writeShort((short) 0x30);
            message.tableId.write2Bytes(bb);
            // name occupies a fixed 32-byte field on the wire
            ChannelUtils.writeFixedLengthString(bb, message.name, 32);
            bb.writeInt(U32.t(message.bucketsSize));
            bb.writeInt(U32.t(message.maxEntries));
            // pad: 4 bytes
            bb.writeZero(4);
        }
    }
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFBsnGentableDescStatsEntryVer13(");
b.append("tableId=").append(tableId);
b.append(", ");
b.append("name=").append(name);
b.append(", ");
b.append("bucketsSize=").append(bucketsSize);
b.append(", ");
b.append("maxEntries=").append(maxEntries);
b.append(")");
return b.toString();
}
    /**
     * Standard generated equals: identity, null and exact-class checks first,
     * then a null-safe field-by-field comparison of all four message fields.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        // Exact-class comparison (not instanceof): subclasses never compare equal.
        if (getClass() != obj.getClass())
            return false;
        OFBsnGentableDescStatsEntryVer13 other = (OFBsnGentableDescStatsEntryVer13) obj;
        if (tableId == null) {
            if (other.tableId != null)
                return false;
        } else if (!tableId.equals(other.tableId))
            return false;
        if (name == null) {
            if (other.name != null)
                return false;
        } else if (!name.equals(other.name))
            return false;
        if( bucketsSize != other.bucketsSize)
            return false;
        if( maxEntries != other.maxEntries)
            return false;
        return true;
    }
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((tableId == null) ? 0 : tableId.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * (int) (bucketsSize ^ (bucketsSize >>> 32));
result = prime * (int) (maxEntries ^ (maxEntries >>> 32));
return result;
}
}
| apache-2.0 |
apache/solr | solr/core/src/test/org/apache/solr/cloud/MockScriptUpdateProcessorFactory.java | 3095 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.cloud;
import java.io.IOException;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
import org.apache.solr.util.plugin.SolrCoreAware;
/**
 * Stand-in for the scripting update processor, which is only permitted for
 * trusted configSets. The real implementation lives in /modules/scripting;
 * the trusted-configset test in TestConfigSetsAPI uses this mock instead.
 */
public class MockScriptUpdateProcessorFactory extends UpdateRequestProcessorFactory
    implements SolrCoreAware {

  @Override
  public void inform(SolrCore core) {
    // Guard clause: trusted configsets are allowed to use this component.
    if (core.getCoreDescriptor().isConfigSetTrusted()) {
      return;
    }
    throw new SolrException(
        ErrorCode.UNAUTHORIZED,
        "The configset for this collection was uploaded without any authentication in place,"
            + " and this operation is not available for collections with untrusted configsets. To use this component, re-upload the configset"
            + " after enabling authentication and authorization.");
  }

  @Override
  public UpdateRequestProcessor getInstance(
      SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
    return new MockScriptUpdateRequestProcessor(next);
  }

  private static class MockScriptUpdateRequestProcessor extends UpdateRequestProcessor {

    public MockScriptUpdateRequestProcessor(UpdateRequestProcessor next) {
      super(next);
    }

    /**
     * Tags the incoming document the way the real script would, then delegates
     * to the rest of the processor chain.
     *
     * @param cmd the update command carrying the document to tag
     * @throws IOException If there is a low-level I/O error
     */
    @Override
    public void processAdd(AddUpdateCommand cmd) throws IOException {
      cmd.getSolrInputDocument().setField("script_added_i", "42");
      super.processAdd(cmd);
    }
  }
}
| apache-2.0 |
lankavitharana/carbon-event-processing | components/event-processor/org.wso2.carbon.event.processor.core/src/main/java/org/wso2/carbon/event/processor/core/internal/ha/HAServiceClient.java | 913 | /**
* Copyright (c) 2005 - 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
* <p/>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.event.processor.core.internal.ha;
/**
 * Client-side contract of the CEP high-availability service: allows one
 * cluster member to fetch execution-plan state from another member.
 */
public interface HAServiceClient {
    /**
     * Retrieves a snapshot of the named execution plan's state.
     *
     * @param tenantId             tenant that owns the execution plan
     * @param executionPlan        name of the execution plan to snapshot
     * @param activeCepMembership  NOTE(review): presumably identifies the active member to contact — confirm
     * @param currentCepMembership NOTE(review): presumably identifies the requesting member — confirm
     * @return the snapshot data for the execution plan
     * @throws Exception if the snapshot cannot be retrieved
     */
    SnapshotData getSnapshot(int tenantId, String executionPlan, CEPMembership activeCepMembership, CEPMembership currentCepMembership) throws Exception;
}
| apache-2.0 |
deeplearning4j/deeplearning4j | nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/schedule/RampSchedule.java | 1040 | package org.nd4j.linalg.schedule;
/**
 * A wrapper {@link ISchedule} that linearly ramps the returned value from
 * {@code baseLR / numIter} up to the underlying schedule's full value over the
 * first {@code numIter} iterations, then passes the base value through
 * unchanged. The base learning rate at any point is whatever the wrapped
 * schedule produces. Useful for a slow start, e.g. in transfer learning.
 *
 * @author Alex Black
 */
public class RampSchedule implements ISchedule {

    protected final ISchedule baseSchedule;
    protected final int numIter;

    public RampSchedule(ISchedule baseSchedule, int numIter) {
        this.baseSchedule = baseSchedule;
        this.numIter = numIter;
    }

    @Override
    public double valueAt(int iteration, int epoch) {
        double base = baseSchedule.valueAt(iteration, epoch);
        if (iteration < numIter - 1) {
            // Ramp-up phase: scale the base value by (iteration + 1) / numIter.
            double fraction = (iteration + 1) / (double) numIter;
            return fraction * base;
        }
        // Ramp complete: return the base schedule's value unmodified.
        return base;
    }

    @Override
    public ISchedule clone() {
        return new RampSchedule(baseSchedule.clone(), numIter);
    }
}
| apache-2.0 |
wu-sheng/sky-walking | apm-sniffer/apm-sdk-plugin/mariadb-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/jdbc/mariadb/v2/Constants.java | 1047 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.plugin.jdbc.mariadb.v2;
import org.apache.skywalking.apm.agent.core.context.tag.StringTag;
/**
 * Shared span-tag constants for the MariaDB 2.x JDBC plugin.
 */
public class Constants {

    /** Span tag carrying the bound SQL parameter values of a traced statement. */
    public static final StringTag SQL_PARAMETERS = new StringTag("db.sql.parameters");

    private Constants() {
        // Constant holder; not meant to be instantiated.
    }
}
| apache-2.0 |
Randgalt/halva | halva-processor/src/main/java/io/soabase/halva/processor/implicit/PassProcessImplicits.java | 4458 | /**
* Copyright 2016 Jordan Zimmerman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.soabase.halva.processor.implicit;
import io.soabase.halva.implicit.Implicit;
import io.soabase.halva.processor.Environment;
import io.soabase.halva.processor.Pass;
import io.soabase.halva.processor.WorkItem;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
 * Processing pass for {@code @ImplicitClass} work items: validates each
 * annotated class, reports compile errors for unsupported targets, and
 * collects the eligible methods/constructors into {@link ImplicitSpec}s that
 * seed the subsequent creation pass.
 */
class PassProcessImplicits implements Pass
{
    private final Environment environment;
    private final List<WorkItem> implicitClassItems;
    private final List<ContextItem> contextItems;
    PassProcessImplicits(Environment environment, List<WorkItem> implicitClassItems, List<ContextItem> contextItems)
    {
        this.environment = environment;
        this.implicitClassItems = implicitClassItems;
        this.contextItems = contextItems;
    }
    /**
     * Validates every item and returns the next pass ({@link PassCreate})
     * seeded with the collected specs. Always returns a non-empty Optional;
     * validation failures are reported via {@code environment.error(...)}.
     */
    @Override
    public Optional<Pass> process()
    {
        List<ImplicitSpec> specs = new ArrayList<>();
        implicitClassItems.forEach(item -> {
            Element element = item.getElement();
            // do/while(false) acts as a structured bail-out: each "break"
            // abandons only the current item and moves on to the next one.
            do
            {
                if ( element.getKind() != ElementKind.CLASS )
                {
                    environment.error(element, "@ImplicitClass can only be applied to classes");
                    break;
                }
                TypeElement typeElement = (TypeElement)element;
                // Final/private classes are rejected — presumably because the
                // generated code must be able to subclass/access them; confirm.
                if ( typeElement.getModifiers().contains(Modifier.FINAL) || typeElement.getModifiers().contains(Modifier.PRIVATE) )
                {
                    environment.error(element, "@ImplicitClass cannot be applied to final or private classes");
                    break;
                }
                List<ImplicitItem> items = new ArrayList<>();
                item.getElement().getEnclosedElements().forEach(child -> {
                    if ( (child.getKind() == ElementKind.METHOD) || (child.getKind() == ElementKind.CONSTRUCTOR) )
                    {
                        ExecutableElement executable = (ExecutableElement)child;
                        // A member participates if any parameter carries @Implicit.
                        boolean hasImplicits = executable.getParameters().stream().anyMatch(parameter -> parameter.getAnnotation(Implicit.class) != null);
                        boolean isPrivateOrFinal = executable.getModifiers().contains(Modifier.PRIVATE) || executable.getModifiers().contains(Modifier.FINAL);
                        if ( hasImplicits )
                        {
                            // Members with implicit parameters must not be
                            // private or final.
                            if ( executable.getModifiers().contains(Modifier.PRIVATE) )
                            {
                                environment.error(executable, "Implicits cannot be applied to private methods");
                            }
                            else if ( executable.getModifiers().contains(Modifier.FINAL) )
                            {
                                environment.error(executable, "Implicits cannot be applied to final methods");
                            }
                            else
                            {
                                items.add(new ImplicitItem(executable));
                            }
                        }
                        else if ( (child.getKind() == ElementKind.CONSTRUCTOR) && !isPrivateOrFinal )
                        {
                            // Accessible constructors without implicit
                            // parameters are also recorded.
                            items.add(new ImplicitItem(executable));
                        }
                    }
                });
                environment.getGeneratedManager().registerGenerated(typeElement, item.getAnnotationReader());
                specs.add(new ImplicitSpec(typeElement, item.getAnnotationReader(), items));
            } while ( false );
        });
        return Optional.of(new PassCreate(environment, specs, contextItems));
    }
}
| apache-2.0 |
dimbleby/JGroups | src/org/jgroups/protocols/MPING.java | 11651 | package org.jgroups.protocols;
import org.jgroups.Event;
import org.jgroups.Global;
import org.jgroups.Message;
import org.jgroups.annotations.LocalAddress;
import org.jgroups.annotations.Property;
import org.jgroups.conf.PropertyConverters;
import org.jgroups.util.*;
import java.io.*;
import java.net.*;
import java.util.*;
/**
* Uses its own IP multicast socket to send and receive discovery requests/responses. Can be used in
* conjuntion with a non-UDP transport, e.g. TCP.<p>
* The discovery is <em>asymmetric</em>: discovery requests are broadcast via the multicast socket, and received via the
* multicast socket by everyone in the group. However, the discovery responses are sent back via the regular transport
* (e.g. TCP) to the sender (discovery request contained sender's regular address, e.g. 192.168.0.2:7800).
* @author Bela Ban
*/
public class MPING extends PING implements Runnable {
protected static final boolean can_bind_to_mcast_addr; // are we running on Linux ?
static {
can_bind_to_mcast_addr=(Util.checkForLinux() && !Util.checkForAndroid())
|| Util.checkForSolaris() || Util.checkForHp();
}
/* ----------------------------------------- Properties -------------------------------------------------- */
@LocalAddress
@Property(description="Bind address for multicast socket. " +
"The following special values are also recognized: GLOBAL, SITE_LOCAL, LINK_LOCAL and NON_LOOPBACK",
systemProperty={Global.BIND_ADDR})
protected InetAddress bind_addr;
@Property(name="bind_interface", converter=PropertyConverters.BindInterface.class,
description="The interface (NIC) which should be used by this transport",dependsUpon="bind_addr")
protected String bind_interface_str;
@Property(description="Time to live for discovery packets. Default is 8", systemProperty=Global.MPING_IP_TTL)
protected int ip_ttl=8;
@Property(description="Multicast address to be used for discovery", name="mcast_addr", systemProperty=Global.MPING_MCAST_ADDR,
defaultValueIPv4="230.5.6.7", defaultValueIPv6="ff0e::5:6:7")
protected InetAddress mcast_addr;
@Property(description="Multicast port for discovery packets. Default is 7555", systemProperty=Global.MPING_MCAST_PORT)
protected int mcast_port=7555;
@Property(description="If true, the transport should use all available interfaces to receive multicast messages")
protected boolean receive_on_all_interfaces;
/**
* List<NetworkInterface> of interfaces to receive multicasts on. The
* multicast receive socket will listen on all of these interfaces. This is
* a comma-separated list of IP addresses or interface names. E.g.
* "192.168.5.1,eth1,127.0.0.1". Duplicates are discarded; we only bind to
* an interface once. If this property is set, it override
* receive_on_all_interfaces.
*/
@Property(converter=PropertyConverters.NetworkInterfaceList.class, description="List of interfaces to receive multicasts on")
protected List<NetworkInterface> receive_interfaces;
/**
* If true, the transport should use all available interfaces to send
* multicast messages. This means the same multicast message is sent N
* times, so use with care
*/
@Property(description="Whether send messages are sent on all interfaces. Default is false")
protected boolean send_on_all_interfaces;
/**
* List<NetworkInterface> of interfaces to send multicasts on. The multicast send socket will send the same multicast
* message on all of these interfaces. This is a comma-separated list of IP addresses or interface names.
* E.g. "192.168.5.1,eth1,127.0.0.1". Duplicates are discarded. If this property is set, it override send_on_all_interfaces.
*/
@Property(converter=PropertyConverters.NetworkInterfaceList.class, description="List of interfaces to send multicasts on")
protected List<NetworkInterface> send_interfaces;
/* --------------------------------------------- Fields ------------------------------------------------------ */
protected MulticastSocket mcast_sock;
/** If we have multiple mcast send sockets, e.g. send_interfaces or send_on_all_interfaces enabled */
protected MulticastSocket[] mcast_send_sockets;
protected volatile Thread receiver;
    public MPING() {
    }
    // ---- Simple accessors/mutators for the configurable properties ----
    public InetAddress getBindAddr() {
        return bind_addr;
    }
    public void setBindAddr(InetAddress bind_addr) {
        this.bind_addr=bind_addr;
    }
    public List<NetworkInterface> getReceiveInterfaces() {
        return receive_interfaces;
    }
    public List<NetworkInterface> getSendInterfaces() {
        return send_interfaces;
    }
    public boolean isReceiveOnAllInterfaces() {
        return receive_on_all_interfaces;
    }
    public boolean isSendOnAllInterfaces() {
        return send_on_all_interfaces;
    }
    public int getTTL() {
        return ip_ttl;
    }
    public void setTTL(int ip_ttl) {
        this.ip_ttl=ip_ttl;
    }
    public InetAddress getMcastAddr() {
        return mcast_addr;
    }
    public void setMcastAddr(InetAddress mcast_addr) {
        this.mcast_addr=mcast_addr;
    }
    /** Convenience setter: resolves the given host/address string to an InetAddress. */
    public void setMulticastAddress(String addr) throws UnknownHostException {
        mcast_addr=InetAddress.getByName(addr);
    }
    public int getMcastPort() {
        return mcast_port;
    }
    public void setMcastPort(int mcast_port) {
        this.mcast_port=mcast_port;
    }
    /**
     * Intercepts CONFIG events to lazily pick up the transport's bind_addr
     * (only if none was configured); all other events go to the superclass.
     */
    @SuppressWarnings("unchecked")
    public Object up(Event evt) {
        if(evt.getType() == Event.CONFIG) {
            if(bind_addr == null) {
                Map<String,Object> config=evt.getArg();
                bind_addr=(InetAddress)config.get("bind_addr");
            }
            return up_prot.up(evt);
        }
        return super.up(evt);
    }
    /** Initializes the protocol and logs the effective discovery configuration. */
    public void init() throws Exception {
        super.init();
        log.debug("bind_addr=%s, mcast_addr=%s, mcast_port=%d", bind_addr, mcast_addr, mcast_port);
    }
public void start() throws Exception {
if(can_bind_to_mcast_addr) // https://jira.jboss.org/jira/browse/JGRP-836 - prevent cross talking on Linux
mcast_sock=Util.createMulticastSocket(getSocketFactory(), "jgroups.mping.mcast_sock", mcast_addr, mcast_port, log);
else
mcast_sock=getSocketFactory().createMulticastSocket("jgroups.mping.mcast_sock", mcast_port);
mcast_sock.setTimeToLive(ip_ttl);
if(receive_on_all_interfaces || (receive_interfaces != null && !receive_interfaces.isEmpty())) {
List<NetworkInterface> interfaces;
if(receive_interfaces != null)
interfaces=receive_interfaces;
else
interfaces=Util.getAllAvailableInterfaces();
bindToInterfaces(interfaces, mcast_sock, mcast_addr);
}
else {
if(bind_addr != null)
mcast_sock.setInterface(bind_addr);
mcast_sock.joinGroup(mcast_addr);
}
// Create mcast sender socket
if(send_on_all_interfaces || (send_interfaces != null && !send_interfaces.isEmpty())) {
List interfaces;
if(send_interfaces != null)
interfaces=send_interfaces;
else
interfaces=Util.getAllAvailableInterfaces();
mcast_send_sockets=new MulticastSocket[interfaces.size()];
int index=0;
for(Iterator it=interfaces.iterator(); it.hasNext();) {
NetworkInterface intf=(NetworkInterface)it.next();
mcast_send_sockets[index]=new MulticastSocket();
mcast_send_sockets[index].setNetworkInterface(intf);
mcast_send_sockets[index].setTimeToLive(ip_ttl);
index++;
}
}
startReceiver();
super.start();
}
    /**
     * Stops discovery: clears the receiver reference and closes the multicast
     * socket. Closing the socket makes a blocked receive() throw
     * SocketException, which terminates the run() loop.
     */
    public void stop() {
        receiver=null;
        Util.close(mcast_sock);
        mcast_sock=null;
        super.stop();
    }
    /**
     * Receive loop executed by the receiver thread: blocks on the multicast socket, turns
     * each datagram into a Message and passes it up the stack. The loop ends when stop()
     * clears the socket/receiver references, when another thread becomes the receiver, or
     * when the socket is closed (surfacing as a SocketException).
     */
    public void run() {
        final byte[] receive_buf=new byte[65535]; // large enough for any UDP payload
        DatagramPacket packet=new DatagramPacket(receive_buf, receive_buf.length);
        while(mcast_sock != null && receiver != null && Thread.currentThread().equals(receiver)) {
            // reset the length which the previous receive() may have shrunk
            packet.setData(receive_buf, 0, receive_buf.length);
            try {
                mcast_sock.receive(packet);
                DataInput inp=new ByteArrayDataInputStream(packet.getData(), packet.getOffset(), packet.getLength());
                Message msg=new Message();
                msg.readFrom(inp);
                if(!Objects.equals(local_addr,msg.getSrc())) // discard discovery request from self
                    up(msg);
            }
            catch(SocketException socketEx) {
                // socket was closed (e.g. by stop()): exit the loop
                break;
            }
            catch(Throwable ex) {
                log.error(Util.getMessage("FailedReceivingPacketFrom"), packet.getSocketAddress(), ex);
            }
        }
        log.debug("receiver thread terminated");
    }
protected void bindToInterfaces(List<NetworkInterface> interfaces, MulticastSocket s, InetAddress mcast_addr) throws IOException {
SocketAddress tmp_mcast_addr=new InetSocketAddress(mcast_addr, mcast_port);
for(Iterator it=interfaces.iterator(); it.hasNext();) {
NetworkInterface i=(NetworkInterface)it.next();
for(Enumeration en2=i.getInetAddresses(); en2.hasMoreElements();) {
InetAddress addr=(InetAddress)en2.nextElement();
if ((Util.getIpStackType() == StackType.IPv4 && addr instanceof Inet4Address)
|| (Util.getIpStackType() == StackType.IPv6 && addr instanceof Inet6Address)) {
s.joinGroup(tmp_mcast_addr, i);
log.trace("joined " + tmp_mcast_addr + " on " + i.getName() + " (" + addr + ")");
break;
}
}
}
}
protected void startReceiver() {
if(receiver == null || !receiver.isAlive()) {
ThreadFactory factory=getThreadFactory();
receiver=factory != null? factory.newThread(this, "MPING") : new Thread(this, "MPING)");
receiver.setDaemon(true);
receiver.start();
log.debug("receiver thread started");
}
}
@Override
protected void sendMcastDiscoveryRequest(Message msg) {
try {
if(msg.getSrc() == null)
msg.setSrc(local_addr);
ByteArrayDataOutputStream out=new ByteArrayDataOutputStream((int)(msg.size()+1));
msg.writeTo(out);
Buffer buf=out.getBuffer();
DatagramPacket packet=new DatagramPacket(buf.getBuf(), buf.getOffset(), buf.getLength(), mcast_addr, mcast_port);
if(mcast_send_sockets != null) {
MulticastSocket s;
for(int i=0; i < mcast_send_sockets.length; i++) {
s=mcast_send_sockets[i];
try {
s.send(packet);
}
catch(Exception e) {
log.error(Util.getMessage("FailedSendingPacketOnSocket"), s);
}
}
}
else { // DEFAULT path
if(mcast_sock != null)
mcast_sock.send(packet);
}
}
catch(Exception ex) {
log.error(Util.getMessage("FailedSendingDiscoveryRequest"), ex);
}
}
}
| apache-2.0 |
incodehq/isis | core/metamodel/src/main/java/org/apache/isis/core/metamodel/facets/WhenAndWhereValueFacetAbstract.java | 1881 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.metamodel.facets;
import java.util.Map;
import org.apache.isis.applib.annotation.When;
import org.apache.isis.applib.annotation.Where;
import org.apache.isis.core.metamodel.facetapi.Facet;
import org.apache.isis.core.metamodel.facetapi.FacetHolder;
/**
 * Convenience base class for facets carrying both a {@link When} and a {@link Where}
 * value. The {@link When} handling is inherited from {@link WhenValueFacetAbstract};
 * this class only adds the {@link Where} dimension.
 */
public abstract class WhenAndWhereValueFacetAbstract extends WhenValueFacetAbstract implements WhenAndWhereValueFacet {

    /** Context(s) in which the facet applies; fixed at construction time. */
    private final Where where;

    public WhenAndWhereValueFacetAbstract(final Class<? extends Facet> facetType, final FacetHolder holder, final When when, final Where where) {
        super(facetType, holder, when);
        this.where = where;
    }

    @Override
    public Where where() {
        return where;
    }

    @Override
    public void appendAttributesTo(final Map<String, Object> attributeMap) {
        super.appendAttributesTo(attributeMap);
        attributeMap.put("where", where);
    }

    @Override
    protected String toStringValues() {
        final String inherited = super.toStringValues();
        return inherited + "; where =" + where.getFriendlyName();
    }
}
| apache-2.0 |
apache/incubator-asterixdb | asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableLocalSqlKurtosisAggregateFunction.java | 2661 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.runtime.aggregates.serializable.std;
import java.io.DataOutput;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.SourceLocation;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
/**
 * Local (first-stage) serializable aggregate function for the SQL {@code kurtosis}
 * aggregate: accumulates per-partition statistics into the serialized state and emits
 * them as partial results, to be combined by a downstream intermediate/global aggregator.
 */
public class SerializableLocalSqlKurtosisAggregateFunction
        extends AbstractSerializableSingleVariableStatisticsAggregateFunction {

    public SerializableLocalSqlKurtosisAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context,
            SourceLocation sourceLoc) throws HyracksDataException {
        super(args, context, sourceLoc);
    }

    // Each input tuple contributes its value to the running statistics in the state.
    @Override
    public void step(IFrameTupleReference tuple, byte[] state, int start, int len) throws HyracksDataException {
        processDataValues(tuple, state, start, len);
    }

    // A local aggregate always emits partial (combinable) results, never a final value.
    @Override
    public void finish(byte[] state, int start, int len, DataOutput result) throws HyracksDataException {
        finishPartialResults(state, start, len, result);
    }

    @Override
    public void finishPartial(byte[] state, int start, int len, DataOutput result) throws HyracksDataException {
        finish(state, start, len, result);
    }

    // SQL aggregate semantics: NULL values are simply skipped.
    @Override
    protected void processNull(byte[] state, int start) {
    }

    // Kurtosis needs the third and fourth central moments tracked in the state.
    @Override
    protected boolean getM3Flag() {
        return true;
    }

    @Override
    protected boolean getM4Flag() {
        return true;
    }

    // NOTE(review): returns the generic KURTOSIS identifier rather than a SQL-specific
    // one - presumably the identifier is shared across variants; confirm against
    // BuiltinFunctions.
    @Override
    protected FunctionIdentifier getFunctionIdentifier() {
        return BuiltinFunctions.KURTOSIS;
    }
}
| apache-2.0 |
apache/incubator-shardingsphere | shardingsphere-sql-parser/shardingsphere-sql-parser-statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/opengauss/dcl/OpenGaussAlterUserStatement.java | 1257 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.sql.parser.sql.dialect.statement.opengauss.dcl;
import lombok.ToString;
import org.apache.shardingsphere.sql.parser.sql.common.statement.dcl.AlterUserStatement;
import org.apache.shardingsphere.sql.parser.sql.dialect.statement.opengauss.OpenGaussStatement;
/**
 * OpenGauss {@code ALTER USER} statement.
 *
 * Marker subclass that ties the dialect-neutral {@link AlterUserStatement} to the
 * openGauss dialect via {@link OpenGaussStatement}; it adds no state or behavior of
 * its own.
 */
@ToString
public final class OpenGaussAlterUserStatement extends AlterUserStatement implements OpenGaussStatement {
}
| apache-2.0 |
viggyprabhu/geowave | geowave-store/src/main/java/mil/nga/giat/geowave/store/dimension/SpatialField.java | 4337 | package mil.nga.giat.geowave.store.dimension;
import java.nio.ByteBuffer;
import mil.nga.giat.geowave.index.ByteArrayId;
import mil.nga.giat.geowave.index.PersistenceUtils;
import mil.nga.giat.geowave.index.dimension.NumericDimensionDefinition;
import mil.nga.giat.geowave.index.dimension.bin.BinRange;
import mil.nga.giat.geowave.index.sfc.data.NumericData;
import mil.nga.giat.geowave.index.sfc.data.NumericRange;
import mil.nga.giat.geowave.store.data.field.FieldReader;
import mil.nga.giat.geowave.store.data.field.FieldWriter;
/**
 * A base class for EPSG:4326 latitude/longitude fields that use JTS geometry.
 * Delegates all numeric range/normalization logic to a wrapped
 * {@link NumericDimensionDefinition} and uses a {@link GeometryAdapter} to read and
 * write the geometry field value.
 */
abstract public class SpatialField implements
		DimensionField<GeometryWrapper>
{
	// numeric definition this field delegates to; non-final because fromBinary() reassigns it
	private NumericDimensionDefinition baseDefinition;
	// reader/writer for GeometryWrapper values of this field
	private final GeometryAdapter geometryAdapter;
	// identifier of this field; non-final because fromBinary() reassigns it
	private ByteArrayId fieldId;

	// no-arg constructor for deserialization; state is filled in by fromBinary()
	protected SpatialField() {
		geometryAdapter = new GeometryAdapter();
	}

	// convenience constructor using the default geometry field id
	public SpatialField(
			final NumericDimensionDefinition baseDefinition ) {
		this(
				baseDefinition,
				GeometryAdapter.DEFAULT_GEOMETRY_FIELD_ID);
	}

	@Override
	public NumericData getFullRange() {
		return this.baseDefinition.getFullRange();
	}

	public SpatialField(
			final NumericDimensionDefinition baseDefinition,
			final ByteArrayId fieldId ) {
		this.baseDefinition = baseDefinition;
		this.fieldId = fieldId;
		geometryAdapter = new GeometryAdapter();
	}

	// returns the bin's normalized bounds unchanged (no denormalization is applied here)
	@Override
	public NumericRange getDenormalizedRange(
			final BinRange range ) {
		return new NumericRange(
				range.getNormalizedMin(),
				range.getNormalizedMax());
	}

	@Override
	public double getRange() {
		return baseDefinition.getRange();
	}

	// no fixed-size bin prefix is used for spatial fields
	@Override
	public int getFixedBinIdSize() {
		return 0;
	}

	@Override
	public NumericRange getBounds() {
		return baseDefinition.getBounds();
	}

	@Override
	public double normalize(
			final double value ) {
		return baseDefinition.normalize(value);
	}

	@Override
	public double denormalize(
			final double value ) {
		return baseDefinition.denormalize(value);
	}

	@Override
	public BinRange[] getNormalizedRanges(
			final NumericData range ) {
		return baseDefinition.getNormalizedRanges(range);
	}

	@Override
	public ByteArrayId getFieldId() {
		return fieldId;
	}

	@Override
	public FieldWriter<?, GeometryWrapper> getWriter() {
		return geometryAdapter;
	}

	@Override
	public FieldReader<GeometryWrapper> getReader() {
		return geometryAdapter;
	}

	@Override
	public NumericDimensionDefinition getBaseDefinition() {
		return baseDefinition;
	}

	// serialized layout: [int fieldId length][fieldId bytes][persisted base definition]
	@Override
	public byte[] toBinary() {
		final byte[] dimensionBinary = PersistenceUtils.toBinary(baseDefinition);
		final ByteBuffer buf = ByteBuffer.allocate(dimensionBinary.length + fieldId.getBytes().length + 4);
		buf.putInt(fieldId.getBytes().length);
		buf.put(fieldId.getBytes());
		buf.put(dimensionBinary);
		return buf.array();
	}

	// inverse of toBinary(): restores fieldId and the persisted base definition
	@Override
	public void fromBinary(
			final byte[] bytes ) {
		final ByteBuffer buf = ByteBuffer.wrap(bytes);
		final int fieldIdLength = buf.getInt();
		final byte[] fieldIdBinary = new byte[fieldIdLength];
		buf.get(fieldIdBinary);
		fieldId = new ByteArrayId(
				fieldIdBinary);
		final byte[] dimensionBinary = new byte[bytes.length - fieldIdLength - 4];
		buf.get(dimensionBinary);
		baseDefinition = PersistenceUtils.fromBinary(
				dimensionBinary,
				NumericDimensionDefinition.class);
	}

	// hashCode/equals include the concrete class name so instances of different
	// SpatialField subclasses with the same state are not considered equal
	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		final String className = getClass().getName();
		result = (prime * result) + ((className == null) ? 0 : className.hashCode());
		result = (prime * result) + ((baseDefinition == null) ? 0 : baseDefinition.hashCode());
		result = (prime * result) + ((fieldId == null) ? 0 : fieldId.hashCode());
		return result;
	}

	@Override
	public boolean equals(
			final Object obj ) {
		if (this == obj) {
			return true;
		}
		if (obj == null) {
			return false;
		}
		if (getClass() != obj.getClass()) {
			return false;
		}
		final SpatialField other = (SpatialField) obj;
		if (baseDefinition == null) {
			if (other.baseDefinition != null) {
				return false;
			}
		}
		else if (!baseDefinition.equals(other.baseDefinition)) {
			return false;
		}
		if (fieldId == null) {
			if (other.fieldId != null) {
				return false;
			}
		}
		else if (!fieldId.equals(other.fieldId)) {
			return false;
		}
		return true;
	}
}
| apache-2.0 |
sdgdsffdsfff/zeus | slb/src/main/java/com/ctrip/zeus/service/model/Archive.java | 1412 | package com.ctrip.zeus.service.model;
import java.util.Date;
/**
* Created by zhoumy on 2016/12/15.
*/
public class Archive<T> {
private long id;
private int version;
private Date createdTime;
private T content;
private String author;
private String commitMessage;
public long getId() {
return id;
}
public Archive<T> setId(long id) {
this.id = id;
return this;
}
public int getVersion() {
return version;
}
public Archive<T> setVersion(int version) {
this.version = version;
return this;
}
public Date getCreatedTime() {
return createdTime;
}
public Archive<T> setCreatedTime(Date createdTime) {
this.createdTime = createdTime;
return this;
}
public T getContent() {
return content;
}
public Archive<T> setContent(T content) {
this.content = content;
return this;
}
public String getAuthor() {
return author;
}
public Archive<T> setAuthor(String author) {
this.author = author;
return this;
}
public String getCommitMessage() {
return commitMessage;
}
public Archive<T> setCommitMessage(String commitMessage) {
this.commitMessage = commitMessage;
return this;
}
} | apache-2.0 |
bradseefeld/AppEngine-MapReduce | test/com/google/appengine/tools/mapreduce/AppEngineJobContextTest.java | 7038 | /*
* Copyright 2010 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.appengine.tools.mapreduce;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalMemcacheServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
import com.google.appengine.tools.development.testing.LocalTaskQueueTestConfig;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import javax.servlet.http.HttpServletRequest;
/**
 * Tests {@link AppEngineJobContext}.
 *
 * @author frew@google.com (Fred Wulff)
 */
public class AppEngineJobContextTest extends TestCase {
  // Minimal Hadoop configuration XML defining a single property (foo.bar=/tmp/foo),
  // used to verify configuration parsing and round-tripping through the datastore.
  private static final String SIMPLE_CONF_XML =
      "<?xml version=\"1.0\"?>"
      + "<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>"
      + "<configuration>"
      + "<property>"
      + "<name>foo.bar</name>"
      + "<value>/tmp/foo</value>"
      + "</property>"
      + "</configuration>";

  // Local (in-process) App Engine service stubs: datastore, task queue, memcache.
  private final LocalServiceTestHelper helper =
      new LocalServiceTestHelper(new LocalDatastoreServiceTestConfig(),
          new LocalTaskQueueTestConfig(), new LocalMemcacheServiceTestConfig());

  private DatastoreService ds;
  private JobID jobId;

  @Override
  public void setUp() throws Exception {
    super.setUp();
    helper.setUp();
    ds = DatastoreServiceFactory.getDatastoreService();
    jobId = new JobID("foo", 1);
  }

  @Override
  public void tearDown() throws Exception {
    helper.tearDown();
    super.tearDown();
  }

  // Configuration passed directly as a request parameter should be parsed as-is.
  public void testGetConfigurationFromRequest() {
    HttpServletRequest req = createMock(HttpServletRequest.class);
    expect(req.getParameter(AppEngineJobContext.CONFIGURATION_PARAMETER_NAME))
        .andReturn(SIMPLE_CONF_XML);
    replay(req);
    Configuration conf = AppEngineJobContext.getConfigurationFromRequest(req, true);
    assertEquals("/tmp/foo", conf.get("foo.bar"));
    verify(req);
  }

  // A request carrying only the job ID should load the configuration persisted
  // in the datastore for that job.
  public void testGetJobContextFromRequest() {
    HttpServletRequest req = createMock(HttpServletRequest.class);
    // NOTE: shadows the jobId field set up in setUp() (same value).
    JobID jobId = new JobID("foo", 1);
    expect(req.getParameter(AppEngineJobContext.JOB_ID_PARAMETER_NAME))
        .andReturn(jobId.toString())
        .anyTimes();
    replay(req);
    Configuration conf = ConfigurationXmlUtil.getConfigurationFromXml(SIMPLE_CONF_XML);
    persistMRState(jobId, conf);
    JobContext context = new AppEngineJobContext(req, false);
    assertEquals("/tmp/foo", context.getConfiguration().get("foo.bar"));
    assertEquals(jobId.toString(), context.getJobID().toString());
    verify(req);
  }

  // Creates an MR state with an empty configuration and the given job ID,
  // and stores it in the datastore.
  private void persistMRState(JobID jobId, Configuration conf) {
    MapReduceState mrState = MapReduceState.generateInitializedMapReduceState(ds, "", jobId, 0);
    mrState.setConfigurationXML(ConfigurationXmlUtil.convertConfigurationToXml(conf));
    mrState.persist();
  }

  /**
   * Ensures that {@link AppEngineJobContext#getWorkerQueue()} falls back
   * to the default queue.
   */
  public void testGetQueueDefault() {
    HttpServletRequest req = createMock(HttpServletRequest.class);
    expect(req.getHeader("X-AppEngine-QueueName")).andReturn(null);
    expect(req.getParameter(AppEngineJobContext.JOB_ID_PARAMETER_NAME))
        .andReturn(jobId.toString())
        .anyTimes();
    replay(req);
    Configuration conf = new Configuration(false);
    persistMRState(jobId, conf);
    AppEngineJobContext context = new AppEngineJobContext(req, false);
    assertEquals("default", context.getWorkerQueue().getQueueName());
    verify(req);
  }

  /**
   * Ensures that {@link AppEngineJobContext#getWorkerQueue()} uses the current
   * task queue if set to non-default.
   */
  public void testGetQueueRequest() {
    HttpServletRequest req = createMock(HttpServletRequest.class);
    expect(req.getHeader("X-AppEngine-QueueName")).andReturn("bar");
    expect(req.getParameter(AppEngineJobContext.JOB_ID_PARAMETER_NAME))
        .andReturn(jobId.toString())
        .anyTimes();
    replay(req);
    Configuration conf = new Configuration(false);
    persistMRState(jobId, conf);
    AppEngineJobContext context = new AppEngineJobContext(req, false);
    assertEquals("bar", context.getWorkerQueue().getQueueName());
    verify(req);
  }

  /**
   * Ensures that {@link AppEngineJobContext#getWorkerQueue()} uses the worker
   * queue specified in the job's configuration.
   */
  public void testGetQueueConfiguration() {
    HttpServletRequest req = createMock(HttpServletRequest.class);
    expect(req.getHeader("X-AppEngine-QueueName")).andReturn("bar");
    expect(req.getParameter(AppEngineJobContext.JOB_ID_PARAMETER_NAME))
        .andReturn(jobId.toString())
        .anyTimes();
    replay(req);
    Configuration conf = new Configuration(false);
    // Configuration value should take precedence
    conf.set(AppEngineJobContext.WORKER_QUEUE_KEY, "baz");
    persistMRState(jobId, conf);
    AppEngineJobContext context = new AppEngineJobContext(req, false);
    assertEquals("baz", context.getWorkerQueue().getQueueName());
    verify(req);
  }

  /**
   * Ensures that {@link AppEngineJobContext#getControllerQueue()} uses the
   * controller queue specified in the job's configuration.
   */
  public void testGetQueueConfigurationController() {
    HttpServletRequest req = createMock(HttpServletRequest.class);
    expect(req.getHeader("X-AppEngine-QueueName")).andReturn("bar");
    expect(req.getParameter(AppEngineJobContext.JOB_ID_PARAMETER_NAME))
        .andReturn(jobId.toString())
        .anyTimes();
    replay(req);
    Configuration conf = new Configuration(false);
    // Configuration value should take precedence
    conf.set(AppEngineJobContext.CONTROLLER_QUEUE_KEY, "baz");
    persistMRState(jobId, conf);
    AppEngineJobContext context = new AppEngineJobContext(req, false);
    assertEquals("baz", context.getControllerQueue().getQueueName());
    verify(req);
  }
}
| apache-2.0 |
tuwiendsg/MELA | MELA-Extensions/MELA-SpaceAndPathwayAnalysisService/src/test/java/performance/PerformanceEvalOffline.java | 6881 | /**
* Copyright 2013 Technische Universitaet Wien (TUW), Distributed Systems Group
* E184
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package performance;
/**
 * Placeholder for the offline performance evaluation of the space/pathway analysis.
 *
 * @Author Daniel Moldovan
 * @E-mail: d.moldovan@dsg.tuwien.ac.at
 */
public class PerformanceEvalOffline {
    // Intentionally empty: the previous offline performance-evaluation driver (a
    // fully commented-out main() exercising elasticity space/pathway analysis against
    // SQL-stored monitoring snapshots) was dead code and has been removed. Recover it
    // from version control if an offline benchmark is needed again.
}
| apache-2.0 |
Gaduo/hapi-fhir | hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/model/codesystems/ConceptPropertyTypeEnumFactory.java | 3185 | package org.hl7.fhir.dstu3.model.codesystems;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Tue, Dec 6, 2016 09:42-0500 for FHIR v1.8.0
import org.hl7.fhir.dstu3.model.EnumFactory;
/**
 * {@link EnumFactory} that maps between {@link ConceptPropertyType} constants and
 * their FHIR code strings.
 */
public class ConceptPropertyTypeEnumFactory implements EnumFactory<ConceptPropertyType> {

  /**
   * Parses a FHIR code string into the corresponding enum constant.
   * Returns null for a null or empty code; throws for an unrecognized one.
   */
  public ConceptPropertyType fromCode(String codeString) throws IllegalArgumentException {
    if (codeString == null || codeString.isEmpty())
      return null;
    if (codeString.equals("code"))
      return ConceptPropertyType.CODE;
    if (codeString.equals("Coding"))
      return ConceptPropertyType.CODING;
    if (codeString.equals("string"))
      return ConceptPropertyType.STRING;
    if (codeString.equals("integer"))
      return ConceptPropertyType.INTEGER;
    if (codeString.equals("boolean"))
      return ConceptPropertyType.BOOLEAN;
    if (codeString.equals("dateTime"))
      return ConceptPropertyType.DATETIME;
    throw new IllegalArgumentException("Unknown ConceptPropertyType code '"+codeString+"'");
  }

  /**
   * Renders an enum constant as its FHIR code string; "?" for null or any
   * unmapped constant (matching the previous if-chain's fall-through).
   */
  public String toCode(ConceptPropertyType code) {
    if (code == null)
      return "?";
    switch (code) {
    case CODE:
      return "code";
    case CODING:
      return "Coding";
    case STRING:
      return "string";
    case INTEGER:
      return "integer";
    case BOOLEAN:
      return "boolean";
    case DATETIME:
      return "dateTime";
    default:
      return "?";
    }
  }

  public String toSystem(ConceptPropertyType code) {
    return code.getSystem();
  }
}
| apache-2.0 |
placrosse/ACaZoo | src/java/org/apache/cassandra/db/compaction/CompactionManager.java | 44285 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.util.*;
import java.util.concurrent.*;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import com.google.common.base.Throwables;
import com.google.common.collect.*;
import com.google.common.util.concurrent.RateLimiter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.cache.AutoSavingCache;
import org.apache.cassandra.concurrent.DebuggableThreadPoolExecutor;
import org.apache.cassandra.concurrent.NamedThreadFactory;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.compaction.CompactionInfo.Holder;
import org.apache.cassandra.db.index.SecondaryIndexBuilder;
import org.apache.cassandra.dht.Bounds;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.io.sstable.*;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.metrics.CompactionMetrics;
import org.apache.cassandra.repair.Validator;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.*;
/**
* A singleton which manages a private executor of ongoing compactions.
* <p/>
* Scheduling for compaction is accomplished by swapping sstables to be compacted into
* a set via DataTracker. New scheduling attempts will ignore currently compacting
* sstables.
*/
public class CompactionManager implements CompactionManagerMBean
{
    // JMX name under which the singleton is registered in the static initializer below.
    public static final String MBEAN_OBJECT_NAME = "org.apache.cassandra.db:type=CompactionManager";
    private static final Logger logger = LoggerFactory.getLogger(CompactionManager.class);
    public static final CompactionManager instance;

    // Sentinel gcBefore values: NO_GC disables tombstone purging entirely,
    // GC_ALL treats every gcable tombstone as purgeable regardless of age.
    public static final int NO_GC = Integer.MIN_VALUE;
    public static final int GC_ALL = Integer.MAX_VALUE;

    // A thread local that tells us if the current thread is owned by the compaction manager. Used
    // by CounterContext to figure out if it should log a warning for invalid counter shards.
    public static final ThreadLocal<Boolean> isCompactionManager = new ThreadLocal<Boolean>()
    {
        @Override
        protected Boolean initialValue()
        {
            return false;
        }
    };

    // Eagerly create the singleton and expose it over JMX; a registration failure is fatal.
    static
    {
        instance = new CompactionManager();
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        try
        {
            mbs.registerMBean(instance, new ObjectName(MBEAN_OBJECT_NAME));
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
    }

    // Executors: general compactions, validation (repair) compactions, and async cache cleanup.
    private final CompactionExecutor executor = new CompactionExecutor();
    private final CompactionExecutor validationExecutor = new ValidationExecutor();
    private final static CompactionExecutor cacheCleanupExecutor = new CacheCleanupExecutor();

    private final CompactionMetrics metrics = new CompactionMetrics(executor, validationExecutor);
    // Tracks how many background-compaction tasks are queued or running per CFS.
    private final Multiset<ColumnFamilyStore> compactingCF = ConcurrentHashMultiset.create();

    // Double.MAX_VALUE rate means "unthrottled"; re-tuned on each getRateLimiter() call.
    private final RateLimiter compactionRateLimiter = RateLimiter.create(Double.MAX_VALUE);
/**
* Gets compaction rate limiter. When compaction_throughput_mb_per_sec is 0 or node is bootstrapping,
* this returns rate limiter with the rate of Double.MAX_VALUE bytes per second.
* Rate unit is bytes per sec.
*
* @return RateLimiter with rate limit set
*/
public RateLimiter getRateLimiter()
{
double currentThroughput = DatabaseDescriptor.getCompactionThroughputMbPerSec() * 1024 * 1024;
// if throughput is set to 0, throttling is disabled
if (currentThroughput == 0 || StorageService.instance.isBootstrapMode())
currentThroughput = Double.MAX_VALUE;
if (compactionRateLimiter.getRate() != currentThroughput)
compactionRateLimiter.setRate(currentThroughput);
return compactionRateLimiter;
}
    /**
     * Call this whenever a compaction might be needed on the given columnfamily.
     * It's okay to over-call (within reason) if a call is unnecessary, it will
     * turn into a no-op in the bucketing/candidate-scan phase.
     */
    // TODO(pgaref): consider adding code here to check whether this node is the
    // leader and, if so, trigger new elections.
    public List<Future<?>> submitBackground(final ColumnFamilyStore cfs)
    {
        if (cfs.isAutoCompactionDisabled())
        {
            logger.debug("Autocompaction is disabled");
            return Collections.emptyList();
        }

        // don't pile on more work if this CF is already compacting and the pool is saturated
        int count = compactingCF.count(cfs);
        if (count > 0 && executor.getActiveCount() >= executor.getMaximumPoolSize())
        {
            logger.debug("Background compaction is still running for {}.{} ({} remaining). Skipping",
                         cfs.keyspace.getName(), cfs.name, count);
            return Collections.emptyList();
        }

        logger.debug("Scheduling a background task check for {}.{} with {}",
                     cfs.keyspace.getName(),
                     cfs.name,
                     cfs.getCompactionStrategy().getClass().getSimpleName());
        List<Future<?>> futures = new ArrayList<Future<?>>();
        // we must schedule it at least once, otherwise compaction will stop for a CF until next flush
        do {
            futures.add(executor.submit(new BackgroundCompactionTask(cfs)));
            compactingCF.add(cfs);
            // if we have room for more compactions, then fill up executor
        } while (executor.getActiveCount() + futures.size() < executor.getMaximumPoolSize());
        return futures;
    }
public boolean isCompacting(Iterable<ColumnFamilyStore> cfses)
{
for (ColumnFamilyStore cfs : cfses)
if (!cfs.getDataTracker().getCompacting().isEmpty())
return true;
return false;
}
    // the actual sstables to compact are not determined until we run the BCT; that way, if new sstables
    // are created between task submission and execution, we execute against the most up-to-date information
    class BackgroundCompactionTask implements Runnable
    {
        private final ColumnFamilyStore cfs;

        BackgroundCompactionTask(ColumnFamilyStore cfs)
        {
            this.cfs = cfs;
        }

        public void run()
        {
            try
            {
                logger.debug("Checking {}.{}", cfs.keyspace.getName(), cfs.name);
                if (!cfs.isValid())
                {
                    // the CF was dropped between submission and execution
                    logger.debug("Aborting compaction for dropped CF");
                    return;
                }

                AbstractCompactionStrategy strategy = cfs.getCompactionStrategy();
                AbstractCompactionTask task = strategy.getNextBackgroundTask(getDefaultGcBefore(cfs));
                if (task == null)
                {
                    logger.debug("No tasks available");
                    return;
                }
                task.execute(metrics);
            }
            finally
            {
                // release the slot reserved by submitBackground() before re-checking
                compactingCF.remove(cfs);
            }
            // see if more compaction is warranted now that this task finished
            // (skipped when task.execute threw, since the exception propagates past this point)
            submitBackground(cfs);
        }
    }
    // Callback applied by performAllSSTableOperation() to the full set of a CFS's sstables
    // (already marked compacting by the caller).
    private static interface AllSSTablesOperation
    {
        public void perform(ColumnFamilyStore store, Iterable<SSTableReader> sstables) throws IOException;
    }
private void performAllSSTableOperation(final ColumnFamilyStore cfs, final AllSSTablesOperation operation) throws InterruptedException, ExecutionException
{
final Iterable<SSTableReader> sstables = cfs.markAllCompacting();
if (sstables == null)
return;
Callable<Object> runnable = new Callable<Object>()
{
public Object call() throws IOException
{
operation.perform(cfs, sstables);
cfs.getDataTracker().unmarkCompacting(sstables);
return this;
}
};
executor.submit(runnable).get();
}
public void performScrub(ColumnFamilyStore cfStore) throws InterruptedException, ExecutionException
{
performAllSSTableOperation(cfStore, new AllSSTablesOperation()
{
public void perform(ColumnFamilyStore store, Iterable<SSTableReader> sstables) throws IOException
{
doScrub(store, sstables);
}
});
}
    /**
     * Rewrites (upgrades) every sstable of the given CFS, one single-sstable compaction task per file.
     *
     * @param excludeCurrentVersion if true, sstables already at the current descriptor version are skipped
     */
    public void performSSTableRewrite(ColumnFamilyStore cfStore, final boolean excludeCurrentVersion) throws InterruptedException, ExecutionException
    {
        performAllSSTableOperation(cfStore, new AllSSTablesOperation()
        {
            public void perform(ColumnFamilyStore cfs, Iterable<SSTableReader> sstables)
            {
                for (final SSTableReader sstable : sstables)
                {
                    if (excludeCurrentVersion && sstable.descriptor.version.equals(Descriptor.Version.CURRENT))
                        continue;

                    // SSTables are marked by the caller
                    // NOTE: it is important that the task create one and only one sstable, even for Leveled compaction (see LeveledManifest.replace())
                    CompactionTask task = new CompactionTask(cfs, Collections.singletonList(sstable), NO_GC);
                    task.setUserDefined(true);
                    task.setCompactionType(OperationType.UPGRADE_SSTABLES);
                    task.execute(metrics);
                }
            }
        });
    }
    /**
     * Removes data this node no longer owns from every sstable of the CFS; blocks until done.
     */
    public void performCleanup(ColumnFamilyStore cfStore, final CounterId.OneShotRenewer renewer) throws InterruptedException, ExecutionException
    {
        performAllSSTableOperation(cfStore, new AllSSTablesOperation()
        {
            public void perform(ColumnFamilyStore store, Iterable<SSTableReader> sstables) throws IOException
            {
                // Sort the column families in order of SSTable size, so cleanup of smaller CFs
                // can free up space for larger ones
                List<SSTableReader> sortedSSTables = Lists.newArrayList(sstables);
                Collections.sort(sortedSSTables, new SSTableReader.SizeComparator());
                doCleanupCompaction(store, sortedSSTables, renewer);
            }
        });
    }
    /**
     * Runs a major ("maximal") compaction of the given CFS with the default gcBefore,
     * blocking until it completes.
     */
    public void performMaximal(final ColumnFamilyStore cfStore) throws InterruptedException, ExecutionException
    {
        submitMaximal(cfStore, getDefaultGcBefore(cfStore)).get();
    }
    /**
     * Schedules a major compaction of the entire CFS with the given gcBefore.
     *
     * @return a future for the compaction; a no-op future when the strategy has no maximal task
     */
    public Future<?> submitMaximal(final ColumnFamilyStore cfStore, final int gcBefore)
    {
        // here we compute the task off the compaction executor, so having that present doesn't
        // confuse runWithCompactionsDisabled -- i.e., we don't want to deadlock ourselves, waiting
        // for ourselves to finish/acknowledge cancellation before continuing.
        final AbstractCompactionTask task = cfStore.getCompactionStrategy().getMaximalTask(gcBefore);
        Runnable runnable = new WrappedRunnable()
        {
            protected void runMayThrow() throws IOException
            {
                if (task == null)
                    return;
                task.execute(metrics);
            }
        };
        return executor.submit(runnable);
    }
    /**
     * Runs a user-defined compaction over the given comma-separated list of sstable data
     * filenames, grouping them by keyspace/columnfamily and blocking until all groups finish.
     * Files whose schema no longer exists are skipped with a warning.
     */
    public void forceUserDefinedCompaction(String dataFiles)
    {
        String[] filenames = dataFiles.split(",");
        Multimap<Pair<String, String>, Descriptor> descriptors = ArrayListMultimap.create();

        for (String filename : filenames)
        {
            // extract keyspace and columnfamily name from filename
            Descriptor desc = Descriptor.fromFilename(filename.trim());
            if (Schema.instance.getCFMetaData(desc) == null)
            {
                logger.warn("Schema does not exist for file {}. Skipping.", filename);
                continue;
            }
            File directory = new File(desc.ksname + File.separator + desc.cfname);
            // group by keyspace/columnfamily
            Pair<Descriptor, String> p = Descriptor.fromFilename(directory, filename.trim());
            Pair<String, String> key = Pair.create(p.left.ksname, p.left.cfname);
            descriptors.put(key, p.left);
        }

        List<Future<?>> futures = new ArrayList<>();
        for (Pair<String, String> key : descriptors.keySet())
        {
            ColumnFamilyStore cfs = Keyspace.open(key.left).getColumnFamilyStore(key.right);
            futures.add(submitUserDefined(cfs, descriptors.get(key), getDefaultGcBefore(cfs)));
        }
        // block until every per-CF compaction finishes
        FBUtilities.waitOnFutures(futures);
    }
    /**
     * Schedules a compaction of exactly the given data files, resolved to live sstables
     * at execution time on the compaction executor.
     */
    public Future<?> submitUserDefined(final ColumnFamilyStore cfs, final Collection<Descriptor> dataFiles, final int gcBefore)
    {
        Runnable runnable = new WrappedRunnable()
        {
            protected void runMayThrow() throws IOException
            {
                // look up the sstables now that we're on the compaction executor, so we don't try to re-compact
                // something that was already being compacted earlier.
                Collection<SSTableReader> sstables = new ArrayList<SSTableReader>(dataFiles.size());
                for (Descriptor desc : dataFiles)
                {
                    // inefficient but not in a performance sensitive path
                    SSTableReader sstable = lookupSSTable(cfs, desc);
                    if (sstable == null)
                    {
                        logger.info("Will not compact {}: it is not an active sstable", desc);
                    }
                    else
                    {
                        sstables.add(sstable);
                    }
                }

                if (sstables.isEmpty())
                {
                    logger.info("No files to compact for user defined compaction");
                }
                else
                {
                    AbstractCompactionTask task = cfs.getCompactionStrategy().getUserDefinedTask(sstables, gcBefore);
                    if (task != null)
                        task.execute(metrics);
                }
            }
        };
        return executor.submit(runnable);
    }
    // This acquires a reference on the sstable.
    // This is not efficient (linear scan of all live sstables); do not use in any critical path.
    private SSTableReader lookupSSTable(final ColumnFamilyStore cfs, Descriptor descriptor)
    {
        for (SSTableReader sstable : cfs.getSSTables())
        {
            // .equals() with no other changes won't work because in sstable.descriptor, the directory is an absolute path.
            // We could construct descriptor with an absolute path too but I haven't found any satisfying way to do that
            // (DB.getDataFileLocationForTable() may not return the right path if you have multiple volumes). Hence the
            // endsWith.
            if (sstable.descriptor.toString().endsWith(descriptor.toString()))
                return sstable;
        }
        return null;
    }
    /**
     * Does not mutate data, so is not scheduled.
     * Runs on the dedicated validation executor rather than the compaction executor.
     */
    public Future<Object> submitValidation(final ColumnFamilyStore cfStore, final Validator validator)
    {
        Callable<Object> callable = new Callable<Object>()
        {
            public Object call() throws IOException
            {
                try
                {
                    doValidationCompaction(cfStore, validator);
                }
                catch (Throwable e)
                {
                    // we need to inform the remote end of our failure, otherwise it will hang on repair forever
                    validator.fail();
                    throw e;
                }
                return this;
            }
        };
        return validationExecutor.submit(callable);
    }
/* Used in tests. */
public void disableAutoCompaction()
{
for (String ksname : Schema.instance.getNonSystemKeyspaces())
{
for (ColumnFamilyStore cfs : Keyspace.open(ksname).getColumnFamilyStores())
cfs.disableAutoCompaction();
}
}
/**
* Deserialize everything in the CFS and re-serialize w/ the newest version. Also attempts to recover
* from bogus row keys / sizes using data from the index, and skips rows with garbage columns that resulted
* from early ByteBuffer bugs.
*
* @throws IOException
*/
private void doScrub(ColumnFamilyStore cfs, Iterable<SSTableReader> sstables) throws IOException
{
assert !cfs.isIndex();
for (final SSTableReader sstable : sstables)
scrubOne(cfs, sstable);
}
    // Scrubs a single sstable and swaps the scrubbed replacement into the CFS.
    private void scrubOne(ColumnFamilyStore cfs, SSTableReader sstable) throws IOException
    {
        Scrubber scrubber = new Scrubber(cfs, sstable);

        CompactionInfo.Holder scrubInfo = scrubber.getScrubInfo();
        metrics.beginCompaction(scrubInfo);
        try
        {
            scrubber.scrub();
        }
        finally
        {
            scrubber.close();
            metrics.finishCompaction(scrubInfo);
        }

        // rows that were found out of order are written to a separate "in order" sstable
        if (scrubber.getNewInOrderSSTable() != null)
            cfs.addSSTable(scrubber.getNewInOrderSSTable());

        // nothing salvageable: just obsolete the original; otherwise swap in the scrubbed version
        if (scrubber.getNewSSTable() == null)
            cfs.markObsolete(Collections.singletonList(sstable), OperationType.SCRUB);
        else
            cfs.replaceCompactedSSTables(Collections.singletonList(sstable), Collections.singletonList(scrubber.getNewSSTable()), OperationType.SCRUB);
    }
/**
* Determines if a cleanup would actually remove any data in this SSTable based
* on a set of owned ranges.
*/
static boolean needsCleanup(SSTableReader sstable, Collection<Range<Token>> ownedRanges)
{
assert !ownedRanges.isEmpty(); // cleanup checks for this
// unwrap and sort the ranges by LHS token
List<Range<Token>> sortedRanges = Range.normalize(ownedRanges);
// see if there are any keys LTE the token for the start of the first range
// (token range ownership is exclusive on the LHS.)
Range<Token> firstRange = sortedRanges.get(0);
if (sstable.first.token.compareTo(firstRange.left) <= 0)
return true;
// then, iterate over all owned ranges and see if the next key beyond the end of the owned
// range falls before the start of the next range
for (int i = 0; i < sortedRanges.size(); i++)
{
Range<Token> range = sortedRanges.get(i);
if (range.right.isMinimum())
{
// we split a wrapping range and this is the second half.
// there can't be any keys beyond this (and this is the last range)
return false;
}
DecoratedKey firstBeyondRange = sstable.firstKeyBeyond(range.right.maxKeyBound());
if (firstBeyondRange == null)
{
// we ran off the end of the sstable looking for the next key; we don't need to check any more ranges
return false;
}
if (i == (ownedRanges.size() - 1))
{
// we're at the last range and we found a key beyond the end of the range
return true;
}
Range<Token> nextRange = sortedRanges.get(i + 1);
if (!nextRange.contains(firstBeyondRange.token))
{
// we found a key in between the owned ranges
return true;
}
}
return false;
}
    /**
     * This function goes over each file and removes the keys that the node is not responsible for
     * and only keeps keys that this node is responsible for.
     *
     * @throws IOException
     */
    private void doCleanupCompaction(final ColumnFamilyStore cfs, Collection<SSTableReader> sstables, CounterId.OneShotRenewer renewer) throws IOException
    {
        assert !cfs.isIndex();
        Keyspace keyspace = cfs.keyspace;
        Collection<Range<Token>> ranges = StorageService.instance.getLocalRanges(keyspace.getName());
        if (ranges.isEmpty())
        {
            logger.info("Cleanup cannot run before a node has joined the ring");
            return;
        }

        boolean hasIndexes = cfs.indexManager.hasIndexes();
        CleanupStrategy cleanupStrategy = CleanupStrategy.get(cfs, ranges, renewer);

        for (SSTableReader sstable : sstables)
        {
            // sstable entirely outside the owned ranges (and no 2i to keep in sync):
            // drop it wholesale without rewriting anything
            if (!hasIndexes && !new Bounds<Token>(sstable.first.token, sstable.last.token).intersects(ranges))
            {
                cfs.replaceCompactedSSTables(Arrays.asList(sstable), Collections.<SSTableReader>emptyList(), OperationType.CLEANUP);
                continue;
            }
            if (!needsCleanup(sstable, ranges))
            {
                logger.debug("Skipping {} for cleanup; all rows should be kept", sstable);
                continue;
            }

            CompactionController controller = new CompactionController(cfs, Collections.singleton(sstable), getDefaultGcBefore(cfs));
            long start = System.nanoTime();

            long totalkeysWritten = 0;

            int expectedBloomFilterSize = Math.max(cfs.metadata.getIndexInterval(),
                                                   (int) (SSTableReader.getApproximateKeyCount(Arrays.asList(sstable), cfs.metadata)));
            if (logger.isDebugEnabled())
                logger.debug("Expected bloom filter size : {}", expectedBloomFilterSize);

            logger.info("Cleaning up {}", sstable);

            // Calculate the expected compacted filesize
            // NOTE(review): expectedRangeFileSize is computed but never used below --
            // possibly leftover from an earlier disk-space check; confirm before removing.
            long expectedRangeFileSize = cfs.getExpectedCompactedFileSize(Arrays.asList(sstable), OperationType.CLEANUP);
            File compactionFileLocation = cfs.directories.getDirectoryForNewSSTables();
            if (compactionFileLocation == null)
                throw new IOException("disk full");

            ICompactionScanner scanner = cleanupStrategy.getScanner(sstable, getRateLimiter());
            CleanupInfo ci = new CleanupInfo(sstable, (SSTableScanner)scanner);

            metrics.beginCompaction(ci);
            SSTableWriter writer = createWriter(cfs,
                                                compactionFileLocation,
                                                expectedBloomFilterSize,
                                                sstable);
            SSTableReader newSstable = null;
            try
            {
                while (scanner.hasNext())
                {
                    if (ci.isStopRequested())
                        throw new CompactionInterruptedException(ci.getCompactionInfo());

                    SSTableIdentityIterator row = (SSTableIdentityIterator) scanner.next();
                    // null means the strategy dropped the row (out of owned ranges); skip it
                    row = cleanupStrategy.cleanup(row);
                    if (row == null)
                        continue;
                    AbstractCompactedRow compactedRow = controller.getCompactedRow(row);
                    if (writer.append(compactedRow) != null)
                        totalkeysWritten++;
                }
                // an empty output sstable is aborted rather than opened
                if (totalkeysWritten > 0)
                    newSstable = writer.closeAndOpenReader(sstable.maxDataAge);
                else
                    writer.abort();
            }
            catch (Throwable e)
            {
                writer.abort();
                throw Throwables.propagate(e);
            }
            finally
            {
                controller.close();
                scanner.close();
                metrics.finishCompaction(ci);
            }

            List<SSTableReader> results = new ArrayList<SSTableReader>(1);
            if (newSstable != null)
            {
                results.add(newSstable);

                String format = "Cleaned up to %s. %,d to %,d (~%d%% of original) bytes for %,d keys. Time: %,dms.";
                long dTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
                long startsize = sstable.onDiskLength();
                long endsize = newSstable.onDiskLength();
                double ratio = (double) endsize / (double) startsize;
                logger.info(String.format(format, writer.getFilename(), startsize, endsize, (int) (ratio * 100), totalkeysWritten, dTime));
            }

            // flush to ensure we don't lose the tombstones on a restart, since they are not commitlog'd
            cfs.indexManager.flushIndexesBlocking();

            cfs.replaceCompactedSSTables(Arrays.asList(sstable), results, OperationType.CLEANUP);
        }
    }
    // Row-level strategy for cleanup. Bounded relies on a range-restricted scanner (cheap);
    // Full scans everything and filters per row, which is required when secondary indexes
    // or counter (commutative) columns need per-row bookkeeping.
    private static abstract class CleanupStrategy
    {
        public static CleanupStrategy get(ColumnFamilyStore cfs, Collection<Range<Token>> ranges, CounterId.OneShotRenewer renewer)
        {
            if (cfs.indexManager.hasIndexes() || cfs.metadata.getDefaultValidator().isCommutative())
                return new Full(cfs, ranges, renewer);

            return new Bounded(cfs, ranges);
        }

        public abstract ICompactionScanner getScanner(SSTableReader sstable, RateLimiter limiter);
        // Returns the row to keep, or null if the row should be dropped entirely.
        public abstract SSTableIdentityIterator cleanup(SSTableIdentityIterator row);

        private static final class Bounded extends CleanupStrategy
        {
            private final Collection<Range<Token>> ranges;

            public Bounded(final ColumnFamilyStore cfs, Collection<Range<Token>> ranges)
            {
                this.ranges = ranges;
                // caches may reference rows we are about to discard; clear them asynchronously
                cacheCleanupExecutor.submit(new Runnable()
                {
                    @Override
                    public void run()
                    {
                        cfs.cleanupCache();
                    }
                });
            }

            @Override
            public ICompactionScanner getScanner(SSTableReader sstable, RateLimiter limiter)
            {
                // the scanner only emits rows inside the owned ranges, so cleanup() is a no-op
                return sstable.getScanner(ranges, limiter);
            }

            @Override
            public SSTableIdentityIterator cleanup(SSTableIdentityIterator row)
            {
                return row;
            }
        }

        private static final class Full extends CleanupStrategy
        {
            private final Collection<Range<Token>> ranges;
            private final ColumnFamilyStore cfs;
            // lazily-allocated scratch list of indexed columns, reused across rows
            private List<Column> indexedColumnsInRow;
            private final CounterId.OneShotRenewer renewer;

            public Full(ColumnFamilyStore cfs, Collection<Range<Token>> ranges, CounterId.OneShotRenewer renewer)
            {
                this.cfs = cfs;
                this.ranges = ranges;
                this.indexedColumnsInRow = null;
                this.renewer = renewer;
            }

            @Override
            public ICompactionScanner getScanner(SSTableReader sstable, RateLimiter limiter)
            {
                return sstable.getScanner(limiter);
            }

            @Override
            public SSTableIdentityIterator cleanup(SSTableIdentityIterator row)
            {
                if (Range.isInRanges(row.getKey().token, ranges))
                    return row;

                // out-of-range row: purge it from the row cache and the secondary indexes
                cfs.invalidateCachedRow(row.getKey());

                if (indexedColumnsInRow != null)
                    indexedColumnsInRow.clear();

                while (row.hasNext())
                {
                    OnDiskAtom column = row.next();

                    if (column instanceof CounterColumn)
                        renewer.maybeRenew((CounterColumn) column);

                    if (column instanceof Column && cfs.indexManager.indexes((Column) column))
                    {
                        if (indexedColumnsInRow == null)
                            indexedColumnsInRow = new ArrayList<>();

                        indexedColumnsInRow.add((Column) column);
                    }
                }

                if (indexedColumnsInRow != null && !indexedColumnsInRow.isEmpty())
                {
                    // acquire memtable lock here because secondary index deletion may cause a race. See CASSANDRA-3712
                    Keyspace.switchLock.readLock().lock();
                    try
                    {
                        cfs.indexManager.deleteFromIndexes(row.getKey(), indexedColumnsInRow);
                    }
                    finally
                    {
                        Keyspace.switchLock.readLock().unlock();
                    }
                }
                return null;
            }
        }
    }
    // Builds a writer for a cleanup/scrub output sstable in the given directory, seeding the
    // metadata collector (including sstable level) from the single source sstable.
    public static SSTableWriter createWriter(ColumnFamilyStore cfs,
                                             File compactionFileLocation,
                                             int expectedBloomFilterSize,
                                             SSTableReader sstable)
    {
        FileUtils.createDirectory(compactionFileLocation);
        return new SSTableWriter(cfs.getTempSSTablePath(compactionFileLocation),
                                 expectedBloomFilterSize,
                                 cfs.metadata,
                                 cfs.partitioner,
                                 SSTableMetadata.createCollector(Collections.singleton(sstable), cfs.metadata.comparator, sstable.getSSTableLevel()));
    }
    /**
     * Performs a readonly "compaction" of all sstables in order to validate complete rows,
     * but without writing the merge result
     */
    private void doValidationCompaction(ColumnFamilyStore cfs, Validator validator) throws IOException
    {
        // this isn't meant to be race-proof, because it's not -- it won't cause bugs for a CFS to be dropped
        // mid-validation, or to attempt to validate a dropped CFS. this is just a best effort to avoid useless work,
        // particularly in the scenario where a validation is submitted before the drop, and there are compactions
        // started prior to the drop keeping some sstables alive. Since validationCompaction can run
        // concurrently with other compactions, it would otherwise go ahead and scan those again.
        if (!cfs.isValid())
            return;

        Collection<SSTableReader> sstables;
        String snapshotName = validator.desc.sessionId.toString();
        int gcBefore;
        boolean isSnapshotValidation = cfs.snapshotExists(snapshotName);
        if (isSnapshotValidation)
        {
            // If there is a snapshot created for the session then read from there.
            sstables = cfs.getSnapshotSSTableReader(snapshotName);

            // Computing gcbefore based on the current time wouldn't be very good because we know each replica will execute
            // this at a different time (that's the whole purpose of repair with snapshot). So instead we take the creation
            // time of the snapshot, which should give us roughly the same time on each replica (roughly being in that case
            // 'as good as in the non-snapshot' case)
            gcBefore = cfs.gcBefore(cfs.getSnapshotCreationTime(snapshotName));
        }
        else
        {
            // flush first so everyone is validating data that is as similar as possible
            StorageService.instance.forceKeyspaceFlush(cfs.keyspace.getName(), cfs.name);

            // we don't mark validating sstables as compacting in DataTracker, so we have to mark them referenced
            // instead so they won't be cleaned up if they do get compacted during the validation
            sstables = cfs.markCurrentSSTablesReferenced();
            if (validator.gcBefore > 0)
                gcBefore = validator.gcBefore;
            else
                gcBefore = getDefaultGcBefore(cfs);
        }

        CompactionIterable ci = new ValidationCompactionIterable(cfs, sstables, validator.desc.range, gcBefore);
        CloseableIterator<AbstractCompactedRow> iter = ci.iterator();
        metrics.beginCompaction(ci);
        try
        {
            // validate the CF as we iterate over it
            validator.prepare(cfs);
            while (iter.hasNext())
            {
                if (ci.isStopRequested())
                    throw new CompactionInterruptedException(ci.getCompactionInfo());
                AbstractCompactedRow row = iter.next();
                validator.add(row);
            }
            validator.complete();
        }
        finally
        {
            iter.close();
            if (isSnapshotValidation)
            {
                // release the snapshot readers and remove the snapshot itself
                for (SSTableReader sstable : sstables)
                    FileUtils.closeQuietly(sstable);
                cfs.clearSnapshot(snapshotName);
            }
            else
            {
                // drop the references taken by markCurrentSSTablesReferenced above
                SSTableReader.releaseReferences(sstables);
            }
            metrics.finishCompaction(ci);
        }
    }
/**
* Is not scheduled, because it is performing disjoint work from sstable compaction.
*/
public Future<?> submitIndexBuild(final SecondaryIndexBuilder builder)
{
Runnable runnable = new Runnable()
{
public void run()
{
metrics.beginCompaction(builder);
try
{
builder.build();
}
finally
{
metrics.finishCompaction(builder);
}
}
};
return executor.submit(runnable);
}
    /**
     * Schedules an auto-saving-cache flush on the compaction executor; at most one flush
     * per cache type runs at a time (duplicates are skipped).
     */
    public Future<?> submitCacheWrite(final AutoSavingCache.Writer writer)
    {
        Runnable runnable = new Runnable()
        {
            public void run()
            {
                // flushInProgress acts as a per-cache-type mutex
                if (!AutoSavingCache.flushInProgress.add(writer.cacheType()))
                {
                    logger.debug("Cache flushing was already in progress: skipping {}", writer.getCompactionInfo());
                    return;
                }
                try
                {
                    metrics.beginCompaction(writer);
                    try
                    {
                        writer.saveCache();
                    }
                    finally
                    {
                        metrics.finishCompaction(writer);
                    }
                }
                finally
                {
                    AutoSavingCache.flushInProgress.remove(writer.cacheType());
                }
            }
        };
        return executor.submit(runnable);
    }
    // Computes the default "purge tombstones older than" cutoff for a compaction of this CFS.
    static int getDefaultGcBefore(ColumnFamilyStore cfs)
    {
        // 2ndary indexes have ExpiringColumns too, so we need to purge tombstones deleted before now. We do not need to
        // add any GcGrace however since 2ndary indexes are local to a node.
        return cfs.isIndex() ? (int) (System.currentTimeMillis() / 1000) : cfs.gcBefore(System.currentTimeMillis());
    }
    // CompactionIterable restricted to the repair range, using the always-purge controller below.
    private static class ValidationCompactionIterable extends CompactionIterable
    {
        public ValidationCompactionIterable(ColumnFamilyStore cfs, Collection<SSTableReader> sstables, Range<Token> range, int gcBefore)
        {
            super(OperationType.VALIDATION,
                  cfs.getCompactionStrategy().getScanners(sstables, range),
                  new ValidationCompactionController(cfs, gcBefore));
        }
    }
    /*
     * Controller for validation compaction that always purges.
     * Note that we should not call cfs.getOverlappingSSTables on the provided
     * sstables because those sstables are not guaranteed to be active sstables
     * (since we can run repair on a snapshot).
     */
    private static class ValidationCompactionController extends CompactionController
    {
        public ValidationCompactionController(ColumnFamilyStore cfs, int gcBefore)
        {
            super(cfs, gcBefore);
        }

        @Override
        public boolean shouldPurge(DecoratedKey key, long delTimestamp)
        {
            /*
             * The main reason we always purge is that including gcable tombstones would mean that the
             * repair digest will depend on the scheduling of compaction on the different nodes. This
             * is still not perfect because gcbefore is currently dependent on the current time at which
             * the validation compaction starts, which while not too bad for normal repair is broken for
             * repair on snapshots. A better solution would be to agree on a gcbefore that all nodes would
             * use, and we'll do that with CASSANDRA-4932.
             * Note validation compaction includes all sstables, so we don't have the problem of purging
             * a tombstone that could shadow a column in another sstable, but this is doubly not a concern
             * since validation compaction is read-only.
             */
            return true;
        }
    }
    // Number of compaction operations (all types) currently in flight, per CompactionMetrics.
    public int getActiveCompactions()
    {
        return CompactionMetrics.getCompactions().size();
    }
    // Thread pool for compaction work; threads run at MIN_PRIORITY so compactions
    // yield to request-handling threads, and core threads time out when idle.
    private static class CompactionExecutor extends ThreadPoolExecutor
    {
        protected CompactionExecutor(int minThreads, int maxThreads, String name, BlockingQueue<Runnable> queue)
        {
            super(minThreads, maxThreads, 60, TimeUnit.SECONDS, queue, new NamedThreadFactory(name, Thread.MIN_PRIORITY));
            allowCoreThreadTimeOut(true);
        }

        private CompactionExecutor(int threadCount, String name)
        {
            this(threadCount, threadCount, name, new LinkedBlockingQueue<Runnable>());
        }

        public CompactionExecutor()
        {
            this(Math.max(1, DatabaseDescriptor.getConcurrentCompactors()), "CompactionExecutor");
        }

        protected void beforeExecute(Thread t, Runnable r)
        {
            // can't set this in Thread factory, so we do it redundantly here
            isCompactionManager.set(true);
            super.beforeExecute(t, r);
        }

        // modified from DebuggableThreadPoolExecutor so that CompactionInterruptedExceptions are not logged
        @Override
        public void afterExecute(Runnable r, Throwable t)
        {
            super.afterExecute(r, t);

            if (t == null)
                t = DebuggableThreadPoolExecutor.extractThrowable(r);

            if (t != null)
            {
                if (t instanceof CompactionInterruptedException)
                {
                    // user-requested stop: expected, so keep the log quiet
                    logger.info(t.getMessage());
                    logger.debug("Full interruption stack trace:", t);
                }
                else
                {
                    DebuggableThreadPoolExecutor.handleOrLog(t);
                }
            }
        }
    }
    // Unbounded thread count with a SynchronousQueue: each validation gets a thread immediately
    // rather than queueing behind other validations.
    private static class ValidationExecutor extends CompactionExecutor
    {
        public ValidationExecutor()
        {
            super(1, Integer.MAX_VALUE, "ValidationExecutor", new SynchronousQueue<Runnable>());
        }
    }

    // Single-threaded executor used by CleanupStrategy.Bounded to clear caches asynchronously.
    private static class CacheCleanupExecutor extends CompactionExecutor
    {
        public CacheCleanupExecutor()
        {
            super(1, "CacheCleanupExecutor");
        }
    }
    // Begin/end callback pair that compaction tasks use to report progress to the metrics system.
    public interface CompactionExecutorStatsCollector
    {
        void beginCompaction(CompactionInfo.Holder ci);

        void finishCompaction(CompactionInfo.Holder ci);
    }
public List<Map<String, String>> getCompactions()
{
List<Holder> compactionHolders = CompactionMetrics.getCompactions();
List<Map<String, String>> out = new ArrayList<Map<String, String>>(compactionHolders.size());
for (CompactionInfo.Holder ci : compactionHolders)
out.add(ci.getCompactionInfo().asMap());
return out;
}
public List<String> getCompactionSummary()
{
List<Holder> compactionHolders = CompactionMetrics.getCompactions();
List<String> out = new ArrayList<String>(compactionHolders.size());
for (CompactionInfo.Holder ci : compactionHolders)
out.add(ci.getCompactionInfo().toString());
return out;
}
    // Total bytes written by compactions since startup (JMX).
    public long getTotalBytesCompacted()
    {
        return metrics.bytesCompacted.count();
    }

    // Total number of compactions completed since startup (JMX).
    public long getTotalCompactionsCompleted()
    {
        return metrics.totalCompactionsCompleted.count();
    }

    // Estimated number of compaction tasks still pending (JMX).
    public int getPendingTasks()
    {
        return metrics.pendingTasks.value();
    }

    // Number of tasks the compaction executors have completed (JMX).
    public long getCompletedTasks()
    {
        return metrics.completedTasks.value();
    }
private static class CleanupInfo extends CompactionInfo.Holder
{
private final SSTableReader sstable;
private final SSTableScanner scanner;
public CleanupInfo(SSTableReader sstable, SSTableScanner scanner)
{
this.sstable = sstable;
this.scanner = scanner;
}
public CompactionInfo getCompactionInfo()
{
try
{
return new CompactionInfo(sstable.metadata,
OperationType.CLEANUP,
scanner.getCurrentPosition(),
scanner.getLengthInBytes());
}
catch (Exception e)
{
throw new RuntimeException();
}
}
}
public void stopCompaction(String type)
{
OperationType operation = OperationType.valueOf(type);
for (Holder holder : CompactionMetrics.getCompactions())
{
if (holder.getCompactionInfo().getTaskType() == operation)
holder.stop();
}
}
    // Core/maximum pool-size accessors for the compaction and validation executors,
    // exposed for runtime tuning via JMX (CompactionManagerMBean).
    public int getCoreCompactorThreads()
    {
        return executor.getCorePoolSize();
    }

    public void setCoreCompactorThreads(int number)
    {
        executor.setCorePoolSize(number);
    }

    public int getMaximumCompactorThreads()
    {
        return executor.getMaximumPoolSize();
    }

    public void setMaximumCompactorThreads(int number)
    {
        executor.setMaximumPoolSize(number);
    }

    public int getCoreValidationThreads()
    {
        return validationExecutor.getCorePoolSize();
    }

    public void setCoreValidationThreads(int number)
    {
        validationExecutor.setCorePoolSize(number);
    }

    public int getMaximumValidatorThreads()
    {
        return validationExecutor.getMaximumPoolSize();
    }

    public void setMaximumValidatorThreads(int number)
    {
        validationExecutor.setMaximumPoolSize(number);
    }
    /**
     * Try to stop all of the compactions for given ColumnFamilies.
     *
     * Note that this method does not wait for all compactions to finish; you'll need to loop against
     * isCompacting if you want that behavior.
     *
     * @param columnFamilies The ColumnFamilies to try to stop compaction upon.
     * @param interruptValidation true if validation operations for repair should also be interrupted
     *
     */
    public void interruptCompactionFor(Iterable<CFMetaData> columnFamilies, boolean interruptValidation)
    {
        assert columnFamilies != null;

        // interrupt in-progress compactions
        for (Holder compactionHolder : CompactionMetrics.getCompactions())
        {
            CompactionInfo info = compactionHolder.getCompactionInfo();

            // leave repair validations alone unless explicitly asked to interrupt them
            if ((info.getTaskType() == OperationType.VALIDATION) && !interruptValidation)
                continue;

            if (Iterables.contains(columnFamilies, info.getCFMetaData()))
                compactionHolder.stop(); // signal compaction to stop
        }
    }
}
| apache-2.0 |
quattor/pan | panc/src/main/java/org/quattor/pan/statement/ComputedLoadpathStatement.java | 1720 | /*
Copyright (c) 2006 Charles A. Loomis, Jr, Cedric Duprilot, and
Centre National de la Recherche Scientifique (CNRS).
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
$HeadURL: https://svn.lal.in2p3.fr/LCG/QWG/panc/trunk/src/org/quattor/pan/statement/VariableStatement.java $
$Id: VariableStatement.java 2728 2008-01-17 20:44:12Z loomis $
*/
package org.quattor.pan.statement;
import java.util.List;
import org.quattor.pan.dml.Operation;
import org.quattor.pan.dml.data.Element;
import org.quattor.pan.exceptions.SyntaxException;
import org.quattor.pan.ttemplate.Context;
import org.quattor.pan.ttemplate.SourceRange;
public class ComputedLoadpathStatement extends ComputedVariableStatement {

    /**
     * Creates a statement that binds the LOADPATH variable to the result of a
     * DML block.
     *
     * @param sourceRange source location of this statement
     * @param dml DML block evaluated to produce the LOADPATH value
     * @param conditional whether the assignment is conditional
     * @param modifiable whether the variable may be modified later
     * @throws SyntaxException if the underlying variable statement is invalid
     */
    protected ComputedLoadpathStatement(SourceRange sourceRange, Operation dml,
            boolean conditional, boolean modifiable) throws SyntaxException {
        super(sourceRange, "LOADPATH", dml, conditional, modifiable);
    }

    /**
     * Evaluates the DML via the superclass and then propagates the resulting
     * LOADPATH value into the context as the list of relative load paths.
     */
    @Override
    public Element execute(Context context) {
        super.execute(context);

        // Reset the value of the LOADPATH in the context.
        Element loadpath = context.getGlobalVariable("LOADPATH");
        List<String> relativePaths = convertLoadpathVariable(loadpath);
        context.setRelativeLoadpaths(relativePaths);

        return null;
    }
}
| apache-2.0 |
alibaba/fastjson | src/test/java/com/alibaba/json/bvt/parser/JSONScannerTest_null.java | 3705 | package com.alibaba.json.bvt.parser;
import org.junit.Assert;
import junit.framework.TestCase;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.parser.JSONScanner;
public class JSONScannerTest_null extends TestCase {

    /** Scans {@code text}, expecting {@code scanNullOrNew()} to accept it. */
    private static void assertScanAccepts(String text) {
        JSONScanner lexer = new JSONScanner(text);
        lexer.scanNullOrNew();
    }

    /** Scans {@code text}, expecting {@code scanNullOrNew()} to reject it with a JSONException. */
    private static void assertScanRejects(String text) {
        JSONException error = null;
        try {
            JSONScanner lexer = new JSONScanner(text);
            lexer.scanNullOrNew();
        } catch (JSONException e) {
            error = e;
        }
        Assert.assertNotNull(error);
    }

    public void test_scan_null_0() throws Exception {
        assertScanAccepts("null");
    }

    // Inputs that are not "null"/"new" at all, or are corrupted at each position.
    public void test_scan_null_1() throws Exception {
        assertScanRejects("zull");
    }

    public void test_scan_null_2() throws Exception {
        assertScanRejects("nzll");
    }

    public void test_scan_null_3() throws Exception {
        assertScanRejects("nuzl");
    }

    public void test_scan_null_4() throws Exception {
        assertScanRejects("nulz");
    }

    public void test_scan_null_5() throws Exception {
        assertScanRejects("nulle");
    }

    public void test_scan_null_6() throws Exception {
        assertScanRejects("null\"");
    }

    // "null" followed by a legal separator (whitespace, comma, brackets) is accepted.
    public void test_scan_null_7() throws Exception {
        assertScanAccepts("null a");
    }

    public void test_scan_null_8() throws Exception {
        assertScanAccepts("null,");
    }

    public void test_scan_null_9() throws Exception {
        assertScanAccepts("null\na");
    }

    public void test_scan_null_10() throws Exception {
        assertScanAccepts("null\ra");
    }

    public void test_scan_null_11() throws Exception {
        assertScanAccepts("null\ta");
    }

    public void test_scan_null_12() throws Exception {
        assertScanAccepts("null\fa");
    }

    public void test_scan_null_13() throws Exception {
        assertScanAccepts("null\ba");
    }

    // NOTE: the next two methods keep their original (misleading) "false" names
    // to preserve the existing test surface; they exercise null scanning.
    public void test_scan_false_14() throws Exception {
        assertScanAccepts("null}");
    }

    public void test_scan_false_15() throws Exception {
        assertScanAccepts("null]");
    }
}
| apache-2.0 |
TommesDee/cpachecker | src/org/sosy_lab/cpachecker/cpa/abm/AbstractABMBasedRefiner.java | 6231 | /*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2013 Dirk Beyer
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.abm;
import static org.sosy_lab.cpachecker.util.AbstractStates.extractLocation;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.sosy_lab.common.Pair;
import org.sosy_lab.common.Timer;
import org.sosy_lab.common.configuration.InvalidConfigurationException;
import org.sosy_lab.cpachecker.cfa.model.CFAEdge;
import org.sosy_lab.cpachecker.core.CounterexampleInfo;
import org.sosy_lab.cpachecker.core.interfaces.ConfigurableProgramAnalysis;
import org.sosy_lab.cpachecker.core.interfaces.Precision;
import org.sosy_lab.cpachecker.core.reachedset.ReachedSet;
import org.sosy_lab.cpachecker.cpa.abm.ABMTransferRelation.BackwardARGState;
import org.sosy_lab.cpachecker.cpa.arg.ARGPath;
import org.sosy_lab.cpachecker.cpa.arg.ARGReachedSet;
import org.sosy_lab.cpachecker.cpa.arg.ARGState;
import org.sosy_lab.cpachecker.cpa.arg.AbstractARGBasedRefiner;
import org.sosy_lab.cpachecker.exceptions.CPAException;
import org.sosy_lab.cpachecker.exceptions.CPATransferException;
import com.google.common.base.Preconditions;
/**
* This is an extension of {@link AbstractARGBasedRefiner} that takes care of
* flattening the ARG before calling {@link #performRefinement0(ReachedSet)}.
*
* Warning: Although the ARG is flattened at this point, the elements in it have
* not been expanded due to performance reasons.
*/
public abstract class AbstractABMBasedRefiner extends AbstractARGBasedRefiner {

  // Package-private so ABMCPA's statistics can report them; this refiner
  // registers itself via abmCpa.getStatistics().addRefiner(this) below.
  final Timer computePathTimer = new Timer();
  final Timer computeSubtreeTimer = new Timer();
  final Timer computeCounterexampleTimer = new Timer();

  private final ABMTransferRelation transfer;

  // Maps states of the computed counterexample path back to the corresponding
  // states in the reached set; filled by computePath, read by ABMReachedSet.
  private final Map<ARGState, ARGState> pathStateToReachedState = new HashMap<>();

  protected AbstractABMBasedRefiner(ConfigurableProgramAnalysis pCpa)
      throws InvalidConfigurationException {
    super(pCpa);

    ABMCPA abmCpa = (ABMCPA)pCpa;
    transfer = abmCpa.getTransferRelation();
    abmCpa.getStatistics().addRefiner(this);
  }

  /**
   * When inheriting from this class, implement this method instead of
   * {@link #performRefinement(ReachedSet)}.
   */
  protected abstract CounterexampleInfo performRefinement0(ARGReachedSet pReached, ARGPath pPath) throws CPAException, InterruptedException;

  @Override
  protected final CounterexampleInfo performRefinement(ARGReachedSet pReached, ARGPath pPath) throws CPAException, InterruptedException {
    if (pPath == null) {
      // computePath returned null (no counterexample subgraph could be
      // computed), so the counterexample is reported as spurious.
      return CounterexampleInfo.spurious();
    } else {
      // Wrap the reached set so subtree removal goes through the ABM-specific
      // logic in ABMTransferRelation.
      return performRefinement0(new ABMReachedSet(transfer, pReached, pPath, pathStateToReachedState), pPath);
    }
  }

  @Override
  protected final ARGPath computePath(ARGState pLastElement, ARGReachedSet pReachedSet) throws InterruptedException, CPATransferException {
    assert pLastElement.isTarget();

    // Drop stale entries from a previous refinement iteration.
    pathStateToReachedState.clear();

    computePathTimer.start();
    try {
      ARGState subgraph;
      computeSubtreeTimer.start();
      try {
        subgraph = transfer.computeCounterexampleSubgraph(pLastElement, pReachedSet, new BackwardARGState(pLastElement.getWrappedState(), null), pathStateToReachedState);
        if (subgraph == null) {
          return null;
        }
      } finally {
        computeSubtreeTimer.stop();
      }

      computeCounterexampleTimer.start();
      try {
        return computeCounterexample(subgraph);
      } finally {
        computeCounterexampleTimer.stop();
      }
    } finally {
      computePathTimer.stop();
    }
  }

  // Linearizes the counterexample subgraph by always following the first
  // child of each state. NOTE(review): this assumes each state has at most
  // one child — confirm against computeCounterexampleSubgraph.
  private ARGPath computeCounterexample(ARGState root) {
    ARGPath path = new ARGPath();
    ARGState currentElement = root;
    while (currentElement.getChildren().size() > 0) {
      ARGState child = currentElement.getChildren().iterator().next();

      CFAEdge edge = currentElement.getEdgeToChild(child);
      path.add(Pair.of(currentElement, edge));

      currentElement = child;
    }
    // The final state has no child; pair it with the first CFA edge leaving
    // its location instead.
    path.add(Pair.of(currentElement, extractLocation(currentElement).getLeavingEdge(0)));
    return path;
  }

  // Reached-set view that routes subtree removal through ABMTransferRelation,
  // which needs the path and the path-state-to-reached-state mapping.
  private static class ABMReachedSet extends ARGReachedSet.ForwardingARGReachedSet {

    private final ABMTransferRelation transfer;
    private final ARGPath path;
    private final Map<ARGState, ARGState> pathStateToReachedState;

    private ABMReachedSet(ABMTransferRelation pTransfer, ARGReachedSet pReached, ARGPath pPath, Map<ARGState, ARGState> pPathElementToReachedState) {
      super(pReached);
      this.transfer = pTransfer;
      this.path = pPath;
      this.pathStateToReachedState = pPathElementToReachedState;
    }

    @Override
    public void removeSubtree(ARGState element, Precision newPrecision,
        Class<? extends Precision> pPrecisionType) {
      // Delegate to the list-based overload with a single precision entry.
      ArrayList<Precision> listP = new ArrayList<>();
      listP.add(newPrecision);
      ArrayList<Class<? extends Precision>> listPT = new ArrayList<>();
      listPT.add(pPrecisionType);
      removeSubtree(element, listP, listPT);
    }

    @Override
    public void removeSubtree(ARGState element, List<Precision> newPrecisions, List<Class<? extends Precision>> pPrecisionTypes) {
      Preconditions.checkArgument(newPrecisions.size()==pPrecisionTypes.size());
      transfer.removeSubtree(delegate, path, element, newPrecisions, pPrecisionTypes, pathStateToReachedState);
    }

    @Override
    public void removeSubtree(ARGState pE) {
      // Removal without a precision update is not supported by this wrapper.
      throw new UnsupportedOperationException();
    }
  }
}
jgribonvald/demo-spring-security-cas | src/main/java/org/esco/demo/ssc/web/rest/package-info.java | 99 | /**
 * REST resource classes of the demo Spring Security CAS application.
 *
 * @author GIP RECIA - Julien Gribonvald
 */
package org.esco.demo.ssc.web.rest; | apache-2.0 |
robinxdroid/XDroidAnimation | XDroidAnimation/src/com/xdroid/animation/base/AnimationBase.java | 1441 | package com.xdroid.animation.base;
import com.xdroid.animation.interfaces.CombinableMethod;
import com.xdroid.animation.utils.ViewHelper;
import android.animation.Animator.AnimatorListener;
import android.animation.TimeInterpolator;
import android.view.View;
/**
* 动画基类
*
* @author Robin
* @since 2015-07-22 14:44:30
*
*/
/**
 * Base class for animations providing the common fluent configuration API
 * (interpolator, duration, listener, pivot).
 *
 * @author Robin
 * @since 2015-07-22 14:44:30
 */
public abstract class AnimationBase<T> implements CombinableMethod<T> {

    /** The view this animation operates on. */
    protected View targetView;
    /** Time interpolator driving the animation's timing curve. */
    protected TimeInterpolator interpolator;
    /** Animation duration in milliseconds. */
    protected long duration;
    /** Callback notified of animation lifecycle events. */
    protected AnimatorListener listener;

    /** Single home for the unavoidable self-cast used by the fluent setters. */
    @SuppressWarnings("unchecked")
    private T self() {
        return (T) this;
    }

    @Override
    public T setInterpolator(TimeInterpolator interpolator) {
        this.interpolator = interpolator;
        return self();
    }

    @Override
    public T setDuration(long duration) {
        this.duration = duration;
        return self();
    }

    @Override
    public T setListener(AnimatorListener listener) {
        this.listener = listener;
        return self();
    }

    @Override
    public T setPivotX(int pivotX) {
        ViewHelper.setPivotX(targetView, pivotX);
        return self();
    }

    @Override
    public T setPivotY(int pivotY) {
        ViewHelper.setPivotY(targetView, pivotY);
        return self();
    }

    @Override
    public long getDuration() {
        return duration;
    }
}
| apache-2.0 |
yona-projects/yona | test/models/NotificationEventTest.java | 6203 | /**
* Yona, 21st Century Project Hosting SW
* <p>
* Copyright Yona & Yobi Authors & NAVER Corp. & NAVER LABS Corp.
* https://yona.io
**/
package models;
import models.enumeration.EventType;
import models.enumeration.ResourceType;
import org.junit.Ignore;
import org.junit.Test;
import play.i18n.Lang;
import java.util.HashSet;
import java.util.Set;
import static org.fest.assertions.Assertions.assertThat;
public class NotificationEventTest extends ModelTest<NotificationEvent> {

    /** Builds a minimal event pointing at resource id 1 of the given type. */
    private NotificationEvent getNotificationEvent(ResourceType resourceType) {
        NotificationEvent event = new NotificationEvent();
        event.resourceType = resourceType;
        event.resourceId = "1";
        return event;
    }

    /** Builds the ISSUE_BODY_CHANGED event shared by the getMessage/getPlainMessage tests. */
    private NotificationEvent getIssueBodyChangedEvent() {
        NotificationEvent event = getNotificationEvent(ResourceType.ISSUE_POST);
        event.eventType = EventType.ISSUE_BODY_CHANGED;
        event.oldValue = "old value";
        event.newValue = "new value";
        return event;
    }

    @Ignore("Test is ignored as old test with repository dependency")
    @Test
    public void add() {
        // Given
        NotificationEvent event = getNotificationEvent(ResourceType.ISSUE_POST);

        // When
        NotificationEvent.add(event);

        // Then
        assertThat(NotificationMail.find.byId(event.notificationMail.id)).isNotNull();
    }

    @Ignore("Test is ignored as old test with repository dependency")
    @Test
    public void addTwoTimes() {
        // Given
        NotificationEvent event = getNotificationEvent(ResourceType.ISSUE_POST);
        NotificationEvent.add(event);
        int numOfMails = NotificationMail.find.all().size();

        // When: adding the same event again must not create another mail
        NotificationEvent.add(event);

        // Then
        assertThat(NotificationEvent.find.all().size()).isEqualTo(numOfMails);
    }

    @Ignore("Test is ignored as old test with repository dependency")
    @Test
    public void delete() {
        // Given
        NotificationEvent event = getNotificationEvent(ResourceType.ISSUE_POST);
        NotificationEvent.add(event);

        // When
        event.delete();

        // Then: the associated mail is removed together with the event
        assertThat(NotificationMail.find.byId(event.notificationMail.id)).isNull();
    }

    @Ignore("Test is ignored as old test with repository dependency")
    @Test
    public void add_with_filter() {
        // Given: one user watching the project but unwatching this event type,
        // and one user who only unwatched this event type.
        Issue issue = Issue.finder.byId(1L);
        Project project = issue.project;
        User watching_project_off = getTestUser(2L);
        Watch.watch(watching_project_off, project.asResource());
        UserProjectNotification.unwatchExplictly(watching_project_off, project, EventType.ISSUE_ASSIGNEE_CHANGED);
        User off = getTestUser(3L);
        UserProjectNotification.unwatchExplictly(off, project, EventType.ISSUE_ASSIGNEE_CHANGED);
        NotificationEvent event = getNotificationEvent(ResourceType.ISSUE_POST);
        event.eventType = EventType.ISSUE_ASSIGNEE_CHANGED;
        event.receivers.add(watching_project_off);
        event.receivers.add(off);

        // When
        NotificationEvent.add(event);

        // Then: the project watcher who opted out is filtered from receivers
        assertThat(event.receivers).containsOnly(off);
    }

    @Ignore("Test is ignored as old test with repository dependency")
    @Test
    public void getNewMentionedUsers1() {
        // Given
        String loginId = "doortts";
        String oldBody = "I'm @yobi";
        String newBody = "I'm @" + loginId;

        // When
        Set<User> newMentionedUsers = NotificationEvent.getNewMentionedUsers(oldBody, newBody);

        // Then
        User newMentionedUser = User.findByLoginId(loginId);
        assertThat(newMentionedUsers).hasSize(1);
        assertThat(newMentionedUsers).contains(newMentionedUser);
    }

    @Ignore("Test is ignored as old test with repository dependency")
    @Test
    public void getNewMentionedUsers2() {
        // Given: unknown login ids must not produce mentioned users
        String loginId = "laziel";
        String oldBody = "They are @yobi and @doortts";
        String newBody = "They are @" + loginId + " and @unknownUserLoginId";

        // When
        Set<User> newMentionedUsers = NotificationEvent.getNewMentionedUsers(oldBody, newBody);

        // Then
        User newMentionedUser = User.findByLoginId(loginId);
        assertThat(newMentionedUsers).hasSize(1);
        assertThat(newMentionedUsers).contains(newMentionedUser);
    }

    @Test
    public void getMessage_eventTypeIsIssueBodyChangedWithNoParameter_returnString() {
        // Given
        NotificationEvent notificationEvent = getIssueBodyChangedEvent();

        // When
        String result = notificationEvent.getMessage();

        // Then
        assertThat(result).isNotEmpty();
    }

    @Test
    public void getMessage_eventTypeIsIssueBodyChangedWithParameter_returnString() {
        // Given
        NotificationEvent notificationEvent = getIssueBodyChangedEvent();

        // When
        String result = notificationEvent.getMessage(Lang.defaultLang());

        // Then
        assertThat(result).isNotEmpty();
    }

    @Test
    public void getPlainMessage_eventTypeIsIssueBodyChangedWithNoParameter_returnString() {
        // Given
        NotificationEvent notificationEvent = getIssueBodyChangedEvent();

        // When
        String result = notificationEvent.getPlainMessage();

        // Then
        assertThat(result).isNotEmpty();
    }

    @Test
    public void getPlainMessage_eventTypeIsIssueBodyChangedWithParameter_returnString() {
        // Given
        NotificationEvent notificationEvent = getIssueBodyChangedEvent();

        // When
        String result = notificationEvent.getPlainMessage(Lang.defaultLang());

        // Then
        assertThat(result).isNotEmpty();
    }
}
| apache-2.0 |
charlescapps/robot-intellij-plugin | gen/com/jivesoftware/robot/intellij/plugin/psi/RobotKeywordsTable.java | 599 | // This is a generated file. Not intended for manual editing.
package com.jivesoftware.robot.intellij.plugin.psi;
import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.psi.PsiElement;
import com.intellij.navigation.ItemPresentation;
/**
 * PSI element for a keywords table in a Robot Framework file: the table
 * heading together with the keyword definitions that follow it.
 *
 * NOTE(review): this interface lives in a generated file (see file header);
 * regenerating from the grammar will overwrite manual edits.
 */
public interface RobotKeywordsTable extends PsiElement {

  /** Empty lines appearing inside the table. */
  @NotNull
  List<RobotEmptyLine> getEmptyLineList();

  /** The heading element that introduces this keywords table. */
  @NotNull
  RobotKeywordsTableHeading getKeywordsTableHeading();

  /** The keyword definitions contained in this table. */
  @NotNull
  List<RobotKeywordDefinition> getKeywordDefinitionList();

  /** The name identifier of this element, if any. */
  @Nullable
  PsiElement getNameIdentifier();

  /** Presentation used by IDE navigation/structure views. */
  ItemPresentation getPresentation();

}
| apache-2.0 |
apache/flex-flexunit | FlexUnit4AntTasks/src/org/flexunit/ant/LoggingUtil.java | 1135 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flexunit.ant;
/**
 * Minimal logging helper: messages are printed to standard out either when
 * {@link #VERBOSE} is enabled or when the caller forces the message.
 */
public class LoggingUtil
{
   /** When true, all messages passed to {@link #log(String)} are printed. */
   public static boolean VERBOSE = false;

   /** Utility class; not meant to be instantiated. */
   private LoggingUtil()
   {
   }

   /**
    * Logs the message only if {@link #VERBOSE} is enabled.
    *
    * @param message text to print
    */
   public static void log(String message)
   {
      log(message, false);
   }

   /**
    * Logs the message if {@link #VERBOSE} is enabled or {@code force} is true.
    *
    * @param message text to print
    * @param force print regardless of the VERBOSE flag
    */
   public static void log(String message, boolean force)
   {
      if(VERBOSE || force)
      {
         System.out.println(message);
      }
   }
}
| apache-2.0 |
apixandru/intellij-community | plugins/groovy/groovy-psi/src/org/jetbrains/plugins/groovy/lang/psi/typeEnhancers/GrBigDecimalConverter.java | 2210 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.psi.typeEnhancers;
import com.intellij.psi.PsiType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElement;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.ConversionResult;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.TypesUtil;
import org.jetbrains.plugins.groovy.lang.psi.util.GroovyCommonClassNames;
import static com.intellij.psi.util.TypeConversionUtil.isFloatOrDoubleType;
public class GrBigDecimalConverter extends GrTypeConverter {

  @Override
  public boolean isApplicableTo(@NotNull ApplicableTo position) {
    // This converter applies everywhere except method-parameter positions.
    return position != ApplicableTo.METHOD_PARAMETER;
  }

  @Nullable
  @Override
  public ConversionResult isConvertibleEx(@NotNull PsiType targetType,
                                          @NotNull PsiType actualType,
                                          @NotNull GroovyPsiElement context,
                                          @NotNull ApplicableTo currentPosition) {
    // BigDecimal value assigned to a float/double target.
    if (isFloatOrDoubleType(targetType)
        && TypesUtil.isClassType(actualType, GroovyCommonClassNames.JAVA_MATH_BIG_DECIMAL)) {
      return ConversionResult.OK;
    }
    // Any numeric value assigned to a BigDecimal target.
    if (TypesUtil.isClassType(targetType, GroovyCommonClassNames.JAVA_MATH_BIG_DECIMAL)
        && TypesUtil.isNumericType(actualType)) {
      return ConversionResult.OK;
    }
    // Any integral value assigned to a BigInteger target.
    if (TypesUtil.isClassType(targetType, GroovyCommonClassNames.JAVA_MATH_BIG_INTEGER)
        && TypesUtil.isIntegralNumberType(actualType)) {
      return ConversionResult.OK;
    }
    // Not a conversion handled here; defer to other converters.
    return null;
  }
}
| apache-2.0 |
paulnguyen/cmpe279 | eclipse/Webgoat/src/org/owasp/webgoat/session/LessonSession.java | 2029 |
package org.owasp.webgoat.session;
/***************************************************************************************************
*
*
* This file is part of WebGoat, an Open Web Application Security Project utility. For details,
* please see http://www.owasp.org/
*
* Copyright (c) 2002 - 2007 Bruce Mayhew
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU General Public License as published by the Free Software Foundation; either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with this program; if
* not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
* 02111-1307, USA.
*
* Getting Source ==============
*
* Source for this application is maintained at code.google.com, a repository for free software
* projects.
*
* For details, please see http://code.google.com/p/webgoat/
*
* Represents a virtual session for a lesson. Lesson-specific session data may be stored here.
*
* @author David Anderson <a href="http://www.aspectsecurity.com">Aspect Security</a>
* @created January 19, 2006
*/
public class LessonSession
{

    /** Whether the user has authenticated within this lesson session. */
    private boolean isAuthenticated = false;

    /** Identifier of the lesson screen currently being shown. */
    private String currentLessonScreen;

    public boolean isAuthenticated()
    {
        return this.isAuthenticated;
    }

    public void setAuthenticated(boolean isAuthenticated)
    {
        this.isAuthenticated = isAuthenticated;
    }

    public String getCurrentLessonScreen()
    {
        return this.currentLessonScreen;
    }

    public void setCurrentLessonScreen(String currentLessonScreen)
    {
        this.currentLessonScreen = currentLessonScreen;
    }
}
| apache-2.0 |
DexDevs/educate-for-oop | java/book-dietel/nadia/WordsReversed16_9.java | 487 | import java.util.Scanner;
public class WordsReversed16_9 {

    /**
     * Reverses the characters of each whitespace-separated word while keeping
     * the words in their original order.
     *
     * @param sentence input text; leading/trailing and repeated whitespace is
     *                 collapsed to single spaces in the result
     * @return the sentence with every word reversed, separated by single spaces
     */
    public static String reverseWords(String sentence) {
        String[] tokens = sentence.trim().split("\\s+");
        StringBuilder result = new StringBuilder();
        for (int i = 0; i < tokens.length; i++) {
            if (i > 0) {
                result.append(' ');
            }
            // StringBuilder.reverse does the per-word character reversal.
            result.append(new StringBuilder(tokens[i]).reverse());
        }
        return result.toString();
    }

    /** Reads one sentence from stdin and prints it with each word reversed. */
    public static void main(String[] args) {
        Scanner in = new Scanner(System.in);
        System.out.println("Enter a Sentence");
        String sentence = in.nextLine();
        // Print via the helper: ends with a newline and no trailing space.
        System.out.println(reverseWords(sentence));
    }
}
| apache-2.0 |
rLadia-demo/AttacknidPatch | Attacknids/src/org/anddev/andengine/entity/layer/ZIndexSorter.java | 2244 | package org.anddev.andengine.entity.layer;
import java.util.Comparator;
import java.util.List;
import org.anddev.andengine.entity.IEntity;
import org.anddev.andengine.util.sort.InsertionSorter;
public class ZIndexSorter extends InsertionSorter<IEntity> {
// ===========================================================
// Constants
// ===========================================================
private static ZIndexSorter INSTANCE;
// ===========================================================
// Fields
// ===========================================================
private final Comparator<IEntity> mZIndexComparator = new Comparator<IEntity>() {
@Override
public int compare(final IEntity pEntityA, final IEntity pEntityB) {
return pEntityA.getZIndex() - pEntityB.getZIndex();
}
};
// ===========================================================
// Constructors
// ===========================================================
private ZIndexSorter() {
}
public static ZIndexSorter getInstance() {
if(INSTANCE == null) {
INSTANCE = new ZIndexSorter();
}
return INSTANCE;
}
// ===========================================================
// Getter & Setter
// ===========================================================
// ===========================================================
// Methods for/from SuperClass/Interfaces
// ===========================================================
// ===========================================================
// Methods
// ===========================================================
public void sort(final IEntity[] pEntities) {
this.sort(pEntities, this.mZIndexComparator);
}
public void sort(final IEntity[] pEntities, final int pStart, final int pEnd) {
this.sort(pEntities, pStart, pEnd, this.mZIndexComparator);
}
public void sort(final List<IEntity> pEntities) {
this.sort(pEntities, this.mZIndexComparator);
}
public void sort(final List<IEntity> pEntities, final int pStart, final int pEnd) {
this.sort(pEntities, pStart, pEnd, this.mZIndexComparator);
}
// ===========================================================
// Inner and Anonymous Classes
// ===========================================================
} | apache-2.0 |
wuranbo/elasticsearch | core/src/main/java/org/elasticsearch/index/IndexService.java | 37091 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.env.ShardLockObtainFailedException;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineClosedException;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexSearcherWrapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardClosedException;
import org.elasticsearch.index.shard.IndexingOperationListener;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.index.shard.ShadowIndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.cluster.IndicesClusterStateService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.function.LongSupplier;
import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
public class IndexService extends AbstractIndexComponent implements IndicesClusterStateService.AllocatedIndex<IndexShard> {
private final IndexEventListener eventListener;
private final IndexFieldDataService indexFieldData;
private final BitsetFilterCache bitsetFilterCache;
private final NodeEnvironment nodeEnv;
private final ShardStoreDeleter shardStoreDeleter;
private final IndexStore indexStore;
private final IndexSearcherWrapper searcherWrapper;
private final IndexCache indexCache;
private final MapperService mapperService;
private final NamedXContentRegistry xContentRegistry;
private final SimilarityService similarityService;
private final EngineFactory engineFactory;
private final IndexWarmer warmer;
private final Consumer<ShardId> globalCheckpointSyncer;
private volatile Map<Integer, IndexShard> shards = emptyMap();
private final AtomicBoolean closed = new AtomicBoolean(false);
private final AtomicBoolean deleted = new AtomicBoolean(false);
private final IndexSettings indexSettings;
private final List<IndexingOperationListener> indexingOperationListeners;
private final List<SearchOperationListener> searchOperationListeners;
private volatile AsyncRefreshTask refreshTask;
private volatile AsyncTranslogFSync fsyncTask;
private final ThreadPool threadPool;
private final BigArrays bigArrays;
private final AsyncGlobalCheckpointTask globalCheckpointTask;
private final ScriptService scriptService;
private final ClusterService clusterService;
private final Client client;
/**
 * Wires together all per-index services (mapper service, field data, caches,
 * warmer, searcher wrapper) and creates the asynchronous background tasks for
 * refresh, translog fsync, and the global checkpoint.
 */
public IndexService(IndexSettings indexSettings, NodeEnvironment nodeEnv,
                    NamedXContentRegistry xContentRegistry,
                    SimilarityService similarityService,
                    ShardStoreDeleter shardStoreDeleter,
                    AnalysisRegistry registry,
                    @Nullable EngineFactory engineFactory,
                    CircuitBreakerService circuitBreakerService,
                    BigArrays bigArrays,
                    ThreadPool threadPool,
                    ScriptService scriptService,
                    ClusterService clusterService,
                    Client client,
                    QueryCache queryCache,
                    IndexStore indexStore,
                    IndexEventListener eventListener,
                    IndexModule.IndexSearcherWrapperFactory wrapperFactory,
                    MapperRegistry mapperRegistry,
                    IndicesFieldDataCache indicesFieldDataCache,
                    Consumer<ShardId> globalCheckpointSyncer,
                    List<SearchOperationListener> searchOperationListeners,
                    List<IndexingOperationListener> indexingOperationListeners) throws IOException {
    super(indexSettings);
    this.indexSettings = indexSettings;
    this.globalCheckpointSyncer = globalCheckpointSyncer;
    this.xContentRegistry = xContentRegistry;
    this.similarityService = similarityService;
    this.mapperService = new MapperService(indexSettings, registry.build(indexSettings), xContentRegistry, similarityService,
        mapperRegistry,
        // we parse all percolator queries as they would be parsed on shard 0
        () -> newQueryShardContext(0, null, () -> {
            throw new IllegalArgumentException("Percolator queries are not allowed to use the current timestamp");
        }));
    this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, circuitBreakerService, mapperService);
    this.shardStoreDeleter = shardStoreDeleter;
    this.bigArrays = bigArrays;
    this.threadPool = threadPool;
    this.scriptService = scriptService;
    this.clusterService = clusterService;
    this.client = client;
    this.eventListener = eventListener;
    this.nodeEnv = nodeEnv;
    this.indexStore = indexStore;
    // Hook field-data and bitset cache events into this service's listeners.
    indexFieldData.setListener(new FieldDataCacheListener(this));
    this.bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetCacheListener(this));
    this.warmer = new IndexWarmer(indexSettings.getSettings(), threadPool,
        bitsetFilterCache.createListener(threadPool));
    this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache);
    this.engineFactory = engineFactory;
    // initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE
    this.searcherWrapper = wrapperFactory.newWrapper(this);
    this.globalCheckpointTask = new AsyncGlobalCheckpointTask(this);
    // Freeze the listener lists so they cannot change after construction.
    this.indexingOperationListeners = Collections.unmodifiableList(indexingOperationListeners);
    this.searchOperationListeners = Collections.unmodifiableList(searchOperationListeners);
    // kick off async ops for the first shard in this index
    this.refreshTask = new AsyncRefreshTask(this);
    rescheduleFsyncTask(indexSettings.getTranslogDurability());
}
public int numberOfShards() {
return shards.size();
}
public IndexEventListener getIndexEventListener() {
return this.eventListener;
}
@Override
public Iterator<IndexShard> iterator() {
return shards.values().iterator();
}
public boolean hasShard(int shardId) {
return shards.containsKey(shardId);
}
/**
* Return the shard with the provided id, or null if there is no such shard.
*/
@Override
@Nullable
public IndexShard getShardOrNull(int shardId) {
return shards.get(shardId);
}
/**
* Return the shard with the provided id, or throw an exception if it doesn't exist.
*/
public IndexShard getShard(int shardId) {
IndexShard indexShard = getShardOrNull(shardId);
if (indexShard == null) {
throw new ShardNotFoundException(new ShardId(index(), shardId));
}
return indexShard;
}
public Set<Integer> shardIds() {
return shards.keySet();
}
public IndexCache cache() {
return indexCache;
}
public IndexFieldDataService fieldData() {
return indexFieldData;
}
public IndexAnalyzers getIndexAnalyzers() {
return this.mapperService.getIndexAnalyzers();
}
public MapperService mapperService() {
return mapperService;
}
public NamedXContentRegistry xContentRegistry() {
return xContentRegistry;
}
public SimilarityService similarityService() {
return similarityService;
}
public synchronized void close(final String reason, boolean delete) throws IOException {
if (closed.compareAndSet(false, true)) {
deleted.compareAndSet(false, delete);
try {
final Set<Integer> shardIds = shardIds();
for (final int shardId : shardIds) {
try {
removeShard(shardId, reason);
} catch (Exception e) {
logger.warn("failed to close shard", e);
}
}
} finally {
IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, mapperService, refreshTask, fsyncTask, globalCheckpointTask);
}
}
}
public String indexUUID() {
return indexSettings.getUUID();
}
// NOTE: O(numShards) cost, but numShards should be smallish?
private long getAvgShardSizeInBytes() throws IOException {
long sum = 0;
int count = 0;
for (IndexShard indexShard : this) {
sum += indexShard.store().stats().sizeInBytes();
count++;
}
if (count == 0) {
return -1L;
} else {
return sum / count;
}
}
public synchronized IndexShard createShard(ShardRouting routing) throws IOException {
final boolean primary = routing.primary();
/*
* TODO: we execute this in parallel but it's a synced method. Yet, we might
* be able to serialize the execution via the cluster state in the future. for now we just
* keep it synced.
*/
if (closed.get()) {
throw new IllegalStateException("Can't create shard " + routing.shardId() + ", closed");
}
final Settings indexSettings = this.indexSettings.getSettings();
final ShardId shardId = routing.shardId();
boolean success = false;
Store store = null;
IndexShard indexShard = null;
ShardLock lock = null;
try {
lock = nodeEnv.shardLock(shardId, TimeUnit.SECONDS.toMillis(5));
eventListener.beforeIndexShardCreated(shardId, indexSettings);
ShardPath path;
try {
path = ShardPath.loadShardPath(logger, nodeEnv, shardId, this.indexSettings);
} catch (IllegalStateException ex) {
logger.warn("{} failed to load shard path, trying to remove leftover", shardId);
try {
ShardPath.deleteLeftoverShardDirectory(logger, nodeEnv, lock, this.indexSettings);
path = ShardPath.loadShardPath(logger, nodeEnv, shardId, this.indexSettings);
} catch (Exception inner) {
ex.addSuppressed(inner);
throw ex;
}
}
if (path == null) {
// TODO: we should, instead, hold a "bytes reserved" of how large we anticipate this shard will be, e.g. for a shard
// that's being relocated/replicated we know how large it will become once it's done copying:
// Count up how many shards are currently on each data path:
Map<Path, Integer> dataPathToShardCount = new HashMap<>();
for (IndexShard shard : this) {
Path dataPath = shard.shardPath().getRootStatePath();
Integer curCount = dataPathToShardCount.get(dataPath);
if (curCount == null) {
curCount = 0;
}
dataPathToShardCount.put(dataPath, curCount + 1);
}
path = ShardPath.selectNewPathForShard(nodeEnv, shardId, this.indexSettings,
routing.getExpectedShardSize() == ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE
? getAvgShardSizeInBytes() : routing.getExpectedShardSize(),
dataPathToShardCount);
logger.debug("{} creating using a new path [{}]", shardId, path);
} else {
logger.debug("{} creating using an existing path [{}]", shardId, path);
}
if (shards.containsKey(shardId.id())) {
throw new IllegalStateException(shardId + " already exists");
}
logger.debug("creating shard_id {}", shardId);
// if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary.
final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false ||
(primary && IndexMetaData.isOnSharedFilesystem(indexSettings));
final Engine.Warmer engineWarmer = (searcher) -> {
IndexShard shard = getShardOrNull(shardId.getId());
if (shard != null) {
warmer.warm(searcher, shard, IndexService.this.indexSettings);
}
};
store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock,
new StoreCloseListener(shardId, canDeleteShardContent, () -> eventListener.onStoreClosed(shardId)));
if (useShadowEngine(primary, indexSettings)) {
indexShard = new ShadowIndexShard(routing, this.indexSettings, path, store, indexCache, mapperService, similarityService,
indexFieldData, engineFactory, eventListener, searcherWrapper, threadPool, bigArrays, engineWarmer,
searchOperationListeners);
// no indexing listeners - shadow engines don't index
} else {
indexShard = new IndexShard(routing, this.indexSettings, path, store, indexCache, mapperService, similarityService,
indexFieldData, engineFactory, eventListener, searcherWrapper, threadPool, bigArrays, engineWarmer,
() -> globalCheckpointSyncer.accept(shardId),
searchOperationListeners, indexingOperationListeners);
}
eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created");
eventListener.afterIndexShardCreated(indexShard);
shards = newMapBuilder(shards).put(shardId.id(), indexShard).immutableMap();
success = true;
return indexShard;
} catch (ShardLockObtainFailedException e) {
throw new IOException("failed to obtain in-memory shard lock", e);
} finally {
if (success == false) {
if (lock != null) {
IOUtils.closeWhileHandlingException(lock);
}
closeShard("initialization failed", shardId, indexShard, store, eventListener);
}
}
}
static boolean useShadowEngine(boolean primary, Settings indexSettings) {
return primary == false && IndexMetaData.isIndexUsingShadowReplicas(indexSettings);
}
@Override
public synchronized void removeShard(int shardId, String reason) {
final ShardId sId = new ShardId(index(), shardId);
final IndexShard indexShard;
if (shards.containsKey(shardId) == false) {
return;
}
logger.debug("[{}] closing... (reason: [{}])", shardId, reason);
HashMap<Integer, IndexShard> newShards = new HashMap<>(shards);
indexShard = newShards.remove(shardId);
shards = unmodifiableMap(newShards);
closeShard(reason, sId, indexShard, indexShard.store(), indexShard.getIndexEventListener());
logger.debug("[{}] closed (reason: [{}])", shardId, reason);
}
private void closeShard(String reason, ShardId sId, IndexShard indexShard, Store store, IndexEventListener listener) {
final int shardId = sId.id();
final Settings indexSettings = this.getIndexSettings().getSettings();
try {
try {
listener.beforeIndexShardClosed(sId, indexShard, indexSettings);
} finally {
// this logic is tricky, we want to close the engine so we rollback the changes done to it
// and close the shard so no operations are allowed to it
if (indexShard != null) {
try {
// only flush we are we closed (closed index or shutdown) and if we are not deleted
final boolean flushEngine = deleted.get() == false && closed.get();
indexShard.close(reason, flushEngine);
} catch (Exception e) {
logger.debug((Supplier<?>) () -> new ParameterizedMessage("[{}] failed to close index shard", shardId), e);
// ignore
}
}
// call this before we close the store, so we can release resources for it
listener.afterIndexShardClosed(sId, indexShard, indexSettings);
}
} finally {
try {
if (store != null) {
store.close();
} else {
logger.trace("[{}] store not initialized prior to closing shard, nothing to close", shardId);
}
} catch (Exception e) {
logger.warn(
(Supplier<?>) () -> new ParameterizedMessage(
"[{}] failed to close store on shard removal (reason: [{}])", shardId, reason), e);
}
}
}
private void onShardClose(ShardLock lock, boolean ownsShard) {
if (deleted.get()) { // we remove that shards content if this index has been deleted
try {
if (ownsShard) {
try {
eventListener.beforeIndexShardDeleted(lock.getShardId(), indexSettings.getSettings());
} finally {
shardStoreDeleter.deleteShardStore("delete index", lock, indexSettings);
eventListener.afterIndexShardDeleted(lock.getShardId(), indexSettings.getSettings());
}
}
} catch (IOException e) {
shardStoreDeleter.addPendingDelete(lock.getShardId(), indexSettings);
logger.debug(
(Supplier<?>) () -> new ParameterizedMessage(
"[{}] failed to delete shard content - scheduled a retry", lock.getShardId().id()), e);
}
}
}
@Override
public IndexSettings getIndexSettings() {
return indexSettings;
}
/**
* Creates a new QueryShardContext. The context has not types set yet, if types are required set them via
* {@link QueryShardContext#setTypes(String...)}.
*
* Passing a {@code null} {@link IndexReader} will return a valid context, however it won't be able to make
* {@link IndexReader}-specific optimizations, such as rewriting containing range queries.
*/
public QueryShardContext newQueryShardContext(int shardId, IndexReader indexReader, LongSupplier nowInMillis) {
return new QueryShardContext(
shardId, indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(),
similarityService(), scriptService, xContentRegistry,
client, indexReader,
nowInMillis);
}
/**
* The {@link ThreadPool} to use for this index.
*/
public ThreadPool getThreadPool() {
return threadPool;
}
/**
* The {@link BigArrays} to use for this index.
*/
public BigArrays getBigArrays() {
return bigArrays;
}
/**
* The {@link ScriptService} to use for this index.
*/
public ScriptService getScriptService() {
return scriptService;
}
List<IndexingOperationListener> getIndexOperationListeners() { // pkg private for testing
return indexingOperationListeners;
}
List<SearchOperationListener> getSearchOperationListener() { // pkg private for testing
return searchOperationListeners;
}
@Override
public boolean updateMapping(IndexMetaData indexMetaData) throws IOException {
return mapperService().updateMapping(indexMetaData);
}
private class StoreCloseListener implements Store.OnClose {
private final ShardId shardId;
private final boolean ownsShard;
private final Closeable[] toClose;
public StoreCloseListener(ShardId shardId, boolean ownsShard, Closeable... toClose) {
this.shardId = shardId;
this.ownsShard = ownsShard;
this.toClose = toClose;
}
@Override
public void handle(ShardLock lock) {
try {
assert lock.getShardId().equals(shardId) : "shard id mismatch, expected: " + shardId + " but got: " + lock.getShardId();
onShardClose(lock, ownsShard);
} finally {
try {
IOUtils.close(toClose);
} catch (IOException ex) {
logger.debug("failed to close resource", ex);
}
}
}
}
private static final class BitsetCacheListener implements BitsetFilterCache.Listener {
final IndexService indexService;
private BitsetCacheListener(IndexService indexService) {
this.indexService = indexService;
}
@Override
public void onCache(ShardId shardId, Accountable accountable) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
long ramBytesUsed = accountable != null ? accountable.ramBytesUsed() : 0L;
shard.shardBitsetFilterCache().onCached(ramBytesUsed);
}
}
}
@Override
public void onRemoval(ShardId shardId, Accountable accountable) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
long ramBytesUsed = accountable != null ? accountable.ramBytesUsed() : 0L;
shard.shardBitsetFilterCache().onRemoval(ramBytesUsed);
}
}
}
}
private final class FieldDataCacheListener implements IndexFieldDataCache.Listener {
final IndexService indexService;
public FieldDataCacheListener(IndexService indexService) {
this.indexService = indexService;
}
@Override
public void onCache(ShardId shardId, String fieldName, Accountable ramUsage) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
shard.fieldData().onCache(shardId, fieldName, ramUsage);
}
}
}
@Override
public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, long sizeInBytes) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
shard.fieldData().onRemoval(shardId, fieldName, wasEvicted, sizeInBytes);
}
}
}
}
public IndexMetaData getMetaData() {
return indexSettings.getIndexMetaData();
}
@Override
public synchronized void updateMetaData(final IndexMetaData metadata) {
final Translog.Durability oldTranslogDurability = indexSettings.getTranslogDurability();
if (indexSettings.updateIndexMetaData(metadata)) {
for (final IndexShard shard : this.shards.values()) {
try {
shard.onSettingsChanged();
} catch (Exception e) {
logger.warn(
(Supplier<?>) () -> new ParameterizedMessage(
"[{}] failed to notify shard about setting change", shard.shardId().id()), e);
}
}
if (refreshTask.getInterval().equals(indexSettings.getRefreshInterval()) == false) {
rescheduleRefreshTasks();
}
final Translog.Durability durability = indexSettings.getTranslogDurability();
if (durability != oldTranslogDurability) {
rescheduleFsyncTask(durability);
}
}
// update primary terms
for (final IndexShard shard : this.shards.values()) {
shard.updatePrimaryTerm(metadata.primaryTerm(shard.shardId().id()));
}
}
private void rescheduleFsyncTask(Translog.Durability durability) {
try {
if (fsyncTask != null) {
fsyncTask.close();
}
} finally {
fsyncTask = durability == Translog.Durability.REQUEST ? null : new AsyncTranslogFSync(this);
}
}
private void rescheduleRefreshTasks() {
try {
refreshTask.close();
} finally {
refreshTask = new AsyncRefreshTask(this);
}
}
public interface ShardStoreDeleter {
void deleteShardStore(String reason, ShardLock lock, IndexSettings indexSettings) throws IOException;
void addPendingDelete(ShardId shardId, IndexSettings indexSettings);
}
final EngineFactory getEngineFactory() {
return engineFactory;
} // pkg private for testing
final IndexSearcherWrapper getSearcherWrapper() {
return searcherWrapper;
} // pkg private for testing
final IndexStore getIndexStore() {
return indexStore;
} // pkg private for testing
private void maybeFSyncTranslogs() {
if (indexSettings.getTranslogDurability() == Translog.Durability.ASYNC) {
for (IndexShard shard : this.shards.values()) {
try {
Translog translog = shard.getTranslog();
if (translog.syncNeeded()) {
translog.sync();
}
} catch (EngineClosedException | AlreadyClosedException ex) {
// fine - continue;
} catch (IOException e) {
logger.warn("failed to sync translog", e);
}
}
}
}
private void maybeRefreshEngine() {
if (indexSettings.getRefreshInterval().millis() > 0) {
for (IndexShard shard : this.shards.values()) {
switch (shard.state()) {
case CREATED:
case RECOVERING:
case CLOSED:
continue;
case POST_RECOVERY:
case STARTED:
case RELOCATED:
try {
if (shard.isRefreshNeeded()) {
shard.refresh("schedule");
}
} catch (IndexShardClosedException | AlreadyClosedException ex) {
// fine - continue;
}
continue;
default:
throw new IllegalStateException("unknown state: " + shard.state());
}
}
}
}
private void maybeUpdateGlobalCheckpoints() {
for (IndexShard shard : this.shards.values()) {
if (shard.routingEntry().primary()) {
switch (shard.state()) {
case CREATED:
case RECOVERING:
case CLOSED:
case RELOCATED:
continue;
case POST_RECOVERY:
case STARTED:
try {
shard.updateGlobalCheckpointOnPrimary();
} catch (EngineClosedException | AlreadyClosedException ex) {
// fine - continue, the shard was concurrently closed on us.
}
continue;
default:
throw new IllegalStateException("unknown state: " + shard.state());
}
}
}
}
abstract static class BaseAsyncTask implements Runnable, Closeable {
protected final IndexService indexService;
protected final ThreadPool threadPool;
private final TimeValue interval;
private ScheduledFuture<?> scheduledFuture;
private final AtomicBoolean closed = new AtomicBoolean(false);
private volatile Exception lastThrownException;
BaseAsyncTask(IndexService indexService, TimeValue interval) {
this.indexService = indexService;
this.threadPool = indexService.getThreadPool();
this.interval = interval;
onTaskCompletion();
}
boolean mustReschedule() {
// don't re-schedule if its closed or if we don't have a single shard here..., we are done
return indexService.closed.get() == false
&& closed.get() == false && interval.millis() > 0;
}
private synchronized void onTaskCompletion() {
if (mustReschedule()) {
if (indexService.logger.isTraceEnabled()) {
indexService.logger.trace("scheduling {} every {}", toString(), interval);
}
this.scheduledFuture = threadPool.schedule(interval, getThreadPool(), BaseAsyncTask.this);
} else {
indexService.logger.trace("scheduled {} disabled", toString());
this.scheduledFuture = null;
}
}
boolean isScheduled() {
return scheduledFuture != null;
}
@Override
public final void run() {
try {
runInternal();
} catch (Exception ex) {
if (lastThrownException == null || sameException(lastThrownException, ex) == false) {
// prevent the annoying fact of logging the same stuff all the time with an interval of 1 sec will spam all your logs
indexService.logger.warn(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to run task {} - suppressing re-occurring exceptions unless the exception changes",
toString()),
ex);
lastThrownException = ex;
}
} finally {
onTaskCompletion();
}
}
private static boolean sameException(Exception left, Exception right) {
if (left.getClass() == right.getClass()) {
if (Objects.equals(left.getMessage(), right.getMessage())) {
StackTraceElement[] stackTraceLeft = left.getStackTrace();
StackTraceElement[] stackTraceRight = right.getStackTrace();
if (stackTraceLeft.length == stackTraceRight.length) {
for (int i = 0; i < stackTraceLeft.length; i++) {
if (stackTraceLeft[i].equals(stackTraceRight[i]) == false) {
return false;
}
}
return true;
}
}
}
return false;
}
protected abstract void runInternal();
protected String getThreadPool() {
return ThreadPool.Names.SAME;
}
@Override
public synchronized void close() {
if (closed.compareAndSet(false, true)) {
FutureUtils.cancel(scheduledFuture);
scheduledFuture = null;
}
}
TimeValue getInterval() {
return interval;
}
boolean isClosed() {
return this.closed.get();
}
}
/**
* FSyncs the translog for all shards of this index in a defined interval.
*/
static final class AsyncTranslogFSync extends BaseAsyncTask {
AsyncTranslogFSync(IndexService indexService) {
super(indexService, indexService.getIndexSettings().getTranslogSyncInterval());
}
@Override
protected String getThreadPool() {
return ThreadPool.Names.FLUSH;
}
@Override
protected void runInternal() {
indexService.maybeFSyncTranslogs();
}
@Override
public String toString() {
return "translog_sync";
}
}
final class AsyncRefreshTask extends BaseAsyncTask {
AsyncRefreshTask(IndexService indexService) {
super(indexService, indexService.getIndexSettings().getRefreshInterval());
}
@Override
protected void runInternal() {
indexService.maybeRefreshEngine();
}
@Override
protected String getThreadPool() {
return ThreadPool.Names.REFRESH;
}
@Override
public String toString() {
return "refresh";
}
}
final class AsyncGlobalCheckpointTask extends BaseAsyncTask {
AsyncGlobalCheckpointTask(IndexService indexService) {
super(indexService, indexService.getIndexSettings().getGlobalCheckpointInterval());
}
@Override
protected void runInternal() {
indexService.maybeUpdateGlobalCheckpoints();
}
@Override
public String toString() {
return "global_checkpoint";
}
}
AsyncRefreshTask getRefreshTask() { // for tests
return refreshTask;
}
AsyncTranslogFSync getFsyncTask() { // for tests
return fsyncTask;
}
AsyncGlobalCheckpointTask getGlobalCheckpointTask() { // for tests
return globalCheckpointTask;
}
}
| apache-2.0 |
mgmik/jfunk | jfunk-web/src/main/java/com/mgmtp/jfunk/web/ChromeDriverProvider.java | 1523 | /*
* Copyright (c) 2015 mgm technology partners GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mgmtp.jfunk.web;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.support.events.WebDriverEventListener;
import com.google.common.eventbus.EventBus;
import com.mgmtp.jfunk.common.util.Configuration;
/**
* @author rnaegele
*/
public class ChromeDriverProvider extends BaseWebDriverProvider {
@Inject
protected ChromeDriverProvider(final Configuration config, final Set<WebDriverEventListener> eventListeners,
final Map<String, DesiredCapabilities> capabilitiesMap, final EventBus eventBus) {
super(config, eventListeners, capabilitiesMap, eventBus);
}
@Override
protected WebDriver createWebDriver(final DesiredCapabilities capabilities) {
return new ChromeDriver(capabilities);
}
}
| apache-2.0 |
vdr007/ThriftyPaxos | src/applications/h2/src/tools/org/h2/build/doc/XMLChecker.java | 5100 | /*
* Copyright 2004-2014 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.build.doc;
import java.io.File;
import java.io.FileReader;
import java.util.Stack;
import org.h2.util.IOUtils;
/**
* This class checks that the HTML and XML part of the source code
* is well-formed XML.
*/
public class XMLChecker {
/**
* This method is called when executing this application from the command
* line.
*
* @param args the command line parameters
*/
public static void main(String... args) throws Exception {
new XMLChecker().run(args);
}
private void run(String... args) throws Exception {
String dir = ".";
for (int i = 0; i < args.length; i++) {
if ("-dir".equals(args[i])) {
dir = args[++i];
}
}
process(dir + "/src");
process(dir + "/docs");
}
private void process(String path) throws Exception {
if (path.endsWith("/CVS") || path.endsWith("/.svn")) {
return;
}
File file = new File(path);
if (file.isDirectory()) {
for (String name : file.list()) {
process(path + "/" + name);
}
} else {
processFile(path);
}
}
private static void processFile(String fileName) throws Exception {
int idx = fileName.lastIndexOf('.');
if (idx < 0) {
return;
}
String suffix = fileName.substring(idx + 1);
if (!suffix.equals("html") && !suffix.equals("xml") && !suffix.equals("jsp")) {
return;
}
// System.out.println("Checking file:" + fileName);
FileReader reader = new FileReader(fileName);
String s = IOUtils.readStringAndClose(reader, -1);
Exception last = null;
try {
checkXML(s, !suffix.equals("xml"));
} catch (Exception e) {
last = e;
System.out.println("ERROR in file " + fileName + " " + e.toString());
}
if (last != null) {
last.printStackTrace();
}
}
private static void checkXML(String xml, boolean html) throws Exception {
// String lastElement = null;
// <li>: replace <li>([^\r]*[^<]*) with <li>$1</li>
// use this for html file, for example if <li> is not closed
String[] noClose = {};
XMLParser parser = new XMLParser(xml);
Stack<Object[]> stack = new Stack<Object[]>();
boolean rootElement = false;
while (true) {
int event = parser.next();
if (event == XMLParser.END_DOCUMENT) {
break;
} else if (event == XMLParser.START_ELEMENT) {
if (stack.size() == 0) {
if (rootElement) {
throw new Exception("Second root element at " + parser.getRemaining());
}
rootElement = true;
}
String name = parser.getName();
if (html) {
for (String n : noClose) {
if (name.equals(n)) {
name = null;
break;
}
}
}
if (name != null) {
stack.add(new Object[] { name, parser.getPos() });
}
} else if (event == XMLParser.END_ELEMENT) {
String name = parser.getName();
if (html) {
for (String n : noClose) {
if (name.equals(n)) {
throw new Exception("Unnecessary closing element "
+ name + " at " + parser.getRemaining());
}
}
}
while (true) {
Object[] pop = stack.pop();
String p = (String) pop[0];
if (p.equals(name)) {
break;
}
String remaining = xml.substring((Integer) pop[1]);
if (remaining.length() > 100) {
remaining = remaining.substring(0, 100);
}
throw new Exception("Unclosed element " + p + " at " + remaining);
}
} else if (event == XMLParser.CHARACTERS) {
// lastElement = parser.getText();
} else if (event == XMLParser.DTD) {
// ignore
} else if (event == XMLParser.COMMENT) {
// ignore
} else {
int eventType = parser.getEventType();
throw new Exception("Unexpected event " + eventType + " at "
+ parser.getRemaining());
}
}
if (stack.size() != 0) {
throw new Exception("Unclosed root element");
}
}
}
| apache-2.0 |
wuranbo/elasticsearch | core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java | 20808 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.index.search.MatchQuery.Type;
import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matcher;
import java.io.IOException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuilder> {
@Override
protected MatchQueryBuilder doCreateTestQueryBuilder() {
String fieldName = randomFrom(STRING_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME,
DOUBLE_FIELD_NAME, DATE_FIELD_NAME);
if (fieldName.equals(DATE_FIELD_NAME)) {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
}
Object value;
if (fieldName.equals(STRING_FIELD_NAME)) {
int terms = randomIntBetween(0, 3);
StringBuilder builder = new StringBuilder();
for (int i = 0; i < terms; i++) {
builder.append(randomAsciiOfLengthBetween(1, 10)).append(" ");
}
value = builder.toString().trim();
} else {
value = getRandomValueForFieldName(fieldName);
}
MatchQueryBuilder matchQuery = new MatchQueryBuilder(fieldName, value);
matchQuery.operator(randomFrom(Operator.values()));
if (randomBoolean()) {
if (fieldName.equals(DATE_FIELD_NAME)) {
// tokenized dates would trigger parse errors
matchQuery.analyzer(randomFrom("keyword", "whitespace"));
} else {
matchQuery.analyzer(randomFrom("simple", "keyword", "whitespace"));
}
}
if (fieldName.equals(STRING_FIELD_NAME) && randomBoolean()) {
matchQuery.fuzziness(randomFuzziness(fieldName));
}
if (randomBoolean()) {
matchQuery.prefixLength(randomIntBetween(0, 10));
}
if (randomBoolean()) {
matchQuery.maxExpansions(randomIntBetween(1, 1000));
}
if (randomBoolean()) {
matchQuery.minimumShouldMatch(randomMinimumShouldMatch());
}
if (randomBoolean()) {
matchQuery.fuzzyRewrite(getRandomRewriteMethod());
}
if (randomBoolean()) {
matchQuery.fuzzyTranspositions(randomBoolean());
}
if (randomBoolean()) {
matchQuery.lenient(randomBoolean());
}
if (randomBoolean()) {
matchQuery.zeroTermsQuery(randomFrom(MatchQuery.ZeroTermsQuery.values()));
}
if (randomBoolean()) {
matchQuery.cutoffFrequency((float) 10 / randomIntBetween(1, 100));
}
return matchQuery;
}
@Override
protected Map<String, MatchQueryBuilder> getAlternateVersions() {
Map<String, MatchQueryBuilder> alternateVersions = new HashMap<>();
MatchQueryBuilder matchQuery = new MatchQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
String contentString = "{\n" +
" \"match\" : {\n" +
" \"" + matchQuery.fieldName() + "\" : \"" + matchQuery.value() + "\"\n" +
" }\n" +
"}";
alternateVersions.put(contentString, matchQuery);
return alternateVersions;
}
@Override
protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
    // A match query must always rewrite to a concrete Lucene query.
    assertThat(query, notNullValue());
    if (query instanceof MatchAllDocsQuery) {
        // A match-all can only appear when analysis yielded zero tokens and
        // zero_terms_query was configured as ALL.
        assertThat(queryBuilder.zeroTermsQuery(), equalTo(ZeroTermsQuery.ALL));
        return;
    }
    // The concrete Lucene query class depends on the match type plus the field
    // and analyzer involved, so only a set of acceptable classes is checked.
    switch (queryBuilder.type()) {
        case BOOLEAN:
            assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class))
                    .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)).or(instanceOf(MatchNoDocsQuery.class))
                    .or(instanceOf(PointRangeQuery.class)));
            break;
        case PHRASE:
            assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class))
                    .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class))
                    .or(instanceOf(PointRangeQuery.class)));
            break;
        case PHRASE_PREFIX:
            assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class))
                    .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class))
                    .or(instanceOf(PointRangeQuery.class)));
            break;
    }
    QueryShardContext context = searchContext.getQueryShardContext();
    MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName());
    if (query instanceof TermQuery && fieldType != null) {
        // Single-term case: the produced query must equal the field type's own
        // term query for the (possibly lowercased) input value.
        String queryValue = queryBuilder.value().toString();
        if (queryBuilder.analyzer() == null || queryBuilder.analyzer().equals("simple")) {
            // the default and the "simple" analyzer both lowercase terms
            queryValue = queryValue.toLowerCase(Locale.ROOT);
        }
        Query expectedTermQuery = fieldType.termQuery(queryValue, context);
        assertEquals(expectedTermQuery, query);
    }
    if (query instanceof BooleanQuery) {
        BooleanQuery bq = (BooleanQuery) query;
        if (queryBuilder.minimumShouldMatch() != null) {
            // calculate expected minimumShouldMatch value
            int optionalClauses = 0;
            for (BooleanClause c : bq.clauses()) {
                if (c.getOccur() == BooleanClause.Occur.SHOULD) {
                    optionalClauses++;
                }
            }
            int msm = Queries.calculateMinShouldMatch(optionalClauses, queryBuilder.minimumShouldMatch());
            assertThat(bq.getMinimumNumberShouldMatch(), equalTo(msm));
        }
        if (queryBuilder.analyzer() == null && queryBuilder.value().toString().length() > 0) {
            // with the default analyzer there should be one clause per whitespace-separated term
            assertEquals(bq.clauses().size(), queryBuilder.value().toString().split(" ").length);
        }
    }
    if (query instanceof ExtendedCommonTermsQuery) {
        // common-terms queries only appear when a cutoff frequency was set
        assertTrue(queryBuilder.cutoffFrequency() != null);
        ExtendedCommonTermsQuery ectq = (ExtendedCommonTermsQuery) query;
        assertEquals(queryBuilder.cutoffFrequency(), ectq.getMaxTermFrequency(), Float.MIN_VALUE);
    }
    if (query instanceof FuzzyQuery) {
        assertTrue(queryBuilder.fuzziness() != null);
        FuzzyQuery fuzzyQuery = (FuzzyQuery) query;
        // depending on analyzer being set or not we can have term lowercased along the way, so to simplify test we just
        // compare lowercased terms here
        String originalTermLc = queryBuilder.value().toString().toLowerCase(Locale.ROOT);
        String actualTermLc = fuzzyQuery.getTerm().text().toLowerCase(Locale.ROOT);
        Matcher<String> termLcMatcher = equalTo(originalTermLc);
        if ("false".equals(originalTermLc) || "true".equals(originalTermLc)) {
            // Booleans become t/f when querying a boolean field
            termLcMatcher = either(termLcMatcher).or(equalTo(originalTermLc.substring(0, 1)));
        }
        assertThat(actualTermLc, termLcMatcher);
        assertThat(queryBuilder.prefixLength(), equalTo(fuzzyQuery.getPrefixLength()));
        assertThat(queryBuilder.fuzzyTranspositions(), equalTo(fuzzyQuery.getTranspositions()));
    }
    if (query instanceof PointRangeQuery) {
        // TODO
    }
}
/**
 * Argument validation: null field name or value, negative prefix length,
 * non-positive max expansions, and null operator/type/zeroTermsQuery must all
 * be rejected with an {@code IllegalArgumentException}; an unknown analyzer is
 * only detected later, when the query is built against a shard context.
 */
public void testIllegalValues() {
    {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MatchQueryBuilder(null, "value"));
        assertEquals("[match] requires fieldName", e.getMessage());
    }
    {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MatchQueryBuilder("fieldName", null));
        assertEquals("[match] requires query value", e.getMessage());
    }
    MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text");
    {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.prefixLength(-1));
        assertEquals("[match] requires prefix length to be non-negative.", e.getMessage());
    }
    {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> matchQuery.maxExpansions(randomIntBetween(-10, 0)));
        assertEquals("[match] requires maxExpansions to be positive.", e.getMessage());
    }
    {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.operator(null));
        assertEquals("[match] requires operator to be non-null", e.getMessage());
    }
    {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.type(null));
        assertEquals("[match] requires type to be non-null", e.getMessage());
    }
    {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.zeroTermsQuery(null));
        assertEquals("[match] requires zeroTermsQuery to be non-null", e.getMessage());
    }
    // the analyzer name is only resolved at toQuery() time, against the shard context
    matchQuery.analyzer("bogusAnalyzer");
    {
        QueryShardException e = expectThrows(QueryShardException.class, () -> matchQuery.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found"));
    }
}
/**
 * Round-trip: a fully spelled-out match query must parse, regenerate the same
 * JSON, and expose the parsed query string and operator.
 */
public void testSimpleMatchQuery() throws IOException {
    String json = "{\n" +
            " \"match\" : {\n" +
            " \"message\" : {\n" +
            " \"query\" : \"to be or not to be\",\n" +
            " \"operator\" : \"AND\",\n" +
            " \"prefix_length\" : 0,\n" +
            " \"max_expansions\" : 50,\n" +
            " \"fuzzy_transpositions\" : true,\n" +
            " \"lenient\" : false,\n" +
            " \"zero_terms_query\" : \"ALL\",\n" +
            " \"boost\" : 1.0\n" +
            " }\n" +
            " }\n" +
            "}";
    MatchQueryBuilder qb = (MatchQueryBuilder) parseQuery(json);
    // serializing the parsed builder back to JSON must reproduce the input
    checkGeneratedJson(json, qb);
    assertEquals(json, "to be or not to be", qb.value());
    assertEquals(json, Operator.AND, qb.operator());
}
/**
 * Parsing the deprecated {@code type: phrase_prefix} / {@code slop} fields
 * (with lenient field matching) must still produce an equivalent builder and
 * emit the corresponding deprecation warnings.
 */
public void testLegacyMatchPhrasePrefixQuery() throws IOException {
    MatchQueryBuilder expectedQB = new MatchQueryBuilder("message", "to be or not to be");
    expectedQB.type(Type.PHRASE_PREFIX);
    expectedQB.slop(2);
    expectedQB.maxExpansions(30);
    String json = "{\n" +
            " \"match\" : {\n" +
            " \"message\" : {\n" +
            " \"query\" : \"to be or not to be\",\n" +
            " \"type\" : \"phrase_prefix\",\n" +
            " \"operator\" : \"OR\",\n" +
            " \"slop\" : 2,\n" +
            " \"prefix_length\" : 0,\n" +
            " \"max_expansions\" : 30,\n" +
            " \"fuzzy_transpositions\" : true,\n" +
            " \"lenient\" : false,\n" +
            " \"zero_terms_query\" : \"NONE\",\n" +
            " \"boost\" : 1.0\n" +
            " }\n" +
            " }\n" +
            "}";
    // ParseFieldMatcher.EMPTY allows the deprecated fields to parse
    MatchQueryBuilder qb = (MatchQueryBuilder) parseQuery(json, ParseFieldMatcher.EMPTY);
    checkGeneratedJson(json, qb);
    assertEquals(json, expectedQB, qb);
    assertSerialization(qb);
    assertWarnings("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]",
            "Deprecated field [slop] used, replaced by [match_phrase query]");
}
/**
 * Same as {@code testLegacyMatchPhrasePrefixQuery} but for the deprecated
 * {@code type: phrase} form: it must parse to an equivalent builder and emit
 * the deprecation warnings for {@code type} and {@code slop}.
 */
public void testLegacyMatchPhraseQuery() throws IOException {
    MatchQueryBuilder expectedQB = new MatchQueryBuilder("message", "to be or not to be");
    expectedQB.type(Type.PHRASE);
    expectedQB.slop(2);
    String json = "{\n" +
            " \"match\" : {\n" +
            " \"message\" : {\n" +
            " \"query\" : \"to be or not to be\",\n" +
            " \"type\" : \"phrase\",\n" +
            " \"operator\" : \"OR\",\n" +
            " \"slop\" : 2,\n" +
            " \"prefix_length\" : 0,\n" +
            " \"max_expansions\" : 50,\n" +
            " \"fuzzy_transpositions\" : true,\n" +
            " \"lenient\" : false,\n" +
            " \"zero_terms_query\" : \"NONE\",\n" +
            " \"boost\" : 1.0\n" +
            " }\n" +
            " }\n" +
            "}";
    // ParseFieldMatcher.EMPTY allows the deprecated fields to parse
    MatchQueryBuilder qb = (MatchQueryBuilder) parseQuery(json, ParseFieldMatcher.EMPTY);
    checkGeneratedJson(json, qb);
    assertEquals(json, expectedQB, qb);
    assertSerialization(qb);
    assertWarnings("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]",
            "Deprecated field [slop] used, replaced by [match_phrase query]");
}
/**
 * Fuzziness on a numeric field must be rejected with a clear error, on both
 * code paths (with and without an explicit analyzer) — unless {@code lenient}
 * is set, in which case the query builds without error.
 */
public void testFuzzinessOnNonStringField() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    MatchQueryBuilder query = new MatchQueryBuilder(INT_FIELD_NAME, 42);
    query.fuzziness(randomFuzziness(INT_FIELD_NAME));
    QueryShardContext context = createShardContext();
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> query.toQuery(context));
    assertEquals("Can only use fuzzy queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
            e.getMessage());
    query.analyzer("keyword"); // triggers a different code path
    e = expectThrows(IllegalArgumentException.class,
            () -> query.toQuery(context));
    assertEquals("Can only use fuzzy queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
            e.getMessage());
    // lenient mode swallows the incompatibility on both code paths
    query.lenient(true);
    query.toQuery(context); // no exception
    query.analyzer(null);
    query.toQuery(context); // no exception
}
/**
 * Matching on a geo_point field must fail with a pointer to the dedicated geo
 * queries, unless {@code lenient} is set.
 */
public void testExactOnUnsupportedField() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    MatchQueryBuilder query = new MatchQueryBuilder(GEO_POINT_FIELD_NAME, "2,3");
    QueryShardContext context = createShardContext();
    QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(context));
    assertEquals("Geo fields do not support exact searching, use dedicated geo queries instead: [mapped_geo_point]", e.getMessage());
    // lenient mode suppresses the error
    query.lenient(true);
    query.toQuery(context); // no exception
}
/**
 * A match query accepts exactly one field; both the object form and the
 * shortened form must reject a second field with a {@code ParsingException}.
 */
public void testParseFailsWithMultipleFields() throws IOException {
    String json = "{\n" +
            " \"match\" : {\n" +
            " \"message1\" : {\n" +
            " \"query\" : \"this is a test\"\n" +
            " },\n" +
            " \"message2\" : {\n" +
            " \"query\" : \"this is a test\"\n" +
            " }\n" +
            " }\n" +
            "}";
    ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
    assertEquals("[match] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage());
    // same restriction applies to the shortened "field": "value" syntax
    String shortJson = "{\n" +
            " \"match\" : {\n" +
            " \"message1\" : \"this is a test\",\n" +
            " \"message2\" : \"this is a test\"\n" +
            " }\n" +
            "}";
    e = expectThrows(ParsingException.class, () -> parseQuery(shortJson));
    assertEquals("[match] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage());
}
/**
 * The query value must be a single scalar; an array of terms is rejected in
 * both the object form and the shortened form (the two forms fail with
 * different exception types).
 */
public void testParseFailsWithTermsArray() throws Exception {
    String json1 = "{\n" +
            " \"match\" : {\n" +
            " \"message1\" : {\n" +
            " \"query\" : [\"term1\", \"term2\"]\n" +
            " }\n" +
            " }\n" +
            "}";
    expectThrows(ParsingException.class, () -> parseQuery(json1));
    String json2 = "{\n" +
            " \"match\" : {\n" +
            " \"message1\" : [\"term1\", \"term2\"]\n" +
            " }\n" +
            "}";
    expectThrows(IllegalStateException.class, () -> parseQuery(json2));
}
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
    // Registers an extra type "t_boost" with a text field "string_boost" carrying
    // an index-time boost of 4; used by testMatchPhrasePrefixWithBoost below.
    mapperService.merge("t_boost", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("t_boost",
            "string_boost", "type=text,boost=4").string()), MapperService.MergeReason.MAPPING_UPDATE, false);
}
/**
 * Field-level boost handling for match_phrase_prefix: the boost (4, from the
 * "string_boost" mapping set up in initializeAdditionalMappings) wraps a
 * single-term query in a BoostQuery, but is ignored for a multi-term phrase.
 */
public void testMatchPhrasePrefixWithBoost() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    QueryShardContext context = createShardContext();
    assumeTrue("test runs only when the index version is on or after V_5_0_0_alpha1",
            context.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1));
    {
        // field boost is applied on a single term query
        MatchPhrasePrefixQueryBuilder builder = new MatchPhrasePrefixQueryBuilder("string_boost", "foo");
        Query query = builder.toQuery(context);
        assertThat(query, instanceOf(BoostQuery.class));
        assertThat(((BoostQuery) query).getBoost(), equalTo(4f));
        Query innerQuery = ((BoostQuery) query).getQuery();
        assertThat(innerQuery, instanceOf(MultiPhrasePrefixQuery.class));
    }
    {
        // field boost is ignored on phrase query
        MatchPhrasePrefixQueryBuilder builder = new MatchPhrasePrefixQueryBuilder("string_boost", "foo bar");
        Query query = builder.toQuery(context);
        assertThat(query, instanceOf(MultiPhrasePrefixQuery.class));
    }
}
}
| apache-2.0 |
apache/solr | solr/solrj/src/java/org/apache/solr/client/solrj/io/eq/package-info.java | 899 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Equalitors for the Streaming Aggregation API */
package org.apache.solr.client.solrj.io.eq;
| apache-2.0 |
max3163/jmeter | test/src/org/apache/jmeter/assertions/ResponseAssertionTest.java | 11266 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.assertions;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.threads.JMeterContext;
import org.apache.jmeter.threads.JMeterContextService;
import org.apache.jmeter.threads.JMeterVariables;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for {@link ResponseAssertion}, covering the different test fields
 * (URL, response/request headers, response data, response code) combined with
 * the equals/contains/substring match types and the NOT/OR modifiers, plus a
 * multi-threaded regression test for thread safety.
 */
public class ResponseAssertionTest {

    public ResponseAssertionTest() {
    }

    private ResponseAssertion assertion;
    private SampleResult sample;
    private AssertionResult result;

    /**
     * Builds a fresh assertion and a canned sample (fixed response data, URL,
     * response code and headers) before each test.
     */
    @Before
    public void setUp() throws MalformedURLException {
        JMeterContext jmctx = JMeterContextService.getContext();
        assertion = new ResponseAssertion();
        assertion.setThreadContext(jmctx);
        sample = new SampleResult();
        JMeterVariables vars = new JMeterVariables();
        jmctx.setVariables(vars);
        jmctx.setPreviousResult(sample);
        sample.setResponseData("response Data\nline 2\n\nEOF", null);
        sample.setURL(new URL("http://localhost/Sampler/Data/"));
        sample.setResponseCode("401");
        sample.setResponseHeaders("X-Header: abcd");
        sample.setRequestHeaders("X-reqHeader: cdef");
    }

    @Test
    public void testResponseAssertionEquals() throws Exception {
        // EQUALS against the URL: neither test string matches -> failure
        assertion.unsetNotType();
        assertion.setToEqualsType();
        assertion.setTestFieldURL();
        assertion.addTestString("Sampler Label");
        assertion.addTestString("Sampler labelx");
        result = assertion.getResult(sample);
        assertFailed();

        // NOT EQUALS: strings still don't match, so the assertion passes
        assertion.setToNotType();
        assertion.clearTestStrings();
        assertion.addTestString("Sampler LabeL");
        assertion.addTestString("Sampler Labelx");
        result = assertion.getResult(sample);
        assertPassed();
    }

    @Test
    public void testResponseAssertionResponseHeaders() throws Exception {
        // EQUALS against the response headers: one matching and one
        // non-matching string -> failure (all strings must match)
        assertion.unsetNotType();
        assertion.setToEqualsType();
        assertion.setTestFieldResponseHeaders();
        assertion.addTestString("X-Header: abcd");
        assertion.addTestString("X-Header: abcdx");
        result = assertion.getResult(sample);
        assertFailed();

        assertion.clearTestStrings();
        assertion.addTestString("X-Header: abcd");
        result = assertion.getResult(sample);
        assertPassed();
    }

    @Test
    public void testResponseAssertionRequestHeaders() throws Exception {
        // Same as the response-header test, but scoped to the request headers
        assertion.unsetNotType();
        assertion.setToEqualsType();
        assertion.setTestFieldRequestHeaders();
        assertion.addTestString("X-reqHeader: cdef");
        assertion.addTestString("X-reqHeader: cdefx");
        result = assertion.getResult(sample);
        assertFailed();

        assertion.clearTestStrings();
        assertion.addTestString("X-reqHeader: cdef");
        result = assertion.getResult(sample);
        assertPassed();
    }

    @Test
    public void testResponseAssertionContains() throws Exception {
        // CONTAINS is a regex match; " x" does not occur in the URL -> failure
        assertion.unsetNotType();
        assertion.setToContainsType();
        assertion.setTestFieldURL();
        assertion.addTestString("Sampler");
        assertion.addTestString("Label");
        assertion.addTestString(" x");
        result = assertion.getResult(sample);
        assertFailed();

        // NOT CONTAINS with some strings that DO occur -> still a failure
        assertion.setToNotType();
        result = assertion.getResult(sample);
        assertFailed();

        assertion.clearTestStrings();
        assertion.addTestString("r l");
        result = assertion.getResult(sample);
        assertPassed();

        assertion.unsetNotType();
        assertion.setTestFieldResponseData();
        assertion.clearTestStrings();
        assertion.addTestString("line 2");
        result = assertion.getResult(sample);
        assertPassed();

        // without OR, every test string must match
        assertion.clearTestStrings();
        assertion.addTestString("line 2");
        assertion.addTestString("NOTINSAMPLEDATA");
        result = assertion.getResult(sample);
        assertFailed();

        // with OR, a single matching string is enough, regardless of order
        assertion.clearTestStrings();
        assertion.setToOrType();
        assertion.addTestString("line 2");
        assertion.addTestString("NOTINSAMPLEDATA");
        result = assertion.getResult(sample);
        assertPassed();
        assertion.unsetOrType();

        assertion.clearTestStrings();
        assertion.setToOrType();
        assertion.addTestString("NOTINSAMPLEDATA");
        assertion.addTestString("line 2");
        result = assertion.getResult(sample);
        assertPassed();
        assertion.unsetOrType();

        // OR with no matching string at all -> failure
        assertion.clearTestStrings();
        assertion.setToOrType();
        assertion.addTestString("NOTINSAMPLEDATA");
        assertion.addTestString("NOTINSAMPLEDATA2");
        result = assertion.getResult(sample);
        assertFailed();
        assertion.unsetOrType();

        // OR combined with NOT: at least one string must be absent
        assertion.clearTestStrings();
        assertion.setToOrType();
        assertion.setToNotType();
        assertion.addTestString("line 2");
        assertion.addTestString("NOTINSAMPLEDATA2");
        result = assertion.getResult(sample);
        assertPassed();
        assertion.unsetOrType();
        assertion.unsetNotType();

        assertion.clearTestStrings();
        assertion.setToNotType();
        assertion.addTestString("NOTINSAMPLEDATA");
        result = assertion.getResult(sample);
        assertPassed();
        assertion.unsetNotType();

        // CONTAINS patterns are full regular expressions
        assertion.clearTestStrings();
        assertion.addTestString("(?s)line \\d+.*EOF");
        result = assertion.getResult(sample);
        assertPassed();

        assertion.setTestFieldResponseCode();
        assertion.clearTestStrings();
        assertion.addTestString("401");
        result = assertion.getResult(sample);
        assertPassed();
    }

    // Bug 46831 - check can match dollars
    @Test
    public void testResponseAssertionContainsDollar() throws Exception {
        sample.setResponseData("value=\"${ID}\" Group$ctl00$drpEmails", null);
        assertion.unsetNotType();
        assertion.setToContainsType();
        assertion.setTestFieldResponseData();
        // '$' and '{' are regex metacharacters and must be escaped in the pattern
        assertion.addTestString("value=\"\\${ID}\" Group\\$ctl00\\$drpEmails");
        result = assertion.getResult(sample);
        assertPassed();
    }

    @Test
    public void testResponseAssertionSubstring() throws Exception {
        // SUBSTRING is a literal match: "+(" would be an invalid regex but is
        // fine here, and simply does not occur in the URL -> failure
        assertion.unsetNotType();
        assertion.setToSubstringType();
        assertion.setTestFieldURL();
        assertion.addTestString("Sampler");
        assertion.addTestString("Label");
        assertion.addTestString("+(");
        result = assertion.getResult(sample);
        assertFailed();

        assertion.setToNotType();
        result = assertion.getResult(sample);
        assertFailed();

        assertion.clearTestStrings();
        assertion.addTestString("r l");
        result = assertion.getResult(sample);
        assertPassed();

        assertion.unsetNotType();
        assertion.setTestFieldResponseData();
        assertion.clearTestStrings();
        assertion.addTestString("line 2");
        result = assertion.getResult(sample);
        assertPassed();

        // substring matching spans line boundaries
        assertion.clearTestStrings();
        assertion.addTestString("line 2\n\nEOF");
        result = assertion.getResult(sample);
        assertPassed();

        assertion.setTestFieldResponseCode();
        assertion.clearTestStrings();
        assertion.addTestString("401");
        result = assertion.getResult(sample);
        assertPassed();
    }

    //TODO - need a lot more tests

    /** Asserts that the last {@link #result} is a clean pass. */
    private void assertPassed() throws Exception {
        assertNull(result.getFailureMessage(), result.getFailureMessage());
        assertFalse("Not expecting error: " + result.getFailureMessage(), result.isError());
        assertFalse("Not expecting failure", result.isFailure());
    }

    /** Asserts that the last {@link #result} is a genuine failure (not an error). */
    private void assertFailed() throws Exception {
        assertNotNull(result.getFailureMessage());
        assertFalse("Should not be: Response was null", "Response was null".equals(result.getFailureMessage()));
        assertFalse("Not expecting error: " + result.getFailureMessage(), result.isError());
        assertTrue("Expecting failure", result.isFailure());
    }

    // number of TestThread loop iterations that failed or errored
    private AtomicInteger failed;

    /**
     * Runs 100 threads that each evaluate the same (independently built)
     * assertion 100 times; no evaluation may fail or error.
     */
    @Test
    public void testThreadSafety() throws Exception {
        Thread[] threads = new Thread[100];
        CountDownLatch latch = new CountDownLatch(threads.length);
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new TestThread(latch);
        }
        failed = new AtomicInteger(0);
        for (Thread thread : threads) {
            thread.start();
        }
        latch.await();
        // JUnit convention: expected value first
        assertEquals(0, failed.get());
    }

    class TestThread extends Thread {
        static final String TEST_STRING = "DAbale arroz a la zorra el abad.";

        // Used to be 'dábale', but caused trouble on Gump. Reasons
        // unknown.
        static final String TEST_PATTERN = ".*A.*\\.";

        private final CountDownLatch latch;

        public TestThread(CountDownLatch latch) {
            this.latch = latch;
        }

        @Override
        public void run() {
            try {
                ResponseAssertion assertion = new ResponseAssertion();
                assertion.setTestFieldResponseData();
                assertion.setToContainsType();
                assertion.addTestString(TEST_PATTERN);
                SampleResult response = new SampleResult();
                response.setResponseData(TEST_STRING, null);
                for (int i = 0; i < 100; i++) {
                    AssertionResult result;
                    result = assertion.getResult(response);
                    if (result.isFailure() || result.isError()) {
                        failed.incrementAndGet();
                    }
                }
            } finally {
                // always count down so testThreadSafety cannot hang on an exception
                latch.countDown();
            }
        }
    }
}
| apache-2.0 |
mythguided/hydra | hydra-main/src/main/java/com/addthis/hydra/util/LogUtil.java | 2383 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.addthis.hydra.util;
import com.addthis.basis.util.RollingLog;
import com.addthis.basis.util.LessStrings;
import com.addthis.codec.config.Configs;
import com.addthis.hydra.task.output.TaskDataOutput;
import com.google.common.base.Throwables;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Helpers for writing structured key/value events, either to a
 * {@link RollingLog} or to a bundle-based {@link TaskDataOutput} configured
 * under {@code hydra.log.events}.
 */
public class LogUtil {

    private static final Logger log = LoggerFactory.getLogger(LogUtil.class);

    // timestamp format used to stamp each event line, e.g. <170301-120000.000>
    private static final DateTimeFormatter format = DateTimeFormat.forPattern("yyMMdd-HHmmss.SSS");

    /**
     * Writes the given key/value data to the rolling event log, adding a
     * {@code timestamp} field. If no event log is configured the data is sent
     * to the supplied fallback logger at WARN level instead.
     *
     * @param eventLog rolling event log, may be null
     * @param log      backup basic logger, only used if eventLog is null
     * @param output   the KV data to output
     */
    public static void log(RollingLog eventLog, Logger log, StringMapHelper output) {
        if (eventLog == null) {
            // parameterized form avoids eager string concatenation;
            // note: despite the message text, this goes to the logger, not stdout
            log.warn("{}----> EventLog was null redirecting to stdout", output);
        } else {
            String timestamp = LessStrings.cat("<", format.print(System.currentTimeMillis()), ">");
            output.add("timestamp", timestamp);
            eventLog.writeLine(output.createKVPairs().toString());
        }
    }

    /**
     * Builds and initializes a {@link TaskDataOutput} from the config section
     * {@code hydra.log.events.<name>}.
     *
     * @param name sub-config name under hydra.log.events
     * @return the initialized output
     */
    public static TaskDataOutput newBundleOutputFromConfig(String name) {
        Config outputConfig = ConfigFactory.load().getConfig("hydra.log.events").getConfig(name);
        try {
            TaskDataOutput output = Configs.decodeObject(TaskDataOutput.class, outputConfig);
            output.init();
            return output;
        } catch (Throwable ex) {
            log.error("error while trying to create bundle output named {}", name, ex);
            // propagate() is deprecated in newer Guava, but kept here to preserve
            // the rethrow semantics (RuntimeException/Error pass through as-is)
            throw Throwables.propagate(ex);
        }
    }
}
| apache-2.0 |
fengshao0907/joda-beans | src/test/java/org/joda/beans/TestImmutableMinimal.java | 1368 | /*
* Copyright 2001-2014 Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joda.beans;
import static org.testng.Assert.assertEquals;
import org.joda.beans.gen.ImmMinimal;
import org.joda.beans.gen.ImmPerson;
import org.testng.annotations.Test;
/**
* Test buildScope=private, style=minimal.
*/
@Test
public class TestImmutableMinimal {

    public void test_builder() {
        // Build via the meta-bean string setters plus one typed set(), then
        // verify the built bean round-trips the values.
        ImmMinimal built = ImmMinimal.meta().builder()
                .setString("number", "12")
                .setString("street", "Park Lane")
                .setString("city", "Smallville")
                .set("owner", ImmPerson.builder().forename("John").surname("Doggett").build())
                .build();
        assertEquals(built.getStreet(), "Park Lane");
        assertEquals(built.getCity(), "Smallville");
    }

}
| apache-2.0 |
kiuby88/brooklyn-tosca | brooklyn-tosca-transformer/src/main/java/io/cloudsoft/tosca/a4c/brooklyn/ApplicationSpecsBuilder.java | 720 | package io.cloudsoft.tosca.a4c.brooklyn;
import java.util.Map;
import org.apache.brooklyn.api.entity.Application;
import org.apache.brooklyn.api.entity.EntitySpec;
/**
* An ApplicationSpecsBuilder is used to create Brooklyn EntitySpecs from A Tosca Application. Once the specs have been
* created, it is used to convert Tosca Policies into Brooklyn Policies
* @param <A> The type of ToscaApplication
*/
public interface ApplicationSpecsBuilder<A extends ToscaApplication> {

    // Config key naming the originating TOSCA template; presumably set on the
    // generated specs by implementations — TODO confirm against implementations.
    String TOSCA_TEMPLATE_ID = "tosca.template.id";

    /**
     * Creates Brooklyn {@link EntitySpec}s from the given TOSCA application.
     *
     * @param toscaApplication the application to convert
     * @return specs keyed by identifier (likely node/template id — confirm with callers)
     */
    Map<String, EntitySpec<?>> getSpecs(A toscaApplication);

    /**
     * Converts the TOSCA policies of {@code toscaApplication} into Brooklyn
     * policies on the given root spec, using the specs previously produced by
     * {@link #getSpecs(ToscaApplication)}.
     */
    void addPolicies(EntitySpec<? extends Application> rootSpec, A toscaApplication, Map<String, EntitySpec<?>> specs);
}
| apache-2.0 |
flofreud/aws-sdk-java | aws-java-sdk-redshift/src/main/java/com/amazonaws/services/redshift/model/transform/DescribeSnapshotCopyGrantsResultStaxUnmarshaller.java | 3142 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.redshift.model.transform;
import java.util.Map;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Map.Entry;
import javax.xml.stream.events.XMLEvent;
import com.amazonaws.services.redshift.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.MapEntry;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* DescribeSnapshotCopyGrantsResult StAX Unmarshaller
*/
public class DescribeSnapshotCopyGrantsResultStaxUnmarshaller implements
        Unmarshaller<DescribeSnapshotCopyGrantsResult, StaxUnmarshallerContext> {

    /**
     * Walks the StAX event stream and populates a
     * {@link DescribeSnapshotCopyGrantsResult} from the {@code Marker} and
     * {@code SnapshotCopyGrants/SnapshotCopyGrant} elements, returning when the
     * document ends or the parser leaves the result element's depth.
     */
    public DescribeSnapshotCopyGrantsResult unmarshall(
            StaxUnmarshallerContext context) throws Exception {
        DescribeSnapshotCopyGrantsResult describeSnapshotCopyGrantsResult = new DescribeSnapshotCopyGrantsResult();
        int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;

        // at document start the payload elements sit two levels deeper
        if (context.isStartOfDocument())
            targetDepth += 2;

        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            if (xmlEvent.isEndDocument())
                return describeSnapshotCopyGrantsResult;

            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
                if (context.testExpression("Marker", targetDepth)) {
                    describeSnapshotCopyGrantsResult
                            .setMarker(StringStaxUnmarshaller.getInstance()
                                    .unmarshall(context));
                    continue;
                }

                if (context.testExpression(
                        "SnapshotCopyGrants/SnapshotCopyGrant", targetDepth)) {
                    // accumulate repeated grant elements
                    describeSnapshotCopyGrantsResult
                            .withSnapshotCopyGrants(SnapshotCopyGrantStaxUnmarshaller
                                    .getInstance().unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                // stepping above the original depth means the result element closed
                if (context.getCurrentDepth() < originalDepth) {
                    return describeSnapshotCopyGrantsResult;
                }
            }
        }
    }

    // Eager initialization: the unmarshaller is stateless, so this is safe and
    // removes the unsynchronized lazy-init race of the previous code.
    private static final DescribeSnapshotCopyGrantsResultStaxUnmarshaller instance =
            new DescribeSnapshotCopyGrantsResultStaxUnmarshaller();

    /** @return the shared singleton unmarshaller */
    public static DescribeSnapshotCopyGrantsResultStaxUnmarshaller getInstance() {
        return instance;
    }
}
| apache-2.0 |
swift-lang/swift-k | cogkit/modules/karajan/src/org/globus/cog/karajan/compiled/nodes/EventWait.java | 4870 | /*
* Swift Parallel Scripting Language (http://swift-lang.org)
* Code from Java CoG Kit Project (see notice below) with modifications.
*
* Copyright 2005-2014 University of Chicago
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// ----------------------------------------------------------------------
//This code is developed as part of the Java CoG Kit project
//The terms of the license can be found at http://www.cogkit.org/license
//This message may not be removed or altered.
//----------------------------------------------------------------------
/*
* Created on Oct 14, 2004
*/
package org.globus.cog.karajan.compiled.nodes;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.List;
import k.rt.ConditionalYield;
import k.rt.ExecutionException;
import k.rt.FutureCounter;
import k.rt.Stack;
import k.thr.LWThread;
import k.thr.Yield;
import org.globus.cog.karajan.analyzer.ChannelRef;
import org.globus.cog.karajan.analyzer.Signature;
public class EventWait extends InternalFunction {
private ChannelRef<Object> c_vargs;
@Override
protected Signature getSignature() {
return new Signature(params("..."));
}
@Override
public synchronized void runBody(LWThread thr) {
int i = thr.checkSliceAndPopState(2);
Stack stack = thr.getStack();
try {
switch (i) {
case 0:
super.runBody(thr);
i++;
case 1:
waitForEvent(thr, stack);
break;
default:
}
}
catch (Yield y) {
y.getState().push(i, 2);
throw y;
}
}
private void waitForEvent(LWThread thr, Stack stack) {
k.rt.Channel<Object> vargs = c_vargs.get(stack);
FutureCounter counter = new FutureCounter(vargs.size());
for (Object arg : vargs) {
if (!(arg instanceof List)) {
throw new ExecutionException("Each argument must be a list");
}
List<?> list = (List<?>) arg;
if (list.size() != 3) {
throw new ExecutionException("Each argument must be a list containing 3 items");
}
try {
String ret = (String) list.get(0);
String type = (String) list.get(1);
Object source = list.get(2);
if ("java.awt.events.ActionEvent".equals(type)) {
addListener(source, "addActionListener", ActionListener.class, counter);
}
else if ("java.awt.events.WindowEvent".equals(type)) {
addListener(source, "addWindowListener", WindowListener.class, counter);
}
else {
throw new ExecutionException("Unknown event type: " + type);
}
}
catch (Exception e) {
throw new ExecutionException("Exception caught while adding listener", e);
}
}
throw new ConditionalYield(2, 3, counter);
}
/**
 * Reflectively registers an event listener on {@code source} by invoking
 * {@code methodName} (e.g. "addActionListener") with a {@link Listener} that
 * decrements {@code counter} when the event fires.
 *
 * @param source     the object to attach the listener to
 * @param methodName the registration method to invoke ("addActionListener", ...)
 * @param argType    the listener interface the registration method expects
 * @param counter    decremented by the listener when the event fires
 */
protected void addListener(Object source, String methodName, Class<?> argType, FutureCounter counter) {
    try {
        Method method = source.getClass().getMethod(methodName, new Class[] { argType });
        // Fix: register a Listener bound to the counter. The previous code passed
        // 'this', but EventWait implements neither ActionListener nor
        // WindowListener, so the reflective call would fail with an
        // IllegalArgumentException and the counter would never be decremented.
        method.invoke(source, new Object[] { new Listener(counter) });
    }
    catch (SecurityException e) {
        throw new ExecutionException("No access to " + methodName + " method", e);
    }
    catch (NoSuchMethodException e) {
        throw new ExecutionException("Object does not have a " + methodName + "("
                + argType.toString() + ") method", e);
    }
    catch (ExecutionException e) {
        throw new ExecutionException("Unsupported event type: " + argType.getName(), e);
    }
    catch (IllegalAccessException e) {
        throw new ExecutionException(
                "Cannot invoke " + methodName + " on " + source.toString(), e);
    }
    catch (InvocationTargetException e) {
        throw new ExecutionException(methodName + " threw an exception", e);
    }
}
/**
 * Adapter that decrements a FutureCounter when an action event fires or when a
 * window starts closing; every other window callback is a no-op.
 */
public static class Listener implements ActionListener, WindowListener {
    private FutureCounter counter;

    public Listener(FutureCounter counter) {
        this.counter = counter;
    }

    public synchronized void actionPerformed(ActionEvent event) {
        counter.dec();
    }

    public void windowClosing(WindowEvent event) {
        counter.dec();
    }

    public void windowActivated(WindowEvent event) {
        // not of interest
    }

    public void windowClosed(WindowEvent event) {
        // not of interest
    }

    public void windowDeactivated(WindowEvent event) {
        // not of interest
    }

    public void windowDeiconified(WindowEvent event) {
        // not of interest
    }

    public void windowIconified(WindowEvent event) {
        // not of interest
    }

    public void windowOpened(WindowEvent event) {
        // not of interest
    }
}
} | apache-2.0 |
schildbach/bitcoinj | core/src/main/java/org/bitcoinj/core/VersionMessage.java | 14614 | /*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.core;
import com.google.common.net.InetAddresses;
import javax.annotation.Nullable;
import java.io.IOException;
import java.io.OutputStream;
import java.math.BigInteger;
import java.net.InetAddress;
import java.nio.charset.StandardCharsets;
import java.util.Locale;
import java.util.Objects;
/**
* <p>A VersionMessage holds information exchanged during connection setup with another peer. Most of the fields are not
* particularly interesting. The subVer field, since BIP 14, acts as a User-Agent string would. You can and should
* append to or change the subVer for your own software so other implementations can identify it, and you can look at
* the subVer field received from other nodes to see what they are running.</p>
*
* <p>After creating yourself a VersionMessage, you can pass it to {@link PeerGroup#setVersionMessage(VersionMessage)}
* to ensure it will be used for each new connection.</p>
*
* <p>Instances of this class are not safe for use by multiple threads.</p>
*/
public class VersionMessage extends Message {
/** The version of this library release, as a string. */
public static final String BITCOINJ_VERSION = "0.16-SNAPSHOT";
/** The value that is prepended to the subVer field of this application. */
public static final String LIBRARY_SUBVER = "/bitcoinj:" + BITCOINJ_VERSION + "/";
/** A service bit that denotes whether the peer has a full copy of the block chain or not. */
public static final int NODE_NETWORK = 1 << 0;
/** A service bit that denotes whether the peer supports the getutxos message or not. */
public static final int NODE_GETUTXOS = 1 << 1;
/** A service bit that denotes whether the peer supports BIP37 bloom filters or not. The service bit is defined in BIP111. */
public static final int NODE_BLOOM = 1 << 2;
/** Indicates that a node can be asked for blocks and transactions including witness data. */
public static final int NODE_WITNESS = 1 << 3;
/** A service bit that denotes whether the peer has at least the last two days worth of blockchain (BIP159). */
public static final int NODE_NETWORK_LIMITED = 1 << 10;
/** A service bit used by Bitcoin-ABC to announce Bitcoin Cash nodes. */
public static final int NODE_BITCOIN_CASH = 1 << 5;
/**
* The version number of the protocol spoken.
*/
public int clientVersion;
/**
* Flags defining what optional services are supported.
*/
public long localServices;
/**
* What the other side believes the current time to be, in seconds.
*/
public long time;
/**
* The network address of the node receiving this message.
*/
public PeerAddress receivingAddr;
/**
* The network address of the node emitting this message. Not used.
*/
public PeerAddress fromAddr;
/**
* User-Agent as defined in <a href="https://github.com/bitcoin/bips/blob/master/bip-0014.mediawiki">BIP 14</a>.
* Bitcoin Core sets it to something like "/Satoshi:0.9.1/".
*/
public String subVer;
/**
* How many blocks are in the chain, according to the other side.
*/
public long bestHeight;
/**
* Whether or not to relay tx invs before a filter is received.
* See <a href="https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki#extensions-to-existing-messages">BIP 37</a>.
*/
public boolean relayTxesBeforeFilter;
/**
 * Deserializes a version message from its wire-format payload, starting at offset 0.
 *
 * @throws ProtocolException if the payload cannot be parsed
 */
public VersionMessage(NetworkParameters params, byte[] payload) throws ProtocolException {
    super(params, payload, 0);
}
// It doesn't really make sense to ever lazily parse a version message or to retain the backing bytes.
// If you're receiving this on the wire you need to check the protocol version and it will never need to be sent
// back down the wire.
/**
 * Constructs a version message announcing the current library protocol version,
 * no optional services, the current wall-clock time, and the given chain height.
 *
 * @param newBestHeight the height of this node's best chain, advertised to the peer
 */
public VersionMessage(NetworkParameters params, int newBestHeight) {
    super(params);
    clientVersion = params.getProtocolVersionNum(NetworkParameters.ProtocolVersion.CURRENT);
    localServices = 0;
    time = System.currentTimeMillis() / 1000;
    // Note that the Bitcoin Core doesn't do anything with these, and finding out your own external IP address
    // is kind of tricky anyway, so we just put nonsense here for now.
    InetAddress localhost = InetAddresses.forString("127.0.0.1");
    receivingAddr = new PeerAddress(params, localhost, params.getPort(), clientVersion, BigInteger.ZERO);
    receivingAddr.setParent(this);
    fromAddr = new PeerAddress(params, localhost, params.getPort(), clientVersion, BigInteger.ZERO);
    fromAddr.setParent(this);
    subVer = LIBRARY_SUBVER;
    bestHeight = newBestHeight;
    relayTxesBeforeFilter = true;
    // Precompute the serialized size. The subVer contribution must be measured in
    // UTF-8 bytes — exactly what bitcoinSerializeToStream() writes — not in chars;
    // the two only coincide for pure-ASCII user agents, so using String.length()
    // here would under-count for any non-ASCII subVer.
    int subVerByteLength = subVer.getBytes(StandardCharsets.UTF_8).length;
    length = 4 + 8 + 8 + receivingAddr.getMessageSize() + fromAddr.getMessageSize() + 8
            + VarInt.sizeOf(subVerByteLength) + subVerByteLength + 4 + 1;
}
@Override
protected void parse() throws ProtocolException {
    // Wire layout: int32 version, uint64 services, uint64 timestamp, addr_recv,
    // then — only for protocol >= 106 — addr_from, uint64 nonce, var_str subVer,
    // int32 start height, and (for bloom-capable versions) the BIP 37 relay flag.
    clientVersion = (int) readUint32();
    localServices = readUint64().longValue();
    time = readUint64().longValue();
    receivingAddr = new PeerAddress(params, payload, cursor, 0, this, serializer);
    cursor += receivingAddr.getMessageSize();
    if (clientVersion >= 106) {
        fromAddr = new PeerAddress(params, payload, cursor, 0, this, serializer);
        cursor += fromAddr.getMessageSize();
        // uint64 localHostNonce (random data)
        // We don't care about the localhost nonce. It's used to detect connecting back to yourself in cases where
        // there are NATs and proxies in the way. However we don't listen for inbound connections so it's
        // irrelevant.
        readUint64();
        // string subVer (currently "")
        subVer = readStr();
        // int bestHeight (size of known block chain).
        bestHeight = readUint32();
        if (clientVersion >= params.getProtocolVersionNum(NetworkParameters.ProtocolVersion.BLOOM_FILTER)) {
            // BIP 37: any non-zero byte means "relay transactions before a filter is set".
            relayTxesBeforeFilter = readBytes(1)[0] != 0;
        } else {
            relayTxesBeforeFilter = true;
        }
    } else {
        // Default values for flags which may not be sent by old nodes.
        // Note: fromAddr stays null in this branch — callers must handle that.
        fromAddr = null;
        subVer = "";
        bestHeight = 0;
        relayTxesBeforeFilter = true;
    }
    length = cursor - offset;
}
@Override
public void bitcoinSerializeToStream(OutputStream buf) throws IOException {
    // 64-bit fields (services, time) are written as two little-endian 32-bit
    // halves: low word first, then the high word.
    Utils.uint32ToByteStreamLE(clientVersion, buf);
    Utils.uint32ToByteStreamLE(localServices, buf);
    Utils.uint32ToByteStreamLE(localServices >> 32, buf);
    Utils.uint32ToByteStreamLE(time, buf);
    Utils.uint32ToByteStreamLE(time >> 32, buf);
    receivingAddr.bitcoinSerializeToStream(buf);
    if (clientVersion >= 106) {
        fromAddr.bitcoinSerializeToStream(buf);
        // Next up is the "local host nonce", this is to detect the case of connecting
        // back to yourself. We don't care about this as we won't be accepting inbound
        // connections.
        Utils.uint32ToByteStreamLE(0, buf);
        Utils.uint32ToByteStreamLE(0, buf);
        // Now comes subVer: a VarInt byte length followed by the UTF-8 bytes.
        byte[] subVerBytes = subVer.getBytes(StandardCharsets.UTF_8);
        buf.write(new VarInt(subVerBytes.length).encode());
        buf.write(subVerBytes);
        // Size of known block chain.
        Utils.uint32ToByteStreamLE(bestHeight, buf);
        if (clientVersion >= params.getProtocolVersionNum(NetworkParameters.ProtocolVersion.BLOOM_FILTER)) {
            // BIP 37 flag: 1 = relay transactions before a bloom filter is set.
            buf.write(relayTxesBeforeFilter ? 1 : 0);
        }
    }
}
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    VersionMessage other = (VersionMessage) o;
    // fromAddr is null for messages parsed from pre-106 peers (see parse()), so
    // it must be compared null-safely; the old direct equals() call would NPE.
    // subVer and receivingAddr are compared the same way for uniformity.
    return other.bestHeight == bestHeight &&
            other.clientVersion == clientVersion &&
            other.localServices == localServices &&
            other.time == time &&
            Objects.equals(other.subVer, subVer) &&
            Objects.equals(other.receivingAddr, receivingAddr) &&
            Objects.equals(other.fromAddr, fromAddr) &&
            other.relayTxesBeforeFilter == relayTxesBeforeFilter;
}
@Override
public int hashCode() {
    // Consistent with equals(): hashes the same set of fields. Objects.hash
    // tolerates the null fromAddr produced when parsing pre-106 peers.
    return Objects.hash(bestHeight, clientVersion, localServices,
            time, subVer, receivingAddr, fromAddr, relayTxesBeforeFilter);
}
@Override
public String toString() {
    // One "label: value" line per field, preceded by a leading newline.
    final StringBuilder s = new StringBuilder("\n");
    s.append("client version: ").append(clientVersion).append("\n");
    s.append("local services: ").append(localServices).append("\n");
    s.append("time: ").append(time).append("\n");
    s.append("receiving addr: ").append(receivingAddr).append("\n");
    s.append("from addr: ").append(fromAddr).append("\n");
    s.append("sub version: ").append(subVer).append("\n");
    s.append("best height: ").append(bestHeight).append("\n");
    // The wire field is "relay before filter"; we print its negation as "delay".
    s.append("delay tx relay: ").append(!relayTxesBeforeFilter).append("\n");
    return s.toString();
}
/**
 * Returns a shallow copy of this message: receivingAddr, fromAddr and subVer are
 * shared references, not cloned.
 */
public VersionMessage duplicate() {
    // NOTE(review): bestHeight is narrowed to int here; heights above
    // Integer.MAX_VALUE would be truncated — confirm this is acceptable.
    VersionMessage v = new VersionMessage(params, (int) bestHeight);
    v.clientVersion = clientVersion;
    v.localServices = localServices;
    v.time = time;
    v.receivingAddr = receivingAddr;
    v.fromAddr = fromAddr;
    v.subVer = subVer;
    v.relayTxesBeforeFilter = relayTxesBeforeFilter;
    return v;
}
/**
* <p>Appends the given user-agent information to the subVer field. The subVer is composed of a series of
* name:version pairs separated by slashes in the form of a path. For example a typical subVer field for bitcoinj
* users might look like "/bitcoinj:0.13/MultiBit:1.2/" where libraries come further to the left.</p>
*
* <p>There can be as many components as you feel a need for, and the version string can be anything, but it is
* recommended to use A.B.C where A = major, B = minor and C = revision for software releases, and dates for
* auto-generated source repository snapshots. A valid subVer begins and ends with a slash, therefore name
* and version are not allowed to contain such characters.</p>
*
* <p>Anything put in the "comments" field will appear in brackets and may be used for platform info, or anything
* else. For example, calling {@code appendToSubVer("MultiBit", "1.0", "Windows")} will result in a subVer being
* set of "/bitcoinj:1.0/MultiBit:1.0(Windows)/". Therefore the / ( and ) characters are reserved in all these
* components. If you don't want to add a comment (recommended), pass null.</p>
*
* <p>See <a href="https://github.com/bitcoin/bips/blob/master/bip-0014.mediawiki">BIP 14</a> for more information.</p>
*
* @param comments Optional (can be null) platform or other node specific information.
* @throws IllegalArgumentException if name, version or comments contains invalid characters.
*/
public void appendToSubVer(String name, String version, @Nullable String comments) {
    // Validate each component for the reserved BIP 14 characters before building
    // the "name:version" (optionally "(comments)") path element.
    checkSubVerComponent(name);
    checkSubVerComponent(version);
    final String component;
    if (comments == null) {
        component = String.format(Locale.US, "%s:%s/", name, version);
    } else {
        checkSubVerComponent(comments);
        component = String.format(Locale.US, "%s:%s(%s)/", name, version, comments);
    }
    subVer = subVer.concat(component);
}
/**
 * Rejects subVer components containing characters reserved by BIP 14:
 * '/' delimits components and '(' / ')' delimit comments.
 *
 * @throws IllegalArgumentException if the component contains a reserved character
 */
private static void checkSubVerComponent(String component) {
    // Fix: the old message always said "name contains invalid characters", which
    // was misleading when validating the version or comments component; include
    // the offending value instead.
    if (component.contains("/") || component.contains("(") || component.contains(")"))
        throw new IllegalArgumentException("subVer component contains invalid characters: " + component);
}
/**
 * Returns true if the clientVersion field is {@link NetworkParameters.ProtocolVersion#PONG} or higher.
 * If it is then {@link Peer#ping()} is usable.
 */
public boolean isPingPongSupported() {
    final int pongVersion = params.getProtocolVersionNum(NetworkParameters.ProtocolVersion.PONG);
    return clientVersion >= pongVersion;
}
/**
 * Returns true if the peer supports bloom filtering according to BIP37 and BIP111.
 */
public boolean isBloomFilteringSupported() {
    final int bloomVersion = params.getProtocolVersionNum(NetworkParameters.ProtocolVersion.BLOOM_FILTER);
    final int bip111Version = params.getProtocolVersionNum(NetworkParameters.ProtocolVersion.BLOOM_FILTER_BIP111);
    // Before BIP111, bloom support was implied by the protocol version alone.
    if (clientVersion >= bloomVersion && clientVersion < bip111Version)
        return true;
    // From BIP111 onwards the NODE_BLOOM service bit must be set explicitly.
    return (localServices & NODE_BLOOM) == NODE_BLOOM;
}
/** Returns true if the protocol version and service bits both indicate support for the getutxos message. */
public boolean isGetUTXOsSupported() {
    final boolean versionOk = clientVersion >= GetUTXOsMessage.MIN_PROTOCOL_VERSION;
    // NODE_GETUTXOS is a single bit, so a non-zero mask means it is set.
    final boolean serviceBitSet = (localServices & NODE_GETUTXOS) != 0;
    return versionOk && serviceBitSet;
}
/** Returns true if a peer can be asked for blocks and transactions including witness data. */
public boolean isWitnessSupported() {
    // NODE_WITNESS is a single bit, so a non-zero mask means it is set.
    return (localServices & NODE_WITNESS) != 0;
}
/**
 * Returns true if the version message indicates the sender has a full copy of the block chain, or false if it's
 * running in client mode (only has the headers).
 */
public boolean hasBlockChain() {
    // NODE_NETWORK is a single bit, so a non-zero mask means it is set.
    return (localServices & NODE_NETWORK) != 0;
}
/** Returns true if the peer has at least the last two days worth of blockchain (BIP159). */
public boolean hasLimitedBlockChain() {
    // A full chain trivially satisfies the "limited chain" requirement.
    if (hasBlockChain())
        return true;
    return (localServices & NODE_NETWORK_LIMITED) != 0;
}
}
| apache-2.0 |
trask/glowroot | agent/plugins/http-client-plugin/src/main/java/org/glowroot/agent/plugin/httpclient/ApacheHttpClient3xAspect.java | 3747 | /*
* Copyright 2016-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.glowroot.agent.plugin.httpclient;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.URI;
import org.apache.commons.httpclient.URIException;
import org.glowroot.agent.plugin.api.Agent;
import org.glowroot.agent.plugin.api.MessageSupplier;
import org.glowroot.agent.plugin.api.ThreadContext;
import org.glowroot.agent.plugin.api.TimerName;
import org.glowroot.agent.plugin.api.TraceEntry;
import org.glowroot.agent.plugin.api.checker.Nullable;
import org.glowroot.agent.plugin.api.weaving.BindParameter;
import org.glowroot.agent.plugin.api.weaving.BindThrowable;
import org.glowroot.agent.plugin.api.weaving.BindTraveler;
import org.glowroot.agent.plugin.api.weaving.OnBefore;
import org.glowroot.agent.plugin.api.weaving.OnReturn;
import org.glowroot.agent.plugin.api.weaving.OnThrow;
import org.glowroot.agent.plugin.api.weaving.Pointcut;
import org.glowroot.agent.plugin.httpclient._.Uris;
/**
 * Glowroot weaving aspect that instruments Apache Commons HttpClient 3.x requests.
 * The annotation metadata below drives bytecode weaving, so statement order and
 * annotation placement are behaviorally significant.
 */
public class ApacheHttpClient3xAspect {

    // Matches HttpClient.executeMethod(HostConfiguration, HttpMethod, HttpState).
    // nestingGroup prevents double-capture when HTTP clients call each other.
    @Pointcut(className = "org.apache.commons.httpclient.HttpClient", methodName = "executeMethod",
            methodParameterTypes = {"org.apache.commons.httpclient.HostConfiguration",
                    "org.apache.commons.httpclient.HttpMethod",
                    "org.apache.commons.httpclient.HttpState"},
            nestingGroup = "http-client", timerName = "http client request")
    public static class ExecuteMethodAdvice {

        private static final TimerName timerName = Agent.getTimerName(ExecuteMethodAdvice.class);

        /**
         * Starts a service-call trace entry for the outgoing request. Returns null
         * (no entry) when the HttpMethod argument is null; method name and URI
         * default to "" when unavailable so the message is always well-formed.
         */
        @OnBefore
        public static @Nullable TraceEntry onBefore(ThreadContext context,
                @SuppressWarnings("unused") @BindParameter @Nullable HostConfiguration hostConfiguration,
                @BindParameter @Nullable HttpMethod methodObj) {
            if (methodObj == null) {
                return null;
            }
            String method = methodObj.getName();
            if (method == null) {
                method = "";
            } else {
                // Trailing space separates the verb from the URI in the message.
                method += " ";
            }
            String uri;
            try {
                URI uriObj = methodObj.getURI();
                if (uriObj == null) {
                    uri = "";
                } else {
                    uri = uriObj.getURI();
                    if (uri == null) {
                        uri = "";
                    }
                }
            } catch (URIException e) {
                // Unparseable URI: record the request with an empty URI rather than fail.
                uri = "";
            }
            // The entry name strips the query string; the detail message keeps the full URI.
            return context.startServiceCallEntry("HTTP", method + Uris.stripQueryString(uri),
                    MessageSupplier.create("http client request: {}{}", method, uri),
                    timerName);
        }

        // Ends the entry on normal return; traceEntry is null when onBefore skipped.
        @OnReturn
        public static void onReturn(@BindTraveler @Nullable TraceEntry traceEntry) {
            if (traceEntry != null) {
                traceEntry.end();
            }
        }

        // Ends the entry with the thrown error attached.
        @OnThrow
        public static void onThrow(@BindThrowable Throwable t,
                @BindTraveler TraceEntry traceEntry) {
            traceEntry.endWithError(t);
        }
    }
}
| apache-2.0 |
ErikKringen/kafka | streams/src/test/java/org/apache/kafka/streams/integration/EosIntegrationTest.java | 35756 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.integration;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.requests.IsolationLevel;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.InvalidStateStoreException;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Transformer;
import org.apache.kafka.streams.kstream.TransformerSupplier;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.StateStoreSupplier;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.test.IntegrationTest;
import org.apache.kafka.test.StreamsTestUtils;
import org.apache.kafka.test.TestCondition;
import org.apache.kafka.test.TestUtils;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@Category({IntegrationTest.class})
public class EosIntegrationTest {
private static final int NUM_BROKERS = 3;
private static final int MAX_POLL_INTERVAL_MS = 5 * 1000;
private static final int MAX_WAIT_TIME_MS = 60 * 1000;
@ClassRule
public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(NUM_BROKERS, new Properties() {
{
put("auto.create.topics.enable", false);
}
});
private static String applicationId;
private final static String CONSUMER_GROUP_ID = "readCommitted";
private final static String SINGLE_PARTITION_INPUT_TOPIC = "singlePartitionInputTopic";
private final static String SINGLE_PARTITION_THROUGH_TOPIC = "singlePartitionThroughTopic";
private final static String SINGLE_PARTITION_OUTPUT_TOPIC = "singlePartitionOutputTopic";
private final static int NUM_TOPIC_PARTITIONS = 2;
private final static String MULTI_PARTITION_INPUT_TOPIC = "multiPartitionInputTopic";
private final static String MULTI_PARTITION_THROUGH_TOPIC = "multiPartitionThroughTopic";
private final static String MULTI_PARTITION_OUTPUT_TOPIC = "multiPartitionOutputTopic";
private final String storeName = "store";
private AtomicBoolean errorInjected;
private AtomicBoolean injectGC;
private volatile boolean doGC = true;
private AtomicInteger commitRequested;
private Throwable uncaughtException;
private int testNumber = 0;
@Before
public void createTopics() throws Exception {
    // Fresh application id per test so state directories and consumer groups
    // from a previous test cannot interfere.
    applicationId = "appId-" + ++testNumber;
    // Delete and recreate all topics so every test starts from empty logs.
    CLUSTER.deleteTopicsAndWait(
        SINGLE_PARTITION_INPUT_TOPIC, MULTI_PARTITION_INPUT_TOPIC,
        SINGLE_PARTITION_THROUGH_TOPIC, MULTI_PARTITION_THROUGH_TOPIC,
        SINGLE_PARTITION_OUTPUT_TOPIC, MULTI_PARTITION_OUTPUT_TOPIC);
    CLUSTER.createTopics(SINGLE_PARTITION_INPUT_TOPIC, SINGLE_PARTITION_THROUGH_TOPIC, SINGLE_PARTITION_OUTPUT_TOPIC);
    CLUSTER.createTopic(MULTI_PARTITION_INPUT_TOPIC, NUM_TOPIC_PARTITIONS, 1);
    CLUSTER.createTopic(MULTI_PARTITION_THROUGH_TOPIC, NUM_TOPIC_PARTITIONS, 1);
    CLUSTER.createTopic(MULTI_PARTITION_OUTPUT_TOPIC, NUM_TOPIC_PARTITIONS, 1);
}
@Test
public void shouldBeAbleToRunWithEosEnabled() throws Exception {
    // Simplest EOS case: single input partition copied to a single output partition.
    runSimpleCopyTest(1, SINGLE_PARTITION_INPUT_TOPIC, null, SINGLE_PARTITION_OUTPUT_TOPIC);
}
@Test
public void shouldBeAbleToRestartAfterClose() throws Exception {
    // Two start/stop cycles: verifies EOS survives a clean restart of the application.
    runSimpleCopyTest(2, SINGLE_PARTITION_INPUT_TOPIC, null, SINGLE_PARTITION_OUTPUT_TOPIC);
}
@Test
public void shouldBeAbleToCommitToMultiplePartitions() throws Exception {
    // One input partition fanned out to a multi-partition output topic.
    runSimpleCopyTest(1, SINGLE_PARTITION_INPUT_TOPIC, null, MULTI_PARTITION_OUTPUT_TOPIC);
}
@Test
public void shouldBeAbleToCommitMultiplePartitionOffsets() throws Exception {
    // Multi-partition input collapsed into a single output partition.
    runSimpleCopyTest(1, MULTI_PARTITION_INPUT_TOPIC, null, SINGLE_PARTITION_OUTPUT_TOPIC);
}
@Test
public void shouldBeAbleToRunWithTwoSubtopologies() throws Exception {
    // A through() topic splits the topology into two sub-topologies.
    runSimpleCopyTest(1, SINGLE_PARTITION_INPUT_TOPIC, SINGLE_PARTITION_THROUGH_TOPIC, SINGLE_PARTITION_OUTPUT_TOPIC);
}
@Test
public void shouldBeAbleToRunWithTwoSubtopologiesAndMultiplePartitions() throws Exception {
    // Two sub-topologies with multi-partition input, through and output topics.
    runSimpleCopyTest(1, MULTI_PARTITION_INPUT_TOPIC, MULTI_PARTITION_THROUGH_TOPIC, MULTI_PARTITION_OUTPUT_TOPIC);
}
/**
 * Runs a copy topology (input -> [optional through] -> output) with EOS enabled,
 * once per restart. Each round writes a distinct data window (offset by
 * {@code factor * 100}), then reads the output with an isolation level of
 * READ_COMMITTED and checks per-key equality against the produced records.
 *
 * @param numberOfRestarts how many start/close cycles to perform
 * @param inputTopic       topic the test data is written to
 * @param throughTopic     optional intermediate topic (null to skip through())
 * @param outputTopic      topic whose committed contents are verified
 */
private void runSimpleCopyTest(final int numberOfRestarts,
                               final String inputTopic,
                               final String throughTopic,
                               final String outputTopic) throws Exception {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Long, Long> input = builder.stream(inputTopic);
    KStream<Long, Long> output = input;
    if (throughTopic != null) {
        output = input.through(throughTopic);
    }
    output.to(outputTopic);
    for (int i = 0; i < numberOfRestarts; ++i) {
        // Each restart round uses a disjoint key-space window (factor * 100 ..).
        final long factor = i;
        final KafkaStreams streams = new KafkaStreams(
            builder.build(),
            StreamsTestUtils.getStreamsConfig(
                applicationId,
                CLUSTER.bootstrapServers(),
                Serdes.LongSerde.class.getName(),
                Serdes.LongSerde.class.getName(),
                new Properties() {
                    {
                        // One record per poll keeps commit boundaries fine-grained.
                        put(StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), 1);
                        put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE);
                    }
                }));
        try {
            streams.start();
            final List<KeyValue<Long, Long>> inputData = prepareData(factor * 100, factor * 100 + 10L, 0L, 1L);
            IntegrationTestUtils.produceKeyValuesSynchronously(
                inputTopic,
                inputData,
                TestUtils.producerConfig(CLUSTER.bootstrapServers(), LongSerializer.class, LongSerializer.class),
                CLUSTER.time
            );
            // Only committed (i.e. transactionally completed) records must be visible.
            final List<KeyValue<Long, Long>> committedRecords
                = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(
                    TestUtils.consumerConfig(
                        CLUSTER.bootstrapServers(),
                        CONSUMER_GROUP_ID,
                        LongDeserializer.class,
                        LongDeserializer.class,
                        new Properties() {
                            {
                                put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.name().toLowerCase(Locale.ROOT));
                            }
                        }),
                    outputTopic,
                    inputData.size()
                );
            checkResultPerKey(committedRecords, inputData);
        } finally {
            streams.close();
        }
    }
}
/**
 * Asserts that, for every key appearing in either list, the per-key record
 * sub-sequences match. Comparing per key avoids spurious failures caused by
 * cross-partition interleaving of records with different keys.
 */
private void checkResultPerKey(final List<KeyValue<Long, Long>> result,
                               final List<KeyValue<Long, Long>> expectedResult) {
    final Set<Long> keys = new HashSet<>();
    addAllKeys(keys, result);
    addAllKeys(keys, expectedResult);
    for (final Long key : keys) {
        final List<KeyValue<Long, Long>> actualForKey = getAllRecordPerKey(key, result);
        final List<KeyValue<Long, Long>> expectedForKey = getAllRecordPerKey(key, expectedResult);
        assertThat(actualForKey, equalTo(expectedForKey));
    }
}
/** Collects the distinct keys of all records into the given set. */
private void addAllKeys(final Set<Long> allKeys, final List<KeyValue<Long, Long>> records) {
    for (int i = 0; i < records.size(); ++i) {
        allKeys.add(records.get(i).key);
    }
}
/** Returns, in encounter order, all records whose key equals the given key. */
private List<KeyValue<Long, Long>> getAllRecordPerKey(final Long key, final List<KeyValue<Long, Long>> records) {
    final List<KeyValue<Long, Long>> matches = new ArrayList<>(records.size());
    for (int i = 0; i < records.size(); ++i) {
        final KeyValue<Long, Long> candidate = records.get(i);
        if (candidate.key.equals(key)) {
            matches.add(candidate);
        }
    }
    return matches;
}
@Test
public void shouldBeAbleToPerformMultipleTransactions() throws Exception {
    // Writes two bursts of records through a single long-lived Streams instance,
    // verifying that consecutive EOS transactions commit independently and in order.
    final StreamsBuilder builder = new StreamsBuilder();
    builder.stream(SINGLE_PARTITION_INPUT_TOPIC).to(SINGLE_PARTITION_OUTPUT_TOPIC);
    final KafkaStreams streams = new KafkaStreams(
        builder.build(),
        StreamsTestUtils.getStreamsConfig(
            applicationId,
            CLUSTER.bootstrapServers(),
            Serdes.LongSerde.class.getName(),
            Serdes.LongSerde.class.getName(),
            new Properties() {
                {
                    put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE);
                }
            }));
    try {
        streams.start();
        final List<KeyValue<Long, Long>> firstBurstOfData = prepareData(0L, 5L, 0L);
        final List<KeyValue<Long, Long>> secondBurstOfData = prepareData(5L, 8L, 0L);
        IntegrationTestUtils.produceKeyValuesSynchronously(
            SINGLE_PARTITION_INPUT_TOPIC,
            firstBurstOfData,
            TestUtils.producerConfig(CLUSTER.bootstrapServers(), LongSerializer.class, LongSerializer.class),
            CLUSTER.time
        );
        // READ_COMMITTED: only records from completed transactions are visible.
        final List<KeyValue<Long, Long>> firstCommittedRecords
            = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(
                TestUtils.consumerConfig(
                    CLUSTER.bootstrapServers(),
                    CONSUMER_GROUP_ID,
                    LongDeserializer.class,
                    LongDeserializer.class,
                    new Properties() {
                        {
                            put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.name().toLowerCase(Locale.ROOT));
                        }
                    }),
                SINGLE_PARTITION_OUTPUT_TOPIC,
                firstBurstOfData.size()
            );
        assertThat(firstCommittedRecords, equalTo(firstBurstOfData));
        // Second burst must commit as a separate transaction on the same instance.
        IntegrationTestUtils.produceKeyValuesSynchronously(
            SINGLE_PARTITION_INPUT_TOPIC,
            secondBurstOfData,
            TestUtils.producerConfig(CLUSTER.bootstrapServers(), LongSerializer.class, LongSerializer.class),
            CLUSTER.time
        );
        final List<KeyValue<Long, Long>> secondCommittedRecords
            = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(
                TestUtils.consumerConfig(
                    CLUSTER.bootstrapServers(),
                    CONSUMER_GROUP_ID,
                    LongDeserializer.class,
                    LongDeserializer.class,
                    new Properties() {
                        {
                            put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.name().toLowerCase(Locale.ROOT));
                        }
                    }),
                SINGLE_PARTITION_OUTPUT_TOPIC,
                secondBurstOfData.size()
            );
        assertThat(secondCommittedRecords, equalTo(secondBurstOfData));
    } finally {
        streams.close();
    }
}
@Test
public void shouldNotViolateEosIfOneTaskFails() throws Exception {
    // this test writes 10 + 5 + 5 records per partition (running with 2 partitions)
    // the app is supposed to copy all 40 records into the output topic
    // the app commits after each 10 records per partition, and thus will have 2*5 uncommitted writes
    //
    // the failure gets inject after 20 committed and 30 uncommitted records got received
    // -> the failure only kills one thread
    // after fail over, we should read 40 committed records (even if 50 record got written)
    final KafkaStreams streams = getKafkaStreams(false, "appDir", 2);
    try {
        streams.start();
        final List<KeyValue<Long, Long>> committedDataBeforeFailure = prepareData(0L, 10L, 0L, 1L);
        final List<KeyValue<Long, Long>> uncommittedDataBeforeFailure = prepareData(10L, 15L, 0L, 1L);
        final List<KeyValue<Long, Long>> dataBeforeFailure = new ArrayList<>();
        dataBeforeFailure.addAll(committedDataBeforeFailure);
        dataBeforeFailure.addAll(uncommittedDataBeforeFailure);
        final List<KeyValue<Long, Long>> dataAfterFailure = prepareData(15L, 20L, 0L, 1L);
        writeInputData(committedDataBeforeFailure);
        // Wait until both tasks have requested a commit (one per partition).
        TestUtils.waitForCondition(new TestCondition() {
            @Override
            public boolean conditionMet() {
                return commitRequested.get() == 2;
            }
        }, MAX_WAIT_TIME_MS, "SteamsTasks did not request commit.");
        writeInputData(uncommittedDataBeforeFailure);
        // READ_UNCOMMITTED (null group) sees everything; READ_COMMITTED only the
        // transactionally completed prefix.
        final List<KeyValue<Long, Long>> uncommittedRecords = readResult(dataBeforeFailure.size(), null);
        final List<KeyValue<Long, Long>> committedRecords = readResult(committedDataBeforeFailure.size(), CONSUMER_GROUP_ID);
        checkResultPerKey(committedRecords, committedDataBeforeFailure);
        checkResultPerKey(uncommittedRecords, dataBeforeFailure);
        // Trigger the injected failure in exactly one StreamThread.
        errorInjected.set(true);
        writeInputData(dataAfterFailure);
        TestUtils.waitForCondition(new TestCondition() {
            @Override
            public boolean conditionMet() {
                return uncaughtException != null;
            }
        }, MAX_WAIT_TIME_MS, "Should receive uncaught exception from one StreamThread.");
        // After fail-over, every input record must appear committed exactly once.
        final List<KeyValue<Long, Long>> allCommittedRecords = readResult(
            committedDataBeforeFailure.size() + uncommittedDataBeforeFailure.size() + dataAfterFailure.size(),
            CONSUMER_GROUP_ID + "_ALL");
        final List<KeyValue<Long, Long>> committedRecordsAfterFailure = readResult(
            uncommittedDataBeforeFailure.size() + dataAfterFailure.size(),
            CONSUMER_GROUP_ID);
        final List<KeyValue<Long, Long>> allExpectedCommittedRecordsAfterRecovery = new ArrayList<>();
        allExpectedCommittedRecordsAfterRecovery.addAll(committedDataBeforeFailure);
        allExpectedCommittedRecordsAfterRecovery.addAll(uncommittedDataBeforeFailure);
        allExpectedCommittedRecordsAfterRecovery.addAll(dataAfterFailure);
        final List<KeyValue<Long, Long>> expectedCommittedRecordsAfterRecovery = new ArrayList<>();
        expectedCommittedRecordsAfterRecovery.addAll(uncommittedDataBeforeFailure);
        expectedCommittedRecordsAfterRecovery.addAll(dataAfterFailure);
        checkResultPerKey(allCommittedRecords, allExpectedCommittedRecordsAfterRecovery);
        checkResultPerKey(committedRecordsAfterFailure, expectedCommittedRecordsAfterRecovery);
    } finally {
        streams.close();
    }
}
@Test
public void shouldNotViolateEosIfOneTaskFailsWithState() throws Exception {
    // this test updates a store with 10 + 5 + 5 records per partition (running with 2 partitions)
    // the app is supposed to emit all 40 update records into the output topic
    // the app commits after each 10 records per partition, and thus will have 2*5 uncommitted writes
    // and store updates (ie, another 5 uncommitted writes to a changelog topic per partition)
    //
    // the failure gets inject after 20 committed and 30 uncommitted records got received
    // -> the failure only kills one thread
    // after fail over, we should read 40 committed records and the state stores should contain the correct sums
    // per key (even if some records got processed twice)
    final KafkaStreams streams = getKafkaStreams(true, "appDir", 2);
    try {
        streams.start();

        // phase 1: data that must end up committed before we inject anything
        final List<KeyValue<Long, Long>> committedDataBeforeFailure = prepareData(0L, 10L, 0L, 1L);
        // phase 2: data that stays uncommitted until fail-over completes
        final List<KeyValue<Long, Long>> uncommittedDataBeforeFailure = prepareData(10L, 15L, 0L, 1L);
        final List<KeyValue<Long, Long>> dataBeforeFailure = new ArrayList<>();
        dataBeforeFailure.addAll(committedDataBeforeFailure);
        dataBeforeFailure.addAll(uncommittedDataBeforeFailure);
        // phase 3: data written after the error got injected
        final List<KeyValue<Long, Long>> dataAfterFailure = prepareData(15L, 20L, 0L, 1L);

        writeInputData(committedDataBeforeFailure);
        // the transformer requests a commit after every 10th record per partition; wait until both tasks did
        TestUtils.waitForCondition(new TestCondition() {
            @Override
            public boolean conditionMet() {
                return commitRequested.get() == 2;
            }
        }, MAX_WAIT_TIME_MS, "StreamsTasks did not request commit.");

        writeInputData(uncommittedDataBeforeFailure);

        // read-uncommitted sees everything; read-committed must only see phase-1 data
        final List<KeyValue<Long, Long>> uncommittedRecords = readResult(dataBeforeFailure.size(), null);
        final List<KeyValue<Long, Long>> committedRecords = readResult(committedDataBeforeFailure.size(), CONSUMER_GROUP_ID);
        final List<KeyValue<Long, Long>> expectedResultBeforeFailure = computeExpectedResult(dataBeforeFailure);
        checkResultPerKey(committedRecords, computeExpectedResult(committedDataBeforeFailure));
        checkResultPerKey(uncommittedRecords, expectedResultBeforeFailure);
        verifyStateStore(streams, getMaxPerKey(expectedResultBeforeFailure));

        // kill exactly one thread; EOS must roll back the uncommitted writes and reprocess them
        errorInjected.set(true);
        writeInputData(dataAfterFailure);
        TestUtils.waitForCondition(new TestCondition() {
            @Override
            public boolean conditionMet() {
                return uncaughtException != null;
            }
        }, MAX_WAIT_TIME_MS, "Should receive uncaught exception from one StreamThread.");

        // after fail-over, all 40 records (including formerly uncommitted ones) must be committed exactly once
        final List<KeyValue<Long, Long>> allCommittedRecords = readResult(
            committedDataBeforeFailure.size() + uncommittedDataBeforeFailure.size() + dataAfterFailure.size(),
            CONSUMER_GROUP_ID + "_ALL");
        final List<KeyValue<Long, Long>> committedRecordsAfterFailure = readResult(
            uncommittedDataBeforeFailure.size() + dataAfterFailure.size(),
            CONSUMER_GROUP_ID);

        final List<KeyValue<Long, Long>> allExpectedCommittedRecordsAfterRecovery = new ArrayList<>();
        allExpectedCommittedRecordsAfterRecovery.addAll(committedDataBeforeFailure);
        allExpectedCommittedRecordsAfterRecovery.addAll(uncommittedDataBeforeFailure);
        allExpectedCommittedRecordsAfterRecovery.addAll(dataAfterFailure);
        final List<KeyValue<Long, Long>> expectedResult = computeExpectedResult(allExpectedCommittedRecordsAfterRecovery);

        checkResultPerKey(allCommittedRecords, expectedResult);
        checkResultPerKey(committedRecordsAfterFailure, expectedResult.subList(committedDataBeforeFailure.size(), expectedResult.size()));
        verifyStateStore(streams, getMaxPerKey(expectedResult));
    } finally {
        streams.close();
    }
}
@Test
public void shouldNotViolateEosIfOneTaskGetsFencedUsingIsolatedAppInstances() throws Exception {
    // this test writes 10 + 5 + 5 + 10 records per partition (running with 2 partitions)
    // the app is supposed to copy all 60 records into the output topic
    // the app commits after each 10 records per partition, and thus will have 2*5 uncommitted writes
    //
    // a GC pause gets inject after 20 committed and 30 uncommitted records got received
    // -> the GC pause only affects one thread and should trigger a rebalance
    // after rebalancing, we should read 40 committed records (even if 50 record got written)
    //
    // afterwards, the "stalling" thread resumes, and another rebalance should get triggered
    // we write the remaining 20 records and verify to read 60 result records
    final KafkaStreams streams1 = getKafkaStreams(false, "appDir1", 1);
    final KafkaStreams streams2 = getKafkaStreams(false, "appDir2", 1);
    try {
        streams1.start();
        streams2.start();

        final List<KeyValue<Long, Long>> committedDataBeforeGC = prepareData(0L, 10L, 0L, 1L);
        final List<KeyValue<Long, Long>> uncommittedDataBeforeGC = prepareData(10L, 15L, 0L, 1L);
        final List<KeyValue<Long, Long>> dataBeforeGC = new ArrayList<>();
        dataBeforeGC.addAll(committedDataBeforeGC);
        dataBeforeGC.addAll(uncommittedDataBeforeGC);
        final List<KeyValue<Long, Long>> dataToTriggerFirstRebalance = prepareData(15L, 20L, 0L, 1L);
        final List<KeyValue<Long, Long>> dataAfterSecondRebalance = prepareData(20L, 30L, 0L, 1L);

        writeInputData(committedDataBeforeGC);
        // the transformer requests a commit after every 10th record per partition; wait until both tasks did
        TestUtils.waitForCondition(new TestCondition() {
            @Override
            public boolean conditionMet() {
                return commitRequested.get() == 2;
            }
        }, MAX_WAIT_TIME_MS, "StreamsTasks did not request commit.");

        writeInputData(uncommittedDataBeforeGC);

        final List<KeyValue<Long, Long>> uncommittedRecords = readResult(dataBeforeGC.size(), null);
        final List<KeyValue<Long, Long>> committedRecords = readResult(committedDataBeforeGC.size(), CONSUMER_GROUP_ID);
        checkResultPerKey(committedRecords, committedDataBeforeGC);
        checkResultPerKey(uncommittedRecords, dataBeforeGC);

        // stall one thread; its task must migrate so that a single instance owns both partitions
        injectGC.set(true);
        writeInputData(dataToTriggerFirstRebalance);
        TestUtils.waitForCondition(new TestCondition() {
            @Override
            public boolean conditionMet() {
                return streams1.allMetadata().size() == 1 && streams2.allMetadata().size() == 1 &&
                    (streams1.allMetadata().iterator().next().topicPartitions().size() == 2
                    || streams2.allMetadata().iterator().next().topicPartitions().size() == 2);
            }
        }, MAX_WAIT_TIME_MS, "Should have rebalanced.");

        final List<KeyValue<Long, Long>> committedRecordsAfterRebalance = readResult(
            uncommittedDataBeforeGC.size() + dataToTriggerFirstRebalance.size(),
            CONSUMER_GROUP_ID);
        final List<KeyValue<Long, Long>> expectedCommittedRecordsAfterRebalance = new ArrayList<>();
        expectedCommittedRecordsAfterRebalance.addAll(uncommittedDataBeforeGC);
        expectedCommittedRecordsAfterRebalance.addAll(dataToTriggerFirstRebalance);
        checkResultPerKey(committedRecordsAfterRebalance, expectedCommittedRecordsAfterRebalance);

        // resume the stalled thread (its producer is fenced); wait for the second rebalance back to 1+1 partitions
        doGC = false;
        TestUtils.waitForCondition(new TestCondition() {
            @Override
            public boolean conditionMet() {
                return streams1.allMetadata().size() == 1 && streams2.allMetadata().size() == 1
                    && streams1.allMetadata().iterator().next().topicPartitions().size() == 1
                    && streams2.allMetadata().iterator().next().topicPartitions().size() == 1;
            }
        }, MAX_WAIT_TIME_MS, "Should have rebalanced.");

        writeInputData(dataAfterSecondRebalance);
        final List<KeyValue<Long, Long>> allCommittedRecords = readResult(
            committedDataBeforeGC.size() + uncommittedDataBeforeGC.size()
            + dataToTriggerFirstRebalance.size() + dataAfterSecondRebalance.size(),
            CONSUMER_GROUP_ID + "_ALL");
        final List<KeyValue<Long, Long>> allExpectedCommittedRecordsAfterRecovery = new ArrayList<>();
        allExpectedCommittedRecordsAfterRecovery.addAll(committedDataBeforeGC);
        allExpectedCommittedRecordsAfterRecovery.addAll(uncommittedDataBeforeGC);
        allExpectedCommittedRecordsAfterRecovery.addAll(dataToTriggerFirstRebalance);
        allExpectedCommittedRecordsAfterRecovery.addAll(dataAfterSecondRebalance);
        checkResultPerKey(allCommittedRecords, allExpectedCommittedRecordsAfterRecovery);
    } finally {
        streams1.close();
        streams2.close();
    }
}
/**
 * Builds the test input: for every key, one record per value in
 * {@code [fromInclusive, toExclusive)}, grouped by key in the given key order.
 */
private List<KeyValue<Long, Long>> prepareData(final long fromInclusive, final long toExclusive, final Long... keys) {
    final List<KeyValue<Long, Long>> records = new ArrayList<>();
    for (final Long key : keys) {
        long value = fromInclusive;
        while (value < toExclusive) {
            records.add(new KeyValue<>(key, value));
            ++value;
        }
    }
    return records;
}
/**
 * Creates (but does not start) a Streams instance running the EOS test topology.
 * <p>
 * The topology reads the multi-partition input topic, pipes every record through a
 * {@code Transformer} that (a) throws once when {@code errorInjected} is set,
 * (b) busy-sleeps while {@code doGC} is set to simulate a GC pause/fencing, and
 * (c) requests a commit after every 10th record per partition. With
 * {@code withState == true} it additionally maintains a running sum per key in a
 * persistent store and emits the sums instead of the raw values.
 * Also resets the shared {@code commitRequested}/{@code errorInjected}/{@code injectGC}
 * coordination fields, so each call yields an isolated instance.
 */
private KafkaStreams getKafkaStreams(final boolean withState, final String appDir, final int numberOfStreamsThreads) {
    // fresh coordination state per instance — tests observe/drive the transformer through these
    commitRequested = new AtomicInteger(0);
    errorInjected = new AtomicBoolean(false);
    injectGC = new AtomicBoolean(false);
    final StreamsBuilder builder = new StreamsBuilder();
    String[] storeNames = null;
    if (withState) {
        // persistent store backed by a changelog topic, so sums survive fail-over
        storeNames = new String[] {storeName};
        final StateStoreSupplier storeSupplier = Stores.create(storeName)
            .withLongKeys()
            .withLongValues()
            .persistent()
            .build();
        builder.addStateStore(storeSupplier);
    }
    final KStream<Long, Long> input = builder.stream(MULTI_PARTITION_INPUT_TOPIC);
    input.transform(new TransformerSupplier<Long, Long, KeyValue<Long, Long>>() {
        @SuppressWarnings("unchecked")
        @Override
        public Transformer<Long, Long, KeyValue<Long, Long>> get() {
            return new Transformer<Long, Long, KeyValue<Long, Long>>() {
                ProcessorContext context;
                KeyValueStore<Long, Long> state = null;
                @Override
                public void init(final ProcessorContext context) {
                    this.context = context;
                    if (withState) {
                        state = (KeyValueStore<Long, Long>) context.getStateStore(storeName);
                    }
                }
                @Override
                public KeyValue<Long, Long> transform(final Long key, final Long value) {
                    if (errorInjected.compareAndSet(true, false)) {
                        // only tries to fail once on one of the task
                        throw new RuntimeException("Injected test exception.");
                    }
                    if (injectGC.compareAndSet(true, false)) {
                        // simulate a long GC pause: block this thread until the test flips doGC off
                        while (doGC) {
                            try {
                                Thread.sleep(100);
                            } catch (final InterruptedException e) {
                                throw new RuntimeException(e);
                            }
                        }
                    }
                    // request a commit after every 10th value per partition (values are 0-based)
                    if ((value + 1) % 10 == 0) {
                        context.commit();
                        commitRequested.incrementAndGet();
                    }
                    if (state != null) {
                        // stateful mode: accumulate the per-key sum and forward it instead of the value
                        Long sum = state.get(key);
                        if (sum == null) {
                            sum = value;
                        } else {
                            sum += value;
                        }
                        state.put(key, sum);
                        context.forward(key, sum);
                        return null;
                    }
                    // stateless mode: plain pass-through
                    return new KeyValue<>(key, value);
                }
                @Override
                public KeyValue<Long, Long> punctuate(final long timestamp) {
                    return null;
                }
                @Override
                public void close() { }
            };
        } }, storeNames)
        .to(SINGLE_PARTITION_OUTPUT_TOPIC);
    final KafkaStreams streams = new KafkaStreams(
        builder.build(),
        StreamsTestUtils.getStreamsConfig(
            applicationId,
            CLUSTER.bootstrapServers(),
            Serdes.LongSerde.class.getName(),
            Serdes.LongSerde.class.getName(),
            new Properties() {
                {
                    put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE);
                    put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numberOfStreamsThreads);
                    // disable time-based commits; the transformer triggers commits explicitly
                    put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, -1);
                    // short timeouts so the simulated GC pause reliably drops the member from the group
                    put(StreamsConfig.consumerPrefix(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG), 5 * 1000);
                    put(StreamsConfig.consumerPrefix(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG), 5 * 1000 - 1);
                    put(StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG), MAX_POLL_INTERVAL_MS);
                    put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
                    put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath() + File.separator + appDir);
                    put(StreamsConfig.APPLICATION_SERVER_CONFIG, "dummy:2142");
                }
            }));
    // record the first uncaught exception so tests can wait for the injected failure;
    // a second one indicates a test bug and fails immediately
    streams.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(final Thread t, final Throwable e) {
            if (uncaughtException != null) {
                e.printStackTrace(System.err);
                fail("Should only get one uncaught exception from Streams.");
            }
            uncaughtException = e;
        }
    });
    return streams;
}
/** Synchronously produces the given records to the multi-partition input topic. */
private void writeInputData(final List<KeyValue<Long, Long>> records) throws Exception {
    final Properties producerConfig =
        TestUtils.producerConfig(CLUSTER.bootstrapServers(), LongSerializer.class, LongSerializer.class);
    IntegrationTestUtils.produceKeyValuesSynchronously(
        MULTI_PARTITION_INPUT_TOPIC,
        records,
        producerConfig,
        CLUSTER.time
    );
}
/**
 * Consumes {@code numberOfRecords} records from the output topic.
 *
 * @param numberOfRecords minimum number of records to wait for
 * @param groupId         consumer group to use for a read-committed read,
 *                        or {@code null} to read uncommitted records
 */
private List<KeyValue<Long, Long>> readResult(final int numberOfRecords,
                                              final String groupId) throws Exception {
    if (groupId != null) {
        // read committed only; explicit Properties instead of double-brace initialization,
        // which would create an anonymous subclass capturing the enclosing test instance
        final Properties readCommittedConfig = new Properties();
        readCommittedConfig.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.name().toLowerCase(Locale.ROOT));
        return IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(
            TestUtils.consumerConfig(
                CLUSTER.bootstrapServers(),
                groupId,
                LongDeserializer.class,
                LongDeserializer.class,
                readCommittedConfig),
            SINGLE_PARTITION_OUTPUT_TOPIC,
            numberOfRecords
        );
    }
    // read uncommitted
    return IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(
        TestUtils.consumerConfig(CLUSTER.bootstrapServers(), LongDeserializer.class, LongDeserializer.class),
        SINGLE_PARTITION_OUTPUT_TOPIC,
        numberOfRecords
    );
}
/**
 * Computes the expected stateful output: for each input record, the running
 * per-key sum of all values seen so far (mirrors the transformer's store logic).
 */
private List<KeyValue<Long, Long>> computeExpectedResult(final List<KeyValue<Long, Long>> input) {
    final List<KeyValue<Long, Long>> expectedResult = new ArrayList<>(input.size());
    final Map<Long, Long> sums = new HashMap<>();
    for (final KeyValue<Long, Long> record : input) {
        // merge() replaces the manual get/null-check/put dance and returns the updated sum
        final Long sum = sums.merge(record.key, record.value, Long::sum);
        expectedResult.add(new KeyValue<>(record.key, sum));
    }
    return expectedResult;
}
/**
 * Reduces the given records to the maximum value observed per key; since the
 * per-key sums are monotonically increasing, this is the expected final store content.
 */
private Set<KeyValue<Long, Long>> getMaxPerKey(final List<KeyValue<Long, Long>> input) {
    final Map<Long, Long> maxPerKey = new HashMap<>();
    for (final KeyValue<Long, Long> record : input) {
        // keep the larger of the stored and the incoming value per key
        maxPerKey.merge(record.key, record.value, Math::max);
    }
    final Set<KeyValue<Long, Long>> expectedResult = new HashSet<>(maxPerKey.size());
    for (final Map.Entry<Long, Long> max : maxPerKey.entrySet()) {
        expectedResult.add(new KeyValue<>(max.getKey(), max.getValue()));
    }
    return expectedResult;
}
/**
 * Waits (up to 5 minutes) for the state store to become queryable, then asserts its
 * content equals {@code expectedStoreContent} exactly (no missing, no extra entries).
 * NOTE: mutates {@code expectedStoreContent} by removing every entry found.
 */
private void verifyStateStore(final KafkaStreams streams, final Set<KeyValue<Long, Long>> expectedStoreContent) {
    ReadOnlyKeyValueStore<Long, Long> store = null;
    final long maxWaitingTime = System.currentTimeMillis() + 300000L;
    while (System.currentTimeMillis() < maxWaitingTime) {
        try {
            store = streams.store(storeName, QueryableStoreTypes.<Long, Long>keyValueStore());
            break;
        } catch (final InvalidStateStoreException okJustRetry) {
            // store not (re-)initialized yet; best-effort sleep-and-retry
            try {
                Thread.sleep(5000L);
            } catch (final Exception ignore) { }
        }
    }
    // fail with a clear message instead of the NPE the old code threw when the
    // store never became queryable within the timeout
    if (store == null) {
        fail("State store '" + storeName + "' did not become queryable within timeout.");
    }
    // KeyValueIterator must be closed to release the underlying (RocksDB) resources
    final KeyValueIterator<Long, Long> it = store.all();
    try {
        while (it.hasNext()) {
            assertTrue(expectedStoreContent.remove(it.next()));
        }
    } finally {
        it.close();
    }
    assertTrue(expectedStoreContent.isEmpty());
}
}
| apache-2.0 |
lunifera/lunifera-sharky-m2m | Jnect/org.jnect.bodymodel/src/org/jnect/bodymodel/impl/BodyImpl.java | 49467 | /*******************************************************************************
* Copyright (c) 2012 jnect.org.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* ${user} - initial API and implementation
*******************************************************************************/
package org.jnect.bodymodel.impl;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.EObjectResolvingEList;
import org.jnect.bodymodel.Body;
import org.jnect.bodymodel.BodymodelPackage;
import org.jnect.bodymodel.CenterHip;
import org.jnect.bodymodel.CenterShoulder;
import org.jnect.bodymodel.Head;
import org.jnect.bodymodel.HumanLink;
import org.jnect.bodymodel.LeftAnkle;
import org.jnect.bodymodel.LeftElbow;
import org.jnect.bodymodel.LeftFoot;
import org.jnect.bodymodel.LeftHand;
import org.jnect.bodymodel.LeftHip;
import org.jnect.bodymodel.LeftKnee;
import org.jnect.bodymodel.LeftShoulder;
import org.jnect.bodymodel.LeftWrist;
import org.jnect.bodymodel.RightAnkle;
import org.jnect.bodymodel.RightElbow;
import org.jnect.bodymodel.RightFoot;
import org.jnect.bodymodel.RightHand;
import org.jnect.bodymodel.RightHip;
import org.jnect.bodymodel.RightKnee;
import org.jnect.bodymodel.RightShoulder;
import org.jnect.bodymodel.RightWrist;
import org.jnect.bodymodel.Spine;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Body</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getHead <em>Head</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getCenterShoulder <em>Center Shoulder</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getLeftShoulder <em>Left Shoulder</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getRightShoulder <em>Right Shoulder</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getLeftElbow <em>Left Elbow</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getRightElbow <em>Right Elbow</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getLeftWrist <em>Left Wrist</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getRightWrist <em>Right Wrist</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getLeftHand <em>Left Hand</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getRightHand <em>Right Hand</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getSpine <em>Spine</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getCenterHip <em>Center Hip</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getLeftHip <em>Left Hip</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getRightHip <em>Right Hip</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getLeftKnee <em>Left Knee</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getRightKnee <em>Right Knee</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getLeftFoot <em>Left Foot</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getRightFoot <em>Right Foot</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getLinks <em>Links</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getLeftAnkle <em>Left Ankle</em>}</li>
* <li>{@link org.jnect.bodymodel.impl.BodyImpl#getRightAnkle <em>Right Ankle</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class BodyImpl extends EObjectImpl implements Body {
// NOTE(review): EMF-generated cached feature values (one per body-model feature).
// Keep the @generated tags intact — hand edits are overwritten on regeneration.
/**
 * The cached value of the '{@link #getHead() <em>Head</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getHead()
 * @generated
 * @ordered
 */
protected Head head;
/**
 * The cached value of the '{@link #getCenterShoulder() <em>Center Shoulder</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getCenterShoulder()
 * @generated
 * @ordered
 */
protected CenterShoulder centerShoulder;
/**
 * The cached value of the '{@link #getLeftShoulder() <em>Left Shoulder</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getLeftShoulder()
 * @generated
 * @ordered
 */
protected LeftShoulder leftShoulder;
/**
 * The cached value of the '{@link #getRightShoulder() <em>Right Shoulder</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getRightShoulder()
 * @generated
 * @ordered
 */
protected RightShoulder rightShoulder;
/**
 * The cached value of the '{@link #getLeftElbow() <em>Left Elbow</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getLeftElbow()
 * @generated
 * @ordered
 */
protected LeftElbow leftElbow;
/**
 * The cached value of the '{@link #getRightElbow() <em>Right Elbow</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getRightElbow()
 * @generated
 * @ordered
 */
protected RightElbow rightElbow;
/**
 * The cached value of the '{@link #getLeftWrist() <em>Left Wrist</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getLeftWrist()
 * @generated
 * @ordered
 */
protected LeftWrist leftWrist;
/**
 * The cached value of the '{@link #getRightWrist() <em>Right Wrist</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getRightWrist()
 * @generated
 * @ordered
 */
protected RightWrist rightWrist;
/**
 * The cached value of the '{@link #getLeftHand() <em>Left Hand</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getLeftHand()
 * @generated
 * @ordered
 */
protected LeftHand leftHand;
/**
 * The cached value of the '{@link #getRightHand() <em>Right Hand</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getRightHand()
 * @generated
 * @ordered
 */
protected RightHand rightHand;
/**
 * The cached value of the '{@link #getSpine() <em>Spine</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getSpine()
 * @generated
 * @ordered
 */
protected Spine spine;
/**
 * The cached value of the '{@link #getCenterHip() <em>Center Hip</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getCenterHip()
 * @generated
 * @ordered
 */
protected CenterHip centerHip;
/**
 * The cached value of the '{@link #getLeftHip() <em>Left Hip</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getLeftHip()
 * @generated
 * @ordered
 */
protected LeftHip leftHip;
/**
 * The cached value of the '{@link #getRightHip() <em>Right Hip</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getRightHip()
 * @generated
 * @ordered
 */
protected RightHip rightHip;
/**
 * The cached value of the '{@link #getLeftKnee() <em>Left Knee</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getLeftKnee()
 * @generated
 * @ordered
 */
protected LeftKnee leftKnee;
/**
 * The cached value of the '{@link #getRightKnee() <em>Right Knee</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getRightKnee()
 * @generated
 * @ordered
 */
protected RightKnee rightKnee;
/**
 * The cached value of the '{@link #getLeftFoot() <em>Left Foot</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getLeftFoot()
 * @generated
 * @ordered
 */
protected LeftFoot leftFoot;
/**
 * The cached value of the '{@link #getRightFoot() <em>Right Foot</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getRightFoot()
 * @generated
 * @ordered
 */
protected RightFoot rightFoot;
/**
 * The cached value of the '{@link #getLinks() <em>Links</em>}' reference list.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getLinks()
 * @generated
 * @ordered
 */
protected EList<HumanLink> links;
/**
 * The cached value of the '{@link #getLeftAnkle() <em>Left Ankle</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getLeftAnkle()
 * @generated
 * @ordered
 */
protected LeftAnkle leftAnkle;
/**
 * The cached value of the '{@link #getRightAnkle() <em>Right Ankle</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getRightAnkle()
 * @generated
 * @ordered
 */
protected RightAnkle rightAnkle;
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected BodyImpl() {
    super();
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected EClass eStaticClass() {
    return BodymodelPackage.Literals.BODY;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public Head getHead() {
    return head;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetHead(Head newHead, NotificationChain msgs) {
    // swap the cached reference and queue a SET notification on the chain
    Head oldHead = head;
    head = newHead;
    if (eNotificationRequired()) {
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__HEAD, oldHead, newHead);
        if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setHead(Head newHead) {
    if (newHead != head) {
        // standard EMF containment pattern: detach the old child, attach the new one,
        // then dispatch all queued notifications in one chain
        NotificationChain msgs = null;
        if (head != null)
            msgs = ((InternalEObject)head).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__HEAD, null, msgs);
        if (newHead != null)
            msgs = ((InternalEObject)newHead).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__HEAD, null, msgs);
        msgs = basicSetHead(newHead, msgs);
        if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
        // value unchanged: still emit a "touch" notification for listeners
        eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__HEAD, newHead, newHead));
}
// EMF-generated accessors for CenterShoulder/LeftShoulder/RightShoulder; each triple
// (get / basicSet / set) follows the standard EMF containment-reference pattern.
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public CenterShoulder getCenterShoulder() {
    return centerShoulder;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetCenterShoulder(CenterShoulder newCenterShoulder, NotificationChain msgs) {
    CenterShoulder oldCenterShoulder = centerShoulder;
    centerShoulder = newCenterShoulder;
    if (eNotificationRequired()) {
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__CENTER_SHOULDER, oldCenterShoulder, newCenterShoulder);
        if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setCenterShoulder(CenterShoulder newCenterShoulder) {
    if (newCenterShoulder != centerShoulder) {
        NotificationChain msgs = null;
        if (centerShoulder != null)
            msgs = ((InternalEObject)centerShoulder).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__CENTER_SHOULDER, null, msgs);
        if (newCenterShoulder != null)
            msgs = ((InternalEObject)newCenterShoulder).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__CENTER_SHOULDER, null, msgs);
        msgs = basicSetCenterShoulder(newCenterShoulder, msgs);
        if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__CENTER_SHOULDER, newCenterShoulder, newCenterShoulder));
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public LeftShoulder getLeftShoulder() {
    return leftShoulder;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetLeftShoulder(LeftShoulder newLeftShoulder, NotificationChain msgs) {
    LeftShoulder oldLeftShoulder = leftShoulder;
    leftShoulder = newLeftShoulder;
    if (eNotificationRequired()) {
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_SHOULDER, oldLeftShoulder, newLeftShoulder);
        if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setLeftShoulder(LeftShoulder newLeftShoulder) {
    if (newLeftShoulder != leftShoulder) {
        NotificationChain msgs = null;
        if (leftShoulder != null)
            msgs = ((InternalEObject)leftShoulder).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_SHOULDER, null, msgs);
        if (newLeftShoulder != null)
            msgs = ((InternalEObject)newLeftShoulder).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_SHOULDER, null, msgs);
        msgs = basicSetLeftShoulder(newLeftShoulder, msgs);
        if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_SHOULDER, newLeftShoulder, newLeftShoulder));
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public RightShoulder getRightShoulder() {
    return rightShoulder;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetRightShoulder(RightShoulder newRightShoulder, NotificationChain msgs) {
    RightShoulder oldRightShoulder = rightShoulder;
    rightShoulder = newRightShoulder;
    if (eNotificationRequired()) {
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_SHOULDER, oldRightShoulder, newRightShoulder);
        if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setRightShoulder(RightShoulder newRightShoulder) {
    if (newRightShoulder != rightShoulder) {
        NotificationChain msgs = null;
        if (rightShoulder != null)
            msgs = ((InternalEObject)rightShoulder).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_SHOULDER, null, msgs);
        if (newRightShoulder != null)
            msgs = ((InternalEObject)newRightShoulder).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_SHOULDER, null, msgs);
        msgs = basicSetRightShoulder(newRightShoulder, msgs);
        if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_SHOULDER, newRightShoulder, newRightShoulder));
}
// EMF-generated accessors for LeftElbow/RightElbow/LeftWrist; standard EMF
// containment-reference pattern (get / basicSet / set per feature).
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public LeftElbow getLeftElbow() {
    return leftElbow;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetLeftElbow(LeftElbow newLeftElbow, NotificationChain msgs) {
    LeftElbow oldLeftElbow = leftElbow;
    leftElbow = newLeftElbow;
    if (eNotificationRequired()) {
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_ELBOW, oldLeftElbow, newLeftElbow);
        if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setLeftElbow(LeftElbow newLeftElbow) {
    if (newLeftElbow != leftElbow) {
        NotificationChain msgs = null;
        if (leftElbow != null)
            msgs = ((InternalEObject)leftElbow).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_ELBOW, null, msgs);
        if (newLeftElbow != null)
            msgs = ((InternalEObject)newLeftElbow).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_ELBOW, null, msgs);
        msgs = basicSetLeftElbow(newLeftElbow, msgs);
        if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_ELBOW, newLeftElbow, newLeftElbow));
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public RightElbow getRightElbow() {
    return rightElbow;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetRightElbow(RightElbow newRightElbow, NotificationChain msgs) {
    RightElbow oldRightElbow = rightElbow;
    rightElbow = newRightElbow;
    if (eNotificationRequired()) {
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_ELBOW, oldRightElbow, newRightElbow);
        if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setRightElbow(RightElbow newRightElbow) {
    if (newRightElbow != rightElbow) {
        NotificationChain msgs = null;
        if (rightElbow != null)
            msgs = ((InternalEObject)rightElbow).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_ELBOW, null, msgs);
        if (newRightElbow != null)
            msgs = ((InternalEObject)newRightElbow).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_ELBOW, null, msgs);
        msgs = basicSetRightElbow(newRightElbow, msgs);
        if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_ELBOW, newRightElbow, newRightElbow));
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public LeftWrist getLeftWrist() {
    return leftWrist;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetLeftWrist(LeftWrist newLeftWrist, NotificationChain msgs) {
    LeftWrist oldLeftWrist = leftWrist;
    leftWrist = newLeftWrist;
    if (eNotificationRequired()) {
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_WRIST, oldLeftWrist, newLeftWrist);
        if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setLeftWrist(LeftWrist newLeftWrist) {
    if (newLeftWrist != leftWrist) {
        NotificationChain msgs = null;
        if (leftWrist != null)
            msgs = ((InternalEObject)leftWrist).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_WRIST, null, msgs);
        if (newLeftWrist != null)
            msgs = ((InternalEObject)newLeftWrist).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_WRIST, null, msgs);
        msgs = basicSetLeftWrist(newLeftWrist, msgs);
        if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_WRIST, newLeftWrist, newLeftWrist));
}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained right wrist joint of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public RightWrist getRightWrist() {
		return rightWrist;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained right wrist and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetRightWrist(RightWrist newRightWrist, NotificationChain msgs) {
		RightWrist oldRightWrist = rightWrist;
		rightWrist = newRightWrist;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_WRIST, oldRightWrist, newRightWrist);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained right wrist, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setRightWrist(RightWrist newRightWrist) {
		if (newRightWrist != rightWrist) {
			NotificationChain msgs = null;
			if (rightWrist != null)
				msgs = ((InternalEObject)rightWrist).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_WRIST, null, msgs);
			if (newRightWrist != null)
				msgs = ((InternalEObject)newRightWrist).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_WRIST, null, msgs);
			msgs = basicSetRightWrist(newRightWrist, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_WRIST, newRightWrist, newRightWrist));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained left hand of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public LeftHand getLeftHand() {
		return leftHand;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained left hand and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetLeftHand(LeftHand newLeftHand, NotificationChain msgs) {
		LeftHand oldLeftHand = leftHand;
		leftHand = newLeftHand;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_HAND, oldLeftHand, newLeftHand);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained left hand, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setLeftHand(LeftHand newLeftHand) {
		if (newLeftHand != leftHand) {
			NotificationChain msgs = null;
			if (leftHand != null)
				msgs = ((InternalEObject)leftHand).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_HAND, null, msgs);
			if (newLeftHand != null)
				msgs = ((InternalEObject)newLeftHand).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_HAND, null, msgs);
			msgs = basicSetLeftHand(newLeftHand, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_HAND, newLeftHand, newLeftHand));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained right hand of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public RightHand getRightHand() {
		return rightHand;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained right hand and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetRightHand(RightHand newRightHand, NotificationChain msgs) {
		RightHand oldRightHand = rightHand;
		rightHand = newRightHand;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_HAND, oldRightHand, newRightHand);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained right hand, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setRightHand(RightHand newRightHand) {
		if (newRightHand != rightHand) {
			NotificationChain msgs = null;
			if (rightHand != null)
				msgs = ((InternalEObject)rightHand).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_HAND, null, msgs);
			if (newRightHand != null)
				msgs = ((InternalEObject)newRightHand).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_HAND, null, msgs);
			msgs = basicSetRightHand(newRightHand, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_HAND, newRightHand, newRightHand));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained spine of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Spine getSpine() {
		return spine;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained spine and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetSpine(Spine newSpine, NotificationChain msgs) {
		Spine oldSpine = spine;
		spine = newSpine;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__SPINE, oldSpine, newSpine);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained spine, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setSpine(Spine newSpine) {
		if (newSpine != spine) {
			NotificationChain msgs = null;
			if (spine != null)
				msgs = ((InternalEObject)spine).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__SPINE, null, msgs);
			if (newSpine != null)
				msgs = ((InternalEObject)newSpine).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__SPINE, null, msgs);
			msgs = basicSetSpine(newSpine, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__SPINE, newSpine, newSpine));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained center hip joint of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public CenterHip getCenterHip() {
		return centerHip;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained center hip and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetCenterHip(CenterHip newCenterHip, NotificationChain msgs) {
		CenterHip oldCenterHip = centerHip;
		centerHip = newCenterHip;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__CENTER_HIP, oldCenterHip, newCenterHip);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained center hip, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setCenterHip(CenterHip newCenterHip) {
		if (newCenterHip != centerHip) {
			NotificationChain msgs = null;
			if (centerHip != null)
				msgs = ((InternalEObject)centerHip).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__CENTER_HIP, null, msgs);
			if (newCenterHip != null)
				msgs = ((InternalEObject)newCenterHip).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__CENTER_HIP, null, msgs);
			msgs = basicSetCenterHip(newCenterHip, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__CENTER_HIP, newCenterHip, newCenterHip));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained left hip joint of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public LeftHip getLeftHip() {
		return leftHip;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained left hip and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetLeftHip(LeftHip newLeftHip, NotificationChain msgs) {
		LeftHip oldLeftHip = leftHip;
		leftHip = newLeftHip;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_HIP, oldLeftHip, newLeftHip);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained left hip, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setLeftHip(LeftHip newLeftHip) {
		if (newLeftHip != leftHip) {
			NotificationChain msgs = null;
			if (leftHip != null)
				msgs = ((InternalEObject)leftHip).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_HIP, null, msgs);
			if (newLeftHip != null)
				msgs = ((InternalEObject)newLeftHip).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_HIP, null, msgs);
			msgs = basicSetLeftHip(newLeftHip, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_HIP, newLeftHip, newLeftHip));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained right hip joint of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public RightHip getRightHip() {
		return rightHip;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained right hip and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetRightHip(RightHip newRightHip, NotificationChain msgs) {
		RightHip oldRightHip = rightHip;
		rightHip = newRightHip;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_HIP, oldRightHip, newRightHip);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained right hip, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setRightHip(RightHip newRightHip) {
		if (newRightHip != rightHip) {
			NotificationChain msgs = null;
			if (rightHip != null)
				msgs = ((InternalEObject)rightHip).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_HIP, null, msgs);
			if (newRightHip != null)
				msgs = ((InternalEObject)newRightHip).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_HIP, null, msgs);
			msgs = basicSetRightHip(newRightHip, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_HIP, newRightHip, newRightHip));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained left knee joint of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public LeftKnee getLeftKnee() {
		return leftKnee;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained left knee and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetLeftKnee(LeftKnee newLeftKnee, NotificationChain msgs) {
		LeftKnee oldLeftKnee = leftKnee;
		leftKnee = newLeftKnee;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_KNEE, oldLeftKnee, newLeftKnee);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained left knee, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setLeftKnee(LeftKnee newLeftKnee) {
		if (newLeftKnee != leftKnee) {
			NotificationChain msgs = null;
			if (leftKnee != null)
				msgs = ((InternalEObject)leftKnee).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_KNEE, null, msgs);
			if (newLeftKnee != null)
				msgs = ((InternalEObject)newLeftKnee).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_KNEE, null, msgs);
			msgs = basicSetLeftKnee(newLeftKnee, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_KNEE, newLeftKnee, newLeftKnee));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained right knee joint of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public RightKnee getRightKnee() {
		return rightKnee;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained right knee and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetRightKnee(RightKnee newRightKnee, NotificationChain msgs) {
		RightKnee oldRightKnee = rightKnee;
		rightKnee = newRightKnee;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_KNEE, oldRightKnee, newRightKnee);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained right knee, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setRightKnee(RightKnee newRightKnee) {
		if (newRightKnee != rightKnee) {
			NotificationChain msgs = null;
			if (rightKnee != null)
				msgs = ((InternalEObject)rightKnee).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_KNEE, null, msgs);
			if (newRightKnee != null)
				msgs = ((InternalEObject)newRightKnee).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_KNEE, null, msgs);
			msgs = basicSetRightKnee(newRightKnee, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_KNEE, newRightKnee, newRightKnee));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained left foot of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public LeftFoot getLeftFoot() {
		return leftFoot;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained left foot and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetLeftFoot(LeftFoot newLeftFoot, NotificationChain msgs) {
		LeftFoot oldLeftFoot = leftFoot;
		leftFoot = newLeftFoot;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_FOOT, oldLeftFoot, newLeftFoot);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained left foot, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setLeftFoot(LeftFoot newLeftFoot) {
		if (newLeftFoot != leftFoot) {
			NotificationChain msgs = null;
			if (leftFoot != null)
				msgs = ((InternalEObject)leftFoot).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_FOOT, null, msgs);
			if (newLeftFoot != null)
				msgs = ((InternalEObject)newLeftFoot).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_FOOT, null, msgs);
			msgs = basicSetLeftFoot(newLeftFoot, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_FOOT, newLeftFoot, newLeftFoot));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained right foot of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public RightFoot getRightFoot() {
		return rightFoot;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained right foot and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetRightFoot(RightFoot newRightFoot, NotificationChain msgs) {
		RightFoot oldRightFoot = rightFoot;
		rightFoot = newRightFoot;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_FOOT, oldRightFoot, newRightFoot);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained right foot, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setRightFoot(RightFoot newRightFoot) {
		if (newRightFoot != rightFoot) {
			NotificationChain msgs = null;
			if (rightFoot != null)
				msgs = ((InternalEObject)rightFoot).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_FOOT, null, msgs);
			if (newRightFoot != null)
				msgs = ((InternalEObject)newRightFoot).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_FOOT, null, msgs);
			msgs = basicSetRightFoot(newRightFoot, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_FOOT, newRightFoot, newRightFoot));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the list of {@code HumanLink}s of this body; the backing
	 * resolving list is created lazily on first access.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<HumanLink> getLinks() {
		if (links == null) {
			links = new EObjectResolvingEList<HumanLink>(HumanLink.class, this, BodymodelPackage.BODY__LINKS);
		}
		return links;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained left ankle joint of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public LeftAnkle getLeftAnkle() {
		return leftAnkle;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained left ankle and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetLeftAnkle(LeftAnkle newLeftAnkle, NotificationChain msgs) {
		LeftAnkle oldLeftAnkle = leftAnkle;
		leftAnkle = newLeftAnkle;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_ANKLE, oldLeftAnkle, newLeftAnkle);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained left ankle, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setLeftAnkle(LeftAnkle newLeftAnkle) {
		if (newLeftAnkle != leftAnkle) {
			NotificationChain msgs = null;
			if (leftAnkle != null)
				msgs = ((InternalEObject)leftAnkle).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_ANKLE, null, msgs);
			if (newLeftAnkle != null)
				msgs = ((InternalEObject)newLeftAnkle).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__LEFT_ANKLE, null, msgs);
			msgs = basicSetLeftAnkle(newLeftAnkle, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__LEFT_ANKLE, newLeftAnkle, newLeftAnkle));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the contained right ankle joint of this body, or {@code null} if unset.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public RightAnkle getRightAnkle() {
		return rightAnkle;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Replaces the contained right ankle and, when listeners are registered,
	 * queues a SET notification on {@code msgs}; performs no inverse handling.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetRightAnkle(RightAnkle newRightAnkle, NotificationChain msgs) {
		RightAnkle oldRightAnkle = rightAnkle;
		rightAnkle = newRightAnkle;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_ANKLE, oldRightAnkle, newRightAnkle);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the contained right ankle, detaching the old and attaching the new
	 * value via inverse add/remove, then dispatches the queued notifications.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setRightAnkle(RightAnkle newRightAnkle) {
		if (newRightAnkle != rightAnkle) {
			NotificationChain msgs = null;
			if (rightAnkle != null)
				msgs = ((InternalEObject)rightAnkle).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_ANKLE, null, msgs);
			if (newRightAnkle != null)
				msgs = ((InternalEObject)newRightAnkle).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BodymodelPackage.BODY__RIGHT_ANKLE, null, msgs);
			msgs = basicSetRightAnkle(newRightAnkle, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BodymodelPackage.BODY__RIGHT_ANKLE, newRightAnkle, newRightAnkle));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Clears the containment reference matching {@code featureID} when a
	 * contained joint is removed; unknown features are delegated to the
	 * superclass. Note: the links list is not a containment and is not handled here.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case BodymodelPackage.BODY__HEAD:
				return basicSetHead(null, msgs);
			case BodymodelPackage.BODY__CENTER_SHOULDER:
				return basicSetCenterShoulder(null, msgs);
			case BodymodelPackage.BODY__LEFT_SHOULDER:
				return basicSetLeftShoulder(null, msgs);
			case BodymodelPackage.BODY__RIGHT_SHOULDER:
				return basicSetRightShoulder(null, msgs);
			case BodymodelPackage.BODY__LEFT_ELBOW:
				return basicSetLeftElbow(null, msgs);
			case BodymodelPackage.BODY__RIGHT_ELBOW:
				return basicSetRightElbow(null, msgs);
			case BodymodelPackage.BODY__LEFT_WRIST:
				return basicSetLeftWrist(null, msgs);
			case BodymodelPackage.BODY__RIGHT_WRIST:
				return basicSetRightWrist(null, msgs);
			case BodymodelPackage.BODY__LEFT_HAND:
				return basicSetLeftHand(null, msgs);
			case BodymodelPackage.BODY__RIGHT_HAND:
				return basicSetRightHand(null, msgs);
			case BodymodelPackage.BODY__SPINE:
				return basicSetSpine(null, msgs);
			case BodymodelPackage.BODY__CENTER_HIP:
				return basicSetCenterHip(null, msgs);
			case BodymodelPackage.BODY__LEFT_HIP:
				return basicSetLeftHip(null, msgs);
			case BodymodelPackage.BODY__RIGHT_HIP:
				return basicSetRightHip(null, msgs);
			case BodymodelPackage.BODY__LEFT_KNEE:
				return basicSetLeftKnee(null, msgs);
			case BodymodelPackage.BODY__RIGHT_KNEE:
				return basicSetRightKnee(null, msgs);
			case BodymodelPackage.BODY__LEFT_FOOT:
				return basicSetLeftFoot(null, msgs);
			case BodymodelPackage.BODY__RIGHT_FOOT:
				return basicSetRightFoot(null, msgs);
			case BodymodelPackage.BODY__LEFT_ANKLE:
				return basicSetLeftAnkle(null, msgs);
			case BodymodelPackage.BODY__RIGHT_ANKLE:
				return basicSetRightAnkle(null, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflectively reads the feature identified by {@code featureID};
	 * unknown features are delegated to the superclass.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case BodymodelPackage.BODY__HEAD:
				return getHead();
			case BodymodelPackage.BODY__CENTER_SHOULDER:
				return getCenterShoulder();
			case BodymodelPackage.BODY__LEFT_SHOULDER:
				return getLeftShoulder();
			case BodymodelPackage.BODY__RIGHT_SHOULDER:
				return getRightShoulder();
			case BodymodelPackage.BODY__LEFT_ELBOW:
				return getLeftElbow();
			case BodymodelPackage.BODY__RIGHT_ELBOW:
				return getRightElbow();
			case BodymodelPackage.BODY__LEFT_WRIST:
				return getLeftWrist();
			case BodymodelPackage.BODY__RIGHT_WRIST:
				return getRightWrist();
			case BodymodelPackage.BODY__LEFT_HAND:
				return getLeftHand();
			case BodymodelPackage.BODY__RIGHT_HAND:
				return getRightHand();
			case BodymodelPackage.BODY__SPINE:
				return getSpine();
			case BodymodelPackage.BODY__CENTER_HIP:
				return getCenterHip();
			case BodymodelPackage.BODY__LEFT_HIP:
				return getLeftHip();
			case BodymodelPackage.BODY__RIGHT_HIP:
				return getRightHip();
			case BodymodelPackage.BODY__LEFT_KNEE:
				return getLeftKnee();
			case BodymodelPackage.BODY__RIGHT_KNEE:
				return getRightKnee();
			case BodymodelPackage.BODY__LEFT_FOOT:
				return getLeftFoot();
			case BodymodelPackage.BODY__RIGHT_FOOT:
				return getRightFoot();
			case BodymodelPackage.BODY__LINKS:
				return getLinks();
			case BodymodelPackage.BODY__LEFT_ANKLE:
				return getLeftAnkle();
			case BodymodelPackage.BODY__RIGHT_ANKLE:
				return getRightAnkle();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflectively writes the feature identified by {@code featureID};
	 * the links list is cleared and re-populated rather than replaced.
	 * Unknown features are delegated to the superclass.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case BodymodelPackage.BODY__HEAD:
				setHead((Head)newValue);
				return;
			case BodymodelPackage.BODY__CENTER_SHOULDER:
				setCenterShoulder((CenterShoulder)newValue);
				return;
			case BodymodelPackage.BODY__LEFT_SHOULDER:
				setLeftShoulder((LeftShoulder)newValue);
				return;
			case BodymodelPackage.BODY__RIGHT_SHOULDER:
				setRightShoulder((RightShoulder)newValue);
				return;
			case BodymodelPackage.BODY__LEFT_ELBOW:
				setLeftElbow((LeftElbow)newValue);
				return;
			case BodymodelPackage.BODY__RIGHT_ELBOW:
				setRightElbow((RightElbow)newValue);
				return;
			case BodymodelPackage.BODY__LEFT_WRIST:
				setLeftWrist((LeftWrist)newValue);
				return;
			case BodymodelPackage.BODY__RIGHT_WRIST:
				setRightWrist((RightWrist)newValue);
				return;
			case BodymodelPackage.BODY__LEFT_HAND:
				setLeftHand((LeftHand)newValue);
				return;
			case BodymodelPackage.BODY__RIGHT_HAND:
				setRightHand((RightHand)newValue);
				return;
			case BodymodelPackage.BODY__SPINE:
				setSpine((Spine)newValue);
				return;
			case BodymodelPackage.BODY__CENTER_HIP:
				setCenterHip((CenterHip)newValue);
				return;
			case BodymodelPackage.BODY__LEFT_HIP:
				setLeftHip((LeftHip)newValue);
				return;
			case BodymodelPackage.BODY__RIGHT_HIP:
				setRightHip((RightHip)newValue);
				return;
			case BodymodelPackage.BODY__LEFT_KNEE:
				setLeftKnee((LeftKnee)newValue);
				return;
			case BodymodelPackage.BODY__RIGHT_KNEE:
				setRightKnee((RightKnee)newValue);
				return;
			case BodymodelPackage.BODY__LEFT_FOOT:
				setLeftFoot((LeftFoot)newValue);
				return;
			case BodymodelPackage.BODY__RIGHT_FOOT:
				setRightFoot((RightFoot)newValue);
				return;
			case BodymodelPackage.BODY__LINKS:
				getLinks().clear();
				getLinks().addAll((Collection<? extends HumanLink>)newValue);
				return;
			case BodymodelPackage.BODY__LEFT_ANKLE:
				setLeftAnkle((LeftAnkle)newValue);
				return;
			case BodymodelPackage.BODY__RIGHT_ANKLE:
				setRightAnkle((RightAnkle)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflectively restores the feature identified by {@code featureID} to
	 * its default value ({@code null} for joints, empty for the links list);
	 * unknown features are delegated to the superclass.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case BodymodelPackage.BODY__HEAD:
				setHead((Head)null);
				return;
			case BodymodelPackage.BODY__CENTER_SHOULDER:
				setCenterShoulder((CenterShoulder)null);
				return;
			case BodymodelPackage.BODY__LEFT_SHOULDER:
				setLeftShoulder((LeftShoulder)null);
				return;
			case BodymodelPackage.BODY__RIGHT_SHOULDER:
				setRightShoulder((RightShoulder)null);
				return;
			case BodymodelPackage.BODY__LEFT_ELBOW:
				setLeftElbow((LeftElbow)null);
				return;
			case BodymodelPackage.BODY__RIGHT_ELBOW:
				setRightElbow((RightElbow)null);
				return;
			case BodymodelPackage.BODY__LEFT_WRIST:
				setLeftWrist((LeftWrist)null);
				return;
			case BodymodelPackage.BODY__RIGHT_WRIST:
				setRightWrist((RightWrist)null);
				return;
			case BodymodelPackage.BODY__LEFT_HAND:
				setLeftHand((LeftHand)null);
				return;
			case BodymodelPackage.BODY__RIGHT_HAND:
				setRightHand((RightHand)null);
				return;
			case BodymodelPackage.BODY__SPINE:
				setSpine((Spine)null);
				return;
			case BodymodelPackage.BODY__CENTER_HIP:
				setCenterHip((CenterHip)null);
				return;
			case BodymodelPackage.BODY__LEFT_HIP:
				setLeftHip((LeftHip)null);
				return;
			case BodymodelPackage.BODY__RIGHT_HIP:
				setRightHip((RightHip)null);
				return;
			case BodymodelPackage.BODY__LEFT_KNEE:
				setLeftKnee((LeftKnee)null);
				return;
			case BodymodelPackage.BODY__RIGHT_KNEE:
				setRightKnee((RightKnee)null);
				return;
			case BodymodelPackage.BODY__LEFT_FOOT:
				setLeftFoot((LeftFoot)null);
				return;
			case BodymodelPackage.BODY__RIGHT_FOOT:
				setRightFoot((RightFoot)null);
				return;
			case BodymodelPackage.BODY__LINKS:
				getLinks().clear();
				return;
			case BodymodelPackage.BODY__LEFT_ANKLE:
				setLeftAnkle((LeftAnkle)null);
				return;
			case BodymodelPackage.BODY__RIGHT_ANKLE:
				setRightAnkle((RightAnkle)null);
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflectively reports whether the feature identified by {@code featureID}
	 * differs from its default value (non-null joint, non-empty links list);
	 * unknown features are delegated to the superclass.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case BodymodelPackage.BODY__HEAD:
				return head != null;
			case BodymodelPackage.BODY__CENTER_SHOULDER:
				return centerShoulder != null;
			case BodymodelPackage.BODY__LEFT_SHOULDER:
				return leftShoulder != null;
			case BodymodelPackage.BODY__RIGHT_SHOULDER:
				return rightShoulder != null;
			case BodymodelPackage.BODY__LEFT_ELBOW:
				return leftElbow != null;
			case BodymodelPackage.BODY__RIGHT_ELBOW:
				return rightElbow != null;
			case BodymodelPackage.BODY__LEFT_WRIST:
				return leftWrist != null;
			case BodymodelPackage.BODY__RIGHT_WRIST:
				return rightWrist != null;
			case BodymodelPackage.BODY__LEFT_HAND:
				return leftHand != null;
			case BodymodelPackage.BODY__RIGHT_HAND:
				return rightHand != null;
			case BodymodelPackage.BODY__SPINE:
				return spine != null;
			case BodymodelPackage.BODY__CENTER_HIP:
				return centerHip != null;
			case BodymodelPackage.BODY__LEFT_HIP:
				return leftHip != null;
			case BodymodelPackage.BODY__RIGHT_HIP:
				return rightHip != null;
			case BodymodelPackage.BODY__LEFT_KNEE:
				return leftKnee != null;
			case BodymodelPackage.BODY__RIGHT_KNEE:
				return rightKnee != null;
			case BodymodelPackage.BODY__LEFT_FOOT:
				return leftFoot != null;
			case BodymodelPackage.BODY__RIGHT_FOOT:
				return rightFoot != null;
			case BodymodelPackage.BODY__LINKS:
				return links != null && !links.isEmpty();
			case BodymodelPackage.BODY__LEFT_ANKLE:
				return leftAnkle != null;
			case BodymodelPackage.BODY__RIGHT_ANKLE:
				return rightAnkle != null;
		}
		return super.eIsSet(featureID);
	}
} //BodyImpl
// ==== end of generated BodyImpl source (license: apache-2.0) ====
// ==== source: christianherta/BBC-DaaS, path: bbcdaas_themehandlerweb/bbcdaas_themehandlerweb/src/main/java/de/bbcdaas/themehandlerweb/business/ThemeHandlerBusiness.java (13620 bytes) ====
package de.bbcdaas.themehandlerweb.business;
import de.bbcdaas.common.beans.Term;
import de.bbcdaas.common.beans.ThemeCloud;
import de.bbcdaas.common.beans.User;
import de.bbcdaas.common.dao.api.JavaPersistenceAPI;
import de.bbcdaas.common.util.FileReader;
import de.bbcdaas.themehandlerweb.beans.SessionContainer;
import de.bbcdaas.themehandlerweb.constants.Constants;
import de.bbcdaas.themehandlerweb.dao.impl.jpa.ThemeHandlerWebDaoImpl;
import de.bbcdaas.themehandlerweb.domains.UserEntity;
import de.bbcdaas.webservices.api.taghandler.TagHandlerWebserviceAPI;
import de.bbcdaas.webservices.api.taghandler.beans.SyntagCloudResult;
import de.bbcdaas.webservices.api.taghandler.beans.TermsResult;
import de.bbcdaas.webservices.api.themehandler.ThemeHandlerWebserviceAPI;
import de.bbcdaas.webservices.api.themehandler.beans.ThemeCloudResult;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.apache.commons.configuration.Configuration;
import org.apache.log4j.Logger;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
/**
* Business logic of the theme handler web application.
* @author Robert Illers
*/
public class ThemeHandlerBusiness {
private Logger logger = Logger.getLogger(this.getClass());
/**
* Return the current session.
* @return HttpSession
*/
public static HttpSession getSession() {
ServletRequestAttributes attr = (ServletRequestAttributes)RequestContextHolder.
currentRequestAttributes();
return attr.getRequest().getSession(true);
}
	/**
	 * Initializes the per-user session state after login: creates the
	 * {@link SessionContainer} if it is missing, loads the complete user
	 * list for administrators, reads the REST base URI from the web
	 * configuration and stores the container back into the HTTP session.
	 * @param request current HTTP request whose session gets initialized
	 * @param user authenticated principal (Spring Security user details)
	 */
	public void initUserSession(HttpServletRequest request, UserDetails user) {
		SessionContainer sessionContainer = (SessionContainer)request.getSession().getAttribute(Constants.KEY_SESSION_CONTAINER);
		if (sessionContainer == null) {
			sessionContainer = new SessionContainer();
		}
		// load user list (only users with ROLE_ADMIN get the administration data)
		for (GrantedAuthority authority :user.getAuthorities()) {
			if (authority.getAuthority().equals("ROLE_ADMIN")) {
				sessionContainer.setUserList(this.getAllUser());
			}
		}
		// load configuration (properties file shipped with the web application)
		Configuration config = new FileReader().readPropertiesConfig(new StringBuilder("/properties/").
			append(Constants.CONFIGURATION_FILE_NAME).toString(),
			FileReader.FILE_OPENING_TYPE.ABSOLUTE, FileReader.FILE_OPENING_TYPE.RELATIVE, true);
		String restBaseURI = config.getString(Constants.CONFIG_PARAM_REST_BASE_URI);
		sessionContainer.setRestBaseURI(restBaseURI);
		request.getSession().setAttribute(Constants.KEY_SESSION_CONTAINER, sessionContainer);
	}
/**
*
* @return
*/
private List<User> getAllUser() {
List<User> users = new ArrayList<User>();
ThemeHandlerWebDaoImpl dao = new ThemeHandlerWebDaoImpl();
dao.setJavaPersistenceAPI(new JavaPersistenceAPI("PU"));
List<UserEntity> userEntities = dao.getAllUser();
for (UserEntity userEntity : userEntities) {
User user = new User();
user.setName(userEntity.getLoginName());
user.setId(userEntity.getID());
user.setRole(userEntity.getUserRole());
users.add(user);
}
return users;
}
/**
*
* @param userName
* @param password
* @param role
*/
public User addUser(String userName, String password, int role) {
ThemeHandlerWebDaoImpl dao = new ThemeHandlerWebDaoImpl();
dao.setJavaPersistenceAPI(new JavaPersistenceAPI("PU"));
User user = null;
dao.insertUser(userName, password, role);
UserEntity userEntity = dao.getUserByName(userName);
if (userEntity != null) {
user = new User();
user.setId(userEntity.getID());
user.setName(userEntity.getLoginName());
user.setRole(userEntity.getUserRole());
}
return user;
}
/**
*
* @param userID
*/
public boolean deleteUser(int userID) {
ThemeHandlerWebDaoImpl dao = new ThemeHandlerWebDaoImpl();
dao.setJavaPersistenceAPI(new JavaPersistenceAPI("PU"));
return dao.deleteUser(userID);
}
/**
* Returns the current user object (spring security).
* @return UserDetails
*/
public UserDetails getUserDetails() {
SecurityContext context = SecurityContextHolder.getContext();
Object principal = context.getAuthentication().getPrincipal();
if (principal != null && principal instanceof UserDetails) {
return (UserDetails)principal;
}
return null;
}
	/**
	 * Prepares the theme cloud form or resets it if parameter clear is set.
	 * @param clear if true, the form will be reset to its default values
	 */
	public void initThemeCloudForm(boolean clear) {
		SessionContainer sessionContainer = (SessionContainer)getSession().getAttribute(Constants.KEY_SESSION_CONTAINER);
		if (sessionContainer == null) {
			sessionContainer = new SessionContainer();
		}
		if (clear) {
			// restore all tunable parameters to their defaults and drop cached results
			sessionContainer.getThemeCloudSessionBean().setA(Constants.DEFAULT_A);
			sessionContainer.getThemeCloudSessionBean().setB(Constants.DEFAULT_B);
			sessionContainer.getThemeCloudSessionBean().setMinSyntag(Constants.DEFAULT_MIN_TOP_TERM_SYNTAG);
			sessionContainer.getThemeCloudSessionBean().setSyntagmaticEntityTermFactor(Constants.DEFAULT_SYNTAGMATIC_ENTITY_TERM_FACTOR);
			sessionContainer.getThemeCloudSessionBean().getSyntagTerms().clear();
			sessionContainer.getThemeCloudSessionBean().getThemeCloud().clear();
			sessionContainer.getThemeCloudSessionBean().getColoredThemeCloud().clear();
			sessionContainer.getThemeCloudSessionBean().setThemeCloudName("");
			sessionContainer.getThemeCloudSessionBean().getThemeClouds().clear();
		}
		// (re)load the stored theme clouds lazily when the cache is empty
		if (sessionContainer.getThemeCloudSessionBean().getThemeClouds().isEmpty()) {
			sessionContainer.getThemeCloudSessionBean().setThemeClouds(this.getThemeClouds(sessionContainer.getRestBaseURI()));
		}
		getSession().setAttribute(Constants.KEY_SESSION_CONTAINER, sessionContainer);
	}
    /**
     * Fetches all theme clouds stored in the theme handler database via the
     * REST webservice.
     *
     * @param restBaseURI base URI of the REST webservice
     * @return the stored theme clouds, or an empty list when the service
     *         returned no result
     */
    public List<ThemeCloud> getThemeClouds(String restBaseURI) {
        ThemeHandlerWebserviceAPI webservices = new ThemeHandlerWebserviceAPI(restBaseURI);
        ThemeCloudResult result = webservices.getAllThemeClouds();
        if (result != null) {
            // Debug-log every cloud; assumes each cloud has a non-null user —
            // TODO confirm the webservice guarantees this (NPE risk otherwise).
            logger.debug("ThemeClouds:");
            for (ThemeCloud themeCloud : result.getThemeClouds()) {
                logger.debug("Name: "+themeCloud.getThemeCloudName()+", User: "+themeCloud.getUser().getName());
            }
            return result.getThemeClouds();
        }
        // Never return null — callers iterate the result directly.
        return new ArrayList<ThemeCloud>();
    }
/**
* Prepares the theme cloud form bys initializing current options and values.
* @param request
* @param sessionContainer
*/
public void getBasicParameter(HttpServletRequest request, SessionContainer sessionContainer) {
String themeCloudName = request.getParameter("themeCloudName");
if (themeCloudName == null) {
themeCloudName = "";
}
sessionContainer.getThemeCloudSessionBean().setThemeCloudName(themeCloudName);
String themeCloudCreatorActive = request.getParameter("themeCloudCreatorActive");
String themeCloudViewerActive = request.getParameter("themeCloudViewerActive");
String syntagCloudParameterVisible = request.getParameter("syntagCloudParameterVisible");
if (themeCloudCreatorActive != null) {
sessionContainer.getThemeCloudSessionBean().setThemeCloudCreatorActive(Integer.parseInt(themeCloudCreatorActive));
}
if (themeCloudViewerActive != null) {
sessionContainer.getThemeCloudSessionBean().setThemeCloudViewerActive(Integer.parseInt(themeCloudViewerActive));
}
if (syntagCloudParameterVisible != null) {
sessionContainer.getThemeCloudSessionBean().setSyntagCloudParameterVisible(Integer.parseInt(syntagCloudParameterVisible));
}
String minSyntag_String = request.getParameter("minSyntag");
String syntagmaticEntityTermFactor_String = request.getParameter("syntagmaticEntityTermFactor");
String a_String = request.getParameter("factor_a");
String b_String = request.getParameter("factor_b");
float minSyntag = Constants.DEFAULT_MIN_TOP_TERM_SYNTAG;
float syntagmaticEntityTermFactor = Constants.DEFAULT_SYNTAGMATIC_ENTITY_TERM_FACTOR;
float a = Constants.DEFAULT_A;
float b = Constants.DEFAULT_B;
if (minSyntag_String != null && !minSyntag_String.isEmpty()) {
try {
float f = Float.parseFloat(minSyntag_String);
minSyntag = f;
} catch(NumberFormatException e) {
logger.error("parameter minSyntag is no float, using default value.");
}
}
if (syntagmaticEntityTermFactor_String != null && !syntagmaticEntityTermFactor_String.isEmpty()) {
try {
float f = Float.parseFloat(syntagmaticEntityTermFactor_String);
syntagmaticEntityTermFactor = f;
} catch(NumberFormatException e) {
logger.error("parameter syntagmaticEntityTermFactor is no float, using default value.");
}
}
if (a_String != null && !a_String.isEmpty()) {
try {
float f = Float.parseFloat(a_String);
a = f;
} catch(NumberFormatException e) {
logger.error("parameter a is no float, using default value.");
}
}
if (b_String != null && !b_String.isEmpty()) {
try {
float f = Float.parseFloat(b_String);
b = f;
} catch(NumberFormatException e) {
logger.error("parameter b is no float, using default value.");
}
}
sessionContainer.getThemeCloudSessionBean().setMinSyntag(minSyntag);
sessionContainer.getThemeCloudSessionBean().setSyntagmaticEntityTermFactor(syntagmaticEntityTermFactor);
sessionContainer.getThemeCloudSessionBean().setA(a);
sessionContainer.getThemeCloudSessionBean().setB(b);
}
    /**
     * Sends a REST request to the taghandler service for a syntagmatic term
     * cloud that matches the theme cloud the user has built so far, and
     * replaces the session's syntag terms with the response.
     *
     * @param sessionContainer session state holding the current theme cloud
     * @param minSyntag minimum syntagmatic relation threshold
     * @param syntagmaticEntityTermFactor entity/term weighting factor
     * @param a tuning factor a
     * @param b tuning factor b
     */
    public void calcSyntagCloud(SessionContainer sessionContainer, Float minSyntag,
            Float syntagmaticEntityTermFactor, Float a, Float b) {
        TagHandlerWebserviceAPI webservices = new TagHandlerWebserviceAPI(sessionContainer.getRestBaseURI());
        StringBuilder themeCloudTermValues = new StringBuilder();
        int i = 0;
        // Build the comma-separated term-values request parameter from the
        // "white" theme cloud (presumably the user-visible term set — verify).
        for (Term term : sessionContainer.getThemeCloudSessionBean().getWhiteThemeCloud()) {
            if (i != 0) {
                themeCloudTermValues.append(",");
            }
            themeCloudTermValues.append(term.getValue());
            i++;
        }
        // Old suggestions are dropped before the new ones arrive.
        sessionContainer.getThemeCloudSessionBean().getSyntagTerms().clear();
        // call the webservice
        SyntagCloudResult result = webservices.getSyntagTermCloud(themeCloudTermValues.toString(),
                minSyntag.toString(), syntagmaticEntityTermFactor.toString(), a.toString(), b.toString());
        if (result != null) {
            // Add received terms to the syntag cloud, skipping any term that
            // is already part of the theme cloud itself.
            for (Term syntagTerm : result.getSyntagCloudTerms()) {
                if (!sessionContainer.getThemeCloudSessionBean().getThemeCloud().contains(syntagTerm)) {
                    sessionContainer.getThemeCloudSessionBean().getSyntagTerms().add(syntagTerm);
                }
            }
        }
    }
    /**
     * Adds terms (from the syntag cloud or the term input field) to the theme
     * cloud, but only terms that exist in the term lexicon. Afterwards a new
     * syntag cloud is computed when at least one term was submitted.
     *
     * @param sessionContainer session state holding the theme cloud
     * @param termsToAdd candidate terms to add
     * @param minSyntag minimum syntagmatic relation threshold
     * @param syntagmaticEntityTermFactor entity/term weighting factor
     * @param a tuning factor a
     * @param b tuning factor b
     */
    public void addTermToTermCloud(SessionContainer sessionContainer, List<Term> termsToAdd,
            Float minSyntag, Float syntagmaticEntityTermFactor, Float a, Float b) {
        TagHandlerWebserviceAPI webservices = new TagHandlerWebserviceAPI(sessionContainer.getRestBaseURI());
        // Resolve each not-yet-validated term against the lexicon webservice;
        // a found term gets its lexicon id assigned (which makes it valid).
        for (Term term : termsToAdd) {
            if (!term.isValid()) {
                TermsResult result = webservices.getTerm(term.getValue());
                if (result != null && !result.getTerms().isEmpty()) {
                    term.setId(result.getTerms().get(0).getId());
                }
            }
            // add only existing terms that are not already in theme cloud
            if (term.isValid() && !sessionContainer.getThemeCloudSessionBean().getThemeCloud().contains(term)) {
                sessionContainer.getThemeCloudSessionBean().getThemeCloud().add(term);
                sessionContainer.getThemeCloudSessionBean().rebuildColoredThemeCloud();
                logger.info("Term with value '"+term.getValue()+"' added to theme cloud.");
            } else {
                // NOTE(review): this branch is also reached when the term IS
                // valid but already present in the cloud, so the "No term
                // found" message can be misleading — confirm intended wording.
                logger.info("No term found for input value '"+term.getValue()+"'.");
            }
        }
        // compute new syntag cloud
        if (!termsToAdd.isEmpty()) {
            this.calcSyntagCloud(sessionContainer, minSyntag, syntagmaticEntityTermFactor, a, b);
        }
    }
/**
*
* @param request
* @param themeCloudName
* @param themeCloudTerms
* @param userID
*/
public void saveThemeCloud(String themeCloudName, List<Term> themeCloudTerms, String userName, String restBaseURI) {
ThemeHandlerWebserviceAPI services = new ThemeHandlerWebserviceAPI(restBaseURI);
User user = new User();
user.setName(userName);
ThemeCloud themeCloud = new ThemeCloud();
themeCloud.setThemeCloudName(themeCloudName);
themeCloud.setTerms(themeCloudTerms);
themeCloud.setUser(user);
services.saveNewThemeCloud(themeCloud);
}
/**
*
* @param request
* @param themeCloudID
* @return
*/
public boolean deleteThemeCloud(String themeCloudName, String restBaseURI) {
ThemeHandlerWebserviceAPI webservices = new ThemeHandlerWebserviceAPI(restBaseURI);
webservices.deleteThemeCloud(themeCloudName);
// TODO: use response from webservice to check if delete succeeded
return true;
}
}
| apache-2.0 |
ykai2/Shanbei | xListView/src/main/java/me/maxwin/view/IXListViewLoadMore.java | 92 | package me.maxwin.view;
/**
 * Callback fired by XListView when the user requests more items
 * (pull-up-to-load-more).
 */
public interface IXListViewLoadMore {
    // Interface methods are implicitly public; the redundant modifier is
    // omitted per Java convention.
    void onLoadMore();
}
| apache-2.0 |
wmedvede/drools | drools-examples/src/main/java/org/drools/examples/diagnostics/Question.java | 1536 | package org.drools.examples.diagnostics;
public class Question {
private String id;
private String text;
public Question(String id, String text) {
this.id = id;
this.text = text;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
// public void setBooleanAnswer(String booleanAnswer) {
// if ( booleanAnswer.trim().equals("y") ) {
// this.booleanAnswer = true;
// } else if ( booleanAnswer.trim().equals("y") ) {
// this.booleanAnswer = false;
// } else {
// throw new RuntimeException("String number be 'y' or 'c' it was " + booleanAnswer );
// }
// }
@Override
public String toString() {
return "Question{" +
"id=" + id +
", text='" + text + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) { return true; }
if (o == null || getClass() != o.getClass()) { return false; }
Question question = (Question) o;
if (!id.equals(question.id)) { return false; }
if (!text.equals(question.text)) { return false; }
return true;
}
@Override
public int hashCode() {
int result = id.hashCode();
result = 31 * result + text.hashCode();
return result;
}
}
| apache-2.0 |
cezarpirvu/PracticalTest02_2 | gen/ro/pub/cs/systems/pdsd/practicaltest02var04/R.java | 3549 | /* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* aapt tool from the resource data it found. It
* should not be modified by hand.
*/
package ro.pub.cs.systems.pdsd.practicaltest02var04;
// NOTE: aapt-generated resource index (see file header); values mirror the
// entries under res/ — regenerate via the build instead of editing by hand.
public final class R {
    /** Attribute resources (none declared). */
    public static final class attr {
    }
    /** Dimension resources from res/values/dimens.xml. */
    public static final class dimen {
        /**
         * Default screen margins, per the Android Design guidelines.
         * Example customization of dimensions originally defined in
         * res/values/dimens.xml (such as screen margins) for screens with more
         * than 820dp of available width; this would include 7" and 10" devices
         * in landscape (~960dp and ~1280dp respectively).
         */
        public static final int activity_horizontal_margin=0x7f040000;
        public static final int activity_vertical_margin=0x7f040001;
    }
    /** Drawable resources. */
    public static final class drawable {
        public static final int ic_launcher=0x7f020000;
    }
    /** View ids referenced from layouts. */
    public static final class id {
        public static final int action_settings=0x7f080008;
        public static final int client_address_edit_text=0x7f080002;
        public static final int client_port_edit_text=0x7f080003;
        public static final int connect_button=0x7f080001;
        public static final int get_page_content=0x7f080005;
        public static final int google_results_web_view=0x7f080007;
        public static final int page_content_text_view=0x7f080006;
        public static final int server_port_edit_text=0x7f080000;
        public static final int url_edit_text=0x7f080004;
    }
    /** Layout resources. */
    public static final class layout {
        public static final int activity_practical_test02_var04_main=0x7f030000;
    }
    /** Menu resources. */
    public static final class menu {
        public static final int practical_test02_var04_main=0x7f070000;
    }
    /** String resources. */
    public static final class string {
        public static final int action_settings=0x7f050002;
        public static final int app_name=0x7f050000;
        public static final int client=0x7f050006;
        public static final int client_address=0x7f050007;
        public static final int client_port=0x7f050008;
        public static final int connect=0x7f050005;
        public static final int get_page_content=0x7f05000a;
        public static final int hello_world=0x7f050001;
        public static final int server=0x7f050003;
        public static final int server_port=0x7f050004;
        public static final int url=0x7f050009;
    }
    /** Style resources. */
    public static final class style {
        /**
         * Base application theme, dependent on API level. This theme is
         * replaced by AppBaseTheme from res/values-vXX/styles.xml on newer
         * devices. Theme customizations available in newer API levels can go
         * in res/values-vXX/styles.xml, while customizations related to
         * backward-compatibility can go here. Base application theme for API
         * 11+ completely replaces AppBaseTheme from res/values/styles.xml on
         * API 11+ devices; the API 14+ variant replaces BOTH res/values and
         * res/values-v11 definitions on API 14+ devices.
         */
        public static final int AppBaseTheme=0x7f060000;
        /**
         * Application theme. All customizations that are NOT specific to a
         * particular API-level can go here.
         */
        public static final int AppTheme=0x7f060001;
    }
}
| apache-2.0 |
saltlab/Pangor | js-classify/src/ca/ubc/ece/salt/pangor/analysis/argumentorder/InsertedIfVisitor.java | 926 | package ca.ubc.ece.salt.pangor.analysis.argumentorder;
import java.util.HashSet;
import java.util.Set;
import org.mozilla.javascript.ast.AstNode;
import org.mozilla.javascript.ast.IfStatement;
import org.mozilla.javascript.ast.NodeVisitor;
import ca.ubc.ece.salt.gumtree.ast.ClassifiedASTNode.ChangeType;
/**
* Visitor for storing inserted if statements. The store nodes serve as start
* point for ArgumentOrderAnalysis.
*/
public class InsertedIfVisitor implements NodeVisitor {
public Set<IfStatement> storedNodes = new HashSet<>();
@Override
public boolean visit(AstNode node) {
/*
* If it is not an if statement, continue
*/
if (!(node instanceof IfStatement))
return true;
IfStatement statement = (IfStatement) node;
/*
* If the entire block was inserted, we store it
*/
if (statement.getChangeType() == ChangeType.INSERTED) {
storedNodes.add(statement);
}
return true;
}
} | apache-2.0 |
smulikHakipod/zb4osgi | zb4o-ha-driver/src/main/java/it/cnr/isti/zigbee/ha/cluster/impl/other_profiles/smart_energy/MessagingImpl.java | 2116 | /*
Copyright 2008-2013 CNR-ISTI, http://isti.cnr.it
Institute of Information Science and Technologies
of the Italian National Research Council
See the NOTICE file distributed with this work for additional
information regarding copyright ownership
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package it.cnr.isti.zigbee.ha.cluster.impl.other_profiles.smart_energy;
import it.cnr.isti.zigbee.api.ZigBeeDevice;
import it.cnr.isti.zigbee.ha.cluster.glue.other_profiles.smart_energy.Messaging;
import it.cnr.isti.zigbee.se.zcl.library.impl.MessagingCluster;
import it.cnr.isti.zigbee.zcl.library.api.core.Attribute;
import it.cnr.isti.zigbee.zcl.library.api.core.Subscription;
/**
*
* @author <a href="mailto:giancarlo.riolo@isti.cnr.it">Giancarlo Riolo</a>
* @version $LastChangedRevision: $ ($LastChangedDate: $)
*
*/
public class MessagingImpl implements Messaging {
private MessagingCluster messageCluster;
public MessagingImpl(ZigBeeDevice zbDevice) {
messageCluster = new MessagingCluster(zbDevice);
}
public int getId() {
return messageCluster.getId();
}
public String getName() {
return messageCluster.getName();
}
public Subscription[] getActiveSubscriptions() {
return messageCluster.getActiveSubscriptions();
}
public Attribute[] getAttributes() {
return messageCluster.getAvailableAttributes();
}
public Attribute getAttribute(int id) {
Attribute[] attributes = messageCluster.getAvailableAttributes();
for (int i = 0; i < attributes.length; i++) {
if (attributes[i].getId() == id)
return attributes[i];
}
return null;
}
} | apache-2.0 |
vipinraj/Spark | core/src/main/java/org/jivesoftware/spark/ui/ChatRoom.java | 44537 | /*
* Copyright (C) 2004-2011 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.spark.ui;
import org.jivesoftware.resource.Res;
import org.jivesoftware.resource.SparkRes;
import org.jivesoftware.smack.ConnectionListener;
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.StanzaListener;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.smack.packet.Stanza;
import org.jivesoftware.smack.roster.Roster;
import org.jivesoftware.smack.roster.RosterEntry;
import org.jivesoftware.smackx.chatstates.ChatState;
import org.jivesoftware.smackx.jiveproperties.packet.JivePropertiesExtension;
import org.jivesoftware.spark.ChatAreaSendField;
import org.jivesoftware.spark.SparkManager;
import org.jivesoftware.spark.component.BackgroundPanel;
import org.jivesoftware.spark.component.RolloverButton;
import org.jivesoftware.spark.plugin.ContextMenuListener;
import org.jivesoftware.spark.ui.rooms.GroupChatRoom;
import org.jivesoftware.spark.util.GraphicUtils;
import org.jivesoftware.spark.util.SwingWorker;
import org.jivesoftware.spark.util.TaskEngine;
import org.jivesoftware.spark.util.UIComponentRegistry;
import org.jivesoftware.spark.util.log.Log;
import org.jivesoftware.sparkimpl.settings.local.LocalPreferences;
import org.jivesoftware.sparkimpl.settings.local.SettingsManager;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.BadLocationException;
import javax.swing.text.Document;
import java.awt.*;
import java.awt.event.*;
import java.io.File;
import java.util.*;
import java.util.List;
/**
* The base implementation of all ChatRoom conversations. You would implement this class to have most types of Chat.
*/
public abstract class ChatRoom extends BackgroundPanel implements ActionListener, StanzaListener, DocumentListener, ConnectionListener, FocusListener, ContextMenuListener, ChatFrameToFrontListener {
private final JPanel chatPanel;
private final JSplitPane splitPane;
private JSplitPane verticalSplit;
private final JLabel notificationLabel;
private final TranscriptWindow transcriptWindow;
private final ChatAreaSendField chatAreaButton;
private final ChatToolBar toolbar;
private final JScrollPane textScroller;
private final JPanel bottomPanel;
private final JPanel editorWrapperBar;
private final JPanel editorBarRight;
private final JPanel editorBarLeft;
private JPanel chatWindowPanel;
private int unreadMessageCount;
private boolean mousePressed;
private List<ChatRoomClosingListener> closingListeners = new ArrayList<>();
private ChatRoomTransferHandler transferHandler;
private final List<String> packetIDList;
private final List<MessageListener> messageListeners;
private List<Message> transcript;
private List<FileDropListener> fileDropListeners;
private MouseAdapter transcriptWindowMouseListener;
private KeyAdapter chatEditorKeyListener;
private ChatFrame _chatFrame;
private RolloverButton _alwaysOnTopItem;
private boolean _isAlwaysOnTopActive;
// Chat state
private TimerTask typingTimerTask;
private long lastNotificationSentTime;
private ChatState lastNotificationSent;
private long pauseTimePeriod = 2000;
private long inactiveTimePeriod = 120000;
    /**
     * Initializes the base layout and base background color: builds the
     * transcript window, the input area with its send button and editor bars,
     * wires keyboard shortcuts (Enter/Ctrl-F4/Ctrl-Space), the always-on-top
     * toggle, drag-and-drop transfer handlers, and registers this room as a
     * connection/focus listener. Finally arms the chat-state timer.
     */
    protected ChatRoom() {
        // --- core UI components and listener collections ---
        chatPanel = new JPanel(new GridBagLayout());
        transcriptWindow = UIComponentRegistry.createTranscriptWindow();
        splitPane = new JSplitPane();
        packetIDList = new ArrayList<>();
        notificationLabel = new JLabel();
        toolbar = new ChatToolBar();
        bottomPanel = new JPanel();
        messageListeners = new ArrayList<>();
        transcript = new ArrayList<>();
        editorWrapperBar = new JPanel(new BorderLayout());
        editorBarLeft = new JPanel(new FlowLayout(FlowLayout.LEFT, 1, 1));
        editorBarRight = new JPanel(new FlowLayout(FlowLayout.RIGHT, 1, 1));
        editorWrapperBar.add(editorBarLeft, BorderLayout.WEST);
        editorWrapperBar.add(editorBarRight, BorderLayout.EAST);
        fileDropListeners = new ArrayList<>();
        // Clicking the transcript returns focus to the input editor, unless
        // the user is selecting text (mousePressed guards scrollToBottom).
        transcriptWindowMouseListener = new MouseAdapter() {
            public void mouseClicked(MouseEvent e) {
                if(e.getClickCount()!=2){
                    getChatInputEditor().requestFocus();
                }
            }
            public void mouseReleased(MouseEvent e) {
                mousePressed = false;
                if (transcriptWindow.getSelectedText() == null) {
                    getChatInputEditor().requestFocus();
                }
            }
            public void mousePressed(MouseEvent e) {
                mousePressed = true;
            }
        };
        transcriptWindow.addMouseListener(transcriptWindowMouseListener);
        chatAreaButton = new ChatAreaSendField(Res.getString("button.send"));
        textScroller = new JScrollPane(transcriptWindow);
        textScroller.setBackground(transcriptWindow.getBackground());
        textScroller.getViewport().setBackground(Color.white);
        transcriptWindow.setBackground(Color.white);
        getChatInputEditor().setSelectedTextColor((Color)UIManager.get("ChatInput.SelectedTextColor"));
        getChatInputEditor().setSelectionColor((Color)UIManager.get("ChatInput.SelectionColor"));
        setLayout(new GridBagLayout());
        // Remove default beveled borders.
        splitPane.setBorder(null);
        splitPane.setOneTouchExpandable(false);
        // Vertical split: transcript on top, input area below.
        verticalSplit = new JSplitPane(JSplitPane.VERTICAL_SPLIT);
        add(verticalSplit, new GridBagConstraints(0, 1, 1, 1, 1.0, 1.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));
        verticalSplit.setBorder(null);
        verticalSplit.setOneTouchExpandable(false);
        verticalSplit.setTopComponent(splitPane);
        textScroller.setAutoscrolls(true);
        // For the first 5*150ms we wait for the transcript to load; whenever
        // the scrollbar's maximum changes during that window, keep snapping
        // the view to the bottom.
        textScroller.getVerticalScrollBar().addAdjustmentListener(new AdjustmentListener() {
            private boolean scrollAtStart = false;
            @Override
            public void adjustmentValueChanged(AdjustmentEvent e) {
                if (!scrollAtStart) {
                    scrollAtStart = true;
                    SwingWorker thread = new SwingWorker() {
                        @Override
                        public Object construct() {
                            int start = textScroller.getVerticalScrollBar().getMaximum();
                            int second;
                            int i = 0;
                            do {
                                try {
                                    Thread.sleep(150);
                                    second = textScroller.getVerticalScrollBar().getMaximum();
                                    if (start == second) {
                                        ++i;
                                    } else {
                                        scrollToBottom();
                                        getTranscriptWindow().repaint();
                                    }
                                    start = second;
                                } catch (InterruptedException e) {
                                    e.printStackTrace();
                                }
                            } while (i < 5);
                            return null;
                        }
                    };
                    thread.start();
                }
            }
        });
        // Speed up scrolling. It was way too slow.
        textScroller.getVerticalScrollBar().setBlockIncrement(200);
        textScroller.getVerticalScrollBar().setUnitIncrement(20);
        chatWindowPanel = new JPanel();
        chatWindowPanel.setLayout(new GridBagLayout());
        chatWindowPanel.add(textScroller, new GridBagConstraints(0, 10, 1, 1, 1.0, 1.0, GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));
        chatWindowPanel.setOpaque(false);
        // --- layout of chat panel, bottom (input) panel and split panes ---
        chatPanel.add(chatWindowPanel, new GridBagConstraints(0, 1, 1, 1, 1.0, 1.0, GridBagConstraints.CENTER,
                GridBagConstraints.BOTH, getChatPanelInsets(), 0, 0));
        // Add Chat Panel to Split Pane
        splitPane.setLeftComponent(chatPanel);
        // Add edit buttons to Chat Room
        editorBarLeft.setOpaque(false);
        chatPanel.setOpaque(false);
        bottomPanel.setOpaque(false);
        splitPane.setOpaque(false);
        bottomPanel.setLayout(new GridBagLayout());
        bottomPanel.add(chatAreaButton, new GridBagConstraints(0, 1, 5, 1, 1.0, 1.0, GridBagConstraints.WEST,
                GridBagConstraints.BOTH, getChatAreaInsets(), 0, 35));
        bottomPanel.add(editorWrapperBar, new GridBagConstraints(0, 0, 5, 1, 0.0, 0.0, GridBagConstraints.WEST,
                GridBagConstraints.HORIZONTAL, getEditorWrapperInsets(), 0, 0));
        // Set bottom panel border
        bottomPanel.setBorder(BorderFactory.createMatteBorder(0, 0, 1, 0, new Color(197, 213, 230)));
        verticalSplit.setOpaque(false);
        verticalSplit.setBottomComponent(bottomPanel);
        verticalSplit.setResizeWeight(1.0);
        verticalSplit.setDividerSize(2);
        // --- input wiring: send button, document listener, key bindings ---
        chatAreaButton.getButton().addActionListener(this);
        // Document listener enables/disables the send button as text changes.
        getChatInputEditor().getDocument().addDocumentListener(this);
        // Key listener forwards key presses for Enter handling.
        chatEditorKeyListener = new KeyAdapter() {
            public void keyPressed(KeyEvent e) {
                checkForEnter(e);
            }
        };
        getChatInputEditor().addKeyListener(chatEditorKeyListener);
        // Ctrl-F4 closes the room.
        getChatInputEditor().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(KeyStroke.getKeyStroke("ctrl F4"), "closeTheRoom");
        getChatInputEditor().getActionMap().put("closeTheRoom", new AbstractAction("closeTheRoom") {
            private static final long serialVersionUID = 1L;
            public void actionPerformed(ActionEvent evt) {
                // Leave this chat.
                closeChatRoom();
            }
        });
        // Ctrl-Space triggers nickname completion.
        getChatInputEditor().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(KeyStroke.getKeyStroke("ctrl SPACE"), "handleCompletion");
        getChatInputEditor().getActionMap().put("handleCompletion", new AbstractAction("handleCompletion") {
            private static final long serialVersionUID = 1L;
            public void actionPerformed(ActionEvent evt) {
                // handle name completion.
                try {
                    handleNickNameCompletion();
                } catch (ChatRoomNotFoundException e) {
                    Log.error("ctlr-space nickname find", e);
                }
            }
        });
        // Always-on-top toggle button; persists the choice in preferences.
        _isAlwaysOnTopActive = SettingsManager.getLocalPreferences().isChatWindowAlwaysOnTop();
        _alwaysOnTopItem = UIComponentRegistry.getButtonFactory().createAlwaysOnTop(_isAlwaysOnTopActive);
        _alwaysOnTopItem.addActionListener( actionEvent -> {
            if (!_isAlwaysOnTopActive)
            {
                SettingsManager.getLocalPreferences().setChatWindowAlwaysOnTop(true);
                _chatFrame.setWindowAlwaysOnTop(true);
                _isAlwaysOnTopActive = true;
                _alwaysOnTopItem.setIcon(SparkRes.getImageIcon("FRAME_ALWAYS_ON_TOP_ACTIVE"));
            }
            else
            {
                SettingsManager.getLocalPreferences().setChatWindowAlwaysOnTop(false);
                _chatFrame.setWindowAlwaysOnTop(false);
                _isAlwaysOnTopActive = false;
                _alwaysOnTopItem.setIcon(SparkRes.getImageIcon("FRAME_ALWAYS_ON_TOP_DEACTIVE"));
            }
        } );
        editorBarRight.add(_alwaysOnTopItem);
        // Initially, set the right pane to null to keep it empty.
        getSplitPane().setRightComponent(null);
        notificationLabel.setIcon(SparkRes.getImageIcon(SparkRes.BLANK_IMAGE));
        getTranscriptWindow().addContextMenuListener(this);
        // Drag-and-drop support on both transcript and input editor.
        transferHandler = new ChatRoomTransferHandler(this);
        getTranscriptWindow().setTransferHandler(transferHandler);
        getChatInputEditor().setTransferHandler(transferHandler);
        addToolbar();
        // Add Connection Listener
        SparkManager.getConnection().addConnectionListener(this);
        // Add Focus Listener
        addFocusListener(this);
        // Start in the 'active' chat state and arm the pause/inactive timer.
        setChatState( ChatState.active );
        createChatStateTimerTask();
        scrollToBottom();
    }
    /**
     * Creates and schedules the periodic task that downgrades the chat state
     * over time: 'composing' becomes 'paused' after {@code pauseTimePeriod}
     * (2s) of silence, and 'paused'/'active' become 'inactive' after
     * {@code inactiveTimePeriod} (2min). Runs every pauseTimePeriod/2 ms.
     */
    protected void createChatStateTimerTask() {
        typingTimerTask = new TimerTask() {
            public void run() {
                // Time elapsed since the last notification was sent to peers.
                final long lastUpdate = System.currentTimeMillis() - lastNotificationSentTime;
                switch ( lastNotificationSent ) {
                    case paused:
                    case active:
                        if ( lastUpdate > inactiveTimePeriod ) {
                            setChatState( ChatState.inactive );
                        }
                        break;
                    case composing:
                        if ( lastUpdate > pauseTimePeriod ) {
                            setChatState( ChatState.paused );
                        }
                        break;
                }
            }
        };
        TaskEngine.getInstance().scheduleAtFixedRate(typingTimerTask, pauseTimePeriod /2, pauseTimePeriod / 2);
    }
/**
* Sends a chat state to all peers.
*
* @param state the chat state.
*/
protected abstract void sendChatState( ChatState state ) throws SmackException.NotConnectedException;
    /**
     * Sets the chat state, causing an update to be sent to all peers if the
     * new state warrants an update. Send failures are logged, but the local
     * bookkeeping is still updated.
     *
     * @param state the chat state (never null).
     * @throws IllegalArgumentException if {@code state} is null
     */
    public final void setChatState(ChatState state)
    {
        if ( state == null ) {
            throw new IllegalArgumentException( "Argument 'state' cannot be null." );
        }
        // Only send out a chat state notification when it is different from the last one that was transmitted...
        final boolean isDifferentState = lastNotificationSent != state;
        // ... unless it's 'composing' - that may be repeated every 2 seconds.
        final boolean isStillComposing = state == ChatState.composing && System.currentTimeMillis() - lastNotificationSentTime > 2000;
        final long now = System.currentTimeMillis();
        if ( isDifferentState || isStillComposing )
        {
            try
            {
                sendChatState( state );
            } catch ( SmackException.NotConnectedException e ) {
                Log.warning( "Unable to update the chat state to " + state, e );
            }
            // Record what/when we sent so the timer task can downgrade later.
            lastNotificationSent = state;
            lastNotificationSentTime = now;
        }
    }
    /**
     * Handles the nickname-completion dialog (Ctrl+Space): completes the last
     * word of the input editor against either the current group chat's
     * participants or, for one-to-one chats, the roster. A single match is
     * inserted directly; multiple matches are offered in a popup menu.
     *
     * @throws ChatRoomNotFoundException
     *             when for some reason the GroupChatRoom cannot be found; this
     *             should <u>not</u> happen, since we retrieve it from the
     *             active window tab, and thus <u>can be ignored</u>
     */
    private void handleNickNameCompletion() throws ChatRoomNotFoundException
    {
        // The "needle" is the text after the last space in the input editor.
        final String text = getChatInputEditor().getText();
        if ( text == null || text.isEmpty() )
        {
            return;
        }
        final int lastSpaceCharacterIndex = text.lastIndexOf( ' ' ); // -1 when space does not occur.
        final String needle = text.substring( lastSpaceCharacterIndex + 1 );
        // Case-insensitive, sorted, de-duplicated candidate set.
        final Set<String> matches = new TreeSet<>( String::compareToIgnoreCase );
        if ( SparkManager.getChatManager().getChatContainer().getActiveChatRoom() instanceof GroupChatRoom )
        {
            // Group chat: complete against the room occupants' nicknames
            // (the resource part of each participant JID).
            final GroupChatRoom activeChatRoom = (GroupChatRoom) SparkManager.getChatManager().getChatContainer().getActiveChatRoom();
            for ( String participant : activeChatRoom.getParticipants() )
            {
                final String nickname = participant.substring( participant.lastIndexOf( "/" ) + 1 );
                if ( nickname.toLowerCase().startsWith( needle.toLowerCase() ) )
                {
                    matches.add( nickname );
                }
            }
        }
        else
        {
            for ( RosterEntry re : Roster.getInstanceFor( SparkManager.getConnection() ).getEntries() )
            {
                // Use the name if available, otherwise the localpart of the JID.
                final String username;
                if ( re.getName() != null )
                {
                    username = re.getName();
                }
                else
                {
                    username = re.getUser().substring( 0, re.getUser().indexOf( '@' ) );
                }
                if ( username.toLowerCase().startsWith( needle.toLowerCase() ) )
                {
                    matches.add( username );
                }
            }
        }
        if ( matches.size() == 1 )
        {
            // If we only have 1 match, that match can be used immediately.
            // Append only the remainder that the user has not typed yet.
            getChatInputEditor().appendText( matches.iterator().next().substring( needle.length() ) );
        }
        else
        {
            // More than one match: create a popup menu and let the user select one.
            final JPopupMenu popup = new JPopupMenu();
            for ( final String match : matches )
            {
                final JMenuItem menuItem = new JMenuItem( match );
                popup.add( menuItem );
                menuItem.addActionListener( new AbstractAction()
                {
                    @Override
                    public void actionPerformed( ActionEvent e )
                    {
                        getChatInputEditor().appendText( match.substring( needle.length() ) );
                        popup.setVisible( false );
                    }
                } );
            }
            // Anchor the popup near the caret, above the container's bottom edge.
            popup.show( SparkManager.getChatManager().getChatContainer(),
                    getChatInputEditor().getCaret().getMagicCaretPosition().x,
                    SparkManager.getChatManager().getChatContainer().getHeight() - 20 );
        }
    }
    // I would normally use the command pattern, but
    // have no real use when dealing with just a couple options.
    /**
     * Invoked when the send button is pressed: sends the current message,
     * then clears the input editor and disables the send button until the
     * user types again.
     */
    public void actionPerformed(ActionEvent e) {
        sendMessage();
        // Clear send field and disable send button
        getChatInputEditor().clear();
        chatAreaButton.getButton().setEnabled(false);
    }
/**
* Creates and sends a message object from the text in
* the Send Field, using the default nickname specified in your
* Chat Preferences.
*/
protected abstract void sendMessage();
/**
* Creates a Message object from the given text and delegates to the room
* for sending.
*
* @param text the text to send.
*/
protected abstract void sendMessage(String text);
/**
* Sends the current message.
*
* @param message - the message to send.
*/
public abstract void sendMessage(Message message);
/**
* Returns the nickname of the current agent as specified in Chat
* Preferences.
*
* @return the nickname of the agent.
*/
public String getNickname() {
LocalPreferences pref = SettingsManager.getLocalPreferences();
return pref.getNickname();
}
    /**
     * The main entry point when receiving any message: runs the incoming
     * message through the chat manager's filters and global listeners, appends
     * it to the transcript, notifies this room's message listeners, and
     * persists the updated transcript.
     *
     * @param message - the message received.
     */
    public void insertMessage(Message message) {
        // Fire Message Filters
        SparkManager.getChatManager().filterIncomingMessage(this, message);
        SparkManager.getChatManager().fireGlobalMessageReceievedListeners(this, message);
        addToTranscript(message, true);
        fireMessageReceived(message);
        SparkManager.getWorkspace().getTranscriptPlugin().persistChatRoom(this);
    }
    /**
     * Adds a message to the current discussion's transcript list and, when
     * requested, refreshes the "last message received" notification label.
     *
     * @param message the message to add to the transcript list
     * @param updateDate true if you wish the date label to be updated with the
     *                   date and time the message was received.
     */
    public void addToTranscript(Message message, boolean updateDate) {
        // Create a detached copy of the message to persist.
        final Message newMessage = new Message();
        newMessage.setTo(message.getTo());
        newMessage.setFrom(message.getFrom());
        newMessage.setBody(message.getBody());
        // NOTE(review): the stored "date" is the time of this call, not any
        // timestamp carried by the original message — confirm this is intended
        // (delayed-delivery messages would get the wrong time).
        final Map<String, Object> properties = new HashMap<>();
        properties.put( "date", new Date() );
        newMessage.addExtension( new JivePropertiesExtension( properties ) );
        transcript.add(newMessage);
        // Add current date if this is the current agent
        if (updateDate && transcriptWindow.getLastUpdated() != null) {
            // Set new label date
            notificationLabel.setIcon(SparkRes.getImageIcon(SparkRes.SMALL_ABOUT_IMAGE));
            notificationLabel.setText(Res.getString("message.last.message.received", SparkManager.DATE_SECOND_FORMATTER.format(transcriptWindow.getLastUpdated())));
        }
        scrollToBottom();
    }
/**
 * Adds a new message to the transcript history.
 *
 * @param to   who the message is to.
 * @param from who the message was from.
 * @param body the body of the message.
 * @param date when the message was received; when {@code null}, the
 *             current time is used instead.
 */
public void addToTranscript(String to, String from, String body, Date date) {
    final Message newMessage = new Message();
    newMessage.setTo(to);
    newMessage.setFrom(from);
    newMessage.setBody(body);
    final Map<String, Object> properties = new HashMap<>();
    // Bug fix: the 'date' argument was previously ignored and the current
    // time was always stored. Honor the caller-supplied timestamp when
    // present, falling back to "now" for null.
    properties.put( "date", date != null ? date : new Date() );
    newMessage.addExtension( new JivePropertiesExtension( properties ) );
    transcript.add(newMessage);
}
/**
 * Scrolls the chat window to the bottom, unless the mouse button is
 * currently held down (e.g. the user is selecting text), in which case
 * the viewport is left where it is.
 */
public void scrollToBottom() {
if (mousePressed) {
return;
}
// Move the caret to the end of the transcript document.
int lengthOfChat = transcriptWindow.getDocument().getLength();
transcriptWindow.setCaretPosition(lengthOfChat);
try {
final JScrollBar scrollBar = textScroller.getVerticalScrollBar();
// Defer the scroll to the EDT so it runs after any pending layout work.
EventQueue.invokeLater( () -> scrollBar.setValue(scrollBar.getMaximum()) );
}
catch (Exception e) {
Log.error(e);
}
}
/**
 * Checks to see if the Send button should be enabled.
 *
 * @param e - the document event to react to.
 */
protected void checkForText(DocumentEvent e) {
    // The send button is enabled exactly when the input document is non-empty.
    final boolean hasText = e.getDocument().getLength() > 0;
    chatAreaButton.getButton().setEnabled(hasText);
}
/**
* Requests valid focus to the SendField.
*/
public void positionCursor() {
getChatInputEditor().setCaretPosition(getChatInputEditor().getCaretPosition());
chatAreaButton.getChatInputArea().requestFocusInWindow();
}
/**
* Disable the chat room. This is called when a chat has been either transfered over or
* the customer has left the chat room.
*/
public abstract void leaveChatRoom();
/**
* Process incoming packets.
*
* @param stanza - the packet to process
*/
public void processPacket(Stanza stanza) {
}
/**
* Returns the SendField component.
*
* @return the SendField ChatSendField.
*/
public ChatInputEditor getChatInputEditor() {
return chatAreaButton.getChatInputArea();
}
/**
* Returns the chatWindow components.
*
* @return the ChatWindow component.
*/
public TranscriptWindow getTranscriptWindow() {
return transcriptWindow;
}
/**
* Checks to see if enter was pressed and validates room.
*
* @param e the KeyEvent
*/
private void checkForEnter(KeyEvent e) {
final KeyStroke keyStroke = KeyStroke.getKeyStroke(e.getKeyCode(), e.getModifiers());
if (!keyStroke.equals(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, KeyEvent.SHIFT_DOWN_MASK)) &&
e.getKeyChar() == KeyEvent.VK_ENTER) {
e.consume();
sendMessage();
getChatInputEditor().setText("");
getChatInputEditor().setCaretPosition(0);
SparkManager.getWorkspace().getTranscriptPlugin().persistChatRoom(this);
}
else if (keyStroke.equals(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, KeyEvent.SHIFT_DOWN_MASK))) {
final Document document = getChatInputEditor().getDocument();
try {
document.insertString(getChatInputEditor().getCaretPosition(), "\n", null);
getChatInputEditor().requestFocusInWindow();
chatAreaButton.getButton().setEnabled(true);
}
catch (BadLocationException badLoc) {
Log.error("Error when checking for enter:", badLoc);
}
}
}
/**
* Add a {@link MessageListener} to the current ChatRoom.
*
* @param listener - the MessageListener to add to the current ChatRoom.
*/
public void addMessageListener(MessageListener listener) {
messageListeners.add(listener);
}
/**
* Remove the specified {@link MessageListener } from the current ChatRoom.
*
* @param listener - the MessageListener to remove from the current ChatRoom.
*/
public void removeMessageListener(MessageListener listener) {
messageListeners.remove(listener);
}
/**
 * Notifies all registered {@link MessageListener}s that a message has been
 * received. A listener that throws is logged and skipped so the remaining
 * listeners are still notified.
 *
 * @param message the message received.
 */
private void fireMessageReceived( Message message )
{
for ( MessageListener listener : messageListeners )
{
try
{
listener.messageReceived( this, message );
}
catch ( Exception e )
{
Log.error( "A MessageListener (" + listener + ") threw an exception while processing a 'message received' event for message: " + message, e );
}
}
}
/**
* Notifies all <code>MessageListener</code> that a message has been sent.
*
* @param message the message sent.
*/
protected void fireMessageSent( Message message )
{
for ( MessageListener listener : messageListeners )
{
try
{
listener.messageSent( this, message );
}
catch ( Exception e )
{
Log.error( "A MessageListener (" + listener + ") threw an exception while processing a 'message sent' event for message: " + message, e );
}
}
}
/**
* Returns a map of the current Chat Transcript which is a list of all
* ChatResponses and their order. You should retrieve this map to get
* any current chat transcript state.
*
* @return - the map of current chat responses.
*/
public List<Message> getTranscripts() {
return transcript;
}
/**
 * Disables the ChatRoom toolbar.
 */
public void disableToolbar() {
    setToolbarButtonsEnabled(false);
}

/**
 * Enable the ChatRoom toolbar.
 */
public void enableToolbar() {
    setToolbarButtonsEnabled(true);
}

/**
 * Enables or disables every RolloverButton on the left editor bar. Shared
 * by {@link #enableToolbar()} and {@link #disableToolbar()} so the
 * component-scan loop is not duplicated.
 *
 * @param enabled true to enable the toolbar buttons, false to disable them.
 */
private void setToolbarButtonsEnabled(boolean enabled) {
    final int count = editorBarLeft.getComponentCount();
    for (int i = 0; i < count; i++) {
        final Object o = editorBarLeft.getComponent(i);
        if (o instanceof RolloverButton) {
            ((RolloverButton) o).setEnabled(enabled);
        }
    }
}
/**
* Checks to see if the Send Button should be enabled depending on the
* current update in SendField.
*
* @param event the DocumentEvent from the sendField.
*/
public void removeUpdate(DocumentEvent event) {
checkForText(event);
}
/**
* Checks to see if the Send button should be enabled.
*
* @param docEvent the document event.
*/
public void changedUpdate(DocumentEvent docEvent) {
// Do nothing.
}
/**
* Return the splitpane used in this chat room.
*
* @return the splitpane used in this chat room.
*/
public JSplitPane getSplitPane() {
return splitPane;
}
/**
* Returns the ChatPanel that contains the ChatWindow and SendField.
*
* @return the ChatPanel.
*/
public JPanel getChatPanel() {
return chatPanel;
}
/**
* Close the ChatRoom.
*/
public void closeChatRoom() {
fireClosingListeners();
setChatState(ChatState.gone);
if (typingTimerTask != null) {
TaskEngine.getInstance().cancelScheduledTask(typingTimerTask);
typingTimerTask = null;
}
getTranscriptWindow().removeContextMenuListener(this);
getTranscriptWindow().removeMouseListener(transcriptWindowMouseListener);
getChatInputEditor().removeKeyListener(chatEditorKeyListener);
this.removeAll();
textScroller.getViewport().remove(transcriptWindow);
// Remove Connection Listener
SparkManager.getConnection().removeConnectionListener(this);
getTranscriptWindow().setTransferHandler(null);
getChatInputEditor().setTransferHandler(null);
transferHandler = null;
packetIDList.clear();
messageListeners.clear();
fileDropListeners.clear();
getChatInputEditor().close();
getChatInputEditor().getActionMap().remove("closeTheRoom");
chatAreaButton.getButton().removeActionListener(this);
bottomPanel.remove(chatAreaButton);
_chatFrame.removeWindowToFrontListener(this);
}
/**
* Get the <code>Icon</code> to be used in the tab holding
* this ChatRoom.
*
* @return - <code>Icon</code> to use
*/
public abstract Icon getTabIcon();
/**
* Get the roomname to use for this ChatRoom. This is expected to be a bare jid.
*
* @return - the Roomname of this ChatRoom.
*/
public abstract String getRoomname();
/**
* Get the title to use in the tab holding this ChatRoom.
*
* @return - the title to use.
*/
public abstract String getTabTitle();
/**
* Returns the title of this room to use. The title
* will be used in the title bar of the ChatRoom.
*
* @return - the title of this ChatRoom.
*/
public abstract String getRoomTitle();
/**
* Returns the <code>Message.Type</code> specific to this
* chat room.
* GroupChat is Message.Type.groupchat
* Normal Chat is Message.TYPE.NORMAL
*
* @return the ChatRooms Message.TYPE
*/
public abstract Message.Type getChatType();
/**
* Returns whether or not this ChatRoom is active. Note: carrying
* a conversation rather than being disabled, as it would be
* transcript mode.
*
* @return true if the chat room is active.
*/
public abstract boolean isActive();
/**
* Returns the notification label. The notification label notifies the
* user of chat room activity, such as the date of the last message
* and typing notifications.
*
* @return the notification label.
*/
public JLabel getNotificationLabel() {
return notificationLabel;
}
/**
* Adds a packetID to the packedIDList. The packetIDLlist
* keeps track of all messages coming into the chatroom.
*
* @param packetID the packetID to add.
*/
public void addPacketID(String packetID) {
packetIDList.add(packetID);
}
/**
* Checks if the packetID has already been used.
*
* @param packetID the packetID to check for.
* @return true if the packetID already exists.
*/
public boolean packetIDExists(String packetID) {
return packetIDList.contains(packetID);
}
/**
* Returns this instance of the chatroom.
*
* @return the current ChatRoom instance.
*/
public ChatRoom getChatRoom() {
return this;
}
/**
* Returns the toolbar used on top of the chat room.
*
* @return the toolbar used on top of this chat room.
*/
public ChatToolBar getToolBar() {
return toolbar;
}
protected void addToolbar() {
add(toolbar, new GridBagConstraints(0, 0, 1, 1, 1.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
}
public void insertUpdate(DocumentEvent e) {
// Meant to be overriden
checkForText(e);
setChatState( ChatState.composing );
}
/**
* Override to save transcript in preferred room style.
*/
public void saveTranscript() {
getTranscriptWindow().saveTranscript(getTabTitle() + ".html", getTranscripts(), null);
}
/**
 * Used for the top toolbar.
 */
public class ChatToolBar extends JPanel {
    private static final long serialVersionUID = 5926527530611601841L;

    // Holds the ChatRoomButtons, laid out left to right.
    private final JPanel buttonPanel;

    /**
     * Default Constructor.
     */
    public ChatToolBar() {
        buttonPanel = new JPanel();
        buttonPanel.setLayout(new FlowLayout(FlowLayout.LEFT, 2, 0));
        // Set Layout
        setLayout(new GridBagLayout());
        buttonPanel.setOpaque(false);
        add(buttonPanel, new GridBagConstraints(1, 1, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHEAST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
        setOpaque(false);
    }

    /**
     * Adds a new ChatRoomButton to the CommandBar and resizes all buttons
     * on the bar so they share the same dimensions.
     *
     * @param button the button.
     */
    public void addChatRoomButton(ChatRoomButton button) {
        buttonPanel.add(button);
        // Make all JButtons the same size. Container.getComponents() never
        // returns null and holds no null elements, so the former null check
        // and per-element catch of NullPointerException were dead code.
        final List<Component> buttons = new ArrayList<>();
        for (Component component : buttonPanel.getComponents()) {
            if (component instanceof JButton) {
                buttons.add(component);
            }
        }
        GraphicUtils.makeSameSize(buttons.toArray(new JComponent[buttons.size()]));
    }

    /**
     * Removes the ChatRoomButton from the CommandBar.
     *
     * @param button the button.
     */
    public void removeChatRoomButton(ChatRoomButton button) {
        buttonPanel.remove(button);
    }
}
/**
* Returns the number of unread messages in this ChatRoom.
*
* @return the number of unread messages.
*/
public int getUnreadMessageCount() {
return unreadMessageCount;
}
/**
* Increases the number of unread messages by 1.
*/
public void increaseUnreadMessageCount() {
unreadMessageCount++;
}
/**
* Resets the number of unread messages.
*/
public void clearUnreadMessageCount() {
unreadMessageCount = 0;
}
/**
* Returns the bottom panel used in the ChatRoom.
*
* @return the bottomPane;
*/
public JPanel getBottomPanel() {
return bottomPanel;
}
/**
* Returns the Container which holds the ChatWindow.
*
* @return the Container.
*/
public JPanel getChatWindowPanel() {
return chatWindowPanel;
}
/**
* Adds a new <code>FileDropListener</code> to allow for Drag and Drop notifications
* of objects onto the ChatWindow.
*
* @param listener the listener.
*/
public void addFileDropListener(FileDropListener listener) {
fileDropListeners.add(listener);
}
/**
* Remove the <code>FileDropListener</code> from ChatRoom.
*
* @param listener the listener.
*/
public void removeFileDropListener(FileDropListener listener) {
fileDropListeners.remove(listener);
}
/**
* Notify all users that a collection of files has been dropped onto the ChatRoom.
*
* @param files the files dropped.
*/
public void fireFileDropListeners( Collection<File> files )
{
for ( FileDropListener listener : fileDropListeners )
{
try
{
listener.filesDropped( files, this );
}
catch ( Exception e )
{
Log.error( "A FileDropListener (" + listener + ") threw an exception while processing a 'files dropped' event.", e );
}
}
}
/**
* Returns the panel which contains the toolbar items, such as spell checker.
*
* @return the panel which contains the lower toolbar items.
*/
public JPanel getEditorBar() {
return editorBarLeft;
}
/**
* Returns the panel next to the editor bar<br>
* for use with system buttons, like room controlling or toggle stay-on-top
*
* @return
*/
public JPanel getRoomControllerBar() {
return editorBarRight;
}
/**
* Adds a <code>ChatRoomClosingListener</code> to this ChatRoom. A ChatRoomClosingListener
* is notified whenever this room is closing.
*
* @param listener the ChatRoomClosingListener.
*/
public void addClosingListener(ChatRoomClosingListener listener)
{
closingListeners.add( listener );
}
/**
* Removes a <code>ChatRoomClosingListener</code> from this ChatRoom.
*
* @param listener the ChatRoomClosingListener.
*/
public void removeClosingListener(ChatRoomClosingListener listener)
{
closingListeners.remove( listener );
}
/**
* Notifies all <code>ChatRoomClosingListener</code> that this ChatRoom is closing.
*/
private void fireClosingListeners()
{
for ( final ChatRoomClosingListener listener : new ArrayList<>( closingListeners ) ) // Listener can call #removeClosingListener. Prevent ConcurrentModificationException by using a clone.
{
try
{
listener.closing();
}
catch ( Exception e )
{
Log.error( "A ChatRoomClosingListener (" + listener + ") threw an exception while processing a 'closing' event.", e );
}
}
closingListeners.clear();
}
/**
* Returns the ScrollPane that contains the TranscriptWindow.
*
* @return the <code>TranscriptWindow</code> ScrollPane.
*/
public JScrollPane getScrollPaneForTranscriptWindow() {
return textScroller;
}
/**
* Return the "Send" button.
*
* @return the send button.
*/
public JButton getSendButton() {
return chatAreaButton.getButton();
}
/**
* Returns the VerticalSplitPane used in this ChatRoom.
*
* @return the VerticalSplitPane.
*/
public JSplitPane getVerticalSlipPane() {
return verticalSplit;
}
// Refreshes the layout/paint state when the room gains focus, and marks the
// chat state as active when focus landed on the input editor.
// NOTE(review): invalidate() is called after validate(), which looks
// reversed -- confirm whether the intent was invalidate() then validate().
public void focusGained(FocusEvent focusEvent) {
validate();
invalidate();
repaint();
if(focusEvent.getComponent().equals(getChatInputEditor())) {
setChatState( ChatState.active );
}
}
public void poppingUp(Object component, JPopupMenu popup) {
Action saveAction = new AbstractAction() {
private static final long serialVersionUID = -3582301239832606653L;
public void actionPerformed(ActionEvent actionEvent) {
saveTranscript();
}
};
saveAction.putValue(Action.NAME, Res.getString("action.save"));
saveAction.putValue(Action.SMALL_ICON, SparkRes.getImageIcon(SparkRes.SAVE_AS_16x16));
popup.add(saveAction);
}
public void poppingDown(JPopupMenu popup) {
}
public boolean handleDefaultAction(MouseEvent e) {
return false;
}
public void focusLost(FocusEvent focusEvent) {
if(focusEvent.getComponent().equals(getChatInputEditor())) {
setChatState( ChatState.inactive );
}
}
/**
 * Implementation of this method should return the last time this chat room
 * sent or received a message.
 *
 * @return the last time (in system milliseconds) that the room last received a message.
 */
public abstract long getLastActivity();
public void connectionClosed() {
}
public void connectionClosedOnError(Exception e) {
}
public void reconnectingIn(int seconds) {
}
public void reconnectionSuccessful() {
}
public void reconnectionFailed(Exception e) {
}
public void updateStatus(boolean active)
{
_alwaysOnTopItem.setSelected(active);
}
public void registeredToFrame(ChatFrame chatframe)
{
this._chatFrame = chatframe;
_chatFrame.addWindowToFronListener(this);
}
protected JPanel getEditorWrapperBar() {
return editorWrapperBar;
}
protected JPanel getEditorBarRight() {
return editorBarRight;
}
protected JPanel getEditorBarLeft() {
return editorBarLeft;
}
protected JScrollPane getTextScroller() {
return textScroller;
}
protected Insets getChatPanelInsets() {
return new Insets(0, 5, 0, 5);
}
protected Insets getChatAreaInsets() {
return new Insets(0, 5, 5, 5);
}
protected Insets getEditorWrapperInsets() {
return new Insets(0, 5, 0, 5);
}
public void addChatRoomComponent(JComponent component) {
editorBarLeft.add(component);
}
public void addChatRoomButton(ChatRoomButton button) {
addChatRoomButton(button, false);
}
public void addChatRoomButton(ChatRoomButton button, boolean forceRepaint) {
toolbar.addChatRoomButton(button);
if (forceRepaint) {
toolbar.invalidate();
toolbar.repaint();
}
}
public void showToolbar() {
toolbar.setVisible(true);
}
public void hideToolbar() {
toolbar.setVisible(false);
}
public void addEditorComponent(JComponent component) {
editorBarLeft.add(component);
}
public void removeEditorComponent(JComponent component) {
editorBarLeft.remove(component);
}
public void addControllerButton(RolloverButton button) {
editorBarRight.add(button, 0);
}
}
| apache-2.0 |
amoghmargoor/incubator-calcite | core/src/test/java/org/apache/calcite/sql/parser/SqlParserTest.java | 212387 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql.parser;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.avatica.util.Quoting;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlSetOption;
import org.apache.calcite.sql.pretty.SqlPrettyWriter;
import org.apache.calcite.test.DiffTestCase;
import org.apache.calcite.test.SqlValidatorTestCase;
import org.apache.calcite.util.Bug;
import org.apache.calcite.util.ConversionUtil;
import org.apache.calcite.util.TestUtil;
import org.apache.calcite.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.junit.Ignore;
import org.junit.Test;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
* A <code>SqlParserTest</code> is a unit-test for
* {@link SqlParser the SQL parser}.
*/
public class SqlParserTest {
//~ Static fields/initializers ---------------------------------------------
private static final String ANY = "(?s).*";
private static final ThreadLocal<boolean[]> LINUXIFY =
new ThreadLocal<boolean[]>() {
@Override protected boolean[] initialValue() {
return new boolean[] {true};
}
};
Quoting quoting = Quoting.DOUBLE_QUOTE;
Casing unquotedCasing = Casing.TO_UPPER;
Casing quotedCasing = Casing.UNCHANGED;
//~ Constructors -----------------------------------------------------------
public SqlParserTest() {
}
//~ Methods ----------------------------------------------------------------
// Helper functions -------------------------------------------------------
protected Tester getTester() {
return new TesterImpl();
}
protected void check(
String sql,
String expected) {
sql(sql).ok(expected);
}
protected Sql sql(String sql) {
return new Sql(sql);
}
// Builds a parser configuration from this test's current quoting and casing
// settings, then creates a parser over the given SQL text.
private SqlParser getSqlParser(String sql) {
    final SqlParser.Config config = SqlParser.configBuilder()
        .setQuoting(quoting)
        .setUnquotedCasing(unquotedCasing)
        .setQuotedCasing(quotedCasing)
        .build();
    return SqlParser.create(sql, config);
}
// Parses the given SQL as a statement and returns the parse tree.
protected SqlNode parseStmt(String sql) throws SqlParseException {
return getSqlParser(sql).parseStmt();
}
// Asserts that the given SQL expression parses and unparses to 'expected'.
protected void checkExp(
String sql,
String expected) {
getTester().checkExp(sql, expected);
}
// Parses the given SQL as a standalone expression and returns the parse tree.
protected SqlNode parseExpression(String sql) throws SqlParseException {
return getSqlParser(sql).parseExpression();
}
// Returns parser metadata (keywords etc.) from a parser over the empty string.
protected SqlAbstractParserImpl.Metadata getParserMetadata() {
return getSqlParser("").getMetadata();
}
// Asserts that the expression unparses to itself (a fixed point).
protected void checkExpSame(String sql) {
checkExp(sql, sql);
}
// Asserts that parsing the statement fails with a message matching the pattern.
protected void checkFails(
String sql,
String expectedMsgPattern) {
sql(sql).fails(expectedMsgPattern);
}
/**
 * Tests that an expression throws an exception which matches the given
 * pattern.
 */
protected void checkExpFails(
String sql,
String expectedMsgPattern) {
getTester().checkExpFails(sql, expectedMsgPattern);
}
/**
* Tests that when there is an error, non-reserved keywords such as "A",
* "ABSOLUTE" (which naturally arise whenever a production uses
* "<IDENTIFIER>") are removed, but reserved words such as "AND"
* remain.
*/
@Test public void testExceptionCleanup() {
checkFails(
"select 0.5e1^.1^ from sales.emps",
"(?s).*Encountered \".1\" at line 1, column 13.\n"
+ "Was expecting one of:\n"
+ " \"FROM\" ...\n"
+ " \",\" ...\n"
+ " \"AS\" ...\n"
+ " <IDENTIFIER> ...\n"
+ " <QUOTED_IDENTIFIER> ...\n"
+ ".*");
}
@Test public void testInvalidToken() {
// Causes problems to the test infrastructure because the token mgr
// throws a java.lang.Error. The usual case is that the parser throws
// an exception.
checkFails(
"values (a^#^b)",
"Lexical error at line 1, column 10\\. Encountered: \"#\" \\(35\\), after : \"\"");
}
@Test public void testDerivedColumnList() {
check("select * from emp as e (empno, gender) where true",
"SELECT *\n"
+ "FROM `EMP` AS `E` (`EMPNO`, `GENDER`)\n"
+ "WHERE TRUE");
}
@Test public void testDerivedColumnListInJoin() {
check(
"select * from emp as e (empno, gender) join dept as d (deptno, dname) on emp.deptno = dept.deptno",
"SELECT *\n"
+ "FROM `EMP` AS `E` (`EMPNO`, `GENDER`)\n"
+ "INNER JOIN `DEPT` AS `D` (`DEPTNO`, `DNAME`) ON (`EMP`.`DEPTNO` = `DEPT`.`DEPTNO`)");
}
@Ignore
@Test public void testDerivedColumnListNoAs() {
check("select * from emp e (empno, gender) where true", "foo");
}
// jdbc syntax
@Ignore
@Test public void testEmbeddedCall() {
checkExp("{call foo(?, ?)}", "foo");
}
@Ignore
@Test public void testEmbeddedFunction() {
checkExp("{? = call bar (?, ?)}", "foo");
}
@Test public void testColumnAliasWithAs() {
check(
"select 1 as foo from emp",
"SELECT 1 AS `FOO`\n"
+ "FROM `EMP`");
}
@Test public void testColumnAliasWithoutAs() {
check("select 1 foo from emp",
"SELECT 1 AS `FOO`\n"
+ "FROM `EMP`");
}
@Test public void testEmbeddedDate() {
checkExp("{d '1998-10-22'}", "DATE '1998-10-22'");
}
@Test public void testEmbeddedTime() {
checkExp("{t '16:22:34'}", "TIME '16:22:34'");
}
@Test public void testEmbeddedTimestamp() {
checkExp("{ts '1998-10-22 16:22:34'}", "TIMESTAMP '1998-10-22 16:22:34'");
}
@Test public void testNot() {
check(
"select not true, not false, not null, not unknown from t",
"SELECT (NOT TRUE), (NOT FALSE), (NOT NULL), (NOT UNKNOWN)\n"
+ "FROM `T`");
}
@Test public void testBooleanPrecedenceAndAssociativity() {
check(
"select * from t where true and false",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (TRUE AND FALSE)");
check(
"select * from t where null or unknown and unknown",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (NULL OR (UNKNOWN AND UNKNOWN))");
check(
"select * from t where true and (true or true) or false",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((TRUE AND (TRUE OR TRUE)) OR FALSE)");
check(
"select * from t where 1 and true",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (1 AND TRUE)");
}
@Test public void testIsBooleans() {
String[] inOuts = {"NULL", "TRUE", "FALSE", "UNKNOWN"};
for (String inOut : inOuts) {
check(
"select * from t where nOt fAlSe Is " + inOut,
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((NOT FALSE) IS " + inOut + ")");
check(
"select * from t where c1=1.1 IS NOT " + inOut,
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((`C1` = 1.1) IS NOT " + inOut + ")");
}
}
@Test public void testIsBooleanPrecedenceAndAssociativity() {
check("select * from t where x is unknown is not unknown",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((`X` IS UNKNOWN) IS NOT UNKNOWN)");
check("select 1 from t where not true is unknown",
"SELECT 1\n"
+ "FROM `T`\n"
+ "WHERE ((NOT TRUE) IS UNKNOWN)");
check(
"select * from t where x is unknown is not unknown is false is not false"
+ " is true is not true is null is not null",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((((((((`X` IS UNKNOWN) IS NOT UNKNOWN) IS FALSE) IS NOT FALSE) IS TRUE) IS NOT TRUE) IS NULL) IS NOT NULL)");
// combine IS postfix operators with infix (AND) and prefix (NOT) ops
check(
"select * from t where x is unknown is false and x is unknown is true or not y is unknown is not null",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((((`X` IS UNKNOWN) IS FALSE) AND ((`X` IS UNKNOWN) IS TRUE)) OR (((NOT `Y`) IS UNKNOWN) IS NOT NULL))");
}
@Test public void testEqualNotEqual() {
checkExp("'abc'=123", "('abc' = 123)");
checkExp("'abc'<>123", "('abc' <> 123)");
checkExp("'abc'<>123='def'<>456", "((('abc' <> 123) = 'def') <> 456)");
checkExp("'abc'<>123=('def'<>456)", "(('abc' <> 123) = ('def' <> 456))");
}
@Test public void testBangEqualIsBad() {
// Quoth www.ocelot.ca:
// "Other relators besides '=' are what you'd expect if
// you've used any programming language: > and >= and < and <=. The
// only potential point of confusion is that the operator for 'not
// equals' is <> as in BASIC. There are many texts which will tell
// you that != is SQL's not-equals operator; those texts are false;
// it's one of those unstampoutable urban myths."
checkFails("'abc'^!^=123",
"Lexical error at line 1, column 6\\. Encountered: \"!\" \\(33\\), after : \"\"");
}
@Test public void testBetween() {
check(
"select * from t where price between 1 and 2",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (`PRICE` BETWEEN ASYMMETRIC 1 AND 2)");
check(
"select * from t where price between symmetric 1 and 2",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (`PRICE` BETWEEN SYMMETRIC 1 AND 2)");
check(
"select * from t where price not between symmetric 1 and 2",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (`PRICE` NOT BETWEEN SYMMETRIC 1 AND 2)");
check(
"select * from t where price between ASYMMETRIC 1 and 2+2*2",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (`PRICE` BETWEEN ASYMMETRIC 1 AND (2 + (2 * 2)))");
check(
"select * from t where price > 5 and price not between 1 + 2 and 3 * 4 AnD price is null",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (((`PRICE` > 5) AND (`PRICE` NOT BETWEEN ASYMMETRIC (1 + 2) AND (3 * 4))) AND (`PRICE` IS NULL))");
check(
"select * from t where price > 5 and price between 1 + 2 and 3 * 4 + price is null",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((`PRICE` > 5) AND ((`PRICE` BETWEEN ASYMMETRIC (1 + 2) AND ((3 * 4) + `PRICE`)) IS NULL))");
check(
"select * from t where price > 5 and price between 1 + 2 and 3 * 4 or price is null",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (((`PRICE` > 5) AND (`PRICE` BETWEEN ASYMMETRIC (1 + 2) AND (3 * 4))) OR (`PRICE` IS NULL))");
check(
"values a between c and d and e and f between g and h",
"VALUES (ROW((((`A` BETWEEN ASYMMETRIC `C` AND `D`) AND `E`) AND (`F` BETWEEN ASYMMETRIC `G` AND `H`))))");
checkFails(
"values a between b or c^",
".*BETWEEN operator has no terminating AND");
checkFails(
"values a ^between^",
"(?s).*Encountered \"between <EOF>\" at line 1, column 10.*");
checkFails(
"values a between symmetric 1^",
".*BETWEEN operator has no terminating AND");
// precedence of BETWEEN is higher than AND and OR, but lower than '+'
check(
"values a between b and c + 2 or d and e",
"VALUES (ROW(((`A` BETWEEN ASYMMETRIC `B` AND (`C` + 2)) OR (`D` AND `E`))))");
// '=' and BETWEEN have same precedence, and are left-assoc
check(
"values x = a between b and c = d = e",
"VALUES (ROW(((((`X` = `A`) BETWEEN ASYMMETRIC `B` AND `C`) = `D`) = `E`)))");
// AND doesn't match BETWEEN if it's between parentheses!
check(
"values a between b or (c and d) or e and f",
"VALUES (ROW((`A` BETWEEN ASYMMETRIC ((`B` OR (`C` AND `D`)) OR `E`) AND `F`)))");
}
@Test public void testOperateOnColumn() {
check(
"select c1*1,c2 + 2,c3/3,c4-4,c5*c4 from t",
"SELECT (`C1` * 1), (`C2` + 2), (`C3` / 3), (`C4` - 4), (`C5` * `C4`)\n"
+ "FROM `T`");
}
@Test public void testRow() {
check(
"select t.r.\"EXPR$1\", t.r.\"EXPR$0\" from (select (1,2) r from sales.depts) t",
"SELECT `T`.`R`.`EXPR$1`, `T`.`R`.`EXPR$0`\n"
+ "FROM (SELECT (ROW(1, 2)) AS `R`\n"
+ "FROM `SALES`.`DEPTS`) AS `T`");
check(
"select t.r.\"EXPR$1\".\"EXPR$2\" "
+ "from (select ((1,2),(3,4,5)) r from sales.depts) t",
"SELECT `T`.`R`.`EXPR$1`.`EXPR$2`\n"
+ "FROM (SELECT (ROW((ROW(1, 2)), (ROW(3, 4, 5)))) AS `R`\n"
+ "FROM `SALES`.`DEPTS`) AS `T`");
check(
"select t.r.\"EXPR$1\".\"EXPR$2\" "
+ "from (select ((1,2),(3,4,5,6)) r from sales.depts) t",
"SELECT `T`.`R`.`EXPR$1`.`EXPR$2`\n"
+ "FROM (SELECT (ROW((ROW(1, 2)), (ROW(3, 4, 5, 6)))) AS `R`\n"
+ "FROM `SALES`.`DEPTS`) AS `T`");
}
/** Tests the OVERLAPS operator: its parenthesization relative to AND, NOT
 * and OR, and that each operand must be a row of exactly two items. */
@Test public void testOverlaps() {
checkExp(
"(x,xx) overlaps (y,yy)",
"((`X`, `XX`) OVERLAPS (`Y`, `YY`))");
checkExp(
"(x,xx) overlaps (y,yy) or false",
"(((`X`, `XX`) OVERLAPS (`Y`, `YY`)) OR FALSE)");
checkExp(
"true and not (x,xx) overlaps (y,yy) or false",
"((TRUE AND (NOT ((`X`, `XX`) OVERLAPS (`Y`, `YY`)))) OR FALSE)");
// operands with more than two items are rejected
checkExpFails(
"^(x,xx,xxx) overlaps (y,yy)^ or false",
"(?s).*Illegal overlaps expression.*");
checkExpFails(
"true or ^(x,xx,xxx) overlaps (y,yy,yyy)^ or false",
"(?s).*Illegal overlaps expression.*");
checkExpFails(
"^(x,xx) overlaps (y,yy,yyy)^ or false",
"(?s).*Illegal overlaps expression.*");
}
/** Tests IS DISTINCT FROM in the SELECT list and WHERE clause, with
 * identifier, row and boolean operands. */
@Test public void testIsDistinctFrom() {
check(
"select x is distinct from y from t",
"SELECT (`X` IS DISTINCT FROM `Y`)\n"
+ "FROM `T`");
check(
"select * from t where x is distinct from y",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (`X` IS DISTINCT FROM `Y`)");
check(
"select * from t where x is distinct from (4,5,6)",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (`X` IS DISTINCT FROM (ROW(4, 5, 6)))");
check(
"select * from t where true is distinct from true",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (TRUE IS DISTINCT FROM TRUE)");
// IS DISTINCT FROM binds tighter than a trailing IS TRUE
check(
"select * from t where true is distinct from true is true",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((TRUE IS DISTINCT FROM TRUE) IS TRUE)");
}
/** Tests the negated form, IS NOT DISTINCT FROM. */
@Test public void testIsNotDistinct() {
check(
"select x is not distinct from y from t",
"SELECT (`X` IS NOT DISTINCT FROM `Y`)\n"
+ "FROM `T`");
check(
"select * from t where true is not distinct from true",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (TRUE IS NOT DISTINCT FROM TRUE)");
}
/** Tests FLOOR and CEIL, including the "TO timeUnit" form and how it
 * interacts with interval arithmetic and parentheses. */
@Test public void testFloor() {
checkExp("floor(1.5)", "FLOOR(1.5)");
checkExp("floor(x)", "FLOOR(`X`)");
checkExp("floor(x to hour)", "FLOOR(`X` TO HOUR)");
checkExp("ceil(x to hour)", "CEIL(`X` TO HOUR)");
checkExp("ceil(x + interval '1' minute to second)",
"CEIL((`X` + INTERVAL '1' MINUTE TO SECOND))");
checkExp("ceil((x + interval '1' minute) to second)",
"CEIL((`X` + INTERVAL '1' MINUTE) TO SECOND)");
checkExp("ceil(x + (interval '1:23' minute to second))",
"CEIL((`X` + INTERVAL '1:23' MINUTE TO SECOND))");
checkExp("ceil(x + interval '1:23' minute to second to second)",
"CEIL((`X` + INTERVAL '1:23' MINUTE TO SECOND) TO SECOND)");
}
/** Tests CAST to the various built-in types; an unknown type name such as
 * "bar" parses as a quoted identifier. */
@Test public void testCast() {
checkExp("cast(x as boolean)", "CAST(`X` AS BOOLEAN)");
checkExp("cast(x as integer)", "CAST(`X` AS INTEGER)");
checkExp("cast(x as varchar(1))", "CAST(`X` AS VARCHAR(1))");
checkExp("cast(x as date)", "CAST(`X` AS DATE)");
checkExp("cast(x as time)", "CAST(`X` AS TIME)");
checkExp("cast(x as timestamp)", "CAST(`X` AS TIMESTAMP)");
checkExp("cast(x as time(0))", "CAST(`X` AS TIME(0))");
checkExp("cast(x as timestamp(0))", "CAST(`X` AS TIMESTAMP(0))");
checkExp("cast(x as decimal(1,1))", "CAST(`X` AS DECIMAL(1, 1))");
checkExp("cast(x as char(1))", "CAST(`X` AS CHAR(1))");
checkExp("cast(x as binary(1))", "CAST(`X` AS BINARY(1))");
checkExp("cast(x as varbinary(1))", "CAST(`X` AS VARBINARY(1))");
checkExp("cast(x as tinyint)", "CAST(`X` AS TINYINT)");
checkExp("cast(x as smallint)", "CAST(`X` AS SMALLINT)");
checkExp("cast(x as bigint)", "CAST(`X` AS BIGINT)");
checkExp("cast(x as real)", "CAST(`X` AS REAL)");
checkExp("cast(x as double)", "CAST(`X` AS DOUBLE)");
checkExp("cast(x as decimal)", "CAST(`X` AS DECIMAL)");
checkExp("cast(x as decimal(0))", "CAST(`X` AS DECIMAL(0))");
checkExp("cast(x as decimal(1,2))", "CAST(`X` AS DECIMAL(1, 2))");
checkExp("cast('foo' as bar)", "CAST('foo' AS `BAR`)");
}
/** Placeholder for CAST failure cases; intentionally empty for now. */
@Test public void testCastFails() {
}
/** Tests LIKE and SIMILAR TO: precedence versus AND and '=',
 * right-associativity, the ESCAPE clause, and several error cases. */
@Test public void testLikeAndSimilar() {
check(
"select * from t where x like '%abc%'",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (`X` LIKE '%abc%')");
check(
"select * from t where x+1 not siMilaR to '%abc%' ESCAPE 'e'",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((`X` + 1) NOT SIMILAR TO '%abc%' ESCAPE 'e')");
// LIKE has higher precedence than AND
check(
"select * from t where price > 5 and x+2*2 like y*3+2 escape (select*from t)",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((`PRICE` > 5) AND ((`X` + (2 * 2)) LIKE ((`Y` * 3) + 2) ESCAPE (SELECT *\n"
+ "FROM `T`)))");
check(
"values a and b like c",
"VALUES (ROW((`A` AND (`B` LIKE `C`))))");
// LIKE has higher precedence than AND
check(
"values a and b like c escape d and e",
"VALUES (ROW(((`A` AND (`B` LIKE `C` ESCAPE `D`)) AND `E`)))");
// LIKE has same precedence as '='; LIKE is right-assoc, '=' is left
check(
"values a = b like c = d",
"VALUES (ROW(((`A` = `B`) LIKE (`C` = `D`))))");
// Nested LIKE
check(
"values a like b like c escape d",
"VALUES (ROW((`A` LIKE (`B` LIKE `C` ESCAPE `D`))))");
check(
"values a like b like c escape d and false",
"VALUES (ROW(((`A` LIKE (`B` LIKE `C` ESCAPE `D`)) AND FALSE)))");
check(
"values a like b like c like d escape e escape f",
"VALUES (ROW((`A` LIKE (`B` LIKE (`C` LIKE `D` ESCAPE `E`) ESCAPE `F`))))");
// Mixed LIKE and SIMILAR TO
check(
"values a similar to b like c similar to d escape e escape f",
"VALUES (ROW((`A` SIMILAR TO (`B` LIKE (`C` SIMILAR TO `D` ESCAPE `E`) ESCAPE `F`))))");
// FIXME should fail at "escape"
checkFails(
"select * from t ^where^ escape 'e'",
"(?s).*Encountered \"where escape\" at .*");
// LIKE with +
check(
"values a like b + c escape d",
"VALUES (ROW((`A` LIKE (`B` + `C`) ESCAPE `D`)))");
// LIKE with ||
check(
"values a like b || c escape d",
"VALUES (ROW((`A` LIKE (`B` || `C`) ESCAPE `D`)))");
// ESCAPE with no expression
// FIXME should fail at "escape"
checkFails(
"values a ^like^ escape d",
"(?s).*Encountered \"like escape\" at .*");
// ESCAPE with no expression
checkFails(
"values a like b || c ^escape^ and false",
"(?s).*Encountered \"escape and\" at line 1, column 22.*");
// basic SIMILAR TO
check(
"select * from t where x similar to '%abc%'",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (`X` SIMILAR TO '%abc%')");
check(
"select * from t where x+1 not siMilaR to '%abc%' ESCAPE 'e'",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((`X` + 1) NOT SIMILAR TO '%abc%' ESCAPE 'e')");
// SIMILAR TO has higher precedence than AND
check(
"select * from t where price > 5 and x+2*2 SIMILAR TO y*3+2 escape (select*from t)",
"SELECT *\n"
+ "FROM `T`\n"
+ "WHERE ((`PRICE` > 5) AND ((`X` + (2 * 2)) SIMILAR TO ((`Y` * 3) + 2) ESCAPE (SELECT *\n"
+ "FROM `T`)))");
// Mixed LIKE and SIMILAR TO
check(
"values a similar to b like c similar to d escape e escape f",
"VALUES (ROW((`A` SIMILAR TO (`B` LIKE (`C` SIMILAR TO `D` ESCAPE `E`) ESCAPE `F`))))");
// SIMILAR TO with subquery
check(
"values a similar to (select * from t where a like b escape c) escape d",
"VALUES (ROW((`A` SIMILAR TO (SELECT *\n"
+ "FROM `T`\n"
+ "WHERE (`A` LIKE `B` ESCAPE `C`)) ESCAPE `D`)))");
}
/** Placeholder test; intentionally empty. */
@Test public void testFoo() {
}
/** Tests arithmetic operators and numeric functions (POWER, ABS, MOD, LN,
 * LOG10). NOTE(review): method name contains a typo ("Arthimetic");
 * kept as-is so test reports and any tooling references stay stable. */
@Test public void testArthimeticOperators() {
checkExp("1-2+3*4/5/6-7", "(((1 - 2) + (((3 * 4) / 5) / 6)) - 7)");
checkExp("power(2,3)", "POWER(2, 3)");
checkExp("aBs(-2.3e-2)", "ABS(-2.3E-2)");
checkExp("MOD(5 ,\t\f\r\n2)", "MOD(5, 2)");
checkExp("ln(5.43 )", "LN(5.43)");
checkExp("log10(- -.2 )", "LOG10((- -0.2))");
}
/** Tests an EXISTS sub-query in the WHERE clause. */
@Test public void testExists() {
  sql("select * from dept where exists (select 1 from emp where emp.deptno = dept.deptno)")
      .ok("SELECT *\n"
          + "FROM `DEPT`\n"
          + "WHERE (EXISTS (SELECT 1\n"
          + "FROM `EMP`\n"
          + "WHERE (`EMP`.`DEPTNO` = `DEPT`.`DEPTNO`)))");
}
/** Tests EXISTS combined with other conjuncts in the WHERE clause. */
@Test public void testExistsInWhere() {
  sql("select * from emp where 1 = 2 and exists (select 1 from dept) and 3 = 4")
      .ok("SELECT *\n"
          + "FROM `EMP`\n"
          + "WHERE (((1 = 2) AND (EXISTS (SELECT 1\n"
          + "FROM `DEPT`))) AND (3 = 4))");
}
@Test public void testFromWithAs() {
check("select 1 from emp as e where 1",
"SELECT 1\n"
+ "FROM `EMP` AS `E`\n"
+ "WHERE 1");
}
/** Tests the string concatenation operator '||'. */
@Test public void testConcat() {
checkExp("'a' || 'b'", "('a' || 'b')");
}
/** Tests that a backslash in a string literal is an ordinary character
 * (SQL uses no backslash escaping). */
@Test public void testReverseSolidus() {
checkExp("'\\'", "'\\'");
}
/** Tests SUBSTRING in both the "FROM ... FOR ..." form and the
 * comma-separated form; both unparse to the FROM/FOR form. */
@Test public void testSubstring() {
checkExp("substring('a' \n FROM \t 1)", "SUBSTRING('a' FROM 1)");
checkExp("substring('a' FROM 1 FOR 3)", "SUBSTRING('a' FROM 1 FOR 3)");
checkExp(
"substring('a' FROM 'reg' FOR '\\')",
"SUBSTRING('a' FROM 'reg' FOR '\\')");
checkExp(
"substring('a', 'reg', '\\')",
"SUBSTRING('a' FROM 'reg' FOR '\\')");
checkExp("substring('a', 1, 2)", "SUBSTRING('a' FROM 1 FOR 2)");
checkExp("substring('a' , 1)", "SUBSTRING('a' FROM 1)");
}
/** Tests function calls in a SELECT list and a deeply nested expression
 * with assorted whitespace between tokens. */
@Test public void testFunction() {
check("select substring('Eggs and ham', 1, 3 + 2) || ' benedict' from emp",
"SELECT (SUBSTRING('Eggs and ham' FROM 1 FOR (3 + 2)) || ' benedict')\n"
+ "FROM `EMP`");
checkExp(
"log10(1)\r\n+power(2, mod(\r\n3\n\t\t\f\n,ln(4))*log10(5)-6*log10(7/abs(8)+9))*power(10,11)",
"(LOG10(1) + (POWER(2, ((MOD(3, LN(4)) * LOG10(5)) - (6 * LOG10(((7 / ABS(8)) + 9))))) * POWER(10, 11)))");
}
/** Tests DISTINCT and ALL quantifiers inside aggregate calls. */
@Test public void testFunctionWithDistinct() {
checkExp("count(DISTINCT 1)", "COUNT(DISTINCT 1)");
checkExp("count(ALL 1)", "COUNT(ALL 1)");
checkExp("count(1)", "COUNT(1)");
check("select count(1), count(distinct 2) from emp",
"SELECT COUNT(1), COUNT(DISTINCT 2)\n"
+ "FROM `EMP`");
}
/** Tests a function call whose argument is itself a function call. */
@Test public void testFunctionInFunction() {
checkExp("ln(power(2,2))", "LN(POWER(2, 2))");
}
/** Tests named arguments ("name => value"); the name must be a simple
 * identifier, not a compound one. */
@Test public void testFunctionNamedArgument() {
checkExp("foo(x => 1)",
"`FOO`(`X` => 1)");
checkExp("foo(x => 1, \"y\" => 'a', z => x <= y)",
"`FOO`(`X` => 1, `y` => 'a', `Z` => (`X` <= `Y`))");
checkExpFails("foo(x.y ^=>^ 1)",
"(?s).*Encountered \"=>\" at .*");
checkExpFails("foo(a => 1, x.y ^=>^ 2, c => 3)",
"(?s).*Encountered \"=>\" at .*");
}
/** Tests DEFAULT as an argument value: allowed as a whole argument
 * (positional or named) but not inside an expression. */
@Test public void testFunctionDefaultArgument() {
checkExp("foo(1, DEFAULT, default, 'default', \"default\", 3)",
"`FOO`(1, DEFAULT, DEFAULT, 'default', `default`, 3)");
checkExp("foo(DEFAULT)",
"`FOO`(DEFAULT)");
checkExp("foo(x => 1, DEFAULT)",
"`FOO`(`X` => 1, DEFAULT)");
checkExp("foo(y => DEFAULT, x => 1)",
"`FOO`(`Y` => DEFAULT, `X` => 1)");
checkExp("foo(x => 1, y => DEFAULT)",
"`FOO`(`X` => 1, `Y` => DEFAULT)");
check("select sum(DISTINCT DEFAULT) from t group by x",
"SELECT SUM(DISTINCT DEFAULT)\n"
+ "FROM `T`\n"
+ "GROUP BY `X`");
checkExpFails("foo(x ^+^ DEFAULT)",
"(?s).*Encountered \"\\+ DEFAULT\" at .*");
checkExpFails("foo(0, x ^+^ DEFAULT + y)",
"(?s).*Encountered \"\\+ DEFAULT\" at .*");
checkExpFails("foo(0, DEFAULT ^+^ y)",
"(?s).*Encountered \"\\+\" at .*");
}
/** Tests the FILTER (WHERE ...) clause on aggregate calls. */
@Test public void testAggregateFilter() {
sql("select sum(sal) filter (where gender = 'F') as femaleSal,\n"
+ " sum(sal) filter (where true) allSal,\n"
+ " count(distinct deptno) filter (where (deptno < 40))\n"
+ "from emp")
.ok("SELECT (SUM(`SAL`) FILTER (WHERE (`GENDER` = 'F'))) AS `FEMALESAL`,"
+ " (SUM(`SAL`) FILTER (WHERE TRUE)) AS `ALLSAL`,"
+ " (COUNT(DISTINCT `DEPTNO`) FILTER (WHERE (`DEPTNO` < 40)))\n"
+ "FROM `EMP`");
}
/** Tests GROUP BY with multiple keys alongside an aggregate. */
@Test public void testGroup() {
  sql("select deptno, min(foo) as x from emp group by deptno, gender")
      .ok("SELECT `DEPTNO`, MIN(`FOO`) AS `X`\n"
          + "FROM `EMP`\n"
          + "GROUP BY `DEPTNO`, `GENDER`");
}
/** Tests the empty grouping set "GROUP BY ()", alone and combined with
 * other keys, HAVING and ORDER BY. */
@Test public void testGroupEmpty() {
check(
"select count(*) from emp group by ()",
"SELECT COUNT(*)\n"
+ "FROM `EMP`\n"
+ "GROUP BY ()");
check(
"select count(*) from emp group by () having 1 = 2 order by 3",
"SELECT COUNT(*)\n"
+ "FROM `EMP`\n"
+ "GROUP BY ()\n"
+ "HAVING (1 = 2)\n"
+ "ORDER BY 3");
// Used to be invalid, valid now that we support grouping sets.
sql("select 1 from emp group by (), x")
.ok("SELECT 1\n"
+ "FROM `EMP`\n"
+ "GROUP BY (), `X`");
// Used to be invalid, valid now that we support grouping sets.
sql("select 1 from emp group by x, ()")
.ok("SELECT 1\n"
+ "FROM `EMP`\n"
+ "GROUP BY `X`, ()");
// parentheses do not an empty GROUP BY make
check(
"select 1 from emp group by (empno + deptno)",
"SELECT 1\n"
+ "FROM `EMP`\n"
+ "GROUP BY (`EMPNO` + `DEPTNO`)");
}
/** Tests HAVING and ORDER BY following a GROUP BY clause. */
@Test public void testHavingAfterGroup() {
  sql("select deptno from emp group by deptno, emp having count(*) > 5 and 1 = 2 order by 5, 2")
      .ok("SELECT `DEPTNO`\n"
          + "FROM `EMP`\n"
          + "GROUP BY `DEPTNO`, `EMP`\n"
          + "HAVING ((COUNT(*) > 5) AND (1 = 2))\n"
          + "ORDER BY 5, 2");
}
/** Tests that HAVING may not precede GROUP BY. */
@Test public void testHavingBeforeGroupFails() {
  sql("select deptno from emp having count(*) > 5 and deptno < 4 ^group^ by deptno, emp")
      .fails("(?s).*Encountered \"group\" at .*");
}
/** Tests HAVING without a GROUP BY clause. */
@Test public void testHavingNoGroup() {
  sql("select deptno from emp having count(*) > 5")
      .ok("SELECT `DEPTNO`\n"
          + "FROM `EMP`\n"
          + "HAVING (COUNT(*) > 5)");
}
/** Tests GROUPING SETS: simple and nested sets, required parentheses, and
 * mixing with CUBE and ROLLUP. */
@Test public void testGroupingSets() {
sql("select deptno from emp\n"
+ "group by grouping sets (deptno, (deptno, gender), ())")
.ok("SELECT `DEPTNO`\n"
+ "FROM `EMP`\n"
+ "GROUP BY GROUPING SETS(`DEPTNO`, (`DEPTNO`, `GENDER`), ())");
// Grouping sets must have parentheses
sql("select deptno from emp\n"
+ "group by grouping sets ^deptno^, (deptno, gender), ()")
.fails("(?s).*Encountered \"deptno\" at line 2, column 24.\n"
+ "Was expecting:\n"
+ " \"\\(\" .*");
// Nested grouping sets, cube, rollup, grouping sets all OK
sql("select deptno from emp\n"
+ "group by grouping sets (deptno, grouping sets (e, d), (),\n"
+ " cube (x, y), rollup(p, q))\n"
+ "order by a")
.ok("SELECT `DEPTNO`\n"
+ "FROM `EMP`\n"
+ "GROUP BY GROUPING SETS(`DEPTNO`, GROUPING SETS(`E`, `D`), (), CUBE(`X`, `Y`), ROLLUP(`P`, `Q`))\n"
+ "ORDER BY `A`");
sql("select deptno from emp\n"
+ "group by grouping sets (())")
.ok("SELECT `DEPTNO`\n"
+ "FROM `EMP`\n"
+ "GROUP BY GROUPING SETS(())");
}
/** Tests GROUP BY CUBE with composite keys. */
@Test public void testGroupByCube() {
sql("select deptno from emp\n"
+ "group by cube ((a, b), (c, d))")
.ok("SELECT `DEPTNO`\n"
+ "FROM `EMP`\n"
+ "GROUP BY CUBE((`A`, `B`), (`C`, `D`))");
}
/** Tests GROUP BY CUBE with a trailing ORDER BY, and that an empty CUBE
 * argument list is a parse error. */
@Test public void testGroupByCube2() {
sql("select deptno from emp\n"
+ "group by cube ((a, b), (c, d)) order by a")
.ok("SELECT `DEPTNO`\n"
+ "FROM `EMP`\n"
+ "GROUP BY CUBE((`A`, `B`), (`C`, `D`))\n"
+ "ORDER BY `A`");
sql("select deptno from emp\n"
+ "group by cube (^)")
.fails("(?s)Encountered \"\\)\" at .*");
}
/** Tests GROUP BY ROLLUP, including that ROLLUP may not nest. */
@Test public void testGroupByRollup() {
sql("select deptno from emp\n"
+ "group by rollup (deptno, deptno + 1, gender)")
.ok("SELECT `DEPTNO`\n"
+ "FROM `EMP`\n"
+ "GROUP BY ROLLUP(`DEPTNO`, (`DEPTNO` + 1), `GENDER`)");
// Nested rollup not ok
sql("select deptno from emp\n"
+ "group by rollup (deptno^, rollup(e, d))")
.fails("(?s)Encountered \", rollup\" at .*");
}
/** Tests the GROUPING function in a SELECT list over grouping sets. */
@Test public void testGrouping() {
sql("select deptno, grouping(deptno) from emp\n"
+ "group by grouping sets (deptno, (deptno, gender), ())")
.ok("SELECT `DEPTNO`, (GROUPING(`DEPTNO`))\n"
+ "FROM `EMP`\n"
+ "GROUP BY GROUPING SETS(`DEPTNO`, (`DEPTNO`, `GENDER`), ())");
}
/** Tests a simple WITH clause with a single common table expression. */
@Test public void testWith() {
check(
"with femaleEmps as (select * from emps where gender = 'F')"
+ "select deptno from femaleEmps",
"WITH `FEMALEEMPS` AS (SELECT *\n"
+ "FROM `EMPS`\n"
+ "WHERE (`GENDER` = 'F')) (SELECT `DEPTNO`\n"
+ "FROM `FEMALEEMPS`)");
}
/** Tests a WITH clause with two items, the second with a column list. */
@Test public void testWith2() {
check(
"with femaleEmps as (select * from emps where gender = 'F'),\n"
+ "marriedFemaleEmps(x, y) as (select * from femaleEmps where maritaStatus = 'M')\n"
+ "select deptno from femaleEmps",
"WITH `FEMALEEMPS` AS (SELECT *\n"
+ "FROM `EMPS`\n"
+ "WHERE (`GENDER` = 'F')), `MARRIEDFEMALEEMPS` (`X`, `Y`) AS (SELECT *\n"
+ "FROM `FEMALEEMPS`\n"
+ "WHERE (`MARITASTATUS` = 'M')) (SELECT `DEPTNO`\n"
+ "FROM `FEMALEEMPS`)");
}
/** Tests that the query defining a WITH item must be parenthesized. */
@Test public void testWithFails() {
checkFails("with femaleEmps as ^select^ * from emps where gender = 'F'\n"
+ "select deptno from femaleEmps",
"(?s)Encountered \"select\" at .*");
}
/** Tests a WITH item defined by a VALUES clause. */
@Test public void testWithValues() {
check(
"with v(i,c) as (values (1, 'a'), (2, 'bb'))\n"
+ "select c, i from v",
"WITH `V` (`I`, `C`) AS (VALUES (ROW(1, 'a')), (ROW(2, 'bb'))) (SELECT `C`, `I`\n"
+ "FROM `V`)");
}
/** Tests that a WITH clause may not directly contain another WITH. */
@Test public void testWithNestedFails() {
// SQL standard does not allow WITH to contain WITH
checkFails("with emp2 as (select * from emp)\n"
+ "^with^ dept2 as (select * from dept)\n"
+ "select 1 as one from emp, dept",
"(?s)Encountered \"with\" at .*");
}
/** Tests a WITH inside a parenthesized sub-query (an extension to the
 * SQL standard). */
@Test public void testWithNestedInSubquery() {
// SQL standard does not allow sub-query to contain WITH but we do
check("with emp2 as (select * from emp)\n"
+ "(\n"
+ " with dept2 as (select * from dept)\n"
+ " select 1 as one from empDept)",
"WITH `EMP2` AS (SELECT *\n"
+ "FROM `EMP`) (WITH `DEPT2` AS (SELECT *\n"
+ "FROM `DEPT`) (SELECT 1 AS `ONE`\n"
+ "FROM `EMPDEPT`))");
}
/** Tests WITH followed by an un-parenthesized UNION query. */
@Test public void testWithUnion() {
// Per the standard WITH ... SELECT ... UNION is valid even without parens.
check("with emp2 as (select * from emp)\n"
+ "select * from emp2\n"
+ "union\n"
+ "select * from emp2\n",
"WITH `EMP2` AS (SELECT *\n"
+ "FROM `EMP`) (SELECT *\n"
+ "FROM `EMP2`\n"
+ "UNION\n"
+ "SELECT *\n"
+ "FROM `EMP2`)");
}
/** Tests identifier parsing with the default (double-quote) quoting
 * convention; back-tick quoting is rejected. */
@Test public void testIdentifier() {
checkExp("ab", "`AB`");
checkExp(" \"a \"\" b!c\"", "`a \" b!c`");
checkExpFails(" ^`^a \" b!c`", "(?s).*Encountered.*");
checkExp("\"x`y`z\"", "`x``y``z`");
checkExpFails("^`^x`y`z`", "(?s).*Encountered.*");
checkExp("myMap[field] + myArray[1 + 2]",
"(`MYMAP`[`FIELD`] + `MYARRAY`[(1 + 2)])");
}
/** Tests identifier parsing with BACK_TICK quoting; double-quote quoting
 * is rejected. (Sets the test fixture's {@code quoting} field.) */
@Test public void testBackTickIdentifier() {
quoting = Quoting.BACK_TICK;
checkExp("ab", "`AB`");
checkExp(" `a \" b!c`", "`a \" b!c`");
checkExpFails(" ^\"^a \"\" b!c\"", "(?s).*Encountered.*");
checkExpFails("^\"^x`y`z\"", "(?s).*Encountered.*");
checkExp("`x``y``z`", "`x``y``z`");
checkExp("myMap[field] + myArray[1 + 2]",
"(`MYMAP`[`FIELD`] + `MYARRAY`[(1 + 2)])");
}
/** Tests identifier parsing with BRACKET quoting; note that "[...]" after
 * a table name then reads as an alias, not the item operator. */
@Test public void testBracketIdentifier() {
quoting = Quoting.BRACKET;
checkExp("ab", "`AB`");
checkExp(" [a \" b!c]", "`a \" b!c`");
checkExpFails(" ^`^a \" b!c`", "(?s).*Encountered.*");
checkExpFails(" ^\"^a \"\" b!c\"", "(?s).*Encountered.*");
checkExp("[x`y`z]", "`x``y``z`");
checkExpFails("^\"^x`y`z\"", "(?s).*Encountered.*");
checkExpFails("^`^x``y``z`", "(?s).*Encountered.*");
checkExp("[anything [even brackets]] is].[ok]",
"`anything [even brackets] is`.`ok`");
// What would be a call to the 'item' function in DOUBLE_QUOTE and BACK_TICK
// is a table alias.
check("select * from myMap[field], myArray[1 + 2]",
"SELECT *\n"
+ "FROM `MYMAP` AS `field`,\n"
+ "`MYARRAY` AS `1 + 2`");
check("select * from myMap [field], myArray [1 + 2]",
"SELECT *\n"
+ "FROM `MYMAP` AS `field`,\n"
+ "`MYARRAY` AS `1 + 2`");
}
/** Tests a whole query using BACK_TICK-quoted identifiers. */
@Test public void testBackTickQuery() {
quoting = Quoting.BACK_TICK;
check(
"select `x`.`b baz` from `emp` as `x` where `x`.deptno in (10, 20)",
"SELECT `x`.`b baz`\n"
+ "FROM `emp` AS `x`\n"
+ "WHERE (`x`.`DEPTNO` IN (10, 20))");
}
/** Tests IN with a list of scalar values. */
@Test public void testInList() {
  sql("select * from emp where deptno in (10, 20) and gender = 'F'")
      .ok("SELECT *\n"
          + "FROM `EMP`\n"
          + "WHERE ((`DEPTNO` IN (10, 20)) AND (`GENDER` = 'F'))");
}
/** Tests that an empty IN list is a parse error. */
@Test public void testInListEmptyFails() {
  sql("select * from emp where deptno in (^)^ and gender = 'F'")
      .fails("(?s).*Encountered \"\\)\" at line 1, column 36\\..*");
}
/** Tests IN whose right-hand side is a sub-query. */
@Test public void testInQuery() {
  sql("select * from emp where deptno in (select deptno from dept)")
      .ok("SELECT *\n"
          + "FROM `EMP`\n"
          + "WHERE (`DEPTNO` IN (SELECT `DEPTNO`\n"
          + "FROM `DEPT`))");
}
/**
 * Tricky for the parser - looks like "IN (scalar, scalar)" but isn't.
 */
@Test public void testInQueryWithComma() {
  sql("select * from emp where deptno in (select deptno from dept group by 1, 2)")
      .ok("SELECT *\n"
          + "FROM `EMP`\n"
          + "WHERE (`DEPTNO` IN (SELECT `DEPTNO`\n"
          + "FROM `DEPT`\n"
          + "GROUP BY 1, 2))");
}
/** Tests IN whose right-hand side is a set operation (UNION then EXCEPT). */
@Test public void testInSetop() {
  sql("select * from emp where deptno in ((select deptno from dept union select * from dept)"
      + "except select * from dept) and false")
      .ok("SELECT *\n"
          + "FROM `EMP`\n"
          + "WHERE ((`DEPTNO` IN ((SELECT `DEPTNO`\n"
          + "FROM `DEPT`\n"
          + "UNION\n"
          + "SELECT *\n"
          + "FROM `DEPT`)\n"
          + "EXCEPT\n"
          + "SELECT *\n"
          + "FROM `DEPT`)) AND FALSE)");
}
/** Tests UNION, UNION ALL and UNION DISTINCT; the DISTINCT form unparses
 * as plain UNION. */
@Test public void testUnion() {
  sql("select * from a union select * from a")
      .ok("(SELECT *\n"
          + "FROM `A`\n"
          + "UNION\n"
          + "SELECT *\n"
          + "FROM `A`)");
  sql("select * from a union all select * from a")
      .ok("(SELECT *\n"
          + "FROM `A`\n"
          + "UNION ALL\n"
          + "SELECT *\n"
          + "FROM `A`)");
  sql("select * from a union distinct select * from a")
      .ok("(SELECT *\n"
          + "FROM `A`\n"
          + "UNION\n"
          + "SELECT *\n"
          + "FROM `A`)");
}
/** Tests that a trailing ORDER BY applies to the whole UNION; note the
 * redundant ASC is not unparsed. */
@Test public void testUnionOrder() {
  sql("select a, b from t "
      + "union all "
      + "select x, y from u "
      + "order by 1 asc, 2 desc")
      .ok("(SELECT `A`, `B`\n"
          + "FROM `T`\n"
          + "UNION ALL\n"
          + "SELECT `X`, `Y`\n"
          + "FROM `U`)\n"
          + "ORDER BY 1, 2 DESC");
}
/** Tests that an un-parenthesized ORDER BY may not appear in a branch of
 * a UNION. */
@Test public void testOrderUnion() {
// ORDER BY inside UNION not allowed
sql("select a from t order by a\n"
+ "^union^ all\n"
+ "select b from t order by b")
.fails("(?s).*Encountered \"union\" at .*");
}
/** Tests that LIMIT may not appear in a branch of a UNION. */
@Test public void testLimitUnion() {
// LIMIT inside UNION not allowed
sql("select a from t limit 10\n"
+ "^union^ all\n"
+ "select b from t order by b")
.fails("(?s).*Encountered \"union\" at .*");
}
/** Tests that a branch of a UNION must be a query, not a scalar
 * expression. */
@Test public void testUnionOfNonQueryFails() {
checkFails(
"select 1 from emp union ^2^ + 5",
"Non-query expression encountered in illegal context");
}
/**
* In modern SQL, a query can occur almost everywhere that an expression
* can. This test tests the few exceptions.
*/
@Test public void testQueryInIllegalContext() {
checkFails(
"select 0, multiset[^(^select * from emp), 2] from dept",
"Query expression encountered in illegal context");
checkFails(
"select 0, multiset[1, ^(^select * from emp), 2, 3] from dept",
"Query expression encountered in illegal context");
}
/** Tests EXCEPT, EXCEPT ALL and EXCEPT DISTINCT; the DISTINCT form
 * unparses as plain EXCEPT. */
@Test public void testExcept() {
  sql("select * from a except select * from a")
      .ok("(SELECT *\n"
          + "FROM `A`\n"
          + "EXCEPT\n"
          + "SELECT *\n"
          + "FROM `A`)");
  sql("select * from a except all select * from a")
      .ok("(SELECT *\n"
          + "FROM `A`\n"
          + "EXCEPT ALL\n"
          + "SELECT *\n"
          + "FROM `A`)");
  sql("select * from a except distinct select * from a")
      .ok("(SELECT *\n"
          + "FROM `A`\n"
          + "EXCEPT\n"
          + "SELECT *\n"
          + "FROM `A`)");
}
/** Tests INTERSECT, INTERSECT ALL and INTERSECT DISTINCT; the DISTINCT
 * form unparses as plain INTERSECT. */
@Test public void testIntersect() {
  sql("select * from a intersect select * from a")
      .ok("(SELECT *\n"
          + "FROM `A`\n"
          + "INTERSECT\n"
          + "SELECT *\n"
          + "FROM `A`)");
  sql("select * from a intersect all select * from a")
      .ok("(SELECT *\n"
          + "FROM `A`\n"
          + "INTERSECT ALL\n"
          + "SELECT *\n"
          + "FROM `A`)");
  sql("select * from a intersect distinct select * from a")
      .ok("(SELECT *\n"
          + "FROM `A`\n"
          + "INTERSECT\n"
          + "SELECT *\n"
          + "FROM `A`)");
}
/** Tests CROSS JOIN with an aliased left side. */
@Test public void testJoinCross() {
  sql("select * from a as a2 cross join b")
      .ok("SELECT *\n"
          + "FROM `A` AS `A2`\n"
          + "CROSS JOIN `B`");
}
/** Tests LEFT JOIN with a compound ON condition plus a WHERE clause. */
@Test public void testJoinOn() {
  sql("select * from a left join b on 1 = 1 and 2 = 2 where 3 = 3")
      .ok("SELECT *\n"
          + "FROM `A`\n"
          + "LEFT JOIN `B` ON ((1 = 1) AND (2 = 2))\n"
          + "WHERE (3 = 3)");
}
/** Tests a parenthesized join on the right of an outer join.
 * Disabled until the underlying bug is fixed (guarded by
 * {@code Bug.TODO_FIXED}). */
@Test public void testJoinOnParentheses() {
if (!Bug.TODO_FIXED) {
return;
}
check(
"select * from a\n"
+ " left join (b join c as c1 on 1 = 1) on 2 = 2\n"
+ "where 3 = 3",
"SELECT *\n"
+ "FROM `A`\n"
+ "LEFT JOIN (`B` INNER JOIN `C` AS `C1` ON (1 = 1)) ON (2 = 2)\n"
+ "WHERE (3 = 3)");
}
/**
* Same as {@link #testJoinOnParentheses()} but fancy aliases.
*/
@Test public void testJoinOnParenthesesPlus() {
if (!Bug.TODO_FIXED) {
return;
}
check(
"select * from a\n"
+ " left join (b as b1 (x, y) join (select * from c) c1 on 1 = 1) on 2 = 2\n"
+ "where 3 = 3",
"SELECT *\n"
+ "FROM `A`\n"
+ "LEFT JOIN (`B` AS `B1` (`X`, `Y`) INNER JOIN (SELECT *\n"
+ "FROM `C`) AS `C1` ON (1 = 1)) ON (2 = 2)\n"
+ "WHERE (3 = 3)");
}
/** Tests an explicit "TABLE t" operand on the right of a join. */
@Test public void testExplicitTableInJoin() {
check(
"select * from a left join (table b) on 2 = 2 where 3 = 3",
"SELECT *\n"
+ "FROM `A`\n"
+ "LEFT JOIN (TABLE `B`) ON (2 = 2)\n"
+ "WHERE (3 = 3)");
}
/** Tests a sub-query operand in a join. Disabled until the underlying bug
 * is fixed (guarded by {@code Bug.TODO_FIXED}). */
@Test public void testSubqueryInJoin() {
if (!Bug.TODO_FIXED) {
return;
}
check(
"select * from (select * from a cross join b) as ab\n"
+ " left join ((table c) join d on 2 = 2) on 3 = 3\n"
+ " where 4 = 4",
"SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM `A`\n"
+ "CROSS JOIN `B`) AS `AB`\n"
+ "LEFT JOIN ((TABLE `C`) INNER JOIN `D` ON (2 = 2)) ON (3 = 3)\n"
+ "WHERE (4 = 4)");
}
/** Tests that the optional OUTER noise word is dropped when unparsing
 * "LEFT OUTER JOIN". */
@Test public void testOuterJoinNoiseWord() {
  sql("select * from a left outer join b on 1 = 1 and 2 = 2 where 3 = 3")
      .ok("SELECT *\n"
          + "FROM `A`\n"
          + "LEFT JOIN `B` ON ((1 = 1) AND (2 = 2))\n"
          + "WHERE (3 = 3)");
}
/** Tests joining to an aliased sub-query; plain JOIN unparses as
 * INNER JOIN. */
@Test public void testJoinQuery() {
  sql("select * from a join (select * from b) as b2 on true")
      .ok("SELECT *\n"
          + "FROM `A`\n"
          + "INNER JOIN (SELECT *\n"
          + "FROM `B`) AS `B2` ON TRUE");
}
/** Tests that two join-type keywords may not be combined. */
@Test public void testFullInnerJoinFails() {
// cannot have more than one of INNER, FULL, LEFT, RIGHT, CROSS
checkFails(
"select * from a ^full^ inner join b",
"(?s).*Encountered \"full inner\" at line 1, column 17.*");
}
/** Tests FULL OUTER JOIN; the OUTER noise word is dropped on unparse. */
@Test public void testFullOuterJoin() {
// OUTER is an optional extra to LEFT, RIGHT, or FULL
check(
"select * from a full outer join b",
"SELECT *\n"
+ "FROM `A`\n"
+ "FULL JOIN `B`");
}
/** Tests that INNER may not be combined with OUTER. */
@Test public void testInnerOuterJoinFails() {
checkFails(
"select * from a ^inner^ outer join b",
"(?s).*Encountered \"inner outer\" at line 1, column 17.*");
}
/** Tests left-associativity of joins; currently disabled. */
@Ignore
@Test public void testJoinAssociativity() {
// joins are left-associative
// 1. no parens needed
check(
"select * from (a natural left join b) left join c on b.c1 = c.c1",
"SELECT *\n"
+ "FROM (`A` NATURAL LEFT JOIN `B`) LEFT JOIN `C` ON (`B`.`C1` = `C`.`C1`)\n");
// 2. parens needed
check(
"select * from a natural left join (b left join c on b.c1 = c.c1)",
"SELECT *\n"
+ "FROM (`A` NATURAL LEFT JOIN `B`) LEFT JOIN `C` ON (`B`.`C1` = `C`.`C1`)\n");
// 3. same as 1
check(
"select * from a natural left join b left join c on b.c1 = c.c1",
"SELECT *\n"
+ "FROM (`A` NATURAL LEFT JOIN `B`) LEFT JOIN `C` ON (`B`.`C1` = `C`.`C1`)\n");
}
// Note: "select * from a natural cross join b" is actually illegal SQL
// ("cross" is the only join type which cannot be modified with the
// "natural") but the parser allows it; we catch it at validation time
/** Tests that NATURAL CROSS JOIN parses (validation rejects it later). */
@Test public void testNaturalCrossJoin() {
  sql("select * from a natural cross join b")
      .ok("SELECT *\n"
          + "FROM `A`\n"
          + "NATURAL CROSS JOIN `B`");
}
/** Tests JOIN ... USING, and that the USING column list must be
 * non-empty. */
@Test public void testJoinUsing() {
  sql("select * from a join b using (x)")
      .ok("SELECT *\n"
          + "FROM `A`\n"
          + "INNER JOIN `B` USING (`X`)");
  sql("select * from a join b using (^)^ where c = d")
      .fails("(?s).*Encountered \"[)]\" at line 1, column 31.*");
}
/** Tests TABLESAMPLE: SUBSTITUTE on a sub-query, on joined tables (with a
 * split string literal), and BERNOULLI with a numeric rate. */
@Test public void testTableSample() {
check(
"select * from ("
+ " select * "
+ " from emp "
+ " join dept on emp.deptno = dept.deptno"
+ " where gender = 'F'"
+ " order by sal) tablesample substitute('medium')",
"SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM `EMP`\n"
+ "INNER JOIN `DEPT` ON (`EMP`.`DEPTNO` = `DEPT`.`DEPTNO`)\n"
+ "WHERE (`GENDER` = 'F')\n"
+ "ORDER BY `SAL`) TABLESAMPLE SUBSTITUTE('MEDIUM')");
check(
"select * "
+ "from emp as x tablesample substitute('medium') "
+ "join dept tablesample substitute('lar' /* split */ 'ge') on x.deptno = dept.deptno",
"SELECT *\n"
+ "FROM `EMP` AS `X` TABLESAMPLE SUBSTITUTE('MEDIUM')\n"
+ "INNER JOIN `DEPT` TABLESAMPLE SUBSTITUTE('LARGE') ON (`X`.`DEPTNO` = `DEPT`.`DEPTNO`)");
check(
"select * "
+ "from emp as x tablesample bernoulli(50)",
"SELECT *\n"
+ "FROM `EMP` AS `X` TABLESAMPLE BERNOULLI(50.0)");
}
/** Tests literals: strings, numbers, NULL, and TIMESTAMP literals whose
 * fractional seconds are rounded to millisecond precision. */
@Test public void testLiteral() {
checkExpSame("'foo'");
checkExpSame("100");
check(
"select 1 as one, 'x' as x, null as n from emp",
"SELECT 1 AS `ONE`, 'x' AS `X`, NULL AS `N`\n"
+ "FROM `EMP`");
// Even though it looks like a date, it's just a string.
checkExp("'2004-06-01'", "'2004-06-01'");
checkExp("-.25", "-0.25");
checkExpSame("TIMESTAMP '2004-06-01 15:55:55'");
checkExpSame("TIMESTAMP '2004-06-01 15:55:55.900'");
// sub-millisecond digits are rounded, not truncated
checkExp(
"TIMESTAMP '2004-06-01 15:55:55.1234'",
"TIMESTAMP '2004-06-01 15:55:55.123'");
checkExp(
"TIMESTAMP '2004-06-01 15:55:55.1236'",
"TIMESTAMP '2004-06-01 15:55:55.124'");
checkExp(
"TIMESTAMP '2004-06-01 15:55:55.9999'",
"TIMESTAMP '2004-06-01 15:55:56.000'");
checkExpSame("NULL");
}
/** Tests continued (multi-part) string, national-character and binary
 * literals, and that a malformed hex continuation fails. */
@Test public void testContinuedLiteral() {
checkExp(
"'abba'\n'abba'",
"'abba'\n'abba'");
checkExp(
"'abba'\n'0001'",
"'abba'\n'0001'");
checkExp(
"N'yabba'\n'dabba'\n'doo'",
"_ISO-8859-1'yabba'\n'dabba'\n'doo'");
checkExp(
"_iso-8859-1'yabba'\n'dabba'\n'don''t'",
"_ISO-8859-1'yabba'\n'dabba'\n'don''t'");
checkExp(
"x'01aa'\n'03ff'",
"X'01AA'\n'03FF'");
// a bad hexstring
checkFails(
"x'01aa'\n^'vvvv'^",
"Binary literal string must contain only characters '0' - '9', 'A' - 'F'");
}
/** Tests a FROM clause mixing a comma list with JOIN ... USING. */
@Test public void testMixedFrom() {
  // REVIEW: Is this syntax even valid?
  sql("select * from a join b using (x), c join d using (y)")
      .ok("SELECT *\n"
          + "FROM `A`\n"
          + "INNER JOIN `B` USING (`X`),\n"
          + "`C`\n"
          + "INNER JOIN `D` USING (`Y`)");
}
/** Tests a qualified star mixed with an ordinary select item. */
@Test public void testMixedStar() {
  sql("select emp.*, 1 as foo from emp, dept")
      .ok("SELECT `EMP`.*, 1 AS `FOO`\n"
          + "FROM `EMP`,\n"
          + "`DEPT`");
}
/** Tests "schema.table.*" in the SELECT list. */
@Test public void testSchemaTableStar() {
sql("select schem.emp.*, emp.empno * dept.deptno\n"
+ "from schem.emp, dept")
.ok("SELECT `SCHEM`.`EMP`.*, (`EMP`.`EMPNO` * `DEPT`.`DEPTNO`)\n"
+ "FROM `SCHEM`.`EMP`,\n"
+ "`DEPT`");
}
/** Tests "catalog.schema.table.*" in the SELECT list. */
@Test public void testCatalogSchemaTableStar() {
sql("select cat.schem.emp.* from cat.schem.emp")
.ok("SELECT `CAT`.`SCHEM`.`EMP`.*\n"
+ "FROM `CAT`.`SCHEM`.`EMP`");
}
/** Tests that "t.* as alias" parses; the validator rejects it later. */
@Test public void testAliasedStar() {
// OK in parser; validator will give error
sql("select emp.* as foo from emp")
.ok("SELECT `EMP`.* AS `FOO`\n"
+ "FROM `EMP`");
}
/** Tests doubled NOT applied to an EXISTS sub-query. */
@Test public void testNotExists() {
  sql("select * from dept where not not exists (select * from emp) and true")
      .ok("SELECT *\n"
          + "FROM `DEPT`\n"
          + "WHERE ((NOT (NOT (EXISTS (SELECT *\n"
          + "FROM `EMP`)))) AND TRUE)");
}
/** Tests ORDER BY with mixed directions; redundant ASC is dropped on
 * unparse. */
@Test public void testOrder() {
  sql("select * from emp order by empno, gender desc, deptno asc, empno asc, name desc")
      .ok("SELECT *\n"
          + "FROM `EMP`\n"
          + "ORDER BY `EMPNO`, `GENDER` DESC, `DEPTNO`, `EMPNO`, `NAME` DESC");
}
/** Tests NULLS FIRST / NULLS LAST in ORDER BY. */
@Test public void testOrderNullsFirst() {
  sql("select * from emp order by gender desc nulls last, deptno asc nulls first, empno nulls last")
      .ok("SELECT *\n"
          + "FROM `EMP`\n"
          + "ORDER BY `GENDER` DESC NULLS LAST, `DEPTNO` NULLS FIRST, `EMPNO` NULLS LAST");
}
/** Tests ORDER BY inside a parenthesized set-operation branch and inside
 * a sub-query in the FROM clause. */
@Test public void testOrderInternal() {
  sql("(select * from emp order by empno) union select * from emp")
      .ok("((SELECT *\n"
          + "FROM `EMP`\n"
          + "ORDER BY `EMPNO`)\n"
          + "UNION\n"
          + "SELECT *\n"
          + "FROM `EMP`)");
  sql("select * from (select * from t order by x, y) where a = b")
      .ok("SELECT *\n"
          + "FROM (SELECT *\n"
          + "FROM `T`\n"
          + "ORDER BY `X`, `Y`)\n"
          + "WHERE (`A` = `B`)");
}
/** Tests that ORDER BY is legal inside a scalar sub-query but not inside
 * a parenthesized scalar expression. */
@Test public void testOrderIllegalInExpression() {
  sql("select (select 1 from foo order by x,y) from t where a = b")
      .ok("SELECT (SELECT 1\n"
          + "FROM `FOO`\n"
          + "ORDER BY `X`, `Y`)\n"
          + "FROM `T`\n"
          + "WHERE (`A` = `B`)");
  sql("select (1 ^order^ by x, y) from t where a = b")
      .fails("ORDER BY unexpected");
}
/** Tests SQL:2008 OFFSET ... FETCH in its many forms: ROW vs ROWS, FIRST
 * vs NEXT, each clause optional, and the required clause order. */
@Test public void testOrderOffsetFetch() {
check(
"select a from foo order by b, c offset 1 row fetch first 2 row only",
"SELECT `A`\n"
+ "FROM `FOO`\n"
+ "ORDER BY `B`, `C`\n"
+ "OFFSET 1 ROWS\n"
+ "FETCH NEXT 2 ROWS ONLY");
// as above, but ROWS rather than ROW
check(
"select a from foo order by b, c offset 1 rows fetch first 2 rows only",
"SELECT `A`\n"
+ "FROM `FOO`\n"
+ "ORDER BY `B`, `C`\n"
+ "OFFSET 1 ROWS\n"
+ "FETCH NEXT 2 ROWS ONLY");
// as above, but NEXT (means same as FIRST)
check(
"select a from foo order by b, c offset 1 rows fetch next 3 rows only",
"SELECT `A`\n"
+ "FROM `FOO`\n"
+ "ORDER BY `B`, `C`\n"
+ "OFFSET 1 ROWS\n"
+ "FETCH NEXT 3 ROWS ONLY");
// as above, but omit the ROWS noise word after OFFSET. This is not
// compatible with SQL:2008 but allows the Postgres syntax
// "LIMIT ... OFFSET".
check(
"select a from foo order by b, c offset 1 fetch next 3 rows only",
"SELECT `A`\n"
+ "FROM `FOO`\n"
+ "ORDER BY `B`, `C`\n"
+ "OFFSET 1 ROWS\n"
+ "FETCH NEXT 3 ROWS ONLY");
// as above, omit OFFSET
check(
"select a from foo order by b, c fetch next 3 rows only",
"SELECT `A`\n"
+ "FROM `FOO`\n"
+ "ORDER BY `B`, `C`\n"
+ "FETCH NEXT 3 ROWS ONLY");
// FETCH, no ORDER BY or OFFSET
check(
"select a from foo fetch next 4 rows only",
"SELECT `A`\n"
+ "FROM `FOO`\n"
+ "FETCH NEXT 4 ROWS ONLY");
// OFFSET, no ORDER BY or FETCH
check(
"select a from foo offset 1 row",
"SELECT `A`\n"
+ "FROM `FOO`\n"
+ "OFFSET 1 ROWS");
// OFFSET and FETCH, no ORDER BY
check(
"select a from foo offset 1 row fetch next 3 rows only",
"SELECT `A`\n"
+ "FROM `FOO`\n"
+ "OFFSET 1 ROWS\n"
+ "FETCH NEXT 3 ROWS ONLY");
// missing ROWS after FETCH
checkFails(
"select a from foo offset 1 fetch next 3 ^only^",
"(?s).*Encountered \"only\" at .*");
// FETCH before OFFSET is illegal
checkFails(
"select a from foo fetch next 3 rows only ^offset^ 1",
"(?s).*Encountered \"offset\" at .*");
}
/**
* "LIMIT ... OFFSET ..." is the postgres equivalent of SQL:2008
* "OFFSET ... FETCH". It all maps down to a parse tree that looks like
* SQL:2008.
*/
@Test public void testLimit() {
check(
"select a from foo order by b, c limit 2 offset 1",
"SELECT `A`\n"
+ "FROM `FOO`\n"
+ "ORDER BY `B`, `C`\n"
+ "OFFSET 1 ROWS\n"
+ "FETCH NEXT 2 ROWS ONLY");
check(
"select a from foo order by b, c limit 2",
"SELECT `A`\n"
+ "FROM `FOO`\n"
+ "ORDER BY `B`, `C`\n"
+ "FETCH NEXT 2 ROWS ONLY");
check(
"select a from foo order by b, c offset 1",
"SELECT `A`\n"
+ "FROM `FOO`\n"
+ "ORDER BY `B`, `C`\n"
+ "OFFSET 1 ROWS");
}
/** Tests single-line "--" comments in various positions within a
 * statement. */
@Test public void testSqlInlineComment() {
  // comment at the end of the statement
  sql("select 1 from t --this is a comment\n")
      .ok("SELECT 1\n"
          + "FROM `T`");
  // empty comment
  sql("select 1 from t--\n")
      .ok("SELECT 1\n"
          + "FROM `T`");
  // comments on consecutive lines, abutting tokens
  sql("select 1 from t--this is a comment\n"
      + "where a>b-- this is comment\n")
      .ok("SELECT 1\n"
          + "FROM `T`\n"
          + "WHERE (`A` > `B`)");
  // comment containing a keyword
  sql("select 1 from t\n--select")
      .ok("SELECT 1\n"
          + "FROM `T`");
}
  /** Tests bracketed comments, and their interaction with
   * single-line comments and string literals. */
  @Test public void testMultilineComment() {
    // on single line
    check(
        "select 1 /* , 2 */, 3 from t",
        "SELECT 1, 3\n"
        + "FROM `T`");
    // on several lines
    check(
        "select /* 1,\n"
        + " 2, \n"
        + " */ 3 from t",
        "SELECT 3\n"
        + "FROM `T`");
    // stuff inside comment
    check(
        "values ( /** 1, 2 + ** */ 3)",
        "VALUES (ROW(3))");
    // comment in string is preserved
    check(
        "values ('a string with /* a comment */ in it')",
        "VALUES (ROW('a string with /* a comment */ in it'))");
    // SQL:2003, 5.2, syntax rule # 8 "There shall be no <separator>
    // separating the <minus sign>s of a <simple comment introducer>".
    check(
        "values (- -1\n"
        + ")",
        "VALUES (ROW((- -1)))");
    check(
        "values (--1+\n"
        + "2)",
        "VALUES (ROW(2))");
    // end of multiline comment without start
    if (Bug.FRG73_FIXED) {
      checkFails("values (1 */ 2)", "xx");
    }
    // SQL:2003, 5.2, syntax rule #10 "Within a <bracket comment context>,
    // any <solidus> immediately followed by an <asterisk> without any
    // intervening <separator> shall be considered to be the <bracketed
    // comment introducer> for a <separator> that is a <bracketed
    // comment>".
    // comment inside a comment
    // Spec is unclear what should happen, but currently it crashes the
    // parser, and that's bad
    if (Bug.FRG73_FIXED) {
      check("values (1 + /* comment /* inner comment */ */ 2)", "xx");
    }
    // single-line comment inside multiline comment is illegal
    //
    // SQL-2003, 5.2: "Note 63 - Conforming programs should not place
    // <simple comment> within a <bracketed comment> because if such a
    // <simple comment> contains the sequence of characters "*/" without
    // a preceding "/*" in the same <simple comment>, it will prematurely
    // terminate the containing <bracketed comment>.
    if (Bug.FRG73_FIXED) {
      checkFails(
          "values /* multiline contains -- singline */ \n"
          + " (1)",
          "xxx");
    }
    // non-terminated multiline comment inside singleline comment
    if (Bug.FRG73_FIXED) {
      // Test should fail, and it does, but it should give "*/" as the
      // erroneous token.
      checkFails(
          "values ( -- rest of line /* a comment \n"
          + " 1, ^*/^ 2)",
          "Encountered \"/\\*\" at");
    }
    check(
        "values (1 + /* comment -- rest of line\n"
        + " rest of comment */ 2)",
        "VALUES (ROW((1 + 2)))");
    // multiline comment inside singleline comment
    check(
        "values -- rest of line /* a comment */ \n"
        + "(1)",
        "VALUES (ROW(1))");
    // non-terminated multiline comment inside singleline comment
    check(
        "values -- rest of line /* a comment \n"
        + "(1)",
        "VALUES (ROW(1))");
    // even if comment abuts the tokens at either end, it becomes a space
    check(
        "values ('abc'/* a comment*/'def')",
        "VALUES (ROW('abc'\n'def'))");
    // comment which starts as soon as it has begun
    check(
        "values /**/ (1)",
        "VALUES (ROW(1))");
  }
  // expressions
  /** Tests parsing of exact (integer/decimal) and approximate
   * (scientific-notation) numeric literals, and their normalized form. */
  @Test public void testParseNumber() {
    // Exacts
    checkExp("1", "1");
    checkExp("+1.", "1");
    checkExp("-1", "-1");
    checkExp("- -1", "(- -1)");
    checkExp("1.0", "1.0");
    checkExp("-3.2", "-3.2");
    checkExp("1.", "1");
    checkExp(".1", "0.1");
    checkExp("2500000000", "2500000000");
    checkExp("5000000000", "5000000000");
    // Approximates
    checkExp("1e1", "1E1");
    checkExp("+1e1", "1E1");
    checkExp("1.1e1", "1.1E1");
    checkExp("1.1e+1", "1.1E1");
    checkExp("1.1e-1", "1.1E-1");
    checkExp("+1.1e-1", "1.1E-1");
    checkExp("1.E3", "1E3");
    checkExp("1.e-3", "1E-3");
    checkExp("1.e+3", "1E3");
    checkExp(".5E3", "5E2");
    checkExp("+.5e3", "5E2");
    checkExp("-.5E3", "-5E2");
    checkExp(".5e-32", "5E-33");
    // Mix integer/decimals/approx
    checkExp("3. + 2", "(3 + 2)");
    checkExp("1++2+3", "((1 + 2) + 3)");
    checkExp("1- -2", "(1 - -2)");
    checkExp(
        "1++2.3e-4++.5e-6++.7++8",
        "((((1 + 2.3E-4) + 5E-7) + 0.7) + 8)");
    checkExp(
        "1- -2.3e-4 - -.5e-6 -\n"
        + "-.7++8",
        "((((1 - -2.3E-4) - -5E-7) - -0.7) + 8)");
    checkExp("1+-2.*-3.e-1/-4", "(1 + ((-2 * -3E-1) / -4))");
  }
  /** A numeric literal may not be immediately followed by another. */
  @Test public void testParseNumberFails() {
    checkFails(
        "SELECT 0.5e1^.1^ from t",
        "(?s).*Encountered .*\\.1.* at line 1.*");
  }
  @Test public void testMinusPrefixInExpression() {
    checkExp("-(1+2)", "(- (1 + 2))");
  }
  // operator precedence
  @Test public void testPrecedence0() {
    // '*' binds tighter than '+'
    checkExp("1 + 2 * 3 * 4 + 5", "((1 + ((2 * 3) * 4)) + 5)");
  }
  @Test public void testPrecedence1() {
    checkExp("1 + 2 * (3 * (4 + 5))", "(1 + (2 * (3 * (4 + 5))))");
  }
  @Test public void testPrecedence2() {
    checkExp("- - 1", "(- -1)"); // two prefices
  }
  @Test public void testPrecedence3() {
    checkExp("- 1 is null", "(-1 IS NULL)"); // prefix vs. postfix
  }
  @Test public void testPrecedence4() {
    checkExp("1 - -2", "(1 - -2)"); // infix, prefix '-'
  }
  @Test public void testPrecedence5() {
    checkExp("1++2", "(1 + 2)"); // infix, prefix '+'
    checkExp("1+ +2", "(1 + 2)"); // infix, prefix '+'
  }
  /** INTERSECT binds tighter than UNION/EXCEPT, which associate
   * left-to-right with equal precedence. */
  @Test public void testPrecedenceSetOps() {
    check(
        "select * from a union "
        + "select * from b intersect "
        + "select * from c intersect "
        + "select * from d except "
        + "select * from e except "
        + "select * from f union "
        + "select * from g",
        "((((SELECT *\n"
        + "FROM `A`\n"
        + "UNION\n"
        + "((SELECT *\n"
        + "FROM `B`\n"
        + "INTERSECT\n"
        + "SELECT *\n"
        + "FROM `C`)\n"
        + "INTERSECT\n"
        + "SELECT *\n"
        + "FROM `D`))\n"
        + "EXCEPT\n"
        + "SELECT *\n"
        + "FROM `E`)\n"
        + "EXCEPT\n"
        + "SELECT *\n"
        + "FROM `F`)\n"
        + "UNION\n"
        + "SELECT *\n"
        + "FROM `G`)");
  }
  @Test public void testQueryInFrom() {
    // one query with 'as', the other without
    check(
        "select * from (select * from emp) as e join (select * from dept) d",
        "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM `EMP`) AS `E`\n"
        + "INNER JOIN (SELECT *\n"
        + "FROM `DEPT`) AS `D`");
  }
  /** Doubled single-quotes inside a string literal are preserved. */
  @Test public void testQuotesInString() {
    checkExp("'a''b'", "'a''b'");
    checkExp("'''x'", "'''x'");
    checkExp("''", "''");
    checkExp(
        "'Quoted strings aren''t \"hard\"'",
        "'Quoted strings aren''t \"hard\"'");
  }
  /** A scalar subquery may appear on either side of a comparison. */
  @Test public void testScalarQueryInWhere() {
    check(
        "select * from emp where 3 = (select count(*) from dept where dept.deptno = emp.deptno)",
        "SELECT *\n"
        + "FROM `EMP`\n"
        + "WHERE (3 = (SELECT COUNT(*)\n"
        + "FROM `DEPT`\n"
        + "WHERE (`DEPT`.`DEPTNO` = `EMP`.`DEPTNO`)))");
  }
  /** A scalar subquery may appear in the SELECT list. */
  @Test public void testScalarQueryInSelect() {
    check(
        "select x, (select count(*) from dept where dept.deptno = emp.deptno) from emp",
        "SELECT `X`, (SELECT COUNT(*)\n"
        + "FROM `DEPT`\n"
        + "WHERE (`DEPT`.`DEPTNO` = `EMP`.`DEPTNO`))\n"
        + "FROM `EMP`");
  }
  @Test public void testSelectList() {
    // comma-separated tables in FROM each get their own line
    check(
        "select * from emp, dept",
        "SELECT *\n"
        + "FROM `EMP`,\n"
        + "`DEPT`");
  }
  @Test public void testSelectList3() {
    // mixture of literals and a qualified star
    check(
        "select 1, emp.*, 2 from emp",
        "SELECT 1, `EMP`.*, 2\n"
        + "FROM `EMP`");
  }
  @Test public void testSelectList4() {
    // SELECT list may not be empty
    checkFails(
        "select ^from^ emp",
        "(?s).*Encountered \"from\" at line .*");
  }
  @Test public void testStar() {
    check(
        "select * from emp",
        "SELECT *\n"
        + "FROM `EMP`");
  }
  @Test public void testSelectDistinct() {
    check(
        "select distinct foo from bar",
        "SELECT DISTINCT `FOO`\n"
        + "FROM `BAR`");
  }
  @Test public void testSelectAll() {
    // ALL is the default, but when written explicitly the parser
    // retains the keyword on unparse
    check(
        "select * from (select all foo from bar) as xyz",
        "SELECT *\n"
        + "FROM (SELECT ALL `FOO`\n"
        + "FROM `BAR`) AS `XYZ`");
  }
  @Test public void testSelectStream() {
    sql("select stream foo from bar")
        .ok("SELECT STREAM `FOO`\n"
            + "FROM `BAR`");
  }
  @Test public void testSelectStreamDistinct() {
    sql("select stream distinct foo from bar")
        .ok("SELECT STREAM DISTINCT `FOO`\n"
            + "FROM `BAR`");
  }
  @Test public void testWhere() {
    check(
        "select * from emp where empno > 5 and gender = 'F'",
        "SELECT *\n"
        + "FROM `EMP`\n"
        + "WHERE ((`EMPNO` > 5) AND (`GENDER` = 'F'))");
  }
  @Test public void testNestedSelect() {
    check(
        "select * from (select * from emp)",
        "SELECT *\n"
        + "FROM (SELECT *\n"
        + "FROM `EMP`)");
  }
  /** Each VALUES row is wrapped in an implicit ROW constructor. */
  @Test public void testValues() {
    check("values(1,'two')", "VALUES (ROW(1, 'two'))");
  }
  @Test public void testValuesExplicitRow() {
    check("values row(1,'two')", "VALUES (ROW(1, 'two'))");
  }
  @Test public void testFromValues() {
    // bare scalars in a VALUES list are also wrapped in ROW
    check(
        "select * from (values(1,'two'), 3, (4, 'five'))",
        "SELECT *\n"
        + "FROM (VALUES (ROW(1, 'two')), (ROW(3)), (ROW(4, 'five')))");
  }
  /** VALUES in the FROM clause must be parenthesized. */
  @Test public void testFromValuesWithoutParens() {
    checkFails(
        "select 1 from ^values^('x')",
        "Encountered \"values\" at line 1, column 15\\.\n"
        + "Was expecting one of:\n"
        + "    <IDENTIFIER> \\.\\.\\.\n"
        + "    <QUOTED_IDENTIFIER> \\.\\.\\.\n"
        + "    <BACK_QUOTED_IDENTIFIER> \\.\\.\\.\n"
        + "    <BRACKET_QUOTED_IDENTIFIER> \\.\\.\\.\n"
        + "    <UNICODE_QUOTED_IDENTIFIER> \\.\\.\\.\n"
        + "    \"LATERAL\" \\.\\.\\.\n"
        + "    \"\\(\" \\.\\.\\.\n"
        + "    \"UNNEST\" \\.\\.\\.\n"
        + "    \"TABLE\" \\.\\.\\.\n"
        + "    ");
  }
  @Test public void testEmptyValues() {
    checkFails(
        "select * from (values^(^))",
        "(?s).*Encountered \"\\( \\)\" at .*");
  }
  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-493">[CALCITE-493]
   * Add EXTEND clause, for defining columns and their types at query/DML
   * time</a>. */
  @Test public void testTableExtend() {
    // NOT NULL modifiers are dropped on unparse
    sql("select * from emp extend (x int, y varchar(10) not null)")
        .ok("SELECT *\n"
            + "FROM `EMP` EXTEND (`X` INTEGER, `Y` VARCHAR(10))");
    sql("select * from emp extend (x int, y varchar(10) not null) where true")
        .ok("SELECT *\n"
            + "FROM `EMP` EXTEND (`X` INTEGER, `Y` VARCHAR(10))\n"
            + "WHERE TRUE");
    // with table alias
    sql("select * from emp extend (x int, y varchar(10) not null) as t")
        .ok("SELECT *\n"
            + "FROM `EMP` EXTEND (`X` INTEGER, `Y` VARCHAR(10)) AS `T`");
    // as previous, without AS
    sql("select * from emp extend (x int, y varchar(10) not null) t")
        .ok("SELECT *\n"
            + "FROM `EMP` EXTEND (`X` INTEGER, `Y` VARCHAR(10)) AS `T`");
    // with table alias and column alias list
    sql("select * from emp extend (x int, y varchar(10) not null) as t(a, b)")
        .ok("SELECT *\n"
            + "FROM `EMP` EXTEND (`X` INTEGER, `Y` VARCHAR(10)) AS `T` (`A`, `B`)");
    // as previous, without AS
    sql("select * from emp extend (x int, y varchar(10) not null) t(a, b)")
        .ok("SELECT *\n"
            + "FROM `EMP` EXTEND (`X` INTEGER, `Y` VARCHAR(10)) AS `T` (`A`, `B`)");
    // omit EXTEND
    sql("select * from emp (x int, y varchar(10) not null) t(a, b)")
        .ok("SELECT *\n"
            + "FROM `EMP` EXTEND (`X` INTEGER, `Y` VARCHAR(10)) AS `T` (`A`, `B`)");
    sql("select * from emp (x int, y varchar(10) not null) where x = y")
        .ok("SELECT *\n"
            + "FROM `EMP` EXTEND (`X` INTEGER, `Y` VARCHAR(10))\n"
            + "WHERE (`X` = `Y`)");
  }
  /** "TABLE t" is shorthand for "SELECT * FROM t". */
  @Test public void testExplicitTable() {
    check("table emp", "(TABLE `EMP`)");
    // FIXME should fail at "123"
    checkFails(
        "^table^ 123",
        "(?s)Encountered \"table 123\" at line 1, column 1\\.\n.*");
  }
  @Test public void testExplicitTableOrdered() {
    check(
        "table emp order by name",
        "(TABLE `EMP`)\n"
        + "ORDER BY `NAME`");
  }
  @Test public void testSelectFromExplicitTable() {
    check(
        "select * from (table emp)",
        "SELECT *\n"
        + "FROM (TABLE `EMP`)");
  }
  @Test public void testSelectFromBareExplicitTableFails() {
    // FIXME should fail at "emp"
    checkFails(
        "select * from ^table^ emp",
        "(?s).*Encountered \"table emp\" at .*");
    checkFails(
        "select * from (^table^ (select empno from emp))",
        "(?s)Encountered \"table \\(\".*");
  }
  /** TABLE(f(...)) invokes a table function in the FROM clause. */
  @Test public void testCollectionTable() {
    check(
        "select * from table(ramp(3, 4))",
        "SELECT *\n"
        + "FROM TABLE(`RAMP`(3, 4))");
  }
  @Test public void testCollectionTableWithCursorParam() {
    check(
        "select * from table(dedup(cursor(select * from emps),'name'))",
        "SELECT *\n"
        + "FROM TABLE(`DEDUP`((CURSOR ((SELECT *\n"
        + "FROM `EMPS`))), 'name'))");
  }
  @Test public void testCollectionTableWithColumnListParam() {
    check(
        "select * from table(dedup(cursor(select * from emps),"
        + "row(empno, name)))",
        "SELECT *\n"
        + "FROM TABLE(`DEDUP`((CURSOR ((SELECT *\n"
        + "FROM `EMPS`))), (ROW(`EMPNO`, `NAME`))))");
  }
  /** CURSOR is only legal as an argument to a table function. */
  @Test public void testIllegalCursors() {
    checkFails(
        "select ^cursor^(select * from emps) from emps",
        "CURSOR expression encountered in illegal context");
    checkFails(
        "call p(^cursor^(select * from emps))",
        "CURSOR expression encountered in illegal context");
    checkFails(
        "select f(^cursor^(select * from emps)) from emps",
        "CURSOR expression encountered in illegal context");
  }
  /** Bare EXPLAIN defaults to INCLUDING ATTRIBUTES WITH IMPLEMENTATION. */
  @Test public void testExplain() {
    check(
        "explain plan for select * from emps",
        "EXPLAIN PLAN INCLUDING ATTRIBUTES WITH IMPLEMENTATION FOR\n"
        + "SELECT *\n"
        + "FROM `EMPS`");
  }
  @Test public void testExplainWithImpl() {
    check(
        "explain plan with implementation for select * from emps",
        "EXPLAIN PLAN INCLUDING ATTRIBUTES WITH IMPLEMENTATION FOR\n"
        + "SELECT *\n"
        + "FROM `EMPS`");
  }
  @Test public void testExplainWithoutImpl() {
    check(
        "explain plan without implementation for select * from emps",
        "EXPLAIN PLAN INCLUDING ATTRIBUTES WITHOUT IMPLEMENTATION FOR\n"
        + "SELECT *\n"
        + "FROM `EMPS`");
  }
  @Test public void testExplainWithType() {
    check(
        "explain plan with type for (values (true))",
        "EXPLAIN PLAN INCLUDING ATTRIBUTES WITH TYPE FOR\n"
        + "(VALUES (ROW(TRUE)))");
  }
  @Test public void testDescribeSchema() {
    check("describe schema A",
        "DESCRIBE SCHEMA `A`");
    // Currently DESCRIBE DATABASE, DESCRIBE CATALOG become DESCRIBE SCHEMA.
    // See [CALCITE-1221] Implement DESCRIBE DATABASE, CATALOG, STATEMENT
    check("describe database A",
        "DESCRIBE SCHEMA `A`");
    check("describe catalog A",
        "DESCRIBE SCHEMA `A`");
  }
  @Test public void testDescribeTable() {
    check("describe emps",
        "DESCRIBE TABLE `EMPS`");
    // double-quoted identifier keeps its case
    check("describe \"emps\"",
        "DESCRIBE TABLE `emps`");
    check("describe s.emps",
        "DESCRIBE TABLE `S`.`EMPS`");
    check("describe db.c.s.emps",
        "DESCRIBE TABLE `DB`.`C`.`S`.`EMPS`");
    check("describe emps col1",
        "DESCRIBE TABLE `EMPS` `COL1`");
    // table keyword is OK
    check("describe table emps col1",
        "DESCRIBE TABLE `EMPS` `COL1`");
    // character literal for column name not ok
    checkFails("describe emps ^'col_'^",
        "(?s).*Encountered \"\\\\'col_\\\\'\" at .*");
    // composite column name not ok
    checkFails("describe emps c1^.^c2",
        "(?s).*Encountered \"\\.\" at .*");
  }
  @Test public void testDescribeStatement() {
    // Currently DESCRIBE STATEMENT becomes EXPLAIN.
    // See [CALCITE-1221] Implement DESCRIBE DATABASE, CATALOG, STATEMENT
    final String expected0 = ""
        + "EXPLAIN PLAN INCLUDING ATTRIBUTES WITH IMPLEMENTATION FOR\n"
        + "SELECT *\n"
        + "FROM `EMPS`";
    check("describe statement select * from emps", expected0);
    final String expected1 = ""
        + "EXPLAIN PLAN INCLUDING ATTRIBUTES WITH IMPLEMENTATION FOR\n"
        + "(SELECT *\n"
        + "FROM `EMPS`\n"
        + "ORDER BY 2)";
    check("describe statement select * from emps order by 2",
        expected1);
    // the STATEMENT keyword and the parentheses are both optional
    check("describe select * from emps", expected0);
    check("describe (select * from emps)", expected0);
    check("describe statement (select * from emps)", expected0);
    final String expected2 = ""
        + "EXPLAIN PLAN INCLUDING ATTRIBUTES WITH IMPLEMENTATION FOR\n"
        + "(SELECT `DEPTNO`\n"
        + "FROM `EMPS`\n"
        + "UNION\n"
        + "SELECT `DEPTNO`\n"
        + "FROM `DEPTS`)";
    check("describe select deptno from emps union select deptno from depts",
        expected2);
    final String expected3 = ""
        + "EXPLAIN PLAN INCLUDING ATTRIBUTES WITH IMPLEMENTATION FOR\n"
        + "INSERT INTO `EMPS`\n"
        + "(VALUES (ROW(1, 'a')))";
    check("describe insert into emps values (1, 'a')", expected3);
    // only allow query or DML, not explain, inside describe
    checkFails("^describe^ explain plan for select * from emps",
        "(?s).*Encountered \"describe explain\" at .*");
    checkFails("describe ^statement^ explain plan for select * from emps",
        "(?s).*Encountered \"statement explain\" at .*");
  }
  @Test public void testInsertSelect() {
    check(
        "insert into emps select * from emps",
        "INSERT INTO `EMPS`\n"
        + "(SELECT *\n"
        + "FROM `EMPS`)");
  }
  @Test public void testInsertUnion() {
    check(
        "insert into emps select * from emps1 union select * from emps2",
        "INSERT INTO `EMPS`\n"
        + "(SELECT *\n"
        + "FROM `EMPS1`\n"
        + "UNION\n"
        + "SELECT *\n"
        + "FROM `EMPS2`)");
  }
  @Test public void testInsertValues() {
    check(
        "insert into emps values (1,'Fredkin')",
        "INSERT INTO `EMPS`\n"
        + "(VALUES (ROW(1, 'Fredkin')))");
  }
  @Test public void testInsertColumnList() {
    check(
        "insert into emps(x,y) select * from emps",
        "INSERT INTO `EMPS` (`X`, `Y`)\n"
        + "(SELECT *\n"
        + "FROM `EMPS`)");
  }
  @Test public void testExplainInsert() {
    check(
        "explain plan for insert into emps1 select * from emps2",
        "EXPLAIN PLAN INCLUDING ATTRIBUTES WITH IMPLEMENTATION FOR\n"
        + "INSERT INTO `EMPS1`\n"
        + "(SELECT *\n"
        + "FROM `EMPS2`)");
  }
  // UPSERT follows the same shape as INSERT
  @Test public void testUpsertValues() {
    sql("upsert into emps values (1,'Fredkin')")
        .ok("UPSERT INTO `EMPS`\n"
            + "(VALUES (ROW(1, 'Fredkin')))");
  }
  @Test public void testUpsertSelect() {
    sql("upsert into emps select * from emp as e")
        .ok("UPSERT INTO `EMPS`\n"
            + "(SELECT *\n"
            + "FROM `EMP` AS `E`)");
  }
  @Test public void testExplainUpsert() {
    sql("explain plan for upsert into emps1 values (1, 2)")
        .ok("EXPLAIN PLAN INCLUDING ATTRIBUTES WITH IMPLEMENTATION FOR\n"
            + "UPSERT INTO `EMPS1`\n"
            + "(VALUES (ROW(1, 2)))");
  }
  @Test public void testDelete() {
    check("delete from emps", "DELETE FROM `EMPS`");
  }
  @Test public void testDeleteWhere() {
    check(
        "delete from emps where empno=12",
        "DELETE FROM `EMPS`\n"
        + "WHERE (`EMPNO` = 12)");
  }
  @Test public void testUpdate() {
    sql("update emps set empno = empno + 1, sal = sal - 1 where empno=12")
        .ok("UPDATE `EMPS` SET `EMPNO` = (`EMPNO` + 1)\n"
            + ", `SAL` = (`SAL` - 1)\n"
            + "WHERE (`EMPNO` = 12)");
  }
  /** MERGE whose USING clause is a subquery. */
  @Test public void testMergeSelectSource() {
    check(
        "merge into emps e "
        + "using (select * from tempemps where deptno is null) t "
        + "on e.empno = t.empno "
        + "when matched then update "
        + "set name = t.name, deptno = t.deptno, salary = t.salary * .1 "
        + "when not matched then insert (name, dept, salary) "
        + "values(t.name, 10, t.salary * .15)",
        "MERGE INTO `EMPS` AS `E`\n"
        + "USING (SELECT *\n"
        + "FROM `TEMPEMPS`\n"
        + "WHERE (`DEPTNO` IS NULL)) AS `T`\n"
        + "ON (`E`.`EMPNO` = `T`.`EMPNO`)\n"
        + "WHEN MATCHED THEN UPDATE SET `NAME` = `T`.`NAME`\n"
        + ", `DEPTNO` = `T`.`DEPTNO`\n"
        + ", `SALARY` = (`T`.`SALARY` * 0.1)\n"
        + "WHEN NOT MATCHED THEN INSERT (`NAME`, `DEPT`, `SALARY`) "
        + "(VALUES (ROW(`T`.`NAME`, 10, (`T`.`SALARY` * 0.15))))");
  }
  /** MERGE whose USING clause is a plain table reference. */
  @Test public void testMergeTableRefSource() {
    check(
        "merge into emps e "
        + "using tempemps as t "
        + "on e.empno = t.empno "
        + "when matched then update "
        + "set name = t.name, deptno = t.deptno, salary = t.salary * .1 "
        + "when not matched then insert (name, dept, salary) "
        + "values(t.name, 10, t.salary * .15)",
        "MERGE INTO `EMPS` AS `E`\n"
        + "USING `TEMPEMPS` AS `T`\n"
        + "ON (`E`.`EMPNO` = `T`.`EMPNO`)\n"
        + "WHEN MATCHED THEN UPDATE SET `NAME` = `T`.`NAME`\n"
        + ", `DEPTNO` = `T`.`DEPTNO`\n"
        + ", `SALARY` = (`T`.`SALARY` * 0.1)\n"
        + "WHEN NOT MATCHED THEN INSERT (`NAME`, `DEPT`, `SALARY`) "
        + "(VALUES (ROW(`T`.`NAME`, 10, (`T`.`SALARY` * 0.15))))");
  }
  @Test public void testBitStringNotImplemented() {
    // Bit-string is no longer part of the SQL standard. We do not support it.
    checkFails(
        "select B^'1011'^ || 'foobar' from (values (true))",
        "(?s).*Encountered \"\\\\'1011\\\\'\" at line 1, column 9.*");
  }
  /** X'...' binary literals: hex digits are upper-cased, and literal
   * chains separated by whitespace/comments are preserved. */
  @Test public void testHexAndBinaryString() {
    checkExp("x''=X'2'", "(X'' = X'2')");
    checkExp("x'fffff'=X''", "(X'FFFFF' = X'')");
    checkExp(
        "x'1' \t\t\f\r \n"
        + "'2'--hi this is a comment'FF'\r\r\t\f \n"
        + "'34'",
        "X'1'\n'2'\n'34'");
    checkExp(
        "x'1' \t\t\f\r \n"
        + "'000'--\n"
        + "'01'",
        "X'1'\n'000'\n'01'");
    checkExp(
        "x'1234567890abcdef'=X'fFeEdDcCbBaA'",
        "(X'1234567890ABCDEF' = X'FFEEDDCCBBAA')");
    // Check the initial zeroes don't get trimmed somehow
    checkExp("x'001'=X'000102'", "(X'001' = X'000102')");
  }
  @Test public void testHexAndBinaryStringFails() {
    checkFails(
        "select ^x'FeedGoats'^ from t",
        "Binary literal string must contain only characters '0' - '9', 'A' - 'F'");
    checkFails(
        "select ^x'abcdefG'^ from t",
        "Binary literal string must contain only characters '0' - '9', 'A' - 'F'");
    // a second X-prefixed literal may not continue a chain
    checkFails(
        "select x'1' ^x'2'^ from t",
        "(?s).*Encountered .x.*2.* at line 1, column 13.*");
    // valid syntax, but should fail in the validator
    check(
        "select x'1' '2' from t",
        "SELECT X'1'\n"
        + "'2'\n"
        + "FROM `T`");
  }
  /** Character-set-prefixed string literals; N'...' is shorthand for
   * _ISO-8859-1'...'. */
  @Test public void testStringLiteral() {
    checkExp("_latin1'hi'", "_LATIN1'hi'");
    checkExp(
        "N'is it a plane? no it''s superman!'",
        "_ISO-8859-1'is it a plane? no it''s superman!'");
    checkExp("n'lowercase n'", "_ISO-8859-1'lowercase n'");
    checkExp("'boring string'", "'boring string'");
    checkExp("_iSo-8859-1'bye'", "_ISO-8859-1'bye'");
    // string literal chains separated by whitespace or comments
    checkExp(
        "'three' \n ' blind'\n' mice'",
        "'three'\n' blind'\n' mice'");
    checkExp(
        "'three' -- comment \n ' blind'\n' mice'",
        "'three'\n' blind'\n' mice'");
    checkExp(
        "N'bye' \t\r\f\f\n' bye'",
        "_ISO-8859-1'bye'\n' bye'");
    checkExp(
        "_iso-8859-1'bye' \n\n--\n-- this is a comment\n' bye'",
        "_ISO-8859-1'bye'\n' bye'");
    // newline in string literal
    checkExp("'foo\rbar'", "'foo\rbar'");
    checkExp("'foo\nbar'", "'foo\nbar'");
    // prevent test infrastructure from converting \r\n to \n
    boolean[] linuxify = LINUXIFY.get();
    try {
      linuxify[0] = false;
      checkExp("'foo\r\nbar'", "'foo\r\nbar'");
    } finally {
      linuxify[0] = true;
    }
  }
  @Test public void testStringLiteralFails() {
    // whitespace between charset prefix and quote is illegal
    checkFails(
        "select N ^'space'^",
        "(?s).*Encountered .*space.* at line 1, column ...*");
    checkFails(
        "select _latin1 \n^'newline'^",
        "(?s).*Encountered.*newline.* at line 2, column ...*");
    checkFails(
        "select ^_unknown-charset''^ from (values(true))",
        "Unknown character set 'unknown-charset'");
    // valid syntax, but should give a validator error
    check(
        "select N'1' '2' from t",
        "SELECT _ISO-8859-1'1'\n'2'\n"
        + "FROM `T`");
  }
  @Test public void testStringLiteralChain() {
    final String fooBar =
        "'foo'\n"
        + "'bar'";
    final String fooBarBaz =
        "'foo'\n"
        + "'bar'\n"
        + "'baz'";
    checkExp("   'foo'\r'bar'", fooBar);
    checkExp("   'foo'\r\n'bar'", fooBar);
    checkExp("   'foo'\r\n\r\n'bar'  \n   'baz'", fooBarBaz);
    checkExp("   'foo' /* a comment */ 'bar'", fooBar);
    checkExp("   'foo' -- a comment\r\n 'bar'", fooBar);
    // String literals not separated by comment or newline are OK in
    // parser, should fail in validator.
    checkExp("   'foo' 'bar'", fooBar);
  }
  /** Simple CASE is rewritten into searched CASE; a missing ELSE
   * becomes ELSE NULL. */
  @Test public void testCaseExpression() {
    // implicit simple "ELSE NULL" case
    checkExp(
        "case \t col1 when 1 then 'one' end",
        "(CASE WHEN (`COL1` = 1) THEN 'one' ELSE NULL END)");
    // implicit searched "ELSE NULL" case
    checkExp(
        "case when nbr is false then 'one' end",
        "(CASE WHEN (`NBR` IS FALSE) THEN 'one' ELSE NULL END)");
    // multiple WHENs
    checkExp(
        "case col1 when \n1.2 then 'one' when 2 then 'two' else 'three' end",
        "(CASE WHEN (`COL1` = 1.2) THEN 'one' WHEN (`COL1` = 2) THEN 'two' ELSE 'three' END)");
    // subqueries as case expression operands
    checkExp(
        "case (select * from emp) when 1 then 2 end",
        "(CASE WHEN ((SELECT *\n"
        + "FROM `EMP`) = 1) THEN 2 ELSE NULL END)");
    checkExp(
        "case 1 when (select * from emp) then 2 end",
        "(CASE WHEN (1 = (SELECT *\n"
        + "FROM `EMP`)) THEN 2 ELSE NULL END)");
    checkExp(
        "case 1 when 2 then (select * from emp) end",
        "(CASE WHEN (1 = 2) THEN (SELECT *\n"
        + "FROM `EMP`) ELSE NULL END)");
    checkExp(
        "case 1 when 2 then 3 else (select * from emp) end",
        "(CASE WHEN (1 = 2) THEN 3 ELSE (SELECT *\n"
        + "FROM `EMP`) END)");
    // a comma-list of WHEN values becomes an IN condition
    checkExp(
        "case x when 2, 4 then 3 else 4 end",
        "(CASE WHEN (`X` IN (2, 4)) THEN 3 ELSE 4 END)");
    // comma-list must not be empty
    checkFails(
        "case x when 2, 4 then 3 ^when^ then 5 else 4 end",
        "(?s)Encountered \"when then\" at .*");
    // commas not allowed in boolean case
    checkFails(
        "case when b1, b2 ^when^ 2, 4 then 3 else 4 end",
        "(?s)Encountered \"when\" at .*");
  }
  @Test public void testCaseExpressionFails() {
    // Missing 'END'
    checkFails(
        "select case col1 when 1 then 'one' ^from^ t",
        "(?s).*from.*");
    // Wrong 'WHEN'
    checkFails(
        "select case col1 ^when1^ then 'one' end from t",
        "(?s).*when1.*");
  }
  @Test public void testNullIf() {
    checkExp(
        "nullif(v1,v2)",
        "NULLIF(`V1`, `V2`)");
    // NULLIF is not valid as a bare identifier/operand
    checkExpFails(
        "1 ^+^ nullif + 3",
        "(?s)Encountered \"\\+ nullif \\+\" at line 1, column 3.*");
  }
  @Test public void testCoalesce() {
    checkExp(
        "coalesce(v1)",
        "COALESCE(`V1`)");
    checkExp(
        "coalesce(v1,v2)",
        "COALESCE(`V1`, `V2`)");
    checkExp(
        "coalesce(v1,v2,v3)",
        "COALESCE(`V1`, `V2`, `V3`)");
  }
  /** COLLATE clauses; entire body is guarded by an open bug flag. */
  @Test public void testLiteralCollate() {
    if (!Bug.FRG78_FIXED) {
      return;
    }
    checkExp(
        "'string' collate latin1$sv_SE$mega_strength",
        "'string' COLLATE ISO-8859-1$sv_SE$mega_strength");
    checkExp(
        "'a long '\n'string' collate latin1$sv_SE$mega_strength",
        "'a long ' 'string' COLLATE ISO-8859-1$sv_SE$mega_strength");
    checkExp(
        "x collate iso-8859-6$ar_LB$1",
        "`X` COLLATE ISO-8859-6$ar_LB$1");
    checkExp(
        "x.y.z collate shift_jis$ja_JP$2",
        "`X`.`Y`.`Z` COLLATE SHIFT_JIS$ja_JP$2");
    checkExp(
        "'str1'='str2' collate latin1$sv_SE",
        "('str1' = 'str2' COLLATE ISO-8859-1$sv_SE$primary)");
    checkExp(
        "'str1' collate latin1$sv_SE>'str2'",
        "('str1' COLLATE ISO-8859-1$sv_SE$primary > 'str2')");
    checkExp(
        "'str1' collate latin1$sv_SE<='str2' collate latin1$sv_FI",
        "('str1' COLLATE ISO-8859-1$sv_SE$primary <= 'str2' COLLATE ISO-8859-1$sv_FI$primary)");
  }
  @Test public void testCharLength() {
    checkExp("char_length('string')", "CHAR_LENGTH('string')");
    checkExp("character_length('string')", "CHARACTER_LENGTH('string')");
  }
  @Test public void testPosition() {
    // function name is case-normalized; IN keyword preserved
    checkExp(
        "posiTion('mouse' in 'house')",
        "POSITION('mouse' IN 'house')");
  }
  // check date/time functions.
  /** Niladic datetime functions parse as identifiers without parens,
   * as function calls with an argument; also checks datetime literals. */
  @Test public void testTimeDate() {
    // CURRENT_TIME - returns time w/ timezone
    checkExp("CURRENT_TIME(3)", "CURRENT_TIME(3)");
    // checkFails("SELECT CURRENT_TIME() FROM foo",
    //     "SELECT CURRENT_TIME() FROM `FOO`");
    checkExp("CURRENT_TIME", "`CURRENT_TIME`");
    checkExp("CURRENT_TIME(x+y)", "CURRENT_TIME((`X` + `Y`))");
    // LOCALTIME returns time w/o TZ
    checkExp("LOCALTIME(3)", "LOCALTIME(3)");
    // checkFails("SELECT LOCALTIME() FROM foo",
    //     "SELECT LOCALTIME() FROM `FOO`");
    checkExp("LOCALTIME", "`LOCALTIME`");
    checkExp("LOCALTIME(x+y)", "LOCALTIME((`X` + `Y`))");
    // LOCALTIMESTAMP - returns timestamp w/o TZ
    checkExp("LOCALTIMESTAMP(3)", "LOCALTIMESTAMP(3)");
    // checkFails("SELECT LOCALTIMESTAMP() FROM foo",
    //     "SELECT LOCALTIMESTAMP() FROM `FOO`");
    checkExp("LOCALTIMESTAMP", "`LOCALTIMESTAMP`");
    checkExp("LOCALTIMESTAMP(x+y)", "LOCALTIMESTAMP((`X` + `Y`))");
    // CURRENT_DATE - returns DATE
    checkExp("CURRENT_DATE(3)", "CURRENT_DATE(3)");
    // checkFails("SELECT CURRENT_DATE() FROM foo",
    //     "SELECT CURRENT_DATE() FROM `FOO`");
    checkExp("CURRENT_DATE", "`CURRENT_DATE`");
    // checkFails("SELECT CURRENT_DATE(x+y) FROM foo",
    //     "CURRENT_DATE((`X` + `Y`))");
    // CURRENT_TIMESTAMP - returns timestamp w/ TZ
    checkExp("CURRENT_TIMESTAMP(3)", "CURRENT_TIMESTAMP(3)");
    // checkFails("SELECT CURRENT_TIMESTAMP() FROM foo",
    //     "SELECT CURRENT_TIMESTAMP() FROM `FOO`");
    checkExp("CURRENT_TIMESTAMP", "`CURRENT_TIMESTAMP`");
    checkExp("CURRENT_TIMESTAMP(x+y)", "CURRENT_TIMESTAMP((`X` + `Y`))");
    // Date literals
    checkExp("DATE '2004-12-01'", "DATE '2004-12-01'");
    checkExp("TIME '12:01:01'", "TIME '12:01:01'");
    // trailing decimal point with no digits is dropped
    checkExp("TIME '12:01:01.'", "TIME '12:01:01'");
    checkExp("TIME '12:01:01.000'", "TIME '12:01:01.000'");
    checkExp("TIME '12:01:01.001'", "TIME '12:01:01.001'");
    checkExp(
        "TIMESTAMP '2004-12-01 12:01:01'",
        "TIMESTAMP '2004-12-01 12:01:01'");
    checkExp(
        "TIMESTAMP '2004-12-01 12:01:01.1'",
        "TIMESTAMP '2004-12-01 12:01:01.1'");
    checkExp(
        "TIMESTAMP '2004-12-01 12:01:01.'",
        "TIMESTAMP '2004-12-01 12:01:01'");
    checkExpSame("TIMESTAMP '2004-12-01 12:01:01.1'");
    // Failures.
    checkFails("^DATE '12/21/99'^", "(?s).*Illegal DATE literal.*");
    checkFails("^TIME '1230:33'^", "(?s).*Illegal TIME literal.*");
    checkFails("^TIME '12:00:00 PM'^", "(?s).*Illegal TIME literal.*");
    checkFails(
        "^TIMESTAMP '12-21-99, 12:30:00'^",
        "(?s).*Illegal TIMESTAMP literal.*");
  }
  /**
   * Tests for casting to/from date/time types.
   */
  @Test public void testDateTimeCast() {
    //   checkExp("CAST(DATE '2001-12-21' AS CHARACTER VARYING)",
    // "CAST(2001-12-21)");
    checkExp("CAST('2001-12-21' AS DATE)", "CAST('2001-12-21' AS DATE)");
    checkExp("CAST(12 AS DATE)", "CAST(12 AS DATE)");
    checkFails(
        "CAST('2000-12-21' AS DATE ^NOT^ NULL)",
        "(?s).*Encountered \"NOT\" at line 1, column 27.*");
    checkFails(
        "CAST('foo' as ^1^)",
        "(?s).*Encountered \"1\" at line 1, column 15.*");
    checkExp(
        "Cast(DATE '2004-12-21' AS VARCHAR(10))",
        "CAST(DATE '2004-12-21' AS VARCHAR(10))");
  }
  /** TRIM defaults: BOTH when the flag is omitted, ' ' when the
   * trim character is omitted. */
  @Test public void testTrim() {
    checkExp(
        "trim('mustache' FROM 'beard')",
        "TRIM(BOTH 'mustache' FROM 'beard')");
    checkExp("trim('mustache')", "TRIM(BOTH ' ' FROM 'mustache')");
    checkExp(
        "trim(TRAILING FROM 'mustache')",
        "TRIM(TRAILING ' ' FROM 'mustache')");
    checkExp(
        "trim(bOth 'mustache' FROM 'beard')",
        "TRIM(BOTH 'mustache' FROM 'beard')");
    checkExp(
        "trim( lEaDing       'mustache' FROM 'beard')",
        "TRIM(LEADING 'mustache' FROM 'beard')");
    checkExp(
        "trim(\r\n\ttrailing\n  'mustache' FROM 'beard')",
        "TRIM(TRAILING 'mustache' FROM 'beard')");
    checkExp(
        "trim (coalesce(cast(null as varchar(2)))||"
        + "' '||coalesce('junk ',''))",
        "TRIM(BOTH ' ' FROM ((COALESCE(CAST(NULL AS VARCHAR(2))) || "
        + "' ') || COALESCE('junk ', '')))");
    checkFails(
        "trim(^from^ 'beard')",
        "(?s).*'FROM' without operands preceding it is illegal.*");
  }
  @Test public void testConvertAndTranslate() {
    checkExp(
        "convert('abc' using conversion)",
        "CONVERT('abc' USING `CONVERSION`)");
    checkExp(
        "translate('abc' using lazy_translation)",
        "TRANSLATE('abc' USING `LAZY_TRANSLATION`)");
  }
  @Test public void testOverlay() {
    checkExp(
        "overlay('ABCdef' placing 'abc' from 1)",
        "OVERLAY('ABCdef' PLACING 'abc' FROM 1)");
    checkExp(
        "overlay('ABCdef' placing 'abc' from 1 for 3)",
        "OVERLAY('ABCdef' PLACING 'abc' FROM 1 FOR 3)");
  }
  /** JDBC {fn ...} escape syntax; keyword case and whitespace are
   * normalized. */
  @Test public void testJdbcFunctionCall() {
    checkExp("{fn apa(1,'1')}", "{fn APA(1, '1') }");
    checkExp("{ Fn apa(log10(ln(1))+2)}", "{fn APA((LOG10(LN(1)) + 2)) }");
    checkExp("{fN apa(*)}", "{fn APA(*) }");
    checkExp("{   FN\t\r\n apa()}", "{fn APA() }");
    checkExp("{fn insert()}", "{fn INSERT() }");
  }
  @Test public void testWindowReference() {
    checkExp("sum(sal) over (w)", "(SUM(`SAL`) OVER (`W`))");
    // Only 1 window reference allowed
    checkExpFails(
        "sum(sal) over (w ^w1^ partition by deptno)",
        "(?s)Encountered \"w1\" at.*");
  }
  @Test public void testWindowInSubquery() {
    check(
        "select * from ( select sum(x) over w, sum(y) over w from s window w as (range interval '1' minute preceding))",
        "SELECT *\n"
        + "FROM (SELECT (SUM(`X`) OVER `W`), (SUM(`Y`) OVER `W`)\n"
        + "FROM `S`\n"
        + "WINDOW `W` AS (RANGE INTERVAL '1' MINUTE PRECEDING))");
  }
  /** Window specifications: clause ordering, frames, and malformed
   * variants. */
  @Test public void testWindowSpec() {
    // Correct syntax
    check(
        "select count(z) over w as foo from Bids window w as (partition by y + yy, yyy order by x rows between 2 preceding and 2 following)",
        "SELECT (COUNT(`Z`) OVER `W`) AS `FOO`\n"
        + "FROM `BIDS`\n"
        + "WINDOW `W` AS (PARTITION BY (`Y` + `YY`), `YYY` ORDER BY `X` ROWS BETWEEN 2 PRECEDING AND 2 FOLLOWING)");
    check(
        "select count(*) over w from emp window w as (rows 2 preceding)",
        "SELECT (COUNT(*) OVER `W`)\n"
        + "FROM `EMP`\n"
        + "WINDOW `W` AS (ROWS 2 PRECEDING)");
    // Chained string literals are valid syntax. They are unlikely to be
    // semantically valid, because intervals are usually numeric or
    // datetime.
    // Note: literal chain is not yet replaced with combined literal
    // since we are just parsing, and not validating the sql.
    check(
        "select count(*) over w from emp window w as (\n"
        + "  rows 'foo' 'bar'\n"
        + "       'baz' preceding)",
        "SELECT (COUNT(*) OVER `W`)\n"
        + "FROM `EMP`\n"
        + "WINDOW `W` AS (ROWS 'foo'\n'bar'\n'baz' PRECEDING)");
    // Partition clause out of place. Found after ORDER BY
    checkFails(
        "select count(z) over w as foo \n"
        + "from Bids window w as (partition by y order by x ^partition^ by y)",
        "(?s).*Encountered \"partition\".*");
    checkFails(
        "select count(z) over w as foo from Bids window w as (order by x ^partition^ by y)",
        "(?s).*Encountered \"partition\".*");
    // Cannot partition by subquery
    checkFails(
        "select sum(a) over (partition by ^(^select 1 from t), x) from t2",
        "Query expression encountered in illegal context");
    // AND is required in BETWEEN clause of window frame
    checkFails(
        "select sum(x) over (order by x range between unbounded preceding ^unbounded^ following)",
        "(?s).*Encountered \"unbounded\".*");
    // WINDOW keyword is not permissible.
    // FIXME should fail at "window"
    checkFails(
        "select sum(x) ^over^ window (order by x) from bids",
        "(?s).*Encountered \"over window\".*");
    // ORDER BY must be before Frame spec
    checkFails(
        "select sum(x) over (rows 2 preceding ^order^ by x) from emp",
        "(?s).*Encountered \"order\".*");
  }
  @Test public void testWindowSpecPartial() {
    // ALLOW PARTIAL is the default, and is omitted when the statement is
    // unparsed.
    check(
        "select sum(x) over (order by x allow partial) from bids",
        "SELECT (SUM(`X`) OVER (ORDER BY `X`))\n"
        + "FROM `BIDS`");
    check(
        "select sum(x) over (order by x) from bids",
        "SELECT (SUM(`X`) OVER (ORDER BY `X`))\n"
        + "FROM `BIDS`");
    // DISALLOW PARTIAL is not the default, so it is retained
    check(
        "select sum(x) over (order by x disallow partial) from bids",
        "SELECT (SUM(`X`) OVER (ORDER BY `X` DISALLOW PARTIAL))\n"
        + "FROM `BIDS`");
    check(
        "select sum(x) over (order by x) from bids",
        "SELECT (SUM(`X`) OVER (ORDER BY `X`))\n"
        + "FROM `BIDS`");
  }
  @Test public void testAs() {
    // AS is optional for column aliases
    check(
        "select x y from t",
        "SELECT `X` AS `Y`\n"
        + "FROM `T`");
    check(
        "select x AS y from t",
        "SELECT `X` AS `Y`\n"
        + "FROM `T`");
    check(
        "select sum(x) y from t group by z",
        "SELECT SUM(`X`) AS `Y`\n"
        + "FROM `T`\n"
        + "GROUP BY `Z`");
    // Even after OVER
    check(
        "select count(z) over w foo from Bids window w as (order by x)",
        "SELECT (COUNT(`Z`) OVER `W`) AS `FOO`\n"
        + "FROM `BIDS`\n"
        + "WINDOW `W` AS (ORDER BY `X`)");
    // AS is optional for table correlation names
    final String expected =
        "SELECT `X`\n"
        + "FROM `T` AS `T1`";
    check("select x from t as t1", expected);
    check("select x from t t1", expected);
    // AS is required in WINDOW declaration
    checkFails(
        "select sum(x) over w from bids window w ^(order by x)",
        "(?s).*Encountered \"\\(\".*");
    // Error if OVER and AS are in wrong order
    checkFails(
        "select count(*) as foo ^over^ w from Bids window w (order by x)",
        "(?s).*Encountered \"over\".*");
  }
  /** Column alias lists on a table alias, e.g. "t1 (a, b)". */
  @Test public void testAsAliases() {
    check(
        "select x from t as t1 (a, b) where foo",
        "SELECT `X`\n"
        + "FROM `T` AS `T1` (`A`, `B`)\n"
        + "WHERE `FOO`");
    check(
        "select x from (values (1, 2), (3, 4)) as t1 (\"a\", b) where \"a\" > b",
        "SELECT `X`\n"
        + "FROM (VALUES (ROW(1, 2)), (ROW(3, 4))) AS `T1` (`a`, `B`)\n"
        + "WHERE (`a` > `B`)");
    // must have at least one column
    checkFails(
        "select x from (values (1, 2), (3, 4)) as t1 ^(^)",
        "(?s).*Encountered \"\\( \\)\" at .*");
    // cannot have expressions
    checkFails(
        "select x from t as t1 (x ^+^ y)",
        "(?s).*Was expecting one of:\n"
        + "    \"\\)\" \\.\\.\\.\n"
        + "    \",\" \\.\\.\\..*");
    // cannot have compound identifiers
    checkFails(
        "select x from t as t1 (x^.^y)",
        "(?s).*Was expecting one of:\n"
        + "    \"\\)\" \\.\\.\\.\n"
        + "    \",\" \\.\\.\\..*");
  }
  @Test public void testOver() {
    // OVER with an empty inline window specification.
    checkExp(
        "sum(sal) over ()",
        "(SUM(`SAL`) OVER ())");
    // PARTITION BY with multiple keys.
    checkExp(
        "sum(sal) over (partition by x, y)",
        "(SUM(`SAL`) OVER (PARTITION BY `X`, `Y`))");
    // ORDER BY; note the default ASC is omitted when unparsed.
    checkExp(
        "sum(sal) over (order by x desc, y asc)",
        "(SUM(`SAL`) OVER (ORDER BY `X` DESC, `Y`))");
    // ROWS frame with a numeric bound.
    checkExp(
        "sum(sal) over (rows 5 preceding)",
        "(SUM(`SAL`) OVER (ROWS 5 PRECEDING))");
    // RANGE frames bounded by interval literals.
    checkExp(
        "sum(sal) over (range between interval '1' second preceding and interval '1' second following)",
        "(SUM(`SAL`) OVER (RANGE BETWEEN INTERVAL '1' SECOND PRECEDING AND INTERVAL '1' SECOND FOLLOWING))");
    checkExp(
        "sum(sal) over (range between interval '1:03' hour preceding and interval '2' minute following)",
        "(SUM(`SAL`) OVER (RANGE BETWEEN INTERVAL '1:03' HOUR PRECEDING AND INTERVAL '2' MINUTE FOLLOWING))");
    checkExp(
        "sum(sal) over (range between interval '5' day preceding and current row)",
        "(SUM(`SAL`) OVER (RANGE BETWEEN INTERVAL '5' DAY PRECEDING AND CURRENT ROW))");
    // Single-bound RANGE frame (no BETWEEN).
    checkExp(
        "sum(sal) over (range interval '5' day preceding)",
        "(SUM(`SAL`) OVER (RANGE INTERVAL '5' DAY PRECEDING))");
    // UNBOUNDED bounds, with and without BETWEEN.
    checkExp(
        "sum(sal) over (range between unbounded preceding and current row)",
        "(SUM(`SAL`) OVER (RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW))");
    checkExp(
        "sum(sal) over (range unbounded preceding)",
        "(SUM(`SAL`) OVER (RANGE UNBOUNDED PRECEDING))");
    // The parser accepts logically dubious bound combinations such as
    // CURRENT ROW .. UNBOUNDED PRECEDING; presumably the validator rejects
    // them later — TODO confirm.
    checkExp(
        "sum(sal) over (range between current row and unbounded preceding)",
        "(SUM(`SAL`) OVER (RANGE BETWEEN CURRENT ROW AND UNBOUNDED PRECEDING))");
    checkExp(
        "sum(sal) over (range between current row and unbounded following)",
        "(SUM(`SAL`) OVER (RANGE BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING))");
    // Mixed numeric and interval bounds.
    checkExp(
        "sum(sal) over (range between 6 preceding and interval '1:03' hour preceding)",
        "(SUM(`SAL`) OVER (RANGE BETWEEN 6 PRECEDING AND INTERVAL '1:03' HOUR PRECEDING))");
    checkExp(
        "sum(sal) over (range between interval '1' second following and interval '5' day following)",
        "(SUM(`SAL`) OVER (RANGE BETWEEN INTERVAL '1' SECOND FOLLOWING AND INTERVAL '5' DAY FOLLOWING))");
  }
@Test public void testElementFunc() {
checkExp("element(a)", "ELEMENT(`A`)");
}
@Test public void testCardinalityFunc() {
checkExp("cardinality(a)", "CARDINALITY(`A`)");
}
@Test public void testMemberOf() {
checkExp("a member of b", "(`A` MEMBER OF `B`)");
checkExp(
"a member of multiset[b]",
"(`A` MEMBER OF (MULTISET[`B`]))");
}
  // NOTE(review): method name contains a typo ("SubMultisetrOf" should read
  // "SubMultisetOf"); left unrenamed to avoid churning recorded test names.
  @Test public void testSubMultisetrOf() {
    // SUBMULTISET OF multiset comparison operator.
    checkExp("a submultiset of b", "(`A` SUBMULTISET OF `B`)");
  }
@Test public void testIsASet() {
checkExp("b is a set", "(`B` IS A SET)");
checkExp("a is a set", "(`A` IS A SET)");
}
@Test public void testMultiset() {
checkExp("multiset[1]", "(MULTISET[1])");
checkExp("multiset[1,2.3]", "(MULTISET[1, 2.3])");
checkExp("multiset[1, '2']", "(MULTISET[1, '2'])");
checkExp("multiset[ROW(1,2)]", "(MULTISET[(ROW(1, 2))])");
checkExp(
"multiset[ROW(1,2),ROW(3,4)]",
"(MULTISET[(ROW(1, 2)), (ROW(3, 4))])");
checkExp(
"multiset(select*from T)",
"(MULTISET ((SELECT *\n"
+ "FROM `T`)))");
}
  @Test public void testMultisetUnion() {
    checkExp("a multiset union b", "(`A` MULTISET UNION `B`)");
    // ALL survives unparse.
    checkExp("a multiset union all b", "(`A` MULTISET UNION ALL `B`)");
    // DISTINCT is the default set quantifier, so unparse omits it.
    checkExp("a multiset union distinct b", "(`A` MULTISET UNION `B`)");
  }
  @Test public void testMultisetExcept() {
    checkExp("a multiset EXCEPT b", "(`A` MULTISET EXCEPT `B`)");
    // ALL survives unparse.
    checkExp("a multiset EXCEPT all b", "(`A` MULTISET EXCEPT ALL `B`)");
    // DISTINCT is the default set quantifier, so unparse omits it.
    checkExp("a multiset EXCEPT distinct b", "(`A` MULTISET EXCEPT `B`)");
  }
@Test public void testMultisetIntersect() {
checkExp("a multiset INTERSECT b", "(`A` MULTISET INTERSECT `B`)");
checkExp(
"a multiset INTERSECT all b",
"(`A` MULTISET INTERSECT ALL `B`)");
checkExp(
"a multiset INTERSECT distinct b",
"(`A` MULTISET INTERSECT `B`)");
}
@Test public void testMultisetMixed() {
checkExp(
"multiset[1] MULTISET union b",
"((MULTISET[1]) MULTISET UNION `B`)");
checkExp(
"a MULTISET union b multiset intersect c multiset except d multiset union e",
"(((`A` MULTISET UNION (`B` MULTISET INTERSECT `C`)) MULTISET EXCEPT `D`) MULTISET UNION `E`)");
}
  // Item access (bracket subscript) on map-valued expressions.
  @Test public void testMapItem() {
    // Simple key lookup.
    checkExp("a['foo']", "`A`['foo']");
    // The key may be an arbitrary expression.
    checkExp("a['x' || 'y']", "`A`[('x' || 'y')]");
    // Chained item access, with or without intervening whitespace.
    checkExp("a['foo'] ['bar']", "`A`['foo']['bar']");
    checkExp("a['foo']['bar']", "`A`['foo']['bar']");
  }
  // Item access binds tighter than the arithmetic operators around it.
  @Test public void testMapItemPrecedence() {
    checkExp("1 + a['foo'] * 3", "(1 + (`A`['foo'] * 3))");
    checkExp("1 * a['foo'] + 3", "((1 * `A`['foo']) + 3)");
    checkExp("a['foo']['bar']", "`A`['foo']['bar']");
    // Subscripts may nest.
    checkExp("a[b['foo' || 'bar']]", "`A`[`B`[('foo' || 'bar')]]");
  }
  // Array element access, including nested subscript expressions.
  @Test public void testArrayElement() {
    checkExp("a[1]", "`A`[1]");
    checkExp("a[b[1]]", "`A`[`B`[1]]");
    checkExp("a[b[1 + 2] + 3]", "`A`[(`B`[(1 + 2)] + 3)]");
  }
@Test public void testArrayValueConstructor() {
checkExp("array[1, 2]", "(ARRAY[1, 2])");
checkExp("array [1, 2]", "(ARRAY[1, 2])"); // with space
// parser allows empty array; validator will reject it
checkExp("array[]", "(ARRAY[])");
checkExp(
"array[(1, 'a'), (2, 'b')]",
"(ARRAY[(ROW(1, 'a')), (ROW(2, 'b'))])");
}
  // MAP value constructor: alternating key/value arguments.
  @Test public void testMapValueConstructor() {
    checkExp("map[1, 'x', 2, 'y']", "(MAP[1, 'x', 2, 'y'])");
    // Whitespace between MAP and the bracket is permitted.
    checkExp("map [1, 'x', 2, 'y']", "(MAP[1, 'x', 2, 'y'])");
    // Parser allows an empty map; presumably the validator rejects it
    // (mirrors the empty-array case above) — TODO confirm.
    checkExp("map[]", "(MAP[])");
  }
/**
* Runs tests for INTERVAL... YEAR that should pass both parser and
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalYearPositive() {
// default precision
checkExp(
"interval '1' year",
"INTERVAL '1' YEAR");
checkExp(
"interval '99' year",
"INTERVAL '99' YEAR");
// explicit precision equal to default
checkExp(
"interval '1' year(2)",
"INTERVAL '1' YEAR(2)");
checkExp(
"interval '99' year(2)",
"INTERVAL '99' YEAR(2)");
// max precision
checkExp(
"interval '2147483647' year(10)",
"INTERVAL '2147483647' YEAR(10)");
// min precision
checkExp(
"interval '0' year(1)",
"INTERVAL '0' YEAR(1)");
// alternate precision
checkExp(
"interval '1234' year(4)",
"INTERVAL '1234' YEAR(4)");
// sign
checkExp(
"interval '+1' year",
"INTERVAL '+1' YEAR");
checkExp(
"interval '-1' year",
"INTERVAL '-1' YEAR");
checkExp(
"interval +'1' year",
"INTERVAL '1' YEAR");
checkExp(
"interval +'+1' year",
"INTERVAL '+1' YEAR");
checkExp(
"interval +'-1' year",
"INTERVAL '-1' YEAR");
checkExp(
"interval -'1' year",
"INTERVAL -'1' YEAR");
checkExp(
"interval -'+1' year",
"INTERVAL -'+1' YEAR");
checkExp(
"interval -'-1' year",
"INTERVAL -'-1' YEAR");
}
/**
* Runs tests for INTERVAL... YEAR TO MONTH that should pass both parser and
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalYearToMonthPositive() {
// default precision
checkExp(
"interval '1-2' year to month",
"INTERVAL '1-2' YEAR TO MONTH");
checkExp(
"interval '99-11' year to month",
"INTERVAL '99-11' YEAR TO MONTH");
checkExp(
"interval '99-0' year to month",
"INTERVAL '99-0' YEAR TO MONTH");
// explicit precision equal to default
checkExp(
"interval '1-2' year(2) to month",
"INTERVAL '1-2' YEAR(2) TO MONTH");
checkExp(
"interval '99-11' year(2) to month",
"INTERVAL '99-11' YEAR(2) TO MONTH");
checkExp(
"interval '99-0' year(2) to month",
"INTERVAL '99-0' YEAR(2) TO MONTH");
// max precision
checkExp(
"interval '2147483647-11' year(10) to month",
"INTERVAL '2147483647-11' YEAR(10) TO MONTH");
// min precision
checkExp(
"interval '0-0' year(1) to month",
"INTERVAL '0-0' YEAR(1) TO MONTH");
// alternate precision
checkExp(
"interval '2006-2' year(4) to month",
"INTERVAL '2006-2' YEAR(4) TO MONTH");
// sign
checkExp(
"interval '-1-2' year to month",
"INTERVAL '-1-2' YEAR TO MONTH");
checkExp(
"interval '+1-2' year to month",
"INTERVAL '+1-2' YEAR TO MONTH");
checkExp(
"interval +'1-2' year to month",
"INTERVAL '1-2' YEAR TO MONTH");
checkExp(
"interval +'-1-2' year to month",
"INTERVAL '-1-2' YEAR TO MONTH");
checkExp(
"interval +'+1-2' year to month",
"INTERVAL '+1-2' YEAR TO MONTH");
checkExp(
"interval -'1-2' year to month",
"INTERVAL -'1-2' YEAR TO MONTH");
checkExp(
"interval -'-1-2' year to month",
"INTERVAL -'-1-2' YEAR TO MONTH");
checkExp(
"interval -'+1-2' year to month",
"INTERVAL -'+1-2' YEAR TO MONTH");
}
/**
* Runs tests for INTERVAL... MONTH that should pass both parser and
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalMonthPositive() {
// default precision
checkExp(
"interval '1' month",
"INTERVAL '1' MONTH");
checkExp(
"interval '99' month",
"INTERVAL '99' MONTH");
// explicit precision equal to default
checkExp(
"interval '1' month(2)",
"INTERVAL '1' MONTH(2)");
checkExp(
"interval '99' month(2)",
"INTERVAL '99' MONTH(2)");
// max precision
checkExp(
"interval '2147483647' month(10)",
"INTERVAL '2147483647' MONTH(10)");
// min precision
checkExp(
"interval '0' month(1)",
"INTERVAL '0' MONTH(1)");
// alternate precision
checkExp(
"interval '1234' month(4)",
"INTERVAL '1234' MONTH(4)");
// sign
checkExp(
"interval '+1' month",
"INTERVAL '+1' MONTH");
checkExp(
"interval '-1' month",
"INTERVAL '-1' MONTH");
checkExp(
"interval +'1' month",
"INTERVAL '1' MONTH");
checkExp(
"interval +'+1' month",
"INTERVAL '+1' MONTH");
checkExp(
"interval +'-1' month",
"INTERVAL '-1' MONTH");
checkExp(
"interval -'1' month",
"INTERVAL -'1' MONTH");
checkExp(
"interval -'+1' month",
"INTERVAL -'+1' MONTH");
checkExp(
"interval -'-1' month",
"INTERVAL -'-1' MONTH");
}
/**
* Runs tests for INTERVAL... DAY that should pass both parser and
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalDayPositive() {
// default precision
checkExp(
"interval '1' day",
"INTERVAL '1' DAY");
checkExp(
"interval '99' day",
"INTERVAL '99' DAY");
// explicit precision equal to default
checkExp(
"interval '1' day(2)",
"INTERVAL '1' DAY(2)");
checkExp(
"interval '99' day(2)",
"INTERVAL '99' DAY(2)");
// max precision
checkExp(
"interval '2147483647' day(10)",
"INTERVAL '2147483647' DAY(10)");
// min precision
checkExp(
"interval '0' day(1)",
"INTERVAL '0' DAY(1)");
// alternate precision
checkExp(
"interval '1234' day(4)",
"INTERVAL '1234' DAY(4)");
// sign
checkExp(
"interval '+1' day",
"INTERVAL '+1' DAY");
checkExp(
"interval '-1' day",
"INTERVAL '-1' DAY");
checkExp(
"interval +'1' day",
"INTERVAL '1' DAY");
checkExp(
"interval +'+1' day",
"INTERVAL '+1' DAY");
checkExp(
"interval +'-1' day",
"INTERVAL '-1' DAY");
checkExp(
"interval -'1' day",
"INTERVAL -'1' DAY");
checkExp(
"interval -'+1' day",
"INTERVAL -'+1' DAY");
checkExp(
"interval -'-1' day",
"INTERVAL -'-1' DAY");
}
/**
* Runs tests for INTERVAL... DAY TO HOUR that should pass both parser and
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalDayToHourPositive() {
// default precision
checkExp(
"interval '1 2' day to hour",
"INTERVAL '1 2' DAY TO HOUR");
checkExp(
"interval '99 23' day to hour",
"INTERVAL '99 23' DAY TO HOUR");
checkExp(
"interval '99 0' day to hour",
"INTERVAL '99 0' DAY TO HOUR");
// explicit precision equal to default
checkExp(
"interval '1 2' day(2) to hour",
"INTERVAL '1 2' DAY(2) TO HOUR");
checkExp(
"interval '99 23' day(2) to hour",
"INTERVAL '99 23' DAY(2) TO HOUR");
checkExp(
"interval '99 0' day(2) to hour",
"INTERVAL '99 0' DAY(2) TO HOUR");
// max precision
checkExp(
"interval '2147483647 23' day(10) to hour",
"INTERVAL '2147483647 23' DAY(10) TO HOUR");
// min precision
checkExp(
"interval '0 0' day(1) to hour",
"INTERVAL '0 0' DAY(1) TO HOUR");
// alternate precision
checkExp(
"interval '2345 2' day(4) to hour",
"INTERVAL '2345 2' DAY(4) TO HOUR");
// sign
checkExp(
"interval '-1 2' day to hour",
"INTERVAL '-1 2' DAY TO HOUR");
checkExp(
"interval '+1 2' day to hour",
"INTERVAL '+1 2' DAY TO HOUR");
checkExp(
"interval +'1 2' day to hour",
"INTERVAL '1 2' DAY TO HOUR");
checkExp(
"interval +'-1 2' day to hour",
"INTERVAL '-1 2' DAY TO HOUR");
checkExp(
"interval +'+1 2' day to hour",
"INTERVAL '+1 2' DAY TO HOUR");
checkExp(
"interval -'1 2' day to hour",
"INTERVAL -'1 2' DAY TO HOUR");
checkExp(
"interval -'-1 2' day to hour",
"INTERVAL -'-1 2' DAY TO HOUR");
checkExp(
"interval -'+1 2' day to hour",
"INTERVAL -'+1 2' DAY TO HOUR");
}
/**
* Runs tests for INTERVAL... DAY TO MINUTE that should pass both parser and
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalDayToMinutePositive() {
// default precision
checkExp(
"interval '1 2:3' day to minute",
"INTERVAL '1 2:3' DAY TO MINUTE");
checkExp(
"interval '99 23:59' day to minute",
"INTERVAL '99 23:59' DAY TO MINUTE");
checkExp(
"interval '99 0:0' day to minute",
"INTERVAL '99 0:0' DAY TO MINUTE");
// explicit precision equal to default
checkExp(
"interval '1 2:3' day(2) to minute",
"INTERVAL '1 2:3' DAY(2) TO MINUTE");
checkExp(
"interval '99 23:59' day(2) to minute",
"INTERVAL '99 23:59' DAY(2) TO MINUTE");
checkExp(
"interval '99 0:0' day(2) to minute",
"INTERVAL '99 0:0' DAY(2) TO MINUTE");
// max precision
checkExp(
"interval '2147483647 23:59' day(10) to minute",
"INTERVAL '2147483647 23:59' DAY(10) TO MINUTE");
// min precision
checkExp(
"interval '0 0:0' day(1) to minute",
"INTERVAL '0 0:0' DAY(1) TO MINUTE");
// alternate precision
checkExp(
"interval '2345 6:7' day(4) to minute",
"INTERVAL '2345 6:7' DAY(4) TO MINUTE");
// sign
checkExp(
"interval '-1 2:3' day to minute",
"INTERVAL '-1 2:3' DAY TO MINUTE");
checkExp(
"interval '+1 2:3' day to minute",
"INTERVAL '+1 2:3' DAY TO MINUTE");
checkExp(
"interval +'1 2:3' day to minute",
"INTERVAL '1 2:3' DAY TO MINUTE");
checkExp(
"interval +'-1 2:3' day to minute",
"INTERVAL '-1 2:3' DAY TO MINUTE");
checkExp(
"interval +'+1 2:3' day to minute",
"INTERVAL '+1 2:3' DAY TO MINUTE");
checkExp(
"interval -'1 2:3' day to minute",
"INTERVAL -'1 2:3' DAY TO MINUTE");
checkExp(
"interval -'-1 2:3' day to minute",
"INTERVAL -'-1 2:3' DAY TO MINUTE");
checkExp(
"interval -'+1 2:3' day to minute",
"INTERVAL -'+1 2:3' DAY TO MINUTE");
}
/**
* Runs tests for INTERVAL... DAY TO SECOND that should pass both parser and
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalDayToSecondPositive() {
// default precision
checkExp(
"interval '1 2:3:4' day to second",
"INTERVAL '1 2:3:4' DAY TO SECOND");
checkExp(
"interval '99 23:59:59' day to second",
"INTERVAL '99 23:59:59' DAY TO SECOND");
checkExp(
"interval '99 0:0:0' day to second",
"INTERVAL '99 0:0:0' DAY TO SECOND");
checkExp(
"interval '99 23:59:59.999999' day to second",
"INTERVAL '99 23:59:59.999999' DAY TO SECOND");
checkExp(
"interval '99 0:0:0.0' day to second",
"INTERVAL '99 0:0:0.0' DAY TO SECOND");
// explicit precision equal to default
checkExp(
"interval '1 2:3:4' day(2) to second",
"INTERVAL '1 2:3:4' DAY(2) TO SECOND");
checkExp(
"interval '99 23:59:59' day(2) to second",
"INTERVAL '99 23:59:59' DAY(2) TO SECOND");
checkExp(
"interval '99 0:0:0' day(2) to second",
"INTERVAL '99 0:0:0' DAY(2) TO SECOND");
checkExp(
"interval '99 23:59:59.999999' day to second(6)",
"INTERVAL '99 23:59:59.999999' DAY TO SECOND(6)");
checkExp(
"interval '99 0:0:0.0' day to second(6)",
"INTERVAL '99 0:0:0.0' DAY TO SECOND(6)");
// max precision
checkExp(
"interval '2147483647 23:59:59' day(10) to second",
"INTERVAL '2147483647 23:59:59' DAY(10) TO SECOND");
checkExp(
"interval '2147483647 23:59:59.999999999' day(10) to second(9)",
"INTERVAL '2147483647 23:59:59.999999999' DAY(10) TO SECOND(9)");
// min precision
checkExp(
"interval '0 0:0:0' day(1) to second",
"INTERVAL '0 0:0:0' DAY(1) TO SECOND");
checkExp(
"interval '0 0:0:0.0' day(1) to second(1)",
"INTERVAL '0 0:0:0.0' DAY(1) TO SECOND(1)");
// alternate precision
checkExp(
"interval '2345 6:7:8' day(4) to second",
"INTERVAL '2345 6:7:8' DAY(4) TO SECOND");
checkExp(
"interval '2345 6:7:8.9012' day(4) to second(4)",
"INTERVAL '2345 6:7:8.9012' DAY(4) TO SECOND(4)");
// sign
checkExp(
"interval '-1 2:3:4' day to second",
"INTERVAL '-1 2:3:4' DAY TO SECOND");
checkExp(
"interval '+1 2:3:4' day to second",
"INTERVAL '+1 2:3:4' DAY TO SECOND");
checkExp(
"interval +'1 2:3:4' day to second",
"INTERVAL '1 2:3:4' DAY TO SECOND");
checkExp(
"interval +'-1 2:3:4' day to second",
"INTERVAL '-1 2:3:4' DAY TO SECOND");
checkExp(
"interval +'+1 2:3:4' day to second",
"INTERVAL '+1 2:3:4' DAY TO SECOND");
checkExp(
"interval -'1 2:3:4' day to second",
"INTERVAL -'1 2:3:4' DAY TO SECOND");
checkExp(
"interval -'-1 2:3:4' day to second",
"INTERVAL -'-1 2:3:4' DAY TO SECOND");
checkExp(
"interval -'+1 2:3:4' day to second",
"INTERVAL -'+1 2:3:4' DAY TO SECOND");
}
/**
* Runs tests for INTERVAL... HOUR that should pass both parser and
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalHourPositive() {
// default precision
checkExp(
"interval '1' hour",
"INTERVAL '1' HOUR");
checkExp(
"interval '99' hour",
"INTERVAL '99' HOUR");
// explicit precision equal to default
checkExp(
"interval '1' hour(2)",
"INTERVAL '1' HOUR(2)");
checkExp(
"interval '99' hour(2)",
"INTERVAL '99' HOUR(2)");
// max precision
checkExp(
"interval '2147483647' hour(10)",
"INTERVAL '2147483647' HOUR(10)");
// min precision
checkExp(
"interval '0' hour(1)",
"INTERVAL '0' HOUR(1)");
// alternate precision
checkExp(
"interval '1234' hour(4)",
"INTERVAL '1234' HOUR(4)");
// sign
checkExp(
"interval '+1' hour",
"INTERVAL '+1' HOUR");
checkExp(
"interval '-1' hour",
"INTERVAL '-1' HOUR");
checkExp(
"interval +'1' hour",
"INTERVAL '1' HOUR");
checkExp(
"interval +'+1' hour",
"INTERVAL '+1' HOUR");
checkExp(
"interval +'-1' hour",
"INTERVAL '-1' HOUR");
checkExp(
"interval -'1' hour",
"INTERVAL -'1' HOUR");
checkExp(
"interval -'+1' hour",
"INTERVAL -'+1' HOUR");
checkExp(
"interval -'-1' hour",
"INTERVAL -'-1' HOUR");
}
/**
* Runs tests for INTERVAL... HOUR TO MINUTE that should pass both parser
* and validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalHourToMinutePositive() {
// default precision
checkExp(
"interval '2:3' hour to minute",
"INTERVAL '2:3' HOUR TO MINUTE");
checkExp(
"interval '23:59' hour to minute",
"INTERVAL '23:59' HOUR TO MINUTE");
checkExp(
"interval '99:0' hour to minute",
"INTERVAL '99:0' HOUR TO MINUTE");
// explicit precision equal to default
checkExp(
"interval '2:3' hour(2) to minute",
"INTERVAL '2:3' HOUR(2) TO MINUTE");
checkExp(
"interval '23:59' hour(2) to minute",
"INTERVAL '23:59' HOUR(2) TO MINUTE");
checkExp(
"interval '99:0' hour(2) to minute",
"INTERVAL '99:0' HOUR(2) TO MINUTE");
// max precision
checkExp(
"interval '2147483647:59' hour(10) to minute",
"INTERVAL '2147483647:59' HOUR(10) TO MINUTE");
// min precision
checkExp(
"interval '0:0' hour(1) to minute",
"INTERVAL '0:0' HOUR(1) TO MINUTE");
// alternate precision
checkExp(
"interval '2345:7' hour(4) to minute",
"INTERVAL '2345:7' HOUR(4) TO MINUTE");
// sign
checkExp(
"interval '-1:3' hour to minute",
"INTERVAL '-1:3' HOUR TO MINUTE");
checkExp(
"interval '+1:3' hour to minute",
"INTERVAL '+1:3' HOUR TO MINUTE");
checkExp(
"interval +'2:3' hour to minute",
"INTERVAL '2:3' HOUR TO MINUTE");
checkExp(
"interval +'-2:3' hour to minute",
"INTERVAL '-2:3' HOUR TO MINUTE");
checkExp(
"interval +'+2:3' hour to minute",
"INTERVAL '+2:3' HOUR TO MINUTE");
checkExp(
"interval -'2:3' hour to minute",
"INTERVAL -'2:3' HOUR TO MINUTE");
checkExp(
"interval -'-2:3' hour to minute",
"INTERVAL -'-2:3' HOUR TO MINUTE");
checkExp(
"interval -'+2:3' hour to minute",
"INTERVAL -'+2:3' HOUR TO MINUTE");
}
/**
* Runs tests for INTERVAL... HOUR TO SECOND that should pass both parser
* and validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalHourToSecondPositive() {
// default precision
checkExp(
"interval '2:3:4' hour to second",
"INTERVAL '2:3:4' HOUR TO SECOND");
checkExp(
"interval '23:59:59' hour to second",
"INTERVAL '23:59:59' HOUR TO SECOND");
checkExp(
"interval '99:0:0' hour to second",
"INTERVAL '99:0:0' HOUR TO SECOND");
checkExp(
"interval '23:59:59.999999' hour to second",
"INTERVAL '23:59:59.999999' HOUR TO SECOND");
checkExp(
"interval '99:0:0.0' hour to second",
"INTERVAL '99:0:0.0' HOUR TO SECOND");
// explicit precision equal to default
checkExp(
"interval '2:3:4' hour(2) to second",
"INTERVAL '2:3:4' HOUR(2) TO SECOND");
checkExp(
"interval '99:59:59' hour(2) to second",
"INTERVAL '99:59:59' HOUR(2) TO SECOND");
checkExp(
"interval '99:0:0' hour(2) to second",
"INTERVAL '99:0:0' HOUR(2) TO SECOND");
checkExp(
"interval '23:59:59.999999' hour to second(6)",
"INTERVAL '23:59:59.999999' HOUR TO SECOND(6)");
checkExp(
"interval '99:0:0.0' hour to second(6)",
"INTERVAL '99:0:0.0' HOUR TO SECOND(6)");
// max precision
checkExp(
"interval '2147483647:59:59' hour(10) to second",
"INTERVAL '2147483647:59:59' HOUR(10) TO SECOND");
checkExp(
"interval '2147483647:59:59.999999999' hour(10) to second(9)",
"INTERVAL '2147483647:59:59.999999999' HOUR(10) TO SECOND(9)");
// min precision
checkExp(
"interval '0:0:0' hour(1) to second",
"INTERVAL '0:0:0' HOUR(1) TO SECOND");
checkExp(
"interval '0:0:0.0' hour(1) to second(1)",
"INTERVAL '0:0:0.0' HOUR(1) TO SECOND(1)");
// alternate precision
checkExp(
"interval '2345:7:8' hour(4) to second",
"INTERVAL '2345:7:8' HOUR(4) TO SECOND");
checkExp(
"interval '2345:7:8.9012' hour(4) to second(4)",
"INTERVAL '2345:7:8.9012' HOUR(4) TO SECOND(4)");
// sign
checkExp(
"interval '-2:3:4' hour to second",
"INTERVAL '-2:3:4' HOUR TO SECOND");
checkExp(
"interval '+2:3:4' hour to second",
"INTERVAL '+2:3:4' HOUR TO SECOND");
checkExp(
"interval +'2:3:4' hour to second",
"INTERVAL '2:3:4' HOUR TO SECOND");
checkExp(
"interval +'-2:3:4' hour to second",
"INTERVAL '-2:3:4' HOUR TO SECOND");
checkExp(
"interval +'+2:3:4' hour to second",
"INTERVAL '+2:3:4' HOUR TO SECOND");
checkExp(
"interval -'2:3:4' hour to second",
"INTERVAL -'2:3:4' HOUR TO SECOND");
checkExp(
"interval -'-2:3:4' hour to second",
"INTERVAL -'-2:3:4' HOUR TO SECOND");
checkExp(
"interval -'+2:3:4' hour to second",
"INTERVAL -'+2:3:4' HOUR TO SECOND");
}
/**
* Runs tests for INTERVAL... MINUTE that should pass both parser and
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalMinutePositive() {
// default precision
checkExp(
"interval '1' minute",
"INTERVAL '1' MINUTE");
checkExp(
"interval '99' minute",
"INTERVAL '99' MINUTE");
// explicit precision equal to default
checkExp(
"interval '1' minute(2)",
"INTERVAL '1' MINUTE(2)");
checkExp(
"interval '99' minute(2)",
"INTERVAL '99' MINUTE(2)");
// max precision
checkExp(
"interval '2147483647' minute(10)",
"INTERVAL '2147483647' MINUTE(10)");
// min precision
checkExp(
"interval '0' minute(1)",
"INTERVAL '0' MINUTE(1)");
// alternate precision
checkExp(
"interval '1234' minute(4)",
"INTERVAL '1234' MINUTE(4)");
// sign
checkExp(
"interval '+1' minute",
"INTERVAL '+1' MINUTE");
checkExp(
"interval '-1' minute",
"INTERVAL '-1' MINUTE");
checkExp(
"interval +'1' minute",
"INTERVAL '1' MINUTE");
checkExp(
"interval +'+1' minute",
"INTERVAL '+1' MINUTE");
checkExp(
"interval +'+1' minute",
"INTERVAL '+1' MINUTE");
checkExp(
"interval -'1' minute",
"INTERVAL -'1' MINUTE");
checkExp(
"interval -'+1' minute",
"INTERVAL -'+1' MINUTE");
checkExp(
"interval -'-1' minute",
"INTERVAL -'-1' MINUTE");
}
/**
* Runs tests for INTERVAL... MINUTE TO SECOND that should pass both parser
* and validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalMinuteToSecondPositive() {
// default precision
checkExp(
"interval '2:4' minute to second",
"INTERVAL '2:4' MINUTE TO SECOND");
checkExp(
"interval '59:59' minute to second",
"INTERVAL '59:59' MINUTE TO SECOND");
checkExp(
"interval '99:0' minute to second",
"INTERVAL '99:0' MINUTE TO SECOND");
checkExp(
"interval '59:59.999999' minute to second",
"INTERVAL '59:59.999999' MINUTE TO SECOND");
checkExp(
"interval '99:0.0' minute to second",
"INTERVAL '99:0.0' MINUTE TO SECOND");
// explicit precision equal to default
checkExp(
"interval '2:4' minute(2) to second",
"INTERVAL '2:4' MINUTE(2) TO SECOND");
checkExp(
"interval '59:59' minute(2) to second",
"INTERVAL '59:59' MINUTE(2) TO SECOND");
checkExp(
"interval '99:0' minute(2) to second",
"INTERVAL '99:0' MINUTE(2) TO SECOND");
checkExp(
"interval '99:59.999999' minute to second(6)",
"INTERVAL '99:59.999999' MINUTE TO SECOND(6)");
checkExp(
"interval '99:0.0' minute to second(6)",
"INTERVAL '99:0.0' MINUTE TO SECOND(6)");
// max precision
checkExp(
"interval '2147483647:59' minute(10) to second",
"INTERVAL '2147483647:59' MINUTE(10) TO SECOND");
checkExp(
"interval '2147483647:59.999999999' minute(10) to second(9)",
"INTERVAL '2147483647:59.999999999' MINUTE(10) TO SECOND(9)");
// min precision
checkExp(
"interval '0:0' minute(1) to second",
"INTERVAL '0:0' MINUTE(1) TO SECOND");
checkExp(
"interval '0:0.0' minute(1) to second(1)",
"INTERVAL '0:0.0' MINUTE(1) TO SECOND(1)");
// alternate precision
checkExp(
"interval '2345:8' minute(4) to second",
"INTERVAL '2345:8' MINUTE(4) TO SECOND");
checkExp(
"interval '2345:7.8901' minute(4) to second(4)",
"INTERVAL '2345:7.8901' MINUTE(4) TO SECOND(4)");
// sign
checkExp(
"interval '-3:4' minute to second",
"INTERVAL '-3:4' MINUTE TO SECOND");
checkExp(
"interval '+3:4' minute to second",
"INTERVAL '+3:4' MINUTE TO SECOND");
checkExp(
"interval +'3:4' minute to second",
"INTERVAL '3:4' MINUTE TO SECOND");
checkExp(
"interval +'-3:4' minute to second",
"INTERVAL '-3:4' MINUTE TO SECOND");
checkExp(
"interval +'+3:4' minute to second",
"INTERVAL '+3:4' MINUTE TO SECOND");
checkExp(
"interval -'3:4' minute to second",
"INTERVAL -'3:4' MINUTE TO SECOND");
checkExp(
"interval -'-3:4' minute to second",
"INTERVAL -'-3:4' MINUTE TO SECOND");
checkExp(
"interval -'+3:4' minute to second",
"INTERVAL -'+3:4' MINUTE TO SECOND");
}
/**
* Runs tests for INTERVAL... SECOND that should pass both parser and
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXPositive() tests.
*/
public void subTestIntervalSecondPositive() {
// default precision
checkExp(
"interval '1' second",
"INTERVAL '1' SECOND");
checkExp(
"interval '99' second",
"INTERVAL '99' SECOND");
// explicit precision equal to default
checkExp(
"interval '1' second(2)",
"INTERVAL '1' SECOND(2)");
checkExp(
"interval '99' second(2)",
"INTERVAL '99' SECOND(2)");
checkExp(
"interval '1' second(2,6)",
"INTERVAL '1' SECOND(2, 6)");
checkExp(
"interval '99' second(2,6)",
"INTERVAL '99' SECOND(2, 6)");
// max precision
checkExp(
"interval '2147483647' second(10)",
"INTERVAL '2147483647' SECOND(10)");
checkExp(
"interval '2147483647.999999999' second(9,9)",
"INTERVAL '2147483647.999999999' SECOND(9, 9)");
// min precision
checkExp(
"interval '0' second(1)",
"INTERVAL '0' SECOND(1)");
checkExp(
"interval '0.0' second(1,1)",
"INTERVAL '0.0' SECOND(1, 1)");
// alternate precision
checkExp(
"interval '1234' second(4)",
"INTERVAL '1234' SECOND(4)");
checkExp(
"interval '1234.56789' second(4,5)",
"INTERVAL '1234.56789' SECOND(4, 5)");
// sign
checkExp(
"interval '+1' second",
"INTERVAL '+1' SECOND");
checkExp(
"interval '-1' second",
"INTERVAL '-1' SECOND");
checkExp(
"interval +'1' second",
"INTERVAL '1' SECOND");
checkExp(
"interval +'+1' second",
"INTERVAL '+1' SECOND");
checkExp(
"interval +'-1' second",
"INTERVAL '-1' SECOND");
checkExp(
"interval -'1' second",
"INTERVAL -'1' SECOND");
checkExp(
"interval -'+1' second",
"INTERVAL -'+1' SECOND");
checkExp(
"interval -'-1' second",
"INTERVAL -'-1' SECOND");
}
/**
* Runs tests for INTERVAL... YEAR that should pass parser but fail
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalYearFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL '-' YEAR",
"INTERVAL '-' YEAR");
checkExp(
"INTERVAL '1-2' YEAR",
"INTERVAL '1-2' YEAR");
checkExp(
"INTERVAL '1.2' YEAR",
"INTERVAL '1.2' YEAR");
checkExp(
"INTERVAL '1 2' YEAR",
"INTERVAL '1 2' YEAR");
checkExp(
"INTERVAL '1-2' YEAR(2)",
"INTERVAL '1-2' YEAR(2)");
checkExp(
"INTERVAL 'bogus text' YEAR",
"INTERVAL 'bogus text' YEAR");
// negative field values
checkExp(
"INTERVAL '--1' YEAR",
"INTERVAL '--1' YEAR");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
checkExp(
"INTERVAL '100' YEAR",
"INTERVAL '100' YEAR");
checkExp(
"INTERVAL '100' YEAR(2)",
"INTERVAL '100' YEAR(2)");
checkExp(
"INTERVAL '1000' YEAR(3)",
"INTERVAL '1000' YEAR(3)");
checkExp(
"INTERVAL '-1000' YEAR(3)",
"INTERVAL '-1000' YEAR(3)");
checkExp(
"INTERVAL '2147483648' YEAR(10)",
"INTERVAL '2147483648' YEAR(10)");
checkExp(
"INTERVAL '-2147483648' YEAR(10)",
"INTERVAL '-2147483648' YEAR(10)");
// precision > maximum
checkExp(
"INTERVAL '1' YEAR(11)",
"INTERVAL '1' YEAR(11)");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0' YEAR(0)",
"INTERVAL '0' YEAR(0)");
}
/**
* Runs tests for INTERVAL... YEAR TO MONTH that should pass parser but fail
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalYearToMonthFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL '-' YEAR TO MONTH",
"INTERVAL '-' YEAR TO MONTH");
checkExp(
"INTERVAL '1' YEAR TO MONTH",
"INTERVAL '1' YEAR TO MONTH");
checkExp(
"INTERVAL '1:2' YEAR TO MONTH",
"INTERVAL '1:2' YEAR TO MONTH");
checkExp(
"INTERVAL '1.2' YEAR TO MONTH",
"INTERVAL '1.2' YEAR TO MONTH");
checkExp(
"INTERVAL '1 2' YEAR TO MONTH",
"INTERVAL '1 2' YEAR TO MONTH");
checkExp(
"INTERVAL '1:2' YEAR(2) TO MONTH",
"INTERVAL '1:2' YEAR(2) TO MONTH");
checkExp(
"INTERVAL 'bogus text' YEAR TO MONTH",
"INTERVAL 'bogus text' YEAR TO MONTH");
// negative field values
checkExp(
"INTERVAL '--1-2' YEAR TO MONTH",
"INTERVAL '--1-2' YEAR TO MONTH");
checkExp(
"INTERVAL '1--2' YEAR TO MONTH",
"INTERVAL '1--2' YEAR TO MONTH");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
// plus >max value for mid/end fields
checkExp(
"INTERVAL '100-0' YEAR TO MONTH",
"INTERVAL '100-0' YEAR TO MONTH");
checkExp(
"INTERVAL '100-0' YEAR(2) TO MONTH",
"INTERVAL '100-0' YEAR(2) TO MONTH");
checkExp(
"INTERVAL '1000-0' YEAR(3) TO MONTH",
"INTERVAL '1000-0' YEAR(3) TO MONTH");
checkExp(
"INTERVAL '-1000-0' YEAR(3) TO MONTH",
"INTERVAL '-1000-0' YEAR(3) TO MONTH");
checkExp(
"INTERVAL '2147483648-0' YEAR(10) TO MONTH",
"INTERVAL '2147483648-0' YEAR(10) TO MONTH");
checkExp(
"INTERVAL '-2147483648-0' YEAR(10) TO MONTH",
"INTERVAL '-2147483648-0' YEAR(10) TO MONTH");
checkExp(
"INTERVAL '1-12' YEAR TO MONTH",
"INTERVAL '1-12' YEAR TO MONTH");
// precision > maximum
checkExp(
"INTERVAL '1-1' YEAR(11) TO MONTH",
"INTERVAL '1-1' YEAR(11) TO MONTH");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0-0' YEAR(0) TO MONTH",
"INTERVAL '0-0' YEAR(0) TO MONTH");
}
/**
* Runs tests for INTERVAL... MONTH that should pass parser but fail
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalMonthFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL '-' MONTH",
"INTERVAL '-' MONTH");
checkExp(
"INTERVAL '1-2' MONTH",
"INTERVAL '1-2' MONTH");
checkExp(
"INTERVAL '1.2' MONTH",
"INTERVAL '1.2' MONTH");
checkExp(
"INTERVAL '1 2' MONTH",
"INTERVAL '1 2' MONTH");
checkExp(
"INTERVAL '1-2' MONTH(2)",
"INTERVAL '1-2' MONTH(2)");
checkExp(
"INTERVAL 'bogus text' MONTH",
"INTERVAL 'bogus text' MONTH");
// negative field values
checkExp(
"INTERVAL '--1' MONTH",
"INTERVAL '--1' MONTH");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
checkExp(
"INTERVAL '100' MONTH",
"INTERVAL '100' MONTH");
checkExp(
"INTERVAL '100' MONTH(2)",
"INTERVAL '100' MONTH(2)");
checkExp(
"INTERVAL '1000' MONTH(3)",
"INTERVAL '1000' MONTH(3)");
checkExp(
"INTERVAL '-1000' MONTH(3)",
"INTERVAL '-1000' MONTH(3)");
checkExp(
"INTERVAL '2147483648' MONTH(10)",
"INTERVAL '2147483648' MONTH(10)");
checkExp(
"INTERVAL '-2147483648' MONTH(10)",
"INTERVAL '-2147483648' MONTH(10)");
// precision > maximum
checkExp(
"INTERVAL '1' MONTH(11)",
"INTERVAL '1' MONTH(11)");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0' MONTH(0)",
"INTERVAL '0' MONTH(0)");
}
/**
* Runs tests for INTERVAL... DAY that should pass parser but fail
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalDayFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL '-' DAY",
"INTERVAL '-' DAY");
checkExp(
"INTERVAL '1-2' DAY",
"INTERVAL '1-2' DAY");
checkExp(
"INTERVAL '1.2' DAY",
"INTERVAL '1.2' DAY");
checkExp(
"INTERVAL '1 2' DAY",
"INTERVAL '1 2' DAY");
checkExp(
"INTERVAL '1:2' DAY",
"INTERVAL '1:2' DAY");
checkExp(
"INTERVAL '1-2' DAY(2)",
"INTERVAL '1-2' DAY(2)");
checkExp(
"INTERVAL 'bogus text' DAY",
"INTERVAL 'bogus text' DAY");
// negative field values
checkExp(
"INTERVAL '--1' DAY",
"INTERVAL '--1' DAY");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
checkExp(
"INTERVAL '100' DAY",
"INTERVAL '100' DAY");
checkExp(
"INTERVAL '100' DAY(2)",
"INTERVAL '100' DAY(2)");
checkExp(
"INTERVAL '1000' DAY(3)",
"INTERVAL '1000' DAY(3)");
checkExp(
"INTERVAL '-1000' DAY(3)",
"INTERVAL '-1000' DAY(3)");
checkExp(
"INTERVAL '2147483648' DAY(10)",
"INTERVAL '2147483648' DAY(10)");
checkExp(
"INTERVAL '-2147483648' DAY(10)",
"INTERVAL '-2147483648' DAY(10)");
// precision > maximum
checkExp(
"INTERVAL '1' DAY(11)",
"INTERVAL '1' DAY(11)");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0' DAY(0)",
"INTERVAL '0' DAY(0)");
}
/**
* Runs tests for INTERVAL... DAY TO HOUR that should pass parser but fail
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalDayToHourFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL '-' DAY TO HOUR",
"INTERVAL '-' DAY TO HOUR");
checkExp(
"INTERVAL '1' DAY TO HOUR",
"INTERVAL '1' DAY TO HOUR");
checkExp(
"INTERVAL '1:2' DAY TO HOUR",
"INTERVAL '1:2' DAY TO HOUR");
checkExp(
"INTERVAL '1.2' DAY TO HOUR",
"INTERVAL '1.2' DAY TO HOUR");
checkExp(
"INTERVAL '1 x' DAY TO HOUR",
"INTERVAL '1 x' DAY TO HOUR");
checkExp(
"INTERVAL ' ' DAY TO HOUR",
"INTERVAL ' ' DAY TO HOUR");
checkExp(
"INTERVAL '1:2' DAY(2) TO HOUR",
"INTERVAL '1:2' DAY(2) TO HOUR");
checkExp(
"INTERVAL 'bogus text' DAY TO HOUR",
"INTERVAL 'bogus text' DAY TO HOUR");
// negative field values
checkExp(
"INTERVAL '--1 1' DAY TO HOUR",
"INTERVAL '--1 1' DAY TO HOUR");
checkExp(
"INTERVAL '1 -1' DAY TO HOUR",
"INTERVAL '1 -1' DAY TO HOUR");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
// plus >max value for mid/end fields
checkExp(
"INTERVAL '100 0' DAY TO HOUR",
"INTERVAL '100 0' DAY TO HOUR");
checkExp(
"INTERVAL '100 0' DAY(2) TO HOUR",
"INTERVAL '100 0' DAY(2) TO HOUR");
checkExp(
"INTERVAL '1000 0' DAY(3) TO HOUR",
"INTERVAL '1000 0' DAY(3) TO HOUR");
checkExp(
"INTERVAL '-1000 0' DAY(3) TO HOUR",
"INTERVAL '-1000 0' DAY(3) TO HOUR");
checkExp(
"INTERVAL '2147483648 0' DAY(10) TO HOUR",
"INTERVAL '2147483648 0' DAY(10) TO HOUR");
checkExp(
"INTERVAL '-2147483648 0' DAY(10) TO HOUR",
"INTERVAL '-2147483648 0' DAY(10) TO HOUR");
checkExp(
"INTERVAL '1 24' DAY TO HOUR",
"INTERVAL '1 24' DAY TO HOUR");
// precision > maximum
checkExp(
"INTERVAL '1 1' DAY(11) TO HOUR",
"INTERVAL '1 1' DAY(11) TO HOUR");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0 0' DAY(0) TO HOUR",
"INTERVAL '0 0' DAY(0) TO HOUR");
}
/**
* Runs tests for INTERVAL... DAY TO MINUTE that should pass parser but fail
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalDayToMinuteFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL ' :' DAY TO MINUTE",
"INTERVAL ' :' DAY TO MINUTE");
checkExp(
"INTERVAL '1' DAY TO MINUTE",
"INTERVAL '1' DAY TO MINUTE");
checkExp(
"INTERVAL '1 2' DAY TO MINUTE",
"INTERVAL '1 2' DAY TO MINUTE");
checkExp(
"INTERVAL '1:2' DAY TO MINUTE",
"INTERVAL '1:2' DAY TO MINUTE");
checkExp(
"INTERVAL '1.2' DAY TO MINUTE",
"INTERVAL '1.2' DAY TO MINUTE");
checkExp(
"INTERVAL 'x 1:1' DAY TO MINUTE",
"INTERVAL 'x 1:1' DAY TO MINUTE");
checkExp(
"INTERVAL '1 x:1' DAY TO MINUTE",
"INTERVAL '1 x:1' DAY TO MINUTE");
checkExp(
"INTERVAL '1 1:x' DAY TO MINUTE",
"INTERVAL '1 1:x' DAY TO MINUTE");
checkExp(
"INTERVAL '1 1:2:3' DAY TO MINUTE",
"INTERVAL '1 1:2:3' DAY TO MINUTE");
checkExp(
"INTERVAL '1 1:1:1.2' DAY TO MINUTE",
"INTERVAL '1 1:1:1.2' DAY TO MINUTE");
checkExp(
"INTERVAL '1 1:2:3' DAY(2) TO MINUTE",
"INTERVAL '1 1:2:3' DAY(2) TO MINUTE");
checkExp(
"INTERVAL '1 1' DAY(2) TO MINUTE",
"INTERVAL '1 1' DAY(2) TO MINUTE");
checkExp(
"INTERVAL 'bogus text' DAY TO MINUTE",
"INTERVAL 'bogus text' DAY TO MINUTE");
// negative field values
checkExp(
"INTERVAL '--1 1:1' DAY TO MINUTE",
"INTERVAL '--1 1:1' DAY TO MINUTE");
checkExp(
"INTERVAL '1 -1:1' DAY TO MINUTE",
"INTERVAL '1 -1:1' DAY TO MINUTE");
checkExp(
"INTERVAL '1 1:-1' DAY TO MINUTE",
"INTERVAL '1 1:-1' DAY TO MINUTE");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
// plus >max value for mid/end fields
checkExp(
"INTERVAL '100 0' DAY TO MINUTE",
"INTERVAL '100 0' DAY TO MINUTE");
checkExp(
"INTERVAL '100 0' DAY(2) TO MINUTE",
"INTERVAL '100 0' DAY(2) TO MINUTE");
checkExp(
"INTERVAL '1000 0' DAY(3) TO MINUTE",
"INTERVAL '1000 0' DAY(3) TO MINUTE");
checkExp(
"INTERVAL '-1000 0' DAY(3) TO MINUTE",
"INTERVAL '-1000 0' DAY(3) TO MINUTE");
checkExp(
"INTERVAL '2147483648 0' DAY(10) TO MINUTE",
"INTERVAL '2147483648 0' DAY(10) TO MINUTE");
checkExp(
"INTERVAL '-2147483648 0' DAY(10) TO MINUTE",
"INTERVAL '-2147483648 0' DAY(10) TO MINUTE");
checkExp(
"INTERVAL '1 24:1' DAY TO MINUTE",
"INTERVAL '1 24:1' DAY TO MINUTE");
checkExp(
"INTERVAL '1 1:60' DAY TO MINUTE",
"INTERVAL '1 1:60' DAY TO MINUTE");
// precision > maximum
checkExp(
"INTERVAL '1 1' DAY(11) TO MINUTE",
"INTERVAL '1 1' DAY(11) TO MINUTE");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0 0' DAY(0) TO MINUTE",
"INTERVAL '0 0' DAY(0) TO MINUTE");
}
/**
* Runs tests for INTERVAL... DAY TO SECOND that should pass parser but fail
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalDayToSecondFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL ' ::' DAY TO SECOND",
"INTERVAL ' ::' DAY TO SECOND");
checkExp(
"INTERVAL ' ::.' DAY TO SECOND",
"INTERVAL ' ::.' DAY TO SECOND");
checkExp(
"INTERVAL '1' DAY TO SECOND",
"INTERVAL '1' DAY TO SECOND");
checkExp(
"INTERVAL '1 2' DAY TO SECOND",
"INTERVAL '1 2' DAY TO SECOND");
checkExp(
"INTERVAL '1:2' DAY TO SECOND",
"INTERVAL '1:2' DAY TO SECOND");
checkExp(
"INTERVAL '1.2' DAY TO SECOND",
"INTERVAL '1.2' DAY TO SECOND");
checkExp(
"INTERVAL '1 1:2' DAY TO SECOND",
"INTERVAL '1 1:2' DAY TO SECOND");
checkExp(
"INTERVAL '1 1:2:x' DAY TO SECOND",
"INTERVAL '1 1:2:x' DAY TO SECOND");
checkExp(
"INTERVAL '1:2:3' DAY TO SECOND",
"INTERVAL '1:2:3' DAY TO SECOND");
checkExp(
"INTERVAL '1:1:1.2' DAY TO SECOND",
"INTERVAL '1:1:1.2' DAY TO SECOND");
checkExp(
"INTERVAL '1 1:2' DAY(2) TO SECOND",
"INTERVAL '1 1:2' DAY(2) TO SECOND");
checkExp(
"INTERVAL '1 1' DAY(2) TO SECOND",
"INTERVAL '1 1' DAY(2) TO SECOND");
checkExp(
"INTERVAL 'bogus text' DAY TO SECOND",
"INTERVAL 'bogus text' DAY TO SECOND");
checkExp(
"INTERVAL '2345 6:7:8901' DAY TO SECOND(4)",
"INTERVAL '2345 6:7:8901' DAY TO SECOND(4)");
// negative field values
checkExp(
"INTERVAL '--1 1:1:1' DAY TO SECOND",
"INTERVAL '--1 1:1:1' DAY TO SECOND");
checkExp(
"INTERVAL '1 -1:1:1' DAY TO SECOND",
"INTERVAL '1 -1:1:1' DAY TO SECOND");
checkExp(
"INTERVAL '1 1:-1:1' DAY TO SECOND",
"INTERVAL '1 1:-1:1' DAY TO SECOND");
checkExp(
"INTERVAL '1 1:1:-1' DAY TO SECOND",
"INTERVAL '1 1:1:-1' DAY TO SECOND");
checkExp(
"INTERVAL '1 1:1:1.-1' DAY TO SECOND",
"INTERVAL '1 1:1:1.-1' DAY TO SECOND");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
// plus >max value for mid/end fields
checkExp(
"INTERVAL '100 0' DAY TO SECOND",
"INTERVAL '100 0' DAY TO SECOND");
checkExp(
"INTERVAL '100 0' DAY(2) TO SECOND",
"INTERVAL '100 0' DAY(2) TO SECOND");
checkExp(
"INTERVAL '1000 0' DAY(3) TO SECOND",
"INTERVAL '1000 0' DAY(3) TO SECOND");
checkExp(
"INTERVAL '-1000 0' DAY(3) TO SECOND",
"INTERVAL '-1000 0' DAY(3) TO SECOND");
checkExp(
"INTERVAL '2147483648 0' DAY(10) TO SECOND",
"INTERVAL '2147483648 0' DAY(10) TO SECOND");
checkExp(
"INTERVAL '-2147483648 0' DAY(10) TO SECOND",
"INTERVAL '-2147483648 0' DAY(10) TO SECOND");
checkExp(
"INTERVAL '1 24:1:1' DAY TO SECOND",
"INTERVAL '1 24:1:1' DAY TO SECOND");
checkExp(
"INTERVAL '1 1:60:1' DAY TO SECOND",
"INTERVAL '1 1:60:1' DAY TO SECOND");
checkExp(
"INTERVAL '1 1:1:60' DAY TO SECOND",
"INTERVAL '1 1:1:60' DAY TO SECOND");
checkExp(
"INTERVAL '1 1:1:1.0000001' DAY TO SECOND",
"INTERVAL '1 1:1:1.0000001' DAY TO SECOND");
checkExp(
"INTERVAL '1 1:1:1.0001' DAY TO SECOND(3)",
"INTERVAL '1 1:1:1.0001' DAY TO SECOND(3)");
// precision > maximum
checkExp(
"INTERVAL '1 1' DAY(11) TO SECOND",
"INTERVAL '1 1' DAY(11) TO SECOND");
checkExp(
"INTERVAL '1 1' DAY TO SECOND(10)",
"INTERVAL '1 1' DAY TO SECOND(10)");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0 0:0:0' DAY(0) TO SECOND",
"INTERVAL '0 0:0:0' DAY(0) TO SECOND");
checkExp(
"INTERVAL '0 0:0:0' DAY TO SECOND(0)",
"INTERVAL '0 0:0:0' DAY TO SECOND(0)");
}
/**
* Runs tests for INTERVAL... HOUR that should pass parser but fail
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalHourFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL '-' HOUR",
"INTERVAL '-' HOUR");
checkExp(
"INTERVAL '1-2' HOUR",
"INTERVAL '1-2' HOUR");
checkExp(
"INTERVAL '1.2' HOUR",
"INTERVAL '1.2' HOUR");
checkExp(
"INTERVAL '1 2' HOUR",
"INTERVAL '1 2' HOUR");
checkExp(
"INTERVAL '1:2' HOUR",
"INTERVAL '1:2' HOUR");
checkExp(
"INTERVAL '1-2' HOUR(2)",
"INTERVAL '1-2' HOUR(2)");
checkExp(
"INTERVAL 'bogus text' HOUR",
"INTERVAL 'bogus text' HOUR");
// negative field values
checkExp(
"INTERVAL '--1' HOUR",
"INTERVAL '--1' HOUR");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
checkExp(
"INTERVAL '100' HOUR",
"INTERVAL '100' HOUR");
checkExp(
"INTERVAL '100' HOUR(2)",
"INTERVAL '100' HOUR(2)");
checkExp(
"INTERVAL '1000' HOUR(3)",
"INTERVAL '1000' HOUR(3)");
checkExp(
"INTERVAL '-1000' HOUR(3)",
"INTERVAL '-1000' HOUR(3)");
checkExp(
"INTERVAL '2147483648' HOUR(10)",
"INTERVAL '2147483648' HOUR(10)");
checkExp(
"INTERVAL '-2147483648' HOUR(10)",
"INTERVAL '-2147483648' HOUR(10)");
// negative field values
checkExp(
"INTERVAL '--1' HOUR",
"INTERVAL '--1' HOUR");
// precision > maximum
checkExp(
"INTERVAL '1' HOUR(11)",
"INTERVAL '1' HOUR(11)");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0' HOUR(0)",
"INTERVAL '0' HOUR(0)");
}
/**
* Runs tests for INTERVAL... HOUR TO MINUTE that should pass parser but
* fail validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalHourToMinuteFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL ':' HOUR TO MINUTE",
"INTERVAL ':' HOUR TO MINUTE");
checkExp(
"INTERVAL '1' HOUR TO MINUTE",
"INTERVAL '1' HOUR TO MINUTE");
checkExp(
"INTERVAL '1:x' HOUR TO MINUTE",
"INTERVAL '1:x' HOUR TO MINUTE");
checkExp(
"INTERVAL '1.2' HOUR TO MINUTE",
"INTERVAL '1.2' HOUR TO MINUTE");
checkExp(
"INTERVAL '1 2' HOUR TO MINUTE",
"INTERVAL '1 2' HOUR TO MINUTE");
checkExp(
"INTERVAL '1:2:3' HOUR TO MINUTE",
"INTERVAL '1:2:3' HOUR TO MINUTE");
checkExp(
"INTERVAL '1 2' HOUR(2) TO MINUTE",
"INTERVAL '1 2' HOUR(2) TO MINUTE");
checkExp(
"INTERVAL 'bogus text' HOUR TO MINUTE",
"INTERVAL 'bogus text' HOUR TO MINUTE");
// negative field values
checkExp(
"INTERVAL '--1:1' HOUR TO MINUTE",
"INTERVAL '--1:1' HOUR TO MINUTE");
checkExp(
"INTERVAL '1:-1' HOUR TO MINUTE",
"INTERVAL '1:-1' HOUR TO MINUTE");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
// plus >max value for mid/end fields
checkExp(
"INTERVAL '100:0' HOUR TO MINUTE",
"INTERVAL '100:0' HOUR TO MINUTE");
checkExp(
"INTERVAL '100:0' HOUR(2) TO MINUTE",
"INTERVAL '100:0' HOUR(2) TO MINUTE");
checkExp(
"INTERVAL '1000:0' HOUR(3) TO MINUTE",
"INTERVAL '1000:0' HOUR(3) TO MINUTE");
checkExp(
"INTERVAL '-1000:0' HOUR(3) TO MINUTE",
"INTERVAL '-1000:0' HOUR(3) TO MINUTE");
checkExp(
"INTERVAL '2147483648:0' HOUR(10) TO MINUTE",
"INTERVAL '2147483648:0' HOUR(10) TO MINUTE");
checkExp(
"INTERVAL '-2147483648:0' HOUR(10) TO MINUTE",
"INTERVAL '-2147483648:0' HOUR(10) TO MINUTE");
checkExp(
"INTERVAL '1:24' HOUR TO MINUTE",
"INTERVAL '1:24' HOUR TO MINUTE");
// precision > maximum
checkExp(
"INTERVAL '1:1' HOUR(11) TO MINUTE",
"INTERVAL '1:1' HOUR(11) TO MINUTE");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0:0' HOUR(0) TO MINUTE",
"INTERVAL '0:0' HOUR(0) TO MINUTE");
}
/**
* Runs tests for INTERVAL... HOUR TO SECOND that should pass parser but
* fail validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalHourToSecondFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL '::' HOUR TO SECOND",
"INTERVAL '::' HOUR TO SECOND");
checkExp(
"INTERVAL '::.' HOUR TO SECOND",
"INTERVAL '::.' HOUR TO SECOND");
checkExp(
"INTERVAL '1' HOUR TO SECOND",
"INTERVAL '1' HOUR TO SECOND");
checkExp(
"INTERVAL '1 2' HOUR TO SECOND",
"INTERVAL '1 2' HOUR TO SECOND");
checkExp(
"INTERVAL '1:2' HOUR TO SECOND",
"INTERVAL '1:2' HOUR TO SECOND");
checkExp(
"INTERVAL '1.2' HOUR TO SECOND",
"INTERVAL '1.2' HOUR TO SECOND");
checkExp(
"INTERVAL '1 1:2' HOUR TO SECOND",
"INTERVAL '1 1:2' HOUR TO SECOND");
checkExp(
"INTERVAL '1:2:x' HOUR TO SECOND",
"INTERVAL '1:2:x' HOUR TO SECOND");
checkExp(
"INTERVAL '1:x:3' HOUR TO SECOND",
"INTERVAL '1:x:3' HOUR TO SECOND");
checkExp(
"INTERVAL '1:1:1.x' HOUR TO SECOND",
"INTERVAL '1:1:1.x' HOUR TO SECOND");
checkExp(
"INTERVAL '1 1:2' HOUR(2) TO SECOND",
"INTERVAL '1 1:2' HOUR(2) TO SECOND");
checkExp(
"INTERVAL '1 1' HOUR(2) TO SECOND",
"INTERVAL '1 1' HOUR(2) TO SECOND");
checkExp(
"INTERVAL 'bogus text' HOUR TO SECOND",
"INTERVAL 'bogus text' HOUR TO SECOND");
checkExp(
"INTERVAL '6:7:8901' HOUR TO SECOND(4)",
"INTERVAL '6:7:8901' HOUR TO SECOND(4)");
// negative field values
checkExp(
"INTERVAL '--1:1:1' HOUR TO SECOND",
"INTERVAL '--1:1:1' HOUR TO SECOND");
checkExp(
"INTERVAL '1:-1:1' HOUR TO SECOND",
"INTERVAL '1:-1:1' HOUR TO SECOND");
checkExp(
"INTERVAL '1:1:-1' HOUR TO SECOND",
"INTERVAL '1:1:-1' HOUR TO SECOND");
checkExp(
"INTERVAL '1:1:1.-1' HOUR TO SECOND",
"INTERVAL '1:1:1.-1' HOUR TO SECOND");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
// plus >max value for mid/end fields
checkExp(
"INTERVAL '100:0:0' HOUR TO SECOND",
"INTERVAL '100:0:0' HOUR TO SECOND");
checkExp(
"INTERVAL '100:0:0' HOUR(2) TO SECOND",
"INTERVAL '100:0:0' HOUR(2) TO SECOND");
checkExp(
"INTERVAL '1000:0:0' HOUR(3) TO SECOND",
"INTERVAL '1000:0:0' HOUR(3) TO SECOND");
checkExp(
"INTERVAL '-1000:0:0' HOUR(3) TO SECOND",
"INTERVAL '-1000:0:0' HOUR(3) TO SECOND");
checkExp(
"INTERVAL '2147483648:0:0' HOUR(10) TO SECOND",
"INTERVAL '2147483648:0:0' HOUR(10) TO SECOND");
checkExp(
"INTERVAL '-2147483648:0:0' HOUR(10) TO SECOND",
"INTERVAL '-2147483648:0:0' HOUR(10) TO SECOND");
checkExp(
"INTERVAL '1:60:1' HOUR TO SECOND",
"INTERVAL '1:60:1' HOUR TO SECOND");
checkExp(
"INTERVAL '1:1:60' HOUR TO SECOND",
"INTERVAL '1:1:60' HOUR TO SECOND");
checkExp(
"INTERVAL '1:1:1.0000001' HOUR TO SECOND",
"INTERVAL '1:1:1.0000001' HOUR TO SECOND");
checkExp(
"INTERVAL '1:1:1.0001' HOUR TO SECOND(3)",
"INTERVAL '1:1:1.0001' HOUR TO SECOND(3)");
// precision > maximum
checkExp(
"INTERVAL '1:1:1' HOUR(11) TO SECOND",
"INTERVAL '1:1:1' HOUR(11) TO SECOND");
checkExp(
"INTERVAL '1:1:1' HOUR TO SECOND(10)",
"INTERVAL '1:1:1' HOUR TO SECOND(10)");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0:0:0' HOUR(0) TO SECOND",
"INTERVAL '0:0:0' HOUR(0) TO SECOND");
checkExp(
"INTERVAL '0:0:0' HOUR TO SECOND(0)",
"INTERVAL '0:0:0' HOUR TO SECOND(0)");
}
/**
* Runs tests for INTERVAL... MINUTE that should pass parser but fail
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalMinuteFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL '-' MINUTE",
"INTERVAL '-' MINUTE");
checkExp(
"INTERVAL '1-2' MINUTE",
"INTERVAL '1-2' MINUTE");
checkExp(
"INTERVAL '1.2' MINUTE",
"INTERVAL '1.2' MINUTE");
checkExp(
"INTERVAL '1 2' MINUTE",
"INTERVAL '1 2' MINUTE");
checkExp(
"INTERVAL '1:2' MINUTE",
"INTERVAL '1:2' MINUTE");
checkExp(
"INTERVAL '1-2' MINUTE(2)",
"INTERVAL '1-2' MINUTE(2)");
checkExp(
"INTERVAL 'bogus text' MINUTE",
"INTERVAL 'bogus text' MINUTE");
// negative field values
checkExp(
"INTERVAL '--1' MINUTE",
"INTERVAL '--1' MINUTE");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
checkExp(
"INTERVAL '100' MINUTE",
"INTERVAL '100' MINUTE");
checkExp(
"INTERVAL '100' MINUTE(2)",
"INTERVAL '100' MINUTE(2)");
checkExp(
"INTERVAL '1000' MINUTE(3)",
"INTERVAL '1000' MINUTE(3)");
checkExp(
"INTERVAL '-1000' MINUTE(3)",
"INTERVAL '-1000' MINUTE(3)");
checkExp(
"INTERVAL '2147483648' MINUTE(10)",
"INTERVAL '2147483648' MINUTE(10)");
checkExp(
"INTERVAL '-2147483648' MINUTE(10)",
"INTERVAL '-2147483648' MINUTE(10)");
// precision > maximum
checkExp(
"INTERVAL '1' MINUTE(11)",
"INTERVAL '1' MINUTE(11)");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0' MINUTE(0)",
"INTERVAL '0' MINUTE(0)");
}
/**
* Runs tests for INTERVAL... MINUTE TO SECOND that should pass parser but
* fail validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalMinuteToSecondFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL ':' MINUTE TO SECOND",
"INTERVAL ':' MINUTE TO SECOND");
checkExp(
"INTERVAL ':.' MINUTE TO SECOND",
"INTERVAL ':.' MINUTE TO SECOND");
checkExp(
"INTERVAL '1' MINUTE TO SECOND",
"INTERVAL '1' MINUTE TO SECOND");
checkExp(
"INTERVAL '1 2' MINUTE TO SECOND",
"INTERVAL '1 2' MINUTE TO SECOND");
checkExp(
"INTERVAL '1.2' MINUTE TO SECOND",
"INTERVAL '1.2' MINUTE TO SECOND");
checkExp(
"INTERVAL '1 1:2' MINUTE TO SECOND",
"INTERVAL '1 1:2' MINUTE TO SECOND");
checkExp(
"INTERVAL '1:x' MINUTE TO SECOND",
"INTERVAL '1:x' MINUTE TO SECOND");
checkExp(
"INTERVAL 'x:3' MINUTE TO SECOND",
"INTERVAL 'x:3' MINUTE TO SECOND");
checkExp(
"INTERVAL '1:1.x' MINUTE TO SECOND",
"INTERVAL '1:1.x' MINUTE TO SECOND");
checkExp(
"INTERVAL '1 1:2' MINUTE(2) TO SECOND",
"INTERVAL '1 1:2' MINUTE(2) TO SECOND");
checkExp(
"INTERVAL '1 1' MINUTE(2) TO SECOND",
"INTERVAL '1 1' MINUTE(2) TO SECOND");
checkExp(
"INTERVAL 'bogus text' MINUTE TO SECOND",
"INTERVAL 'bogus text' MINUTE TO SECOND");
checkExp(
"INTERVAL '7:8901' MINUTE TO SECOND(4)",
"INTERVAL '7:8901' MINUTE TO SECOND(4)");
// negative field values
checkExp(
"INTERVAL '--1:1' MINUTE TO SECOND",
"INTERVAL '--1:1' MINUTE TO SECOND");
checkExp(
"INTERVAL '1:-1' MINUTE TO SECOND",
"INTERVAL '1:-1' MINUTE TO SECOND");
checkExp(
"INTERVAL '1:1.-1' MINUTE TO SECOND",
"INTERVAL '1:1.-1' MINUTE TO SECOND");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
// plus >max value for mid/end fields
checkExp(
"INTERVAL '100:0' MINUTE TO SECOND",
"INTERVAL '100:0' MINUTE TO SECOND");
checkExp(
"INTERVAL '100:0' MINUTE(2) TO SECOND",
"INTERVAL '100:0' MINUTE(2) TO SECOND");
checkExp(
"INTERVAL '1000:0' MINUTE(3) TO SECOND",
"INTERVAL '1000:0' MINUTE(3) TO SECOND");
checkExp(
"INTERVAL '-1000:0' MINUTE(3) TO SECOND",
"INTERVAL '-1000:0' MINUTE(3) TO SECOND");
checkExp(
"INTERVAL '2147483648:0' MINUTE(10) TO SECOND",
"INTERVAL '2147483648:0' MINUTE(10) TO SECOND");
checkExp(
"INTERVAL '-2147483648:0' MINUTE(10) TO SECOND",
"INTERVAL '-2147483648:0' MINUTE(10) TO SECOND");
checkExp(
"INTERVAL '1:60' MINUTE TO SECOND",
"INTERVAL '1:60' MINUTE TO SECOND");
checkExp(
"INTERVAL '1:1.0000001' MINUTE TO SECOND",
"INTERVAL '1:1.0000001' MINUTE TO SECOND");
checkExp(
"INTERVAL '1:1:1.0001' MINUTE TO SECOND(3)",
"INTERVAL '1:1:1.0001' MINUTE TO SECOND(3)");
// precision > maximum
checkExp(
"INTERVAL '1:1' MINUTE(11) TO SECOND",
"INTERVAL '1:1' MINUTE(11) TO SECOND");
checkExp(
"INTERVAL '1:1' MINUTE TO SECOND(10)",
"INTERVAL '1:1' MINUTE TO SECOND(10)");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0:0' MINUTE(0) TO SECOND",
"INTERVAL '0:0' MINUTE(0) TO SECOND");
checkExp(
"INTERVAL '0:0' MINUTE TO SECOND(0)",
"INTERVAL '0:0' MINUTE TO SECOND(0)");
}
/**
* Runs tests for INTERVAL... SECOND that should pass parser but fail
* validator. A substantially identical set of tests exists in
* SqlValidatorTest, and any changes here should be synchronized there.
* Similarly, any changes to tests here should be echoed appropriately to
* each of the other 12 subTestIntervalXXXFailsValidation() tests.
*/
public void subTestIntervalSecondFailsValidation() {
// Qualifier - field mismatches
checkExp(
"INTERVAL ':' SECOND",
"INTERVAL ':' SECOND");
checkExp(
"INTERVAL '.' SECOND",
"INTERVAL '.' SECOND");
checkExp(
"INTERVAL '1-2' SECOND",
"INTERVAL '1-2' SECOND");
checkExp(
"INTERVAL '1.x' SECOND",
"INTERVAL '1.x' SECOND");
checkExp(
"INTERVAL 'x.1' SECOND",
"INTERVAL 'x.1' SECOND");
checkExp(
"INTERVAL '1 2' SECOND",
"INTERVAL '1 2' SECOND");
checkExp(
"INTERVAL '1:2' SECOND",
"INTERVAL '1:2' SECOND");
checkExp(
"INTERVAL '1-2' SECOND(2)",
"INTERVAL '1-2' SECOND(2)");
checkExp(
"INTERVAL 'bogus text' SECOND",
"INTERVAL 'bogus text' SECOND");
// negative field values
checkExp(
"INTERVAL '--1' SECOND",
"INTERVAL '--1' SECOND");
checkExp(
"INTERVAL '1.-1' SECOND",
"INTERVAL '1.-1' SECOND");
// Field value out of range
// (default, explicit default, alt, neg alt, max, neg max)
checkExp(
"INTERVAL '100' SECOND",
"INTERVAL '100' SECOND");
checkExp(
"INTERVAL '100' SECOND(2)",
"INTERVAL '100' SECOND(2)");
checkExp(
"INTERVAL '1000' SECOND(3)",
"INTERVAL '1000' SECOND(3)");
checkExp(
"INTERVAL '-1000' SECOND(3)",
"INTERVAL '-1000' SECOND(3)");
checkExp(
"INTERVAL '2147483648' SECOND(10)",
"INTERVAL '2147483648' SECOND(10)");
checkExp(
"INTERVAL '-2147483648' SECOND(10)",
"INTERVAL '-2147483648' SECOND(10)");
checkExp(
"INTERVAL '1.0000001' SECOND",
"INTERVAL '1.0000001' SECOND");
checkExp(
"INTERVAL '1.0000001' SECOND(2)",
"INTERVAL '1.0000001' SECOND(2)");
checkExp(
"INTERVAL '1.0001' SECOND(2, 3)",
"INTERVAL '1.0001' SECOND(2, 3)");
checkExp(
"INTERVAL '1.000000001' SECOND(2, 9)",
"INTERVAL '1.000000001' SECOND(2, 9)");
// precision > maximum
checkExp(
"INTERVAL '1' SECOND(11)",
"INTERVAL '1' SECOND(11)");
checkExp(
"INTERVAL '1.1' SECOND(1, 10)",
"INTERVAL '1.1' SECOND(1, 10)");
// precision < minimum allowed)
// note: parser will catch negative values, here we
// just need to check for 0
checkExp(
"INTERVAL '0' SECOND(0)",
"INTERVAL '0' SECOND(0)");
checkExp(
"INTERVAL '0' SECOND(1, 0)",
"INTERVAL '0' SECOND(1, 0)");
}
  /**
   * Runs tests for each of the thirteen different main types of INTERVAL
   * qualifiers (YEAR, YEAR TO MONTH, etc.) Tests in this section fall into
   * two categories:
   *
   * <ul>
   * <li>xxxPositive: tests that should pass parser and validator</li>
   * <li>xxxFailsValidation: tests that should pass parser but fail validator
   * </li>
   * </ul>
   *
   * A substantially identical set of tests exists in SqlValidatorTest, and
   * any changes here should be synchronized there.
   */
  @Test public void testIntervalLiterals() {
    // Positive cases: one subtest per qualifier, from coarsest (YEAR) to
    // finest (SECOND).
    subTestIntervalYearPositive();
    subTestIntervalYearToMonthPositive();
    subTestIntervalMonthPositive();
    subTestIntervalDayPositive();
    subTestIntervalDayToHourPositive();
    subTestIntervalDayToMinutePositive();
    subTestIntervalDayToSecondPositive();
    subTestIntervalHourPositive();
    subTestIntervalHourToMinutePositive();
    subTestIntervalHourToSecondPositive();
    subTestIntervalMinutePositive();
    subTestIntervalMinuteToSecondPositive();
    subTestIntervalSecondPositive();
    // Parser-only cases: the same thirteen qualifiers, exercising literals
    // that parse but that the validator is expected to reject.
    subTestIntervalYearFailsValidation();
    subTestIntervalYearToMonthFailsValidation();
    subTestIntervalMonthFailsValidation();
    subTestIntervalDayFailsValidation();
    subTestIntervalDayToHourFailsValidation();
    subTestIntervalDayToMinuteFailsValidation();
    subTestIntervalDayToSecondFailsValidation();
    subTestIntervalHourFailsValidation();
    subTestIntervalHourToMinuteFailsValidation();
    subTestIntervalHourToSecondFailsValidation();
    subTestIntervalMinuteFailsValidation();
    subTestIntervalMinuteToSecondFailsValidation();
    subTestIntervalSecondFailsValidation();
  }
/**
 * Tests INTERVAL qualifiers that the parser itself must reject: a missing
 * qualifier, start/end field combinations that SQL does not allow, illegal
 * precision placements, and out-of-range precision values.
 *
 * <p>NOTE(review): {@code ANY} (defined outside this view) is presumably a
 * match-anything error pattern; these calls only assert that parsing fails.
 */
@Test public void testUnparseableIntervalQualifiers() {
// No qualifier
checkExpFails(
"interval '1^'^",
"Encountered \"<EOF>\" at line 1, column 12\\.\n"
+ "Was expecting one of:\n"
+ " \"YEAR\" \\.\\.\\.\n"
+ " \"MONTH\" \\.\\.\\.\n"
+ " \"DAY\" \\.\\.\\.\n"
+ " \"HOUR\" \\.\\.\\.\n"
+ " \"MINUTE\" \\.\\.\\.\n"
+ " \"SECOND\" \\.\\.\\.\n"
+ " ");
// illegal qualifiers, no precision in either field
checkExpFails(
"interval '1' year ^to^ year",
"(?s)Encountered \"to year\" at line 1, column 19.\n"
+ "Was expecting one of:\n"
+ " <EOF> \n"
+ " \"NOT\" \\.\\.\\..*");
checkExpFails("interval '1-2' year ^to^ day", ANY);
checkExpFails("interval '1-2' year ^to^ hour", ANY);
checkExpFails("interval '1-2' year ^to^ minute", ANY);
checkExpFails("interval '1-2' year ^to^ second", ANY);
checkExpFails("interval '1-2' month ^to^ year", ANY);
checkExpFails("interval '1-2' month ^to^ month", ANY);
checkExpFails("interval '1-2' month ^to^ day", ANY);
checkExpFails("interval '1-2' month ^to^ hour", ANY);
checkExpFails("interval '1-2' month ^to^ minute", ANY);
checkExpFails("interval '1-2' month ^to^ second", ANY);
checkExpFails("interval '1-2' day ^to^ year", ANY);
checkExpFails("interval '1-2' day ^to^ month", ANY);
checkExpFails("interval '1-2' day ^to^ day", ANY);
checkExpFails("interval '1-2' hour ^to^ year", ANY);
checkExpFails("interval '1-2' hour ^to^ month", ANY);
checkExpFails("interval '1-2' hour ^to^ day", ANY);
checkExpFails("interval '1-2' hour ^to^ hour", ANY);
checkExpFails("interval '1-2' minute ^to^ year", ANY);
checkExpFails("interval '1-2' minute ^to^ month", ANY);
checkExpFails("interval '1-2' minute ^to^ day", ANY);
checkExpFails("interval '1-2' minute ^to^ hour", ANY);
checkExpFails("interval '1-2' minute ^to^ minute", ANY);
checkExpFails("interval '1-2' second ^to^ year", ANY);
checkExpFails("interval '1-2' second ^to^ month", ANY);
checkExpFails("interval '1-2' second ^to^ day", ANY);
checkExpFails("interval '1-2' second ^to^ hour", ANY);
checkExpFails("interval '1-2' second ^to^ minute", ANY);
checkExpFails("interval '1-2' second ^to^ second", ANY);
// illegal qualifiers, including precision in start field
checkExpFails("interval '1' year(3) ^to^ year", ANY);
checkExpFails("interval '1-2' year(3) ^to^ day", ANY);
checkExpFails("interval '1-2' year(3) ^to^ hour", ANY);
checkExpFails("interval '1-2' year(3) ^to^ minute", ANY);
checkExpFails("interval '1-2' year(3) ^to^ second", ANY);
checkExpFails("interval '1-2' month(3) ^to^ year", ANY);
checkExpFails("interval '1-2' month(3) ^to^ month", ANY);
checkExpFails("interval '1-2' month(3) ^to^ day", ANY);
checkExpFails("interval '1-2' month(3) ^to^ hour", ANY);
checkExpFails("interval '1-2' month(3) ^to^ minute", ANY);
checkExpFails("interval '1-2' month(3) ^to^ second", ANY);
checkExpFails("interval '1-2' day(3) ^to^ year", ANY);
checkExpFails("interval '1-2' day(3) ^to^ month", ANY);
checkExpFails("interval '1-2' hour(3) ^to^ year", ANY);
checkExpFails("interval '1-2' hour(3) ^to^ month", ANY);
checkExpFails("interval '1-2' hour(3) ^to^ day", ANY);
checkExpFails("interval '1-2' minute(3) ^to^ year", ANY);
checkExpFails("interval '1-2' minute(3) ^to^ month", ANY);
checkExpFails("interval '1-2' minute(3) ^to^ day", ANY);
checkExpFails("interval '1-2' minute(3) ^to^ hour", ANY);
checkExpFails("interval '1-2' second(3) ^to^ year", ANY);
checkExpFails("interval '1-2' second(3) ^to^ month", ANY);
checkExpFails("interval '1-2' second(3) ^to^ day", ANY);
checkExpFails("interval '1-2' second(3) ^to^ hour", ANY);
checkExpFails("interval '1-2' second(3) ^to^ minute", ANY);
// illegal qualifiers, including precision in end field
checkExpFails("interval '1' year ^to^ year(2)", ANY);
checkExpFails("interval '1-2' year to month^(^2)", ANY);
checkExpFails("interval '1-2' year ^to^ day(2)", ANY);
checkExpFails("interval '1-2' year ^to^ hour(2)", ANY);
checkExpFails("interval '1-2' year ^to^ minute(2)", ANY);
checkExpFails("interval '1-2' year ^to^ second(2)", ANY);
checkExpFails("interval '1-2' year ^to^ second(2,6)", ANY);
checkExpFails("interval '1-2' month ^to^ year(2)", ANY);
checkExpFails("interval '1-2' month ^to^ month(2)", ANY);
checkExpFails("interval '1-2' month ^to^ day(2)", ANY);
checkExpFails("interval '1-2' month ^to^ hour(2)", ANY);
checkExpFails("interval '1-2' month ^to^ minute(2)", ANY);
checkExpFails("interval '1-2' month ^to^ second(2)", ANY);
checkExpFails("interval '1-2' month ^to^ second(2,6)", ANY);
checkExpFails("interval '1-2' day ^to^ year(2)", ANY);
checkExpFails("interval '1-2' day ^to^ month(2)", ANY);
checkExpFails("interval '1-2' day ^to^ day(2)", ANY);
checkExpFails("interval '1-2' day to hour^(^2)", ANY);
checkExpFails("interval '1-2' day to minute^(^2)", ANY);
checkExpFails("interval '1-2' day to second(2^,^6)", ANY);
checkExpFails("interval '1-2' hour ^to^ year(2)", ANY);
checkExpFails("interval '1-2' hour ^to^ month(2)", ANY);
checkExpFails("interval '1-2' hour ^to^ day(2)", ANY);
checkExpFails("interval '1-2' hour ^to^ hour(2)", ANY);
checkExpFails("interval '1-2' hour to minute^(^2)", ANY);
checkExpFails("interval '1-2' hour to second(2^,^6)", ANY);
checkExpFails("interval '1-2' minute ^to^ year(2)", ANY);
checkExpFails("interval '1-2' minute ^to^ month(2)", ANY);
checkExpFails("interval '1-2' minute ^to^ day(2)", ANY);
checkExpFails("interval '1-2' minute ^to^ hour(2)", ANY);
checkExpFails("interval '1-2' minute ^to^ minute(2)", ANY);
checkExpFails("interval '1-2' minute to second(2^,^6)", ANY);
checkExpFails("interval '1-2' second ^to^ year(2)", ANY);
checkExpFails("interval '1-2' second ^to^ month(2)", ANY);
checkExpFails("interval '1-2' second ^to^ day(2)", ANY);
checkExpFails("interval '1-2' second ^to^ hour(2)", ANY);
checkExpFails("interval '1-2' second ^to^ minute(2)", ANY);
checkExpFails("interval '1-2' second ^to^ second(2)", ANY);
checkExpFails("interval '1-2' second ^to^ second(2,6)", ANY);
// illegal qualifiers, including precision in start and end field
checkExpFails("interval '1' year(3) ^to^ year(2)", ANY);
checkExpFails("interval '1-2' year(3) to month^(^2)", ANY);
checkExpFails("interval '1-2' year(3) ^to^ day(2)", ANY);
checkExpFails("interval '1-2' year(3) ^to^ hour(2)", ANY);
checkExpFails("interval '1-2' year(3) ^to^ minute(2)", ANY);
checkExpFails("interval '1-2' year(3) ^to^ second(2)", ANY);
checkExpFails("interval '1-2' year(3) ^to^ second(2,6)", ANY);
checkExpFails("interval '1-2' month(3) ^to^ year(2)", ANY);
checkExpFails("interval '1-2' month(3) ^to^ month(2)", ANY);
checkExpFails("interval '1-2' month(3) ^to^ day(2)", ANY);
checkExpFails("interval '1-2' month(3) ^to^ hour(2)", ANY);
checkExpFails("interval '1-2' month(3) ^to^ minute(2)", ANY);
checkExpFails("interval '1-2' month(3) ^to^ second(2)", ANY);
checkExpFails("interval '1-2' month(3) ^to^ second(2,6)", ANY);
checkExpFails("interval '1-2' day(3) ^to^ year(2)", ANY);
checkExpFails("interval '1-2' day(3) ^to^ month(2)", ANY);
checkExpFails("interval '1-2' day(3) ^to^ day(2)", ANY);
checkExpFails("interval '1-2' day(3) to hour^(^2)", ANY);
checkExpFails("interval '1-2' day(3) to minute^(^2)", ANY);
checkExpFails("interval '1-2' day(3) to second(2^,^6)", ANY);
checkExpFails("interval '1-2' hour(3) ^to^ year(2)", ANY);
checkExpFails("interval '1-2' hour(3) ^to^ month(2)", ANY);
checkExpFails("interval '1-2' hour(3) ^to^ day(2)", ANY);
checkExpFails("interval '1-2' hour(3) ^to^ hour(2)", ANY);
checkExpFails("interval '1-2' hour(3) to minute^(^2)", ANY);
checkExpFails("interval '1-2' hour(3) to second(2^,^6)", ANY);
checkExpFails("interval '1-2' minute(3) ^to^ year(2)", ANY);
checkExpFails("interval '1-2' minute(3) ^to^ month(2)", ANY);
checkExpFails("interval '1-2' minute(3) ^to^ day(2)", ANY);
checkExpFails("interval '1-2' minute(3) ^to^ hour(2)", ANY);
checkExpFails("interval '1-2' minute(3) ^to^ minute(2)", ANY);
checkExpFails("interval '1-2' minute(3) to second(2^,^6)", ANY);
checkExpFails("interval '1-2' second(3) ^to^ year(2)", ANY);
checkExpFails("interval '1-2' second(3) ^to^ month(2)", ANY);
checkExpFails("interval '1-2' second(3) ^to^ day(2)", ANY);
checkExpFails("interval '1-2' second(3) ^to^ hour(2)", ANY);
checkExpFails("interval '1-2' second(3) ^to^ minute(2)", ANY);
checkExpFails("interval '1-2' second(3) ^to^ second(2)", ANY);
checkExpFails("interval '1-2' second(3) ^to^ second(2,6)", ANY);
// precision of -1 (< minimum allowed)
// FIXME should fail at "-" or "-1"
checkExpFails("INTERVAL '0' YEAR^(^-1)", ANY);
checkExpFails("INTERVAL '0-0' YEAR^(^-1) TO MONTH", ANY);
checkExpFails("INTERVAL '0' MONTH^(^-1)", ANY);
checkExpFails("INTERVAL '0' DAY^(^-1)", ANY);
checkExpFails("INTERVAL '0 0' DAY^(^-1) TO HOUR", ANY);
checkExpFails("INTERVAL '0 0' DAY^(^-1) TO MINUTE", ANY);
checkExpFails("INTERVAL '0 0:0:0' DAY^(^-1) TO SECOND", ANY);
checkExpFails("INTERVAL '0 0:0:0' DAY TO SECOND^(^-1)", ANY);
checkExpFails("INTERVAL '0' HOUR^(^-1)", ANY);
checkExpFails("INTERVAL '0:0' HOUR^(^-1) TO MINUTE", ANY);
checkExpFails("INTERVAL '0:0:0' HOUR^(^-1) TO SECOND", ANY);
checkExpFails("INTERVAL '0:0:0' HOUR TO SECOND^(^-1)", ANY);
checkExpFails("INTERVAL '0' MINUTE^(^-1)", ANY);
checkExpFails("INTERVAL '0:0' MINUTE^(^-1) TO SECOND", ANY);
checkExpFails("INTERVAL '0:0' MINUTE TO SECOND^(^-1)", ANY);
checkExpFails("INTERVAL '0' SECOND^(^-1)", ANY);
checkExpFails("INTERVAL '0' SECOND(1^,^ -1)", ANY);
// These may actually be legal per SQL2003, as the first field is
// "more significant" than the last, but we do not support them
checkExpFails("interval '1' day(3) ^to^ day", ANY);
checkExpFails("interval '1' hour(3) ^to^ hour", ANY);
checkExpFails("interval '1' minute(3) ^to^ minute", ANY);
checkExpFails("interval '1' second(3) ^to^ second", ANY);
checkExpFails("interval '1' second(3,1) ^to^ second", ANY);
checkExpFails("interval '1' second(2,3) ^to^ second", ANY);
checkExpFails("interval '1' second(2,2) ^to^ second(3)", ANY);
}
/**
 * Miscellaneous INTERVAL qualifier cases: the parser accepts malformed
 * interval value strings (e.g. {@code '-'}, {@code '1:x'}) — only the
 * validator rejects them — but rejects malformed qualifier syntax.
 */
@Test public void testMiscIntervalQualifier() {
// Bad value string: parses fine; the validator would reject it.
checkExp("interval '-' day", "INTERVAL '-' DAY");
// A three-field qualifier (day to hour to second) is a syntax error.
checkExpFails(
"interval '1 2:3:4.567' day to hour ^to^ second",
"(?s)Encountered \"to\" at.*");
// Fractional-second precision is not allowed on the end field here.
checkExpFails(
"interval '1:2' minute to second(2^,^ 2)",
"(?s)Encountered \",\" at.*");
checkExp(
"interval '1:x' hour to minute",
"INTERVAL '1:x' HOUR TO MINUTE");
checkExp(
"interval '1:x:2' hour to second",
"INTERVAL '1:x:2' HOUR TO SECOND");
}
/**
 * Tests unary minus, addition and subtraction on INTERVAL expressions, and
 * the placement of the sign inside or outside the quoted value.
 */
@Test public void testIntervalOperators() {
checkExp("-interval '1' day", "(- INTERVAL '1' DAY)");
checkExp(
"interval '1' day + interval '1' day",
"(INTERVAL '1' DAY + INTERVAL '1' DAY)");
checkExp(
"interval '1' day - interval '1:2:3' hour to second",
"(INTERVAL '1' DAY - INTERVAL '1:2:3' HOUR TO SECOND)");
// Sign before the quoted value vs. inside it — both parse, and unparse
// exactly as written.
checkExp("interval -'1' day", "INTERVAL -'1' DAY");
checkExp("interval '-1' day", "INTERVAL '-1' DAY");
// A literal with no qualifier at all is a parse error.
checkExpFails(
"interval 'wael was here^'^",
"(?s)Encountered \"<EOF>\".*");
checkExp(
"interval 'wael was here' HOUR",
"INTERVAL 'wael was here' HOUR"); // ok in parser, not in validator
}
/**
 * Tests the {@code (datetime - datetime) interval-qualifier} construct:
 * only a subtraction of exactly two operands may be followed by a
 * qualifier.
 */
@Test public void testDateMinusDate() {
checkExp("(date1 - date2) HOUR", "((`DATE1` - `DATE2`) HOUR)");
checkExp(
"(date1 - date2) YEAR TO MONTH",
"((`DATE1` - `DATE2`) YEAR TO MONTH)");
checkExp(
"(date1 - date2) HOUR > interval '1' HOUR",
"(((`DATE1` - `DATE2`) HOUR) > INTERVAL '1' HOUR)");
// Addition (or any other expression shape) before the qualifier fails.
checkExpFails(
"^(date1 + date2) second^",
"(?s).*Illegal expression. Was expecting ..DATETIME - DATETIME. INTERVALQUALIFIER.*");
checkExpFails(
"^(date1,date2,date2) second^",
"(?s).*Illegal expression. Was expecting ..DATETIME - DATETIME. INTERVALQUALIFIER.*");
}
/**
 * Tests EXTRACT with each single time unit; compound units
 * (e.g. {@code DAY TO SECOND}) are not allowed in EXTRACT.
 */
@Test public void testExtract() {
checkExp("extract(year from x)", "EXTRACT(YEAR FROM `X`)");
checkExp("extract(month from x)", "EXTRACT(MONTH FROM `X`)");
checkExp("extract(day from x)", "EXTRACT(DAY FROM `X`)");
checkExp("extract(hour from x)", "EXTRACT(HOUR FROM `X`)");
checkExp("extract(minute from x)", "EXTRACT(MINUTE FROM `X`)");
checkExp("extract(second from x)", "EXTRACT(SECOND FROM `X`)");
// Compound interval qualifiers are rejected by the parser.
checkExpFails(
"extract(day ^to^ second from x)",
"(?s)Encountered \"to\".*");
}
/**
 * Tests arithmetic mixing datetime and INTERVAL operands: datetime +/-
 * interval, interval + datetime, and interval scaled by a numeric.
 */
@Test public void testIntervalArithmetics() {
// datetime - interval
checkExp(
"TIME '23:59:59' - interval '1' hour ",
"(TIME '23:59:59' - INTERVAL '1' HOUR)");
checkExp(
"TIMESTAMP '2000-01-01 23:59:59.1' - interval '1' hour ",
"(TIMESTAMP '2000-01-01 23:59:59.1' - INTERVAL '1' HOUR)");
checkExp(
"DATE '2000-01-01' - interval '1' hour ",
"(DATE '2000-01-01' - INTERVAL '1' HOUR)");
// datetime + interval
checkExp(
"TIME '23:59:59' + interval '1' hour ",
"(TIME '23:59:59' + INTERVAL '1' HOUR)");
checkExp(
"TIMESTAMP '2000-01-01 23:59:59.1' + interval '1' hour ",
"(TIMESTAMP '2000-01-01 23:59:59.1' + INTERVAL '1' HOUR)");
checkExp(
"DATE '2000-01-01' + interval '1' hour ",
"(DATE '2000-01-01' + INTERVAL '1' HOUR)");
// interval + datetime, and interval * / numeric (both orders for *)
checkExp(
"interval '1' hour + TIME '23:59:59' ",
"(INTERVAL '1' HOUR + TIME '23:59:59')");
checkExp("interval '1' hour * 8", "(INTERVAL '1' HOUR * 8)");
checkExp("1 * interval '1' hour", "(1 * INTERVAL '1' HOUR)");
checkExp("interval '1' hour / 8", "(INTERVAL '1' HOUR / 8)");
}
/**
 * Tests that all six comparison operators parse between two INTERVAL
 * operands (validity of comparing different units is a validator concern).
 */
@Test public void testIntervalCompare() {
checkExp(
"interval '1' hour = interval '1' second",
"(INTERVAL '1' HOUR = INTERVAL '1' SECOND)");
checkExp(
"interval '1' hour <> interval '1' second",
"(INTERVAL '1' HOUR <> INTERVAL '1' SECOND)");
checkExp(
"interval '1' hour < interval '1' second",
"(INTERVAL '1' HOUR < INTERVAL '1' SECOND)");
checkExp(
"interval '1' hour <= interval '1' second",
"(INTERVAL '1' HOUR <= INTERVAL '1' SECOND)");
checkExp(
"interval '1' hour > interval '1' second",
"(INTERVAL '1' HOUR > INTERVAL '1' SECOND)");
checkExp(
"interval '1' hour >= interval '1' second",
"(INTERVAL '1' HOUR >= INTERVAL '1' SECOND)");
}
/**
 * Tests CAST to every INTERVAL type (single and compound qualifiers), and
 * CAST of an interval literal to a character type.
 */
@Test public void testCastToInterval() {
checkExp("cast(x as interval year)", "CAST(`X` AS INTERVAL YEAR)");
checkExp("cast(x as interval month)", "CAST(`X` AS INTERVAL MONTH)");
checkExp(
"cast(x as interval year to month)",
"CAST(`X` AS INTERVAL YEAR TO MONTH)");
checkExp("cast(x as interval day)", "CAST(`X` AS INTERVAL DAY)");
checkExp("cast(x as interval hour)", "CAST(`X` AS INTERVAL HOUR)");
checkExp("cast(x as interval minute)", "CAST(`X` AS INTERVAL MINUTE)");
checkExp("cast(x as interval second)", "CAST(`X` AS INTERVAL SECOND)");
checkExp(
"cast(x as interval day to hour)",
"CAST(`X` AS INTERVAL DAY TO HOUR)");
checkExp(
"cast(x as interval day to minute)",
"CAST(`X` AS INTERVAL DAY TO MINUTE)");
checkExp(
"cast(x as interval day to second)",
"CAST(`X` AS INTERVAL DAY TO SECOND)");
checkExp(
"cast(x as interval hour to minute)",
"CAST(`X` AS INTERVAL HOUR TO MINUTE)");
checkExp(
"cast(x as interval hour to second)",
"CAST(`X` AS INTERVAL HOUR TO SECOND)");
checkExp(
"cast(x as interval minute to second)",
"CAST(`X` AS INTERVAL MINUTE TO SECOND)");
// Cast away from interval, too.
checkExp(
"cast(interval '3-2' year to month as CHAR(5))",
"CAST(INTERVAL '3-2' YEAR TO MONTH AS CHAR(5))");
}
/**
 * Tests TIMESTAMPADD and TIMESTAMPDIFF with every accepted interval-unit
 * alias, checking that each alias is normalized to its canonical keyword
 * when unparsed. Unrecognized units must be rejected by the parser.
 */
@Test public void testTimestampAddAndDiff() {
  // Aliases that the parser accepts for each canonical interval keyword.
  Map<String, List<String>> aliasesByCanonical =
      ImmutableMap.<String, List<String>>builder()
          .put("MICROSECOND",
              Arrays.asList("FRAC_SECOND", "MICROSECOND",
                  "SQL_TSI_FRAC_SECOND", "SQL_TSI_MICROSECOND"))
          .put("SECOND", Arrays.asList("SECOND", "SQL_TSI_SECOND"))
          .put("MINUTE", Arrays.asList("MINUTE", "SQL_TSI_MINUTE"))
          .put("HOUR", Arrays.asList("HOUR", "SQL_TSI_HOUR"))
          .put("DAY", Arrays.asList("DAY", "SQL_TSI_DAY"))
          .put("WEEK", Arrays.asList("WEEK", "SQL_TSI_WEEK"))
          .put("MONTH", Arrays.asList("MONTH", "SQL_TSI_MONTH"))
          .put("QUARTER", Arrays.asList("QUARTER", "SQL_TSI_QUARTER"))
          .put("YEAR", Arrays.asList("YEAR", "SQL_TSI_YEAR"))
          .build();
  // Call templates: %1$s is the unit keyword; %2$s supplies the back-tick
  // quoting that appears only in the expected (unparsed) form.
  List<String> templates = ImmutableList.<String>builder()
      .add("timestampadd(%1$s, 12, %2$scurrent_timestamp%2$s)")
      .add("timestampdiff(%1$s, %2$scurrent_timestamp%2$s, %2$scurrent_timestamp%2$s)")
      .build();
  for (Map.Entry<String, List<String>> entry : aliasesByCanonical.entrySet()) {
    final String canonical = entry.getKey();
    for (String template : templates) {
      for (String alias : entry.getValue()) {
        final String input = String.format(template, alias, "");
        final String expected =
            String.format(template, canonical, "`").toUpperCase();
        checkExp(input, expected);
      }
    }
  }
  // An unrecognized unit keyword is a parse error.
  checkExpFails("timestampadd(^incorrect^, 1, current_timestamp)",
      "(?s).*Was expecting one of.*");
  checkExpFails("timestampdiff(^incorrect^, current_timestamp, current_timestamp)",
      "(?s).*Was expecting one of.*");
}
/** Tests TIMESTAMPADD inside a query; the {@code SQL_TSI_MONTH} alias is
 * normalized to {@code MONTH} when unparsed. */
@Test public void testTimestampAdd() {
  sql("select * from t\n"
      + "where timestampadd(sql_tsi_month, 5, hiredate) < curdate")
      .ok("SELECT *\n"
          + "FROM `T`\n"
          + "WHERE (TIMESTAMPADD(MONTH, 5, `HIREDATE`) < `CURDATE`)");
}
/** Tests TIMESTAMPDIFF inside a query; the {@code FRAC_SECOND} alias is
 * normalized to {@code MICROSECOND} when unparsed. */
@Test public void testTimestampDiff() {
  sql("select * from t\n"
      + "where timestampdiff(frac_second, 5, hiredate) < curdate")
      .ok("SELECT *\n"
          + "FROM `T`\n"
          + "WHERE (TIMESTAMPDIFF(MICROSECOND, 5, `HIREDATE`) < `CURDATE`)");
}
/**
 * Tests UNNEST in the FROM clause, with and without an alias; UNNEST may
 * not start a query.
 */
@Test public void testUnnest() {
check(
"select*from unnest(x)",
"SELECT *\n"
+ "FROM (UNNEST(`X`))");
check(
"select*from unnest(x) AS T",
"SELECT *\n"
+ "FROM (UNNEST(`X`)) AS `T`");
// UNNEST cannot be first word in query
checkFails(
"^unnest^(x)",
"(?s)Encountered \"unnest\" at.*");
}
/**
 * Tests {@code UNNEST ... WITH ORDINALITY}; the WITH ORDINALITY clause
 * must precede the alias, not follow it.
 */
@Test public void testUnnestWithOrdinality() {
sql("select * from unnest(x) with ordinality")
.ok("SELECT *\n"
+ "FROM (UNNEST(`X`) WITH ORDINALITY)");
sql("select*from unnest(x) with ordinality AS T")
.ok("SELECT *\n"
+ "FROM (UNNEST(`X`) WITH ORDINALITY) AS `T`");
// Alias may also name the columns, including the ordinality column.
sql("select*from unnest(x) with ordinality AS T(c, o)")
.ok("SELECT *\n"
+ "FROM (UNNEST(`X`) WITH ORDINALITY) AS `T` (`C`, `O`)");
// WITH ORDINALITY after the alias is a parse error.
sql("select*from unnest(x) as T ^with^ ordinality")
.fails("(?s)Encountered \"with\" at .*");
}
/**
 * Tests which FROM-clause items may be parenthesized: UNNEST and bare
 * table names may not; joins may (the join cases are a disabled todo).
 */
@Test public void testParensInFrom() {
// UNNEST may not occur within parentheses.
// FIXME should fail at "unnest"
checkFails(
"select *from ^(^unnest(x))",
"(?s)Encountered \"\\( unnest\" at .*");
// <table-name> may not occur within parentheses.
checkFails(
"select * from (^emp^)",
"(?s)Non-query expression encountered in illegal context.*");
// <table-name> may not occur within parentheses.
checkFails(
"select * from (^emp^ as x)",
"(?s)Non-query expression encountered in illegal context.*");
// <table-name> may not occur within parentheses.
checkFails(
"select * from (^emp^) as x",
"(?s)Non-query expression encountered in illegal context.*");
// Parentheses around JOINs are OK, and sometimes necessary.
// Intentionally disabled pending correct expected output ("xx" is a
// placeholder).
if (false) {
// todo:
check(
"select * from (emp join dept using (deptno))",
"xx");
check(
"select * from (emp join dept using (deptno)) join foo using (x)",
"xx");
}
}
/** Tests the CALL statement with simple, quoted, and qualified names. */
@Test public void testProcedureCall() {
check("call blubber(5)", "(CALL `BLUBBER`(5))");
// Double-quoted names keep their case.
check("call \"blubber\"(5)", "(CALL `blubber`(5))");
check("call whale.blubber(5)", "(CALL `WHALE`.`BLUBBER`(5))");
}
/** Tests the NEW (user-defined type constructor) specification, alone and
 * inside larger expressions. */
@Test public void testNewSpecification() {
checkExp("new udt()", "(NEW `UDT`())");
checkExp("new my.udt(1, 'hey')", "(NEW `MY`.`UDT`(1, 'hey'))");
checkExp("new udt() is not null", "((NEW `UDT`()) IS NOT NULL)");
checkExp("1 + new udt()", "(1 + (NEW `UDT`()))");
}
/** Tests CAST of a multiset value to a multiset type. */
@Test public void testMultisetCast() {
checkExp(
"cast(multiset[1] as double multiset)",
"CAST((MULTISET[1]) AS DOUBLE MULTISET)");
}
/**
 * Tests {@link SqlParserUtil#addCarets}, which inserts the {@code ^...^}
 * error-position markers at the given (line, column) coordinates.
 */
@Test public void testAddCarets() {
assertEquals(
"values (^foo^)",
SqlParserUtil.addCarets("values (foo)", 1, 9, 1, 12));
// Identical start and end positions yield a single caret.
assertEquals(
"abc^def",
SqlParserUtil.addCarets("abcdef", 1, 4, 1, 4));
// A position just past the end of the string is allowed.
assertEquals(
"abcdef^",
SqlParserUtil.addCarets("abcdef", 1, 7, 1, 7));
}
/**
 * Tests the parser's metadata: which identifiers are reserved function
 * names, context variables, (non-)reserved keywords, reserved words, and
 * the comma-separated JDBC keyword list.
 */
@Test public void testMetadata() {
SqlAbstractParserImpl.Metadata metadata = getParserMetadata();
assertTrue(metadata.isReservedFunctionName("ABS"));
assertFalse(metadata.isReservedFunctionName("FOO"));
assertTrue(metadata.isContextVariableName("CURRENT_USER"));
assertTrue(metadata.isContextVariableName("CURRENT_CATALOG"));
assertTrue(metadata.isContextVariableName("CURRENT_SCHEMA"));
assertFalse(metadata.isContextVariableName("ABS"));
assertFalse(metadata.isContextVariableName("FOO"));
assertTrue(metadata.isNonReservedKeyword("A"));
assertTrue(metadata.isNonReservedKeyword("KEY"));
assertFalse(metadata.isNonReservedKeyword("SELECT"));
assertFalse(metadata.isNonReservedKeyword("FOO"));
assertFalse(metadata.isNonReservedKeyword("ABS"));
assertTrue(metadata.isKeyword("ABS"));
assertTrue(metadata.isKeyword("CURRENT_USER"));
assertTrue(metadata.isKeyword("CURRENT_CATALOG"));
assertTrue(metadata.isKeyword("CURRENT_SCHEMA"));
assertTrue(metadata.isKeyword("KEY"));
assertTrue(metadata.isKeyword("SELECT"));
assertTrue(metadata.isKeyword("HAVING"));
assertTrue(metadata.isKeyword("A"));
assertFalse(metadata.isKeyword("BAR"));
assertTrue(metadata.isReservedWord("SELECT"));
assertTrue(metadata.isReservedWord("CURRENT_CATALOG"));
assertTrue(metadata.isReservedWord("CURRENT_SCHEMA"));
assertFalse(metadata.isReservedWord("KEY"));
String jdbcKeywords = metadata.getJdbcKeywords();
assertTrue(jdbcKeywords.contains(",COLLECT,"));
// Use assertFalse rather than assertTrue(!...) for a clearer failure.
assertFalse(jdbcKeywords.contains(",SELECT,"));
}
/** Generates a copy of {@code reference.md} with the current set of key
 * words. Fails if the copy is different from the original. */
@Test public void testGenerateKeyWords() throws IOException {
// Locate the source tree by walking up from a resource on the test
// classpath; e.g.
// inUrl = "file:/home/x/calcite/core/target/test-classes/hsqldb-model.json"
String path = "hsqldb-model.json";
final URL inUrl = SqlParserTest.class.getResource("/" + path);
String x = inUrl.getFile();
assert x.endsWith(path);
x = x.substring(0, x.length() - path.length());
assert x.endsWith("core/target/test-classes/");
x = x.substring(0, x.length() - "core/target/test-classes/".length());
final File base = new File(x);
final File inFile = new File(base, "site/_docs/reference.md");
final File outFile = new File(base, "core/target/surefire/reference.md");
outFile.getParentFile().mkdirs();
// NOTE(review): FileReader/FileWriter use the platform default charset;
// presumably reference.md is ASCII/UTF-8 — confirm before changing.
try (BufferedReader r = new BufferedReader(new FileReader(inFile));
PrintWriter w = new PrintWriter(new FileWriter(outFile))) {
String line;
// stage 0: before the "start" marker (lines copied through);
// stage 1: between "start" and "end" (original lines skipped,
//          replaced by the freshly generated keyword list);
// stage 2: after the "end" marker (lines copied through).
int stage = 0;
while ((line = r.readLine()) != null) {
if (line.equals("{% comment %} end {% endcomment %}")) {
++stage;
}
if (stage != 1) {
w.println(line);
}
if (line.equals("{% comment %} start {% endcomment %}")) {
++stage;
// Emit the keyword list, one per line, comma-separated; reserved
// words are bold ("**WORD**").
SqlAbstractParserImpl.Metadata metadata = getParserMetadata();
int z = 0;
for (String s : metadata.getTokens()) {
if (z++ > 0) {
w.println(",");
}
if (metadata.isKeyword(s)) {
w.print(metadata.isReservedWord(s) ? ("**" + s + "**") : s);
}
}
w.println(".");
}
}
}
// The regenerated copy must match the committed reference.md exactly.
String diff = DiffTestCase.diff(outFile, inFile);
if (!diff.isEmpty()) {
throw new AssertionError("Mismatch between " + outFile
+ " and " + inFile + ":\n"
+ diff);
}
}
/**
 * Tests that tab characters are accepted as whitespace and do not shift
 * reported error positions.
 */
@Test public void testTabStop() {
check(
"SELECT *\n\tFROM mytable",
"SELECT *\n"
+ "FROM `MYTABLE`");
// make sure that the tab stops do not affect the placement of the
// error tokens
checkFails(
"SELECT *\tFROM mytable\t\tWHERE x ^=^ = y AND b = 1",
"(?s).*Encountered \"= =\" at line 1, column 32\\..*");
}
/**
 * Tests the 128-character identifier length limit: an identifier of
 * exactly 128 characters is accepted, one of 129 is rejected, both in the
 * FROM clause and in the SELECT list.
 */
@Test public void testLongIdentifiers() {
  // Build an identifier of exactly 128 characters (the maximum) and one
  // of 129 characters (one past the maximum).
  final StringBuilder sb = new StringBuilder(128);
  for (int i = 0; i < 128; i++) {
    sb.append((char) ('a' + (i % 26)));
  }
  final String maxIdent = sb.toString();
  final String maxIdentUpper = maxIdent.toUpperCase(Locale.US);
  final String longIdent = "x" + maxIdent;
  final String longIdentUpper = longIdent.toUpperCase(Locale.US);

  // In the FROM clause.
  check("select * from " + maxIdent,
      "SELECT *\n"
          + "FROM `" + maxIdentUpper + "`");
  checkFails("select * from ^" + longIdent + "^",
      "Length of identifier '" + longIdentUpper
          + "' must be less than or equal to 128 characters");

  // In the SELECT list.
  check("select " + maxIdent + " from mytable",
      "SELECT `" + maxIdentUpper + "`\n"
          + "FROM `MYTABLE`");
  checkFails("select ^" + longIdent + "^ from mytable",
      "Length of identifier '" + longIdentUpper
          + "' must be less than or equal to 128 characters");
}
/**
 * Tests that you can't quote the names of builtin functions.
 *
 * @see org.apache.calcite.test.SqlValidatorTest#testQuotedFunction()
 */
@Test public void testQuotedFunction() {
// Quoting turns the name into an ordinary identifier, so the special
// argument syntax of each builtin no longer parses.
checkExpFails(
"\"CAST\"(1 ^as^ double)",
"(?s).*Encountered \"as\" at .*");
checkExpFails(
"\"POSITION\"('b' ^in^ 'alphabet')",
"(?s).*Encountered \"in \\\\'alphabet\\\\'\" at .*");
checkExpFails(
"\"OVERLAY\"('a' ^PLAcing^ 'b' from 1)",
"(?s).*Encountered \"PLAcing\" at.*");
checkExpFails(
"\"SUBSTRING\"('a' ^from^ 1)",
"(?s).*Encountered \"from\" at .*");
}
/**
 * Tests string literals that directly contain Unicode characters, and the
 * conditions under which SQL Unicode escape sequences are (not) decoded.
 */
@Test public void testUnicodeLiteral() {
// Note that here we are constructing a SQL statement which directly
// contains Unicode characters (not SQL Unicode escape sequences). The
// escaping here is Java-only, so by the time it gets to the SQL
// parser, the literal already contains Unicode characters.
String in1 =
"values _UTF16'"
+ ConversionUtil.TEST_UNICODE_STRING + "'";
String out1 =
"VALUES (ROW(_UTF16'"
+ ConversionUtil.TEST_UNICODE_STRING + "'))";
check(in1, out1);
// Without the U& prefix, escapes are left unprocessed
String in2 =
"values '"
+ ConversionUtil.TEST_UNICODE_SQL_ESCAPED_LITERAL + "'";
String out2 =
"VALUES (ROW('"
+ ConversionUtil.TEST_UNICODE_SQL_ESCAPED_LITERAL + "'))";
check(in2, out2);
// Likewise, even with the U& prefix, if some other escape
// character is specified, then the backslash-escape
// sequences are not interpreted
String in3 =
"values U&'"
+ ConversionUtil.TEST_UNICODE_SQL_ESCAPED_LITERAL
+ "' UESCAPE '!'";
String out3 =
"VALUES (ROW(_UTF16'"
+ ConversionUtil.TEST_UNICODE_SQL_ESCAPED_LITERAL + "'))";
check(in3, out3);
}
/**
 * Tests {@code U&'...'} literals whose Unicode escape sequences must be
 * decoded by the SQL parser, with the default and an explicit UESCAPE
 * character.
 */
@Test public void testUnicodeEscapedLiteral() {
// Note that here we are constructing a SQL statement which
// contains SQL-escaped Unicode characters to be handled
// by the SQL parser.
String in =
"values U&'"
+ ConversionUtil.TEST_UNICODE_SQL_ESCAPED_LITERAL + "'";
String out =
"VALUES (ROW(_UTF16'"
+ ConversionUtil.TEST_UNICODE_STRING + "'))";
check(in, out);
// Verify that we can override with an explicit escape character
check(in.replaceAll("\\\\", "!") + "UESCAPE '!'", out);
}
/**
 * Tests illegal UESCAPE characters (wrong length, hex digits, whitespace,
 * plus, double quote), UESCAPE without a Unicode introducer, and malformed
 * escape sequences inside the literal.
 */
@Test public void testIllegalUnicodeEscape() {
// UESCAPE must be exactly one character.
checkExpFails(
"U&'abc' UESCAPE '!!'",
".*must be exactly one character.*");
checkExpFails(
"U&'abc' UESCAPE ''",
".*must be exactly one character.*");
// The escape character may not be a hex digit ...
checkExpFails(
"U&'abc' UESCAPE '0'",
".*hex digit.*");
checkExpFails(
"U&'abc' UESCAPE 'a'",
".*hex digit.*");
checkExpFails(
"U&'abc' UESCAPE 'F'",
".*hex digit.*");
// ... nor whitespace, plus sign, or double quote.
checkExpFails(
"U&'abc' UESCAPE ' '",
".*whitespace.*");
checkExpFails(
"U&'abc' UESCAPE '+'",
".*plus sign.*");
checkExpFails(
"U&'abc' UESCAPE '\"'",
".*double quote.*");
// UESCAPE requires a U& literal.
checkExpFails(
"'abc' UESCAPE ^'!'^",
".*without Unicode literal introducer.*");
// Escape sequences must be exactly four hex digits.
checkExpFails(
"^U&'\\0A'^",
".*is not exactly four hex digits.*");
checkExpFails(
"^U&'\\wxyz'^",
".*is not exactly four hex digits.*");
}
/**
 * Tests ALTER SYSTEM SET / SET / RESET statements: scope, option name and
 * value access on the parsed {@link SqlSetOption}, unparsing, and the
 * restrictions (no expressions, no multiple assignments).
 */
@Test public void testSqlOptions() throws SqlParseException {
SqlNode node =
SqlParser.create("alter system set schema = true").parseStmt();
SqlSetOption opt = (SqlSetOption) node;
assertThat(opt.getScope(), equalTo("SYSTEM"));
SqlPrettyWriter writer = new SqlPrettyWriter(SqlDialect.CALCITE);
assertThat(writer.format(opt.getName()), equalTo("\"SCHEMA\""));
writer = new SqlPrettyWriter(SqlDialect.CALCITE);
assertThat(writer.format(opt.getValue()), equalTo("TRUE"));
writer = new SqlPrettyWriter(SqlDialect.CALCITE);
assertThat(writer.format(opt),
equalTo("ALTER SYSTEM SET \"SCHEMA\" = TRUE"));
// Various value types: quoted name, boolean, numeric, identifiers.
check("alter system set \"a number\" = 1",
"ALTER SYSTEM SET `a number` = 1");
check("alter system set flag = false",
"ALTER SYSTEM SET `FLAG` = FALSE");
check("alter system set approx = -12.3450",
"ALTER SYSTEM SET `APPROX` = -12.3450");
check("alter system set onOff = on",
"ALTER SYSTEM SET `ONOFF` = `ON`");
check("alter system set onOff = off",
"ALTER SYSTEM SET `ONOFF` = `OFF`");
check("alter system set baz = foo",
"ALTER SYSTEM SET `BAZ` = `FOO`");
check("alter system set \"a\".\"number\" = 1",
"ALTER SYSTEM SET `a`.`number` = 1");
// SET without ALTER SYSTEM (no scope).
check("set approx = -12.3450",
"SET `APPROX` = -12.3450");
// RESET: no scope, no value.
node = SqlParser.create("reset schema").parseStmt();
opt = (SqlSetOption) node;
assertThat(opt.getScope(), equalTo(null));
writer = new SqlPrettyWriter(SqlDialect.CALCITE);
assertThat(writer.format(opt.getName()), equalTo("\"SCHEMA\""));
assertThat(opt.getValue(), equalTo(null));
writer = new SqlPrettyWriter(SqlDialect.CALCITE);
assertThat(writer.format(opt),
equalTo("RESET \"SCHEMA\""));
check("alter system RESET flag",
"ALTER SYSTEM RESET `FLAG`");
check("reset onOff",
"RESET `ONOFF`");
check("reset \"this\".\"is\".\"sparta\"",
"RESET `this`.`is`.`sparta`");
check("alter system reset all",
"ALTER SYSTEM RESET `ALL`");
check("reset all",
"RESET `ALL`");
// expressions not allowed
checkFails("alter system set aString = 'abc' ^||^ 'def' ",
"(?s)Encountered \"\\|\\|\" at line 1, column 34\\..*");
// multiple assignments not allowed
checkFails("alter system set x = 1^,^ y = 2",
"(?s)Encountered \",\" at line 1, column 23\\..*");
}
/**
 * Tests NEXT VALUE FOR and CURRENT VALUE FOR sequence expressions in
 * SELECT lists, WHERE clauses, and INSERT ... VALUES.
 */
@Test public void testSequence() {
sql("select next value for my_schema.my_seq from t")
.ok("SELECT (NEXT VALUE FOR `MY_SCHEMA`.`MY_SEQ`)\n"
+ "FROM `T`");
sql("select next value for my_schema.my_seq as s from t")
.ok("SELECT (NEXT VALUE FOR `MY_SCHEMA`.`MY_SEQ`) AS `S`\n"
+ "FROM `T`");
sql("select next value for my_seq as s from t")
.ok("SELECT (NEXT VALUE FOR `MY_SEQ`) AS `S`\n"
+ "FROM `T`");
// Sequence expressions compose with ordinary arithmetic.
sql("select 1 + next value for s + current value for s from t")
.ok("SELECT ((1 + (NEXT VALUE FOR `S`)) + (CURRENT VALUE FOR `S`))\n"
+ "FROM `T`");
sql("select 1 from t where next value for my_seq < 10")
.ok("SELECT 1\n"
+ "FROM `T`\n"
+ "WHERE ((NEXT VALUE FOR `MY_SEQ`) < 10)");
sql("select 1 from t\n"
+ "where next value for my_seq < 10 fetch next 3 rows only")
.ok("SELECT 1\n"
+ "FROM `T`\n"
+ "WHERE ((NEXT VALUE FOR `MY_SEQ`) < 10)\n"
+ "FETCH NEXT 3 ROWS ONLY");
// In VALUES, each bare sequence expression becomes its own row.
sql("insert into t values next value for my_seq, current value for my_seq")
.ok("INSERT INTO `T`\n"
+ "(VALUES (ROW((NEXT VALUE FOR `MY_SEQ`))), (ROW((CURRENT VALUE FOR `MY_SEQ`))))");
sql("insert into t values (1, current value for my_seq)")
.ok("INSERT INTO `T`\n"
+ "(VALUES (ROW(1, (CURRENT VALUE FOR `MY_SEQ`))))");
}
//~ Inner Interfaces -------------------------------------------------------
/**
 * Callback to control how test actions are performed.
 */
protected interface Tester {
/** Parses {@code sql} as a statement and checks that it unparses to
 * {@code expected}. */
void check(String sql, String expected);
/** Parses {@code sql} as an expression and checks that it unparses to
 * {@code expected}. */
void checkExp(String sql, String expected);
/** Parses {@code sql} (with {@code ^...^} position markers) as a statement
 * and checks that it fails with a message matching the pattern. */
void checkFails(String sql, String expectedMsgPattern);
/** Parses {@code sql} (with {@code ^...^} position markers) as an
 * expression and checks that it fails with a message matching the
 * pattern. */
void checkExpFails(String sql, String expectedMsgPattern);
}
//~ Inner Classes ----------------------------------------------------------
/**
* Default implementation of {@link Tester}.
*/
protected class TesterImpl implements Tester {
public void check(
String sql,
String expected) {
final SqlNode sqlNode = parseStmtAndHandleEx(sql);
// no dialect, always parenthesize
String actual = sqlNode.toSqlString(null, true).getSql();
if (LINUXIFY.get()[0]) {
actual = Util.toLinux(actual);
}
TestUtil.assertEqualsVerbose(expected, actual);
}
protected SqlNode parseStmtAndHandleEx(String sql) {
final SqlNode sqlNode;
try {
sqlNode = parseStmt(sql);
} catch (SqlParseException e) {
e.printStackTrace();
String message = "Received error while parsing SQL '" + sql
+ "'; error is:\n"
+ e.toString();
throw new AssertionError(message);
}
return sqlNode;
}
public void checkExp(
String sql,
String expected) {
final SqlNode sqlNode = parseExpressionAndHandleEx(sql);
String actual = sqlNode.toSqlString(null, true).getSql();
if (LINUXIFY.get()[0]) {
actual = Util.toLinux(actual);
}
TestUtil.assertEqualsVerbose(expected, actual);
}
protected SqlNode parseExpressionAndHandleEx(String sql) {
final SqlNode sqlNode;
try {
sqlNode = parseExpression(sql);
} catch (SqlParseException e) {
String message = "Received error while parsing SQL '" + sql
+ "'; error is:\n"
+ e.toString();
throw new RuntimeException(message, e);
}
return sqlNode;
}
public void checkFails(
String sql,
String expectedMsgPattern) {
SqlParserUtil.StringAndPos sap = SqlParserUtil.findPos(sql);
Throwable thrown = null;
try {
final SqlNode sqlNode = parseStmt(sap.sql);
Util.discard(sqlNode);
} catch (Throwable ex) {
thrown = ex;
}
SqlValidatorTestCase.checkEx(thrown, expectedMsgPattern, sap);
}
/**
* Tests that an expression throws an exception which matches the given
* pattern.
*/
public void checkExpFails(
String sql,
String expectedMsgPattern) {
SqlParserUtil.StringAndPos sap = SqlParserUtil.findPos(sql);
Throwable thrown = null;
try {
final SqlNode sqlNode = parseExpression(sap.sql);
Util.discard(sqlNode);
} catch (Throwable ex) {
thrown = ex;
}
SqlValidatorTestCase.checkEx(thrown, expectedMsgPattern, sap);
}
}
/**
* Implementation of {@link Tester} which makes sure that the results of
* unparsing a query are consistent with the original query.
*/
public class UnparsingTesterImpl extends TesterImpl {
public void check(String sql, String expected) {
SqlNode sqlNode = parseStmtAndHandleEx(sql);
// Unparse with no dialect, always parenthesize.
final String actual = sqlNode.toSqlString(null, true).getSql();
assertEquals(expected, linux(actual));
// Unparse again in Calcite dialect (which we can parse), and
// minimal parentheses.
final String sql1 =
sqlNode.toSqlString(SqlDialect.CALCITE, false).getSql();
// Parse and unparse again.
SqlNode sqlNode2;
final Quoting q = quoting;
try {
quoting = Quoting.DOUBLE_QUOTE;
sqlNode2 = parseStmtAndHandleEx(sql1);
} finally {
quoting = q;
}
final String sql2 =
sqlNode2.toSqlString(SqlDialect.CALCITE, false).getSql();
// Should be the same as we started with.
assertEquals(sql1, sql2);
// Now unparse again in the null dialect.
// If the unparser is not including sufficient parens to override
// precedence, the problem will show up here.
final String actual2 = sqlNode2.toSqlString(null, true).getSql();
assertEquals(expected, linux(actual2));
}
public void checkExp(String sql, String expected) {
SqlNode sqlNode = parseExpressionAndHandleEx(sql);
// Unparse with no dialect, always parenthesize.
final String actual = sqlNode.toSqlString(null, true).getSql();
assertEquals(expected, linux(actual));
// Unparse again in Calcite dialect (which we can parse), and
// minimal parentheses.
final String sql1 =
sqlNode.toSqlString(SqlDialect.CALCITE, false).getSql();
// Parse and unparse again.
SqlNode sqlNode2;
final Quoting q = quoting;
try {
quoting = Quoting.DOUBLE_QUOTE;
sqlNode2 = parseExpressionAndHandleEx(sql1);
} finally {
quoting = q;
}
final String sql2 =
sqlNode2.toSqlString(SqlDialect.CALCITE, false).getSql();
// Should be the same as we started with.
assertEquals(sql1, sql2);
// Now unparse again in the null dialect.
// If the unparser is not including sufficient parens to override
// precedence, the problem will show up here.
final String actual2 = sqlNode2.toSqlString(null, true).getSql();
assertEquals(expected, linux(actual2));
}
public void checkFails(String sql, String expectedMsgPattern) {
// Do nothing. We're not interested in unparsing invalid SQL
}
public void checkExpFails(String sql, String expectedMsgPattern) {
// Do nothing. We're not interested in unparsing invalid SQL
}
}
/** Converts line endings to Unix style when the LINUXIFY flag is set;
 * otherwise returns the string unchanged. */
private String linux(String s) {
  return LINUXIFY.get()[0] ? Util.toLinux(s) : s;
}
/** Fluent-test helper: wraps a SQL string so tests read as
 * {@code sql("values 1").ok(...);} or {@code sql("...").fails(...);}. */
private class Sql {
  private final String sql;

  Sql(String sql) {
    this.sql = sql;
  }

  /** Asserts that the wrapped SQL parses and unparses to {@code expected}. */
  public void ok(String expected) {
    getTester().check(this.sql, expected);
  }

  /** Asserts that parsing the wrapped SQL fails with a message matching
   * {@code expectedMsgPattern}. */
  public void fails(String expectedMsgPattern) {
    getTester().checkFails(this.sql, expectedMsgPattern);
  }
}
}
// End SqlParserTest.java
| apache-2.0 |
sergecodd/FireFox-OS | B2G/gecko/mobile/android/base/httpclientandroidlib/impl/cookie/DateParseException.java | 1762 | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package ch.boye.httpclientandroidlib.impl.cookie;
import ch.boye.httpclientandroidlib.annotation.Immutable;
/**
 * An exception to indicate an error parsing a date string.
 *
 * @see DateUtils
 *
 * @since 4.0
 */
@Immutable
public class DateParseException extends Exception {
// Serialization identity; must stay stable across releases.
private static final long serialVersionUID = 4417696455000643370L;
/**
 * Creates a new DateParseException with a {@code null} detail message.
 */
public DateParseException() {
super();
}
/**
 * Creates a new DateParseException with the specified detail message.
 *
 * @param message the exception message
 */
public DateParseException(String message) {
super(message);
}
}
| apache-2.0 |
google/ExoPlayer | library/extractor/src/main/java/com/google/android/exoplayer2/extractor/ts/DtsReader.java | 6649 | /*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.extractor.ts;
import static java.lang.Math.min;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.DtsUtil;
import com.google.android.exoplayer2.extractor.ExtractorOutput;
import com.google.android.exoplayer2.extractor.TrackOutput;
import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.ParsableByteArray;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/** Parses a continuous DTS byte stream and extracts individual samples. */
public final class DtsReader implements ElementaryStreamReader {

  // Reader state machine: look for the sync word, read the fixed-size header,
  // then copy the remainder of the frame through as sample data.
  private static final int STATE_FINDING_SYNC = 0;
  private static final int STATE_READING_HEADER = 1;
  private static final int STATE_READING_SAMPLE = 2;

  private static final int HEADER_SIZE = 18;

  private final ParsableByteArray headerScratchBytes;
  @Nullable private final String language;

  private @MonotonicNonNull String formatId;
  private @MonotonicNonNull TrackOutput output;

  private int state;
  private int bytesRead;

  // Used to find the header: rolling window of the last 4 bytes seen.
  private int syncBytes;

  // Used when parsing the header.
  private long sampleDurationUs;
  private @MonotonicNonNull Format format;
  private int sampleSize;

  // Used when reading the samples.
  private long timeUs;

  /**
   * Constructs a new reader for DTS elementary streams.
   *
   * @param language Track language.
   */
  public DtsReader(@Nullable String language) {
    headerScratchBytes = new ParsableByteArray(new byte[HEADER_SIZE]);
    state = STATE_FINDING_SYNC;
    timeUs = C.TIME_UNSET;
    this.language = language;
  }

  @Override
  public void seek() {
    state = STATE_FINDING_SYNC;
    bytesRead = 0;
    syncBytes = 0;
    timeUs = C.TIME_UNSET;
  }

  @Override
  public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) {
    idGenerator.generateNewId();
    formatId = idGenerator.getFormatId();
    output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_AUDIO);
  }

  @Override
  public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) {
    if (pesTimeUs != C.TIME_UNSET) {
      timeUs = pesTimeUs;
    }
  }

  @Override
  public void consume(ParsableByteArray data) {
    Assertions.checkStateNotNull(output); // Asserts that createTracks has been called.
    while (data.bytesLeft() > 0) {
      switch (state) {
        case STATE_FINDING_SYNC:
          if (skipToNextSync(data)) {
            state = STATE_READING_HEADER;
          }
          break;
        case STATE_READING_HEADER:
          if (continueRead(data, headerScratchBytes.getData(), HEADER_SIZE)) {
            parseHeader();
            headerScratchBytes.setPosition(0);
            output.sampleData(headerScratchBytes, HEADER_SIZE);
            state = STATE_READING_SAMPLE;
          }
          break;
        case STATE_READING_SAMPLE:
          int bytesToRead = min(data.bytesLeft(), sampleSize - bytesRead);
          output.sampleData(data, bytesToRead);
          bytesRead += bytesToRead;
          if (bytesRead == sampleSize) {
            // Only emit metadata once a PES timestamp has been observed.
            if (timeUs != C.TIME_UNSET) {
              output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null);
              timeUs += sampleDurationUs;
            }
            state = STATE_FINDING_SYNC;
          }
          break;
        default:
          throw new IllegalStateException();
      }
    }
  }

  @Override
  public void packetFinished() {
    // Do nothing.
  }

  /**
   * Continues a read from the provided {@code source} into a given {@code target}. It's assumed
   * that the data should be written into {@code target} starting from an offset of zero.
   *
   * @param source The source from which to read.
   * @param target The target into which data is to be read.
   * @param targetLength The target length of the read.
   * @return Whether the target length was reached.
   */
  private boolean continueRead(ParsableByteArray source, byte[] target, int targetLength) {
    int bytesToRead = min(source.bytesLeft(), targetLength - bytesRead);
    source.readBytes(target, bytesRead, bytesToRead);
    bytesRead += bytesToRead;
    return bytesRead == targetLength;
  }

  /**
   * Locates the next SYNC value in the buffer, advancing the position to the byte that immediately
   * follows it. If SYNC was not located, the position is advanced to the limit.
   *
   * @param pesBuffer The buffer whose position should be advanced.
   * @return Whether SYNC was found.
   */
  private boolean skipToNextSync(ParsableByteArray pesBuffer) {
    while (pesBuffer.bytesLeft() > 0) {
      syncBytes <<= 8;
      syncBytes |= pesBuffer.readUnsignedByte();
      if (DtsUtil.isSyncWord(syncBytes)) {
        // Seed the header scratch with the 4 sync bytes already consumed.
        byte[] headerData = headerScratchBytes.getData();
        headerData[0] = (byte) ((syncBytes >> 24) & 0xFF);
        headerData[1] = (byte) ((syncBytes >> 16) & 0xFF);
        headerData[2] = (byte) ((syncBytes >> 8) & 0xFF);
        headerData[3] = (byte) (syncBytes & 0xFF);
        bytesRead = 4;
        syncBytes = 0;
        return true;
      }
    }
    return false;
  }

  /** Parses the sample header. */
  @RequiresNonNull("output")
  private void parseHeader() {
    byte[] frameData = headerScratchBytes.getData();
    if (format == null) {
      format = DtsUtil.parseDtsFormat(frameData, formatId, language, null);
      output.format(format);
    }
    sampleSize = DtsUtil.getDtsFrameSize(frameData);
    // In this class a sample is an access unit (frame in DTS), but the format's sample rate
    // specifies the number of PCM audio samples per second.
    // Compute entirely in long arithmetic: the previous narrowing (int) cast
    // could silently truncate the microsecond value before widening it back.
    sampleDurationUs =
        C.MICROS_PER_SECOND * DtsUtil.parseDtsAudioSampleCount(frameData) / format.sampleRate;
  }
}
| apache-2.0 |
pinterest/secor | src/main/java/com/pinterest/secor/common/SecorConfig.java | 24422 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.pinterest.secor.common;
import com.google.common.base.Strings;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.ConfigurationUtils;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.TimeZone;
/**
 * One-stop shop for Secor configuration options.
 *
 * <p>Values are backed by an Apache Commons {@link PropertiesConfiguration} loaded from the
 * file named by the {@code config} system property, with JVM system properties taking
 * precedence over file entries. One instance is lazily created per thread.
 *
 * @author Pawel Garbacki (pawel@pinterest.com)
 */
public class SecorConfig {
    private static final Logger LOG = LoggerFactory.getLogger(SecorConfig.class);

    private final PropertiesConfiguration mProperties;

    // Lazily builds one SecorConfig per thread: loads the file named by the
    // "config" system property, then lets JVM system properties override it.
    private static final ThreadLocal<SecorConfig> mSecorConfig = new ThreadLocal<SecorConfig>() {
        @Override
        protected SecorConfig initialValue() {
            // Load the default configuration file first
            Properties systemProperties = System.getProperties();
            String configProperty = systemProperties.getProperty("config");
            PropertiesConfiguration properties;
            try {
                properties = new PropertiesConfiguration(configProperty);
            } catch (ConfigurationException e) {
                throw new RuntimeException("Error loading configuration from " + configProperty, e);
            }
            for (final Map.Entry<Object, Object> entry : systemProperties.entrySet()) {
                properties.setProperty(entry.getKey().toString(), entry.getValue());
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Configuration: {}", ConfigurationUtils.toString(properties));
            }
            return new SecorConfig(properties);
        }
    };

    /** Returns the configuration instance for the calling thread. */
    public static SecorConfig load() throws ConfigurationException {
        return mSecorConfig.get();
    }

    /**
     * Exposed for testability
     *
     * @param properties properties config
     */
    public SecorConfig(PropertiesConfiguration properties) {
        mProperties = properties;
    }

    // ----- Kafka: brokers, ZooKeeper and legacy consumer -----

    public String getKafkaSeedBrokerHost() { return getString("kafka.seed.broker.host"); }

    public int getKafkaSeedBrokerPort() { return getInt("kafka.seed.broker.port"); }

    public String getKafkaZookeeperPath() { return getString("kafka.zookeeper.path"); }

    public String getZookeeperQuorum() {
        return StringUtils.join(getStringArray("zookeeper.quorum"), ',');
    }

    public int getConsumerTimeoutMs() { return getInt("kafka.consumer.timeout.ms"); }

    public String getConsumerAutoOffsetReset() { return getString("kafka.consumer.auto.offset.reset"); }

    // ----- Kafka: new consumer -----

    public String[] getKafkaTopicList() { return getStringArray("kafka.new.consumer.topic.list"); }

    public String getNewConsumerAutoOffsetReset() { return getString("kafka.new.consumer.auto.offset.reset"); }

    public int getNewConsumerPollTimeoutSeconds() { return getInt("kafka.new.consumer.poll.timeout.seconds"); }

    public String getNewConsumerRequestTimeoutMs() { return getString("kafka.new.consumer.request.timeout.ms"); }

    public String getSslKeyPassword() { return getString("kafka.new.consumer.ssl.key.password"); }

    public String getSslKeystoreLocation() { return getString("kafka.new.consumer.ssl.keystore.location"); }

    public String getSslKeystorePassword() { return getString("kafka.new.consumer.ssl.keystore.password"); }

    public String getSslTruststoreLocation() { return getString("kafka.new.consumer.ssl.truststore.location"); }

    public String getSslTruststorePassword() { return getString("kafka.new.consumer.ssl.truststore.password"); }

    public String getIsolationLevel() { return getString("kafka.new.consumer.isolation.level"); }

    public String getMaxPollIntervalMs() { return getString("kafka.new.consumer.max.poll.interval.ms"); }

    public String getMaxPollRecords() { return getString("kafka.new.consumer.max.poll.records"); }

    public String getSaslClientCallbackHandlerClass() { return getString("kafka.new.consumer.sasl.client.callback.handler.class"); }

    public String getSaslJaasConfig() { return getString("kafka.new.consumer.sasl.jaas.config"); }

    public String getSaslKerberosServiceName() { return getString("kafka.new.consumer.sasl.kerberos.service.name"); }

    public String getSaslLoginCallbackHandlerClass() { return getString("kafka.new.consumer.sasl.login.callback.handler.class"); }

    public String getSaslLoginClass() { return getString("kafka.new.consumer.sasl.login.class"); }

    public String getSaslMechanism() { return getString("kafka.new.consumer.sasl.mechanism"); }

    public String getSecurityProtocol() { return getString("kafka.new.consumer.security.protocol"); }

    public String getSslEnabledProtocol() { return getString("kafka.new.consumer.ssl.enabled.protocols"); }

    public String getSslKeystoreType() { return getString("kafka.new.consumer.ssl.keystore.type"); }

    public String getSslProtocol() { return getString("kafka.new.consumer.ssl.protocol"); }

    public String getSslProvider() { return getString("kafka.new.consumer.ssl.provider"); }

    public String getSslTruststoreType() { return getString("kafka.new.consumer.ssl.truststore.type"); }

    public String getNewConsumerPartitionAssignmentStrategyClass() { return getString("kafka.new.consumer.partition.assignment.strategy.class"); }

    public String getPartitionAssignmentStrategy() { return getString("kafka.partition.assignment.strategy"); }

    public String getRebalanceMaxRetries() { return getString("kafka.rebalance.max.retries"); }

    public String getRebalanceBackoffMs() { return getString("kafka.rebalance.backoff.ms"); }

    public String getFetchMessageMaxBytes() { return getString("kafka.fetch.message.max.bytes"); }

    public String getSocketReceiveBufferBytes() { return getString("kafka.socket.receive.buffer.bytes"); }

    public String getFetchMinBytes() { return getString("kafka.fetch.min.bytes"); }

    public String getFetchMaxBytes() { return getString("kafka.fetch.max.bytes"); }

    public String getFetchWaitMaxMs() { return getString("kafka.fetch.wait.max.ms"); }

    public String getDualCommitEnabled() { return getString("kafka.dual.commit.enabled"); }

    public String getOffsetsStorage() { return getString("kafka.offsets.storage"); }

    public boolean useKafkaTimestamp() { return getBoolean("kafka.useTimestamp", false); }

    public String getKafkaMessageTimestampClass() { return getString("kafka.message.timestamp.className"); }

    public String getKafkaMessageIteratorClass() { return getString("kafka.message.iterator.className"); }

    public String getKafkaClientClass() { return getString("kafka.client.className"); }

    // ----- Secor core behavior -----

    public int getGeneration() { return getInt("secor.generation"); }

    public int getConsumerThreads() { return getInt("secor.consumer.threads"); }

    public int getMaxBadMessages() { return getInt("secor.consumer.max_bad_messages", 1000); }

    public long getMaxFileSizeBytes() { return getLong("secor.max.file.size.bytes"); }

    public long getMaxFileAgeSeconds() { return getLong("secor.max.file.age.seconds"); }

    public int getMaxActiveFiles() { return getInt("secor.max.file.count", -1); }

    public boolean getUploadOnShutdown() { return getBoolean("secor.upload.on.shutdown"); }

    public boolean getUploadLastSeenOffset() { return getBoolean("secor.upload.last.seen.offset", false); }

    public boolean getDeterministicUpload() { return getBoolean("secor.upload.deterministic"); }

    public long getMaxFileTimestampRangeMillis() { return getLong("secor.max.file.timestamp.range.millis"); }

    public long getMaxInputPayloadSizeBytes() { return getLong("secor.max.input.payload.size.bytes"); }

    public boolean getFileAgeYoungest() { return getBoolean("secor.file.age.youngest"); }

    public long getOffsetsPerPartition() { return getLong("secor.offsets.per.partition"); }

    public int getMessagesPerSecond() { return getInt("secor.messages.per.second"); }

    public String getS3FileSystem() { return getString("secor.s3.filesystem"); }

    public boolean getSeparateContainersForTopics() {
        // equalsIgnoreCase avoids the default-locale pitfall of toLowerCase()
        // while preserving the original "anything other than true is false" semantics.
        return getString("secor.swift.containers.for.each.topic").equalsIgnoreCase("true");
    }

    public String getSwiftContainer() { return getString("secor.swift.container"); }

    public String getSwiftPath() { return getString("secor.swift.path"); }

    public String getS3Bucket() { return getString("secor.s3.bucket"); }

    public String getS3Path() { return getString("secor.s3.path"); }

    public String getS3AlternativePath() { return getString("secor.s3.alternative.path"); }

    public String getS3AlterPathDate() { return getString("secor.s3.alter.path.date"); }

    /** Returns the fully-qualified S3 prefix, e.g. {@code s3://bucket/path}. */
    public String getS3Prefix() {
        return getS3FileSystem() + "://" + getS3Bucket() + "/" + getS3Path();
    }

    public String getLocalPath() { return getString("secor.local.path"); }

    public String getKafkaTopicFilter() { return getString("secor.kafka.topic_filter"); }

    public String getKafkaTopicBlacklist() { return getString("secor.kafka.topic_blacklist"); }

    public String getKafkaTopicUploadAtMinuteMarkFilter() { return getString("secor.kafka.upload_at_minute_mark.topic_filter"); }

    public int getUploadMinuteMark() { return getInt("secor.upload.minute_mark"); }

    public String getKafkaGroup() { return getString("secor.kafka.group"); }

    public int getZookeeperSessionTimeoutMs() { return getInt("zookeeper.session.timeout.ms"); }

    public int getZookeeperSyncTimeMs() { return getInt("zookeeper.sync.time.ms"); }

    public String getSchemaRegistryUrl() { return getString("schema.registry.url"); }

    public String getMessageParserClass() { return getString("secor.message.parser.class"); }

    public String getUploaderClass() { return getString("secor.upload.class", "com.pinterest.secor.uploader.Uploader"); }

    public String getUploadManagerClass() { return getString("secor.upload.manager.class"); }

    public String getMessageTransformerClass() { return getString("secor.message.transformer.class"); }

    public int getTopicPartitionForgetSeconds() { return getInt("secor.topic_partition.forget.seconds"); }

    public int getLocalLogDeleteAgeHours() { return getInt("secor.local.log.delete.age.hours"); }

    public String getFileExtension() { return getString("secor.file.extension"); }

    public int getOstrichPort() { return getInt("ostrich.port"); }

    // ----- Cloud provider settings -----

    public String getCloudService() { return getString("cloud.service"); }

    public String getAwsAccessKey() { return getString("aws.access.key"); }

    public String getAwsSecretKey() { return getString("aws.secret.key"); }

    public String getAwsSessionToken() { return getString("aws.session.token", ""); }

    public String getAwsEndpoint() { return getString("aws.endpoint"); }

    public String getAwsRole() { return getString("aws.role"); }

    public boolean getAwsClientPathStyleAccess() { return getBoolean("aws.client.pathstyleaccess", false); }

    public boolean getAwsProxyEnabled() { return getBoolean("aws.proxy.isEnabled"); }

    public String getAwsProxyHttpHost() { return getString("aws.proxy.http.host"); }

    public int getAwsProxyHttpPort() { return getInt("aws.proxy.http.port"); }

    public String getAwsRegion() { return getString("aws.region"); }

    public String getAwsSseType() { return getString("aws.sse.type"); }

    public String getAwsSseKmsKey() { return getString("aws.sse.kms.key"); }

    public String getAwsSseCustomerKey() { return getString("aws.sse.customer.key"); }

    public String getSwiftTenant() { return getString("swift.tenant"); }

    public String getSwiftRegion() { return getString("swift.region"); }

    public String getSwiftUsername() { return getString("swift.username"); }

    public String getSwiftPassword() { return getString("swift.password"); }

    public String getSwiftAuthUrl() { return getString("swift.auth.url"); }

    public String getSwiftPublic() { return getString("swift.public"); }

    public String getSwiftPort() { return getString("swift.port"); }

    public String getSwiftGetAuth() { return getString("swift.use.get.auth"); }

    public String getSwiftApiKey() { return getString("swift.api.key"); }

    public String getQuboleApiToken() { return getString("qubole.api.token"); }

    // ----- Monitoring / metrics -----

    public String getTsdbHostport() { return getString("tsdb.hostport"); }

    public String getStatsDHostPort() { return getString("statsd.hostport"); }

    public boolean getStatsDPrefixWithConsumerGroup() { return getBoolean("statsd.prefixWithConsumerGroup"); }

    public boolean getStatsdDogstatdsTagsEnabled() { return getBoolean("statsd.dogstatsd.tags.enabled"); }

    public String[] getStatsDDogstatsdConstantTags() { return getStringArray("statsd.dogstatsd.constant.tags"); }

    public String getMonitoringBlacklistTopics() { return getString("monitoring.blacklist.topics"); }

    public String getMonitoringPrefix() { return getString("monitoring.prefix"); }

    public long getMonitoringIntervalSeconds() { return getLong("monitoring.interval.seconds"); }

    // ----- Message timestamp extraction -----

    public String getMessageTimestampName() { return getString("message.timestamp.name"); }

    public String getMessageTimestampNameSeparator() { return getString("message.timestamp.name.separator"); }

    public int getMessageTimestampId() { return getInt("message.timestamp.id"); }

    public String getMessageTimestampType() { return getString("message.timestamp.type"); }

    public String getMessageTimestampInputPattern() { return getString("message.timestamp.input.pattern"); }

    public boolean isMessageTimestampRequired() { return getBoolean("message.timestamp.required"); }

    public long getMessageTimestampSkewMaxMs() { return getLong("message.timestamp.skew.max.ms"); }

    public String getMessageSplitFieldName() { return getString("message.split.field.name"); }

    // ----- Finalization / Hive / Qubole -----

    public int getFinalizerLookbackPeriods() { return getInt("secor.finalizer.lookback.periods", 10); }

    public String getHivePrefix() { return getString("secor.hive.prefix"); }

    /** Returns the Hive table name override for {@code topic}, or null if none configured. */
    public String getHiveTableName(String topic) {
        String key = "secor.hive.table.name." + topic;
        return mProperties.getString(key, null);
    }

    public boolean getQuboleEnabled() { return getBoolean("secor.enable.qubole"); }

    public long getQuboleTimeoutMs() { return getLong("secor.qubole.timeout.ms"); }

    // ----- File writing -----

    public String getCompressionCodec() { return getString("secor.compression.codec"); }

    public int getMaxMessageSizeBytes() { return getInt("secor.max.message.size.bytes"); }

    public String getFileReaderWriterFactory() { return getString("secor.file.reader.writer.factory"); }

    /** Returns the reader delimiter; must be at most one character. */
    public String getFileReaderDelimiter() {
        String readerDelimiter = getString("secor.file.reader.Delimiter");
        if (readerDelimiter.length() > 1) {
            throw new RuntimeException("secor.file.reader.Delimiter length can not be greater than 1 character");
        }
        return readerDelimiter;
    }

    /** Returns the writer delimiter; must be at most one character. */
    public String getFileWriterDelimiter() {
        String writerDelimiter = getString("secor.file.writer.Delimiter");
        if (writerDelimiter.length() > 1) {
            throw new RuntimeException("secor.file.writer.Delimiter length can not be greater than 1 character");
        }
        return writerDelimiter;
    }

    public String getZookeeperPath() { return getString("secor.zookeeper.path"); }

    // ----- Google Cloud Storage -----

    public String getGsCredentialsPath() { return getString("secor.gs.credentials.path"); }

    public String getGsBucket() { return getString("secor.gs.bucket"); }

    public String getGsPath() { return getString("secor.gs.path"); }

    public double getGsRateLimit() { return getDouble("secor.gs.tasks.ratelimit.pr.second", 10.0); }

    public int getGsThreadPoolSize() { return getInt("secor.gs.threadpool.fixed.size", 256); }

    public int getGsConnectTimeoutInMs() { return getInt("secor.gs.connect.timeout.ms", 3 * 60000); }

    public int getGsReadTimeoutInMs() { return getInt("secor.gs.read.timeout.ms", 3 * 60000); }

    public boolean getGsDirectUpload() { return getBoolean("secor.gs.upload.direct"); }

    public int getFinalizerDelaySeconds() { return getInt("partitioner.finalizer.delay.seconds"); }

    public boolean getS3MD5HashPrefix() { return getBoolean("secor.s3.prefix.md5hash"); }

    // ----- Azure -----

    public String getAzureEndpointsProtocol() { return getString("secor.azure.endpoints.protocol"); }

    public String getAzureAccountName() { return getString("secor.azure.account.name"); }

    public String getAzureAccountKey() { return getString("secor.azure.account.key"); }

    public String getAzureContainer() { return getString("secor.azure.container.name"); }

    public String getAzurePath() { return getString("secor.azure.path"); }

    // ----- Per-topic maps (all delegate to getPropertyMapForPrefix) -----

    public Map<String, String> getProtobufMessageClassPerTopic() {
        return getPropertyMapForPrefix("secor.protobuf.message.class");
    }

    public Map<String, String> getMessageFormatPerTopic() {
        return getPropertyMapForPrefix("secor.topic.message.format");
    }

    public Map<String, String> getThriftMessageClassPerTopic() {
        return getPropertyMapForPrefix("secor.thrift.message.class");
    }

    /** Returns the configured parser time zone, defaulting to UTC when unset or blank. */
    public TimeZone getTimeZone() {
        String timezone = getString("secor.parser.timezone");
        return Strings.isNullOrEmpty(timezone) ? TimeZone.getTimeZone("UTC") : TimeZone.getTimeZone(timezone);
    }

    // ----- Typed accessors -----

    public boolean getBoolean(String name, boolean defaultValue) {
        return mProperties.getBoolean(name, defaultValue);
    }

    public boolean getBoolean(String name) {
        return mProperties.getBoolean(name);
    }

    /** Throws a descriptive RuntimeException if the required option is absent. */
    public void checkProperty(String name) {
        if (!mProperties.containsKey(name)) {
            throw new RuntimeException("Failed to find required configuration option '" +
                                       name + "'.");
        }
    }

    public String getString(String name) {
        checkProperty(name);
        return mProperties.getString(name);
    }

    public String getString(String name, String defaultValue) {
        return mProperties.getString(name, defaultValue);
    }

    public int getInt(String name) {
        checkProperty(name);
        return mProperties.getInt(name);
    }

    public int getInt(String name, int defaultValue) {
        return mProperties.getInt(name, defaultValue);
    }

    public double getDouble(String name, double defaultValue) {
        return mProperties.getDouble(name, defaultValue);
    }

    public long getLong(String name) {
        // Check explicitly, for an error message consistent with getString/getInt.
        checkProperty(name);
        return mProperties.getLong(name);
    }

    public String[] getStringArray(String name) {
        return mProperties.getStringArray(name);
    }

    // Optional: may return null when the option is not configured.
    public String getThriftProtocolClass() {
        return mProperties.getString("secor.thrift.protocol.class");
    }

    public String getMetricsCollectorClass() { return getString("secor.monitoring.metrics.collector.class"); }

    public boolean getMicroMeterCollectorJmxEnabled() {
        return getBoolean("secor.monitoring.metrics.collector.micrometer.jmx.enabled", false);
    }

    public boolean getMicroMeterCollectorStatsdEnabled() {
        return getBoolean("secor.monitoring.metrics.collector.micrometer.statsd.enabled", false);
    }

    public boolean getMicroMeterCollectorPrometheusEnabled() {
        return getBoolean("secor.monitoring.metrics.collector.micrometer.prometheus.enabled", false);
    }

    public int getMicroMeterCacheSize() {
        return getInt("secor.monitoring.metrics.collector.micrometer.cache.size", 500);
    }

    /**
     * This method is used for fetching all the properties which start with the given prefix.
     * It returns a Map of all those key-val.
     *
     * e.g.
     * a.b.c=val1
     * a.b.d=val2
     * a.b.e=val3
     *
     * If prefix is a.b then,
     * These will be fetched as a map {c = val1, d = val2, e = val3}
     *
     * @param prefix property prefix
     * @return map from suffix (prefix stripped) to value
     */
    public Map<String, String> getPropertyMapForPrefix(String prefix) {
        Iterator<String> keys = mProperties.getKeys(prefix);
        Map<String, String> map = new HashMap<String, String>();
        while (keys.hasNext()) {
            String key = keys.next();
            String value = mProperties.getString(key);
            map.put(key.substring(prefix.length() + 1), value);
        }
        return map;
    }

    public Map<String, String> getORCMessageSchema() {
        return getPropertyMapForPrefix("secor.orc.message.schema");
    }

    public Map<String, String> getAvroMessageSchema() {
        return getPropertyMapForPrefix("secor.avro.message.schema");
    }

    public String getORCSchemaProviderClass() { return getString("secor.orc.schema.provider"); }
}
| apache-2.0 |
youngwookim/presto | presto-main/src/main/java/io/prestosql/execution/ClusterSizeMonitor.java | 7187 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.execution;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import io.airlift.units.Duration;
import io.prestosql.execution.scheduler.NodeSchedulerConfig;
import io.prestosql.metadata.AllNodes;
import io.prestosql.metadata.InternalNodeManager;
import io.prestosql.spi.PrestoException;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.concurrent.GuardedBy;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.function.Consumer;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static io.airlift.concurrent.Threads.threadsNamed;
import static io.airlift.units.Duration.nanosSince;
import static io.prestosql.spi.StandardErrorCode.GENERIC_INSUFFICIENT_RESOURCES;
import static io.prestosql.spi.StandardErrorCode.SERVER_STARTING_UP;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
/**
 * Monitors the number of active worker nodes in the cluster and gates query
 * execution until the configured minimum worker counts are available.
 *
 * <p>Thread-safety: all mutable state (current worker count, pending futures,
 * the initialization flag) is guarded by {@code this}.
 */
public class ClusterSizeMonitor
{
    private final InternalNodeManager nodeManager;
    private final boolean includeCoordinator;
    private final int initializationMinCount;
    private final Duration initializationMaxWait;
    private final int executionMinCount;
    private final Duration executionMaxWait;
    private final ScheduledExecutorService executor;
    // Captured at construction so the initialization grace period is measured
    // from server startup.
    private final long createNanos = System.nanoTime();
    // Stable reference so the same listener instance can be removed in stop().
    private final Consumer<AllNodes> listener = this::updateAllNodes;
    @GuardedBy("this")
    private int currentCount;
    @GuardedBy("this")
    private final List<SettableFuture<?>> futures = new ArrayList<>();
    @GuardedBy("this")
    private boolean minimumWorkerRequirementMet;
    @Inject
    public ClusterSizeMonitor(InternalNodeManager nodeManager, NodeSchedulerConfig nodeSchedulerConfig, QueryManagerConfig queryManagerConfig)
    {
        this(
                nodeManager,
                requireNonNull(nodeSchedulerConfig, "nodeSchedulerConfig is null").isIncludeCoordinator(),
                requireNonNull(queryManagerConfig, "queryManagerConfig is null").getInitializationRequiredWorkers(),
                queryManagerConfig.getInitializationTimeout(),
                queryManagerConfig.getRequiredWorkers(),
                queryManagerConfig.getRequiredWorkersMaxWait());
    }
    public ClusterSizeMonitor(
            InternalNodeManager nodeManager,
            boolean includeCoordinator,
            int initializationMinCount,
            Duration initializationMaxWait,
            int executionMinCount,
            Duration executionMaxWait)
    {
        this.nodeManager = requireNonNull(nodeManager, "nodeManager is null");
        this.includeCoordinator = includeCoordinator;
        checkArgument(initializationMinCount >= 0, "initializationMinCount is negative");
        this.initializationMinCount = initializationMinCount;
        this.initializationMaxWait = requireNonNull(initializationMaxWait, "initializationMaxWait is null");
        checkArgument(executionMinCount >= 0, "executionMinCount is negative");
        this.executionMinCount = executionMinCount;
        this.executionMaxWait = requireNonNull(executionMaxWait, "executionMaxWait is null");
        this.executor = newSingleThreadScheduledExecutor(threadsNamed("node-monitor-%s"));
    }
    @PostConstruct
    public void start()
    {
        nodeManager.addNodeChangeListener(listener);
        // Seed the count immediately rather than waiting for the first change event.
        updateAllNodes(nodeManager.getAllNodes());
    }
    @PreDestroy
    public void stop()
    {
        nodeManager.removeNodeChangeListener(listener);
    }
    /**
     * Throws {@code SERVER_STARTING_UP} while the cluster is still within its
     * startup grace period and below the initialization worker count. Once
     * either condition clears, the requirement is considered permanently met.
     */
    public synchronized void verifyInitialMinimumWorkersRequirement()
    {
        if (minimumWorkerRequirementMet) {
            return;
        }
        if (currentCount < initializationMinCount && nanosSince(createNanos).compareTo(initializationMaxWait) < 0) {
            throw new PrestoException(SERVER_STARTING_UP, format("Cluster is still initializing, there are insufficient active worker nodes (%s) to run query", currentCount));
        }
        minimumWorkerRequirementMet = true;
    }
    /**
     * Returns a listener that completes when the minimum number of workers for the cluster has been met.
     * Note: caller should not add a listener using the direct executor, as this can delay the
     * notifications for other listeners.
     */
    public synchronized ListenableFuture<?> waitForMinimumWorkers()
    {
        if (currentCount >= executionMinCount) {
            return immediateFuture(null);
        }
        SettableFuture<?> future = SettableFuture.create();
        futures.add(future);
        // if future does not finish in wait period, complete with an exception
        ScheduledFuture<?> timeoutTask = executor.schedule(
                () -> {
                    synchronized (this) {
                        future.setException(new PrestoException(
                                GENERIC_INSUFFICIENT_RESOURCES,
                                format("Insufficient active worker nodes. Waited %s for at least %s workers, but only %s workers are active", executionMaxWait, executionMinCount, currentCount)));
                    }
                },
                executionMaxWait.toMillis(),
                MILLISECONDS);
        // remove future if finished (e.g., canceled, timed out)
        future.addListener(() -> {
            timeoutTask.cancel(true);
            removeFuture(future);
        }, executor);
        return future;
    }
    private synchronized void removeFuture(SettableFuture<?> future)
    {
        futures.remove(future);
    }
    private synchronized void updateAllNodes(AllNodes allNodes)
    {
        if (includeCoordinator) {
            currentCount = allNodes.getActiveNodes().size();
        }
        else {
            currentCount = Sets.difference(allNodes.getActiveNodes(), allNodes.getActiveCoordinators()).size();
        }
        if (currentCount >= executionMinCount) {
            // Snapshot and clear under the lock, then complete the futures off-lock
            // on the executor so listeners never run while holding the monitor.
            ImmutableList<SettableFuture<?>> listeners = ImmutableList.copyOf(futures);
            futures.clear();
            // Fix: lambda parameter renamed from "listener", which shadowed the
            // Consumer<AllNodes> field of the same name.
            executor.submit(() -> listeners.forEach(future -> future.set(null)));
        }
    }
}
| apache-2.0 |
mythguided/hydra | hydra-data/src/test/java/com/addthis/hydra/data/tree/prop/DataCopyTest.java | 5101 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.addthis.hydra.data.tree.prop;
import java.util.List;
import com.addthis.bundle.core.Bundle;
import com.addthis.bundle.core.list.ListBundle;
import com.addthis.bundle.util.AutoField;
import com.addthis.bundle.value.ValueArray;
import com.addthis.bundle.value.ValueFactory;
import com.addthis.bundle.value.ValueMap;
import com.addthis.codec.config.Configs;
import com.addthis.hydra.data.tree.DataTreeNode;
import com.addthis.hydra.data.tree.DataTreeNodeUpdater;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
public class DataCopyTest {
    /** Builds a minimal updater exposing only the supplied bundle; counts are zero. */
    private static DataTreeNodeUpdater generateUpdater(Bundle bundle) {
        return new DataTreeNodeUpdater() {
            @Override public Bundle getBundle() {
                return bundle;
            }
            @Override public int getCountValue() {
                return 0;
            }
            @Override public long getAssignmentValue() {
                return 0;
            }
        };
    }
    /** "key" config copies a single bundle field value into the attachment. */
    @Test
    public void copyValue() throws Exception {
        AutoField fooField = AutoField.newAutoField("foo");
        DataCopy.Config cfg = Configs.decodeObject(DataCopy.Config.class, "key: {foo: foo}");
        DataCopy data = cfg.newInstance();
        Bundle b = new ListBundle();
        fooField.setValue(b, ValueFactory.create("bar"));
        assertNull(data.getValue("foo"));
        data.updateChildData(generateUpdater(b), null, cfg);
        assertEquals(ValueFactory.create("bar"), data.getValue("foo"));
    }
    /** "fields" config stores one field's value under another field's value. */
    @Test
    public void copyFields() throws Exception {
        AutoField fooField = AutoField.newAutoField("foo");
        AutoField barField = AutoField.newAutoField("bar");
        DataCopy.Config cfg = Configs.decodeObject(DataCopy.Config.class, "fields: {foo: bar}");
        DataCopy data = cfg.newInstance();
        Bundle b = new ListBundle();
        fooField.setValue(b, ValueFactory.create("hello"));
        barField.setValue(b, ValueFactory.create("world"));
        assertNull(data.getValue("foo"));
        data.updateChildData(generateUpdater(b), null, cfg);
        assertEquals(ValueFactory.create("world"), data.getValue("hello"));
    }
    /** "set" config creates one child node per distinct array element. */
    @Test
    public void copySet() throws Exception {
        AutoField fooField = AutoField.newAutoField("foo");
        DataCopy.Config cfg = Configs.decodeObject(DataCopy.Config.class, "set: {foo: bar}");
        DataCopy data = cfg.newInstance();
        Bundle b = new ListBundle();
        ValueArray values = ValueFactory.createArray(3);
        values.add(ValueFactory.create("foo"));
        values.add(ValueFactory.create("foo"));
        values.add(ValueFactory.create("bar"));
        values.add(ValueFactory.create("baz"));
        fooField.setValue(b, values);
        assertNull(data.getValue("foo"));
        data.updateChildData(generateUpdater(b), null, cfg);
        // Duplicate "foo" collapses, leaving three distinct children.
        List<DataTreeNode> children = data.getNodes(null, "");
        assertEquals(3, children.size());
        assertNotNull(children.get(0).getNode("bar"));
        assertNotNull(children.get(1).getNode("bar"));
        assertNotNull(children.get(2).getNode("bar"));
    }
    /** "map" config creates a key node per map entry with the value as a grandchild. */
    @Test
    public void copyMap() throws Exception {
        AutoField fooField = AutoField.newAutoField("foo");
        DataCopy.Config cfg = Configs.decodeObject(DataCopy.Config.class, "map: foo");
        DataCopy data = cfg.newInstance();
        Bundle b = new ListBundle();
        ValueMap payload = ValueFactory.createMap();
        payload.put("bar", ValueFactory.create("baz"));
        payload.put("hello", ValueFactory.create("world"));
        fooField.setValue(b, payload);
        assertNull(data.getValue("foo"));
        assertEquals(0, data.getNodes(null, "").size());
        data.updateChildData(generateUpdater(b), null, cfg);
        List<DataTreeNode> children = data.getNodes(null, "");
        assertEquals(2, children.size());
        // Node order is unspecified; order the pair by name before asserting.
        DataTreeNode bar = children.get(0);
        DataTreeNode hello = children.get(1);
        if (bar.getName().compareTo(hello.getName()) > 0) {
            DataTreeNode swap = bar;
            bar = hello;
            hello = swap;
        }
        assertEquals("bar", bar.getName());
        assertEquals("hello", hello.getName());
        assertNotNull(bar.getNode("baz"));
        assertNotNull(hello.getNode("world"));
    }
}
| apache-2.0 |
zhouluoyang/openfire | src/plugins/restAPI/src/java/org/jivesoftware/openfire/plugin/rest/entity/SessionEntities.java | 607 | package org.jivesoftware.openfire.plugin.rest.entity;
import java.util.List;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement(name = "sessions")
public class SessionEntities {
    // Private (was package-private): callers must go through the accessors,
    // which carry the JAXB mapping.
    private List<SessionEntity> sessions;
    /** No-arg constructor required by JAXB. */
    public SessionEntities() {
    }
    public SessionEntities(List<SessionEntity> sessions) {
        this.sessions = sessions;
    }
    /** Marshalled as repeated {@code <session>} elements under {@code <sessions>}. */
    @XmlElement(name = "session")
    public List<SessionEntity> getSessions() {
        return sessions;
    }
    public void setSessions(List<SessionEntity> sessions) {
        this.sessions = sessions;
    }
}
| apache-2.0 |
tushargosavi/apex-core | engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanTest.java | 43836 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.stram.plan.logical;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import javax.validation.Valid;
import javax.validation.Validation;
import javax.validation.ValidationException;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
import javax.validation.constraints.AssertTrue;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.esotericsoftware.kryo.DefaultSerializer;
import com.esotericsoftware.kryo.serializers.JavaSerializer;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.datatorrent.api.AffinityRule;
import com.datatorrent.api.AffinityRule.Type;
import com.datatorrent.api.AffinityRulesSet;
import com.datatorrent.api.Attribute;
import com.datatorrent.api.Context.DAGContext;
import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.Context.PortContext;
import com.datatorrent.api.DAG.Locality;
import com.datatorrent.api.DefaultInputPort;
import com.datatorrent.api.DefaultOutputPort;
import com.datatorrent.api.InputOperator;
import com.datatorrent.api.Module;
import com.datatorrent.api.Operator;
import com.datatorrent.api.Partitioner;
import com.datatorrent.api.Sink;
import com.datatorrent.api.StreamCodec;
import com.datatorrent.api.StringCodec;
import com.datatorrent.api.annotation.InputPortFieldAnnotation;
import com.datatorrent.api.annotation.OperatorAnnotation;
import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
import com.datatorrent.common.partitioner.StatelessPartitioner;
import com.datatorrent.common.util.BaseOperator;
import com.datatorrent.common.util.DefaultDelayOperator;
import com.datatorrent.netlet.util.Slice;
import com.datatorrent.stram.engine.GenericTestOperator;
import com.datatorrent.stram.engine.TestGeneratorInputOperator;
import com.datatorrent.stram.engine.TestNonOptionalOutportInputOperator;
import com.datatorrent.stram.engine.TestOutputOperator;
import com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta;
import com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta;
import com.datatorrent.stram.plan.logical.visitor.CycleDetector;
import com.datatorrent.stram.support.StramTestSupport.MemoryStorageAgent;
import com.datatorrent.stram.support.StramTestSupport.RegexMatcher;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
public class LogicalPlanTest
{
private LogicalPlan dag;
  @Before
  public void setUp()
  {
    // Fresh, empty DAG for every test; each test mutates it freely.
    dag = new LogicalPlan();
  }
  // Test operator with two output ports (good/bad tuple split), used to
  // exercise stream wiring in the serialization test.
  public static class ValidationOperator extends BaseOperator
  {
    public final transient DefaultOutputPort<Object> goodOutputPort = new DefaultOutputPort<>();
    public final transient DefaultOutputPort<Object> badOutputPort = new DefaultOutputPort<>();
  }
  // Sink operator with a single required input port; tuples are discarded.
  public static class CounterOperator extends BaseOperator
  {
    public final transient InputPort<Object> countInputPort = new DefaultInputPort<Object>()
    {
      @Override
      public final void process(Object payload)
      {
        // Intentionally empty: only port connectivity matters to these tests.
      }
    };
  }
  // Round-trips a small DAG through LogicalPlan.write/read and verifies that
  // operators, root detection, and per-operator attributes survive intact.
  @Test
  public void testLogicalPlanSerialization() throws Exception
  {
    dag.setAttribute(OperatorContext.STORAGE_AGENT, new MemoryStorageAgent());
    ValidationOperator validationNode = dag.addOperator("validationNode", ValidationOperator.class);
    CounterOperator countGoodNode = dag.addOperator("countGoodNode", CounterOperator.class);
    CounterOperator countBadNode = dag.addOperator("countBadNode", CounterOperator.class);
    //ConsoleOutputOperator echoBadNode = dag.addOperator("echoBadNode", ConsoleOutputOperator.class);
    // good tuples to counter operator
    dag.addStream("goodTuplesStream", validationNode.goodOutputPort, countGoodNode.countInputPort);
    // bad tuples to separate stream and echo operator
    // (stream with 2 outputs)
    dag.addStream("badTuplesStream", validationNode.badOutputPort, countBadNode.countInputPort);
    Assert.assertEquals("number root operators", 1, dag.getRootOperators().size());
    Assert.assertEquals("root operator id", "validationNode", dag.getRootOperators().get(0).getName());
    // Attribute set before serialization; checked again on the clone below.
    dag.getContextAttributes(countGoodNode).put(OperatorContext.SPIN_MILLIS, 10);
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    LogicalPlan.write(dag, bos);
    ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
    LogicalPlan dagClone = LogicalPlan.read(bis);
    Assert.assertNotNull(dagClone);
    Assert.assertEquals("number operators in clone", dag.getAllOperators().size(), dagClone.getAllOperators().size());
    Assert.assertEquals("number root operators in clone", 1, dagClone.getRootOperators().size());
    Assert.assertTrue("root operator in operators", dagClone.getAllOperators().contains(dagClone.getRootOperators().get(0)));
    Operator countGoodNodeClone = dagClone.getOperatorMeta("countGoodNode").getOperator();
    Assert.assertEquals("", new Integer(10), dagClone.getContextAttributes(countGoodNodeClone).get(OperatorContext.SPIN_MILLIS));
  }
@Test
public void testDeleteOperator()
{
TestGeneratorInputOperator input = dag.addOperator("input1", TestGeneratorInputOperator.class);
GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
dag.addStream("s0", input.outport, o1.inport1);
StreamMeta s1 = dag.addStream("s1", o1.outport1, o2.inport1);
dag.validate();
Assert.assertEquals("", 3, dag.getAllOperators().size());
dag.removeOperator(o2);
s1.remove();
dag.validate();
Assert.assertEquals("", 2, dag.getAllOperators().size());
}
  // Input operator decorated with a spread of javax.validation constraints
  // (field, getter, cross-field, and nested-bean) used to verify that DAG
  // validation surfaces the same violations as standalone bean validation.
  public static class ValidationTestOperator extends BaseOperator implements InputOperator
  {
    @NotNull
    @Pattern(regexp = ".*malhar.*", message = "Value has to contain 'malhar'!")
    private String stringField1;
    @Min(2)
    private int intField1;
    // Cross-field constraint: evaluated as pseudo-property "validConfiguration".
    @AssertTrue(message = "stringField1 should end with intField1")
    private boolean isValidConfiguration()
    {
      return stringField1.endsWith(String.valueOf(intField1));
    }
    private String getterProperty2 = "";
    @NotNull
    public String getProperty2()
    {
      return getterProperty2;
    }
    public void setProperty2(String s)
    {
      // annotations need to be on the getter
      getterProperty2 = s;
    }
    private String[] stringArrayField;
    public String[] getStringArrayField()
    {
      return stringArrayField;
    }
    public void setStringArrayField(String[] stringArrayField)
    {
      this.stringArrayField = stringArrayField;
    }
    // Nested bean: validated only because the enclosing field is marked @Valid.
    public class Nested
    {
      @NotNull
      private String property = "";
      public String getProperty()
      {
        return property;
      }
      public void setProperty(String property)
      {
        this.property = property;
      }
    }
    @Valid
    private final Nested nestedBean = new Nested();
    private String stringProperty2;
    public String getStringProperty2()
    {
      return stringProperty2;
    }
    public void setStringProperty2(String stringProperty2)
    {
      this.stringProperty2 = stringProperty2;
    }
    private Map<String, String> mapProperty = Maps.newHashMap();
    public Map<String, String> getMapProperty()
    {
      return mapProperty;
    }
    public void setMapProperty(Map<String, String> mapProperty)
    {
      this.mapProperty = mapProperty;
    }
    @Override
    public void emitTuples()
    {
      // Emit no tuples
    }
  }
  // Walks one ValidationTestOperator instance through a sequence of invalid
  // states (field, cross-field, getter, nested-bean) and expects DAG
  // validation to fail at each stage, matching standalone bean validation.
  @Test
  public void testOperatorValidation()
  {
    ValidationTestOperator bean = new ValidationTestOperator();
    bean.stringField1 = "malhar1";
    bean.intField1 = 1;
    // ensure validation standalone produces expected results
    ValidatorFactory factory =
        Validation.buildDefaultValidatorFactory();
    Validator validator = factory.getValidator();
    Set<ConstraintViolation<ValidationTestOperator>> constraintViolations =
        validator.validate(bean);
    Assert.assertEquals("" + constraintViolations,1, constraintViolations.size());
    ConstraintViolation<ValidationTestOperator> cv = constraintViolations.iterator().next();
    Assert.assertEquals("", bean.intField1, cv.getInvalidValue());
    Assert.assertEquals("", "intField1", cv.getPropertyPath().toString());
    // ensure DAG validation produces matching results
    bean = dag.addOperator("testOperator", bean);
    try {
      dag.validate();
      Assert.fail("should throw ConstraintViolationException");
    } catch (ValidationException e) {
      //Assert.assertEquals("violation details", constraintViolations, e.getConstraintViolations());
      //String expRegex = ".*ValidationTestOperator\\{name=null}, propertyPath='intField1', message='must be greater than or equal to 2',.*value=1}]";
      //Assert.assertThat("exception message", e.getMessage(), RegexMatcher.matches(expRegex));
    }
    try {
      // intField1 now satisfies @Min(2) but violates the @AssertTrue
      // cross-field constraint (stringField1 ends with "1", not "3").
      bean.intField1 = 3;
      dag.validate();
      Assert.fail("should throw ConstraintViolationException");
    } catch (ValidationException e) {
      //ConstraintViolation<?> cv2 = e.getConstraintViolations().iterator().next();
      //Assert.assertEquals("" + e.getConstraintViolations(), 1, constraintViolations.size());
      //Assert.assertEquals("", false, cv2.getInvalidValue());
      //Assert.assertEquals("", "validConfiguration", cv2.getPropertyPath().toString());
    }
    bean.stringField1 = "malhar3";
    // annotated getter
    try {
      bean.getterProperty2 = null;
      dag.validate();
      Assert.fail("should throw ConstraintViolationException");
    } catch (ValidationException e) {
      //ConstraintViolation<?> cv2 = e.getConstraintViolations().iterator().next();
      //Assert.assertEquals("" + e.getConstraintViolations(), 1, constraintViolations.size());
      //Assert.assertEquals("", null, cv2.getInvalidValue());
      //Assert.assertEquals("", "property2", cv2.getPropertyPath().toString());
    }
    bean.getterProperty2 = "";
    // nested property
    try {
      bean.nestedBean.property = null;
      dag.validate();
      Assert.fail("should throw ConstraintViolationException");
    } catch (ValidationException e) {
      //ConstraintViolation<?> cv2 = e.getConstraintViolations().iterator().next();
      //Assert.assertEquals("" + e.getConstraintViolations(), 1, constraintViolations.size());
      //Assert.assertEquals("", null, cv2.getInvalidValue());
      //Assert.assertEquals("", "nestedBean.property", cv2.getPropertyPath().toString());
    }
    bean.nestedBean.property = "";
    // all valid
    dag.validate();
  }
  // Declared non-partitionable via @OperatorAnnotation; the tests verify that
  // the platform rejects partitioning configuration for such operators.
  @OperatorAnnotation(partitionable = false)
  public static class TestOperatorAnnotationOperator extends BaseOperator
  {
    @InputPortFieldAnnotation(optional = true)
    public final transient DefaultInputPort<Object> input1 = new DefaultInputPort<Object>()
    {
      @Override
      public void process(Object tuple)
      {
      }
    };
  }
class NoInputPortOperator extends BaseOperator
{
}
@Test
public void testValidationForNonInputRootOperator()
{
NoInputPortOperator x = dag.addOperator("x", new NoInputPortOperator());
try {
dag.validate();
Assert.fail("should fail because root operator is not input operator");
} catch (ValidationException e) {
// expected
}
}
  // Annotated non-partitionable yet implements Partitioner: DAG validation
  // must flag the contradiction.
  @OperatorAnnotation(partitionable = false)
  public static class TestOperatorAnnotationOperator2 extends BaseOperator implements Partitioner<TestOperatorAnnotationOperator2>
  {
    @Override
    public Collection<Partition<TestOperatorAnnotationOperator2>> definePartitions(Collection<Partition<TestOperatorAnnotationOperator2>> partitions, PartitioningContext context)
    {
      // Never invoked by these tests; only the annotation conflict matters.
      return null;
    }
    @Override
    public void partitioned(Map<Integer, Partition<TestOperatorAnnotationOperator2>> partitions)
    {
    }
  }
  // Verifies that @OperatorAnnotation(partitionable = false) conflicts with
  // (a) an explicit PARTITIONER attribute, (b) PARTITION_PARALLEL on an input
  // port, and (c) the operator itself implementing Partitioner.
  @Test
  public void testOperatorAnnotation()
  {
    TestGeneratorInputOperator input = dag.addOperator("input1", TestGeneratorInputOperator.class);
    TestOperatorAnnotationOperator operator = dag.addOperator("operator1", TestOperatorAnnotationOperator.class);
    dag.addStream("Connection", input.outport, operator.input1);
    dag.setOperatorAttribute(operator, OperatorContext.PARTITIONER, new StatelessPartitioner<TestOperatorAnnotationOperator>(2));
    try {
      dag.validate();
      Assert.fail("should raise operator is not partitionable for operator1");
    } catch (ValidationException e) {
      //Assert.assertEquals("", "Operator " + dag.getMeta(operator).getName() + " provides partitioning capabilities but the annotation on the operator class declares it non partitionable!", e.getMessage());
    }
    // Clear the partitioner, then trigger the conflict via PARTITION_PARALLEL.
    dag.setOperatorAttribute(operator, OperatorContext.PARTITIONER, null);
    dag.setInputPortAttribute(operator.input1, PortContext.PARTITION_PARALLEL, true);
    try {
      dag.validate();
      Assert.fail("should raise operator is not partitionable for operator1");
    } catch (ValidationException e) {
      //Assert.assertEquals("", "Operator " + dag.getMeta(operator).getName() + " is not partitionable but PARTITION_PARALLEL attribute is set", e.getMessage());
    }
    dag.setInputPortAttribute(operator.input1, PortContext.PARTITION_PARALLEL, false);
    dag.validate();
    dag.removeOperator(operator);
    // Case (c): the operator class itself implements Partitioner.
    TestOperatorAnnotationOperator2 operator2 = dag.addOperator("operator2", TestOperatorAnnotationOperator2.class);
    try {
      dag.validate();
      Assert.fail("should raise operator is not partitionable for operator2");
    } catch (ValidationException e) {
      //Assert.assertEquals("Operator " + dag.getMeta(operator2).getName() + " provides partitioning capabilities but the annotation on the operator class declares it non partitionable!", e.getMessage());
    }
  }
  // Null operator names must be rejected eagerly by addOperator.
  @Test(expected = IllegalArgumentException.class)
  public void testNullOperatorName()
  {
    dag.addOperator(null, BaseOperator.class);
  }
  // Empty operator names must be rejected eagerly by addOperator.
  @Test(expected = IllegalArgumentException.class)
  public void testEmptyOperatorName()
  {
    dag.addOperator("", BaseOperator.class);
  }
  // Null stream ids must be rejected eagerly by addStream.
  @Test(expected = IllegalArgumentException.class)
  public void testNullStreamId()
  {
    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
    dag.addStream(null, o1.outport1, o1.inport1, o1.inport2 );
  }
  // Empty stream ids must be rejected eagerly by addStream.
  @Test(expected = IllegalArgumentException.class)
  public void testEmptyStreamId()
  {
    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
    dag.addStream("", o1.outport1, o1.inport1 );
  }
@Test(expected = IllegalArgumentException.class)
public void testEmptyModuleName()
{
Module testModule = mock(Module.class);
dag.addModule("", testModule);
}
@Test(expected = IllegalArgumentException.class)
public void testNullModuleName()
{
Module testModule = mock(Module.class);
dag.addModule(null, testModule);
}
@Test
public void testPortConnectionValidation()
{
TestNonOptionalOutportInputOperator input = dag.addOperator("input1", TestNonOptionalOutportInputOperator.class);
try {
dag.validate();
Assert.fail("should raise port not connected for input1.outputPort1");
} catch (ValidationException e) {
//Assert.assertEquals("", "Output port connection required: input1.outport1", e.getMessage());
}
GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
dag.addStream("stream1", input.outport1, o1.inport1);
dag.validate();
// required input
dag.addOperator("counter", CounterOperator.class);
try {
dag.validate();
} catch (ValidationException e) {
//Assert.assertEquals("", "Input port connection required: counter.countInputPort", e.getMessage());
}
}
  // An AT_MOST_ONCE upstream operator forbids an explicit AT_LEAST_ONCE
  // downstream; with no explicit mode, the downstream inherits AT_MOST_ONCE.
  @Test
  public void testAtMostOnceProcessingModeValidation()
  {
    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
    TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
    GenericTestOperator amoOper = dag.addOperator("amoOper", GenericTestOperator.class);
    dag.setOperatorAttribute(amoOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_MOST_ONCE);
    dag.addStream("input1.outport", input1.outport, amoOper.inport1);
    dag.addStream("input2.outport", input2.outport, amoOper.inport2);
    GenericTestOperator outputOper = dag.addOperator("outputOper", GenericTestOperator.class);
    dag.setOperatorAttribute(outputOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_LEAST_ONCE);
    dag.addStream("aloOper.outport1", amoOper.outport1, outputOper.inport1);
    try {
      dag.validate();
      Assert.fail("Exception expected for " + outputOper);
    } catch (ValidationException ve) {
      //Assert.assertEquals("", ve.getMessage(), "Processing mode outputOper/AT_LEAST_ONCE not valid for source amoOper/AT_MOST_ONCE");
    }
    // Unset the explicit mode: the downstream should now inherit the
    // upstream's AT_MOST_ONCE, asserted below.
    dag.setOperatorAttribute(outputOper, OperatorContext.PROCESSING_MODE, null);
    dag.validate();
    OperatorMeta outputOperOm = dag.getMeta(outputOper);
    Assert.assertEquals("" + outputOperOm.getAttributes(), Operator.ProcessingMode.AT_MOST_ONCE, outputOperOm.getValue(OperatorContext.PROCESSING_MODE));
  }
  // Downstream of an EXACTLY_ONCE operator, only AT_MOST_ONCE is accepted:
  // inherited/default and AT_LEAST_ONCE modes must both fail validation.
  @Test
  public void testExactlyOnceProcessingModeValidation()
  {
    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
    TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
    GenericTestOperator amoOper = dag.addOperator("amoOper", GenericTestOperator.class);
    dag.setOperatorAttribute(amoOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.EXACTLY_ONCE);
    dag.addStream("input1.outport", input1.outport, amoOper.inport1);
    dag.addStream("input2.outport", input2.outport, amoOper.inport2);
    GenericTestOperator outputOper = dag.addOperator("outputOper", GenericTestOperator.class);
    dag.addStream("aloOper.outport1", amoOper.outport1, outputOper.inport1);
    try {
      dag.validate();
      Assert.fail("Exception expected for " + outputOper);
    } catch (ValidationException ve) {
      //Assert.assertEquals("", ve.getMessage(), "Processing mode for outputOper should be AT_MOST_ONCE for source amoOper/EXACTLY_ONCE");
    }
    dag.setOperatorAttribute(outputOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_LEAST_ONCE);
    try {
      dag.validate();
      Assert.fail("Exception expected for " + outputOper);
    } catch (ValidationException ve) {
      //Assert.assertEquals("", ve.getMessage(), "Processing mode outputOper/AT_LEAST_ONCE not valid for source amoOper/EXACTLY_ONCE");
    }
    // AT_MOST_ONCE is valid
    dag.setOperatorAttribute(outputOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_MOST_ONCE);
    dag.validate();
  }
  // THREAD_LOCAL locality is valid for an operator with a single input stream
  // but must be rejected once the operator has multiple input streams.
  @Test
  public void testLocalityValidation()
  {
    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
    StreamMeta s1 = dag.addStream("input1.outport", input1.outport, o1.inport1).setLocality(Locality.THREAD_LOCAL);
    dag.validate();
    // Second input stream makes THREAD_LOCAL on s1 invalid.
    TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
    dag.addStream("input2.outport", input2.outport, o1.inport2);
    try {
      dag.validate();
      Assert.fail("Exception expected for " + o1);
    } catch (ValidationException ve) {
      //Assert.assertThat("", ve.getMessage(), RegexMatcher.matches("Locality THREAD_LOCAL invalid for operator .* with multiple input streams .*"));
    }
    // Clearing the locality resolves the conflict.
    s1.setLocality(null);
    dag.validate();
  }
/** Input operator fixture whose single output port is explicitly mandatory (optional = false). */
private class TestAnnotationsOperator extends BaseOperator implements InputOperator
{
  //final public transient DefaultOutputPort<Object> outport1 = new DefaultOutputPort<Object>();

  @OutputPortFieldAnnotation(optional = false)
  public final transient DefaultOutputPort<Object> outport2 = new DefaultOutputPort<>();

  @Override
  public void emitTuples()
  {
    // Emit Nothing
  }
}

/** Input operator fixture whose single output port carries no port annotation. */
private class TestAnnotationsOperator2 extends BaseOperator implements InputOperator
{
  public final transient DefaultOutputPort<Object> outport1 = new DefaultOutputPort<>();

  @Override
  public void emitTuples()
  {
    // Emit Nothing
  }
}

/** Input operator fixture with two explicitly optional output ports. */
private class TestAnnotationsOperator3 extends BaseOperator implements InputOperator
{
  @OutputPortFieldAnnotation(optional = true)
  public final transient DefaultOutputPort<Object> outport1 = new DefaultOutputPort<>();
  @OutputPortFieldAnnotation(optional = true)
  public final transient DefaultOutputPort<Object> outport2 = new DefaultOutputPort<>();

  @Override
  public void emitTuples()
  {
    // Emit Nothing
  }
}
/**
 * Verifies output-port annotation handling: a mandatory (optional = false) port must be
 * connected before the plan validates, while unannotated and explicitly optional ports
 * may remain unconnected.
 */
@Test
public void testOutputPortAnnotation()
{
  TestAnnotationsOperator mandatoryPortOp = dag.addOperator("testAnnotationsOperator", new TestAnnotationsOperator());
  boolean rejected = false;
  try {
    dag.validate();
  } catch (ValidationException e) {
    //Assert.assertEquals("", "Output port connection required: testAnnotationsOperator.outport2", e.getMessage());
    rejected = true;
  }
  if (!rejected) {
    Assert.fail("should raise: port connection required");
  }

  // Connecting the mandatory port makes the plan valid.
  TestOutputOperator sink = dag.addOperator("sink", new TestOutputOperator());
  dag.addStream("s1", mandatoryPortOp.outport2, sink.inport);
  dag.validate();

  // An unannotated output port does not require a connection...
  TestAnnotationsOperator2 unannotatedPortOp = dag.addOperator("multiOutputPorts1", new TestAnnotationsOperator2());
  dag.validate();
  // ...but may of course be connected.
  TestOutputOperator sink2 = dag.addOperator("o3", new TestOutputOperator());
  dag.addStream("s2", unannotatedPortOp.outport1, sink2.inport);

  // Explicitly optional ports need no connection either.
  dag.addOperator("multiOutputPorts3", new TestAnnotationsOperator3());
  dag.validate();
}
/**
 * Operator that can be used with default Java serialization instead of Kryo
 */
@DefaultSerializer(JavaSerializer.class)
public static class JdkSerializableOperator extends BaseOperator implements Serializable
{
  private static final long serialVersionUID = -4024202339520027097L;

  /**
   * Input port base class that is itself {@code java.io.Serializable} so the
   * operator (including its port fields) can round-trip through JDK serialization.
   */
  public abstract class SerializableInputPort<T> implements InputPort<T>, Sink<T>, java.io.Serializable
  {
    private static final long serialVersionUID = 1L;

    @Override
    public Sink<T> getSink()
    {
      // The port acts as its own sink.
      return this;
    }

    @Override
    public void setConnected(boolean connected)
    {
    }

    @Override
    public void setup(PortContext context)
    {
    }

    @Override
    public void teardown()
    {
    }

    @Override
    public StreamCodec<T> getStreamCodec()
    {
      // No custom codec is supplied for this port.
      return null;
    }
  }

  // Optional so the operator validates even without a connected stream.
  @InputPortFieldAnnotation(optional = true)
  public final InputPort<Object> inport1 = new SerializableInputPort<Object>()
  {
    private static final long serialVersionUID = 1L;

    @Override
    public final void put(Object payload)
    {
    }

    @Override
    public int getCount(boolean reset)
    {
      return 0;
    }
  };
}
/**
 * Round-trips a plan containing a JDK-serializable operator through
 * {@link LogicalPlan#write} / {@link LogicalPlan#read} and checks that the
 * serializable port field survives the trip.
 */
@Test
public void testJdkSerializableOperator() throws Exception
{
  dag.addOperator("o1", new JdkSerializableOperator());

  // Serialize the plan into memory.
  ByteArrayOutputStream serialized = new ByteArrayOutputStream();
  LogicalPlan.write(dag, serialized);
  serialized.close();

  // Deserialize and verify the operator's port reference was restored.
  ByteArrayInputStream in = new ByteArrayInputStream(serialized.toByteArray());
  LogicalPlan restoredPlan = LogicalPlan.read(in);
  JdkSerializableOperator restoredOperator =
      (JdkSerializableOperator)restoredPlan.getOperatorMeta("o1").getOperator();
  Assert.assertNotNull("port object null", restoredOperator.inport1);
}
/**
 * Verifies that DAG validation rejects non-serializable attribute values at every
 * attribute scope: DAG level, operator level, output port and input port.
 */
@Test
public void testAttributeValuesSerializableCheck() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException
{
  // Build an attribute carrying a non-serializable value. Attribute's name field
  // is private, so it is set via reflection for the purpose of this test.
  Attribute<Object> attr = new Attribute<>(new TestAttributeValue(), new Object2String());
  Field nameField = Attribute.class.getDeclaredField("name");
  nameField.setAccessible(true);
  nameField.set(attr, "Test_Attribute");
  nameField.setAccessible(false);
  assertNotNull(attr);

  // DAG attribute not serializable test
  dag.setAttribute(attr, new TestAttributeValue());
  expectValidationFailureForNonSerializableAttribute();

  // Operator attribute not serializable test
  dag = new LogicalPlan();
  TestGeneratorInputOperator operator = dag.addOperator("TestOperator", TestGeneratorInputOperator.class);
  dag.setOperatorAttribute(operator, attr, new TestAttributeValue());
  expectValidationFailureForNonSerializableAttribute();

  // Output Port attribute not serializable test
  dag = new LogicalPlan();
  operator = dag.addOperator("TestOperator", TestGeneratorInputOperator.class);
  dag.setOutputPortAttribute(operator.outport, attr, new TestAttributeValue());
  expectValidationFailureForNonSerializableAttribute();

  // Input Port attribute not serializable test
  dag = new LogicalPlan();
  GenericTestOperator operator1 = dag.addOperator("TestOperator", GenericTestOperator.class);
  dag.setInputPortAttribute(operator1.inport1, attr, new TestAttributeValue());
  expectValidationFailureForNonSerializableAttribute();
}

/**
 * Runs validation on the current {@code dag} and asserts that it fails with a
 * ValidationException (expected when an attribute value is not serializable).
 */
private void expectValidationFailureForNonSerializableAttribute()
{
  try {
    dag.validate();
    Assert.fail("Setting non-serializable attribute should throw exception");
  } catch (ValidationException expected) {
    // expected: attribute value(s) are not serializable
  }
}
/**
 * Stub StringCodec whose conversions intentionally return null; used only as the
 * codec argument when constructing test attributes.
 */
private static class Object2String implements StringCodec<Object>
{
  @Override
  public Object fromString(String string)
  {
    // Stub method for testing - do nothing
    return null;
  }

  @Override
  public String toString(Object pojo)
  {
    // Stub method for testing - do nothing
    return null;
  }
}

/**
 * Attribute value that deliberately does NOT implement Serializable, used to
 * trigger the serializability validation failures above.
 */
private static class TestAttributeValue
{
}
/** Test codec that serializes a tuple as the bytes of its toString() form. */
private static class TestStreamCodec implements StreamCodec<Object>
{
  @Override
  public Object fromByteArray(Slice fragment)
  {
    // Reconstructs the tuple as the string it was encoded from.
    return fragment.stringValue();
  }

  @Override
  public Slice toByteArray(Object o)
  {
    byte[] b = o.toString().getBytes();
    return new Slice(b, 0, b.length);
  }

  @Override
  public int getPartition(Object o)
  {
    // Arbitrary mapping for test purposes; derived from the tuple's hash code.
    return o.hashCode() / 2;
  }
}
/** Operator whose input port supplies its own stream codec via getStreamCodec(). */
public static class TestPortCodecOperator extends BaseOperator
{
  public final transient DefaultInputPort<Object> inport1 = new DefaultInputPort<Object>()
  {
    @Override
    public void process(Object tuple)
    {
    }

    @Override
    public StreamCodec<Object> getStreamCodec()
    {
      // Returns a fresh codec instance on every call.
      return new TestStreamCodec();
    }
  };

  @OutputPortFieldAnnotation(optional = true)
  public final transient DefaultOutputPort<Object> outport = new DefaultOutputPort<>();
}
/*
@Test
public void testStreamCodec() throws Exception {
TestGeneratorInputOperator input = dag.addOperator("input", TestGeneratorInputOperator.class);
GenericTestOperator gto1 = dag.addOperator("gto1", GenericTestOperator.class);
StreamMeta stream1 = dag.addStream("s1", input.outport, gto1.inport1);
StreamCodec<?> codec1 = new TestStreamCodec();
dag.setInputPortAttribute(gto1.inport1, PortContext.STREAM_CODEC, codec1);
dag.validate();
//Assert.assertEquals("Stream codec not set", stream1.getStreamCodec(), codec1);
GenericTestOperator gto2 = dag.addOperator("gto2", GenericTestOperator.class);
GenericTestOperator gto3 = dag.addOperator("gto3", GenericTestOperator.class);
StreamMeta stream2 = dag.addStream("s2", gto1.outport1, gto2.inport1, gto3.inport1);
dag.setInputPortAttribute(gto2.inport1, PortContext.STREAM_CODEC, codec1);
try {
dag.validate();
} catch (ValidationException e) {
String msg = e.getMessage();
if (!msg.startsWith("Stream codec not set on input port") || !msg.contains("gto3")
|| !msg.contains(codec1.toString()) || !msg.endsWith("was specified on another port")) {
Assert.fail(String.format("LogicalPlan validation error msg: %s", msg));
}
}
dag.setInputPortAttribute(gto3.inport1, PortContext.STREAM_CODEC, codec1);
dag.validate();
//Assert.assertEquals("Stream codec not set", stream2.getStreamCodec(), codec1);
StreamCodec<?> codec2 = new TestStreamCodec();
dag.setInputPortAttribute(gto3.inport1, PortContext.STREAM_CODEC, codec2);
try {
dag.validate();
} catch (ValidationException e) {
String msg = e.getMessage();
if (!msg.startsWith("Conflicting stream codec set on input port") || !msg.contains("gto3")
|| !msg.contains(codec2.toString()) || !msg.endsWith("was specified on another port")) {
Assert.fail(String.format("LogicalPlan validation error msg: %s", msg));
}
}
dag.setInputPortAttribute(gto3.inport1, PortContext.STREAM_CODEC, codec1);
TestPortCodecOperator pco = dag.addOperator("pco", TestPortCodecOperator.class);
StreamMeta stream3 = dag.addStream("s3", gto2.outport1, pco.inport1);
dag.validate();
//Assert.assertEquals("Stream codec class not set", stream3.getCodecClass(), TestStreamCodec.class);
dag.setInputPortAttribute(pco.inport1, PortContext.STREAM_CODEC, codec2);
dag.validate();
//Assert.assertEquals("Stream codec not set", stream3.getStreamCodec(), codec2);
}
*/
/*
@Test
public void testCheckpointableWithinAppWindowAnnotation()
{
TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
GenericTestOperator x = dag.addOperator("x", new GenericTestOperator());
dag.addStream("Stream1", input1.outport, x.inport1);
dag.setOperatorAttribute(x, OperatorContext.CHECKPOINT_WINDOW_COUNT, 15);
dag.setOperatorAttribute(x, OperatorContext.APPLICATION_WINDOW_COUNT, 30);
dag.validate();
TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
CheckpointableWithinAppWindowOperator y = dag.addOperator("y", new CheckpointableWithinAppWindowOperator());
dag.addStream("Stream2", input2.outport, y.inport1);
dag.setOperatorAttribute(y, OperatorContext.CHECKPOINT_WINDOW_COUNT, 15);
dag.setOperatorAttribute(y, OperatorContext.APPLICATION_WINDOW_COUNT, 30);
dag.validate();
TestGeneratorInputOperator input3 = dag.addOperator("input3", TestGeneratorInputOperator.class);
NotCheckpointableWithinAppWindowOperator z = dag.addOperator("z", new NotCheckpointableWithinAppWindowOperator());
dag.addStream("Stream3", input3.outport, z.inport1);
dag.setOperatorAttribute(z, OperatorContext.CHECKPOINT_WINDOW_COUNT, 15);
dag.setOperatorAttribute(z, OperatorContext.APPLICATION_WINDOW_COUNT, 30);
try {
dag.validate();
Assert.fail("should fail because chekpoint window count is not a factor of application window count");
} catch (ValidationException e) {
// expected
}
dag.setOperatorAttribute(z, OperatorContext.CHECKPOINT_WINDOW_COUNT, 30);
dag.validate();
dag.setOperatorAttribute(z, OperatorContext.CHECKPOINT_WINDOW_COUNT, 45);
try {
dag.validate();
Assert.fail("should fail because chekpoint window count is not a factor of application window count");
} catch (ValidationException e) {
// expected
}
}
*/
// Marker operators for checkpoint-within-application-window validation
// (exercised by the commented-out testCheckpointableWithinAppWindowAnnotation above).
@OperatorAnnotation(checkpointableWithinAppWindow = true)
class CheckpointableWithinAppWindowOperator extends GenericTestOperator
{
}

@OperatorAnnotation(checkpointableWithinAppWindow = false)
class NotCheckpointableWithinAppWindowOperator extends GenericTestOperator
{
}
/**
 * A subclass may hide an inherited input port field with its own declaration;
 * connecting the subclass' own port must validate cleanly.
 */
@Test
public void testInputPortHiding()
{
  TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
  Operator2 operator2 = dag.addOperator("operator2", new Operator2());
  dag.addStream("Stream1", input1.outport, operator2.input);
  dag.validate();
}

/**
 * Connecting the hidden base-class port (via a base-typed reference) while the
 * subclass declares its own port must be rejected by validation.
 */
@Test
public void testInvalidInputPortConnection()
{
  TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
  // Declared as Operator1 on purpose: the stream is wired against the base
  // class' port field, not the hiding declaration in Operator3.
  Operator1 operator1 = dag.addOperator("operator3", new Operator3());
  dag.addStream("Stream1", input1.outport, operator1.input);
  try {
    dag.validate();
  } catch (ValidationException ex) {
    //Assert.assertTrue("validation message", ex.getMessage().startsWith("Invalid port connected"));
    return;
  }
  Assert.fail();
}
/**
 * Exercises validation of affinity / anti-affinity rules against stream
 * localities and host-locality attributes. The shared {@code rules} list is
 * mutated between validations, so statement order matters throughout.
 */
@Test
public void testAffinityRulesDagValidation()
{
  TestGeneratorInputOperator o1 = dag.addOperator("O1", new TestGeneratorInputOperator());
  GenericTestOperator o2 = dag.addOperator("O2", new GenericTestOperator());
  GenericTestOperator o3 = dag.addOperator("O3", new GenericTestOperator());
  dag.addStream("stream1", o1.outport, o2.inport1).setLocality(Locality.THREAD_LOCAL);
  StreamMeta stream2 = dag.addStream("stream2", o2.outport1, o3.inport1).setLocality(Locality.CONTAINER_LOCAL);
  AffinityRulesSet ruleSet = new AffinityRulesSet();
  // Valid case:
  List<AffinityRule> rules = new ArrayList<>();
  ruleSet.setAffinityRules(rules);
  AffinityRule rule1 = new AffinityRule(Type.AFFINITY, Locality.CONTAINER_LOCAL, false, "O1", "O3");
  rules.add(rule1);
  dag.setAttribute(DAGContext.AFFINITY_RULES_SET, ruleSet);
  dag.validate();
  // Locality conflicts with affinity rules case:
  AffinityRule rule2 = new AffinityRule(Type.ANTI_AFFINITY, Locality.NODE_LOCAL, false, "O2", "O3");
  rules.add(rule2);
  try {
    dag.validate();
    Assert.fail("DAG validation should fail due to conflicting rules");
  } catch (ValidationException e) {
    Assert.assertEquals("Anti Affinity rule for operators O2 & O3 conflicts with affinity rules or Stream locality", e.getMessage());
  }
  // Change Stream2 locality to Node to check if validation passes
  stream2.setLocality(Locality.RACK_LOCAL);
  dag.validate();
  // Add anti-affinity rule conflicting with rule1
  AffinityRule rule3 = new AffinityRule(Type.ANTI_AFFINITY, Locality.NODE_LOCAL, false, "O1", "O3");
  rules.add(rule3);
  try {
    dag.validate();
    Assert.fail("DAG validation should fail due to conflicting rules");
  } catch (ValidationException e) {
    Assert.assertEquals("Anti Affinity rule for operators O1 & O3 conflicts with affinity rules or Stream locality", e.getMessage());
  }
  // Change rule1 to Rack local to see if dag validation passes
  rules.clear();
  rule1.setLocality(Locality.RACK_LOCAL);
  rules.add(rule1);
  rules.add(rule2);
  rules.add(rule3);
  dag.validate();
  // Add conflicting rules and set relaxLocality for one rule
  AffinityRule rule4 = new AffinityRule(Type.ANTI_AFFINITY, Locality.NODE_LOCAL, true, "O1", "O2");
  rules.add(rule4);
  dag.validate();
  // Set conflicting host locality and check if it fails validation
  rules.clear();
  AffinityRule rule = new AffinityRule(Type.ANTI_AFFINITY, Locality.NODE_LOCAL, false, "O2", "O3");
  rules.add(rule);
  dag.getMeta(o2).getAttributes().put(OperatorContext.LOCALITY_HOST, "host1");
  dag.getMeta(o3).getAttributes().put(OperatorContext.LOCALITY_HOST, "host1");
  try {
    dag.validate();
    Assert.fail("DAG validation should fail due to conflicting host locality");
  } catch (ValidationException e) {
    Assert.assertEquals("Host Locality for operators: O2(host: host1) & O3(host: host1) conflict with anti-affinity rules", e.getMessage());
  }
  // Set conflicting affinity and different host locality for node-local
  // operators
  rules.clear();
  rule = new AffinityRule(Type.AFFINITY, Locality.NODE_LOCAL, false, "O2", "O3");
  rules.add(rule);
  dag.getMeta(o2).getAttributes().put(OperatorContext.LOCALITY_HOST, "host1");
  dag.getMeta(o3).getAttributes().put(OperatorContext.LOCALITY_HOST, "host2");
  try {
    dag.validate();
    Assert.fail("DAG validation should fail due to conflicting host locality");
  } catch (ValidationException e) {
    Assert.assertEquals("Host Locality for operators: O2(host: host1) & O3(host: host2) conflicts with affinity rules", e.getMessage());
  }
  // Check affinity Thread local validation for non-connected operators
  dag.getAttributes().get(DAGContext.AFFINITY_RULES_SET).getAffinityRules().clear();
  rule = new AffinityRule(Type.AFFINITY, Locality.THREAD_LOCAL, false, "O1", "O3");
  rules.add(rule);
  try {
    dag.validate();
    Assert.fail("DAG validation should fail due to conflicting host locality");
  } catch (ValidationException e) {
    Assert.assertEquals("Affinity rule specified THREAD_LOCAL affinity for operators O1 & O3 which are not connected by stream", e.getMessage());
  }
  // Check indirect conflict
  // (start a fresh plan: O1 fans out to O2 and O3 NODE_LOCAL, so affinity can
  // propagate transitively through the stream grouping)
  dag = new LogicalPlan();
  o1 = dag.addOperator("O1", new TestGeneratorInputOperator());
  o2 = dag.addOperator("O2", new GenericTestOperator());
  o3 = dag.addOperator("O3", new GenericTestOperator());
  GenericTestOperator o4 = dag.addOperator("O4", new GenericTestOperator());
  GenericTestOperator o5 = dag.addOperator("O5", new GenericTestOperator());
  dag.addStream("stream1", o1.outport, o2.inport1, o3.inport1).setLocality(Locality.NODE_LOCAL);
  dag.addStream("stream2", o3.outport1, o4.inport1);
  dag.addStream("stream3", o2.outport1, o5.inport1);
  rules.clear();
  // O3 and O5 cannot have NODE_LOCAL anti-affinity now, since they already have NODE_LOCAL affinity
  rules.add(new AffinityRule(Type.AFFINITY, Locality.CONTAINER_LOCAL, false, "O1", "O5"));
  rules.add(new AffinityRule(Type.ANTI_AFFINITY, Locality.NODE_LOCAL, false, "O3", "O5"));
  ruleSet = new AffinityRulesSet();
  ruleSet.setAffinityRules(rules);
  dag.setAttribute(DAGContext.AFFINITY_RULES_SET, ruleSet);
  try {
    dag.validate();
    Assert.fail("dag validation should fail due to conflicting affinity rules");
  } catch (ValidationException e) {
    Assert.assertEquals("Anti Affinity rule for operators O3 & O5 conflicts with affinity rules or Stream locality", e.getMessage());
  }
}
/** Base operator declaring a plain (unannotated) input port. */
class Operator1 extends BaseOperator
{
  public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
  {
    @Override
    public void process(Object tuple)
    {
    }
  };
}

/** Hides Operator1's input port with an identically declared one (valid hiding). */
class Operator2 extends Operator1
{
  public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
  {
    @Override
    public void process(Object tuple)
    {
    }
  };
}

/** Hides Operator1's input port with an optional one; used by the invalid-connection test. */
class Operator3 extends Operator1
{
  @InputPortFieldAnnotation(optional = true)
  public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
  {
    @Override
    public void process(Object tuple)
    {
    }
  };
}
/** Connecting the subclass' hiding output port must validate cleanly. */
@Test
public void testOutputPortHiding()
{
  Operator5 operator5 = dag.addOperator("input", new Operator5());
  Operator2 operator2 = dag.addOperator("operator2", new Operator2());
  dag.addStream("Stream1", operator5.output, operator2.input);
  dag.validate();
}

/** Wiring the hidden base-class output port must fail validation. */
@Test(expected = ValidationException.class)
public void testInvalidOutputPortConnection()
{
  // Declared as Operator4 on purpose: the stream uses the hidden base port field.
  Operator4 operator4 = dag.addOperator("input", new Operator5());
  Operator3 operator3 = dag.addOperator("operator3", new Operator3());
  dag.addStream("Stream1", operator4.output, operator3.input);
  dag.validate();
}

/** Input operator declaring an output port. */
class Operator4 extends BaseOperator implements InputOperator
{
  public final transient DefaultOutputPort<Object> output = new DefaultOutputPort<>();

  @Override
  public void emitTuples()
  {
  }
}

/** Hides Operator4's output port with its own declaration. */
class Operator5 extends Operator4
{
  public final transient DefaultOutputPort<Object> output = new DefaultOutputPort<>();
}
/*
These were tests for operator semantics that verified if an operator class implements InputOperator then the same class should not declare input ports.
This would be done later when we are able to verify user code at compile-time.
validation()
{
if (n.getOperator() instanceof InputOperator) {
try {
for (Class<?> clazz : n.getOperator().getClass().getInterfaces()) {
if (clazz.getName().equals(InputOperator.class.getName())) {
for (Field field : n.getOperator().getClass().getDeclaredFields()) {
field.setAccessible(true);
Object declaredObject = field.get(n.getOperator());
if (declaredObject instanceof InputPort) {
throw new ValidationException("Operator class implements InputOperator and also declares input ports: " + n.name);
}
}
break;
}
}
}
catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
}
@Test
public void testInvalidInputOperatorDeclaration()
{
TestGeneratorInputOperator.InvalidInputOperator inputOperator = dag.addOperator("input", new TestGeneratorInputOperator.InvalidInputOperator());
GenericTestOperator operator2 = dag.addOperator("operator2", GenericTestOperator.class);
dag.addStream("stream1", inputOperator.outport, operator2.inport1);
try {
dag.validate();
fail("validation should fail");
}
catch (ValidationException e) {
// expected
}
}
@Test
public void testValidInputOperatorDeclaration()
{
TestGeneratorInputOperator.ValidGenericOperator operator1 = dag.addOperator("input", new TestGeneratorInputOperator.ValidGenericOperator());
GenericTestOperator operator2 = dag.addOperator("operator2", GenericTestOperator.class);
dag.addStream("stream1", operator1.outport, operator2.inport1);
dag.validate();
}
*/
}
| apache-2.0 |
tsdl2013/Android_ScrollTable | lib/src/main/java/com/socoolby/scroll_table/lib/ScrollTableView.java | 4569 | package com.socoolby.scroll_table.lib;
import android.content.Context;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TableLayout;
import android.widget.TableRow;
import android.widget.TextView;
import com.socoolby.scroll_table.unit.Func;
import com.socoolby.scroll_table.widget.CustomScrollView;
/**
* Created by socoolby on 3/24/14.
*/
/**
 * Composite table widget with a fixed horizontal header, a fixed vertical header
 * and a two-dimensionally scrollable content area. The three scroll views are kept
 * in sync: dragging or flinging the content pane also scrolls the matching header.
 *
 * Created by socoolby on 3/24/14.
 */
public class ScrollTableView extends LinearLayout {
    private static final String TAG = "ScrollTableView";

    // Scroll containers: vertical header, horizontal header, content grid.
    CustomScrollView cv_scrolltable_header_vertical, cv_crolltable_header_horizontion, cv_crolltable_content;
    // Layouts the header cells are added to.
    LinearLayout ll_crolltable_header_horicontion, ll_crolltable_header_veritcal;
    // Table holding the content cells.
    TableLayout content_crolltable_table;
    private RelativeLayout root_layout;

    public ScrollTableView(Context context) {
        super(context);
        initView();
    }

    public ScrollTableView(Context context, AttributeSet attrs) {
        super(context, attrs);
        initView();
    }

    /**
     * Inflates the internal layout and wires the three scroll views together so
     * that scroll, fling and abort events on the content pane are forwarded to
     * both headers.
     */
    private void initView() {
        LayoutInflater layoutInflater = LayoutInflater.from(getContext());
        root_layout = (RelativeLayout) layoutInflater.inflate(R.layout.layout_scrolltable, null);
        this.addView(root_layout);
        cv_crolltable_content = (CustomScrollView) root_layout.findViewById(R.id.cv_crolltable_content);
        cv_crolltable_header_horizontion = (CustomScrollView) root_layout.findViewById(R.id.cv_crolltable_header_horizontion);
        cv_scrolltable_header_vertical = (CustomScrollView) root_layout.findViewById(R.id.cv_scrolltable_header_vertical);
        ll_crolltable_header_horicontion = (LinearLayout) root_layout.findViewById(R.id.ll_crolltable_header_horicontion);
        ll_crolltable_header_veritcal = (LinearLayout) root_layout.findViewById(R.id.ll_crolltable_header_veritcal);
        content_crolltable_table = (TableLayout) root_layout.findViewById(R.id.content_crolltable_table);
        // When the content pane aborts its scroll animation, stop the headers too.
        cv_crolltable_content.setAbortAnimationListner(new CustomScrollView.AbortAnimationListener() {
            @Override
            public void abortAnimation() {
                cv_crolltable_header_horizontion.setAbortAnimateStop();
                cv_scrolltable_header_vertical.setAbortAnimateStop();
            }
        });
        // Mirror content scrolling: x goes to the horizontal header, y to the vertical one.
        cv_crolltable_content.setScrollListener(new CustomScrollView.ScrollListener() {
            @Override
            public void scrollTo(int x, int y) {
                cv_crolltable_header_horizontion.scrollTo(x, 0);
                cv_scrolltable_header_vertical.scrollTo(0, y);
            }
        });
        // Forward fling velocity to both headers.
        cv_crolltable_content.setFlingListener(new CustomScrollView.FlingListener() {
            @Override
            public void flingListener(int velocityX, int velocityY) {
                cv_crolltable_header_horizontion.fling(velocityX, velocityY);
                cv_scrolltable_header_vertical.fling(velocityX, velocityY);
            }
        });
    }

    /**
     * Populates the headers and the content grid.
     *
     * @param title_horizontal labels of the horizontal (column) header
     * @param title_vertical   labels of the vertical (row) header
     * @param content          cell texts indexed as content[row][column]; must be at
     *                         least title_vertical.length x title_horizontal.length
     */
    public void setData(String title_horizontal[], String[] title_vertical, String[][] content) {
        int count_horizontal = title_horizontal.length;
        int count_vertical = title_vertical.length;
        Func.Log(TAG, "setData count_horizontal:" + count_horizontal + " count_vertical:" + count_vertical);
        if (ll_crolltable_header_horicontion == null) {
            // The layout failed to inflate (or initView was never run); adding cells
            // would only throw a NullPointerException, so log and bail out.
            // (Previously this check sat inside the loop, logged, and dereferenced anyway.)
            Func.Log(TAG, "view is null");
            return;
        }
        for (int i = 0; i < count_horizontal; i++) {
            ll_crolltable_header_horicontion.addView(getView(title_horizontal[i]));
        }
        for (int i = 0; i < count_vertical; i++) {
            ll_crolltable_header_veritcal.addView(getView(title_vertical[i]));
        }
        for (int i = 0; i < count_vertical; i++) {
            TableRow row = new TableRow(getContext());
            for (int j = 0; j < count_horizontal; j++) {
                row.addView(getView(content[i][j]));
            }
            content_crolltable_table.addView(row);
        }
    }

    /** Inflates a single table cell and fills in its text. */
    private LinearLayout getView(String content) {
        LinearLayout lay = (LinearLayout) LayoutInflater.from(getContext()).inflate(R.layout.scrolltable_item, null);
        TextView view = (TextView) lay.findViewById(R.id.tv_scrolltable_item_tv);
        view.setText(content);
        return lay;
    }
}
| apache-2.0 |
renhui/android_career | framework/NotesListActivity/src/net/micode/notes/gtask/exception/NetworkFailureException.java | 1109 | /*
* Copyright (c) 2010-2011, The MiCode Open Source Community (www.micode.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.micode.notes.gtask.exception;
/**
 * Exception thrown when a GTask network operation fails.
 */
public class NetworkFailureException extends Exception {
    private static final long serialVersionUID = 2107610287180234136L;

    /** Creates an exception with neither detail message nor cause. */
    public NetworkFailureException() {
        super();
    }

    /**
     * Creates an exception with a detail message.
     *
     * @param message description of the failure
     */
    public NetworkFailureException(String message) {
        super(message);
    }

    /**
     * Creates an exception with a detail message and an underlying cause.
     *
     * @param message description of the failure
     * @param cause   the exception that triggered this one
     */
    public NetworkFailureException(String message, Throwable cause) {
        super(message, cause);
    }
}
| apache-2.0 |
qiujuer/Blink | Android/Blink/library/src/main/java/net/qiujuer/blink/box/FileSendPacket.java | 1847 | /*
* Copyright (C) 2014 Qiujuer <qiujuer@live.cn>
* WebSite http://www.qiujuer.net
* Created 04/16/2015
* Changed 04/19/2015
* Version 1.0.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.qiujuer.blink.box;
import net.qiujuer.blink.listener.SendListener;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
/**
* File send class
*/
/**
 * Send packet backed by a file on disk: streams the file's bytes and reports the
 * UTF-8 encoded file name as the packet info.
 */
public class FileSendPacket extends BaseSendPacket<File> {

    public FileSendPacket(File file) {
        this(file, null);
    }

    public FileSendPacket(File entity, SendListener listener) {
        super(entity, PacketType.FILE, listener);
        mLength = mEntity.length();
    }

    /**
     * Opens the underlying file for reading.
     *
     * @return true when the stream was opened, false when the file is missing
     */
    @Override
    public boolean startPacket() {
        try {
            mStream = new FileInputStream(mEntity);
        } catch (FileNotFoundException e) {
            return false;
        }
        return true;
    }

    /** Releases the file stream when the packet is finished. */
    @Override
    public void endPacket() {
        closeStream();
    }

    /**
     * Copies the UTF-8 bytes of the file name into {@code buffer} starting at
     * {@code index}.
     *
     * @return the number of bytes written, or 0 on any failure (bad encoding,
     *         buffer too small, etc.)
     */
    @Override
    public short readInfo(byte[] buffer, int index) {
        try {
            byte[] nameBytes = mEntity.getName().getBytes("UTF-8");
            short count = (short) nameBytes.length;
            System.arraycopy(nameBytes, 0, buffer, index, count);
            return count;
        } catch (Exception e) {
            return 0;
        }
    }
}
| artistic-2.0 |
vaginessa/XInternalSD | src/com/pyler/xinternalsd/Preferences.java | 5100 | package com.pyler.xinternalsd;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.preference.EditTextPreference;
import android.preference.MultiSelectListPreference;
import android.preference.Preference;
import android.preference.PreferenceCategory;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
/**
 * Settings activity for the XInternalSD Xposed module. Hosts a single settings
 * fragment whose shared preferences are stored in world-readable mode so they
 * can be read from other processes.
 */
public class Preferences extends Activity {
    // Shared statically so the inner fragment and AsyncTask can reach them.
    public static Context context;
    public static SharedPreferences prefs;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        context = getApplicationContext();
        // Replace the activity content with the settings fragment.
        getFragmentManager().beginTransaction()
                .replace(android.R.id.content, new Settings()).commit();
    }

    @SuppressWarnings("deprecation")
    public static class Settings extends PreferenceFragment {
        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            // World-readable so the hook code in other apps can read the settings.
            getPreferenceManager()
                    .setSharedPreferencesMode(MODE_WORLD_READABLE);
            addPreferencesFromResource(R.xml.preferences);
            prefs = PreferenceManager.getDefaultSharedPreferences(context);
            PreferenceCategory appSettings = (PreferenceCategory) findPreference("app_settings");
            Preference externalSdCardFullAccess = findPreference("external_sdcard_full_access");
            EditTextPreference internalSdPath = (EditTextPreference) findPreference("internal_sdcard_path");
            Preference includeSystemApps = findPreference("include_system_apps");
            // Rebuild the app lists whenever the include-system-apps toggle changes.
            includeSystemApps
                    .setOnPreferenceChangeListener(new Preference.OnPreferenceChangeListener() {
                        @Override
                        public boolean onPreferenceChange(
                                Preference preference, Object newValue) {
                            reloadAppsList();
                            return true;
                        }
                    });
            reloadAppsList();
            // Show the currently configured path as the preference summary.
            String customInternalSdPath = prefs.getString(
                    "internal_sdcard_path", "");
            if (!customInternalSdPath.isEmpty()) {
                internalSdPath.setSummary(customInternalSdPath);
            }
            // No path configured yet: default to the first SECONDARY_STORAGE entry.
            String extStorage = System.getenv("SECONDARY_STORAGE");
            if (extStorage != null && !extStorage.isEmpty()
                    && customInternalSdPath.isEmpty()) {
                String externalSd = extStorage.split(":")[0];
                internalSdPath.setSummary(externalSd);
                internalSdPath.setText(externalSd);
                prefs.edit().putString("internal_sdcard_path", externalSd)
                        .apply();
            }
            // Full-access option only applies from KitKat on; hide it on older releases.
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
                appSettings.removePreference(externalSdCardFullAccess);
            }
        }

        /** Rebuilds the enable/disable app lists asynchronously. */
        public void reloadAppsList() {
            new LoadApps().execute();
        }

        /**
         * Returns true when the app should be listed; system apps are excluded
         * unless the "include_system_apps" preference is enabled.
         */
        public boolean isAllowedApp(ApplicationInfo appInfo) {
            boolean isAllowedApp = true;
            boolean includeSystemApps = prefs.getBoolean("include_system_apps",
                    false);
            if ((appInfo.flags & ApplicationInfo.FLAG_SYSTEM) != 0
                    && !includeSystemApps) {
                isAllowedApp = false;
            }
            return isAllowedApp;
        }

        /**
         * Loads the installed applications in the background, sorts them by
         * display label (case-insensitive) and fills both multi-select list
         * preferences. The preferences are disabled while loading.
         */
        public class LoadApps extends AsyncTask<Void, Void, Void> {
            MultiSelectListPreference enabledApps = (MultiSelectListPreference) findPreference("enable_for_apps");
            MultiSelectListPreference disabledApps = (MultiSelectListPreference) findPreference("disable_for_apps");
            List<CharSequence> appNames = new ArrayList<CharSequence>();
            List<CharSequence> packageNames = new ArrayList<CharSequence>();
            PackageManager pm = context.getPackageManager();
            List<ApplicationInfo> packages = pm
                    .getInstalledApplications(PackageManager.GET_META_DATA);

            @Override
            protected void onPreExecute() {
                // Block interaction until the lists are rebuilt.
                enabledApps.setEnabled(false);
                disabledApps.setEnabled(false);
            }

            @Override
            protected Void doInBackground(Void... arg0) {
                // Collect [packageName, label] pairs for all allowed apps.
                List<String[]> sortedApps = new ArrayList<String[]>();
                for (ApplicationInfo app : packages) {
                    if (isAllowedApp(app)) {
                        sortedApps.add(new String[] {
                                app.packageName,
                                app.loadLabel(context.getPackageManager())
                                        .toString() });
                    }
                }
                // Sort by display label, ignoring case.
                Collections.sort(sortedApps, new Comparator<String[]>() {
                    @Override
                    public int compare(String[] entry1, String[] entry2) {
                        return entry1[1].compareToIgnoreCase(entry2[1]);
                    }
                });
                for (int i = 0; i < sortedApps.size(); i++) {
                    appNames.add(sortedApps.get(i)[1]);
                    packageNames.add(sortedApps.get(i)[0]);
                }
                return null;
            }

            @Override
            protected void onPostExecute(Void result) {
                // Entries are labels, entry values are package names.
                CharSequence[] appNamesList = appNames
                        .toArray(new CharSequence[appNames.size()]);
                CharSequence[] packageNamesList = packageNames
                        .toArray(new CharSequence[packageNames.size()]);
                enabledApps.setEntries(appNamesList);
                enabledApps.setEntryValues(packageNamesList);
                enabledApps.setEnabled(true);
                disabledApps.setEntries(appNamesList);
                disabledApps.setEntryValues(packageNamesList);
                disabledApps.setEnabled(true);
            }
        }
    }
}
| bsd-2-clause |
SenshiSentou/SourceFight | slick_dev/trunk/Slick/src/org/newdawn/slick/opengl/PNGDecoder.java | 24708 | /*
* Copyright (c) 2008-2010, Matthias Mann
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Matthias Mann nor the names of its contributors may
* be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.newdawn.slick.opengl;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.zip.CRC32;
import java.util.zip.DataFormatException;
import java.util.zip.Inflater;
/**
* A PNGDecoder. The slick PNG decoder is based on this class :)
*
* @author Matthias Mann
*/
public class PNGDecoder {

	/** Target pixel layouts a PNG image can be decoded into. */
	public enum Format {
		ALPHA(1, true), LUMINANCE(1, false), LUMINANCE_ALPHA(2, true), RGB(3,
				false), RGBA(4, true), BGRA(4, true), ABGR(4, true);

		final int numComponents;
		final boolean hasAlpha;

		private Format(int numComponents, boolean hasAlpha) {
			this.numComponents = numComponents;
			this.hasAlpha = hasAlpha;
		}

		/** @return the number of bytes per pixel of this format */
		public int getNumComponents() {
			return numComponents;
		}

		/** @return true if this format carries an alpha channel */
		public boolean isHasAlpha() {
			return hasAlpha;
		}
	}

	/** The fixed 8-byte magic number that starts every PNG file. */
	private static final byte[] SIGNATURE = { (byte) 137, 80, 78, 71, 13, 10,
			26, 10 };

	// Chunk type codes: four ASCII characters packed big-endian into an int.
	private static final int IHDR = 0x49484452;
	private static final int PLTE = 0x504C5445;
	private static final int tRNS = 0x74524E53;
	private static final int IDAT = 0x49444154;
	private static final int IEND = 0x49454E44;

	// PNG color-type values from the IHDR chunk.
	private static final byte COLOR_GREYSCALE = 0;
	private static final byte COLOR_TRUECOLOR = 2;
	private static final byte COLOR_INDEXED = 3;
	private static final byte COLOR_GREYALPHA = 4;
	private static final byte COLOR_TRUEALPHA = 6;

	private final InputStream input;
	private final CRC32 crc;
	/** Scratch buffer for chunk headers and compressed IDAT data. */
	private final byte[] buffer;

	// State of the chunk currently being read.
	private int chunkLength;
	private int chunkType;
	private int chunkRemaining;

	// Image properties parsed from IHDR (and PLTE/tRNS when present).
	private int width;
	private int height;
	private int bitdepth;
	private int colorType;
	private int bytesPerPixel;
	private byte[] palette;
	private byte[] paletteA;
	private byte[] transPixel;

	/**
	 * Creates a decoder and reads the PNG signature plus all header chunks up
	 * to (but not including) the first IDAT chunk, so the image properties are
	 * available immediately after construction.
	 *
	 * @param input the stream containing the PNG data; not closed by this class
	 * @throws IOException if the stream is not a valid, supported PNG file
	 */
	public PNGDecoder(InputStream input) throws IOException {
		this.input = input;
		this.crc = new CRC32();
		this.buffer = new byte[4096];

		readFully(buffer, 0, SIGNATURE.length);
		if (!checkSignature(buffer)) {
			throw new IOException("Not a valid PNG file");
		}

		openChunk(IHDR);
		readIHDR();
		closeChunk();

		searchIDAT: for (;;) {
			openChunk();
			switch (chunkType) {
			case IDAT:
				break searchIDAT;
			case PLTE:
				readPLTE();
				break;
			case tRNS:
				readtRNS();
				break;
			}
			closeChunk();
		}

		if (colorType == COLOR_INDEXED && palette == null) {
			throw new IOException("Missing PLTE chunk");
		}
	}

	/** @return the image height in pixels */
	public int getHeight() {
		return height;
	}

	/** @return the image width in pixels */
	public int getWidth() {
		return width;
	}

	/**
	 * Checks if the image has a real alpha channel. This method does not check
	 * for the presence of a tRNS chunk.
	 *
	 * @return true if the image has an alpha channel
	 * @see #hasAlpha()
	 */
	public boolean hasAlphaChannel() {
		return colorType == COLOR_TRUEALPHA || colorType == COLOR_GREYALPHA;
	}

	/**
	 * Checks if the image has transparency information either from an alpha
	 * channel or from a tRNS chunk.
	 *
	 * @return true if the image has transparency
	 * @see #hasAlphaChannel()
	 * @see #overwriteTRNS(byte, byte, byte)
	 */
	public boolean hasAlpha() {
		return hasAlphaChannel() || paletteA != null || transPixel != null;
	}

	/** @return true if the image carries color data (truecolor or indexed) */
	public boolean isRGB() {
		return colorType == COLOR_TRUEALPHA || colorType == COLOR_TRUECOLOR
				|| colorType == COLOR_INDEXED;
	}

	/**
	 * Overwrites the tRNS chunk entry to make a selected color transparent.
	 * <p>
	 * This can only be invoked when the image has no alpha channel.
	 * </p>
	 * <p>
	 * Calling this method causes {@link #hasAlpha()} to return true.
	 * </p>
	 *
	 * @param r
	 *            the red component of the color to make transparent
	 * @param g
	 *            the green component of the color to make transparent
	 * @param b
	 *            the blue component of the color to make transparent
	 * @throws UnsupportedOperationException
	 *             if the tRNS chunk data can't be set
	 * @see #hasAlphaChannel()
	 */
	public void overwriteTRNS(byte r, byte g, byte b) {
		if (hasAlphaChannel()) {
			throw new UnsupportedOperationException(
					"image has an alpha channel");
		}
		byte[] pal = this.palette;
		if (pal == null) {
			// Truecolor/greyscale: store as 16-bit samples (high byte zero).
			transPixel = new byte[] { 0, r, 0, g, 0, b };
		} else {
			// Indexed: build an alpha table; only the matching entry stays 0.
			paletteA = new byte[pal.length / 3];
			for (int i = 0, j = 0; i < pal.length; i += 3, j++) {
				if (pal[i] != r || pal[i + 1] != g || pal[i + 2] != b) {
					paletteA[j] = (byte) 0xFF;
				}
			}
		}
	}

	/**
	 * Computes the implemented format conversion for the desired format.
	 *
	 * @param fmt
	 *            the desired format
	 * @return format which best matches the desired format
	 * @throws UnsupportedOperationException
	 *             if this PNG file can't be decoded
	 */
	public Format decideTextureFormat(Format fmt) {
		switch (colorType) {
		case COLOR_TRUECOLOR:
			switch (fmt) {
			case ABGR:
			case RGBA:
			case BGRA:
			case RGB:
				return fmt;
			default:
				return Format.RGB;
			}
		case COLOR_TRUEALPHA:
			switch (fmt) {
			case ABGR:
			case RGBA:
			case BGRA:
			case RGB:
				return fmt;
			default:
				return Format.RGBA;
			}
		case COLOR_GREYSCALE:
			switch (fmt) {
			case LUMINANCE:
			case ALPHA:
				return fmt;
			default:
				return Format.LUMINANCE;
			}
		case COLOR_GREYALPHA:
			return Format.LUMINANCE_ALPHA;
		case COLOR_INDEXED:
			switch (fmt) {
			case ABGR:
			case RGBA:
			case BGRA:
				return fmt;
			default:
				return Format.RGBA;
			}
		default:
			throw new UnsupportedOperationException("Not yet implemented");
		}
	}

	/**
	 * Decodes the image into the specified buffer. The first line is placed at
	 * the current position. After decode the buffer position is at the end of
	 * the last line.
	 *
	 * @param buffer
	 *            the buffer
	 * @param stride
	 *            the stride in bytes from start of a line to start of the next
	 *            line, can be negative.
	 * @param fmt
	 *            the target format into which the image should be decoded.
	 * @throws IOException
	 *             if a read or data error occurred
	 * @throws IllegalArgumentException
	 *             if the start position of a line falls outside the buffer
	 * @throws UnsupportedOperationException
	 *             if the image can't be decoded into the desired format
	 */
	public void decode(ByteBuffer buffer, int stride, Format fmt)
			throws IOException {
		final int offset = buffer.position();
		// One extra byte per scanline for the PNG filter-type byte.
		final int lineSize = ((width * bitdepth + 7) / 8) * bytesPerPixel;
		byte[] curLine = new byte[lineSize + 1];
		byte[] prevLine = new byte[lineSize + 1];
		// Expansion buffer for sub-byte indexed bitdepths (1/2/4 bits).
		byte[] palLine = (bitdepth < 8) ? new byte[width + 1] : null;

		final Inflater inflater = new Inflater();
		try {
			for (int y = 0; y < height; y++) {
				readChunkUnzip(inflater, curLine, 0, curLine.length);
				unfilter(curLine, prevLine);

				buffer.position(offset + y * stride);

				switch (colorType) {
				case COLOR_TRUECOLOR:
					switch (fmt) {
					case ABGR:
						copyRGBtoABGR(buffer, curLine);
						break;
					case RGBA:
						copyRGBtoRGBA(buffer, curLine);
						break;
					case BGRA:
						copyRGBtoBGRA(buffer, curLine);
						break;
					case RGB:
						copy(buffer, curLine);
						break;
					default:
						throw new UnsupportedOperationException(
								"Unsupported format for this image");
					}
					break;
				case COLOR_TRUEALPHA:
					switch (fmt) {
					case ABGR:
						copyRGBAtoABGR(buffer, curLine);
						break;
					case RGBA:
						copy(buffer, curLine);
						break;
					case BGRA:
						copyRGBAtoBGRA(buffer, curLine);
						break;
					case RGB:
						copyRGBAtoRGB(buffer, curLine);
						break;
					default:
						throw new UnsupportedOperationException(
								"Unsupported format for this image");
					}
					break;
				case COLOR_GREYSCALE:
					switch (fmt) {
					case LUMINANCE:
					case ALPHA:
						copy(buffer, curLine);
						break;
					default:
						throw new UnsupportedOperationException(
								"Unsupported format for this image");
					}
					break;
				case COLOR_GREYALPHA:
					switch (fmt) {
					case LUMINANCE_ALPHA:
						copy(buffer, curLine);
						break;
					default:
						throw new UnsupportedOperationException(
								"Unsupported format for this image");
					}
					break;
				case COLOR_INDEXED:
					switch (bitdepth) {
					case 8:
						palLine = curLine;
						break;
					case 4:
						expand4(curLine, palLine);
						break;
					case 2:
						expand2(curLine, palLine);
						break;
					case 1:
						expand1(curLine, palLine);
						break;
					default:
						throw new UnsupportedOperationException(
								"Unsupported bitdepth for this image");
					}
					switch (fmt) {
					case ABGR:
						copyPALtoABGR(buffer, palLine);
						break;
					case RGBA:
						copyPALtoRGBA(buffer, palLine);
						break;
					case BGRA:
						copyPALtoBGRA(buffer, palLine);
						break;
					default:
						throw new UnsupportedOperationException(
								"Unsupported format for this image");
					}
					break;
				default:
					throw new UnsupportedOperationException(
							"Not yet implemented");
				}

				// Swap line buffers so curLine becomes next row's prevLine.
				byte[] tmp = curLine;
				curLine = prevLine;
				prevLine = tmp;
			}
		} finally {
			inflater.end();
		}
	}

	/**
	 * Decodes the image into the specified buffer. The last line is placed at
	 * the current position. After decode the buffer position is at the end of
	 * the first line.
	 *
	 * @param buffer
	 *            the buffer
	 * @param stride
	 *            the stride in bytes from start of a line to start of the next
	 *            line, must be positive.
	 * @param fmt
	 *            the target format into which the image should be decoded.
	 * @throws IOException
	 *             if a read or data error occurred
	 * @throws IllegalArgumentException
	 *             if the start position of a line falls outside the buffer
	 * @throws UnsupportedOperationException
	 *             if the image can't be decoded into the desired format
	 */
	public void decodeFlipped(ByteBuffer buffer, int stride, Format fmt)
			throws IOException {
		if (stride <= 0) {
			throw new IllegalArgumentException("stride");
		}
		int pos = buffer.position();
		int posDelta = (height - 1) * stride;
		buffer.position(pos + posDelta);
		decode(buffer, -stride, fmt);
		buffer.position(buffer.position() + posDelta);
	}

	/** Copies the scanline unchanged (skipping the leading filter byte). */
	private void copy(ByteBuffer buffer, byte[] curLine) {
		buffer.put(curLine, 1, curLine.length - 1);
	}

	/** RGB scanline to ABGR, applying tRNS color-key transparency if set. */
	private void copyRGBtoABGR(ByteBuffer buffer, byte[] curLine) {
		if (transPixel != null) {
			// tRNS stores 16-bit samples; the low byte of each is compared.
			byte tr = transPixel[1];
			byte tg = transPixel[3];
			byte tb = transPixel[5];
			for (int i = 1, n = curLine.length; i < n; i += 3) {
				byte r = curLine[i];
				byte g = curLine[i + 1];
				byte b = curLine[i + 2];
				byte a = (byte) 0xFF;
				if (r == tr && g == tg && b == tb) {
					a = 0;
				}
				buffer.put(a).put(b).put(g).put(r);
			}
		} else {
			for (int i = 1, n = curLine.length; i < n; i += 3) {
				buffer.put((byte) 0xFF).put(curLine[i + 2]).put(curLine[i + 1])
						.put(curLine[i]);
			}
		}
	}

	/** RGB scanline to RGBA, applying tRNS color-key transparency if set. */
	private void copyRGBtoRGBA(ByteBuffer buffer, byte[] curLine) {
		if (transPixel != null) {
			byte tr = transPixel[1];
			byte tg = transPixel[3];
			byte tb = transPixel[5];
			for (int i = 1, n = curLine.length; i < n; i += 3) {
				byte r = curLine[i];
				byte g = curLine[i + 1];
				byte b = curLine[i + 2];
				byte a = (byte) 0xFF;
				if (r == tr && g == tg && b == tb) {
					a = 0;
				}
				buffer.put(r).put(g).put(b).put(a);
			}
		} else {
			for (int i = 1, n = curLine.length; i < n; i += 3) {
				buffer.put(curLine[i]).put(curLine[i + 1]).put(curLine[i + 2])
						.put((byte) 0xFF);
			}
		}
	}

	/** RGB scanline to BGRA, applying tRNS color-key transparency if set. */
	private void copyRGBtoBGRA(ByteBuffer buffer, byte[] curLine) {
		if (transPixel != null) {
			byte tr = transPixel[1];
			byte tg = transPixel[3];
			byte tb = transPixel[5];
			for (int i = 1, n = curLine.length; i < n; i += 3) {
				byte r = curLine[i];
				byte g = curLine[i + 1];
				byte b = curLine[i + 2];
				byte a = (byte) 0xFF;
				if (r == tr && g == tg && b == tb) {
					a = 0;
				}
				buffer.put(b).put(g).put(r).put(a);
			}
		} else {
			for (int i = 1, n = curLine.length; i < n; i += 3) {
				buffer.put(curLine[i + 2]).put(curLine[i + 1]).put(curLine[i])
						.put((byte) 0xFF);
			}
		}
	}

	/** RGBA scanline to ABGR (component reversal). */
	private void copyRGBAtoABGR(ByteBuffer buffer, byte[] curLine) {
		for (int i = 1, n = curLine.length; i < n; i += 4) {
			buffer.put(curLine[i + 3]).put(curLine[i + 2]).put(curLine[i + 1])
					.put(curLine[i]);
		}
	}

	/** RGBA scanline to BGRA (red/blue swap). */
	private void copyRGBAtoBGRA(ByteBuffer buffer, byte[] curLine) {
		for (int i = 1, n = curLine.length; i < n; i += 4) {
			buffer.put(curLine[i + 2]).put(curLine[i + 1]).put(curLine[i])
					.put(curLine[i + 3]);
		}
	}

	/** RGBA scanline to RGB (alpha dropped). */
	private void copyRGBAtoRGB(ByteBuffer buffer, byte[] curLine) {
		for (int i = 1, n = curLine.length; i < n; i += 4) {
			buffer.put(curLine[i]).put(curLine[i + 1]).put(curLine[i + 2]);
		}
	}

	/** Palette-index scanline to ABGR, using paletteA for alpha if present. */
	private void copyPALtoABGR(ByteBuffer buffer, byte[] curLine) {
		if (paletteA != null) {
			for (int i = 1, n = curLine.length; i < n; i += 1) {
				int idx = curLine[i] & 255;
				byte r = palette[idx * 3 + 0];
				byte g = palette[idx * 3 + 1];
				byte b = palette[idx * 3 + 2];
				byte a = paletteA[idx];
				buffer.put(a).put(b).put(g).put(r);
			}
		} else {
			for (int i = 1, n = curLine.length; i < n; i += 1) {
				int idx = curLine[i] & 255;
				byte r = palette[idx * 3 + 0];
				byte g = palette[idx * 3 + 1];
				byte b = palette[idx * 3 + 2];
				byte a = (byte) 0xFF;
				buffer.put(a).put(b).put(g).put(r);
			}
		}
	}

	/** Palette-index scanline to RGBA, using paletteA for alpha if present. */
	private void copyPALtoRGBA(ByteBuffer buffer, byte[] curLine) {
		if (paletteA != null) {
			for (int i = 1, n = curLine.length; i < n; i += 1) {
				int idx = curLine[i] & 255;
				byte r = palette[idx * 3 + 0];
				byte g = palette[idx * 3 + 1];
				byte b = palette[idx * 3 + 2];
				byte a = paletteA[idx];
				buffer.put(r).put(g).put(b).put(a);
			}
		} else {
			for (int i = 1, n = curLine.length; i < n; i += 1) {
				int idx = curLine[i] & 255;
				byte r = palette[idx * 3 + 0];
				byte g = palette[idx * 3 + 1];
				byte b = palette[idx * 3 + 2];
				byte a = (byte) 0xFF;
				buffer.put(r).put(g).put(b).put(a);
			}
		}
	}

	/** Palette-index scanline to BGRA, using paletteA for alpha if present. */
	private void copyPALtoBGRA(ByteBuffer buffer, byte[] curLine) {
		if (paletteA != null) {
			for (int i = 1, n = curLine.length; i < n; i += 1) {
				int idx = curLine[i] & 255;
				byte r = palette[idx * 3 + 0];
				byte g = palette[idx * 3 + 1];
				byte b = palette[idx * 3 + 2];
				byte a = paletteA[idx];
				buffer.put(b).put(g).put(r).put(a);
			}
		} else {
			for (int i = 1, n = curLine.length; i < n; i += 1) {
				int idx = curLine[i] & 255;
				byte r = palette[idx * 3 + 0];
				byte g = palette[idx * 3 + 1];
				byte b = palette[idx * 3 + 2];
				byte a = (byte) 0xFF;
				buffer.put(b).put(g).put(r).put(a);
			}
		}
	}

	/**
	 * Expands 4-bit palette indices to one index per byte. The switch falls
	 * through intentionally so a partial final byte is handled correctly.
	 */
	private void expand4(byte[] src, byte[] dst) {
		for (int i = 1, n = dst.length; i < n; i += 2) {
			int val = src[1 + (i >> 1)] & 255;
			switch (n - i) {
			default:
				dst[i + 1] = (byte) (val & 15);
			case 1:
				dst[i] = (byte) (val >> 4);
			}
		}
	}

	/**
	 * Expands 2-bit palette indices to one index per byte. The switch falls
	 * through intentionally so a partial final byte is handled correctly.
	 */
	private void expand2(byte[] src, byte[] dst) {
		for (int i = 1, n = dst.length; i < n; i += 4) {
			int val = src[1 + (i >> 2)] & 255;
			switch (n - i) {
			default:
				dst[i + 3] = (byte) ((val) & 3);
			case 3:
				dst[i + 2] = (byte) ((val >> 2) & 3);
			case 2:
				dst[i + 1] = (byte) ((val >> 4) & 3);
			case 1:
				dst[i] = (byte) ((val >> 6));
			}
		}
	}

	/**
	 * Expands 1-bit palette indices to one index per byte. The switch falls
	 * through intentionally so a partial final byte is handled correctly.
	 */
	private void expand1(byte[] src, byte[] dst) {
		for (int i = 1, n = dst.length; i < n; i += 8) {
			int val = src[1 + (i >> 3)] & 255;
			switch (n - i) {
			default:
				dst[i + 7] = (byte) ((val) & 1);
			case 7:
				dst[i + 6] = (byte) ((val >> 1) & 1);
			case 6:
				dst[i + 5] = (byte) ((val >> 2) & 1);
			case 5:
				dst[i + 4] = (byte) ((val >> 3) & 1);
			case 4:
				dst[i + 3] = (byte) ((val >> 4) & 1);
			case 3:
				dst[i + 2] = (byte) ((val >> 5) & 1);
			case 2:
				dst[i + 1] = (byte) ((val >> 6) & 1);
			case 1:
				dst[i] = (byte) ((val >> 7));
			}
		}
	}

	/**
	 * Reverses the PNG scanline filter indicated by the leading filter byte.
	 *
	 * @throws IOException if the filter type is not one of the five defined
	 */
	private void unfilter(byte[] curLine, byte[] prevLine) throws IOException {
		switch (curLine[0]) {
		case 0: // none
			break;
		case 1: // sub
			unfilterSub(curLine);
			break;
		case 2: // up
			unfilterUp(curLine, prevLine);
			break;
		case 3: // average
			unfilterAverage(curLine, prevLine);
			break;
		case 4: // paeth
			unfilterPaeth(curLine, prevLine);
			break;
		default:
			throw new IOException("invalid filter type in scanline: "
					+ curLine[0]);
		}
	}

	/** Reverses the "Sub" filter: each byte adds the byte one pixel left. */
	private void unfilterSub(byte[] curLine) {
		final int bpp = this.bytesPerPixel;
		for (int i = bpp + 1, n = curLine.length; i < n; ++i) {
			curLine[i] += curLine[i - bpp];
		}
	}

	/** Reverses the "Up" filter: each byte adds the byte directly above. */
	private void unfilterUp(byte[] curLine, byte[] prevLine) {
		for (int i = 1, n = curLine.length; i < n; ++i) {
			curLine[i] += prevLine[i];
		}
	}

	/** Reverses the "Average" filter: adds the mean of left and above bytes. */
	private void unfilterAverage(byte[] curLine, byte[] prevLine) {
		final int bpp = this.bytesPerPixel;

		int i;
		// First pixel has no left neighbour; only the above byte contributes.
		for (i = 1; i <= bpp; ++i) {
			curLine[i] += (byte) ((prevLine[i] & 0xFF) >>> 1);
		}
		for (int n = curLine.length; i < n; ++i) {
			curLine[i] += (byte) (((prevLine[i] & 0xFF) + (curLine[i - bpp] & 0xFF)) >>> 1);
		}
	}

	/** Reverses the "Paeth" filter using the Paeth predictor function. */
	private void unfilterPaeth(byte[] curLine, byte[] prevLine) {
		final int bpp = this.bytesPerPixel;

		int i;
		// First pixel has no left/upper-left neighbour; predictor is "above".
		for (i = 1; i <= bpp; ++i) {
			curLine[i] += prevLine[i];
		}
		for (int n = curLine.length; i < n; ++i) {
			int a = curLine[i - bpp] & 255;
			int b = prevLine[i] & 255;
			int c = prevLine[i - bpp] & 255;
			int p = a + b - c;
			int pa = p - a;
			if (pa < 0)
				pa = -pa;
			int pb = p - b;
			if (pb < 0)
				pb = -pb;
			int pc = p - c;
			if (pc < 0)
				pc = -pc;
			if (pa <= pb && pa <= pc)
				c = a;
			else if (pb <= pc)
				c = b;
			curLine[i] += (byte) c;
		}
	}

	/**
	 * Parses the 13-byte IHDR chunk and validates that the image uses a
	 * supported bit depth / color type / compression / filter / interlace.
	 */
	private void readIHDR() throws IOException {
		checkChunkLength(13);
		readChunk(buffer, 0, 13);
		width = readInt(buffer, 0);
		height = readInt(buffer, 4);
		bitdepth = buffer[8] & 255;
		colorType = buffer[9] & 255;

		switch (colorType) {
		case COLOR_GREYSCALE:
			if (bitdepth != 8) {
				throw new IOException("Unsupported bit depth: " + bitdepth);
			}
			bytesPerPixel = 1;
			break;
		case COLOR_GREYALPHA:
			if (bitdepth != 8) {
				throw new IOException("Unsupported bit depth: " + bitdepth);
			}
			bytesPerPixel = 2;
			break;
		case COLOR_TRUECOLOR:
			if (bitdepth != 8) {
				throw new IOException("Unsupported bit depth: " + bitdepth);
			}
			bytesPerPixel = 3;
			break;
		case COLOR_TRUEALPHA:
			if (bitdepth != 8) {
				throw new IOException("Unsupported bit depth: " + bitdepth);
			}
			bytesPerPixel = 4;
			break;
		case COLOR_INDEXED:
			switch (bitdepth) {
			case 8:
			case 4:
			case 2:
			case 1:
				bytesPerPixel = 1;
				break;
			default:
				throw new IOException("Unsupported bit depth: " + bitdepth);
			}
			break;
		default:
			throw new IOException("unsupported color format: " + colorType);
		}

		if (buffer[10] != 0) {
			throw new IOException("unsupported compression method");
		}
		if (buffer[11] != 0) {
			throw new IOException("unsupported filtering method");
		}
		if (buffer[12] != 0) {
			throw new IOException("unsupported interlace method");
		}
	}

	/** Reads the PLTE chunk: up to 256 RGB triplets. */
	private void readPLTE() throws IOException {
		int paletteEntries = chunkLength / 3;
		if (paletteEntries < 1 || paletteEntries > 256
				|| (chunkLength % 3) != 0) {
			throw new IOException("PLTE chunk has wrong length");
		}
		palette = new byte[paletteEntries * 3];
		readChunk(palette, 0, palette.length);
	}

	/** Reads the tRNS (transparency) chunk; its layout depends on color type. */
	private void readtRNS() throws IOException {
		switch (colorType) {
		case COLOR_GREYSCALE:
			checkChunkLength(2);
			transPixel = new byte[2];
			readChunk(transPixel, 0, 2);
			break;
		case COLOR_TRUECOLOR:
			checkChunkLength(6);
			transPixel = new byte[6];
			readChunk(transPixel, 0, 6);
			break;
		case COLOR_INDEXED:
			if (palette == null) {
				throw new IOException("tRNS chunk without PLTE chunk");
			}
			// Entries beyond the chunk length default to fully opaque.
			paletteA = new byte[palette.length / 3];
			Arrays.fill(paletteA, (byte) 0xFF);
			readChunk(paletteA, 0, paletteA.length);
			break;
		default:
			// just ignore it
		}
	}

	/**
	 * Finishes the current chunk: either skips the unread remainder plus CRC,
	 * or (when fully read) verifies the stored CRC against the computed one.
	 */
	private void closeChunk() throws IOException {
		if (chunkRemaining > 0) {
			// just skip the rest and the CRC
			skip(chunkRemaining + 4);
		} else {
			readFully(buffer, 0, 4);
			int expectedCrc = readInt(buffer, 0);
			int computedCrc = (int) crc.getValue();
			if (computedCrc != expectedCrc) {
				throw new IOException("Invalid CRC");
			}
		}
		chunkRemaining = 0;
		chunkLength = 0;
		chunkType = 0;
	}

	/** Reads the next chunk header (length + type) and resets the CRC. */
	private void openChunk() throws IOException {
		readFully(buffer, 0, 8);
		chunkLength = readInt(buffer, 0);
		chunkType = readInt(buffer, 4);
		chunkRemaining = chunkLength;
		crc.reset();
		crc.update(buffer, 4, 4); // only chunkType
	}

	/** Reads the next chunk header and checks it is the expected type. */
	private void openChunk(int expected) throws IOException {
		openChunk();
		if (chunkType != expected) {
			throw new IOException("Expected chunk: "
					+ Integer.toHexString(expected));
		}
	}

	private void checkChunkLength(int expected) throws IOException {
		if (chunkLength != expected) {
			throw new IOException("Chunk has wrong size");
		}
	}

	/**
	 * Reads up to {@code length} bytes of chunk payload, updating the running
	 * CRC, and returns how many bytes were actually read (capped at the bytes
	 * remaining in the current chunk).
	 */
	private int readChunk(byte[] buffer, int offset, int length)
			throws IOException {
		if (length > chunkRemaining) {
			length = chunkRemaining;
		}
		readFully(buffer, offset, length);
		crc.update(buffer, offset, length);
		chunkRemaining -= length;
		return length;
	}

	/**
	 * Supplies the inflater with more compressed data, advancing to the next
	 * IDAT chunk when the current one is exhausted.
	 */
	private void refillInflater(Inflater inflater) throws IOException {
		while (chunkRemaining == 0) {
			closeChunk();
			openChunk(IDAT);
		}
		int read = readChunk(buffer, 0, buffer.length);
		inflater.setInput(buffer, 0, read);
	}

	/** Inflates exactly {@code length} bytes of image data into {@code buffer}. */
	private void readChunkUnzip(Inflater inflater, byte[] buffer, int offset,
			int length) throws IOException {
		assert (buffer != this.buffer);
		try {
			do {
				int read = inflater.inflate(buffer, offset, length);
				if (read <= 0) {
					if (inflater.finished()) {
						throw new EOFException();
					}
					if (inflater.needsInput()) {
						refillInflater(inflater);
					} else {
						throw new IOException("Can't inflate " + length
								+ " bytes");
					}
				} else {
					offset += read;
					length -= read;
				}
			} while (length > 0);
		} catch (DataFormatException ex) {
			throw (IOException) (new IOException("inflate error").initCause(ex));
		}
	}

	/** Reads exactly {@code length} bytes or throws {@link EOFException}. */
	private void readFully(byte[] buffer, int offset, int length)
			throws IOException {
		do {
			int read = input.read(buffer, offset, length);
			if (read < 0) {
				throw new EOFException();
			}
			offset += read;
			length -= read;
		} while (length > 0);
	}

	/** Reads a big-endian 32-bit integer from the byte array. */
	private int readInt(byte[] buffer, int offset) {
		return ((buffer[offset]) << 24) | ((buffer[offset + 1] & 255) << 16)
				| ((buffer[offset + 2] & 255) << 8)
				| ((buffer[offset + 3] & 255));
	}

	/**
	 * Skips {@code amount} bytes of input. InputStream.skip is specified to
	 * return a non-negative count and may legitimately skip 0 bytes, so the
	 * old {@code skipped < 0} check could never fire and a stream at EOF (or
	 * one that declines to skip) caused an infinite loop. We now fall back to
	 * a blocking single-byte read to guarantee progress and detect EOF.
	 */
	private void skip(long amount) throws IOException {
		while (amount > 0) {
			long skipped = input.skip(amount);
			if (skipped <= 0) {
				if (input.read() < 0) {
					throw new EOFException();
				}
				amount -= 1;
			} else {
				amount -= skipped;
			}
		}
	}

	/** @return true if the first 8 bytes of {@code buffer} match {@link #SIGNATURE} */
	private static boolean checkSignature(byte[] buffer) {
		for (int i = 0; i < SIGNATURE.length; i++) {
			if (buffer[i] != SIGNATURE[i]) {
				return false;
			}
		}
		return true;
	}
}
| bsd-2-clause |
synergynet/synergynet2.1 | SynergySpace2.1/src/synergyspace/jme/mmt/TransferController.java | 8650 | /*
* Copyright (c) 2009 University of Durham, England
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'SynergySpace' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package synergyspace.jme.mmt;
import java.util.ArrayList;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.logging.Logger;
import synergyspace.jme.cursorsystem.MultiTouchElement;
import synergyspace.jme.mmt.flicksystem.FlickSystem;
import synergyspace.jme.mmt.messages.createmessages.TransferredSpatialCreateMessage;
import synergyspace.jme.mmt.messages.mtmessages.AnnounceTablesListMessage;
import synergyspace.jme.mmt.messages.mtmessages.MTMessage;
import synergyspace.jme.mmt.messages.mtmessages.RegisterTableMessage;
import synergyspace.jme.mmt.messages.mtmessages.UnregisterTableMessage;
import synergyspace.jme.mmt.utility.SpatialMessageMapper;
import synergyspace.jme.mmt.utility.TableInfo;
import synergyspace.jme.mmt.utility.TransferArea;
import synergyspace.jmeapps.networkflick.Client;
import com.captiveimagination.jgn.clientserver.JGNClient;
import com.jme.math.Vector2f;
import com.jme.math.Vector3f;
import com.jme.scene.Node;
import com.jme.scene.Spatial;
import com.jme.system.DisplaySystem;
import com.jme.util.GameTaskQueueManager;
/**
 * Coordinates "network flick" transfers of spatials between tables connected
 * through a JGN client/server session. Each remote table is represented
 * locally by a {@link TransferArea}; when a flicked spatial collides with one
 * of these areas it is detached locally and serialized to the corresponding
 * remote client. Table positions appear to live in a shared world coordinate
 * space from which per-table local positions are derived.
 */
public class TransferController
{
	private static final Logger log = Logger.getLogger(TransferController.class.getName());
	// One area per known remote table. Concurrent queue because areas are
	// added/removed from network message handling while applyTransfer iterates.
	private ConcurrentLinkedQueue<TransferArea> transferAreas = new ConcurrentLinkedQueue<TransferArea>();
	private JGNClient client;
	// Non-null only while this table is registered with the server.
	private TableInfo localTableInfo = null;
	// Scene-graph root that spatials and transfer areas are attached to.
	private Node rootNode;
	// Converts between MultiTouchElements and network create messages.
	private SpatialMessageMapper spatialMsgMapper;

	/**
	 * @param client the network client used to reach the server and peers
	 * @param rootNode the scene-graph node content is attached to/detached from
	 */
	public TransferController(JGNClient client, Node rootNode)
	{
		this.client = client;
		this.rootNode = rootNode;
		spatialMsgMapper = new SpatialMessageMapper();
	}

	/**
	 * Checks whether a moving element has entered any transfer area and, if
	 * so, hands it off to that area's remote table: the element is made
	 * unflickable, detached from the local scene and sent to the remote
	 * client together with its velocity and deceleration so the flick can
	 * continue remotely.
	 *
	 * @param movingElement the element currently being flicked
	 * @param linearVelocity its current velocity, forwarded to the remote table
	 * @param deceleration its deceleration, forwarded to the remote table
	 */
	public void applyTransfer(MultiTouchElement movingElement, Vector3f linearVelocity, float deceleration)
	{
		for(TransferArea transferArea : transferAreas)
		{
			if(movingElement.getTargetSpatial().hasCollision(transferArea, true))
			{
				FlickSystem.getInstance().makeUnflickable(movingElement);
				// Calculate position in world space
				Vector3f newPosition = movingElement.getTargetSpatial().getLocalTranslation().add(localTableInfo.getTablePosition().x,localTableInfo.getTablePosition().y,0);
				// NOTE(review): content type is hard-coded to "QuadWithImage" --
				// presumably the only transferable content type; confirm.
				TransferredSpatialCreateMessage message = spatialMsgMapper.getCreateMessageForTransferredMultiTouchElement(movingElement, transferArea.getClientId(),"QuadWithImage", newPosition, linearVelocity, deceleration);
				detachSpatial(movingElement.getTargetSpatial());
				client.sendToPlayer(message, transferArea.getClientId());
				return;
			}
		}
	}

	/**
	 * Handles table-management messages from the server. Ignored entirely
	 * until the local table has registered. Handles: the announce message
	 * listing all known tables, registration of a single new remote table,
	 * and unregistration (of a remote table, or of this table itself, which
	 * clears all transfer areas and the local registration).
	 *
	 * @param message the incoming multi-table message
	 */
	public void applyMTMessage(MTMessage message)
	{
		if(!isLocalTableRegistered())
			return;

		if(message instanceof AnnounceTablesListMessage)
		{
			log.info("Announce message received");
			AnnounceTablesListMessage msg = (AnnounceTablesListMessage) message;
			ArrayList<TableInfo> tablesInfo = msg.getTablesInfo();
			for(TableInfo tableInfo : tablesInfo)
				registerRemoteTable(tableInfo);
		}
		if(message instanceof RegisterTableMessage)
		{
			RegisterTableMessage msg = (RegisterTableMessage) message;
			log.info("register table msg : "+ msg.getClientId() + " , "+ msg.getTablePosition());
			registerRemoteTable(msg.getTableInfo());
		}
		if(message instanceof UnregisterTableMessage)
		{
			UnregisterTableMessage msg = (UnregisterTableMessage) message;
			if(msg.getClientId() == localTableInfo.getClientId())
			{
				// The local table was unregistered: drop every transfer area
				// and forget the local registration.
				for(TransferArea transferArea : transferAreas)
					detachSpatial(transferArea);
				transferAreas.clear();
				localTableInfo = null;
			}
			else
			{
				// A remote table left: remove just its transfer area.
				TransferArea transferArea = findAreaById(msg.getClientId());
				if(transferArea != null)
				{
					transferAreas.remove(transferArea);
					detachSpatial(transferArea);
				}
			}
		}
	}

	/**
	 * Receives a spatial transferred from another table: reconstructs the
	 * multi-touch element, converts the sender's world position into local
	 * coordinates, attaches it to the scene and resumes its flick with the
	 * transmitted velocity and deceleration. No-op until the local table is
	 * registered.
	 *
	 * @param message the create message describing the transferred spatial
	 */
	public void applySpatialCreateMessage(TransferredSpatialCreateMessage message)
	{
		if(!isLocalTableRegistered())
			return;
		MultiTouchElement multiTouchElement = spatialMsgMapper.getTransferredMultiTouchElementFromCreateMessage(Client.class, message);
		// Translate world position to local position
		Vector3f relativePosition = message.getPosition().subtract(localTableInfo.getTablePosition().x,localTableInfo.getTablePosition().y,0);
		multiTouchElement.getTargetSpatial().setLocalTranslation(relativePosition);
		FlickSystem.getInstance().makeFlickable(multiTouchElement.getTargetSpatial(), multiTouchElement, message.getDeceleration());
		attachSpatial(multiTouchElement.getTargetSpatial());
		FlickSystem.getInstance().flick(multiTouchElement, message.getLinearVelocity(), message.getDeceleration());
	}

	/**
	 * Creates and attaches a transfer area for a remote table, positioned
	 * relative to the local table. Does nothing if an area for that client
	 * already exists or the table is the local one.
	 *
	 * @param remoteTableInfo the remote table's id, position and size
	 * @return true if a new area was created, false otherwise
	 */
	private boolean registerRemoteTable(TableInfo remoteTableInfo)
	{
		TransferArea temp = findAreaById(remoteTableInfo.getClientId());
		if(temp == null && remoteTableInfo.getClientId() != localTableInfo.getClientId())
		{
			TransferArea transferArea = new TransferArea(remoteTableInfo);
			Vector2f relativePosition = remoteTableInfo.getTablePosition().subtract(localTableInfo.getTablePosition());
			transferArea.setLocalTranslation(relativePosition.x, relativePosition.y, 0);
			attachSpatial(transferArea);
			transferAreas.add(transferArea);
			return true;
		}
		else
			return false;
	}

	/**
	 * @param clientId the id of the remote table's client
	 * @return the matching transfer area, or null if none is known
	 */
	private TransferArea findAreaById(short clientId)
	{
		for(TransferArea area : transferAreas)
		{
			if(area.getClientId() == clientId)
				return area;
		}
		return null;
	}

	/**
	 * Registers this table with the server at the given world position, using
	 * the current display dimensions as the table size.
	 *
	 * @param tablePosition this table's position in the shared coordinate space
	 */
	public void registerLocalTable(Vector2f tablePosition)
	{
		if(client != null && tablePosition != null)
		{
			localTableInfo = new TableInfo(client.getPlayerId(),tablePosition, DisplaySystem.getDisplaySystem().getWidth(), DisplaySystem.getDisplaySystem().getHeight());
			RegisterTableMessage msg = new RegisterTableMessage(localTableInfo);
			client.sendToServer(msg);
		}
	}

	/**
	 * Unregisters this table: notifies the server and also applies the
	 * unregister message locally (the local apply tears down transfer areas
	 * without waiting for a server round-trip).
	 */
	public void unregisterTable()
	{
		if(client != null && localTableInfo != null)
		{
			UnregisterTableMessage msg = new UnregisterTableMessage(client.getPlayerId());
			client.sendToServer(msg);
			this.applyMTMessage(msg);
		}
	}

	/** @return true if this table currently has an active registration */
	private boolean isLocalTableRegistered()
	{
		if(localTableInfo != null)
			return true;
		return false;
	}

	/**
	 * Attaches a spatial to the scene root on the OpenGL update thread via
	 * the game task queue (scene-graph mutation is not safe from network
	 * threads).
	 */
	private void attachSpatial(Spatial spatial)
	{
		final Spatial temp = spatial;
		GameTaskQueueManager.getManager().update(new Callable<Object>() {
			public Object call() throws Exception {
				rootNode.attachChild(temp);
				rootNode.updateRenderState();
				rootNode.updateGeometricState(0f, false);
				return null;
			}
		});
	}

	/**
	 * Detaches a spatial from the scene root on the OpenGL update thread via
	 * the game task queue.
	 */
	private void detachSpatial(Spatial spatial)
	{
		final Spatial temp = spatial;
		GameTaskQueueManager.getManager().update(new Callable<Object>() {
			public Object call() throws Exception {
				rootNode.detachChild(temp);
				rootNode.updateRenderState();
				rootNode.updateGeometricState(0f, false);
				return null;
			}
		});
	}
}
| bsd-2-clause |
synergynet/synergynet2.1 | SynergySpace2.1/src_synergynet/synergynet/services/net/networkedcontentmanager/NetworkedContentListener.java | 2328 | /*
* Copyright (c) 2009 University of Durham, England
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'SynergySpace' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package synergynet.services.net.networkedcontentmanager;
import java.util.Map;
import synergynet.contentsystem.items.ContentItem;
import synergynet.contentsystem.items.OrthoContentItem;
import synergynet.services.net.networkedcontentmanager.utils.RemoteDesktop;
/**
 * Callback interface for events raised by the networked content manager:
 * content synchronisation, channel switching, and rendering of remote
 * desktops. Implementations presumably update the local UI in response —
 * the manager that fires these events is outside this file.
 */
public interface NetworkedContentListener {

	/**
	 * Called when a content item's state has been synchronised from a peer.
	 *
	 * @param item the synchronised content item
	 * @param itemAttrs attribute name/value pairs describing the item's state
	 */
	public void renderSynchronisedDate(ContentItem item, Map<String, String> itemAttrs);

	/** Called when local content has finished loading. */
	public void contentLoaded();

	/** Called when the active communication channel has been switched. */
	public void channelSwitched();

	/**
	 * Called when content belonging to a remote desktop has been loaded.
	 *
	 * @param remoteDesktop the remote desktop whose content is available
	 */
	public void remoteContentLoaded(RemoteDesktop remoteDesktop);

	/**
	 * Called to render a single item of a remote desktop.
	 *
	 * @param remoteDesktop the remote desktop being rendered
	 * @param item the item to render
	 * @param map attribute name/value pairs for the item
	 */
	public void renderRemoteDesktop(RemoteDesktop remoteDesktop, OrthoContentItem item, Map<String, String> map);

	/**
	 * Called when an individual content item has been loaded.
	 *
	 * @param item the loaded content item
	 */
	public void contentItemLoaded(ContentItem item);
}
| bsd-2-clause |
bencall/RPlay | src/biquad_t.java | 192 |
/**
 * Plain data holder mirroring a C {@code biquad_t} struct: state and
 * coefficients for one biquad (second-order IIR) filter section.
 * NOTE(review): the file is named {@code biquad_t.java} while the public
 * class is {@code Biquad_t} — javac requires these to match; confirm the
 * on-disk filename.
 *
 * @author bencall
 */
public class Biquad_t {
	/** Two-sample filter history (delay line). */
	public double[] hist = { 0.0, 0.0 };
	/** Feedback coefficients — presumably a1, a2 of the standard biquad form. */
	public double[] a = { 0.0, 0.0 };
	/** Feed-forward coefficients — presumably b0, b1, b2. */
	public double[] b = { 0.0, 0.0, 0.0 };
}
beiyuxinke/CONNECT | Product/Production/Common/CONNECTCoreLib/src/main/java/gov/hhs/fha/nhinc/util/NhincCollections.java | 2442 | /*
* Copyright (c) 2009-2015, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.util;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import java.util.List;
/**
* Helper classes for collection objects.
*/
public final class NhincCollections {
    private NhincCollections() {
        // Utility class: private constructor prevents instantiation.
    }
public static <T> List<T> fillAbsents(List<Optional<T>> list, final T fillValue) {
Function<Optional<T>, T> helper = new Function<Optional<T>, T>() {
@Override
public T apply(Optional<T> t) {
if (t != null) {
return t.or(fillValue);
} else {
return null;
}
}
};
return Lists.transform(list, helper);
}
} | bsd-3-clause |
frc3528/upnext2013code | src/com/teamupnext/robot/commands/IncreaseSensitivity.java | 1002 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.teamupnext.robot.commands;
/**
*
* @author Team Up Next
*/
public class IncreaseSensitivity extends CommandBase {

    public IncreaseSensitivity() {
        // Use requires() here to declare subsystem dependencies
        // eg. requires(chassis);
        // NOTE(review): execute() uses the driveTrain subsystem, so this command
        // probably should declare requires(driveTrain) -- confirm against the
        // other commands in this package before adding it.
    }

    // Called just before this Command runs the first time
    protected void initialize() {
    }

    // Called repeatedly when this Command is scheduled to run
    protected void execute() {
        // Delegates to the drive-train subsystem (inherited from CommandBase).
        driveTrain.increaseSensitivity();
    }

    // Make this return true when this Command no longer needs to run execute()
    protected boolean isFinished() {
        // One-shot command: finishes immediately after the first execute().
        return true;
    }

    // Called once after isFinished returns true
    protected void end() {
    }

    // Called when another command which requires one or more of the same
    // subsystems is scheduled to run
    protected void interrupted() {
    }
}
| bsd-3-clause |
cisco/PDTool | CISAdminApi8.0.0/src/com/compositesw/services/system/admin/resource/PortOperationProperty.java | 7366 |
package com.compositesw.services.system.admin.resource;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for portOperationProperty complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="portOperationProperty">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="operationName" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="operationStyle" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="operationSoapAction" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="operationMessageType" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="operationAckMode" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="operationTimeout" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="operationPriority" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="operationExpiry" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="operationDelieveryMode" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "portOperationProperty", propOrder = {
"operationName",
"operationStyle",
"operationSoapAction",
"operationMessageType",
"operationAckMode",
"operationTimeout",
"operationPriority",
"operationExpiry",
"operationDelieveryMode"
})
public class PortOperationProperty {

    // NOTE: JAXB-generated class; field names and order are bound to the schema
    // via the @XmlType propOrder above -- do not rename fields by hand.
    @XmlElement(required = true)
    protected String operationName;
    protected String operationStyle;
    protected String operationSoapAction;
    protected String operationMessageType;
    protected String operationAckMode;
    protected Long operationTimeout;
    protected Integer operationPriority;
    protected Long operationExpiry;
    // NOTE(review): "Delievery" is a misspelling of "Delivery", but it is part of
    // the generated XML binding (see propOrder) and cannot be renamed safely.
    protected Integer operationDelieveryMode;

    /**
     * Gets the value of the operationName property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getOperationName() {
        return operationName;
    }

    /**
     * Sets the value of the operationName property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setOperationName(String value) {
        this.operationName = value;
    }

    /**
     * Gets the value of the operationStyle property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getOperationStyle() {
        return operationStyle;
    }

    /**
     * Sets the value of the operationStyle property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setOperationStyle(String value) {
        this.operationStyle = value;
    }

    /**
     * Gets the value of the operationSoapAction property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getOperationSoapAction() {
        return operationSoapAction;
    }

    /**
     * Sets the value of the operationSoapAction property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setOperationSoapAction(String value) {
        this.operationSoapAction = value;
    }

    /**
     * Gets the value of the operationMessageType property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getOperationMessageType() {
        return operationMessageType;
    }

    /**
     * Sets the value of the operationMessageType property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setOperationMessageType(String value) {
        this.operationMessageType = value;
    }

    /**
     * Gets the value of the operationAckMode property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getOperationAckMode() {
        return operationAckMode;
    }

    /**
     * Sets the value of the operationAckMode property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setOperationAckMode(String value) {
        this.operationAckMode = value;
    }

    /**
     * Gets the value of the operationTimeout property.
     * 
     * @return
     *     possible object is
     *     {@link Long }
     *     
     */
    public Long getOperationTimeout() {
        return operationTimeout;
    }

    /**
     * Sets the value of the operationTimeout property.
     * 
     * @param value
     *     allowed object is
     *     {@link Long }
     *     
     */
    public void setOperationTimeout(Long value) {
        this.operationTimeout = value;
    }

    /**
     * Gets the value of the operationPriority property.
     * 
     * @return
     *     possible object is
     *     {@link Integer }
     *     
     */
    public Integer getOperationPriority() {
        return operationPriority;
    }

    /**
     * Sets the value of the operationPriority property.
     * 
     * @param value
     *     allowed object is
     *     {@link Integer }
     *     
     */
    public void setOperationPriority(Integer value) {
        this.operationPriority = value;
    }

    /**
     * Gets the value of the operationExpiry property.
     * 
     * @return
     *     possible object is
     *     {@link Long }
     *     
     */
    public Long getOperationExpiry() {
        return operationExpiry;
    }

    /**
     * Sets the value of the operationExpiry property.
     * 
     * @param value
     *     allowed object is
     *     {@link Long }
     *     
     */
    public void setOperationExpiry(Long value) {
        this.operationExpiry = value;
    }

    /**
     * Gets the value of the operationDelieveryMode property.
     * 
     * @return
     *     possible object is
     *     {@link Integer }
     *     
     */
    public Integer getOperationDelieveryMode() {
        return operationDelieveryMode;
    }

    /**
     * Sets the value of the operationDelieveryMode property.
     * 
     * @param value
     *     allowed object is
     *     {@link Integer }
     *     
     */
    public void setOperationDelieveryMode(Integer value) {
        this.operationDelieveryMode = value;
    }

}
| bsd-3-clause |
msf-oca-his/dhis-core | dhis-2/dhis-api/src/main/java/org/hisp/dhis/scheduling/SchedulingManager.java | 5157 | package org.hisp.dhis.scheduling;
/*
* Copyright (c) 2004-2018, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.Date;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ScheduledFuture;
import org.springframework.util.concurrent.ListenableFuture;
/**
* Interface for scheduling jobs.
* <p>
* <p>
* The main steps of the scheduling:
* <p>
* <ul>
* <li>Create a job configuration {@link JobConfiguration}</li>
* <li>This job configuration needs a job specific parameters object {@link JobParameters}, ie {@link org.hisp.dhis.scheduling.parameters.AnalyticsJobParameters}.</li>
* <li>Call scheduleJob with the job configuration.</li>
* <li>The schedulingManager calls the spring scheduler with a runnable object {@link JobInstance}.</li>
* <li>When the cron expression occurs the job will try to execute from the runnable object, job instance.</li>
* </ul>
*
* @author Henning Håkonsen
*/
public interface SchedulingManager
{
    /**
     * Checks whether the given job configuration is currently running.
     *
     * @param jobConfiguration the job to check
     * @return true if the job is currently running, false otherwise
     */
    boolean isJobConfigurationRunning( JobConfiguration jobConfiguration );

    /**
     * Sets up default behavior for a started job.
     *
     * @param jobConfiguration the job which started
     */
    void jobConfigurationStarted( JobConfiguration jobConfiguration );

    /**
     * Sets up default behavior for a finished job.
     * <p>
     * A special case is if a job is disabled while running, but the job does not stop. The job will run normally one
     * last time and try to set finished status. Since the job is disabled we manually set these parameters in this
     * method so that the job is not automatically rescheduled.
     * <p>
     * Also we don't want to update a job configuration if the job is deleted.
     *
     * @param jobConfiguration the job which finished
     */
    void jobConfigurationFinished( JobConfiguration jobConfiguration );

    /**
     * Gets a job based on the job type.
     *
     * @param jobType the job type for the job we want to collect
     * @return the job registered for the given type
     */
    Job getJob( JobType jobType );

    /**
     * Schedules a job with the given job configuration (typically via its cron expression).
     *
     * @param jobConfiguration the job to schedule.
     */
    void scheduleJob( JobConfiguration jobConfiguration );

    /**
     * Stops one job.
     *
     * @param jobConfiguration the job to stop
     */
    void stopJob( JobConfiguration jobConfiguration );

    /**
     * Executes the job immediately.
     *
     * @param jobConfiguration The configuration of the job to be executed
     * @return whether the execution was initiated -- exact semantics are
     *         implementation-defined; confirm with the implementing class
     */
    boolean executeJob( JobConfiguration jobConfiguration );

    /**
     * Executes an actual job without validation.
     *
     * @param job The job to be executed
     */
    void executeJob( Runnable job );

    /**
     * Schedules a job with a start time instead of a cron expression.
     *
     * @param jobConfiguration The jobConfiguration with job details to be scheduled
     * @param startTime The time at which the job should start
     */
    void scheduleJobWithStartTime( JobConfiguration jobConfiguration, Date startTime );

    /**
     * Executes the given job immediately and returns a ListenableFuture.
     *
     * @param callable the job to execute.
     * @param <T> return type of the supplied callable.
     * @return a ListenableFuture representing the result of the job.
     */
    <T> ListenableFuture<T> executeJob( Callable<T> callable );

    /**
     * Returns all scheduled jobs.
     *
     * @return map from job key to its scheduled future
     */
    Map<String, ScheduledFuture<?>> getAllFutureJobs();
}
| bsd-3-clause |
beiyuxinke/CONNECT | Product/Production/Adapters/General/CONNECTDirectConfig/src/main/java/gov/hhs/fha/nhinc/directconfig/service/jaxws/UpdateTrustBundleAnchorsResponse.java | 3721 | /*
* Copyright (c) 2009-2015, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
Copyright (c) 2010, NHIN Direct Project
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the distribution.
3. Neither the name of the The NHIN Direct Project (nhindirect.org) nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.directconfig.service.jaxws;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
 * JAX-WS/JAXB wrapper for the {@code updateTrustBundleAnchorsResponse} element
 * in the {@code http://nhind.org/config} namespace.
 */
@XmlRootElement(name = "updateTrustBundleAnchorsResponse", namespace = "http://nhind.org/config")
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "updateTrustBundleAnchorsResponse", namespace = "http://nhind.org/config")
public class UpdateTrustBundleAnchorsResponse {
    // Intentionally empty: the operation apparently carries no response payload;
    // this class exists only to shape the response XML element.
}
| bsd-3-clause |
cisco/PDTool | CISAdminApi8.0.0/src/com/compositesw/services/system/util/security/UpdateSecurityBundleResponse.java | 5355 |
package com.compositesw.services.system.util.security;
import java.math.BigInteger;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import com.compositesw.services.system.util.common.BaseResponse;
/**
* <p>Java class for updateSecurityBundleResponse complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="updateSecurityBundleResponse">
* <complexContent>
* <extension base="{http://www.compositesw.com/services/system/util/common}baseResponse">
* <sequence>
* <element name="id" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="projectRoot" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="bundleName" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="isEnabled" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
* <element name="isUpdateable" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
* <element name="instanceCount" type="{http://www.w3.org/2001/XMLSchema}integer"/>
* <element name="annotation" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "updateSecurityBundleResponse", propOrder = {
"id",
"projectRoot",
"bundleName",
"isEnabled",
"isUpdateable",
"instanceCount",
"annotation"
})
public class UpdateSecurityBundleResponse
    extends BaseResponse
{

    // NOTE: JAXB-generated class; field names/order are bound to the schema via
    // the @XmlType propOrder above. The "isIsEnabled"/"isIsUpdateable" accessor
    // names come from the boolean schema fields "isEnabled"/"isUpdateable".
    @XmlElement(required = true)
    protected String id;
    @XmlElement(required = true)
    protected String projectRoot;
    @XmlElement(required = true)
    protected String bundleName;
    protected boolean isEnabled;
    protected boolean isUpdateable;
    @XmlElement(required = true)
    protected BigInteger instanceCount;
    protected String annotation;

    /**
     * Gets the value of the id property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setId(String value) {
        this.id = value;
    }

    /**
     * Gets the value of the projectRoot property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getProjectRoot() {
        return projectRoot;
    }

    /**
     * Sets the value of the projectRoot property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setProjectRoot(String value) {
        this.projectRoot = value;
    }

    /**
     * Gets the value of the bundleName property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getBundleName() {
        return bundleName;
    }

    /**
     * Sets the value of the bundleName property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setBundleName(String value) {
        this.bundleName = value;
    }

    /**
     * Gets the value of the isEnabled property.
     * 
     */
    public boolean isIsEnabled() {
        return isEnabled;
    }

    /**
     * Sets the value of the isEnabled property.
     * 
     */
    public void setIsEnabled(boolean value) {
        this.isEnabled = value;
    }

    /**
     * Gets the value of the isUpdateable property.
     * 
     */
    public boolean isIsUpdateable() {
        return isUpdateable;
    }

    /**
     * Sets the value of the isUpdateable property.
     * 
     */
    public void setIsUpdateable(boolean value) {
        this.isUpdateable = value;
    }

    /**
     * Gets the value of the instanceCount property.
     * 
     * @return
     *     possible object is
     *     {@link BigInteger }
     *     
     */
    public BigInteger getInstanceCount() {
        return instanceCount;
    }

    /**
     * Sets the value of the instanceCount property.
     * 
     * @param value
     *     allowed object is
     *     {@link BigInteger }
     *     
     */
    public void setInstanceCount(BigInteger value) {
        this.instanceCount = value;
    }

    /**
     * Gets the value of the annotation property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getAnnotation() {
        return annotation;
    }

    /**
     * Sets the value of the annotation property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setAnnotation(String value) {
        this.annotation = value;
    }

}
| bsd-3-clause |
anildahiya/sdl_android | base/src/main/java/com/smartdevicelink/proxy/rpc/UnsubscribeVehicleDataResponse.java | 20072 | /*
* Copyright (c) 2017 - 2019, SmartDeviceLink Consortium, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of the SmartDeviceLink Consortium, Inc. nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.smartdevicelink.proxy.rpc;
import android.support.annotation.NonNull;
import com.smartdevicelink.protocol.enums.FunctionID;
import com.smartdevicelink.proxy.RPCResponse;
import com.smartdevicelink.proxy.rpc.enums.Result;
import java.util.Hashtable;
/**
* Unsubscribe Vehicle Data Response is sent, when UnsubscribeVehicleData has been called.
*
* @since SmartDeviceLink 2.0
*/
public class UnsubscribeVehicleDataResponse extends RPCResponse {
    // JSON parameter keys for the per-item results carried in this response.
    // Key strings are part of the SDL wire protocol -- do not change them.
    public static final String KEY_SPEED = "speed";
    public static final String KEY_RPM = "rpm";
    public static final String KEY_FUEL_LEVEL = "fuelLevel";
    public static final String KEY_EXTERNAL_TEMPERATURE = "externalTemperature";
    public static final String KEY_PRNDL = "prndl";
    public static final String KEY_TIRE_PRESSURE = "tirePressure";
    public static final String KEY_ENGINE_TORQUE = "engineTorque";
    public static final String KEY_ENGINE_OIL_LIFE = "engineOilLife";
    public static final String KEY_ODOMETER = "odometer";
    public static final String KEY_GPS = "gps";
    // Mixed-case wire name is historical; keep as-is.
    public static final String KEY_FUEL_LEVEL_STATE = "fuelLevel_State";
    public static final String KEY_INSTANT_FUEL_CONSUMPTION = "instantFuelConsumption";
    public static final String KEY_BELT_STATUS = "beltStatus";
    public static final String KEY_BODY_INFORMATION = "bodyInformation";
    public static final String KEY_DEVICE_STATUS = "deviceStatus";
    public static final String KEY_DRIVER_BRAKING = "driverBraking";
    public static final String KEY_WIPER_STATUS = "wiperStatus";
    public static final String KEY_HEAD_LAMP_STATUS = "headLampStatus";
    public static final String KEY_ACC_PEDAL_POSITION = "accPedalPosition";
    public static final String KEY_STEERING_WHEEL_ANGLE = "steeringWheelAngle";
    public static final String KEY_E_CALL_INFO = "eCallInfo";
    public static final String KEY_AIRBAG_STATUS = "airbagStatus";
    public static final String KEY_EMERGENCY_EVENT = "emergencyEvent";
    public static final String KEY_CLUSTER_MODE_STATUS = "clusterModeStatus";
    public static final String KEY_MY_KEY = "myKey";
    public static final String KEY_FUEL_RANGE = "fuelRange";
    public static final String KEY_TURN_SIGNAL = "turnSignal";
    public static final String KEY_ELECTRONIC_PARK_BRAKE_STATUS = "electronicParkBrakeStatus";
    public static final String KEY_CLOUD_APP_VEHICLE_ID = "cloudAppVehicleID";
    /**
     * Constructs a new UnsubscribeVehicleDataResponse object
     */
    public UnsubscribeVehicleDataResponse() {
        super(FunctionID.UNSUBSCRIBE_VEHICLE_DATA.toString());
    }
    /**
     * Constructs a new UnsubscribeVehicleDataResponse object
     * @param success whether the request is successfully processed
     * @param resultCode the result code of the processed request
     */
    public UnsubscribeVehicleDataResponse(@NonNull Boolean success, @NonNull Result resultCode) {
        this();
        setSuccess(success);
        setResultCode(resultCode);
    }
    /**
     * Constructs a new UnsubscribeVehicleDataResponse object indicated by the Hashtable
     * parameter
     * <p></p>
     *
     * @param hash The Hashtable to use to build this RPC
     *
     */
    public UnsubscribeVehicleDataResponse(Hashtable<String, Object> hash) {
        super(hash);
    }
    // Each accessor pair below stores/reads a per-item VehicleDataResult in the
    // RPC parameter map (presumably the unsubscription outcome for that item).
    /**
     * Sets Gps
     * @param gps a VehicleDataResult related to GPS
     */
    public void setGps(VehicleDataResult gps) {
        setParameters(KEY_GPS, gps);
    }
    /**
     * Gets Gps
     * @return a VehicleDataResult related to GPS
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getGps() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_GPS);
    }
    /**
     * Sets Speed
     * @param speed a VehicleDataResult related to speed
     */
    public void setSpeed(VehicleDataResult speed) {
        setParameters(KEY_SPEED, speed);
    }
    /**
     * Gets Speed
     * @return a VehicleDataResult related to speed
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getSpeed() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_SPEED);
    }
    /**
     * Sets rpm
     * @param rpm a VehicleDataResult related to RPM
     */
    public void setRpm(VehicleDataResult rpm) {
        setParameters(KEY_RPM, rpm);
    }
    /**
     * Gets rpm
     * @return a VehicleDataResult related to RPM
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getRpm() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_RPM);
    }
    /**
     * Sets Fuel Level
     * @param fuelLevel a VehicleDataResult related to Fuel Level
     */
    public void setFuelLevel(VehicleDataResult fuelLevel) {
        setParameters(KEY_FUEL_LEVEL, fuelLevel);
    }
    /**
     * Gets Fuel Level
     * @return a VehicleDataResult related to FuelLevel
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getFuelLevel() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_FUEL_LEVEL);
    }
    /**
     * Sets Fuel Level State
     * @param fuelLevel_State a VehicleDataResult related to FuelLevel State
     * @deprecated delegates to {@link #setFuelLevel(VehicleDataResult)}; use that instead
     */
    @Deprecated
    public void setFuelLevel_State(VehicleDataResult fuelLevel_State) {
        // NOTE(review): the deprecated setter writes KEY_FUEL_LEVEL while the
        // deprecated getter below reads KEY_FUEL_LEVEL_STATE -- asymmetric on
        // purpose per the SDL spec migration? Confirm before relying on it.
        setFuelLevel(fuelLevel_State);
    }
    /**
     * Gets Fuel Level State
     * @return a VehicleDataResult related to FuelLevel State
     * @deprecated delegates to {@link #getFuelLevelState()}; use that instead
     */
    @Deprecated
    public VehicleDataResult getFuelLevel_State() {
        return getFuelLevelState();
    }
    /**
     * Sets Fuel Level State
     * @param fuelLevelState a VehicleDataResult related to FuelLevel State
     */
    public void setFuelLevelState(VehicleDataResult fuelLevelState) {
        setParameters(KEY_FUEL_LEVEL_STATE, fuelLevelState);
    }
    /**
     * Gets Fuel Level State
     * @return a VehicleDataResult related to FuelLevel State
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getFuelLevelState() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_FUEL_LEVEL_STATE);
    }
    /**
     * Sets Instant Fuel Consumption
     * @param instantFuelConsumption a VehicleDataResult related to instant fuel consumption
     */
    public void setInstantFuelConsumption(VehicleDataResult instantFuelConsumption) {
        setParameters(KEY_INSTANT_FUEL_CONSUMPTION, instantFuelConsumption);
    }
    /**
     * Gets Instant Fuel Consumption
     * @return a VehicleDataResult related to instant fuel consumption
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getInstantFuelConsumption() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_INSTANT_FUEL_CONSUMPTION);
    }
    /**
     * Sets External Temperature
     * @param externalTemperature a VehicleDataResult related to external temperature
     */
    public void setExternalTemperature(VehicleDataResult externalTemperature) {
        setParameters(KEY_EXTERNAL_TEMPERATURE, externalTemperature);
    }
    /**
     * Gets External Temperature
     * @return a VehicleDataResult related to external temperature
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getExternalTemperature() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_EXTERNAL_TEMPERATURE);
    }
    /**
     * Sets currently selected gear data
     * @param prndl a VehicleDataResult related to the PRNDL status (automatic transmission gear)
     */
    public void setPrndl(VehicleDataResult prndl) {
        setParameters(KEY_PRNDL, prndl);
    }
    /**
     * Gets currently selected gear data
     * @return a VehicleDataResult related to the PRNDL status (automatic transmission gear)
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getPrndl() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_PRNDL);
    }
    /**
     * Sets Tire Pressure
     * @param tirePressure a VehicleDataResult related to tire pressure
     */
    public void setTirePressure(VehicleDataResult tirePressure) {
        setParameters(KEY_TIRE_PRESSURE, tirePressure);
    }
    /**
     * Gets Tire Pressure
     * @return a VehicleDataResult related to tire pressure
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getTirePressure() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_TIRE_PRESSURE);
    }
    /**
     * Sets Odometer
     * @param odometer a VehicleDataResult related to the odometer
     */
    public void setOdometer(VehicleDataResult odometer) {
        setParameters(KEY_ODOMETER, odometer);
    }
    /**
     * Gets Odometer
     * @return a VehicleDataResult related to the odometer
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getOdometer() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_ODOMETER);
    }
    /**
     * Sets Belt Status
     * @param beltStatus a VehicleDataResult related to the seat belt status
     */
    public void setBeltStatus(VehicleDataResult beltStatus) {
        setParameters(KEY_BELT_STATUS, beltStatus);
    }
    /**
     * Gets Belt Status
     * @return a VehicleDataResult related to the seat belt status
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getBeltStatus() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_BELT_STATUS);
    }
    /**
     * Sets Body Information
     * @param bodyInformation a VehicleDataResult related to the body info
     */
    public void setBodyInformation(VehicleDataResult bodyInformation) {
        setParameters(KEY_BODY_INFORMATION, bodyInformation);
    }
    /**
     * Gets Body Information
     * @return a VehicleDataResult related to the body info
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getBodyInformation() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_BODY_INFORMATION);
    }
    /**
     * Sets Device Status
     * @param deviceStatus a VehicleDataResult related to the device status of the connected device
     */
    public void setDeviceStatus(VehicleDataResult deviceStatus) {
        setParameters(KEY_DEVICE_STATUS, deviceStatus);
    }
    /**
     * Gets Device Status
     * @return a VehicleDataResult related to the device status of the connected device
     */
    @SuppressWarnings("unchecked")
    public VehicleDataResult getDeviceStatus() {
        return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_DEVICE_STATUS);
    }
/**
* Sets Driver Braking
* @param driverBraking a VehicleDataResult related to the driver breaking status
*/
public void setDriverBraking(VehicleDataResult driverBraking) {
setParameters(KEY_DRIVER_BRAKING, driverBraking);
}
/**
* Gets Driver Braking
* @return a VehicleDataResult related to the driver breaking status
*/
@SuppressWarnings("unchecked")
public VehicleDataResult getDriverBraking() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_DRIVER_BRAKING);
}
/**
* Sets Wiper Status
* @param wiperStatus a VehicleDataResult related to the wiper status
*/
public void setWiperStatus(VehicleDataResult wiperStatus) {
setParameters(KEY_WIPER_STATUS, wiperStatus);
}
/**
 * Gets Wiper Status.
 * @return a VehicleDataResult related to the wiper status
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getWiperStatus() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_WIPER_STATUS);
}
/**
 * Sets Head Lamp Status.
 * @param headLampStatus a VehicleDataResult related to the headlamp status
 */
public void setHeadLampStatus(VehicleDataResult headLampStatus) {
setParameters(KEY_HEAD_LAMP_STATUS, headLampStatus);
}
/**
 * Gets Head Lamp Status.
 * @return a VehicleDataResult related to the headlamp status
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getHeadLampStatus() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_HEAD_LAMP_STATUS);
}
/**
 * Sets Engine Torque.
 * @param engineTorque a VehicleDataResult related to the engine's torque
 */
public void setEngineTorque(VehicleDataResult engineTorque) {
setParameters(KEY_ENGINE_TORQUE, engineTorque);
}
/**
 * Gets Engine Torque.
 * @return a VehicleDataResult related to the engine's torque
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getEngineTorque() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_ENGINE_TORQUE);
}
/**
 * Sets Engine Oil Life.
 * @param engineOilLife a VehicleDataResult related to the engine's oil life
 */
public void setEngineOilLife(VehicleDataResult engineOilLife) {
setParameters(KEY_ENGINE_OIL_LIFE, engineOilLife);
}
/**
 * Gets Engine Oil Life.
 * @return a VehicleDataResult related to the engine's oil life
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getEngineOilLife() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_ENGINE_OIL_LIFE);
}
/**
 * Sets AccPedal Position.
 * @param accPedalPosition a VehicleDataResult related to the accelerator pedal's position
 */
public void setAccPedalPosition(VehicleDataResult accPedalPosition) {
setParameters(KEY_ACC_PEDAL_POSITION, accPedalPosition);
}
/**
 * Gets AccPedal Position.
 * @return a VehicleDataResult related to the accelerator pedal's position
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getAccPedalPosition() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_ACC_PEDAL_POSITION);
}
/**
 * Sets Steering Wheel Angle.
 * @param steeringWheelAngle a VehicleDataResult related to the steering wheel angle
 */
public void setSteeringWheelAngle(VehicleDataResult steeringWheelAngle) {
setParameters(KEY_STEERING_WHEEL_ANGLE, steeringWheelAngle);
}
/**
 * Gets Steering Wheel Angle.
 * @return a VehicleDataResult related to the steering wheel angle
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getSteeringWheelAngle() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_STEERING_WHEEL_ANGLE);
}
/**
 * Sets ECall Info.
 * @param eCallInfo a VehicleDataResult related to the emergency call info
 */
public void setECallInfo(VehicleDataResult eCallInfo) {
setParameters(KEY_E_CALL_INFO, eCallInfo);
}
/**
 * Gets ECall Info.
 * @return a VehicleDataResult related to the emergency call info
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getECallInfo() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_E_CALL_INFO);
}
/**
 * Sets Airbag Status.
 * @param airbagStatus a VehicleDataResult related to the airbag status
 */
public void setAirbagStatus(VehicleDataResult airbagStatus) {
setParameters(KEY_AIRBAG_STATUS, airbagStatus);
}
/**
 * Gets Airbag Status.
 * @return a VehicleDataResult related to the airbag status
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getAirbagStatus() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_AIRBAG_STATUS);
}
/**
 * Sets Emergency Event.
 * @param emergencyEvent a VehicleDataResult related to the emergency event status
 */
public void setEmergencyEvent(VehicleDataResult emergencyEvent) {
setParameters(KEY_EMERGENCY_EVENT, emergencyEvent);
}
/**
 * Gets Emergency Event.
 * @return a VehicleDataResult related to the emergency event status
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getEmergencyEvent() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_EMERGENCY_EVENT);
}
/**
 * Sets Cluster Mode Status.
 * @param clusterModeStatus a VehicleDataResult related to the cluster mode status
 */
public void setClusterModeStatus(VehicleDataResult clusterModeStatus) {
setParameters(KEY_CLUSTER_MODE_STATUS, clusterModeStatus);
}
/**
 * Gets Cluster Mode Status.
 * @return a VehicleDataResult related to the cluster mode status
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getClusterModeStatus() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_CLUSTER_MODE_STATUS);
}
/**
 * Sets MyKey.
 * @param myKey a VehicleDataResult related to the MyKey data item
 */
public void setMyKey(VehicleDataResult myKey) {
setParameters(KEY_MY_KEY, myKey);
}
/**
 * Gets MyKey.
 * @return a VehicleDataResult related to the MyKey data item
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getMyKey() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_MY_KEY);
}
/**
 * Sets Fuel Range.
 * @param fuelRange a VehicleDataResult related to the fuel range
 */
public void setFuelRange(VehicleDataResult fuelRange) {
setParameters(KEY_FUEL_RANGE, fuelRange);
}
/**
 * Gets Fuel Range.
 * @return a VehicleDataResult related to the fuel range
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getFuelRange() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_FUEL_RANGE);
}
/**
 * Sets turnSignal.
 * @param turnSignal a VehicleDataResult related to the turn signal status
 */
public void setTurnSignal(VehicleDataResult turnSignal) {
setParameters(KEY_TURN_SIGNAL, turnSignal);
}
/**
 * Gets turnSignal.
 * @return a VehicleDataResult related to the turn signal status
 */
@SuppressWarnings("unchecked")
public VehicleDataResult getTurnSignal() {
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_TURN_SIGNAL);
}
/**
 * Sets electronicParkBrakeStatus.
 * @param electronicParkBrakeStatus a VehicleDataResult related to the electronic park brake status
 */
public void setElectronicParkBrakeStatus(VehicleDataResult electronicParkBrakeStatus){
setParameters(KEY_ELECTRONIC_PARK_BRAKE_STATUS, electronicParkBrakeStatus);
}
/**
 * Gets electronicParkBrakeStatus.
 * @return a VehicleDataResult related to the electronic park brake status
 */
// Annotated for consistency with every other VehicleDataResult getter in this class.
@SuppressWarnings("unchecked")
public VehicleDataResult getElectronicParkBrakeStatus(){
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_ELECTRONIC_PARK_BRAKE_STATUS);
}
/**
 * Sets cloudAppVehicleID.
 * @param cloudAppVehicleID a VehicleDataResult related to the cloud app vehicle ID
 */
public void setCloudAppVehicleID(VehicleDataResult cloudAppVehicleID){
setParameters(KEY_CLOUD_APP_VEHICLE_ID, cloudAppVehicleID);
}
/**
 * Gets a VehicleDataResult for the unsubscribe response of the CloudAppVehicleID vehicle data item.
 * @return a VehicleDataResult related to the cloud app vehicle ID
 */
// Annotated for consistency with every other VehicleDataResult getter in this class.
@SuppressWarnings("unchecked")
public VehicleDataResult getCloudAppVehicleID(){
return (VehicleDataResult) getObject(VehicleDataResult.class, KEY_CLOUD_APP_VEHICLE_ID);
}
}
| bsd-3-clause |
looker/puma | ext/puma_http11/org/jruby/puma/Http11.java | 9869 | package org.jruby.puma;
import org.jruby.Ruby;
import org.jruby.RubyClass;
import org.jruby.RubyHash;
import org.jruby.RubyModule;
import org.jruby.RubyNumeric;
import org.jruby.RubyObject;
import org.jruby.RubyString;
import org.jruby.anno.JRubyMethod;
import org.jruby.runtime.ObjectAllocator;
import org.jruby.runtime.builtin.IRubyObject;
import org.jruby.exceptions.RaiseException;
import org.jruby.util.ByteList;
/**
 * JRuby-side glue for Puma's generated HTTP/1.1 parser ({@link Http11Parser}).
 *
 * Exposed to Ruby as {@code Puma::HttpParser}. The static {@code http_field},
 * {@code request_*}, {@code query_string} and {@code http_version} methods are
 * callbacks invoked by the parser as elements of the request head are
 * recognized; each writes the corresponding Rack-style key into the request
 * hash and enforces a per-element length limit.
 *
 * @author <a href="mailto:ola.bini@ki.se">Ola Bini</a>
 * @author <a href="mailto:headius@headius.com">Charles Oliver Nutter</a>
 */
public class Http11 extends RubyObject {
    // Per-element size limits; exceeding any of them raises Puma::HttpParserError
    // with the paired message.
    public final static int MAX_FIELD_NAME_LENGTH = 256;
    public final static String MAX_FIELD_NAME_LENGTH_ERR = "HTTP element FIELD_NAME is longer than the 256 allowed length.";
    public final static int MAX_FIELD_VALUE_LENGTH = 80 * 1024;
    public final static String MAX_FIELD_VALUE_LENGTH_ERR = "HTTP element FIELD_VALUE is longer than the 81920 allowed length.";
    public final static int MAX_REQUEST_URI_LENGTH = 1024 * 12;
    public final static String MAX_REQUEST_URI_LENGTH_ERR = "HTTP element REQUEST_URI is longer than the 12288 allowed length.";
    public final static int MAX_FRAGMENT_LENGTH = 1024;
    // Fix: this message previously named REQUEST_PATH, copy-pasted from the
    // request-path limit below, even though it reports a FRAGMENT overflow.
    public final static String MAX_FRAGMENT_LENGTH_ERR = "HTTP element FRAGMENT is longer than the 1024 allowed length.";
    public final static int MAX_REQUEST_PATH_LENGTH = 8192;
    public final static String MAX_REQUEST_PATH_LENGTH_ERR = "HTTP element REQUEST_PATH is longer than the 8192 allowed length.";
    public final static int MAX_QUERY_STRING_LENGTH = 1024 * 10;
    public final static String MAX_QUERY_STRING_LENGTH_ERR = "HTTP element QUERY_STRING is longer than the 10240 allowed length.";
    public final static int MAX_HEADER_LENGTH = 1024 * (80 + 32);
    public final static String MAX_HEADER_LENGTH_ERR = "HTTP element HEADER is longer than the 114688 allowed length.";

    // Pre-built byte lists for the env keys written by the callbacks below,
    // shared to avoid re-encoding them for every request.
    public static final ByteList CONTENT_TYPE_BYTELIST = new ByteList(ByteList.plain("CONTENT_TYPE"));
    public static final ByteList CONTENT_LENGTH_BYTELIST = new ByteList(ByteList.plain("CONTENT_LENGTH"));
    public static final ByteList HTTP_PREFIX_BYTELIST = new ByteList(ByteList.plain("HTTP_"));
    public static final ByteList COMMA_SPACE_BYTELIST = new ByteList(ByteList.plain(", "));
    public static final ByteList REQUEST_METHOD_BYTELIST = new ByteList(ByteList.plain("REQUEST_METHOD"));
    public static final ByteList REQUEST_URI_BYTELIST = new ByteList(ByteList.plain("REQUEST_URI"));
    public static final ByteList FRAGMENT_BYTELIST = new ByteList(ByteList.plain("FRAGMENT"));
    public static final ByteList REQUEST_PATH_BYTELIST = new ByteList(ByteList.plain("REQUEST_PATH"));
    public static final ByteList QUERY_STRING_BYTELIST = new ByteList(ByteList.plain("QUERY_STRING"));
    public static final ByteList HTTP_VERSION_BYTELIST = new ByteList(ByteList.plain("HTTP_VERSION"));

    // Stateless allocator; made final so it cannot be reassigned.
    private static final ObjectAllocator ALLOCATOR = new ObjectAllocator() {
        public IRubyObject allocate(Ruby runtime, RubyClass klass) {
            return new Http11(runtime, klass);
        }
    };

    /** Registers Puma::HttpParser and Puma::HttpParserError with the runtime. */
    public static void createHttp11(Ruby runtime) {
        RubyModule mPuma = runtime.defineModule("Puma");
        mPuma.defineClassUnder("HttpParserError",runtime.getClass("IOError"),runtime.getClass("IOError").getAllocator());
        RubyClass cHttpParser = mPuma.defineClassUnder("HttpParser",runtime.getObject(),ALLOCATOR);
        cHttpParser.defineAnnotatedMethods(Http11.class);
    }

    // Both are set once in the constructor and never replaced.
    private final Ruby runtime;
    private final Http11Parser hp;
    // Remainder of the buffer after the header terminator; set by header_done.
    private RubyString body;

    public Http11(Ruby runtime, RubyClass clazz) {
        super(runtime,clazz);
        this.runtime = runtime;
        this.hp = new Http11Parser();
        this.hp.parser.init();
    }

    /** Raises Puma::HttpParserError with {@code msg} when {@code len} exceeds {@code max}. */
    public static void validateMaxLength(Ruby runtime, int len, int max, String msg) {
        if(len>max) {
            throw newHTTPParserError(runtime, msg);
        }
    }

    private static RaiseException newHTTPParserError(Ruby runtime, String msg) {
        return runtime.newRaiseException(getHTTPParserError(runtime), msg);
    }

    private static RubyClass getHTTPParserError(Ruby runtime) {
        // Cheaper to look this up lazily than cache eagerly and consume a field, since it's rarely encountered
        return (RubyClass)runtime.getModule("Puma").getConstant("HttpParserError");
    }

    /**
     * Parser callback for a single header field. Upcases the name, replaces
     * '-' with '_', prefixes "HTTP_" (except Content-Type/Content-Length),
     * strips trailing whitespace from the value, and appends ", " + value if
     * the key was already present.
     */
    public static void http_field(Ruby runtime, RubyHash req, ByteList buffer, int field, int flen, int value, int vlen) {
        RubyString f;
        IRubyObject v;
        validateMaxLength(runtime, flen, MAX_FIELD_NAME_LENGTH, MAX_FIELD_NAME_LENGTH_ERR);
        validateMaxLength(runtime, vlen, MAX_FIELD_VALUE_LENGTH, MAX_FIELD_VALUE_LENGTH_ERR);

        // Normalize the field name in place: '-' -> '_', ASCII upcase.
        ByteList b = new ByteList(buffer,field,flen);
        for(int i = 0,j = b.length();i<j;i++) {
            int bite = b.get(i) & 0xFF;
            if(bite == '-') {
                b.set(i, (byte)'_');
            } else {
                b.set(i, (byte)Character.toUpperCase(bite));
            }
        }

        // Trim trailing whitespace from the value (leading OWS is handled by the parser).
        while (vlen > 0 && Character.isWhitespace(buffer.get(value + vlen - 1))) vlen--;

        if (b.equals(CONTENT_LENGTH_BYTELIST) || b.equals(CONTENT_TYPE_BYTELIST)) {
            f = RubyString.newString(runtime, b);
        } else {
            f = RubyString.newStringShared(runtime, HTTP_PREFIX_BYTELIST);
            f.cat(b);
        }

        b = new ByteList(buffer, value, vlen);
        v = req.fastARef(f);
        if (v == null || v.isNil()) {
            req.fastASet(f, RubyString.newString(runtime, b));
        } else {
            // Repeated header: join values with ", " per RFC 7230 field folding.
            RubyString vs = v.convertToString();
            vs.cat(COMMA_SPACE_BYTELIST);
            vs.cat(b);
        }
    }

    /** Parser callback: stores the request method under REQUEST_METHOD. */
    public static void request_method(Ruby runtime, RubyHash req, ByteList buffer, int at, int length) {
        RubyString val = RubyString.newString(runtime,new ByteList(buffer,at,length));
        req.fastASet(RubyString.newStringShared(runtime, REQUEST_METHOD_BYTELIST),val);
    }

    /** Parser callback: stores the full request target under REQUEST_URI. */
    public static void request_uri(Ruby runtime, RubyHash req, ByteList buffer, int at, int length) {
        validateMaxLength(runtime, length, MAX_REQUEST_URI_LENGTH, MAX_REQUEST_URI_LENGTH_ERR);
        RubyString val = RubyString.newString(runtime,new ByteList(buffer,at,length));
        req.fastASet(RubyString.newStringShared(runtime, REQUEST_URI_BYTELIST),val);
    }

    /** Parser callback: stores the URI fragment under FRAGMENT. */
    public static void fragment(Ruby runtime, RubyHash req, ByteList buffer, int at, int length) {
        validateMaxLength(runtime, length, MAX_FRAGMENT_LENGTH, MAX_FRAGMENT_LENGTH_ERR);
        RubyString val = RubyString.newString(runtime,new ByteList(buffer,at,length));
        req.fastASet(RubyString.newStringShared(runtime, FRAGMENT_BYTELIST),val);
    }

    /** Parser callback: stores the path component under REQUEST_PATH. */
    public static void request_path(Ruby runtime, RubyHash req, ByteList buffer, int at, int length) {
        validateMaxLength(runtime, length, MAX_REQUEST_PATH_LENGTH, MAX_REQUEST_PATH_LENGTH_ERR);
        RubyString val = RubyString.newString(runtime,new ByteList(buffer,at,length));
        req.fastASet(RubyString.newStringShared(runtime, REQUEST_PATH_BYTELIST),val);
    }

    /** Parser callback: stores the query component under QUERY_STRING. */
    public static void query_string(Ruby runtime, RubyHash req, ByteList buffer, int at, int length) {
        validateMaxLength(runtime, length, MAX_QUERY_STRING_LENGTH, MAX_QUERY_STRING_LENGTH_ERR);
        RubyString val = RubyString.newString(runtime,new ByteList(buffer,at,length));
        req.fastASet(RubyString.newStringShared(runtime, QUERY_STRING_BYTELIST),val);
    }

    /** Parser callback: stores the protocol version under HTTP_VERSION. */
    public static void http_version(Ruby runtime, RubyHash req, ByteList buffer, int at, int length) {
        RubyString val = RubyString.newString(runtime,new ByteList(buffer,at,length));
        req.fastASet(RubyString.newStringShared(runtime, HTTP_VERSION_BYTELIST),val);
    }

    /** Parser callback fired once the header terminator is seen; captures any trailing body bytes. */
    public void header_done(Ruby runtime, RubyHash req, ByteList buffer, int at, int length) {
        body = RubyString.newStringShared(runtime, new ByteList(buffer, at, length));
    }

    @JRubyMethod
    public IRubyObject initialize() {
        this.hp.parser.init();
        return this;
    }

    /** Resets the parser state so the instance can be reused for a new request. */
    @JRubyMethod
    public IRubyObject reset() {
        this.hp.parser.init();
        return runtime.getNil();
    }

    @JRubyMethod
    public IRubyObject finish() {
        this.hp.finish();
        return this.hp.is_finished() ? runtime.getTrue() : runtime.getFalse();
    }

    /**
     * Runs the parser over {@code data} starting at {@code start}, filling
     * {@code req_hash}. Returns the number of bytes consumed, or raises
     * Puma::HttpParserError on malformed input or oversized headers.
     */
    @JRubyMethod
    public IRubyObject execute(IRubyObject req_hash, IRubyObject data, IRubyObject start) {
        int from = RubyNumeric.fix2int(start);
        ByteList d = ((RubyString)data).getByteList();
        if(from >= d.length()) {
            throw newHTTPParserError(runtime, "Requested start is after data buffer end.");
        } else {
            Http11Parser hp = this.hp;
            Http11Parser.HttpParser parser = hp.parser;
            parser.data = (RubyHash) req_hash;
            hp.execute(runtime, this, d,from);
            validateMaxLength(runtime, parser.nread,MAX_HEADER_LENGTH, MAX_HEADER_LENGTH_ERR);
            if(hp.has_error()) {
                throw newHTTPParserError(runtime, "Invalid HTTP format, parsing fails. Are you trying to open an SSL connection to a non-SSL Puma?");
            } else {
                return runtime.newFixnum(parser.nread);
            }
        }
    }

    @JRubyMethod(name = "error?")
    public IRubyObject has_error() {
        return this.hp.has_error() ? runtime.getTrue() : runtime.getFalse();
    }

    @JRubyMethod(name = "finished?")
    public IRubyObject is_finished() {
        return this.hp.is_finished() ? runtime.getTrue() : runtime.getFalse();
    }

    /** @return number of bytes the parser has consumed so far */
    @JRubyMethod
    public IRubyObject nread() {
        return runtime.newFixnum(this.hp.parser.nread);
    }

    /** @return any body bytes captured by header_done (may be null before headers complete) */
    @JRubyMethod
    public IRubyObject body() {
        return body;
    }
}// Http11
| bsd-3-clause |
JoeWolski/CMSC436 | OpenCVCameraTest/app/src/main/java/com/example/joe/opencvcameratest/ThermalSensor.java | 4386 | package com.example.joe.opencvcameratest;
import android.app.Activity;
import android.content.Context;
import android.hardware.usb.UsbDeviceConnection;
import android.hardware.usb.UsbManager;
import android.util.Log;
import com.hoho.android.usbserial.driver.UsbSerialDriver;
import com.hoho.android.usbserial.driver.UsbSerialPort;
import com.hoho.android.usbserial.driver.UsbSerialProber;
import com.hoho.android.usbserial.util.SerialInputOutputManager;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class ThermalSensor {
private UsbManager mUsbManager;
private UsbSerialPort sPort = null;
private final ExecutorService mExecutor = Executors.newSingleThreadExecutor();
private SerialInputOutputManager mSerialIoManager = null;
private final String TAG = "USB_APP";
private double temp;
private boolean fresh_data;
private final SerialInputOutputManager.Listener mListener =
new SerialInputOutputManager.Listener() {
@Override
public void onRunError(Exception e) {
Log.d(TAG, "Runner stopped.");
}
@Override
public void onNewData(final byte[] data) {
String input = new String(data);
synchronized (ThermalSensor.this) {
temp = Double.parseDouble(input);
fresh_data = true;
}
}
};
public boolean start(Context app) {
mUsbManager = (UsbManager) app.getSystemService(Context.USB_SERVICE);
List<UsbSerialDriver> availableDrivers = UsbSerialProber.getDefaultProber().findAllDrivers(mUsbManager);
if (availableDrivers.isEmpty()) {
Log.v(TAG, "No Drivers");
return false;
} else {
// Open a connection to the first available driver.
UsbSerialDriver driver = availableDrivers.get(0);
UsbDeviceConnection connection = mUsbManager.openDevice(driver.getDevice());
if (connection == null) {
Log.v(TAG, "Couldn't connect");
return false;
} else {
sPort = driver.getPorts().get(0);
if (sPort != null) {
try {
sPort.open(connection);
sPort.setParameters(115200, 8, UsbSerialPort.STOPBITS_1, UsbSerialPort.PARITY_NONE);
mSerialIoManager = new SerialInputOutputManager(sPort, mListener);
mExecutor.submit(mSerialIoManager);
} catch (IOException e) {
Log.v(TAG, "IOException");
return false;
}
} else {
Log.v(TAG, "No Port");
return false;
}
}
return true;
}
}
public void stop() {
if (mSerialIoManager != null) {
mSerialIoManager.stop();
mSerialIoManager = null;
}
if (sPort != null) {
try {
sPort.close();
} catch (IOException e) {
// Ignore.
}
sPort = null;
}
}
public double read() {
synchronized (this) {
fresh_data = false;
}
byte[] send = new byte[2];
send[0] = 'r';
send[1] = '\n';
Log.v(TAG, "About to write");
// mSerialIoManager.writeAsync(send);
try {
sPort.write(send, 1000);
} catch (IOException e) {
Log.v(TAG, "Failed to write");
}
while(!fresh_data);
return temp;
}
public void setLaser(boolean on) {
byte[] send = new byte[2];
if(on) {
send[0] = 'n';
} else {
send[0] = 'f';
}
send[1] = '\n';
try {
sPort.write(send, 1000);
} catch (IOException e) {
Log.v(TAG, "Failed to write");
}
}
public void pause() {
if (mSerialIoManager != null) {
Log.i(TAG, "Stopping io manager ..");
mSerialIoManager.stop();
mSerialIoManager = null;
}
}
} | bsd-3-clause |