gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*########################################################################
*# #
*# Copyright (c) 2014 by #
*# Shanghai Stock Exchange (SSE), Shanghai, China #
*# All rights reserved. #
*# #
*########################################################################
*/
package sse.ngts.common.plugin.step.business;
import sse.ngts.common.plugin.step.*;
import sse.ngts.common.plugin.step.field.*;
/**
 * STEP business message with MsgType "U012" (quote status send).
 *
 * <p>Carries accessor boilerplate for the fields this message may hold:
 * ClOrdID, OrderID, SecurityID, Text, TransactTime, QuoteStatus,
 * NoPartyIDs and QuoteRespType. Each field exposes the conventional
 * set / get / getXxx / isSet / isSetXxx quintet delegating to the
 * field map inherited from {@code Message}.
 */
public class QuoteStatusSend extends Message {

    private static final long serialVersionUID = 20130819;

    /** Wire value placed in the MsgType(35) header field. */
    public static final String MSGTYPE = "U012";

    /** Creates an empty message whose header is pre-set to MsgType "U012". */
    public QuoteStatusSend() {
        super();
        getHeader().setField(new MsgType(MSGTYPE));
    }

    /**
     * Creates an empty message with an explicit field serialization order.
     *
     * @param fieldOrder tag numbers in the order fields should be written
     */
    public QuoteStatusSend(int[] fieldOrder) {
        super(fieldOrder);
        getHeader().setField(new MsgType(MSGTYPE));
    }

    // ---- ClOrdID ----

    /** Stores the given ClOrdID field on this message. */
    public void set(ClOrdID value) {
        setField(value);
    }

    /** Populates {@code value} from this message and returns it. */
    public ClOrdID get(ClOrdID value) throws FieldNotFound {
        getField(value);
        return value;
    }

    /** Returns a freshly populated ClOrdID field. */
    public ClOrdID getClOrdID() throws FieldNotFound {
        final ClOrdID result = new ClOrdID();
        getField(result);
        return result;
    }

    /** Returns whether the given ClOrdID field instance is present. */
    public boolean isSet(ClOrdID field) {
        return isSetField(field);
    }

    /** Returns whether a ClOrdID field is present on this message. */
    public boolean isSetClOrdID() {
        return isSetField(ClOrdID.FIELD);
    }

    // ---- OrderID ----

    /** Stores the given OrderID field on this message. */
    public void set(OrderID value) {
        setField(value);
    }

    /** Populates {@code value} from this message and returns it. */
    public OrderID get(OrderID value) throws FieldNotFound {
        getField(value);
        return value;
    }

    /** Returns a freshly populated OrderID field. */
    public OrderID getOrderID() throws FieldNotFound {
        final OrderID result = new OrderID();
        getField(result);
        return result;
    }

    /** Returns whether the given OrderID field instance is present. */
    public boolean isSet(OrderID field) {
        return isSetField(field);
    }

    /** Returns whether an OrderID field is present on this message. */
    public boolean isSetOrderID() {
        return isSetField(OrderID.FIELD);
    }

    // ---- SecurityID ----

    /** Stores the given SecurityID field on this message. */
    public void set(SecurityID value) {
        setField(value);
    }

    /** Populates {@code value} from this message and returns it. */
    public SecurityID get(SecurityID value) throws FieldNotFound {
        getField(value);
        return value;
    }

    /** Returns a freshly populated SecurityID field. */
    public SecurityID getSecurityID() throws FieldNotFound {
        final SecurityID result = new SecurityID();
        getField(result);
        return result;
    }

    /** Returns whether the given SecurityID field instance is present. */
    public boolean isSet(SecurityID field) {
        return isSetField(field);
    }

    /** Returns whether a SecurityID field is present on this message. */
    public boolean isSetSecurityID() {
        return isSetField(SecurityID.FIELD);
    }

    // ---- Text ----

    /** Stores the given Text field on this message. */
    public void set(Text value) {
        setField(value);
    }

    /** Populates {@code value} from this message and returns it. */
    public Text get(Text value) throws FieldNotFound {
        getField(value);
        return value;
    }

    /** Returns a freshly populated Text field. */
    public Text getText() throws FieldNotFound {
        final Text result = new Text();
        getField(result);
        return result;
    }

    /** Returns whether the given Text field instance is present. */
    public boolean isSet(Text field) {
        return isSetField(field);
    }

    /** Returns whether a Text field is present on this message. */
    public boolean isSetText() {
        return isSetField(Text.FIELD);
    }

    // ---- TransactTime ----

    /** Stores the given TransactTime field on this message. */
    public void set(TransactTime value) {
        setField(value);
    }

    /** Populates {@code value} from this message and returns it. */
    public TransactTime get(TransactTime value) throws FieldNotFound {
        getField(value);
        return value;
    }

    /** Returns a freshly populated TransactTime field. */
    public TransactTime getTransactTime() throws FieldNotFound {
        final TransactTime result = new TransactTime();
        getField(result);
        return result;
    }

    /** Returns whether the given TransactTime field instance is present. */
    public boolean isSet(TransactTime field) {
        return isSetField(field);
    }

    /** Returns whether a TransactTime field is present on this message. */
    public boolean isSetTransactTime() {
        return isSetField(TransactTime.FIELD);
    }

    // ---- QuoteStatus ----

    /** Stores the given QuoteStatus field on this message. */
    public void set(QuoteStatus value) {
        setField(value);
    }

    /** Populates {@code value} from this message and returns it. */
    public QuoteStatus get(QuoteStatus value) throws FieldNotFound {
        getField(value);
        return value;
    }

    /** Returns a freshly populated QuoteStatus field. */
    public QuoteStatus getQuoteStatus() throws FieldNotFound {
        final QuoteStatus result = new QuoteStatus();
        getField(result);
        return result;
    }

    /** Returns whether the given QuoteStatus field instance is present. */
    public boolean isSet(QuoteStatus field) {
        return isSetField(field);
    }

    /** Returns whether a QuoteStatus field is present on this message. */
    public boolean isSetQuoteStatus() {
        return isSetField(QuoteStatus.FIELD);
    }

    // ---- NoPartyIDs ----

    /** Stores the given NoPartyIDs field on this message. */
    public void set(NoPartyIDs value) {
        setField(value);
    }

    /** Populates {@code value} from this message and returns it. */
    public NoPartyIDs get(NoPartyIDs value) throws FieldNotFound {
        getField(value);
        return value;
    }

    /** Returns a freshly populated NoPartyIDs field. */
    public NoPartyIDs getNoPartyIDs() throws FieldNotFound {
        final NoPartyIDs result = new NoPartyIDs();
        getField(result);
        return result;
    }

    /** Returns whether the given NoPartyIDs field instance is present. */
    public boolean isSet(NoPartyIDs field) {
        return isSetField(field);
    }

    /** Returns whether a NoPartyIDs field is present on this message. */
    public boolean isSetNoPartyIDs() {
        return isSetField(NoPartyIDs.FIELD);
    }

    // ---- QuoteRespType ----

    /** Stores the given QuoteRespType field on this message. */
    public void set(QuoteRespType value) {
        setField(value);
    }

    /** Populates {@code value} from this message and returns it. */
    public QuoteRespType get(QuoteRespType value) throws FieldNotFound {
        getField(value);
        return value;
    }

    /** Returns a freshly populated QuoteRespType field. */
    public QuoteRespType getQuoteRespType() throws FieldNotFound {
        final QuoteRespType result = new QuoteRespType();
        getField(result);
        return result;
    }

    /** Returns whether the given QuoteRespType field instance is present. */
    public boolean isSet(QuoteRespType field) {
        return isSetField(field);
    }

    /** Returns whether a QuoteRespType field is present on this message. */
    public boolean isSetQuoteRespType() {
        return isSetField(QuoteRespType.FIELD);
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.testIntegration;
import com.intellij.codeInsight.TestFrameworks;
import com.intellij.codeInsight.daemon.impl.quickfix.CreateFromUsageUtils;
import com.intellij.codeInsight.generation.GenerateMembersUtil;
import com.intellij.codeInsight.template.Expression;
import com.intellij.codeInsight.template.Template;
import com.intellij.codeInsight.template.TemplateEditingAdapter;
import com.intellij.codeInsight.template.TemplateManager;
import com.intellij.codeInsight.template.impl.ConstantNode;
import com.intellij.ide.fileTemplates.FileTemplate;
import com.intellij.ide.fileTemplates.FileTemplateDescriptor;
import com.intellij.ide.fileTemplates.FileTemplateManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.util.classMembers.MemberInfo;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.*;
/**
 * Utilities for generating test methods/classes from file templates and for
 * discovering which test frameworks are applicable to a given class.
 */
public class TestIntegrationUtils {
private static final Logger LOG = Logger.getInstance(TestIntegrationUtils.class);
/**
 * The kinds of members that can be generated into a test class; each kind
 * knows which framework-provided file template supplies its body.
 */
public enum MethodKind {
SET_UP("setUp") {
// NOTE(review): @Override is missing here (and on TEAR_DOWN/TEST) but present
// on DATA/TEST_CLASS — harmless, just inconsistent.
public FileTemplateDescriptor getFileTemplateDescriptor(@NotNull TestFramework framework) {
return framework.getSetUpMethodFileTemplateDescriptor();
}
},
TEAR_DOWN("tearDown") {
public FileTemplateDescriptor getFileTemplateDescriptor(@NotNull TestFramework framework) {
return framework.getTearDownMethodFileTemplateDescriptor();
}
},
TEST("test") {
public FileTemplateDescriptor getFileTemplateDescriptor(@NotNull TestFramework framework) {
return framework.getTestMethodFileTemplateDescriptor();
}
},
DATA("data") {
@Override
public FileTemplateDescriptor getFileTemplateDescriptor(@NotNull TestFramework framework) {
// Parameterized-test data methods only exist for Java frameworks;
// other frameworks yield null.
if (framework instanceof JavaTestFramework) {
return ((JavaTestFramework)framework).getParametersMethodFileTemplateDescriptor();
}
return null;
}
},
TEST_CLASS("testClass") {
@Override
public FileTemplateDescriptor getFileTemplateDescriptor(@NotNull TestFramework framework) {
// Whole-class templates are likewise a Java-framework-only concept.
if (framework instanceof JavaTestFramework) {
return ((JavaTestFramework)framework).getTestClassFileTemplateDescriptor();
}
return null;
}
};
// Fallback method name used when the caller supplies none.
private final String myDefaultName;
MethodKind(String defaultName) {
myDefaultName = defaultName;
}
public String getDefaultName() {
return myDefaultName;
}
/**
 * Returns the file template backing this member kind for the given framework,
 * or null when the framework has no such template (see DATA/TEST_CLASS).
 */
public abstract FileTemplateDescriptor getFileTemplateDescriptor(@NotNull TestFramework framework);
}
/** Returns true when the element's outermost class is recognized as a test class. */
public static boolean isTest(@NotNull PsiElement element) {
PsiClass klass = findOuterClass(element);
return klass != null && TestFrameworks.getInstance().isTestClass(klass);
}
/**
 * Finds the top-level (outermost) class containing {@code element}.
 * Falls back to the file's single class when the element itself is not
 * inside a class (e.g. sits between members); returns null if no class
 * can be determined.
 */
@Nullable
public static PsiClass findOuterClass(@NotNull PsiElement element) {
PsiClass result = PsiTreeUtil.getParentOfType(element, PsiClass.class, false);
if (result == null) {
final PsiFile containingFile = element.getContainingFile();
if (containingFile instanceof PsiClassOwner){
final PsiClass[] classes = ((PsiClassOwner)containingFile).getClasses();
// Only unambiguous when the file declares exactly one class.
if (classes.length == 1) {
result = classes[0];
}
}
}
if (result == null) return null;
// Walk up to the outermost enclosing class.
do {
PsiClass nextParent = PsiTreeUtil.getParentOfType(result, PsiClass.class, true);
if (nextParent == null) return result;
result = nextParent;
}
while (true);
}
/**
 * Collects the non-private methods of {@code clazz} as MemberInfos.
 * When {@code includeInherited} is set, superclasses are traversed as well,
 * stopping before java.lang.Object.
 */
public static List<MemberInfo> extractClassMethods(PsiClass clazz, boolean includeInherited) {
List<MemberInfo> result = new ArrayList<>();
do {
MemberInfo.extractClassMembers(clazz, result, new MemberInfo.Filter<PsiMember>() {
public boolean includeMember(PsiMember member) {
// Only methods, and only those visible to a subclass/test.
if (!(member instanceof PsiMethod)) return false;
PsiModifierList list = member.getModifierList();
return !list.hasModifierProperty(PsiModifier.PRIVATE);
}
}, false);
clazz = clazz.getSuperClass();
}
while (clazz != null
&& clazz.getSuperClass() != null // not the Object
&& includeInherited);
return result;
}
/** Convenience overload without a source class; see the full overload below. */
public static void runTestMethodTemplate(@NotNull MethodKind methodKind,
TestFramework framework,
final Editor editor,
final PsiClass targetClass,
final PsiMethod method,
@Nullable String name,
boolean automatic, Set<String> existingNames) {
runTestMethodTemplate(methodKind, framework, editor, targetClass, null, method, name, automatic, existingNames);
}
/** Builds the method template and runs it over {@code method} in the editor. */
public static void runTestMethodTemplate(@NotNull MethodKind methodKind,
TestFramework framework,
final Editor editor,
final PsiClass targetClass,
@Nullable PsiClass sourceClass,
final PsiMethod method,
@Nullable String name,
boolean automatic,
Set<String> existingNames) {
runTestMethodTemplate(editor, targetClass, method, automatic,
createTestMethodTemplate(methodKind, framework, targetClass, sourceClass, name, automatic, existingNames));
}
/**
 * Replaces {@code method}'s text in the document with the live template and
 * starts an interactive template session. When not {@code automatic}, a
 * finish listener positions the editor and wires up super-method annotations.
 */
public static void runTestMethodTemplate(final Editor editor,
final PsiClass targetClass,
final PsiMethod method,
boolean automatic, final Template template) {
// Remove the placeholder method text; the template is expanded at the caret.
final int startOffset = method.getModifierList().getTextRange().getStartOffset();
final TextRange range = new TextRange(startOffset, method.getTextRange().getEndOffset());
editor.getDocument().replaceString(range.getStartOffset(), range.getEndOffset(), "");
editor.getCaretModel().moveToOffset(range.getStartOffset());
final Project project = targetClass.getProject();
TemplateEditingAdapter adapter = null;
if (!automatic) {
adapter = new TemplateEditingAdapter() {
@Override
public void templateFinished(Template template, boolean brokenOff) {
ApplicationManager.getApplication().runWriteAction(() -> {
// Commit first so the PSI reflects the just-inserted template text.
PsiDocumentManager.getInstance(project).commitDocument(editor.getDocument());
PsiFile psi = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
// Caret sits just past the inserted method; look one char back.
PsiElement el = PsiTreeUtil.findElementOfClassAtOffset(psi, editor.getCaretModel().getOffset() - 1, PsiMethod.class, false);
if (el != null) {
PsiMethod method1 = PsiTreeUtil.getParentOfType(el, PsiMethod.class, false);
if (method1 != null) {
if (method1.findDeepestSuperMethods().length > 0) {
// Generated method overrides something: add @Override etc.
GenerateMembersUtil.setupGeneratedMethod(method1);
}
CreateFromUsageUtils.setupEditor(method1, editor);
}
}
});
}
};
}
TemplateManager.getInstance(project).startTemplate(editor, template, adapter);
}
/** Convenience overload without a source class; see the full overload below. */
public static Template createTestMethodTemplate(@NotNull MethodKind methodKind,
TestFramework descriptor,
@NotNull PsiClass targetClass,
@Nullable String name,
boolean automatic,
Set<String> existingNames) {
return createTestMethodTemplate(methodKind, descriptor, targetClass, null, name, automatic, existingNames);
}
/**
 * Builds a live template for a test member of the given kind from the
 * framework's file template, substituting a unique method name for each
 * ${NAME} placeholder and dropping the ${BODY} marker.
 */
public static Template createTestMethodTemplate(@NotNull MethodKind methodKind,
TestFramework descriptor,
@NotNull PsiClass targetClass,
@Nullable PsiClass sourceClass,
@Nullable String name,
boolean automatic,
Set<String> existingNames) {
// NOTE(review): getFileTemplateDescriptor may return null (DATA/TEST_CLASS
// with a non-Java framework) — this would NPE; confirm callers guard.
FileTemplateDescriptor templateDesc = methodKind.getFileTemplateDescriptor(descriptor);
String templateName = templateDesc.getFileName();
FileTemplate fileTemplate = FileTemplateManager.getInstance(targetClass.getProject()).getCodeTemplate(templateName);
Template template = TemplateManager.getInstance(targetClass.getProject()).createTemplate("", "");
String templateText;
try {
Properties properties = new Properties();
if (sourceClass != null && sourceClass.isValid()) {
// Expose the class under test to the file template as ${CLASS_NAME}.
properties.setProperty(FileTemplate.ATTRIBUTE_CLASS_NAME, sourceClass.getQualifiedName());
}
templateText = fileTemplate.getText(properties);
}
catch (IOException e) {
LOG.warn(e);
// Fall back to the raw template text without property substitution.
templateText = fileTemplate.getText();
}
if (name == null) name = methodKind.getDefaultName();
// De-duplicate against names already generated in this session by
// appending an increasing numeric suffix.
if (existingNames != null && !existingNames.add(name)) {
int idx = 1;
while (existingNames.contains(name)) {
final String newName = name + (idx++);
if (existingNames.add(newName)) {
name = newName;
break;
}
}
}
templateText = StringUtil.replace(templateText, "${BODY}\n", "");
// Replace each ${NAME} occurrence; the first becomes an editable template
// variable, later ones are read-only segments bound to it.
int from = 0;
while (true) {
int index = templateText.indexOf("${NAME}", from);
if (index == -1) break;
template.addTextSegment(templateText.substring(from, index));
// Capitalize when glued to a preceding identifier (e.g. "testName"),
// decapitalize when the placeholder starts a word.
if (index > 0 && !Character.isWhitespace(templateText.charAt(index - 1))) {
name = StringUtil.capitalize(name);
}
else {
name = StringUtil.decapitalize(name);
}
if (from == 0) {
Expression nameExpr = new ConstantNode(name);
template.addVariable("name", nameExpr, nameExpr, !automatic);
}
else {
template.addVariableSegment("name");
}
from = index + "${NAME}".length();
}
template.addTextSegment(templateText.substring(from, templateText.length()));
template.setToIndent(true);
template.setToReformat(true);
template.setToShortenLongNames(true);
return template;
}
/** Creates a throwaway `void dummy()` method used as a template anchor. */
public static PsiMethod createDummyMethod(@NotNull PsiElement context) {
JVMElementFactory factory = JVMElementFactories.getFactory(context.getLanguage(), context.getProject());
if (factory == null) factory = JavaPsiFacade.getElementFactory(context.getProject());
return factory.createMethod("dummy", PsiType.VOID);
}
/**
 * Returns the frameworks applicable to {@code targetClass}: if some framework
 * already recognizes it as a test class, only that framework is returned;
 * otherwise all frameworks for which it is a potential test class.
 */
public static List<TestFramework> findSuitableFrameworks(PsiClass targetClass) {
TestFramework[] frameworks = Extensions.getExtensions(TestFramework.EXTENSION_NAME);
Project project = targetClass.getProject();
List<TestFramework> result = new SmartList<>();
for (TestFramework framework : frameworks) {
if (isAvailableFor(project, framework)) {
if (framework.isTestClass(targetClass)) {
return Collections.singletonList(framework);
}
if (framework.isPotentialTestClass(targetClass)) {
result.add(framework);
}
}
}
return result;
}
/**
 * A Java framework is available when its marker class is on the project's
 * classpath; non-Java frameworks are assumed available.
 */
private static boolean isAvailableFor(@NotNull Project project, @NotNull TestFramework framework) {
if (framework instanceof JavaTestFramework) {
GlobalSearchScope scope = GlobalSearchScope.allScope(project);
String markerClassFQName = ((JavaTestFramework)framework).getMarkerClassFQName();
PsiClass c = JavaPsiFacade.getInstance(project).findClass(markerClassFQName, scope);
return c != null;
} else {
return true;
}
}
// Static-utility class: not instantiable.
private TestIntegrationUtils() {
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.firebase.auth;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import com.google.api.client.json.JsonFactory;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.firebase.internal.ApiClientUtils;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.junit.Test;
public class SamlProviderConfigTest {
private static final JsonFactory jsonFactory = ApiClientUtils.getDefaultJsonFactory();
private static final String SAML_JSON_STRING =
("{"
+ " 'name': 'projects/projectId/inboundSamlConfigs/saml.provider-id',"
+ " 'displayName': 'DISPLAY_NAME',"
+ " 'enabled': true,"
+ " 'idpConfig': {"
+ " 'idpEntityId': 'IDP_ENTITY_ID',"
+ " 'ssoUrl': 'https://example.com/login',"
+ " 'idpCertificates': ["
+ " { 'x509Certificate': 'certificate1' },"
+ " { 'x509Certificate': 'certificate2' }"
+ " ]"
+ " },"
+ " 'spConfig': {"
+ " 'spEntityId': 'RP_ENTITY_ID',"
+ " 'callbackUri': 'https://projectId.firebaseapp.com/__/auth/handler'"
+ " }"
+ "}").replace("'", "\"");
@Test
public void testJsonDeserialization() throws IOException {
SamlProviderConfig config = jsonFactory.fromString(SAML_JSON_STRING, SamlProviderConfig.class);
assertEquals("saml.provider-id", config.getProviderId());
assertEquals("DISPLAY_NAME", config.getDisplayName());
assertTrue(config.isEnabled());
assertEquals("IDP_ENTITY_ID", config.getIdpEntityId());
assertEquals("https://example.com/login", config.getSsoUrl());
assertEquals(ImmutableList.of("certificate1", "certificate2"), config.getX509Certificates());
assertEquals("RP_ENTITY_ID", config.getRpEntityId());
assertEquals("https://projectId.firebaseapp.com/__/auth/handler", config.getCallbackUrl());
}
@Test
public void testCreateRequest() throws IOException {
SamlProviderConfig.CreateRequest createRequest =
new SamlProviderConfig.CreateRequest()
.setProviderId("saml.provider-id")
.setDisplayName("DISPLAY_NAME")
.setEnabled(false)
.setIdpEntityId("IDP_ENTITY_ID")
.setSsoUrl("https://example.com/login")
.addX509Certificate("certificate1")
.addX509Certificate("certificate2")
.setRpEntityId("RP_ENTITY_ID")
.setCallbackUrl("https://projectId.firebaseapp.com/__/auth/handler");
assertEquals("saml.provider-id", createRequest.getProviderId());
Map<String,Object> properties = createRequest.getProperties();
assertEquals(4, properties.size());
assertEquals("DISPLAY_NAME", (String) properties.get("displayName"));
assertFalse((boolean) properties.get("enabled"));
Map<String, Object> idpConfig = (Map<String, Object>) properties.get("idpConfig");
assertNotNull(idpConfig);
assertEquals(3, idpConfig.size());
assertEquals("IDP_ENTITY_ID", idpConfig.get("idpEntityId"));
assertEquals("https://example.com/login", idpConfig.get("ssoUrl"));
List<Object> idpCertificates = (List<Object>) idpConfig.get("idpCertificates");
assertNotNull(idpCertificates);
assertEquals(2, idpCertificates.size());
assertEquals(ImmutableMap.of("x509Certificate", "certificate1"), idpCertificates.get(0));
assertEquals(ImmutableMap.of("x509Certificate", "certificate2"), idpCertificates.get(1));
Map<String, Object> spConfig = (Map<String, Object>) properties.get("spConfig");
assertNotNull(spConfig);
assertEquals(2, spConfig.size());
assertEquals("RP_ENTITY_ID", spConfig.get("spEntityId"));
assertEquals("https://projectId.firebaseapp.com/__/auth/handler", spConfig.get("callbackUri"));
}
@Test
public void testCreateRequestX509Certificates() throws IOException {
SamlProviderConfig.CreateRequest createRequest =
new SamlProviderConfig.CreateRequest()
.addX509Certificate("certificate1")
.addAllX509Certificates(ImmutableList.of("certificate2", "certificate3"))
.addX509Certificate("certificate4");
Map<String,Object> properties = createRequest.getProperties();
assertEquals(1, properties.size());
Map<String, Object> idpConfig = (Map<String, Object>) properties.get("idpConfig");
assertNotNull(idpConfig);
assertEquals(1, idpConfig.size());
List<Object> idpCertificates = (List<Object>) idpConfig.get("idpCertificates");
assertNotNull(idpCertificates);
assertEquals(4, idpCertificates.size());
assertEquals(ImmutableMap.of("x509Certificate", "certificate1"), idpCertificates.get(0));
assertEquals(ImmutableMap.of("x509Certificate", "certificate2"), idpCertificates.get(1));
assertEquals(ImmutableMap.of("x509Certificate", "certificate3"), idpCertificates.get(2));
assertEquals(ImmutableMap.of("x509Certificate", "certificate4"), idpCertificates.get(3));
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestMissingProviderId() {
new SamlProviderConfig.CreateRequest().setProviderId(null);
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestInvalidProviderId() {
new SamlProviderConfig.CreateRequest().setProviderId("oidc.provider-id");
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestMissingDisplayName() {
new SamlProviderConfig.CreateRequest().setDisplayName(null);
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestMissingIdpEntityId() {
new SamlProviderConfig.CreateRequest().setIdpEntityId(null);
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestMissingSsoUrl() {
new SamlProviderConfig.CreateRequest().setSsoUrl(null);
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestInvalidSsoUrl() {
new SamlProviderConfig.CreateRequest().setSsoUrl("not a valid url");
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestMissingX509Certificate() {
new SamlProviderConfig.CreateRequest().addX509Certificate(null);
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestNullX509CertificatesCollection() {
new SamlProviderConfig.CreateRequest().addAllX509Certificates(null);
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestEmptyX509CertificatesCollection() {
new SamlProviderConfig.CreateRequest().addAllX509Certificates(ImmutableList.<String>of());
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestMissingRpEntityId() {
new SamlProviderConfig.CreateRequest().setRpEntityId(null);
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestMissingCallbackUrl() {
new SamlProviderConfig.CreateRequest().setCallbackUrl(null);
}
@Test(expected = IllegalArgumentException.class)
public void testCreateRequestInvalidCallbackUrl() {
new SamlProviderConfig.CreateRequest().setCallbackUrl("not a valid url");
}
@Test
public void testUpdateRequestFromSamlProviderConfig() throws IOException {
SamlProviderConfig config = jsonFactory.fromString(SAML_JSON_STRING, SamlProviderConfig.class);
SamlProviderConfig.UpdateRequest updateRequest = config.updateRequest();
assertEquals("saml.provider-id", updateRequest.getProviderId());
assertTrue(updateRequest.getProperties().isEmpty());
}
@Test
public void testUpdateRequest() throws IOException {
SamlProviderConfig.UpdateRequest updateRequest =
new SamlProviderConfig.UpdateRequest("saml.provider-id");
updateRequest
.setDisplayName("DISPLAY_NAME")
.setEnabled(false)
.setIdpEntityId("IDP_ENTITY_ID")
.setSsoUrl("https://example.com/login")
.addX509Certificate("certificate1")
.addX509Certificate("certificate2")
.setRpEntityId("RP_ENTITY_ID")
.setCallbackUrl("https://projectId.firebaseapp.com/__/auth/handler");
Map<String,Object> properties = updateRequest.getProperties();
assertEquals(4, properties.size());
assertEquals("DISPLAY_NAME", (String) properties.get("displayName"));
assertFalse((boolean) properties.get("enabled"));
Map<String, Object> idpConfig = (Map<String, Object>) properties.get("idpConfig");
assertNotNull(idpConfig);
assertEquals(3, idpConfig.size());
assertEquals("IDP_ENTITY_ID", idpConfig.get("idpEntityId"));
assertEquals("https://example.com/login", idpConfig.get("ssoUrl"));
List<Object> idpCertificates = (List<Object>) idpConfig.get("idpCertificates");
assertNotNull(idpCertificates);
assertEquals(2, idpCertificates.size());
assertEquals(ImmutableMap.of("x509Certificate", "certificate1"), idpCertificates.get(0));
assertEquals(ImmutableMap.of("x509Certificate", "certificate2"), idpCertificates.get(1));
Map<String, Object> spConfig = (Map<String, Object>) properties.get("spConfig");
assertNotNull(spConfig);
assertEquals(2, spConfig.size());
assertEquals("RP_ENTITY_ID", spConfig.get("spEntityId"));
assertEquals("https://projectId.firebaseapp.com/__/auth/handler", spConfig.get("callbackUri"));
}
@Test
public void testUpdateRequestX509Certificates() throws IOException {
SamlProviderConfig.UpdateRequest updateRequest =
new SamlProviderConfig.UpdateRequest("saml.provider-id");
updateRequest
.addX509Certificate("certificate1")
.addAllX509Certificates(ImmutableList.of("certificate2", "certificate3"))
.addX509Certificate("certificate4");
Map<String,Object> properties = updateRequest.getProperties();
assertEquals(1, properties.size());
Map<String, Object> idpConfig = (Map<String, Object>) properties.get("idpConfig");
assertNotNull(idpConfig);
assertEquals(1, idpConfig.size());
List<Object> idpCertificates = (List<Object>) idpConfig.get("idpCertificates");
assertNotNull(idpCertificates);
assertEquals(4, idpCertificates.size());
assertEquals(ImmutableMap.of("x509Certificate", "certificate1"), idpCertificates.get(0));
assertEquals(ImmutableMap.of("x509Certificate", "certificate2"), idpCertificates.get(1));
assertEquals(ImmutableMap.of("x509Certificate", "certificate3"), idpCertificates.get(2));
assertEquals(ImmutableMap.of("x509Certificate", "certificate4"), idpCertificates.get(3));
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestMissingProviderId() {
new SamlProviderConfig.UpdateRequest(null);
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestInvalidProviderId() {
new SamlProviderConfig.UpdateRequest("oidc.invalid-saml-provider-id");
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestMissingDisplayName() {
new SamlProviderConfig.UpdateRequest("saml.provider-id").setDisplayName(null);
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestMissingIdpEntityId() {
new SamlProviderConfig.UpdateRequest("saml.provider-id").setIdpEntityId(null);
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestMissingSsoUrl() {
new SamlProviderConfig.UpdateRequest("saml.provider-id").setSsoUrl(null);
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestInvalidSsoUrl() {
new SamlProviderConfig.UpdateRequest("saml.provider-id").setSsoUrl("not a valid url");
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestMissingX509Certificate() {
new SamlProviderConfig.UpdateRequest("saml.provider-id").addX509Certificate(null);
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestNullX509CertificatesCollection() {
new SamlProviderConfig.UpdateRequest("saml.provider-id").addAllX509Certificates(null);
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestEmptyX509CertificatesCollection() {
new SamlProviderConfig.UpdateRequest("saml.provider-id")
.addAllX509Certificates(ImmutableList.<String>of());
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestMissingRpEntityId() {
new SamlProviderConfig.UpdateRequest("saml.provider-id").setRpEntityId(null);
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestMissingCallbackUrl() {
new SamlProviderConfig.UpdateRequest("saml.provider-id").setCallbackUrl(null);
}
@Test(expected = IllegalArgumentException.class)
public void testUpdateRequestInvalidCallbackUrl() {
new SamlProviderConfig.UpdateRequest("saml.provider-id").setCallbackUrl("not a valid url");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.resourceresolver.impl;
import static java.util.Arrays.asList;
import static org.apache.sling.resourceresolver.impl.MockedResourceResolverImplTest.createRPHandler;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.jcr.Session;
import javax.servlet.http.HttpServletRequest;
import org.apache.sling.api.SlingException;
import org.apache.sling.api.resource.LoginException;
import org.apache.sling.api.resource.NonExistingResource;
import org.apache.sling.api.resource.PersistenceException;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceResolverFactory;
import org.apache.sling.api.resource.SyntheticResource;
import org.apache.sling.resourceresolver.impl.providers.ResourceProviderHandler;
import org.apache.sling.resourceresolver.impl.providers.ResourceProviderStorage;
import org.apache.sling.resourceresolver.impl.providers.ResourceProviderTracker;
import org.apache.sling.spi.resource.provider.ResolveContext;
import org.apache.sling.spi.resource.provider.ResourceContext;
import org.apache.sling.spi.resource.provider.ResourceProvider;
import org.junit.Before;
import org.junit.Test;
import org.mockito.internal.util.reflection.Whitebox;
public class ResourceResolverImplTest {
private CommonResourceResolverFactoryImpl commonFactory;
private ResourceResolver resResolver;
private ResourceResolverFactoryImpl resFac;
private ResourceProviderTracker resourceProviderTracker;
@Before public void setup() throws LoginException {
    // A minimal provider that resolves nothing — the tests here only need
    // the resolver plumbing, not actual resources.
    ResourceProvider<?> provider = new ResourceProvider<Object>() {
        @Override
        public Resource getResource(ResolveContext<Object> ctx, String path, ResourceContext rCtx, Resource parent) {
            return null;
        }
        @Override
        public Iterator<Resource> listChildren(ResolveContext<Object> ctx, Resource parent) {
            return null;
        }
    };
    // Register the provider at the root and hand it to a mocked tracker.
    List<ResourceProviderHandler> providerHandlers = asList(createRPHandler(provider, "rp1", 0, "/"));
    resourceProviderTracker = mock(ResourceProviderTracker.class);
    ResourceProviderStorage providerStorage = new ResourceProviderStorage(providerHandlers);
    when(resourceProviderTracker.getResourceProviderStorage()).thenReturn(providerStorage);
    // Wire up the factory chain the same way the OSGi activator would.
    ResourceResolverFactoryActivator factoryActivator = new ResourceResolverFactoryActivator();
    factoryActivator.resourceProviderTracker = resourceProviderTracker;
    factoryActivator.resourceAccessSecurityTracker = new ResourceAccessSecurityTracker();
    commonFactory = new CommonResourceResolverFactoryImpl(factoryActivator);
    resFac = new ResourceResolverFactoryImpl(commonFactory, /* TODO: using Bundle */ null, null);
    resResolver = resFac.getAdministrativeResourceResolver(null);
}
@SuppressWarnings("deprecation")
@Test public void testClose() throws Exception {
    final ResourceResolver rr = new ResourceResolverImpl(commonFactory, false, null, resourceProviderTracker);
    assertTrue(rr.isLive());
    rr.close();
    assertFalse(rr.isLive());
    // close is always allowed to be called, even on a closed resolver
    rr.close();
    assertFalse(rr.isLive());
    // every public method on a closed resolver must throw IllegalStateException
    expectIllegalState(() -> rr.adaptTo(Session.class));
    expectIllegalState(() -> rr.clone(null));
    expectIllegalState(() -> rr.findResources("a", "b"));
    expectIllegalState(() -> rr.getAttribute("a"));
    expectIllegalState(() -> rr.getAttributeNames());
    expectIllegalState(() -> rr.getResource(null));
    expectIllegalState(() -> rr.getResource(null, "/a"));
    expectIllegalState(() -> rr.getSearchPath());
    expectIllegalState(() -> rr.getUserID());
    expectIllegalState(() -> rr.listChildren(null));
    expectIllegalState(() -> rr.map("/somepath"));
    expectIllegalState(() -> rr.map(null, "/somepath"));
    expectIllegalState(() -> rr.queryResources("a", "b"));
    expectIllegalState(() -> rr.resolve((HttpServletRequest) null));
    expectIllegalState(() -> rr.resolve("/path"));
    expectIllegalState(() -> rr.resolve(null, "/path"));
}

/** A resolver invocation that may declare checked exceptions (e.g. clone's LoginException). */
@FunctionalInterface
private interface ResolverCall {
    void invoke() throws Exception;
}

/** Runs {@code call} and asserts that it throws {@link IllegalStateException}. */
private static void expectIllegalState(final ResolverCall call) throws Exception {
    try {
        call.invoke();
        fail();
    } catch (final IllegalStateException expected) {
        // expected: the resolver is closed
    }
}
// Same post-close contract as testClose(), but with the factory's
// "logResourceResolverClosing" flag forced on (via Whitebox) so the
// close-logging code path is exercised as well.
@SuppressWarnings("deprecation")
@Test
public void testCloseWithStackTraceLogging() throws Exception {
ResourceResolverFactoryActivator rrfa = spy(new ResourceResolverFactoryActivator());
Whitebox.setInternalState(rrfa, "logResourceResolverClosing", true);
CommonResourceResolverFactoryImpl crrfi = new CommonResourceResolverFactoryImpl(rrfa);
final ResourceResolver rr = new ResourceResolverImpl(crrfi, false, null, resourceProviderTracker);
assertTrue(rr.isLive());
rr.close();
assertFalse(rr.isLive());
// close is always allowed to be called
rr.close();
assertFalse(rr.isLive());
// now check all public method - they should all throw!
try {
rr.adaptTo(Session.class);
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.clone(null);
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.findResources("a", "b");
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.getAttribute("a");
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.getAttributeNames();
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.getResource(null);
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.getResource(null, "/a");
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.getSearchPath();
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.getUserID();
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.listChildren(null);
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.map("/somepath");
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.map(null, "/somepath");
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.queryResources("a", "b");
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.resolve((HttpServletRequest)null);
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.resolve("/path");
fail();
} catch (final IllegalStateException ise) {
// expected
}
try {
rr.resolve(null, "/path");
fail();
} catch (final IllegalStateException ise) {
// expected
}
}
// Basic resolve()/getResource() contract checks: null and relative paths,
// non-existing locations, and request-based resolution fallbacks.
@SuppressWarnings("deprecation")
@Test public void testBasicAPIAssumptions() throws Exception {
// null resource is accessing /, which exists of course
final Resource res00 = resResolver.resolve((String) null);
assertNotNull(res00);
assertTrue("Resource must be NonExistingResource",
res00 instanceof NonExistingResource);
assertEquals("Null path is expected to return root", "/",
res00.getPath());
// relative paths are treated as if absolute
final String path01 = "relPath/relPath";
final Resource res01 = resResolver.resolve(path01);
assertNotNull(res01);
assertEquals("Expecting absolute path for relative path", "/" + path01,
res01.getPath());
assertTrue("Resource must be NonExistingResource",
res01 instanceof NonExistingResource);
final String no_resource_path = "/no_resource/at/this/location";
final Resource res02 = resResolver.resolve(no_resource_path);
assertNotNull(res02);
assertEquals("Expecting absolute path for relative path",
no_resource_path, res02.getPath());
// Fixed copy/paste bug: this assertion previously re-checked res01
// instead of the res02 instance created just above.
assertTrue("Resource must be NonExistingResource",
res02 instanceof NonExistingResource);
try {
resResolver.resolve((HttpServletRequest) null);
fail("Expected NullPointerException trying to resolve null request");
} catch (NullPointerException npe) {
// expected
}
final Resource res0 = resResolver.resolve(null, no_resource_path);
assertNotNull("Expecting resource if resolution fails", res0);
assertTrue("Resource must be NonExistingResource",
res0 instanceof NonExistingResource);
assertEquals("Path must be the original path", no_resource_path,
res0.getPath());
// request with a path that does not resolve
final HttpServletRequest req1 = mock(HttpServletRequest.class);
when(req1.getProtocol()).thenReturn("http");
when(req1.getServerName()).thenReturn("localhost");
when(req1.getPathInfo()).thenReturn(no_resource_path);
final Resource res1 = resResolver.resolve(req1);
assertNotNull("Expecting resource if resolution fails", res1);
assertTrue("Resource must be NonExistingResource",
res1 instanceof NonExistingResource);
assertEquals("Path must be the original path", no_resource_path,
res1.getPath());
// request with a null path resolves to root
final HttpServletRequest req2 = mock(HttpServletRequest.class);
when(req2.getProtocol()).thenReturn("http");
when(req2.getServerName()).thenReturn("localhost");
when(req2.getPathInfo()).thenReturn(null);
final Resource res2 = resResolver.resolve(req2);
assertNotNull("Expecting resource if resolution fails", res2);
assertTrue("Resource must be NonExistingResource",
res2 instanceof NonExistingResource);
assertEquals("Path must be the the root path", "/", res2.getPath());
// getResource() returns null (not NonExistingResource) for null paths
final Resource res3 = resResolver.getResource(null);
assertNull("Expected null resource for null path", res3);
final Resource res4 = resResolver.getResource(null, null);
assertNull("Expected null resource for null path", res4);
final Resource res5 = resResolver.getResource(res01, null);
assertNull("Expected null resource for null path", res5);
}
// clone(null) of an anonymous resolver keeps the (null) user id; cloning with
// admin credentials yields an "admin" resolver.
@Test public void test_clone_based_on_anonymous() throws Exception {
final ResourceResolver anon0 = resFac.getResourceResolver((Map<String, Object>) null);
// no session
final Session anon0Session = anon0.adaptTo(Session.class);
assertNull("Session should not be available", anon0Session);
// no user information, so user id is null
assertEquals(null, anon0.getUserID());
// same user and workspace
final ResourceResolver anon1 = anon0.clone(null);
final Session anon1Session = anon1.adaptTo(Session.class);
assertEquals(anon0.getUserID(), anon1.getUserID());
assertNull("Session should not be available", anon1Session);
anon1.close();
// same workspace but admin user
final Map<String, Object> admin0Cred = new HashMap<String, Object>();
admin0Cred.put(ResourceResolverFactory.USER, "admin");
admin0Cred.put(ResourceResolverFactory.PASSWORD, "admin".toCharArray());
final ResourceResolver admin0 = anon0.clone(admin0Cred);
assertEquals("admin", admin0.getUserID());
admin0.close();
anon0.close();
}
// Mirror of the previous test starting from an administrative resolver and
// cloning down to an anonymous user.
@Test public void test_clone_based_on_admin() throws Exception {
final ResourceResolver admin0 = resFac.getAdministrativeResourceResolver((Map<String, Object>) null);
// no user information, so user id is null
assertEquals(null, admin0.getUserID());
// same user and workspace
final ResourceResolver admin1 = admin0.clone(null);
assertEquals(admin0.getUserID(), admin1.getUserID());
admin1.close();
// same workspace but anonymous user
final Map<String, Object> anon0Cred = new HashMap<String, Object>();
anon0Cred.put(ResourceResolverFactory.USER, "anonymous");
final ResourceResolver anon0 = admin0.clone(anon0Cred);
assertEquals("anonymous", anon0.getUserID());
anon0.close();
admin0.close();
}
// Auth-info entries become resolver attributes, except the password which
// must never be exposed via getAttribute()/getAttributeNames().
@Test public void test_attributes_from_authInfo() throws Exception {
final Map<String, Object> authInfo = new HashMap<String, Object>();
authInfo.put(ResourceResolverFactory.USER, "admin");
authInfo.put(ResourceResolverFactory.PASSWORD, "admin".toCharArray());
authInfo.put("testAttributeString", "AStringValue");
authInfo.put("testAttributeNumber", 999);
final ResourceResolver rr = resFac.getResourceResolver(authInfo);
assertEquals("AStringValue", rr.getAttribute("testAttributeString"));
assertEquals(999, rr.getAttribute("testAttributeNumber"));
assertEquals("admin", rr.getAttribute(ResourceResolverFactory.USER));
assertNull(rr.getAttribute(ResourceResolverFactory.PASSWORD));
// attribute names may come back in any order, so tick them off a set
final HashSet<String> validNames = new HashSet<String>();
validNames.add(ResourceResolverFactory.USER);
validNames.add("testAttributeString");
validNames.add("testAttributeNumber");
final Iterator<String> names = rr.getAttributeNames();
assertTrue(validNames.remove(names.next()));
assertTrue(validNames.remove(names.next()));
assertTrue(validNames.remove(names.next()));
assertFalse("Expect no more names", names.hasNext());
assertTrue("Expect validNames set to be empty now",
validNames.isEmpty());
rr.close();
}
// create() argument validation: null parent/name -> NPE, slash in name ->
// IllegalArgumentException, and otherwise PersistenceException because the
// stubbed provider does not support modifications.
@Test public void testBasicCrud() throws Exception {
final Resource r = mock(Resource.class);
when(r.getPath()).thenReturn("/some");
try {
this.resResolver.create(null, "a", null);
fail("Null parent resource should throw NPE");
} catch (final NullPointerException npe) {
// correct
}
try {
this.resResolver.create(r, null, null);
fail("Null name should throw NPE");
} catch (final NullPointerException npe) {
// correct
}
try {
this.resResolver.create(r, "a/b", null);
fail("Slash in name should throw illegal argument exception");
} catch (final IllegalArgumentException pe) {
// correct
}
try {
this.resResolver.create(r, "a", null);
fail("This should be unsupported.");
} catch (final PersistenceException uoe) {
// correct
}
}
// getParentResourceType() resolves the super type via the resource found at
// the resource-type path ("/a" -> child "/a/b" carries supertype "t:c").
@Test public void test_getResourceSuperType() {
final PathBasedResourceResolverImpl resolver = getPathBasedResourceResolver();
// the resources to test
final Resource r = resolver.add(new SyntheticResource(resolver, "/a", "a:b"));
final Resource r2 = resolver.add(new SyntheticResource(resolver, "/a2", "a:c"));
resolver.add(new SyntheticResourceWithSupertype(resolver, "/a/b", "x:y", "t:c"));
assertEquals("t:c", resolver.getParentResourceType(r.getResourceType()));
assertNull(resolver.getParentResourceType(r2.getResourceType()));
}
// isResourceType() matches the direct type, the declared supertype, and the
// supertype found by path lookup - but nothing else.
@Test public void testIsResourceType() {
final PathBasedResourceResolverImpl resolver = getPathBasedResourceResolver();
final Resource r = resolver.add(new SyntheticResourceWithSupertype(resolver, "/a", "a:b", "d:e"));
resolver.add(new SyntheticResourceWithSupertype(resolver, "/d/e", "x:y", "t:c"));
assertTrue(resolver.isResourceType(r, "a:b"));
assertTrue(resolver.isResourceType(r, "d:e"));
assertFalse(resolver.isResourceType(r, "x:y"));
assertTrue(resolver.isResourceType(r, "t:c"));
assertFalse(resolver.isResourceType(r, "h:p"));
}
// Walks a three-level path-based type hierarchy and checks matching is
// transitive upwards but never downwards.
@Test public void testIsResourceTypeWithPaths() {
final PathBasedResourceResolverImpl resolver = getPathBasedResourceResolver();
/**
* prepare resource type hierarchy
* /types/1
* +- /types/2
* +- /types/3
*/
resolver.add(new SyntheticResourceWithSupertype(resolver, "/types/1", "/types/component", "/types/2"));
resolver.add(new SyntheticResourceWithSupertype(resolver, "/types/2", "/types/component", "/types/3"));
resolver.add(new SyntheticResource(resolver, "/types/3", "/types/component"));
Resource resourceT1 = resolver.add(new SyntheticResource(resolver, "/resourceT1", "/types/1"));
Resource resourceT2 = resolver.add(new SyntheticResource(resolver, "/resourceT2", "/types/2"));
Resource resourceT3 = resolver.add(new SyntheticResource(resolver, "/resourceT3", "/types/3"));
assertTrue(resolver.isResourceType(resourceT1, "/types/1"));
assertTrue(resolver.isResourceType(resourceT1, "/types/2"));
assertTrue(resolver.isResourceType(resourceT1, "/types/3"));
assertFalse(resolver.isResourceType(resourceT1, "/types/component"));
assertFalse(resolver.isResourceType(resourceT1, "/types/unknown"));
assertFalse(resolver.isResourceType(resourceT2, "/types/1"));
assertTrue(resolver.isResourceType(resourceT2, "/types/2"));
assertTrue(resolver.isResourceType(resourceT2, "/types/3"));
assertFalse(resolver.isResourceType(resourceT2, "/types/component"));
assertFalse(resolver.isResourceType(resourceT2, "/types/unknown"));
assertFalse(resolver.isResourceType(resourceT3, "/types/1"));
assertFalse(resolver.isResourceType(resourceT3, "/types/2"));
assertTrue(resolver.isResourceType(resourceT3, "/types/3"));
assertFalse(resolver.isResourceType(resourceT3, "/types/component"));
assertFalse(resolver.isResourceType(resourceT3, "/types/unknown"));
}
// A two-node supertype cycle must be detected: lookups that cannot match
// traverse the whole chain and should raise SlingException.
@Test(expected=SlingException.class) public void testIsResourceCyclicHierarchyDirect() {
final PathBasedResourceResolverImpl resolver = getPathBasedResourceResolver();
/**
* prepare resource type hierarchy
* /types/1 <---+
* +- /types/2 -+
*/
resolver.add(new SyntheticResourceWithSupertype(resolver, "/types/1", "/types/component", "/types/2"));
resolver.add(new SyntheticResourceWithSupertype(resolver, "/types/2", "/types/component", "/types/1"));
Resource resource = resolver.add(new SyntheticResource(resolver, "/resourceT1", "/types/1"));
assertTrue(resolver.isResourceType(resource, "/types/1"));
assertTrue(resolver.isResourceType(resource, "/types/2"))
// this should throw a SlingException when detecting the cyclic hierarchy
resolver.isResourceType(resource, "/types/unknown");
}
// Same as above with a three-node (indirect) cycle.
@Test(expected=SlingException.class) public void testIsResourceCyclicHierarchyIndirect() {
final PathBasedResourceResolverImpl resolver = getPathBasedResourceResolver();
/**
* prepare resource type hierarchy
* /types/1 <----+
* +- /types/2 |
* +- /types/3 -+
*/
resolver.add(new SyntheticResourceWithSupertype(resolver, "/types/1", "/types/component", "/types/2"));
resolver.add(new SyntheticResourceWithSupertype(resolver, "/types/2", "/types/component", "/types/3"));
resolver.add(new SyntheticResourceWithSupertype(resolver, "/types/3", "/types/component", "/types/1"));
Resource resource = resolver.add(new SyntheticResource(resolver, "/resourceT1", "/types/1"));
assertTrue(resolver.isResourceType(resource, "/types/1"));
assertTrue(resolver.isResourceType(resource, "/types/2"));
assertTrue(resolver.isResourceType(resource, "/types/3"));
// this should throw a SlingException when detecting the cyclic hierarchy
resolver.isResourceType(resource, "/types/unknown");
}
// Builds a self-referencing PathBasedResourceResolverImpl: the resolver list
// handed to its factory is populated with the resolver itself right after
// construction, so the factory always hands back this very instance.
private PathBasedResourceResolverImpl getPathBasedResourceResolver() {
    final List<ResourceResolver> selfList = new ArrayList<ResourceResolver>();
    try {
        final PathBasedResourceResolverImpl impl =
                new PathBasedResourceResolverImpl(selfList, resourceProviderTracker);
        selfList.add(impl);
        return impl;
    } catch (final LoginException le) {
        // test helper: surface as unchecked so callers stay signature-free
        throw new RuntimeException(le);
    }
}
// Test resolver backed by a simple path->Resource map; its factory always
// returns the resolver itself (index 0 of the supplied list).
private static class PathBasedResourceResolverImpl extends ResourceResolverImpl {
// backing store for resources registered via add()
private final Map<String, Resource> resources = new HashMap<String, Resource>();
public PathBasedResourceResolverImpl(final List<ResourceResolver> resolvers, final ResourceProviderTracker resourceProviderTracker) throws LoginException {
this(new CommonResourceResolverFactoryImpl(new ResourceResolverFactoryActivator()) {
@Override
public ResourceResolver getAdministrativeResourceResolver(
Map<String, Object> authenticationInfo) throws LoginException {
// always hand back the resolver registered by the test
return resolvers.get(0);
}
}, resourceProviderTracker);
}
public PathBasedResourceResolverImpl(CommonResourceResolverFactoryImpl factory, ResourceProviderTracker resourceProviderTracker) throws LoginException {
super(factory, false, null, resourceProviderTracker);
}
// Registers a resource under its path and returns it for chaining.
public Resource add(final Resource r) {
this.resources.put(r.getPath(), r);
return r;
}
@Override
public String[] getSearchPath() {
// empty search path entry so type lookups use the raw path
return new String[] {""};
}
@Override
public Resource getResource(final String path) {
// normalise to an absolute path before the map lookup
final String p = (path.startsWith("/") ? path : "/" + path);
return this.resources.get(p);
}
}
// SyntheticResource variant that additionally reports a fixed resource
// super type, used to build type hierarchies in the tests above.
private static class SyntheticResourceWithSupertype extends SyntheticResource {
private final String resourceSuperType;
public SyntheticResourceWithSupertype(ResourceResolver resourceResolver, String path,
String resourceType, String resourceSuperType) {
super(resourceResolver, path, resourceType);
this.resourceSuperType = resourceSuperType;
}
@Override
public String getResourceSuperType() {
return this.resourceSuperType;
}
}
}
| |
package za.co.dvt.android.rxjava2firebase;
import android.support.annotation.NonNull;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.database.ChildEventListener;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.MutableData;
import com.google.firebase.database.Query;
import com.google.firebase.database.Transaction;
import com.google.firebase.database.ValueEventListener;
import java.lang.reflect.Array;
import java.util.Iterator;
import java.util.Map;
import io.reactivex.BackpressureStrategy;
import io.reactivex.Completable;
import io.reactivex.CompletableEmitter;
import io.reactivex.CompletableOnSubscribe;
import io.reactivex.Flowable;
import io.reactivex.FlowableEmitter;
import io.reactivex.FlowableOnSubscribe;
import io.reactivex.Maybe;
import io.reactivex.MaybeEmitter;
import io.reactivex.MaybeOnSubscribe;
import io.reactivex.Single;
import io.reactivex.SingleEmitter;
import io.reactivex.SingleOnSubscribe;
import io.reactivex.functions.Cancellable;
import io.reactivex.functions.Function;
import za.co.dvt.android.rxjava2firebase.exception.RxFirebaseDataException;
public class RxFirebaseDatabase {
/**
 * Streams every change of the data at the given query location.
 *
 * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
 * @param strategy {@link BackpressureStrategy} applied to the returned {@link Flowable}.
 * @return a {@link Flowable} which emits a {@link DataSnapshot} whenever the value at the given query changes.
 */
@NonNull
public static Flowable<DataSnapshot> observeValueEvent(@NonNull final Query query,
                                                       @NonNull BackpressureStrategy strategy) {
    return Flowable.create(new FlowableOnSubscribe<DataSnapshot>() {
        @Override
        public void subscribe(final FlowableEmitter<DataSnapshot> emitter) throws Exception {
            final ValueEventListener listener = new ValueEventListener() {
                @Override
                public void onDataChange(DataSnapshot snapshot) {
                    emitter.onNext(snapshot);
                }
                @Override
                public void onCancelled(final DatabaseError error) {
                    emitter.onError(new RxFirebaseDataException(error));
                }
            };
            // unregister the Firebase listener when the subscription is disposed
            emitter.setCancellable(new Cancellable() {
                @Override
                public void cancel() throws Exception {
                    query.removeEventListener(listener);
                }
            });
            query.addValueEventListener(listener);
        }
    }, strategy);
}
/**
 * Listener for a single change in the data at the given query location.
 *
 * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
 * @return a {@link Maybe} which emits the actual state of the database for the given query.
 */
@NonNull
public static Maybe<DataSnapshot> observeSingleValueEvent(@NonNull final Query query) {
    return Maybe.create(new MaybeOnSubscribe<DataSnapshot>() {
        @Override
        public void subscribe(final MaybeEmitter<DataSnapshot> emitter) throws Exception {
            query.addListenerForSingleValueEvent(new ValueEventListener() {
                @Override
                public void onDataChange(DataSnapshot dataSnapshot) {
                    // onSuccess is terminal for a Maybe; the redundant onComplete()
                    // that followed it violated the emitter protocol (it was a no-op)
                    // and has been removed.
                    // NOTE(review): this emits even for a non-existing location;
                    // consider dataSnapshot.exists() -> onComplete() if callers
                    // expect an empty Maybe there - confirm before changing.
                    emitter.onSuccess(dataSnapshot);
                }
                @Override
                public void onCancelled(DatabaseError error) {
                    emitter.onError(new RxFirebaseDataException(error));
                }
            });
        }
    });
}
/**
 * Run a transaction on the data at this location: atomically adds
 * {@code transactionValue} to the stored integer value, initialising it when absent.
 *
 * @param ref reference represents a particular location in your database.
 * @param fireLocalEvents whether intermediate transaction states raise events on the local device.
 * @param transactionValue value added by the transaction.
 * @return a {@link Single} which emits the final {@link DataSnapshot} value if the transaction succeeds.
 */
@NonNull
public static Single<DataSnapshot> runTransaction(@NonNull final DatabaseReference ref,
                                                  final boolean fireLocalEvents,
                                                  final long transactionValue) {
    // Fixed: the emitter was declared as a raw SingleEmitter, and @NonNull was
    // applied to primitive parameters (meaningless - primitives cannot be null).
    return Single.create(new SingleOnSubscribe<DataSnapshot>() {
        @Override public void subscribe(final SingleEmitter<DataSnapshot> emitter) throws Exception {
            ref.runTransaction(new Transaction.Handler() {
                @Override
                public Transaction.Result doTransaction(MutableData mutableData) {
                    Integer currentValue = mutableData.getValue(Integer.class);
                    if (currentValue == null) {
                        // no value yet: seed with the transaction value
                        mutableData.setValue(transactionValue);
                    } else {
                        mutableData.setValue(currentValue + transactionValue);
                    }
                    return Transaction.success(mutableData);
                }
                @Override
                public void onComplete(DatabaseError databaseError, boolean b, DataSnapshot dataSnapshot) {
                    if (databaseError != null) {
                        emitter.onError(new RxFirebaseDataException(databaseError));
                    } else {
                        emitter.onSuccess(dataSnapshot);
                    }
                }
            }, fireLocalEvents);
        }
    });
}
/**
 * Set the given value on the specified {@link DatabaseReference}.
 *
 * @param ref reference represents a particular location in your database.
 * @param value value to update.
 * @return a {@link Completable} which is complete when the set value call finishes successfully.
 */
@NonNull
public static Completable setValue(@NonNull final DatabaseReference ref,
                                   final Object value) {
    return Completable.create(new CompletableOnSubscribe() {
        @Override
        public void subscribe(@io.reactivex.annotations.NonNull final CompletableEmitter emitter) throws Exception {
            // success -> completion, failure -> propagated error
            final OnSuccessListener<Void> onWritten = new OnSuccessListener<Void>() {
                @Override public void onSuccess(Void aVoid) {
                    emitter.onComplete();
                }
            };
            final OnFailureListener onFailed = new OnFailureListener() {
                @Override public void onFailure(@NonNull Exception exception) {
                    emitter.onError(exception);
                }
            };
            ref.setValue(value).addOnSuccessListener(onWritten).addOnFailureListener(onFailed);
        }
    });
}
/**
 * Update the specific child keys to the specified values.
 *
 * @param ref reference represents a particular location in your database.
 * @param updateData the paths to update and their new values.
 * @return a {@link Completable} which is complete when the update children call finishes successfully.
 */
@NonNull
public static Completable updateChildren(@NonNull final DatabaseReference ref,
                                         @NonNull final Map<String, Object> updateData) {
    return Completable.create(new CompletableOnSubscribe() {
        @Override
        public void subscribe(final CompletableEmitter emitter) throws Exception {
            final DatabaseReference.CompletionListener listener = new DatabaseReference.CompletionListener() {
                @Override
                public void onComplete(DatabaseError error, DatabaseReference databaseReference) {
                    // Firebase signals success with a null error
                    if (error == null) {
                        emitter.onComplete();
                    } else {
                        emitter.onError(new RxFirebaseDataException(error));
                    }
                }
            };
            ref.updateChildren(updateData, listener);
        }
    });
}
/**
 * Listener for child events occurring at the given query location.
 *
 * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
 * @param strategy {@link BackpressureStrategy} associated to this {@link Flowable}
 * @return a {@link Flowable} which emits whenever a child value in the database changes for the given query.
 */
@NonNull
public static Flowable<RxFirebaseChildEvent<DataSnapshot>> observeChildEvent(
@NonNull final Query query, @NonNull BackpressureStrategy strategy) {
return Flowable.create(new FlowableOnSubscribe<RxFirebaseChildEvent<DataSnapshot>>() {
@Override
public void subscribe(final FlowableEmitter<RxFirebaseChildEvent<DataSnapshot>> emitter) throws Exception {
// each Firebase child callback is mapped to a typed RxFirebaseChildEvent
final ChildEventListener childEventListener = new ChildEventListener() {
@Override
public void onChildAdded(DataSnapshot dataSnapshot, String previousChildName) {
emitter.onNext(
new RxFirebaseChildEvent<>(dataSnapshot.getKey(), dataSnapshot, previousChildName,
RxFirebaseChildEvent.EventType.ADDED));
}
@Override
public void onChildChanged(DataSnapshot dataSnapshot, String previousChildName) {
emitter.onNext(
new RxFirebaseChildEvent<>(dataSnapshot.getKey(), dataSnapshot, previousChildName,
RxFirebaseChildEvent.EventType.CHANGED));
}
@Override
public void onChildRemoved(DataSnapshot dataSnapshot) {
emitter.onNext(new RxFirebaseChildEvent<>(dataSnapshot.getKey(), dataSnapshot,
RxFirebaseChildEvent.EventType.REMOVED));
}
@Override
public void onChildMoved(DataSnapshot dataSnapshot, String previousChildName) {
emitter.onNext(
new RxFirebaseChildEvent<>(dataSnapshot.getKey(), dataSnapshot, previousChildName,
RxFirebaseChildEvent.EventType.MOVED));
}
@Override
public void onCancelled(DatabaseError error) {
emitter.onError(new RxFirebaseDataException(error));
}
};
// unregister the Firebase listener when the subscription is disposed
emitter.setCancellable(new Cancellable() {
@Override
public void cancel() throws Exception {
query.removeEventListener(childEventListener);
}
});
query.addChildEventListener(childEventListener);
}
}, strategy);
}
/**
 * Retrieves a single {@link DataSnapshot} from each of the given {@link DatabaseReference}s
 * and merges them into one stream.
 *
 * @param whereRefs array of {@link DatabaseReference references.}
 * @return a {@link Flowable} which emits one {@link DataSnapshot} per reference.
 */
@NonNull
public static Flowable<DataSnapshot> observeMultipleSingleValueEvent(@NonNull DatabaseReference... whereRefs) {
    // Direct generic-array creation; the previous reflection-based
    // Array.newInstance(Maybe.class, n) was needless overhead for the same result.
    @SuppressWarnings("unchecked")
    Maybe<DataSnapshot>[] singleQueries = new Maybe[whereRefs.length];
    for (int i = 0; i < whereRefs.length; i++) {
        singleQueries[i] = observeSingleValueEvent(whereRefs[i]);
    }
    return Maybe.mergeArray(singleQueries);
}
/**
 * Retrieve the child {@link DatabaseReference references} from a specific parent which equal the
 * references retrieved from another query - a "where" clause over a non-relational table.
 * <p>
 * Example:
 * DatabaseReference from = reference.child("Tweets");
 * Query where = reference.child("favorited").child(userA);
 * requestFilteredReferenceKeys(from, where).subscribe...
 * <p>
 * This returns the key references (/tweets/tweetId) which userA marked as favorited.
 * Combine with {@link RxFirebaseDatabase#observeMultipleSingleValueEvent(DatabaseReference...)}
 * to load the snapshots behind those keys.
 *
 * @param from base reference where you want to retrieve the original references.
 * @param whereRef reference used as a filter to create the from references.
 * @return a {@link Maybe} which contains the list of the given DatabaseReferences.
 */
@NonNull
public static Maybe<DatabaseReference[]> requestFilteredReferenceKeys(@NonNull final DatabaseReference from,
                                                                      @NonNull Query whereRef) {
    return observeSingleValueEvent(whereRef, new Function<DataSnapshot, DatabaseReference[]>() {
        @Override
        public DatabaseReference[] apply(@io.reactivex.annotations.NonNull DataSnapshot dataSnapshot) throws Exception {
            // one child reference under "from" per key of the filter snapshot
            final DatabaseReference[] keyRefs =
                    new DatabaseReference[(int) dataSnapshot.getChildrenCount()];
            int index = 0;
            for (DataSnapshot child : dataSnapshot.getChildren()) {
                keyRefs[index++] = from.child(child.getKey());
            }
            return keyRefs;
        }
    });
}
/**
 * Listener for changes in the data at the given query location, mapped to {@code clazz}.
 *
 * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
 * @param clazz class type for the {@link DataSnapshot} items.
 * @param strategy {@link BackpressureStrategy} associated to this {@link Flowable}
 * @return a {@link Flowable} which emits when a value of the database changes in the given query.
 */
@NonNull
public static <T> Flowable<T> observeValueEvent(@NonNull final Query query,
@NonNull final Class<T> clazz,
@NonNull BackpressureStrategy strategy) {
return observeValueEvent(query, DataSnapshotMapper.of(clazz), strategy);
}
/**
 * Listener for a single change in the data at the given query location, mapped to {@code clazz}.
 *
 * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
 * @param clazz class type for the {@link DataSnapshot} items.
 * @return a {@link Maybe} which emits the actual state of the database for the given query.
 */
@NonNull
public static <T> Maybe<T> observeSingleValueEvent(@NonNull final Query query,
@NonNull final Class<T> clazz) {
return observeSingleValueEvent(query, DataSnapshotMapper.of(clazz));
}
/**
 * Listener for child events occurring at the given query location, mapped to {@code clazz}.
 *
 * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
 * @param clazz class type for the {@link DataSnapshot} items.
 * @param strategy {@link BackpressureStrategy} associated to this {@link Flowable}
 * @return a {@link Flowable} which emits whenever a child value in the database changes for the given query.
 */
@NonNull
public static <T> Flowable<RxFirebaseChildEvent<T>> observeChildEvent(
@NonNull final Query query, @NonNull final Class<T> clazz,
@NonNull BackpressureStrategy strategy) {
return observeChildEvent(query, DataSnapshotMapper.ofChildEvent(clazz), strategy);
}
/**
 * Listener for changes in the data at the given query location, transformed by {@code mapper}.
 *
 * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
 * @param mapper specific function to map the dispatched events.
 * @param strategy {@link BackpressureStrategy} associated to this {@link Flowable}
 * @return a {@link Flowable} which emits when a value of the database changes in the given query.
 */
@NonNull
public static <T> Flowable<T> observeValueEvent(@NonNull final Query query,
@NonNull final Function<? super DataSnapshot, ? extends T> mapper,
@NonNull BackpressureStrategy strategy) {
return observeValueEvent(query, strategy).map(mapper);
}
/**
 * Listener for a single change in the data at the given query location, transformed by {@code mapper}.
 *
 * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
 * @param mapper specific function to map the dispatched events.
 * @return a {@link Maybe} which emits the actual state of the database for the given query.
 */
@NonNull
public static <T> Maybe<T> observeSingleValueEvent(@NonNull final Query query,
@NonNull final Function<? super DataSnapshot, ? extends T> mapper) {
return observeSingleValueEvent(query).map(mapper);
}
/**
 * Listener for child events occurring at the given query location, transformed by {@code mapper}.
 *
 * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
 * @param mapper specific function to map the dispatched events.
 * @param strategy {@link BackpressureStrategy} associated to this {@link Flowable}
 * @return a {@link Flowable} which emits whenever a child value in the database changes for the given query.
 */
@NonNull
public static <T> Flowable<RxFirebaseChildEvent<T>> observeChildEvent(
@NonNull final Query query, @NonNull final Function<? super RxFirebaseChildEvent<DataSnapshot>,
? extends RxFirebaseChildEvent<T>> mapper, @NonNull BackpressureStrategy strategy) {
return observeChildEvent(query, strategy).map(mapper);
}
/**
* Listener for changes in the data at the given query location.
*
* @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
* @return a {@link Flowable} which emits when a value of the database change in the given query.
*/
@NonNull
public static Flowable<DataSnapshot> observeValueEvent(@NonNull final Query query) {
return observeValueEvent(query, BackpressureStrategy.DROP);
}
    /**
     * Listener for child events occurring at the given query location.
     * Uses {@link BackpressureStrategy#DROP} by default.
     *
     * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
     * @return a {@link Flowable} which emits when a value of a child in the database changes on the given query.
     */
    @NonNull
    public static Flowable<RxFirebaseChildEvent<DataSnapshot>> observeChildEvent(
            @NonNull final Query query) {
        return observeChildEvent(query, BackpressureStrategy.DROP);
    }
/**
* Run a transaction on the data at this location. For more information on running transactions, see
*
* @param ref reference represents a particular location in your database.
* @param transactionValue value of the transaction.
* @return a {@link Single} which emits the final {@link DataSnapshot} value if the transaction success.
*/
@NonNull
public static Single<DataSnapshot> runTransaction(@NonNull final DatabaseReference ref,
@NonNull final long transactionValue) {
return runTransaction(ref, true, transactionValue);
}
    /**
     * Listener for changes in the data at the given query location, mapped to the given class.
     *
     * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
     * @param clazz class type for the {@link DataSnapshot} items.
     * @return a {@link Flowable} which emits when a value of the database changes in the given query.
     */
    @NonNull
    public static <T> Flowable<T> observeValueEvent(@NonNull final Query query,
                                                    @NonNull final Class<T> clazz) {
        return observeValueEvent(query, DataSnapshotMapper.of(clazz), BackpressureStrategy.DROP);
    }
    /**
     * Listener for child events occurring at the given query location, mapped to the given class.
     *
     * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
     * @param clazz class type for the {@link DataSnapshot} items.
     * @return a {@link Flowable} which emits when a value of a child in the database changes on the given query.
     */
    @NonNull
    public static <T> Flowable<RxFirebaseChildEvent<T>> observeChildEvent(
            @NonNull final Query query, @NonNull final Class<T> clazz) {
        return observeChildEvent(query, DataSnapshotMapper.ofChildEvent(clazz), BackpressureStrategy.DROP);
    }
    /**
     * Listener for changes in the data at the given query location.
     *
     * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
     * @param mapper specific function to map the dispatched events.
     * @return a {@link Flowable} which emits when a value of the database changes in the given query.
     */
    @NonNull
    public static <T> Flowable<T> observeValueEvent(@NonNull final Query query,
                                                    @NonNull final Function<? super DataSnapshot, ? extends T> mapper) {
        return observeValueEvent(query, BackpressureStrategy.DROP).map(mapper);
    }
    /**
     * Listener for child events occurring at the given query location.
     *
     * @param query reference represents a particular location in your Database and can be used for reading or writing data to that Database location.
     * @param mapper specific function to map the dispatched events.
     * @return a {@link Flowable} which emits when a value of a child in the database changes on the given query.
     */
    @NonNull
    public static <T> Flowable<RxFirebaseChildEvent<T>> observeChildEvent(
            @NonNull final Query query, @NonNull final Function<? super RxFirebaseChildEvent<DataSnapshot>,
            ? extends RxFirebaseChildEvent<T>> mapper) {
        return observeChildEvent(query, BackpressureStrategy.DROP).map(mapper);
    }
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.model;
import java.awt.Color;
import java.io.Serializable;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import org.hibernate.FlushMode;
import org.hibernate.criterion.Restrictions;
import org.unitime.commons.NaturalOrderComparator;
import org.unitime.timetable.model.base.BaseDepartment;
import org.unitime.timetable.model.base.BaseRoomDept;
import org.unitime.timetable.model.dao.DepartmentDAO;
import org.unitime.timetable.security.Qualifiable;
import org.unitime.timetable.security.UserContext;
import org.unitime.timetable.security.UserQualifier;
import org.unitime.timetable.security.rights.Right;
/**
* @author Tomas Muller, Stephanie Schluttenhofer, Zuzana Mullerova
*/
public class Department extends BaseDepartment implements Comparable<Department>, Qualifiable {
    private static final long serialVersionUID = 1L;

/*[CONSTRUCTOR MARKER BEGIN]*/
    public Department () {
        super();
    }

    /**
     * Constructor for primary key
     */
    public Department (java.lang.Long uniqueId) {
        super(uniqueId);
    }

/*[CONSTRUCTOR MARKER END]*/

    /** Request attribute name for available departments **/
    public static String DEPT_ATTR_NAME = "deptsList";

    /** Request attribute name for external departments **/
    public static String EXTERNAL_DEPT_ATTR_NAME = "externalDepartments";

    /** All departments of the given academic session, ordered by {@link #compareTo(Department)}. */
    public static TreeSet<Department> findAll(Long sessionId) {
        return new TreeSet<Department>((new DepartmentDAO()).
            getSession().
            createQuery("select distinct d from Department as d where d.session.uniqueId=:sessionId").
            setLong("sessionId", sessionId.longValue()).
            setCacheable(true).
            list());
    }

    /** Externally managed departments of the given academic session. */
    public static TreeSet<Department> findAllExternal(Long sessionId) {
        return new TreeSet<Department>((new DepartmentDAO()).
            getSession().
            createQuery("select distinct d from Department as d where d.externalManager=true and d.session.uniqueId=:sessionId").
            setLong("sessionId", sessionId.longValue()).
            setCacheable(true).
            list());
    }

    /** Non-externally-managed departments of the given academic session. NOTE(review): raw TreeSet, unlike findAllExternal. */
    public static TreeSet findAllNonExternal(Long sessionId) {
        return new TreeSet((new DepartmentDAO()).
            getSession().
            createQuery("select distinct d from Department as d where d.externalManager=false and d.session.uniqueId=:sessionId").
            setLong("sessionId", sessionId.longValue()).
            setCacheable(true).
            list());
    }

    /**
     * Look up a department by its department code within a session (new DAO session).
     * @param deptCode department code to match
     * @param sessionId academic session unique id
     * @return matching department, or null when none exists
     */
    public static Department findByDeptCode(String deptCode, Long sessionId) {
        return(findByDeptCode(deptCode, sessionId, (new DepartmentDAO()). getSession()));
    }

    /**
     * Look up a department by its department code within a session, using the given Hibernate session.
     * Query is cacheable and runs with MANUAL flush mode (no implicit flush of pending changes).
     * @param deptCode department code to match
     * @param sessionId academic session unique id
     * @param hibSession Hibernate session to query through
     * @return matching department, or null when none exists
     */
    public static Department findByDeptCode(String deptCode, Long sessionId, org.hibernate.Session hibSession) {
        return (Department)hibSession.
            createQuery("select distinct d from Department as d where d.deptCode=:deptCode and d.session.uniqueId=:sessionId").
            setLong("sessionId", sessionId).
            setString("deptCode", deptCode).
            setCacheable(true).
            setFlushMode(FlushMode.MANUAL).
            uniqueResult();
    }

    /* (non-Javadoc)
     * Ordering: internal departments before external ones, then natural order of
     * dept code, then abbreviation, then unique id (nulls compare as -1).
     * @see java.lang.Comparable#compareTo(java.lang.Object)
     */
    public int compareTo(Department d) {
        int cmp = Double.compare(
                isExternalManager() == null ? 0 : isExternalManager() ? 1 : 0,
                d.isExternalManager() == null ? 0 : d.isExternalManager() ? 1 : 0);
        if (cmp!=0) return cmp;
        cmp = new NaturalOrderComparator().compare(
                getDeptCode() == null ? "" : getDeptCode(),
                d.getDeptCode() == null ? "" : d.getDeptCode());
        if (cmp!=0) return cmp;
        cmp = new NaturalOrderComparator().compare(
                getAbbreviation() == null ? "" : getAbbreviation(),
                d.getAbbreviation() == null ? "" : d.getAbbreviation());
        if (cmp!=0) return cmp;
        return (getUniqueId() == null ? Long.valueOf(-1) : getUniqueId()).compareTo(d.getUniqueId() == null ? -1 : d.getUniqueId());
    }

    /** "deptCode - name" label (despite the name, contains no HTML markup). */
    public String htmlLabel(){
        return(this.getDeptCode() + " - " + this.getName());
    }

    /** "deptCode - name", with the external manager label appended for external departments. */
    public String getHtmlTitle() {
        return getDeptCode()+" - "+getName()+(isExternalManager().booleanValue()?" ("+getExternalMgrLabel()+")":"");
    }

    /** Shortest usable label: external manager abbreviation, else abbreviation, else dept code. */
    public String getShortLabel() {
        if (isExternalManager().booleanValue())
            return getExternalMgrAbbv().trim();
        if (getAbbreviation()!=null && getAbbreviation().trim().length()>0)
            return getAbbreviation().trim();
        return getDeptCode();
    }

    /** Short label wrapped in a span colored with the department's room sharing color. */
    public String htmlShortLabel() {
        return
            "<span "+
            "style='color:#"+getRoomSharingColor(null)+";font-weight:bold;' "+
            "title='"+getHtmlTitle()+"'>"+
            getShortLabel()+
            "</span>";
    }

    public String toString(){
        return(this.getDeptCode() + " - " + this.getName());
    }

    /** "deptCode - name", with "( EXT: ... )" suffix for externally managed departments. */
    public String getLabel(){
        return(this.getDeptCode()
            + " - " + this.getName())
            + ( (this.isExternalManager().booleanValue())
                    ? " ( EXT: " + this.getExternalMgrLabel() + " )"
                    : "" );
    }

    /** Encode a color as a 6-digit lowercase hex RGB string (no leading '#'). */
    public static String color2hex(Color color) {
        return
            (color.getRed()<16?"0":"")+
            Integer.toHexString(color.getRed())+
            (color.getGreen()<16?"0":"")+
            Integer.toHexString(color.getGreen())+
            (color.getBlue()<16?"0":"")+
            Integer.toHexString(color.getBlue());
    }

    /** Decode a 6-digit hex RGB string into a {@link Color}; null for null/malformed input. */
    public static Color hex2color(String hex) {
        if (hex==null || hex.length()!=6) return null;
        return new Color(
                Integer.parseInt(hex.substring(0,2),16),
                Integer.parseInt(hex.substring(2,4),16),
                Integer.parseInt(hex.substring(4,6),16));
    }

    /** Euclidean distance between two hex-encoded RGB colors, truncated to int. */
    private static int distance(String color1, String color2) {
        if (color1.equals(color2)) return 0;
        Color c1 = hex2color(color1);
        Color c2 = hex2color(color2);
        return (int)Math.sqrt(
                ((c1.getRed()-c2.getRed())*(c1.getRed()-c2.getRed())) +
                ((c1.getGreen()-c2.getGreen())*(c1.getGreen()-c2.getGreen())) +
                ((c1.getBlue()-c2.getBlue())*(c1.getBlue()-c2.getBlue())));
    }

    /**
     * True if the given color is too close (RGB distance below 50) to the room sharing color
     * of any other department that shares a room with this department.
     */
    public boolean isRoomSharingColorConflicting(String color) {
        if (getUniqueId() == null) return false;
        for (String other: (List<String>)DepartmentDAO.getInstance().getSession().createQuery(
                "select distinct x.department.roomSharingColor from Department d inner join d.roomDepts rd inner join rd.room.roomDepts x " +
                "where d.uniqueId = :uniqueId and d != x.department"
                ).setLong("uniqueId", getUniqueId()).setCacheable(true).list()) {
            if (other != null && distance(color, other) < 50) return true;
        }
        return false;
    }

    /**
     * True if the color conflicts with shared-room departments (see above) or exactly matches
     * the color of any department in otherDepartments (elements may be Department, RoomDept, or Long id).
     */
    public boolean isRoomSharingColorConflicting(String color, Collection otherDepartments) {
        if (isRoomSharingColorConflicting(color)) return true;
        if (otherDepartments!=null && !otherDepartments.isEmpty()) {
            for (Iterator i=otherDepartments.iterator();i.hasNext();) {
                Object o = i.next();
                BaseDepartment d = null;
                if (o instanceof BaseDepartment) {
                    d = (BaseDepartment)o;
                } else if (o instanceof BaseRoomDept) {
                    d = ((BaseRoomDept)o).getDepartment();
                } else if (o instanceof Long) {
                    d = (new DepartmentDAO()).get((Long)o);
                }
                if (d==null) continue;
                if (d.equals(this)) continue;
                if (color.equals(d.getRoomSharingColor()))
                    return true;
            }
        }
        return false;
    }

    /**
     * If the current room sharing color conflicts, pick the first non-conflicting predefined
     * color (falling back to random colors once the predefined palette is exhausted),
     * then persist the change.
     */
    public void fixRoomSharingColor(Collection otherDepartments) {
        String color = getRoomSharingColor();
        if (isRoomSharingColorConflicting(color, otherDepartments)) {
            int idx = 0;
            color = color2hex(RoomSharingModel.sDepartmentColors[idx]);
            while (isRoomSharingColorConflicting(color, otherDepartments)) {
                idx++;
                if (idx>=RoomSharingModel.sDepartmentColors.length) {
                    // palette exhausted -- try random colors until one fits
                    color = color2hex(new Color((int)(256.0*Math.random()),(int)(256.0*Math.random()),(int)(256.0*Math.random())));
                } else {
                    color = color2hex(RoomSharingModel.sDepartmentColors[idx]);
                }
            }
            setRoomSharingColor(color);
            (new DepartmentDAO()).saveOrUpdate(this);
        }
    }

    /**
     * Room sharing color for this department; lazily assigns a default and resolves
     * conflicts (may persist this department as a side effect).
     */
    public String getRoomSharingColor(Collection otherDepartments) {
        if (getRoomSharingColor() == null) {
            setRoomSharingColor(color2hex(RoomSharingModel.sDepartmentColors[0]));
        }
        fixRoomSharingColor(otherDepartments);
        return getRoomSharingColor();
    }

    /** External manager label for external departments, "deptCode - name" otherwise. */
    public String getManagingDeptLabel(){
        if (isExternalManager().booleanValue()){
            return(getExternalMgrLabel());
        } else {
            return(getDeptCode()+" - "+getName());
        }
    }

    /** Short label wrapped in a span whose tooltip is the full title. */
    public String getManagingDeptAbbv(){
        return "<span title='"+getHtmlTitle()+"'>"+getShortLabel()+"</span>";
    }

    /** Classes managed by this department (or controlled by it when no managing department is set). */
    public Collection getClasses() {
        return (new DepartmentDAO()).
            getSession().
            createQuery("select distinct c from Class_ as c where c.managingDept=:departmentId or (c.managingDept is null and c.controllingDept=:departmentId)").
            setLong("departmentId", getUniqueId().longValue()).
            list();
    }

    /** Same as {@link #getClasses()} but eagerly fetches the subpart/offering structure. */
    public Collection getClassesFetchWithStructure() {
        return (new DepartmentDAO()).
            getSession().
            createQuery("select distinct c from Class_ as c " +
                "left join fetch c.childClasses as cc "+
                "left join fetch c.schedulingSubpart as ss "+
                "left join fetch ss.childSubparts as css "+
                "left join fetch ss.instrOfferingConfig as ioc "+
                "left join fetch ioc.instructionalOffering as io "+
                "left join fetch io.courseOfferings as cox "+
                "where c.managingDept=:departmentId or (c.managingDept is null and c.controllingDept=:departmentId)").
            setLong("departmentId", getUniqueId().longValue()).
            list();
    }

    /** Classes of this department that have no assignment in the given solution. */
    public Collection getNotAssignedClasses(Solution solution) {
        return (new DepartmentDAO()).
            getSession().
            createQuery(
                    "select distinct c from Class_ as c where (c.managingDept=:departmentId or (c.managingDept is null and c.controllingDept=:departmentId)) and "+
                    "not exists (from c.assignments as a where a.solution=:solutionId)"
                    ).
            setLong("departmentId", getUniqueId().longValue()).
            setInteger("solutionId", solution.getUniqueId().intValue()).
            list();
    }

    /**
     * Departments of the session that are "in use": having a timetable manager,
     * a room, or a subject area (union of three cacheable queries).
     */
    public static TreeSet<Department> findAllBeingUsed(Long sessionId) {
        TreeSet ret = new TreeSet(
                (new DepartmentDAO()).
                getSession().
                createQuery("select distinct d from Department as d inner join d.timetableManagers as m where d.session.uniqueId=:sessionId").
                setLong("sessionId", sessionId.longValue()).
                setCacheable(true).
                list());
        ret.addAll(
                (new DepartmentDAO()).
                getSession().
                createQuery("select distinct d from Department as d inner join d.roomDepts as r where d.session.uniqueId=:sessionId").
                setLong("sessionId", sessionId.longValue()).
                setCacheable(true).
                list());
        ret.addAll(
                (new DepartmentDAO()).
                getSession().
                createQuery("select distinct d from Department as d inner join d.subjectAreas as r where d.session.uniqueId=:sessionId").
                setLong("sessionId", sessionId.longValue()).
                setCacheable(true).
                list());
        return ret;
    }

    /** Department status type, falling back to the session-wide status when not set. */
    public DepartmentStatusType effectiveStatusType() {
        DepartmentStatusType t = getStatusType();
        if (t!=null) return t;
        return getSession().getStatusType();
    }

    /**
     * Status type as seen from the given controlling department: for external managers,
     * a per-department external status overrides the default when one is defined.
     */
    public DepartmentStatusType effectiveStatusType(Department controllingDepartment) {
        if (isExternalManager() && controllingDepartment != null && getExternalStatusTypes() != null) {
            for (ExternalDepartmentStatusType t: getExternalStatusTypes()) {
                if (controllingDepartment.equals(t.getDepartment())) return t.getStatusType();
            }
        }
        return effectiveStatusType();
    }

    /**
     * Status type as seen by the given user: for external managers, when the user does not
     * belong to this department, the rights of all matching external statuses are OR-ed together.
     */
    public DepartmentStatusType effectiveStatusType(UserContext cx) {
        if (isExternalManager() && getExternalStatusTypes() != null && !cx.hasDepartment(getUniqueId())) {
            Integer status = null;
            for (ExternalDepartmentStatusType t: getExternalStatusTypes()) {
                if (cx.hasDepartment(t.getDepartment().getUniqueId())) {
                    if (status == null) {
                        status = t.getStatusType().getStatus();
                    } else {
                        // combine rights bit-wise across all applicable external statuses
                        status = Integer.valueOf(status | t.getStatusType().getStatus());
                    }
                }
            }
            if (status != null) {
                // synthetic (unsaved) status type carrying only the combined rights
                DepartmentStatusType ret = new DepartmentStatusType(); ret.setStatus(status); return ret;
            }
        }
        return effectiveStatusType();
    }

    /** Unique id of the owning academic session, or null when no session is set. */
    public Long getSessionId(){
        if (getSession() != null){
            return(getSession().getUniqueId());
        } else {
            return(null);
        }
    }

    /** Shallow copy of this department's scalar attributes; uniqueId is NOT copied. */
    public Object clone() {
        Department d = new Department();
        d.setSession(getSession());
        d.setAbbreviation(getAbbreviation());
        d.setAllowReqRoom(isAllowReqRoom());
        d.setAllowReqTime(isAllowReqTime());
        d.setAllowReqDistribution(isAllowReqDistribution());
        d.setDeptCode(getDeptCode());
        d.setDistributionPrefPriority(getDistributionPrefPriority());
        d.setExternalManager(isExternalManager());
        d.setExternalMgrAbbv(getExternalMgrAbbv());
        d.setExternalMgrLabel(getExternalMgrLabel());
        d.setExternalUniqueId(getExternalUniqueId());
        d.setName(getName());
        d.setStatusType(getStatusType());
        d.setAllowEvents(isAllowEvents());
        d.setInheritInstructorPreferences(isInheritInstructorPreferences());
        d.setAllowStudentScheduling(isAllowStudentScheduling());
        return d;
    }

    /** Counterpart of this department in another academic session (matched by dept code, then external id). */
    public Department findSameDepartmentInSession(Long newSessionId){
        return(findSameDepartmentInSession(newSessionId, (new DepartmentDAO()).getSession()));
    }

    /**
     * Counterpart of this department in another academic session: first matched by
     * department code; when not found, falls back to a unique match on external unique id.
     */
    public Department findSameDepartmentInSession(Long newSessionId, org.hibernate.Session hibSession){
        if (newSessionId == null){
            return(null);
        }
        Department newDept = Department.findByDeptCode(this.getDeptCode(), newSessionId, hibSession);
        if (newDept == null && this.getExternalUniqueId() != null){
            // if a department wasn't found and an external uniqueid exists for this
            // department check to see if the new term has a department that matches
            // the external unique id
            List l = hibSession.
                createCriteria(Department.class).
                add(Restrictions.eq("externalUniqueId",this.getExternalUniqueId())).
                add(Restrictions.eq("session.uniqueId", newSessionId)).
                setFlushMode(FlushMode.MANUAL).
                setCacheable(true).list();
            if (l.size() == 1){
                // only accept an unambiguous (single) match
                newDept = (Department) l.get(0);
            }
        }
        return(newDept);
    }

    /** Convenience overload taking the session object instead of its id. */
    public Department findSameDepartmentInSession(Session newSession){
        if (newSession != null) return(findSameDepartmentInSession(newSession.getUniqueId()));
        else return(null);
    }

    @Override
    public Serializable getQualifierId() {
        return getUniqueId();
    }

    @Override
    public String getQualifierType() {
        return getClass().getSimpleName();
    }

    @Override
    public String getQualifierReference() {
        return getDeptCode();
    }

    @Override
    public String getQualifierLabel() {
        return getName();
    }

    /**
     * Departments visible to the user: all in-use departments of the current session when the
     * user's authority is department independent, otherwise the departments in the user's qualifiers.
     */
    public static TreeSet<Department> getUserDepartments(UserContext user) {
        TreeSet<Department> departments = new TreeSet<Department>();
        if (user == null || user.getCurrentAuthority() == null) return departments;
        if (user.getCurrentAuthority().hasRight(Right.DepartmentIndependent))
            departments.addAll(Department.findAllBeingUsed(user.getCurrentAcademicSessionId()));
        else
            for (UserQualifier q: user.getCurrentAuthority().getQualifiers("Department"))
                departments.add(DepartmentDAO.getInstance().get((Long)q.getQualifierId()));
        return departments;
    }

    @Override
    public Department getDepartment() { return this; }

    /** Instructor attribute types usable by this department (session-wide or department-specific). */
    public Set<InstructorAttributeType> getAvailableAttributeTypes() {
        return new TreeSet<InstructorAttributeType>(DepartmentDAO.getInstance().getSession().createQuery(
                "select distinct t from InstructorAttribute a inner join a.type t " +
                "where a.session.uniqueId = :sessionId and (a.department is null or a.department.uniqueId = :departmentId)")
                .setLong("sessionId", getSessionId()).setLong("departmentId", getUniqueId()).setCacheable(true).list());
    }

    /** Instructor attributes usable by this department (session-wide or department-specific). */
    public Set<InstructorAttribute> getAvailableAttributes() {
        return new TreeSet<InstructorAttribute>(DepartmentDAO.getInstance().getSession().createQuery(
                "select a from InstructorAttribute a " +
                "where a.session.uniqueId = :sessionId and (a.department is null or a.department.uniqueId = :departmentId)")
                .setLong("sessionId", getSessionId()).setLong("departmentId", getUniqueId()).setCacheable(true).list());
    }

    /**
     * True when the department has any committed instructor scheduling, i.e., an offering
     * coordinator or class instructor that is linked to a teaching request.
     */
    public static boolean isInstructorSchedulingCommitted(Long departmentId) {
        Number oc = (Number)DepartmentDAO.getInstance().getSession().createQuery(
                "select count(oc) from OfferingCoordinator oc where oc.teachingRequest is not null and " +
                "oc.instructor.department.uniqueId = :departmentId").setLong("departmentId", departmentId).
                setCacheable(true).uniqueResult();
        if (oc.intValue() > 0) return true;
        Number ci = (Number)DepartmentDAO.getInstance().getSession().createQuery(
                "select count(ci) from ClassInstructor ci where ci.teachingRequest is not null and " +
                "ci.instructor.department.uniqueId = :departmentId").setLong("departmentId", departmentId).
                setCacheable(true).uniqueResult();
        return ci.intValue() > 0;
    }
}
| |
//
// Copyright 2016 Cityzen Data
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package io.warp10.quasar.trl;
import io.warp10.crypto.SipHashInline;
import io.warp10.quasar.filter.QuasarConfiguration;
import io.warp10.quasar.filter.sensision.QuasarTokenFilterSensisionConstants;
import io.warp10.sensision.Sensision;
import org.apache.commons.codec.binary.Hex;
import java.io.*;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Loads token revocation list (TRL) files from a configured directory and notifies
 * registered handlers whenever a newer set of files is detected.
 *
 * File name protocol: {@code warp.type.timestamp-md5.trl}; only the most recent file
 * per {@code warp.type} key is loaded, and a reload happens when a key is new or its
 * md5 changed. JVM-wide singleton; the background refresh thread is started by {@link #init()}.
 */
public class QuasarTokenRevocationListLoader {
  // raw configuration this loader was created with
  Properties config = null;

  // true once the background refresh thread has been started (one per JVM)
  private static AtomicBoolean initialized = new AtomicBoolean(false);
  // sleep between two refresh passes, in ms (WARP_TRL_PERIOD)
  private static long delay = 0L;
  // directory containing the .trl files (WARP_TRL_PATH); null disables the thread
  private static String path = null;
  // last fully loaded TRL, replayed to late-registered handlers
  private static QuasarTRL currentTrl = null;

  private static QuasarTokenRevocationListLoader quasarTokenRevocationListLoader = null;
  // guards one-time singleton construction
  private static AtomicBoolean singleton = new AtomicBoolean(false);

  // SipHash key halves used to hash application names (big-endian from appSipHashKey)
  private static long appIdSipHashKeyK0 = 0L;
  private static long appIdSipHashKeyK1 = 0L;

  // handlers notified after each successful TRL load
  private List<QuasarTRLLoadedHandler> quasarTRLLoadedHandler = new ArrayList<>();

  // expected file name: warp.type.timestamp-md5.trl (type one of read|write|full)
  private String trlPattern = "^([a-zA-Z0-9_-]*)\\.(read|write|full)\\.([0-9]*)-([a-f0-9]{32})\\.trl$";

  // Set of files already read
  private Map<String, JavaTRLLoaded> read = new HashMap<String, JavaTRLLoaded>();

  // labels attached to Sensision metrics (currently empty)
  private Map<String, String> labels = new HashMap<String, String>();

  /**
   * Returns the JVM-wide loader instance, creating it on first call.
   * The first 16 bytes of appSipHashKey seed the application-name SipHash key.
   * NOTE(review): config and key are only honored on the first call; later calls
   * return the existing instance regardless of arguments.
   */
  public static QuasarTokenRevocationListLoader getInstance(Properties config, byte[] appSipHashKey) {
    if (singleton.compareAndSet(false, true)) {
      ByteBuffer bb = ByteBuffer.wrap(appSipHashKey);
      bb.order(ByteOrder.BIG_ENDIAN);
      appIdSipHashKeyK0 = bb.getLong();
      appIdSipHashKeyK1 = bb.getLong();

      quasarTokenRevocationListLoader = new QuasarTokenRevocationListLoader(config);
    }
    return quasarTokenRevocationListLoader;
  }

  private QuasarTokenRevocationListLoader(Properties props) {
    this.config = props;
    delay = Long.parseLong(config.getProperty(QuasarConfiguration.WARP_TRL_PERIOD, QuasarConfiguration.WARP_TRL_PERIOD_DEFAULT));
    path = config.getProperty(QuasarConfiguration.WARP_TRL_PATH);
  }

  /** SipHash-2-4 of the application name with the configured key; 0 for null/empty names. */
  public static long getApplicationHash(String appName) {
    if (appName != null && appName.length() > 0) {
      byte[] appNameByteArray = appName.getBytes();
      return SipHashInline.hash24(appIdSipHashKeyK0, appIdSipHashKeyK1, appNameByteArray, 0, appNameByteArray.length);
    }
    return 0L;
  }

  /**
   * One refresh pass: scan the TRL directory, and when a newer file set is detected,
   * parse all selected files into a new QuasarTRL, notify handlers, and publish it.
   * Lines are either {@code #} comments, application entries (prefixed with
   * WARP_APPLICATION_PREFIX, hashed via {@link #getApplicationHash}), or hex-encoded
   * token SipHashes (first 8 bytes, big-endian). All errors are swallowed and only
   * reported through Sensision counters.
   */
  public void loadTrl() {
    try {
      QuasarTRL quasarTRL = null;

      //
      // Sensision metrics thread heart beat
      //
      Sensision.event(QuasarTokenFilterSensisionConstants.SENSISION_CLASS_QUASAR_FILTER_TRL_COUNT, labels, 1);

      //
      // get all files in the directory
      //
      String[] files = getFolderFiles(path);

      //
      // extract the most recent files per warp.type
      //
      Map<String, JavaTRLLoaded> latest = latestFilesToRead(files);

      boolean update = updateTRL(read, latest);

      if (update) {
        long now = System.currentTimeMillis();
        // sum files size
        int size = getSipHashesSize(latest.values());

        // load the selected files
        for (Map.Entry<String, JavaTRLLoaded> entry: latest.entrySet()) {
          if (null == quasarTRL) {
            quasarTRL = new QuasarTRL(size);
          }

          //
          // Read the token revocation list
          //
          BufferedReader br = null;
          try {
            br = new BufferedReader(new FileReader(new File(path, entry.getValue().fileName)));
            while (true) {
              String line = br.readLine();
              if (null == line) {
                break;
              }
              line = line.trim();

              // Skip empty lines
              if ("".equals(line)) {
                continue;
              }

              // Skip comments
              if (line.startsWith("#")) {
                continue;
              }

              // application
              if (line.startsWith(QuasarConfiguration.WARP_APPLICATION_PREFIX)) {
                // compute the sip hash with the app name
                long appSipHash = getApplicationHash(line.substring(1));
                quasarTRL.revokeApplication(appSipHash);
              } else {
                // token sip hash hex encoded convert it into long
                byte[] bytes = Hex.decodeHex(line.toCharArray());
                long tokenRevoked = ByteBuffer.wrap(bytes, 0, 8).order(ByteOrder.BIG_ENDIAN).getLong();
                // add it to the future trl list
                quasarTRL.revokeToken(tokenRevoked);
              }
            }
            // mark as read
            read.put(entry.getKey(), entry.getValue());
          } catch (Exception exp) {
            // per-file failure: keep going with the remaining files
            exp.printStackTrace();
          } finally {
            if (null != br) {
              try {
                br.close();
              } catch (IOException e) {
              }
            }
          }
        } // end for all files

        if (0 != quasarTRLLoadedHandler.size() && null != quasarTRL) {
          //
          // sort and switch the new trl
          //
          quasarTRL.sortTokens();

          //
          // call all the handlers
          //
          for (QuasarTRLLoadedHandler handler: quasarTRLLoadedHandler) {
            handler.onQuasarTRL(quasarTRL);
          }
          currentTrl = quasarTRL;

          //
          // Sensision trl loaded
          //
          long timeElapsed = System.currentTimeMillis() - now;
          Sensision.event(QuasarTokenFilterSensisionConstants.SENSISION_CLASS_QUASAR_FILTER_TRL_LOAD_TIME, labels, timeElapsed);
          Sensision.event(QuasarTokenFilterSensisionConstants.SENSISION_CLASS_QUASAR_FILTER_TRL_TOKENS_COUNT, labels, quasarTRL.getTrlSize());
        }
      } // end if update
    } catch (Exception exp) {
      // thread error
      Sensision.update(QuasarTokenFilterSensisionConstants.SENSISION_CLASS_QUASAR_FILTER_TRL_ERROR_COUNT, labels, 1);
    }
  }

  /**
   * Starts the daemon refresh thread (once per JVM). Does nothing when no TRL path
   * is configured or when the thread is already running.
   */
  public void init() {
    // initialize only once per JVM
    if (initialized.get()) {
      return;
    }

    Thread t = new Thread() {
      @Override
      public void run() {
        while (true) {
          loadTrl();

          // time to sleep
          try {
            Thread.sleep(delay);
          } catch (InterruptedException ie) {
          }
        } // while(true)
      } // run()
    };

    if (null != path && initialized.compareAndSet(false, true)) {
      t.setName("[TokenRevocationListLoader]");
      t.setDaemon(true);
      t.start();
    }
  }

  /** True when any latest file is new (unknown key) or differs by md5 from the one last read. */
  private boolean updateTRL(Map<String, JavaTRLLoaded> read, Map<String, JavaTRLLoaded> latest) {
    boolean update = false;
    for (String key: latest.keySet()) {
      JavaTRLLoaded actualTrl = read.get(key);
      JavaTRLLoaded newTrl = latest.get(key);

      // not current trl -> load it
      if (null == actualTrl) {
        update = true;
        break;
      }

      // md5 not equals -> load it
      if (!actualTrl.md5.equals(newTrl.md5)) {
        update = true;
        break;
      }
    }
    return update;
  }

  /** Keeps, per "warp.type" key, only the file with the highest embedded timestamp. */
  private Map<String, JavaTRLLoaded> latestFilesToRead(String[] files) {
    // key = warp.type
    Map<String, JavaTRLLoaded> filesToRead = new HashMap<String, JavaTRLLoaded>();

    Pattern pattern = Pattern.compile(trlPattern);

    for (String file: files) {
      Matcher matcher = pattern.matcher(file);

      if (matcher.matches()) {
        // get the key warp.type
        String warp = matcher.group(1);
        String type = matcher.group(2);
        long ts = Long.valueOf(matcher.group(3));
        String md5 = matcher.group(4);

        String key = warp + "." + type;

        JavaTRLLoaded current = filesToRead.get(key);

        if (null == current || (null != current && ts > current.timestamp)) {
          JavaTRLLoaded next = new JavaTRLLoaded();
          next.fileName = file;
          next.timestamp = ts;
          next.warp = warp;
          next.type = type;
          next.md5 = md5;
          filesToRead.put(key, next);
        }
      }
    }
    return filesToRead;
  }

  /** Names of the files in {@code path} matching the TRL pattern, sorted lexicographically. */
  private String[] getFolderFiles(String path) {
    final File root = new File(path);

    String[] files = root.list(new FilenameFilter() {
      @Override
      public boolean accept(File d, String name) {
        if (!d.equals(root)) {
          return false;
        }
        return name.matches(trlPattern);
      }
    });

    // Sort files in lexicographic order
    if (null == files) {
      // path unreadable or not a directory
      files = new String[0];
    }
    Arrays.sort(files);

    return files;
  }

  /**
   * Estimation if the number of SIPhashes in the files according to the file size
   * @param files
   * @return estimated number of hashes (total bytes / 17)
   */
  private int getSipHashesSize(Collection<JavaTRLLoaded> files) {
    // sum files size
    int size = 0;
    for (JavaTRLLoaded file: files) {
      File filename = new File(path, file.fileName);
      size += filename.length();
    }
    // each line = long hexa encoded (16 bytes) + CR
    return size / 17;
  }

  /** Registers a handler; immediately replays the current TRL when one is already loaded. */
  public void addTrlUpdatedHandler(QuasarTRLLoadedHandler handler) {
    quasarTRLLoadedHandler.add(handler);

    // notify if a trl is already available
    if (null != currentTrl) {
      handler.onQuasarTRL(currentTrl);
    }
  }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.action.admin.cluster.settings;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.util.HashSet;
import java.util.Set;
import static org.elasticsearch.common.settings.AbstractScopedSettings.ARCHIVED_SETTINGS_PREFIX;
public class TransportClusterUpdateSettingsAction extends
TransportMasterNodeAction<ClusterUpdateSettingsRequest, ClusterUpdateSettingsResponse> {
private static final Logger logger = LogManager.getLogger(TransportClusterUpdateSettingsAction.class);
private final AllocationService allocationService;
private final ClusterSettings clusterSettings;
    /**
     * Wires the transport action for cluster-wide settings updates.
     * Runs on the SAME thread pool; reroute after an update is delegated to the allocation service.
     */
    @Inject
    public TransportClusterUpdateSettingsAction(TransportService transportService, ClusterService clusterService,
                                                ThreadPool threadPool, AllocationService allocationService, ActionFilters actionFilters,
                                                IndexNameExpressionResolver indexNameExpressionResolver, ClusterSettings clusterSettings) {
        super(ClusterUpdateSettingsAction.NAME, false, transportService, clusterService, threadPool, actionFilters,
            ClusterUpdateSettingsRequest::new, indexNameExpressionResolver, ClusterUpdateSettingsResponse::new, ThreadPool.Names.SAME);
        this.allocationService = allocationService;
        this.clusterSettings = clusterSettings;
    }
    /**
     * Skip the global METADATA_WRITE block check when all of the following are true:
     * 1. At least one of cluster.blocks.read_only or cluster.blocks.read_only_allow_delete is being cleared (set to null or false).
     * 2. Neither cluster.blocks.read_only nor cluster.blocks.read_only_allow_delete is being set to true.
     * 3. The only other settings in this update are archived ones being set to null.
     * Otherwise the request is subject to the usual global block check.
     */
    @Override
    protected ClusterBlockException checkBlock(ClusterUpdateSettingsRequest request, ClusterState state) {
        Set<String> clearedBlockAndArchivedSettings = new HashSet<>();
        // both transient and persistent settings must consist only of cleared blocks / nulled archived settings
        if (checkClearedBlockAndArchivedSettings(request.transientSettings(), clearedBlockAndArchivedSettings)
            && checkClearedBlockAndArchivedSettings(request.persistentSettings(), clearedBlockAndArchivedSettings)) {
            // and at least one of the two block settings must actually be cleared
            if (clearedBlockAndArchivedSettings.contains(Metadata.SETTING_READ_ONLY_SETTING.getKey())
                || clearedBlockAndArchivedSettings.contains(Metadata.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey())) {
                return null;
            }
        }
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
    }
/**
 * Scans {@code settings} and reports whether every entry is either a cluster block
 * setting being cleared (assigned {@code null} or {@code false}) or an archived setting
 * being removed (assigned {@code null}).
 *
 * @param settings the settings from the update request to inspect
 * @param clearedBlockAndArchivedSettings accumulator for the keys of every cleared
 *        block setting and removed archived setting encountered so far
 * @return {@code true} if all entries clear blocks or remove archived settings;
 *         {@code false} as soon as any other kind of update is found
 */
private boolean checkClearedBlockAndArchivedSettings(final Settings settings,
                                                     final Set<String> clearedBlockAndArchivedSettings) {
    for (final String key : settings.keySet()) {
        final boolean clearsSomething;
        if (Metadata.SETTING_READ_ONLY_SETTING.getKey().equals(key)) {
            // a true value would SET the read-only block rather than clear it
            clearsSomething = Metadata.SETTING_READ_ONLY_SETTING.get(settings) == false;
        } else if (Metadata.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey().equals(key)) {
            // a true value would SET the read-only-allow-delete block rather than clear it
            clearsSomething = Metadata.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.get(settings) == false;
        } else if (key.startsWith(ARCHIVED_SETTINGS_PREFIX)) {
            // archived settings may only be removed, never assigned a value
            clearsSomething = settings.get(key) == null;
        } else {
            // any other setting makes this a regular update
            clearsSomething = false;
        }
        if (clearsSomething == false) {
            return false;
        }
        clearedBlockAndArchivedSettings.add(key);
    }
    return true;
}
/**
 * Applies the requested transient and persistent settings on the elected master.
 * The update runs as an IMMEDIATE-priority acked cluster state task; when the settings
 * actually changed the state, a follow-up reroute task is submitted (settings such as
 * the number of replicas may require shard re-allocation), and the listener is only
 * completed once that reroute task finishes or times out.
 *
 * Fix in this revision: corrected the "failed to preform reroute" log-message typo
 * to "perform"; all other behavior is unchanged.
 */
@Override
protected void masterOperation(Task task, final ClusterUpdateSettingsRequest request, final ClusterState state,
                               final ActionListener<ClusterUpdateSettingsResponse> listener) {
    final SettingsUpdater updater = new SettingsUpdater(clusterSettings);
    clusterService.submitStateUpdateTask("cluster_update_settings",
        new AckedClusterStateUpdateTask(Priority.IMMEDIATE, request, listener) {

            // Set by execute(); read from ack callbacks on another thread, hence volatile.
            private volatile boolean changed = false;

            @Override
            protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) {
                return new ClusterUpdateSettingsResponse(acknowledged, updater.getTransientUpdates(), updater.getPersistentUpdate());
            }

            @Override
            public void onAllNodesAcked(@Nullable Exception e) {
                if (changed) {
                    // settings changed the state -> schedule the follow-up reroute
                    reroute(true);
                } else {
                    super.onAllNodesAcked(e);
                }
            }

            @Override
            public void onAckTimeout() {
                if (changed) {
                    // still reroute, but report the settings update as not acked
                    reroute(false);
                } else {
                    super.onAckTimeout();
                }
            }

            private void reroute(final boolean updateSettingsAcked) {
                // We're about to send a second update task, so we need to check if we're still the elected master
                // For example the minimum_master_node could have been breached and we're no longer elected master,
                // so we should *not* execute the reroute.
                if (clusterService.state().nodes().isLocalNodeElectedMaster() == false) {
                    logger.debug("Skipping reroute after cluster update settings, because node is no longer master");
                    listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, updater.getTransientUpdates(),
                        updater.getPersistentUpdate()));
                    return;
                }

                // The reason the reroute needs to be send as separate update task, is that all the *cluster* settings are encapsulate
                // in the components (e.g. FilterAllocationDecider), so the changes made by the first call aren't visible
                // to the components until the ClusterStateListener instances have been invoked, but are visible after
                // the first update task has been completed.
                clusterService.submitStateUpdateTask("reroute_after_cluster_update_settings",
                    new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) {

                        @Override
                        public boolean mustAck(DiscoveryNode discoveryNode) {
                            //we wait for the reroute ack only if the update settings was acknowledged
                            return updateSettingsAcked;
                        }

                        @Override
                        // we return when the cluster reroute is acked or it times out but the acknowledged flag depends on whether the
                        // update settings was acknowledged
                        protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) {
                            return new ClusterUpdateSettingsResponse(updateSettingsAcked && acknowledged, updater.getTransientUpdates(),
                                updater.getPersistentUpdate());
                        }

                        @Override
                        public void onNoLongerMaster(String source) {
                            logger.debug("failed to perform reroute after cluster settings were updated - current node is no longer a master");
                            listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, updater.getTransientUpdates(),
                                updater.getPersistentUpdate()));
                        }

                        @Override
                        public void onFailure(String source, Exception e) {
                            //if the reroute fails we only log
                            logger.debug(() -> new ParameterizedMessage("failed to perform [{}]", source), e);
                            listener.onFailure(new ElasticsearchException("reroute after update settings failed", e));
                        }

                        @Override
                        public ClusterState execute(final ClusterState currentState) {
                            // now, reroute in case things that require it changed (e.g. number of replicas)
                            return allocationService.reroute(currentState, "reroute after cluster update settings");
                        }
                    });
            }

            @Override
            public void onFailure(String source, Exception e) {
                logger.debug(() -> new ParameterizedMessage("failed to perform [{}]", source), e);
                super.onFailure(source, e);
            }

            @Override
            public ClusterState execute(final ClusterState currentState) {
                // upgradeSettings translates deprecated setting names before applying
                final ClusterState clusterState =
                    updater.updateSettings(
                        currentState,
                        clusterSettings.upgradeSettings(request.transientSettings()),
                        clusterSettings.upgradeSettings(request.persistentSettings()),
                        logger);
                changed = clusterState != currentState;
                return clusterState;
            }
        });
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simpledb;
import javax.annotation.Generated;
import com.amazonaws.services.simpledb.model.*;
/**
* Interface for accessing Amazon SimpleDB asynchronously. Each asynchronous method will return a Java Future object
* representing the asynchronous operation; overloads which accept an {@code AsyncHandler} can be used to receive
* notification when an asynchronous operation completes.
* <p>
* <b>Note:</b> Do not directly implement this interface, new methods are added to it regularly. Extend from
* {@link com.amazonaws.services.simpledb.AbstractAmazonSimpleDBAsync} instead.
* </p>
* <p>
* Amazon SimpleDB is a web service providing the core database functions of data indexing and querying in the cloud. By
* offloading the time and effort associated with building and operating a web-scale database, SimpleDB provides
* developers the freedom to focus on application development.
* <p>
* A traditional, clustered relational database requires a sizable upfront capital outlay, is complex to design, and
* often requires extensive and repetitive database administration. Amazon SimpleDB is dramatically simpler, requiring
* no schema, automatically indexing your data and providing a simple API for storage and access. This approach
* eliminates the administrative burden of data modeling, index maintenance, and performance tuning. Developers gain
* access to this functionality within Amazon's proven computing environment, are able to scale instantly, and pay only
* for what they use.
* </p>
* <p>
* Visit <a href="http://aws.amazon.com/simpledb/">http://aws.amazon.com/simpledb/</a> for more information.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public interface AmazonSimpleDBAsync extends AmazonSimpleDB {

    /**
     * <p>
     * Performs multiple DeleteAttributes operations in a single call, which reduces round trips and latencies. This
     * enables Amazon SimpleDB to optimize requests, which generally yields better throughput.
     * </p>
     * <p>
     * The following limitations are enforced for this operation:
     * <ul>
     * <li>1 MB request size</li>
     * <li>25 item limit per BatchDeleteAttributes operation</li>
     * </ul>
     * </p>
     *
     * @param batchDeleteAttributesRequest
     * @return A Java Future containing the result of the BatchDeleteAttributes operation returned by the service.
     * @sample AmazonSimpleDBAsync.BatchDeleteAttributes
     */
    java.util.concurrent.Future<BatchDeleteAttributesResult> batchDeleteAttributesAsync(BatchDeleteAttributesRequest batchDeleteAttributesRequest);

    /**
     * <p>
     * Performs multiple DeleteAttributes operations in a single call, which reduces round trips and latencies. This
     * enables Amazon SimpleDB to optimize requests, which generally yields better throughput.
     * </p>
     * <p>
     * The following limitations are enforced for this operation:
     * <ul>
     * <li>1 MB request size</li>
     * <li>25 item limit per BatchDeleteAttributes operation</li>
     * </ul>
     * </p>
     *
     * @param batchDeleteAttributesRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the BatchDeleteAttributes operation returned by the service.
     * @sample AmazonSimpleDBAsyncHandler.BatchDeleteAttributes
     */
    java.util.concurrent.Future<BatchDeleteAttributesResult> batchDeleteAttributesAsync(BatchDeleteAttributesRequest batchDeleteAttributesRequest,
            com.amazonaws.handlers.AsyncHandler<BatchDeleteAttributesRequest, BatchDeleteAttributesResult> asyncHandler);

    /**
     * <p>
     * The <code>BatchPutAttributes</code> operation creates or replaces attributes within one or more items. By using
     * this operation, the client can perform multiple <a>PutAttribute</a> operation with a single call. This helps
     * yield savings in round trips and latencies, enabling Amazon SimpleDB to optimize requests and generally produce
     * better throughput.
     * </p>
     * <p>
     * The client may specify the item name with the <code>Item.X.ItemName</code> parameter. The client may specify new
     * attributes using a combination of the <code>Item.X.Attribute.Y.Name</code> and
     * <code>Item.X.Attribute.Y.Value</code> parameters. The client may specify the first attribute for the first item
     * using the parameters <code>Item.0.Attribute.0.Name</code> and <code>Item.0.Attribute.0.Value</code>, and for the
     * second attribute for the first item by the parameters <code>Item.0.Attribute.1.Name</code> and
     * <code>Item.0.Attribute.1.Value</code>, and so on.
     * </p>
     * <p>
     * Attributes are uniquely identified within an item by their name/value combination. For example, a single item can
     * have the attributes <code>{ "first_name", "first_value" }</code> and
     * <code>{ "first_name", "second_value" }</code>. However, it cannot have two attribute instances where both the
     * <code>Item.X.Attribute.Y.Name</code> and <code>Item.X.Attribute.Y.Value</code> are the same.
     * </p>
     * <p>
     * Optionally, the requester can supply the <code>Replace</code> parameter for each individual value. Setting this
     * value to <code>true</code> will cause the new attribute values to replace the existing attribute values. For
     * example, if an item <code>I</code> has the attributes <code>{ 'a', '1' }, { 'b', '2'}</code> and
     * <code>{ 'b', '3' }</code> and the requester does a BatchPutAttributes of <code>{'I', 'b', '4' }</code> with the
     * Replace parameter set to true, the final attributes of the item will be <code>{ 'a', '1' }</code> and
     * <code>{ 'b', '4' }</code>, replacing the previous values of the 'b' attribute with the new value.
     * </p>
     * <important> This operation is vulnerable to exceeding the maximum URL size when making a REST request using the
     * HTTP GET method. This operation does not support conditions using <code>Expected.X.Name</code>,
     * <code>Expected.X.Value</code>, or <code>Expected.X.Exists</code>. </important>
     * <p>
     * You can execute multiple <code>BatchPutAttributes</code> operations and other operations in parallel. However,
     * large numbers of concurrent <code>BatchPutAttributes</code> calls can result in Service Unavailable (503)
     * responses.
     * </p>
     * <p>
     * The following limitations are enforced for this operation:
     * <ul>
     * <li>256 attribute name-value pairs per item</li>
     * <li>1 MB request size</li>
     * <li>1 billion attributes per domain</li>
     * <li>10 GB of total user data storage per domain</li>
     * <li>25 item limit per <code>BatchPutAttributes</code> operation</li>
     * </ul>
     * </p>
     *
     * @param batchPutAttributesRequest
     * @return A Java Future containing the result of the BatchPutAttributes operation returned by the service.
     * @sample AmazonSimpleDBAsync.BatchPutAttributes
     */
    java.util.concurrent.Future<BatchPutAttributesResult> batchPutAttributesAsync(BatchPutAttributesRequest batchPutAttributesRequest);

    /**
     * <p>
     * The <code>BatchPutAttributes</code> operation creates or replaces attributes within one or more items. By using
     * this operation, the client can perform multiple <a>PutAttribute</a> operation with a single call. This helps
     * yield savings in round trips and latencies, enabling Amazon SimpleDB to optimize requests and generally produce
     * better throughput.
     * </p>
     * <p>
     * The client may specify the item name with the <code>Item.X.ItemName</code> parameter. The client may specify new
     * attributes using a combination of the <code>Item.X.Attribute.Y.Name</code> and
     * <code>Item.X.Attribute.Y.Value</code> parameters. The client may specify the first attribute for the first item
     * using the parameters <code>Item.0.Attribute.0.Name</code> and <code>Item.0.Attribute.0.Value</code>, and for the
     * second attribute for the first item by the parameters <code>Item.0.Attribute.1.Name</code> and
     * <code>Item.0.Attribute.1.Value</code>, and so on.
     * </p>
     * <p>
     * Attributes are uniquely identified within an item by their name/value combination. For example, a single item can
     * have the attributes <code>{ "first_name", "first_value" }</code> and
     * <code>{ "first_name", "second_value" }</code>. However, it cannot have two attribute instances where both the
     * <code>Item.X.Attribute.Y.Name</code> and <code>Item.X.Attribute.Y.Value</code> are the same.
     * </p>
     * <p>
     * Optionally, the requester can supply the <code>Replace</code> parameter for each individual value. Setting this
     * value to <code>true</code> will cause the new attribute values to replace the existing attribute values. For
     * example, if an item <code>I</code> has the attributes <code>{ 'a', '1' }, { 'b', '2'}</code> and
     * <code>{ 'b', '3' }</code> and the requester does a BatchPutAttributes of <code>{'I', 'b', '4' }</code> with the
     * Replace parameter set to true, the final attributes of the item will be <code>{ 'a', '1' }</code> and
     * <code>{ 'b', '4' }</code>, replacing the previous values of the 'b' attribute with the new value.
     * </p>
     * <important> This operation is vulnerable to exceeding the maximum URL size when making a REST request using the
     * HTTP GET method. This operation does not support conditions using <code>Expected.X.Name</code>,
     * <code>Expected.X.Value</code>, or <code>Expected.X.Exists</code>. </important>
     * <p>
     * You can execute multiple <code>BatchPutAttributes</code> operations and other operations in parallel. However,
     * large numbers of concurrent <code>BatchPutAttributes</code> calls can result in Service Unavailable (503)
     * responses.
     * </p>
     * <p>
     * The following limitations are enforced for this operation:
     * <ul>
     * <li>256 attribute name-value pairs per item</li>
     * <li>1 MB request size</li>
     * <li>1 billion attributes per domain</li>
     * <li>10 GB of total user data storage per domain</li>
     * <li>25 item limit per <code>BatchPutAttributes</code> operation</li>
     * </ul>
     * </p>
     *
     * @param batchPutAttributesRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the BatchPutAttributes operation returned by the service.
     * @sample AmazonSimpleDBAsyncHandler.BatchPutAttributes
     */
    java.util.concurrent.Future<BatchPutAttributesResult> batchPutAttributesAsync(BatchPutAttributesRequest batchPutAttributesRequest,
            com.amazonaws.handlers.AsyncHandler<BatchPutAttributesRequest, BatchPutAttributesResult> asyncHandler);

    /**
     * <p>
     * The <code>CreateDomain</code> operation creates a new domain. The domain name should be unique among the domains
     * associated with the Access Key ID provided in the request. The <code>CreateDomain</code> operation may take 10 or
     * more seconds to complete.
     * </p>
     * <p>
     * The client can create up to 100 domains per account.
     * </p>
     * <p>
     * If the client requires additional domains, go to <a
     * href="http://aws.amazon.com/contact-us/simpledb-limit-request/">
     * http://aws.amazon.com/contact-us/simpledb-limit-request/</a>.
     * </p>
     *
     * @param createDomainRequest
     * @return A Java Future containing the result of the CreateDomain operation returned by the service.
     * @sample AmazonSimpleDBAsync.CreateDomain
     */
    java.util.concurrent.Future<CreateDomainResult> createDomainAsync(CreateDomainRequest createDomainRequest);

    /**
     * <p>
     * The <code>CreateDomain</code> operation creates a new domain. The domain name should be unique among the domains
     * associated with the Access Key ID provided in the request. The <code>CreateDomain</code> operation may take 10 or
     * more seconds to complete.
     * </p>
     * <p>
     * The client can create up to 100 domains per account.
     * </p>
     * <p>
     * If the client requires additional domains, go to <a
     * href="http://aws.amazon.com/contact-us/simpledb-limit-request/">
     * http://aws.amazon.com/contact-us/simpledb-limit-request/</a>.
     * </p>
     *
     * @param createDomainRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the CreateDomain operation returned by the service.
     * @sample AmazonSimpleDBAsyncHandler.CreateDomain
     */
    java.util.concurrent.Future<CreateDomainResult> createDomainAsync(CreateDomainRequest createDomainRequest,
            com.amazonaws.handlers.AsyncHandler<CreateDomainRequest, CreateDomainResult> asyncHandler);

    /**
     * <p>
     * Deletes one or more attributes associated with an item. If all attributes of the item are deleted, the item is
     * deleted.
     * </p>
     * <p>
     * <code>DeleteAttributes</code> is an idempotent operation; running it multiple times on the same item or attribute
     * does not result in an error response.
     * </p>
     * <p>
     * Because Amazon SimpleDB makes multiple copies of item data and uses an eventual consistency update model,
     * performing a <a>GetAttributes</a> or <a>Select</a> operation (read) immediately after a
     * <code>DeleteAttributes</code> or <a>PutAttributes</a> operation (write) might not return updated item data.
     * </p>
     *
     * @param deleteAttributesRequest
     * @return A Java Future containing the result of the DeleteAttributes operation returned by the service.
     * @sample AmazonSimpleDBAsync.DeleteAttributes
     */
    java.util.concurrent.Future<DeleteAttributesResult> deleteAttributesAsync(DeleteAttributesRequest deleteAttributesRequest);

    /**
     * <p>
     * Deletes one or more attributes associated with an item. If all attributes of the item are deleted, the item is
     * deleted.
     * </p>
     * <p>
     * <code>DeleteAttributes</code> is an idempotent operation; running it multiple times on the same item or attribute
     * does not result in an error response.
     * </p>
     * <p>
     * Because Amazon SimpleDB makes multiple copies of item data and uses an eventual consistency update model,
     * performing a <a>GetAttributes</a> or <a>Select</a> operation (read) immediately after a
     * <code>DeleteAttributes</code> or <a>PutAttributes</a> operation (write) might not return updated item data.
     * </p>
     *
     * @param deleteAttributesRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the DeleteAttributes operation returned by the service.
     * @sample AmazonSimpleDBAsyncHandler.DeleteAttributes
     */
    java.util.concurrent.Future<DeleteAttributesResult> deleteAttributesAsync(DeleteAttributesRequest deleteAttributesRequest,
            com.amazonaws.handlers.AsyncHandler<DeleteAttributesRequest, DeleteAttributesResult> asyncHandler);

    /**
     * <p>
     * The <code>DeleteDomain</code> operation deletes a domain. Any items (and their attributes) in the domain are
     * deleted as well. The <code>DeleteDomain</code> operation might take 10 or more seconds to complete.
     * </p>
     *
     * @param deleteDomainRequest
     * @return A Java Future containing the result of the DeleteDomain operation returned by the service.
     * @sample AmazonSimpleDBAsync.DeleteDomain
     */
    java.util.concurrent.Future<DeleteDomainResult> deleteDomainAsync(DeleteDomainRequest deleteDomainRequest);

    /**
     * <p>
     * The <code>DeleteDomain</code> operation deletes a domain. Any items (and their attributes) in the domain are
     * deleted as well. The <code>DeleteDomain</code> operation might take 10 or more seconds to complete.
     * </p>
     *
     * @param deleteDomainRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the DeleteDomain operation returned by the service.
     * @sample AmazonSimpleDBAsyncHandler.DeleteDomain
     */
    java.util.concurrent.Future<DeleteDomainResult> deleteDomainAsync(DeleteDomainRequest deleteDomainRequest,
            com.amazonaws.handlers.AsyncHandler<DeleteDomainRequest, DeleteDomainResult> asyncHandler);

    /**
     * <p>
     * Returns information about the domain, including when the domain was created, the number of items and attributes
     * in the domain, and the size of the attribute names and values.
     * </p>
     *
     * @param domainMetadataRequest
     * @return A Java Future containing the result of the DomainMetadata operation returned by the service.
     * @sample AmazonSimpleDBAsync.DomainMetadata
     */
    java.util.concurrent.Future<DomainMetadataResult> domainMetadataAsync(DomainMetadataRequest domainMetadataRequest);

    /**
     * <p>
     * Returns information about the domain, including when the domain was created, the number of items and attributes
     * in the domain, and the size of the attribute names and values.
     * </p>
     *
     * @param domainMetadataRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the DomainMetadata operation returned by the service.
     * @sample AmazonSimpleDBAsyncHandler.DomainMetadata
     */
    java.util.concurrent.Future<DomainMetadataResult> domainMetadataAsync(DomainMetadataRequest domainMetadataRequest,
            com.amazonaws.handlers.AsyncHandler<DomainMetadataRequest, DomainMetadataResult> asyncHandler);

    /**
     * <p>
     * Returns all of the attributes associated with the specified item. Optionally, the attributes returned can be
     * limited to one or more attributes by specifying an attribute name parameter.
     * </p>
     * <p>
     * If the item does not exist on the replica that was accessed for this operation, an empty set is returned. The
     * system does not return an error as it cannot guarantee the item does not exist on other replicas.
     * </p>
     *
     * @param getAttributesRequest
     * @return A Java Future containing the result of the GetAttributes operation returned by the service.
     * @sample AmazonSimpleDBAsync.GetAttributes
     */
    java.util.concurrent.Future<GetAttributesResult> getAttributesAsync(GetAttributesRequest getAttributesRequest);

    /**
     * <p>
     * Returns all of the attributes associated with the specified item. Optionally, the attributes returned can be
     * limited to one or more attributes by specifying an attribute name parameter.
     * </p>
     * <p>
     * If the item does not exist on the replica that was accessed for this operation, an empty set is returned. The
     * system does not return an error as it cannot guarantee the item does not exist on other replicas.
     * </p>
     *
     * @param getAttributesRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the GetAttributes operation returned by the service.
     * @sample AmazonSimpleDBAsyncHandler.GetAttributes
     */
    java.util.concurrent.Future<GetAttributesResult> getAttributesAsync(GetAttributesRequest getAttributesRequest,
            com.amazonaws.handlers.AsyncHandler<GetAttributesRequest, GetAttributesResult> asyncHandler);

    /**
     * <p>
     * The <code>ListDomains</code> operation lists all domains associated with the Access Key ID. It returns domain
     * names up to the limit set by <a href="#MaxNumberOfDomains">MaxNumberOfDomains</a>. A <a
     * href="#NextToken">NextToken</a> is returned if there are more than <code>MaxNumberOfDomains</code> domains.
     * Calling <code>ListDomains</code> successive times with the <code>NextToken</code> provided by the operation
     * returns up to <code>MaxNumberOfDomains</code> more domain names with each successive operation call.
     * </p>
     *
     * @param listDomainsRequest
     * @return A Java Future containing the result of the ListDomains operation returned by the service.
     * @sample AmazonSimpleDBAsync.ListDomains
     */
    java.util.concurrent.Future<ListDomainsResult> listDomainsAsync(ListDomainsRequest listDomainsRequest);

    /**
     * <p>
     * The <code>ListDomains</code> operation lists all domains associated with the Access Key ID. It returns domain
     * names up to the limit set by <a href="#MaxNumberOfDomains">MaxNumberOfDomains</a>. A <a
     * href="#NextToken">NextToken</a> is returned if there are more than <code>MaxNumberOfDomains</code> domains.
     * Calling <code>ListDomains</code> successive times with the <code>NextToken</code> provided by the operation
     * returns up to <code>MaxNumberOfDomains</code> more domain names with each successive operation call.
     * </p>
     *
     * @param listDomainsRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the ListDomains operation returned by the service.
     * @sample AmazonSimpleDBAsyncHandler.ListDomains
     */
    java.util.concurrent.Future<ListDomainsResult> listDomainsAsync(ListDomainsRequest listDomainsRequest,
            com.amazonaws.handlers.AsyncHandler<ListDomainsRequest, ListDomainsResult> asyncHandler);

    /**
     * Simplified method form for invoking the ListDomains operation.
     *
     * @see #listDomainsAsync(ListDomainsRequest)
     */
    java.util.concurrent.Future<ListDomainsResult> listDomainsAsync();

    /**
     * Simplified method form for invoking the ListDomains operation with an AsyncHandler.
     *
     * @see #listDomainsAsync(ListDomainsRequest, com.amazonaws.handlers.AsyncHandler)
     */
    java.util.concurrent.Future<ListDomainsResult> listDomainsAsync(com.amazonaws.handlers.AsyncHandler<ListDomainsRequest, ListDomainsResult> asyncHandler);

    /**
     * <p>
     * The PutAttributes operation creates or replaces attributes in an item. The client may specify new attributes
     * using a combination of the <code>Attribute.X.Name</code> and <code>Attribute.X.Value</code> parameters. The
     * client specifies the first attribute by the parameters <code>Attribute.0.Name</code> and
     * <code>Attribute.0.Value</code>, the second attribute by the parameters <code>Attribute.1.Name</code> and
     * <code>Attribute.1.Value</code>, and so on.
     * </p>
     * <p>
     * Attributes are uniquely identified in an item by their name/value combination. For example, a single item can
     * have the attributes <code>{ "first_name", "first_value" }</code> and <code>{ "first_name", "second_value" }</code>
     * . However, it cannot have two attribute instances where both the <code>Attribute.X.Name</code> and
     * <code>Attribute.X.Value</code> are the same.
     * </p>
     * <p>
     * Optionally, the requestor can supply the <code>Replace</code> parameter for each individual attribute. Setting
     * this value to <code>true</code> causes the new attribute value to replace the existing attribute value(s). For
     * example, if an item has the attributes <code>{ 'a', '1' }</code>, <code>{ 'b', '2'}</code> and
     * <code>{ 'b', '3' }</code> and the requestor calls <code>PutAttributes</code> using the attributes
     * <code>{ 'b', '4' }</code> with the <code>Replace</code> parameter set to true, the final attributes of the item
     * are changed to <code>{ 'a', '1' }</code> and <code>{ 'b', '4' }</code>, which replaces the previous values of the
     * 'b' attribute with the new value.
     * </p>
     * <p>
     * You cannot specify an empty string as an attribute name.
     * </p>
     * <p>
     * Because Amazon SimpleDB makes multiple copies of client data and uses an eventual consistency update model, an
     * immediate <a>GetAttributes</a> or <a>Select</a> operation (read) immediately after a <a>PutAttributes</a> or
     * <a>DeleteAttributes</a> operation (write) might not return the updated data.
     * </p>
     * <p>
     * The following limitations are enforced for this operation:
     * <ul>
     * <li>256 total attribute name-value pairs per item</li>
     * <li>One billion attributes per domain</li>
     * <li>10 GB of total user data storage per domain</li>
     * </ul>
     * </p>
     *
     * @param putAttributesRequest
     * @return A Java Future containing the result of the PutAttributes operation returned by the service.
     * @sample AmazonSimpleDBAsync.PutAttributes
     */
    java.util.concurrent.Future<PutAttributesResult> putAttributesAsync(PutAttributesRequest putAttributesRequest);

    /**
     * <p>
     * The PutAttributes operation creates or replaces attributes in an item. The client may specify new attributes
     * using a combination of the <code>Attribute.X.Name</code> and <code>Attribute.X.Value</code> parameters. The
     * client specifies the first attribute by the parameters <code>Attribute.0.Name</code> and
     * <code>Attribute.0.Value</code>, the second attribute by the parameters <code>Attribute.1.Name</code> and
     * <code>Attribute.1.Value</code>, and so on.
     * </p>
     * <p>
     * Attributes are uniquely identified in an item by their name/value combination. For example, a single item can
     * have the attributes <code>{ "first_name", "first_value" }</code> and <code>{ "first_name", "second_value" }</code>
     * . However, it cannot have two attribute instances where both the <code>Attribute.X.Name</code> and
     * <code>Attribute.X.Value</code> are the same.
     * </p>
     * <p>
     * Optionally, the requestor can supply the <code>Replace</code> parameter for each individual attribute. Setting
     * this value to <code>true</code> causes the new attribute value to replace the existing attribute value(s). For
     * example, if an item has the attributes <code>{ 'a', '1' }</code>, <code>{ 'b', '2'}</code> and
     * <code>{ 'b', '3' }</code> and the requestor calls <code>PutAttributes</code> using the attributes
     * <code>{ 'b', '4' }</code> with the <code>Replace</code> parameter set to true, the final attributes of the item
     * are changed to <code>{ 'a', '1' }</code> and <code>{ 'b', '4' }</code>, which replaces the previous values of the
     * 'b' attribute with the new value.
     * </p>
     * <p>
     * You cannot specify an empty string as an attribute name.
     * </p>
     * <p>
     * Because Amazon SimpleDB makes multiple copies of client data and uses an eventual consistency update model, an
     * immediate <a>GetAttributes</a> or <a>Select</a> operation (read) immediately after a <a>PutAttributes</a> or
     * <a>DeleteAttributes</a> operation (write) might not return the updated data.
     * </p>
     * <p>
     * The following limitations are enforced for this operation:
     * <ul>
     * <li>256 total attribute name-value pairs per item</li>
     * <li>One billion attributes per domain</li>
     * <li>10 GB of total user data storage per domain</li>
     * </ul>
     * </p>
     *
     * @param putAttributesRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the PutAttributes operation returned by the service.
     * @sample AmazonSimpleDBAsyncHandler.PutAttributes
     */
    java.util.concurrent.Future<PutAttributesResult> putAttributesAsync(PutAttributesRequest putAttributesRequest,
            com.amazonaws.handlers.AsyncHandler<PutAttributesRequest, PutAttributesResult> asyncHandler);

    /**
     * <p>
     * The <code>Select</code> operation returns a set of attributes for <code>ItemNames</code> that match the select
     * expression. <code>Select</code> is similar to the standard SQL SELECT statement.
     * </p>
     * <p>
     * The total size of the response cannot exceed 1 MB in total size. Amazon SimpleDB automatically adjusts the number
     * of items returned per page to enforce this limit. For example, if the client asks to retrieve 2500 items, but
     * each individual item is 10 kB in size, the system returns 100 items and an appropriate <code>NextToken</code> so
     * the client can access the next page of results.
     * </p>
     * <p>
     * For information on how to construct select expressions, see Using Select to Create Amazon SimpleDB Queries in the
     * Developer Guide.
     * </p>
     *
     * @param selectRequest
     * @return A Java Future containing the result of the Select operation returned by the service.
     * @sample AmazonSimpleDBAsync.Select
     */
    java.util.concurrent.Future<SelectResult> selectAsync(SelectRequest selectRequest);

    /**
     * <p>
     * The <code>Select</code> operation returns a set of attributes for <code>ItemNames</code> that match the select
     * expression. <code>Select</code> is similar to the standard SQL SELECT statement.
     * </p>
     * <p>
     * The total size of the response cannot exceed 1 MB in total size. Amazon SimpleDB automatically adjusts the number
     * of items returned per page to enforce this limit. For example, if the client asks to retrieve 2500 items, but
     * each individual item is 10 kB in size, the system returns 100 items and an appropriate <code>NextToken</code> so
     * the client can access the next page of results.
     * </p>
     * <p>
     * For information on how to construct select expressions, see Using Select to Create Amazon SimpleDB Queries in the
     * Developer Guide.
     * </p>
     *
     * @param selectRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the Select operation returned by the service.
     * @sample AmazonSimpleDBAsyncHandler.Select
     */
    java.util.concurrent.Future<SelectResult> selectAsync(SelectRequest selectRequest,
            com.amazonaws.handlers.AsyncHandler<SelectRequest, SelectResult> asyncHandler);

}
| |
/*
* Copyright (C) 2015 Karumi.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.karumi.expandableselector;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import com.karumi.expandableselector.animation.ExpandableSelectorAnimator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* FrameLayout extension used to show a list of ExpandableItems instances represented with Button
* or ImageButton widgets which can be collapsed and expanded using an animation. The configurable
* elements of the class are:
*
* - List of items to show represented with ExpandableItem instances.
* - Time used to perform the collapse/expand animations. Expressed in milliseconds.
* - Show or hide the view background when the List of ExpandaleItems are collapsed.
* - Configure a ExpandableSelectorListeners to be notified when the view is going to be
* collapsed/expanded or has
* been collapsed/expanded.
* - Configure a OnExpandableItemClickListener to be notified when an item is clicked.
*/
public class ExpandableSelector extends FrameLayout {

  private static final int DEFAULT_ANIMATION_DURATION = 300;

  // Items currently rendered. Collections.emptyList() is the typed equivalent of the raw
  // Collections.EMPTY_LIST previously used here and avoids an unchecked assignment. The list is
  // replaced on every showExpandableItems(...) call.
  private List<ExpandableItem> expandableItems = Collections.emptyList();
  // Button/ImageButton widgets backing the items. Note renderExpandableItems() adds them in
  // reverse item order, so button index 0 corresponds to the LAST item position.
  private List<View> buttons = new ArrayList<View>();
  private ExpandableSelectorAnimator expandableSelectorAnimator;
  private ExpandableSelectorListener listener;
  private OnExpandableItemClickListener clickListener;
  private boolean hideBackgroundIfCollapsed;
  // Background configured in XML; restored when the selector expands if
  // hideBackgroundIfCollapsed is enabled.
  private Drawable expandedBackground;

  public ExpandableSelector(Context context) {
    this(context, null);
  }

  public ExpandableSelector(Context context, AttributeSet attrs) {
    this(context, attrs, 0);
  }

  public ExpandableSelector(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    initializeView(attrs);
  }

  @TargetApi(Build.VERSION_CODES.LOLLIPOP)
  public ExpandableSelector(Context context, AttributeSet attrs, int defStyleAttr,
      int defStyleRes) {
    super(context, attrs, defStyleAttr, defStyleRes);
    initializeView(attrs);
  }

  /**
   * Configures a List<ExpandableItem> to be shown. By default, the list of ExpandableItems is
   * going to be shown collapsed. Please take into account that this method creates
   * ImageButton/Button widgets based on the size of the list passed as parameter. Don't use this
   * library as a RecyclerView and take into account the number of elements to show.
   *
   * @throws IllegalArgumentException if the list passed as parameter is null.
   */
  public void showExpandableItems(List<ExpandableItem> expandableItems) {
    validateExpandableItems(expandableItems);
    reset();
    setExpandableItems(expandableItems);
    renderExpandableItems();
    hookListeners();
    bringChildrenToFront(expandableItems);
  }

  /**
   * Performs different animations to show the previously configured ExpandableItems transformed
   * into Button widgets. Notifies the ExpandableSelectorListener instance previously configured,
   * both when the animation starts (onExpand) and when it finishes (onExpanded).
   */
  public void expand() {
    expandableSelectorAnimator.expand(new ExpandableSelectorAnimator.Listener() {
      @Override public void onAnimationFinished() {
        notifyExpanded();
      }
    });
    notifyExpand();
    updateBackground();
  }

  /**
   * Performs different animations to hide the previously configured ExpandableItems transformed
   * into Button widgets. Notifies the ExpandableSelectorListener instance previously configured,
   * both when the animation starts (onCollapse) and when it finishes (onCollapsed).
   */
  public void collapse() {
    expandableSelectorAnimator.collapse(new ExpandableSelectorAnimator.Listener() {
      @Override public void onAnimationFinished() {
        // The background is restored only once the collapse animation has finished.
        updateBackground();
        notifyCollapsed();
      }
    });
    notifyCollapse();
  }

  /**
   * Returns true if the view is collapsed and false if the view is expanded.
   */
  public boolean isCollapsed() {
    return expandableSelectorAnimator.isCollapsed();
  }

  /**
   * Returns true if the view is expanded and false if the view is collapsed.
   */
  public boolean isExpanded() {
    return expandableSelectorAnimator.isExpanded();
  }

  /**
   * Configures a ExpandableSelectorListener instance to be notified when different collapse/expand
   * animations be performed.
   */
  public void setExpandableSelectorListener(ExpandableSelectorListener listener) {
    this.listener = listener;
  }

  /**
   * Configures a OnExpandableItemClickListener instance to be notified when a Button/ImageButton
   * inside ExpandableSelector be clicked. If the component is collapsed and the first button is
   * clicked the listener will not be notified; a click on the collapsed selector expands it
   * instead.
   */
  public void setOnExpandableItemClickListener(OnExpandableItemClickListener clickListener) {
    this.clickListener = clickListener;
  }

  /**
   * Given a position passed as parameter returns the ExpandableItem associated.
   */
  public ExpandableItem getExpandableItem(int expandableItemPosition) {
    return expandableItems.get(expandableItemPosition);
  }

  /**
   * Changes the ExpandableItem associated to a given position and updates the Button widget to
   * show the new ExpandableItem information.
   *
   * @throws IllegalArgumentException if the ExpandableItem passed as parameter is null.
   */
  public void updateExpandableItem(int expandableItemPosition, ExpandableItem expandableItem) {
    validateExpandableItem(expandableItem);
    expandableItems.remove(expandableItemPosition);
    expandableItems.add(expandableItemPosition, expandableItem);
    // Buttons are stored in reverse item order, so the matching widget is mirrored.
    int buttonPosition = buttons.size() - 1 - expandableItemPosition;
    configureButtonContent(buttons.get(buttonPosition), expandableItem);
  }

  /** Reads the XML attributes and configures animation and background behaviour. */
  private void initializeView(AttributeSet attrs) {
    TypedArray attributes =
        getContext().obtainStyledAttributes(attrs, R.styleable.expandable_selector);
    initializeAnimationDuration(attributes);
    initializeHideBackgroundIfCollapsed(attributes);
    initializeHideFirstItemOnCollapse(attributes);
    attributes.recycle();
  }

  private void initializeHideBackgroundIfCollapsed(TypedArray attributes) {
    hideBackgroundIfCollapsed =
        attributes.getBoolean(R.styleable.expandable_selector_hide_background_if_collapsed, false);
    // Keep the XML-configured background so it can be restored when expanding.
    expandedBackground = getBackground();
    updateBackground();
  }

  /** Configures the animator with the duration and interpolators declared in XML. */
  private void initializeAnimationDuration(TypedArray attributes) {
    int animationDuration =
        attributes.getInteger(R.styleable.expandable_selector_animation_duration,
            DEFAULT_ANIMATION_DURATION);
    int expandInterpolatorId =
        attributes.getResourceId(R.styleable.expandable_selector_expand_interpolator,
            android.R.anim.accelerate_interpolator);
    int collapseInterpolatorId =
        attributes.getResourceId(R.styleable.expandable_selector_collapse_interpolator,
            android.R.anim.decelerate_interpolator);
    int containerInterpolatorId =
        attributes.getResourceId(R.styleable.expandable_selector_container_interpolator,
            android.R.anim.decelerate_interpolator);
    expandableSelectorAnimator = new ExpandableSelectorAnimator(this, animationDuration,
        expandInterpolatorId, collapseInterpolatorId, containerInterpolatorId);
  }

  private void initializeHideFirstItemOnCollapse(TypedArray attributes) {
    boolean hideFirstItemOnCollapsed =
        attributes.getBoolean(R.styleable.expandable_selector_hide_first_item_on_collapse, false);
    expandableSelectorAnimator.setHideFirstItemOnCollapse(hideFirstItemOnCollapsed);
  }

  /** Swaps the background between the configured drawable and transparent if so configured. */
  private void updateBackground() {
    if (!hideBackgroundIfCollapsed) {
      return;
    }
    if (isExpanded()) {
      // setBackgroundDrawable is deprecated but kept for pre-API-16 compatibility.
      setBackgroundDrawable(expandedBackground);
    } else {
      setBackgroundResource(android.R.color.transparent);
    }
  }

  /** Removes every previously rendered button and clears the configured items. */
  private void reset() {
    this.expandableItems = Collections.emptyList();
    for (View button : buttons) {
      removeView(button);
    }
    this.buttons = new ArrayList<View>();
    expandableSelectorAnimator.reset();
  }

  /** Inflates one widget per item, iterating backwards so buttons end up in reverse order. */
  private void renderExpandableItems() {
    int numberOfItems = expandableItems.size();
    for (int i = numberOfItems - 1; i >= 0; i--) {
      View button = initializeButton(i);
      addView(button);
      buttons.add(button);
      expandableSelectorAnimator.initializeButton(button);
      configureButtonContent(button, expandableItems.get(i));
    }
    expandableSelectorAnimator.setButtons(buttons);
  }

  /**
   * Hooks click listeners. The last button (item position 0) toggles expansion when collapsed;
   * every other button reports a click with its item position. NOTE(review): when only one item
   * is configured no listener is attached at all — confirm this is intentional.
   */
  private void hookListeners() {
    final int numberOfButtons = buttons.size();
    boolean thereIsMoreThanOneButton = numberOfButtons > 1;
    if (thereIsMoreThanOneButton) {
      buttons.get(numberOfButtons - 1).setOnClickListener(new OnClickListener() {
        @Override public void onClick(View v) {
          if (isCollapsed()) {
            expand();
          } else {
            notifyButtonClicked(0, v);
          }
        }
      });
    }
    for (int i = 0; i < numberOfButtons - 1; i++) {
      final int buttonPosition = i;
      buttons.get(i).setOnClickListener(new OnClickListener() {
        @Override public void onClick(View v) {
          // Translate reversed button index back into the item position.
          int buttonIndex = numberOfButtons - 1 - buttonPosition;
          notifyButtonClicked(buttonIndex, v);
        }
      });
    }
  }

  private void notifyButtonClicked(int itemPosition, View button) {
    if (clickListener != null) {
      clickListener.onExpandableItemClickListener(itemPosition, button);
    }
  }

  /** Inflates a Button for items with a title and an ImageButton otherwise. */
  private View initializeButton(int expandableItemPosition) {
    ExpandableItem expandableItem = expandableItems.get(expandableItemPosition);
    View button = null;
    Context context = getContext();
    LayoutInflater layoutInflater = LayoutInflater.from(context);
    if (expandableItem.hasTitle()) {
      button = layoutInflater.inflate(R.layout.expandable_item_button, this, false);
    } else {
      button = layoutInflater.inflate(R.layout.expandable_item_image_button, this, false);
    }
    // Only the first item is visible while collapsed.
    int visibility = expandableItemPosition == 0 ? View.VISIBLE : View.INVISIBLE;
    button.setVisibility(visibility);
    return button;
  }

  /**
   * Applies background, title and image resource from the item to the widget.
   * NOTE(review): an item with both a title and a resource id would trigger a ClassCastException
   * here (Button cast to ImageButton) — presumably ExpandableItem forbids that combination;
   * confirm against its contract.
   */
  private void configureButtonContent(View button, ExpandableItem expandableItem) {
    if (expandableItem.hasBackgroundId()) {
      int backgroundId = expandableItem.getBackgroundId();
      button.setBackgroundResource(backgroundId);
    }
    if (expandableItem.hasTitle()) {
      String text = expandableItem.getTitle();
      ((Button) button).setText(text);
    }
    if (expandableItem.hasResourceId()) {
      ImageButton imageButton = (ImageButton) button;
      int resourceId = expandableItem.getResourceId();
      imageButton.setImageResource(resourceId);
    }
  }

  private void notifyExpand() {
    if (hasListenerConfigured()) {
      listener.onExpand();
    }
  }

  private void notifyCollapse() {
    if (hasListenerConfigured()) {
      listener.onCollapse();
    }
  }

  private void notifyExpanded() {
    if (hasListenerConfigured()) {
      listener.onExpanded();
    }
  }

  private void notifyCollapsed() {
    if (hasListenerConfigured()) {
      listener.onCollapsed();
    }
  }

  private boolean hasListenerConfigured() {
    return listener != null;
  }

  private void validateExpandableItem(ExpandableItem expandableItem) {
    if (expandableItem == null) {
      throw new IllegalArgumentException(
          "You can't use a null instance of ExpandableItem as parameter.");
    }
  }

  private void validateExpandableItems(List<ExpandableItem> expandableItems) {
    if (expandableItems == null) {
      throw new IllegalArgumentException(
          "The List<ExpandableItem> passed as argument can't be null");
    }
  }

  /** Defensive copy so later external mutation of the caller's list can't affect the view. */
  private void setExpandableItems(List<ExpandableItem> expandableItems) {
    this.expandableItems = new ArrayList<ExpandableItem>(expandableItems);
  }

  /**
   * Brings any child declared in XML (i.e. children that are not item buttons) to the front so
   * the item buttons don't cover them. (Renamed from bringChildsToFront — typo fix; the method
   * is private so no caller outside this class is affected.)
   */
  private void bringChildrenToFront(List<ExpandableItem> expandableItems) {
    int childCount = getChildCount();
    int numberOfExpandableItems = expandableItems.size();
    if (childCount > numberOfExpandableItems) {
      for (int i = 0; i < childCount - numberOfExpandableItems; i++) {
        getChildAt(i).bringToFront();
      }
    }
  }
}
| |
package controller;
import org.apache.log4j.Logger;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import model.ResponseError;
import model.ServiceProvider;
import service.EmailService;
import service.ServiceException;
import service.ServiceProviderServiceIF;
import spark.Request;
import spark.Response;
import spark.Route;
import static spark.Spark.*;
/**
 * Spark HTTP resource exposing CRUD and email endpoints for service providers.
 * Endpoints under /private are the management API; endpoints under /public expose the
 * reduced public view. Every handler answers JSON and maps ServiceException to a 400
 * with a serialized ResponseError body.
 */
public class ServiceProviderResource {

    private final ServiceProviderServiceIF serviceProviderService;
    private final Gson gson;
    private final EmailService emailService;

    final static Logger logger = Logger.getLogger(ServiceProviderResource.class);

    /**
     * Creates the resource and immediately registers all Spark endpoints.
     *
     * @param serviceProviderService business service used by every endpoint
     * @param gson                   serializer for response bodies
     * @param emailService           used by the public email endpoint
     */
    public ServiceProviderResource(ServiceProviderServiceIF serviceProviderService, Gson gson, EmailService emailService) {
        this.serviceProviderService = serviceProviderService;
        this.emailService = emailService;
        this.gson = gson;
        setupEndpoints();
    }

    /**
     * Returns true when the request carries a JSON body. Null-safe: requests without a
     * Content-Type header (previously an NPE on startsWith) are treated as form/query requests.
     */
    private static boolean isJsonRequest(Request request) {
        String contentType = request.contentType();
        return contentType != null && contentType.startsWith("application/json");
    }

    private void setupEndpoints() {
        // Full provider list, private view.
        get("/private/serviceProviders", new Route() {
            @Override
            public Object handle(Request request, Response response) throws Exception {
                logger.debug("Retrieving list of providers (private information): "+request.userAgent());
                response.type("application/json");
                String jsonresponse;
                try{
                    jsonresponse=gson.toJson(serviceProviderService.getAllServiceProviders());
                }
                catch(ServiceException e){
                    response.status(400);
                    logger.error(e.getMessage());
                    jsonresponse=gson.toJson(new ResponseError(e.getMessage()));
                }
                return jsonresponse;
            }
        });
        // Full provider list, public view.
        get("/public/serviceProviders", new Route() {
            @Override
            public Object handle(Request request, Response response) throws Exception {
                logger.debug("Retrieving list of providers (public information): "+request.userAgent()+" IP: "+request.ip());
                response.type("application/json");
                String jsonresponse;
                try{
                    jsonresponse=gson.toJson(serviceProviderService.getAllServiceProvidersPublic());
                }
                catch(ServiceException e){
                    response.status(400);
                    logger.error(e.getMessage());
                    jsonresponse=gson.toJson(new ResponseError(e.getMessage()));
                }
                return jsonresponse;
            }
        });
        // Single provider by id, public view.
        get("/public/serviceProviders/:id", new Route() {
            @Override
            public Object handle(Request request, Response response) throws Exception {
                logger.debug("Retrieving public information of provider "+request.params(":id")+": "+request.userAgent()+" IP: "+request.ip());
                response.type("application/json");
                String jsonresponse;
                try{
                    jsonresponse=gson.toJson(serviceProviderService.getServiceProviderPublic(request.params(":id")));
                }
                catch(ServiceException e){
                    response.status(400);
                    logger.error(e.getMessage());
                    jsonresponse=gson.toJson(new ResponseError(e.getMessage()));
                }
                return jsonresponse;
            }
        });
        // Single provider by id, private view.
        get("/private/serviceProviders/:id", new Route() {
            @Override
            public Object handle(Request request, Response response) throws Exception {
                logger.debug("Retrieving private information of provider "+request.params(":id")+": "+request.userAgent()+" IP: "+request.ip());
                response.type("application/json");
                String jsonresponse;
                try{
                    jsonresponse=gson.toJson(serviceProviderService.getServiceProvider(request.params(":id")));
                }
                catch(ServiceException e){
                    response.status(400);
                    logger.error(e.getMessage());
                    jsonresponse=gson.toJson(new ResponseError(e.getMessage()));
                }
                return jsonresponse;
            }
        });
        // Provider lookup by access token, public view.
        get("/public/serviceProviders/token/:token", new Route() {
            @Override
            public Object handle(Request request, Response response) throws Exception {
                logger.debug("Retrieving public information of provider with token "+request.params(":token")+": "+request.userAgent()+" IP: "+request.ip());
                response.type("application/json");
                String jsonresponse;
                try{
                    jsonresponse=gson.toJson(serviceProviderService.getServiceProviderByToken(request.params(":token")));
                }
                catch(ServiceException e){
                    response.status(400);
                    logger.error(e.getMessage());
                    jsonresponse=gson.toJson(new ResponseError(e.getMessage()));
                }
                return jsonresponse;
            }
        });
        // Generates a fresh token for a provider.
        post("/private/serviceProviders/token/:id", new Route(){
            @Override
            public Object handle(Request request, Response response) throws Exception {
                logger.debug("Creating new token for provider "+request.params(":id")+": "+request.userAgent()+" IP: "+request.ip());
                response.type("application/json");
                String jsonresponse;
                try{
                    int id = Integer.parseInt(request.params(":id"));
                    String newtoken = serviceProviderService.createToken(id);
                    JsonObject obj = new JsonObject();
                    obj.addProperty("token", newtoken);
                    jsonresponse = obj.toString();
                } catch (NumberFormatException e) {
                    response.status(400);
                    logger.error(e.getMessage());
                    jsonresponse=gson.toJson(new ResponseError("Id is not an Integer!"));
                } catch (ServiceException e) {
                    // Bug fix: createToken failures previously escaped this handler as an
                    // unformatted 500; report them like every other endpoint does.
                    response.status(400);
                    logger.error(e.getMessage());
                    jsonresponse=gson.toJson(new ResponseError(e.getMessage()));
                }
                return jsonresponse;
            }
        });
        // Creates a new provider from either a JSON body or query parameters.
        post("/private/serviceProviders", new Route() {
            @Override
            public Object handle(Request request, Response response) throws Exception {
                logger.debug("Creating new provider: "+request.userAgent());
                response.type("application/json");
                String name;
                String adress;
                String filetype;
                String dataURI;
                String services;
                String email;
                if (isJsonRequest(request)) {
                    JsonObject body = new JsonParser().parse(request.body()).getAsJsonObject();
                    name = body.has("name") ? body.get("name").getAsString() : null;
                    adress = body.has("adress") ? body.get("adress").getAsString() : null;
                    filetype = body.has("filetype") ? body.get("filetype").getAsString() : null;
                    dataURI = body.has("dataURI") ? body.get("dataURI").getAsString() : null;
                    services = body.has("services") ? body.get("services").getAsString() : null;
                    email = body.has("email") ? body.get("email").getAsString() : null;
                } else {
                    name = request.queryParams("name");
                    adress = request.queryParams("adress");
                    filetype = request.queryParams("filetype");
                    dataURI = request.queryParams("dataURI");
                    services = request.queryParams("services");
                    email = request.queryParams("email");
                }
                String jsonresponse;
                try{
                    jsonresponse=gson.toJson(serviceProviderService.createServiceProvider(name, adress, filetype, dataURI, services, email));
                }
                catch(ServiceException e){
                    response.status(400);
                    logger.error(e.getMessage());
                    jsonresponse=gson.toJson(new ResponseError(e.getMessage()));
                }
                return jsonresponse;
            }
        });
        // Updates an existing provider from either a JSON body or query parameters.
        put("/private/serviceProviders/:id", new Route() {
            @Override
            public Object handle(Request request, Response response) throws Exception {
                logger.debug("Updating provider "+request.params(":id")+": "+request.userAgent());
                response.type("application/json");
                String id = request.params(":id");
                String name;
                String adress;
                String token;
                boolean blocked;
                String filetype;
                String dataURI;
                String services;
                String email;
                if (isJsonRequest(request)) {
                    JsonObject body = new JsonParser().parse(request.body()).getAsJsonObject();
                    name = body.has("name") ? body.get("name").getAsString() : null;
                    adress = body.has("adress") ? body.get("adress").getAsString() : null;
                    token = body.has("token") ? body.get("token").getAsString() : null;
                    blocked = body.has("blocked") ? body.get("blocked").getAsBoolean() : false;
                    filetype = body.has("filetype") ? body.get("filetype").getAsString() : null;
                    dataURI = body.has("dataURI") ? body.get("dataURI").getAsString() : null;
                    services = body.has("services") ? body.get("services").getAsString() : null;
                    email = body.has("email") ? body.get("email").getAsString() : null;
                } else {
                    name = request.queryParams("name");
                    adress = request.queryParams("adress");
                    token = request.queryParams("token");
                    // Null-safe: a missing "blocked" query param previously caused an NPE;
                    // it now defaults to false, matching the JSON branch.
                    blocked = "true".equals(request.queryParams("blocked"));
                    filetype = request.queryParams("filetype");
                    dataURI = request.queryParams("dataURI");
                    services = request.queryParams("services");
                    email = request.queryParams("email");
                }
                String jsonresponse;
                try{
                    jsonresponse=gson.toJson(serviceProviderService.updateServiceProvider(id, name, adress, token, blocked, filetype, dataURI, services, email));
                }
                catch(ServiceException e){
                    response.status(400);
                    logger.error(e.getMessage());
                    jsonresponse=gson.toJson(new ResponseError(e.getMessage()));
                }
                return jsonresponse;
            }
        });
        // Sends an email to a provider, but only if the supplied address matches the stored one.
        post("/public/serviceProviders/email/:id", new Route() {
            @Override
            public Object handle(Request request, Response response) throws Exception {
                logger.debug("Sending email to provider "+request.params(":id")+": "+request.userAgent()+" IP: "+request.ip());
                response.type("application/json");
                String id = request.params(":id");
                String email;
                String content;
                if (isJsonRequest(request)) {
                    JsonObject body = new JsonParser().parse(request.body()).getAsJsonObject();
                    email = body.has("email") ? body.get("email").getAsString() : null;
                    content = body.has("content") ? body.get("content").getAsString() : null;
                } else {
                    email = request.queryParams("email");
                    content = request.queryParams("content");
                }
                String jsonresponse;
                try{
                    ServiceProvider prov=serviceProviderService.getServiceProvider(id);
                    if(prov.getEmail().equals(email)){
                        jsonresponse=emailService.sendMail(email, content);
                        logger.debug("Successfully sent mail to "+email);
                    }
                    else{
                        response.status(400);
                        jsonresponse=gson.toJson(new ResponseError("Email is not valid"));
                    }
                }
                catch(ServiceException e){
                    response.status(400);
                    logger.error(e.getMessage());
                    jsonresponse=gson.toJson(new ResponseError(e.getMessage()));
                }
                return jsonresponse;
            }
        });
        // Deletes a provider by id.
        delete("/private/serviceProviders/:id", new Route() {
            @Override
            public Object handle(Request request, Response response) throws Exception {
                logger.debug("Deleting provider "+request.params(":id")+": "+request.userAgent());
                response.type("application/json");
                String jsonresponse;
                try{
                    jsonresponse=gson.toJson(serviceProviderService.deleteServiceProvider(request.params(":id")));
                }
                catch(ServiceException e){
                    response.status(400);
                    logger.error(e.getMessage());
                    jsonresponse=gson.toJson(new ResponseError(e.getMessage()));
                }
                return jsonresponse;
            }
        });
    }
}
| |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.util;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Files;
import com.google.common.io.LineReader;
import org.hamcrest.Matchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.Reader;
import java.io.Writer;
import java.nio.channels.Channels;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.List;
/** Tests for {@link FileIOChannelFactory}. */
@RunWith(JUnit4.class)
public class FileIOChannelFactoryTest {
  @Rule public ExpectedException thrown = ExpectedException.none();
  @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();
  // The factory under test is stateless, so a single final instance is shared by all tests.
  private final FileIOChannelFactory factory = new FileIOChannelFactory();

  /** Writes a known string through the factory's channel and asserts it lands in the file. */
  private void testCreate(Path path) throws Exception {
    String expected = "my test string";
    try (Writer writer = Channels.newWriter(
        factory.create(path.toString(), MimeTypes.TEXT), StandardCharsets.UTF_8.name())) {
      writer.write(expected);
    }
    assertThat(
        Files.readLines(path.toFile(), StandardCharsets.UTF_8),
        containsInAnyOrder(expected));
  }

  @Test
  public void testCreateWithExistingFile() throws Exception {
    File existingFile = temporaryFolder.newFile();
    testCreate(existingFile.toPath());
  }

  @Test
  public void testCreateWithinExistingDirectory() throws Exception {
    testCreate(temporaryFolder.getRoot().toPath().resolve("file.txt"));
  }

  @Test
  public void testCreateWithNonExistentSubDirectory() throws Exception {
    testCreate(temporaryFolder.getRoot().toPath().resolve("non-existent-dir").resolve("file.txt"));
  }

  @Test
  public void testReadWithExistingFile() throws Exception {
    String expected = "my test string";
    File existingFile = temporaryFolder.newFile();
    Files.write(expected, existingFile, StandardCharsets.UTF_8);
    String data;
    try (Reader reader =
        Channels.newReader(factory.open(existingFile.getPath()), StandardCharsets.UTF_8.name())) {
      data = new LineReader(reader).readLine();
    }
    assertEquals(expected, data);
  }

  @Test
  public void testReadNonExistentFile() throws Exception {
    thrown.expect(FileNotFoundException.class);
    factory
        .open(
            temporaryFolder
                .getRoot()
                .toPath()
                .resolve("non-existent-file.txt")
                .toString())
        .close();
  }

  @Test
  public void testIsReadSeekEfficient() throws Exception {
    assertTrue(factory.isReadSeekEfficient("somePath"));
  }

  @Test
  public void testMatchExact() throws Exception {
    List<String> expected = ImmutableList.of(temporaryFolder.newFile("a").toString());
    // "aa" and "ab" exist but must not match the exact pattern "a".
    temporaryFolder.newFile("aa");
    temporaryFolder.newFile("ab");
    assertThat(factory.match(temporaryFolder.getRoot().toPath().resolve("a").toString()),
        containsInAnyOrder(expected.toArray(new String[expected.size()])));
  }

  @Test
  public void testMatchNone() throws Exception {
    List<String> expected = ImmutableList.of();
    temporaryFolder.newFile("a");
    temporaryFolder.newFile("aa");
    temporaryFolder.newFile("ab");
    // Windows doesn't like resolving paths with * in them, so the * is appended after resolve.
    assertThat(factory.match(factory.resolve(temporaryFolder.getRoot().getPath(), "b") + "*"),
        containsInAnyOrder(expected.toArray(new String[expected.size()])));
  }

  @Test
  public void testMatchUsingExplicitPath() throws Exception {
    List<String> expected = ImmutableList.of(temporaryFolder.newFile("a").toString());
    temporaryFolder.newFile("aa");
    assertThat(factory.match(factory.resolve(temporaryFolder.getRoot().getPath(), "a")),
        containsInAnyOrder(expected.toArray(new String[expected.size()])));
  }

  @Test
  public void testMatchUsingExplicitPathForNonExistentFile() throws Exception {
    List<String> expected = ImmutableList.of();
    temporaryFolder.newFile("aa");
    assertThat(factory.match(factory.resolve(temporaryFolder.getRoot().getPath(), "a")),
        containsInAnyOrder(expected.toArray(new String[expected.size()])));
  }

  @Test
  public void testMatchMultipleWithoutSubdirectoryExpansion() throws Exception {
    File unmatchedSubDir = temporaryFolder.newFolder("aaa");
    File unmatchedSubDirFile = File.createTempFile("sub-dir-file", "", unmatchedSubDir);
    unmatchedSubDirFile.deleteOnExit();
    List<String> expected = ImmutableList.of(temporaryFolder.newFile("a").toString(),
        temporaryFolder.newFile("aa").toString(), temporaryFolder.newFile("ab").toString());
    temporaryFolder.newFile("ba");
    temporaryFolder.newFile("bb");
    // Windows doesn't like resolving paths with * in them, so the * is appended after resolve.
    assertThat(factory.match(factory.resolve(temporaryFolder.getRoot().getPath(), "a") + "*"),
        containsInAnyOrder(expected.toArray(new String[expected.size()])));
  }

  @Test
  public void testMatchMultipleWithSubdirectoryExpansion() throws Exception {
    File matchedSubDir = temporaryFolder.newFolder("a");
    File matchedSubDirFile = File.createTempFile("sub-dir-file", "", matchedSubDir);
    matchedSubDirFile.deleteOnExit();
    File unmatchedSubDir = temporaryFolder.newFolder("b");
    File unmatchedSubDirFile = File.createTempFile("sub-dir-file", "", unmatchedSubDir);
    unmatchedSubDirFile.deleteOnExit();
    List<String> expected = ImmutableList.of(matchedSubDirFile.toString(),
        temporaryFolder.newFile("aa").toString(), temporaryFolder.newFile("ab").toString());
    temporaryFolder.newFile("ba");
    temporaryFolder.newFile("bb");
    // Windows doesn't like resolving paths with * in them, so the ** is appended after resolve.
    assertThat(factory.match(factory.resolve(temporaryFolder.getRoot().getPath(), "a") + "**"),
        Matchers.hasItems(expected.toArray(new String[expected.size()])));
  }

  @Test
  public void testMatchWithDirectoryFiltersOutDirectory() throws Exception {
    List<String> expected = ImmutableList.of(temporaryFolder.newFile("a").toString());
    temporaryFolder.newFolder("a_dir_that_should_not_be_matched");
    // Windows doesn't like resolving paths with * in them, so the * is appended after resolve.
    assertThat(factory.match(factory.resolve(temporaryFolder.getRoot().getPath(), "a") + "*"),
        containsInAnyOrder(expected.toArray(new String[expected.size()])));
  }

  @Test
  public void testResolve() throws Exception {
    String expected = temporaryFolder.getRoot().toPath().resolve("aa").toString();
    assertEquals(expected, factory.resolve(temporaryFolder.getRoot().toString(), "aa"));
  }

  @Test
  public void testResolveOtherIsFullPath() throws Exception {
    // getPath() already returns a String; the redundant .toString() was dropped.
    String expected = temporaryFolder.getRoot().getPath();
    assertEquals(expected, factory.resolve(expected, expected));
  }

  @Test
  public void testResolveOtherIsEmptyPath() throws Exception {
    // getPath() already returns a String; the redundant .toString() was dropped.
    String expected = temporaryFolder.getRoot().getPath();
    assertEquals(expected, factory.resolve(expected, ""));
  }

  @Test
  public void testGetSizeBytes() throws Exception {
    String data = "TestData!!!";
    File file = temporaryFolder.newFile();
    Files.write(data, file, StandardCharsets.UTF_8);
    assertEquals(data.length(), factory.getSizeBytes(file.getPath()));
  }

  @Test
  public void testGetSizeBytesForNonExistentFile() throws Exception {
    thrown.expect(FileNotFoundException.class);
    factory.getSizeBytes(
        factory.resolve(temporaryFolder.getRoot().getPath(), "non-existent-file"));
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.videointelligence.v1p3beta1;
/**
* CloudVideoIntelligence request.
*
* @since 1.3
*/
@SuppressWarnings("javadoc")
public abstract class CloudVideoIntelligenceRequest<T> extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest<T> {
/**
 * Constructs a new request; all arguments are forwarded unchanged to the
 * {@code AbstractGoogleJsonClientRequest} superclass.
 *
 * @param client Google client
 * @param method HTTP Method
 * @param uriTemplate URI template for the path relative to the base URL. If it starts with a "/"
 *        the base path from the base URL will be stripped out. The URI template can also be a
 *        full URL. URI template expansion is done using
 *        {@link com.google.api.client.http.UriTemplate#expand(String, String, Object, boolean)}
 * @param content A POJO that can be serialized into JSON or {@code null} for none
 * @param responseClass response class to parse into
 */
public CloudVideoIntelligenceRequest(
    CloudVideoIntelligence client, String method, String uriTemplate, Object content, Class<T> responseClass) {
  super(
      client,
      method,
      uriTemplate,
      content,
      responseClass);
}
/** V1 error format. Serialized as the "$.xgafv" query parameter. */
@com.google.api.client.util.Key("$.xgafv")
private java.lang.String $Xgafv;

/**
 * V1 error format.
 */
public java.lang.String get$Xgafv() {
  return $Xgafv;
}

/** V1 error format. Returns this request to allow call chaining. */
public CloudVideoIntelligenceRequest<T> set$Xgafv(java.lang.String $Xgafv) {
  this.$Xgafv = $Xgafv;
  return this;
}

/** OAuth access token. Serialized as the "access_token" query parameter. */
@com.google.api.client.util.Key("access_token")
private java.lang.String accessToken;

/**
 * OAuth access token.
 */
public java.lang.String getAccessToken() {
  return accessToken;
}

/** OAuth access token. Returns this request to allow call chaining. */
public CloudVideoIntelligenceRequest<T> setAccessToken(java.lang.String accessToken) {
  this.accessToken = accessToken;
  return this;
}

/** Data format for response. */
@com.google.api.client.util.Key
private java.lang.String alt;

/**
 * Data format for response. [default: json]
 */
public java.lang.String getAlt() {
  return alt;
}

/** Data format for response. Returns this request to allow call chaining. */
public CloudVideoIntelligenceRequest<T> setAlt(java.lang.String alt) {
  this.alt = alt;
  return this;
}

/** JSONP callback parameter. */
@com.google.api.client.util.Key
private java.lang.String callback;

/**
 * JSONP
 */
public java.lang.String getCallback() {
  return callback;
}

/** JSONP callback parameter. Returns this request to allow call chaining. */
public CloudVideoIntelligenceRequest<T> setCallback(java.lang.String callback) {
  this.callback = callback;
  return this;
}

/** Selector specifying which fields to include in a partial response. */
@com.google.api.client.util.Key
private java.lang.String fields;

/**
 * Selector specifying which fields to include in a partial response.
 */
public java.lang.String getFields() {
  return fields;
}

/** Selector specifying which fields to include in a partial response. Returns this request. */
public CloudVideoIntelligenceRequest<T> setFields(java.lang.String fields) {
  this.fields = fields;
  return this;
}
/**
* API key. Your API key identifies your project and provides you with API access, quota, and
* reports. Required unless you provide an OAuth 2.0 token.
*/
@com.google.api.client.util.Key
private java.lang.String key;
/**
* API key. Your API key identifies your project and provides you with API access, quota, and
* reports. Required unless you provide an OAuth 2.0 token.
*/
public java.lang.String getKey() {
return key;
}
/**
* API key. Your API key identifies your project and provides you with API access, quota, and
* reports. Required unless you provide an OAuth 2.0 token.
*/
public CloudVideoIntelligenceRequest<T> setKey(java.lang.String key) {
this.key = key;
return this;
}
/** OAuth 2.0 token for the current user. Serialized as the "oauth_token" query parameter. */
@com.google.api.client.util.Key("oauth_token")
private java.lang.String oauthToken;
/**
 * OAuth 2.0 token for the current user.
 *
 * @return the current value, or {@code null} if unset
 */
public java.lang.String getOauthToken() {
  return oauthToken;
}
/** OAuth 2.0 token for the current user. Returns this request to allow fluent chaining. */
public CloudVideoIntelligenceRequest<T> setOauthToken(java.lang.String oauthToken) {
  this.oauthToken = oauthToken;
  return this;
}
/** Returns response with indentations and line breaks. */
@com.google.api.client.util.Key
private java.lang.Boolean prettyPrint;
/**
 * Returns response with indentations and line breaks. [default: true]
 *
 * @return the current value, or {@code null} if unset (server default applies)
 */
public java.lang.Boolean getPrettyPrint() {
  return prettyPrint;
}
/** Returns response with indentations and line breaks. Returns this request for chaining. */
public CloudVideoIntelligenceRequest<T> setPrettyPrint(java.lang.Boolean prettyPrint) {
  this.prettyPrint = prettyPrint;
  return this;
}
/**
 * Available to use for quota purposes for server-side applications. Can be any arbitrary string
 * assigned to a user, but should not exceed 40 characters.
 */
@com.google.api.client.util.Key
private java.lang.String quotaUser;
/**
 * Available to use for quota purposes for server-side applications. Can be any arbitrary string
 * assigned to a user, but should not exceed 40 characters.
 *
 * @return the current value, or {@code null} if unset
 */
public java.lang.String getQuotaUser() {
  return quotaUser;
}
/**
 * Available to use for quota purposes for server-side applications. Can be any arbitrary string
 * assigned to a user, but should not exceed 40 characters. Returns this request for chaining.
 */
public CloudVideoIntelligenceRequest<T> setQuotaUser(java.lang.String quotaUser) {
  this.quotaUser = quotaUser;
  return this;
}
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
@com.google.api.client.util.Key
private java.lang.String uploadType;
/**
 * Legacy upload protocol for media (e.g. "media", "multipart").
 *
 * @return the current value, or {@code null} if unset
 */
public java.lang.String getUploadType() {
  return uploadType;
}
/** Legacy upload protocol for media (e.g. "media", "multipart"). Returns this request. */
public CloudVideoIntelligenceRequest<T> setUploadType(java.lang.String uploadType) {
  this.uploadType = uploadType;
  return this;
}
/** Upload protocol for media (e.g. "raw", "multipart"). Serialized as "upload_protocol". */
@com.google.api.client.util.Key("upload_protocol")
private java.lang.String uploadProtocol;
/**
 * Upload protocol for media (e.g. "raw", "multipart").
 *
 * @return the current value, or {@code null} if unset
 */
public java.lang.String getUploadProtocol() {
  return uploadProtocol;
}
/** Upload protocol for media (e.g. "raw", "multipart"). Returns this request for chaining. */
public CloudVideoIntelligenceRequest<T> setUploadProtocol(java.lang.String uploadProtocol) {
  this.uploadProtocol = uploadProtocol;
  return this;
}
/** Narrows the client accessor to the concrete {@link CloudVideoIntelligence} type. */
@Override
public final CloudVideoIntelligence getAbstractGoogleClient() {
  return (CloudVideoIntelligence) super.getAbstractGoogleClient();
}
/** {@inheritDoc} Overridden only to narrow the return type for fluent chaining. */
@Override
public CloudVideoIntelligenceRequest<T> setDisableGZipContent(boolean disableGZipContent) {
  return (CloudVideoIntelligenceRequest<T>) super.setDisableGZipContent(disableGZipContent);
}
/** {@inheritDoc} Overridden only to narrow the return type for fluent chaining. */
@Override
public CloudVideoIntelligenceRequest<T> setRequestHeaders(com.google.api.client.http.HttpHeaders headers) {
  return (CloudVideoIntelligenceRequest<T>) super.setRequestHeaders(headers);
}
/** Sets an arbitrary query parameter by name; narrows the return type for chaining. */
@Override
public CloudVideoIntelligenceRequest<T> set(String parameterName, Object value) {
  return (CloudVideoIntelligenceRequest<T>) super.set(parameterName, value);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.data;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.data.input.Row;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.guava.Accumulator;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.query.Druids;
import org.apache.druid.query.FinalizeResultsQueryRunner;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerFactory;
import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.Result;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
import org.apache.druid.query.aggregation.FilteredAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.query.timeseries.TimeseriesQuery;
import org.apache.druid.query.timeseries.TimeseriesQueryEngine;
import org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest;
import org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory;
import org.apache.druid.query.timeseries.TimeseriesResultValue;
import org.apache.druid.segment.CloserRule;
import org.apache.druid.segment.IncrementalIndexSegment;
import org.apache.druid.segment.Segment;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.IncrementalIndexCreator;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.incremental.IndexSizeExceededException;
import org.apache.druid.segment.incremental.OnheapIncrementalIndex;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
*/
@RunWith(Parameterized.class)
public class IncrementalIndexTest extends InitializedNullHandlingTest
{
// Creates IncrementalIndex instances for the parameterized index type/mode of this run.
public final IncrementalIndexCreator indexCreator;
// Closes everything registered via closeLater() after each test.
@Rule
public final CloserRule closer = new CloserRule(false);
/**
 * Sets up the per-run index creator.
 *
 * @param indexType index implementation name supplied by {@link #constructorFeeder()}
 * @param mode "rollup" enables rollup on created indexes; any other value disables it
 * @throws JsonProcessingException propagated from {@link IncrementalIndexCreator}
 */
public IncrementalIndexTest(String indexType, String mode) throws JsonProcessingException
{
  indexCreator = closer.closeLater(new IncrementalIndexCreator(indexType, (builder, args) -> builder
      .setSimpleTestingIndexSchema("rollup".equals(mode), (AggregatorFactory[]) args[0])
      .setMaxRowCount(1_000_000)
      .build()
  ));
}
/**
 * JUnit parameters: the cartesian product of all index types with the
 * two schema modes ("rollup", "plain").
 */
@Parameterized.Parameters(name = "{index}: {0}, {1}")
public static Collection<?> constructorFeeder()
{
  return IncrementalIndexCreator.indexTypeCartesianProduct(ImmutableList.of("rollup", "plain"));
}
/** Returns the shared combining aggregators array (not a defensive copy — do not mutate). */
public static AggregatorFactory[] getDefaultCombiningAggregatorFactories()
{
  return DEFAULT_COMBINING_AGGREGATOR_FACTORIES;
}
/**
 * Builds an on-heap incremental index with the given metrics and dimensions.
 *
 * @param aggregatorFactories metric aggregators; {@code null} falls back to the default count
 * @param dimensionsSpec explicit dimension schema for the index
 * @return a new on-heap index capped at 1,000,000 rows
 */
public static IncrementalIndex createIndex(
    AggregatorFactory[] aggregatorFactories,
    DimensionsSpec dimensionsSpec
)
{
  final AggregatorFactory[] metrics =
      aggregatorFactories == null ? DEFAULT_AGGREGATOR_FACTORIES : aggregatorFactories;
  final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withDimensionsSpec(dimensionsSpec)
      .withMetrics(metrics)
      .build();
  return new OnheapIncrementalIndex.Builder()
      .setIndexSchema(schema)
      .setMaxRowCount(1000000)
      .build();
}
/**
 * Builds an on-heap incremental index with the default simple testing schema.
 *
 * @param aggregatorFactories metric aggregators; {@code null} falls back to the default count
 * @return a new on-heap index capped at 1,000,000 rows
 */
public static IncrementalIndex createIndex(AggregatorFactory[] aggregatorFactories)
{
  final AggregatorFactory[] metrics =
      aggregatorFactories == null ? DEFAULT_AGGREGATOR_FACTORIES : aggregatorFactories;
  return new OnheapIncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(metrics)
      .setMaxRowCount(1000000)
      .build();
}
/**
 * Builds an on-heap incremental index with rollup disabled.
 *
 * @param aggregatorFactories metric aggregators; {@code null} falls back to the default count
 * @return a new on-heap, non-rollup index capped at 1,000,000 rows
 */
public static IncrementalIndex createNoRollupIndex(AggregatorFactory[] aggregatorFactories)
{
  final AggregatorFactory[] metrics =
      aggregatorFactories == null ? DEFAULT_AGGREGATOR_FACTORIES : aggregatorFactories;
  return new OnheapIncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(false, metrics)
      .setMaxRowCount(1000000)
      .build();
}
/**
 * Adds two rows with the same timestamp but distinct dim1/dim2 values.
 *
 * @param timestamp epoch millis applied to both rows
 * @param index the index to populate
 * @throws IndexSizeExceededException if the index is full
 */
public static void populateIndex(long timestamp, IncrementalIndex index) throws IndexSizeExceededException
{
  final List<String> dims = Arrays.asList("dim1", "dim2");
  for (ImmutableMap<String, Object> event : ImmutableList.of(
      ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2"),
      ImmutableMap.<String, Object>of("dim1", "3", "dim2", "4")
  )) {
    index.add(new MapBasedInputRow(timestamp, dims, event));
  }
}
/**
 * Builds a row with {@code dimensionCount} string dimensions named Dim_0..Dim_{n-1},
 * each valued with its own name suffixed by {@code rowID}.
 *
 * @param timestamp epoch millis of the row
 * @param rowID suffix appended to every dimension value
 * @param dimensionCount number of dimensions to generate
 * @return the generated row
 */
public static MapBasedInputRow getRow(long timestamp, int rowID, int dimensionCount)
{
  final ImmutableMap.Builder<String, Object> event = ImmutableMap.builder();
  final List<String> dimensions = new ArrayList<>(dimensionCount);
  for (int i = 0; i < dimensionCount; i++) {
    final String dim = StringUtils.format("Dim_%d", i);
    dimensions.add(dim);
    event.put(dim, dim + rowID);
  }
  return new MapBasedInputRow(timestamp, dimensions, event.build());
}
/**
 * Builds a row with {@code dimensionCount} numeric dimensions named Dim_0..Dim_{n-1},
 * all valued {@code 1L} (so sums over N such rows equal N per dimension).
 *
 * @param timestamp epoch millis of the row
 * @param dimensionCount number of dimensions to generate
 * @return the generated row
 */
private static MapBasedInputRow getLongRow(long timestamp, int dimensionCount)
{
  final List<String> dimensionList = new ArrayList<>(dimensionCount);
  final ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
  for (int i = 0; i < dimensionCount; i++) {
    final String dimName = StringUtils.format("Dim_%d", i);
    dimensionList.add(dimName);
    // Autoboxing converts long -> Long; the previous explicit "(Long) 1L" cast was redundant.
    builder.put(dimName, 1L);
  }
  return new MapBasedInputRow(timestamp, dimensionList, builder.build());
}
// Single "count" aggregator used whenever a test does not supply its own metrics.
private static final AggregatorFactory[] DEFAULT_AGGREGATOR_FACTORIES = new AggregatorFactory[]{
    new CountAggregatorFactory(
        "count"
    )
};
// Combining counterpart of the defaults, derived via getCombiningFactory().
private static final AggregatorFactory[] DEFAULT_COMBINING_AGGREGATOR_FACTORIES = new AggregatorFactory[]{
    DEFAULT_AGGREGATOR_FACTORIES[0].getCombiningFactory()
};
/**
 * Ingests two rows via {@link #populateIndex} and verifies dimension names,
 * index size, and per-row dimension values in iteration order.
 */
@Test
public void testCaseSensitivity() throws Exception
{
  final long timestamp = System.currentTimeMillis();
  final IncrementalIndex index = indexCreator.createIndex((Object) DEFAULT_AGGREGATOR_FACTORIES);
  populateIndex(timestamp, index);
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), index.getDimensionNames());
  Assert.assertEquals(2, index.size());
  final Iterator<Row> rows = index.iterator();
  // Expected (dim1, dim2) value pairs, in iteration order.
  for (String[] expected : new String[][]{{"1", "2"}, {"3", "4"}}) {
    final Row row = rows.next();
    Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
    Assert.assertEquals(Collections.singletonList(expected[0]), row.getDimension("dim1"));
    Assert.assertEquals(Collections.singletonList(expected[1]), row.getDimension("dim2"));
  }
}
/**
 * Ingests two rows and checks that each {@link FilteredAggregatorFactory} only
 * counts rows matching its filter: a selector on dim2, a numeric bound on dim2,
 * a selector on a multi-value dimension (dim3), and a selector on a numeric
 * column (met1).
 */
@Test
public void testFilteredAggregators() throws Exception
{
  long timestamp = System.currentTimeMillis();
  IncrementalIndex index = indexCreator.createIndex((Object) new AggregatorFactory[]{
      new CountAggregatorFactory("count"),
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("count_selector_filtered"),
          new SelectorDimFilter("dim2", "2", null)
      ),
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("count_bound_filtered"),
          new BoundDimFilter("dim2", "2", "3", false, true, null, null, StringComparators.NUMERIC)
      ),
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("count_multivaldim_filtered"),
          new SelectorDimFilter("dim3", "b", null)
      ),
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("count_numeric_filtered"),
          new SelectorDimFilter("met1", "11", null)
      )
  });
  // Row 1 matches the selector, bound, and multi-value filters (dim2=2, dim3 contains "b");
  // row 2 only matches the numeric filter (met1=11).
  index.add(
      new MapBasedInputRow(
          timestamp,
          Arrays.asList("dim1", "dim2", "dim3"),
          ImmutableMap.of("dim1", "1", "dim2", "2", "dim3", Lists.newArrayList("b", "a"), "met1", 10)
      )
  );
  index.add(
      new MapBasedInputRow(
          timestamp,
          Arrays.asList("dim1", "dim2", "dim3"),
          ImmutableMap.of("dim1", "3", "dim2", "4", "dim3", Lists.newArrayList("c", "d"), "met1", 11)
      )
  );
  Assert.assertEquals(Arrays.asList("dim1", "dim2", "dim3"), index.getDimensionNames());
  Assert.assertEquals(
      Arrays.asList(
          "count",
          "count_selector_filtered",
          "count_bound_filtered",
          "count_multivaldim_filtered",
          "count_numeric_filtered"
      ),
      index.getMetricNames()
  );
  Assert.assertEquals(2, index.size());
  final Iterator<Row> rows = index.iterator();
  Row row = rows.next();
  Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
  Assert.assertEquals(Collections.singletonList("1"), row.getDimension("dim1"));
  Assert.assertEquals(Collections.singletonList("2"), row.getDimension("dim2"));
  // Multi-value dimension values come back sorted ("b","a" was ingested; "a","b" is read).
  Assert.assertEquals(Arrays.asList("a", "b"), row.getDimension("dim3"));
  Assert.assertEquals(1L, row.getMetric("count"));
  Assert.assertEquals(1L, row.getMetric("count_selector_filtered"));
  Assert.assertEquals(1L, row.getMetric("count_bound_filtered"));
  Assert.assertEquals(1L, row.getMetric("count_multivaldim_filtered"));
  Assert.assertEquals(0L, row.getMetric("count_numeric_filtered"));
  row = rows.next();
  Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
  Assert.assertEquals(Collections.singletonList("3"), row.getDimension("dim1"));
  Assert.assertEquals(Collections.singletonList("4"), row.getDimension("dim2"));
  Assert.assertEquals(Arrays.asList("c", "d"), row.getDimension("dim3"));
  Assert.assertEquals(1L, row.getMetric("count"));
  Assert.assertEquals(0L, row.getMetric("count_selector_filtered"));
  Assert.assertEquals(0L, row.getMetric("count_bound_filtered"));
  Assert.assertEquals(0L, row.getMetric("count_multivaldim_filtered"));
  Assert.assertEquals(1L, row.getMetric("count_numeric_filtered"));
}
/**
 * Ingests the same 50 rows twice (to exercise rollup merging) and runs a
 * timeseries query over the index, verifying the row count (halved by rollup
 * when enabled) and that per-dimension long/double sums equal 2 * rows.
 */
@Test
public void testSingleThreadedIndexingAndQuery() throws Exception
{
  final int dimensionCount = 5;
  // Ingestion-side aggregators: a row count plus a long and a double sum per dimension.
  final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>();
  ingestAggregatorFactories.add(new CountAggregatorFactory("rows"));
  for (int i = 0; i < dimensionCount; ++i) {
    ingestAggregatorFactories.add(
        new LongSumAggregatorFactory(
            StringUtils.format("sumResult%s", i),
            StringUtils.format("Dim_%s", i)
        )
    );
    ingestAggregatorFactories.add(
        new DoubleSumAggregatorFactory(
            StringUtils.format("doubleSumResult%s", i),
            StringUtils.format("Dim_%s", i)
        )
    );
  }
  final IncrementalIndex index = indexCreator.createIndex(
      (Object) ingestAggregatorFactories.toArray(
          new AggregatorFactory[0]
      )
  );
  final long timestamp = System.currentTimeMillis();
  final int rows = 50;
  //ingesting same data twice to have some merging happening
  for (int i = 0; i < rows; i++) {
    index.add(getLongRow(timestamp + i, dimensionCount));
  }
  for (int i = 0; i < rows; i++) {
    index.add(getLongRow(timestamp + i, dimensionCount));
  }
  //run a timeseries query on the index and verify results
  // Query-side aggregators re-aggregate the ingested metrics (input == output name).
  final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>();
  queryAggregatorFactories.add(new CountAggregatorFactory("rows"));
  for (int i = 0; i < dimensionCount; ++i) {
    queryAggregatorFactories.add(
        new LongSumAggregatorFactory(
            StringUtils.format("sumResult%s", i),
            StringUtils.format("sumResult%s", i)
        )
    );
    queryAggregatorFactories.add(
        new DoubleSumAggregatorFactory(
            StringUtils.format("doubleSumResult%s", i),
            StringUtils.format("doubleSumResult%s", i)
        )
    );
  }
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("xxx")
                                .granularity(Granularities.ALL)
                                .intervals(ImmutableList.of(Intervals.of("2000/2030")))
                                .aggregators(queryAggregatorFactories)
                                .build();
  final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null);
  final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
      new TimeseriesQueryQueryToolChest(),
      new TimeseriesQueryEngine(),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER
  );
  final QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
      factory.createRunner(incrementalIndexSegment),
      factory.getToolchest()
  );
  List<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
  Result<TimeseriesResultValue> result = Iterables.getOnlyElement(results);
  boolean isRollup = index.isRollup();
  // With rollup the duplicate ingest collapses into the first 'rows' entries.
  Assert.assertEquals(rows * (isRollup ? 1 : 2), result.getValue().getLongMetric("rows").intValue());
  for (int i = 0; i < dimensionCount; ++i) {
    Assert.assertEquals(
        "Failed long sum on dimension " + i,
        2 * rows,
        result.getValue().getLongMetric("sumResult" + i).intValue()
    );
    Assert.assertEquals(
        "Failed double sum on dimension " + i,
        2 * rows,
        result.getValue().getDoubleMetric("doubleSumResult" + i).intValue()
    );
  }
}
/**
 * Stress test: writer threads ingest rows while reader threads repeatedly run
 * timeseries queries against the same index. Readers loop until they observe a
 * query that ran concurrently with at least one active writer; each observed
 * partial sum must stay within the eventually-consistent bound. After all
 * futures complete, a final query validates the exact totals.
 */
@Test(timeout = 60_000L)
public void testConcurrentAddRead() throws InterruptedException, ExecutionException
{
  final int dimensionCount = 5;
  // Ingestion-side aggregators: row count plus long/double sums per dimension.
  final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>(dimensionCount + 1);
  ingestAggregatorFactories.add(new CountAggregatorFactory("rows"));
  for (int i = 0; i < dimensionCount; ++i) {
    ingestAggregatorFactories.add(
        new LongSumAggregatorFactory(
            StringUtils.format("sumResult%s", i),
            StringUtils.format("Dim_%s", i)
        )
    );
    ingestAggregatorFactories.add(
        new DoubleSumAggregatorFactory(
            StringUtils.format("doubleSumResult%s", i),
            StringUtils.format("Dim_%s", i)
        )
    );
  }
  // Query-side aggregators re-aggregate the ingested metrics by their output names.
  final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>(dimensionCount + 1);
  queryAggregatorFactories.add(new CountAggregatorFactory("rows"));
  for (int i = 0; i < dimensionCount; ++i) {
    queryAggregatorFactories.add(
        new LongSumAggregatorFactory(
            StringUtils.format("sumResult%s", i),
            StringUtils.format("sumResult%s", i)
        )
    );
    queryAggregatorFactories.add(
        new DoubleSumAggregatorFactory(
            StringUtils.format("doubleSumResult%s", i),
            StringUtils.format("doubleSumResult%s", i)
        )
    );
  }
  final IncrementalIndex index = indexCreator.createIndex(
      (Object) ingestAggregatorFactories.toArray(new AggregatorFactory[0])
  );
  final int concurrentThreads = 2;
  final int elementsPerThread = 10_000;
  // Writers run at MIN_PRIORITY so readers get scheduled while writes are in flight.
  final ListeningExecutorService indexExecutor = MoreExecutors.listeningDecorator(
      Executors.newFixedThreadPool(
          concurrentThreads,
          new ThreadFactoryBuilder()
              .setDaemon(false)
              .setNameFormat("index-executor-%d")
              .setPriority(Thread.MIN_PRIORITY)
              .build()
      )
  );
  final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(
      Executors.newFixedThreadPool(
          concurrentThreads,
          new ThreadFactoryBuilder()
              .setDaemon(false)
              .setNameFormat("query-executor-%d")
              .build()
      )
  );
  final long timestamp = System.currentTimeMillis();
  final Interval queryInterval = Intervals.of("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z");
  final List<ListenableFuture<?>> indexFutures = Lists.newArrayListWithExpectedSize(concurrentThreads);
  final List<ListenableFuture<?>> queryFutures = Lists.newArrayListWithExpectedSize(concurrentThreads);
  final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null);
  final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
      new TimeseriesQueryQueryToolChest(),
      new TimeseriesQueryEngine(),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER
  );
  // currentlyRunning: number of writer threads inside their ingest loop.
  // concurrentlyRan: incremented by readers that observed an active writer.
  // someoneRan: total rows ingested so far (monotonic).
  final AtomicInteger currentlyRunning = new AtomicInteger(0);
  final AtomicInteger concurrentlyRan = new AtomicInteger(0);
  final AtomicInteger someoneRan = new AtomicInteger(0);
  // readyLatch + startLatch gate all workers to a simultaneous start.
  final CountDownLatch startLatch = new CountDownLatch(1);
  final CountDownLatch readyLatch = new CountDownLatch(concurrentThreads * 2);
  // NOTE(review): "Accumualted" is a typo for "Accumulated" (local name only).
  final AtomicInteger queriesAccumualted = new AtomicInteger(0);
  for (int j = 0; j < concurrentThreads; j++) {
    indexFutures.add(
        indexExecutor.submit(
            new Runnable()
            {
              @Override
              public void run()
              {
                readyLatch.countDown();
                try {
                  startLatch.await();
                }
                catch (InterruptedException e) {
                  Thread.currentThread().interrupt();
                  throw new RuntimeException(e);
                }
                currentlyRunning.incrementAndGet();
                try {
                  for (int i = 0; i < elementsPerThread; i++) {
                    index.add(getLongRow(timestamp + i, dimensionCount));
                    someoneRan.incrementAndGet();
                  }
                }
                catch (IndexSizeExceededException e) {
                  throw new RuntimeException(e);
                }
                currentlyRunning.decrementAndGet();
              }
            }
        )
    );
    final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                        .dataSource("xxx")
                                        .granularity(Granularities.ALL)
                                        .intervals(ImmutableList.of(queryInterval))
                                        .aggregators(queryAggregatorFactories)
                                        .build();
    queryFutures.add(
        queryExecutor.submit(
            new Runnable()
            {
              @Override
              public void run()
              {
                readyLatch.countDown();
                try {
                  startLatch.await();
                }
                catch (InterruptedException e) {
                  Thread.currentThread().interrupt();
                  throw new RuntimeException(e);
                }
                // Keep querying until some query provably overlapped a writer.
                while (concurrentlyRan.get() == 0) {
                  QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
                      factory.createRunner(incrementalIndexSegment),
                      factory.getToolchest()
                  );
                  Sequence<Result<TimeseriesResultValue>> sequence = runner.run(QueryPlus.wrap(query));
                  Double[] results = sequence.accumulate(
                      new Double[0],
                      new Accumulator<Double[], Result<TimeseriesResultValue>>()
                      {
                        @Override
                        public Double[] accumulate(Double[] accumulated, Result<TimeseriesResultValue> in)
                        {
                          if (currentlyRunning.get() > 0) {
                            concurrentlyRan.incrementAndGet();
                          }
                          queriesAccumualted.incrementAndGet();
                          return Lists.asList(in.getValue().getDoubleMetric("doubleSumResult0"), accumulated)
                                      .toArray(new Double[0]);
                        }
                      }
                  );
                  for (Double result : results) {
                    // Upper bound allows one in-flight row per writer beyond someoneRan.
                    final Integer maxValueExpected = someoneRan.get() + concurrentThreads;
                    if (maxValueExpected > 0) {
                      // Eventually consistent, but should be somewhere in that range
                      // Actual result is validated after all writes are guaranteed done.
                      Assert.assertTrue(
                          StringUtils.format("%d >= %g >= 0 violated", maxValueExpected, result),
                          result >= 0 && result <= maxValueExpected
                      );
                    }
                  }
                }
              }
            }
        )
    );
  }
  readyLatch.await();
  startLatch.countDown();
  List<ListenableFuture<?>> allFutures = new ArrayList<>(queryFutures.size() + indexFutures.size());
  allFutures.addAll(queryFutures);
  allFutures.addAll(indexFutures);
  Futures.allAsList(allFutures).get();
  Assert.assertTrue("Queries ran too fast", queriesAccumualted.get() > 0);
  Assert.assertTrue("Did not hit concurrency, please try again", concurrentlyRan.get() > 0);
  queryExecutor.shutdown();
  indexExecutor.shutdown();
  // Final, quiescent query: exact totals are now deterministic.
  QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
      factory.createRunner(incrementalIndexSegment),
      factory.getToolchest()
  );
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("xxx")
                                .granularity(Granularities.ALL)
                                .intervals(ImmutableList.of(queryInterval))
                                .aggregators(queryAggregatorFactories)
                                .build();
  List<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
  boolean isRollup = index.isRollup();
  for (Result<TimeseriesResultValue> result : results) {
    // All writers used identical timestamps, so rollup collapses rows across threads.
    Assert.assertEquals(
        elementsPerThread * (isRollup ? 1 : concurrentThreads),
        result.getValue().getLongMetric("rows").intValue()
    );
    for (int i = 0; i < dimensionCount; ++i) {
      Assert.assertEquals(
          StringUtils.format("Failed long sum on dimension %d", i),
          elementsPerThread * concurrentThreads,
          result.getValue().getLongMetric(StringUtils.format("sumResult%s", i)).intValue()
      );
      Assert.assertEquals(
          StringUtils.format("Failed double sum on dimension %d", i),
          elementsPerThread * concurrentThreads,
          result.getValue().getDoubleMetric(StringUtils.format("doubleSumResult%s", i)).intValue()
      );
    }
  }
}
/**
 * Ten threads each ingest the same 200 rows concurrently, then the index size,
 * timestamps, and per-row counts are verified (collapsed when rollup is on).
 *
 * Fixes over the previous version: the executor is now shut down (its ten
 * non-daemon threads used to leak), ingestion failures fail the test instead
 * of only being printed, and the latch is counted down in a finally block so
 * a failure cannot hang the await.
 */
@Test
public void testConcurrentAdd() throws Exception
{
  final IncrementalIndex index = indexCreator.createIndex((Object) DEFAULT_AGGREGATOR_FACTORIES);
  final int threadCount = 10;
  final int elementsPerThread = 200;
  final int dimensionCount = 5;
  final ExecutorService executor = Executors.newFixedThreadPool(threadCount);
  final long timestamp = System.currentTimeMillis();
  final CountDownLatch latch = new CountDownLatch(threadCount);
  final AtomicInteger failures = new AtomicInteger(0);
  try {
    for (int j = 0; j < threadCount; j++) {
      executor.submit(() -> {
        try {
          for (int i = 0; i < elementsPerThread; i++) {
            index.add(getRow(timestamp + i, i, dimensionCount));
          }
        }
        catch (Exception e) {
          e.printStackTrace();
          failures.incrementAndGet();
        }
        finally {
          latch.countDown();
        }
      });
    }
    Assert.assertTrue(latch.await(60, TimeUnit.SECONDS));
  }
  finally {
    executor.shutdown();
  }
  Assert.assertEquals("ingestion threads reported failures", 0, failures.get());
  boolean isRollup = index.isRollup();
  Assert.assertEquals(dimensionCount, index.getDimensionNames().size());
  Assert.assertEquals(elementsPerThread * (isRollup ? 1 : threadCount), index.size());
  Iterator<Row> iterator = index.iterator();
  int curr = 0;
  while (iterator.hasNext()) {
    Row row = iterator.next();
    Assert.assertEquals(timestamp + (isRollup ? curr : curr / threadCount), row.getTimestampFromEpoch());
    Assert.assertEquals(isRollup ? threadCount : 1, row.getMetric("count").intValue());
    curr++;
  }
  Assert.assertEquals(elementsPerThread * (isRollup ? 1 : threadCount), curr);
}
/**
 * Verifies that an index built with an explicit DimensionsSpec reports its
 * dimension names in spec order.
 */
@Test
public void testgetDimensions()
{
  final DimensionsSpec spec = new DimensionsSpec(
      DimensionsSpec.getDefaultSchemas(Arrays.asList("dim0", "dim1")),
      null,
      null
  );
  final IncrementalIndex incrementalIndex = indexCreator.createIndex(
      (builder, args) -> builder
          .setIndexSchema(
              new IncrementalIndexSchema.Builder()
                  .withMetrics(new CountAggregatorFactory("count"))
                  .withDimensionsSpec(spec)
                  .build()
          )
          .setMaxRowCount(1000000)
          .build()
  );
  Assert.assertEquals(Arrays.asList("dim0", "dim1"), incrementalIndex.getDimensionNames());
}
/**
 * Verifies rollup across rows with differing dimension sets: the first and
 * third rows are identical (and roll up together); the second introduces a
 * new dimension, leaving two rows in the index.
 */
@Test
public void testDynamicSchemaRollup() throws IndexSizeExceededException
{
  final IncrementalIndex index = indexCreator.createIndex(
      (builder, args) -> builder
          .setSimpleTestingIndexSchema(/* empty */)
          .setMaxRowCount(10)
          .build()
  );
  final List<MapBasedInputRow> rows = Arrays.asList(
      new MapBasedInputRow(
          1481871600000L,
          Arrays.asList("name", "host"),
          ImmutableMap.of("name", "name1", "host", "host")
      ),
      new MapBasedInputRow(
          1481871670000L,
          Arrays.asList("name", "table"),
          ImmutableMap.of("name", "name2", "table", "table")
      ),
      new MapBasedInputRow(
          1481871600000L,
          Arrays.asList("name", "host"),
          ImmutableMap.of("name", "name1", "host", "host")
      )
  );
  for (MapBasedInputRow row : rows) {
    index.add(row);
  }
  Assert.assertEquals(2, index.size());
}
}
| |
//package declaration
package control.forms.tabs;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//import declarations
import java.awt.Color;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import control.ControlPaint;
import view.tabs.Tools;
import view.tabs.Write;
import model.objects.pen.Pen;
import model.objects.pen.normal.BallPen;
import model.objects.pen.normal.Pencil;
import model.settings.Constants;
import model.settings.State;
/**
*
* @author Julius Huelsmann
* @version %I%, %U%
*/
public final class CTabWrite implements ActionListener {
/*
 * Identifiers
 */
/**
 * Color palette used by the semantic pens declared below.
 */
private static final Color
CLR_GRAY = new Color(160, 160, 160),
CLR_GREEN = new Color(64, 197, 153),
CLR_PINK = new Color(255, 153, 254),
CLR_PINK_2 = new Color(133, 33, 134),
CLR_BLUE = new Color(153, 162, 255),
CLR_BLUE_2 = new Color(112, 146, 190);
// /**
// * Headline identifiers.
// */
// private static final int
// HEADLINE_1 = 0,
// HEADLINE_2 = 1,
// HEADLINE_3 = 2;
/**
 * The sizes of the headlines (level 1 largest).
 */
private static final int SIZE_H1 = 8, SIZE_H2 = 6, SIZE_H3 = 4;
/**
 * The pens. One Pencil/BallPen pair per semantic category (theorem, proof,
 * example, comment, headline 1-3); the pairs are cloned into the globally
 * selected pens by {@link #actionPerformed(ActionEvent)}.
 */
public static final Pen
PEN_THEOREM_1 = new Pencil(Constants.PEN_ID_LINES, 2, CLR_GRAY),
PEN_THEOREM_2 = new BallPen(Constants.PEN_ID_LINES, 2, CLR_GREEN),
PEN_PROOF_1 = new Pencil(Constants.PEN_ID_LINES, 2, CLR_BLUE),
PEN_PROOF_2 = new BallPen(Constants.PEN_ID_LINES, 2, CLR_PINK_2),
PEN_EXMPL_1 = new Pencil(Constants.PEN_ID_LINES, 2, CLR_PINK),
PEN_EXMPL_2 = new BallPen(Constants.PEN_ID_LINES, 2, CLR_BLUE_2),
PEN_CMMNT_1 = new Pencil(Constants.PEN_ID_LINES, 2, CLR_GRAY),
PEN_CMMNT_2 = new BallPen(Constants.PEN_ID_LINES, 2, CLR_PINK_2),
PEN_HEADLINE_1_1 = new Pencil(Constants.PEN_ID_LINES, SIZE_H1, CLR_GRAY),
PEN_HEADLINE_1_2 = new BallPen(Constants.PEN_ID_LINES, SIZE_H1,
        CLR_PINK_2),
PEN_HEADLINE_2_1 = new Pencil(Constants.PEN_ID_LINES, SIZE_H2, CLR_GRAY),
PEN_HEADLINE_2_2 = new BallPen(Constants.PEN_ID_LINES, SIZE_H2,
        CLR_PINK_2),
PEN_HEADLINE_3_1 = new Pencil(Constants.PEN_ID_LINES, SIZE_H3, CLR_GRAY),
PEN_HEADLINE_3_2 = new BallPen(Constants.PEN_ID_LINES, SIZE_H3, CLR_PINK_2);
/*
 * Constructor
 */
/**
 * Instance of the main controller class which gives access to all the
 * important model, view and controller classes.
 */
private ControlPaint cp;
/**
 * Constructor: saves the instance of the root controller class for later
 * access to the view tabs and paint status controller.
 *
 * @param _cp instance of the root controller class.
 */
public CTabWrite(final ControlPaint _cp) {
    this.cp = _cp;
}
/*
* ActionListener
*/
/**
 * {@inheritDoc}
 *
 * Selects the pen pair matching the pressed write-tab button (example,
 * comment, proof, theorem, headline 1-3), mirrors its colors into the
 * paint tab, installs clones as the selected pens, and re-activates the
 * first pen tool. The previously duplicated per-branch sequence is
 * factored into {@link #applyPens}; the stray System.out.println debug
 * output has been removed.
 *
 * @param _event action event fired by one of the write-tab buttons.
 */
public void actionPerformed(final ActionEvent _event) {
    final Tools paint = cp.getView().getTabs().getTab_paint();
    final Write write = cp.getView().getTabs().getTab_write();
    final Object source = _event.getSource();
    if (source.equals(write.getTb_beispiel().getActionCause())) {
        applyPens(paint, PEN_EXMPL_1, PEN_EXMPL_2);
    } else if (source.equals(write.getTb_bemerkung().getActionCause())) {
        applyPens(paint, PEN_CMMNT_1, PEN_CMMNT_2);
    } else if (source.equals(write.getTb_beweis().getActionCause())) {
        applyPens(paint, PEN_PROOF_1, PEN_PROOF_2);
    } else if (source.equals(write.getTb_headline1().getActionCause())) {
        applyPens(paint, PEN_HEADLINE_1_1, PEN_HEADLINE_1_2);
    } else if (source.equals(write.getTb_headline2().getActionCause())) {
        applyPens(paint, PEN_HEADLINE_2_1, PEN_HEADLINE_2_2);
    } else if (source.equals(write.getTb_headline3().getActionCause())) {
        applyPens(paint, PEN_HEADLINE_3_1, PEN_HEADLINE_3_2);
    } else if (source.equals(write.getTb_satz().getActionCause())) {
        applyPens(paint, PEN_THEOREM_1, PEN_THEOREM_2);
    }
    //TODO: update paint gui.
    paint.getIt_stift1().setIcon(
            State.getPenSelected1().getIconPath());
    paint.getIt_stift2().setIcon(
            State.getPenSelected2().getIconPath());
    State.setIndexOperation(Constants.CONTROL_PAINTING_INDEX_PAINT_1);
    cp.getcTabPaintStatus().deactivate();
    paint.getIt_stift1().getTb_open().setActivated(true);
    paint.getTb_color1().setActivated(true);
}
/**
 * Deactivates the current GUI selection, shows the colors of the given pen
 * pair on the paint tab's color buttons, and installs clones of the pens
 * as the globally selected pens.
 *
 * @param _paint the paint tab whose color buttons are updated.
 * @param _pen1 pen installed into the first pen slot.
 * @param _pen2 pen installed into the second pen slot.
 */
private void applyPens(final Tools _paint, final Pen _pen1, final Pen _pen2) {
    deactivate();
    _paint.getJbtn_color1().setBackground(_pen1.getClr_foreground());
    _paint.getJbtn_color2().setBackground(_pen2.getClr_foreground());
    State.setPenSelected1(Pen.clonePen(_pen1));
    State.setPenSelected2(Pen.clonePen(_pen2));
}
// /**
// * Insert a headline somewhere (into the selected items) after releasing
// * selection if necessary.
// *
// * @param _importance the importance.
// */
// private void insertHeadline(final int _importance) {
// switch (_importance) {
// case HEADLINE_1:
// break;
// case HEADLINE_2:
// break;
// case HEADLINE_3:
// break;
// default:
// Status.getLogger().severe("Wrong kind of headline");
// break;
// }
// }
/**
 * Resets the write tab: switches every text-category toggle button off.
 */
public void deactivate() {
    final Write tabWrite = cp.getView().getTabs().getTab_write();
    tabWrite.getTb_beispiel().setActivated(false);
    tabWrite.getTb_bemerkung().setActivated(false);
    tabWrite.getTb_beweis().setActivated(false);
    tabWrite.getTb_satz().setActivated(false);
    tabWrite.getTb_headline1().setActivated(false);
    tabWrite.getTb_headline2().setActivated(false);
    tabWrite.getTb_headline3().setActivated(false);
}
/**
 * Called after the selected pens in {@link State} changed: activates the
 * write-tab toggle whose pen preset matches the currently selected pen
 * pair (comparison via the equals method implemented inside Pen); all
 * other toggles are switched off first.
 */
public void penChanged() {
    deactivate();
    final Write tabWrite = cp.getView().getTabs().getTab_write();
    final Pen first = State.getPenSelected1();
    final Pen second = State.getPenSelected2();

    if (first.equals(PEN_THEOREM_1) && second.equals(PEN_THEOREM_2)) {
        tabWrite.getTb_satz().setActivated(true);
    } else if (first.equals(PEN_PROOF_1) && second.equals(PEN_PROOF_2)) {
        tabWrite.getTb_beweis().setActivated(true);
    } else if (first.equals(PEN_EXMPL_1) && second.equals(PEN_EXMPL_2)) {
        tabWrite.getTb_beispiel().setActivated(true);
    } else if (first.equals(PEN_CMMNT_1) && second.equals(PEN_CMMNT_2)) {
        tabWrite.getTb_bemerkung().setActivated(true);
    } else if (first.equals(PEN_HEADLINE_1_1) && second.equals(PEN_HEADLINE_1_2)) {
        tabWrite.getTb_headline1().setActivated(true);
    } else if (first.equals(PEN_HEADLINE_2_1) && second.equals(PEN_HEADLINE_2_2)) {
        tabWrite.getTb_headline2().setActivated(true);
    } else if (first.equals(PEN_HEADLINE_3_1) && second.equals(PEN_HEADLINE_3_2)) {
        tabWrite.getTb_headline3().setActivated(true);
    }
}
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.event.listener;
import com.facebook.buck.core.build.engine.BuildRuleStatus;
import com.facebook.buck.core.build.event.BuildEvent;
import com.facebook.buck.core.build.event.BuildRuleEvent;
import com.facebook.buck.core.model.BuildId;
import com.facebook.buck.core.model.UnflavoredBuildTarget;
import com.facebook.buck.core.util.log.Logger;
import com.facebook.buck.event.ActionGraphEvent;
import com.facebook.buck.event.BuckEvent;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.BuckEventListener;
import com.facebook.buck.event.CommandEvent;
import com.facebook.buck.event.ConsoleEvent;
import com.facebook.buck.event.EventKey;
import com.facebook.buck.event.InstallEvent;
import com.facebook.buck.event.ProjectGenerationEvent;
import com.facebook.buck.event.WatchmanStatusEvent;
import com.facebook.buck.event.listener.stats.cache.CacheRateStatsKeeper;
import com.facebook.buck.event.listener.stats.cache.NetworkStatsKeeper;
import com.facebook.buck.event.listener.stats.cache.NetworkStatsTracker;
import com.facebook.buck.event.listener.stats.parse.ParseStatsTracker;
import com.facebook.buck.event.listener.util.EventInterval;
import com.facebook.buck.event.listener.util.ProgressEstimation;
import com.facebook.buck.event.listener.util.ProgressEstimator;
import com.facebook.buck.test.TestRuleEvent;
import com.facebook.buck.util.Ansi;
import com.facebook.buck.util.Verbosity;
import com.facebook.buck.util.console.ConsoleUtils;
import com.facebook.buck.util.environment.ExecutionEnvironment;
import com.facebook.buck.util.i18n.NumberFormatter;
import com.facebook.buck.util.timing.Clock;
import com.facebook.buck.util.types.Pair;
import com.facebook.buck.util.unit.SizeUnit;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import com.google.common.eventbus.Subscribe;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import java.util.stream.Stream;
import javax.annotation.Nullable;
import org.stringtemplate.v4.ST;
/**
* Base class for {@link BuckEventListener}s responsible for outputting information about the
* running build to {@code stderr}.
*/
public abstract class AbstractConsoleEventBusListener implements BuckEventListener {
private static final Logger LOG = Logger.get(AbstractConsoleEventBusListener.class);

// Shared locale-aware formatter rendering elapsed seconds with the "0.0s"
// pattern; used by formatElapsedTime().
private static final NumberFormatter TIME_FORMATTER =
    new NumberFormatter(
        locale1 -> {
          // Yes, this is the only way to apply and localize a pattern to a NumberFormat.
          NumberFormat numberFormat = NumberFormat.getIntegerInstance(locale1);
          Preconditions.checkState(numberFormat instanceof DecimalFormat);
          DecimalFormat decimalFormat = (DecimalFormat) numberFormat;
          decimalFormat.applyPattern("0.0s");
          return decimalFormat;
        });

// Sentinel returned by logEventInterval() when the measured event pair has
// not finished yet.
protected static final long UNFINISHED_EVENT_PAIR = -1;

protected final RenderingConsole console;
protected final Clock clock;
protected final Verbosity verbosity;
protected final Ansi ansi;
private final Locale locale;
private final boolean showTextInAllCaps;
private final int numberOfSlowRulesToShow;
private final boolean showSlowRulesInConsole;
// Wall time accumulated per build target in buildRuleFinished(); all access
// is guarded by synchronized (timeSpentMillisecondsInRules).
private final Map<UnflavoredBuildTarget, Long> timeSpentMillisecondsInRules;

// Lifecycle start/finish events captured by the @Subscribe handlers below.
// volatile: written from event-bus threads, read from rendering code.
@Nullable protected volatile ProjectGenerationEvent.Started projectGenerationStarted;
@Nullable protected volatile ProjectGenerationEvent.Finished projectGenerationFinished;
@Nullable protected volatile WatchmanStatusEvent.Started watchmanStarted;
@Nullable protected volatile WatchmanStatusEvent.Finished watchmanFinished;
protected ConcurrentLinkedDeque<ActionGraphEvent.Started> actionGraphStarted;
protected ConcurrentLinkedDeque<ActionGraphEvent.Finished> actionGraphFinished;
// Start/finish intervals keyed by event key; populated via
// aggregateStartedEvent()/aggregateFinishedEvent().
protected ConcurrentHashMap<EventKey, EventInterval> actionGraphEvents;
@Nullable protected volatile BuildEvent.Started buildStarted;
@Nullable protected volatile BuildEvent.Finished buildFinished;
@Nullable protected volatile InstallEvent.Started installStarted;
@Nullable protected volatile InstallEvent.Finished installFinished;
@Nullable protected volatile CommandEvent.Finished commandFinished;

// Number of rules in the build, once a RuleCountCalculated/
// UnskippedRuleCountUpdated event has been seen.
protected volatile OptionalInt ruleCount = OptionalInt.empty();
protected Optional<String> publicAnnouncements = Optional.empty();
protected final AtomicInteger numRulesCompleted = new AtomicInteger();
protected Optional<ProgressEstimator> progressEstimator = Optional.empty();
protected final NetworkStatsTracker networkStatsTracker;
protected final ParseStatsTracker parseStats;
protected BuildRuleThreadTracker buildRuleThreadTracker;

/** Commands that should print out the build details, if provided */
protected final ImmutableSet<String> buildDetailsCommands;

// Ensures the "top slow rules" report is logged at most once; see
// logTopSlowBuildRulesIfNotLogged().
private final AtomicBoolean topSlowestRulesLogged = new AtomicBoolean(false);
/**
 * Creates a console event listener.
 *
 * @param console the rendering console that output goes to.
 * @param clock clock used for elapsed-time calculations.
 * @param locale locale used when formatting numbers and messages.
 * @param executionEnvironment environment handed to the build rule thread tracker.
 * @param showTextInAllCaps whether console text should be upper-cased.
 * @param numberOfSlowRulesToShow size of the "top slow rules" report (0 disables it).
 * @param showSlowRulesInConsole whether the slow-rules report is also printed to the console.
 * @param buildDetailsCommands commands that should print out the build details.
 */
public AbstractConsoleEventBusListener(
    RenderingConsole console,
    Clock clock,
    Locale locale,
    ExecutionEnvironment executionEnvironment,
    boolean showTextInAllCaps,
    int numberOfSlowRulesToShow,
    boolean showSlowRulesInConsole,
    ImmutableSet<String> buildDetailsCommands) {
  this.console = console;
  this.parseStats = new ParseStatsTracker();
  this.networkStatsTracker = new NetworkStatsTracker();
  this.clock = clock;
  this.locale = locale;
  this.ansi = console.getAnsi();
  this.verbosity = console.getVerbosity();
  this.showTextInAllCaps = showTextInAllCaps;
  this.numberOfSlowRulesToShow = numberOfSlowRulesToShow;
  this.showSlowRulesInConsole = showSlowRulesInConsole;
  this.timeSpentMillisecondsInRules = new HashMap<>();
  this.projectGenerationStarted = null;
  this.projectGenerationFinished = null;
  this.watchmanStarted = null;
  this.watchmanFinished = null;
  this.actionGraphStarted = new ConcurrentLinkedDeque<>();
  this.actionGraphFinished = new ConcurrentLinkedDeque<>();
  this.actionGraphEvents = new ConcurrentHashMap<>();
  this.buildStarted = null;
  this.buildFinished = null;
  this.installStarted = null;
  this.installFinished = null;
  this.buildRuleThreadTracker = new BuildRuleThreadTracker(executionEnvironment);
  this.buildDetailsCommands = buildDetailsCommands;
}
/**
 * Subscribes this listener and its parse/network stats trackers to the bus.
 *
 * @param buildEventBus the event bus to register with.
 */
public void register(BuckEventBus buildEventBus) {
  buildEventBus.register(this);
  buildEventBus.register(parseStats);
  buildEventBus.register(networkStatsTracker);
}
/**
 * Renders the build-details line from a StringTemplate pattern; the template
 * may reference the build id through a {build_id} placeholder ('{' and '}'
 * are used as the template delimiters).
 *
 * @param buildId the id of this build.
 * @param buildDetailsTemplate the StringTemplate source.
 * @return the rendered line.
 */
public static String getBuildDetailsLine(BuildId buildId, String buildDetailsTemplate) {
  return new ST(buildDetailsTemplate, '{', '}').add("build_id", buildId).render();
}
/**
 * Renders the "Build UUID" line for the given build.
 *
 * @param buildId the id of this build.
 * @return the formatted line.
 */
public static String getBuildLogLine(BuildId buildId) {
  return String.format("Build UUID: %s", buildId);
}
@VisibleForTesting
Optional<String> getPublicAnnouncements() {
  return publicAnnouncements;
}

/**
 * Whether this listener renders estimated progress. Subclasses that do must
 * override this to return true, otherwise setProgressEstimator() is a no-op.
 */
public boolean displaysEstimatedProgress() {
  return false;
}

/** Attaches the progress estimator — but only if this listener displays progress. */
public void setProgressEstimator(ProgressEstimator estimator) {
  if (displaysEstimatedProgress()) {
    progressEstimator = Optional.of(estimator);
    parseStats.setProgressEstimator(estimator);
  }
}
/**
 * Formats an elapsed duration for display, e.g. {@code "12.3s"} or
 * {@code "2m 3.4s"}.
 *
 * @param elapsedTimeMs the duration in milliseconds.
 * @return the localized, human-readable elapsed time.
 */
protected String formatElapsedTime(long elapsedTimeMs) {
  long minutes = elapsedTimeMs / 60_000L;
  // TIME_FORMATTER applies the localized "0.0s" pattern to the remaining seconds.
  String seconds = TIME_FORMATTER.format(locale, elapsedTimeMs / 1000.0 - (minutes * 60));
  // 'seconds' is already a String (no String.valueOf round-trip needed), and
  // plain "%dm %s" replaces the opaque positional "%2$dm %1$s" format.
  return minutes == 0 ? seconds : String.format("%dm %s", minutes, seconds);
}
/** @return the approximate overall build progress, if an estimator is attached. */
protected Optional<Double> getApproximateBuildProgress() {
  return progressEstimator.flatMap(ProgressEstimator::getApproximateBuildProgress);
}

/** @return estimated progress of generating project files, if available. */
protected Optional<Double> getEstimatedProgressOfGeneratingProjectFiles() {
  return progressEstimator.flatMap(
      ProgressEstimator::getEstimatedProgressOfGeneratingProjectFiles);
}

/** @return Estimated progress of parsing files stage. */
protected ProgressEstimation getEstimatedProgressOfParsingBuckFiles() {
  return progressEstimator
      .map(ProgressEstimator::getEstimatedProgressOfParsingBuckFiles)
      .orElse(ProgressEstimation.UNKNOWN);
}

/** @return Estimated progress of creating the action graph. */
protected ProgressEstimation getEstimatedProgressOfCreatingActionGraph() {
  return progressEstimator
      .map(ProgressEstimator::getEstimatedProgressOfCreatingActionGraph)
      .orElse(ProgressEstimation.UNKNOWN);
}
/**
 * Stores public announcements and, when present, re-posts them to the bus as
 * an INFO-level console event with ANSI "information" styling applied.
 */
public void setPublicAnnouncements(BuckEventBus eventBus, Optional<String> announcements) {
  this.publicAnnouncements = announcements;
  announcements.ifPresent(
      announcement ->
          eventBus.post(
              ConsoleEvent.createForMessageWithAnsiEscapeCodes(
                  Level.INFO, ansi.asInformationText(announcement))));
}

// This is used by the logging infrastructure to add a line to the console in a way that doesn't
// break rendering.
public abstract void printSevereWarningDirectly(String line);
/**
 * Filter a list of events and return the subset that fall between the given start and end
 * timestamps. Preserves ordering if the given iterable was ordered. Will replace event pairs that
 * straddle the boundary with {@link com.facebook.buck.event.listener.ProxyBuckEvent} instances,
 * so that the resulting collection is strictly contained within the boundaries.
 *
 * @param start the start timestamp (inclusive)
 * @param end the end timestamp (also inclusive)
 * @param eventIntervals the events to filter.
 * @return a list of all events from the given iterable that fall between the given start and end
 *     times. If an event straddles the given start or end, it will be replaced with a proxy event
 *     pair that cuts off at exactly the start or end.
 */
protected static Collection<EventInterval> getEventsBetween(
    long start, long end, Iterable<EventInterval> eventIntervals) {
  List<EventInterval> outEvents = new ArrayList<>();
  for (EventInterval ep : eventIntervals) {
    long startTime = ep.getStartTime();
    long endTime = ep.getEndTime();
    if (ep.isComplete()) {
      if (startTime >= start && endTime <= end) {
        // Fully contained in the window: keep as-is.
        outEvents.add(ep);
      } else if (startTime >= start && startTime <= end) {
        // If the start time is within bounds, but the end time is not, replace with a proxy
        outEvents.add(EventInterval.proxy(startTime, end));
      } else if (endTime <= end && endTime >= start) {
        // If the end time is within bounds, but the start time is not, replace with a proxy
        outEvents.add(EventInterval.proxy(start, endTime));
      } else if (startTime < start && endTime > end) {
        // The event spans the entire window; previously this case matched no
        // branch and the event was silently dropped. Clip both sides so its
        // overlap with [start, end] is counted.
        outEvents.add(EventInterval.proxy(start, end));
      }
    } else if (ep.isOngoing()) {
      // If the event is ongoing, replace with a proxy running through 'end'.
      // NOTE(review): an ongoing event that started before 'start' keeps its
      // original start time here — confirm that is intended.
      outEvents.add(EventInterval.proxy(startTime, end));
    } // Ignore the case where we have an end event but not a start. Just drop that EventInterval.
  }
  return outEvents;
}
/** Upper-cases {@code str} when the listener is configured for all-caps text. */
protected String convertToAllCapsIfNeeded(String str) {
  return showTextInAllCaps ? str.toUpperCase() : str;
}
/**
 * Adds a line about a pair of start and finished events to lines.
 *
 * @param prefix Prefix to print for this event pair.
 * @param suffix Suffix to print for this event pair.
 * @param currentMillis The current time in milliseconds.
 * @param offsetMs Offset to remove from calculated time. Set this to a non-zero value if the
 *     event pair would contain another event. For example, build time includes parse time, but to
 *     make the events easier to reason about it makes sense to pull parse time out of build time.
 * @param startEvent The started event.
 * @param finishedEvent The finished event.
 * @param progress progress fraction to render while the pair is unfinished.
 * @param minimum minimum elapsed time below which no line is added.
 * @param lines The builder to append lines to.
 * @return The amount of time between start and finished if finished is present, otherwise {@link
 *     AbstractConsoleEventBusListener#UNFINISHED_EVENT_PAIR}.
 */
protected long logEventInterval(
    String prefix,
    Optional<String> suffix,
    long currentMillis,
    long offsetMs,
    @Nullable BuckEvent startEvent,
    @Nullable BuckEvent finishedEvent,
    Optional<Double> progress,
    Optional<Long> minimum,
    ImmutableList.Builder<String> lines) {
  if (startEvent == null) {
    // Without a start event there is nothing to report yet.
    return UNFINISHED_EVENT_PAIR;
  }
  // Wrap the single start/finish pair in an interval and delegate to the
  // collection-based overload below.
  EventInterval interval =
      EventInterval.of(
          OptionalLong.of(startEvent.getTimestampMillis()),
          finishedEvent == null
              ? OptionalLong.empty()
              : OptionalLong.of(finishedEvent.getTimestampMillis()));
  return logEventInterval(
      prefix,
      suffix,
      currentMillis,
      offsetMs,
      ImmutableList.of(interval),
      progress,
      minimum,
      lines);
}
/**
 * Adds a line about the state of HTTP cache uploads to lines, but only once
 * any upload has been scheduled.
 *
 * @param lines The builder to append lines to.
 */
protected void logHttpCacheUploads(ImmutableList.Builder<String> lines) {
  if (!networkStatsTracker.haveUploadsStarted()) {
    return;
  }
  String status = networkStatsTracker.haveUploadsFinished() ? ": FINISHED " : "... ";
  lines.add("HTTP CACHE UPLOAD" + status + renderRemoteUploads());
}
/**
 * Adds a line about a set of start and finished events to lines.
 *
 * @param prefix Prefix to print for this event pair.
 * @param suffix Suffix to print for this event pair.
 * @param currentMillis The current time in milliseconds.
 * @param eventIntervals the collection of start/end events to measure elapsed time.
 * @param progress estimated progress to render while unfinished.
 * @param minimum minimum elapsed time below which no line is added.
 * @param lines The builder to append lines to.
 * @return True if all events are finished, false otherwise
 */
protected boolean addLineFromEvents(
    String prefix,
    Optional<String> suffix,
    long currentMillis,
    Collection<EventInterval> eventIntervals,
    ProgressEstimation progress,
    Optional<Long> minimum,
    ImmutableList.Builder<String> lines) {
  // Collapse the collection to one overall interval (earliest start, latest
  // finish — or no finish if anything is ongoing) and delegate.
  return addLineFromEventInterval(
      prefix, suffix, currentMillis, getStartAndFinish(eventIntervals), progress, minimum, lines);
}
/**
 * Adds a line about an {@link EventInterval} to lines.
 *
 * @param prefix Prefix to print for this event pair.
 * @param suffix Suffix to print for this event pair.
 * @param currentMillis The current time in milliseconds.
 * @param startAndFinish the event interval to measure elapsed time.
 * @param progress estimated progress to render while unfinished.
 * @param minimum minimum elapsed time below which no line is added.
 * @param lines The builder to append lines to.
 * @return True if all events are finished, false otherwise
 */
protected boolean addLineFromEventInterval(
    String prefix,
    Optional<String> suffix,
    long currentMillis,
    EventInterval startAndFinish,
    ProgressEstimation progress,
    Optional<Long> minimum,
    ImmutableList.Builder<String> lines) {
  if (!startAndFinish.getStart().isPresent()) {
    // nothing to display, event has not even started yet
    return false;
  }
  boolean isFinished = startAndFinish.getFinish().isPresent();
  long endMillis = isFinished ? startAndFinish.getEndTime() : currentMillis;
  long elapsedMillis = endMillis - startAndFinish.getStartTime();
  if (minimum.isPresent() && elapsedMillis < minimum.get()) {
    // Too short to be worth a line; still report whether it completed.
    return isFinished;
  }
  StringBuilder line = new StringBuilder(prefix);
  if (isFinished) {
    line.append(showTextInAllCaps ? ": FINISHED IN " : ": finished in ");
  } else {
    line.append("... ");
  }
  line.append(formatElapsedTime(elapsedMillis));
  if (!isFinished) {
    // Append whichever progress indicator is available.
    if (progress.getProgress().isPresent()) {
      line.append(" (").append(Math.round(progress.getProgress().get() * 100)).append("%)");
    } else if (progress.getNumber().isPresent()) {
      line.append(" (").append(progress.getNumber().get()).append("/unknown)");
    }
  }
  if (suffix.isPresent()) {
    line.append(" ").append(suffix.get());
  }
  lines.add(line.toString());
  return isFinished;
}
/**
 * Adds a line about a set of start and finished events to lines.
 *
 * @param prefix Prefix to print for this event pair.
 * @param suffix Suffix to print for this event pair.
 * @param currentMillis The current time in milliseconds.
 * @param offsetMs Offset to remove from calculated time. Set this to a non-zero value if the
 *     event pair would contain another event. For example, build time includes parse time, but to
 *     make the events easier to reason about it makes sense to pull parse time out of build time.
 * @param eventIntervals the collection of start/end events to sum up when calculating elapsed
 *     time.
 * @param progress progress fraction to render; forced to 1.0 once everything has finished.
 * @param minimum minimum elapsed time below which no line is added.
 * @param lines The builder to append lines to.
 * @return The summed time between start and finished events if each start event has a matching
 *     finished event, otherwise {@link AbstractConsoleEventBusListener#UNFINISHED_EVENT_PAIR}.
 */
@Deprecated
protected long logEventInterval(
    String prefix,
    Optional<String> suffix,
    long currentMillis,
    long offsetMs,
    Collection<EventInterval> eventIntervals,
    Optional<Double> progress,
    Optional<Long> minimum,
    ImmutableList.Builder<String> lines) {
  if (eventIntervals.isEmpty()) {
    return UNFINISHED_EVENT_PAIR;
  }
  // Total time of completed intervals, overlap-deduplicated.
  long completedRunTimesMs = getTotalCompletedTimeFromEventIntervals(eventIntervals);
  // >= 0 exactly when at least one interval is still ongoing.
  long currentlyRunningTime = getWorkingTimeFromLastStartUntilNow(eventIntervals, currentMillis);
  boolean stillRunning = currentlyRunningTime >= 0;
  String parseLine = prefix;
  long elapsedTimeMs = completedRunTimesMs - offsetMs;
  if (stillRunning) {
    parseLine += "... ";
    elapsedTimeMs += currentlyRunningTime;
  } else {
    parseLine += convertToAllCapsIfNeeded(": finished in ");
    if (progress.isPresent()) {
      // Everything completed: snap displayed progress to 100%.
      progress = Optional.of(1.0);
    }
  }
  if (minimum.isPresent() && elapsedTimeMs < minimum.get()) {
    // NOTE(review): below the minimum this returns elapsedTimeMs even while
    // still running, not UNFINISHED_EVENT_PAIR as the javadoc implies —
    // confirm callers handle that.
    return elapsedTimeMs;
  }
  parseLine += formatElapsedTime(elapsedTimeMs);
  if (progress.isPresent()) {
    parseLine += " (" + Math.round(progress.get() * 100) + "%)";
  }
  if (suffix.isPresent()) {
    parseLine += " " + suffix.get();
  }
  lines.add(parseLine);
  return stillRunning ? UNFINISHED_EVENT_PAIR : elapsedTimeMs;
}
/**
 * Calculate event pair that start and end the sequence. If there is any ongoing event, end event
 * would be empty.
 *
 * @param eventIntervals the collection of event starts/stops.
 * @return The pair of events, start event is the earliest start event, end event is the latest
 *     finish event, or empty if there are ongoing events, i.e. not completed pairs
 */
private static EventInterval getStartAndFinish(Collection<EventInterval> eventIntervals) {
  OptionalLong start = OptionalLong.empty();
  OptionalLong end = OptionalLong.empty();
  // Once any interval lacks a finish, the overall end stays empty.
  boolean anyOngoing = false;
  for (EventInterval pair : eventIntervals) {
    // Track the earliest known start.
    OptionalLong candidate = pair.getStart();
    if (!start.isPresent()
        || (candidate.isPresent() && candidate.getAsLong() < start.getAsLong())) {
      start = candidate;
    }
    if (anyOngoing) {
      // An ongoing interval was already seen; only the start can still change.
      continue;
    }
    candidate = pair.getFinish();
    if (!candidate.isPresent()) {
      anyOngoing = true;
      end = OptionalLong.empty();
      continue;
    }
    // Track the latest finish among completed intervals.
    if (!end.isPresent() || candidate.getAsLong() > end.getAsLong()) {
      end = candidate;
    }
  }
  return EventInterval.of(start, end);
}
/**
 * Takes a collection of start and finished events. If there are any events that have a start, but
 * no finished time, the collection is considered ongoing.
 *
 * @param eventIntervals the collection of event starts/stops.
 * @param currentMillis the current time.
 * @return -1 if all events are completed, otherwise the time elapsed between the latest event and
 *     currentMillis.
 */
@Deprecated
protected static long getWorkingTimeFromLastStartUntilNow(
    Collection<EventInterval> eventIntervals, long currentMillis) {
  // We examine all events to determine whether we have any incomplete events and also
  // to get the latest timestamp available (start or stop).
  long latestTimestamp = 0L;
  long earliestOngoingStart = Long.MAX_VALUE;
  boolean anyEventIsOngoing = false;
  for (EventInterval pair : eventIntervals) {
    if (pair.isOngoing()) {
      anyEventIsOngoing = true;
      // NOTE(review): earliestOngoingStart is never reassigned, so this
      // comparison is always true (it stays Long.MAX_VALUE) and
      // latestTimestamp ends up as the start of the LAST ongoing pair
      // iterated — which may clobber a later end time recorded below.
      // Confirm whether the intent was to track the earliest ongoing start.
      if (pair.getStartTime() < earliestOngoingStart) {
        latestTimestamp = pair.getStartTime();
      }
    } else if (pair.getEndTime() > latestTimestamp) {
      latestTimestamp = pair.getEndTime();
    }
  }
  // If any events are unpaired, the whole collection is considered ongoing and we return
  // the difference between the latest time in the collection and the current time.
  return anyEventIsOngoing ? currentMillis - latestTimestamp : -1;
}
/**
 * Get the summed elapsed time from all matched event pairs. Does not consider unmatched event
 * pairs. Pairs are determined by their {@link com.facebook.buck.event.EventKey}.
 *
 * @param eventIntervals a set of paired events (incomplete events are okay).
 * @return the sum of all times between matched event pairs.
 */
protected static long getTotalCompletedTimeFromEventIntervals(
    Collection<EventInterval> eventIntervals) {
  // Flatten the event groupings into a timeline, so that we don't over count parallel work.
  RangeSet<Long> timeline = TreeRangeSet.create();
  for (EventInterval pair : eventIntervals) {
    if (pair.isComplete() && pair.getElapsedTimeMs() > 0) {
      timeline.add(Range.open(pair.getStartTime(), pair.getEndTime()));
    }
  }
  // Sum the lengths of the disjoint ranges that remain.
  return timeline.asRanges().stream()
      .mapToLong(range -> range.upperEndpoint() - range.lowerEndpoint())
      .sum();
}
/** Formats a {@link ConsoleEvent} and adds it to {@code lines}. */
protected ImmutableList<String> formatConsoleEvent(ConsoleEvent logEvent) {
  return ConsoleUtils.formatConsoleEvent(logEvent, console.getAnsi());
}

/** Tells the progress estimator which command is running, for its history lookups. */
@Subscribe
public void commandStartedEvent(CommandEvent.Started startedEvent) {
  progressEstimator.ifPresent(
      estimator ->
          estimator.setCurrentCommand(startedEvent.getCommandName(), startedEvent.getArgs()));
}
/**
 * Records the start timestamp of {@code started} under its event key,
 * creating the interval if this is the first event seen for that key.
 */
public static void aggregateStartedEvent(
    ConcurrentHashMap<EventKey, EventInterval> map, BuckEvent started) {
  map.compute(
      started.getEventKey(),
      (key, pair) ->
          pair == null
              ? EventInterval.start(started.getTimestampMillis())
              : pair.withStart(started.getTimestampMillis()));
}

/**
 * Records the finish timestamp of {@code finished} under its event key,
 * creating the interval if the finish arrives before (or without) a start.
 */
public static void aggregateFinishedEvent(
    ConcurrentHashMap<EventKey, EventInterval> map, BuckEvent finished) {
  map.compute(
      finished.getEventKey(),
      (key, pair) ->
          pair == null
              ? EventInterval.finish(finished.getTimestampMillis())
              : pair.withFinish(finished.getTimestampMillis()));
}
/** Captures the project-generation start event for later rendering. */
@Subscribe
public void projectGenerationStarted(ProjectGenerationEvent.Started started) {
  projectGenerationStarted = started;
}

/** Forwards per-target project-generation progress to the estimator. */
@SuppressWarnings("unused")
@Subscribe
public void projectGenerationProcessedTarget(ProjectGenerationEvent.Processed processed) {
  progressEstimator.ifPresent(ProgressEstimator::didGenerateProjectForTarget);
}

/** Captures the project-generation finish event and notifies the estimator. */
@Subscribe
public void projectGenerationFinished(ProjectGenerationEvent.Finished finished) {
  projectGenerationFinished = finished;
  progressEstimator.ifPresent(ProgressEstimator::didFinishProjectGeneration);
}

/** Captures the watchman start event. */
@Subscribe
public void watchmanStarted(WatchmanStatusEvent.Started started) {
  watchmanStarted = started;
}

/** Captures the watchman finish event. */
@Subscribe
public void watchmanFinished(WatchmanStatusEvent.Finished finished) {
  watchmanFinished = finished;
}

/** Records an action-graph start event, both raw and as an aggregated interval. */
@Subscribe
public void actionGraphStarted(ActionGraphEvent.Started started) {
  actionGraphStarted.add(started);
  aggregateStartedEvent(actionGraphEvents, started);
}

/** Records an action-graph finish event, both raw and as an aggregated interval. */
@Subscribe
public void actionGraphFinished(ActionGraphEvent.Finished finished) {
  actionGraphFinished.add(finished);
  aggregateFinishedEvent(actionGraphEvents, finished);
}

/** Captures the build start event and notifies the estimator. */
@Subscribe
public void buildStarted(BuildEvent.Started started) {
  buildStarted = started;
  progressEstimator.ifPresent(ProgressEstimator::didStartBuild);
}

/** Records the calculated number of rules in this build. */
@Subscribe
public void ruleCountCalculated(BuildEvent.RuleCountCalculated calculated) {
  ruleCount = OptionalInt.of(calculated.getNumRules());
  progressEstimator.ifPresent(estimator -> estimator.setNumberOfRules(calculated.getNumRules()));
}

/** Updates the rule count when skipped rules are excluded. */
@Subscribe
public void ruleCountUpdated(BuildEvent.UnskippedRuleCountUpdated updated) {
  ruleCount = OptionalInt.of(updated.getNumRules());
  progressEstimator.ifPresent(estimator -> estimator.setNumberOfRules(updated.getNumRules()));
}
/**
 * Builds the "N/M jobs, K updated" suffix for the build line.
 *
 * @return the suffix, or empty when no rule count has been calculated yet.
 */
protected Optional<String> getOptionalBuildLineSuffix() {
  // Log build time, excluding time spent in parsing.
  if (!ruleCount.isPresent()) {
    return Optional.empty();
  }
  List<String> columns = new ArrayList<>();
  columns.add(
      String.format(
          locale,
          "%d/%d " + convertToAllCapsIfNeeded("jobs"),
          numRulesCompleted.get(),
          ruleCount.getAsInt()));
  CacheRateStatsKeeper.CacheRateStatsUpdateEvent cacheRateStats =
      networkStatsTracker.getCacheRateStats();
  columns.add(
      String.format(
          locale,
          "%d " + convertToAllCapsIfNeeded("updated"),
          cacheRateStats.getUpdatedRulesCount()));
  String jobSummary = String.join(", ", columns);
  return Strings.isNullOrEmpty(jobSummary) ? Optional.empty() : Optional.of(jobSummary);
}
/**
 * Builds the network statistics line: artifact count, human-readable download
 * size, cache miss rate and (when any occurred) cache error rate, prefixed
 * with "Downloaded" once the build is finished or "Downloading..." before.
 *
 * @param finishedEvent the build-finished event, or null while still building.
 * @return the formatted line.
 */
protected String getNetworkStatsLine(@Nullable BuildEvent.Finished finishedEvent) {
  String header =
      finishedEvent != null
          ? convertToAllCapsIfNeeded("Downloaded")
          : convertToAllCapsIfNeeded("Downloading") + "...";
  List<String> columns = new ArrayList<>();
  NetworkStatsKeeper.RemoteDownloadStats downloadStats =
      networkStatsTracker.getRemoteDownloadStats();
  Pair<Double, SizeUnit> readableDownloadedBytes =
      SizeUnit.getHumanReadableSize(downloadStats.getBytes(), SizeUnit.BYTES);
  columns.add(
      String.format(
          locale, "%d " + convertToAllCapsIfNeeded("artifacts"), downloadStats.getArtifacts()));
  columns.add(
      String.format(
          locale,
          "%s",
          convertToAllCapsIfNeeded(
              SizeUnit.toHumanReadableString(readableDownloadedBytes, locale))));
  CacheRateStatsKeeper.CacheRateStatsUpdateEvent cacheRateStats =
      networkStatsTracker.getCacheRateStats();
  columns.add(
      String.format(
          locale,
          "%.1f%% " + convertToAllCapsIfNeeded("cache miss"),
          cacheRateStats.getCacheMissRate()));
  // Only mention cache errors when at least one occurred.
  if (cacheRateStats.getCacheErrorCount() > 0) {
    columns.add(
        String.format(
            locale,
            "%.1f%% " + convertToAllCapsIfNeeded("cache errors"),
            cacheRateStats.getCacheErrorRate()));
  }
  return header + " " + String.join(", ", columns);
}
/** Forwards rule-start events to the per-thread rule tracker. */
@Subscribe
public void buildRuleStarted(BuildRuleEvent.Started started) {
  buildRuleThreadTracker.didStartBuildRule(started);
}

/** Forwards rule-resume events to the per-thread rule tracker. */
@Subscribe
public void buildRuleResumed(BuildRuleEvent.Resumed resumed) {
  buildRuleThreadTracker.didResumeBuildRule(resumed);
}

/** Forwards rule-suspend events to the per-thread rule tracker. */
@Subscribe
public void buildRuleSuspended(BuildRuleEvent.Suspended suspended) {
  buildRuleThreadTracker.didSuspendBuildRule(suspended);
}
/**
 * Records per-target wall time (for the "top slow rules" report), updates the
 * completed-rule counters, then forwards the event to the thread tracker.
 */
@Subscribe
public void buildRuleFinished(BuildRuleEvent.Finished finished) {
  if (numberOfSlowRulesToShow != 0) {
    synchronized (timeSpentMillisecondsInRules) {
      UnflavoredBuildTarget unflavoredTarget =
          finished.getBuildRule().getBuildTarget().getUnflavoredBuildTarget();
      // merge() replaces the previous get/null-check/put sequence: accumulate
      // wall time per unflavored target.
      timeSpentMillisecondsInRules.merge(
          unflavoredTarget, finished.getDuration().getWallMillisDuration(), Long::sum);
    }
  }
  if (finished.getStatus() != BuildRuleStatus.CANCELED) {
    // Canceled rules do not count toward build progress.
    progressEstimator.ifPresent(ProgressEstimator::didFinishRule);
    numRulesCompleted.getAndIncrement();
  }
  buildRuleThreadTracker.didFinishBuildRule(finished);
}
/** Captures the build finish event and notifies the estimator. */
@Subscribe
public void buildFinished(BuildEvent.Finished finished) {
  buildFinished = finished;
  progressEstimator.ifPresent(ProgressEstimator::didFinishBuild);
}

/** Forwards test-rule start events to the per-thread rule tracker. */
@Subscribe
public void testRuleStarted(TestRuleEvent.Started started) {
  buildRuleThreadTracker.didStartTestRule(started);
}

/** Forwards test-rule finish events to the per-thread rule tracker. */
@Subscribe
public void testRuleFinished(TestRuleEvent.Finished finished) {
  buildRuleThreadTracker.didFinishTestRule(finished);
}

/** Captures the install start event. */
@Subscribe
public void installStarted(InstallEvent.Started started) {
  installStarted = started;
}

/** Captures the install finish event. */
@Subscribe
public void installFinished(InstallEvent.Finished finished) {
  installFinished = finished;
}

/** Captures the command finish event. */
@Subscribe
public void commandFinished(CommandEvent.Finished event) {
  commandFinished = event;
}
/**
 * A method to print the line responsible to show how our remote cache upload goes.
 *
 * @return the line
 */
protected String renderRemoteUploads() {
  NetworkStatsTracker.RemoteArtifactUploadStats uploadStats =
      networkStatsTracker.getRemoteArtifactUploadStats();
  String humanReadableBytesUploaded =
      convertToAllCapsIfNeeded(
          SizeUnit.toHumanReadableString(
              SizeUnit.getHumanReadableSize(uploadStats.getTotalBytes(), SizeUnit.BYTES), locale));
  int scheduled = uploadStats.getScheduled();
  int complete = uploadStats.getUploaded();
  int failed = uploadStats.getFailed();
  // "uploading" = started but not yet resolved; "pending" = not yet started.
  int uploading = uploadStats.getStarted() - (complete + failed);
  int pending = scheduled - (uploading + complete + failed);
  return scheduled > 0
      ? String.format(
          "%s (%d COMPLETE/%d FAILED/%d UPLOADING/%d PENDING)",
          humanReadableBytesUploaded, complete, failed, uploading, pending)
      : humanReadableBytesUploaded;
}
/**
 * Appends the "top slow rules" report to {@code lines} (when enabled) and logs it once.
 *
 * <p>No-op unless slow-rule reporting is enabled and the build has finished.
 *
 * @param lines builder for console output lines
 */
void showTopSlowBuildRules(ImmutableList.Builder<String> lines) {
if (numberOfSlowRulesToShow == 0 || buildFinished == null) {
return;
}
// Sort descending by elapsed time. Comparing directly with Long.compare avoids the
// overflow hazard of returning a subtraction-based delta.
Comparator<UnflavoredBuildTarget> comparator =
(target1, target2) -> {
long elapsedTime1 = Objects.requireNonNull(timeSpentMillisecondsInRules.get(target1));
long elapsedTime2 = Objects.requireNonNull(timeSpentMillisecondsInRules.get(target2));
return Long.compare(elapsedTime2, elapsedTime1);
};
ImmutableList.Builder<String> slowRulesLogsBuilder = ImmutableList.builder();
slowRulesLogsBuilder.add("");
synchronized (timeSpentMillisecondsInRules) {
if (timeSpentMillisecondsInRules.isEmpty()) {
slowRulesLogsBuilder.add("Top slow rules: Buck didn't spend time in rules.");
} else {
slowRulesLogsBuilder.add("Top slow rules");
Stream<UnflavoredBuildTarget> keys =
timeSpentMillisecondsInRules.keySet().stream().sorted(comparator);
keys.limit(numberOfSlowRulesToShow)
.forEachOrdered(
target -> {
// Single lookup replaces the previous containsKey + get double lookup.
Long elapsed = timeSpentMillisecondsInRules.get(target);
if (elapsed != null) {
slowRulesLogsBuilder.add(
String.format(
" %s: %s",
target, formatElapsedTime(elapsed)));
}
});
}
}
ImmutableList<String> slowRulesLogs = slowRulesLogsBuilder.build();
// Always log the report; only show it on the console when configured to do so.
logTopSlowBuildRulesIfNotLogged(slowRulesLogs);
if (showSlowRulesInConsole) {
lines.addAll(slowRulesLogs);
}
}
// Logs the slow-rules report exactly once; the atomic compareAndSet guards against
// concurrent or repeated invocations.
private void logTopSlowBuildRulesIfNotLogged(ImmutableList<String> slowRulesLogs) {
if (topSlowestRulesLogged.compareAndSet(false, true)) {
LOG.info(String.join(System.lineSeparator(), slowRulesLogs));
}
}
@Override
public void close() throws IOException {
// Release the progress estimator's resources, if one was configured.
progressEstimator.ifPresent(ProgressEstimator::close);
}
}
| |
/************************************************************************************************
* _________ _ ____ _ __ __ _ _ _ _ _ ___
* |__ / ___|__ _ ___| |__ / ___|_ _(_)_ __ __ \ \ / /_ _| | | ___| |_| | | |_ _|
* / / | / _` / __| '_ \\___ \ \ /\ / / | '_ \ / _` \ \ /\ / / _` | | |/ _ \ __| | | || |
* / /| |__| (_| \__ \ | | |___) \ V V /| | | | | (_| |\ V V / (_| | | | __/ |_| |_| || |
* /____\____\__,_|___/_| |_|____/ \_/\_/ |_|_| |_|\__, | \_/\_/ \__,_|_|_|\___|\__|\___/|___|
* |___/
*
* Copyright (c) 2016 Ivan Vaklinov <ivan@vaklinov.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
**********************************************************************************/
package com.vaklinov.zcashui;
import java.awt.BorderLayout;
import java.awt.FlowLayout;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.UnsupportedEncodingException;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import javax.swing.JTextArea;
import javax.swing.border.EtchedBorder;
/**
* Typical about box stuff...
*
* @author Ivan Vaklinov <ivan@vaklinov.com>
*/
public class AboutDialog
extends JDialog
{
/**
 * Builds the modal "About..." dialog with two tabs: "About" (copyright text plus a
 * donation-address panel with a copy-to-clipboard button) and "License" (MIT license text),
 * plus a Close button at the bottom.
 *
 * @param parent frame the dialog is positioned relative to
 * @throws UnsupportedEncodingException if UTF-8 is unsupported (never on a conforming JVM)
 */
public AboutDialog(JFrame parent)
throws UnsupportedEncodingException
{
// Basic window setup: fixed size, centered on the parent, modal, disposed on close.
this.setTitle("About...");
this.setSize(600, 440);
this.setLocation(100, 100);
this.setLocationRelativeTo(parent);
this.setModal(true);
this.setDefaultCloseOperation(DISPOSE_ON_CLOSE);
JTabbedPane tabs = new JTabbedPane();
// "About" tab: copyright label on top, donation panel in the center.
JPanel copyrigthPanel = new JPanel();
copyrigthPanel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
copyrigthPanel.setLayout(new BorderLayout(3, 3));
JLabel copyrightLabel = new JLabel();
// The label text is an HTML fragment stored as raw UTF-8 bytes (copyright notice,
// disclaimer and acknowledgements). NOTE(review): presumably encoded as bytes to deter
// casual edits of the embedded text — verify intent with the original author.
copyrightLabel.setText(new String(new byte[] {
(byte)0x3c,(byte)0x68,(byte)0x74,(byte)0x6d,(byte)0x6c,(byte)0x3e,(byte)0x3c,(byte)0x62,(byte)0x6f,(byte)0x64,
(byte)0x79,(byte)0x3e,(byte)0x3c,(byte)0x73,(byte)0x70,(byte)0x61,(byte)0x6e,(byte)0x20,(byte)0x73,(byte)0x74,
(byte)0x79,(byte)0x6c,(byte)0x65,(byte)0x3d,(byte)0x22,(byte)0x66,(byte)0x6f,(byte)0x6e,(byte)0x74,(byte)0x2d,
(byte)0x77,(byte)0x65,(byte)0x69,(byte)0x67,(byte)0x68,(byte)0x74,(byte)0x3a,(byte)0x62,(byte)0x6f,(byte)0x6c,
(byte)0x64,(byte)0x22,(byte)0x3e,(byte)0x5a,(byte)0x65,(byte)0x6e,(byte)0x20,(byte)0x43,(byte)0x61,(byte)0x73,
(byte)0x68,(byte)0x20,(byte)0x53,(byte)0x77,(byte)0x69,(byte)0x6e,(byte)0x67,(byte)0x20,(byte)0x57,(byte)0x61,
(byte)0x6c,(byte)0x6c,(byte)0x65,(byte)0x74,(byte)0x20,(byte)0x55,(byte)0x49,(byte)0x3c,(byte)0x2f,(byte)0x73,
(byte)0x70,(byte)0x61,(byte)0x6e,(byte)0x3e,(byte)0x3c,(byte)0x62,(byte)0x72,(byte)0x2f,(byte)0x3e,(byte)0x3c,
(byte)0x62,(byte)0x72,(byte)0x2f,(byte)0x3e,(byte)0x43,(byte)0x6f,(byte)0x70,(byte)0x79,(byte)0x72,(byte)0x69,
(byte)0x67,(byte)0x68,(byte)0x74,(byte)0x3a,(byte)0x20,(byte)0x49,(byte)0x76,(byte)0x61,(byte)0x6e,(byte)0x20,
(byte)0x56,(byte)0x61,(byte)0x6b,(byte)0x6c,(byte)0x69,(byte)0x6e,(byte)0x6f,(byte)0x76,(byte)0x20,(byte)0x26,
(byte)0x6c,(byte)0x74,(byte)0x3b,(byte)0x69,(byte)0x76,(byte)0x61,(byte)0x6e,(byte)0x40,(byte)0x76,(byte)0x61,
(byte)0x6b,(byte)0x6c,(byte)0x69,(byte)0x6e,(byte)0x6f,(byte)0x76,(byte)0x2e,(byte)0x63,(byte)0x6f,(byte)0x6d,
(byte)0x26,(byte)0x67,(byte)0x74,(byte)0x3b,(byte)0x3c,(byte)0x62,(byte)0x72,(byte)0x2f,(byte)0x3e,(byte)0x3c,
(byte)0x62,(byte)0x72,(byte)0x2f,(byte)0x3e,(byte)0x54,(byte)0x68,(byte)0x69,(byte)0x73,(byte)0x20,(byte)0x70,
(byte)0x72,(byte)0x6f,(byte)0x67,(byte)0x72,(byte)0x61,(byte)0x6d,(byte)0x20,(byte)0x69,(byte)0x73,(byte)0x20,
(byte)0x69,(byte)0x6e,(byte)0x74,(byte)0x65,(byte)0x6e,(byte)0x64,(byte)0x65,(byte)0x64,(byte)0x20,(byte)0x74,
(byte)0x6f,(byte)0x20,(byte)0x6d,(byte)0x61,(byte)0x6b,(byte)0x65,(byte)0x20,(byte)0x69,(byte)0x74,(byte)0x20,
(byte)0x65,(byte)0x61,(byte)0x73,(byte)0x79,(byte)0x20,(byte)0x74,(byte)0x6f,(byte)0x20,(byte)0x77,(byte)0x6f,
(byte)0x72,(byte)0x6b,(byte)0x20,(byte)0x77,(byte)0x69,(byte)0x74,(byte)0x68,(byte)0x20,(byte)0x74,(byte)0x68,
(byte)0x65,(byte)0x20,(byte)0x5a,(byte)0x65,(byte)0x6e,(byte)0x20,(byte)0x43,(byte)0x61,(byte)0x73,
(byte)0x68,(byte)0x20,
(byte)0x20,(byte)0x63,(byte)0x6c,(byte)0x69,(byte)0x65,(byte)0x6e,(byte)0x74,(byte)0x20,(byte)0x74,(byte)0x6f,
(byte)0x6f,(byte)0x6c,(byte)0x73,(byte)0x20,(byte)0x62,(byte)0x79,(byte)0x20,(byte)0x70,(byte)0x72,(byte)0x6f,
(byte)0x76,(byte)0x69,(byte)0x64,(byte)0x69,(byte)0x6e,(byte)0x67,(byte)0x20,(byte)0x61,(byte)0x20,(byte)0x47,
(byte)0x72,(byte)0x61,(byte)0x70,(byte)0x68,(byte)0x69,(byte)0x63,(byte)0x61,(byte)0x6c,(byte)0x20,(byte)0x55,
(byte)0x73,(byte)0x65,(byte)0x72,(byte)0x20,(byte)0x49,(byte)0x6e,(byte)0x74,(byte)0x65,(byte)0x72,(byte)0x66,
(byte)0x61,(byte)0x63,(byte)0x65,(byte)0x20,(byte)0x28,(byte)0x47,(byte)0x55,(byte)0x49,(byte)0x29,(byte)0x20,
(byte)0x74,(byte)0x68,(byte)0x61,(byte)0x74,(byte)0x20,(byte)0x61,(byte)0x63,(byte)0x74,(byte)0x73,(byte)0x20,
(byte)0x61,(byte)0x73,(byte)0x20,(byte)0x61,(byte)0x20,(byte)0x77,(byte)0x72,(byte)0x61,(byte)0x70,(byte)0x70,
(byte)0x65,(byte)0x72,(byte)0x20,(byte)0x61,(byte)0x6e,(byte)0x64,(byte)0x20,(byte)0x70,(byte)0x72,(byte)0x65,
(byte)0x73,(byte)0x65,(byte)0x6e,(byte)0x74,(byte)0x73,(byte)0x20,(byte)0x74,(byte)0x68,(byte)0x65,(byte)0x20,
(byte)0x69,(byte)0x6e,(byte)0x66,(byte)0x6f,(byte)0x72,(byte)0x6d,(byte)0x61,(byte)0x74,(byte)0x69,(byte)0x6f,
(byte)0x6e,(byte)0x20,(byte)0x69,(byte)0x6e,(byte)0x20,(byte)0x61,(byte)0x20,(byte)0x75,(byte)0x73,(byte)0x65,
(byte)0x72,(byte)0x2d,(byte)0x66,(byte)0x72,(byte)0x69,(byte)0x65,(byte)0x6e,(byte)0x64,(byte)0x6c,(byte)0x79,
(byte)0x20,(byte)0x6d,(byte)0x61,(byte)0x6e,(byte)0x6e,(byte)0x65,(byte)0x72,(byte)0x2e,(byte)0x3c,(byte)0x62,
(byte)0x72,(byte)0x2f,(byte)0x3e,(byte)0x3c,(byte)0x62,(byte)0x72,(byte)0x2f,(byte)0x3e,(byte)0x3c,(byte)0x73,
(byte)0x70,(byte)0x61,(byte)0x6e,(byte)0x20,(byte)0x73,(byte)0x74,(byte)0x79,(byte)0x6c,(byte)0x65,(byte)0x3d,
(byte)0x22,(byte)0x66,(byte)0x6f,(byte)0x6e,(byte)0x74,(byte)0x2d,(byte)0x77,(byte)0x65,(byte)0x69,(byte)0x67,
(byte)0x68,(byte)0x74,(byte)0x3a,(byte)0x62,(byte)0x6f,(byte)0x6c,(byte)0x64,(byte)0x22,(byte)0x3e,(byte)0x44,
(byte)0x69,(byte)0x73,(byte)0x63,(byte)0x6c,(byte)0x61,(byte)0x69,(byte)0x6d,(byte)0x65,(byte)0x72,(byte)0x3a,
(byte)0x3c,(byte)0x2f,(byte)0x73,(byte)0x70,(byte)0x61,(byte)0x6e,(byte)0x3e,(byte)0x20,(byte)0x74,(byte)0x68,
(byte)0x69,(byte)0x73,(byte)0x20,(byte)0x70,(byte)0x72,(byte)0x6f,(byte)0x67,(byte)0x72,(byte)0x61,(byte)0x6d,
(byte)0x20,(byte)0x69,(byte)0x73,(byte)0x20,(byte)0x6e,(byte)0x6f,(byte)0x74,(byte)0x20,(byte)0x6f,(byte)0x66,
(byte)0x66,(byte)0x69,(byte)0x63,(byte)0x69,(byte)0x61,(byte)0x6c,(byte)0x6c,(byte)0x79,(byte)0x20,(byte)0x65,
(byte)0x6e,(byte)0x64,(byte)0x6f,(byte)0x72,(byte)0x73,(byte)0x65,(byte)0x64,(byte)0x20,(byte)0x62,(byte)0x79,
(byte)0x20,(byte)0x6f,(byte)0x72,(byte)0x20,(byte)0x61,(byte)0x73,(byte)0x73,(byte)0x6f,(byte)0x63,(byte)0x69,
(byte)0x61,(byte)0x74,(byte)0x65,(byte)0x64,(byte)0x20,(byte)0x77,(byte)0x69,(byte)0x74,(byte)0x68,(byte)0x20,
(byte)0x74,(byte)0x68,(byte)0x65,(byte)0x20,(byte)0x5a,(byte)0x43,(byte)0x61,(byte)0x73,(byte)0x68,(byte)0x20,
(byte)0x70,(byte)0x72,(byte)0x6f,(byte)0x6a,(byte)0x65,(byte)0x63,(byte)0x74,(byte)0x20,(byte)0x61,(byte)0x6e,
(byte)0x64,(byte)0x20,(byte)0x74,(byte)0x68,(byte)0x65,(byte)0x20,(byte)0x5a,(byte)0x43,(byte)0x61,(byte)0x73,
(byte)0x68,(byte)0x20,(byte)0x63,(byte)0x6f,(byte)0x6d,(byte)0x70,(byte)0x61,(byte)0x6e,(byte)0x79,(byte)0x2e,
(byte)0x3c,(byte)0x62,(byte)0x72,(byte)0x2f,(byte)0x3e,(byte)0x3c,(byte)0x62,(byte)0x72,(byte)0x2f,(byte)0x3e,
(byte)0x41,(byte)0x63,(byte)0x6b,(byte)0x6e,(byte)0x6f,(byte)0x77,(byte)0x6c,(byte)0x65,(byte)0x64,(byte)0x67,
(byte)0x65,(byte)0x6d,(byte)0x65,(byte)0x6e,(byte)0x74,(byte)0x73,(byte)0x3a,(byte)0x20,(byte)0x54,(byte)0x68,
(byte)0x69,(byte)0x73,(byte)0x20,(byte)0x70,(byte)0x72,(byte)0x6f,(byte)0x67,(byte)0x72,(byte)0x61,(byte)0x6d,
(byte)0x20,(byte)0x69,(byte)0x6e,(byte)0x63,(byte)0x6c,(byte)0x75,(byte)0x64,(byte)0x65,(byte)0x73,(byte)0x20,
(byte)0x73,(byte)0x6f,(byte)0x66,(byte)0x74,(byte)0x77,(byte)0x61,(byte)0x72,(byte)0x65,(byte)0x20,(byte)0x66,
(byte)0x6f,(byte)0x72,(byte)0x20,(byte)0x4a,(byte)0x53,(byte)0x4f,(byte)0x4e,(byte)0x20,(byte)0x70,(byte)0x72,
(byte)0x6f,(byte)0x63,(byte)0x65,(byte)0x73,(byte)0x73,(byte)0x69,(byte)0x6e,(byte)0x67,(byte)0x20,(byte)0x28,
(byte)0x68,(byte)0x74,(byte)0x74,(byte)0x70,(byte)0x73,(byte)0x3a,(byte)0x2f,(byte)0x2f,(byte)0x67,(byte)0x69,
(byte)0x74,(byte)0x68,(byte)0x75,(byte)0x62,(byte)0x2e,(byte)0x63,(byte)0x6f,(byte)0x6d,(byte)0x2f,(byte)0x72,
(byte)0x61,(byte)0x6c,(byte)0x66,(byte)0x73,(byte)0x74,(byte)0x78,(byte)0x2f,(byte)0x6d,(byte)0x69,(byte)0x6e,
(byte)0x69,(byte)0x6d,(byte)0x61,(byte)0x6c,(byte)0x2d,(byte)0x6a,(byte)0x73,(byte)0x6f,(byte)0x6e,(byte)0x29,
(byte)0x20,(byte)0x74,(byte)0x68,(byte)0x61,(byte)0x74,(byte)0x20,(byte)0x69,(byte)0x73,(byte)0x20,(byte)0x43,
(byte)0x6f,(byte)0x70,(byte)0x79,(byte)0x72,(byte)0x69,(byte)0x67,(byte)0x68,(byte)0x74,(byte)0x20,(byte)0x28,
(byte)0x63,(byte)0x29,(byte)0x20,(byte)0x32,(byte)0x30,(byte)0x31,(byte)0x35,(byte)0x2c,(byte)0x20,(byte)0x32,
(byte)0x30,(byte)0x31,(byte)0x36,(byte)0x20,(byte)0x45,(byte)0x63,(byte)0x6c,(byte)0x69,(byte)0x70,(byte)0x73,
(byte)0x65,(byte)0x53,(byte)0x6f,(byte)0x75,(byte)0x72,(byte)0x63,(byte)0x65,(byte)0x2e,(byte)0x3c,(byte)0x62,
(byte)0x72,(byte)0x2f,(byte)0x3e,(byte)0x3c,(byte)0x62,(byte)0x72,(byte)0x2f,(byte)0x3e,(byte)0x3c,(byte)0x2f,
(byte)0x62,(byte)0x6f,(byte)0x64,(byte)0x79,(byte)0x3e,(byte)0x3c,(byte)0x2f,(byte)0x68,(byte)0x74,(byte)0x6d,
(byte)0x6c,(byte)0x3e
}, "UTF-8")
);
copyrightLabel.setBorder(BorderFactory.createEtchedBorder(EtchedBorder.LOWERED));
copyrigthPanel.add(copyrightLabel, BorderLayout.NORTH);
// Donation panel: header label (PD), address text area + copy button (PD2/PD3).
JPanel PD = new JPanel();
PD.setLayout(new BorderLayout(3, 3));
PD.setBorder(BorderFactory.createEtchedBorder(EtchedBorder.LOWERED));
// HTML header text ("Donations accepted: ...") stored as UTF-8 bytes like the text above.
JLabel l1 = new JLabel(new String(new byte[] {
(byte)0x3c,(byte)0x68,(byte)0x74,(byte)0x6d,(byte)0x6c,(byte)0x3e,(byte)0x3c,(byte)0x62,(byte)0x6f,(byte)0x64,
(byte)0x79,(byte)0x3e,(byte)0x3c,(byte)0x73,(byte)0x70,(byte)0x61,(byte)0x6e,(byte)0x20,(byte)0x73,(byte)0x74,
(byte)0x79,(byte)0x6c,(byte)0x65,(byte)0x3d,(byte)0x22,(byte)0x66,(byte)0x6f,(byte)0x6e,(byte)0x74,(byte)0x2d,
(byte)0x77,(byte)0x65,(byte)0x69,(byte)0x67,(byte)0x68,(byte)0x74,(byte)0x3a,(byte)0x62,(byte)0x6f,(byte)0x6c,
(byte)0x64,(byte)0x22,(byte)0x3e,(byte)0x44,(byte)0x6f,(byte)0x6e,(byte)0x61,(byte)0x74,(byte)0x69,(byte)0x6f,
(byte)0x6e,(byte)0x73,(byte)0x20,(byte)0x61,(byte)0x63,(byte)0x63,(byte)0x65,(byte)0x70,(byte)0x74,(byte)0x65,
(byte)0x64,(byte)0x3a,(byte)0x3c,(byte)0x2f,(byte)0x73,(byte)0x70,(byte)0x61,(byte)0x6e,(byte)0x3e,(byte)0x20,
(byte)0x54,(byte)0x68,(byte)0x69,(byte)0x73,(byte)0x20,(byte)0x5a,(byte)0x65,(byte)0x6e,(byte)0x20,(byte)0x43,(byte)0x61,(byte)0x73,
(byte)0x68,(byte)0x20,(byte)0x47,(byte)0x55,(byte)0x49,(byte)0x20,(byte)0x77,(byte)0x61,
(byte)0x6c,(byte)0x6c,(byte)0x65,(byte)0x74,(byte)0x20,(byte)0x68,(byte)0x61,(byte)0x73,(byte)0x20,(byte)0x62,
(byte)0x65,(byte)0x65,(byte)0x6e,(byte)0x20,(byte)0x62,(byte)0x72,(byte)0x6f,(byte)0x75,(byte)0x67,(byte)0x68,
(byte)0x74,(byte)0x20,(byte)0x74,(byte)0x6f,(byte)0x20,(byte)0x79,(byte)0x6f,(byte)0x75,(byte)0x20,(byte)0x74,
(byte)0x68,(byte)0x6f,(byte)0x75,(byte)0x72,(byte)0x67,(byte)0x68,(byte)0x20,(byte)0x74,(byte)0x68,(byte)0x65,
(byte)0x20,(byte)0x65,(byte)0x66,(byte)0x66,(byte)0x6f,(byte)0x72,(byte)0x74,(byte)0x73,(byte)0x20,(byte)0x6f,
(byte)0x66,(byte)0x20,(byte)0x63,(byte)0x6f,(byte)0x6d,(byte)0x6d,(byte)0x75,(byte)0x6e,(byte)0x69,(byte)0x74,
(byte)0x79,(byte)0x20,(byte)0x76,(byte)0x6f,(byte)0x6c,(byte)0x75,(byte)0x6e,(byte)0x74,(byte)0x65,(byte)0x65,
(byte)0x72,(byte)0x73,(byte)0x2e,(byte)0x20,(byte)0x49,(byte)0x66,(byte)0x20,(byte)0x79,(byte)0x6f,(byte)0x75,
(byte)0x20,(byte)0x66,(byte)0x69,(byte)0x6e,(byte)0x64,(byte)0x20,(byte)0x69,(byte)0x74,(byte)0x20,(byte)0x75,
(byte)0x73,(byte)0x65,(byte)0x66,(byte)0x75,(byte)0x6c,(byte)0x20,(byte)0x70,(byte)0x6c,(byte)0x65,(byte)0x61,
(byte)0x73,(byte)0x65,(byte)0x20,(byte)0x63,(byte)0x6f,(byte)0x6e,(byte)0x73,(byte)0x69,(byte)0x64,(byte)0x65,
(byte)0x72,(byte)0x20,(byte)0x6d,(byte)0x61,(byte)0x6b,(byte)0x69,(byte)0x6e,(byte)0x67,(byte)0x20,(byte)0x61,
(byte)0x20,(byte)0x64,(byte)0x6f,(byte)0x6e,(byte)0x61,(byte)0x74,(byte)0x69,(byte)0x6f,(byte)0x6e,(byte)0x20,
(byte)0x66,(byte)0x6f,(byte)0x72,(byte)0x20,(byte)0x69,(byte)0x74,(byte)0x73,(byte)0x20,(byte)0x66,(byte)0x75,
(byte)0x72,(byte)0x74,(byte)0x68,(byte)0x65,(byte)0x72,(byte)0x20,(byte)0x64,(byte)0x65,(byte)0x76,(byte)0x65,
(byte)0x6c,(byte)0x6f,(byte)0x70,(byte)0x6d,(byte)0x65,(byte)0x6e,(byte)0x74,(byte)0x2e,(byte)0x20,(byte)0x44,
(byte)0x6f,(byte)0x6e,(byte)0x61,(byte)0x74,(byte)0x69,(byte)0x6f,(byte)0x6e,(byte)0x73,(byte)0x20,(byte)0x6f,
(byte)0x66,(byte)0x20,(byte)0x3c,(byte)0x73,(byte)0x70,(byte)0x61,(byte)0x6e,(byte)0x20,(byte)0x73,(byte)0x74,
(byte)0x79,(byte)0x6c,(byte)0x65,(byte)0x3d,(byte)0x22,(byte)0x66,(byte)0x6f,(byte)0x6e,(byte)0x74,(byte)0x2d,
(byte)0x77,(byte)0x65,(byte)0x69,(byte)0x67,(byte)0x68,(byte)0x74,(byte)0x3a,(byte)0x62,(byte)0x6f,(byte)0x6c,
(byte)0x64,(byte)0x22,(byte)0x3e,(byte)0x61,(byte)0x6e,(byte)0x79,(byte)0x20,(byte)0x73,(byte)0x69,(byte)0x7a,
(byte)0x65,(byte)0x3c,(byte)0x2f,(byte)0x73,(byte)0x70,(byte)0x61,(byte)0x6e,(byte)0x3e,(byte)0x20,(byte)0x61,
(byte)0x72,(byte)0x65,(byte)0x20,(byte)0x61,(byte)0x63,(byte)0x63,(byte)0x65,(byte)0x70,(byte)0x74,(byte)0x65,
(byte)0x64,(byte)0x20,(byte)0x74,(byte)0x6f,(byte)0x20,(byte)0x74,(byte)0x68,(byte)0x65,(byte)0x20,(byte)0x66,
(byte)0x6f,(byte)0x6c,(byte)0x6c,(byte)0x6f,(byte)0x77,(byte)0x69,(byte)0x6e,(byte)0x67,(byte)0x20,(byte)0x5a,
(byte)0x65,(byte)0x6e,(byte)0x43,(byte)0x61,(byte)0x73,(byte)0x68,(byte)0x20,(byte)0x61,(byte)0x64,
(byte)0x64,(byte)0x72,(byte)0x65,(byte)0x73,(byte)0x73,(byte)0x3a,(byte)0x3c,(byte)0x62,(byte)0x72,(byte)0x2f,
(byte)0x3e,(byte)0x3c,(byte)0x2f,(byte)0x62,(byte)0x6f,(byte)0x64,(byte)0x79,(byte)0x3e,(byte)0x3c,(byte)0x2f,
(byte)0x68,(byte)0x74,(byte)0x6d,(byte)0x6c,(byte)0x3e,
}, "UTF-8")
);
PD.add(l1, BorderLayout.NORTH);
JPanel PD2 = new JPanel();
PD2.setLayout(new BorderLayout(3, 3));
// Read-only text area holding the donation address so the user can select/copy it.
final JTextArea tar = new JTextArea();
tar.setEditable(false);
tar.setLineWrap(true);
// The donation address itself, stored as UTF-8 bytes.
tar.setText(new String(new byte[] {
(byte)0x7a,(byte)0x6e,(byte)0x6d,(byte)0x52,(byte)0x65,(byte)0x4e,(byte)0x5a,(byte)0x4a,(byte)0x4b,(byte)0x45,
(byte)0x34,(byte)0x76,(byte)0x53,(byte)0x70,(byte)0x79,(byte)0x5a,(byte)0x6a,(byte)0x4c,(byte)0x79,(byte)0x45,
(byte)0x68,(byte)0x72,(byte)0x39,(byte)0x41,(byte)0x50,(byte)0x33,(byte)0x79,(byte)0x6a,(byte)0x34,(byte)0x56,
(byte)0x56,(byte)0x7a,(byte)0x79,(byte)0x72,(byte)0x6a
}, "UTF-8")
);
PD2.add(tar, BorderLayout.CENTER);
JPanel PD3 = new JPanel(new FlowLayout(FlowLayout.LEFT, 0, 0));
// "Copy address to clipboard" button; the label is HTML stored as UTF-8 bytes.
final JButton jb1 = new JButton(new String(new byte[] {
(byte)0x3c,(byte)0x68,(byte)0x74,(byte)0x6d,(byte)0x6c,(byte)0x3e,(byte)0x3c,(byte)0x62,(byte)0x6f,(byte)0x64,
(byte)0x79,(byte)0x3e,(byte)0x3c,(byte)0x73,(byte)0x70,(byte)0x61,(byte)0x6e,(byte)0x20,(byte)0x73,(byte)0x74,
(byte)0x79,(byte)0x6c,(byte)0x65,(byte)0x3d,(byte)0x22,(byte)0x66,(byte)0x6f,(byte)0x6e,(byte)0x74,(byte)0x2d,
(byte)0x73,(byte)0x69,(byte)0x7a,(byte)0x65,(byte)0x3a,(byte)0x38,(byte)0x70,(byte)0x78,(byte)0x3b,(byte)0x66,
(byte)0x6f,(byte)0x6e,(byte)0x74,(byte)0x2d,(byte)0x77,(byte)0x65,(byte)0x69,(byte)0x67,(byte)0x68,(byte)0x74,
(byte)0x3a,(byte)0x62,(byte)0x6f,(byte)0x6c,(byte)0x64,(byte)0x22,(byte)0x3e,(byte)0x43,(byte)0x6f,(byte)0x70,
(byte)0x79,(byte)0x20,(byte)0x61,(byte)0x64,(byte)0x64,(byte)0x72,(byte)0x65,(byte)0x73,(byte)0x73,(byte)0x3c,
(byte)0x62,(byte)0x72,(byte)0x2f,(byte)0x3e,(byte)0x74,(byte)0x6f,(byte)0x20,(byte)0x63,(byte)0x6c,(byte)0x69,
(byte)0x70,(byte)0x62,(byte)0x6f,(byte)0x61,(byte)0x72,(byte)0x64,(byte)0x3c,(byte)0x2f,(byte)0x73,(byte)0x70,
(byte)0x61,(byte)0x6e,(byte)0x3e,(byte)0x3c,(byte)0x2f,(byte)0x68,(byte)0x74,(byte)0x6d,(byte)0x6c,(byte)0x3e,
(byte)0x3c,(byte)0x2f,(byte)0x62,(byte)0x6f,(byte)0x64,(byte)0x79,(byte)0x3e,
}, "UTF-8"));
PD3.add(jb1);
// Copies the donation address from the text area into the system clipboard.
jb1.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
clipboard.setContents(new StringSelection(tar.getText()), null);
}
});
PD2.add(PD3, BorderLayout.EAST);
PD.add(PD2, BorderLayout.CENTER);
copyrigthPanel.add(PD, BorderLayout.CENTER);
tabs.add("About", copyrigthPanel);
// "License" tab: MIT license text rendered as preformatted HTML.
JPanel licensePanel = new JPanel();
licensePanel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
licensePanel.setLayout(new BorderLayout(3, 3));
JLabel licenseLabel = new JLabel();
licenseLabel.setText(
"<html><body><pre>" +
" Copyright (c) 2016 Ivan Vaklinov <ivan@vaklinov.com> \n" +
"\n" +
" Permission is hereby granted, free of charge, to any person obtaining a copy\n" +
" of this software and associated documentation files (the \"Software\"), to deal\n" +
" in the Software without restriction, including without limitation the rights\n" +
" to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n" +
" copies of the Software, and to permit persons to whom the Software is\n" +
" furnished to do so, subject to the following conditions:\n" +
" \n" +
" The above copyright notice and this permission notice shall be included in\n" +
" all copies or substantial portions of the Software.\n" +
" \n" +
" THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n" +
" IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n" +
" FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n" +
" AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n" +
" LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n" +
" OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n" +
" THE SOFTWARE. \n" +
"</pre></body></html>");
licenseLabel.setBorder(BorderFactory.createEtchedBorder(EtchedBorder.LOWERED));
licensePanel.add(licenseLabel, BorderLayout.NORTH);
tabs.add("License", licensePanel);
this.getContentPane().setLayout(new BorderLayout(0, 0));
this.getContentPane().add(tabs, BorderLayout.NORTH);
// Bottom bar with a Close button that hides and disposes the dialog.
JPanel closePanel = new JPanel();
closePanel.setLayout(new FlowLayout(FlowLayout.CENTER, 3, 3));
JButton closeButon = new JButton("Close");
closePanel.add(closeButon);
this.getContentPane().add(closePanel, BorderLayout.SOUTH);
closeButon.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent e)
{
AboutDialog.this.setVisible(false);
AboutDialog.this.dispose();
}
});
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This is not the original file distributed by the Apache Software Foundation
* It has been modified by the Hipparchus project
*/
package org.hipparchus.linear;
import java.util.Random;
import org.hipparchus.exception.MathIllegalArgumentException;
import org.hipparchus.util.Precision;
import org.junit.Assert;
import org.junit.Test;
/** Tests for the {@code DecompositionSolver} obtained from {@code EigenDecomposition}. */
public class EigenSolverTest {
// Singular 4x4 matrix used to exercise the non-invertible code paths.
private double[][] bigSingular = {
{ 1.0, 2.0, 3.0, 4.0 },
{ 2.0, 5.0, 3.0, 4.0 },
{ 7.0, 3.0, 256.0, 1930.0 },
{ 3.0, 7.0, 6.0, 8.0 }
}; // 4th row = 1st + 2nd
/** test non invertible matrix */
@Test
public void testNonInvertible() {
// Fixed seed for reproducibility; 'L' suffix (not lowercase 'l', which reads as '1').
Random r = new Random(9994100315209L);
// A zero eigenvalue makes the matrix singular, so the solver must report it.
RealMatrix m =
EigenDecompositionTest.createTestMatrix(r, new double[] { 1.0, 0.0, -1.0, -2.0, -3.0 });
DecompositionSolver es = new EigenDecomposition(m).getSolver();
Assert.assertFalse(es.isNonSingular());
try {
es.getInverse();
Assert.fail("an exception should have been thrown");
} catch (MathIllegalArgumentException ime) {
// expected behavior
}
}
/** test invertible matrix */
@Test
public void testInvertible() {
Random r = new Random(9994100315209L);
// All eigenvalues non-zero, so the matrix is invertible.
RealMatrix m =
EigenDecompositionTest.createTestMatrix(r, new double[] { 1.0, 0.5, -1.0, -2.0, -3.0 });
DecompositionSolver es = new EigenDecomposition(m).getSolver();
Assert.assertTrue(es.isNonSingular());
RealMatrix inverse = es.getInverse();
// m * m^-1 should be the identity, up to numerical noise.
RealMatrix error =
m.multiply(inverse).subtract(MatrixUtils.createRealIdentityMatrix(m.getRowDimension()));
Assert.assertEquals(0, error.getNorm1(), 4.0e-15);
}
/**
 * Verifies operation on very small values.
 * Matrix with eigenvalues {8e-100, -1e-100, -1e-100}
 */
@Test
public void testInvertibleTinyValues() {
final double tiny = 1e-100;
RealMatrix m = MatrixUtils.createRealMatrix(new double[][] {
{3, 2, 4},
{2, 0, 2},
{4, 2, 3}
});
m = m.scalarMultiply(tiny);
final EigenDecomposition ed = new EigenDecomposition(m);
RealMatrix inv = ed.getSolver().getInverse();
// Check m * inv entry-by-entry against the identity matrix.
final RealMatrix id = m.multiply(inv);
for (int i = 0; i < m.getRowDimension(); i++) {
for (int j = 0; j < m.getColumnDimension(); j++) {
if (i == j) {
Assert.assertTrue(Precision.equals(1, id.getEntry(i, j), 1e-15));
} else {
Assert.assertTrue(Precision.equals(0, id.getEntry(i, j), 1e-15));
}
}
}
}
/** Regression test: inverting a singular matrix must throw (MATH-1045). */
@Test(expected=MathIllegalArgumentException.class)
public void testNonInvertibleMath1045() {
EigenDecomposition eigen =
new EigenDecomposition(MatrixUtils.createRealMatrix(bigSingular));
eigen.getSolver().getInverse();
}
/** A 1x1 zero matrix is singular; inversion must throw. */
@Test(expected=MathIllegalArgumentException.class)
public void testZeroMatrix() {
EigenDecomposition eigen =
new EigenDecomposition(MatrixUtils.createRealMatrix(new double[][] {{0}}));
eigen.getSolver().getInverse();
}
/** A tiny eigenvalue must still be detected as singular even when out of order. */
@Test
public void testIsNonSingularTinyOutOfOrderEigenvalue() {
final EigenDecomposition eigen
= new EigenDecomposition(MatrixUtils.createRealMatrix(new double[][] {
{ 1e-13, 0 },
{ 1, 1 },
}));
Assert.assertFalse("Singular matrix not detected",
eigen.getSolver().isNonSingular());
}
/** test solve dimension errors */
@Test
public void testSolveDimensionErrors() {
final double[] refValues = new double[] {
2.003, 2.002, 2.001, 1.001, 1.000, 0.001
};
final RealMatrix matrix = EigenDecompositionTest.createTestMatrix(new Random(35992629946426L), refValues);
DecompositionSolver es = new EigenDecomposition(matrix).getSolver();
// b is 2x2 but the matrix is 6x6, so every solve variant must reject it.
RealMatrix b = MatrixUtils.createRealMatrix(new double[2][2]);
try {
es.solve(b);
Assert.fail("an exception should have been thrown");
} catch (MathIllegalArgumentException iae) {
// expected behavior
}
try {
es.solve(b.getColumnVector(0));
Assert.fail("an exception should have been thrown");
} catch (MathIllegalArgumentException iae) {
// expected behavior
}
try {
es.solve(new ArrayRealVectorTest.RealVectorTestImpl(b.getColumn(0)));
Assert.fail("an exception should have been thrown");
} catch (MathIllegalArgumentException iae) {
// expected behavior
}
}
/** test solve */
@Test
public void testSolve() {
RealMatrix m = MatrixUtils.createRealMatrix(new double[][] {
{ 91, 5, 29, 32, 40, 14 },
{ 5, 34, -1, 0, 2, -1 },
{ 29, -1, 12, 9, 21, 8 },
{ 32, 0, 9, 14, 9, 0 },
{ 40, 2, 21, 9, 51, 19 },
{ 14, -1, 8, 0, 19, 14 }
});
DecompositionSolver es = new EigenDecomposition(m).getSolver();
RealMatrix b = MatrixUtils.createRealMatrix(new double[][] {
{ 1561, 269, 188 },
{ 69, -21, 70 },
{ 739, 108, 63 },
{ 324, 86, 59 },
{ 1624, 194, 107 },
{ 796, 69, 36 }
});
// Known exact solution of m * x = b, used as the reference.
RealMatrix xRef = MatrixUtils.createRealMatrix(new double[][] {
{ 1, 2, 1 },
{ 2, -1, 2 },
{ 4, 2, 3 },
{ 8, -1, 0 },
{ 16, 2, 0 },
{ 32, -1, 0 }
});
// using RealMatrix
RealMatrix solution=es.solve(b);
Assert.assertEquals(0, solution.subtract(xRef).getNorm1(), 2.5e-12);
// using RealVector
for (int i = 0; i < b.getColumnDimension(); ++i) {
Assert.assertEquals(0,
es.solve(b.getColumnVector(i)).subtract(xRef.getColumnVector(i)).getNorm(),
2.0e-11);
}
// using RealVector with an alternate implementation
for (int i = 0; i < b.getColumnDimension(); ++i) {
ArrayRealVectorTest.RealVectorTestImpl v =
new ArrayRealVectorTest.RealVectorTestImpl(b.getColumn(i));
Assert.assertEquals(0,
es.solve(v).subtract(xRef.getColumnVector(i)).getNorm(),
2.0e-11);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.raid;
import java.io.File;
import java.io.IOException;
import java.util.Random;
import java.util.zip.CRC32;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.RaidDFSUtil;
import org.apache.hadoop.hdfs.TestRaidDfs;
import junit.framework.TestCase;
public class TestDirectoryRaidEncoder extends TestCase {
// Root directory for test data; overridable via the "test.build.data" system property.
final static String TEST_DIR = new File(System.getProperty("test.build.data",
"build/contrib/raid/test/data")).getAbsolutePath();
final static Log LOG = LogFactory.getLog(
"org.apache.hadoop.raid.TestDirectoryRaidEncoder");
final static int NUM_DATANODES = 3;
static {
// Parity-file-pair caching would leak state between test cases; disable it up front.
ParityFilePair.disableCacheUsedInTestOnly();
}
// Per-test cluster state, populated by mySetup() and torn down by myTearDown().
Configuration conf;
String namenode = null;
MiniDFSCluster dfs = null;
FileSystem fileSys = null;
// Starts a MiniDFSCluster and configures a local (non-map-reduce) RaidNode for the test.
// Must be paired with myTearDown() in a finally block.
private void mySetup() throws Exception {
new File(TEST_DIR).mkdirs(); // Make sure data directory exists
conf = new Configuration();
// Small buffers exercise the encoder/decoder buffering logic.
conf.setInt("raid.encoder.bufsize", 128);
conf.setInt("raid.decoder.bufsize", 128);
// scan all policies once every 5 second
conf.setLong("raid.policy.rescan.interval", 5000);
// Reduce run time for the test.
conf.setInt("dfs.client.max.block.acquire.failures", 1);
conf.setInt("dfs.client.baseTimeWindow.waitOn.BlockMissingException", 10);
// do not use map-reduce cluster for Raiding
conf.set("raid.classname", "org.apache.hadoop.raid.LocalRaidNode");
conf.set("raid.server.address", "localhost:0");
//Don't allow empty file to be raid
conf.setLong(RaidNode.MINIMUM_RAIDABLE_FILESIZE_KEY, 1L);
dfs = new MiniDFSCluster(conf, NUM_DATANODES, true, null);
dfs.waitActive();
fileSys = dfs.getFileSystem();
namenode = fileSys.getUri().toString();
FileSystem.setDefaultUri(conf, namenode);
}
// Registers test codecs (stripe length as given, parity dirs /destraid and /destraidrs,
// directory-raid toggled by isDirRaid) and returns the codec for the requested erasure code.
private Codec loadTestCodecs(String erasureCode, int stripeLength,
boolean isDirRaid) throws Exception {
Utils.loadTestCodecs(conf, stripeLength, stripeLength, 1,
3, "/destraid", "/destraidrs", false, isDirRaid);
return Codec.getCodec(erasureCode);
}
// Shuts down the mini cluster started by mySetup(), if any.
private void myTearDown() throws Exception {
if (dfs != null) { dfs.shutdown(); }
}
// Raids srcPath with the given codec into the codec's parity directory.
// Returns whether a parity file was actually generated.
private boolean doRaid(Configuration conf, FileSystem fileSys,
Path srcPath, Codec codec)
throws IOException {
return RaidNode.doRaid(conf, fileSys.getFileStatus(srcPath),
new Path(codec.parityDirectory), codec,
new RaidNode.Statistics(),
RaidUtils.NULL_PROGRESSABLE, false, 1, 1);
}
// Verifies that directories which must NOT be raided (non-leaf, empty, only empty files,
// too few blocks for a stripe) are rejected and produce no parity files.
public void testAbnormalDirectory() throws Exception {
mySetup();
Codec codec = loadTestCodecs("xor", 4, true);
try {
Path sourceDir = new Path("/user/raid");
Path parityFile = new Path("/destraid/user/raid");
assertTrue(fileSys.mkdirs(sourceDir));
LOG.info("Test non-leaf directory");
// NOTE(review): these assertFalse messages describe the rejected action, not the
// failure condition — slightly misleading if an assertion ever fires.
assertFalse("Couldn't raid non-leaf directory ",
doRaid(conf, fileSys, sourceDir.getParent(), codec));
assertFalse(fileSys.exists(parityFile.getParent()));
LOG.info("Test empty directory");
assertFalse("Couldn't raid empty directory ",
doRaid(conf, fileSys, sourceDir, codec));
assertFalse(fileSys.exists(parityFile));
LOG.info("Test empty file in the directory");
Path emptyFile = new Path(sourceDir, "emptyFile");
// 1 replica, 0 blocks: an empty file is below MINIMUM_RAIDABLE_FILESIZE_KEY.
TestRaidDfs.createTestFile(fileSys, emptyFile, 1, 0, 8192L);
assertTrue(fileSys.exists(emptyFile));
assertFalse("No raidable files in the directory",
doRaid(conf, fileSys, sourceDir, codec));
assertFalse(fileSys.exists(parityFile));
LOG.info("Test not enough blocks in the directory");
Path file1 = new Path(sourceDir, "file1");
Path file2 = new Path(sourceDir, "file2");
// Two one-block files: fewer total blocks than the stripe length of 4.
TestRaidDfs.createTestFile(fileSys, file1, 1, 1, 8192L);
TestRaidDfs.createTestFile(fileSys, file2, 1, 1, 8192L);
LOG.info("Created two files with two blocks in total");
assertTrue(fileSys.exists(file1));
assertTrue(fileSys.exists(file2));
assertFalse("Not enough blocks in the directory",
doRaid(conf, fileSys, sourceDir, codec));
assertFalse(fileSys.exists(parityFile));
} finally {
myTearDown();
}
}
/**
 * Raids a directory containing exactly one file and checks that the resulting
 * directory-level parity is byte-identical (same length and CRC) to the parity
 * produced by file-level raid of that same file.
 *
 * @param blockNum    number of blocks in the test file
 * @param lastPartial whether the last block is only partially filled
 */
private void validateSingleFile(String code, FileSystem fileSys,
    Path sourceDir, int stripeLength, int blockNum, boolean lastPartial)
    throws Exception {
  LOG.info("Test file with " + blockNum + " blocks and " +
    (lastPartial? "partial": "full") + " last block");
  Codec codec = loadTestCodecs(code, stripeLength, true);
  Path parityDir = new Path(codec.parityDirectory);
  // Start from a clean slate: remove leftovers of previous invocations.
  RaidDFSUtil.cleanUp(fileSys, sourceDir);
  RaidDFSUtil.cleanUp(fileSys, parityDir);
  fileSys.mkdirs(sourceDir);

  Path file1 = new Path(sourceDir, "file1");
  if (!lastPartial) {
    TestRaidDfs.createTestFile(fileSys, file1, 2, blockNum, 8192L);
  } else {
    TestRaidDfs.createTestFilePartialLastBlock(fileSys, file1, 2,
      blockNum, 8192L);
  }
  Path parityFile = RaidNode.getOriginalParityFile(parityDir, sourceDir);
  // Do directory level raid
  LOG.info("Create a directory-raid parity file " + parityFile);
  assertTrue("Cannot raid directory " + sourceDir,
    doRaid(conf, fileSys, sourceDir, codec));
  assertEquals("Modification time should be the same",
    fileSys.getFileStatus(sourceDir).getModificationTime(),
    fileSys.getFileStatus(parityFile).getModificationTime());
  assertEquals("Replica num of source file should be reduced to 1",
    fileSys.getFileStatus(file1).getReplication(), 1);
  assertEquals("Replica num of parity file should be reduced to 1",
    fileSys.getFileStatus(parityFile).getReplication(), 1);
  // Remember the directory-raid parity fingerprint for the comparison below.
  long dirCRC = RaidDFSUtil.getCRC(fileSys, parityFile);
  long dirLen = fileSys.getFileStatus(parityFile).getLen();
  // remove the parity dir
  RaidDFSUtil.cleanUp(fileSys, parityDir);
  codec = loadTestCodecs(code, stripeLength, false);
  Path parityFile1 = RaidNode.getOriginalParityFile(parityDir,
    file1);
  LOG.info("Create a file-raid parity file " + parityFile1);
  assertTrue("Cannot raid file " + file1,
    doRaid(conf, fileSys, file1, codec));
  // For a single-file directory, file-level parity must match directory-level parity.
  assertTrue("Parity file doesn't match when the file has " + blockNum +
    " blocks ",
    TestRaidDfs.validateFile(fileSys, parityFile1, dirLen, dirCRC));
}
/**
 * Tests directory-raid of single-file directories for every codec: a two-block
 * file is rejected ("not enough blocks"), then files with fewer blocks than a
 * stripe, exactly one stripe, and more than one stripe — each with full and
 * partial last blocks — are validated against the equivalent file-level parity
 * via {@link #validateSingleFile}.
 */
public void testOneFileDirectory() throws Exception {
  mySetup();
  int stripeLength = 4;
  try {
    for (String code: RaidDFSUtil.codes) {
      LOG.info("testOneFileDirectory: Test code " + code);
      Codec codec = loadTestCodecs(code, stripeLength, true);
      Path sourceDir = new Path("/user/raid", code);
      assertTrue(fileSys.mkdirs(sourceDir));
      Path twoBlockFile = new Path(sourceDir, "twoBlockFile"); // fixed stray ';;'
      LOG.info("Test one file with 2 blocks");
      TestRaidDfs.createTestFile(fileSys, twoBlockFile, 2, 2, 8192L);
      assertTrue(fileSys.exists(twoBlockFile));
      assertFalse("Not enough blocks in the directory",
        RaidNode.doRaid(conf, fileSys.getFileStatus(sourceDir),
          new Path(codec.parityDirectory), codec,
          new RaidNode.Statistics(), RaidUtils.NULL_PROGRESSABLE,
          false, 1, 1));
      fileSys.delete(twoBlockFile, true);

      LOG.info("Test one file with blocks less than one stripe");
      validateSingleFile(code, fileSys, sourceDir, stripeLength, 3,
        false);
      validateSingleFile(code, fileSys, sourceDir, stripeLength, 3,
        true);
      LOG.info("Test one file with one stripe blocks");
      validateSingleFile(code, fileSys, sourceDir, stripeLength,
        stripeLength, false);
      validateSingleFile(code, fileSys, sourceDir, stripeLength,
        stripeLength, true);
      LOG.info("Test one file with more than one stripe blocks");
      validateSingleFile(code, fileSys, sourceDir, stripeLength,
        stripeLength + 2, false);
      validateSingleFile(code, fileSys, sourceDir, stripeLength,
        stripeLength + 2, true);
    }
  } finally {
    myTearDown();
  }
}
/**
 * Overload of {@link #validateMultipleFiles(String, FileSystem, Path, int,
 * long[], long[], long)} for the common case where every file uses the same
 * block size.
 */
private void validateMultipleFiles(String code, FileSystem fileSys,
    Path sourceDir, int stripeLength, long[] fileSizes, long blockSize,
    long singleFileBlockSize) throws Exception {
  long[] uniformBlockSizes = new long[fileSizes.length];
  int idx = 0;
  while (idx < uniformBlockSizes.length) {
    uniformBlockSizes[idx++] = blockSize;
  }
  validateMultipleFiles(code, fileSys, sourceDir, stripeLength, fileSizes,
      uniformBlockSizes, singleFileBlockSize);
}
//
// Creates a single file whose layout mimics a directory of files grouped
// together: file i contributes fileSizes[i] bytes of deterministic random
// data (seeded by seeds[i]) written in chunks of blockSizes[i], with each
// chunk zero-padded up to blockSize so every chunk starts on a block
// boundary. The final partial chunk of the last file is not padded.
// Returns the CRC32 of all bytes written (data plus padding).
//
private long createDirectoryFile(FileSystem fileSys, Path name, int repl,
    long[] fileSizes, long[] blockSizes, int[] seeds,
    long blockSize) throws IOException {
  CRC32 crc = new CRC32();
  assert fileSizes.length == blockSizes.length;
  assert fileSizes.length == seeds.length;
  FSDataOutputStream stm = fileSys.create(name, true,
    fileSys.getConf().getInt("io.file.buffer.size", 4096),
    (short)repl, blockSize);
  // Reusable zero buffer for block padding.
  byte[] zeros = new byte[(int)(blockSize)];
  for (int j = 0; j < zeros.length; j++) {
    zeros[j] = 0;
  }
  // fill random data into file
  for (int i = 0; i < fileSizes.length; i++) {
    assert blockSizes[i] <= blockSize;
    byte[] b = new byte[(int)blockSizes[i]];
    long numBlocks = fileSizes[i] / blockSizes[i];
    // Seeded Random makes the content reproducible for later validation.
    Random rand = new Random(seeds[i]);
    for (int j = 0; j < numBlocks; j++) {
      rand.nextBytes(b);
      stm.write(b);
      crc.update(b);
      // Pad each logical block up to the physical block size.
      int zeroLen = (int)(blockSize - blockSizes[i]);
      stm.write(zeros, 0, zeroLen);
      crc.update(zeros, 0, zeroLen);
    }
    long lastBlock = fileSizes[i] - numBlocks * blockSizes[i];
    if (lastBlock > 0) {
      b = new byte[(int)lastBlock];
      rand.nextBytes(b);
      stm.write(b);
      crc.update(b);
      if (i + 1 < fileSizes.length) {
        // Not last block of file, write zero
        int zeroLen = (int)(blockSize - lastBlock);
        stm.write(zeros, 0, zeroLen);
        crc.update(zeros, 0, zeroLen);
      }
    }
  }
  stm.close();
  return crc.getValue();
}
/**
 * Debug helper: prints a CRC32 for each bufferSize-sized chunk of the given
 * file to stdout, preceded by the file name and length.
 *
 * Fixes two defects in the original: (1) the return value of read() was
 * ignored, so a short or final read hashed stale bytes left over from the
 * previous iteration; (2) the stream was not closed if an IOException was
 * thrown mid-read.
 */
private void printFileCRC(FileSystem fs, Path file, long bufferSize)
    throws IOException {
  byte[] buffer = new byte[(int)bufferSize];
  FSDataInputStream stm = fs.open(file);
  try {
    StringBuilder sb = new StringBuilder();
    sb.append("CRC for file: " + file + " size " +
      fs.getFileStatus(file).getLen() + "\n");
    int numRead;
    while ((numRead = stm.read(buffer)) >= 0) {
      CRC32 crc = new CRC32();
      // Only checksum the bytes actually read in this iteration.
      crc.update(buffer, 0, numRead);
      sb.append(" " + crc.getValue());
    }
    sb.append("\n");
    System.out.println(sb.toString());
  } finally {
    stm.close();
  }
}
/**
 * Raids a directory of several files and checks that the resulting parity is
 * byte-identical (same length and CRC) to the parity of one concatenated file
 * built by {@link #createDirectoryFile} with the same sizes and seeds.
 *
 * @param fileSizes  per-file byte counts
 * @param blockSizes per-file block sizes used when writing each file
 * @param blockSize  block size for the concatenated single-file equivalent
 */
private void validateMultipleFiles(String code, FileSystem fileSys,
    Path sourceDir, int stripeLength, long[] fileSizes, long[] blockSizes,
    long blockSize) throws Exception {
  assert fileSizes.length == blockSizes.length;
  Codec codec = loadTestCodecs(code, stripeLength, true);
  Path parityDir = new Path(codec.parityDirectory);
  // Clean up leftovers from previous invocations.
  RaidDFSUtil.cleanUp(fileSys, sourceDir);
  RaidDFSUtil.cleanUp(fileSys, parityDir);
  fileSys.mkdirs(sourceDir);
  LOG.info("Create files under directory " + sourceDir);
  Random rand = new Random();
  // Remember the random seeds so createDirectoryFile below can reproduce
  // exactly the same data in a single concatenated file.
  int[] seeds = new int[fileSizes.length];
  for (int i = 0; i < fileSizes.length; i++) {
    Path file = new Path(sourceDir, "file" + i);
    seeds[i] = rand.nextInt();
    TestRaidDfs.createTestFile(fileSys, file, 2, fileSizes[i],
      blockSizes[i], seeds[i]);
  }
  Path parityFile = RaidNode.getOriginalParityFile(parityDir, sourceDir);
  // Do directory level raid
  LOG.info("Create a directory-raid parity file " + parityFile);
  assertTrue("Cannot raid directory " + sourceDir,
    doRaid(conf, fileSys, sourceDir, codec));
  this.printFileCRC(fileSys, parityFile, blockSize);
  long dirCRC = RaidDFSUtil.getCRC(fileSys, parityFile);
  long dirLen = fileSys.getFileStatus(parityFile).getLen();
  assertEquals("Modification time should be the same",
    fileSys.getFileStatus(sourceDir).getModificationTime(),
    fileSys.getFileStatus(parityFile).getModificationTime());
  assertEquals("Replica num of parity file should be reduced to 1",
    fileSys.getFileStatus(parityFile).getReplication(), 1);
  for (int i = 0; i < fileSizes.length; i++) {
    Path file = new Path(sourceDir, "file" + i);
    assertEquals("Replica num of source file should be reduced to 1",
      fileSys.getFileStatus(file).getReplication(), 1);
  }
  // remove the source dir and parity dir
  RaidDFSUtil.cleanUp(fileSys, sourceDir);
  RaidDFSUtil.cleanUp(fileSys, parityDir);
  fileSys.mkdirs(sourceDir);
  codec = loadTestCodecs(code, stripeLength, false);
  Path file1 = new Path(sourceDir, "file1");
  Path parityFile1 = RaidNode.getOriginalParityFile(parityDir,
    file1);
  LOG.info("Create a source file " + file1);
  this.createDirectoryFile(fileSys, file1, 1, fileSizes, blockSizes, seeds,
    blockSize);
  LOG.info("Create a file-raid parity file " + parityFile1);
  assertTrue("Cannot raid file " + file1,
    doRaid(conf, fileSys, file1, codec));
  this.printFileCRC(fileSys, parityFile1, blockSize);
  // Directory-level parity and equivalent file-level parity must agree.
  assertTrue("Parity file doesn't match",
    TestRaidDfs.validateFile(fileSys, parityFile1, dirLen, dirCRC));
}
/**
 * Tests directory-raid over directories of small files (each smaller than a
 * block) with several size mixes, for every codec.
 */
public void testSmallFileDirectory() throws Exception {
  mySetup();
  int stripeLength = 4;
  long blockSize = 8192L;
  try {
    for (String code: RaidDFSUtil.codes) {
      LOG.info("testSmallFileDirectory: Test code " + code);
      Path sourceDir = new Path("/user/raid");
      validateMultipleFiles(code, fileSys, sourceDir, stripeLength,
        new long[]{1000L, 4000L, 1000L}, blockSize, 4096L);
      validateMultipleFiles(code, fileSys, sourceDir, stripeLength,
        new long[]{2000L, 3000L, 2000L, 3000L}, blockSize, 3072L);
      validateMultipleFiles(code, fileSys, sourceDir, stripeLength,
        new long[]{3000L, 3000L, 3000L, 3000L}, blockSize, 3072L);
      validateMultipleFiles(code, fileSys, sourceDir, stripeLength,
        new long[]{511L, 3584L, 3000L, 1234L, 512L, 1234L, 3000L,
          3234L, 511L}, blockSize, 3584L);
    }
  } finally {
    myTearDown();
  }
}
/**
 * Tests directory-raid over directories of files that all share the same
 * block size, covering exact multiples of the block size, half-block
 * remainders and one-byte remainders, for every codec.
 */
public void testIdenticalBlockSizeFileDirectory() throws Exception {
  mySetup();
  int stripeLength = 4;
  long blockSize = 8192L;
  try {
    for (String code: RaidDFSUtil.codes) {
      LOG.info("testIdenticalBlockSizeFileDirectory: Test code " + code);
      Path sourceDir = new Path("/user/raid");
      validateMultipleFiles(code, fileSys, sourceDir, stripeLength,
        new long[] {1000L, blockSize, 2*blockSize, 4000L}, blockSize,
        blockSize);
      validateMultipleFiles(code, fileSys, sourceDir, stripeLength,
        new long[] {blockSize, 2*blockSize, 3*blockSize, 4*blockSize},
        blockSize, blockSize);
      int halfBlock = (int)blockSize/2;
      validateMultipleFiles(code, fileSys, sourceDir, stripeLength,
        new long[] {blockSize + halfBlock, 2*blockSize + halfBlock,
          3*blockSize + halfBlock, 4*blockSize + halfBlock},
        blockSize, blockSize);
      validateMultipleFiles(code, fileSys, sourceDir, stripeLength,
        new long[] {blockSize+1, 9*blockSize+1, 2*blockSize+1,
          3*blockSize+1}, blockSize, blockSize);
    }
  } finally {
    myTearDown();
  }
}
/**
 * Tests directory-raid over directories whose files use different block
 * sizes from one another, for every codec.
 */
public void testDifferentBlockSizeFileDirectory() throws Exception {
  mySetup();
  int stripeLength = 3;
  long blockSize = 8192L;
  try {
    for (String code: RaidDFSUtil.codes) {
      LOG.info("testDifferentBlockSizeFileDirectory: Test code " + code);
      Path sourceDir = new Path("/user/raid");
      validateMultipleFiles(code, fileSys, sourceDir, stripeLength,
        new long[] {1000, blockSize, 2*blockSize, 2*blockSize + 1},
        new long[] {blockSize, blockSize, 2*blockSize, blockSize},
        2*blockSize);
      validateMultipleFiles(code, fileSys, sourceDir, stripeLength,
        new long[] {blockSize, 2*blockSize, 3*blockSize, 4*blockSize},
        new long[] {blockSize, 2*blockSize, 3*blockSize, blockSize},
        3*blockSize);
      validateMultipleFiles(code, fileSys, sourceDir, stripeLength,
        new long[] {blockSize+1, 9*blockSize+1, 2*blockSize+1,
          blockSize+1}, new long[]{blockSize, 2*blockSize, 3*blockSize,
          blockSize}, 2*blockSize+512);
    }
  } finally {
    myTearDown();
  }
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.roots;
import com.intellij.ProjectTopics;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ex.PathManagerEx;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.module.ModifiableModuleModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.StdModuleTypes;
import com.intellij.openapi.project.ModuleAdapter;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ContentEntry;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.impl.ModifiableModelCommitter;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.JavaPsiFacade;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.testFramework.ModuleTestCase;
import com.intellij.util.messages.MessageBusConnection;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * Tests multi-module project editing through {@link ModifiableModuleModel} and
 * {@link ModifiableRootModel}: adding and disposing modules in bulk, editing
 * content roots with inter-module dependencies, and renaming modules. Verifies
 * that model changes only take effect on commit and that module added/removed
 * events are delivered in the expected chunks.
 *
 * @author dsl
 */
public class MultiModuleEditingTest extends ModuleTestCase {
  private static final String TEST_PATH = PathManagerEx.getTestDataPath() +
    "/moduleRootManager/multiModuleEditing".replace('/', File.separatorChar);

  @Override
  protected void setUpModule() {
    // Intentionally empty: each test creates the modules it needs.
  }

  @Override
  protected void setUpJdk() {
    // Intentionally empty: these tests do not require a JDK.
  }

  public void testAddTwoModules() throws Exception {
    final MessageBusConnection connection = myProject.getMessageBus().connect();
    final MyModuleListener moduleListener = new MyModuleListener();
    connection.subscribe(ProjectTopics.MODULES, moduleListener);

    final ModuleManager moduleManager = ModuleManager.getInstance(myProject);

    final Module moduleA;
    final Module moduleB;

    {
      final ModifiableModuleModel modifiableModel = moduleManager.getModifiableModel();
      moduleA = modifiableModel.newModule("a.iml", StdModuleTypes.JAVA.getId());
      moduleB = modifiableModel.newModule("b.iml", StdModuleTypes.JAVA.getId());
      assertEquals("Changes are not applied until commit", 0, moduleManager.getModules().length);
      //noinspection SSBasedInspection
      moduleListener.assertCorrectEvents(new String[0][]);
      ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
          modifiableModel.commit();
        }
      });
    }

    assertEquals(2, moduleManager.getModules().length);
    assertTrue(moduleManager.findModuleByName("a").equals(moduleA));
    assertTrue(moduleManager.findModuleByName("b").equals(moduleB));
    // Both additions are committed together, so they arrive as a single chunk.
    moduleListener.assertCorrectEvents(new String[][]{{"+a", "+b"}});

    {
      final ModifiableModuleModel modifiableModel = moduleManager.getModifiableModel();
      modifiableModel.disposeModule(moduleA);
      modifiableModel.disposeModule(moduleB);
      assertEquals("Changes are not applied until commit", 2, moduleManager.getModules().length);
      moduleListener.assertCorrectEvents(new String[][]{{"+a", "+b"}});
      ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
          modifiableModel.commit();
        }
      });
    }

    assertEquals(0, moduleManager.getModules().length);
    moduleListener.assertCorrectEvents(new String[][]{{"+a", "+b"}, {"-a", "-b"}});
    connection.disconnect();
  }

  public void testRootsEditing() {
    final MessageBusConnection connection = myProject.getMessageBus().connect();
    final ModuleManager moduleManager = ModuleManager.getInstance(myProject);
    final MyModuleListener moduleListener = new MyModuleListener();
    connection.subscribe(ProjectTopics.MODULES, moduleListener);

    final Module moduleA;
    final Module moduleB;

    {
      final ModifiableModuleModel moduleModel = moduleManager.getModifiableModel();
      moduleA = moduleModel.newModule("a.iml", StdModuleTypes.JAVA.getId());
      moduleB = moduleModel.newModule("b.iml", StdModuleTypes.JAVA.getId());
      final ModifiableRootModel rootModelA = ModuleRootManager.getInstance(moduleA).getModifiableModel();
      final ModifiableRootModel rootModelB = ModuleRootManager.getInstance(moduleB).getModifiableModel();
      // b depends on a.
      rootModelB.addModuleOrderEntry(moduleA);
      final ContentEntry contentEntryA = rootModelA.addContentEntry(getVirtualFileInTestData("a"));
      contentEntryA.addSourceFolder(getVirtualFileInTestData("a/src"), false);
      final ContentEntry contentEntryB = rootModelB.addContentEntry(getVirtualFileInTestData("b"));
      contentEntryB.addSourceFolder(getVirtualFileInTestData("b/src"), false);
      // Commit root models and the module model atomically.
      ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
          ModifiableModelCommitter.multiCommit(new ModifiableRootModel[]{rootModelB, rootModelA}, moduleModel);
        }
      });
    }

    final JavaPsiFacade psiManager = getJavaFacade();
    // Dependency is b -> a: classes from b must not be visible to a, while
    // classes from a must be visible to b's with-dependencies scope.
    assertNull(psiManager.findClass("j.B", GlobalSearchScope.moduleWithDependenciesScope(moduleA)));
    assertNull(psiManager.findClass("q.A", GlobalSearchScope.moduleScope(moduleB)));
    assertNotNull(psiManager.findClass("q.A", GlobalSearchScope.moduleScope(moduleA)));
    assertNotNull(psiManager.findClass("q.A", GlobalSearchScope.moduleWithDependenciesScope(moduleB)));
    assertNotNull(psiManager.findClass("j.B", GlobalSearchScope.moduleScope(moduleB)));
    assertNotNull(psiManager.findClass("j.B", GlobalSearchScope.moduleWithDependenciesScope(moduleB)));

    moduleManager.disposeModule(moduleB);
    moduleManager.disposeModule(moduleA);
    moduleListener.assertCorrectEvents(new String[][]{{"+b", "+a"}, {"-b"}, {"-a"}});
    connection.disconnect();
  }

  public void testRenaming() throws Exception {
    final ModuleManager moduleManager = ModuleManager.getInstance(myProject);

    final Module moduleA;
    final Module moduleB;

    {
      final ModifiableModuleModel moduleModel = moduleManager.getModifiableModel();
      moduleA = moduleModel.newModule("a.iml", StdModuleTypes.JAVA.getId());
      moduleB = moduleModel.newModule("b.iml", StdModuleTypes.JAVA.getId());
      final Module moduleC = moduleModel.newModule("c.iml", StdModuleTypes.JAVA.getId());
      final ModifiableRootModel rootModelB = ModuleRootManager.getInstance(moduleB).getModifiableModel();
      rootModelB.addModuleOrderEntry(moduleC);
      // Dispose "c" before committing: b keeps a dependency on the name "c".
      moduleModel.disposeModule(moduleC);
      ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
          ModifiableModelCommitter.multiCommit(new ModifiableRootModel[]{rootModelB}, moduleModel);
        }
      });
    }

    final ModuleRootManager rootManagerB = ModuleRootManager.getInstance(moduleB);
    // The dependency on "c" is dangling: recorded by name, resolved to nothing.
    assertEquals(0, rootManagerB.getDependencies().length);
    final String[] dependencyModuleNames = rootManagerB.getDependencyModuleNames();
    assertEquals(1, dependencyModuleNames.length);
    assertEquals("c", dependencyModuleNames[0]);

    {
      final ModifiableModuleModel moduleModel = moduleManager.getModifiableModel();
      moduleModel.renameModule(moduleA, "c");
      moduleModel.commit();
    }
    // Renaming a to "c" makes the dangling dependency resolve to module a.
    assertEquals(1, rootManagerB.getDependencies().length);
    assertEquals(moduleA, rootManagerB.getDependencies()[0]);
    assertEquals("c", moduleA.getName());
    moduleManager.disposeModule(moduleA);
    moduleManager.disposeModule(moduleB);
  }

  /**
   * Locates a file under the test-data directory of the current test, failing
   * the test if it does not exist.
   */
  private VirtualFile getVirtualFileInTestData(final String relativeVfsPath) {
    return WriteCommandAction.runWriteCommandAction(null, new Computable<VirtualFile>() {
      @Override
      public VirtualFile compute() {
        final String path =
          TEST_PATH + File.separatorChar + getTestName(true) + File.separatorChar + relativeVfsPath.replace('/', File.separatorChar);
        final VirtualFile result = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(new File(path));
        // Fixed typo in the failure message ("doen't" -> "doesn't").
        assertNotNull("File " + path + " doesn't exist", result);
        return result;
      }
    });
  }

  /**
   * Records module added ("+name") and removed ("-name") events and asserts
   * that they arrived as the expected sequence of chunks.
   */
  private static class MyModuleListener extends ModuleAdapter {
    private final List<String> myLog = new ArrayList<String>();

    @Override
    public void moduleRemoved(@NotNull Project project, @NotNull Module module) {
      myLog.add("-" + module.getName());
    }

    @Override
    public void moduleAdded(@NotNull Project project, @NotNull Module module) {
      myLog.add("+" + module.getName());
    }

    /**
     * Asserts the logged events consist of exactly the given chunks in chunk
     * order; events within one chunk may appear in any order.
     */
    public void assertCorrectEvents(String[][] expected) {
      int runningIndex = 0;
      for (int chunkIndex = 0; chunkIndex < expected.length; chunkIndex++) {
        String[] chunk = expected[chunkIndex];
        final List<String> expectedChunkList = new ArrayList<String>(Arrays.asList(chunk));
        int nextIndex = runningIndex + chunk.length;
        assertTrue("Expected chunk " + expectedChunkList.toString(), nextIndex <= myLog.size());
        final List<String> actualChunkList = new ArrayList<String>(myLog.subList(runningIndex, nextIndex));
        // Order within a chunk is not guaranteed, so compare sorted copies.
        Collections.sort(expectedChunkList);
        Collections.sort(actualChunkList);
        assertEquals("Chunk " + chunkIndex, expectedChunkList.toString(), actualChunkList.toString());
        runningIndex = nextIndex;
      }
      assertEquals("More events than needed", runningIndex, myLog.size());
    }
  }
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.lwjgl3;
import java.io.File;
import java.io.PrintStream;
import java.nio.IntBuffer;
import com.badlogic.gdx.ApplicationLogger;
import com.badlogic.gdx.graphics.glutils.GLVersion;
import org.lwjgl.glfw.GLFW;
import org.lwjgl.glfw.GLFWErrorCallback;
import org.lwjgl.glfw.GLFWVidMode;
import org.lwjgl.opengl.AMDDebugOutput;
import org.lwjgl.opengl.ARBDebugOutput;
import org.lwjgl.opengl.GL;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL43;
import org.lwjgl.opengl.GLCapabilities;
import org.lwjgl.opengl.GLUtil;
import org.lwjgl.opengl.KHRDebug;
import org.lwjgl.system.Callback;
import com.badlogic.gdx.Application;
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Audio;
import com.badlogic.gdx.Files;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Graphics;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.LifecycleListener;
import com.badlogic.gdx.Net;
import com.badlogic.gdx.Preferences;
import com.badlogic.gdx.backends.lwjgl3.audio.OpenALAudio;
import com.badlogic.gdx.backends.lwjgl3.audio.mock.MockAudio;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.Clipboard;
import com.badlogic.gdx.utils.GdxRuntimeException;
import com.badlogic.gdx.utils.ObjectMap;
import com.badlogic.gdx.utils.SharedLibraryLoader;
public class Lwjgl3Application implements Application {
private final Lwjgl3ApplicationConfiguration config;
private final Array<Lwjgl3Window> windows = new Array<Lwjgl3Window>();
private volatile Lwjgl3Window currentWindow;
private Audio audio;
private final Files files;
private final Net net;
private final ObjectMap<String, Preferences> preferences = new ObjectMap<String, Preferences>();
private final Lwjgl3Clipboard clipboard;
private int logLevel = LOG_INFO;
private ApplicationLogger applicationLogger;
private volatile boolean running = true;
private final Array<Runnable> runnables = new Array<Runnable>();
private final Array<Runnable> executedRunnables = new Array<Runnable>();
private final Array<LifecycleListener> lifecycleListeners = new Array<LifecycleListener>();
private static GLFWErrorCallback errorCallback;
private static GLVersion glVersion;
private static Callback glDebugCallback;
/**
 * Loads the natives and initializes GLFW exactly once; the non-null error
 * callback doubles as the "already initialized" flag.
 */
static void initializeGlfw() {
  if (errorCallback == null) {
    Lwjgl3NativesLoader.load();
    errorCallback = GLFWErrorCallback.createPrint(System.err);
    GLFW.glfwSetErrorCallback(errorCallback);
    if (!GLFW.glfwInit()) {
      throw new GdxRuntimeException("Unable to initialize GLFW");
    }
  }
}
/**
 * Creates the application: initializes GLFW, audio, files, net and clipboard,
 * installs the Gdx globals, creates the main window, and then runs the main
 * loop on the calling thread until all windows are closed or {@link #exit()}
 * is called. Cleanup runs before this constructor returns.
 */
public Lwjgl3Application(ApplicationListener listener, Lwjgl3ApplicationConfiguration config) {
  initializeGlfw();
  setApplicationLogger(new Lwjgl3ApplicationLogger());
  // Defensive copy so later changes by the caller have no effect.
  this.config = Lwjgl3ApplicationConfiguration.copy(config);
  if (this.config.title == null) this.config.title = listener.getClass().getSimpleName();
  Gdx.app = this;
  if (!config.disableAudio) {
    try {
      this.audio = Gdx.audio = new OpenALAudio(config.audioDeviceSimultaneousSources,
        config.audioDeviceBufferCount, config.audioDeviceBufferSize);
    } catch (Throwable t) {
      // Fall back to no-op audio rather than failing application startup.
      log("Lwjgl3Application", "Couldn't initialize audio, disabling audio", t);
      this.audio = Gdx.audio = new MockAudio();
    }
  } else {
    this.audio = Gdx.audio = new MockAudio();
  }
  this.files = Gdx.files = new Lwjgl3Files();
  this.net = Gdx.net = new Lwjgl3Net();
  this.clipboard = new Lwjgl3Clipboard();

  // sharedContext == 0 marks the main window, which is created immediately.
  Lwjgl3Window window = createWindow(config, listener, 0);
  windows.add(window);
  try {
    loop();
    cleanupWindows();
  } catch(Throwable t) {
    if (t instanceof RuntimeException)
      throw (RuntimeException) t;
    else
      throw new GdxRuntimeException(t);
  } finally {
    cleanup();
  }
}
/**
 * Main loop: updates audio, updates/renders every window, polls GLFW events,
 * executes posted runnables, disposes closed windows, and sleeps to throttle
 * to {@code config.idleFPS} when no window rendered. Returns when
 * {@code running} becomes false or the last window is closed.
 */
private void loop() {
  Array<Lwjgl3Window> closedWindows = new Array<Lwjgl3Window>();
  while (running && windows.size > 0) {
    // FIXME put it on a separate thread
    if (audio instanceof OpenALAudio) {
      ((OpenALAudio) audio).update();
    }

    boolean haveWindowsRendered = false;
    closedWindows.clear();
    for (Lwjgl3Window window : windows) {
      window.makeCurrent();
      currentWindow = window;
      // NOTE(review): window.update() runs while holding the
      // lifecycleListeners lock — presumably to keep listener mutation out
      // of a frame; confirm before relying on this.
      synchronized (lifecycleListeners) {
        haveWindowsRendered |= window.update();
      }
      if (window.shouldClose()) {
        closedWindows.add(window);
      }
    }
    GLFW.glfwPollEvents();

    boolean shouldRequestRendering;
    synchronized (runnables) {
      shouldRequestRendering = runnables.size > 0;
      // Drain into executedRunnables so the runnables run outside the lock.
      executedRunnables.clear();
      executedRunnables.addAll(runnables);
      runnables.clear();
    }
    for (Runnable runnable : executedRunnables) {
      runnable.run();
    }
    if (shouldRequestRendering){
      // Must follow Runnables execution so changes done by Runnables are reflected
      // in the following render.
      for (Lwjgl3Window window : windows) {
        if (!window.getGraphics().isContinuousRendering())
          window.requestRendering();
      }
    }

    for (Lwjgl3Window closedWindow : closedWindows) {
      if (windows.size == 1) {
        // Lifecycle listener methods have to be called before ApplicationListener methods. The
        // application will be disposed when _all_ windows have been disposed, which is the case,
        // when there is only 1 window left, which is in the process of being disposed.
        for (int i = lifecycleListeners.size - 1; i >= 0; i--) {
          LifecycleListener l = lifecycleListeners.get(i);
          l.pause();
          l.dispose();
        }
        lifecycleListeners.clear();
      }
      closedWindow.dispose();

      windows.removeValue(closedWindow, false);
    }

    if (!haveWindowsRendered) {
      // Sleep a few milliseconds in case no rendering was requested
      // with continuous rendering disabled.
      try {
        Thread.sleep(1000 / config.idleFPS);
      } catch (InterruptedException e) {
        // ignore
      }
    }
  }
}
/**
 * Pauses and disposes all lifecycle listeners, then disposes every remaining
 * window. Called after the main loop exits normally.
 */
private void cleanupWindows() {
  synchronized (lifecycleListeners) {
    for(LifecycleListener lifecycleListener : lifecycleListeners){
      lifecycleListener.pause();
      lifecycleListener.dispose();
    }
  }
  for (Lwjgl3Window window : windows) {
    window.dispose();
  }
  windows.clear();
}
/**
 * Releases global resources: system cursors, the audio device, the GLFW error
 * and GL debug callbacks, and finally terminates GLFW. Always runs, even if
 * the main loop threw.
 */
private void cleanup() {
  Lwjgl3Cursor.disposeSystemCursors();
  if (audio instanceof OpenALAudio) {
    ((OpenALAudio) audio).dispose();
  }
  errorCallback.free();
  if (glDebugCallback != null) {
    glDebugCallback.free();
  }
  GLFW.glfwTerminate();
}
/** Returns the listener of the window whose context is current. */
@Override
public ApplicationListener getApplicationListener() {
  return currentWindow.getListener();
}
/** Returns the graphics of the window whose context is current. */
@Override
public Graphics getGraphics() {
  return currentWindow.getGraphics();
}
/** Returns the shared audio backend (OpenAL, or a mock when audio is disabled or failed to initialize). */
@Override
public Audio getAudio() {
  return audio;
}
/** Returns the input of the window whose context is current. */
@Override
public Input getInput() {
  return currentWindow.getInput();
}
/** Returns the shared files implementation. */
@Override
public Files getFiles() {
  return files;
}
/** Returns the shared net implementation. */
@Override
public Net getNet() {
  return net;
}
/** Logs a debug message if the log level is at least {@code LOG_DEBUG}. */
@Override
public void debug (String tag, String message) {
  if (logLevel >= LOG_DEBUG) getApplicationLogger().debug(tag, message);
}
/** Logs a debug message with an exception if the log level is at least {@code LOG_DEBUG}. */
@Override
public void debug (String tag, String message, Throwable exception) {
  if (logLevel >= LOG_DEBUG) getApplicationLogger().debug(tag, message, exception);
}
/** Logs an info message if the log level is at least {@code LOG_INFO}. */
@Override
public void log (String tag, String message) {
  if (logLevel >= LOG_INFO) getApplicationLogger().log(tag, message);
}
/** Logs an info message with an exception if the log level is at least {@code LOG_INFO}. */
@Override
public void log (String tag, String message, Throwable exception) {
  if (logLevel >= LOG_INFO) getApplicationLogger().log(tag, message, exception);
}
/** Logs an error message if the log level is at least {@code LOG_ERROR}. */
@Override
public void error (String tag, String message) {
  if (logLevel >= LOG_ERROR) getApplicationLogger().error(tag, message);
}
/** Logs an error message with an exception if the log level is at least {@code LOG_ERROR}. */
@Override
public void error (String tag, String message, Throwable exception) {
  if (logLevel >= LOG_ERROR) getApplicationLogger().error(tag, message, exception);
}
/** Sets the log level threshold used by the debug/log/error methods. */
@Override
public void setLogLevel(int logLevel) {
  this.logLevel = logLevel;
}
/** Returns the current log level threshold. */
@Override
public int getLogLevel() {
  return logLevel;
}
/** Replaces the logger used for all debug/log/error output. */
@Override
public void setApplicationLogger (ApplicationLogger applicationLogger) {
  this.applicationLogger = applicationLogger;
}
/** Returns the logger used for all debug/log/error output. */
@Override
public ApplicationLogger getApplicationLogger () {
  return applicationLogger;
}
/** Always {@link ApplicationType#Desktop} for this backend. */
@Override
public ApplicationType getType() {
  return ApplicationType.Desktop;
}
/** Always 0 for this backend. */
@Override
public int getVersion() {
  return 0;
}
/** Returns the currently used Java heap in bytes (total minus free memory). */
@Override
public long getJavaHeap() {
  return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
}
/** Native heap is not tracked separately; returns the Java heap figure. */
@Override
public long getNativeHeap() {
  return getJavaHeap();
}
/**
 * Returns the {@link Preferences} for the given name, creating and caching a
 * file-backed instance under the configured preferences directory on first
 * request.
 */
@Override
public Preferences getPreferences(String name) {
  Preferences cached = preferences.get(name);
  if (cached == null) {
    cached = new Lwjgl3Preferences(
      new Lwjgl3FileHandle(new File(config.preferencesDirectory, name), config.preferencesFileType));
    preferences.put(name, cached);
  }
  return cached;
}
/** Returns the shared clipboard implementation. */
@Override
public Clipboard getClipboard() {
  return clipboard;
}
/** Queues a runnable; the main loop drains and executes the queue each frame. */
@Override
public void postRunnable(Runnable runnable) {
  synchronized (runnables) {
    runnables.add(runnable);
  }
}
/** Requests shutdown: the main loop checks {@code running} and exits after the current frame. */
@Override
public void exit() {
  running = false;
}
/** Registers a lifecycle listener; access is guarded by the listener list's lock. */
@Override
public void addLifecycleListener(LifecycleListener listener) {
  synchronized (lifecycleListeners) {
    lifecycleListeners.add(listener);
  }
}
/** Unregisters a lifecycle listener (identity comparison); guarded by the listener list's lock. */
@Override
public void removeLifecycleListener(LifecycleListener listener) {
  synchronized (lifecycleListeners) {
    lifecycleListeners.removeValue(listener, true);
  }
}
/**
 * Creates a new {@link Lwjgl3Window} using the provided listener and {@link Lwjgl3WindowConfiguration}.
 *
 * This method only instantiates a {@link Lwjgl3Window} and returns immediately. The actual window creation
 * is postponed with {@link Application#postRunnable(Runnable)} until after all existing windows are updated.
 */
public Lwjgl3Window newWindow(ApplicationListener listener, Lwjgl3WindowConfiguration config) {
  Lwjgl3ApplicationConfiguration appConfig = Lwjgl3ApplicationConfiguration.copy(this.config);
  appConfig.setWindowConfiguration(config);
  // The new window shares the GL context of the first (main) window.
  return createWindow(appConfig, listener, windows.get(0).getWindowHandle());
}
/**
 * Instantiates a window wrapper. The main window (sharedContext == 0) is
 * created immediately; additional windows are created via postRunnable —
 * i.e. between frames — and add themselves to the window list. The main
 * window is added to the list by the constructor instead.
 */
private Lwjgl3Window createWindow(Lwjgl3ApplicationConfiguration config, ApplicationListener listener, long sharedContext) {
  Lwjgl3Window window = new Lwjgl3Window(listener, config);
  if (sharedContext == 0) {
    // the main window is created immediately
    createWindow(window, config, sharedContext);
  } else {
    // creation of additional windows is deferred to avoid GL context trouble
    postRunnable(() -> {
      createWindow(window, config, sharedContext);
      windows.add(window);
    });
  }
  return window;
}
/**
 * Creates the native GLFW window, attaches it to the wrapper and applies the
 * configured initial visibility. Clears and swaps twice — presumably so both
 * back and front buffers start filled with the configured background color
 * (TODO confirm).
 */
private void createWindow(Lwjgl3Window window, Lwjgl3ApplicationConfiguration config, long sharedContext) {
  long windowHandle = createGlfwWindow(config, sharedContext);
  window.create(windowHandle);
  window.setVisible(config.initialVisible);

  for (int i = 0; i < 2; i++) {
    GL11.glClearColor(config.initialBackgroundColor.r, config.initialBackgroundColor.g, config.initialBackgroundColor.b,
      config.initialBackgroundColor.a);
    GL11.glClear(GL11.GL_COLOR_BUFFER_BIT);
    GLFW.glfwSwapBuffers(windowHandle);
  }
}
/**
 * Creates and configures a GLFW window according to {@code config} and returns
 * its native handle. Also makes the new GL context current, creates the LWJGL
 * capabilities for it, and validates the minimum GL requirements.
 *
 * @param config              the window/context configuration
 * @param sharedContextWindow handle of a window whose GL context to share, or 0
 *                            to create the first (non-shared) context
 * @return the native GLFW window handle
 * @throws GdxRuntimeException if the window cannot be created, or if the driver
 *         does not provide OpenGL 2.0+ with FBO support
 */
static long createGlfwWindow(Lwjgl3ApplicationConfiguration config, long sharedContextWindow) {
	GLFW.glfwDefaultWindowHints();
	// Window starts hidden; visibility is applied later by the caller.
	GLFW.glfwWindowHint(GLFW.GLFW_VISIBLE, GLFW.GLFW_FALSE);
	GLFW.glfwWindowHint(GLFW.GLFW_RESIZABLE, config.windowResizable ? GLFW.GLFW_TRUE : GLFW.GLFW_FALSE);
	GLFW.glfwWindowHint(GLFW.GLFW_MAXIMIZED, config.windowMaximized ? GLFW.GLFW_TRUE : GLFW.GLFW_FALSE);
	if(sharedContextWindow == 0) {
		// Framebuffer hints only matter for the first context; shared contexts
		// inherit the framebuffer configuration of the context they share with.
		GLFW.glfwWindowHint(GLFW.GLFW_RED_BITS, config.r);
		GLFW.glfwWindowHint(GLFW.GLFW_GREEN_BITS, config.g);
		GLFW.glfwWindowHint(GLFW.GLFW_BLUE_BITS, config.b);
		GLFW.glfwWindowHint(GLFW.GLFW_ALPHA_BITS, config.a);
		GLFW.glfwWindowHint(GLFW.GLFW_STENCIL_BITS, config.stencil);
		GLFW.glfwWindowHint(GLFW.GLFW_DEPTH_BITS, config.depth);
		GLFW.glfwWindowHint(GLFW.GLFW_SAMPLES, config.samples);
	}
	if (config.useGL30) {
		GLFW.glfwWindowHint(GLFW.GLFW_CONTEXT_VERSION_MAJOR, config.gles30ContextMajorVersion);
		GLFW.glfwWindowHint(GLFW.GLFW_CONTEXT_VERSION_MINOR, config.gles30ContextMinorVersion);
		if (SharedLibraryLoader.isMac) {
			// hints mandatory on OS X for GL 3.2+ context creation, but fail on Windows if the
			// WGL_ARB_create_context extension is not available
			// see: http://www.glfw.org/docs/latest/compat.html
			GLFW.glfwWindowHint(GLFW.GLFW_OPENGL_FORWARD_COMPAT, GLFW.GLFW_TRUE);
			GLFW.glfwWindowHint(GLFW.GLFW_OPENGL_PROFILE, GLFW.GLFW_OPENGL_CORE_PROFILE);
		}
	}
	if (config.debug) {
		GLFW.glfwWindowHint(GLFW.GLFW_OPENGL_DEBUG_CONTEXT, GLFW.GLFW_TRUE);
	}
	long windowHandle = 0;
	if(config.fullscreenMode != null) {
		// glfwWindowHint(GLFW.GLFW_REFRESH_RATE, config.fullscreenMode.refreshRate);
		windowHandle = GLFW.glfwCreateWindow(config.fullscreenMode.width, config.fullscreenMode.height, config.title, config.fullscreenMode.getMonitor(), sharedContextWindow);
	} else {
		GLFW.glfwWindowHint(GLFW.GLFW_DECORATED, config.windowDecorated? GLFW.GLFW_TRUE: GLFW.GLFW_FALSE);
		windowHandle = GLFW.glfwCreateWindow(config.windowWidth, config.windowHeight, config.title, 0, sharedContextWindow);
	}
	if (windowHandle == 0) {
		throw new GdxRuntimeException("Couldn't create window");
	}
	Lwjgl3Window.setSizeLimits(windowHandle, config.windowMinWidth, config.windowMinHeight, config.windowMaxWidth, config.windowMaxHeight);
	if (config.fullscreenMode == null && !config.windowMaximized) {
		if (config.windowX == -1 && config.windowY == -1) {
			// No explicit position requested: center the window on the primary
			// monitor, clamping the size to the configured min/max limits first.
			int windowWidth = Math.max(config.windowWidth, config.windowMinWidth);
			int windowHeight = Math.max(config.windowHeight, config.windowMinHeight);
			if (config.windowMaxWidth > -1) windowWidth = Math.min(windowWidth, config.windowMaxWidth);
			if (config.windowMaxHeight > -1) windowHeight = Math.min(windowHeight, config.windowMaxHeight);
			GLFWVidMode vidMode = GLFW.glfwGetVideoMode(GLFW.glfwGetPrimaryMonitor());
			GLFW.glfwSetWindowPos(windowHandle, vidMode.width() / 2 - windowWidth / 2, vidMode.height() / 2 - windowHeight / 2);
		} else {
			GLFW.glfwSetWindowPos(windowHandle, config.windowX, config.windowY);
		}
	}
	if (config.windowIconPaths != null) {
		Lwjgl3Window.setIcon(windowHandle, config.windowIconPaths, config.windowIconFileType);
	}
	GLFW.glfwMakeContextCurrent(windowHandle);
	GLFW.glfwSwapInterval(config.vSyncEnabled ? 1 : 0);
	// Create the LWJGL capabilities for the now-current context before issuing
	// any GL calls, then cache the driver's version info.
	GL.createCapabilities();
	initiateGL();
	if (!glVersion.isVersionEqualToOrHigher(2, 0))
		throw new GdxRuntimeException("OpenGL 2.0 or higher with the FBO extension is required. OpenGL version: "
				+ GL11.glGetString(GL11.GL_VERSION) + "\n" + glVersion.getDebugVersionString());
	if (!supportsFBO()) {
		throw new GdxRuntimeException("OpenGL 2.0 or higher with the FBO extension is required. OpenGL version: "
				+ GL11.glGetString(GL11.GL_VERSION) + ", FBO extension: false\n" + glVersion.getDebugVersionString());
	}
	if (config.debug) {
		// Install the debug callback and silence low-value NOTIFICATION spam.
		glDebugCallback = GLUtil.setupDebugMessageCallback(config.debugStream);
		setGLDebugMessageControl(GLDebugMessageSeverity.NOTIFICATION, false);
	}
	return windowHandle;
}
/**
 * Queries the driver's identification strings for the current context and
 * caches the parsed version information in {@code glVersion}.
 */
private static void initiateGL () {
	glVersion = new GLVersion(Application.ApplicationType.Desktop,
		GL11.glGetString(GL11.GL_VERSION),
		GL11.glGetString(GL11.GL_VENDOR),
		GL11.glGetString(GL11.GL_RENDERER));
}
/**
 * Whether the current context supports framebuffer objects, either as a core
 * feature or via extension.
 */
private static boolean supportsFBO () {
	// FBO is in core since OpenGL 3.0, see https://www.opengl.org/wiki/Framebuffer_Object
	if (glVersion.isVersionEqualToOrHigher(3, 0)) {
		return true;
	}
	return GLFW.glfwExtensionSupported("GL_EXT_framebuffer_object")
		|| GLFW.glfwExtensionSupported("GL_ARB_framebuffer_object");
}
/**
 * Severity levels for GL debug messages, carrying the matching constant for
 * each of the four debug APIs (GL 4.3 core, KHR_debug, ARB_debug_output,
 * AMD_debug_output). A value of {@code -1} marks a level the extension does
 * not define (NOTIFICATION is unsupported by the ARB and AMD extensions).
 */
public enum GLDebugMessageSeverity {
	HIGH(
		GL43.GL_DEBUG_SEVERITY_HIGH,
		KHRDebug.GL_DEBUG_SEVERITY_HIGH,
		ARBDebugOutput.GL_DEBUG_SEVERITY_HIGH_ARB,
		AMDDebugOutput.GL_DEBUG_SEVERITY_HIGH_AMD),
	MEDIUM(
		GL43.GL_DEBUG_SEVERITY_MEDIUM,
		KHRDebug.GL_DEBUG_SEVERITY_MEDIUM,
		ARBDebugOutput.GL_DEBUG_SEVERITY_MEDIUM_ARB,
		AMDDebugOutput.GL_DEBUG_SEVERITY_MEDIUM_AMD),
	LOW(
		GL43.GL_DEBUG_SEVERITY_LOW,
		KHRDebug.GL_DEBUG_SEVERITY_LOW,
		ARBDebugOutput.GL_DEBUG_SEVERITY_LOW_ARB,
		AMDDebugOutput.GL_DEBUG_SEVERITY_LOW_AMD),
	NOTIFICATION(
		GL43.GL_DEBUG_SEVERITY_NOTIFICATION,
		KHRDebug.GL_DEBUG_SEVERITY_NOTIFICATION,
		-1,
		-1);

	// Per-API severity constants; -1 means "not supported by this API".
	final int gl43, khr, arb, amd;

	GLDebugMessageSeverity(int gl43, int khr, int arb, int amd) {
		this.gl43 = gl43;
		this.khr = khr;
		this.arb = arb;
		this.amd = amd;
	}
}
/**
 * Enables or disables GL debug messages for the specified severity level.
 * The best available debug API is used in priority order: GL 4.3 core,
 * KHR_debug, ARB_debug_output, AMD_debug_output. Returns false if the severity
 * level could not be set (e.g. the NOTIFICATION level is not supported by the
 * ARB and AMD extensions).
 *
 * See {@link Lwjgl3ApplicationConfiguration#enableGLDebugOutput(boolean, PrintStream)}
 */
public static boolean setGLDebugMessageControl (GLDebugMessageSeverity severity, boolean enabled) {
	final GLCapabilities capabilities = GL.getCapabilities();
	final int DONT_CARE = 0x1100; // GL_DONT_CARE; not exposed by the debug bindings
	if (capabilities.OpenGL43) {
		GL43.glDebugMessageControl(DONT_CARE, DONT_CARE, severity.gl43, (IntBuffer) null, enabled);
		return true;
	}
	if (capabilities.GL_KHR_debug) {
		KHRDebug.glDebugMessageControl(DONT_CARE, DONT_CARE, severity.khr, (IntBuffer) null, enabled);
		return true;
	}
	if (capabilities.GL_ARB_debug_output && severity.arb != -1) {
		ARBDebugOutput.glDebugMessageControlARB(DONT_CARE, DONT_CARE, severity.arb, (IntBuffer) null, enabled);
		return true;
	}
	if (capabilities.GL_AMD_debug_output && severity.amd != -1) {
		AMDDebugOutput.glDebugMessageEnableAMD(DONT_CARE, severity.amd, (IntBuffer) null, enabled);
		return true;
	}
	return false;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.hadoop.integration.hive;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.elasticsearch.hadoop.QueryTestParams;
import org.elasticsearch.hadoop.cfg.ConfigurationOptions;
import org.elasticsearch.hadoop.mr.RestUtils;
import org.elasticsearch.hadoop.util.StringUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import static org.junit.Assert.*;
import static org.elasticsearch.hadoop.integration.hive.HiveSuite.*;
@RunWith(Parameterized.class)
public class AbstractHiveSearchJsonTest {

    // Bumped after every test run (parameterized); used to give each Hive table
    // a unique name so runs do not collide.
    private static int testInstance = 0;

    private boolean readMetadata;
    private String query;

    @Parameters
    public static Collection<Object[]> queries() {
        return QueryTestParams.params();
    }

    public AbstractHiveSearchJsonTest(String query, boolean readMetadata) {
        this.query = query;
        this.readMetadata = readMetadata;
    }

    @Before
    public void before() throws Exception {
        provisionEsLib();
        RestUtils.refresh("json-hive");
    }

    @After
    public void after() throws Exception {
        testInstance++;
        HiveSuite.after();
    }

    @Test
    public void loadMultiNestedField() throws Exception {
        Assume.assumeTrue(testInstance == 0);
        // FIX: the array literal was previously "[ 10 20 ]" (missing comma) and the
        // second document had an unbalanced trailing '}' - both invalid JSON.
        String data = "{ \"data\" : { \"map\" : { \"key\" : [ 10, 20 ] } } }";
        RestUtils.putData("json-hive/nestedmap", StringUtils.toUTF(data));
        data = "{ \"data\" : { \"different\" : \"structure\" } }";
        RestUtils.putData("json-hive/nestedmap", StringUtils.toUTF(data));
        RestUtils.refresh("json-hive");

        String createList = "CREATE EXTERNAL TABLE jsonnestedmaplistload" + testInstance + "("
                + "nested ARRAY<INT>) "
                + tableProps("json-hive/nestedmap", "'es.mapping.names' = 'nested:data.map.key'");
        String selectList = "SELECT * FROM jsonnestedmaplistload" + testInstance;

        String createMap = "CREATE EXTERNAL TABLE jsonnestedmapmapload" + testInstance + "("
                + "nested MAP<STRING,ARRAY<INT>>) "
                + tableProps("json-hive/nestedmap", "'es.mapping.names' = 'nested:data.map'");
        String selectMap = "SELECT * FROM jsonnestedmapmapload" + testInstance;

        server.execute(createList);
        List<String> result = server.execute(selectList);
        assertTrue("Hive returned null", containsNoNull(result));
        assertContains(result, "10");
        assertContains(result, "20");

        server.execute(createMap);
        result = server.execute(selectMap);
        assertTrue("Hive returned null", containsNoNull(result));
        assertContains(result, "key");
        assertContains(result, "10");
        assertContains(result, "20");
    }

    @Test
    public void loadSingleNestedField() throws Exception {
        Assume.assumeTrue(testInstance == 0);
        String data = "{ \"data\" : { \"single\" : { \"key\" : [ 10 ] } } }";
        RestUtils.putData("json-hive/nestedmap", StringUtils.toUTF(data));
        RestUtils.refresh("json-hive");

        String createList = "CREATE EXTERNAL TABLE jsonnestedsinglemaplistload" + testInstance + "("
                + "nested ARRAY<INT>) "
                + tableProps("json-hive/nestedmap", "'es.mapping.names' = 'nested:data.single.key'");
        String selectList = "SELECT * FROM jsonnestedsinglemaplistload" + testInstance;

        String createMap = "CREATE EXTERNAL TABLE jsonnestedsinglemapmapload" + testInstance + "("
                + "nested MAP<STRING,ARRAY<INT>>) "
                + tableProps("json-hive/nestedmap", "'es.mapping.names' = 'nested:data.single'");
        String selectMap = "SELECT * FROM jsonnestedsinglemapmapload" + testInstance;

        server.execute(createList);
        List<String> result = server.execute(selectList);
        assertTrue("Hive returned null", containsNoNull(result));
        assertContains(result, "10");

        server.execute(createMap);
        result = server.execute(selectMap);
        assertTrue("Hive returned null", containsNoNull(result));
        assertContains(result, "key");
        assertContains(result, "10");
    }

    @Test
    public void basicLoad() throws Exception {
        String create = "CREATE EXTERNAL TABLE jsonartistsload" + testInstance + "("
                + "number STRING, "
                + "name STRING, "
                + "url STRING, "
                + "picture STRING) "
                + tableProps("json-hive/artists");
        String select = "SELECT * FROM jsonartistsload" + testInstance;

        server.execute(create);
        List<String> result = server.execute(select);
        assertTrue("Hive returned null", containsNoNull(result));
        assertContains(result, "Marilyn");
        assertContains(result, "last.fm/music/MALICE");
        assertContains(result, "last.fm/serve/252/5872875.jpg");
    }

    // NOTE(review): deliberately disabled (annotation commented out); kept for reference.
    //@Test
    public void basicCountOperator() throws Exception {
        String create = "CREATE EXTERNAL TABLE jsonartistscount" + testInstance + " ("
                + "number STRING, "
                + "name STRING, "
                + "url STRING, "
                + "picture STRING) "
                + tableProps("json-hive/artists");
        String select = "SELECT count(*) FROM jsonartistscount" + testInstance;

        server.execute(create);
        List<String> result = server.execute(select);
        assertTrue("Hive returned null", containsNoNull(result));
        assertEquals(1, result.size());
        assertTrue(Integer.valueOf(result.get(0)) > 1);
    }

    @Test
    public void testMissingIndex() throws Exception {
        String create = "CREATE EXTERNAL TABLE jsonmissing" + testInstance + " ("
                + "daTE TIMESTAMP, "
                + "Name STRING, "
                + "links STRUCT<uRl:STRING, pICture:STRING>) "
                + tableProps("foobar/missing", "'es.index.read.missing.as.empty' = 'true'");
        String select = "SELECT * FROM jsonmissing" + testInstance;

        server.execute(create);
        List<String> result = server.execute(select);
        assertEquals(0, result.size());
    }

    @Test
    public void testVarcharLoad() throws Exception {
        String create = "CREATE EXTERNAL TABLE jsonvarcharload" + testInstance + " ("
                + "number STRING, "
                + "name STRING, "
                + "url STRING, "
                + "picture STRING) "
                + tableProps("json-hive/varcharsave");
        String select = "SELECT * FROM jsonvarcharload" + testInstance;

        System.out.println(server.execute(create));
        List<String> result = server.execute(select);
        assertTrue("Hive returned null", containsNoNull(result));
        assertTrue(result.size() > 1);
        assertContains(result, "Marilyn");
        assertContains(result, "last.fm/music/MALICE");
        assertContains(result, "last.fm/serve/252/2181591.jpg");
    }

    @Test
    public void testParentChild() throws Exception {
        String create = "CREATE EXTERNAL TABLE jsonchildload" + testInstance + " ("
                + "number STRING, "
                + "name STRING, "
                + "url STRING, "
                + "picture STRING) "
                + tableProps("json-hive/child", "'es.index.read.missing.as.empty' = 'true'");
        String select = "SELECT * FROM jsonchildload" + testInstance;

        System.out.println(server.execute(create));
        List<String> result = server.execute(select);
        assertTrue("Hive returned null", containsNoNull(result));
        assertTrue(result.size() > 1);
        assertContains(result, "Marilyn");
        assertContains(result, "last.fm/music/MALICE");
        assertContains(result, "last.fm/serve/252/2181591.jpg");
    }

    @Test
    public void testDynamicPattern() throws Exception {
        Assert.assertTrue(RestUtils.exists("json-hive/pattern-7"));
        Assert.assertTrue(RestUtils.exists("json-hive/pattern-10"));
        Assert.assertTrue(RestUtils.exists("json-hive/pattern-3"));
    }

    @Test
    public void testDynamicPatternFormat() throws Exception {
        Assert.assertTrue(RestUtils.exists("json-hive/pattern-format-2007-10-06"));
        Assert.assertTrue(RestUtils.exists("json-hive/pattern-format-2001-10-06"));
        Assert.assertTrue(RestUtils.exists("json-hive/pattern-format-2000-10-06"));
    }

    /** Returns true when no row in the Hive output contains the literal "NULL". */
    private static boolean containsNoNull(List<String> str) {
        for (String string : str) {
            if (string.contains("NULL")) {
                return false;
            }
        }
        return true;
    }

    /** Fails unless some row of the Hive output contains {@code content}. */
    private static void assertContains(List<String> str, String content) {
        for (String string : str) {
            if (string.contains(content)) {
                return;
            }
        }
        fail(String.format("'%s' not found in %s", content, str));
    }

    /**
     * Builds the table properties clause for the given ES resource, appending
     * the metadata-read flag of this parameterized run to the caller's params.
     */
    private String tableProps(String resource, String... params) {
        // FIX: was a raw-typed `new ArrayList(...)`; use the diamond operator.
        List<String> copy = new ArrayList<>(Arrays.asList(params));
        copy.add("'" + ConfigurationOptions.ES_READ_METADATA + "'='" + readMetadata + "'");
        return HiveSuite.tableProps(resource, query, copy.toArray(new String[copy.size()]));
    }
}
| |
/**
* Copyright 2011-2017 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.runtime.core;
import java.io.IOException;
import java.text.MessageFormat;
import com.asakusafw.runtime.core.api.ApiStub;
import com.asakusafw.runtime.core.api.ReportApi;
import com.asakusafw.runtime.core.legacy.LegacyReport;
import com.asakusafw.runtime.core.legacy.RuntimeResource;
/**
* Report API entry class.
* The Report API enables to notify some messages in operator methods, to the runtime reporting system
* (e.g. logger, standard output, or etc.).
* Generally, the Report API does not have any effect on the batch execution, for example, the batch execution will
* continue even if {@link Report#error(String)} is invoked.
* Clients should put <code>@Sticky</code> annotation for operator methods using this API, otherwise the Asakusa
* DSL compiler optimization may remove the target operator.
<pre><code>
@Sticky
@Update
public void updateWithReport(Hoge hoge) {
if (hoge.getValue() < 0) {
Report.error("invalid value");
} else {
hoge.setValue(0);
}
}
</code></pre>
* @since 0.1.0
* @version 0.9.0
*/
public final class Report {

    /**
     * The Hadoop property name of the custom implementation class name of {@link Report.Delegate}.
     * To use a default implementation, clients should set {@code com.asakusafw.runtime.core.Report$Default} to it.
     */
    public static final String K_DELEGATE_CLASS = "com.asakusafw.runtime.core.Report.Delegate"; //$NON-NLS-1$

    // Dispatches all Report calls to the active API implementation; defaults to
    // the legacy implementation until another one is registered on the stub.
    private static final ApiStub<ReportApi> STUB = new ApiStub<>(LegacyReport.API);

    // Utility class: not instantiable.
    private Report() {
        return;
    }

    /**
     * Reports an <em>informative</em> message.
     * Clients should put <code>@Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     */
    public static void info(String message) {
        STUB.get().info(message);
    }

    /**
     * Reports an <em>informative</em> message.
     * Clients should put <code>@Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @param throwable the optional exception object (nullable)
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     * @since 0.5.1
     */
    public static void info(String message, Throwable throwable) {
        STUB.get().info(message, throwable);
    }

    /**
     * Reports a <em>warning</em> message.
     * Clients should put <code>@Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     */
    public static void warn(String message) {
        STUB.get().warn(message);
    }

    /**
     * Reports a <em>warning</em> message.
     * Clients should put <code>@Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @param throwable the optional exception object (nullable)
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     * @since 0.5.1
     */
    public static void warn(String message, Throwable throwable) {
        STUB.get().warn(message, throwable);
    }

    /**
     * Reports an <em>error</em> message.
     * Clients should put <code>@Sticky</code> annotation to the operator method that using this.
     * Please be careful that this method will <em>NOT</em> shutdown the running batch.
     * To shutdown the batch, throw an exception ({@link RuntimeException}) in operator methods.
     * @param message the message
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     */
    public static void error(String message) {
        STUB.get().error(message);
    }

    /**
     * Reports an <em>error</em> message.
     * Clients should put <code>@Sticky</code> annotation to the operator method that using this.
     * Please be careful that this method will <em>NOT</em> shutdown the running batch.
     * To shutdown the batch, throw an exception ({@link RuntimeException}) in operator methods.
     * @param message the message
     * @param throwable the optional exception object (nullable)
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     * @since 0.5.1
     */
    public static void error(String message, Throwable throwable) {
        STUB.get().error(message, throwable);
    }

    /**
     * Returns the API stub.
     * Application developer must not use this directly.
     * @return the API stub
     * @since 0.9.0
     */
    public static ApiStub<ReportApi> getStub() {
        return STUB;
    }

    /**
     * {@link FailedException} is thrown when an exception was occurred while processing messages in {@link Report}.
     */
    public static class FailedException extends RuntimeException {

        private static final long serialVersionUID = 1L;

        /**
         * Creates a new instance.
         */
        public FailedException() {
            super();
        }

        /**
         * Creates a new instance.
         * @param message the exception message (nullable)
         * @param cause the original cause (nullable)
         */
        public FailedException(String message, Throwable cause) {
            super(message, cause);
        }

        /**
         * Creates a new instance.
         * @param message the exception message (nullable)
         */
        public FailedException(String message) {
            super(message);
        }

        /**
         * Creates a new instance.
         * @param cause the original cause (nullable)
         */
        public FailedException(Throwable cause) {
            super(cause);
        }
    }

    /**
     * An abstract super class of delegation objects for {@link Report}.
     * Application developers can inherit this class, and set the fully qualified name to the property
     * {@link Report#K_DELEGATE_CLASS} to use the custom implementation for the Report API.
     * @since 0.1.0
     * @version 0.7.4
     */
    public abstract static class Delegate implements RuntimeResource {

        /**
         * Notifies a report.
         * @param level report level
         * @param message report message
         * @throws IOException if failed to notify this report by I/O error
         */
        public abstract void report(Level level, String message) throws IOException;

        /**
         * Notifies a report.
         * The default implementation simply drops {@code throwable} and forwards
         * to {@link #report(Level, String)}.
         * @param level report level
         * @param message report message
         * @param throwable optional exception info (nullable)
         * @throws IOException if failed to notify this report by I/O error
         * @since 0.5.1
         */
        public void report(Level level, String message, Throwable throwable) throws IOException {
            report(level, message);
        }
    }

    /**
     * Represents levels of reporting.
     */
    public enum Level {

        /**
         * Informative level.
         */
        INFO,

        /**
         * Warning level.
         */
        WARN,

        /**
         * Erroneous level.
         */
        ERROR,
    }

    /**
     * A basic implementation of {@link Delegate}.
     * INFO goes to stdout; WARN and ERROR go to stderr.
     * @since 0.1.0
     * @version 0.5.1
     */
    public static class Default extends Delegate {

        @Override
        public void report(Level level, String message) {
            switch (level) {
            case INFO:
                System.out.println(message);
                break;
            case WARN:
                System.err.println(message);
                // Synthesizes a throwable so the call site appears in the log.
                new Exception("Warning").printStackTrace();
                break;
            case ERROR:
                System.err.println(message);
                // Synthesizes a throwable so the call site appears in the log.
                new Exception("Error").printStackTrace();
                break;
            default:
                throw new AssertionError(MessageFormat.format(
                        "[{0}] {1}", //$NON-NLS-1$
                        level,
                        message));
            }
        }

        @Override
        public void report(Level level, String message, Throwable throwable) {
            switch (level) {
            case INFO:
                System.out.println(message);
                if (throwable != null) {
                    throwable.printStackTrace(System.out);
                }
                break;
            case WARN:
            case ERROR:
                System.err.println(message);
                if (throwable != null) {
                    throwable.printStackTrace(System.err);
                }
                break;
            default:
                throw new AssertionError(MessageFormat.format(
                        "[{0}] {1}", //$NON-NLS-1$
                        level,
                        message));
            }
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.search.MultiSearchAction;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.tasks.TransportTasksAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.action.util.QueryPage;
import org.elasticsearch.xpack.core.ml.MlStatsIndex;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsAction;
import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsStatsAction;
import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsStatsAction.Response.Stats;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsState;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsTaskState;
import org.elasticsearch.xpack.core.ml.dataframe.stats.AnalysisStats;
import org.elasticsearch.xpack.core.ml.dataframe.stats.Fields;
import org.elasticsearch.xpack.core.ml.dataframe.stats.classification.ClassificationStats;
import org.elasticsearch.xpack.core.ml.dataframe.stats.common.DataCounts;
import org.elasticsearch.xpack.core.ml.dataframe.stats.common.MemoryUsage;
import org.elasticsearch.xpack.core.ml.dataframe.stats.outlierdetection.OutlierDetectionStats;
import org.elasticsearch.xpack.core.ml.dataframe.stats.regression.RegressionStats;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.PhaseProgress;
import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsTask;
import org.elasticsearch.xpack.ml.dataframe.StoredProgress;
import org.elasticsearch.xpack.ml.dataframe.stats.ProgressTracker;
import org.elasticsearch.xpack.ml.utils.persistence.MlParserUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
public class TransportGetDataFrameAnalyticsStatsAction
extends TransportTasksAction<DataFrameAnalyticsTask, GetDataFrameAnalyticsStatsAction.Request,
GetDataFrameAnalyticsStatsAction.Response, QueryPage<Stats>> {
private static final Logger logger = LogManager.getLogger(TransportGetDataFrameAnalyticsStatsAction.class);
private final Client client;
/**
 * Creates the transport action, registering the request/response readers and
 * running task operations on the MANAGEMENT thread pool.
 */
@Inject
public TransportGetDataFrameAnalyticsStatsAction(TransportService transportService, ClusterService clusterService, Client client,
                                                 ActionFilters actionFilters) {
    super(GetDataFrameAnalyticsStatsAction.NAME, clusterService, transportService, actionFilters,
        GetDataFrameAnalyticsStatsAction.Request::new, GetDataFrameAnalyticsStatsAction.Response::new,
        in -> new QueryPage<>(in, GetDataFrameAnalyticsStatsAction.Response.Stats::new), ThreadPool.Names.MANAGEMENT);
    this.client = client;
}
/**
 * Combines the per-node stats pages into a single response, sorted by job id
 * for a stable ordering, and attaches any task/node failures.
 */
@Override
protected GetDataFrameAnalyticsStatsAction.Response newResponse(GetDataFrameAnalyticsStatsAction.Request request,
                                                                List<QueryPage<Stats>> tasks,
                                                                List<TaskOperationFailure> taskFailures,
                                                                List<FailedNodeException> nodeFailures) {
    List<Stats> stats = tasks.stream()
        .flatMap(page -> page.results().stream())
        .sorted(Comparator.comparing(Stats::getId))
        .collect(Collectors.toList());
    return new GetDataFrameAnalyticsStatsAction.Response(taskFailures, nodeFailures,
        new QueryPage<>(stats, stats.size(), GetDataFrameAnalyticsAction.Response.RESULTS_FIELD));
}
/**
 * Builds the stats for a single running analytics task on this node.
 * The task's reindexing progress is refreshed first; once that update
 * completes, a stats snapshot is taken from the task's stats holder and
 * returned as a one-element page.
 */
@Override
protected void taskOperation(GetDataFrameAnalyticsStatsAction.Request request, DataFrameAnalyticsTask task,
                             ActionListener<QueryPage<Stats>> listener) {
    logger.debug("Get stats for running task [{}]", task.getParams().getId());
    ActionListener<Void> reindexingProgressListener = ActionListener.wrap(
        aVoid -> {
            Stats stats = buildStats(
                task.getParams().getId(),
                task.getStatsHolder().getProgressTracker().report(),
                task.getStatsHolder().getDataCountsTracker().report(task.getParams().getId()),
                task.getStatsHolder().getMemoryUsage(),
                task.getStatsHolder().getAnalysisStats()
            );
            listener.onResponse(new QueryPage<>(Collections.singletonList(stats), 1,
                GetDataFrameAnalyticsAction.Response.RESULTS_FIELD));
        }, listener::onFailure
    );
    // Kick off the progress refresh; the listener above completes the request.
    task.updateReindexTaskProgress(reindexingProgressListener);
}
/**
 * Entry point for the stats request. Flow:
 * 1. Resolve the requested id expression to concrete analytics configs.
 * 2. Fan out to nodes for the stats of RUNNING tasks (super.doExecute).
 * 3. Fill in stats for stopped jobs from the stored documents
 *    (gatherStatsForStoppedTasks) and merge both sets into the response.
 * The final page count is taken from the config lookup, not from the merged
 * stats list, so paging reflects the matched configs.
 */
@Override
protected void doExecute(Task task, GetDataFrameAnalyticsStatsAction.Request request,
                         ActionListener<GetDataFrameAnalyticsStatsAction.Response> listener) {
    logger.debug("Get stats for data frame analytics [{}]", request.getId());
    ActionListener<GetDataFrameAnalyticsAction.Response> getResponseListener = ActionListener.wrap(
        getResponse -> {
            // Expand the id expression into the concrete job ids to query.
            List<String> expandedIds = getResponse.getResources().results().stream().map(DataFrameAnalyticsConfig::getId)
                .collect(Collectors.toList());
            request.setExpandedIds(expandedIds);
            ActionListener<GetDataFrameAnalyticsStatsAction.Response> runningTasksStatsListener = ActionListener.wrap(
                runningTasksStatsResponse -> gatherStatsForStoppedTasks(getResponse.getResources().results(), runningTasksStatsResponse,
                    ActionListener.wrap(
                        finalResponse -> {
                            // While finalResponse has all the stats objects we need, we should report the count
                            // from the get response
                            QueryPage<Stats> finalStats = new QueryPage<>(finalResponse.getResponse().results(),
                                getResponse.getResources().count(), GetDataFrameAnalyticsAction.Response.RESULTS_FIELD);
                            listener.onResponse(new GetDataFrameAnalyticsStatsAction.Response(finalStats));
                        },
                        listener::onFailure)),
                listener::onFailure
            );
            super.doExecute(task, request, runningTasksStatsListener);
        },
        listener::onFailure
    );
    GetDataFrameAnalyticsAction.Request getRequest = new GetDataFrameAnalyticsAction.Request();
    getRequest.setResourceId(request.getId());
    getRequest.setAllowNoResources(request.isAllowNoMatch());
    getRequest.setPageParams(request.getPageParams());
    executeAsyncWithOrigin(client, ML_ORIGIN, GetDataFrameAnalyticsAction.INSTANCE, getRequest, getResponseListener);
}
/**
 * Looks up persisted stats for jobs that have no running task and merges them
 * with the running-task stats into a single, id-sorted response.
 * Searches for all stopped jobs run concurrently; a countdown counter fires the
 * listener when the last one completes, and an AtomicArray keeps each result in
 * its original slot so completion order does not affect the output.
 * If any search fails the listener fails (possibly after other slots completed).
 */
void gatherStatsForStoppedTasks(List<DataFrameAnalyticsConfig> configs, GetDataFrameAnalyticsStatsAction.Response runningTasksResponse,
                                ActionListener<GetDataFrameAnalyticsStatsAction.Response> listener) {
    List<DataFrameAnalyticsConfig> stoppedConfigs = determineStoppedConfigs(configs, runningTasksResponse.getResponse().results());
    if (stoppedConfigs.isEmpty()) {
        // Nothing stopped: the running-task response is already complete.
        listener.onResponse(runningTasksResponse);
        return;
    }
    AtomicInteger counter = new AtomicInteger(stoppedConfigs.size());
    AtomicArray<Stats> jobStats = new AtomicArray<>(stoppedConfigs.size());
    for (int i = 0; i < stoppedConfigs.size(); i++) {
        final int slot = i;
        DataFrameAnalyticsConfig config = stoppedConfigs.get(i);
        searchStats(config, ActionListener.wrap(
            stats -> {
                jobStats.set(slot, stats);
                if (counter.decrementAndGet() == 0) {
                    // Last search finished: merge running + stored stats, sorted by id.
                    List<Stats> allTasksStats = new ArrayList<>(runningTasksResponse.getResponse().results());
                    allTasksStats.addAll(jobStats.asList());
                    Collections.sort(allTasksStats, Comparator.comparing(Stats::getId));
                    listener.onResponse(new GetDataFrameAnalyticsStatsAction.Response(new QueryPage<>(
                        allTasksStats, allTasksStats.size(), GetDataFrameAnalyticsAction.Response.RESULTS_FIELD)));
                }
            },
            listener::onFailure)
        );
    }
}
/**
 * Returns the configs that have no corresponding running task, i.e. jobs that
 * are stopped and whose stats must be read from the stored documents.
 */
static List<DataFrameAnalyticsConfig> determineStoppedConfigs(List<DataFrameAnalyticsConfig> configs, List<Stats> runningTasksStats) {
    Set<String> runningIds = runningTasksStats.stream().map(Stats::getId).collect(Collectors.toSet());
    List<DataFrameAnalyticsConfig> stopped = new ArrayList<>();
    for (DataFrameAnalyticsConfig config : configs) {
        if (runningIds.contains(config.getId()) == false) {
            stopped.add(config);
        }
    }
    return stopped;
}
/**
 * Gathers stats for a config whose task is not running by issuing one multi-search over
 * the persisted stats documents (stored progress, data counts, memory usage and the
 * analysis-specific stats types) and assembling a Stats object from whatever was found.
 */
private void searchStats(DataFrameAnalyticsConfig config, ActionListener<Stats> listener) {
    logger.debug("[{}] Gathering stats for stopped task", config.getId());
    // Start from all-zero progress so a job with no stored progress doc still reports its phases.
    RetrievedStatsHolder retrievedStatsHolder = new RetrievedStatsHolder(
        ProgressTracker.fromZeroes(config.getAnalysis().getProgressPhases()).report());
    MultiSearchRequest multiSearchRequest = new MultiSearchRequest();
    multiSearchRequest.add(buildStoredProgressSearch(config.getId()));
    multiSearchRequest.add(buildStatsDocSearch(config.getId(), DataCounts.TYPE_VALUE));
    multiSearchRequest.add(buildStatsDocSearch(config.getId(), MemoryUsage.TYPE_VALUE));
    // All three analysis stats types are queried; presumably at most one exists per job — verify.
    multiSearchRequest.add(buildStatsDocSearch(config.getId(), OutlierDetectionStats.TYPE_VALUE));
    multiSearchRequest.add(buildStatsDocSearch(config.getId(), ClassificationStats.TYPE_VALUE));
    multiSearchRequest.add(buildStatsDocSearch(config.getId(), RegressionStats.TYPE_VALUE));
    executeAsyncWithOrigin(client, ML_ORIGIN, MultiSearchAction.INSTANCE, multiSearchRequest, ActionListener.wrap(
        multiSearchResponse -> {
            MultiSearchResponse.Item[] itemResponses = multiSearchResponse.getResponses();
            for (int i = 0; i < itemResponses.length; ++i) {
                MultiSearchResponse.Item itemResponse = itemResponses[i];
                if (itemResponse.isFailure()) {
                    // Fail the whole gather on the first failed item, logging enough context
                    // to identify which sub-search broke.
                    SearchRequest itemRequest = multiSearchRequest.requests().get(i);
                    logger.error(
                        new ParameterizedMessage(
                            "[{}] Item failure encountered during multi search for request [indices={}, source={}]: {}",
                            config.getId(), itemRequest.indices(), itemRequest.source(), itemResponse.getFailureMessage()),
                        itemResponse.getFailure());
                    listener.onFailure(ExceptionsHelper.serverError(itemResponse.getFailureMessage(), itemResponse.getFailure()));
                    return;
                } else {
                    SearchHit[] hits = itemResponse.getResponse().getHits().getHits();
                    if (hits.length == 0) {
                        // Not found; the corresponding holder field simply stays at its default.
                    } else if (hits.length == 1) {
                        parseHit(hits[0], config.getId(), retrievedStatsHolder);
                    } else {
                        // Every sub-search requests size(1), so more than one hit is an internal error.
                        throw ExceptionsHelper.serverError("Found [" + hits.length + "] hits when just one was requested");
                    }
                }
            }
            listener.onResponse(buildStats(config.getId(),
                retrievedStatsHolder.progress.get(),
                retrievedStatsHolder.dataCounts,
                retrievedStatsHolder.memoryUsage,
                retrievedStatsHolder.analysisStats
            ));
        },
        e -> listener.onFailure(ExceptionsHelper.serverError("Error searching for stats", e))
    ));
}
/**
 * Builds a single-hit search for the stored-progress document of the given config
 * in the anomaly detectors state index.
 */
private static SearchRequest buildStoredProgressSearch(String configId) {
    SearchRequest request = new SearchRequest(AnomalyDetectorsIndex.jobStateIndexPattern());
    request.indicesOptions(IndicesOptions.lenientExpandOpen());
    request.source()
        .size(1)
        .query(QueryBuilders.idsQuery().addIds(StoredProgress.documentId(configId)));
    return request;
}
/**
 * Builds a search for the most recent stats document of the given type for the given
 * config in the ML stats index.
 */
private static SearchRequest buildStatsDocSearch(String configId, String statsType) {
    QueryBuilder query = QueryBuilders.boolQuery()
        .filter(QueryBuilders.termQuery(Fields.JOB_ID.getPreferredName(), configId))
        .filter(QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), statsType));
    SearchRequest request = new SearchRequest(MlStatsIndex.indexPattern());
    request.indicesOptions(IndicesOptions.lenientExpandOpen());
    request.source()
        .size(1)
        .query(query)
        // Latest document first; unmappedType keeps the search from failing when there
        // are no mappings yet in the index.
        .sort(SortBuilders.fieldSort(Fields.TIMESTAMP.getPreferredName()).order(SortOrder.DESC).unmappedType("long"));
    return request;
}
/**
 * Routes a retrieved stats document into the matching slot of the holder, dispatching
 * on the document-id convention of each stats type.
 */
private static void parseHit(SearchHit hit, String configId, RetrievedStatsHolder retrievedStatsHolder) {
    String docId = hit.getId();
    if (StoredProgress.documentId(configId).equals(docId)) {
        retrievedStatsHolder.progress = MlParserUtils.parse(hit, StoredProgress.PARSER);
        return;
    }
    if (DataCounts.documentId(configId).equals(docId)) {
        retrievedStatsHolder.dataCounts = MlParserUtils.parse(hit, DataCounts.LENIENT_PARSER);
        return;
    }
    if (docId.startsWith(MemoryUsage.documentIdPrefix(configId))) {
        retrievedStatsHolder.memoryUsage = MlParserUtils.parse(hit, MemoryUsage.LENIENT_PARSER);
        return;
    }
    if (docId.startsWith(OutlierDetectionStats.documentIdPrefix(configId))) {
        retrievedStatsHolder.analysisStats = MlParserUtils.parse(hit, OutlierDetectionStats.LENIENT_PARSER);
        return;
    }
    if (docId.startsWith(ClassificationStats.documentIdPrefix(configId))) {
        retrievedStatsHolder.analysisStats = MlParserUtils.parse(hit, ClassificationStats.LENIENT_PARSER);
        return;
    }
    if (docId.startsWith(RegressionStats.documentIdPrefix(configId))) {
        retrievedStatsHolder.analysisStats = MlParserUtils.parse(hit, RegressionStats.LENIENT_PARSER);
        return;
    }
    throw ExceptionsHelper.serverError("unexpected doc id [" + docId + "]");
}
/**
 * Assembles a Stats object for the given analytics id from the supplied stats documents
 * plus cluster-state task information (state, failure reason, assigned node and
 * assignment explanation).
 *
 * Fix: the original dereferenced {@code analyticsTask} (and the cast task state) in the
 * FAILED branch without a null check, even though the code immediately below explicitly
 * guards {@code analyticsTask != null}. All task-derived fields are now computed inside
 * that guard, avoiding a potential NPE and leaving them null when no task exists.
 */
private GetDataFrameAnalyticsStatsAction.Response.Stats buildStats(String concreteAnalyticsId,
                                                                   List<PhaseProgress> progress,
                                                                   DataCounts dataCounts,
                                                                   MemoryUsage memoryUsage,
                                                                   AnalysisStats analysisStats) {
    ClusterState clusterState = clusterService.state();
    PersistentTasksCustomMetadata tasks = clusterState.getMetadata().custom(PersistentTasksCustomMetadata.TYPE);
    PersistentTasksCustomMetadata.PersistentTask<?> analyticsTask = MlTasks.getDataFrameAnalyticsTask(concreteAnalyticsId, tasks);
    DataFrameAnalyticsState analyticsState = MlTasks.getDataFrameAnalyticsState(concreteAnalyticsId, tasks);
    String failureReason = null;
    DiscoveryNode node = null;
    String assignmentExplanation = null;
    if (analyticsTask != null) {
        if (analyticsState == DataFrameAnalyticsState.FAILED) {
            // The persistent task's state may legitimately be absent; report no reason then.
            DataFrameAnalyticsTaskState taskState = (DataFrameAnalyticsTaskState) analyticsTask.getState();
            failureReason = taskState == null ? null : taskState.getReason();
        }
        node = clusterState.nodes().get(analyticsTask.getExecutorNode());
        assignmentExplanation = analyticsTask.getAssignment().getExplanation();
    }
    return new GetDataFrameAnalyticsStatsAction.Response.Stats(
        concreteAnalyticsId,
        analyticsState,
        failureReason,
        progress,
        dataCounts,
        memoryUsage,
        analysisStats,
        node,
        assignmentExplanation
    );
}
/**
 * Mutable container accumulating the stats documents retrieved by the multi-search.
 * Fields are volatile since they are written from the search-response listener and
 * read when the final Stats object is built.
 */
private static class RetrievedStatsHolder {
    // Initialized to the caller-supplied default so a job with no stored progress
    // document still reports (zeroed) progress.
    private volatile StoredProgress progress;
    private volatile DataCounts dataCounts;
    private volatile MemoryUsage memoryUsage;
    private volatile AnalysisStats analysisStats;
    private RetrievedStatsHolder(List<PhaseProgress> defaultProgress) {
        progress = new StoredProgress(defaultProgress);
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.security.access;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService.BlockingInterface;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
/**
 * Utility client for doing access control admin operations.
 *
 * Fix: several methods obtained an {@link Admin} via {@code connection.getAdmin()} and
 * never closed it, leaking the Admin instance. All methods now close the Admin with
 * try-with-resources, consistent with {@code getUserPermissions} and
 * {@code isAccessControllerRunning} in this same class. Closing the Admin does not
 * close the caller's Connection.
 */
@InterfaceAudience.Public
public class AccessControlClient {
  public static final TableName ACL_TABLE_NAME =
      TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "acl");
  /**
   * Return true if authorization is supported and enabled
   * @param connection The connection to use
   * @return true if authorization is supported and enabled, false otherwise
   * @throws IOException if a remote or network exception occurs
   */
  public static boolean isAuthorizationEnabled(Connection connection) throws IOException {
    try (Admin admin = connection.getAdmin()) {
      return admin.getSecurityCapabilities().contains(SecurityCapability.AUTHORIZATION);
    }
  }
  /**
   * Return true if cell authorization is supported and enabled
   * @param connection The connection to use
   * @return true if cell authorization is supported and enabled, false otherwise
   * @throws IOException if a remote or network exception occurs
   */
  public static boolean isCellAuthorizationEnabled(Connection connection) throws IOException {
    try (Admin admin = connection.getAdmin()) {
      return admin.getSecurityCapabilities().contains(SecurityCapability.CELL_AUTHORIZATION);
    }
  }
  /**
   * Builds a blocking coprocessor stub for the AccessControlService on the given table.
   * NOTE(review): no longer referenced anywhere in this class — candidate for removal.
   */
  private static BlockingInterface getAccessControlServiceStub(Table ht)
      throws IOException {
    CoprocessorRpcChannel service = ht.coprocessorService(HConstants.EMPTY_START_ROW);
    BlockingInterface protocol =
        AccessControlProtos.AccessControlService.newBlockingStub(service);
    return protocol;
  }
  /**
   * Grants permission on the specified table for the specified user
   * @param connection The Connection instance to use
   * @param tableName the table on which the permission is granted
   * @param userName the user receiving the permission
   * @param family the column family, or null for a table-level permission
   * @param qual the column qualifier, or null
   * @param mergeExistingPermissions If set to false, later granted permissions will override
   *          previous granted permissions. otherwise, it'll merge with previous granted
   *          permissions.
   * @param actions the actions being granted
   * @throws Throwable on failure
   */
  private static void grant(Connection connection, final TableName tableName,
      final String userName, final byte[] family, final byte[] qual, boolean mergeExistingPermissions,
      final Permission.Action... actions) throws Throwable {
    try (Admin admin = connection.getAdmin()) {
      admin.grant(new UserPermission(userName, Permission.newBuilder(tableName)
          .withFamily(family).withQualifier(qual).withActions(actions).build()),
        mergeExistingPermissions);
    }
  }
  /**
   * Grants permission on the specified table for the specified user.
   * If permissions for a specified user exists, later granted permissions will override
   * previous granted permissions.
   * @param connection The Connection instance to use
   * @param tableName the table on which the permission is granted
   * @param userName the user receiving the permission
   * @param family the column family, or null for a table-level permission
   * @param qual the column qualifier, or null
   * @param actions the actions being granted
   * @throws Throwable on failure
   */
  public static void grant(Connection connection, final TableName tableName, final String userName,
      final byte[] family, final byte[] qual, final Permission.Action... actions) throws Throwable {
    grant(connection, tableName, userName, family, qual, true, actions);
  }
  /**
   * Grants permission on the specified namespace for the specified user.
   * @param connection The Connection instance to use
   * @param namespace the namespace on which the permission is granted
   * @param userName the user receiving the permission
   * @param mergeExistingPermissions If set to false, later granted permissions will override
   *          previous granted permissions. otherwise, it'll merge with previous granted
   *          permissions.
   * @param actions the actions being granted
   * @throws Throwable on failure
   */
  private static void grant(Connection connection, final String namespace, final String userName,
      boolean mergeExistingPermissions, final Permission.Action... actions) throws Throwable {
    try (Admin admin = connection.getAdmin()) {
      admin.grant(
        new UserPermission(userName, Permission.newBuilder(namespace).withActions(actions).build()),
        mergeExistingPermissions);
    }
  }
  /**
   * Grants permission on the specified namespace for the specified user.
   * If permissions on the specified namespace exists, later granted permissions will override
   * previous granted permissions.
   * @param connection The Connection instance to use
   * @param namespace the namespace on which the permission is granted
   * @param userName the user receiving the permission
   * @param actions the actions being granted
   * @throws Throwable on failure
   */
  public static void grant(Connection connection, final String namespace, final String userName,
      final Permission.Action... actions) throws Throwable {
    grant(connection, namespace, userName, true, actions);
  }
  /**
   * Grant global permissions for the specified user.
   * @param connection The Connection instance to use
   * @param userName the user receiving the permission
   * @param mergeExistingPermissions If set to false, later granted permissions will override
   *          previous granted permissions. otherwise, it'll merge with previous granted
   *          permissions.
   * @param actions the actions being granted
   * @throws Throwable on failure
   */
  private static void grant(Connection connection, final String userName,
      boolean mergeExistingPermissions, final Permission.Action... actions) throws Throwable {
    try (Admin admin = connection.getAdmin()) {
      admin.grant(
        new UserPermission(userName, Permission.newBuilder().withActions(actions).build()),
        mergeExistingPermissions);
    }
  }
  /**
   * Grant global permissions for the specified user.
   * If permissions for the specified user exists, later granted permissions will override
   * previous granted permissions.
   * @param connection The Connection instance to use
   * @param userName the user receiving the permission
   * @param actions the actions being granted
   * @throws Throwable on failure
   */
  public static void grant(Connection connection, final String userName,
      final Permission.Action... actions) throws Throwable {
    grant(connection, userName, true, actions);
  }
  /**
   * Return true if the ACL table is available, which is used here as a proxy for the
   * AccessController coprocessor being active.
   * @param connection The Connection instance to use
   * @return true if the ACL table is available
   */
  public static boolean isAccessControllerRunning(Connection connection)
      throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
    try (Admin admin = connection.getAdmin()) {
      return admin.isTableAvailable(ACL_TABLE_NAME);
    }
  }
  /**
   * Revokes the permission on the table
   * @param connection The Connection instance to use
   * @param tableName the table on which the permission is revoked
   * @param username the user whose permission is revoked
   * @param family the column family, or null for a table-level revoke
   * @param qualifier the column qualifier, or null
   * @param actions the actions being revoked
   * @throws Throwable on failure
   */
  public static void revoke(Connection connection, final TableName tableName,
      final String username, final byte[] family, final byte[] qualifier,
      final Permission.Action... actions) throws Throwable {
    try (Admin admin = connection.getAdmin()) {
      admin.revoke(new UserPermission(username, Permission.newBuilder(tableName)
          .withFamily(family).withQualifier(qualifier).withActions(actions).build()));
    }
  }
  /**
   * Revokes the permission on the namespace for the specified user.
   * @param connection The Connection instance to use
   * @param namespace the namespace on which the permission is revoked
   * @param userName the user whose permission is revoked
   * @param actions the actions being revoked
   * @throws Throwable on failure
   */
  public static void revoke(Connection connection, final String namespace,
      final String userName, final Permission.Action... actions) throws Throwable {
    try (Admin admin = connection.getAdmin()) {
      admin.revoke(
        new UserPermission(userName, Permission.newBuilder(namespace).withActions(actions).build()));
    }
  }
  /**
   * Revoke global permissions for the specified user.
   * @param connection The Connection instance to use
   * @param userName the user whose permission is revoked
   * @param actions the actions being revoked
   * @throws Throwable on failure
   */
  public static void revoke(Connection connection, final String userName,
      final Permission.Action... actions) throws Throwable {
    try (Admin admin = connection.getAdmin()) {
      admin.revoke(new UserPermission(userName, Permission.newBuilder().withActions(actions).build()));
    }
  }
  /**
   * List all the userPermissions matching the given pattern. If pattern is null, the behavior is
   * dependent on whether user has global admin privileges or not. If yes, the global permissions
   * along with the list of superusers would be returned. Else, no rows get returned.
   * @param connection The Connection instance to use
   * @param tableRegex The regular expression string to match against
   * @return List of UserPermissions
   * @throws Throwable on failure
   */
  public static List<UserPermission> getUserPermissions(Connection connection, String tableRegex)
      throws Throwable {
    return getUserPermissions(connection, tableRegex, HConstants.EMPTY_STRING);
  }
  /**
   * List all the userPermissions matching the given table pattern and user name.
   * @param connection Connection
   * @param tableRegex The regular expression string to match against
   * @param userName User name, if empty then all user permissions will be retrieved.
   * @return List of UserPermissions
   * @throws Throwable on failure
   */
  public static List<UserPermission> getUserPermissions(Connection connection, String tableRegex,
      String userName) throws Throwable {
    List<UserPermission> permList = new ArrayList<>();
    try (Admin admin = connection.getAdmin()) {
      if (tableRegex == null || tableRegex.isEmpty()) {
        // No pattern: global permissions for the given (or all) users.
        permList = admin.getUserPermissions(
            GetUserPermissionsRequest.newBuilder().withUserName(userName).build());
      } else if (tableRegex.charAt(0) == '@') { // Namespaces
        String namespaceRegex = tableRegex.substring(1);
        for (NamespaceDescriptor nsds : admin.listNamespaceDescriptors()) { // Read out all
                                                                            // namespaces
          String namespace = nsds.getName();
          if (namespace.matches(namespaceRegex)) { // Match the given namespace regex?
            permList.addAll(admin.getUserPermissions(
                GetUserPermissionsRequest.newBuilder(namespace).withUserName(userName).build()));
          }
        }
      } else { // Tables
        List<TableDescriptor> htds = admin.listTableDescriptors(Pattern.compile(tableRegex), true);
        for (TableDescriptor htd : htds) {
          permList.addAll(admin.getUserPermissions(GetUserPermissionsRequest
              .newBuilder(htd.getTableName()).withUserName(userName).build()));
        }
      }
    }
    return permList;
  }
  /**
   * List all the userPermissions matching the given table pattern and column family.
   * @param connection Connection
   * @param tableRegex The regular expression string to match against. It shouldn't be null, empty
   *          or a namespace regular expression.
   * @param columnFamily Column family
   * @return List of UserPermissions
   * @throws Throwable on failure
   */
  public static List<UserPermission> getUserPermissions(Connection connection, String tableRegex,
      byte[] columnFamily) throws Throwable {
    return getUserPermissions(connection, tableRegex, columnFamily, null, HConstants.EMPTY_STRING);
  }
  /**
   * List all the userPermissions matching the given table pattern, column family and user name.
   * @param connection Connection
   * @param tableRegex The regular expression string to match against. It shouldn't be null, empty
   *          or a namespace regular expression.
   * @param columnFamily Column family
   * @param userName User name, if empty then all user permissions will be retrieved.
   * @return List of UserPermissions
   * @throws Throwable on failure
   */
  public static List<UserPermission> getUserPermissions(Connection connection, String tableRegex,
      byte[] columnFamily, String userName) throws Throwable {
    return getUserPermissions(connection, tableRegex, columnFamily, null, userName);
  }
  /**
   * List all the userPermissions matching the given table pattern, column family and column
   * qualifier.
   * @param connection Connection
   * @param tableRegex The regular expression string to match against. It shouldn't be null, empty
   *          or a namespace regular expression.
   * @param columnFamily Column family
   * @param columnQualifier Column qualifier
   * @return List of UserPermissions
   * @throws Throwable on failure
   */
  public static List<UserPermission> getUserPermissions(Connection connection, String tableRegex,
      byte[] columnFamily, byte[] columnQualifier) throws Throwable {
    return getUserPermissions(connection, tableRegex, columnFamily, columnQualifier,
      HConstants.EMPTY_STRING);
  }
  /**
   * List all the userPermissions matching the given table pattern, column family and column
   * qualifier.
   * @param connection Connection
   * @param tableRegex The regular expression string to match against. It shouldn't be null, empty
   *          or a namespace regular expression.
   * @param columnFamily Column family
   * @param columnQualifier Column qualifier
   * @param userName User name, if empty then all user permissions will be retrieved.
   * @return List of UserPermissions
   * @throws Throwable on failure
   */
  public static List<UserPermission> getUserPermissions(Connection connection, String tableRegex,
      byte[] columnFamily, byte[] columnQualifier, String userName) throws Throwable {
    if (tableRegex == null || tableRegex.isEmpty() || tableRegex.charAt(0) == '@') {
      throw new IllegalArgumentException("Table name can't be null or empty or a namespace.");
    }
    List<UserPermission> permList = new ArrayList<UserPermission>();
    try (Admin admin = connection.getAdmin()) {
      List<TableDescriptor> htds = admin.listTableDescriptors(Pattern.compile(tableRegex), true);
      // Retrieve table permissions
      for (TableDescriptor htd : htds) {
        permList.addAll(admin.getUserPermissions(
          GetUserPermissionsRequest.newBuilder(htd.getTableName()).withFamily(columnFamily)
              .withQualifier(columnQualifier).withUserName(userName).build()));
      }
    }
    return permList;
  }
  /**
   * Validates whether specified user has permission to perform actions on the mentioned table,
   * column family or column qualifier.
   * @param connection Connection
   * @param tableName Table name, it shouldn't be null or empty.
   * @param columnFamily The column family. Optional argument, can be empty. If empty then
   *          validation will happen at table level.
   * @param columnQualifier The column qualifier. Optional argument, can be empty. If empty then
   *          validation will happen at table and column family level. columnQualifier will not be
   *          considered if columnFamily is passed as null or empty.
   * @param userName User name, it shouldn't be null or empty.
   * @param actions Actions
   * @return true if access allowed to the specified user, otherwise false.
   * @throws Throwable on failure
   */
  public static boolean hasPermission(Connection connection, String tableName, String columnFamily,
      String columnQualifier, String userName, Permission.Action... actions) throws Throwable {
    return hasPermission(connection, tableName, Bytes.toBytes(columnFamily),
      Bytes.toBytes(columnQualifier), userName, actions);
  }
  /**
   * Validates whether specified user has permission to perform actions on the mentioned table,
   * column family or column qualifier.
   * @param connection Connection
   * @param tableName Table name, it shouldn't be null or empty.
   * @param columnFamily The column family. Optional argument, can be empty. If empty then
   *          validation will happen at table level.
   * @param columnQualifier The column qualifier. Optional argument, can be empty. If empty then
   *          validation will happen at table and column family level. columnQualifier will not be
   *          considered if columnFamily is passed as null or empty.
   * @param userName User name, it shouldn't be null or empty.
   * @param actions Actions
   * @return true if access allowed to the specified user, otherwise false.
   * @throws Throwable on failure
   */
  public static boolean hasPermission(Connection connection, String tableName, byte[] columnFamily,
      byte[] columnQualifier, String userName, Permission.Action... actions) throws Throwable {
    if (StringUtils.isEmpty(tableName) || StringUtils.isEmpty(userName)) {
      throw new IllegalArgumentException("Table and user name can't be null or empty.");
    }
    List<Permission> permissions = new ArrayList<>(1);
    permissions.add(Permission.newBuilder(TableName.valueOf(tableName)).withFamily(columnFamily)
        .withQualifier(columnQualifier).withActions(actions).build());
    try (Admin admin = connection.getAdmin()) {
      return admin.hasUserPermissions(userName, permissions).get(0);
    }
  }
}
| |
package com.digmia.jsmppgw;
import java.io.IOException;
import java.util.Date;
import java.util.Enumeration;
import java.util.Random;
import java.util.TimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.jsmpp.bean.AlertNotification;
import org.jsmpp.bean.DataSm;
import org.jsmpp.bean.DeliverSm;
import org.jsmpp.extra.ProcessRequestException;
import org.jsmpp.session.DataSmResult;
import org.springframework.jmx.export.annotation.ManagedResource;
import org.springframework.jmx.export.annotation.ManagedAttribute;
import org.jsmpp.bean.Alphabet;
import org.jsmpp.bean.BindType;
import org.jsmpp.bean.ESMClass;
import org.jsmpp.bean.GeneralDataCoding;
import org.jsmpp.bean.MessageClass;
import org.jsmpp.bean.NumberingPlanIndicator;
import org.jsmpp.bean.RegisteredDelivery;
import org.jsmpp.bean.SMSCDeliveryReceipt;
import org.jsmpp.bean.TypeOfNumber;
import org.jsmpp.session.BindParameter;
import org.jsmpp.session.SMPPSession;
import org.jsmpp.util.AbsoluteTimeFormatter;
import org.jsmpp.util.TimeFormatter;
import javax.jms.*;
import net.sf.json.JSONObject;
import org.jsmpp.InvalidResponseException;
import org.jsmpp.PDUException;
import org.jsmpp.SMPPConstant;
import org.jsmpp.bean.DeliveryReceipt;
import org.jsmpp.bean.MessageType;
import org.jsmpp.bean.OptionalParameter;
import org.jsmpp.bean.OptionalParameters;
import org.jsmpp.extra.NegativeResponseException;
import org.jsmpp.extra.ResponseTimeoutException;
import org.jsmpp.session.MessageReceiverListener;
import org.jsmpp.util.InvalidDeliveryReceiptException;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.core.MessageCreator;
import org.springframework.jms.listener.AbstractMessageListenerContainer;
import org.springframework.jms.listener.SessionAwareMessageListener;
// javabean junk
/**
*
* @author jurajbednar
*/
@ManagedResource(objectName = "bean:name=SMPPBean", description = "Core SMPP gateway bean", log = true,
logFile = "jmx.log", currencyTimeLimit = 15)
public class SMPPBean implements SessionAwareMessageListener, MessageReceiverListener {
// --- Configuration (set before initializeSmpp() runs; presumably injected — verify wiring) ---
private String hostname;               // SMSC host passed to connectAndBind()
private int port;                      // SMSC port passed to connectAndBind()
private String username;               // SMPP system id used for the bind
private String password;               // SMPP password used for the bind
private String longNumber;             // source address for SENDER_TYPE_LONG messages
private String alphanumericNumber;     // source address for SENDER_TYPE_ALPHANUMERIC messages
private AbstractMessageListenerContainer messageListenerContainer; // JMS consumer; started only after a successful bind
private int maxRetransmits = 50;
private Destination jmsDestination;
private Destination incomingQueue;
private JmsTemplate jmsSenderTemplate;
private long smppConnectTimeout = 60000L;  // ms; passed to connectAndBind()
private long messageRetryRate = 1000L;     // ms; multiplied by try number as back-off between delivery retries
private long messageSendingRate = 1000L;
// --- Counters/state exposed via JMX (see @ManagedResource on the class) ---
private String status = "uninitialized";
private String lastError = "";
private long lastDeliveryDuration = 0L;
private long messagesDelivered = 0L;
private long errors = 0L;
private int lastTries = 0;
private boolean lastDeliveryOk = true;
private int successfulRedeliveries = 0;
private int failedRedeliveries = 0;
private int processedRedeliveries = 0;
private long receivedMessages = 0L;
private int processedLongMessages = 0;
private Date deliveryStart;            // timestamp of the delivery currently in progress
// --- SMPP session and helpers ---
private SMPPSession session = new SMPPSession();  // recreated on reconnect(); null after destroySmpp()
private final TimeFormatter timeFormatter = new AbsoluteTimeFormatter();
private final TimeZone tz = TimeZone.getDefault();
private final Random random = new Random();       // seeded in initializeSmpp()
/** No-argument constructor; configuration fields are populated after construction
 *  (before the container invokes {@code @PostConstruct initializeSmpp()}). */
public SMPPBean() {
}
/**
 * Connects and binds the SMPP session, then starts the JMS message listener.
 * Invoked by the container after dependency injection ({@code @PostConstruct}) and
 * again from {@link #reconnect()}.
 */
@PostConstruct
public void initializeSmpp() {
    // Seed with the current time so generated values differ across restarts.
    random.setSeed(new Date().getTime());
    System.out.println("jsmppGw: Initializing SMPP"); // DEBUG
    if (session == null) {
        session = new SMPPSession();
    }
    try {
        // Bind as transceiver (TRX) so the same session both sends and receives.
        session.connectAndBind(hostname, port, new BindParameter(BindType.BIND_TRX, username, password, "cp", TypeOfNumber.UNKNOWN, NumberingPlanIndicator.UNKNOWN, null), smppConnectTimeout);
        session.setMessageReceiverListener(this);
        status = "connected";
        System.out.println("jsmppGw: Connected"); // DEBUG
        // Start consuming JMS messages only after the SMPP session is bound.
        messageListenerContainer.start();
        System.out.println("jsmppGw: Message listener started"); // DEBUG
    } catch (IOException e) {
        e.printStackTrace();
        handleException(e, "Failed to connect and bind to host");
    }
}
/**
 * Unbinds and closes the SMPP session; invoked by the container on shutdown
 * ({@code @PreDestroy}) and from {@link #reconnect()}.
 *
 * Fix: made idempotent — the original nulled {@code session} and would NPE on
 * {@code session.unbindAndClose()} if invoked a second time (e.g. container shutdown
 * after a manual tear-down).
 */
@PreDestroy
public void destroySmpp() {
    System.out.println("jsmppGw: Disconnecting"); // DEBUG
    if (session != null) {
        session.unbindAndClose();
        // Null out the session so a later onMessage()/initializeSmpp() binds afresh.
        session = null;
    }
    status = "disconnected";
    System.out.println("jsmppGw: Disconnected"); // DEBUG
}
/**
 * Tears down the current SMPP session and establishes a fresh one. Used to recover
 * from I/O and response-timeout errors during message delivery.
 */
public void reconnect() {
    destroySmpp();
    // Replace the closed session before initializeSmpp() so it does not reuse it.
    session = new SMPPSession();
    initializeSmpp();
}
public synchronized void onMessage(Message message, Session jmsSession) {
deliveryStart = new Date();
System.out.println("jsmppGw: Entering onMessage"); // DEBUG
if (session == null) {
initializeSmpp();
}
if (message instanceof TextMessage) {
String jmsText = null;
try {
jmsText = ((TextMessage) message).getText();
int redeliveries = 0;
Enumeration propertyNames = message.getPropertyNames();
while (propertyNames.hasMoreElements()) {
final String n = (String) propertyNames.nextElement();
if (n.equals("redeliveries")) {
redeliveries = message.getIntProperty("redeliveries");
}
}
if (redeliveries > 0) {
processedRedeliveries++;
Logger.getLogger("jms").log(Level.INFO,
"Trying to redeliver message that came back to queue, redelivery attempt #" + redeliveries);
}
} catch (JMSException e) {
handleException(e, "Unable to read or process message: ");
System.out.println("jsmppGw: Leaving onMessage: Unable to process message"); // DEBUG
throw new RuntimeException("Unable to read or process message: " + e.getMessage());
}
if (jmsText == null) {
System.out.println("jsmppGw: Leaving onMessage: jmsText is null"); // DEBUG
return;
}
JSONObject jsonObject = JSONObject.fromObject(jmsText);
ShortMessage msg = (ShortMessage) JSONObject.toBean(jsonObject,
ShortMessage.class);
if ((msg.getDestinationNumber() == null) ||
(msg.getDestinationNumber().length() == 0) ||
(msg.getTextContent() == null) ||
(msg.getTextContent().length() == 0)) {
System.out.println("jsmppGw: Either text or destination number is empty"); // DEBUG
return;
}
status = "delivering";
final long now = new Date().getTime();
TypeOfNumber typeOfNumber = TypeOfNumber.INTERNATIONAL;
NumberingPlanIndicator numberingPlanIndicator = NumberingPlanIndicator.ISDN;
String sourceNumber = longNumber;
switch (msg.getFromType()) {
case ShortMessage.SENDER_TYPE_LONG:
typeOfNumber = TypeOfNumber.INTERNATIONAL;
numberingPlanIndicator = NumberingPlanIndicator.ISDN;
sourceNumber = longNumber;
break;
case ShortMessage.SENDER_TYPE_ALPHANUMERIC:
typeOfNumber = TypeOfNumber.ALPHANUMERIC;
numberingPlanIndicator = NumberingPlanIndicator.UNKNOWN;
sourceNumber = alphanumericNumber;
break;
case ShortMessage.SENDER_TYPE_SHORT:
//typeOfNumber = TypeOfNumber.NETWORK_SPECIFIC;
//numberingPlanIndicator = NumberingPlanIndicator.PRIVATE;
//sourceNumber = ;
handleException(new RuntimeException("Can not handle short numbers now, T-mobile network hates me"));
}
boolean delivered = false;
Exception lastException = null;
int tries = 0;
for (tries = 0; (tries < 10) && (!delivered); tries++) {
try {
try {
if (tries > 0) {
System.out.println("jsmppGw: Retrying. Try #" + tries); // DEBUG
try {
Thread.sleep(tries * messageRetryRate);
} catch (InterruptedException e) {
}
}
System.out.println("jsmppGw: Submitting message"); // DEBUG
if (msg.isSarFormattedMessage()) {
// Long message delivery
final OptionalParameter sarMsgRefNum = OptionalParameters.newSarMsgRefNum(msg.getSarMsgRefNum());
final OptionalParameter sarTotalSegments = OptionalParameters.newSarTotalSegments(msg.getSarTotalSegments());
final OptionalParameter sarSegmentSeqnum = OptionalParameters.newSarSegmentSeqnum(msg.getSarSegmentNumber());
String messageId = session.submitShortMessage("",
// sender
typeOfNumber,
numberingPlanIndicator,
sourceNumber,
// recipient
TypeOfNumber.INTERNATIONAL,
NumberingPlanIndicator.ISDN,
msg.getDestinationNumber(),
// parameters
new ESMClass(), (byte) 0, (byte) 1,
timeFormatter.format(new Date(now - tz.getOffset(now))),
null, new RegisteredDelivery(SMSCDeliveryReceipt.DEFAULT),
(byte) 0, new GeneralDataCoding(false, false, MessageClass.CLASS1, Alphabet.ALPHA_DEFAULT), (byte) 0,
// message text
msg.getTextContent().getBytes(),
// long message parameters
sarMsgRefNum, sarSegmentSeqnum, sarTotalSegments);
} else {
if (msg.getTextContent().length() > 160) {
splitAndDeliverLongMessage(msg);
} else {
// Normal message delivery
String messageId = session.submitShortMessage("",
// sender
typeOfNumber,
numberingPlanIndicator,
sourceNumber,
// recipient
TypeOfNumber.INTERNATIONAL,
NumberingPlanIndicator.ISDN,
msg.getDestinationNumber(),
// parameters
new ESMClass(), (byte) 0, (byte) 1,
timeFormatter.format(new Date(now - tz.getOffset(now))),
null, new RegisteredDelivery(SMSCDeliveryReceipt.DEFAULT),
(byte) 0, new GeneralDataCoding(false, false, MessageClass.CLASS1, Alphabet.ALPHA_DEFAULT), (byte) 0,
// message text
msg.getTextContent().getBytes());
}
}
message.acknowledge();
delivered = true;
System.out.println("jsmppGw: Message delivered"); // DEBUG
break;
} catch (ResponseTimeoutException e) {
handleException(e);
System.out.println("Caught exception delivering message, reconnecting and retrying " + e.getMessage());
reconnect();
} catch (IOException e) {
handleException(e);
System.out.println("Caught IO exception delivering message, reconnecting and retrying " + e.getMessage());
reconnect();
} catch (PDUException e) {
handleException(e);
message.acknowledge();
System.out.println("Caught exception delivering message, _NOT_ retrying, message has wrong format: " + e.getMessage());
return;
} catch (InvalidResponseException e) {
handleException(e);
message.acknowledge();
System.out.println("Caught exception delivering message, _NOT_ retrying, reply from operator has wrong format: " + e.getMessage());
return;
} catch (NegativeResponseException e) {
switch (e.getCommandStatus()) {
case SMPPConstant.STAT_ESME_RTHROTTLED:
System.out.println("We are sending too fast and are throttled. Retrying after waiting for a while");
break;
case SMPPConstant.STAT_ESME_RINVDSTADR:
System.out.println("Invalid destination address, we can not deliver this, not retrying.");
errors++;
message.acknowledge();
return;
default:
handleException(e, "Got negative response, please check it in SMPP constants (STAT_ESME_...) " + e.getCommandStatus() + ", retrying, error status: " + e.getMessage());
break;
}
}
} catch (JMSException e) {
handleException(e);
}
}
lastTries = tries;
if (delivered == false) {
handleException(lastException, "Unable to deliver message, tried for three times.");
System.out.println("jsmppGw: Leaving onMessage: redelivering message"); // DEBUG
redeliver((TextMessage) message, jmsSession);
return;
} else {
messagesDelivered++;
lastDeliveryOk = true;
status = "connected";
countDeliveryTime();
}
} else {
handleException(new RuntimeException("Unknown message type"));
}
System.out.println("jsmppGw: Leaving onMessage: at the end of routine, everything went OK."); // DEBUG
try {
Thread.sleep(messageSendingRate);
} catch (InterruptedException e) {
}
}
/**
 * Records a failed delivery attempt: remembers the error text, flips the
 * gateway status, bumps the error counter and closes the delivery timer.
 *
 * @param e the exception that caused the failure; may be null (onMessage
 *          passes a lastException reference that is never assigned on the
 *          retry-exhausted path)
 * @param s optional context prefix for the error text; may be null
 */
private void handleException(Exception e, String s) {
    // Fix: guard against e == null, which previously threw an NPE here and
    // masked the original delivery failure.
    String reason = (e == null) ? "unknown error" : e.getMessage();
    lastError = (s == null ? "" : (s + " ")) + reason +
            " at " + new Date().toString();
    status = "exception thrown";
    lastDeliveryOk = false;
    errors++;
    countDeliveryTime();
    System.out.println("jsmppGw: Exception thrown: " + lastError);
}
/**
 * Records a delivery failure using the default error-text prefix.
 *
 * @param e the exception that aborted the delivery attempt
 */
private void handleException(Exception e) {
    handleException(e, "Unable to deliver message.");
}
/**
 * Stores the elapsed wall-clock time of the current delivery attempt,
 * in milliseconds, measured from {@code deliveryStart}.
 */
private void countDeliveryTime() {
    lastDeliveryDuration = System.currentTimeMillis() - deliveryStart.getTime();
}
/**
 * Puts a message that could not be delivered back onto the outgoing JMS
 * queue with an incremented "redeliveries" counter, or gives up (throwing a
 * RuntimeException) once {@code maxRetransmits} has been exceeded.
 *
 * @param message    the failed message to re-enqueue
 * @param jmsSession the JMS session used to create the producer and new message
 * @throws RuntimeException when maxRetransmits is reached or re-enqueuing fails
 */
private void redeliver(TextMessage message, Session jmsSession) {
    try {
        System.out.println("jsmppGw: Redeliver"); // DEBUG
        // propertyExists() replaces the old manual scan over getPropertyNames().
        int redeliveries = 0;
        if (message.propertyExists("redeliveries")) {
            redeliveries = message.getIntProperty("redeliveries");
        }
        System.out.println("jsmppGw: Redeliveries so far: " + redeliveries); // DEBUG
        if (redeliveries > maxRetransmits) {
            lastError = "MaxRetransmits reached. Last error was: " + lastError;
            System.out.println(lastError); // DEBUG
            throw new RuntimeException(lastError);
        }
        final MessageProducer producer = jmsSession.createProducer(jmsDestination);
        try {
            final TextMessage newMsg = jmsSession.createTextMessage();
            newMsg.setText(message.getText());
            newMsg.setIntProperty("redeliveries", redeliveries + 1);
            producer.send(newMsg);
        } finally {
            // Fix: the producer was previously never closed and leaked on
            // every redelivery.
            producer.close();
        }
        successfulRedeliveries++;
        System.out.println("jsmppGw: Redelivery OK"); // DEBUG
    } catch (JMSException ex) {
        lastError = "Unable to redeliver message, error: " + ex.getMessage() +
                " original error that caused redelivery was: " + lastError;
        System.out.println(lastError); // DEBUG
        failedRedeliveries++;
        try {
            jmsSession.recover();
        } catch (JMSException e) {
            Logger.getLogger("jms").log(Level.WARNING, "Unable to recover session", e);
        }
        throw new RuntimeException(lastError);
    }
    System.out.println("jsmppGw: Leaving redeliver()"); // DEBUG
}
/**
 * Splits a message longer than one SMS into 152-character SAR segments that
 * share a random reference number, and enqueues each segment as JSON on the
 * outgoing JMS destination.
 *
 * @param msg the long message to split; its text is sliced in 152-char chunks
 */
private void splitAndDeliverLongMessage(ShortMessage msg) {
    processedLongMessages++;
    System.out.println("Splitting long message, has " + msg.getTextContent().length() + " characters"); // DEBUG
    final String fullText = msg.getTextContent();
    final short referenceNumber = (short) random.nextInt();
    // Segment count: ceiling of length/152 (length % 152 == 0 needs no extra segment).
    byte segmentCount = (byte) ((fullText.length() / 152) + 1);
    if ((fullText.length() % 152) == 0) {
        segmentCount--;
    }
    byte sequence = 0;
    for (int offset = 0; offset < fullText.length(); offset += 152) {
        sequence++;
        ShortMessage segment = new ShortMessage();
        segment.setDestinationNumber(msg.getDestinationNumber());
        segment.setFromType(msg.getFromType());
        segment.setSarFormattedMessage(true);
        segment.setSarMsgRefNum(referenceNumber);
        segment.setSarSegmentNumber(sequence);
        segment.setSarTotalSegments(segmentCount);
        int end = offset + 152;
        if (end > fullText.length()) {
            end = fullText.length();
        }
        segment.setTextContent(fullText.substring(offset, end));
        final String payload = JSONObject.fromObject(segment).toString();
        // Hand the segment to the outgoing queue.
        jmsSenderTemplate.send(jmsDestination, new MessageCreator() {
            public Message createMessage(Session session) throws JMSException {
                return session.createTextMessage(payload);
            }
        });
    }
}
// Receiving of messages
/**
 * Handles an incoming deliver_sm PDU. Delivery receipts are only logged;
 * real mobile-originated messages are converted to a {@link ShortMessage},
 * serialized to JSON and pushed onto the incoming JMS queue.
 *
 * @param deliverSm the PDU received from the SMSC
 * @throws ProcessRequestException declared by the listener interface
 */
public void onAcceptDeliverSm(DeliverSm deliverSm) throws ProcessRequestException {
    if (MessageType.SMSC_DEL_RECEIPT.containedIn(deliverSm.getEsmClass())) {
        try {
            DeliveryReceipt delReceipt = deliverSm.getShortMessageAsDeliveryReceipt();
            // Convert the id to hex string format.
            // Fix: the mask must be a long literal. The int literal 0xffffffff
            // sign-extends to -1L, so "& 0xffffffff" masked nothing at all.
            long id = Long.parseLong(delReceipt.getId()) & 0xffffffffL;
            String messageId = Long.toString(id, 16).toUpperCase();
            System.out.println("Received delivery report for msg id: " + messageId +
                " ' from " + deliverSm.getSourceAddr() + " to " + deliverSm.getDestAddress() + " : " + delReceipt); // DEBUG
        } catch (InvalidDeliveryReceiptException e) {
            System.out.println("Invalid delivery receipt: " + e.getMessage());
        }
    } else {
        final ShortMessage msg = new ShortMessage();
        msg.setTextContent(new String(deliverSm.getShortMessage()));
        msg.setSenderNumber(deliverSm.getSourceAddr());
        msg.setDestinationNumber(deliverSm.getDestAddress());
        // Copy SAR (concatenated message) metadata if present.
        // NOTE(review): original comment said this "does not work for now" —
        // confirm against a real operator before relying on SAR reassembly.
        for (OptionalParameter o : deliverSm.getOptionalParametes()) {
            if (o.tag == OptionalParameter.Tag.SAR_MSG_REF_NUM.code()) {
                msg.setSarMsgRefNum(((OptionalParameter.Short) o).getValue());
            } else if (o.tag == OptionalParameter.Tag.SAR_TOTAL_SEGMENTS.code()) {
                msg.setSarTotalSegments(((OptionalParameter.Byte) o).getValue());
                if (msg.getSarTotalSegments() > 1) {
                    msg.setSarFormattedMessage(true);
                }
            } else if (o.tag == OptionalParameter.Tag.SAR_SEGMENT_SEQNUM.code()) {
                msg.setSarSegmentNumber(((OptionalParameter.Byte) o).getValue());
            }
        }
        // Fix: enqueue exactly once per received message, after all optional
        // parameters are processed. This used to sit inside the loop above, so
        // a message without optional parameters was never forwarded and one
        // with several optional parameters was forwarded multiple times.
        final String textRepresentation = JSONObject.fromObject(msg).toString();
        jmsSenderTemplate.send(incomingQueue, new MessageCreator() {
            public Message createMessage(Session session) throws JMSException {
                return session.createTextMessage(textRepresentation);
            }
        });
        receivedMessages++;
        System.out.println("Received message from: " + msg.getSenderNumber() +
            " to: " + msg.getDestinationNumber() + " with text: " + msg.getTextContent());
        if (msg.isSarFormattedMessage()) {
            System.out.println("This was message with SAR ID " + msg.getSarMsgRefNum() + " #" +
                msg.getSarSegmentNumber() + "/" + msg.getSarTotalSegments());
        }
    }
}
// javabean junk
// ---------------------------------------------------------------------------
// Plain JavaBean accessors. These back the Spring bean configuration of the
// gateway; each pair simply exposes the like-named private field.
// ---------------------------------------------------------------------------
/** @return the configured alphanumeric sender id. */
public String getAlphanumericNumber() {
return alphanumericNumber;
}
public void setAlphanumericNumber(String alphanumericNumber) {
this.alphanumericNumber = alphanumericNumber;
}
/** @return the configured SMPP server hostname. */
public String getHostname() {
return hostname;
}
public void setHostname(String hostname) {
this.hostname = hostname;
}
/** @return the configured long (numeric) sender number. */
public String getLongNumber() {
return longNumber;
}
public void setLongNumber(String longNumber) {
this.longNumber = longNumber;
}
/** @return the configured SMPP account password. */
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
/** @return the configured SMPP server port. */
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
}
/** @return the configured SMPP account username. */
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
// Write-only: the Spring JMS listener container driving onMessage().
public void setMessageListenerContainer(AbstractMessageListenerContainer messageListenerContainer) {
this.messageListenerContainer = messageListenerContainer;
}
/** @return maximum redelivery attempts before a message is dropped (see redeliver()). */
public int getMaxRetransmits() {
return maxRetransmits;
}
public void setMaxRetransmits(int maxRetransmits) {
this.maxRetransmits = maxRetransmits;
}
/** @return the outgoing JMS destination used for (re)delivery. */
public Destination getJmsDestination() {
return jmsDestination;
}
public void setJmsDestination(Destination jmsDestination) {
this.jmsDestination = jmsDestination;
}
/** @return the SMPP connect timeout, in milliseconds. */
public long getSmppConnectTimeout() {
return smppConnectTimeout;
}
public void setSmppConnectTimeout(long smppConnectTimeout) {
this.smppConnectTimeout = smppConnectTimeout;
}
/** @return base delay in ms between delivery retries (multiplied by the try count). */
public long getMessageRetryRate() {
return messageRetryRate;
}
public void setMessageRetryRate(long messageRetryRate) {
this.messageRetryRate = messageRetryRate;
}
/** @return pause in ms after each delivered message (throttling). */
public long getMessageSendingRate() {
return messageSendingRate;
}
public void setMessageSendingRate(long messageSendingRate) {
this.messageSendingRate = messageSendingRate;
}
/** @return the JMS destination that received mobile-originated messages are pushed to. */
public Destination getIncomingQueue() {
return incomingQueue;
}
public void setIncomingQueue(Destination incomingQueue) {
this.incomingQueue = incomingQueue;
}
/** @return the Spring JmsTemplate used for all queue sends. */
public JmsTemplate getJmsSenderTemplate() {
return jmsSenderTemplate;
}
public void setJmsSenderTemplate(JmsTemplate jmsSenderTemplate) {
this.jmsSenderTemplate = jmsSenderTemplate;
}
// mbean
// ---------------------------------------------------------------------------
// JMX monitoring attributes, exported via Spring's @ManagedAttribute. Each
// reads one of the gateway's counters/flags; the annotation text documents
// the semantics shown in the management console.
// ---------------------------------------------------------------------------
@ManagedAttribute(description = "Number of errors since start", currencyTimeLimit = 5)
public long getErrors() {
return errors;
}
@ManagedAttribute(description = "Last message delivery duration", currencyTimeLimit = 5)
public long getLastDeliveryDuration() {
return lastDeliveryDuration;
}
@ManagedAttribute(description = "Was last message delivered?", currencyTimeLimit = 5)
public boolean getLastDeliveryOk() {
return lastDeliveryOk;
}
@ManagedAttribute(description = "The text of last error message", currencyTimeLimit = 5)
public String getLastError() {
return lastError;
}
@ManagedAttribute(description = "Number of messages delivered since start", currencyTimeLimit = 5)
public long getMessagesDelivered() {
return messagesDelivered;
}
@ManagedAttribute(description = "Status. Connected and delivering are usual OK states. \"delivering\" for too long is a problem", currencyTimeLimit = 5)
public String getStatus() {
return status;
}
// Live duration of the in-flight delivery; 0 whenever the status flag says we
// are not currently delivering.
@ManagedAttribute(description = "Current delivery duration (0 if not delivering)", currencyTimeLimit = 5)
public long getCurrentDeliveryDuration() {
if (!status.equals("delivering")) {
return 0L;
} else {
return new Date().getTime() - deliveryStart.getTime();
}
}
@ManagedAttribute(description = "Number of attempted, but failed redeliveries", currencyTimeLimit = 5)
public int getFailedRedeliveries() {
return failedRedeliveries;
}
@ManagedAttribute(description = "Number of redelivered messages, that came back and were processed", currencyTimeLimit = 5)
public int getProcessedRedeliveries() {
return processedRedeliveries;
}
@ManagedAttribute(description = "Number of successful redeliveries back to queue", currencyTimeLimit = 5)
public int getSuccessfulRedeliveries() {
return successfulRedeliveries;
}
@ManagedAttribute(description = "Number of long messages, that we have split and delivered", currencyTimeLimit = 5)
public int getProcessedLongMessages() {
return processedLongMessages;
}
@ManagedAttribute(description = "Messages received and delivered to incoming SMS queue", currencyTimeLimit = 5)
public long getReceivedMessages() {
return receivedMessages;
}
// dummy methods
// Required by the jsmpp listener interface; alert notifications are
// intentionally ignored by this gateway.
public void onAcceptAlertNotification(AlertNotification arg0) {
// forget that for now
}
// Required by the jsmpp listener interface; data_sm PDUs are not supported.
// Returning null means no response payload is produced for the PDU.
public DataSmResult onAcceptDataSm(
DataSm shortMessage, org.jsmpp.session.Session session) throws ProcessRequestException {
// forget that for now
return null;
}
}
| |
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.util.datastruct;
import java.io.Serializable;
import java.util.Arrays;
/**
* The BitTree class maintains a set of ordered keys between the values of
* 0 and N. It can quickly (O(log(n))) add keys, remove keys, find the next key
* greater than some value , and find the prev key less than some value. It can
* determine if a key is in the set in O(1) time. This implementation has been
* limited to short keys so that it can implement the ShortKeySet interface.
*/
public class BitTree implements ShortKeySet, Serializable {
    private static final long serialVersionUID = 1;

    // NOTE: instance field names are unchanged from the original layout so
    // that previously serialized instances stay compatible.
    private int size;    // legal keys are 0 .. size-1
    private int power2;  // power of two >= size; leaf bits live at [power2, 2*power2)
    private int[] bits;  // packed complete binary tree: node n has children 2n
                         // and 2n+1 and parent n/2; an interior bit is on iff
                         // some leaf in its subtree is on; leaf power2+k is on
                         // iff key k is in the set
    private int numKeys; // number of keys currently in the set

    /**
     * Creates an empty set able to hold keys in the range [0, maxKey].
     *
     * @param maxKey the largest key that will ever be put into this BitTree
     */
    public BitTree(short maxKey) {
        this(maxKey, false);
    }

    /**
     * Creates a set able to hold keys in the range [0, maxKey].
     *
     * @param maxKey the largest key that will ever be put into this BitTree
     * @param isFull when true the set starts out containing every legal key
     */
    public BitTree(short maxKey, boolean isFull) {
        size = maxKey + 1;
        // Find a power of two at least as large as the key range.
        power2 = 2;
        for (int remaining = maxKey + 1; remaining > 1; remaining /= 2) {
            power2 *= 2;
        }
        // The tree holds 2*power2 bits; each int stores 32 of them, hence
        // power2/16 words, with a minimum of one word.
        int words = power2 / 16;
        bits = new int[words < 1 ? 1 : words];
        if (isFull) {
            Arrays.fill(bits, 0xffffffff);
            numKeys = size;
        }
    }

    /**
     * Removes every key from the set.
     */
    public void removeAll() {
        Arrays.fill(bits, 0);
        numKeys = 0;
    }

    /**
     * Returns the number of keys currently in the set.
     */
    public int size() {
        return numKeys;
    }

    /**
     * Adds a key to the set.
     *
     * @param key the key to add
     * @exception IndexOutOfBoundsException if key is outside [0, size-1]
     */
    public void put(short key) {
        if (key < 0 || key >= size) {
            throw new IndexOutOfBoundsException();
        }
        // Leaf bits start at index power2; turn the key's leaf on.
        int node = power2 + key;
        if (!setBit(node)) {
            return; // already a member
        }
        numKeys++;
        // Propagate the "subtree not empty" flag toward the root; stop as soon
        // as an ancestor was already marked (all of its ancestors are too).
        for (node /= 2; node >= 1; node /= 2) {
            if (!setBit(node)) {
                return;
            }
        }
    }

    /**
     * Removes a key from the set.
     *
     * @param key the key to remove
     * @return true if the key was present
     * @exception IndexOutOfBoundsException if key is outside [0, size-1]
     */
    public boolean remove(short key) {
        if (key < 0 || key >= size) {
            throw new IndexOutOfBoundsException();
        }
        int node = power2 + key;
        if (!clearBit(node)) {
            return false; // was not a member
        }
        numKeys--;
        // Walk up the tree, clearing each ancestor whose subtree became empty.
        while (node != 1) {
            node /= 2;
            if (!isBitSet(node)) {
                break; // already clear, so every ancestor is consistent
            }
            if (isBitSet(2 * node) || isBitSet(2 * node + 1)) {
                break; // another leaf keeps this subtree non-empty
            }
            clearBit(node);
        }
        return true;
    }

    /**
     * Tests whether a key is in the set.
     *
     * @param key the key to look up
     * @return true if the key is a member; false for out-of-range keys
     */
    public boolean containsKey(short key) {
        if (key < 0 || key >= size) {
            return false;
        }
        return isBitSet(power2 + key);
    }

    /**
     * Finds the smallest member key strictly greater than the given key.
     *
     * @param key the key to search after
     * @return the next larger member key, or -1 if there is none
     * @exception IndexOutOfBoundsException if key is outside [0, size-1]
     */
    public short getNext(short key) {
        if (key < 0 || key >= size) {
            throw new IndexOutOfBoundsException();
        }
        int node = power2 + key;
        // Climb toward the root until some right-hand sibling has a member
        // below it; that sibling's subtree contains the answer.
        while (node != 1) {
            if ((node & 1) == 0 && isBitSet(node + 1)) {
                node++;
                break;
            }
            node /= 2;
        }
        if (node == 1) {
            return (short) -1; // reached the root: nothing larger exists
        }
        // Descend along the leftmost set child bits down to a leaf.
        while (node < power2) {
            node *= 2;
            if (!isBitSet(node)) {
                node++; // left child empty, so the right child must be set
            }
        }
        short next = (short) (node - power2);
        if (next >= size) {
            next = -1; // phantom leaf beyond the legal range (full-set padding)
        }
        return next;
    }

    /**
     * Finds the largest member key strictly less than the given key.
     *
     * @param key the key to search before
     * @return the next smaller member key, or -1 if there is none
     * @exception IndexOutOfBoundsException if key is outside [0, size-1]
     */
    public short getPrevious(short key) {
        if (key < 0 || key >= size) {
            throw new IndexOutOfBoundsException();
        }
        int node = power2 + key;
        // Climb toward the root until some left-hand sibling has a member.
        while (node != 1) {
            if ((node & 1) == 1 && isBitSet(node - 1)) {
                node--;
                break;
            }
            node /= 2;
        }
        if (node == 1) {
            return (short) -1; // reached the root: nothing smaller exists
        }
        // Descend along the rightmost set child bits down to a leaf.
        while (node < power2) {
            node *= 2;
            if (isBitSet(node + 1)) {
                node++;
            }
        }
        return (short) (node - power2);
    }

    /**
     * Checks if the set is empty.
     *
     * @return true if the set contains no keys
     */
    public boolean isEmpty() {
        return numKeys == 0;
    }

    /**
     * Returns the first (lowest) key in the set, or -1 if the set is empty.
     */
    public short getFirst() {
        return containsKey((short) 0) ? (short) 0 : getNext((short) 0);
    }

    /**
     * Returns the last (highest) key in the set, or -1 if the set is empty.
     */
    public short getLast() {
        short top = (short) (size - 1);
        return containsKey(top) ? top : getPrevious(top);
    }

    /** Turns bit n on; returns true if it was previously off. */
    private boolean setBit(int n) {
        int word = n >> 5;
        int before = bits[word];
        bits[word] |= 1 << (n & 0x1f);
        return bits[word] != before;
    }

    /** Turns bit n off; returns true if it was previously on. */
    private boolean clearBit(int n) {
        int word = n >> 5;
        int before = bits[word];
        bits[word] &= ~(1 << (n & 0x1f));
        return bits[word] != before;
    }

    /** Tests whether bit n is on. */
    private boolean isBitSet(int n) {
        return (bits[n >> 5] & (1 << (n & 0x1f))) != 0;
    }
}
| |
// Copyright 2007, 2008, 2010, 2011 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.corelib.components;
import org.apache.tapestry5.BindingConstants;
import org.apache.tapestry5.ComponentAction;
import org.apache.tapestry5.ComponentResources;
import org.apache.tapestry5.PropertyOverrides;
import org.apache.tapestry5.annotations.Environmental;
import org.apache.tapestry5.annotations.Parameter;
import org.apache.tapestry5.annotations.Property;
import org.apache.tapestry5.annotations.SupportsInformalParameters;
import org.apache.tapestry5.beaneditor.BeanModel;
import org.apache.tapestry5.internal.BeanValidationContext;
import org.apache.tapestry5.internal.BeanValidationContextImpl;
import org.apache.tapestry5.internal.beaneditor.BeanModelUtils;
import org.apache.tapestry5.ioc.annotations.Inject;
import org.apache.tapestry5.ioc.internal.util.TapestryException;
import org.apache.tapestry5.plastic.PlasticUtils;
import org.apache.tapestry5.services.BeanEditContext;
import org.apache.tapestry5.services.BeanModelSource;
import org.apache.tapestry5.services.Environment;
import org.apache.tapestry5.services.FormSupport;
import java.lang.annotation.Annotation;
/**
* A component that generates a user interface for editing the properties of a bean. This is the central component of
* the {@link BeanEditForm}, and utilizes a {@link PropertyEditor} for much of its functionality. This component places
* a {@link BeanEditContext} into the environment.
*
* @tapestrydoc
*/
@SupportsInformalParameters
public class BeanEditor
{
    /**
     * Form action that re-runs {@link BeanEditor#doPrepare()} both when the
     * form renders and when it is submitted.
     */
    public static class Prepare implements ComponentAction<BeanEditor>
    {
        private static final long serialVersionUID = 6273600092955522585L;

        public void execute(BeanEditor component)
        {
            component.doPrepare();
        }

        @Override
        public String toString()
        {
            return "BeanEditor.Prepare";
        }
    }

    /**
     * Form action that pops the {@link BeanEditContext} pushed by Prepare.
     */
    static class CleanupEnvironment implements ComponentAction<BeanEditor>
    {
        private static final long serialVersionUID = 6867226962459227016L;

        public void execute(BeanEditor component)
        {
            component.cleanupEnvironment();
        }

        @Override
        public String toString()
        {
            return "BeanEditor.CleanupEnvironment";
        }
    }

    // CleanupEnvironment carries no state, so one shared instance is stored in
    // every form.
    private static final ComponentAction<BeanEditor> CLEANUP_ENVIRONMENT = new CleanupEnvironment();

    /**
     * The object to be edited by the BeanEditor. This will be read when the component renders and updated when the form
     * for the component is submitted. Typically, the container will listen for a "prepare" event, in order to ensure
     * that a non-null value is ready to be read or updated.
     */
    @Parameter(autoconnect = true)
    private Object object;

    /**
     * A comma-separated list of property names to be retained from the
     * {@link org.apache.tapestry5.beaneditor.BeanModel} (only used
     * when a default model is created automatically).
     * Only these properties will be retained, and the properties will also be reordered. The names are
     * case-insensitive.
     */
    @Parameter(defaultPrefix = BindingConstants.LITERAL)
    private String include;

    /**
     * A comma-separated list of property names to be removed from the {@link org.apache.tapestry5.beaneditor.BeanModel}
     * (only used
     * when a default model is created automatically).
     * The names are case-insensitive.
     */
    @Parameter(defaultPrefix = BindingConstants.LITERAL)
    private String exclude;

    /**
     * A comma-separated list of property names indicating the order in which the properties should be presented. The
     * names are case insensitive. Any properties not indicated in the list will be appended to the end of the display
     * order. Only used
     * when a default model is created automatically.
     */
    @Parameter(defaultPrefix = BindingConstants.LITERAL)
    private String reorder;

    /**
     * A comma-separated list of property names to be added to the {@link org.apache.tapestry5.beaneditor.BeanModel}
     * (only used
     * when a default model is created automatically).
     */
    @Parameter(defaultPrefix = BindingConstants.LITERAL)
    private String add;

    /**
     * The model that identifies the parameters to be edited, their order, and every other aspect. If not specified, a
     * default bean model will be created from the type of the object bound to the object parameter. The add, include,
     * exclude and reorder
     * parameters are <em>only</em> applied to a default model, not an explicitly provided one.
     */
    @Parameter
    @Property(write = false)
    private BeanModel model;

    /**
     * Where to search for local overrides of property editing blocks as block parameters. Further, the container of the
     * overrides is used as the source for overridden validation messages. This is normally the BeanEditor component
     * itself, but when the component is used within a BeanEditForm, it will be the BeanEditForm's resources that will
     * be searched.
     */
    @Parameter(value = "this", allowNull = false)
    @Property(write = false)
    private PropertyOverrides overrides;

    @Inject
    private BeanModelSource modelSource;

    @Inject
    private ComponentResources resources;

    @Inject
    private Environment environment;

    @Environmental
    private FormSupport formSupport;

    // Value that changes with each change to the current property:
    @Property
    private String propertyName;

    /**
     * To support nested BeanEditors, we need to cache the object value inside {@link #doPrepare()}. See TAPESTRY-2460.
     */
    private Object cachedObject;

    // Needed for testing as well
    public Object getObject()
    {
        return cachedObject;
    }

    void setupRender()
    {
        formSupport.storeAndExecute(this, new Prepare());
    }

    void cleanupRender()
    {
        formSupport.storeAndExecute(this, CLEANUP_ENVIRONMENT);
    }

    /**
     * Used to initialize the model if necessary, to instantiate the object being edited if necessary, and to push the
     * BeanEditContext into the environment.
     */
    void doPrepare()
    {
        if (model == null)
        {
            // Class<?> rather than the raw Class type (avoids an unchecked call
            // to createEditModel).
            Class<?> type = resources.getBoundType("object");
            model = modelSource.createEditModel(type, overrides.getOverrideMessages());
            BeanModelUtils.modify(model, add, include, exclude, reorder);
        }
        // The only problem here is that if the bound property is backed by a persistent field, it
        // is assigned (and stored to the session, and propagated around the cluster) first,
        // before values are assigned.
        if (object == null)
        {
            try
            {
                object = model.newInstance();
            }
            catch (Exception ex)
            {
                String message = String.format("Exception instantiating instance of %s (for component '%s'): %s",
                        PlasticUtils.toTypeName(model.getBeanType()), resources.getCompleteId(), ex);
                throw new TapestryException(message, resources.getLocation(), ex);
            }
            // If 'object' parameter is bound to a null-value BeanValidationContext is empty.
            // This prevents JSR-303 javascript validators to be rendered properly.
            refreshBeanValidationContext();
        }
        BeanEditContext context = new BeanEditContext()
        {
            public Class<?> getBeanClass()
            {
                return model.getBeanType();
            }

            public <T extends Annotation> T getAnnotation(Class<T> type)
            {
                return getBeanClass().getAnnotation(type);
            }
        };
        cachedObject = object;
        environment.push(BeanEditContext.class, context);
    }

    void cleanupEnvironment()
    {
        environment.pop(BeanEditContext.class);
    }

    // Replaces any existing BeanValidationContext with one wrapping the newly
    // instantiated object; no-op when no context has been pushed.
    private void refreshBeanValidationContext()
    {
        if (environment.peek(BeanValidationContext.class) != null)
        {
            environment.pop(BeanValidationContext.class);
            environment.push(BeanValidationContext.class, new BeanValidationContextImpl(object));
        }
    }

    // For testing
    void inject(ComponentResources resources, PropertyOverrides overrides, BeanModelSource source,
                Environment environment)
    {
        this.resources = resources;
        this.overrides = overrides;
        this.environment = environment;
        modelSource = source;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.state;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.streams.state.internals.InMemoryKeyValueStore;
import org.apache.kafka.streams.state.internals.InMemorySessionBytesStoreSupplier;
import org.apache.kafka.streams.state.internals.InMemoryWindowBytesStoreSupplier;
import org.apache.kafka.streams.state.internals.KeyValueStoreBuilder;
import org.apache.kafka.streams.state.internals.MemoryNavigableLRUCache;
import org.apache.kafka.streams.state.internals.RocksDbKeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.internals.RocksDbSessionBytesStoreSupplier;
import org.apache.kafka.streams.state.internals.RocksDbWindowBytesStoreSupplier;
import org.apache.kafka.streams.state.internals.SessionStoreBuilder;
import org.apache.kafka.streams.state.internals.TimestampedKeyValueStoreBuilder;
import org.apache.kafka.streams.state.internals.TimestampedWindowStoreBuilder;
import org.apache.kafka.streams.state.internals.WindowStoreBuilder;
import java.time.Duration;
import java.util.Objects;
import static org.apache.kafka.streams.internals.ApiUtils.prepareMillisCheckFailMsgPrefix;
import static org.apache.kafka.streams.internals.ApiUtils.validateMillisecondDuration;
/**
* Factory for creating state stores in Kafka Streams.
* <p>
* When using the high-level DSL, i.e., {@link org.apache.kafka.streams.StreamsBuilder StreamsBuilder}, users create
* {@link StoreSupplier}s that can be further customized via
* {@link org.apache.kafka.streams.kstream.Materialized Materialized}.
* For example, a topic read as {@link org.apache.kafka.streams.kstream.KTable KTable} can be materialized into an
* in-memory store with custom key/value serdes and caching disabled:
* <pre>{@code
* StreamsBuilder builder = new StreamsBuilder();
* KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore("queryable-store-name");
* KTable<Long,String> table = builder.table(
* "topicName",
* Materialized.<Long,String>as(storeSupplier)
* .withKeySerde(Serdes.Long())
* .withValueSerde(Serdes.String())
* .withCachingDisabled());
* }</pre>
* When using the Processor API, i.e., {@link org.apache.kafka.streams.Topology Topology}, users create
* {@link StoreBuilder}s that can be attached to {@link org.apache.kafka.streams.processor.api.Processor Processor}s.
* For example, you can create a {@link org.apache.kafka.streams.kstream.Windowed windowed} RocksDB store with custom
* changelog topic configuration like:
* <pre>{@code
* Topology topology = new Topology();
* topology.addProcessor("processorName", ...);
*
* Map<String,String> topicConfig = new HashMap<>();
* StoreBuilder<WindowStore<Integer, Long>> storeBuilder = Stores
* .windowStoreBuilder(
* Stores.persistentWindowStore("queryable-store-name", ...),
* Serdes.Integer(),
* Serdes.Long())
* .withLoggingEnabled(topicConfig);
*
* topology.addStateStore(storeBuilder, "processorName");
* }</pre>
*/
public final class Stores {
    /**
     * Create a persistent {@link KeyValueBytesStoreSupplier}.
     * <p>
     * This store supplier can be passed into a {@link #keyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}.
     * If you want to create a {@link TimestampedKeyValueStore} you should use
     * {@link #persistentTimestampedKeyValueStore(String)} to create a store supplier instead.
     *
     * @param name  name of the store (cannot be {@code null})
     * @return an instance of a {@link KeyValueBytesStoreSupplier} that can be used
     * to build a persistent key-value store
     */
    public static KeyValueBytesStoreSupplier persistentKeyValueStore(final String name) {
        Objects.requireNonNull(name, "name cannot be null");
        return new RocksDbKeyValueBytesStoreSupplier(name, false);
    }
    /**
     * Create a persistent {@link KeyValueBytesStoreSupplier}.
     * <p>
     * This store supplier can be passed into a
     * {@link #timestampedKeyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}.
     * If you want to create a {@link KeyValueStore} you should use
     * {@link #persistentKeyValueStore(String)} to create a store supplier instead.
     *
     * @param name  name of the store (cannot be {@code null})
     * @return an instance of a {@link KeyValueBytesStoreSupplier} that can be used
     * to build a persistent key-(timestamp/value) store
     */
    public static KeyValueBytesStoreSupplier persistentTimestampedKeyValueStore(final String name) {
        Objects.requireNonNull(name, "name cannot be null");
        return new RocksDbKeyValueBytesStoreSupplier(name, true);
    }
    /**
     * Create an in-memory {@link KeyValueBytesStoreSupplier}.
     * <p>
     * This store supplier can be passed into a {@link #keyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}
     * or {@link #timestampedKeyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}.
     *
     * @param name  name of the store (cannot be {@code null})
     * @return an instance of a {@link KeyValueBytesStoreSupplier} that can be used to
     * build an in-memory store
     */
    public static KeyValueBytesStoreSupplier inMemoryKeyValueStore(final String name) {
        Objects.requireNonNull(name, "name cannot be null");
        return new KeyValueBytesStoreSupplier() {
            @Override
            public String name() {
                return name;
            }
            @Override
            public KeyValueStore<Bytes, byte[]> get() {
                return new InMemoryKeyValueStore(name);
            }
            @Override
            public String metricsScope() {
                return "in-memory";
            }
        };
    }
    /**
     * Create a LRU Map {@link KeyValueBytesStoreSupplier}.
     * <p>
     * This store supplier can be passed into a {@link #keyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}
     * or {@link #timestampedKeyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}.
     *
     * @param name          name of the store (cannot be {@code null})
     * @param maxCacheSize  maximum number of items in the LRU (cannot be negative)
     * @return an instance of a {@link KeyValueBytesStoreSupplier} that can be used to build
     * an LRU Map based store
     * @throws IllegalArgumentException if {@code maxCacheSize} is negative
     */
    public static KeyValueBytesStoreSupplier lruMap(final String name, final int maxCacheSize) {
        Objects.requireNonNull(name, "name cannot be null");
        if (maxCacheSize < 0) {
            throw new IllegalArgumentException("maxCacheSize cannot be negative");
        }
        return new KeyValueBytesStoreSupplier() {
            @Override
            public String name() {
                return name;
            }
            @Override
            public KeyValueStore<Bytes, byte[]> get() {
                return new MemoryNavigableLRUCache(name, maxCacheSize);
            }
            @Override
            public String metricsScope() {
                return "in-memory-lru";
            }
        };
    }
    /**
     * Create a persistent {@link WindowBytesStoreSupplier}.
     * <p>
     * This store supplier can be passed into a {@link #windowStoreBuilder(WindowBytesStoreSupplier, Serde, Serde)}.
     * If you want to create a {@link TimestampedWindowStore} you should use
     * {@link #persistentTimestampedWindowStore(String, Duration, Duration, boolean)} to create a store supplier instead.
     *
     * @param name                  name of the store (cannot be {@code null})
     * @param retentionPeriod       length of time to retain data in the store (cannot be negative)
     *                              (note that the retention period must be at least long enough to contain the
     *                              windowed data's entire life cycle, from window-start through window-end,
     *                              and for the entire grace period)
     * @param windowSize            size of the windows (cannot be negative)
     * @param retainDuplicates      whether or not to retain duplicates. Turning this on will automatically disable
     *                              caching and means that null values will be ignored.
     * @return an instance of {@link WindowBytesStoreSupplier}
     * @throws IllegalArgumentException if {@code retentionPeriod} or {@code windowSize} can't be represented as {@code long milliseconds}
     * @throws IllegalArgumentException if {@code retentionPeriod} is smaller than {@code windowSize}
     */
    public static WindowBytesStoreSupplier persistentWindowStore(final String name,
                                                                 final Duration retentionPeriod,
                                                                 final Duration windowSize,
                                                                 final boolean retainDuplicates) throws IllegalArgumentException {
        return persistentWindowStore(name, retentionPeriod, windowSize, retainDuplicates, false);
    }
    /**
     * Create a persistent {@link WindowBytesStoreSupplier}.
     * <p>
     * This store supplier can be passed into a
     * {@link #timestampedWindowStoreBuilder(WindowBytesStoreSupplier, Serde, Serde)}.
     * If you want to create a {@link WindowStore} you should use
     * {@link #persistentWindowStore(String, Duration, Duration, boolean)} to create a store supplier instead.
     *
     * @param name                  name of the store (cannot be {@code null})
     * @param retentionPeriod       length of time to retain data in the store (cannot be negative)
     *                              (note that the retention period must be at least long enough to contain the
     *                              windowed data's entire life cycle, from window-start through window-end,
     *                              and for the entire grace period)
     * @param windowSize            size of the windows (cannot be negative)
     * @param retainDuplicates      whether or not to retain duplicates. Turning this on will automatically disable
     *                              caching and means that null values will be ignored.
     * @return an instance of {@link WindowBytesStoreSupplier}
     * @throws IllegalArgumentException if {@code retentionPeriod} or {@code windowSize} can't be represented as {@code long milliseconds}
     * @throws IllegalArgumentException if {@code retentionPeriod} is smaller than {@code windowSize}
     */
    public static WindowBytesStoreSupplier persistentTimestampedWindowStore(final String name,
                                                                            final Duration retentionPeriod,
                                                                            final Duration windowSize,
                                                                            final boolean retainDuplicates) throws IllegalArgumentException {
        return persistentWindowStore(name, retentionPeriod, windowSize, retainDuplicates, true);
    }
    // Converts the Duration-based parameters to milliseconds (validating they fit in a long)
    // and picks the default segment interval before delegating to the long-based overload.
    private static WindowBytesStoreSupplier persistentWindowStore(final String name,
                                                                  final Duration retentionPeriod,
                                                                  final Duration windowSize,
                                                                  final boolean retainDuplicates,
                                                                  final boolean timestampedStore) {
        Objects.requireNonNull(name, "name cannot be null");
        final String rpMsgPrefix = prepareMillisCheckFailMsgPrefix(retentionPeriod, "retentionPeriod");
        final long retentionMs = validateMillisecondDuration(retentionPeriod, rpMsgPrefix);
        final String wsMsgPrefix = prepareMillisCheckFailMsgPrefix(windowSize, "windowSize");
        final long windowSizeMs = validateMillisecondDuration(windowSize, wsMsgPrefix);
        // Default segment interval: half the retention, but never below one minute.
        final long defaultSegmentInterval = Math.max(retentionMs / 2, 60_000L);
        return persistentWindowStore(name, retentionMs, windowSizeMs, retainDuplicates, defaultSegmentInterval, timestampedStore);
    }
    // Validates the millisecond parameters and constructs the RocksDB-backed supplier.
    private static WindowBytesStoreSupplier persistentWindowStore(final String name,
                                                                  final long retentionPeriod,
                                                                  final long windowSize,
                                                                  final boolean retainDuplicates,
                                                                  final long segmentInterval,
                                                                  final boolean timestampedStore) {
        Objects.requireNonNull(name, "name cannot be null");
        if (retentionPeriod < 0L) {
            throw new IllegalArgumentException("retentionPeriod cannot be negative");
        }
        if (windowSize < 0L) {
            throw new IllegalArgumentException("windowSize cannot be negative");
        }
        if (segmentInterval < 1L) {
            throw new IllegalArgumentException("segmentInterval cannot be zero or negative");
        }
        if (windowSize > retentionPeriod) {
            throw new IllegalArgumentException("The retention period of the window store "
                + name + " must be no smaller than its window size. Got size=["
                + windowSize + "], retention=[" + retentionPeriod + "]");
        }
        return new RocksDbWindowBytesStoreSupplier(
            name,
            retentionPeriod,
            segmentInterval,
            windowSize,
            retainDuplicates,
            timestampedStore);
    }
    /**
     * Create an in-memory {@link WindowBytesStoreSupplier}.
     * <p>
     * This store supplier can be passed into a {@link #windowStoreBuilder(WindowBytesStoreSupplier, Serde, Serde)} or
     * {@link #timestampedWindowStoreBuilder(WindowBytesStoreSupplier, Serde, Serde)}.
     *
     * @param name                  name of the store (cannot be {@code null})
     * @param retentionPeriod       length of time to retain data in the store (cannot be negative)
     *                              Note that the retention period must be at least long enough to contain the
     *                              windowed data's entire life cycle, from window-start through window-end,
     *                              and for the entire grace period.
     * @param windowSize            size of the windows (cannot be negative)
     * @param retainDuplicates      whether or not to retain duplicates. Turning this on will automatically disable
     *                              caching and means that null values will be ignored.
     * @return an instance of {@link WindowBytesStoreSupplier}
     * @throws IllegalArgumentException if {@code retentionPeriod} or {@code windowSize} can't be represented as {@code long milliseconds}
     * @throws IllegalArgumentException if {@code retentionPeriod} is smaller than {@code windowSize}
     */
    public static WindowBytesStoreSupplier inMemoryWindowStore(final String name,
                                                               final Duration retentionPeriod,
                                                               final Duration windowSize,
                                                               final boolean retainDuplicates) throws IllegalArgumentException {
        Objects.requireNonNull(name, "name cannot be null");
        final String retentionPeriodErrorMessagePrefix = prepareMillisCheckFailMsgPrefix(retentionPeriod, "retentionPeriod");
        final long retentionMs = validateMillisecondDuration(retentionPeriod, retentionPeriodErrorMessagePrefix);
        if (retentionMs < 0L) {
            throw new IllegalArgumentException("retentionPeriod cannot be negative");
        }
        final String windowSizeErrorMessagePrefix = prepareMillisCheckFailMsgPrefix(windowSize, "windowSize");
        final long windowSizeMs = validateMillisecondDuration(windowSize, windowSizeErrorMessagePrefix);
        if (windowSizeMs < 0L) {
            throw new IllegalArgumentException("windowSize cannot be negative");
        }
        if (windowSizeMs > retentionMs) {
            // Report millisecond values, consistent with the persistent window store checks.
            throw new IllegalArgumentException("The retention period of the window store "
                + name + " must be no smaller than its window size. Got size=["
                + windowSizeMs + "], retention=[" + retentionMs + "]");
        }
        return new InMemoryWindowBytesStoreSupplier(name, retentionMs, windowSizeMs, retainDuplicates);
    }
    /**
     * Create a persistent {@link SessionBytesStoreSupplier}.
     *
     * @param name              name of the store (cannot be {@code null})
     * @param retentionPeriod   length of time to retain data in the store (cannot be negative)
     *                          (note that the retention period must be long enough to
     *                          contain the inactivity gap of the session and the entire grace period.)
     * @return an instance of a {@link  SessionBytesStoreSupplier}
     */
    public static SessionBytesStoreSupplier persistentSessionStore(final String name,
                                                                   final Duration retentionPeriod) {
        Objects.requireNonNull(name, "name cannot be null");
        final String msgPrefix = prepareMillisCheckFailMsgPrefix(retentionPeriod, "retentionPeriod");
        final long retentionPeriodMs = validateMillisecondDuration(retentionPeriod, msgPrefix);
        if (retentionPeriodMs < 0) {
            throw new IllegalArgumentException("retentionPeriod cannot be negative");
        }
        return new RocksDbSessionBytesStoreSupplier(name, retentionPeriodMs);
    }
    /**
     * Create an in-memory {@link SessionBytesStoreSupplier}.
     *
     * @param name              name of the store (cannot be {@code null})
     * @param retentionPeriod   length of time to retain data in the store (cannot be negative)
     *                          (note that the retention period must be long enough to
     *                          contain the inactivity gap of the session and the entire grace period.)
     * @return an instance of a {@link  SessionBytesStoreSupplier}
     */
    public static SessionBytesStoreSupplier inMemorySessionStore(final String name, final Duration retentionPeriod) {
        Objects.requireNonNull(name, "name cannot be null");
        final String msgPrefix = prepareMillisCheckFailMsgPrefix(retentionPeriod, "retentionPeriod");
        final long retentionPeriodMs = validateMillisecondDuration(retentionPeriod, msgPrefix);
        if (retentionPeriodMs < 0) {
            throw new IllegalArgumentException("retentionPeriod cannot be negative");
        }
        return new InMemorySessionBytesStoreSupplier(name, retentionPeriodMs);
    }
    /**
     * Creates a {@link StoreBuilder} that can be used to build a {@link KeyValueStore}.
     * <p>
     * The provided supplier should <strong>not</strong> be a supplier for
     * {@link TimestampedKeyValueStore TimestampedKeyValueStores}.
     *
     * @param supplier      a {@link KeyValueBytesStoreSupplier} (cannot be {@code null})
     * @param keySerde      the key serde to use
     * @param valueSerde    the value serde to use; if the serialized bytes is {@code null} for put operations,
     *                      it is treated as delete
     * @param <K>           key type
     * @param <V>           value type
     * @return an instance of a {@link StoreBuilder} that can build a {@link KeyValueStore}
     */
    public static <K, V> StoreBuilder<KeyValueStore<K, V>> keyValueStoreBuilder(final KeyValueBytesStoreSupplier supplier,
                                                                                final Serde<K> keySerde,
                                                                                final Serde<V> valueSerde) {
        Objects.requireNonNull(supplier, "supplier cannot be null");
        return new KeyValueStoreBuilder<>(supplier, keySerde, valueSerde, Time.SYSTEM);
    }
    /**
     * Creates a {@link StoreBuilder} that can be used to build a {@link TimestampedKeyValueStore}.
     * <p>
     * The provided supplier should <strong>not</strong> be a supplier for
     * {@link KeyValueStore KeyValueStores}. For this case, passed in timestamps will be dropped and not stored in the
     * key-value-store. On read, no valid timestamp but a dummy timestamp will be returned.
     *
     * @param supplier      a {@link KeyValueBytesStoreSupplier} (cannot be {@code null})
     * @param keySerde      the key serde to use
     * @param valueSerde    the value serde to use; if the serialized bytes is {@code null} for put operations,
     *                      it is treated as delete
     * @param <K>           key type
     * @param <V>           value type
     * @return an instance of a {@link StoreBuilder} that can build a {@link KeyValueStore}
     */
    public static <K, V> StoreBuilder<TimestampedKeyValueStore<K, V>> timestampedKeyValueStoreBuilder(final KeyValueBytesStoreSupplier supplier,
                                                                                                      final Serde<K> keySerde,
                                                                                                      final Serde<V> valueSerde) {
        Objects.requireNonNull(supplier, "supplier cannot be null");
        return new TimestampedKeyValueStoreBuilder<>(supplier, keySerde, valueSerde, Time.SYSTEM);
    }
    /**
     * Creates a {@link StoreBuilder} that can be used to build a {@link WindowStore}.
     * <p>
     * The provided supplier should <strong>not</strong> be a supplier for
     * {@link TimestampedWindowStore TimestampedWindowStores}.
     *
     * @param supplier      a {@link WindowBytesStoreSupplier} (cannot be {@code null})
     * @param keySerde      the key serde to use
     * @param valueSerde    the value serde to use; if the serialized bytes is {@code null} for put operations,
     *                      it is treated as delete
     * @param <K>           key type
     * @param <V>           value type
     * @return an instance of {@link StoreBuilder} that can build a {@link WindowStore}
     */
    public static <K, V> StoreBuilder<WindowStore<K, V>> windowStoreBuilder(final WindowBytesStoreSupplier supplier,
                                                                            final Serde<K> keySerde,
                                                                            final Serde<V> valueSerde) {
        Objects.requireNonNull(supplier, "supplier cannot be null");
        return new WindowStoreBuilder<>(supplier, keySerde, valueSerde, Time.SYSTEM);
    }
    /**
     * Creates a {@link StoreBuilder} that can be used to build a {@link TimestampedWindowStore}.
     * <p>
     * The provided supplier should <strong>not</strong> be a supplier for
     * {@link WindowStore WindowStores}. For this case, passed in timestamps will be dropped and not stored in the
     * window-store. On read, no valid timestamp but a dummy timestamp will be returned.
     *
     * @param supplier      a {@link WindowBytesStoreSupplier} (cannot be {@code null})
     * @param keySerde      the key serde to use
     * @param valueSerde    the value serde to use; if the serialized bytes is {@code null} for put operations,
     *                      it is treated as delete
     * @param <K>           key type
     * @param <V>           value type
     * @return an instance of {@link StoreBuilder} that can build a {@link TimestampedWindowStore}
     */
    public static <K, V> StoreBuilder<TimestampedWindowStore<K, V>> timestampedWindowStoreBuilder(final WindowBytesStoreSupplier supplier,
                                                                                                  final Serde<K> keySerde,
                                                                                                  final Serde<V> valueSerde) {
        Objects.requireNonNull(supplier, "supplier cannot be null");
        return new TimestampedWindowStoreBuilder<>(supplier, keySerde, valueSerde, Time.SYSTEM);
    }
    /**
     * Creates a {@link StoreBuilder} that can be used to build a {@link SessionStore}.
     *
     * @param supplier      a {@link SessionBytesStoreSupplier} (cannot be {@code null})
     * @param keySerde      the key serde to use
     * @param valueSerde    the value serde to use; if the serialized bytes is {@code null} for put operations,
     *                      it is treated as delete
     * @param <K>           key type
     * @param <V>           value type
     * @return an instance of {@link StoreBuilder} that can build a {@link SessionStore}
     */
    public static <K, V> StoreBuilder<SessionStore<K, V>> sessionStoreBuilder(final SessionBytesStoreSupplier supplier,
                                                                              final Serde<K> keySerde,
                                                                              final Serde<V> valueSerde) {
        Objects.requireNonNull(supplier, "supplier cannot be null");
        return new SessionStoreBuilder<>(supplier, keySerde, valueSerde, Time.SYSTEM);
    }
}
| |
/*
* Copyright 2013 Ken Sedgwick
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.crypto;
import com.google.bitcoin.core.Sha256Hash;
import com.google.common.base.Joiner;
import org.spongycastle.util.encoders.Hex;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* A MnemonicCode object may be used to convert between binary seed values and
* lists of words per <a href="https://en.bitcoin.it/wiki/BIP_0039">the BIP 39
* specification</a>
*/
public class MnemonicCode {
    // The in-memory word list; per BIP 39 it must contain exactly 2048 sorted words
    // (sorted order is required because toEntropy() uses binary search).
    private ArrayList<String> wordList;

    // SHA-256 hex digest of the canonical BIP 39 English word list resource.
    public static String BIP39_ENGLISH_SHA256 = "ad90bf3beb7b0eb7e5acd74727dc0da96e0a280a258354e7293fb7e211ac03db";

    // PBKDF2 iteration count mandated by BIP 39 for seed derivation.
    private static final int PBKDF2_ROUNDS = 4096;

    /**
     * Creates a MnemonicCode backed by the bundled English word list, verifying it
     * against the known SHA-256 digest.
     */
    public MnemonicCode() throws IOException {
        this(MnemonicCode.class.getResourceAsStream("mnemonic/wordlist/english.txt"), BIP39_ENGLISH_SHA256);
    }

    /**
     * Creates an MnemonicCode object, initializing with words read from the supplied input stream. If a wordListDigest
     * is supplied the digest of the words will be checked.
     *
     * @param wordstream stream supplying one word per line, decoded as UTF-8
     * @param wordListDigest expected SHA-256 hex digest of the word list, or {@code null} to skip verification
     * @throws IOException if the stream cannot be read
     * @throws IllegalArgumentException if the list is not exactly 2048 words or the digest does not match
     */
    public MnemonicCode(InputStream wordstream, String wordListDigest) throws IOException, IllegalArgumentException {
        BufferedReader br = new BufferedReader(new InputStreamReader(wordstream, "UTF-8"));
        this.wordList = new ArrayList<String>();
        MessageDigest md;
        try {
            md = MessageDigest.getInstance("SHA-256");
        } catch (NoSuchAlgorithmException ex) {
            throw new RuntimeException(ex); // Can't happen.
        }
        try {
            String word;
            while ((word = br.readLine()) != null) {
                // Hash the UTF-8 bytes explicitly: the no-arg getBytes() uses the
                // platform-default charset and could yield a different digest on
                // systems whose default encoding is not UTF-8.
                md.update(word.getBytes("UTF-8"));
                this.wordList.add(word);
            }
        } finally {
            // Close the reader even if reading fails part-way through.
            br.close();
        }
        if (this.wordList.size() != 2048)
            throw new IllegalArgumentException("input stream did not contain 2048 words");
        // If a wordListDigest is supplied check to make sure it matches.
        if (wordListDigest != null) {
            byte[] digest = md.digest();
            String hexdigest = new String(Hex.encode(digest));
            if (!hexdigest.equals(wordListDigest))
                throw new IllegalArgumentException("wordlist digest mismatch");
        }
    }

    /**
     * Convert mnemonic word list to seed.
     */
    public static byte[] toSeed(List<String> words, String passphrase) {
        // To create binary seed from mnemonic, we use PBKDF2 function
        // with mnemonic sentence (in UTF-8) used as a password and
        // string "mnemonic" + passphrase (again in UTF-8) used as a
        // salt. Iteration count is set to 4096 and HMAC-SHA512 is
        // used as a pseudo-random function. Desired length of the
        // derived key is 512 bits (= 64 bytes).
        //
        String pass = Joiner.on(' ').join(words);
        String salt = "mnemonic" + passphrase;
        return PBKDF2SHA512.derive(pass, salt, PBKDF2_ROUNDS, 64);
    }

    /**
     * Convert mnemonic word list to original entropy value.
     *
     * @param words the mnemonic sentence, one word per list entry
     * @return the entropy bytes originally used to create the mnemonic
     * @throws MnemonicException.MnemonicLengthException if the list is empty or not a multiple of three words
     * @throws MnemonicException.MnemonicWordException if a word is not in the word list
     * @throws MnemonicException.MnemonicChecksumException if the embedded checksum does not match
     */
    public byte[] toEntropy(List<String> words) throws MnemonicException.MnemonicLengthException, MnemonicException.MnemonicWordException, MnemonicException.MnemonicChecksumException {
        if (words.size() % 3 > 0)
            throw new MnemonicException.MnemonicLengthException("Word list size must be multiple of three words.");
        // An empty list would otherwise pass the modulo check and yield empty entropy
        // with no checksum verified at all.
        if (words.size() == 0)
            throw new MnemonicException.MnemonicLengthException("Word list is empty.");
        // Look up all the words in the list and construct the
        // concatenation of the original entropy and the checksum.
        //
        int concatLenBits = words.size() * 11;
        boolean[] concatBits = new boolean[concatLenBits];
        int wordindex = 0;
        for (String word : words) {
            // Find the words index in the wordlist (requires the list to be sorted).
            int ndx = Collections.binarySearch(this.wordList, word);
            if (ndx < 0)
                throw new MnemonicException.MnemonicWordException(word);
            // Set the next 11 bits to the value of the index.
            for (int ii = 0; ii < 11; ++ii)
                concatBits[(wordindex * 11) + ii] = (ndx & (1 << (10 - ii))) != 0;
            ++wordindex;
        }
        // Per BIP 39, one checksum bit is appended per 32 bits of entropy,
        // so the checksum is 1/33rd of the concatenated bits.
        int checksumLengthBits = concatLenBits / 33;
        int entropyLengthBits = concatLenBits - checksumLengthBits;
        // Extract original entropy as bytes.
        byte[] entropy = new byte[entropyLengthBits / 8];
        for (int ii = 0; ii < entropy.length; ++ii)
            for (int jj = 0; jj < 8; ++jj)
                if (concatBits[(ii * 8) + jj])
                    entropy[ii] |= 1 << (7 - jj);
        // Take the digest of the entropy.
        byte[] hash = Sha256Hash.create(entropy).getBytes();
        boolean[] hashBits = bytesToBits(hash);
        // Check all the checksum bits.
        for (int i = 0; i < checksumLengthBits; ++i)
            if (concatBits[entropyLengthBits + i] != hashBits[i])
                throw new MnemonicException.MnemonicChecksumException();
        return entropy;
    }

    /**
     * Convert entropy data to mnemonic word list.
     *
     * @param entropy the entropy bytes; length must be a multiple of 4 (32 bits)
     * @return the mnemonic sentence, one word per list entry
     * @throws MnemonicException.MnemonicLengthException if the entropy length is not a multiple of 32 bits
     */
    public List<String> toMnemonic(byte[] entropy) throws MnemonicException.MnemonicLengthException {
        if (entropy.length % 4 > 0)
            throw new MnemonicException.MnemonicLengthException("entropy length not multiple of 32 bits");
        // We take initial entropy of ENT bits and compute its
        // checksum by taking first ENT / 32 bits of its SHA256 hash.
        byte[] hash = Sha256Hash.create(entropy).getBytes();
        boolean[] hashBits = bytesToBits(hash);
        boolean[] entropyBits = bytesToBits(entropy);
        int checksumLengthBits = entropyBits.length / 32;
        // We append these bits to the end of the initial entropy.
        boolean[] concatBits = new boolean[entropyBits.length + checksumLengthBits];
        System.arraycopy(entropyBits, 0, concatBits, 0, entropyBits.length);
        System.arraycopy(hashBits, 0, concatBits, entropyBits.length, checksumLengthBits);
        // Next we take these concatenated bits and split them into
        // groups of 11 bits. Each group encodes number from 0-2047
        // which is a position in a wordlist. We convert numbers into
        // words and use joined words as mnemonic sentence.
        ArrayList<String> words = new ArrayList<String>();
        int nwords = concatBits.length / 11;
        for (int i = 0; i < nwords; ++i) {
            int index = 0;
            for (int j = 0; j < 11; ++j) {
                index <<= 1;
                if (concatBits[(i * 11) + j])
                    index |= 0x1;
            }
            words.add(this.wordList.get(index));
        }
        return words;
    }

    /**
     * Check to see if a mnemonic word list is valid.
     *
     * @throws MnemonicException if the word list fails length, word, or checksum validation
     */
    public void check(List<String> words) throws MnemonicException {
        // Validation is a side effect of entropy recovery; the result is discarded.
        toEntropy(words);
    }

    // Expands each byte into 8 booleans, most-significant bit first.
    private static boolean[] bytesToBits(byte[] data) {
        boolean[] bits = new boolean[data.length * 8];
        for (int i = 0; i < data.length; ++i)
            for (int j = 0; j < 8; ++j)
                bits[(i * 8) + j] = (data[i] & (1 << (7 - j))) != 0;
        return bits;
    }
}
| |
/**
* Copyright 2010-present Facebook.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.model;
import com.facebook.FacebookGraphObjectException;
import com.facebook.internal.Utility;
import com.facebook.internal.Validate;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.lang.reflect.*;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
/**
* GraphObject is the primary interface used by the Facebook SDK for Android to represent objects in the Facebook
* Social Graph and the Facebook Open Graph (OG). It is the base interface for all typed access to graph objects
* in the SDK. No concrete classes implement GraphObject or its derived interfaces. Rather, they are implemented as
* proxies (see the {@link com.facebook.model.GraphObject.Factory Factory} class) that provide strongly-typed property
* getters and setters to access the underlying data. Since the primary use case for graph objects is sending and
* receiving them over the wire to/from Facebook services, they are represented as JSONObjects. No validation is done
* that a graph object is actually of a specific type -- any graph object can be treated as any GraphObject-derived
* interface, and the presence or absence of specific properties determines its suitability for use as that
* particular type of object.
* <br/>
*/
public interface GraphObject {
    /**
     * Returns a new proxy that treats this graph object as a different GraphObject-derived type.
     * @param graphObjectClass the type of GraphObject to return
     * @return a new instance of the GraphObject-derived-type that references the same underlying data
     */
    <T extends GraphObject> T cast(Class<T> graphObjectClass);
    /**
     * Returns a Java Collections map of names and properties. Modifying the returned map modifies the
     * inner JSON representation.
     * @return a Java Collections map representing the GraphObject state
     */
    Map<String, Object> asMap();
    /**
     * Gets the underlying JSONObject representation of this graph object.
     * @return the underlying JSONObject representation of this graph object
     */
    JSONObject getInnerJSONObject();
    /**
     * Gets a property of the GraphObject
     * @param propertyName the name of the property to get
     * @return the value of the named property
     */
    Object getProperty(String propertyName);
    /**
     * Gets a property of the GraphObject, cast to a particular GraphObject-derived interface. This gives some of
     * the benefits of having a property getter defined to return a GraphObject-derived type without requiring
     * explicit definition of an interface to define the getter.
     * @param propertyName the name of the property to get
     * @param graphObjectClass the GraphObject-derived interface to cast the property to
     * @return the value of the named property, as an instance of the graphObjectClass interface
     */
    <T extends GraphObject> T getPropertyAs(String propertyName, Class<T> graphObjectClass);
    /**
     * Gets a property of the GraphObject, cast to a list of instances of a particular GraphObject-derived interface.
     * This gives some of the benefits of having a property getter defined to return a GraphObject-derived type without
     * requiring explicit definition of an interface to define the getter.
     * @param propertyName the name of the property to get
     * @param graphObjectClass the GraphObject-derived interface to cast the property to a list of
     * @return the value of the named property, as a list whose elements are instances of the graphObjectClass interface
     */
    <T extends GraphObject> GraphObjectList<T> getPropertyAsList(String propertyName, Class<T> graphObjectClass);
    /**
     * Sets a property of the GraphObject
     * @param propertyName the name of the property to set
     * @param propertyValue the value of the named property to set
     */
    void setProperty(String propertyName, Object propertyValue);
    /**
     * Removes a property of the GraphObject
     * @param propertyName the name of the property to remove
     */
    void removeProperty(String propertyName);
/**
* Creates proxies that implement GraphObject, GraphObjectList, and their derived types. These proxies allow access
* to underlying collections and name/value property bags via strongly-typed property getters and setters.
* <p/>
* This supports get/set properties that use primitive types, JSON types, Date, other GraphObject types, Iterable,
* Collection, List, and GraphObjectList.
*/
final class Factory {
        // Caches interfaces already verified to be legal GraphObject types, so each
        // interface is validated at most once.
        private static final HashSet<Class<?>> verifiedGraphObjectClasses = new HashSet<Class<?>>();
        // Date formats tried in order when parsing date-valued properties.
        // NOTE(review): SimpleDateFormat is not thread-safe; these shared instances must
        // only be used with external synchronization — verify the call sites.
        private static final SimpleDateFormat[] dateFormats = new SimpleDateFormat[] {
            new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ", Locale.US),
            new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US), new SimpleDateFormat("yyyy-MM-dd", Locale.US), };
        // No objects of this type should exist.
        private Factory() {
        }
        /**
         * Creates a GraphObject proxy that provides typed access to the data in an underlying JSONObject.
         * @param json the JSONObject containing the data to be exposed
         * @return a GraphObject that represents the underlying data
         *
         * @throws com.facebook.FacebookException
         *            If the passed in Class is not a valid GraphObject interface
         */
        public static GraphObject create(JSONObject json) {
            // Delegate to the typed overload using the base GraphObject interface.
            return create(json, GraphObject.class);
        }
        /**
         * Creates a GraphObject-derived proxy that provides typed access to the data in an underlying JSONObject.
         * @param json the JSONObject containing the data to be exposed
         * @param graphObjectClass the GraphObject-derived type to return
         * @return a graphObjectClass instance that represents the underlying data
         *
         * @throws com.facebook.FacebookException
         *            If the passed in Class is not a valid GraphObject interface
         */
        public static <T extends GraphObject> T create(JSONObject json, Class<T> graphObjectClass) {
            // The proxy wraps (not copies) the JSONObject; changes are visible through both.
            return createGraphObjectProxy(graphObjectClass, json);
        }
        /**
         * Creates a GraphObject proxy that initially contains no data.
         * @return a GraphObject with no data
         *
         * @throws com.facebook.FacebookException
         *            If the passed in Class is not a valid GraphObject interface
         */
        public static GraphObject create() {
            // Delegate to the typed overload using the base GraphObject interface.
            return create(GraphObject.class);
        }
/**
* Creates a GraphObject-derived proxy that initially contains no data.
* @param graphObjectClass the GraphObject-derived type to return
* @return a graphObjectClass with no data
*
* @throws com.facebook.FacebookException
* If the passed in Class is not a valid GraphObject interface
*/
public static <T extends GraphObject> T create(Class<T> graphObjectClass) {
return createGraphObjectProxy(graphObjectClass, new JSONObject());
}
/**
* Determines if two GraphObjects represent the same underlying graph object, based on their IDs.
* @param a a graph object
* @param b another graph object
* @return true if both graph objects have an ID and it is the same ID, false otherwise
*/
public static boolean hasSameId(GraphObject a, GraphObject b) {
if (a == null || b == null || !a.asMap().containsKey("id") || !b.asMap().containsKey("id")) {
return false;
}
if (a.equals(b)) {
return true;
}
Object idA = a.getProperty("id");
Object idB = b.getProperty("id");
if (idA == null || idB == null || !(idA instanceof String) || !(idB instanceof String)) {
return false;
}
return idA.equals(idB);
}
/**
* Creates a GraphObjectList-derived proxy that provides typed access to the data in an underlying JSONArray.
* @param array the JSONArray containing the data to be exposed
* @param graphObjectClass the GraphObject-derived type to return
* @return a graphObjectClass that represents the underlying data
*
* @throws com.facebook.FacebookException
* If the passed in Class is not a valid GraphObject interface
*/
public static <T> GraphObjectList<T> createList(JSONArray array, Class<T> graphObjectClass) {
return new GraphObjectListImpl<T>(array, graphObjectClass);
}
/**
* Creates a GraphObjectList-derived proxy that initially contains no data.
* @param graphObjectClass the GraphObject-derived type to return
* @return a GraphObjectList with no data
*
* @throws com.facebook.FacebookException
* If the passed in Class is not a valid GraphObject interface
*/
public static <T> GraphObjectList<T> createList(Class<T> graphObjectClass) {
return createList(new JSONArray(), graphObjectClass);
}
private static <T extends GraphObject> T createGraphObjectProxy(Class<T> graphObjectClass, JSONObject state) {
verifyCanProxyClass(graphObjectClass);
Class<?>[] interfaces = new Class<?>[] { graphObjectClass };
GraphObjectProxy graphObjectProxy = new GraphObjectProxy(state, graphObjectClass);
@SuppressWarnings("unchecked")
T graphObject = (T) Proxy.newProxyInstance(GraphObject.class.getClassLoader(), interfaces, graphObjectProxy);
return graphObject;
}
private static Map<String, Object> createGraphObjectProxyForMap(JSONObject state) {
Class<?>[] interfaces = new Class<?>[]{Map.class};
GraphObjectProxy graphObjectProxy = new GraphObjectProxy(state, Map.class);
@SuppressWarnings("unchecked")
Map<String, Object> graphObject = (Map<String, Object>) Proxy
.newProxyInstance(GraphObject.class.getClassLoader(), interfaces, graphObjectProxy);
return graphObject;
}
    // Thread-safe read of the cache of interfaces already validated by verifyCanProxyClass.
    private static synchronized <T extends GraphObject> boolean hasClassBeenVerified(Class<T> graphObjectClass) {
        return verifiedGraphObjectClasses.contains(graphObjectClass);
    }
    // Thread-safe insert into the verified-interface cache; synchronized on the same lock as hasClassBeenVerified.
    private static synchronized <T extends GraphObject> void recordClassHasBeenVerified(Class<T> graphObjectClass) {
        verifiedGraphObjectClasses.add(graphObjectClass);
    }
    // Validates that graphObjectClass is an interface whose every declared method is a
    // recognizable getter/setter (or carries a non-empty @PropertyName override), caching
    // successful results so the reflective walk runs at most once per interface.
    // Throws FacebookGraphObjectException on any method that cannot be proxied.
    private static <T extends GraphObject> void verifyCanProxyClass(Class<T> graphObjectClass) {
        if (hasClassBeenVerified(graphObjectClass)) {
            return;
        }
        if (!graphObjectClass.isInterface()) {
            throw new FacebookGraphObjectException("Factory can only wrap interfaces, not class: "
                    + graphObjectClass.getName());
        }
        Method[] methods = graphObjectClass.getMethods();
        for (Method method : methods) {
            String methodName = method.getName();
            int parameterCount = method.getParameterTypes().length;
            Class<?> returnType = method.getReturnType();
            boolean hasPropertyNameOverride = method.isAnnotationPresent(PropertyName.class);
            if (method.getDeclaringClass().isAssignableFrom(GraphObject.class)) {
                // Don't worry about any methods from GraphObject or one of its base classes.
                continue;
            } else if (parameterCount == 1 && returnType == Void.TYPE) {
                // One argument, void return: candidate setter.
                if (hasPropertyNameOverride) {
                    // If a property override is present, it MUST be valid. We don't fallback
                    // to using the method name
                    if (!Utility.isNullOrEmpty(method.getAnnotation(PropertyName.class).value())) {
                        continue;
                    }
                } else if (methodName.startsWith("set") && methodName.length() > 3) {
                    // Looks like a valid setter
                    continue;
                }
            } else if (parameterCount == 0 && returnType != Void.TYPE) {
                // No arguments, non-void return: candidate getter. The length check guarantees
                // methodName.substring(3) (used later to derive the JSON key) is non-empty.
                if (hasPropertyNameOverride) {
                    // If a property override is present, it MUST be valid. We don't fallback
                    // to using the method name
                    if (!Utility.isNullOrEmpty(method.getAnnotation(PropertyName.class).value())) {
                        continue;
                    }
                } else if (methodName.startsWith("get") && methodName.length() > 3) {
                    // Looks like a valid getter
                    continue;
                }
            }
            throw new FacebookGraphObjectException("Factory can't proxy method: " + method.toString());
        }
        recordClassHasBeenVerified(graphObjectClass);
    }
// If expectedType is a generic type, expectedTypeAsParameterizedType must be provided in order to determine
// generic parameter types.
static <U> U coerceValueToExpectedType(Object value, Class<U> expectedType,
ParameterizedType expectedTypeAsParameterizedType) {
if (value == null) {
if (boolean.class.equals(expectedType)) {
@SuppressWarnings("unchecked")
U result = (U) (Boolean) false;
return result;
} else if (char.class.equals(expectedType)) {
@SuppressWarnings("unchecked")
U result = (U) (Character) '\0';
return result;
} else if (expectedType.isPrimitive()) {
@SuppressWarnings("unchecked")
U result = (U) (Number) 0;
return result;
} else {
return null;
}
}
Class<?> valueType = value.getClass();
if (expectedType.isAssignableFrom(valueType)) {
@SuppressWarnings("unchecked")
U result = (U) value;
return result;
}
if (expectedType.isPrimitive()) {
// If the result is a primitive, let the runtime succeed or fail at unboxing it.
@SuppressWarnings("unchecked")
U result = (U) value;
return result;
}
if (GraphObject.class.isAssignableFrom(expectedType)) {
@SuppressWarnings("unchecked")
Class<? extends GraphObject> graphObjectClass = (Class<? extends GraphObject>) expectedType;
// We need a GraphObject, but we don't have one.
if (JSONObject.class.isAssignableFrom(valueType)) {
// We can wrap a JSONObject as a GraphObject.
@SuppressWarnings("unchecked")
U result = (U) createGraphObjectProxy(graphObjectClass, (JSONObject) value);
return result;
} else if (GraphObject.class.isAssignableFrom(valueType)) {
// We can cast a GraphObject-derived class to another GraphObject-derived class.
@SuppressWarnings("unchecked")
U result = (U) ((GraphObject) value).cast(graphObjectClass);
return result;
} else {
throw new FacebookGraphObjectException("Can't create GraphObject from " + valueType.getName());
}
} else if (Iterable.class.equals(expectedType) || Collection.class.equals(expectedType)
|| List.class.equals(expectedType) || GraphObjectList.class.equals(expectedType)) {
if (expectedTypeAsParameterizedType == null) {
throw new FacebookGraphObjectException("can't infer generic type of: " + expectedType.toString());
}
Type[] actualTypeArguments = expectedTypeAsParameterizedType.getActualTypeArguments();
if (actualTypeArguments == null || actualTypeArguments.length != 1
|| !(actualTypeArguments[0] instanceof Class<?>)) {
throw new FacebookGraphObjectException(
"Expect collection properties to be of a type with exactly one generic parameter.");
}
Class<?> collectionGenericArgument = (Class<?>) actualTypeArguments[0];
if (JSONArray.class.isAssignableFrom(valueType)) {
JSONArray jsonArray = (JSONArray) value;
@SuppressWarnings("unchecked")
U result = (U) createList(jsonArray, collectionGenericArgument);
return result;
} else {
throw new FacebookGraphObjectException("Can't create Collection from " + valueType.getName());
}
} else if (String.class.equals(expectedType)) {
if (Double.class.isAssignableFrom(valueType) ||
Float.class.isAssignableFrom(valueType)) {
@SuppressWarnings("unchecked")
U result = (U) String.format("%f", value);
return result;
} else if (Number.class.isAssignableFrom(valueType)) {
@SuppressWarnings("unchecked")
U result = (U) String.format("%d", value);
return result;
}
} else if (Date.class.equals(expectedType)) {
if (String.class.isAssignableFrom(valueType)) {
for (SimpleDateFormat format : dateFormats) {
try {
Date date = format.parse((String) value);
if (date != null) {
@SuppressWarnings("unchecked")
U result = (U) date;
return result;
}
} catch (ParseException e) {
// Keep going.
}
}
}
}
throw new FacebookGraphObjectException("Can't convert type" + valueType.getName() + " to "
+ expectedType.getName());
}
static String convertCamelCaseToLowercaseWithUnderscores(String string) {
string = string.replaceAll("([a-z])([A-Z])", "$1_$2");
return string.toLowerCase(Locale.US);
}
private static Object getUnderlyingJSONObject(Object obj) {
if (obj == null) {
return null;
}
Class<?> objClass = obj.getClass();
if (GraphObject.class.isAssignableFrom(objClass)) {
GraphObject graphObject = (GraphObject) obj;
return graphObject.getInnerJSONObject();
} else if (GraphObjectList.class.isAssignableFrom(objClass)) {
GraphObjectList<?> graphObjectList = (GraphObjectList<?>) obj;
return graphObjectList.getInnerJSONArray();
} else if (Iterable.class.isAssignableFrom(objClass)) {
JSONArray jsonArray = new JSONArray();
Iterable<?> iterable = (Iterable<?>) obj;
for (Object o : iterable ) {
if (GraphObject.class.isAssignableFrom(o.getClass())) {
jsonArray.put(((GraphObject)o).getInnerJSONObject());
} else {
jsonArray.put(o);
}
}
return jsonArray;
}
return obj;
}
private abstract static class ProxyBase<STATE> implements InvocationHandler {
// Pre-loaded Method objects for the methods in java.lang.Object
private static final String EQUALS_METHOD = "equals";
private static final String TOSTRING_METHOD = "toString";
protected final STATE state;
protected ProxyBase(STATE state) {
this.state = state;
}
// Declared to return Object just to simplify implementation of proxy helpers.
protected final Object throwUnexpectedMethodSignature(Method method) {
throw new FacebookGraphObjectException(getClass().getName() + " got an unexpected method signature: "
+ method.toString());
}
protected final Object proxyObjectMethods(Object proxy, Method method, Object[] args) throws Throwable {
String methodName = method.getName();
if (methodName.equals(EQUALS_METHOD)) {
Object other = args[0];
if (other == null) {
return false;
}
InvocationHandler handler = Proxy.getInvocationHandler(other);
if (!(handler instanceof GraphObjectProxy)) {
return false;
}
GraphObjectProxy otherProxy = (GraphObjectProxy) handler;
return this.state.equals(otherProxy.state);
} else if (methodName.equals(TOSTRING_METHOD)) {
return toString();
}
// For others, just defer to the implementation object.
return method.invoke(this.state, args);
}
}
private final static class GraphObjectProxy extends ProxyBase<JSONObject> {
private static final String CLEAR_METHOD = "clear";
private static final String CONTAINSKEY_METHOD = "containsKey";
private static final String CONTAINSVALUE_METHOD = "containsValue";
private static final String ENTRYSET_METHOD = "entrySet";
private static final String GET_METHOD = "get";
private static final String ISEMPTY_METHOD = "isEmpty";
private static final String KEYSET_METHOD = "keySet";
private static final String PUT_METHOD = "put";
private static final String PUTALL_METHOD = "putAll";
private static final String REMOVE_METHOD = "remove";
private static final String SIZE_METHOD = "size";
private static final String VALUES_METHOD = "values";
private static final String CAST_METHOD = "cast";
private static final String CASTTOMAP_METHOD = "asMap";
private static final String GETPROPERTY_METHOD = "getProperty";
private static final String GETPROPERTYAS_METHOD = "getPropertyAs";
private static final String GETPROPERTYASLIST_METHOD = "getPropertyAsList";
private static final String SETPROPERTY_METHOD = "setProperty";
private static final String REMOVEPROPERTY_METHOD = "removeProperty";
private static final String GETINNERJSONOBJECT_METHOD = "getInnerJSONObject";
        // The interface this proxy was created as; consulted by the cast() handling and toString().
        private final Class<?> graphObjectClass;
        // Wraps the given JSON state for typed access as graphObjectClass.
        public GraphObjectProxy(JSONObject state, Class<?> graphObjectClass) {
            super(state);
            this.graphObjectClass = graphObjectClass;
        }
        @Override
        public String toString() {
            return String.format("GraphObject{graphObjectClass=%s, state=%s}", graphObjectClass.getSimpleName(), state);
        }
        // Routes proxy calls by the interface that declared the method: java.lang.Object
        // methods, Map methods, core GraphObject methods, or user-declared getters/setters
        // on a GraphObject-derived interface.
        @Override
        public final Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            Class<?> declaringClass = method.getDeclaringClass();
            if (declaringClass == Object.class) {
                return proxyObjectMethods(proxy, method, args);
            } else if (declaringClass == Map.class) {
                return proxyMapMethods(method, args);
            } else if (declaringClass == GraphObject.class) {
                return proxyGraphObjectMethods(proxy, method, args);
            } else if (GraphObject.class.isAssignableFrom(declaringClass)) {
                // Any other GraphObject-derived interface: treat as a property getter/setter.
                return proxyGraphObjectGettersAndSetters(method, args);
            }
            return throwUnexpectedMethodSignature(method);
        }
private final Object proxyMapMethods(Method method, Object[] args) {
String methodName = method.getName();
if (methodName.equals(CLEAR_METHOD)) {
JsonUtil.jsonObjectClear(this.state);
return null;
} else if (methodName.equals(CONTAINSKEY_METHOD)) {
return this.state.has((String) args[0]);
} else if (methodName.equals(CONTAINSVALUE_METHOD)) {
return JsonUtil.jsonObjectContainsValue(this.state, args[0]);
} else if (methodName.equals(ENTRYSET_METHOD)) {
return JsonUtil.jsonObjectEntrySet(this.state);
} else if (methodName.equals(GET_METHOD)) {
return this.state.opt((String) args[0]);
} else if (methodName.equals(ISEMPTY_METHOD)) {
return this.state.length() == 0;
} else if (methodName.equals(KEYSET_METHOD)) {
return JsonUtil.jsonObjectKeySet(this.state);
} else if (methodName.equals(PUT_METHOD)) {
return setJSONProperty(args);
} else if (methodName.equals(PUTALL_METHOD)) {
Map<String, Object> map = null;
if (args[0] instanceof Map<?, ?>) {
@SuppressWarnings("unchecked")
Map<String, Object> castMap = (Map<String, Object>) args[0];
map = castMap;
} else if (args[0] instanceof GraphObject) {
map = ((GraphObject) args[0]).asMap();
} else {
return null;
}
JsonUtil.jsonObjectPutAll(this.state, map);
return null;
} else if (methodName.equals(REMOVE_METHOD)) {
this.state.remove((String) args[0]);
return null;
} else if (methodName.equals(SIZE_METHOD)) {
return this.state.length();
} else if (methodName.equals(VALUES_METHOD)) {
return JsonUtil.jsonObjectValues(this.state);
}
return throwUnexpectedMethodSignature(method);
}
        // Implements the methods declared directly on the GraphObject interface itself:
        // cast, asMap, getInnerJSONObject, and the generic get/set/removeProperty family.
        private final Object proxyGraphObjectMethods(Object proxy, Method method, Object[] args) {
            String methodName = method.getName();
            if (methodName.equals(CAST_METHOD)) {
                @SuppressWarnings("unchecked")
                Class<? extends GraphObject> graphObjectClass = (Class<? extends GraphObject>) args[0];
                // If the requested interface is already implemented, reuse this proxy as-is.
                if (graphObjectClass != null &&
                        graphObjectClass.isAssignableFrom(this.graphObjectClass)) {
                    return proxy;
                }
                // NOTE(review): a null cast target falls through to createGraphObjectProxy,
                // which will NPE inside verifyCanProxyClass -- confirm callers never pass null.
                return Factory.createGraphObjectProxy(graphObjectClass, this.state);
            } else if (methodName.equals(GETINNERJSONOBJECT_METHOD)) {
                InvocationHandler handler = Proxy.getInvocationHandler(proxy);
                GraphObjectProxy otherProxy = (GraphObjectProxy) handler;
                return otherProxy.state;
            } else if (methodName.equals(CASTTOMAP_METHOD)) {
                // asMap(): a Map view backed by the same JSONObject state.
                return Factory.createGraphObjectProxyForMap(this.state);
            } else if (methodName.equals(GETPROPERTY_METHOD)) {
                return state.opt((String) args[0]);
            } else if (methodName.equals(GETPROPERTYAS_METHOD)) {
                Object value = state.opt((String) args[0]);
                Class<?> expectedType = (Class<?>) args[1];
                return coerceValueToExpectedType(value, expectedType, null);
            } else if (methodName.equals(GETPROPERTYASLIST_METHOD)) {
                Object value = state.opt((String) args[0]);
                final Class<?> expectedType = (Class<?>) args[1];
                // Synthesize a ParameterizedType describing GraphObjectList<expectedType> so
                // the coercion helper can infer the element type.
                ParameterizedType parameterizedType = new ParameterizedType() {
                    @Override
                    public Type[] getActualTypeArguments() {
                        return new Type[]{ expectedType };
                    }
                    @Override
                    public Type getOwnerType() {
                        return null;
                    }
                    @Override
                    public Type getRawType() {
                        return GraphObjectList.class;
                    }
                };
                return coerceValueToExpectedType(value, GraphObjectList.class, parameterizedType);
            } else if (methodName.equals(SETPROPERTY_METHOD)) {
                return setJSONProperty(args);
            } else if (methodName.equals(REMOVEPROPERTY_METHOD)) {
                this.state.remove((String) args[0]);
                return null;
            }
            return throwUnexpectedMethodSignature(method);
        }
private Object createGraphObjectsFromParameters(CreateGraphObject createGraphObject, Object value) {
if (createGraphObject != null &&
!Utility.isNullOrEmpty(createGraphObject.value())) {
String propertyName = createGraphObject.value();
if (List.class.isAssignableFrom(value.getClass())) {
GraphObjectList<GraphObject> graphObjects = Factory.createList(GraphObject.class);
@SuppressWarnings("unchecked")
List<Object> values = (List<Object>)value;
for (Object obj : values) {
GraphObject graphObject = Factory.create();
graphObject.setProperty(propertyName, obj);
graphObjects.add(graphObject);
}
value = graphObjects;
} else {
GraphObject graphObject = Factory.create();
graphObject.setProperty(propertyName, value);
value = graphObject;
}
}
return value;
}
        // Handles property access for methods declared on GraphObject-derived interfaces.
        // The JSON key comes from @PropertyName when present, otherwise from the method name
        // minus its 3-character get/set prefix, converted to lowercase_with_underscores
        // (verifyCanProxyClass guarantees the name is longer than the prefix).
        private final Object proxyGraphObjectGettersAndSetters(Method method, Object[] args) throws JSONException {
            String methodName = method.getName();
            int parameterCount = method.getParameterTypes().length;
            PropertyName propertyNameOverride = method.getAnnotation(PropertyName.class);
            String key = propertyNameOverride != null ? propertyNameOverride.value() :
                convertCamelCaseToLowercaseWithUnderscores(methodName.substring(3));
            // If it's a get or a set on a GraphObject-derived class, we can handle it.
            if (parameterCount == 0) {
                // Has to be a getter. ASSUMPTION: The GraphObject-derived class has been verified
                Object value = this.state.opt(key);
                Class<?> expectedType = method.getReturnType();
                // Generic return types (e.g. GraphObjectList<T>) need the parameterized type to
                // infer the element type during coercion.
                Type genericReturnType = method.getGenericReturnType();
                ParameterizedType parameterizedReturnType = null;
                if (genericReturnType instanceof ParameterizedType) {
                    parameterizedReturnType = (ParameterizedType) genericReturnType;
                }
                value = coerceValueToExpectedType(value, expectedType, parameterizedReturnType);
                return value;
            } else if (parameterCount == 1) {
                // Has to be a setter. ASSUMPTION: The GraphObject-derived class has been verified
                CreateGraphObject createGraphObjectAnnotation = method.getAnnotation(CreateGraphObject.class);
                Object value = createGraphObjectsFromParameters(createGraphObjectAnnotation, args[0]);
                // If this is a wrapped object, store the underlying JSONObject instead, in order to serialize
                // correctly.
                value = getUnderlyingJSONObject(value);
                this.state.putOpt(key, value);
                return null;
            }
            return throwUnexpectedMethodSignature(method);
        }
private Object setJSONProperty(Object[] args) {
String name = (String) args[0];
Object property = args[1];
Object value = getUnderlyingJSONObject(property);
try {
state.putOpt(name, value);
} catch (JSONException e) {
throw new IllegalArgumentException(e);
}
return null;
}
}
private final static class GraphObjectListImpl<T> extends AbstractList<T> implements GraphObjectList<T> {
private final JSONArray state;
private final Class<?> itemType;
        // Wraps an existing JSONArray as a typed list; neither argument may be null.
        public GraphObjectListImpl(JSONArray state, Class<?> itemType) {
            Validate.notNull(state, "state");
            Validate.notNull(itemType, "itemType");
            this.state = state;
            this.itemType = itemType;
        }
        @Override
        public String toString() {
            return String.format("GraphObjectList{itemType=%s, state=%s}", itemType.getSimpleName(), state);
        }
@Override
public void add(int location, T object) {
// We only support adding at the end of the list, due to JSONArray restrictions.
if (location < 0) {
throw new IndexOutOfBoundsException();
} else if (location < size()) {
throw new UnsupportedOperationException("Only adding items at the end of the list is supported.");
}
put(location, object);
}
@Override
public T set(int location, T object) {
checkIndex(location);
T result = get(location);
put(location, object);
return result;
}
@Override
public int hashCode() {
return state.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
} else if (this == obj) {
return true;
} else if (getClass() != obj.getClass()) {
return false;
}
@SuppressWarnings("unchecked")
GraphObjectListImpl<T> other = (GraphObjectListImpl<T>) obj;
return state.equals(other.state);
}
@SuppressWarnings("unchecked")
@Override
public T get(int location) {
checkIndex(location);
Object value = state.opt(location);
// Class<?> expectedType = method.getReturnType();
// Type genericType = method.getGenericReturnType();
T result = (T) coerceValueToExpectedType(value, itemType, null);
return result;
}
@Override
public int size() {
return state.length();
}
@Override
public final <U extends GraphObject> GraphObjectList<U> castToListOf(Class<U> graphObjectClass) {
if (GraphObject.class.isAssignableFrom(itemType)) {
if (graphObjectClass.isAssignableFrom(itemType)) {
@SuppressWarnings("unchecked")
GraphObjectList<U> result = (GraphObjectList<U>)this;
return result;
}
return createList(state, graphObjectClass);
} else {
throw new FacebookGraphObjectException("Can't cast GraphObjectCollection of non-GraphObject type "
+ itemType);
}
}
@Override
public final JSONArray getInnerJSONArray() {
return state;
}
        // JSONArray offers no element removal, so the destructive List operations below are
        // all unsupported.
        @Override
        public void clear() {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean remove(Object o) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean removeAll(Collection<?> c) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean retainAll(Collection<?> c) {
            throw new UnsupportedOperationException();
        }
private void checkIndex(int index) {
if (index < 0 || index >= state.length()) {
throw new IndexOutOfBoundsException();
}
}
private void put(int index, T obj) {
Object underlyingObject = getUnderlyingJSONObject(obj);
try {
state.put(index, underlyingObject);
} catch (JSONException e) {
throw new IllegalArgumentException(e);
}
}
}
}
}
| |
package us.kbase.kbasefeaturevalues;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
/**
* <p>Original spec-file type: SubmatrixStat</p>
* <pre>
* Data type for bulk queries. It provides various statistics calculated on sub-matrix. The sub-matrix is defined by a subset of rows and columns via parameters.
 * Parameters will also define the required types of statistics.
*
* mtx_descriptor - basic properties of the source matrix
* row_descriptors - descriptor for each row in a subset defined in the parameters
* column_descriptors - descriptor for each column in a subset defined in the parameters
* row_set_stats - basic statistics for a subset of rows calculated on a subset of columns
* column_set_stat - basic statistics for a subset of columns calculated on a subset of rows
 * mtx_row_set_stat - basic statistics for a subset of rows calculated on ALL columns in the matrix (can be used as a background in comparison with row_set_stats)
 * mtx_column_set_stat - basic statistics for a subset of columns calculated on ALL rows in the matrix (can be used as a background in comparison with column_set_stat)
 * row_pairwise_correlation - pairwise Pearson correlation for a subset of rows (features)
 * column_pairwise_correlation - pairwise Pearson correlation for a subset of columns (conditions)
* values - sub-matrix representing actual values for a given subset of rows and a subset of columns
* </pre>
*
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
@Generated("com.googlecode.jsonschema2pojo")
@JsonPropertyOrder({
"mtx_descriptor",
"row_descriptors",
"column_descriptors",
"row_set_stats",
"column_set_stat",
"mtx_row_set_stat",
"mtx_column_set_stat",
"row_pairwise_correlation",
"column_pairwise_correlation",
"values"
})
public class SubmatrixStat {
/**
* <p>Original spec-file type: MatrixDescriptor</p>
* <pre>
* ******************************************
* * data API: data transfer objects (DTOs) *
* *****************************************
* </pre>
*
*/
@JsonProperty("mtx_descriptor")
private MatrixDescriptor mtxDescriptor;
@JsonProperty("row_descriptors")
private List<us.kbase.kbasefeaturevalues.ItemDescriptor> rowDescriptors;
@JsonProperty("column_descriptors")
private List<us.kbase.kbasefeaturevalues.ItemDescriptor> columnDescriptors;
/**
* <p>Original spec-file type: ItemSetStat</p>
* <pre>
* Same as ItemStat, but for a set of Items. Actually it can be modeled as a list<ItemStat>, but this way we can optimize data transfer in two ways:
* 1. In parameters we can specify that we need a subset of properties, e.g. only "avgs".
* 2. No field names in json (avg, min, max, etc) for each element in the list
* indeces_for - indeces of items in a collection FOR which all statitics is collected
* indeces_on - indeces of items in the associated vector ON which the statistics is calculated
* size - number of elements defined by indeces_on (expected to be the same for all items defined by indeces_for)
* avgs - mean values for each item defined by indeces_for across all elements defined by indeces_on
* mins - min values for each item defined by indeces_for across all elements defined by indeces_on
* maxs - max values for each item defined by indeces_for across all elements defined by indeces_on
* stds - std values for each item defined by indeces_for across all elements defined by indeces_on
* missing_values - number of missing values for each item defined by indeces_for across all elements defined by indeces_on
* </pre>
*
*/
@JsonProperty("row_set_stats")
private ItemSetStat rowSetStats;
/**
* <p>Original spec-file type: ItemSetStat</p>
* <pre>
* Same as ItemStat, but for a set of Items. Actually it can be modeled as a list<ItemStat>, but this way we can optimize data transfer in two ways:
* 1. In parameters we can specify that we need a subset of properties, e.g. only "avgs".
* 2. No field names in json (avg, min, max, etc) for each element in the list
* indeces_for - indeces of items in a collection FOR which all statitics is collected
* indeces_on - indeces of items in the associated vector ON which the statistics is calculated
* size - number of elements defined by indeces_on (expected to be the same for all items defined by indeces_for)
* avgs - mean values for each item defined by indeces_for across all elements defined by indeces_on
* mins - min values for each item defined by indeces_for across all elements defined by indeces_on
* maxs - max values for each item defined by indeces_for across all elements defined by indeces_on
* stds - std values for each item defined by indeces_for across all elements defined by indeces_on
* missing_values - number of missing values for each item defined by indeces_for across all elements defined by indeces_on
* </pre>
*
*/
@JsonProperty("column_set_stat")
private ItemSetStat columnSetStat;
/**
* <p>Original spec-file type: ItemSetStat</p>
* <pre>
* Same as ItemStat, but for a set of Items. Actually it can be modeled as a list<ItemStat>, but this way we can optimize data transfer in two ways:
* 1. In parameters we can specify that we need a subset of properties, e.g. only "avgs".
* 2. No field names in json (avg, min, max, etc) for each element in the list
* indeces_for - indeces of items in a collection FOR which all statitics is collected
* indeces_on - indeces of items in the associated vector ON which the statistics is calculated
* size - number of elements defined by indeces_on (expected to be the same for all items defined by indeces_for)
* avgs - mean values for each item defined by indeces_for across all elements defined by indeces_on
* mins - min values for each item defined by indeces_for across all elements defined by indeces_on
* maxs - max values for each item defined by indeces_for across all elements defined by indeces_on
* stds - std values for each item defined by indeces_for across all elements defined by indeces_on
* missing_values - number of missing values for each item defined by indeces_for across all elements defined by indeces_on
* </pre>
*
*/
@JsonProperty("mtx_row_set_stat")
private ItemSetStat mtxRowSetStat;
/**
* <p>Original spec-file type: ItemSetStat</p>
* <pre>
* Same as ItemStat, but for a set of Items. Actually it can be modeled as a list<ItemStat>, but this way we can optimize data transfer in two ways:
* 1. In parameters we can specify that we need a subset of properties, e.g. only "avgs".
* 2. No field names in json (avg, min, max, etc) for each element in the list
* indeces_for - indeces of items in a collection FOR which all statitics is collected
* indeces_on - indeces of items in the associated vector ON which the statistics is calculated
* size - number of elements defined by indeces_on (expected to be the same for all items defined by indeces_for)
* avgs - mean values for each item defined by indeces_for across all elements defined by indeces_on
* mins - min values for each item defined by indeces_for across all elements defined by indeces_on
* maxs - max values for each item defined by indeces_for across all elements defined by indeces_on
* stds - std values for each item defined by indeces_for across all elements defined by indeces_on
* missing_values - number of missing values for each item defined by indeces_for across all elements defined by indeces_on
* </pre>
*
*/
@JsonProperty("mtx_column_set_stat")
private ItemSetStat mtxColumnSetStat;
/**
* <p>Original spec-file type: PairwiseComparison</p>
* <pre>
* To represent a pairwise comparison of several elements defined by 'indeces'.
* This data type can be used to model represent pairwise correlation of expression profiles for a set of genes.
* indeces - indeces of elements to be compared
* comparison_values - values representing a parituclar type of comparison between elements.
* Expected to be symmetric: comparison_values[i][j] = comparison_values[j][i].
* Diagonal values: comparison_values[i][i] = 0
*
* avgs - mean of comparison_values for each element
* mins - min of comparison_values for each element
* maxs - max of comparison_values for each element
* stds - std of comparison_values for each element
* </pre>
*
*/
@JsonProperty("row_pairwise_correlation")
private PairwiseComparison rowPairwiseCorrelation;
/**
* <p>Original spec-file type: PairwiseComparison</p>
* <pre>
* To represent a pairwise comparison of several elements defined by 'indeces'.
* This data type can be used to model represent pairwise correlation of expression profiles for a set of genes.
* indeces - indeces of elements to be compared
* comparison_values - values representing a parituclar type of comparison between elements.
* Expected to be symmetric: comparison_values[i][j] = comparison_values[j][i].
* Diagonal values: comparison_values[i][i] = 0
*
* avgs - mean of comparison_values for each element
* mins - min of comparison_values for each element
* maxs - max of comparison_values for each element
* stds - std of comparison_values for each element
* </pre>
*
*/
@JsonProperty("column_pairwise_correlation")
private PairwiseComparison columnPairwiseCorrelation;
@JsonProperty("values")
private List<List<Double>> values;
private Map<String, Object> additionalProperties = new HashMap<String, Object>();
/**
 * Returns the descriptor of the source matrix (original spec-file type:
 * MatrixDescriptor).
 */
@JsonProperty("mtx_descriptor")
public MatrixDescriptor getMtxDescriptor() {
    return this.mtxDescriptor;
}

/**
 * Sets the descriptor of the source matrix (original spec-file type:
 * MatrixDescriptor).
 */
@JsonProperty("mtx_descriptor")
public void setMtxDescriptor(MatrixDescriptor value) {
    this.mtxDescriptor = value;
}

/**
 * Fluent variant of {@link #setMtxDescriptor}; returns this object so
 * calls can be chained.
 */
public SubmatrixStat withMtxDescriptor(MatrixDescriptor value) {
    setMtxDescriptor(value);
    return this;
}
/** Returns the descriptors of the selected rows. */
@JsonProperty("row_descriptors")
public List<us.kbase.kbasefeaturevalues.ItemDescriptor> getRowDescriptors() {
    return this.rowDescriptors;
}

/** Sets the descriptors of the selected rows. */
@JsonProperty("row_descriptors")
public void setRowDescriptors(List<us.kbase.kbasefeaturevalues.ItemDescriptor> value) {
    this.rowDescriptors = value;
}

/** Fluent variant of {@link #setRowDescriptors}; returns this object. */
public SubmatrixStat withRowDescriptors(List<us.kbase.kbasefeaturevalues.ItemDescriptor> value) {
    setRowDescriptors(value);
    return this;
}
/** Returns the descriptors of the selected columns. */
@JsonProperty("column_descriptors")
public List<us.kbase.kbasefeaturevalues.ItemDescriptor> getColumnDescriptors() {
    return this.columnDescriptors;
}

/** Sets the descriptors of the selected columns. */
@JsonProperty("column_descriptors")
public void setColumnDescriptors(List<us.kbase.kbasefeaturevalues.ItemDescriptor> value) {
    this.columnDescriptors = value;
}

/** Fluent variant of {@link #setColumnDescriptors}; returns this object. */
public SubmatrixStat withColumnDescriptors(List<us.kbase.kbasefeaturevalues.ItemDescriptor> value) {
    setColumnDescriptors(value);
    return this;
}
/**
 * Returns the per-row statistics (original spec-file type: ItemSetStat).
 *
 * <p>An ItemSetStat packs statistics (avgs, mins, maxs, stds,
 * missing_values) for the items listed in indeces_for, computed across the
 * elements listed in indeces_on, as parallel lists; this avoids repeating
 * field names for every item in the transferred JSON.</p>
 */
@JsonProperty("row_set_stats")
public ItemSetStat getRowSetStats() {
    return this.rowSetStats;
}

/**
 * Sets the per-row statistics (original spec-file type: ItemSetStat).
 */
@JsonProperty("row_set_stats")
public void setRowSetStats(ItemSetStat value) {
    this.rowSetStats = value;
}

/**
 * Fluent variant of {@link #setRowSetStats}; returns this object so calls
 * can be chained.
 */
public SubmatrixStat withRowSetStats(ItemSetStat value) {
    setRowSetStats(value);
    return this;
}
/**
 * Returns the per-column statistics (original spec-file type: ItemSetStat).
 *
 * <p>An ItemSetStat packs statistics (avgs, mins, maxs, stds,
 * missing_values) for the items listed in indeces_for, computed across the
 * elements listed in indeces_on, as parallel lists; this avoids repeating
 * field names for every item in the transferred JSON.</p>
 */
@JsonProperty("column_set_stat")
public ItemSetStat getColumnSetStat() {
    return this.columnSetStat;
}

/**
 * Sets the per-column statistics (original spec-file type: ItemSetStat).
 */
@JsonProperty("column_set_stat")
public void setColumnSetStat(ItemSetStat value) {
    this.columnSetStat = value;
}

/**
 * Fluent variant of {@link #setColumnSetStat}; returns this object so
 * calls can be chained.
 */
public SubmatrixStat withColumnSetStat(ItemSetStat value) {
    setColumnSetStat(value);
    return this;
}
/**
 * Returns the row statistics computed over the whole source matrix
 * (original spec-file type: ItemSetStat).
 *
 * <p>An ItemSetStat packs statistics (avgs, mins, maxs, stds,
 * missing_values) for the items listed in indeces_for, computed across the
 * elements listed in indeces_on, as parallel lists.</p>
 */
@JsonProperty("mtx_row_set_stat")
public ItemSetStat getMtxRowSetStat() {
    return this.mtxRowSetStat;
}

/**
 * Sets the matrix-wide row statistics (original spec-file type: ItemSetStat).
 */
@JsonProperty("mtx_row_set_stat")
public void setMtxRowSetStat(ItemSetStat value) {
    this.mtxRowSetStat = value;
}

/**
 * Fluent variant of {@link #setMtxRowSetStat}; returns this object so
 * calls can be chained.
 */
public SubmatrixStat withMtxRowSetStat(ItemSetStat value) {
    setMtxRowSetStat(value);
    return this;
}
/**
 * Returns the column statistics computed over the whole source matrix
 * (original spec-file type: ItemSetStat).
 *
 * <p>An ItemSetStat packs statistics (avgs, mins, maxs, stds,
 * missing_values) for the items listed in indeces_for, computed across the
 * elements listed in indeces_on, as parallel lists.</p>
 */
@JsonProperty("mtx_column_set_stat")
public ItemSetStat getMtxColumnSetStat() {
    return this.mtxColumnSetStat;
}

/**
 * Sets the matrix-wide column statistics (original spec-file type: ItemSetStat).
 */
@JsonProperty("mtx_column_set_stat")
public void setMtxColumnSetStat(ItemSetStat value) {
    this.mtxColumnSetStat = value;
}

/**
 * Fluent variant of {@link #setMtxColumnSetStat}; returns this object so
 * calls can be chained.
 */
public SubmatrixStat withMtxColumnSetStat(ItemSetStat value) {
    setMtxColumnSetStat(value);
    return this;
}
/**
 * Returns the pairwise correlation of the row profiles (original
 * spec-file type: PairwiseComparison).
 *
 * <p>Its comparison_values are expected to be symmetric
 * (comparison_values[i][j] = comparison_values[j][i]) with zero diagonal;
 * avgs/mins/maxs/stds summarize the values per element.</p>
 */
@JsonProperty("row_pairwise_correlation")
public PairwiseComparison getRowPairwiseCorrelation() {
    return this.rowPairwiseCorrelation;
}

/**
 * Sets the pairwise correlation of the row profiles (original spec-file
 * type: PairwiseComparison).
 */
@JsonProperty("row_pairwise_correlation")
public void setRowPairwiseCorrelation(PairwiseComparison value) {
    this.rowPairwiseCorrelation = value;
}

/**
 * Fluent variant of {@link #setRowPairwiseCorrelation}; returns this
 * object so calls can be chained.
 */
public SubmatrixStat withRowPairwiseCorrelation(PairwiseComparison value) {
    setRowPairwiseCorrelation(value);
    return this;
}
/**
 * Returns the pairwise correlation of the column profiles (original
 * spec-file type: PairwiseComparison).
 *
 * <p>Its comparison_values are expected to be symmetric
 * (comparison_values[i][j] = comparison_values[j][i]) with zero diagonal;
 * avgs/mins/maxs/stds summarize the values per element.</p>
 */
@JsonProperty("column_pairwise_correlation")
public PairwiseComparison getColumnPairwiseCorrelation() {
    return this.columnPairwiseCorrelation;
}

/**
 * Sets the pairwise correlation of the column profiles (original
 * spec-file type: PairwiseComparison).
 */
@JsonProperty("column_pairwise_correlation")
public void setColumnPairwiseCorrelation(PairwiseComparison value) {
    this.columnPairwiseCorrelation = value;
}

/**
 * Fluent variant of {@link #setColumnPairwiseCorrelation}; returns this
 * object so calls can be chained.
 */
public SubmatrixStat withColumnPairwiseCorrelation(PairwiseComparison value) {
    setColumnPairwiseCorrelation(value);
    return this;
}
/** Returns the raw values of the selected submatrix. */
@JsonProperty("values")
public List<List<Double>> getValues() {
    return this.values;
}

/** Sets the raw values of the selected submatrix. */
@JsonProperty("values")
public void setValues(List<List<Double>> newValues) {
    this.values = newValues;
}

/** Fluent variant of {@link #setValues}; returns this object. */
public SubmatrixStat withValues(List<List<Double>> newValues) {
    setValues(newValues);
    return this;
}
/** Returns the JSON properties that were not mapped to a declared field. */
@JsonAnyGetter
public Map<String, Object> getAdditionalProperties() {
    return additionalProperties;
}

/** Records a JSON property that has no declared field. */
@JsonAnySetter
public void setAdditionalProperties(String key, Object val) {
    additionalProperties.put(key, val);
}
/** Returns a debug string listing every field of this SubmatrixStat. */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("SubmatrixStat");
    sb.append(" [mtxDescriptor=").append(mtxDescriptor);
    sb.append(", rowDescriptors=").append(rowDescriptors);
    sb.append(", columnDescriptors=").append(columnDescriptors);
    sb.append(", rowSetStats=").append(rowSetStats);
    sb.append(", columnSetStat=").append(columnSetStat);
    sb.append(", mtxRowSetStat=").append(mtxRowSetStat);
    sb.append(", mtxColumnSetStat=").append(mtxColumnSetStat);
    sb.append(", rowPairwiseCorrelation=").append(rowPairwiseCorrelation);
    sb.append(", columnPairwiseCorrelation=").append(columnPairwiseCorrelation);
    sb.append(", values=").append(values);
    sb.append(", additionalProperties=").append(additionalProperties);
    sb.append("]");
    return sb.toString();
}
}
| |
/**
* Copyright (c) 2000-2012 Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package com.liferay.wsrp.service.persistence;
import com.liferay.portal.kernel.bean.PortletBeanLocatorUtil;
import com.liferay.portal.kernel.dao.orm.DynamicQuery;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.util.OrderByComparator;
import com.liferay.portal.kernel.util.ReferenceRegistry;
import com.liferay.portal.service.ServiceContext;
import com.liferay.wsrp.model.WSRPConsumer;
import java.util.List;
/**
* The persistence utility for the w s r p consumer service. This utility wraps {@link WSRPConsumerPersistenceImpl} and provides direct access to the database for CRUD operations. This utility should only be used by the service layer, as it must operate within a transaction. Never access this utility in a JSP, controller, model, or other front-end class.
*
* <p>
* Caching information and settings can be found in <code>portal.properties</code>
* </p>
*
* @author Brian Wing Shun Chan
* @see WSRPConsumerPersistence
* @see WSRPConsumerPersistenceImpl
* @generated
*/
public class WSRPConsumerUtil {
/*
* NOTE FOR DEVELOPERS:
*
* Never modify this class directly. Modify <code>service.xml</code> and rerun ServiceBuilder to regenerate this class.
*/
/**
 * Clears the entity cache for all w s r p consumers.
 *
 * @see com.liferay.portal.service.persistence.BasePersistence#clearCache()
 */
public static void clearCache() {
getPersistence().clearCache();
}
/**
 * Clears the cache entries held for the given w s r p consumer.
 *
 * @param consumer the consumer whose cached state should be discarded
 * @see com.liferay.portal.service.persistence.BasePersistence#clearCache(com.liferay.portal.model.BaseModel)
 */
public static void clearCache(WSRPConsumer consumer) {
    getPersistence().clearCache(consumer);
}
/**
 * Returns the number of rows matching the dynamic query.
 *
 * <p>Declared {@code static} for consistency with every other method of
 * this utility class; the previous instance declaration made the method
 * effectively unusable, since callers never instantiate this utility.
 * Static invocation remains source-compatible for any existing
 * instance-style call sites.</p>
 *
 * @param dynamicQuery the dynamic query
 * @return the number of matching rows
 * @throws SystemException if a system exception occurred
 * @see com.liferay.portal.service.persistence.BasePersistence#countWithDynamicQuery(DynamicQuery)
 */
public static long countWithDynamicQuery(DynamicQuery dynamicQuery)
    throws SystemException {
    return getPersistence().countWithDynamicQuery(dynamicQuery);
}
/**
 * Performs the dynamic query and returns all matching rows.
 *
 * @param query the dynamic query
 * @return the matching w s r p consumers
 * @throws SystemException if a system exception occurred
 * @see com.liferay.portal.service.persistence.BasePersistence#findWithDynamicQuery(DynamicQuery)
 */
public static List<WSRPConsumer> findWithDynamicQuery(DynamicQuery query)
    throws SystemException {
    return getPersistence().findWithDynamicQuery(query);
}
/**
 * Performs the dynamic query and returns the range of matching rows.
 *
 * @param query the dynamic query
 * @param start lower bound of the result range (inclusive index)
 * @param end upper bound of the result range (exclusive index)
 * @return the range of matching w s r p consumers
 * @throws SystemException if a system exception occurred
 * @see com.liferay.portal.service.persistence.BasePersistence#findWithDynamicQuery(DynamicQuery, int, int)
 */
public static List<WSRPConsumer> findWithDynamicQuery(DynamicQuery query,
    int start, int end) throws SystemException {
    return getPersistence().findWithDynamicQuery(query, start, end);
}
/**
 * Performs the dynamic query and returns an ordered range of matching rows.
 *
 * @param query the dynamic query
 * @param start lower bound of the result range (inclusive index)
 * @param end upper bound of the result range (exclusive index)
 * @param comparator orders the results (optionally <code>null</code>)
 * @return the ordered range of matching w s r p consumers
 * @throws SystemException if a system exception occurred
 * @see com.liferay.portal.service.persistence.BasePersistence#findWithDynamicQuery(DynamicQuery, int, int, OrderByComparator)
 */
public static List<WSRPConsumer> findWithDynamicQuery(DynamicQuery query,
    int start, int end, OrderByComparator comparator)
    throws SystemException {
    return getPersistence().findWithDynamicQuery(query, start, end, comparator);
}
/**
 * Updates the given w s r p consumer in the database.
 *
 * @param consumer the consumer to persist
 * @param merge merge flag passed through to the persistence layer
 * @return the updated consumer
 * @throws SystemException if a system exception occurred
 * @see com.liferay.portal.service.persistence.BasePersistence#update(com.liferay.portal.model.BaseModel, boolean)
 */
public static WSRPConsumer update(WSRPConsumer consumer, boolean merge)
    throws SystemException {
    return getPersistence().update(consumer, merge);
}
/**
 * Updates the given w s r p consumer in the database within a service context.
 *
 * @param consumer the consumer to persist
 * @param merge merge flag passed through to the persistence layer
 * @param serviceContext the service context for the update
 * @return the updated consumer
 * @throws SystemException if a system exception occurred
 * @see com.liferay.portal.service.persistence.BasePersistence#update(com.liferay.portal.model.BaseModel, boolean, ServiceContext)
 */
public static WSRPConsumer update(WSRPConsumer consumer, boolean merge,
    ServiceContext serviceContext) throws SystemException {
    return getPersistence().update(consumer, merge, serviceContext);
}
/**
 * Caches the w s r p consumer in the entity cache if it is enabled.
 *
 * @param consumer the w s r p consumer to cache
 */
public static void cacheResult(WSRPConsumer consumer) {
    getPersistence().cacheResult(consumer);
}
/**
 * Caches the w s r p consumers in the entity cache if it is enabled.
 *
 * @param consumers the w s r p consumers to cache
 */
public static void cacheResult(List<WSRPConsumer> consumers) {
    getPersistence().cacheResult(consumers);
}
/**
 * Creates a new w s r p consumer with the primary key. Does not add the
 * consumer to the database.
 *
 * @param consumerId the primary key for the new w s r p consumer
 * @return the new w s r p consumer
 */
public static WSRPConsumer create(long consumerId) {
    return getPersistence().create(consumerId);
}
/**
 * Removes the w s r p consumer with the primary key from the database.
 * Also notifies the appropriate model listeners.
 *
 * @param consumerId the primary key of the w s r p consumer
 * @return the w s r p consumer that was removed
 * @throws com.liferay.wsrp.NoSuchConsumerException if no consumer has that primary key
 * @throws SystemException if a system exception occurred
 */
public static WSRPConsumer remove(long consumerId)
    throws SystemException, com.liferay.wsrp.NoSuchConsumerException {
    return getPersistence().remove(consumerId);
}
/**
 * Delegates the low-level update to the persistence implementation.
 *
 * @param consumer the w s r p consumer to persist
 * @param merge merge flag passed through to the persistence layer
 * @return the persisted consumer
 * @throws SystemException if a system exception occurred
 */
public static WSRPConsumer updateImpl(WSRPConsumer consumer, boolean merge)
    throws SystemException {
    return getPersistence().updateImpl(consumer, merge);
}
/**
 * Returns the w s r p consumer with the primary key or throws a
 * {@link com.liferay.wsrp.NoSuchConsumerException} if it could not be found.
 *
 * @param consumerId the primary key of the w s r p consumer
 * @return the w s r p consumer
 * @throws com.liferay.wsrp.NoSuchConsumerException if no consumer has that primary key
 * @throws SystemException if a system exception occurred
 */
public static WSRPConsumer findByPrimaryKey(long consumerId)
    throws SystemException, com.liferay.wsrp.NoSuchConsumerException {
    return getPersistence().findByPrimaryKey(consumerId);
}
/**
 * Returns the w s r p consumer with the primary key or <code>null</code>
 * if it could not be found.
 *
 * @param consumerId the primary key of the w s r p consumer
 * @return the w s r p consumer, or <code>null</code> if none matches
 * @throws SystemException if a system exception occurred
 */
public static WSRPConsumer fetchByPrimaryKey(long consumerId)
    throws SystemException {
    return getPersistence().fetchByPrimaryKey(consumerId);
}
/**
 * Returns all the w s r p consumers where uuid = ?.
 *
 * @param uuid the uuid
 * @return the matching w s r p consumers
 * @throws SystemException if a system exception occurred
 */
public static List<WSRPConsumer> findByUuid(String uuid)
    throws SystemException {
    return getPersistence().findByUuid(uuid);
}
/**
 * Returns a range of all the w s r p consumers where uuid = ?.
 *
 * <p><code>start</code>/<code>end</code> are result-set indexes, not
 * primary keys; passing {@link com.liferay.portal.kernel.dao.orm.QueryUtil#ALL_POS}
 * for both returns the full result set.</p>
 *
 * @param uuid the uuid
 * @param start lower bound of the range (inclusive index)
 * @param end upper bound of the range (exclusive index)
 * @return the range of matching w s r p consumers
 * @throws SystemException if a system exception occurred
 */
public static List<WSRPConsumer> findByUuid(String uuid, int start, int end)
    throws SystemException {
    return getPersistence().findByUuid(uuid, start, end);
}
/**
 * Returns an ordered range of all the w s r p consumers where uuid = ?.
 *
 * <p><code>start</code>/<code>end</code> are result-set indexes, not
 * primary keys; passing {@link com.liferay.portal.kernel.dao.orm.QueryUtil#ALL_POS}
 * for both returns the full result set.</p>
 *
 * @param uuid the uuid
 * @param start lower bound of the range (inclusive index)
 * @param end upper bound of the range (exclusive index)
 * @param comparator orders the results (optionally <code>null</code>)
 * @return the ordered range of matching w s r p consumers
 * @throws SystemException if a system exception occurred
 */
public static List<WSRPConsumer> findByUuid(String uuid, int start, int end,
    OrderByComparator comparator) throws SystemException {
    return getPersistence().findByUuid(uuid, start, end, comparator);
}
/**
 * Returns the first w s r p consumer in the ordered set where uuid = ?.
 *
 * @param uuid the uuid
 * @param comparator orders the set (optionally <code>null</code>)
 * @return the first matching w s r p consumer
 * @throws com.liferay.wsrp.NoSuchConsumerException if no consumer matches
 * @throws SystemException if a system exception occurred
 */
public static WSRPConsumer findByUuid_First(String uuid,
    OrderByComparator comparator)
    throws SystemException, com.liferay.wsrp.NoSuchConsumerException {
    return getPersistence().findByUuid_First(uuid, comparator);
}
/**
 * Returns the last w s r p consumer in the ordered set where uuid = ?.
 *
 * @param uuid the uuid
 * @param comparator orders the set (optionally <code>null</code>)
 * @return the last matching w s r p consumer
 * @throws com.liferay.wsrp.NoSuchConsumerException if no consumer matches
 * @throws SystemException if a system exception occurred
 */
public static WSRPConsumer findByUuid_Last(String uuid,
    OrderByComparator comparator)
    throws SystemException, com.liferay.wsrp.NoSuchConsumerException {
    return getPersistence().findByUuid_Last(uuid, comparator);
}
/**
 * Returns the w s r p consumers before and after the current one in the
 * ordered set where uuid = ?.
 *
 * @param consumerId the primary key of the current w s r p consumer
 * @param uuid the uuid
 * @param comparator orders the set (optionally <code>null</code>)
 * @return the previous, current, and next w s r p consumer
 * @throws com.liferay.wsrp.NoSuchConsumerException if no consumer has that primary key
 * @throws SystemException if a system exception occurred
 */
public static WSRPConsumer[] findByUuid_PrevAndNext(long consumerId,
    String uuid, OrderByComparator comparator)
    throws SystemException, com.liferay.wsrp.NoSuchConsumerException {
    return getPersistence().findByUuid_PrevAndNext(consumerId, uuid, comparator);
}
/**
 * Returns all the w s r p consumers where companyId = ?.
 *
 * @param companyId the company ID
 * @return the matching w s r p consumers
 * @throws SystemException if a system exception occurred
 */
public static List<WSRPConsumer> findByCompanyId(long companyId)
    throws SystemException {
    return getPersistence().findByCompanyId(companyId);
}
/**
 * Returns a range of all the w s r p consumers where companyId = ?.
 *
 * <p><code>start</code>/<code>end</code> are result-set indexes, not
 * primary keys; passing {@link com.liferay.portal.kernel.dao.orm.QueryUtil#ALL_POS}
 * for both returns the full result set.</p>
 *
 * @param companyId the company ID
 * @param start lower bound of the range (inclusive index)
 * @param end upper bound of the range (exclusive index)
 * @return the range of matching w s r p consumers
 * @throws SystemException if a system exception occurred
 */
public static List<WSRPConsumer> findByCompanyId(long companyId, int start,
    int end) throws SystemException {
    return getPersistence().findByCompanyId(companyId, start, end);
}
/**
 * Returns an ordered range of all the w s r p consumers where companyId = ?.
 *
 * <p><code>start</code>/<code>end</code> are result-set indexes, not
 * primary keys; passing {@link com.liferay.portal.kernel.dao.orm.QueryUtil#ALL_POS}
 * for both returns the full result set.</p>
 *
 * @param companyId the company ID
 * @param start lower bound of the range (inclusive index)
 * @param end upper bound of the range (exclusive index)
 * @param comparator orders the results (optionally <code>null</code>)
 * @return the ordered range of matching w s r p consumers
 * @throws SystemException if a system exception occurred
 */
public static List<WSRPConsumer> findByCompanyId(long companyId, int start,
    int end, OrderByComparator comparator) throws SystemException {
    return getPersistence().findByCompanyId(companyId, start, end, comparator);
}
/**
* Returns the first WSRP consumer in the ordered set where companyId = ?.
*
* @param companyId the company ID
* @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
* @return the first matching WSRP consumer
* @throws com.liferay.wsrp.NoSuchConsumerException if a matching WSRP consumer could not be found
* @throws SystemException if a system exception occurred
*/
public static com.liferay.wsrp.model.WSRPConsumer findByCompanyId_First(
long companyId,
com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
throws com.liferay.portal.kernel.exception.SystemException,
com.liferay.wsrp.NoSuchConsumerException {
return getPersistence()
.findByCompanyId_First(companyId, orderByComparator);
}
/**
* Returns the last WSRP consumer in the ordered set where companyId = ?.
*
* @param companyId the company ID
* @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
* @return the last matching WSRP consumer
* @throws com.liferay.wsrp.NoSuchConsumerException if a matching WSRP consumer could not be found
* @throws SystemException if a system exception occurred
*/
public static com.liferay.wsrp.model.WSRPConsumer findByCompanyId_Last(
long companyId,
com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
throws com.liferay.portal.kernel.exception.SystemException,
com.liferay.wsrp.NoSuchConsumerException {
return getPersistence()
.findByCompanyId_Last(companyId, orderByComparator);
}
/**
* Returns the WSRP consumers before and after the current WSRP consumer in the ordered set where companyId = ?.
*
* @param wsrpConsumerId the primary key of the current WSRP consumer
* @param companyId the company ID
* @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
* @return the previous, current, and next WSRP consumer
* @throws com.liferay.wsrp.NoSuchConsumerException if a WSRP consumer with the primary key could not be found
* @throws SystemException if a system exception occurred
*/
public static com.liferay.wsrp.model.WSRPConsumer[] findByCompanyId_PrevAndNext(
long wsrpConsumerId, long companyId,
com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
throws com.liferay.portal.kernel.exception.SystemException,
com.liferay.wsrp.NoSuchConsumerException {
return getPersistence()
.findByCompanyId_PrevAndNext(wsrpConsumerId, companyId,
orderByComparator);
}
/**
* Returns the WSRP consumer where url = ? or throws a {@link com.liferay.wsrp.NoSuchConsumerException} if it could not be found.
*
* @param url the url
* @return the matching WSRP consumer
* @throws com.liferay.wsrp.NoSuchConsumerException if a matching WSRP consumer could not be found
* @throws SystemException if a system exception occurred
*/
public static com.liferay.wsrp.model.WSRPConsumer findByConsumerURL(
java.lang.String url)
throws com.liferay.portal.kernel.exception.SystemException,
com.liferay.wsrp.NoSuchConsumerException {
return getPersistence().findByConsumerURL(url);
}
/**
* Returns the WSRP consumer where url = ? or returns <code>null</code> if it could not be found. Uses the finder cache.
*
* @param url the url
* @return the matching WSRP consumer, or <code>null</code> if a matching WSRP consumer could not be found
* @throws SystemException if a system exception occurred
*/
public static com.liferay.wsrp.model.WSRPConsumer fetchByConsumerURL(
java.lang.String url)
throws com.liferay.portal.kernel.exception.SystemException {
return getPersistence().fetchByConsumerURL(url);
}
/**
* Returns the WSRP consumer where url = ? or returns <code>null</code> if it could not be found, optionally using the finder cache.
*
* @param url the url
* @param retrieveFromCache whether to use the finder cache
* @return the matching WSRP consumer, or <code>null</code> if a matching WSRP consumer could not be found
* @throws SystemException if a system exception occurred
*/
public static com.liferay.wsrp.model.WSRPConsumer fetchByConsumerURL(
java.lang.String url, boolean retrieveFromCache)
throws com.liferay.portal.kernel.exception.SystemException {
return getPersistence().fetchByConsumerURL(url, retrieveFromCache);
}
/**
* Returns all the WSRP consumers.
*
* @return the WSRP consumers
* @throws SystemException if a system exception occurred
*/
public static java.util.List<com.liferay.wsrp.model.WSRPConsumer> findAll()
throws com.liferay.portal.kernel.exception.SystemException {
return getPersistence().findAll();
}
/**
* Returns a range of all the WSRP consumers.
*
* <p>
* Useful when paginating results. Returns a maximum of <code>end - start</code> instances; <code>start</code> and <code>end</code> are indexes into the result set (not primary keys), so <code>0</code> refers to the first result. Passing {@link com.liferay.portal.kernel.dao.orm.QueryUtil#ALL_POS} for both returns the full result set.
* </p>
*
* @param start the lower bound of the range of WSRP consumers
* @param end the upper bound of the range of WSRP consumers (not inclusive)
* @return the range of WSRP consumers
* @throws SystemException if a system exception occurred
*/
public static java.util.List<com.liferay.wsrp.model.WSRPConsumer> findAll(
int start, int end)
throws com.liferay.portal.kernel.exception.SystemException {
return getPersistence().findAll(start, end);
}
/**
* Returns an ordered range of all the WSRP consumers.
*
* <p>
* Useful when paginating results. Returns a maximum of <code>end - start</code> instances; <code>start</code> and <code>end</code> are indexes into the result set (not primary keys), so <code>0</code> refers to the first result. Passing {@link com.liferay.portal.kernel.dao.orm.QueryUtil#ALL_POS} for both returns the full result set.
* </p>
*
* @param start the lower bound of the range of WSRP consumers
* @param end the upper bound of the range of WSRP consumers (not inclusive)
* @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
* @return the ordered range of WSRP consumers
* @throws SystemException if a system exception occurred
*/
public static java.util.List<com.liferay.wsrp.model.WSRPConsumer> findAll(
int start, int end,
com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
throws com.liferay.portal.kernel.exception.SystemException {
return getPersistence().findAll(start, end, orderByComparator);
}
/**
* Removes all the WSRP consumers where uuid = ? from the database.
*
* @param uuid the uuid
* @throws SystemException if a system exception occurred
*/
public static void removeByUuid(java.lang.String uuid)
throws com.liferay.portal.kernel.exception.SystemException {
getPersistence().removeByUuid(uuid);
}
/**
* Removes all the WSRP consumers where companyId = ? from the database.
*
* @param companyId the company ID
* @throws SystemException if a system exception occurred
*/
public static void removeByCompanyId(long companyId)
throws com.liferay.portal.kernel.exception.SystemException {
getPersistence().removeByCompanyId(companyId);
}
/**
* Removes the WSRP consumer where url = ? from the database.
*
* @param url the url
* @throws com.liferay.wsrp.NoSuchConsumerException if a matching WSRP consumer could not be found
* @throws SystemException if a system exception occurred
*/
public static void removeByConsumerURL(java.lang.String url)
throws com.liferay.portal.kernel.exception.SystemException,
com.liferay.wsrp.NoSuchConsumerException {
getPersistence().removeByConsumerURL(url);
}
/**
* Removes all the WSRP consumers from the database.
*
* @throws SystemException if a system exception occurred
*/
public static void removeAll()
throws com.liferay.portal.kernel.exception.SystemException {
getPersistence().removeAll();
}
/**
* Returns the number of WSRP consumers where uuid = ?.
*
* @param uuid the uuid
* @return the number of matching WSRP consumers
* @throws SystemException if a system exception occurred
*/
public static int countByUuid(java.lang.String uuid)
throws com.liferay.portal.kernel.exception.SystemException {
return getPersistence().countByUuid(uuid);
}
/**
* Returns the number of WSRP consumers where companyId = ?.
*
* @param companyId the company ID
* @return the number of matching WSRP consumers
* @throws SystemException if a system exception occurred
*/
public static int countByCompanyId(long companyId)
throws com.liferay.portal.kernel.exception.SystemException {
return getPersistence().countByCompanyId(companyId);
}
/**
* Returns the number of WSRP consumers where url = ?.
*
* @param url the url
* @return the number of matching WSRP consumers
* @throws SystemException if a system exception occurred
*/
public static int countByConsumerURL(java.lang.String url)
throws com.liferay.portal.kernel.exception.SystemException {
return getPersistence().countByConsumerURL(url);
}
/**
* Returns the number of WSRP consumers.
*
* @return the number of WSRP consumers
* @throws SystemException if a system exception occurred
*/
public static int countAll()
throws com.liferay.portal.kernel.exception.SystemException {
return getPersistence().countAll();
}
/**
* Returns the {@link WSRPConsumerPersistence} bean that all the static
* methods of this utility delegate to, locating it lazily on first use
* via the portlet bean locator.
*
* <p>
* NOTE(review): the lazy initialization is not synchronized; concurrent
* first callers may each perform the lookup. Presumably the locator
* always returns the same bean, making the race benign — confirm.
* </p>
*/
public static WSRPConsumerPersistence getPersistence() {
if (_persistence == null) {
_persistence = (WSRPConsumerPersistence)PortletBeanLocatorUtil.locate(com.liferay.wsrp.service.ClpSerializer.getServletContextName(),
WSRPConsumerPersistence.class.getName());
ReferenceRegistry.registerReference(WSRPConsumerUtil.class,
"_persistence");
}
return _persistence;
}
/**
* Sets the {@link WSRPConsumerPersistence} bean used by this utility.
*
* <p>
* NOTE(review): this is an instance method that mutates the static
* {@code _persistence} field, so the new bean applies globally to all
* static methods, not per instance.
* </p>
*
* @param persistence the persistence bean to delegate to
*/
public void setPersistence(WSRPConsumerPersistence persistence) {
_persistence = persistence;
ReferenceRegistry.registerReference(WSRPConsumerUtil.class,
"_persistence");
}
// Cached persistence bean; lazily resolved by getPersistence().
private static WSRPConsumerPersistence _persistence;
}
| |
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.android_webview.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import android.annotation.SuppressLint;
import android.content.res.Configuration;
import android.os.Build;
import androidx.test.filters.SmallTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.android_webview.AwContents;
import org.chromium.android_webview.AwDarkMode;
import org.chromium.android_webview.AwSettings;
import org.chromium.android_webview.DarkModeHelper;
import org.chromium.android_webview.test.AwActivityTestRule.TestDependencyFactory;
import org.chromium.base.test.util.CallbackHelper;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.Feature;
import org.chromium.base.test.util.MinAndroidSdkLevel;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
import org.chromium.net.test.util.TestWebServer;
import java.util.concurrent.Callable;
/**
 * The integration test for the dark mode.
 *
 * <p>Each test configures the simulated app theme through
 * {@link DarkModeHelper#setsLightThemeForTesting}, loads a page that declares
 * {@code <meta name="color-scheme" content="dark light">} and then checks the
 * value of the {@code prefers-color-scheme: dark} media query and whether
 * force darkening is active.
 */
@RunWith(AwJUnit4ClassRunner.class)
@MinAndroidSdkLevel(Build.VERSION_CODES.P)
@SuppressLint("NewApi")
public class AwDarkModeTest {
    // Path and markup of the page served by the local test web server.
    private static final String FILE = "/main.html";
    private static final String DATA =
            "<html><head><meta name=\"color-scheme\" content=\"dark light\"></head>"
            + "<body>DarkMode</body></html>";
    @Rule
    public AwActivityTestRule mRule = new AwActivityTestRule();
    private TestWebServer mWebServer;
    private AwTestContainerView mTestContainerView;
    private TestAwContentsClient mContentsClient;
    // NOTE(review): mCallbackHelper is not referenced anywhere in this class.
    private CallbackHelper mCallbackHelper = new CallbackHelper();
    private AwContents mAwContents;
    // Defaults the theme to non-light, starts the web server and creates the
    // WebView container with JavaScript enabled.
    @Before
    public void setUp() throws Exception {
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_FALSE);
        mWebServer = TestWebServer.start();
        mContentsClient = new TestAwContentsClient();
        mTestContainerView = mRule.createAwTestContainerViewOnMainSync(
                mContentsClient, false, new TestDependencyFactory());
        mAwContents = mTestContainerView.getAwContents();
        AwActivityTestRule.enableJavaScriptOnUiThread(mAwContents);
    }
    @After
    public void tearDown() {
        mWebServer.shutdown();
    }
    // An undefined theme must not report a dark color scheme.
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testLightThemeUndefined() throws Throwable {
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_UNDEFINED);
        final String url = mWebServer.setResponse(FILE, DATA, null);
        loadUrlSync(url);
        assertEquals("false", getPrefersColorSchemeDark());
    }
    // A light theme must not report a dark color scheme.
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testLightThemeTrue() throws Throwable {
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_TRUE);
        final String url = mWebServer.setResponse(FILE, DATA, null);
        loadUrlSync(url);
        assertEquals("false", getPrefersColorSchemeDark());
    }
    // With the match-theme feature disabled, a dark theme still reports a dark
    // color scheme but does not force-darken the page.
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    @CommandLineFlags.Add({"disable-features=WebViewForceDarkModeMatchTheme"})
    public void testLightThemeFalseWithMatchThemeDisabled() throws Throwable {
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_FALSE);
        final String url = mWebServer.setResponse(FILE, DATA, null);
        loadUrlSync(url);
        assertEquals("true", getPrefersColorSchemeDark());
        assertFalse(isForceDarkening());
    }
    // With the match-theme feature enabled, a dark theme reports a dark color
    // scheme and force-darkens the page.
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    @CommandLineFlags.Add({"enable-features=WebViewForceDarkModeMatchTheme"})
    public void testLightThemeFalse() throws Throwable {
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_FALSE);
        final String url = mWebServer.setResponse(FILE, DATA, null);
        loadUrlSync(url);
        assertEquals("true", getPrefersColorSchemeDark());
        assertTrue(isForceDarkening());
    }
    // A theme switch delivered via onConfigurationChanged must be reflected by
    // the media query on the next load.
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testConfigurationChanged() throws Throwable {
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_TRUE);
        final String url = mWebServer.setResponse(FILE, DATA, null);
        loadUrlSync(url);
        assertEquals("false", getPrefersColorSchemeDark());
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_FALSE);
        Configuration newConfig = new Configuration();
        newConfig.uiMode = Configuration.UI_MODE_NIGHT_YES;
        TestThreadUtils.runOnUiThreadBlocking(() -> mAwContents.onConfigurationChanged(newConfig));
        loadUrlSync(url);
        assertEquals("true", getPrefersColorSchemeDark());
    }
    // In simplified dark mode, allowing algorithmic darkening with a dark
    // theme darkens the page regardless of setForceDarkMode(FORCE_DARK_OFF).
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testAlgorithmicDarkeningAllowedOnAndroidT() throws Throwable {
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_FALSE);
        AwDarkMode.enableSimplifiedDarkMode();
        // Check setForceDarkMode has noops, otherwise ForceDarkening will be turned off.
        mAwContents.getSettings().setForceDarkMode(AwSettings.FORCE_DARK_OFF);
        mAwContents.getSettings().setAlgorithmicDarkeningAllowed(true);
        // Set force dark mode again to check no ordering issue.
        mAwContents.getSettings().setForceDarkMode(AwSettings.FORCE_DARK_OFF);
        final String url = mWebServer.setResponse(FILE, DATA, null);
        loadUrlSync(url);
        assertEquals("true", getPrefersColorSchemeDark());
        assertTrue(isForceDarkening());
    }
    // Algorithmic darkening must stay off while the app theme is light.
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testAlgorithmicDarkeningAllowedWithLightThemeOnAndroidT() throws Throwable {
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_TRUE);
        AwDarkMode.enableSimplifiedDarkMode();
        // Check setForceDarkMode has noops, otherwise ForceDarkening will be turned off.
        mAwContents.getSettings().setForceDarkMode(AwSettings.FORCE_DARK_OFF);
        mAwContents.getSettings().setAlgorithmicDarkeningAllowed(true);
        // Set force dark mode again to check no ordering issue.
        mAwContents.getSettings().setForceDarkMode(AwSettings.FORCE_DARK_OFF);
        final String url = mWebServer.setResponse(FILE, DATA, null);
        loadUrlSync(url);
        // Verify that prefers-color-scheme matches the theme.
        assertEquals("false", getPrefersColorSchemeDark());
        // Algorithmic darkening isn't enabled because app's light theme.
        assertFalse(isForceDarkening());
    }
    // Without opting in, algorithmic darkening must stay off even when the
    // legacy force-dark setting is turned on.
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testAlgorithmicDarkeningDisallowedByDefaultOnAndroidT() throws Throwable {
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_FALSE);
        AwDarkMode.enableSimplifiedDarkMode();
        // Check setForceDarkMode has noops, otherwise ForceDarkening will be turned on.
        mAwContents.getSettings().setForceDarkMode(AwSettings.FORCE_DARK_ON);
        final String url = mWebServer.setResponse(FILE, DATA, null);
        loadUrlSync(url);
        assertEquals("true", getPrefersColorSchemeDark());
        assertFalse(isForceDarkening());
    }
    // In simplified dark mode the media query follows the (dark) theme.
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testPrefersColorSchemeDarkOnAndroidT() throws Throwable {
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_FALSE);
        AwDarkMode.enableSimplifiedDarkMode();
        // Check setForceDarkMode has noops, otherwise, prefers-color-scheme will be set to light.
        mAwContents.getSettings().setForceDarkMode(AwSettings.FORCE_DARK_OFF);
        final String url = mWebServer.setResponse(FILE, DATA, null);
        loadUrlSync(url);
        // Verify prefers-color-scheme matches isLightTheme.
        assertEquals("true", getPrefersColorSchemeDark());
        assertFalse(isForceDarkening());
    }
    // In simplified dark mode the media query follows the (light) theme.
    @Test
    @SmallTest
    @Feature({"AndroidWebView"})
    public void testPrefersColorSchemeLightOnAndroidT() throws Throwable {
        DarkModeHelper.setsLightThemeForTesting(DarkModeHelper.LightTheme.LIGHT_THEME_TRUE);
        AwDarkMode.enableSimplifiedDarkMode();
        // Check setForceDarkMode has noops, otherwise, prefers-color-scheme will be set to dark.
        mAwContents.getSettings().setForceDarkMode(AwSettings.FORCE_DARK_OFF);
        final String url = mWebServer.setResponse(FILE, DATA, null);
        loadUrlSync(url);
        // Verify prefers-color-scheme matches isLightTheme.
        assertEquals("false", getPrefersColorSchemeDark());
        assertFalse(isForceDarkening());
    }
    // Loads the URL and waits for both onPageFinished and the page commit to
    // become visible, so that scripts run against the committed page.
    private void loadUrlSync(String url) throws Exception {
        CallbackHelper done = mContentsClient.getOnPageCommitVisibleHelper();
        int callCount = done.getCallCount();
        mRule.loadUrlSync(
                mTestContainerView.getAwContents(), mContentsClient.getOnPageFinishedHelper(), url);
        done.waitForCallback(callCount);
    }
    private String executeJavaScriptAndWaitForResult(String code) throws Throwable {
        return mRule.executeJavaScriptAndWaitForResult(
                mTestContainerView.getAwContents(), mContentsClient, code);
    }
    // Evaluates the prefers-color-scheme media query; returns "true"/"false".
    private String getPrefersColorSchemeDark() throws Throwable {
        return executeJavaScriptAndWaitForResult(
                "window.matchMedia && window.matchMedia('(prefers-color-scheme: dark)').matches");
    }
    // Reads AwSettings.isDarkMode() on the UI thread.
    private boolean isForceDarkening() throws Throwable {
        return TestThreadUtils.runOnUiThreadBlocking(new Callable<Boolean>() {
            @Override
            public Boolean call() {
                return mAwContents.getSettings().isDarkMode();
            }
        });
    }
}
| |
package se.l4.dust.core.template.html;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import com.google.common.base.Charsets;
import se.l4.dust.api.template.TemplateOutputStream;
import se.l4.dust.api.template.dom.AttributeImpl;
/**
* Template output that will output its contents as HTML.
*
* @author Andreas Holstenson
*
*/
public class HtmlTemplateOutput
implements TemplateOutputStream
{
protected final Writer writer;
protected boolean inComment;
protected boolean written;
private final List<Boolean> preserveWhitespace;
protected boolean currentPreserveWhitespace;
protected boolean lastWhitespace;
public HtmlTemplateOutput(OutputStream stream)
{
this(new OutputStreamWriter(stream, Charsets.UTF_8));
}
public HtmlTemplateOutput(Writer writer)
{
this.writer = writer;
preserveWhitespace = new ArrayList<>(20);
lastWhitespace = false;
}
private void escape(String in)
throws IOException
{
for(int i=0, n=in.length(); i<n; i++)
{
escape(in.charAt(i));
}
}
private void escape(char c)
throws IOException
{
switch(c)
{
case '<':
writer.write("<");
break;
case '>':
writer.write(">");
break;
case '&':
writer.write("&");
break;
case '"':
writer.write(""");
break;
default:
writer.write(c);
}
}
private void escapeForce(char c)
throws IOException
{
writer.write("&#");
writer.write(Integer.toString(c, 10));
writer.write(';');
}
@Override
public void startElement(String name, String[] attributes)
throws IOException
{
lastWhitespace = false;
written = true;
writer.write('<');
writer.write(name);
if(attributes.length > 0)
{
for(int i=0, n=attributes.length; i<n; i+=2)
{
String k = attributes[i];
String v = attributes[i+1];
if(k == null)
{
break;
}
if(v == AttributeImpl.ATTR_EMIT)
{
writer.write(' ');
writer.write(k);
}
else if(v != AttributeImpl.ATTR_SKIP)
{
writer.write(' ');
writer.write(k);
if(v != null)
{
writer.write("=\"");
escape(v);
writer.write("\"");
}
}
}
}
if("textarea".equals(name) || "pre".equals(name))
{
currentPreserveWhitespace = Boolean.TRUE;
}
preserveWhitespace.add(currentPreserveWhitespace);
writer.write('>');
}
@Override
public void endElement(String name)
throws IOException
{
lastWhitespace = false;
writer.write("</");
writer.write(name);
writer.write('>');
preserveWhitespace.remove(preserveWhitespace.size() - 1);
if(! preserveWhitespace.isEmpty())
{
currentPreserveWhitespace = preserveWhitespace.get(preserveWhitespace.size() - 1);
}
}
@Override
public void element(String name, String[] attributes)
throws IOException
{
lastWhitespace = false;
written = true;
writer.write('<');
writer.write(name);
if(attributes.length > 0)
{
for(int i=0, n=attributes.length; i<n; i+=2)
{
String k = attributes[i];
String v = attributes[i+1];
if(k == null)
{
break;
}
if(v == AttributeImpl.ATTR_EMIT)
{
writer.write(' ');
writer.write(k);
}
else if(v != AttributeImpl.ATTR_SKIP)
{
writer.write(' ');
writer.write(k);
if(v != null)
{
writer.write("=\"");
escape(v);
writer.write("\"");
}
}
}
}
writer.write('>');
}
@Override
public void startComment()
throws IOException
{
inComment = true;
writer.write("<!--");
}
@Override
public void endComment()
throws IOException
{
lastWhitespace = false;
inComment = false;
writer.write("-->");
}
@Override
public void text(String text)
throws IOException
{
if(text == null)
{
writer.write("null");
return;
}
if(inComment)
{
for(int i=0, n=text.length(); i<n; i++)
{
char c = text.charAt(i);
if(c == '-' && i < n - 1 && text.charAt(i+1) == '-')
{
// Escape this character
escapeForce(c);
}
else
{
writer.write(text.charAt(i));
}
}
}
else if(currentPreserveWhitespace)
{
for(int i=0, n=text.length(); i<n; i++)
{
escape(text.charAt(i));
}
}
else
{
for(int i=0, n=text.length(); i<n; i++)
{
char c = text.charAt(i);
boolean whitespace = Character.isWhitespace(c);
if(whitespace)
{
if(lastWhitespace) continue;
writer.write(' ');
}
else
{
escape(c);
}
lastWhitespace = whitespace;
}
}
}
@Override
public void raw(String text)
throws IOException
{
writer.write(text);
}
@Override
public void docType(String name, String publicId, String systemId)
throws IOException
{
// Do not output if illegal
if(written) return;
writer.write("<!DOCTYPE ");
writer.write(name);
if(publicId != null)
{
writer.write(" PUBLIC \"");
writer.write(publicId);
writer.write('"');
}
if(systemId != null)
{
if(publicId == null) writer.write(" SYSTEM");
writer.write(" \"");
writer.write(systemId);
writer.write('"');
}
writer.write('>');
written = true;
}
@Override
public void close()
throws IOException
{
//writer.flush();
writer.close();
}
}
| |
package net.ros.common.tile.machine;
import lombok.Getter;
import lombok.Setter;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.ItemStackHelper;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.NonNullList;
import net.minecraft.util.math.BlockPos;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.FluidTank;
import net.minecraftforge.fluids.IFluidBlock;
import net.minecraftforge.fluids.IFluidTank;
import net.minecraftforge.fluids.capability.IFluidHandler;
import net.ros.common.ROSConstants;
import net.ros.common.block.BlockVeinOre;
import net.ros.common.container.BuiltContainer;
import net.ros.common.container.ContainerBuilder;
import net.ros.common.container.IContainerProvider;
import net.ros.common.grid.node.IBelt;
import net.ros.common.gui.MachineGui;
import net.ros.common.init.ROSItems;
import net.ros.common.machine.Machines;
import net.ros.common.machine.component.SteamComponent;
import net.ros.common.machine.module.InventoryModule;
import net.ros.common.machine.module.impl.FluidStorageModule;
import net.ros.common.machine.module.impl.IOModule;
import net.ros.common.machine.module.impl.SteamModule;
import net.ros.common.steam.ISteamTank;
import net.ros.common.steam.SteamUtil;
import net.ros.common.util.FluidUtils;
import java.util.Iterator;
public class TileSmallMiningDrill extends TileTickingModularMachine implements IContainerProvider
{
@Getter
@Setter
private float heat, maxHeat;
@Getter
@Setter
private boolean completed;
private BlockPos lastPos;
private final float heatPerOperationTick = 30;
private int tickBeforeHarvest;
private FluidStack tempSludge;
private final NonNullList<ItemStack> tempVarious;
public TileSmallMiningDrill()
{
super(Machines.SMALL_MINING_DRILL);
this.heat = 0;
this.maxHeat = 3000;
this.lastPos = this.getPos();
this.tempVarious = NonNullList.create();
}
@Override
protected void reloadModules()
{
super.reloadModules();
this.addModule(new InventoryModule(this, 0));
this.addModule(new SteamModule(this, SteamUtil::createTank));
this.addModule(new FluidStorageModule(this)
.addFilter("water", FluidUtils.WATER_FILTER));
this.addModule(new IOModule(this));
}
@Override
public void update()
{
super.update();
if (this.isClient())
return;
boolean isDirty = false;
ISteamTank steamTank = this.getModule(SteamModule.class).getInternalSteamHandler();
SteamComponent steamComponent = this.getDescriptor().get(SteamComponent.class);
FluidTank sludgeTank = (FluidTank) this.getModule(FluidStorageModule.class).getFluidHandler("sludge");
if (tempSludge != null && sludgeTank.fill(tempSludge, false) == tempSludge.amount)
{
sludgeTank.fill(tempSludge, true);
tempSludge = null;
}
if (!this.isCompleted() && tempSludge == null && this.tempVarious.isEmpty() && this.heat < this.maxHeat
&& steamTank.getSteam() >= steamComponent.getSteamConsumption())
{
BlockPos toCheck = this.lastPos;
if (lastPos.equals(BlockPos.ORIGIN))
toCheck = new BlockPos(this.getPos().getX() - 2, this.getPos().getY() - 1, this.getPos().getZ() - 2);
else if (this.tickBeforeHarvest == 0)
{
if (toCheck.getX() == this.getPos().getX() + 2)
{
if (toCheck.getZ() == this.getPos().getZ() + 2)
{
if (toCheck.getY() == 0)
{
this.setCompleted(true);
}
else
toCheck = new BlockPos(this.getPos().getX() - 2, toCheck.getY() - 1,
this.getPos().getZ() - 2);
}
else
toCheck = new BlockPos(this.getPos().getX() - 2, toCheck.getY(), toCheck.getZ() + 1);
}
else
toCheck = new BlockPos(toCheck.getX() + 1, toCheck.getY(), toCheck.getZ());
this.tickBeforeHarvest = (int) Math
.ceil(4 * (1 / (steamTank.getPressure() / steamComponent.getMaxPressureCapacity())));
IBlockState state = this.world.getBlockState(toCheck);
if (!this.world.isAirBlock(toCheck) && !(state.getBlock() instanceof IFluidBlock)
&& state.getBlockHardness(world, toCheck) >= 0)
{
if (state.getBlock() instanceof BlockVeinOre)
{
BlockVeinOre veinOre = (BlockVeinOre) state.getBlock();
tempSludge = new FluidStack(veinOre.getOreFromState(state).toSludge(),
veinOre.getRichnessFromState(state).getFluidAmount());
this.world.destroyBlock(toCheck, false);
}
else if (Math.abs(toCheck.getX() - this.getPos().getX()) < 2
&& Math.abs(toCheck.getZ() - this.getPos().getZ()) < 2)
{
state.getBlock().getDrops(tempVarious, this.world, toCheck, state, 0);
this.world.destroyBlock(toCheck, false);
}
else
this.tickBeforeHarvest = 0;
}
else
this.tickBeforeHarvest = 0;
}
else
this.tickBeforeHarvest--;
lastPos = toCheck;
this.heat += this.heatPerOperationTick * (steamTank.getPressure() / 2);
steamTank.drainSteam((int) Math.max(steamComponent.getSteamConsumption() * steamTank.getPressure(),
steamComponent.getSteamConsumption()), true);
isDirty = true;
}
if (!this.isCompleted())
{
IFluidTank fluidTank = (IFluidTank) this.getModule(FluidStorageModule.class).getFluidHandler("water");
if (fluidTank.getFluidAmount() > 0)
{
int removable = Math.min(20, fluidTank.getFluidAmount());
if (this.heat - removable <= this.getMinimumTemp())
removable = (int) (this.heat - this.getMinimumTemp());
if (removable > 0)
{
this.heat = this.heat - removable;
fluidTank.drain(removable, true);
}
}
}
if (!this.tempVarious.isEmpty())
{
if (this.tryInsertTrash(this.getFacing()))
isDirty = true;
}
if (this.world.getTotalWorldTime() % 5 == 0)
{
if (this.heat > this.getMinimumTemp())
{
this.heat--;
isDirty = true;
}
else if (this.heat < this.getMinimumTemp())
{
this.heat = this.getMinimumTemp();
isDirty = true;
}
}
if (isDirty)
this.sync();
}
private int getMinimumTemp()
{
return (int) (this.world.getBiome(this.getPos()).getTemperature(this.pos) * 200);
}
private boolean tryInsertTrash(final EnumFacing facing)
{
TileEntity trashTile = this.world.getTileEntity(this.pos.offset(facing.getOpposite(), 2));
if (trashTile instanceof IBelt)
{
final IBelt trashBelt = (IBelt) trashTile;
Iterator<ItemStack> variousIterator = this.tempVarious.iterator();
while (variousIterator.hasNext())
{
ItemStack next = variousIterator.next();
if (trashBelt.insert(next, false))
{
trashBelt.insert(next, true);
variousIterator.remove();
return true;
}
}
}
return false;
}
@Override
public NBTTagCompound writeToNBT(final NBTTagCompound tag)
{
super.writeToNBT(tag);
tag.setFloat("heat", this.heat);
tag.setFloat("maxHeat", this.maxHeat);
tag.setBoolean("completed", this.isCompleted());
tag.setLong("lastPos", this.lastPos.toLong());
tag.setInteger("tickBeforeHarvest", this.tickBeforeHarvest);
tag.setTag("tempVarious", ItemStackHelper.saveAllItems(new NBTTagCompound(), this.tempVarious));
if (this.tempSludge != null)
tag.setTag("tempSludge", this.tempSludge.writeToNBT(new NBTTagCompound()));
return tag;
}
@Override
public void readFromNBT(final NBTTagCompound tag)
{
super.readFromNBT(tag);
this.heat = tag.getFloat("heat");
this.maxHeat = tag.getFloat("maxHeat");
this.completed = tag.getBoolean("completed");
this.lastPos = BlockPos.fromLong(tag.getLong("lastPos"));
this.tickBeforeHarvest = tag.getInteger("tickBeforeHarvest");
ItemStackHelper.loadAllItems(tag.getCompoundTag("tempVarious"), this.tempVarious);
if (tag.hasKey("tempSludge"))
this.tempSludge = FluidStack.loadFluidStackFromNBT(tag.getCompoundTag("tempSludge"));
}
@Override
public BuiltContainer createContainer(EntityPlayer player)
{
    // Resolve each module/handler once up front instead of repeating the
    // lookup-and-cast inside the builder chain.
    SteamModule steamEngine = this.getModule(SteamModule.class);
    FluidStorageModule fluidStorage = this.getModule(FluidStorageModule.class);
    FluidTank waterTank = (FluidTank) fluidStorage.getFluidHandler("water");
    FluidTank sludgeTank = (FluidTank) fluidStorage.getFluidHandler("sludge");
    // Build the GUI container: player inventory + hotbar, the machine's basic
    // inventory, and synced steam / water / sludge values.
    return new ContainerBuilder("smallminingdrill", player).player(player).inventory(8, 84).hotbar(8, 142)
            .addInventory().tile(this.getModule(InventoryModule.class).getInventory("basic"))
            .syncIntegerValue(steamEngine.getInternalSteamHandler()::getSteam,
                    steamEngine.getInternalSteamHandler()::setSteam)
            .syncFluidValue(waterTank::getFluid, waterTank::setFluid)
            .syncFluidValue(sludgeTank::getFluid, sludgeTank::setFluid)
            .addInventory().create();
}
/**
 * Handles a right-click on the block: first tries to exchange fluid with the
 * held container, otherwise opens the machine GUI.
 *
 * @return false when the click should fall through (sneaking or wrench in hand),
 *         true when the interaction was consumed
 */
public boolean onRightClick(final EntityPlayer player, final EnumFacing side, final float hitX, final float hitY,
        final float hitZ, BlockPos from)
{
    // Sneak-clicks and wrench clicks are handled elsewhere; do not consume them.
    if (player.isSneaking() || player.getHeldItemMainhand().getItem() == ROSItems.WRENCH)
        return false;
    IFluidHandler water = this.getModule(FluidStorageModule.class).getFluidHandler("water");
    // Try emptying the held container into the tank, then filling it from the tank.
    boolean exchangedFluid = FluidUtils.drainPlayerHand(water, player)
            || FluidUtils.fillPlayerHand(water, player);
    if (exchangedFluid)
    {
        this.markDirty();
        return true;
    }
    // No fluid interaction happened — open the machine GUI instead.
    player.openGui(ROSConstants.MODINSTANCE, MachineGui.SMALL_MINING_DRILL.getUniqueID(), this.world,
            this.pos.getX(), this.pos.getY(), this.pos.getZ());
    return true;
}
}
| |
/*
* Copyright (C) 2014 Chris Banes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mrengineer13.fll;
import android.content.Context;
import android.content.res.TypedArray;
import android.os.Bundle;
import android.os.Parcelable;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.TextView;
import com.nineoldandroids.animation.Animator;
import com.nineoldandroids.animation.AnimatorListenerAdapter;
import com.nineoldandroids.view.ViewHelper;
import com.nineoldandroids.view.ViewPropertyAnimator;
/**
 * Layout which wraps an {@link android.widget.EditText} to show a floating label when the hint is hidden
* due to the user inputting text.
*
* @see <a href="https://dribbble.com/shots/1254439--GIF-Mobile-Form-Interaction">Matt D. Smith on Dribble</a>
* @see <a href="http://bradfrostweb.com/blog/post/float-label-pattern/">Brad Frost's blog post</a>
*/
class FloatingLabelLayout extends FrameLayout {

    /** Duration in ms of the label show/hide animations. */
    private static final long ANIMATION_DURATION = 150;

    /** Default horizontal label padding, in dips. */
    private static final float DEFAULT_PADDING_LEFT_RIGHT_DP = 12f;

    // Keys used by onSaveInstanceState()/onRestoreInstanceState().
    private static final String SAVED_SUPER_STATE = "SAVED_SUPER_STATE";
    private static final String SAVED_LABEL_VISIBILITY = "SAVED_LABEL_VISIBILITY";
    private static final String SAVED_HINT = "SAVED_HINT";
    public static final String SAVED_TRIGGER = "SAVED_TRIGGER";
    public static final String SAVED_FOCUS = "SAVED_FOCUS";

    private EditText mEditText;   // the single EditText child; captured in addView()
    private TextView mLabel;      // the floating label drawn above the EditText
    private Trigger mTrigger;     // what makes the label appear: typing or focus
    private CharSequence mHint;   // original EditText hint, restored on blur (FOCUS mode)

    public FloatingLabelLayout(Context context) {
        this(context, null);
    }

    public FloatingLabelLayout(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public FloatingLabelLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);

        final TypedArray a = context
                .obtainStyledAttributes(attrs, R.styleable.FloatingLabelLayout);
        final int sidePadding = a.getDimensionPixelSize(
                R.styleable.FloatingLabelLayout_floatLabelSidePadding,
                dipsToPix(DEFAULT_PADDING_LEFT_RIGHT_DP));
        mLabel = new TextView(context);
        mLabel.setPadding(sidePadding, 0, sidePadding, 0);
        // Invisible (not GONE) so the label still reserves its measured size.
        mLabel.setVisibility(INVISIBLE);

        mLabel.setTextAppearance(context,
                a.getResourceId(R.styleable.FloatingLabelLayout_floatLabelTextAppearance,
                        android.R.style.TextAppearance_Small)
        );

        int triggerInt = a.getInt(R.styleable.FloatingLabelLayout_floatLabelTrigger, Trigger.TYPE.getValue());
        mTrigger = Trigger.fromValue(triggerInt);

        addView(mLabel, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);

        a.recycle();
    }

    @Override
    protected Parcelable onSaveInstanceState() {
        Bundle bundle = new Bundle();
        bundle.putParcelable(SAVED_SUPER_STATE, super.onSaveInstanceState());
        bundle.putInt(SAVED_LABEL_VISIBILITY, mLabel.getVisibility());
        bundle.putCharSequence(SAVED_HINT, mHint);
        bundle.putInt(SAVED_TRIGGER, mTrigger.getValue());
        bundle.putBoolean(SAVED_FOCUS, getEditText().isFocused());
        return bundle;
    }

    @SuppressWarnings("ResourceType")
    @Override
    protected void onRestoreInstanceState(Parcelable state) {
        if (state instanceof Bundle) {
            Bundle bundle = (Bundle) state;
            mLabel.setVisibility(bundle.getInt(SAVED_LABEL_VISIBILITY));
            mHint = bundle.getCharSequence(SAVED_HINT);
            mTrigger = Trigger.fromValue(bundle.getInt(SAVED_TRIGGER));

            // if the trigger is on focus
            if (mTrigger == Trigger.FOCUS) {
                if (bundle.getBoolean(SAVED_FOCUS)) {
                    mEditText.requestFocus();
                } else if (!TextUtils.isEmpty(getEditText().getText())) {
                    // Not focused but has text: the label stays visible,
                    // matching mOnFocusChangeListener's blur behavior.
                    showLabel();
                }
            } else if (mTrigger == Trigger.TYPE) {
                // BUG FIX: this branch was inverted (it showed the label for
                // empty text and hid it for non-empty text). Mirror the
                // TextWatcher: label visible iff there is text.
                if (TextUtils.isEmpty(getEditText().getText())) {
                    hideLabel();
                } else {
                    showLabel();
                }
            }

            // retrieve super state
            state = bundle.getParcelable(SAVED_SUPER_STATE);
        }
        super.onRestoreInstanceState(state);
    }

    @Override
    public void addView(View child, int index, ViewGroup.LayoutParams params) {
        if (child instanceof EditText) {
            // If we already have an EditText, throw an exception
            if (mEditText != null) {
                throw new IllegalArgumentException("We already have an EditText, can only have one");
            }

            // Update the layout params so that the EditText is at the bottom, with enough top
            // margin to show the label
            final LayoutParams lp = new LayoutParams(params);
            lp.gravity = Gravity.BOTTOM;
            lp.topMargin = (int) mLabel.getTextSize();
            params = lp;

            setEditText((EditText) child);
        }

        // Carry on adding the View...
        super.addView(child, index, params);
    }

    /**
     * Wires up the managed EditText: copies its hint onto the label and
     * registers the text / focus listeners that drive label visibility.
     */
    protected void setEditText(EditText editText) {
        mEditText = editText;
        mLabel.setText(mEditText.getHint());
        if (mHint == null) {
            mHint = mEditText.getHint();
        }

        // Add a TextWatcher so that we know when the text input has changed
        mEditText.addTextChangedListener(mTextWatcher);

        // Add focus listener to the EditText so that we can notify the label that it is activated.
        // Allows the use of a ColorStateList for the text color on the label
        mEditText.setOnFocusChangeListener(mOnFocusChangeListener);

        // if view already had focus we need to manually call the listener
        if (mTrigger == Trigger.FOCUS && mEditText.isFocused()) {
            mOnFocusChangeListener.onFocusChange(mEditText, true);
        }
    }

    /**
     * @return the {@link android.widget.EditText} text input
     */
    public EditText getEditText() {
        return mEditText;
    }

    /**
     * @return the {@link android.widget.TextView} label
     */
    public TextView getLabel() {
        return mLabel;
    }

    /**
     * Show the label using an animation (fade in while sliding up).
     */
    protected void showLabel() {
        mLabel.setVisibility(View.VISIBLE);
        ViewHelper.setAlpha(mLabel, 0f);
        ViewHelper.setTranslationY(mLabel, mLabel.getHeight());
        ViewPropertyAnimator.animate(mLabel)
                .alpha(1f)
                .translationY(0f)
                .setDuration(ANIMATION_DURATION)
                .setListener(null).start();
    }

    /**
     * Hide the label using an animation (fade out while sliding down),
     * setting it GONE when the animation ends.
     */
    private void hideLabel() {
        ViewHelper.setAlpha(mLabel, 1f);
        ViewHelper.setTranslationY(mLabel, 0f);
        ViewPropertyAnimator.animate(mLabel)
                .alpha(0f)
                .translationY(mLabel.getHeight())
                .setDuration(ANIMATION_DURATION)
                .setListener(new AnimatorListenerAdapter() {
                    @Override
                    public void onAnimationEnd(Animator animation) {
                        mLabel.setVisibility(View.GONE);
                    }
                }).start();
    }

    /**
     * sets hint on {@link #mEditText}
     *
     * @param hint to set
     */
    public void setHint(String hint) {
        getEditText().setHint(hint);
        getLabel().setText(hint);
    }

    /**
     * Sets text on {@link #mEditText}
     *
     * @param text to set
     */
    public void setText(String text) {
        getEditText().setText(text);
    }

    /**
     * @return {@link #mEditText} text
     */
    public String getText() {
        return getEditText().getText().toString();
    }

    /**
     * Helper method to convert dips to pixels.
     */
    private int dipsToPix(float dps) {
        return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dps,
                getResources().getDisplayMetrics());
    }

    private OnFocusChangeListener mOnFocusChangeListener = new OnFocusChangeListener() {
        @Override
        public void onFocusChange(View view, boolean focused) {
            if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.HONEYCOMB) {
                mLabel.setActivated(focused); // only available after API 11
            }

            if (mTrigger == Trigger.FOCUS) {
                if (focused) {
                    // Suppress the inline hint while the floating label is up.
                    mEditText.setHint("");
                    showLabel();
                } else {
                    if (TextUtils.isEmpty(mEditText.getText())) {
                        mEditText.setHint(mHint);
                        hideLabel();
                    }
                }
            }
        }
    };

    private TextWatcher mTextWatcher = new TextWatcher() {
        @Override
        public void afterTextChanged(Editable s) {
            // only takes affect if mTrigger is set to TYPE
            if (mTrigger != Trigger.TYPE) {
                return;
            }

            if (TextUtils.isEmpty(s)) {
                hideLabel();
            } else {
                showLabel();
            }
        }

        @Override
        public void beforeTextChanged(CharSequence s, int start, int count, int after) {
        }

        @Override
        public void onTextChanged(CharSequence s, int start, int before, int count) {
        }
    };

    /** What causes the floating label to be shown. */
    public static enum Trigger {
        TYPE(0),
        FOCUS(1);

        private final int mValue;

        private Trigger(int i) {
            mValue = i;
        }

        public int getValue() {
            return mValue;
        }

        /**
         * @throws IllegalArgumentException when {@code value} matches no constant
         */
        public static Trigger fromValue(int value) {
            Trigger[] triggers = Trigger.values();
            for (int i = 0; i < triggers.length; i++) {
                if (triggers[i].getValue() == value) {
                    return triggers[i];
                }
            }
            throw new IllegalArgumentException(value + " is not a valid value for " + Trigger.class.getSimpleName());
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.testing;
import com.facebook.presto.GroupByHashPageIndexerFactory;
import com.facebook.presto.PagesIndexPageSorter;
import com.facebook.presto.ScheduledSplit;
import com.facebook.presto.Session;
import com.facebook.presto.TaskSource;
import com.facebook.presto.block.BlockEncodingManager;
import com.facebook.presto.connector.ConnectorId;
import com.facebook.presto.connector.ConnectorManager;
import com.facebook.presto.connector.system.CatalogSystemTable;
import com.facebook.presto.connector.system.GlobalSystemConnector;
import com.facebook.presto.connector.system.GlobalSystemConnectorFactory;
import com.facebook.presto.connector.system.NodeSystemTable;
import com.facebook.presto.connector.system.SchemaPropertiesSystemTable;
import com.facebook.presto.connector.system.TablePropertiesSystemTable;
import com.facebook.presto.connector.system.TransactionsSystemTable;
import com.facebook.presto.execution.CommitTask;
import com.facebook.presto.execution.CreateTableTask;
import com.facebook.presto.execution.CreateViewTask;
import com.facebook.presto.execution.DataDefinitionTask;
import com.facebook.presto.execution.DeallocateTask;
import com.facebook.presto.execution.DropTableTask;
import com.facebook.presto.execution.DropViewTask;
import com.facebook.presto.execution.NodeTaskMap;
import com.facebook.presto.execution.PrepareTask;
import com.facebook.presto.execution.RenameColumnTask;
import com.facebook.presto.execution.RenameTableTask;
import com.facebook.presto.execution.ResetSessionTask;
import com.facebook.presto.execution.RollbackTask;
import com.facebook.presto.execution.SetSessionTask;
import com.facebook.presto.execution.StartTransactionTask;
import com.facebook.presto.execution.TaskManagerConfig;
import com.facebook.presto.execution.scheduler.LegacyNetworkTopology;
import com.facebook.presto.execution.scheduler.NodeScheduler;
import com.facebook.presto.execution.scheduler.NodeSchedulerConfig;
import com.facebook.presto.index.IndexManager;
import com.facebook.presto.metadata.CatalogManager;
import com.facebook.presto.metadata.HandleResolver;
import com.facebook.presto.metadata.InMemoryNodeManager;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.MetadataManager;
import com.facebook.presto.metadata.MetadataUtil;
import com.facebook.presto.metadata.QualifiedObjectName;
import com.facebook.presto.metadata.QualifiedTablePrefix;
import com.facebook.presto.metadata.SchemaPropertyManager;
import com.facebook.presto.metadata.SessionPropertyManager;
import com.facebook.presto.metadata.Split;
import com.facebook.presto.metadata.TableHandle;
import com.facebook.presto.metadata.TableLayoutHandle;
import com.facebook.presto.metadata.TableLayoutResult;
import com.facebook.presto.metadata.TablePropertyManager;
import com.facebook.presto.metadata.ViewDefinition;
import com.facebook.presto.operator.Driver;
import com.facebook.presto.operator.DriverContext;
import com.facebook.presto.operator.DriverFactory;
import com.facebook.presto.operator.FilterAndProjectOperator;
import com.facebook.presto.operator.FilterFunctions;
import com.facebook.presto.operator.GenericPageProcessor;
import com.facebook.presto.operator.Operator;
import com.facebook.presto.operator.OperatorContext;
import com.facebook.presto.operator.OperatorFactory;
import com.facebook.presto.operator.OutputFactory;
import com.facebook.presto.operator.PageSourceOperator;
import com.facebook.presto.operator.ProjectionFunction;
import com.facebook.presto.operator.ProjectionFunctions;
import com.facebook.presto.operator.TaskContext;
import com.facebook.presto.operator.index.IndexJoinLookupStats;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorPageSource;
import com.facebook.presto.spi.Constraint;
import com.facebook.presto.spi.PageIndexerFactory;
import com.facebook.presto.spi.PageSorter;
import com.facebook.presto.spi.Plugin;
import com.facebook.presto.spi.RecordCursor;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.block.BlockEncodingSerde;
import com.facebook.presto.spi.connector.ConnectorFactory;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spiller.BinarySpillerFactory;
import com.facebook.presto.spiller.SpillerFactory;
import com.facebook.presto.split.PageSinkManager;
import com.facebook.presto.split.PageSourceManager;
import com.facebook.presto.split.SplitManager;
import com.facebook.presto.split.SplitSource;
import com.facebook.presto.sql.analyzer.Analysis;
import com.facebook.presto.sql.analyzer.Analyzer;
import com.facebook.presto.sql.analyzer.FeaturesConfig;
import com.facebook.presto.sql.analyzer.QueryExplainer;
import com.facebook.presto.sql.gen.ExpressionCompiler;
import com.facebook.presto.sql.gen.JoinFilterFunctionCompiler;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.CompilerConfig;
import com.facebook.presto.sql.planner.LocalExecutionPlanner;
import com.facebook.presto.sql.planner.LocalExecutionPlanner.LocalExecutionPlan;
import com.facebook.presto.sql.planner.LogicalPlanner;
import com.facebook.presto.sql.planner.NodePartitioningManager;
import com.facebook.presto.sql.planner.Plan;
import com.facebook.presto.sql.planner.PlanFragmenter;
import com.facebook.presto.sql.planner.PlanNodeIdAllocator;
import com.facebook.presto.sql.planner.PlanOptimizers;
import com.facebook.presto.sql.planner.PlanPrinter;
import com.facebook.presto.sql.planner.SubPlan;
import com.facebook.presto.sql.planner.optimizations.PlanOptimizer;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.tree.Commit;
import com.facebook.presto.sql.tree.CreateTable;
import com.facebook.presto.sql.tree.CreateView;
import com.facebook.presto.sql.tree.Deallocate;
import com.facebook.presto.sql.tree.DropTable;
import com.facebook.presto.sql.tree.DropView;
import com.facebook.presto.sql.tree.Execute;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.Prepare;
import com.facebook.presto.sql.tree.RenameColumn;
import com.facebook.presto.sql.tree.RenameTable;
import com.facebook.presto.sql.tree.ResetSession;
import com.facebook.presto.sql.tree.Rollback;
import com.facebook.presto.sql.tree.SetSession;
import com.facebook.presto.sql.tree.StartTransaction;
import com.facebook.presto.sql.tree.Statement;
import com.facebook.presto.testing.PageConsumerOperator.PageConsumerOutputFactory;
import com.facebook.presto.transaction.TransactionManager;
import com.facebook.presto.transaction.TransactionManagerConfig;
import com.facebook.presto.type.TypeRegistry;
import com.facebook.presto.type.TypeUtils;
import com.facebook.presto.util.FinalizerService;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.io.Closer;
import io.airlift.node.NodeInfo;
import io.airlift.units.Duration;
import org.intellij.lang.annotations.Language;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;
import static com.facebook.presto.execution.SqlQueryManager.unwrapExecuteStatement;
import static com.facebook.presto.execution.SqlQueryManager.validateParameters;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.sql.testing.TreeAssertions.assertFormattedSql;
import static com.facebook.presto.testing.TestingSession.TESTING_CATALOG;
import static com.facebook.presto.testing.TestingSession.createBogusTestingCatalog;
import static com.facebook.presto.testing.TestingTaskContext.createTaskContext;
import static com.facebook.presto.transaction.TransactionBuilder.transaction;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static io.airlift.concurrent.MoreFutures.getFutureValue;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.json.JsonCodec.jsonCodec;
import static java.util.Collections.emptyList;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
public class LocalQueryRunner
implements QueryRunner
{
// Session used when callers do not pass one explicitly; rebuilt in the
// constructor so it uses the managed SessionPropertyManager.
private final Session defaultSession;

// Thread pools: general query work, and the periodic transaction-idle check.
private final ExecutorService executor;
private final ScheduledExecutorService transactionCheckExecutor;
private final FinalizerService finalizerService;

private final SqlParser sqlParser;
private final InMemoryNodeManager nodeManager;
private final TypeRegistry typeRegistry;
private final PageSorter pageSorter;
private final PageIndexerFactory pageIndexerFactory;
private final MetadataManager metadata;
private final TestingAccessControlManager accessControl;
private final TestingEventListenerManager eventListener;
private final SplitManager splitManager;
private final BlockEncodingSerde blockEncodingSerde;
private final PageSourceManager pageSourceManager;
private final IndexManager indexManager;
private final NodePartitioningManager nodePartitioningManager;
private final PageSinkManager pageSinkManager;
private final TransactionManager transactionManager;
private final SpillerFactory spillerFactory;

private final ExpressionCompiler expressionCompiler;
private final JoinFilterFunctionCompiler joinFilterFunctionCompiler;
private final ConnectorManager connectorManager;
// DDL statement type -> task that executes it; built in the constructor.
private final ImmutableMap<Class<? extends Statement>, DataDefinitionTask<?>> dataDefinitionTask;

// When true, createDrivers() prints the logical plan to stdout.
private boolean printPlan;

// Queries take the read lock; getExclusiveLock() hands out the write lock.
private final ReadWriteLock lock = new ReentrantReadWriteLock();
/**
 * Creates a runner with mixed-distinct-aggregation optimization enabled
 * and no initial transaction.
 */
public LocalQueryRunner(Session defaultSession)
{
    this(defaultSession, new FeaturesConfig().setOptimizeMixedDistinctAggregations(true), false);
}

/**
 * Creates a runner with the given feature flags and no initial transaction.
 */
public LocalQueryRunner(Session defaultSession, FeaturesConfig featuresConfig)
{
    this(defaultSession, featuresConfig, false);
}
/**
 * Wires up a complete in-process Presto stack: parser, type registry,
 * transaction/metadata/connector managers, and the global system connector.
 * Initialization order matters — most components depend on ones created
 * earlier in this constructor.
 *
 * @param withInitialTransaction when true, begins a transaction and binds it
 *        to the runner's default session; the given session must not already
 *        be in a transaction
 */
private LocalQueryRunner(Session defaultSession, FeaturesConfig featuresConfig, boolean withInitialTransaction)
{
    requireNonNull(defaultSession, "defaultSession is null");
    checkArgument(!defaultSession.getTransactionId().isPresent() || !withInitialTransaction, "Already in transaction");

    this.executor = newCachedThreadPool(daemonThreadsNamed("local-query-runner-%s"));
    this.transactionCheckExecutor = newSingleThreadScheduledExecutor(daemonThreadsNamed("transaction-idle-check"));
    this.finalizerService = new FinalizerService();
    finalizerService.start();

    this.sqlParser = new SqlParser();
    this.nodeManager = new InMemoryNodeManager();
    this.typeRegistry = new TypeRegistry();
    this.pageSorter = new PagesIndexPageSorter();
    this.pageIndexerFactory = new GroupByHashPageIndexerFactory();
    this.indexManager = new IndexManager();
    // Single-node scheduler: the coordinator also executes work.
    NodeScheduler nodeScheduler = new NodeScheduler(
            new LegacyNetworkTopology(),
            nodeManager,
            new NodeSchedulerConfig().setIncludeCoordinator(true),
            new NodeTaskMap(finalizerService));
    this.pageSinkManager = new PageSinkManager();
    CatalogManager catalogManager = new CatalogManager();
    // Long idle timeout so transactions survive slow test runs/debugging.
    this.transactionManager = TransactionManager.create(
            new TransactionManagerConfig().setIdleTimeout(new Duration(1, TimeUnit.DAYS)),
            transactionCheckExecutor,
            catalogManager,
            executor);
    this.nodePartitioningManager = new NodePartitioningManager(nodeScheduler);

    this.splitManager = new SplitManager();
    this.blockEncodingSerde = new BlockEncodingManager(typeRegistry);
    this.metadata = new MetadataManager(
            featuresConfig,
            typeRegistry,
            blockEncodingSerde,
            new SessionPropertyManager(),
            new SchemaPropertyManager(),
            new TablePropertyManager(),
            transactionManager);
    this.accessControl = new TestingAccessControlManager(transactionManager);
    this.eventListener = new TestingEventListenerManager();
    this.pageSourceManager = new PageSourceManager();

    this.expressionCompiler = new ExpressionCompiler(metadata);
    this.joinFilterFunctionCompiler = new JoinFilterFunctionCompiler(metadata);

    this.connectorManager = new ConnectorManager(
            metadata,
            catalogManager,
            accessControl,
            splitManager,
            pageSourceManager,
            indexManager,
            nodePartitioningManager,
            pageSinkManager,
            new HandleResolver(),
            nodeManager,
            new NodeInfo("test"),
            typeRegistry,
            pageSorter,
            pageIndexerFactory,
            transactionManager);

    // System tables (nodes, catalogs, schema/table properties, transactions)
    // exposed through the global system connector.
    GlobalSystemConnectorFactory globalSystemConnectorFactory = new GlobalSystemConnectorFactory(ImmutableSet.of(
            new NodeSystemTable(nodeManager),
            new CatalogSystemTable(transactionManager),
            new SchemaPropertiesSystemTable(transactionManager, metadata),
            new TablePropertiesSystemTable(transactionManager, metadata),
            new TransactionsSystemTable(typeRegistry, transactionManager)),
            ImmutableSet.of());

    connectorManager.addConnectorFactory(globalSystemConnectorFactory);
    connectorManager.createConnection(GlobalSystemConnector.NAME, GlobalSystemConnector.NAME, ImmutableMap.of());

    // add bogus connector for testing session properties
    catalogManager.registerCatalog(createBogusTestingCatalog(TESTING_CATALOG));

    // rewrite session to use managed SessionPropertyMetadata
    this.defaultSession = new Session(
            defaultSession.getQueryId(),
            withInitialTransaction ? Optional.of(transactionManager.beginTransaction(false)) : defaultSession.getTransactionId(),
            defaultSession.isClientTransactionSupport(),
            defaultSession.getIdentity(),
            defaultSession.getSource(),
            defaultSession.getCatalog(),
            defaultSession.getSchema(),
            defaultSession.getTimeZoneKey(),
            defaultSession.getLocale(),
            defaultSession.getRemoteUserAddress(),
            defaultSession.getUserAgent(),
            defaultSession.getStartTime(),
            defaultSession.getSystemProperties(),
            defaultSession.getConnectorProperties(),
            defaultSession.getUnprocessedCatalogProperties(),
            metadata.getSessionPropertyManager(),
            defaultSession.getPreparedStatements());

    // Map each DDL statement type to the task that executes it.
    dataDefinitionTask = ImmutableMap.<Class<? extends Statement>, DataDefinitionTask<?>>builder()
            .put(CreateTable.class, new CreateTableTask())
            .put(CreateView.class, new CreateViewTask(jsonCodec(ViewDefinition.class), sqlParser, accessControl, new FeaturesConfig()))
            .put(DropTable.class, new DropTableTask())
            .put(DropView.class, new DropViewTask())
            .put(RenameColumn.class, new RenameColumnTask())
            .put(RenameTable.class, new RenameTableTask())
            .put(ResetSession.class, new ResetSessionTask())
            .put(SetSession.class, new SetSessionTask())
            .put(Prepare.class, new PrepareTask(sqlParser))
            .put(Deallocate.class, new DeallocateTask())
            .put(StartTransaction.class, new StartTransactionTask())
            .put(Commit.class, new CommitTask())
            .put(Rollback.class, new RollbackTask())
            .build();

    this.spillerFactory = new BinarySpillerFactory(blockEncodingSerde, featuresConfig);
}
/**
 * Static factory: creates a runner whose default session is already bound
 * to a freshly begun transaction.
 *
 * @throws IllegalArgumentException if {@code defaultSession} is already in a transaction
 */
public static LocalQueryRunner queryRunnerWithInitialTransaction(Session defaultSession)
{
    if (defaultSession.getTransactionId().isPresent()) {
        throw new IllegalArgumentException("Already in transaction!");
    }
    return new LocalQueryRunner(defaultSession, new FeaturesConfig(), true);
}
@Override
public void close()
{
    // Stop thread pools first so no new work reaches the managers below,
    // then tear down connectors and the finalizer service.
    executor.shutdownNow();
    transactionCheckExecutor.shutdownNow();
    connectorManager.stop();
    finalizerService.destroy();
}
@Override
public int getNodeCount()
{
    // Everything runs in-process on a single node.
    return 1;
}

public TypeRegistry getTypeManager()
{
    return typeRegistry;
}

@Override
public TransactionManager getTransactionManager()
{
    return transactionManager;
}

@Override
public Metadata getMetadata()
{
    return metadata;
}

@Override
public TestingAccessControlManager getAccessControl()
{
    return accessControl;
}

public ExecutorService getExecutor()
{
    return executor;
}

@Override
public Session getDefaultSession()
{
    return defaultSession;
}
/**
 * Registers a connector factory and creates a catalog backed by it.
 * The catalog is first announced on the in-memory node so work for it can
 * be scheduled locally.
 */
public void createCatalog(String catalogName, ConnectorFactory connectorFactory, Map<String, String> properties)
{
    nodeManager.addCurrentNodeConnector(new ConnectorId(catalogName));
    connectorManager.addConnectorFactory(connectorFactory);
    connectorManager.createConnection(catalogName, connectorFactory.getName(), properties);
}
@Override
public void installPlugin(Plugin plugin)
{
    // Plugins are not supported by this runner; register connector factories
    // directly via createCatalog(String, ConnectorFactory, Map).
    throw new UnsupportedOperationException();
}

@Override
public void createCatalog(String catalogName, String connectorName, Map<String, String> properties)
{
    // Looking a connector up by name requires plugin support; use the
    // ConnectorFactory overload instead.
    throw new UnsupportedOperationException();
}

/**
 * Enables printing of the logical plan for subsequently planned queries.
 *
 * @return this runner, for chaining
 */
public LocalQueryRunner printPlan()
{
    printPlan = true;
    return this;
}
/**
 * Lists tables in the given catalog/schema inside a read-only transaction.
 * Holds the shared read lock for the duration of the call.
 */
@Override
public List<QualifiedObjectName> listTables(Session session, String catalog, String schema)
{
    lock.readLock().lock();
    try {
        return transaction(transactionManager, accessControl)
                .readOnly()
                .execute(session, transactionSession ->
                        getMetadata().listTables(transactionSession, new QualifiedTablePrefix(catalog, schema)));
    }
    finally {
        lock.readLock().unlock();
    }
}
/**
 * Checks whether the named table exists, inside a read-only transaction.
 * Holds the shared read lock for the duration of the call.
 */
@Override
public boolean tableExists(Session session, String table)
{
    lock.readLock().lock();
    try {
        return transaction(transactionManager, accessControl)
                .readOnly()
                .execute(session, transactionSession ->
                        MetadataUtil.tableExists(getMetadata(), transactionSession, table));
    }
    finally {
        lock.readLock().unlock();
    }
}
@Override
public MaterializedResult execute(@Language("SQL") String sql)
{
    return execute(defaultSession, sql);
}

@Override
public MaterializedResult execute(Session session, @Language("SQL") String sql)
{
    // Each query runs inside its own single-statement transaction.
    return inTransaction(session, transactionSession -> executeInternal(transactionSession, sql));
}

public <T> T inTransaction(Function<Session, T> transactionSessionConsumer)
{
    return inTransaction(defaultSession, transactionSessionConsumer);
}

/**
 * Runs the given callback inside a new single-statement transaction;
 * the transaction is completed when the callback returns.
 */
public <T> T inTransaction(Session session, Function<Session, T> transactionSessionConsumer)
{
    return transaction(transactionManager, accessControl)
            .singleStatement()
            .execute(session, transactionSessionConsumer);
}
/**
 * Plans the given SQL, creates drivers for it, and runs them to completion
 * on the calling thread, collecting output pages into a MaterializedResult.
 * Holds the shared read lock for the duration of execution.
 *
 * @throws RuntimeException wrapping any IOException from closing the drivers
 */
private MaterializedResult executeInternal(Session session, @Language("SQL") String sql)
{
    lock.readLock().lock();
    try (Closer closer = Closer.create()) {
        // The result builder is created lazily, once the output types are known.
        AtomicReference<MaterializedResult.Builder> builder = new AtomicReference<>();
        PageConsumerOutputFactory outputFactory = new PageConsumerOutputFactory(types -> {
            builder.compareAndSet(null, MaterializedResult.resultBuilder(session, types));
            return builder.get()::page;
        });

        TaskContext taskContext = createTaskContext(executor, session);
        List<Driver> drivers = createDrivers(session, sql, outputFactory, taskContext);
        // BUG FIX: this used drivers.stream().map(closer::register), a lazy
        // intermediate operation with no terminal operation — the drivers were
        // never registered with the Closer and so were never closed.
        drivers.forEach(closer::register);

        // Round-robin over the drivers until none makes progress.
        boolean done = false;
        while (!done) {
            boolean processed = false;
            for (Driver driver : drivers) {
                if (!driver.isFinished()) {
                    driver.process();
                    processed = true;
                }
            }
            done = !processed;
        }

        verify(builder.get() != null, "Output operator was not created");
        return builder.get().build();
    }
    catch (IOException e) {
        throw Throwables.propagate(e);
    }
    finally {
        lock.readLock().unlock();
    }
}
@Override
public Lock getExclusiveLock()
{
    // Writers take this lock to exclude queries, which hold the read lock.
    return lock.writeLock();
}

public List<Driver> createDrivers(@Language("SQL") String sql, OutputFactory outputFactory, TaskContext taskContext)
{
    return createDrivers(defaultSession, sql, outputFactory, taskContext);
}
/**
 * Plans the SQL, verifies the plan is a single fragment, compiles it into a
 * local execution plan, enumerates splits for every table scan, and builds
 * the drivers that will execute it.
 *
 * @return an immutable list of drivers ready to be processed
 */
public List<Driver> createDrivers(Session session, @Language("SQL") String sql, OutputFactory outputFactory, TaskContext taskContext)
{
    Plan plan = createPlan(session, sql);

    if (printPlan) {
        System.out.println(PlanPrinter.textLogicalPlan(plan.getRoot(), plan.getTypes(), metadata, session));
    }

    // Local execution supports only a single fragment (no remote exchanges).
    SubPlan subplan = new PlanFragmenter().createSubPlans(session, metadata, plan);
    if (!subplan.getChildren().isEmpty()) {
        throw new AssertionError("Expected subplan to have no children");
    }

    LocalExecutionPlanner executionPlanner = new LocalExecutionPlanner(
            metadata,
            sqlParser,
            Optional.empty(),
            pageSourceManager,
            indexManager,
            nodePartitioningManager,
            pageSinkManager,
            null,
            expressionCompiler,
            joinFilterFunctionCompiler,
            new IndexJoinLookupStats(),
            new CompilerConfig().setInterpreterEnabled(false), // make sure tests fail if compiler breaks
            new TaskManagerConfig().setTaskConcurrency(4),
            spillerFactory);

    // plan query
    LocalExecutionPlan localExecutionPlan = executionPlanner.plan(
            session,
            subplan.getFragment().getRoot(),
            subplan.getFragment().getPartitioningScheme().getOutputLayout(),
            plan.getTypes(),
            outputFactory);

    // generate sources: drain every table scan's split source into a TaskSource
    List<TaskSource> sources = new ArrayList<>();
    long sequenceId = 0;
    for (TableScanNode tableScan : findTableScanNodes(subplan.getFragment().getRoot())) {
        TableLayoutHandle layout = tableScan.getLayout().get();

        SplitSource splitSource = splitManager.getSplits(session, layout);

        ImmutableSet.Builder<ScheduledSplit> scheduledSplits = ImmutableSet.builder();
        while (!splitSource.isFinished()) {
            for (Split split : getFutureValue(splitSource.getNextBatch(1000))) {
                scheduledSplits.add(new ScheduledSplit(sequenceId++, tableScan.getId(), split));
            }
        }

        sources.add(new TaskSource(tableScan.getId(), scheduledSplits.build(), true));
    }

    // create drivers: source-less factories get their drivers created here;
    // factories with a source are recorded so a driver can be created per split.
    List<Driver> drivers = new ArrayList<>();
    Map<PlanNodeId, DriverFactory> driverFactoriesBySource = new HashMap<>();
    for (DriverFactory driverFactory : localExecutionPlan.getDriverFactories()) {
        for (int i = 0; i < driverFactory.getDriverInstances().orElse(1); i++) {
            if (driverFactory.getSourceId().isPresent()) {
                // NOTE(review): for a source factory with driverInstances > 1
                // this put would fail the checkState on the second iteration —
                // presumably source factories always have a single instance;
                // confirm before relying on this.
                checkState(driverFactoriesBySource.put(driverFactory.getSourceId().get(), driverFactory) == null);
            }
            else {
                DriverContext driverContext = taskContext.addPipelineContext(driverFactory.isInputDriver(), driverFactory.isOutputDriver()).addDriverContext();
                Driver driver = driverFactory.createDriver(driverContext);
                drivers.add(driver);
            }
        }
    }

    // add sources to the drivers: one driver per scheduled split
    for (TaskSource source : sources) {
        DriverFactory driverFactory = driverFactoriesBySource.get(source.getPlanNodeId());
        checkState(driverFactory != null);
        for (ScheduledSplit split : source.getSplits()) {
            DriverContext driverContext = taskContext.addPipelineContext(driverFactory.isInputDriver(), driverFactory.isOutputDriver()).addDriverContext();
            Driver driver = driverFactory.createDriver(driverContext);
            driver.updateSource(new TaskSource(split.getPlanNodeId(), ImmutableSet.of(split), true));
            drivers.add(driver);
        }
    }

    // No more drivers will be created; close all factories.
    for (DriverFactory driverFactory : localExecutionPlan.getDriverFactories()) {
        driverFactory.close();
    }

    return ImmutableList.copyOf(drivers);
}
/**
 * Plans {@code sql} through the full pipeline, up to the optimized-and-validated stage.
 */
public Plan createPlan(Session session, @Language("SQL") String sql)
{
    // Default to the final planning stage.
    return createPlan(session, sql, LogicalPlanner.Stage.OPTIMIZED_AND_VALIDATED);
}
/**
 * Plans {@code sql} up to the requested stage using the runner's default feature
 * configuration (distributed index joins off, hash generation on).
 */
public Plan createPlan(Session session, @Language("SQL") String sql, LogicalPlanner.Stage stage)
{
    Statement statement = unwrapExecuteStatement(sqlParser.createStatement(sql), sqlParser, session);
    assertFormattedSql(sqlParser, statement);
    FeaturesConfig config = new FeaturesConfig()
            .setDistributedIndexJoinsEnabled(false)
            .setOptimizeHashGeneration(true);
    List<PlanOptimizer> optimizers = new PlanOptimizers(metadata, sqlParser, config, true).get();
    return createPlan(session, sql, config, optimizers, stage);
}
/**
 * Plans {@code sql} with an explicit feature configuration and optimizer list,
 * defaulting to the optimized-and-validated stage.
 */
public Plan createPlan(Session session, @Language("SQL") String sql, FeaturesConfig featuresConfig, List<PlanOptimizer> optimizers)
{
    return createPlan(session, sql, featuresConfig, optimizers, LogicalPlanner.Stage.OPTIMIZED_AND_VALIDATED);
}
/**
 * Parses, analyzes, and plans {@code sql} up to the requested stage.
 * EXECUTE statements are unwrapped first; their bound parameters are carried into analysis.
 */
public Plan createPlan(Session session, @Language("SQL") String sql, FeaturesConfig featuresConfig, List<PlanOptimizer> optimizers, LogicalPlanner.Stage stage)
{
    // Unwrap EXECUTE to the underlying prepared statement, keeping any bound parameters.
    Statement raw = sqlParser.createStatement(sql);
    Statement statement = unwrapExecuteStatement(raw, sqlParser, session);
    List<Expression> parameters = raw instanceof Execute ? ((Execute) raw).getParameters() : emptyList();
    validateParameters(statement, parameters);
    assertFormattedSql(sqlParser, statement);
    QueryExplainer queryExplainer = new QueryExplainer(
            optimizers,
            metadata,
            accessControl,
            sqlParser,
            dataDefinitionTask);
    Analyzer analyzer = new Analyzer(session, metadata, sqlParser, accessControl, Optional.of(queryExplainer), parameters);
    Analysis analysis = analyzer.analyze(statement);
    PlanNodeIdAllocator idAllocator = new PlanNodeIdAllocator();
    LogicalPlanner logicalPlanner = new LogicalPlanner(session, optimizers, idAllocator, metadata, sqlParser);
    return logicalPlanner.plan(analysis, stage);
}
/**
 * Builds an operator factory that scans the named table, projecting exactly the given
 * columns, backed by the single split a local query resolves to.
 *
 * @param session session used for metadata and split resolution; must have catalog and schema set
 * @param operatorId operator id assigned to created operators
 * @param planNodeId plan node id assigned to created operators
 * @param tableName unqualified table name, resolved against the session's catalog/schema
 * @param columnNames columns to scan, in output order
 * @return a non-duplicable factory producing {@code PageSourceOperator}s over the table's split
 */
public OperatorFactory createTableScanOperator(
Session session,
int operatorId,
PlanNodeId planNodeId,
String tableName,
String... columnNames)
{
checkArgument(session.getCatalog().isPresent(), "catalog not set");
checkArgument(session.getSchema().isPresent(), "schema not set");
// look up the table
QualifiedObjectName qualifiedTableName = new QualifiedObjectName(session.getCatalog().get(), session.getSchema().get(), tableName);
TableHandle tableHandle = metadata.getTableHandle(session, qualifiedTableName).orElse(null);
checkArgument(tableHandle != null, "Table %s does not exist", qualifiedTableName);
// lookup the columns, keeping handle and type lists parallel to columnNames
Map<String, ColumnHandle> allColumnHandles = metadata.getColumnHandles(session, tableHandle);
ImmutableList.Builder<ColumnHandle> columnHandlesBuilder = ImmutableList.builder();
ImmutableList.Builder<Type> columnTypesBuilder = ImmutableList.builder();
for (String columnName : columnNames) {
ColumnHandle columnHandle = allColumnHandles.get(columnName);
checkArgument(columnHandle != null, "Table %s does not have a column %s", tableName, columnName);
columnHandlesBuilder.add(columnHandle);
ColumnMetadata columnMetadata = metadata.getColumnMetadata(session, tableHandle, columnHandle);
columnTypesBuilder.add(columnMetadata.getType());
}
List<ColumnHandle> columnHandles = columnHandlesBuilder.build();
List<Type> columnTypes = columnTypesBuilder.build();
// get the split for this table
// NOTE(review): uses the first layout unconditionally — assumes getLayouts never returns empty here
List<TableLayoutResult> layouts = metadata.getLayouts(session, tableHandle, Constraint.alwaysTrue(), Optional.empty());
Split split = getLocalQuerySplit(session, layouts.get(0).getLayout().getHandle());
return new OperatorFactory()
{
@Override
public List<Type> getTypes()
{
return columnTypes;
}
@Override
public Operator createOperator(DriverContext driverContext)
{
// Each operator gets its own page source over the same split.
OperatorContext operatorContext = driverContext.addOperatorContext(operatorId, planNodeId, "BenchmarkSource");
ConnectorPageSource pageSource = pageSourceManager.createPageSource(session, split, columnHandles);
return new PageSourceOperator(pageSource, columnTypes, operatorContext);
}
@Override
public void close()
{
}
@Override
public OperatorFactory duplicate()
{
// Duplication is deliberately unsupported for this test-only factory.
throw new UnsupportedOperationException();
}
};
}
/**
 * Builds a filter-and-project factory that passes every input column through unchanged
 * and appends one BIGINT column holding the hash of all input columns.
 */
public OperatorFactory createHashProjectOperator(int operatorId, PlanNodeId planNodeId, List<Type> columnTypes)
{
    ImmutableList.Builder<ProjectionFunction> projections = ImmutableList.builder();
    int channel = 0;
    for (Type type : columnTypes) {
        // Identity projection for each input channel, in order.
        projections.add(ProjectionFunctions.singleColumn(type, channel));
        channel++;
    }
    projections.add(new HashProjectionFunction(columnTypes));
    // Output layout: all input types followed by the appended BIGINT hash column.
    List<Type> outputTypes = ImmutableList.copyOf(Iterables.concat(columnTypes, ImmutableList.of(BIGINT)));
    return new FilterAndProjectOperator.FilterAndProjectOperatorFactory(
            operatorId,
            planNodeId,
            () -> new GenericPageProcessor(FilterFunctions.TRUE_FUNCTION, projections.build()),
            outputTypes);
}
/**
 * Drains every split from the layout's split source and returns the single split
 * a local query is expected to resolve to.
 */
private Split getLocalQuerySplit(Session session, TableLayoutHandle handle)
{
    SplitSource splitSource = splitManager.getSplits(session, handle);
    List<Split> splits = new ArrayList<>();
    // Always fetch at least one batch, then keep fetching until the source is exhausted.
    do {
        splits.addAll(getFutureValue(splitSource.getNextBatch(1000)));
    }
    while (!splitSource.isFinished());
    checkArgument(splits.size() == 1, "Expected only one split for a local query, but got %s splits", splits.size());
    return splits.get(0);
}
/**
 * Collects every table-scan node reachable from {@code node}, children before parents.
 */
private static List<TableScanNode> findTableScanNodes(PlanNode node)
{
    ImmutableList.Builder<TableScanNode> collected = ImmutableList.builder();
    findTableScanNodes(node, collected);
    return collected.build();
}
/**
 * Post-order traversal: descends into every source before recording the node itself,
 * so scans appear bottom-up in the collected list.
 */
private static void findTableScanNodes(PlanNode node, ImmutableList.Builder<TableScanNode> builder)
{
    node.getSources().forEach(source -> findTableScanNodes(source, builder));
    if (node instanceof TableScanNode) {
        builder.add((TableScanNode) node);
    }
}
/**
 * Projection that emits a BIGINT hash computed over all input columns at a position.
 * Only the block-based projection path is supported; cursor projection, input-channel
 * introspection, and determinism queries all throw.
 */
private static class HashProjectionFunction
        implements ProjectionFunction
{
    private final List<Type> types;

    public HashProjectionFunction(List<Type> columnTypes)
    {
        this.types = columnTypes;
    }

    @Override
    public Type getType()
    {
        // The projected hash is always a BIGINT.
        return BIGINT;
    }

    @Override
    public void project(int position, Block[] blocks, BlockBuilder output)
    {
        BIGINT.writeLong(output, TypeUtils.getHashPosition(types, blocks, position));
    }

    @Override
    public void project(RecordCursor cursor, BlockBuilder output)
    {
        throw new UnsupportedOperationException("Operation not supported");
    }

    @Override
    public Set<Integer> getInputChannels()
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean isDeterministic()
    {
        throw new UnsupportedOperationException();
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.rest.controller;
import static org.apache.metron.rest.MetronRestConstants.TEST_PROFILE;
import static org.hamcrest.Matchers.hasSize;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.httpBasic;
import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.adrianwalker.multilinestring.Multiline;
import org.apache.metron.integration.ComponentRunner;
import org.apache.metron.integration.UnableToStartException;
import org.apache.metron.integration.components.KafkaComponent;
import org.apache.metron.rest.service.AlertsUIService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
/**
 * Integration tests for the alerts UI endpoints ({@code /api/v1/alerts/ui}):
 * alert escalation over Kafka, per-user profile (settings) create/read/delete,
 * and role-based access control (only admin may list or delete other users' profiles).
 *
 * NOTE: the javadoc comments above the {@code @Multiline}-annotated fields below are
 * the fields' runtime string values — do not edit them casually.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@ActiveProfiles(TEST_PROFILE)
public class AlertsUIControllerIntegrationTest {
/**
* [
* {
* "is_alert": true,
* "field": "value1"
* },
* {
* "is_alert": true,
* "field": "value2"
* }
* ]
*/
@Multiline
public static String alerts;
/**
* {
* "tableColumns": ["user1_field"],
* "savedSearches": [
* {
* "name": "user1 search 1",
* "searchRequest": {
* "from": 0,
* "indices": ["bro"],
* "query": "*",
* "size": 5
* }
* },
* {
* "name": "user1 search 2",
* "searchRequest": {
* "from": 10,
* "indices": ["snort"],
* "query": "*",
* "size": 10
* }
* }
* ]
* }
*/
@Multiline
public static String user1AlertUserSettingsJson;
/**
* {
* "tableColumns": ["user2_field"],
* "savedSearches": [
* {
* "name": "user2 search 1",
* "searchRequest": {
* "from": 0,
* "indices": ["bro", "snort"],
* "query": "ip_src_addr:192.168.1.1",
* "size": 100
* }
* }
* ]
* }
*/
@Multiline
public static String user2AlertUserSettingsJson;
// A bug in Spring and/or Kafka forced us to move into a component that is spun up and down per test-case
// Given the large spinup time of components, please avoid this pattern until we upgrade Spring.
// See: https://issues.apache.org/jira/browse/METRON-1009
@Autowired
private KafkaComponent kafkaWithZKComponent;
// Per-test component runner wrapping the Kafka component (see note above).
private ComponentRunner runner;
@Autowired
private WebApplicationContext wac;
private MockMvc mockMvc;
// Service under test; also used to reset stored profiles between tests.
@Autowired
private AlertsUIService alertsUIService;
// Base URL for all alerts UI endpoints exercised here.
private String alertUrl = "/api/v1/alerts/ui";
// Test credentials; user1/user2 are regular users, admin may list/delete any profile.
private String user1 = "user1";
private String user2 = "user2";
private String admin = "admin";
private String password = "password";
/**
 * Clears all stored alerts UI user settings and builds a security-aware MockMvc,
 * so every test starts from an empty profile repository.
 */
@Before
public void setup() throws Exception {
for (String user : alertsUIService.findAllAlertsUIUserSettings().keySet()) {
alertsUIService.deleteAlertsUIUserSettings(user);
}
this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).apply(springSecurity()).build();
}
/**
 * Unauthenticated requests get 401; authenticated non-admin users get 403
 * on the admin-only endpoints (list all, delete by user).
 */
@Test
public void testSecurity() throws Exception {
this.mockMvc.perform(post(alertUrl + "/escalate").with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(alerts))
.andExpect(status().isUnauthorized());
this.mockMvc.perform(get(alertUrl + "/settings"))
.andExpect(status().isUnauthorized());
this.mockMvc.perform(get(alertUrl + "/settings/all"))
.andExpect(status().isUnauthorized());
this.mockMvc.perform(post(alertUrl + "/settings").with(csrf())
.contentType(MediaType.parseMediaType("application/json;charset=UTF-8"))
.content(user1AlertUserSettingsJson))
.andExpect(status().isUnauthorized());
// regular users are forbidden from the admin-only endpoints
this.mockMvc.perform(get(alertUrl + "/settings/all").with(httpBasic(user1, password)).with(csrf()))
.andExpect(status().isForbidden());
this.mockMvc.perform(delete(alertUrl + "/settings/user1").with(httpBasic(user1, password)).with(csrf()))
.andExpect(status().isForbidden());
}
/**
 * Escalating alerts as an authenticated user returns 200.
 * Kafka is started and stopped around the call because escalation publishes to a topic.
 */
@Test
public void escalateShouldEscalateAlerts() throws Exception {
startKafka();
this.mockMvc.perform(post(alertUrl + "/escalate").with(httpBasic(user1, password)).with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(alerts))
.andExpect(status().isOk());
stopKafka();
}
/**
 * Runs the profile lifecycle scenarios in order; each step depends on the
 * repository state left by the previous one.
 */
@Test
public void testAlertProfiles() throws Exception {
emptyProfileShouldReturnNotFound();
alertsProfilesShouldBeCreatedOrUpdated();
alertsProfilesShouldBeProperlyDeleted();
}
/** Ensures a 404 is returned when an alerts profile cannot be found. In the case of an admin getting
* all profiles, an empty list should be returned. This test depends on the alertsProfileRepository
* being empty.
*
* @throws Exception
*/
private void emptyProfileShouldReturnNotFound() throws Exception {
// user1 should get a 404 because an alerts profile has not been created
this.mockMvc.perform(get(alertUrl + "/settings").with(httpBasic(user1, password)))
.andExpect(status().isNotFound());
// user2 should get a 404 because an alerts profile has not been created
this.mockMvc.perform(get(alertUrl + "/settings").with(httpBasic(user2, password)))
.andExpect(status().isNotFound());
// getting all alerts profiles should return an empty list
this.mockMvc.perform(get(alertUrl + "/settings/all").with(httpBasic(admin, password)))
.andExpect(status().isOk())
.andExpect(
content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(jsonPath("$.*", hasSize(0)));
}
/** Ensures users can update their profiles independently of other users. When user1 updates an
* alerts profile, alerts profile for user2 should not be affected. Tests that an initial update
* returns a 201 status and subsequent updates return 200 statuses. A call to get all alerts profiles
* by an admin user should also work properly. This test depends on the alertsProfileRepository
* being empty initially.
*
* @throws Exception
*/
private void alertsProfilesShouldBeCreatedOrUpdated() throws Exception {
// user1 creates their alerts profile
this.mockMvc.perform(post(alertUrl + "/settings").with(httpBasic(user1, password)).with(csrf())
.contentType(MediaType.parseMediaType("application/json;charset=UTF-8"))
.content(user1AlertUserSettingsJson))
.andExpect(status().isCreated());
// user1 updates their alerts profile
this.mockMvc.perform(post(alertUrl + "/settings").with(httpBasic(user1, password)).with(csrf())
.contentType(MediaType.parseMediaType("application/json;charset=UTF-8"))
.content(user1AlertUserSettingsJson))
.andExpect(status().isOk());
// user1 gets their alerts profile
this.mockMvc.perform(get(alertUrl + "/settings").with(httpBasic(user1, password)))
.andExpect(status().isOk())
.andExpect(
content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(content().json(user1AlertUserSettingsJson));
// user2 alerts profile should still be empty
this.mockMvc.perform(get(alertUrl + "/settings").with(httpBasic(user2, password)))
.andExpect(status().isNotFound());
// getting all alerts profiles should only return user1's
this.mockMvc.perform(get(alertUrl + "/settings/all").with(httpBasic(admin, password)))
.andExpect(status().isOk())
.andExpect(
content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(content().json("{\"" + user1 + "\": " + user1AlertUserSettingsJson + "}"));
// user2 creates their alerts profile
this.mockMvc.perform(post(alertUrl + "/settings").with(httpBasic(user2, password)).with(csrf())
.contentType(MediaType.parseMediaType("application/json;charset=UTF-8"))
.content(user2AlertUserSettingsJson))
.andExpect(status().isCreated());
// user1's alerts profile should be unchanged by user2's create
this.mockMvc.perform(get(alertUrl + "/settings").with(httpBasic(user1, password)))
.andExpect(status().isOk())
.andExpect(
content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(content().json(user1AlertUserSettingsJson));
// user2 gets their alerts profile
this.mockMvc.perform(get(alertUrl + "/settings").with(httpBasic(user2, password)))
.andExpect(status().isOk())
.andExpect(
content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(content().json(user2AlertUserSettingsJson));
// getting all alerts profiles should return both
this.mockMvc.perform(get(alertUrl + "/settings/all").with(httpBasic(admin, password)))
.andExpect(status().isOk())
.andExpect(
content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(content().json("{\"" + user1 + "\": " + user1AlertUserSettingsJson + ",\"" + user2 + "\": " + user2AlertUserSettingsJson + "}"));
}
/** Ensures users can delete their profiles independently of other users. When user1 deletes an
* alerts profile, alerts profile for user2 should not be deleted. This test depends on alerts
* profiles existing for user1 and user2.
*
* @throws Exception
*/
private void alertsProfilesShouldBeProperlyDeleted() throws Exception {
// admin deletes user1's profile
this.mockMvc.perform(delete(alertUrl + "/settings/user1").with(httpBasic(admin, password)))
.andExpect(status().isOk());
// a second delete of user1's (now missing) profile returns 404
this.mockMvc.perform(delete(alertUrl + "/settings/user1").with(httpBasic(admin, password)))
.andExpect(status().isNotFound());
// user1 should get a 404 when trying to retrieve their alerts profile
this.mockMvc.perform(get(alertUrl + "/settings").with(httpBasic(user1, password)))
.andExpect(status().isNotFound());
// user2's alerts profile should still exist
this.mockMvc.perform(get(alertUrl + "/settings").with(httpBasic(user2, password)))
.andExpect(status().isOk())
.andExpect(
content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(content().json(user2AlertUserSettingsJson));
// getting all alerts profiles should only return user2's
this.mockMvc.perform(get(alertUrl + "/settings/all").with(httpBasic(admin, password)))
.andExpect(status().isOk())
.andExpect(
content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(content().json("{\"" + user2 + "\": " + user2AlertUserSettingsJson + "}"));
// admin deletes user2's profile
this.mockMvc.perform(delete(alertUrl + "/settings/user2").with(httpBasic(admin, password)))
.andExpect(status().isOk());
// user1 should still get a 404 when trying to retrieve their alerts profile
this.mockMvc.perform(get(alertUrl + "/settings").with(httpBasic(user1, password)))
.andExpect(status().isNotFound());
// user2 should get a 404 when trying to retrieve their alerts profile
this.mockMvc.perform(get(alertUrl + "/settings").with(httpBasic(user2, password)))
.andExpect(status().isNotFound());
// getting all alerts profiles should return an empty list
this.mockMvc.perform(get(alertUrl + "/settings/all").with(httpBasic(admin, password)))
.andExpect(status().isOk())
.andExpect(
content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(jsonPath("$.*", hasSize(0)));
}
// Spins up the Kafka (with ZK) component for tests that publish to a topic.
// NOTE(review): startup failure is only printed, so a dependent test would fail later
// with a less obvious error — consider rethrowing instead.
private void startKafka() {
runner = new ComponentRunner.Builder()
.withComponent("kafka", kafkaWithZKComponent)
.withCustomShutdownOrder(new String[]{"kafka"})
.build();
try {
runner.start();
} catch (UnableToStartException e) {
e.printStackTrace();
}
}
// Tears down the Kafka component started by startKafka().
private void stopKafka() {
runner.stop();
}
}
| |
/*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb.reactivestreams.client.internal;
import com.mongodb.ExplainVerbosity;
import com.mongodb.MongoNamespace;
import com.mongodb.client.model.Collation;
import com.mongodb.internal.async.AsyncBatchCursor;
import com.mongodb.internal.client.model.AggregationLevel;
import com.mongodb.internal.client.model.FindOptions;
import com.mongodb.internal.operation.AggregateOperation;
import com.mongodb.internal.operation.AsyncReadOperation;
import com.mongodb.lang.Nullable;
import com.mongodb.reactivestreams.client.AggregatePublisher;
import com.mongodb.reactivestreams.client.ClientSession;
import org.bson.BsonDocument;
import org.bson.BsonString;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.reactivestreams.Publisher;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static com.mongodb.assertions.Assertions.notNull;
/**
 * Reactive-streams implementation of {@link AggregatePublisher}: accumulates aggregation
 * options via the fluent setters, then builds the matching internal operation when the
 * stream is subscribed. If the pipeline ends in {@code $out}/{@code $merge}, reading the
 * results runs the aggregation to the target collection first and then queries it.
 */
final class AggregatePublisherImpl<T> extends BatchCursorPublisher<T> implements AggregatePublisher<T> {
private final List<? extends Bson> pipeline;
private final AggregationLevel aggregationLevel;
// Options below are staged here and only applied when an operation is constructed.
private Boolean allowDiskUse;
private long maxTimeMS;
private long maxAwaitTimeMS;
private Boolean bypassDocumentValidation;
private Collation collation;
private String comment;
private Bson hint;
private String hintString;
private Bson variables;
AggregatePublisherImpl(
@Nullable final ClientSession clientSession,
final MongoOperationPublisher<T> mongoOperationPublisher,
final List<? extends Bson> pipeline,
final AggregationLevel aggregationLevel) {
super(clientSession, mongoOperationPublisher);
this.pipeline = notNull("pipeline", pipeline);
this.aggregationLevel = notNull("aggregationLevel", aggregationLevel);
}
@Override
public AggregatePublisher<T> allowDiskUse(@Nullable final Boolean allowDiskUse) {
this.allowDiskUse = allowDiskUse;
return this;
}
@Override
public AggregatePublisher<T> batchSize(final int batchSize) {
super.batchSize(batchSize);
return this;
}
@Override
public AggregatePublisher<T> maxTime(final long maxTime, final TimeUnit timeUnit) {
notNull("timeUnit", timeUnit);
// Stored in milliseconds regardless of the caller's unit.
this.maxTimeMS = TimeUnit.MILLISECONDS.convert(maxTime, timeUnit);
return this;
}
@Override
public AggregatePublisher<T> maxAwaitTime(final long maxAwaitTime, final TimeUnit timeUnit) {
notNull("timeUnit", timeUnit);
this.maxAwaitTimeMS = TimeUnit.MILLISECONDS.convert(maxAwaitTime, timeUnit);
return this;
}
@Override
public AggregatePublisher<T> bypassDocumentValidation(@Nullable final Boolean bypassDocumentValidation) {
this.bypassDocumentValidation = bypassDocumentValidation;
return this;
}
@Override
public AggregatePublisher<T> collation(@Nullable final Collation collation) {
this.collation = collation;
return this;
}
@Override
public AggregatePublisher<T> comment(@Nullable final String comment) {
this.comment = comment;
return this;
}
@Override
public AggregatePublisher<T> hint(@Nullable final Bson hint) {
this.hint = hint;
return this;
}
@Override
public AggregatePublisher<T> hintString(@Nullable final String hint) {
this.hintString = hint;
return this;
}
@Override
public AggregatePublisher<T> let(@Nullable final Bson variables) {
this.variables = variables;
return this;
}
@Override
public Publisher<Void> toCollection() {
// Only valid when the pipeline writes its results out via $out or $merge.
BsonDocument lastPipelineStage = getLastPipelineStage();
if (lastPipelineStage == null || !lastPipelineStage.containsKey("$out") && !lastPipelineStage.containsKey("$merge")) {
throw new IllegalStateException("The last stage of the aggregation pipeline must be $out or $merge");
}
return getMongoOperationPublisher().createReadOperationMono(this::getAggregateToCollectionOperation, getClientSession());
}
@Override
public Publisher<Document> explain() {
return publishExplain(Document.class, null);
}
@Override
public Publisher<Document> explain(final ExplainVerbosity verbosity) {
return publishExplain(Document.class, notNull("verbosity", verbosity));
}
@Override
public <E> Publisher<E> explain(final Class<E> explainResultClass) {
return publishExplain(explainResultClass, null);
}
@Override
public <E> Publisher<E> explain(final Class<E> explainResultClass, final ExplainVerbosity verbosity) {
return publishExplain(explainResultClass, notNull("verbosity", verbosity));
}
// Wraps the aggregate operation as an explainable read; verbosity may be null (server default).
private <E> Publisher<E> publishExplain(final Class<E> explainResultClass, @Nullable final ExplainVerbosity verbosity) {
notNull("explainDocumentClass", explainResultClass);
return getMongoOperationPublisher().createReadOperationMono(() ->
asAggregateOperation(1).asAsyncExplainableOperation(verbosity,
getCodecRegistry().get(explainResultClass)),
getClientSession());
}
@Override
AsyncReadOperation<AsyncBatchCursor<T>> asAsyncReadOperation(final int initialBatchSize) {
MongoNamespace outNamespace = getOutNamespace();
if (outNamespace != null) {
// Pipeline writes to a collection: run it there, then read the results back with a find.
AsyncReadOperation<Void> aggregateToCollectionOperation = getAggregateToCollectionOperation();
FindOptions findOptions = new FindOptions().collation(collation).batchSize(initialBatchSize);
AsyncReadOperation<AsyncBatchCursor<T>> findOperation =
getOperations().find(outNamespace, new BsonDocument(), getDocumentClass(), findOptions);
return new VoidReadOperationThenCursorReadOperation<>(aggregateToCollectionOperation, findOperation);
} else {
return asAggregateOperation(initialBatchSize);
}
}
// Builds the cursor-returning aggregate operation from the staged options.
private AggregateOperation<T> asAggregateOperation(final int initialBatchSize) {
return getOperations()
.aggregate(pipeline, getDocumentClass(), maxTimeMS, maxAwaitTimeMS,
initialBatchSize, collation, hint, hintString, comment, variables, allowDiskUse, aggregationLevel);
}
// Builds the void aggregate operation used when the pipeline ends in $out/$merge.
private AsyncReadOperation<Void> getAggregateToCollectionOperation() {
return getOperations().aggregateToCollection(pipeline, maxTimeMS, allowDiskUse, bypassDocumentValidation, collation, hint, hintString, comment,
variables, aggregationLevel);
}
/** Returns the last pipeline stage as a BsonDocument, or null for an empty pipeline. */
@Nullable
private BsonDocument getLastPipelineStage() {
if (pipeline.isEmpty()) {
return null;
} else {
Bson lastStage = notNull("last pipeline stage", pipeline.get(pipeline.size() - 1));
return lastStage.toBsonDocument(getDocumentClass(), getCodecRegistry());
}
}
/**
 * Resolves the output namespace of a trailing $out/$merge stage, or null when the
 * pipeline returns a cursor directly. Handles both the string form (collection in the
 * current database) and the document form (explicit db/coll) of each stage.
 */
@Nullable
private MongoNamespace getOutNamespace() {
BsonDocument lastPipelineStage = getLastPipelineStage();
if (lastPipelineStage == null) {
return null;
}
String databaseName = getNamespace().getDatabaseName();
if (lastPipelineStage.containsKey("$out")) {
if (lastPipelineStage.get("$out").isString()) {
return new MongoNamespace(databaseName, lastPipelineStage.getString("$out").getValue());
} else if (lastPipelineStage.get("$out").isDocument()) {
BsonDocument outDocument = lastPipelineStage.getDocument("$out");
if (!outDocument.containsKey("db") || !outDocument.containsKey("coll")) {
throw new IllegalStateException("Cannot return a cursor when the value for $out stage is not a namespace document");
}
return new MongoNamespace(outDocument.getString("db").getValue(), outDocument.getString("coll").getValue());
} else {
throw new IllegalStateException("Cannot return a cursor when the value for $out stage "
+ "is not a string or namespace document");
}
} else if (lastPipelineStage.containsKey("$merge")) {
if (lastPipelineStage.isString("$merge")) {
return new MongoNamespace(databaseName, lastPipelineStage.getString("$merge").getValue());
} else if (lastPipelineStage.isDocument("$merge")) {
BsonDocument mergeDocument = lastPipelineStage.getDocument("$merge");
if (mergeDocument.isDocument("into")) {
BsonDocument intoDocument = mergeDocument.getDocument("into");
// "db" defaults to the current database when omitted from the into document.
return new MongoNamespace(intoDocument.getString("db", new BsonString(databaseName)).getValue(),
intoDocument.getString("coll").getValue());
} else if (mergeDocument.isString("into")) {
return new MongoNamespace(databaseName, mergeDocument.getString("into").getValue());
}
// NOTE(review): a $merge document whose "into" is neither string nor document
// falls through and returns null — confirm this is intentional.
} else {
throw new IllegalStateException("Cannot return a cursor when the value for $merge stage is not a string or a document");
}
}
return null;
}
}
| |
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.j2cl.transpiler.ast;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.j2cl.transpiler.ast.TypeDescriptors.BootstrapType;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.OptionalInt;
/** Utilities to create method calls to the runtime. */
public class RuntimeMethods {
/** Builds a call to the named Arrays runtime method with the given arguments. */
public static MethodCall createArraysMethodCall(String methodName, Expression... arguments) {
  // Varargs convenience wrapper over the list-based overload.
  return createArraysMethodCall(methodName, Arrays.asList(arguments));
}
/** Builds a call to the named Arrays runtime method with the given argument list. */
public static MethodCall createArraysMethodCall(String methodName, List<Expression> arguments) {
  return createRuntimeMethodCall(BootstrapType.ARRAYS.getDescriptor(), methodName, arguments);
}
/** Builds a call to the Arrays.$set runtime method storing {@code value} at {@code index}. */
public static Expression createArraySetMethodCall(
    Expression array, Expression index, Expression value) {
  // The array's component type drives both the value parameter and the return type of $set.
  TypeDescriptor elementType =
      ((ArrayTypeDescriptor) array.getTypeDescriptor()).getComponentTypeDescriptor();
  MethodDescriptor arraySetDescriptor =
      MethodDescriptor.newBuilder()
          .setJsInfo(JsInfo.RAW)
          .setStatic(true)
          .setEnclosingTypeDescriptor(BootstrapType.ARRAYS.getDescriptor())
          .setName("$set")
          .setParameterTypeDescriptors(
              TypeDescriptors.get().javaLangObjectArray, // array
              PrimitiveTypes.INT, // index
              elementType)
          .setReturnTypeDescriptor(elementType)
          .build();
  return MethodCall.Builder.from(arraySetDescriptor).setArguments(array, index, value).build();
}
/** Builds a call to javaemul.internal.WasmArrayHelper.createMultiDimensional. */
public static MethodCall createCreateMultiDimensionalArrayCall(
    Expression dimensions, Expression leafType) {
  // createMultiDimensional(int[] dimensions, int leafType)
  ArrayTypeDescriptor intArrayType =
      ArrayTypeDescriptor.newBuilder().setComponentTypeDescriptor(PrimitiveTypes.INT).build();
  MethodDescriptor createMultiDimensional =
      TypeDescriptors.get()
          .javaemulInternalWasmArray
          .getMethodDescriptor("createMultiDimensional", intArrayType, PrimitiveTypes.INT);
  return MethodCall.Builder.from(createMultiDimensional)
      .setArguments(dimensions, leafType)
      .build();
}
/** Builds a call to Class.$get, accepting the one- or two-argument form. */
public static MethodCall createClassGetMethodCall(Expression... arguments) {
  // $get takes a constructor function and, optionally, a second int argument.
  checkArgument(arguments.length == 1 || arguments.length == 2);
  List<TypeDescriptor> parameterTypeDescriptors =
      ImmutableList.of(TypeDescriptors.get().nativeFunction, PrimitiveTypes.INT);
  MethodDescriptor classGetDescriptor =
      MethodDescriptor.newBuilder()
          .setJsInfo(JsInfo.RAW)
          .setStatic(true)
          .setEnclosingTypeDescriptor(TypeDescriptors.get().javaLangClass)
          .setName("$get")
          // Allow the call to use the one or two parameter version.
          .setParameterTypeDescriptors(parameterTypeDescriptors.subList(0, arguments.length))
          .setReturnTypeDescriptor(TypeDescriptors.get().javaLangClass)
          .build();
  return MethodCall.Builder.from(classGetDescriptor).setArguments(arguments).build();
}
/** Create a call to Enums.createMapFromValues. */
public static Expression createEnumsCreateMapFromValuesMethodCall(Expression values) {
MethodDescriptor createMapMethodDescriptor =
TypeDescriptors.get()
.javaemulInternalEnums
.getMethodDescriptorByName("createMapFromValues");
// createMapFromValues is parameterized by T extends Enum, so specialize the method to the
// right type.
TypeVariable enumType = createMapMethodDescriptor.getTypeParameterTypeDescriptors().get(0);
return MethodCall.Builder.from(
createMapMethodDescriptor.specializeTypeVariables(
ImmutableMap.of(
enumType,
((ArrayTypeDescriptor) values.getTypeDescriptor())
.getComponentTypeDescriptor())))
.setArguments(values)
.build();
}
/** Create a call to Enums.getValueFromNameAndMap. */
public static Expression createEnumsGetValueFromNameAndMapMethodCall(
TypeDescriptor enumTypeDescriptor,
Expression nameParameter,
Expression namesToValuesMapParameter) {
MethodDescriptor getValueMethodDescriptor =
TypeDescriptors.get()
.javaemulInternalEnums
.getMethodDescriptorByName("getValueFromNameAndMap");
// getValueFromNameAndMap is parameterized by T extends Enum, so specialize the method to the
// right enum type.
TypeVariable enumType = getValueMethodDescriptor.getTypeParameterTypeDescriptors().get(0);
return MethodCall.Builder.from(
getValueMethodDescriptor.specializeTypeVariables(
ImmutableMap.of(enumType, enumTypeDescriptor)))
.setArguments(nameParameter, namesToValuesMapParameter)
.build();
}
/** Create a call to Enums.[boxingMethod] */
public static Expression createEnumsBoxMethodCall(Expression value) {
TypeDescriptor valueTypeDescriptor = value.getTypeDescriptor();
String boxingMethodName =
valueTypeDescriptor.getJsEnumInfo().supportsComparable() ? "boxComparable" : "box";
MethodDescriptor boxingMethod =
TypeDescriptors.get().javaemulInternalEnums.getMethodDescriptorByName(boxingMethodName);
// boxing operations are parameterized by the JsEnum type, so specialize the method to the
// right type.
TypeVariable type = boxingMethod.getTypeParameterTypeDescriptors().get(0);
return MethodCall.Builder.from(
boxingMethod.specializeTypeVariables(ImmutableMap.of(type, valueTypeDescriptor)))
.setArguments(value, valueTypeDescriptor.getMetadataConstructorReference())
.build();
}
/** Create a call to Enums.unbox. */
public static Expression createEnumsUnboxMethodCall(Expression expression) {
return createEnumsMethodCall("unbox", expression);
}
public static Expression createEnumsMethodCall(String unbox, Expression... arguments) {
MethodDescriptor methodDescriptor =
TypeDescriptors.get().javaemulInternalEnums.getMethodDescriptorByName(unbox);
return MethodCall.Builder.from(methodDescriptor).setArguments(arguments).build();
}
/** Create a call to an Equality method. */
public static MethodCall createEqualityMethodCall(String methodName, Expression... arguments) {
return createEqualityMethodCall(methodName, Arrays.asList(arguments));
}
/** Create a call to an Equality method. */
public static MethodCall createEqualityMethodCall(String methodName, List<Expression> arguments) {
return createRuntimeMethodCall(
BootstrapType.NATIVE_EQUALITY.getDescriptor(), methodName, arguments);
}
/** Create a call to an Exceptions method. */
public static MethodCall createExceptionsMethodCall(String methodName, Expression... arguments) {
return MethodCall.Builder.from(
TypeDescriptors.get().javaemulInternalExceptions.getMethodDescriptorByName(methodName))
.setArguments(Arrays.asList(arguments))
.build();
}
/** Create a call to InternalPreconditions.checkNotNull method. */
public static MethodCall createCheckNotNullCall(Expression argument) {
return MethodCall.Builder.from(
TypeDescriptors.get()
.javaemulInternalPreconditions
.getMethodDescriptor("checkNotNull", TypeDescriptors.get().javaLangObject))
.setArguments(argument)
.build();
}
/** Create a call to an LongUtils method. */
public static MethodCall createLongUtilsMethodCall(String methodName, Expression... arguments) {
return createLongUtilsMethodCall(methodName, Arrays.asList(arguments));
}
/** Create a call to an LongUtils method. */
public static MethodCall createLongUtilsMethodCall(
String methodName, List<Expression> arguments) {
return createRuntimeMethodCall(BootstrapType.LONG_UTILS.getDescriptor(), methodName, arguments);
}
/** Create a call to a LongUtils method. */
public static Expression createLongUtilsMethodCall(
String name,
TypeDescriptor returnTypeDescriptor,
Expression leftOperand,
Expression rightOperand) {
MethodDescriptor longUtilsMethodDescriptor =
MethodDescriptor.newBuilder()
.setJsInfo(JsInfo.RAW)
.setStatic(true)
.setEnclosingTypeDescriptor(BootstrapType.LONG_UTILS.getDescriptor())
.setName(name)
.setParameterTypeDescriptors(PrimitiveTypes.LONG, PrimitiveTypes.LONG)
.setReturnTypeDescriptor(returnTypeDescriptor)
.build();
// LongUtils.someOperation(leftOperand, rightOperand);
return MethodCall.Builder.from(longUtilsMethodDescriptor)
.setArguments(leftOperand, rightOperand)
.build();
}
/** Create a call to a native Long method. */
public static MethodCall createNativeLongMethodCall(String methodName, Expression... arguments) {
return createNativeLongMethodCall(methodName, Arrays.asList(arguments));
}
/** Create a call to an native Long method. */
private static MethodCall createNativeLongMethodCall(
String methodName, List<Expression> arguments) {
return createRuntimeMethodCall(
BootstrapType.NATIVE_LONG.getDescriptor(), methodName, arguments);
}
public static Expression createMathImulMethodCall(
Expression leftOperand, Expression rightOperand) {
return MethodCall.Builder.from(
MethodDescriptor.newBuilder()
.setJsInfo(
JsInfo.newBuilder()
.setJsMemberType(JsMemberType.METHOD)
.setJsName("Math.imul")
.setJsNamespace(JsUtils.JS_PACKAGE_GLOBAL)
.build())
.setName("imul")
.setStatic(true)
.setNative(true)
.setEnclosingTypeDescriptor(TypeDescriptors.get().nativeObject)
.setParameterTypeDescriptors(PrimitiveTypes.INT, PrimitiveTypes.INT)
.setReturnTypeDescriptor(PrimitiveTypes.INT)
.build())
.setArguments(leftOperand, rightOperand)
.build();
}
/** Create a call to a Primitives method. */
public static MethodCall createPrimitivesMethodCall(String methodName, Expression argument) {
MethodDescriptor narrowMethodDescriptor =
TypeDescriptors.get().javaemulInternalPrimitives.getMethodDescriptorByName(methodName);
return MethodCall.Builder.from(narrowMethodDescriptor).setArguments(argument).build();
}
/** Create a call to the corresponding narrowing Primitives method. */
public static Expression createPrimitivesNarrowingMethodCall(
Expression expression, PrimitiveTypeDescriptor toTypeDescriptor) {
PrimitiveTypeDescriptor fromTypeDescriptor =
(PrimitiveTypeDescriptor) expression.getTypeDescriptor();
String methodName =
String.format(
"narrow%sTo%s",
toProperCase(fromTypeDescriptor.getSimpleSourceName()),
toProperCase(toTypeDescriptor.getSimpleSourceName()));
return createPrimitivesMethodCall(methodName, expression);
}
/** Create a call to the corresponding widening Primitives method. */
public static Expression createWideningPrimitivesMethodCall(
Expression expression, PrimitiveTypeDescriptor toTypeDescriptor) {
PrimitiveTypeDescriptor fromTypeDescriptor =
(PrimitiveTypeDescriptor) expression.getTypeDescriptor();
String methodName =
String.format(
"widen%sTo%s",
toProperCase(fromTypeDescriptor.getSimpleSourceName()),
toProperCase(toTypeDescriptor.getSimpleSourceName()));
return createPrimitivesMethodCall(methodName, expression);
}
/** Return the String with first letter capitalized. */
private static String toProperCase(String string) {
if (string.isEmpty()) {
return string;
}
return string.substring(0, 1).toUpperCase() + string.substring(1);
}
/** Create a call to a Util method. */
public static MethodCall createUtilMethodCall(String methodName, Expression... arguments) {
return createUtilMethodCall(methodName, Arrays.asList(arguments));
}
/** Create a call to an Util method. */
public static MethodCall createUtilMethodCall(String methodName, List<Expression> arguments) {
return createRuntimeMethodCall(
BootstrapType.NATIVE_UTIL.getDescriptor(), methodName, arguments);
}
private static final ThreadLocal<Map<TypeDescriptor, Map<String, MethodInfo>>>
runtimeMethodInfoByMethodNameByType =
ThreadLocal.withInitial(
() ->
ImmutableMap.<TypeDescriptor, Map<String, MethodInfo>>builder()
.put(
BootstrapType.ARRAYS.getDescriptor(),
// Arrays methods
ImmutableMap.<String, MethodInfo>builder()
.put(
"$castTo",
MethodInfo.newBuilder()
.setReturnType(TypeDescriptors.get().javaLangObjectArray)
.setParameters(
TypeDescriptors.get().javaLangObject,
TypeDescriptors.get().javaLangObject,
PrimitiveTypes.INT)
.build())
.put(
"$castToNative",
MethodInfo.newBuilder()
.setReturnType(TypeDescriptors.get().javaLangObjectArray)
.setParameters(TypeDescriptors.get().javaLangObject)
.build())
.put(
"$create",
MethodInfo.newBuilder()
.setReturnType(TypeDescriptors.get().javaLangObjectArray)
.setParameters(
TypeDescriptors.get().javaLangObjectArray,
TypeDescriptors.get().javaLangObject)
.build())
.put(
"$createNative",
MethodInfo.newBuilder()
.setReturnType(TypeDescriptors.get().javaLangObjectArray)
.setParameters(TypeDescriptors.get().javaLangObjectArray)
.build())
.put(
"$init",
MethodInfo.newBuilder()
.setReturnType(TypeDescriptors.get().javaLangObjectArray)
.setParameters(
TypeDescriptors.get().javaLangObjectArray,
TypeDescriptors.get().javaLangObject,
PrimitiveTypes.INT)
.setRequiredParameters(2)
.build())
.put(
"$instanceIsOfType",
MethodInfo.newBuilder()
.setReturnType(TypeDescriptors.get().javaLangBoolean)
.setParameters(
TypeDescriptors.get().javaLangObject,
TypeDescriptors.get().javaLangObject,
PrimitiveTypes.INT)
.build())
.put(
"$instanceIsOfNative",
MethodInfo.newBuilder()
.setReturnType(TypeDescriptors.get().javaLangBoolean)
.setParameters(TypeDescriptors.get().javaLangObject)
.build())
.put(
"$stampType",
MethodInfo.newBuilder()
.setReturnType(TypeDescriptors.get().javaLangObjectArray)
.setParameters(
TypeDescriptors.get().javaLangObjectArray,
TypeDescriptors.get().javaLangObject,
PrimitiveTypes.DOUBLE)
.build())
.build())
.put(
BootstrapType.NATIVE_UTIL.getDescriptor(),
// Util methods
ImmutableMap.<String, MethodInfo>builder()
.put(
"$assertClinit",
MethodInfo.newBuilder()
.setReturnType(PrimitiveTypes.VOID)
.setParameters(TypeDescriptors.get().javaLangObject)
.build())
.put(
"$makeLambdaFunction",
MethodInfo.newBuilder()
.setReturnType(TypeDescriptors.get().nativeFunction)
.setParameters(
TypeDescriptors.get().nativeFunction,
TypeDescriptors.get().javaLangObject,
TypeDescriptors.get().nativeFunction)
.build())
.put(
"$makeEnumName",
MethodInfo.newBuilder()
.setReturnType(TypeDescriptors.get().javaLangString)
.setParameters(TypeDescriptors.get().javaLangString)
.build())
.put(
"$setClassMetadata",
MethodInfo.newBuilder()
.setReturnType(PrimitiveTypes.VOID)
.setParameters(
TypeDescriptors.get().javaemulInternalConstructor,
TypeDescriptors.get().javaLangString)
.build())
.put(
"$setClassMetadataForInterface",
MethodInfo.newBuilder()
.setReturnType(PrimitiveTypes.VOID)
.setParameters(
TypeDescriptors.get().javaemulInternalConstructor,
TypeDescriptors.get().javaLangString)
.build())
.put(
"$setClassMetadataForEnum",
MethodInfo.newBuilder()
.setReturnType(PrimitiveTypes.VOID)
.setParameters(
TypeDescriptors.get().javaemulInternalConstructor,
TypeDescriptors.get().javaLangString)
.build())
.put(
"$synchronized",
MethodInfo.newBuilder()
.setReturnType(PrimitiveTypes.VOID)
.setParameters(TypeDescriptors.get().javaLangObject)
.build())
.build())
.put(
BootstrapType.NATIVE_EQUALITY.getDescriptor(),
// Util methods
ImmutableMap.<String, MethodInfo>builder()
.put(
"$same",
MethodInfo.newBuilder()
.setReturnType(PrimitiveTypes.BOOLEAN)
.setParameters(
TypeDescriptors.get().javaLangObject,
TypeDescriptors.get().javaLangObject)
.build())
.build())
.put(
BootstrapType.LONG_UTILS.getDescriptor(),
// LongUtils methods
ImmutableMap.<String, MethodInfo>builder()
.put(
"negate",
MethodInfo.newBuilder()
.setReturnType(PrimitiveTypes.LONG)
.setParameters(PrimitiveTypes.LONG)
.build())
.put(
"not",
MethodInfo.newBuilder()
.setReturnType(PrimitiveTypes.LONG)
.setParameters(PrimitiveTypes.LONG)
.build())
.build())
.put(
BootstrapType.NATIVE_LONG.getDescriptor(),
// goog.math.long methods
ImmutableMap.<String, MethodInfo>builder()
.put(
"fromInt",
MethodInfo.newBuilder()
.setReturnType(PrimitiveTypes.LONG)
.setParameters(PrimitiveTypes.INT)
.build())
.put(
"fromBits",
MethodInfo.newBuilder()
.setReturnType(PrimitiveTypes.LONG)
.setParameters(PrimitiveTypes.INT, PrimitiveTypes.INT)
.build())
.build())
.build());
/** Create a call to a J2cl runtime method. */
private static MethodCall createRuntimeMethodCall(
DeclaredTypeDescriptor vmTypeDescriptor, String methodName, List<Expression> arguments) {
MethodInfo methodInfo =
runtimeMethodInfoByMethodNameByType.get().get(vmTypeDescriptor).get(methodName);
checkNotNull(methodInfo, "%s#%s(%s)", vmTypeDescriptor, methodName, arguments);
List<TypeDescriptor> parameterTypeDescriptors = methodInfo.getParameters();
int requiredParameters = methodInfo.getRequiredParameters();
TypeDescriptor returnTypeDescriptor = methodInfo.getReturnType();
checkArgument(arguments.size() >= requiredParameters);
MethodDescriptor methodDescriptor =
MethodDescriptor.newBuilder()
.setEnclosingTypeDescriptor(vmTypeDescriptor)
.setJsInfo(JsInfo.RAW)
.setStatic(true)
.setName(methodName)
.setParameterTypeDescriptors(parameterTypeDescriptors.subList(0, arguments.size()))
.setReturnTypeDescriptor(returnTypeDescriptor)
.build();
// Use the raw type as the stamped leaf type. So that we use the upper bound of a generic type
// parameter type instead of the type parameter itself.
return MethodCall.Builder.from(methodDescriptor).setArguments(arguments).build();
}
@AutoValue
abstract static class MethodInfo {
public abstract TypeDescriptor getReturnType();
public abstract int getRequiredParameters();
public abstract ImmutableList<TypeDescriptor> getParameters();
public static Builder newBuilder() {
return new AutoValue_RuntimeMethods_MethodInfo.Builder();
}
@AutoValue.Builder
abstract static class Builder {
public abstract Builder setReturnType(TypeDescriptor returnType);
public abstract Builder setRequiredParameters(int requiredParameters);
public abstract Builder setParameters(TypeDescriptor... parameters);
public abstract MethodInfo autoBuild();
abstract OptionalInt getRequiredParameters();
abstract ImmutableList<TypeDescriptor> getParameters();
public MethodInfo build() {
if (!getRequiredParameters().isPresent()) {
setRequiredParameters(getParameters().size());
}
MethodInfo methodInfo = autoBuild();
checkArgument(
methodInfo.getRequiredParameters() >= 0
&& methodInfo.getRequiredParameters() <= methodInfo.getParameters().size());
return methodInfo;
}
}
}
}
| |
/*
* Copyright 2015 - 2017 Atlarge Research Team,
* operating at Technische Universiteit Delft
* and Vrije Universiteit Amsterdam, the Netherlands.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package science.atlarge.graphalytics.execution;
import akka.actor.ActorSystem;
import akka.actor.Props;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigValueFactory;
import org.apache.commons.configuration.Configuration;
import science.atlarge.graphalytics.configuration.ConfigurationUtil;
import science.atlarge.graphalytics.configuration.GraphalyticsExecutionException;
import science.atlarge.graphalytics.domain.benchmark.BenchmarkRun;
import science.atlarge.graphalytics.plugin.Plugin;
import science.atlarge.graphalytics.report.result.BenchmarkMetrics;
import science.atlarge.graphalytics.report.result.BenchmarkRunResult;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import science.atlarge.graphalytics.util.ProcessUtil;
import science.atlarge.graphalytics.util.TimeUtil;
/**
 * Actor-based service that drives a single benchmark run inside the runner process and reports
 * its life-cycle events (registration, execution, validation, failures and the final result)
 * back to the executor service.
 *
 * @author Wing Lung Ngai
 */
public class RunnerService extends MircoService {

    private static final Logger LOG = LogManager.getLogger();

    private static final String BENCHMARK_PROPERTIES_FILE = "benchmark.properties";
    private static final String BENCHMARK_RUNNER_PORT = "benchmark.runner.port";

    public static final String SERVICE_NAME = "runner-service";
    public static final String SERVICE_IP = "localhost";

    // The runner that performs the actual benchmark work; kept package-private for compatibility.
    BenchmarkRunner runner;

    /**
     * Creates the service, links it to the given runner, and immediately registers this runner
     * process with the executor service.
     */
    public RunnerService(BenchmarkRunner runner) {
        LOG.info("Benchmark runner service started.");
        this.runner = runner;
        runner.setService(this);
        LOG.info("Benchmark runner service registration started.");
        register();
        LOG.info("Benchmark runner service registration ended.");
    }

    /**
     * Boots the Akka actor system for the runner process and spawns the service actor.
     *
     * <p>Note: method name kept as-is (capitalized) for backward compatibility with callers.
     *
     * @param benchmarkRunner the runner that will execute the benchmark
     */
    public static void InitService(BenchmarkRunner benchmarkRunner) {
        Config config = defaultConfiguration();
        config = config.withValue("akka.remote.netty.tcp.port", ConfigValueFactory.fromAnyRef(getRunnerPort()));
        config = config.withValue("akka.remote.netty.tcp.hostname", ConfigValueFactory.fromAnyRef(SERVICE_IP));
        final ActorSystem system = ActorSystem.create(SERVICE_NAME, config);
        system.actorOf(Props.create(RunnerService.class, benchmarkRunner), SERVICE_NAME);
        LOG.debug("Started Graphalytics Runner Service");
    }

    /** Announces this runner process (benchmark id + process id) to the executor service. */
    private void register() {
        String masterAddress = getExecutorAddress();
        LOG.info(String.format("Register %s at %s.", runner.getBenchmarkId(), masterAddress));
        Integer processId = ProcessUtil.getProcessId();
        Notification notification = new Notification(
                runner.getBenchmarkId(),
                processId,
                Notification.Label.REGISTRATION);
        getContext().actorSelection(masterAddress).tell(notification, getSelf());
    }

    /** Notifies the executor that the benchmark result has been validated. */
    private void reportValidation() {
        String masterAddress = getExecutorAddress();
        LOG.info(String.format("Report validation for %s at %s.", runner.getBenchmarkId(), masterAddress));
        Notification notification = new Notification(
                runner.getBenchmarkId(),
                "Validated benchmark result.",
                Notification.Label.VALIDATION);
        getContext().actorSelection(masterAddress).tell(notification, getSelf());
    }

    /** Notifies the executor that the benchmark has been executed. */
    private void reportExecution() {
        String masterAddress = getExecutorAddress();
        LOG.info(String.format("Report execution %s at %s.", runner.getBenchmarkId(), masterAddress));
        Notification notification = new Notification(
                runner.getBenchmarkId(),
                "Executed benchmark.",
                Notification.Label.EXECUTION);
        getContext().actorSelection(masterAddress).tell(notification, getSelf());
    }

    /**
     * Notifies the executor that a phase of the benchmark failed, then waits briefly so the
     * message can be delivered before the process is torn down.
     */
    private void reportFailure(BenchmarkFailure failure) {
        String masterAddress = getExecutorAddress();
        LOG.info(String.format("Report failures (%s) of %s at %s.", failure, runner.getBenchmarkId(), masterAddress));
        Notification notification = new Notification(
                runner.getBenchmarkId(),
                failure,
                Notification.Label.FAILURE);
        getContext().actorSelection(masterAddress).tell(notification, getSelf());
        // Give the remote actor system time to deliver the failure before a possible exit.
        TimeUtil.waitFor(5);
    }

    /** Sends the summarized benchmark result to the executor service. */
    private void reportRetrievedResult(BenchmarkRunResult benchmarkRunResult) {
        String executorAddress = getExecutorAddress();
        LOG.info(String.format("Report benchmark result for %s at %s.", runner.getBenchmarkId(), executorAddress));
        getContext().actorSelection(executorAddress).tell(benchmarkRunResult, getSelf());
    }

    /** Builds the Akka address of the executor-service actor on the local host. */
    private String getExecutorAddress() {
        return String.format("akka.tcp://%s@%s:%s/user/%s",
                ExecutorService.SERVICE_NAME, SERVICE_IP, ExecutorService.getExecutorPort(), ExecutorService.SERVICE_NAME);
    }

    /**
     * Handles an incoming {@link RunSpecification}: runs the startup, execution, count,
     * validation and finalize phases in order, reporting progress or failure for each phase,
     * and terminates the process when done.
     */
    @Override
    public void onReceive(Object message) throws Exception {
        if (message instanceof RunSpecification) {
            RunSpecification runSpecification = (RunSpecification) message;
            BenchmarkRun benchmarkRun = runSpecification.getBenchmarkRun();

            LOG.info(String.format("The runner received benchmark specification %s.", benchmarkRun.getId()));
            LOG.info(String.format("The runner is executing benchmark %s.", benchmarkRun.getId()));

            // Startup phase: initialize all plugins first, then the runner itself.
            try {
                for (Plugin plugin : runner.getPlugins()) {
                    plugin.startup(runSpecification);
                }
                runner.startup(runSpecification);
            } catch (Exception e) {
                LOG.error("Failed to startup benchmark run.");
                reportFailure(BenchmarkFailure.INI);
                throw new GraphalyticsExecutionException("Benchmark run aborted.", e);
            }

            // Execution phase.
            try {
                boolean executed = runner.run(runSpecification);
                if (!executed) {
                    reportFailure(BenchmarkFailure.EXE);
                }
            } catch (Exception e) {
                LOG.error("Failed to execute benchmark run.");
                reportFailure(BenchmarkFailure.EXE);
                throw new GraphalyticsExecutionException("Benchmark run aborted.", e);
            }
            reportExecution();

            // Count phase: measure the benchmark output.
            try {
                boolean counted = runner.count(runSpecification);
                if (!counted) {
                    reportFailure(BenchmarkFailure.COM);
                }
            } catch (Exception e) {
                LOG.error("Failed to count benchmark output.");
                reportFailure(BenchmarkFailure.COM);
                throw new GraphalyticsExecutionException("Benchmark run aborted.", e);
            }

            // Validation phase.
            try {
                boolean validated = runner.validate(runSpecification);
                if (!validated) {
                    reportFailure(BenchmarkFailure.VAL);
                }
            } catch (Exception e) {
                LOG.error("Failed to validate benchmark run.");
                reportFailure(BenchmarkFailure.VAL);
                throw new GraphalyticsExecutionException("Benchmark run aborted.", e);
            }
            reportValidation();

            // Finalize phase: collect metrics, let plugins enrich them, and report the result.
            try {
                BenchmarkMetrics metrics = runner.finalize(runSpecification);
                for (Plugin plugin : runner.getPlugins()) {
                    metrics = plugin.finalize(runSpecification, metrics);
                }
                BenchmarkRunResult benchmarkRunResult = runner.summarize(benchmarkRun, metrics);
                reportRetrievedResult(benchmarkRunResult);
            } catch (Exception e) {
                // Log before reporting, consistent with the other phases above.
                LOG.error("Failed to finalize benchmark.");
                reportFailure(BenchmarkFailure.MET);
                throw new GraphalyticsExecutionException("Benchmark run aborted.", e);
            }

            // Allow the result message to be flushed, then shut down the runner process.
            TimeUtil.waitFor(1);
            terminate();
            System.exit(0);
        }
    }

    /** Reads the runner port from the benchmark properties file. */
    public static Integer getRunnerPort() {
        Configuration configuration = ConfigurationUtil.loadConfiguration(BENCHMARK_PROPERTIES_FILE);
        return ConfigurationUtil.getInteger(configuration, BENCHMARK_RUNNER_PORT);
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Luca Domenico Milanesio, Tom Huybrechts
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import hudson.DescriptorExtensionList;
import hudson.Extension;
import hudson.ExtensionPoint;
import hudson.AbortException;
import hudson.cli.CLICommand;
import hudson.util.DescriptorList;
import java.io.Serializable;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.CheckForNull;
import jenkins.model.Jenkins;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.export.ExportedBean;
/**
* Defines a parameter for a build.
*
* <p>
* In Jenkins, a user can configure a job to require parameters for a build.
* For example, imagine a test job that takes the bits to be tested as a parameter.
*
* <p>
* The actual meaning and the purpose of parameters are entirely up to users, so
* what the concrete parameter implementation is pluggable. Write subclasses
* in a plugin and put {@link Extension} on the descriptor to register them.
*
* <p>
* Three classes are used to model build parameters. First is the
* {@link ParameterDescriptor}, which tells Hudson what kind of implementations are
* available. From {@link ParameterDescriptor#newInstance(StaplerRequest, JSONObject)},
* Hudson creates {@link ParameterDefinition}s based on the job configuration.
* For example, if the user defines two string parameters "database-type" and
* "appserver-type", we'll get two {@link StringParameterDefinition} instances
* with their respective names.
*
* <p>
* When a job is configured with {@link ParameterDefinition} (or more precisely,
* {@link ParametersDefinitionProperty}, which in turns retains {@link ParameterDefinition}s),
* user would have to enter the values for the defined build parameters.
* The {@link #createValue(StaplerRequest, JSONObject)} method is used to convert this
* form submission into {@link ParameterValue} objects, which are then accessible
* during a build.
*
*
*
* <h2>Persistence</h2>
* <p>
* Instances of {@link ParameterDefinition}s are persisted into job {@code config.xml}
* through XStream.
*
*
* <h2>Associated Views</h2>
* <h3>config.jelly</h3>
* {@link ParameterDefinition} class uses {@code config.jelly} to contribute a form
* fragment in the job configuration screen. Values entered there are fed back to
* {@link ParameterDescriptor#newInstance(StaplerRequest, JSONObject)} to create {@link ParameterDefinition}s.
*
* <h3>index.jelly</h3>
* The {@code index.jelly} view contributes a form fragment in the page where the user
* enters actual values of parameters for a build. The result of this form submission
* is then fed to {@link ParameterDefinition#createValue(StaplerRequest, JSONObject)} to
* create {@link ParameterValue}s.
*
* TODO: what Jelly pages does this object need for rendering UI?
* TODO: {@link ParameterValue} needs to have some mechanism to expose values to the build
* @see StringParameterDefinition
*/
@ExportedBean(defaultVisibility=3)
public abstract class ParameterDefinition implements
        Describable<ParameterDefinition>, ExtensionPoint, Serializable {

    private final String name;

    private final String description;

    /**
     * Creates a parameter definition with no description.
     *
     * @param name the name of the parameter, as shown to the user
     */
    public ParameterDefinition(String name) {
        this(name, null);
    }

    /**
     * Creates a parameter definition.
     *
     * @param name the name of the parameter, as shown to the user
     * @param description human-readable description of the parameter, or null
     */
    public ParameterDefinition(String name, String description) {
        this.name = name;
        this.description = description;
    }

    /**
     * Create a new instance of this parameter definition and use the passed
     * parameter value as the default value.
     *
     * @since 1.405
     */
    public ParameterDefinition copyWithDefaultValue(ParameterValue defaultValue) {
        // By default, just return this again
        return this;
    }

    @Exported
    public String getType() {
        return this.getClass().getSimpleName();
    }

    @Exported
    public String getName() {
        return name;
    }

    @Exported
    public String getDescription() {
        return description;
    }

    /**
     * return parameter description, applying the configured MarkupFormatter for jenkins instance.
     * @since 1.521
     */
    public String getFormattedDescription() {
        try {
            return Jenkins.getInstance().getMarkupFormatter().translate(description);
        } catch (IOException e) {
            // Include the exception so the stack trace is not silently discarded.
            LOGGER.log(Level.WARNING,
                    "failed to translate description using configured markup formatter", e);
            return "";
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ParameterDescriptor getDescriptor() {
        return (ParameterDescriptor) Jenkins.getInstance().getDescriptorOrDie(getClass());
    }

    /**
     * Create a parameter value from a form submission.
     *
     * <p>
     * This method is invoked when the user fills in the parameter values in the HTML form
     * and submits it to the server.
     */
    @CheckForNull
    public abstract ParameterValue createValue(StaplerRequest req, JSONObject jo);

    /**
     * Create a parameter value from a GET with query string.
     * If no value is available in the request, it returns a default value if possible, or null.
     *
     * <p>
     * Unlike {@link #createValue(StaplerRequest, JSONObject)}, this method is intended to support
     * the programmatic POST-ing of the build URL. This form is less expressive (as it doesn't support
     * the tree form), but it's more scriptable.
     *
     * <p>
     * If a {@link ParameterDefinition} can't really support this mode of creating a value,
     * you may just always return null.
     *
     * @throws IllegalStateException
     *      If the parameter is deemed required but was missing in the submission.
     */
    @CheckForNull
    public abstract ParameterValue createValue(StaplerRequest req);

    /**
     * Create a parameter value from the string given in the CLI.
     *
     * @param command
     *      This is the command that got the parameter.
     * @throws AbortException
     *      If the CLI processing should be aborted. Hudson will report the error message
     *      without stack trace, and then exits this command. Useful for graceful termination.
     * @throws RuntimeException
     *      All the other exceptions cause the stack trace to be dumped, and then
     *      the command exits with an error code.
     * @since 1.334
     */
    @CheckForNull
    public ParameterValue createValue(CLICommand command, String value) throws IOException, InterruptedException {
        throw new AbortException("CLI parameter submission is not supported for the "+getClass()+" type. Please file a bug report for this");
    }

    /**
     * Returns default parameter value for this definition.
     *
     * @return default parameter value or null if no defaults are available
     * @since 1.253
     */
    @CheckForNull
    @Exported
    public ParameterValue getDefaultParameterValue() {
        return null;
    }

    /**
     * Returns all the registered {@link ParameterDefinition} descriptors.
     */
    public static DescriptorExtensionList<ParameterDefinition,ParameterDescriptor> all() {
        return Jenkins.getInstance().getDescriptorList(ParameterDefinition.class);
    }

    /**
     * A list of available parameter definition types
     * @deprecated as of 1.286
     *      Use {@link #all()} for read access, and {@link Extension} for registration.
     */
    @Deprecated
    public static final DescriptorList<ParameterDefinition> LIST = new DescriptorList<>(ParameterDefinition.class);

    public abstract static class ParameterDescriptor extends
            Descriptor<ParameterDefinition> {

        protected ParameterDescriptor(Class<? extends ParameterDefinition> klazz) {
            super(klazz);
        }

        /**
         * Infers the type of the corresponding {@link ParameterDescriptor} from the outer class.
         * This version works when you follow the common convention, where a descriptor
         * is written as the static nested class of the describable class.
         *
         * @since 1.278
         */
        protected ParameterDescriptor() {
        }

        public String getValuePage() {
            return getViewPage(clazz, "index.jelly");
        }

        @Override
        public String getDisplayName() {
            return "Parameter";
        }
    }

    private static final Logger LOGGER = Logger.getLogger(ParameterDefinition.class.getName());
}
| |
package vandy.mooc.view;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import android.app.Activity;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.os.Bundle;
import android.preference.EditTextPreference;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import vandy.mooc.R;
import vandy.mooc.common.Utils;
public class SettingsActivity extends Activity
{
    /** SharedPreferences key for the transfer protocol selection (ListPreference). */
    public static final String KEY_PREFERENCE_PROTOCOL =
            "pref_key_protocol";
    /** SharedPreferences key for the server IP address (EditTextPreference). */
    public static final String KEY_PREFERENCE_IP_ADDRESS =
            "pref_key_ip_address";
    /** SharedPreferences key for the server port number (EditTextPreference). */
    public static final String KEY_PREFERENCE_PORT =
            "pref_key_port";
    /** SharedPreferences key for the login user name (EditTextPreference). */
    public static final String KEY_PREFERENCE_USER_NAME =
            "pref_key_username";
    /** SharedPreferences key for the login password (EditTextPreference). */
    public static final String KEY_PREFERENCE_PASSWORD =
            "pref_key_password";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Display the settings fragment as the activity's main content.
        getFragmentManager()
                .beginTransaction()
                .replace(android.R.id.content, new SettingsFragment())
                .commit();
    }

    /**
     * Preference UI fragment. Validates the IP address and port entries and
     * mirrors each preference's current value into its summary line.
     *
     * <p>Declared {@code public static}: the framework re-instantiates
     * fragments reflectively (e.g. on state restore), which requires a
     * public static nested class with a no-argument constructor. The
     * previous non-static inner declaration fails with
     * {@code Fragment.InstantiationException} in that path.
     */
    public static class SettingsFragment extends PreferenceFragment
            implements OnSharedPreferenceChangeListener {

        // IPv4 dotted-quad pattern; each octet restricted to 0-255.
        private static final String IPADDRESS_PATTERN =
                "^([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
                "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
                "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
                "([01]?\\d\\d?|2[0-4]\\d|25[0-5])$";

        private Pattern pattern;
        private Matcher matcher;

        /** Required public no-arg constructor for framework instantiation. */
        public SettingsFragment() {
        }

        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            setRetainInstance(true);
            pattern = Pattern.compile(IPADDRESS_PATTERN);
            // Load the preferences from an XML resource.
            addPreferencesFromResource(R.xml.preferences);
            // getActivity() is non-null here: onAttach() has already run.
            SharedPreferences prefs =
                    PreferenceManager.getDefaultSharedPreferences(
                            getActivity().getApplicationContext());
            // Seed every summary with the currently stored value.
            setPreferenceSummary(prefs, KEY_PREFERENCE_PROTOCOL);
            setPreferenceSummary(prefs, KEY_PREFERENCE_IP_ADDRESS);
            setPreferenceSummary(prefs, KEY_PREFERENCE_PORT);
            setPreferenceSummary(prefs, KEY_PREFERENCE_USER_NAME);
            setPreferenceSummary(prefs, KEY_PREFERENCE_PASSWORD);
        }

        @Override
        public void onResume() {
            super.onResume();
            // Track edits while the fragment is visible.
            getPreferenceScreen().getSharedPreferences()
                    .registerOnSharedPreferenceChangeListener(this);
        }

        @Override
        public void onPause() {
            super.onPause();
            getPreferenceScreen().getSharedPreferences()
                    .unregisterOnSharedPreferenceChangeListener(this);
        }

        @Override
        public void onSharedPreferenceChanged(SharedPreferences sharedPrefs, String key) {
            setPreferenceSummary(sharedPrefs, key);
        }

        /**
         * Validates the value stored under {@code key} and updates the
         * preference's summary; invalid IP/port entries are replaced with a
         * safe default.
         *
         * <p>Note: writing a default back re-fires
         * {@link #onSharedPreferenceChanged} for the same key; the rewritten
         * value always validates, so the recursion ends after one extra pass.
         */
        private void setPreferenceSummary(SharedPreferences prefs, String key) {
            SharedPreferences.Editor editor = prefs.edit();
            Preference pref = findPreference(key);
            if (TextUtils.equals(key, KEY_PREFERENCE_PROTOCOL)) {
                ListPreference lp = (ListPreference) pref;
                pref.setSummary(lp.getEntry());
            } else if (TextUtils.equals(key, KEY_PREFERENCE_IP_ADDRESS)) {
                EditTextPreference etp = (EditTextPreference) pref;
                String ip = etp.getText();
                if (TextUtils.isEmpty(ip)) {
                    editor.putString(KEY_PREFERENCE_IP_ADDRESS, "0.0.0.0");
                    etp.setDefaultValue("0.0.0.0");
                    Utils.showToast(getActivity(), "IP Address cannot be empty");
                } else if (!validateIp(ip)) {
                    editor.putString(KEY_PREFERENCE_IP_ADDRESS, "0.0.0.0");
                    etp.setDefaultValue("0.0.0.0");
                    Utils.showToast(getActivity(), "IP Address is invalid");
                } else {
                    pref.setSummary(etp.getText());
                }
            } else if (TextUtils.equals(key, KEY_PREFERENCE_PORT)) {
                EditTextPreference etp = (EditTextPreference) pref;
                String portNo = etp.getText();
                if (TextUtils.isEmpty(portNo)) {
                    editor.putString(KEY_PREFERENCE_PORT, "8080");
                    etp.setDefaultValue("8080");
                    Utils.showToast(getActivity(), "Port number cannot be empty");
                } else if (!validatePortNo(portNo)) {
                    editor.putString(KEY_PREFERENCE_PORT, "8080");
                    etp.setDefaultValue("8080");
                    Utils.showToast(getActivity(), "Port number is invalid");
                } else {
                    pref.setSummary(etp.getText());
                }
            } else if (TextUtils.equals(key, KEY_PREFERENCE_USER_NAME)) {
                EditTextPreference etp = (EditTextPreference) pref;
                String userName = etp.getText();
                if (TextUtils.isEmpty(userName)) {
                    editor.putString(KEY_PREFERENCE_USER_NAME, "admin");
                    etp.setDefaultValue("admin");
                    Utils.showToast(getActivity(), "UserName cannot be empty");
                } else {
                    pref.setSummary(etp.getText());
                }
            } else if (TextUtils.equals(key, KEY_PREFERENCE_PASSWORD)) {
                EditTextPreference etp = (EditTextPreference) pref;
                String password = etp.getText();
                if (TextUtils.isEmpty(password)) {
                    editor.putString(KEY_PREFERENCE_PASSWORD, "pass");
                    etp.setDefaultValue("pass");
                    Utils.showToast(getActivity(), "Password cannot be empty");
                } else {
                    pref.setSummary(etp.getText());
                }
            }
            // apply() persists asynchronously; commit() would block the UI
            // thread on disk I/O with no benefit here.
            editor.apply();
        }

        /**
         * Validate an IPv4 address against the precompiled dotted-quad pattern.
         *
         * @param ip candidate address string
         * @return true for a valid IPv4 address, false otherwise
         */
        public boolean validateIp(final String ip) {
            matcher = pattern.matcher(ip);
            return matcher.matches();
        }

        /**
         * Validate a port number: digits only (no sign or decimal point) and
         * within the usable TCP/UDP range 1-65535. The previous version
         * accepted any digit string, so e.g. "99999" passed as valid.
         *
         * @param portNo candidate port string
         * @return true if the string is a usable port number
         */
        public boolean validatePortNo(String portNo) {
            if (!portNo.matches("\\d+")) {
                return false;
            }
            try {
                int port = Integer.parseInt(portNo);
                return port >= 1 && port <= 65535;
            } catch (NumberFormatException e) {
                // More digits than an int can hold, e.g. "99999999999".
                return false;
            }
        }
    }
}
| |
/*
Copyright (c) 2009, 2012 Paul Richards <paul.richards@gmail.com>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package algorithmx;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Stack;
/**
An implementation of the "DancingLinks" algorithm:
http://en.wikipedia.org/wiki/Dancing_Links
*/
public final class DancingLinks {

    /**
     Handle to a sparse boolean matrix stored as circular doubly-linked
     lists of columns and one-bit nodes (the "dancing links" structure).
     */
    public static final class MatrixHeader {
        /**
         First column in matrix, or null if matrix is empty.
         */
        private ColumnHeader fRootColumnHeader;
    }

    /**
     Undo hook: the remove* operations push each removed element onto a
     stack; popping the stack and calling insert() restores the elements
     in reverse order, which undoes the removals exactly.
     */
    public static interface IInsertable
    {
        public void insert();
    }

    /**
     There is one ColumnHeader per column in the matrix. They form a
     circular doubly-linked list so that all columns can be easily enumerated.
     */
    private static final class ColumnHeader implements IInsertable {
        /**
         Previous column.
         */
        private ColumnHeader fLeft;
        /**
         Next column.
         */
        private ColumnHeader fRight;
        /**
         First one-bit in column or null if column is empty.
         */
        private Node fRootNode;
        /**
         Total number of one-bits in column.
         (Size of doubly-linked list which fRoot is a part of.)
         */
        private int fOnesCount;
        /**
         Matrix this column belongs to.
         */
        private MatrixHeader fMatrixHeader;

        // Re-links this column into the matrix: undo of removeColumnHeader.
        public void insert() {
            insertColumnHeader(this);
        }
    }

    /**
     Represents a single one bit in the sparse matrix.
     */
    private static final class Node implements IInsertable {
        Node(int rowNumber) {
            this.fRowNumber = rowNumber;
        }
        // Row index of this one-bit; row numbers are what solutions report.
        final int fRowNumber;
        /**
         Next one-bit above in this column.
         (All four links start self-referential: a fresh node is a
         one-element circular list until it is threaded in.)
         */
        private Node fUp = this;
        /**
         Next one-bit below in this column
         */
        private Node fDown = this;
        /**
         Next one-bit to the left in this row.
         */
        private Node fLeft = this;
        /**
         Next one-bit to the right in this row.
         */
        private Node fRight = this;
        /**
         Column this one-bit belongs to.
         */
        private ColumnHeader fColumnHeader;

        // Re-links this node into its row and column: undo of removeNode.
        public void insert() {
            insertNode(this);
        }
    }

    /**
     Builds a SparseBinaryMatrix from a dense boolean matrix.

     @param matrix non-empty rectangular matrix (all rows equal length)
     @return sparse copy with a one-bit wherever matrix[row][column] is true
     @throws IllegalArgumentException if the input is empty or ragged
     */
    public static SparseBinaryMatrix constructFromDenseMatrix(final boolean[][] matrix) {
        if (matrix.length == 0 || matrix[0].length == 0) {
            throw new IllegalArgumentException("Must be non-empty.");
        }
        final int rowCount = matrix.length;
        final int columnCount = matrix[0].length;
        for (int row = 0; row < rowCount; row++) {
            if (matrix[row].length != columnCount) {
                throw new IllegalArgumentException("Rows must be of equal length.");
            }
        }
        SparseBinaryMatrix result = new SparseBinaryMatrix(rowCount, columnCount);
        for (int row = 0; row < rowCount; row++) {
            for (int column = 0; column < columnCount; column++) {
                if (matrix[row][column]) {
                    result.setBit(new SparseBinaryMatrix.BitLocation(row, column), true);
                }
            }
        }
        return result;
    }

    /**
     Builds the linked-list (dancing links) representation of a sparse matrix:
     a circular list of column headers, with each one-bit threaded into both
     its column list and its row list.
     */
    private static MatrixHeader constructFromSparseMatrix(final SparseBinaryMatrix sparseMatrix) {
        MatrixHeader result = new MatrixHeader();
        ColumnHeader[] headers = new ColumnHeader[sparseMatrix.columnCount()];
        for (int col = 0; col < sparseMatrix.columnCount(); col++) {
            headers[col] = new ColumnHeader();
            headers[col].fMatrixHeader = result;
        }
        // Close the circular doubly-linked list of columns (modular neighbors).
        for (int col = 0; col < sparseMatrix.columnCount(); col++) {
            headers[col].fLeft = headers[(col + sparseMatrix.columnCount() - 1) % sparseMatrix.columnCount()];
            headers[col].fRight = headers[(col + sparseMatrix.columnCount() + 1) % sparseMatrix.columnCount()];
        }
        // First node seen in each row anchors that row's circular list.
        Node[] rowRootNodes = new Node[sparseMatrix.rowCount()];
        for (SparseBinaryMatrix.BitLocation bit: sparseMatrix.allOnes()) {
            Node node = new Node(bit.fRow);
            node.fColumnHeader = headers[bit.fColumn];
            // Point the new node at the column's tail/head; insertNode below
            // then fixes the neighbors' back-links and the ones count.
            if (node.fColumnHeader.fRootNode != null) {
                node.fUp = node.fColumnHeader.fRootNode.fUp; // Last node in column
                node.fDown = node.fColumnHeader.fRootNode; // First node in column
            }
            if (rowRootNodes[bit.fRow] == null) {
                rowRootNodes[bit.fRow] = node;
            } else {
                node.fLeft = rowRootNodes[bit.fRow].fLeft; // Last node in row
                node.fRight = rowRootNodes[bit.fRow]; // First node in row
            }
            insertNode(node);
        }
        result.fRootColumnHeader = headers[0];
        return result;
    }

    /**
     Unlinks a node from its row and column lists. The node itself keeps its
     own four pointers, so a later insert() restores it in place — this is
     the core "dancing links" trick.
     */
    private static void removeNode(Node node) {
        // Sanity check: neighbors must currently point back at this node.
        if (node.fUp.fDown != node ||
            node.fDown.fUp != node ||
            node.fLeft.fRight != node ||
            node.fRight.fLeft != node) {
            throw new IllegalStateException("Node is not part of list");
        }
        node.fUp.fDown = node.fDown;
        node.fDown.fUp = node.fUp;
        node.fLeft.fRight = node.fRight;
        node.fRight.fLeft = node.fLeft;
        node.fColumnHeader.fOnesCount --;
        // If this node anchored the column, advance the anchor; when the
        // node was the only entry, fDown still points to itself → column empty.
        if (node.fColumnHeader.fRootNode == node) {
            node.fColumnHeader.fRootNode = node.fDown;
            if (node.fColumnHeader.fRootNode == node) {
                node.fColumnHeader.fRootNode = null;
            }
        }
    }

    /**
     Re-links a node whose own pointers still describe its old position
     (exact inverse of removeNode).
     */
    private static void insertNode(Node node) {
        node.fUp.fDown = node;
        node.fDown.fUp = node;
        node.fLeft.fRight = node;
        node.fRight.fLeft = node;
        node.fColumnHeader.fOnesCount ++;
        if (node.fColumnHeader.fRootNode == null) {
            node.fColumnHeader.fRootNode = node;
        }
    }

    /**
     Unlinks an (already emptied) column from the matrix's column list.
     */
    private static void removeColumnHeader(ColumnHeader column) {
        if ((column.fRootNode == null) != (column.fOnesCount == 0)) {
            throw new IllegalStateException("ColumnHeader is out of sync");
        }
        if (column.fOnesCount != 0) {
            throw new IllegalStateException("Column must be empty before being removed");
        }
        if (column.fLeft.fRight != column ||
            column.fRight.fLeft != column) {
            throw new IllegalStateException("ColumnHeader is not part of list");
        }
        column.fLeft.fRight = column.fRight;
        column.fRight.fLeft = column.fLeft;
        // Same anchor-advancing dance as removeNode, for the column list.
        if (column.fMatrixHeader.fRootColumnHeader == column) {
            column.fMatrixHeader.fRootColumnHeader = column.fRight;
            if (column.fMatrixHeader.fRootColumnHeader == column) {
                column.fMatrixHeader.fRootColumnHeader = null;
            }
        }
    }

    /**
     Re-links a column header into the matrix (inverse of removeColumnHeader).
     */
    private static void insertColumnHeader(ColumnHeader column) {
        column.fLeft.fRight = column;
        column.fRight.fLeft = column;
        if (column.fMatrixHeader.fRootColumnHeader == null) {
            column.fMatrixHeader.fRootColumnHeader = column;
        }
    }

    /**
     Removes all nodes in a row. May leave behind empty columns.
     Removed nodes are pushed onto undoStack (in removal order) so that
     popping and insert()-ing restores them.
     */
    private static void removeRow(Node node, Stack<IInsertable> undoStack) {
        // Collect first, then unlink: removing while walking would break
        // the traversal of the circular row list.
        List<Node> nodesToRemove = new ArrayList<Node>();
        Node i = node;
        do {
            nodesToRemove.add(i);
            i = i.fRight;
        } while (i != node);
        for (Node n: nodesToRemove) {
            removeNode(n);
            undoStack.push(n);
        }
    }

    /**
     Remove column and all rows with a one in this column.
     Returns a stack of nodes and ColumnHeaders which can be re-inserted to undo this
     operation.
     */
    private static void coverColumn(ColumnHeader column, Stack<IInsertable> undoStack) {
        if ((column.fRootNode == null) != (column.fOnesCount == 0)) {
            throw new IllegalStateException("ColumnHeader is out of sync");
        }
        // Each removeRow pulls at least the root node out of this column,
        // so the loop terminates when the column is empty.
        while (column.fRootNode != null) {
            removeRow(column.fRootNode, undoStack);
        }
        removeColumnHeader(column);
        undoStack.push(column);
    }

    /**
     Recursive exact-cover search. An empty matrix means every column is
     covered: partialSolution (a set of row numbers) is recorded. Otherwise
     branch on each row of the most constrained column, covering and then
     undoing before trying the next row.
     */
    private static void solve(final MatrixHeader matrixHeader, Set<Set<Integer>> solutions, Set<Integer> partialSolution) {
        if (matrixHeader.fRootColumnHeader == null) {
            // Copy: partialSolution keeps mutating as the search backtracks.
            solutions.add(new HashSet<Integer>(partialSolution));
        } else {
            // Pick the column with the fewest ones (Knuth's "S heuristic")
            // to keep the branching factor small.
            ColumnHeader selectedColumn = matrixHeader.fRootColumnHeader;
            {
                ColumnHeader header = matrixHeader.fRootColumnHeader;
                do {
                    if (header.fOnesCount < selectedColumn.fOnesCount) {
                        selectedColumn = header;
                    }
                    header = header.fRight;
                } while (header != matrixHeader.fRootColumnHeader);
            }
            // For each row that had a one in this column - try including it in the solution
            Node selectedRow = selectedColumn.fRootNode;
            for (int i = 0; i < selectedColumn.fOnesCount; i++) {
                // Include this row in the solution
                partialSolution.add(selectedRow.fRowNumber);
                // Cover all columns which have a 1 in the selected row
                Stack<IInsertable> undoStack = new Stack<IInsertable>();
                {
                    List<ColumnHeader> columnsToCover = new ArrayList<ColumnHeader>();
                    Node node = selectedRow;
                    do {
                        columnsToCover.add(node.fColumnHeader);
                        node = node.fRight;
                    } while (node != selectedRow);
                    for (ColumnHeader col: columnsToCover) {
                        coverColumn(col, undoStack);
                    }
                }
                solve(matrixHeader, solutions, partialSolution);
                // Pop in reverse order restores the structure exactly, so the
                // loop condition and selectedRow.fDown below see intact links.
                while (undoStack.empty() == false) {
                    final IInsertable obj = undoStack.pop();
                    obj.insert();
                }
                partialSolution.remove(selectedRow.fRowNumber);
                selectedRow = selectedRow.fDown;
            }
        }
    }

    /**
     Solves the exact-cover problem for the given matrix.

     @param matrix sparse 0/1 matrix; columns are constraints, rows are choices
     @return every set of row indices whose rows together contain exactly one
             one-bit in every column
     */
    public static Set<Set<Integer>> solve(SparseBinaryMatrix matrix)
    {
        MatrixHeader matrixHeader = constructFromSparseMatrix(matrix);
        Set<Integer> emptySolution = new HashSet<Integer>();
        Set<Set<Integer>> solutions = new HashSet<Set<Integer>>();
        solve(matrixHeader, solutions, emptySolution);
        return solutions;
    }
}
| |
/*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.haulmont.cuba.web.app.loginwindow;
import com.haulmont.cuba.core.global.GlobalConfig;
import com.haulmont.cuba.gui.UrlRouting;
import com.haulmont.cuba.gui.components.*;
import com.haulmont.cuba.security.auth.Credentials;
import com.haulmont.cuba.security.auth.LoginPasswordCredentials;
import com.haulmont.cuba.security.global.InternalAuthenticationException;
import com.haulmont.cuba.security.global.LoginException;
import com.haulmont.cuba.web.App;
import com.haulmont.cuba.web.Connection;
import com.haulmont.cuba.web.WebConfig;
import com.haulmont.cuba.web.app.login.LoginScreen;
import com.haulmont.cuba.web.auth.WebAuthConfig;
import com.haulmont.cuba.web.exception.ExceptionHandlers;
import com.haulmont.cuba.web.security.LoginScreenAuthDelegate;
import com.vaadin.server.ErrorEvent;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
/**
* Legacy base class for a controller of application Login window.
*
* @see LoginScreen
*/
public class AppLoginWindow extends AbstractWindow implements Window.TopLevelWindow {
private static final Logger log = LoggerFactory.getLogger(AppLoginWindow.class);
protected static final ThreadLocal<LoginScreenAuthDelegate.AuthInfo> authInfoThreadLocal = new ThreadLocal<>();
@Inject
protected GlobalConfig globalConfig;
@Inject
protected WebConfig webConfig;
@Inject
protected WebAuthConfig webAuthConfig;
@Inject
protected App app;
@Inject
protected Connection connection;
@Inject
protected LoginScreenAuthDelegate authDelegate;
@Inject
protected Image logoImage;
@Inject
protected TextField<String> loginField;
@Inject
protected CheckBox rememberMeCheckBox;
@Inject
protected PasswordField passwordField;
@Inject
protected LookupField<Locale> localesSelect;
@Inject
protected UrlRouting urlRouting;
@Override
public void init(Map<String, Object> params) {
super.init(params);
loginField.focus();
initPoweredByLink();
initLogoImage();
initDefaultCredentials();
initLocales();
initRememberMe();
initRememberMeLocalesBox();
}
@Override
protected void afterShow(AfterShowEvent event) {
super.afterShow(event);
doRememberMeLogin();
}
protected void initPoweredByLink() {
Component poweredByLink = getComponent("poweredByLink");
if (poweredByLink != null) {
poweredByLink.setVisible(webConfig.getLoginDialogPoweredByLinkVisible());
}
}
protected void initLocales() {
localesSelect.setOptionsMap(globalConfig.getAvailableLocales());
localesSelect.setValue(app.getLocale());
boolean localeSelectVisible = globalConfig.getLocaleSelectVisible();
localesSelect.setVisible(localeSelectVisible);
// if old layout is used
Component localesSelectLabel = getComponent("localesSelectLabel");
if (localesSelectLabel != null) {
localesSelectLabel.setVisible(localeSelectVisible);
}
localesSelect.addValueChangeListener(e -> {
Locale selectedLocale = e.getValue();
app.setLocale(selectedLocale);
authInfoThreadLocal.set(new LoginScreenAuthDelegate.AuthInfo(loginField.getValue(), passwordField.getValue(),
rememberMeCheckBox.getValue()));
try {
app.createTopLevelWindow();
} finally {
authInfoThreadLocal.set(null);
}
});
}
protected void initLogoImage() {
String loginLogoImagePath = messages.getMainMessage("loginWindow.logoImage", app.getLocale());
if (StringUtils.isBlank(loginLogoImagePath) || "loginWindow.logoImage".equals(loginLogoImagePath)) {
logoImage.setVisible(false);
} else {
logoImage.setSource(ThemeResource.class).setPath(loginLogoImagePath);
}
}
protected void initRememberMe() {
if (!webConfig.getRememberMeEnabled()) {
rememberMeCheckBox.setValue(false);
rememberMeCheckBox.setVisible(false);
}
}
protected void initRememberMeLocalesBox() {
Component rememberLocalesBox = getComponent("rememberLocalesBox");
if (rememberLocalesBox != null) {
rememberLocalesBox.setVisible(rememberMeCheckBox.isVisible() || localesSelect.isVisible());
}
}
protected void initDefaultCredentials() {
LoginScreenAuthDelegate.AuthInfo authInfo = authInfoThreadLocal.get();
if (authInfo != null) {
loginField.setValue(authInfo.getLogin());
passwordField.setValue(authInfo.getPassword());
rememberMeCheckBox.setValue(authInfo.getRememberMe());
localesSelect.focus();
authInfoThreadLocal.set(null);
return;
}
String defaultUser = webConfig.getLoginDialogDefaultUser();
if (!StringUtils.isBlank(defaultUser) && !"<disabled>".equals(defaultUser)) {
loginField.setValue(defaultUser);
} else {
loginField.setValue("");
}
String defaultPassw = webConfig.getLoginDialogDefaultPassword();
if (!StringUtils.isBlank(defaultPassw) && !"<disabled>".equals(defaultPassw)) {
passwordField.setValue(defaultPassw);
} else {
passwordField.setValue("");
}
}
protected void showUnhandledExceptionOnLogin(@SuppressWarnings("unused") Exception e) {
String title = messages.getMainMessage("loginWindow.loginFailed", app.getLocale());
String message = messages.getMainMessage("loginWindow.pleaseContactAdministrator", app.getLocale());
showNotification(title, message, NotificationType.ERROR);
}
protected void showLoginException(String message) {
String title = messages.getMainMessage("loginWindow.loginFailed", app.getLocale());
showNotification(title, message, NotificationType.ERROR);
}
public void login() {
doLogin();
setRememberMeCookies();
}
protected void setRememberMeCookies() {
if (Boolean.TRUE.equals(rememberMeCheckBox.getValue())) {
authDelegate.setRememberMeCookies(loginField.getValue());
} else {
authDelegate.resetRememberCookies();
}
}
protected void doLogin() {
String login = loginField.getValue();
String password = passwordField.getValue() != null ? passwordField.getValue() : "";
Map<String, Object> params = new HashMap<>(urlRouting.getState().getParams());
if (StringUtils.isEmpty(login) || StringUtils.isEmpty(password)) {
showNotification(messages.getMainMessage("loginWindow.emptyLoginOrPassword"), NotificationType.WARNING);
return;
}
try {
Locale selectedLocale = localesSelect.getValue();
app.setLocale(selectedLocale);
doLogin(new LoginPasswordCredentials(login, password, selectedLocale, params));
// locale could be set on the server
if (connection.getSession() != null) {
Locale loggedInLocale = connection.getSession().getLocale();
if (globalConfig.getLocaleSelectVisible()) {
app.addCookie(App.COOKIE_LOCALE, loggedInLocale.toLanguageTag());
}
}
} catch (InternalAuthenticationException e) {
log.error("Internal error during login", e);
showUnhandledExceptionOnLogin(e);
} catch (LoginException e) {
log.info("Login failed: {}", e.toString());
String message = StringUtils.abbreviate(e.getMessage(), 1000);
showLoginException(message);
} catch (Exception e) {
if (connection.isAuthenticated()) {
ExceptionHandlers handlers = app.getExceptionHandlers();
handlers.handle(new ErrorEvent(e));
} else {
log.warn("Unable to login", e);
showUnhandledExceptionOnLogin(e);
}
}
}
protected void doLogin(Credentials credentials) throws LoginException {
authDelegate.doLogin(credentials, localesSelect.isVisibleRecursive());
}
protected void doRememberMeLogin() {
authDelegate.doRememberMeLogin(localesSelect.isVisibleRecursive());
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.team751.vision;
import edu.wpi.first.wpilibj.image.BinaryImage;
import edu.wpi.first.wpilibj.image.NIVision;
import edu.wpi.first.wpilibj.image.NIVisionException;
import edu.wpi.first.wpilibj.image.ParticleAnalysisReport;
import edu.wpi.first.wpilibj.networktables2.type.NumberArray;
import org.team751.utils.Logger;
import org.team751.vision.utils.Rect;
import org.team751.vision.utils.Scores;
/**
*
* @author sambaumgarten
*/
public class VisionScoring {

    /**
     * Unpacks a flat NumberArray (8 values per rectangle) into Rect objects.
     *
     * @param rectanglesArray flat array; every 8 consecutive values describe
     *        one rectangle (consumed by the Rect(NumberArray, int) constructor)
     * @return one Rect per group of 8 values
     */
    public static Rect[] getRectsFromArray(NumberArray rectanglesArray) {
        int numOfRects = rectanglesArray.size() / 8;
        Rect rects[] = new Rect[numOfRects];
        for (int i = 0; i < numOfRects; i++) {
            rects[i] = new Rect(rectanglesArray, i);
        }
        // Each Rect wraps one rectangle's slice of the flat number array.
        return rects;
    }

    /**
     * Averages the 4 corner coordinates to get the center of mass.
     * points[] interleaves x,y pairs: even indices are x, odd are y.
     *
     * @param getY true for the y coordinate, false for x
     */
    public static double getCenterMass(Rect rectangle, boolean getY) {
        int modifier;
        modifier = getY ? 1 : 0;
        double val = 0;
        for (int i = 0; i < 4; i++) {
            val += rectangle.points[i * 2 + modifier];
        }
        return val / 4;
    }

    /**
     * Computes the axis-aligned bounding box of the 4 corners and stores it
     * in the Rect's bb* fields.
     *
     * NOTE(review): the initial values assume every coordinate lies in
     * [0, 1000] — TODO confirm against the camera resolution. In this
     * convention bbTop holds the LARGEST y and bbBottom the smallest
     * (see the comparisons below), so bbHeight = bbTop - bbBottom >= 0.
     */
    public static void populateRectangleBounds(Rect rectangle) {
        rectangle.bbLeft = 1000;
        rectangle.bbRight = 0;
        rectangle.bbTop = 0;
        rectangle.bbBottom = 1000;
        for (int i = 0; i < 4; i++) {
            if (rectangle.x[i] > rectangle.bbRight)
                rectangle.bbRight = rectangle.x[i];
            if (rectangle.x[i] < rectangle.bbLeft)
                rectangle.bbLeft = rectangle.x[i];
            if (rectangle.y[i] > rectangle.bbTop)
                rectangle.bbTop = rectangle.y[i];
            if (rectangle.y[i] < rectangle.bbBottom)
                rectangle.bbBottom = rectangle.y[i];
        }
        rectangle.bbWidth = rectangle.bbRight - rectangle.bbLeft;
        rectangle.bbHeight = rectangle.bbTop - rectangle.bbBottom;
    }

    /**
     * Returns the average length of one pair of opposite sides of the
     * quadrilateral.
     *
     * @param rectangle quadrilateral whose 4 corners are assumed to be in
     *        order (adjacent in the arrays means adjacent in the shape)
     * @param shortSide true for the shorter side pair, false for the longer
     * @return average of the two opposite side lengths in the chosen pair
     */
    public static double getRectangleLength(Rect rectangle, boolean shortSide) {
        // assume 4 coordinates are in order
        double lengths[] = new double[4];
        double averageLength = 0.0;
        // Side i connects corner i to corner (i+1) mod 4.
        for (int i = 0; i < 4; i++) {
            double firstX = rectangle.x[(i) % 4];
            double firstY = rectangle.y[(i) % 4];
            double secondX = rectangle.x[(i + 1) % 4];
            double secondY = rectangle.y[(i + 1) % 4];
            lengths[i] = Math.sqrt((firstX - secondX)*(firstX - secondX) + (firstY - secondY) * (firstY - secondY));
            averageLength += lengths[i];
        }
        averageLength /= 4.0;
        // Sides 0/2 are one opposite pair, 1/3 the other; compare side 0
        // against the mean to decide which pair is the short one.
        if (shortSide) {
            if (lengths[0] < averageLength) {
                return (lengths[0] + lengths[2]) / 2.0;
            }
            else {
                return (lengths[1] + lengths[3]) / 2.0;
            }
        }
        else {
            if (lengths[0] > averageLength) {
                return (lengths[0] + lengths[2]) / 2.0;
            }
            else {
                return (lengths[1] + lengths[3]) / 2.0;
            }
        }
    }

    /**
     * Fills all derived size fields of the Rect (bounding box, long/short
     * side lengths, center of mass) and optionally dumps the raw values
     * when the debug level is high enough.
     */
    public static void populateRectangleSizes(Rect rectangle) {
        populateRectangleBounds(rectangle);
        rectangle.rectLong = getRectangleLength(rectangle, false);
        rectangle.rectShort = getRectangleLength(rectangle, true);
        rectangle.center_mass_x = getCenterMass(rectangle, false);
        rectangle.center_mass_y = getCenterMass(rectangle, true);
        // Verbose corner dump; mixes System.out with Logger — intentional? TODO confirm.
        if (VisionConstants.DEBUG_LEVEL >= 4) {
            System.out.print("rect values:");
            for (int i = 0; i < 8; i++) {
                System.out.print(" " + rectangle.points[i]);
            }
            Logger.staticPrintln("");
            System.out.print("x values: ");
            for (int i = 0; i < 4; i++) {
                System.out.print(" " + rectangle.x[i]);
            }
            Logger.staticPrintln("");
            System.out.print("y values: ");
            for (int i = 0; i < 4; i++) {
                System.out.print(" " + rectangle.y[i]);
            }
        }
    }

    /**
     * Estimates the width (or height) of the quadrilateral by splitting the
     * 4 coordinates into a "low" and a "high" half around their mean and
     * averaging the halves' difference.
     *
     * @param height true to measure along y, false along x
     */
    public static double getRectangleSize(Rect rectangle, boolean height) {
        // We want to average the top values and bottom values (and left/right respectively)
        // This won't work if the image is *too* tilted
        double lowSum = 0;
        double highSum = 0;
        double sum = 0;
        int modifier = height ? 1 : 0;
        // cycle through the 4 points in the "rectangle"
        for (int i = 0; i < 4; i++) {
            sum += rectangle.points[i * 2 + modifier];
        }
        for (int i = 0; i < 4; i++) {
            double val = rectangle.points[i * 2 + modifier];
            if (val < sum / 4) {
                lowSum += val;
            }
            else {
                highSum += val;
            }
        }
        return (highSum - lowSum) / 2;
    }

    /**
     * Computes a score (0-100) comparing the aspect ratio to the ideal aspect ratio for the target. This method uses
     * the equivalent rectangle sides to determine aspect ratio as it performs better as the target gets skewed by moving
     * to the left or right. The equivalent rectangle is the rectangle with sides x and y where particle area= x*y
     * and particle perimeter= 2x+2y
     *
     * @param image The image containing the particle to score, needed to perform additional measurements
     * @param report The Particle Analysis Report for the particle, used for the width, height, and particle number
     * @param particleNumber The index of the particle to measure within the image
     * @param vertical True to compare against the vertical target's ratio, false for the horizontal target's
     * @return The aspect ratio score (0-100)
     */
    public static double scoreAspectRatio(BinaryImage image, ParticleAnalysisReport report, int particleNumber, boolean vertical) throws NIVisionException {
        double rectLong, rectShort, aspectRatio, idealAspectRatio;
        rectLong = NIVision.MeasureParticle(image.image, particleNumber, false, NIVision.MeasurementType.IMAQ_MT_EQUIVALENT_RECT_LONG_SIDE);
        rectShort = NIVision.MeasureParticle(image.image, particleNumber, false, NIVision.MeasurementType.IMAQ_MT_EQUIVALENT_RECT_SHORT_SIDE);
        idealAspectRatio = vertical ? (4.0/32) : (23.5/4); //Vertical reflector 4" wide x 32" tall, horizontal 23.5" wide x 4" tall
        //Divide width by height to measure aspect ratio
        if (report.boundingRectWidth > report.boundingRectHeight) {
            //particle is wider than it is tall, divide long by short
            aspectRatio = ratioToScore((rectLong/rectShort)/idealAspectRatio);
        } else {
            //particle is taller than it is wide, divide short by long
            aspectRatio = ratioToScore((rectShort/rectLong)/idealAspectRatio);
        }
        return aspectRatio;
    }

    /**
     * Aspect-ratio score (0-100) computed from a Rect's precomputed
     * long/short side lengths instead of NIVision measurements — used when
     * rectangles arrive over NetworkTables. Requires
     * populateRectangleSizes() to have been called on the Rect.
     *
     * @param vertical True to compare against the vertical target's ratio
     */
    public static double scoreAspectRatio(Rect rectangle, boolean vertical) {
        double aspectRatio, idealAspectRatio;
        idealAspectRatio = vertical ? (4.0/32) : (23.5/4); //Vertical reflector 4" wide x 32" tall, horizontal 23.5" wide x 4" tall
        if (VisionConstants.DEBUG_LEVEL >= 4)
            Logger.staticPrintln("width, height: " + rectangle.bbWidth + " " + rectangle.bbHeight);
        if (VisionConstants.DEBUG_LEVEL >= 4)
            Logger.staticPrintln("long, short: " + rectangle.rectLong + " " + rectangle.rectShort);
        //Divide width by height to measure aspect ratio
        if(rectangle.bbWidth > rectangle.bbHeight) {
            aspectRatio = ratioToScore((rectangle.rectLong/rectangle.rectShort)/idealAspectRatio);
        } else {
            aspectRatio = ratioToScore((rectangle.rectShort/rectangle.rectLong)/idealAspectRatio);
        }
        return aspectRatio;
    }

    /**
     * Compares scores to defined limits and returns true if the particle appears to be a target
     *
     * @param scores The structure containing the scores to compare
     * @param vertical True to check the vertical-target aspect score, false for the horizontal one
     *
     * @return True if the particle meets all limits, false otherwise
     */
    public static boolean scoreCompare(Scores scores, boolean vertical) {
        boolean isTarget = true;
        isTarget &= scores.rectangularity > VisionConstants.RECTANGULARITY_LIMIT;
        if(vertical) {
            isTarget &= scores.aspectRatioVertical > VisionConstants.ASPECT_RATIO_LIMIT;
        } else {
            isTarget &= scores.aspectRatioHorizontal > VisionConstants.ASPECT_RATIO_LIMIT;
        }
        return isTarget;
    }

    /**
     * Computes a score (0-100) estimating how rectangular the particle is by comparing the area of the particle
     * to the area of the bounding box surrounding it. A perfect rectangle would cover the entire bounding box.
     *
     * @param report The Particle Analysis Report for the particle to score
     * @return The rectangularity score (0-100), or 0 for a degenerate bounding box
     */
    public static double scoreRectangularity(ParticleAnalysisReport report) {
        if (report.boundingRectWidth*report.boundingRectHeight !=0) {
            return 100*report.particleArea/(report.boundingRectWidth*report.boundingRectHeight);
        } else {
            return 0;
        }
    }

    /**
     * Rectangularity score from raw measurements.
     *
     * NOTE(review): duplicates the static overload above but is an instance
     * method — consider making it static for consistency.
     */
    public double scoreRectangularity(double particleArea, double boundingRectWidth, double boundingRectHeight) {
        if (boundingRectWidth*boundingRectHeight !=0) {
            return 100*particleArea/(boundingRectWidth*boundingRectHeight);
        } else {
            return 0;
        }
    }

    /**
     * Converts a ratio with ideal value of 1 to a score. The resulting function is piecewise
     * linear going from (0,0) to (1,100) to (2,0) and is 0 for all inputs outside the range 0-2
     */
    public static double ratioToScore(double ratio) {
        return (Math.max(0, Math.min(100*(1-Math.abs(1-ratio)), 100)));
    }
}
| |
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hyracks.storage.am.btree.compressors;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.btree.api.IPrefixSlotManager;
import org.apache.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
import org.apache.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
import org.apache.hyracks.storage.am.btree.impls.FieldPrefixTupleReference;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameCompressor;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
public class FieldPrefixCompressor implements ITreeIndexFrameCompressor {
// Minimum ratio of uncompressed tuples to total tuples at which
// re-compression of a frame is considered worthwhile.
private final float ratioThreshold;
// Minimum number of tuples sharing a field prefix before that prefix is
// considered for compression.
private final int occurrenceThreshold;
// Per-field type traits of the tuples stored in the compressed frames.
private final ITypeTraits[] typeTraits;

/**
 * @param typeTraits per-field type traits for the stored tuples
 * @param ratioThreshold minimum uncompressed/total tuple ratio that triggers compression
 * @param occurrenceThreshold minimum number of prefix matches required to compress a partition
 */
public FieldPrefixCompressor(ITypeTraits[] typeTraits, float ratioThreshold, int occurrenceThreshold) {
    this.typeTraits = typeTraits;
    this.ratioThreshold = ratioThreshold;
    this.occurrenceThreshold = occurrenceThreshold;
}
@Override
public boolean compress(ITreeIndexFrame indexFrame, MultiComparator cmp) throws Exception {
BTreeFieldPrefixNSMLeafFrame frame = (BTreeFieldPrefixNSMLeafFrame) indexFrame;
int tupleCount = frame.getTupleCount();
if (tupleCount <= 0) {
frame.setPrefixTupleCount(0);
frame.setFreeSpaceOff(frame.getOrigFreeSpaceOff());
frame.setTotalFreeSpace(frame.getOrigTotalFreeSpace());
return false;
}
if (cmp.getKeyFieldCount() == 1) {
return false;
}
int uncompressedTupleCount = frame.getUncompressedTupleCount();
float ratio = (float) uncompressedTupleCount / (float) tupleCount;
if (ratio < ratioThreshold)
return false;
IBinaryComparator[] cmps = cmp.getComparators();
int fieldCount = typeTraits.length;
ByteBuffer buf = frame.getBuffer();
byte[] pageArray = buf.array();
IPrefixSlotManager slotManager = (IPrefixSlotManager) frame.getSlotManager();
// perform analysis pass
ArrayList<KeyPartition> keyPartitions = getKeyPartitions(frame, cmp, occurrenceThreshold);
if (keyPartitions.size() == 0)
return false;
// for each keyPartition, determine the best prefix length for
// compression, and count how many prefix tuple we would need in total
int totalSlotsNeeded = 0;
int totalPrefixBytes = 0;
for (KeyPartition kp : keyPartitions) {
for (int j = 0; j < kp.pmi.length; j++) {
int benefitMinusCost = kp.pmi[j].spaceBenefit - kp.pmi[j].spaceCost;
if (benefitMinusCost > kp.maxBenefitMinusCost) {
kp.maxBenefitMinusCost = benefitMinusCost;
kp.maxPmiIndex = j;
}
}
// ignore keyPartitions with no benefit and don't count bytes and slots needed
if (kp.maxBenefitMinusCost <= 0)
continue;
totalPrefixBytes += kp.pmi[kp.maxPmiIndex].prefixBytes;
totalSlotsNeeded += kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
}
// we use a greedy heuristic to solve this "knapsack"-like problem
// (every keyPartition has a space savings and a number of slots
// required, but the number of slots are constrained by MAX_PREFIX_SLOTS)
// we sort the keyPartitions by maxBenefitMinusCost / prefixSlotsNeeded
// and later choose the top MAX_PREFIX_SLOTS
int[] newPrefixSlots;
if (totalSlotsNeeded > FieldPrefixSlotManager.MAX_PREFIX_SLOTS) {
// order keyPartitions by the heuristic function
SortByHeuristic heuristicComparator = new SortByHeuristic();
Collections.sort(keyPartitions, heuristicComparator);
int slotsUsed = 0;
int numberKeyPartitions = -1;
for (int i = 0; i < keyPartitions.size(); i++) {
KeyPartition kp = keyPartitions.get(i);
slotsUsed += kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
if (slotsUsed > FieldPrefixSlotManager.MAX_PREFIX_SLOTS) {
numberKeyPartitions = i + 1;
slotsUsed -= kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
break;
}
}
newPrefixSlots = new int[slotsUsed];
// remove irrelevant keyPartitions and adjust total prefix bytes
while (keyPartitions.size() >= numberKeyPartitions) {
int lastIndex = keyPartitions.size() - 1;
KeyPartition kp = keyPartitions.get(lastIndex);
if (kp.maxBenefitMinusCost > 0)
totalPrefixBytes -= kp.pmi[kp.maxPmiIndex].prefixBytes;
keyPartitions.remove(lastIndex);
}
// re-order keyPartitions by prefix (corresponding to original order)
SortByOriginalRank originalRankComparator = new SortByOriginalRank();
Collections.sort(keyPartitions, originalRankComparator);
} else {
newPrefixSlots = new int[totalSlotsNeeded];
}
int[] newTupleSlots = new int[tupleCount];
// WARNING: our hope is that compression is infrequent
// here we allocate a big chunk of memory to temporary hold the new, re-compressed tuple
// in general it is very hard to avoid this step
int prefixFreeSpace = frame.getOrigFreeSpaceOff();
int tupleFreeSpace = prefixFreeSpace + totalPrefixBytes;
byte[] buffer = new byte[buf.capacity()];
ByteBuffer byteBuffer = ByteBuffer.wrap(buffer);
// perform compression, and reorg
// we assume that the keyPartitions are sorted by the prefixes
// (i.e., in the logical target order)
int kpIndex = 0;
int tupleIndex = 0;
int prefixTupleIndex = 0;
uncompressedTupleCount = 0;
TypeAwareTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
FieldPrefixTupleReference tupleToWrite = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
tupleToWrite.setFieldCount(fieldCount);
while (tupleIndex < tupleCount) {
if (kpIndex < keyPartitions.size()) {
// beginning of keyPartition found, compress entire keyPartition
if (tupleIndex == keyPartitions.get(kpIndex).firstTupleIndex) {
// number of fields we decided to use for compression of this keyPartition
int fieldCountToCompress = keyPartitions.get(kpIndex).maxPmiIndex + 1;
int segmentStart = keyPartitions.get(kpIndex).firstTupleIndex;
int tuplesInSegment = 1;
FieldPrefixTupleReference prevTuple = new FieldPrefixTupleReference(
tupleWriter.createTupleReference());
prevTuple.setFieldCount(fieldCount);
FieldPrefixTupleReference tuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
tuple.setFieldCount(fieldCount);
for (int i = tupleIndex + 1; i <= keyPartitions.get(kpIndex).lastTupleIndex; i++) {
prevTuple.resetByTupleIndex(frame, i - 1);
tuple.resetByTupleIndex(frame, i);
// check if tuples match in fieldCountToCompress of their first fields
int prefixFieldsMatch = 0;
for (int j = 0; j < fieldCountToCompress; j++) {
if (cmps[j].compare(pageArray, prevTuple.getFieldStart(j), prevTuple.getFieldLength(j),
pageArray, tuple.getFieldStart(j), tuple.getFieldLength(j)) == 0)
prefixFieldsMatch++;
else
break;
}
// the two tuples must match in exactly the number of fields we decided
// to compress for this keyPartition
int processSegments = 0;
if (prefixFieldsMatch == fieldCountToCompress)
tuplesInSegment++;
else
processSegments++;
if (i == keyPartitions.get(kpIndex).lastTupleIndex)
processSegments++;
for (int r = 0; r < processSegments; r++) {
// compress current segment and then start new segment
if (tuplesInSegment < occurrenceThreshold || fieldCountToCompress <= 0) {
// segment does not have at least occurrenceThreshold tuples, so
// write tuples uncompressed
for (int j = 0; j < tuplesInSegment; j++) {
int slotNum = segmentStart + j;
tupleToWrite.resetByTupleIndex(frame, slotNum);
newTupleSlots[tupleCount - 1 - slotNum] = slotManager.encodeSlotFields(
FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
}
uncompressedTupleCount += tuplesInSegment;
} else {
// segment has enough tuples: compress segment, extract prefix,
// write prefix tuple to buffer, and set prefix slot
newPrefixSlots[newPrefixSlots.length - 1 - prefixTupleIndex] = slotManager
.encodeSlotFields(fieldCountToCompress, prefixFreeSpace);
prefixFreeSpace += tupleWriter.writeTupleFields(prevTuple, 0, fieldCountToCompress,
byteBuffer.array(), prefixFreeSpace);
// truncate tuples, write them to buffer, and set tuple slots
for (int j = 0; j < tuplesInSegment; j++) {
int currTupleIndex = segmentStart + j;
tupleToWrite.resetByTupleIndex(frame, currTupleIndex);
newTupleSlots[tupleCount - 1 - currTupleIndex] = slotManager.encodeSlotFields(
prefixTupleIndex, tupleFreeSpace);
tupleFreeSpace += tupleWriter.writeTupleFields(tupleToWrite, fieldCountToCompress,
fieldCount - fieldCountToCompress, byteBuffer.array(), tupleFreeSpace);
}
prefixTupleIndex++;
}
// begin new segment
segmentStart = i;
tuplesInSegment = 1;
}
}
tupleIndex = keyPartitions.get(kpIndex).lastTupleIndex;
kpIndex++;
} else {
// just write the tuple uncompressed
tupleToWrite.resetByTupleIndex(frame, tupleIndex);
newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
uncompressedTupleCount++;
}
} else {
// just write the tuple uncompressed
tupleToWrite.resetByTupleIndex(frame, tupleIndex);
newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
uncompressedTupleCount++;
}
tupleIndex++;
}
// sanity check to see if we have written exactly as many prefix bytes as computed before
if (prefixFreeSpace != frame.getOrigFreeSpaceOff() + totalPrefixBytes) {
throw new Exception("ERROR: Number of prefix bytes written don't match computed number");
}
// in some rare instances our procedure could even increase the space requirement which is very dangerous
// this can happen to to the greedy solution of the knapsack-like problem
// therefore, we check if the new space exceeds the page size to avoid the only danger of
// an increasing space
int totalSpace = tupleFreeSpace + newTupleSlots.length * slotManager.getSlotSize() + newPrefixSlots.length
* slotManager.getSlotSize();
if (totalSpace > buf.capacity())
// just leave the page as is
return false;
// copy new tuple and new slots into original page
int freeSpaceAfterInit = frame.getOrigFreeSpaceOff();
System.arraycopy(buffer, freeSpaceAfterInit, pageArray, freeSpaceAfterInit, tupleFreeSpace - freeSpaceAfterInit);
// copy prefix slots
int slotOffRunner = buf.capacity() - slotManager.getSlotSize();
for (int i = 0; i < newPrefixSlots.length; i++) {
buf.putInt(slotOffRunner, newPrefixSlots[newPrefixSlots.length - 1 - i]);
slotOffRunner -= slotManager.getSlotSize();
}
// copy tuple slots
for (int i = 0; i < newTupleSlots.length; i++) {
buf.putInt(slotOffRunner, newTupleSlots[newTupleSlots.length - 1 - i]);
slotOffRunner -= slotManager.getSlotSize();
}
// update space fields, TODO: we need to update more fields
frame.setFreeSpaceOff(tupleFreeSpace);
frame.setPrefixTupleCount(newPrefixSlots.length);
frame.setUncompressedTupleCount(uncompressedTupleCount);
int totalFreeSpace = buf.capacity() - tupleFreeSpace
- ((newTupleSlots.length + newPrefixSlots.length) * slotManager.getSlotSize());
frame.setTotalFreeSpace(totalFreeSpace);
return true;
}
// we perform an analysis pass over the tuples to determine the costs and
// benefits of different compression options
// a "keypartition" is a range of tuples that has an identical first field
// for each keypartition we chose a prefix length to use for compression
// i.e., all tuples in a keypartition will be compressed based on the same
// prefix length (number of fields)
// the prefix length may be different for different keypartitions
// the occurrenceThreshold determines the minimum number of tuples that must
// share a common prefix in order for us to consider compressing them
private ArrayList<KeyPartition> getKeyPartitions(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp,
int occurrenceThreshold) throws HyracksDataException {
IBinaryComparator[] cmps = cmp.getComparators();
int fieldCount = typeTraits.length;
int maxCmps = cmps.length - 1;
ByteBuffer buf = frame.getBuffer();
byte[] pageArray = buf.array();
IPrefixSlotManager slotManager = (IPrefixSlotManager) frame.getSlotManager();
ArrayList<KeyPartition> keyPartitions = new ArrayList<KeyPartition>();
KeyPartition kp = new KeyPartition(maxCmps);
keyPartitions.add(kp);
TypeAwareTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
FieldPrefixTupleReference prevTuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
prevTuple.setFieldCount(fieldCount);
FieldPrefixTupleReference tuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
tuple.setFieldCount(fieldCount);
kp.firstTupleIndex = 0;
int tupleCount = frame.getTupleCount();
for (int i = 1; i < tupleCount; i++) {
prevTuple.resetByTupleIndex(frame, i - 1);
tuple.resetByTupleIndex(frame, i);
int prefixFieldsMatch = 0;
for (int j = 0; j < maxCmps; j++) {
if (cmps[j].compare(pageArray, prevTuple.getFieldStart(j), prevTuple.getFieldLength(j), pageArray,
tuple.getFieldStart(j), prevTuple.getFieldLength(j)) == 0) {
prefixFieldsMatch++;
kp.pmi[j].matches++;
int prefixBytes = tupleWriter.bytesRequired(tuple, 0, prefixFieldsMatch);
int spaceBenefit = tupleWriter.bytesRequired(tuple)
- tupleWriter.bytesRequired(tuple, prefixFieldsMatch, tuple.getFieldCount()
- prefixFieldsMatch);
if (kp.pmi[j].matches == occurrenceThreshold) {
// if we compress this prefix, we pay the cost of storing it once, plus
// the size for one prefix slot
kp.pmi[j].prefixBytes += prefixBytes;
kp.pmi[j].spaceCost += prefixBytes + slotManager.getSlotSize();
kp.pmi[j].prefixSlotsNeeded++;
kp.pmi[j].spaceBenefit += occurrenceThreshold * spaceBenefit;
} else if (kp.pmi[j].matches > occurrenceThreshold) {
// we are beyond the occurrence threshold, every additional tuple with a
// matching prefix increases the benefit
kp.pmi[j].spaceBenefit += spaceBenefit;
}
} else {
kp.pmi[j].matches = 1;
break;
}
}
// this means not even the first field matched, so we start to consider a new "key partition"
if (maxCmps > 0 && prefixFieldsMatch == 0) {
kp.lastTupleIndex = i - 1;
// remove keyPartitions that don't have enough tuples
if ((kp.lastTupleIndex - kp.firstTupleIndex) + 1 < occurrenceThreshold)
keyPartitions.remove(keyPartitions.size() - 1);
kp = new KeyPartition(maxCmps);
keyPartitions.add(kp);
kp.firstTupleIndex = i;
}
}
kp.lastTupleIndex = tupleCount - 1;
// remove keyPartitions that don't have enough tuples
if ((kp.lastTupleIndex - kp.firstTupleIndex) + 1 < occurrenceThreshold)
keyPartitions.remove(keyPartitions.size() - 1);
return keyPartitions;
}
private class PrefixMatchInfo {
public int matches = 1;
public int spaceCost = 0;
public int spaceBenefit = 0;
public int prefixSlotsNeeded = 0;
public int prefixBytes = 0;
}
private class KeyPartition {
public int firstTupleIndex;
public int lastTupleIndex;
public PrefixMatchInfo[] pmi;
public int maxBenefitMinusCost = 0;
public int maxPmiIndex = -1;
// number of fields used for compression for this kp of current page
public KeyPartition(int numKeyFields) {
pmi = new PrefixMatchInfo[numKeyFields];
for (int i = 0; i < numKeyFields; i++) {
pmi[i] = new PrefixMatchInfo();
}
}
}
private class SortByHeuristic implements Comparator<KeyPartition> {
@Override
public int compare(KeyPartition a, KeyPartition b) {
if (a.maxPmiIndex < 0) {
if (b.maxPmiIndex < 0)
return 0;
return 1;
} else if (b.maxPmiIndex < 0)
return -1;
// non-negative maxPmiIndex, meaning a non-zero benefit exists
float thisHeuristicVal = (float) a.maxBenefitMinusCost / (float) a.pmi[a.maxPmiIndex].prefixSlotsNeeded;
float otherHeuristicVal = (float) b.maxBenefitMinusCost / (float) b.pmi[b.maxPmiIndex].prefixSlotsNeeded;
if (thisHeuristicVal < otherHeuristicVal)
return 1;
else if (thisHeuristicVal > otherHeuristicVal)
return -1;
else
return 0;
}
}
private class SortByOriginalRank implements Comparator<KeyPartition> {
@Override
public int compare(KeyPartition a, KeyPartition b) {
return a.firstTupleIndex - b.firstTupleIndex;
}
}
}
| |
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.utils.cache;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import net.sf.ehcache.CacheException;
import net.sf.ehcache.Ehcache;
import net.sf.ehcache.Element;
import net.sf.ehcache.event.CacheEventListenerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.MapMaker;
/**
* Tracks entries added to {@link Ehcache} instances that have keys or values which implement {@link TaggedCacheEntry}.
* Allows for external removal of elements that match a specified tag
*
* @author Eric Dalquist
*/
@Service("tagTrackingCacheEventListener")
public class TagTrackingCacheEventListener extends CacheEventListenerAdapter implements TaggedCacheEntryPurger {
    protected final Logger logger = LoggerFactory.getLogger(getClass());
    // tag type -> set of caches that contain keys tagged with that type
    // I don't believe that this will leak Ehcache references as this class should have the same lifecycle as the CacheManager
    private final LoadingCache<String, Set<Ehcache>> taggedCaches =
            CacheBuilder.newBuilder().build(new CacheLoader<String, Set<Ehcache>>() {
                @Override
                public Set<Ehcache> load(String key) throws Exception {
                    // concurrent set: listener callbacks may fire from multiple cache threads
                    return Collections.newSetFromMap(new ConcurrentHashMap<Ehcache, Boolean>());
                }
            });
    // Cache Name -> Key Tag -> Set of Keys
    private final LoadingCache<String, LoadingCache<CacheEntryTag, Set<Object>>> taggedCacheKeys =
            CacheBuilder.newBuilder().build(new CacheLoader<String, LoadingCache<CacheEntryTag, Set<Object>>>() {
                @Override
                public LoadingCache<CacheEntryTag, Set<Object>> load(String key) throws Exception {
                    // Key Tag -> Set of Tagged Cache Keys
                    return CacheBuilder.newBuilder().build(new CacheLoader<CacheEntryTag, Set<Object>>() {
                        @Override
                        public Set<Object> load(CacheEntryTag key) throws Exception {
                            // Set of Tagged Cache Keys
                            // weak keys so tracked cache keys do not outlive eviction from Ehcache
                            return Collections.newSetFromMap(new MapMaker().weakKeys().<Object, Boolean>makeMap());
                        }
                    });
                }
            });
    /**
     * Remove all cache entries with keys that have the specified tag
     */
    @Override
    public int purgeCacheEntries(CacheEntryTag tag) {
        final String tagType = tag.getTagType();
        final Set<Ehcache> caches = taggedCaches.getIfPresent(tagType);
        // No cache has recorded entries for this tag type; nothing to purge
        if (caches == null || caches.isEmpty()) {
            return 0;
        }
        int purgeCount = 0;
        //Iterate over each cache to remove the tagged entries
        for (final Ehcache cache : caches) {
            final String cacheName = cache.getName();
            //See if there are any tagged cache keys for the cache
            final LoadingCache<CacheEntryTag, Set<Object>> cacheKeys = taggedCacheKeys.getIfPresent(cacheName);
            if (cacheKeys != null) {
                //Remove all cache keys from the cache
                // asMap().remove atomically detaches the tracked key set for this tag
                final Set<Object> taggedKeys = cacheKeys.asMap().remove(tag);
                if (taggedKeys != null) {
                    final int keyCount = taggedKeys.size();
                    purgeCount += keyCount;
                    logger.debug("Removing {} keys from {} for tag {}", keyCount, cacheName, tag);
                    cache.removeAll(taggedKeys);
                }
            }
        }
        return purgeCount;
    }
    /**
     * Get the tags associated with the element, checking the key first and then
     * the value; returns null if neither implements {@link TaggedCacheEntry}.
     */
    protected Set<CacheEntryTag> getTags(Element element) {
        final Object key = element.getObjectKey();
        if (key instanceof TaggedCacheEntry) {
            return ((TaggedCacheEntry) key).getTags();
        }
        final Object value = element.getObjectValue();
        if (value instanceof TaggedCacheEntry) {
            return ((TaggedCacheEntry) value).getTags();
        }
        return null;
    }
    /**
     * If the element has a TaggedCacheKey record the tag associations
     */
    protected void putElement(Ehcache cache, Element element) {
        final Set<CacheEntryTag> tags = this.getTags(element);
        //Check if the key is tagged
        if (tags != null && !tags.isEmpty()) {
            final String cacheName = cache.getName();
            final Object key = element.getObjectKey();
            final LoadingCache<CacheEntryTag, Set<Object>> cacheKeys = taggedCacheKeys.getUnchecked(cacheName);
            logger.debug("Tracking {} tags in cache {} for key {}", tags.size(), cacheName, key);
            //Add all the tags to the tracking map
            for (final CacheEntryTag tag : tags) {
                //Record that this tag type is stored in this cache
                final String tagType = tag.getTagType();
                final Set<Ehcache> caches = taggedCaches.getUnchecked(tagType);
                caches.add(cache);
                //Record the tag->key association
                final Set<Object> taggedKeys = cacheKeys.getUnchecked(tag);
                taggedKeys.add(key);
            }
        }
    }
    /**
     * If the element has a TaggedCacheKey remove the tag associations
     */
    protected void removeElement(Ehcache cache, Element element) {
        final Set<CacheEntryTag> tags = this.getTags(element);
        //Check if the key is tagged
        if (tags != null && !tags.isEmpty()) {
            final String cacheName = cache.getName();
            final LoadingCache<CacheEntryTag, Set<Object>> cacheKeys = taggedCacheKeys.getIfPresent(cacheName);
            //If there are tracked tagged keys remove matching tags
            if (cacheKeys != null) {
                final Object key = element.getObjectKey();
                logger.debug("Tracking removing key cache {} with tag {} : {}", cacheName, tags, key);
                for (final CacheEntryTag tag : tags) {
                    final Set<Object> taggedKeys = cacheKeys.getIfPresent(tag);
                    //Remove the tagged key
                    if (taggedKeys != null) {
                        taggedKeys.remove(key);
                    }
                }
            }
        }
    }
    /** Ehcache callback: start tracking tags for a newly added element. */
    @Override
    public void notifyElementPut(Ehcache cache, Element element) throws CacheException {
        putElement(cache, element);
    }
    /** Ehcache callback: drop tag tracking for an explicitly removed element. */
    @Override
    public void notifyElementRemoved(Ehcache cache, Element element) throws CacheException {
        removeElement(cache, element);
    }
    /** Ehcache callback: drop tag tracking for an expired element. */
    @Override
    public void notifyElementExpired(Ehcache cache, Element element) {
        removeElement(cache, element);
    }
    /** Ehcache callback: drop tag tracking for an evicted element. */
    @Override
    public void notifyElementEvicted(Ehcache cache, Element element) {
        removeElement(cache, element);
    }
    /** Ehcache callback: a cache was cleared, so drop all of its tracked tags. */
    @Override
    public void notifyRemoveAll(Ehcache cache) {
        final String cacheName = cache.getName();
        final LoadingCache<CacheEntryTag, Set<Object>> cacheKeys = taggedCacheKeys.getIfPresent(cacheName);
        if (cacheKeys != null) {
            logger.debug("Tracking remove all tagged keys for cache {}", new Object[] { cacheName });
            cacheKeys.invalidateAll();
        }
    }
}
| |
package com.mixpanel.android.mpmetrics;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.test.AndroidTestCase;
import com.mixpanel.android.util.Base64Coder;
import com.mixpanel.android.util.RemoteService;
import com.mixpanel.android.util.HttpService;
import org.apache.http.NameValuePair;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.net.ssl.SSLSocketFactory;
/**
 * Exercises the HTTP flush/retry behavior of the Mixpanel analytics pipeline
 * by substituting a scripted {@link RemoteService} mock: queued results (byte
 * arrays or exceptions) are consumed one per request, and every performed
 * request and DB cleanup is recorded for assertion.
 */
public class HttpTest extends AndroidTestCase {
    public void setUp() {
        mDisableFallback = false;
        mMockPreferences = new TestUtils.EmptyPreferences(getContext());
        mFlushResults = new ArrayList<Object>();
        mPerformRequestCalls = new LinkedBlockingQueue<String>();
        mDecideCalls = new LinkedBlockingQueue<String>();
        mCleanupCalls = new ArrayList<String>();
        mDecideResults = new ArrayList<Object>();
        mForceOverMemThreshold = false;
        // Mock HTTP layer: null nameValuePairs means a "decide" request,
        // otherwise it is an event flush. Each call pops the next scripted
        // result; an Exception instance in the queue is thrown instead.
        final RemoteService mockPoster = new HttpService() {
            @Override
            public byte[] performRequest(String endpointUrl, List<NameValuePair> nameValuePairs, SSLSocketFactory socketFactory)
                throws ServiceUnavailableException, IOException {
                try {
                    if (null == nameValuePairs) {
                        mDecideCalls.put(endpointUrl);
                        if (mDecideResults.isEmpty()) {
                            return TestUtils.bytes("{}");
                        }
                        final Object obj = mDecideResults.remove(0);
                        if (obj instanceof IOException) {
                            throw (IOException)obj;
                        } else if (obj instanceof MalformedURLException) {
                            throw (MalformedURLException)obj;
                        } else if (obj instanceof ServiceUnavailableException) {
                            throw (ServiceUnavailableException)obj;
                        }
                        return (byte[])obj;
                    }
                    // ELSE
                    assertEquals(nameValuePairs.get(0).getName(), "data");
                    final String jsonData = Base64Coder.decodeString(nameValuePairs.get(0).getValue());
                    JSONArray msg = new JSONArray(jsonData);
                    JSONObject event = msg.getJSONObject(0);
                    mPerformRequestCalls.put(event.getString("event"));
                    if (mFlushResults.isEmpty()) {
                        return TestUtils.bytes("1");
                    }
                    final Object obj = mFlushResults.remove(0);
                    if (obj instanceof IOException) {
                        throw (IOException)obj;
                    } else if (obj instanceof MalformedURLException) {
                        throw (MalformedURLException)obj;
                    } else if (obj instanceof ServiceUnavailableException) {
                        throw (ServiceUnavailableException)obj;
                    }
                    return (byte[])obj;
                } catch (JSONException e) {
                    throw new RuntimeException("Malformed data passed to test mock", e);
                } catch (InterruptedException e) {
                    throw new RuntimeException("Could not write message to reporting queue for tests.", e);
                }
            }
        };
        // Config with recognizable endpoint names; fallback behavior is
        // toggled per-test via the mDisableFallback flag.
        final MPConfig config = new MPConfig(new Bundle(), getContext()) {
            @Override
            public String getDecideEndpoint() {
                return "DECIDE ENDPOINT";
            }
            @Override
            public String getDecideFallbackEndpoint() {
                return "DECIDE FALLBACK";
            }
            @Override
            public String getEventsEndpoint() {
                return "EVENTS ENDPOINT";
            }
            @Override
            public String getEventsFallbackEndpoint() {
                return "EVENTS FALLBACK";
            }
            @Override
            public boolean getDisableFallback() {
                return mDisableFallback;
            }
        };
        // DB adapter that records cleanups and can simulate memory pressure
        // via mForceOverMemThreshold.
        final MPDbAdapter mockAdapter = new MPDbAdapter(getContext()) {
            @Override
            public void cleanupEvents(String last_id, Table table) {
                mCleanupCalls.add("called");
                super.cleanupEvents(last_id, table);
            }
            @Override
            protected boolean belowMemThreshold() {
                if (mForceOverMemThreshold) {
                    return false;
                } else {
                    return super.belowMemThreshold();
                }
            }
        };
        // Wire the mocks into the messaging pipeline.
        final AnalyticsMessages listener = new AnalyticsMessages(getContext()) {
            @Override
            protected MPDbAdapter makeDbAdapter(Context context) {
                return mockAdapter;
            }
            @Override
            protected RemoteService getPoster() {
                return mockPoster;
            }
            @Override
            protected MPConfig getConfig(Context context) {
                return config;
            }
        };
        mMetrics = new TestUtils.CleanMixpanelAPI(getContext(), mMockPreferences, "Test Message Queuing") {
            @Override
            protected AnalyticsMessages getAnalyticsMessages() {
                return listener;
            }
        };
    }
    public void testHTTPFailures() {
        try {
            // Basic succeed on first, non-fallback url
            mCleanupCalls.clear();
            mFlushResults.add(TestUtils.bytes("1\n"));
            mMetrics.track("Should Succeed", null);
            mMetrics.flush();
            Thread.sleep(500);
            assertEquals("Should Succeed", mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(null, mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(1, mCleanupCalls.size());
            // Fallback test--first URL throws IOException
            mCleanupCalls.clear();
            mFlushResults.add(new IOException());
            mFlushResults.add(TestUtils.bytes("1\n"));
            mMetrics.track("Should Succeed", null);
            mMetrics.flush();
            Thread.sleep(500);
            assertEquals("Should Succeed", mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals("Should Succeed", mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(1, mCleanupCalls.size());
            // Two IOExceptions -- assume temporary network failure, no cleanup should happen until
            // second flush
            mCleanupCalls.clear();
            mFlushResults.add(new IOException());
            mFlushResults.add(new IOException());
            mFlushResults.add(TestUtils.bytes("1\n"));
            mMetrics.track("Should Succeed", null);
            mMetrics.flush();
            Thread.sleep(500);
            assertEquals("Should Succeed", mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals("Should Succeed", mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(0, mCleanupCalls.size());
            mMetrics.flush();
            Thread.sleep(500);
            assertEquals("Should Succeed", mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(null, mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(1, mCleanupCalls.size());
            // MalformedURLException -- should dump the events since this will probably never succeed
            mCleanupCalls.clear();
            mFlushResults.add(new MalformedURLException());
            mMetrics.track("Should Fail", null);
            mMetrics.flush();
            Thread.sleep(500);
            assertEquals("Should Fail", mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(null, mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(1, mCleanupCalls.size());
            // 503 exception -- should wait for 10 seconds until the queue is able to flush
            mCleanupCalls.clear();
            mFlushResults.add(new RemoteService.ServiceUnavailableException("", "10"));
            mFlushResults.add(TestUtils.bytes("1\n"));
            mMetrics.track("Should Succeed", null);
            mMetrics.flush();
            Thread.sleep(500);
            assertEquals("Should Succeed", mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(null, mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(0, mCleanupCalls.size());
            mMetrics.flush();
            Thread.sleep(500);
            assertEquals(null, mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(0, mCleanupCalls.size());
            Thread.sleep(10000);
            mMetrics.flush();
            assertEquals("Should Succeed", mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(null, mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(1, mCleanupCalls.size());
            // short of memory test - should drop all the new queries
            mForceOverMemThreshold = true;
            mCleanupCalls.clear();
            mMetrics.track("Should Fail", null);
            mMetrics.flush();
            Thread.sleep(500);
            assertEquals(null, mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(0, mCleanupCalls.size());
            mForceOverMemThreshold = false;
            mMetrics.track("Should Succeed", null);
            mMetrics.flush();
            Thread.sleep(500);
            assertEquals("Should Succeed", mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(null, mPerformRequestCalls.poll(POLL_WAIT_SECONDS, TimeUnit.SECONDS));
            assertEquals(1, mCleanupCalls.size());
        } catch (InterruptedException e) {
            throw new RuntimeException("Test was interrupted.");
        }
    }
    // Scripted state shared between the mocks and the test method.
    private Future<SharedPreferences> mMockPreferences;
    private List<Object> mFlushResults, mDecideResults;
    private BlockingQueue<String> mPerformRequestCalls, mDecideCalls;
    private List<String> mCleanupCalls;
    private MixpanelAPI mMetrics;
    private volatile boolean mDisableFallback;
    private volatile boolean mForceOverMemThreshold;
    private static final int POLL_WAIT_SECONDS = 5;
}
| |
/*
* $Header: /var/chroot/cvs/cvs/factsheetDesigner/extern/jakarta-slide-server-src-2.1-iPlus Edit/src/share/org/apache/slide/content/NodeRevisionNumber.java,v 1.2 2006-01-22 22:47:24 peter-cvs Exp $
* $Revision: 1.2 $
* $Date: 2006-01-22 22:47:24 $
*
* ====================================================================
*
* Copyright 1999-2002 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.slide.content;
import java.io.Serializable;
import java.util.StringTokenizer;
import org.apache.slide.common.ObjectValidationFailedException;
import org.apache.slide.util.Messages;
/**
* Node Revision Number class.
*
* @version $Revision: 1.2 $
*/
public final class NodeRevisionNumber implements Serializable, Cloneable {
    // -------------------------------------------------------------- Constants
    // Index of the major (first) component in the number array.
    public static final int MAJOR = 0;
    // Index of the minor (second) component in the number array.
    public static final int MINOR = 1;
    // The hidden revision number is used for DeltaV-related system data
    // ("version-history" and "backup" branches in VHR and VCR resources)
    public static final NodeRevisionNumber HIDDEN_0_0 =
        new NodeRevisionNumber( 0, 0 ); // major=0, minor=0
// ----------------------------------------------------------- Constructors
/**
* Create a new revision number.
*/
public NodeRevisionNumber() {
number = new int[2];
number[MAJOR] = 1;
number[MINOR] = 0;
}
/**
* Create a new revision number.
*
* @param major
* @param minor
*/
public NodeRevisionNumber(int major, int minor) {
number = new int[2];
number[MAJOR] = major;
number[MINOR] = minor;
}
    /**
     * Create a new revision number from its string form.
     *
     * @param revisionNumberStr String representation of the revision number,
     *        i.e. dot-separated decimal components such as "1.0" or "1.2.1"
     */
    public NodeRevisionNumber(String revisionNumberStr) {
        parseString(revisionNumberStr);
    }
    /**
     * Create the revision number following the given one on the same branch
     * (equivalent to {@code NodeRevisionNumber(revisionNumber, false)}).
     *
     * @param revisionNumber Previous revision number; may be null, in which
     *        case the default 1.0 is used
     */
    public NodeRevisionNumber(NodeRevisionNumber revisionNumber) {
        this(revisionNumber, false);
    }
/**
* Create a new revision number based on a previous revision number.
*
* @param revisionNumber Previous revision number
* @param createBranch True if a new branch is to be created
*/
public NodeRevisionNumber(NodeRevisionNumber revisionNumber,
boolean createBranch) {
if (revisionNumber != null) {
if (createBranch) {
parseString(revisionNumber.toString() + ".1");
} else {
parseString(revisionNumber.toString());
next();
}
} else {
number = new int[2];
number[MAJOR] = 1;
number[MINOR] = 0;
}
}
// ----------------------------------------------------- Instance Variables
/**
* Number storage.
* <b>Note: when this array or it's content is modified, you must reset
* {@link #cachedToString} to null!</b>
*/
private int[] number;
// --------------------------------------------------------- Public Methods
/**
* Get the first number in the revision number.
*
* @return int
*/
public int getMajor() {
return number[MAJOR];
}
/**
* Get the second number in the revision number.
*
* @return int
*/
public int getMinor() {
return number[MINOR];
}
/**
* Get a number by specifying its order.
*
* @param pos Position of the number
* @return int
*/
public int getNumber(int pos) {
return number[pos];
}
/**
* Return number of digits if the revision number.
*
* @return int
*/
public int getNbDigits() {
return number.length;
}
// -------------------------------------------------------- Private Methods
/**
* Parse a String.
*
* @param revisionNumberStr String representation of the revision number
*/
private void parseString(String revisionNumberStr) {
// We tokenize the string using "." as a delimiter.
StringTokenizer tokenizer =
new StringTokenizer(revisionNumberStr, ".");
int nbDigits = tokenizer.countTokens();
if (nbDigits > 0) {
number = new int[nbDigits];
for (int i=0; i<nbDigits; i++) {
number[i] = (new Integer(tokenizer.nextToken())).intValue();
}
cachedToString=null; // reset cache
} else {
number = new int[2];
number[MAJOR] = 1;
number[MINOR] = 0;
cachedToString=null; // reset cache
}
}
/**
* Next revision.
*/
private void next() {
if (number.length > 0) {
number[number.length - 1] += 1;
cachedToString=null; // reset cache
}
}
// --------------------------------------------------------- Public Methods
/**
* Caches the result of last toString()-call. This will also heavily improve
* hashCode() as the String.hashCode() method is also buffered.
* This imust be resetet to null whenever the {@link #number} array changes.
*/
private String cachedToString;
/**
* Get a String representation of the revision number.
*
* @return String
*/
public String toString() {
if(cachedToString!=null) return cachedToString;
int count=number.length;
if(count==2) cachedToString=(number[MAJOR]+"."+number[MINOR]);
else
{
StringBuffer buf = new StringBuffer();
buf.append(number[MAJOR]);
for (int i=1; i<count; i++) {
buf.append('.').append(number[i]);
}
cachedToString=buf.toString();
}
return cachedToString;
/* old code being replaced by the cached one
StringBuffer buf = new StringBuffer();
buf.append(number[MAJOR]);
for (int i=1; i<number.length; i++) {
buf.append(".").append(number[i]);
}
return buf.toString();*/
}
/**
* HashCode.
*
* @return int
*/
public int hashCode() {
return toString().hashCode();
}
/**
* Equals.
*
* @param obj Object to test
* @return boolean True if the two object are equal :
* <li>obj is of type NodeRevisionNumber and is not null</li>
* <li>The string representations are equal are equal</li>
*/
public boolean equals(Object obj) {
boolean result = false;
if ((obj != null) && (obj instanceof NodeRevisionNumber)) {
result = (this.toString().equals(obj.toString()));
}
return result;
}
/**
* Clone.
*
* @return Object clone
*/
NodeRevisionNumber cloneObject() {
NodeRevisionNumber result = null;
try {
result = (NodeRevisionNumber) super.clone();
} catch(CloneNotSupportedException e) {
}
return result;
}
/**
* Validate.
*/
public void validate() {
if (number == null)
throw new ObjectValidationFailedException
(Messages.message
(NodeRevisionNumber.class.getName() + ".nullNumber"));
if (number.length < 2)
throw new ObjectValidationFailedException
(Messages.message
(NodeRevisionNumber.class.getName() + ".invalidNumber"));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.directory.server.integ;
import java.util.Hashtable;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.naming.ldap.InitialLdapContext;
import javax.naming.ldap.LdapContext;
import netscape.ldap.LDAPConnection;
import netscape.ldap.LDAPException;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.message.Control;
import org.apache.directory.api.ldap.util.JndiUtils;
import org.apache.directory.api.util.Network;
import org.apache.directory.ldap.client.api.LdapConnection;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;
import org.apache.directory.server.constants.ServerDNConstants;
import org.apache.directory.server.core.integ.IntegrationUtils;
import org.apache.directory.server.ldap.LdapServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ServerIntegrationUtils extends IntegrationUtils
{
    /** The class logger */
    private static final Logger LOG = LoggerFactory.getLogger( ServerIntegrationUtils.class );

    /** The Sun JNDI LDAP provider used for all wired contexts */
    private static final String CTX_FACTORY = "com.sun.jndi.ldap.LdapCtxFactory";

    private static final int DEFAULT_PORT = 10389;
    private static final String DEFAULT_ADMIN = ServerDNConstants.ADMIN_SYSTEM_DN;
    private static final String DEFAULT_PASSWORD = "secret";


    /**
     * Builds the JNDI environment shared by every getWiredContext* variant:
     * Sun provider, loopback URL on the server's port, simple bind.
     *
     * @param ldapServer the LDAP server to connect to
     * @param principalDn the bind DN
     * @param password the bind password
     * @return an environment Hashtable ready for InitialLdapContext
     */
    private static Hashtable<String, String> buildWiredEnv( LdapServer ldapServer, String principalDn,
        String password )
    {
        LOG.debug( "Creating a wired context to local LDAP server on port {}", ldapServer.getPort() );
        Hashtable<String, String> env = new Hashtable<>();
        env.put( Context.INITIAL_CONTEXT_FACTORY, CTX_FACTORY );
        env.put( Context.PROVIDER_URL, Network.ldapLoopbackUrl( ldapServer.getPort() ) );
        env.put( Context.SECURITY_PRINCIPAL, principalDn );
        env.put( Context.SECURITY_CREDENTIALS, password );
        env.put( Context.SECURITY_AUTHENTICATION, "simple" );
        return env;
    }


    /**
     * Creates a JNDI LdapContext with a connection over the wire using the
     * SUN LDAP provider. The connection is made using the administrative
     * user as the principalDN. The context is to the rootDSE.
     *
     * @param ldapServer the LDAP server to get the connection to
     * @return an LdapContext as the administrative user to the RootDSE
     * @throws NamingException if there are problems creating the context
     */
    public static LdapContext getWiredContext( LdapServer ldapServer ) throws NamingException
    {
        return getWiredContext( ldapServer, null );
    }


    /**
     * Creates a JNDI LdapContext with a connection over the wire using the
     * SUN LDAP provider, authenticating as the given user. The context is
     * to the rootDSE.
     *
     * @param ldapServer the LDAP server to get the connection to
     * @param principalDn The user to use for authentication
     * @param password The user's password
     * @return an LdapContext for the given user to the RootDSE
     * @throws NamingException if there are problems creating the context
     */
    public static LdapContext getWiredContext( LdapServer ldapServer, String principalDn, String password )
        throws NamingException
    {
        return new InitialLdapContext( buildWiredEnv( ldapServer, principalDn, password ), null );
    }


    /**
     * Creates a JNDI LdapContext with a connection over the wire using the
     * SUN LDAP provider. The connection is made using the administrative
     * user as the principalDN. The context is to the rootDSE.
     *
     * @param ldapServer the LDAP server to get the connection to
     * @param controls The controls to use
     * @return an LdapContext as the administrative user to the RootDSE
     * @throws NamingException if there are problems creating the context
     */
    public static LdapContext getWiredContext( LdapServer ldapServer, Control[] controls ) throws NamingException
    {
        Hashtable<String, String> env = buildWiredEnv( ldapServer, DEFAULT_ADMIN, DEFAULT_PASSWORD );
        javax.naming.ldap.Control[] jndiControls;

        try
        {
            // Convert API controls into their JNDI equivalents.
            jndiControls = JndiUtils.toJndiControls( ldapServer.getDirectoryService().getLdapCodecService(), controls );
        }
        catch ( org.apache.directory.api.asn1.EncoderException ee )
        {
            throw new NamingException( ee.getMessage() );
        }

        return new InitialLdapContext( env, jndiControls );
    }


    /**
     * Creates a JNDI LdapContext as the administrative user to the rootDSE,
     * configured to throw on referrals.
     *
     * @param ldapServer the LDAP server to get the connection to
     * @return an LdapContext as the administrative user to the RootDSE
     * @throws NamingException if there are problems creating the context
     */
    public static LdapContext getWiredContextThrowOnRefferal( LdapServer ldapServer ) throws NamingException
    {
        Hashtable<String, String> env = buildWiredEnv( ldapServer, DEFAULT_ADMIN, DEFAULT_PASSWORD );
        env.put( Context.REFERRAL, "throw" );
        return new InitialLdapContext( env, null );
    }


    /**
     * Creates a JNDI LdapContext as the administrative user to the rootDSE,
     * configured to ignore referrals.
     *
     * @param ldapServer the LDAP server to get the connection to
     * @return an LdapContext as the administrative user to the RootDSE
     * @throws NamingException if there are problems creating the context
     */
    public static LdapContext getWiredContextRefferalIgnore( LdapServer ldapServer ) throws NamingException
    {
        Hashtable<String, String> env = buildWiredEnv( ldapServer, DEFAULT_ADMIN, DEFAULT_PASSWORD );
        env.put( Context.REFERRAL, "ignore" );
        return new InitialLdapContext( env, null );
    }


    /**
     * Creates a JNDI LdapContext as the administrative user to the rootDSE,
     * configured to follow referrals.
     *
     * @param ldapServer the LDAP server to get the connection to
     * @return an LdapContext as the administrative user to the RootDSE
     * @throws NamingException if there are problems creating the context
     */
    public static LdapContext getWiredContextFollowOnRefferal( LdapServer ldapServer ) throws NamingException
    {
        Hashtable<String, String> env = buildWiredEnv( ldapServer, DEFAULT_ADMIN, DEFAULT_PASSWORD );
        env.put( Context.REFERRAL, "follow" );
        return new InitialLdapContext( env, null );
    }


    /**
     * Gets an admin LdapConnection, either to the local server or — when the
     * "ldap.test.server" system property is set — to an external test server
     * whose coordinates are read from "&lt;name&gt;.admin/.password/.host/.port"
     * system properties.
     *
     * @param ldapServer the local LDAP server (used when no external server is configured)
     * @return a bound LdapConnection
     * @throws LdapException if the connection or bind fails
     */
    public static LdapConnection getWiredConnection( LdapServer ldapServer ) throws LdapException
    {
        String testServer = System.getProperty( "ldap.test.server", null );

        if ( testServer == null )
        {
            return getWiredConnection( ldapServer, DEFAULT_ADMIN, DEFAULT_PASSWORD );
        }

        LOG.debug( "ldap.test.server = {}", testServer );
        String admin = System.getProperty( testServer + ".admin", DEFAULT_ADMIN );
        LOG.debug( "{}.admin = {}", testServer, admin );
        String password = System.getProperty( testServer + ".password", DEFAULT_PASSWORD );
        LOG.debug( "{}.password = {}", testServer, password );
        String host = System.getProperty( testServer + ".host", Network.LOOPBACK_HOSTNAME );
        LOG.debug( "{}.host = {}", testServer, host );
        int port = Integer.parseInt( System.getProperty( testServer + ".port", Integer.toString( DEFAULT_PORT ) ) );
        LOG.debug( "{}.port = {}", testServer, port );

        LdapConnection conn = new LdapNetworkConnection( host, port );
        conn.bind( admin, password );
        return conn;
    }


    /**
     * Gets an admin Netscape SDK LDAPConnection, either to the local server or
     * — when the "ldap.test.server" system property is set — to an external
     * test server (see {@link #getWiredConnection(LdapServer)}).
     *
     * @param ldapServer the local LDAP server (used when no external server is configured)
     * @return a connected LDAPConnection
     * @throws LDAPException if the connection fails
     */
    public static LDAPConnection getNsdkWiredConnection( LdapServer ldapServer ) throws LDAPException
    {
        String testServer = System.getProperty( "ldap.test.server", null );

        if ( testServer == null )
        {
            return getNsdkWiredConnection( ldapServer, DEFAULT_ADMIN, DEFAULT_PASSWORD );
        }

        LOG.debug( "ldap.test.server = {}", testServer );
        String admin = System.getProperty( testServer + ".admin", DEFAULT_ADMIN );
        LOG.debug( "{}.admin = {}", testServer, admin );
        String password = System.getProperty( testServer + ".password", DEFAULT_PASSWORD );
        LOG.debug( "{}.password = {}", testServer, password );
        String host = System.getProperty( testServer + ".host", Network.LOOPBACK_HOSTNAME );
        LOG.debug( "{}.host = {}", testServer, host );
        int port = Integer.parseInt( System.getProperty( testServer + ".port", Integer.toString( DEFAULT_PORT ) ) );
        LOG.debug( "{}.port = {}", testServer, port );

        LDAPConnection conn = new LDAPConnection();
        // 3 is the LDAP protocol version.
        conn.connect( 3, host, port, admin, password );
        return conn;
    }


    /**
     * Gets a LDAP connection instance on a server, authenticating a user.
     *
     * @param ldapServer The server we want to connect to
     * @param principalDn The user's DN
     * @param password The user's password
     * @return A LdapConnection instance if we got one
     * @throws LdapException If the connection cannot be created
     */
    public static LdapConnection getWiredConnection( LdapServer ldapServer, String principalDn, String password )
        throws LdapException
    {
        LdapConnection connection = new LdapNetworkConnection( Network.LOOPBACK_HOSTNAME, ldapServer.getPort() );
        connection.bind( principalDn, password );

        return connection;
    }


    /**
     * Gets a LDAP connection instance on a server. We won't bind on the server.
     *
     * @param ldapServer The server we want to connect to
     * @return A LdapConnection instance if we got one
     * @throws LdapException If the connection cannot be created
     */
    public static LdapConnection getLdapConnection( LdapServer ldapServer )
    {
        return new LdapNetworkConnection( Network.LOOPBACK_HOSTNAME, ldapServer.getPort() );
    }


    /**
     * Gets a LDAP connection instance on a server, authenticating a user.
     *
     * @param ldapServer The server we want to connect to
     * @param principalDn The user's DN
     * @param password The user's password
     * @return A LdapConnection instance if we got one
     * @throws LDAPException If the connection cannot be created
     */
    public static LDAPConnection getNsdkWiredConnection( LdapServer ldapServer, String principalDn, String password )
        throws LDAPException
    {
        LDAPConnection connection = new LDAPConnection();
        // 3 is the LDAP protocol version.
        connection.connect( 3, Network.LOOPBACK_HOSTNAME, ldapServer.getPort(), principalDn, password );

        return connection;
    }


    /**
     * Gets a LDAP connection instance on a server. We will bind as Admin
     *
     * @param ldapServer The server we want to connect to
     * @return A LdapConnection instance if we got one
     * @throws LdapException If the connection cannot be created
     */
    public static LdapConnection getAdminConnection( LdapServer ldapServer ) throws LdapException
    {
        LdapConnection connection = new LdapNetworkConnection( Network.LOOPBACK_HOSTNAME, ldapServer.getPort() );
        connection.bind( DEFAULT_ADMIN, DEFAULT_PASSWORD );

        return connection;
    }
}
| |
package mstb.ldi;
import java.awt.Color;
import java.awt.Point;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Hashtable;
import java.util.List;
import java.util.Map.Entry;
import ij.ImagePlus;
import ij.Prefs;
import ij.gui.GenericDialog;
import ij.gui.ImageCanvas;
import ij.gui.ImageRoi;
import ij.gui.Overlay;
import ij.measure.Calibration;
import ij.measure.ResultsTable;
import ij.process.ColorProcessor;
import ij.process.ImageProcessor;
import mstb.Stat;
import mstb.Tools;
public class LinearDistanceInteractiveHandler {
	// Marks per line: key = line index, value = pixel positions of marks on that line.
	private Hashtable<Integer, ArrayList<Integer>> markList = new Hashtable<Integer, ArrayList<Integer>>();
	private ImagePlus iplus = null;
	private ImageCanvas icanv = null;
	//private Integer menuHeight = 16;
	private Integer numMarks = 0;
	ImageProcessor ip = null;
	private LinearDistanceInteractiveMenuStrip menuStrip;
	private LinearDistanceInteractiveMouseHandler mouseActionListener;
	private LinearDistanceInteractiveSettings settings;
	// Line spacing and first-line offset, in pixels (possibly de-calibrated below).
	private Double step, offset;
	Overlay ovl;
	Color ovlColor;

	/**
	 * Wires mouse handling onto the image canvas, converts the configured
	 * step/offset from calibrated units to pixels if requested, and draws
	 * the initial measurement-line overlay.
	 */
	public LinearDistanceInteractiveHandler(ImagePlus image, LinearDistanceInteractiveSettings settings, LinearDistanceInteractiveMenuStrip parentStrip) {
		this.settings = settings;
		iplus = image;
		ip = iplus.getProcessor();
		icanv = iplus.getCanvas();
		menuStrip = parentStrip;
		mouseActionListener = new LinearDistanceInteractiveMouseHandler(this);
		// Use the field directly instead of re-fetching into a shadowing local.
		icanv.addMouseMotionListener(mouseActionListener);
		icanv.addMouseListener(mouseActionListener);
		ij.IJ.setTool(12); // 12 = hand-free tool id; NOTE(review): confirm against IJ tool table
		icanv.disablePopupMenu(true);
		iplus.draw();
		Calibration cal = iplus.getCalibration();
		// Lines run vertically when directionY, so their spacing is along x
		// (pixelWidth); otherwise along y (pixelHeight).
		if (settings.doCalibrateStep)
			step = (Double) settings.step / ((settings.directionY) ? cal.pixelWidth : cal.pixelHeight);
		else
			step = settings.step;
		if (settings.doCalibrateOffset)
			offset = (Double) settings.offset / ((settings.directionY) ? cal.pixelWidth : cal.pixelHeight);
		else
			offset = settings.offset;
		setColor(settings.getovlColor());
		drawOverlay();
	}

	/** Forces the window to re-layout and the canvas to refresh its zoom. */
	public void updateSize() {
		iplus.getWindow().validate();
		icanv.zoomIn(0, 0);
		icanv.zoomOut(0, 0);
	}

	/** @return the cursor position in image (not screen) coordinates */
	public Point getRealPos() {
		return icanv.getCursorLoc();
	}

	/** Detaches the handler: clears the overlay and unregisters listeners. */
	public void remove() {
		iplus.setOverlay(null);
		iplus.updateAndDraw();
		icanv.removeMouseListener(mouseActionListener);
		icanv.removeMouseMotionListener(mouseActionListener);
		icanv.disablePopupMenu(false);
	}

	/**
	 * Asks the user which result columns to produce.
	 *
	 * @return false if the dialog was cancelled
	 */
	public boolean askResults() {
		GenericDialog gd = new GenericDialog("Select results");
		gd.addMessage("Select the results you want to obtain.");
		gd.addCheckboxGroup(1, 6, settings.resultsTable, settings.doResults);
		gd.addCheckbox("Apply image calibration", settings.doApplyCalibration);
		gd.showDialog();
		if (gd.wasCanceled())
			return false;
		for (int i = 0; i < settings.doResults.length; i++) {
			settings.doResults[i] = gd.getNextBoolean();
		}
		settings.doApplyCalibration = gd.getNextBoolean();
		Prefs.set("LinearDistanceInteractive.doResults", Tools.BooleanToString(settings.doResults));
		Prefs.set("LinearDistanceInteractive.doApplyScale", settings.doApplyCalibration);
		settings.save();
		return true;
	}

	/**
	 * Computes stripe lengths (distances between consecutive marks on each
	 * line) and appends the selected statistics to the results table.
	 */
	public void analyze() {
		if (!askResults())
			return;
		List<Double> stripes = new ArrayList<Double>();
		for (Entry<Integer, ArrayList<Integer>> e : markList.entrySet()) {
			Integer lastMark = 0;
			ArrayList<Integer> marks = e.getValue();
			Collections.sort(marks);
			for (Integer mark : marks) {
				// First mark on a line only opens a stripe; no length yet.
				if (lastMark == 0) {
					lastMark = mark;
					continue;
				}
				Integer diff = mark - lastMark;
				stripes.add(diff.doubleValue());
				lastMark = mark;
			}
		}
		@SuppressWarnings("unchecked")
		Stat res = new Stat(stripes);
		ResultsTable rt = settings.restable;
		rt.incrementCounter();
		int row = rt.getCounter() - 1;
		String unit = "px";
		Calibration cal = iplus.getCalibration();
		double calval = 1.0;
		if (settings.doApplyCalibration) {
			// Marks are measured along y for vertical lines, x otherwise.
			calval = (settings.directionY) ? cal.pixelHeight : cal.pixelWidth;
			unit = iplus.getCalibration().getUnit();
		}
		res.factor = calval;
		rt.setValue("Image", row, iplus.getTitle() + ((settings.directionY) ? " V" : " H"));
		for (int ri = 0; ri < settings.doResults.length; ri++) {
			if (!settings.doResults[ri])
				continue;
			String rName = settings.resultsTable[ri];
			String cName = String.format("%s [%s] %s", "Stripe length", unit, rName);
			double rValue = res.getFormattedValue(ri);
			rt.setValue(cName, row, rValue);
		}
		rt.show("Linear Distances Results");
	}

	/** Discards all marks and redraws the image. */
	public void clear() {
		markList = new Hashtable<Integer, ArrayList<Integer>>();
		numMarks = 0;
		iplus.updateAndDraw();
	}

	/**
	 * Adds a mark at the cursor position on the nearest measurement line,
	 * unless an existing mark is already within the removal tolerance.
	 */
	public void addPoint() {
		Point cursorPos = getRealPos();
		int[] found = findPoint(cursorPos);
		if (found != null)
			return; // a mark already exists here
		int line = getNextLine(cursorPos);
		if (markList.get(line) == null) {
			markList.put(line, new ArrayList<Integer>());
		}
		// Store the coordinate along the measurement direction only.
		if (settings.directionY)
			markList.get(line).add(cursorPos.y);
		else
			markList.get(line).add(cursorPos.x);
		drawOverlay();
		numMarks++;
		menuStrip.setCounts(numMarks);
	}

	/** Removes the mark nearest to the cursor, if one is within tolerance. */
	public void removePoint() {
		Point cursorPos = getRealPos();
		int[] found = findPoint(cursorPos);
		if (found != null) {
			// Cast to Object so remove(Object) is called, not remove(int index).
			markList.get(found[0]).remove((Object) found[1]);
			numMarks--;
			drawOverlay();
		}
		menuStrip.setCounts(numMarks);
	}

	/**
	 * Finds the mark nearest to the given position on its line.
	 *
	 * @param pos cursor position in image coordinates
	 * @return {line, markPos} if a mark lies within settings.remtol, else null
	 */
	public int[] findPoint(Point pos) {
		int dist = Integer.MAX_VALUE;
		int[] found = new int[2];
		int line = getNextLine(pos);
		if (markList.get(line) == null)
			return null;
		Integer lpos = (settings.directionY) ? pos.y : pos.x;
		for (Integer mark : markList.get(line)) {
			int dt = Math.abs(mark - lpos);
			if (dt < dist) {
				dist = dt;
				found[0] = line;
				found[1] = mark;
			}
		}
		// found is always non-null here; only the tolerance decides.
		if (dist <= settings.remtol)
			return found;
		return null;
	}

	/**
	 * Repaints the overlay: the measurement lines, every stored mark, and a
	 * preview mark at the cursor on the nearest line.
	 */
	public void drawOverlay() {
		ImageProcessor overlay = new ColorProcessor(ip.getWidth(), ip.getHeight());
		overlay.setColor(ovlColor);
		// pxlh = extent across the lines, pxlw = extent along the lines.
		int pxlh = (settings.directionY) ? overlay.getWidth() : overlay.getHeight();
		int pxlw = (!settings.directionY) ? overlay.getWidth() : overlay.getHeight();
		int line = 0;
		int nearLine = 0;
		Point cursorPos = getRealPos();
		if (cursorPos != null)
			nearLine = getNextLine(cursorPos);
		double offsetLeft = offset;
		if (settings.doCenterLines) {
			// Center the whole line pattern inside the image.
			Double numlines = (((double) pxlh - 2.0 * offset) / step);
			double nl = Math.floor(numlines);
			offsetLeft = (pxlh - nl * step) / 2.0;
		}
		int ml = settings.markLength;
		for (double ld = offsetLeft; ld <= pxlh - offset; ld += step) {
			int l = Tools.getRoundedInt(ld);
			if (settings.directionY)
				overlay.drawLine(l, 0, l, pxlw);
			else
				overlay.drawLine(0, l, pxlw, l);
			if (markList.get(line) != null) {
				for (Integer markPos : markList.get(line)) {
					// Draw each stored mark as a short perpendicular tick.
					if (settings.directionY)
						overlay.drawLine(l - ml, markPos, l + ml, markPos);
					else
						overlay.drawLine(markPos, l - ml, markPos, l + ml);
				}
			}
			if (line == nearLine && cursorPos != null) {
				// Preview tick at the cursor on the nearest line.
				if (settings.directionY)
					overlay.drawLine(l - ml, cursorPos.y, l + ml, cursorPos.y);
				else
					overlay.drawLine(cursorPos.x, l - ml, cursorPos.x, l + ml);
			}
			line++;
		}
		ImageRoi roi = new ImageRoi(0, 0, overlay);
		roi.setZeroTransparent(true);
		iplus.setRoi(roi);
		iplus.draw();
	}

	/** Sets the overlay drawing color (takes effect on the next redraw). */
	public void setColor(Color color) {
		ovlColor = color;
	}

	/**
	 * Maps a cursor position to the index of the nearest measurement line.
	 *
	 * @param Cursor cursor position in image coordinates, must not be null
	 * @return zero-based line index
	 */
	public int getNextLine(Point Cursor) {
		if (Cursor == null)
			throw new NullPointerException("No Point given");
		// Vertical lines are indexed along x, horizontal lines along y.
		if (settings.directionY)
			return Tools.getRoundedInt((((double) Cursor.x) - offset) / step);
		else
			return Tools.getRoundedInt((((double) Cursor.y) - offset) / step);
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.functions.worker;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.CALLS_REAL_METHODS;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyBoolean;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.argThat;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.withSettings;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import com.google.common.collect.ImmutableList;
import io.netty.buffer.Unpooled;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import lombok.extern.slf4j.Slf4j;
import org.apache.distributedlog.api.namespace.Namespace;
import org.apache.pulsar.client.admin.Functions;
import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.client.admin.Sinks;
import org.apache.pulsar.client.admin.Sources;
import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.MessageId;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.Reader;
import org.apache.pulsar.client.api.ReaderBuilder;
import org.apache.pulsar.client.impl.MessageIdImpl;
import org.apache.pulsar.client.impl.MessageImpl;
import org.apache.pulsar.common.api.proto.MessageMetadata;
import org.apache.pulsar.common.functions.WorkerInfo;
import org.apache.pulsar.common.util.ObjectMapperFactory;
import org.apache.pulsar.functions.instance.AuthenticationConfig;
import org.apache.pulsar.functions.proto.Function;
import org.apache.pulsar.functions.runtime.RuntimeFactory;
import org.apache.pulsar.functions.runtime.kubernetes.KubernetesRuntime;
import org.apache.pulsar.functions.runtime.kubernetes.KubernetesRuntimeFactory;
import org.apache.pulsar.functions.runtime.kubernetes.KubernetesRuntimeFactoryConfig;
import org.apache.pulsar.functions.runtime.process.ProcessRuntimeFactory;
import org.apache.pulsar.functions.runtime.thread.ThreadRuntimeFactory;
import org.apache.pulsar.functions.runtime.thread.ThreadRuntimeFactoryConfig;
import org.apache.pulsar.functions.secretsproviderconfigurator.SecretsProviderConfigurator;
import org.apache.pulsar.functions.utils.FunctionCommon;
import org.mockito.ArgumentMatchers;
import org.mockito.MockedConstruction;
import org.mockito.MockedStatic;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
@Slf4j
public class FunctionRuntimeManagerTest {
private final String PULSAR_SERVICE_URL = "pulsar://localhost:6650";
@Test
public void testProcessAssignmentUpdateAddFunctions() throws Exception {
WorkerConfig workerConfig = new WorkerConfig();
workerConfig.setWorkerId("worker-1");
workerConfig.setFunctionRuntimeFactoryClassName(ThreadRuntimeFactory.class.getName());
workerConfig.setFunctionRuntimeFactoryConfigs(
ObjectMapperFactory.getThreadLocal().convertValue(
new ThreadRuntimeFactoryConfig().setThreadGroupName("test"), Map.class));
workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
workerConfig.setStateStorageServiceUrl("foo");
workerConfig.setFunctionAssignmentTopicName("assignments");
PulsarClient pulsarClient = mock(PulsarClient.class);
ReaderBuilder readerBuilder = mock(ReaderBuilder.class);
doReturn(readerBuilder).when(pulsarClient).newReader();
doReturn(readerBuilder).when(readerBuilder).topic(anyString());
doReturn(readerBuilder).when(readerBuilder).startMessageId(any());
doReturn(readerBuilder).when(readerBuilder).startMessageId(any());
doReturn(readerBuilder).when(readerBuilder).readCompacted(anyBoolean());
doReturn(mock(Reader.class)).when(readerBuilder).create();
PulsarWorkerService workerService = mock(PulsarWorkerService.class);
doReturn(pulsarClient).when(workerService).getClient();
doReturn(mock(PulsarAdmin.class)).when(workerService).getFunctionAdmin();
try (final MockedStatic<RuntimeFactory> runtimeFactoryMockedStatic = Mockito
.mockStatic(RuntimeFactory.class);) {
mockRuntimeFactory(runtimeFactoryMockedStatic);
// test new assignment add functions
FunctionRuntimeManager functionRuntimeManager = spy(new FunctionRuntimeManager(
workerConfig,
workerService,
mock(Namespace.class),
mock(MembershipManager.class),
mock(ConnectorsManager.class),
mock(FunctionsManager.class),
mock(FunctionMetaDataManager.class),
mock(WorkerStatsManager.class),
mock(ErrorNotifier.class)));
FunctionActioner functionActioner = spy(functionRuntimeManager.getFunctionActioner());
doNothing().when(functionActioner).startFunction(any(FunctionRuntimeInfo.class));
doNothing().when(functionActioner).stopFunction(any(FunctionRuntimeInfo.class));
doNothing().when(functionActioner).terminateFunction(any(FunctionRuntimeInfo.class));
functionRuntimeManager.setFunctionActioner(functionActioner);
Function.FunctionMetaData function1 = Function.FunctionMetaData.newBuilder().setFunctionDetails(
Function.FunctionDetails.newBuilder()
.setTenant("test-tenant").setNamespace("test-namespace").setName("func-1")).build();
Function.FunctionMetaData function2 = Function.FunctionMetaData.newBuilder().setFunctionDetails(
Function.FunctionDetails.newBuilder()
.setTenant("test-tenant").setNamespace("test-namespace").setName("func-2")).build();
Function.Assignment assignment1 = Function.Assignment.newBuilder()
.setWorkerId("worker-1")
.setInstance(Function.Instance.newBuilder()
.setFunctionMetaData(function1).setInstanceId(0).build())
.build();
Function.Assignment assignment2 = Function.Assignment.newBuilder()
.setWorkerId("worker-2")
.setInstance(Function.Instance.newBuilder()
.setFunctionMetaData(function2).setInstanceId(0).build())
.build();
List<Function.Assignment> assignments = new LinkedList<>();
assignments.add(assignment1);
assignments.add(assignment2);
functionRuntimeManager.processAssignment(assignment1);
functionRuntimeManager.processAssignment(assignment2);
verify(functionRuntimeManager, times(2)).setAssignment(any(Function.Assignment.class));
verify(functionRuntimeManager, times(0)).deleteAssignment(any(Function.Assignment.class));
assertEquals(functionRuntimeManager.workerIdToAssignments.size(), 2);
assertEquals(functionRuntimeManager.workerIdToAssignments
.get("worker-1").get("test-tenant/test-namespace/func-1:0"), assignment1);
assertEquals(functionRuntimeManager.workerIdToAssignments.get("worker-2")
.get("test-tenant/test-namespace/func-2:0"), assignment2);
verify(functionActioner, times(1)).startFunction(any(FunctionRuntimeInfo.class));
verify(functionActioner).startFunction(argThat(
functionRuntimeInfo -> functionRuntimeInfo.getFunctionInstance().getFunctionMetaData()
.equals(function1)));
verify(functionActioner, times(0)).stopFunction(any(FunctionRuntimeInfo.class));
assertEquals(functionRuntimeManager.functionRuntimeInfos.size(), 1);
assertEquals(functionRuntimeManager.functionRuntimeInfos.get("test-tenant/test-namespace/func-1:0"),
new FunctionRuntimeInfo().setFunctionInstance(
Function.Instance.newBuilder().setFunctionMetaData(function1).setInstanceId(0)
.build()));
}
}
/**
 * Verifies that deleting an assignment owned by this worker ("worker-1") terminates the
 * locally running function and removes its runtime info, while an assignment belonging to
 * another worker ("worker-2") is retained untouched.
 */
@Test
public void testProcessAssignmentUpdateDeleteFunctions() throws Exception {
    // Worker config: run functions in-process via ThreadRuntimeFactory.
    WorkerConfig workerConfig = new WorkerConfig();
    workerConfig.setWorkerId("worker-1");
    workerConfig.setFunctionRuntimeFactoryClassName(ThreadRuntimeFactory.class.getName());
    workerConfig.setFunctionRuntimeFactoryConfigs(
            ObjectMapperFactory.getThreadLocal().convertValue(
                    new ThreadRuntimeFactoryConfig().setThreadGroupName("test"), Map.class));
    workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
    workerConfig.setStateStorageServiceUrl("foo");

    // Mock the Pulsar client so the assignment-topic reader can be built without a broker.
    PulsarClient pulsarClient = mock(PulsarClient.class);
    ReaderBuilder readerBuilder = mock(ReaderBuilder.class);
    doReturn(readerBuilder).when(pulsarClient).newReader();
    doReturn(readerBuilder).when(readerBuilder).topic(anyString());
    doReturn(readerBuilder).when(readerBuilder).startMessageId(any());
    doReturn(readerBuilder).when(readerBuilder).readCompacted(anyBoolean());
    doReturn(mock(Reader.class)).when(readerBuilder).create();
    PulsarWorkerService workerService = mock(PulsarWorkerService.class);
    doReturn(pulsarClient).when(workerService).getClient();
    doReturn(mock(PulsarAdmin.class)).when(workerService).getFunctionAdmin();

    try (final MockedStatic<RuntimeFactory> runtimeFactoryMockedStatic = Mockito
            .mockStatic(RuntimeFactory.class);) {
        mockRuntimeFactory(runtimeFactoryMockedStatic);

        // test new assignment delete functions
        FunctionRuntimeManager functionRuntimeManager = spy(new FunctionRuntimeManager(
                workerConfig,
                workerService,
                mock(Namespace.class),
                mock(MembershipManager.class),
                mock(ConnectorsManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                mock(ErrorNotifier.class)));
        // Spy the actioner and stub all actions so no real runtimes are started or stopped.
        FunctionActioner functionActioner = spy(functionRuntimeManager.getFunctionActioner());
        doNothing().when(functionActioner).startFunction(any(FunctionRuntimeInfo.class));
        doNothing().when(functionActioner).stopFunction(any(FunctionRuntimeInfo.class));
        doNothing().when(functionActioner).terminateFunction(any(FunctionRuntimeInfo.class));
        functionRuntimeManager.setFunctionActioner(functionActioner);

        Function.FunctionMetaData function1 = Function.FunctionMetaData.newBuilder().setFunctionDetails(
                Function.FunctionDetails.newBuilder()
                        .setTenant("test-tenant").setNamespace("test-namespace").setName("func-1")).build();
        Function.FunctionMetaData function2 = Function.FunctionMetaData.newBuilder().setFunctionDetails(
                Function.FunctionDetails.newBuilder()
                        .setTenant("test-tenant").setNamespace("test-namespace").setName("func-2")).build();

        // Delete this assignment
        Function.Assignment assignment1 = Function.Assignment.newBuilder()
                .setWorkerId("worker-1")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function1).setInstanceId(0).build())
                .build();
        Function.Assignment assignment2 = Function.Assignment.newBuilder()
                .setWorkerId("worker-2")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function2).setInstanceId(0).build())
                .build();

        // add existing assignments
        functionRuntimeManager.setAssignment(assignment1);
        functionRuntimeManager.setAssignment(assignment2);
        // Forget the interactions above so the verify() calls below only count the
        // processAssignment / deleteAssignment phase.
        reset(functionRuntimeManager);

        // func-1 is currently running locally; deleting its assignment should terminate it.
        functionRuntimeManager.functionRuntimeInfos.put(
                "test-tenant/test-namespace/func-1:0", new FunctionRuntimeInfo().setFunctionInstance(
                        Function.Instance.newBuilder().setFunctionMetaData(function1).setInstanceId(0)
                                .build()));

        functionRuntimeManager.processAssignment(assignment1);
        functionRuntimeManager.processAssignment(assignment2);
        functionRuntimeManager
                .deleteAssignment(FunctionCommon.getFullyQualifiedInstanceId(assignment1.getInstance()));

        verify(functionRuntimeManager, times(0)).setAssignment(any(Function.Assignment.class));
        verify(functionRuntimeManager, times(1)).deleteAssignment(any(String.class));

        // Only worker-2's assignment remains after the delete.
        assertEquals(functionRuntimeManager.workerIdToAssignments.size(), 1);
        assertEquals(functionRuntimeManager.workerIdToAssignments
                .get("worker-2").get("test-tenant/test-namespace/func-2:0"), assignment2);

        // A delete must terminate (not merely stop) the locally running func-1 instance.
        verify(functionActioner, times(0)).startFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner, times(1)).terminateFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner).terminateFunction(argThat(
                functionRuntimeInfo -> functionRuntimeInfo.getFunctionInstance().getFunctionMetaData()
                        .equals(function1)));

        assertEquals(functionRuntimeManager.functionRuntimeInfos.size(), 0);
    }
}
/**
 * Stubs the static {@code RuntimeFactory.getFuntionRuntimeFactory} lookup so that a request
 * for the {@link ThreadRuntimeFactory} class name yields a fresh {@link ThreadRuntimeFactory}
 * on every invocation.
 */
private void mockRuntimeFactory(MockedStatic<RuntimeFactory> mockedStatic) {
    // Answer (rather than a fixed return) so each lookup constructs a new factory instance.
    mockedStatic
            .when(() -> RuntimeFactory.getFuntionRuntimeFactory(eq(ThreadRuntimeFactory.class.getName())))
            .thenAnswer(invocation -> new ThreadRuntimeFactory());
}
/**
 * Verifies that modifying an existing assignment (moving func-2 from worker-2 to worker-1,
 * then stopping its instance) restarts / stops the function as an update — i.e. via
 * stopFunction/startFunction — and never terminates it.
 */
@Test
public void testProcessAssignmentUpdateModifyFunctions() throws Exception {
    // Worker config: run functions in-process via ThreadRuntimeFactory.
    WorkerConfig workerConfig = new WorkerConfig();
    workerConfig.setWorkerId("worker-1");
    workerConfig.setFunctionRuntimeFactoryClassName(ThreadRuntimeFactory.class.getName());
    workerConfig.setFunctionRuntimeFactoryConfigs(
            ObjectMapperFactory.getThreadLocal().convertValue(
                    new ThreadRuntimeFactoryConfig().setThreadGroupName("test"), Map.class));
    workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
    workerConfig.setStateStorageServiceUrl("foo");

    // Mock the Pulsar client so the assignment-topic reader can be built without a broker.
    PulsarClient pulsarClient = mock(PulsarClient.class);
    ReaderBuilder readerBuilder = mock(ReaderBuilder.class);
    doReturn(readerBuilder).when(pulsarClient).newReader();
    doReturn(readerBuilder).when(readerBuilder).topic(anyString());
    doReturn(readerBuilder).when(readerBuilder).startMessageId(any());
    doReturn(readerBuilder).when(readerBuilder).readCompacted(anyBoolean());
    doReturn(mock(Reader.class)).when(readerBuilder).create();
    PulsarWorkerService workerService = mock(PulsarWorkerService.class);
    doReturn(pulsarClient).when(workerService).getClient();
    doReturn(mock(PulsarAdmin.class)).when(workerService).getFunctionAdmin();

    try (final MockedStatic<RuntimeFactory> runtimeFactoryMockedStatic = Mockito
            .mockStatic(RuntimeFactory.class);) {
        mockRuntimeFactory(runtimeFactoryMockedStatic);

        // test new assignment update functions
        FunctionRuntimeManager functionRuntimeManager = new FunctionRuntimeManager(
                workerConfig,
                workerService,
                mock(Namespace.class),
                mock(MembershipManager.class),
                mock(ConnectorsManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                mock(ErrorNotifier.class));
        // Spy the actioner and stub all actions so no real runtimes are started or stopped.
        FunctionActioner functionActioner = spy(functionRuntimeManager.getFunctionActioner());
        doNothing().when(functionActioner).startFunction(any(FunctionRuntimeInfo.class));
        doNothing().when(functionActioner).stopFunction(any(FunctionRuntimeInfo.class));
        doNothing().when(functionActioner).terminateFunction(any(FunctionRuntimeInfo.class));
        functionRuntimeManager.setFunctionActioner(functionActioner);

        Function.FunctionMetaData function1 = Function.FunctionMetaData.newBuilder().setFunctionDetails(
                Function.FunctionDetails.newBuilder()
                        .setTenant("test-tenant").setNamespace("test-namespace").setName("func-1")).build();
        Function.FunctionMetaData function2 = Function.FunctionMetaData.newBuilder().setFunctionDetails(
                Function.FunctionDetails.newBuilder()
                        .setTenant("test-tenant").setNamespace("test-namespace").setName("func-2")).build();

        // Initially: func-1 on worker-1, func-2 on worker-2.
        Function.Assignment assignment1 = Function.Assignment.newBuilder()
                .setWorkerId("worker-1")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function1).setInstanceId(0).build())
                .build();
        Function.Assignment assignment2 = Function.Assignment.newBuilder()
                .setWorkerId("worker-2")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function2).setInstanceId(0).build())
                .build();

        // add existing assignments
        functionRuntimeManager.setAssignment(assignment1);
        functionRuntimeManager.setAssignment(assignment2);
        // Only count actioner interactions from the processAssignment phase onwards.
        reset(functionActioner);

        // Modified assignment: func-2 moves onto this worker (worker-1).
        Function.Assignment assignment3 = Function.Assignment.newBuilder()
                .setWorkerId("worker-1")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function2).setInstanceId(0).build())
                .build();

        // Both instances are considered currently running.
        functionRuntimeManager.functionRuntimeInfos.put(
                "test-tenant/test-namespace/func-1:0", new FunctionRuntimeInfo().setFunctionInstance(
                        Function.Instance.newBuilder().setFunctionMetaData(function1).setInstanceId(0)
                                .build()));
        functionRuntimeManager.functionRuntimeInfos.put(
                "test-tenant/test-namespace/func-2:0", new FunctionRuntimeInfo().setFunctionInstance(
                        Function.Instance.newBuilder().setFunctionMetaData(function2).setInstanceId(0)
                                .build()));

        functionRuntimeManager.processAssignment(assignment1);
        functionRuntimeManager.processAssignment(assignment3);

        // The move is an update: func-2 is stopped and restarted on this worker.
        verify(functionActioner, times(1)).stopFunction(any(FunctionRuntimeInfo.class));
        // make sure terminate is not called since this is a update operation
        verify(functionActioner, times(0)).terminateFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner).stopFunction(argThat(
                functionRuntimeInfo -> functionRuntimeInfo.getFunctionInstance().getFunctionMetaData()
                        .equals(function2)));
        verify(functionActioner, times(1)).startFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner).startFunction(argThat(
                functionRuntimeInfo -> functionRuntimeInfo.getFunctionInstance().getFunctionMetaData()
                        .equals(function2)));

        // Both functions are now assigned to worker-1.
        assertEquals(functionRuntimeManager.functionRuntimeInfos.size(), 2);
        assertEquals(functionRuntimeManager.workerIdToAssignments.size(), 1);
        assertEquals(functionRuntimeManager.workerIdToAssignments
                .get("worker-1").get("test-tenant/test-namespace/func-1:0"), assignment1);
        assertEquals(functionRuntimeManager.workerIdToAssignments
                .get("worker-1").get("test-tenant/test-namespace/func-2:0"), assignment3);

        reset(functionActioner);

        // add a stop
        Function.FunctionMetaData.Builder function2StoppedBldr = function2.toBuilder();
        function2StoppedBldr.putInstanceStates(0, Function.FunctionState.STOPPED);
        Function.FunctionMetaData function2Stopped = function2StoppedBldr.build();
        Function.Assignment assignment4 = Function.Assignment.newBuilder()
                .setWorkerId("worker-1")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function2Stopped).setInstanceId(0).build())
                .build();

        functionRuntimeManager.processAssignment(assignment4);

        // A STOPPED instance state stops the function without restarting it.
        verify(functionActioner, times(1)).stopFunction(any(FunctionRuntimeInfo.class));
        // make sure terminate is not called since this is a update operation
        verify(functionActioner, times(0)).terminateFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner).stopFunction(argThat(functionRuntimeInfo ->
                functionRuntimeInfo.getFunctionInstance().getFunctionMetaData().equals(function2)));
        verify(functionActioner, times(0)).startFunction(any(FunctionRuntimeInfo.class));

        assertEquals(functionRuntimeManager.functionRuntimeInfos.size(), 2);
        assertEquals(functionRuntimeManager.workerIdToAssignments.size(), 1);
        assertEquals(functionRuntimeManager.workerIdToAssignments
                .get("worker-1").get("test-tenant/test-namespace/func-1:0"), assignment1);
        assertEquals(functionRuntimeManager.workerIdToAssignments
                .get("worker-1").get("test-tenant/test-namespace/func-2:0"), assignment4);
    }
}
/**
 * Verifies reassignment of a function between workers: transferring an instance away from
 * this worker stops it locally (no terminate), and transferring it back starts it again
 * and reuses the existing runtime info.
 */
@Test
public void testReassignment() throws Exception {
    // Worker config: run functions in-process via ThreadRuntimeFactory.
    WorkerConfig workerConfig = new WorkerConfig();
    workerConfig.setWorkerId("worker-1");
    workerConfig.setFunctionRuntimeFactoryClassName(ThreadRuntimeFactory.class.getName());
    workerConfig.setFunctionRuntimeFactoryConfigs(
            ObjectMapperFactory.getThreadLocal().convertValue(
                    new ThreadRuntimeFactoryConfig().setThreadGroupName("test"), Map.class));
    workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
    workerConfig.setStateStorageServiceUrl("foo");

    // Mock the Pulsar client so the assignment-topic reader can be built without a broker.
    PulsarClient pulsarClient = mock(PulsarClient.class);
    ReaderBuilder readerBuilder = mock(ReaderBuilder.class);
    doReturn(readerBuilder).when(pulsarClient).newReader();
    doReturn(readerBuilder).when(readerBuilder).topic(anyString());
    doReturn(readerBuilder).when(readerBuilder).startMessageId(any());
    doReturn(readerBuilder).when(readerBuilder).readCompacted(anyBoolean());
    doReturn(mock(Reader.class)).when(readerBuilder).create();
    PulsarWorkerService workerService = mock(PulsarWorkerService.class);
    doReturn(pulsarClient).when(workerService).getClient();
    doReturn(mock(PulsarAdmin.class)).when(workerService).getFunctionAdmin();

    try (final MockedStatic<RuntimeFactory> runtimeFactoryMockedStatic = Mockito
            .mockStatic(RuntimeFactory.class);) {
        mockRuntimeFactory(runtimeFactoryMockedStatic);

        // test new assignment update functions
        FunctionRuntimeManager functionRuntimeManager = new FunctionRuntimeManager(
                workerConfig,
                workerService,
                mock(Namespace.class),
                mock(MembershipManager.class),
                mock(ConnectorsManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                mock(ErrorNotifier.class));
        // Spy the actioner and stub all actions so no real runtimes are started or stopped.
        FunctionActioner functionActioner = spy(functionRuntimeManager.getFunctionActioner());
        doNothing().when(functionActioner).startFunction(any(FunctionRuntimeInfo.class));
        doNothing().when(functionActioner).stopFunction(any(FunctionRuntimeInfo.class));
        doNothing().when(functionActioner).terminateFunction(any(FunctionRuntimeInfo.class));
        functionRuntimeManager.setFunctionActioner(functionActioner);

        Function.FunctionMetaData function1 = Function.FunctionMetaData.newBuilder().setFunctionDetails(
                Function.FunctionDetails.newBuilder()
                        .setTenant("test-tenant").setNamespace("test-namespace").setName("func-1")).build();
        Function.Assignment assignment1 = Function.Assignment.newBuilder()
                .setWorkerId("worker-1")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function1).setInstanceId(0).build())
                .build();

        /** Test transfer from me to other worker **/

        // add existing assignments
        functionRuntimeManager.setAssignment(assignment1);

        // new assignment with different worker
        Function.Assignment assignment2 = Function.Assignment.newBuilder()
                .setWorkerId("worker-2")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function1).setInstanceId(0).build())
                .build();

        // func-1 is currently running on this worker.
        FunctionRuntimeInfo functionRuntimeInfo = new FunctionRuntimeInfo().setFunctionInstance(
                Function.Instance.newBuilder().setFunctionMetaData(function1).setInstanceId(0)
                        .build());
        functionRuntimeManager.functionRuntimeInfos.put(
                "test-tenant/test-namespace/func-1:0", functionRuntimeInfo);

        functionRuntimeManager.processAssignment(assignment2);

        // Transfer away: local instance is stopped (not terminated) and its runtime info removed.
        verify(functionActioner, times(0)).startFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner, times(0)).terminateFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner, times(1)).stopFunction(any(FunctionRuntimeInfo.class));
        assertEquals(functionRuntimeManager.workerIdToAssignments
                .get("worker-2").get("test-tenant/test-namespace/func-1:0"), assignment2);
        assertEquals(functionRuntimeManager.functionRuntimeInfos.size(), 0);
        assertNull(functionRuntimeManager.functionRuntimeInfos.get("test-tenant/test-namespace/func-1:0"));

        /** Test transfer from other worker to me **/

        // Clear interaction counts, then re-stub since reset() also wipes stubbing.
        reset(functionActioner);
        doNothing().when(functionActioner).startFunction(any(FunctionRuntimeInfo.class));
        doNothing().when(functionActioner).stopFunction(any(FunctionRuntimeInfo.class));
        doNothing().when(functionActioner).terminateFunction(any(FunctionRuntimeInfo.class));
        functionRuntimeManager.setFunctionActioner(functionActioner);

        Function.Assignment assignment3 = Function.Assignment.newBuilder()
                .setWorkerId("worker-1")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function1).setInstanceId(0).build())
                .build();

        functionRuntimeManager.processAssignment(assignment3);

        // Transfer back: the instance is started locally and worker-2's entry disappears.
        verify(functionActioner, times(1)).startFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner, times(0)).terminateFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner, times(0)).stopFunction(any(FunctionRuntimeInfo.class));
        assertEquals(functionRuntimeManager.workerIdToAssignments
                .get("worker-1").get("test-tenant/test-namespace/func-1:0"), assignment3);
        assertNull(functionRuntimeManager.workerIdToAssignments
                .get("worker-2"));
        assertEquals(functionRuntimeManager.functionRuntimeInfos.size(), 1);
        assertEquals(functionRuntimeManager.functionRuntimeInfos.get("test-tenant/test-namespace/func-1:0"),
                functionRuntimeInfo);
    }
}
/**
 * Verifies FunctionRuntimeManager.initialize(): the manager replays the compacted
 * assignment topic (assign func-1, assign func-2, then a tombstone deleting func-2),
 * returns the id of the last message read, and starts only the surviving function
 * (func-1) without any stop/terminate calls or reported errors.
 */
@Test
public void testRuntimeManagerInitialize() throws Exception {
    WorkerConfig workerConfig = new WorkerConfig();
    workerConfig.setWorkerId("worker-1");
    workerConfig.setFunctionRuntimeFactoryClassName(ThreadRuntimeFactory.class.getName());
    workerConfig.setFunctionRuntimeFactoryConfigs(
            ObjectMapperFactory.getThreadLocal().convertValue(
                    new ThreadRuntimeFactoryConfig().setThreadGroupName("test"), Map.class));
    workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
    workerConfig.setStateStorageServiceUrl("foo");
    workerConfig.setFunctionAssignmentTopicName("assignments");

    Function.FunctionMetaData function1 = Function.FunctionMetaData.newBuilder().setFunctionDetails(
            Function.FunctionDetails.newBuilder()
                    .setTenant("test-tenant").setNamespace("test-namespace").setName("func-1")).build();
    Function.FunctionMetaData function2 = Function.FunctionMetaData.newBuilder().setFunctionDetails(
            Function.FunctionDetails.newBuilder()
                    .setTenant("test-tenant").setNamespace("test-namespace").setName("func-2")).build();

    // All three assignments target this worker; assignment3 shares func-2's instance id and is
    // only used to produce the key of the delete (tombstone) message below.
    Function.Assignment assignment1 = Function.Assignment.newBuilder()
            .setWorkerId("worker-1")
            .setInstance(Function.Instance.newBuilder()
                    .setFunctionMetaData(function1).setInstanceId(0).build())
            .build();
    Function.Assignment assignment2 = Function.Assignment.newBuilder()
            .setWorkerId("worker-1")
            .setInstance(Function.Instance.newBuilder()
                    .setFunctionMetaData(function2).setInstanceId(0).build())
            .build();
    Function.Assignment assignment3 = Function.Assignment.newBuilder()
            .setWorkerId("worker-1")
            .setInstance(Function.Instance.newBuilder()
                    .setFunctionMetaData(function2).setInstanceId(0).build())
            .build();

    // Build the simulated assignment-topic contents, keyed by fully-qualified instance id.
    List<Message<byte[]>> messageList = new LinkedList<>();
    MessageMetadata metadata = new MessageMetadata();
    MessageId messageId1 = new MessageIdImpl(0, 1, -1);
    Message message1 = spy(new MessageImpl("foo", messageId1.toString(),
            new HashMap<>(), Unpooled.copiedBuffer(assignment1.toByteArray()), null, metadata));
    doReturn(FunctionCommon.getFullyQualifiedInstanceId(assignment1.getInstance())).when(message1).getKey();

    MessageId messageId2 = new MessageIdImpl(0, 2, -1);
    Message message2 = spy(new MessageImpl("foo", messageId2.toString(),
            new HashMap<>(), Unpooled.copiedBuffer(assignment2.toByteArray()), null, metadata));
    doReturn(FunctionCommon.getFullyQualifiedInstanceId(assignment2.getInstance())).when(message2).getKey();

    // delete function2: an empty payload under func-2's key acts as a tombstone.
    MessageId messageId3 = new MessageIdImpl(0, 3, -1);
    Message message3 = spy(new MessageImpl("foo", messageId3.toString(),
            new HashMap<>(), Unpooled.copiedBuffer("".getBytes()), null, metadata));
    doReturn(FunctionCommon.getFullyQualifiedInstanceId(assignment3.getInstance())).when(message3).getKey();

    messageList.add(message1);
    messageList.add(message2);
    messageList.add(message3);

    // Reader mock drains messageList synchronously, then reports nothing available;
    // readNextAsync returns a never-completing future so the tailer just idles.
    PulsarClient pulsarClient = mock(PulsarClient.class);
    Reader<byte[]> reader = mock(Reader.class);
    Iterator<Message<byte[]>> it = messageList.iterator();
    when(reader.readNext()).thenAnswer(new Answer<Message<byte[]>>() {
        @Override
        public Message<byte[]> answer(InvocationOnMock invocationOnMock) throws Throwable {
            return it.next();
        }
    });
    when(reader.readNextAsync()).thenAnswer(new Answer<CompletableFuture<Message<byte[]>>>() {
        @Override
        public CompletableFuture<Message<byte[]>> answer(InvocationOnMock invocationOnMock) throws Throwable {
            return new CompletableFuture<>();
        }
    });
    when(reader.hasMessageAvailable()).thenAnswer(new Answer<Boolean>() {
        @Override
        public Boolean answer(InvocationOnMock invocationOnMock) throws Throwable {
            return it.hasNext();
        }
    });

    ReaderBuilder readerBuilder = mock(ReaderBuilder.class);
    doReturn(readerBuilder).when(pulsarClient).newReader();
    doReturn(readerBuilder).when(readerBuilder).topic(anyString());
    doReturn(readerBuilder).when(readerBuilder).readerName(anyString());
    doReturn(readerBuilder).when(readerBuilder).subscriptionRolePrefix(anyString());
    doReturn(readerBuilder).when(readerBuilder).startMessageId(any());
    doReturn(readerBuilder).when(readerBuilder).readCompacted(anyBoolean());
    doReturn(reader).when(readerBuilder).create();
    PulsarWorkerService workerService = mock(PulsarWorkerService.class);
    doReturn(pulsarClient).when(workerService).getClient();
    doReturn(mock(PulsarAdmin.class)).when(workerService).getFunctionAdmin();

    ErrorNotifier errorNotifier = mock(ErrorNotifier.class);
    try (final MockedStatic<RuntimeFactory> runtimeFactoryMockedStatic = Mockito
            .mockStatic(RuntimeFactory.class);) {
        mockRuntimeFactory(runtimeFactoryMockedStatic);

        // test new assignment add functions
        FunctionRuntimeManager functionRuntimeManager = new FunctionRuntimeManager(
                workerConfig,
                workerService,
                mock(Namespace.class),
                mock(MembershipManager.class),
                mock(ConnectorsManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                errorNotifier);
        // Spy the actioner and stub all actions so no real runtimes are started or stopped.
        FunctionActioner functionActioner = spy(functionRuntimeManager.getFunctionActioner());
        doNothing().when(functionActioner).startFunction(any(FunctionRuntimeInfo.class));
        doNothing().when(functionActioner).stopFunction(any(FunctionRuntimeInfo.class));
        doNothing().when(functionActioner).terminateFunction(any(FunctionRuntimeInfo.class));
        functionRuntimeManager.setFunctionActioner(functionActioner);

        // initialize() should return the id of the last message it consumed.
        assertEquals(functionRuntimeManager.initialize(), messageId3);

        assertEquals(functionRuntimeManager.workerIdToAssignments.size(), 1);
        verify(functionActioner, times(1)).startFunction(any(FunctionRuntimeInfo.class));
        // verify stop function is called zero times because we don't want to unnecessarily restart any functions during initialization
        verify(functionActioner, times(0)).stopFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner, times(0)).terminateFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner).startFunction(
                argThat(functionRuntimeInfo -> functionRuntimeInfo.getFunctionInstance()
                        .equals(assignment1.getInstance())));

        // Only func-1 survives: func-2's assignment was tombstoned.
        assertEquals(functionRuntimeManager.functionRuntimeInfos.size(), 1);
        assertEquals(functionRuntimeManager.functionRuntimeInfos.get("test-tenant/test-namespace/func-1:0"),
                new FunctionRuntimeInfo().setFunctionInstance(
                        Function.Instance.newBuilder().setFunctionMetaData(function1).setInstanceId(0)
                                .build()));

        // verify no errors occurred
        verify(errorNotifier, times(0)).triggerError(any());
    }
}
/**
 * Verifies assignment transfers when the runtime factory is externally managed
 * (Kubernetes): the manager must not start/stop/terminate anything itself, and a
 * transfer back to this worker reattaches to the existing runtime via reinitialize().
 */
@Test
public void testExternallyManagedRuntimeUpdate() throws Exception {
    // Worker config: Kubernetes runtime factory, submitting from outside a pod.
    WorkerConfig workerConfig = new WorkerConfig();
    workerConfig.setWorkerId("worker-1");
    workerConfig.setFunctionRuntimeFactoryClassName(KubernetesRuntimeFactory.class.getName());
    workerConfig.setFunctionRuntimeFactoryConfigs(
            ObjectMapperFactory.getThreadLocal()
                    .convertValue(new KubernetesRuntimeFactoryConfig()
                            .setSubmittingInsidePod(false), Map.class));
    workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
    workerConfig.setStateStorageServiceUrl("foo");
    workerConfig.setPulsarFunctionsCluster("cluster");

    // Mock the Pulsar client so the assignment-topic reader can be built without a broker.
    PulsarClient pulsarClient = mock(PulsarClient.class);
    ReaderBuilder readerBuilder = mock(ReaderBuilder.class);
    doReturn(readerBuilder).when(pulsarClient).newReader();
    doReturn(readerBuilder).when(readerBuilder).topic(anyString());
    doReturn(readerBuilder).when(readerBuilder).startMessageId(any());
    doReturn(readerBuilder).when(readerBuilder).readCompacted(anyBoolean());
    doReturn(mock(Reader.class)).when(readerBuilder).create();
    PulsarWorkerService workerService = mock(PulsarWorkerService.class);
    doReturn(pulsarClient).when(workerService).getClient();
    doReturn(mock(PulsarAdmin.class)).when(workerService).getFunctionAdmin();

    // Fully mocked Kubernetes factory: no cluster access, flagged as externally managed.
    KubernetesRuntimeFactory kubernetesRuntimeFactory = mock(KubernetesRuntimeFactory.class);
    doNothing().when(kubernetesRuntimeFactory).initialize(
            any(WorkerConfig.class),
            any(AuthenticationConfig.class),
            any(SecretsProviderConfigurator.class),
            any(),
            any(),
            any()
    );
    doNothing().when(kubernetesRuntimeFactory).setupClient();
    doReturn(true).when(kubernetesRuntimeFactory).externallyManaged();

    KubernetesRuntime kubernetesRuntime = mock(KubernetesRuntime.class);
    doReturn(kubernetesRuntime).when(kubernetesRuntimeFactory).createContainer(any(), any(), any(), any());

    FunctionActioner functionActioner = spy(new FunctionActioner(
            workerConfig,
            kubernetesRuntimeFactory, null, null, null, null));

    try (final MockedStatic<RuntimeFactory> runtimeFactoryMockedStatic = Mockito
            .mockStatic(RuntimeFactory.class);) {
        // Route any factory lookup to the mocked Kubernetes factory.
        runtimeFactoryMockedStatic.when(() -> RuntimeFactory.getFuntionRuntimeFactory(anyString()))
                .thenAnswer(invocation -> kubernetesRuntimeFactory);

        // test new assignment update functions
        FunctionRuntimeManager functionRuntimeManager = new FunctionRuntimeManager(
                workerConfig,
                workerService,
                mock(Namespace.class),
                mock(MembershipManager.class),
                mock(ConnectorsManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                mock(ErrorNotifier.class));
        functionRuntimeManager.setFunctionActioner(functionActioner);

        Function.FunctionMetaData function1 = Function.FunctionMetaData.newBuilder()
                .setPackageLocation(Function.PackageLocationMetaData.newBuilder().setPackagePath("path").build())
                .setFunctionDetails(
                        Function.FunctionDetails.newBuilder()
                                .setTenant("test-tenant").setNamespace("test-namespace").setName("func-1")).build();
        Function.Assignment assignment1 = Function.Assignment.newBuilder()
                .setWorkerId("worker-1")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function1).setInstanceId(0).build())
                .build();

        /** Test transfer from me to other worker **/

        // add existing assignments
        functionRuntimeManager.setAssignment(assignment1);

        // new assignment with different worker
        Function.Assignment assignment2 = Function.Assignment.newBuilder()
                .setWorkerId("worker-2")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function1).setInstanceId(0).build())
                .build();

        // Local runtime info with a spawner attached, as if the function were running here.
        Function.Instance instance = Function.Instance.newBuilder()
                .setFunctionMetaData(function1).setInstanceId(0).build();
        FunctionRuntimeInfo functionRuntimeInfo = new FunctionRuntimeInfo()
                .setFunctionInstance(instance)
                .setRuntimeSpawner(functionActioner
                        .getRuntimeSpawner(instance, function1.getPackageLocation().getPackagePath()));
        functionRuntimeManager.functionRuntimeInfos.put(
                "test-tenant/test-namespace/func-1:0", functionRuntimeInfo);

        functionRuntimeManager.processAssignment(assignment2);

        // make sure nothing is called
        verify(functionActioner, times(0)).startFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner, times(0)).terminateFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner, times(0)).stopFunction(any(FunctionRuntimeInfo.class));

        // Bookkeeping still updates: worker-2 owns the assignment, local runtime info is gone.
        assertEquals(functionRuntimeManager.workerIdToAssignments
                .get("worker-2").get("test-tenant/test-namespace/func-1:0"), assignment2);
        assertNull(functionRuntimeManager.functionRuntimeInfos.get("test-tenant/test-namespace/func-1:0"));

        /** Test transfer from other worker to me **/

        Function.Assignment assignment3 = Function.Assignment.newBuilder()
                .setWorkerId("worker-1")
                .setInstance(Function.Instance.newBuilder()
                        .setFunctionMetaData(function1).setInstanceId(0).build())
                .build();

        functionRuntimeManager.processAssignment(assignment3);

        // make sure nothing is called
        verify(functionActioner, times(0)).startFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner, times(0)).terminateFunction(any(FunctionRuntimeInfo.class));
        verify(functionActioner, times(0)).stopFunction(any(FunctionRuntimeInfo.class));

        assertEquals(functionRuntimeManager.workerIdToAssignments
                .get("worker-1").get("test-tenant/test-namespace/func-1:0"), assignment3);
        assertNull(functionRuntimeManager.workerIdToAssignments
                .get("worker-2"));

        // The recreated runtime info must carry a spawner wired to the Kubernetes factory
        // with the original instance config.
        assertEquals(
                functionRuntimeManager.functionRuntimeInfos.get("test-tenant/test-namespace/func-1:0")
                        .getFunctionInstance(),
                functionRuntimeInfo.getFunctionInstance());
        assertNotNull(
                functionRuntimeManager.functionRuntimeInfos.get("test-tenant/test-namespace/func-1:0")
                        .getRuntimeSpawner());
        assertEquals(
                functionRuntimeManager.functionRuntimeInfos.get("test-tenant/test-namespace/func-1:0")
                        .getRuntimeSpawner().getInstanceConfig().getFunctionDetails(),
                function1.getFunctionDetails());
        assertEquals(
                functionRuntimeManager.functionRuntimeInfos.get("test-tenant/test-namespace/func-1:0")
                        .getRuntimeSpawner().getInstanceConfig().getInstanceId(),
                instance.getInstanceId());
        assertTrue(
                functionRuntimeManager.functionRuntimeInfos.get("test-tenant/test-namespace/func-1:0")
                        .getRuntimeSpawner().getRuntimeFactory() instanceof KubernetesRuntimeFactory);
        assertNotNull(
                functionRuntimeManager.functionRuntimeInfos.get("test-tenant/test-namespace/func-1:0")
                        .getRuntimeSpawner().getRuntime());

        // Externally managed runtimes are reattached, not restarted.
        verify(kubernetesRuntime, times(1)).reinitialize();
    }
}
/**
 * Verifies runtime-factory validation in the FunctionRuntimeManager constructor across
 * four scenarios: no factory configured, unknown factory class, a class that does not
 * implement RuntimeFactory, and a valid ThreadRuntimeFactory configuration.
 */
@Test
public void testFunctionRuntimeSetCorrectly() {
    // Function runtime not set
    try {
        WorkerConfig workerConfig = new WorkerConfig();
        workerConfig.setWorkerId("worker-1");
        workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
        workerConfig.setStateStorageServiceUrl("foo");
        workerConfig.setFunctionAssignmentTopicName("assignments");
        new FunctionRuntimeManager(
                workerConfig,
                mock(PulsarWorkerService.class),
                mock(Namespace.class),
                mock(MembershipManager.class),
                mock(ConnectorsManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                mock(ErrorNotifier.class));
        fail();
    } catch (Exception e) {
        // Missing factory class name must be rejected with this exact message.
        assertEquals(e.getMessage(), "A Function Runtime Factory needs to be set");
    }

    // Function runtime class not found
    try {
        WorkerConfig workerConfig = new WorkerConfig();
        workerConfig.setWorkerId("worker-1");
        workerConfig.setFunctionRuntimeFactoryClassName("foo");
        workerConfig.setFunctionRuntimeFactoryConfigs(
                ObjectMapperFactory.getThreadLocal().convertValue(new KubernetesRuntimeFactoryConfig(), Map.class));
        workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
        workerConfig.setStateStorageServiceUrl("foo");
        workerConfig.setFunctionAssignmentTopicName("assignments");
        new FunctionRuntimeManager(
                workerConfig,
                mock(PulsarWorkerService.class),
                mock(Namespace.class),
                mock(MembershipManager.class),
                mock(ConnectorsManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                mock(ErrorNotifier.class));
        fail();
    } catch (Exception e) {
        // "foo" is not a loadable class; the root cause must be ClassNotFoundException.
        assertEquals(e.getCause().getClass(), ClassNotFoundException.class);
    }

    // Function runtime class does not implement correct interface
    try {
        WorkerConfig workerConfig = new WorkerConfig();
        workerConfig.setWorkerId("worker-1");
        // This test class itself is a loadable class that is not a RuntimeFactory.
        workerConfig.setFunctionRuntimeFactoryClassName(FunctionRuntimeManagerTest.class.getName());
        workerConfig.setFunctionRuntimeFactoryConfigs(
                ObjectMapperFactory.getThreadLocal().convertValue(new KubernetesRuntimeFactoryConfig(), Map.class));
        workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
        workerConfig.setStateStorageServiceUrl("foo");
        workerConfig.setFunctionAssignmentTopicName("assignments");
        new FunctionRuntimeManager(
                workerConfig,
                mock(PulsarWorkerService.class),
                mock(Namespace.class),
                mock(MembershipManager.class),
                mock(ConnectorsManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                mock(ErrorNotifier.class));
        fail();
    } catch (Exception e) {
        assertEquals(e.getMessage(),
                "org.apache.pulsar.functions.worker.FunctionRuntimeManagerTest does not implement org.apache.pulsar.functions.runtime.RuntimeFactory");
    }

    // Correct runtime class
    try {
        WorkerConfig workerConfig = new WorkerConfig();
        workerConfig.setWorkerId("worker-1");
        workerConfig.setFunctionRuntimeFactoryClassName(ThreadRuntimeFactory.class.getName());
        workerConfig.setFunctionRuntimeFactoryConfigs(
                ObjectMapperFactory.getThreadLocal().convertValue(new KubernetesRuntimeFactoryConfig(), Map.class));
        workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
        workerConfig.setStateStorageServiceUrl("foo");
        workerConfig.setFunctionAssignmentTopicName("assignments");
        try (final MockedStatic<RuntimeFactory> runtimeFactoryMockedStatic = Mockito
                .mockStatic(RuntimeFactory.class);) {
            mockRuntimeFactory(runtimeFactoryMockedStatic);
            FunctionRuntimeManager functionRuntimeManager = new FunctionRuntimeManager(
                    workerConfig,
                    mock(PulsarWorkerService.class),
                    mock(Namespace.class),
                    mock(MembershipManager.class),
                    mock(ConnectorsManager.class),
                    mock(FunctionsManager.class),
                    mock(FunctionMetaDataManager.class),
                    mock(WorkerStatsManager.class),
                    mock(ErrorNotifier.class));
            assertEquals(functionRuntimeManager.getRuntimeFactory().getClass(), ThreadRuntimeFactory.class);
        }
    } catch (Exception e) {
        log.error("Failed to initialize the runtime manager : ", e);
        fail();
    }
}
/**
 * Verifies backwards compatibility of the deprecated per-runtime sections of
 * {@code WorkerConfig} ({@code kubernetesContainerFactory},
 * {@code processContainerFactory} and {@code threadContainerFactory}):
 * {@code FunctionRuntimeManager} must still select the matching runtime
 * factory class and carry the legacy settings over to it.
 */
@Test
public void testFunctionRuntimeFactoryConfigsBackwardsCompatibility() throws Exception {
    // Test kubernetes runtime
    WorkerConfig.KubernetesContainerFactory kubernetesContainerFactory
            = new WorkerConfig.KubernetesContainerFactory();
    kubernetesContainerFactory.setK8Uri("k8Uri");
    kubernetesContainerFactory.setJobNamespace("jobNamespace");
    kubernetesContainerFactory.setJobName("jobName");
    kubernetesContainerFactory.setPulsarDockerImageName("pulsarDockerImageName");
    kubernetesContainerFactory.setImagePullPolicy("imagePullPolicy");
    kubernetesContainerFactory.setPulsarRootDir("pulsarRootDir");
    WorkerConfig workerConfig = new WorkerConfig();
    workerConfig.setKubernetesContainerFactory(kubernetesContainerFactory);
    // Stub every KubernetesRuntimeFactory constructed inside the manager so the
    // test never touches a real Kubernetes client. CALLS_REAL_METHODS keeps the
    // (real) config-mapping logic running for the getters asserted below, while
    // initialize()/setupClient() are no-ops.
    try (MockedConstruction<KubernetesRuntimeFactory> mocked = Mockito.mockConstruction(KubernetesRuntimeFactory.class,
            withSettings().defaultAnswer(CALLS_REAL_METHODS),
            (mockedKubernetesRuntimeFactory, context) -> {
                doNothing().when(mockedKubernetesRuntimeFactory).initialize(
                        any(WorkerConfig.class),
                        any(AuthenticationConfig.class),
                        any(SecretsProviderConfigurator.class),
                        any(),
                        any(),
                        any()
                );
                doNothing().when(mockedKubernetesRuntimeFactory).setupClient();
                doReturn(true).when(mockedKubernetesRuntimeFactory).externallyManaged();
            })) {
        FunctionRuntimeManager functionRuntimeManager = new FunctionRuntimeManager(
                workerConfig,
                mock(PulsarWorkerService.class),
                mock(Namespace.class),
                mock(MembershipManager.class),
                mock(ConnectorsManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                mock(ErrorNotifier.class));
        // The legacy kubernetes settings must be visible on the resulting factory.
        KubernetesRuntimeFactory kubernetesRuntimeFactory = (KubernetesRuntimeFactory) functionRuntimeManager.getRuntimeFactory();
        assertEquals(kubernetesRuntimeFactory.getK8Uri(), "k8Uri");
        assertEquals(kubernetesRuntimeFactory.getJobNamespace(), "jobNamespace");
        assertEquals(kubernetesRuntimeFactory.getPulsarDockerImageName(), "pulsarDockerImageName");
        assertEquals(kubernetesRuntimeFactory.getImagePullPolicy(), "imagePullPolicy");
        assertEquals(kubernetesRuntimeFactory.getPulsarRootDir(), "pulsarRootDir");
        // Test process runtime
        WorkerConfig.ProcessContainerFactory processContainerFactory
                = new WorkerConfig.ProcessContainerFactory();
        processContainerFactory.setExtraFunctionDependenciesDir("extraDependenciesDir");
        processContainerFactory.setLogDirectory("logDirectory");
        processContainerFactory.setPythonInstanceLocation("pythonInstanceLocation");
        processContainerFactory.setJavaInstanceJarLocation("javaInstanceJarLocation");
        workerConfig = new WorkerConfig();
        workerConfig.setProcessContainerFactory(processContainerFactory);
        functionRuntimeManager = new FunctionRuntimeManager(
                workerConfig,
                mock(PulsarWorkerService.class),
                mock(Namespace.class),
                mock(MembershipManager.class),
                mock(ConnectorsManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                mock(ErrorNotifier.class));
        assertEquals(functionRuntimeManager.getRuntimeFactory().getClass(), ProcessRuntimeFactory.class);
        ProcessRuntimeFactory processRuntimeFactory =
                (ProcessRuntimeFactory) functionRuntimeManager.getRuntimeFactory();
        assertEquals(processRuntimeFactory.getExtraDependenciesDir(), "extraDependenciesDir");
        // Note the "/functions" suffix appended to the configured log directory.
        assertEquals(processRuntimeFactory.getLogDirectory(), "logDirectory/functions");
        assertEquals(processRuntimeFactory.getPythonInstanceFile(), "pythonInstanceLocation");
        assertEquals(processRuntimeFactory.getJavaInstanceJarFile(), "javaInstanceJarLocation");
        // Test thread runtime
        WorkerConfig.ThreadContainerFactory threadContainerFactory
                = new WorkerConfig.ThreadContainerFactory();
        threadContainerFactory.setThreadGroupName("threadGroupName");
        workerConfig = new WorkerConfig();
        workerConfig.setThreadContainerFactory(threadContainerFactory);
        workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
        functionRuntimeManager = new FunctionRuntimeManager(
                workerConfig,
                mock(PulsarWorkerService.class),
                mock(Namespace.class),
                mock(MembershipManager.class),
                mock(ConnectorsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                mock(ErrorNotifier.class));
        assertEquals(functionRuntimeManager.getRuntimeFactory().getClass(), ThreadRuntimeFactory.class);
        ThreadRuntimeFactory threadRuntimeFactory =
                (ThreadRuntimeFactory) functionRuntimeManager.getRuntimeFactory();
        assertEquals(threadRuntimeFactory.getThreadGroup().getName(), "threadGroupName");
    }
}
/**
 * Restart semantics for a thread-runtime worker ("worker-1"): instances
 * assigned to this worker are stopped in-process, while instances assigned to
 * the other member ("worker-2") are restarted by delegating to pulsar-admin.
 * Covered for all three component types (function, source, sink) via
 * {@link #verifyRestart}.
 */
@Test
public void testThreadFunctionInstancesRestart() throws Exception {
    WorkerConfig workerConfig = new WorkerConfig();
    workerConfig.setWorkerId("worker-1");
    workerConfig.setFunctionRuntimeFactoryClassName(ThreadRuntimeFactory.class.getName());
    workerConfig.setFunctionRuntimeFactoryConfigs(
            ObjectMapperFactory.getThreadLocal().convertValue(
                    new ThreadRuntimeFactoryConfig().setThreadGroupName("test"), Map.class));
    workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
    workerConfig.setStateStorageServiceUrl("foo");
    workerConfig.setFunctionAssignmentTopicName("assignments");
    PulsarWorkerService workerService = mock(PulsarWorkerService.class);
    // mock pulsarAdmin sources sinks functions
    // (the admin client is what the manager uses for remote restarts)
    PulsarAdmin pulsarAdmin = mock(PulsarAdmin.class);
    Sources sources = mock(Sources.class);
    doNothing().when(sources).restartSource(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any());
    doReturn(sources).when(pulsarAdmin).sources();
    Sinks sinks = mock(Sinks.class);
    doReturn(sinks).when(pulsarAdmin).sinks();
    Functions functions = mock(Functions.class);
    doNothing().when(functions)
            .restartFunction(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any());
    doReturn(functions).when(pulsarAdmin).functions();
    doReturn(pulsarAdmin).when(workerService).getFunctionAdmin();
    try (final MockedStatic<RuntimeFactory> runtimeFactoryMockedStatic = Mockito
            .mockStatic(RuntimeFactory.class);) {
        mockRuntimeFactory(runtimeFactoryMockedStatic);
        // Two-member cluster so assignments can be either local or remote.
        List<WorkerInfo> workerInfos = new LinkedList<>();
        workerInfos.add(WorkerInfo.of("worker-1", "localhost", 0));
        workerInfos.add(WorkerInfo.of("worker-2", "localhost", 0));
        MembershipManager membershipManager = mock(MembershipManager.class);
        doReturn(workerInfos).when(membershipManager).getCurrentMembership();
        // build three types of FunctionMetaData
        Function.FunctionMetaData function = Function.FunctionMetaData.newBuilder().setFunctionDetails(
                Function.FunctionDetails.newBuilder()
                        .setTenant("test-tenant").setNamespace("test-namespace").setName("function")
                        .setComponentType(Function.FunctionDetails.ComponentType.FUNCTION)).build();
        Function.FunctionMetaData source = Function.FunctionMetaData.newBuilder().setFunctionDetails(
                Function.FunctionDetails.newBuilder()
                        .setTenant("test-tenant").setNamespace("test-namespace").setName("source")
                        .setComponentType(Function.FunctionDetails.ComponentType.SOURCE)).build();
        Function.FunctionMetaData sink = Function.FunctionMetaData.newBuilder().setFunctionDetails(
                Function.FunctionDetails.newBuilder()
                        .setTenant("test-tenant").setNamespace("test-namespace").setName("sink")
                        .setComponentType(Function.FunctionDetails.ComponentType.SINK)).build();
        // spy() so verifyRestart can both stub findFunctionAssignments and
        // verify which restart path was taken.
        FunctionRuntimeManager functionRuntimeManager = spy(new FunctionRuntimeManager(
                workerConfig,
                workerService,
                mock(Namespace.class),
                membershipManager,
                mock(ConnectorsManager.class),
                mock(FunctionsManager.class),
                mock(FunctionMetaDataManager.class),
                mock(WorkerStatsManager.class),
                mock(ErrorNotifier.class)));
        // verify restart function/source/sink using different assignment
        verifyRestart(functionRuntimeManager, function, "worker-1", false, false);
        verifyRestart(functionRuntimeManager, function, "worker-2", false, true);
        verifyRestart(functionRuntimeManager, source, "worker-1", false, false);
        verifyRestart(functionRuntimeManager, source, "worker-2", false, true);
        verifyRestart(functionRuntimeManager, sink, "worker-1", false, false);
        verifyRestart(functionRuntimeManager, sink, "worker-2", false, true);
    }
}
/**
 * Restart semantics for a kubernetes (externally managed) runtime: the
 * {@code externallyManaged=true} flag is passed through to the pulsar-admin
 * restart path for remote ("worker-2") assignments, while local ("worker-1")
 * assignments are still stopped in-process. Covered for function, source and
 * sink via {@link #verifyRestart}.
 */
@Test
public void testKubernetesFunctionInstancesRestart() throws Exception {
    WorkerConfig workerConfig = new WorkerConfig();
    workerConfig.setWorkerId("worker-1");
    workerConfig.setPulsarServiceUrl(PULSAR_SERVICE_URL);
    workerConfig.setStateStorageServiceUrl("foo");
    workerConfig.setFunctionAssignmentTopicName("assignments");
    WorkerConfig.KubernetesContainerFactory kubernetesContainerFactory
            = new WorkerConfig.KubernetesContainerFactory();
    workerConfig.setKubernetesContainerFactory(kubernetesContainerFactory);
    // Stub every KubernetesRuntimeFactory constructed by the manager:
    // no real k8s client, and externallyManaged() reports true.
    try (MockedConstruction<KubernetesRuntimeFactory> mocked = Mockito.mockConstruction(KubernetesRuntimeFactory.class,
            (mockedKubernetesRuntimeFactory, context) -> {
                doNothing().when(mockedKubernetesRuntimeFactory).initialize(
                        any(WorkerConfig.class),
                        any(AuthenticationConfig.class),
                        any(SecretsProviderConfigurator.class),
                        any(),
                        any(),
                        any()
                );
                doNothing().when(mockedKubernetesRuntimeFactory).setupClient();
                doReturn(true).when(mockedKubernetesRuntimeFactory).externallyManaged();
            })) {
        PulsarWorkerService workerService = mock(PulsarWorkerService.class);
        // mock pulsarAdmin sources sinks functions
        // (the admin client is what the manager uses for remote restarts)
        PulsarAdmin pulsarAdmin = mock(PulsarAdmin.class);
        Sources sources = mock(Sources.class);
        doNothing().when(sources)
                .restartSource(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any());
        doReturn(sources).when(pulsarAdmin).sources();
        Sinks sinks = mock(Sinks.class);
        doReturn(sinks).when(pulsarAdmin).sinks();
        Functions functions = mock(Functions.class);
        doNothing().when(functions)
                .restartFunction(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any());
        doReturn(functions).when(pulsarAdmin).functions();
        doReturn(pulsarAdmin).when(workerService).getFunctionAdmin();
        try (final MockedStatic<RuntimeFactory> runtimeFactoryMockedStatic = Mockito
                .mockStatic(RuntimeFactory.class);) {
            mockRuntimeFactory(runtimeFactoryMockedStatic);
            // Two-member cluster so assignments can be either local or remote.
            List<WorkerInfo> workerInfos = new LinkedList<>();
            workerInfos.add(WorkerInfo.of("worker-1", "localhost", 0));
            workerInfos.add(WorkerInfo.of("worker-2", "localhost", 0));
            MembershipManager membershipManager = mock(MembershipManager.class);
            doReturn(workerInfos).when(membershipManager).getCurrentMembership();
            // build three types of FunctionMetaData
            Function.FunctionMetaData function = Function.FunctionMetaData.newBuilder().setFunctionDetails(
                    Function.FunctionDetails.newBuilder()
                            .setTenant("test-tenant").setNamespace("test-namespace").setName("function")
                            .setComponentType(Function.FunctionDetails.ComponentType.FUNCTION)).build();
            Function.FunctionMetaData source = Function.FunctionMetaData.newBuilder().setFunctionDetails(
                    Function.FunctionDetails.newBuilder()
                            .setTenant("test-tenant").setNamespace("test-namespace").setName("source")
                            .setComponentType(Function.FunctionDetails.ComponentType.SOURCE)).build();
            Function.FunctionMetaData sink = Function.FunctionMetaData.newBuilder().setFunctionDetails(
                    Function.FunctionDetails.newBuilder()
                            .setTenant("test-tenant").setNamespace("test-namespace").setName("sink")
                            .setComponentType(Function.FunctionDetails.ComponentType.SINK)).build();
            // spy() so verifyRestart can both stub findFunctionAssignments and
            // verify which restart path was taken.
            FunctionRuntimeManager functionRuntimeManager = spy(new FunctionRuntimeManager(
                    workerConfig,
                    workerService,
                    mock(Namespace.class),
                    membershipManager,
                    mock(ConnectorsManager.class),
                    mock(FunctionsManager.class),
                    mock(FunctionMetaDataManager.class),
                    mock(WorkerStatsManager.class),
                    mock(ErrorNotifier.class)));
            // verify restart function/source/sink using different assignment
            verifyRestart(functionRuntimeManager, function, "worker-1", true, false);
            verifyRestart(functionRuntimeManager, function, "worker-2", true, true);
            verifyRestart(functionRuntimeManager, source, "worker-1", true, false);
            verifyRestart(functionRuntimeManager, source, "worker-2", true, true);
            verifyRestart(functionRuntimeManager, sink, "worker-1", true, false);
            verifyRestart(functionRuntimeManager, sink, "worker-2", true, true);
        }
    }
}
/**
 * Drives a restart of all instances of the function under test through the
 * spied {@code FunctionRuntimeManager} and verifies which restart path ran.
 *
 * @param runtimeManager           spied manager whose assignment lookup is stubbed here
 * @param metaData                 metadata of the component being restarted
 * @param workerId                 worker the single assignment is placed on
 * @param externallyManaged        expected externally-managed flag forwarded to the admin path
 * @param expectRestartByPulsarAdmin true when the restart should be delegated to pulsar-admin
 */
private static void verifyRestart(FunctionRuntimeManager runtimeManager, Function.FunctionMetaData metaData,
        String workerId, boolean externallyManaged, boolean expectRestartByPulsarAdmin) throws Exception {
    Function.Instance instance = Function.Instance.newBuilder()
            .setFunctionMetaData(metaData)
            .setInstanceId(0)
            .build();
    Function.Assignment assignment = Function.Assignment.newBuilder()
            .setWorkerId(workerId)
            .setInstance(instance)
            .build();
    // Make the spy report exactly this one assignment for the lookup below.
    doReturn(ImmutableList.of(assignment)).when(runtimeManager)
            .findFunctionAssignments("test-tenant", "test-namespace", "function");
    runtimeManager.restartFunctionInstances("test-tenant", "test-namespace", "function");
    if (!expectRestartByPulsarAdmin) {
        // Local path: the instance is stopped in-process.
        String fqInstanceId = FunctionCommon.getFullyQualifiedInstanceId(assignment.getInstance());
        verify(runtimeManager).stopFunction(eq(fqInstanceId), eq(true));
        return;
    }
    // Delegated path: restart goes through pulsar-admin with the
    // externally-managed flag forwarded unchanged.
    verify(runtimeManager, times(1)).restartFunctionUsingPulsarAdmin(
            eq(assignment), eq("test-tenant"), eq("test-namespace"), eq("function"), eq(externallyManaged));
}
}
| |
/*
* Channel.java
*
* This file is part of the Sorcix Java IRC Library (sIRC).
*
* Copyright (C) 2008-2010 Vic Demuzere http://sorcix.com
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use, copy,
* modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.sorcix.sirc;
import java.util.concurrent.ConcurrentHashMap;
import java.util.Iterator;
import java.util.Map;
/**
 * Represents a channel on the IRC server.
 *
 * @author Sorcix
 */
public final class Channel {
    /** IrcConnection used to send messages to this channel. */
    private final IrcConnection irc;
    /** Channel name. */
    private final String name;
    /** The topic of this channel, or {@code null} while unknown. */
    private String topic;
    /** The user list; {@code null} unless this object is shared (global). */
    private Map<String, User> users;
    /** Possible channel prefixes. */
    protected static final String CHANNEL_PREFIX = "#&+!";

    /**
     * Creates a new {@code Channel} object with given name.
     *
     * @param name The channel name.
     * @param irc The IrcConnection used to send messages to this
     *            channel.
     * @param global Whether this object is going to be shared. Only
     *            shared channel objects maintain a user list.
     */
    protected Channel(final String name, final IrcConnection irc, final boolean global) {
        this.name = name;
        this.irc = irc;
        if (global) {
            this.users = new ConcurrentHashMap<String, User>(100, .75f, 2);
        } else {
            this.users = null;
        }
    }

    /**
     * Adds a user to the user list in this channel. Ignored for
     * non-shared channel objects and for users already in the list.
     *
     * @param user The user to add.
     */
    protected void addUser(final User user) {
        if ((this.users != null) && !this.users.containsKey(user.getNickLower())) {
            this.users.put(user.getNickLower(), user);
        }
    }

    /**
     * Bans a user from this channel: by hostname when it is known,
     * otherwise by nickname.
     *
     * @param user The user to ban from this channel.
     * @param kick Whether to kick this user after banning.
     */
    public void ban(final User user, final boolean kick) {
        if (user.getHostName() != null) {
            this.setMode("+b *!*@*" + user.getHostName());
        } else {
            this.setMode("+b *!" + user.getNick() + "@*");
        }
        // kick if requested
        if (kick) {
            this.kick(user, "Banned");
        }
    }

    /**
     * Changes the topic of this channel. Note that you need
     * privileges to do this.
     *
     * @param topic The new topic.
     */
    public void changeTopic(final String topic) {
        this.irc.getOutput().send("TOPIC " + this.getName() + " :" + topic);
    }

    /**
     * Two channels are equal when their names match (channel names are
     * case-insensitive on IRC) and they belong to the same connection.
     */
    @Override
    public boolean equals(final Object channel) {
        // Explicit type check instead of the former catch-all exception
        // handler: no exceptions for control flow, same observable result.
        if (!(channel instanceof Channel)) {
            return false;
        }
        final Channel other = (Channel) channel;
        final String otherName = other.getName();
        return (otherName != null) && otherName.equalsIgnoreCase(this.name)
                && (this.irc != null) && this.irc.equals(other.irc);
    }

    /**
     * Hash code consistent with {@link #equals(Object)}: equality
     * compares names case-insensitively, so the lower-cased name is
     * hashed. (Previously the raw name was hashed, so channels that
     * compared equal could produce different hash codes, breaking use
     * as hash-map keys.)
     */
    @Override
    public int hashCode() {
        return this.name.toLowerCase().hashCode();
    }

    /**
     * Returns the channel name.
     *
     * @return The channel name.
     */
    public String getName() {
        return this.name;
    }

    /**
     * Gives the topic of this channel, or null if unknown.
     *
     * @return The topic.
     */
    public String getTopic() {
        return this.topic;
    }

    /**
     * Retrieves a global User object for a user in this channel. This
     * method is not public because end-users should use
     * {@link IrcConnection#createUser(String, String)} which always
     * returns a {@link User}, even if the user is not in this
     * channel. Only valid on shared channel objects (see
     * {@link #isGlobal()}).
     *
     * @param nickLower The nickname of this user.
     * @return A user object, or null if the user isn't in this
     *         channel.
     */
    protected User getUser(final String nickLower) {
        return this.users.get(nickLower);
    }

    /**
     * Retrieves the User object representing ourselves in this
     * channel, or null if we are not listed. Only valid on shared
     * channel objects (see {@link #isGlobal()}).
     */
    public User getUs() {
        return this.users.get(this.irc.getClient().getNickLower());
    }

    /**
     * Get an Iterator containing all users in this channel.
     *
     * <pre>
     * Iterator&lt;User&gt; users = channel.getUsers();
     * User current;
     * while (users.hasNext()) {
     *     current = users.next();
     *     System.out.println(current.getNick() + " is in this channel!");
     * }
     * </pre>
     *
     * @return All users in this channel.
     * @see #isGlobal()
     */
    public Iterator<User> getUsers() {
        return this.users.values().iterator();
    }

    /**
     * Give a user admin privileges in this channel. (Not supported by
     * RFC!)
     *
     * @param user The user to give admin privileges.
     * @since 1.0.0
     */
    public void giveAdmin(final User user) {
        this.setMode(User.MODE_ADMIN, user, true);
    }

    /**
     * Give a user founder privileges in this channel. (Not supported
     * by RFC!)
     *
     * @param user The user to give founder privileges.
     * @since 1.0.0
     */
    public void giveFounder(final User user) {
        this.setMode(User.MODE_FOUNDER, user, true);
    }

    /**
     * Give a user halfop privileges in this channel. (Not supported
     * by RFC!)
     *
     * @since 1.0.0
     * @param user The user to give halfop privileges.
     */
    public void giveHalfop(final User user) {
        this.setMode(User.MODE_HALF_OP, user, true);
    }

    /**
     * Give a user operator privileges in this channel.
     *
     * @param user The user to give operator privileges.
     */
    public void giveOperator(final User user) {
        this.setMode(User.MODE_OPERATOR, user, true);
    }

    /**
     * Give a user voice privileges in this channel.
     *
     * @param user The user to give voice privileges.
     */
    public void giveVoice(final User user) {
        this.setMode(User.MODE_VOICE, user, true);
    }

    /**
     * Checks whether given user is in this channel.
     *
     * @param nick The nickname to check.
     * @return True if given user is in this channel, false otherwise.
     */
    public boolean hasUser(final String nick) {
        return (this.users != null) && this.users.containsKey(nick.toLowerCase());
    }

    /**
     * Checks whether given user is in this channel.
     *
     * @param user The user to check.
     * @return True if given user is in this channel, false otherwise.
     */
    public boolean hasUser(final User user) {
        return this.hasUser(user.getNickLower());
    }

    /**
     * Checks whether this Channel object is shared. Shared channel
     * objects contain a list of users.
     *
     * @return True if this channel object is shared.
     */
    public boolean isGlobal() {
        return this.users != null;
    }

    /**
     * Attempts to join this channel.
     */
    public void join() {
        this.irc.getOutput().send("JOIN " + this.getName());
    }

    /**
     * Attempts to join this channel using given password.
     *
     * @param password The password needed to join this channel.
     */
    public void join(final String password) {
        this.irc.getOutput().send("JOIN " + this.getName() + " " + password);
    }

    /**
     * Kicks given user from this channel.
     *
     * @param user The user to kick from this channel.
     */
    public void kick(final User user) {
        this.irc.getOutput().send("KICK " + this.getName() + " " + user.getNick());
    }

    /**
     * Kicks given user from this channel, with reason.
     *
     * @param user The user to kick from this channel.
     * @param reason The reason why this user was kicked.
     */
    public void kick(final User user, final String reason) {
        this.irc.getOutput().send("KICK " + this.getName() + " " + user.getNick() + " :" + reason);
    }

    /**
     * Attempts to leave/part this channel.
     */
    public void part() {
        this.irc.getOutput().send("PART " + this.getName());
    }

    /**
     * Remove admin privileges from a user in this channel.
     *
     * @param user The user to remove admin privileges from.
     * @since 1.0.0
     */
    public void removeAdmin(final User user) {
        this.setMode(User.MODE_ADMIN, user, false);
    }

    /**
     * Remove founder privileges from a user in this channel.
     *
     * @param user The user to remove founder privileges from.
     * @since 1.0.0
     */
    public void removeFounder(final User user) {
        this.setMode(User.MODE_FOUNDER, user, false);
    }

    /**
     * Remove halfop privileges from a user in this channel.
     *
     * @param user The user to remove halfop privileges from.
     * @since 1.0.0
     */
    public void removeHalfop(final User user) {
        this.setMode(User.MODE_HALF_OP, user, false);
    }

    /**
     * Remove operator privileges from a user in this channel.
     *
     * @param user The user to remove operator privileges from.
     */
    public void removeOperator(final User user) {
        this.setMode(User.MODE_OPERATOR, user, false);
    }

    /**
     * Removes a user from the user list in this channel. Ignored for
     * non-shared channel objects.
     *
     * @param user The user to remove.
     */
    protected void removeUser(final User user) {
        if (this.users != null) {
            // Map.remove is a no-op for absent keys, so the former
            // containsKey pre-check was redundant.
            this.users.remove(user.getNickLower());
        }
    }

    /**
     * Remove voice privileges from a user in this channel.
     *
     * @param user The user to remove voice privileges from.
     */
    public void removeVoice(final User user) {
        this.setMode(User.MODE_VOICE, user, false);
    }

    /**
     * Changes the nickname of a user in this channel.
     *
     * @param old The old nickname.
     * @param neww The new nickname.
     */
    protected void renameUser(final String old, final String neww) {
        if ((this.users != null) && this.users.containsKey(old)) {
            final User user = this.users.get(old);
            this.users.remove(old);
            user.setNick(neww);
            this.users.put(user.getNickLower(), user);
        }
    }

    /**
     * Send message to channel.
     *
     * @param message The message to send.
     * @see #sendMessage(String)
     */
    public void send(final String message) {
        this.sendMessage(message);
    }

    /**
     * Sends a CTCP ACTION command.
     *
     * @param action The action to send.
     */
    public void sendAction(final String action) {
        this.sendCtcpAction(action);
    }

    /**
     * Sends CTCP request. This is a very primitive way to send CTCP
     * commands, other methods are preferred.
     *
     * @param command Command to send.
     */
    public void sendCtcp(final String command) {
        this.irc.getOutput().send("PRIVMSG " + this.getName() + " :" + IrcPacket.CTCP + command + IrcPacket.CTCP);
    }

    /**
     * Sends a CTCP ACTION command. Empty or null actions are ignored.
     *
     * @param action The action to send.
     * @see #sendCtcp(String)
     */
    protected void sendCtcpAction(final String action) {
        if ((action != null) && (action.length() != 0)) {
            this.sendCtcp("ACTION " + action);
        }
    }

    /**
     * Send message to channel.
     *
     * @param message The message to send.
     */
    public void sendMessage(final String message) {
        this.irc.getOutput().send("PRIVMSG " + this.getName() + " :" + message);
    }

    /**
     * Send notice to channel.
     *
     * @param message The notice to send.
     */
    public void sendNotice(final String message) {
        this.irc.getOutput().send("NOTICE " + this.getName() + " :" + message);
    }

    /**
     * Changes a channel mode for given user.
     *
     * @param mode The mode character.
     * @param user The target user.
     * @param toggle True to enable the mode, false to disable.
     */
    public void setMode(final char mode, final User user, final boolean toggle) {
        if (toggle) {
            this.setMode("+" + mode + " " + user.getNick());
        } else {
            this.setMode("-" + mode + " " + user.getNick());
        }
    }

    /**
     * Changes a channel mode. The channel name is automatically
     * added.
     *
     * <pre>
     * setMode("+m");
     * </pre>
     *
     * @param mode The mode to change.
     */
    public void setMode(final String mode) {
        this.irc.getOutput().send("MODE " + this.getName() + " " + mode);
    }

    /**
     * Changes the topic of this channel. This does not send a request
     * to the IRC server, to change the topic on the server, use
     * {@link #changeTopic(String)}.
     *
     * @param topic The new topic.
     */
    protected void setTopic(final String topic) {
        this.topic = topic;
    }

    @Override
    public String toString() {
        return this.getName();
    }

    /**
     * Updates the current shared User object with changes in a fresh
     * one and returns the updated shared object.
     *
     * @param user The fresh User object.
     * @param createNew Whether to add this user to the channel if it
     *            didn't exist.
     * @return The updated shared User object, or null when the user
     *         was unknown and {@code createNew} was false.
     */
    protected User updateUser(final User user, final boolean createNew) {
        if (this.hasUser(user.getNickLower())) {
            // update user if it exists
            final User shared = this.getUser(user.getNickLower());
            shared.updateUser(user);
            return shared;
        } else if (createNew) {
            // create a new one
            this.addUser(user);
            return user;
        }
        return null;
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.testFramework;
import com.intellij.ProjectTopics;
import com.intellij.application.options.CodeStyle;
import com.intellij.codeInspection.InspectionProfileEntry;
import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.ex.InspectionToolWrapper;
import com.intellij.ide.highlighter.ProjectFileType;
import com.intellij.ide.startup.StartupManagerEx;
import com.intellij.idea.IdeaLogger;
import com.intellij.lang.Language;
import com.intellij.mock.MockApplication;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.actionSystem.EmptyAction;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.application.ex.ApplicationEx;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.impl.EditorFactoryImpl;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.impl.FileTypeManagerImpl;
import com.intellij.openapi.module.EmptyModuleType;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.ModuleListener;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.project.impl.ProjectImpl;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.AnnotationOrderRootType;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.roots.ex.ProjectRootManagerEx;
import com.intellij.openapi.roots.impl.ProjectRootManagerImpl;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.EmptyRunnable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.encoding.EncodingManager;
import com.intellij.openapi.vfs.encoding.EncodingManagerImpl;
import com.intellij.openapi.vfs.impl.VirtualFilePointerTracker;
import com.intellij.openapi.vfs.newvfs.persistent.PersistentFS;
import com.intellij.openapi.vfs.newvfs.persistent.PersistentFSImpl;
import com.intellij.project.TestProjectManager;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiFileFactory;
import com.intellij.psi.PsiManager;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.codeStyle.CustomCodeStyleSettings;
import com.intellij.psi.impl.PsiDocumentManagerImpl;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageManagerImpl;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.LocalTimeCounter;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.PathKt;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.IOException;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
public abstract class LightPlatformTestCase extends UsefulTestCase implements DataProvider {
private static Project ourProject;
private static Module ourModule;
private static PsiManager ourPsiManager;
private static boolean ourAssertionsInTestDetected;
private static VirtualFile ourSourceRoot;
private static LightProjectDescriptor ourProjectDescriptor;
private static SdkLeakTracker myOldSdks;
private ThreadTracker myThreadTracker;
static {
PlatformTestUtil.registerProjectCleanup(LightPlatformTestCase::closeAndDeleteProject);
}
private VirtualFilePointerTracker myVirtualFilePointerTracker;
private CodeStyleSettingsTracker myCodeStyleSettingsTracker;
private Disposable mySdkParentDisposable = Disposer.newDisposable("sdk for project in light tests");
/**
 * Returns the shared light project used by this test case, e.g. for
 * retrieving project-level components.
 */
@SuppressWarnings("MethodMayBeStatic")
protected Project getProject() {
    return LightPlatformTestCase.ourProject;
}
/**
 * Returns the shared light module used by this test case, e.g. for
 * retrieving module-level components.
 */
@SuppressWarnings("MethodMayBeStatic")
protected Module getModule() {
    return LightPlatformTestCase.ourModule;
}
/**
 * Lazily cached shortcut for {@code PsiManager.getInstance(getProject())}.
 */
@NotNull
protected PsiManager getPsiManager() {
    PsiManager manager = ourPsiManager;
    if (manager == null) {
        manager = PsiManager.getInstance(getProject());
        ourPsiManager = manager;
    }
    return manager;
}
/**
 * Delegates to {@code TestApplicationManager.getInstanceIfCreated()} to
 * obtain the test application manager without creating one.
 */
public static TestApplicationManager getApplication() {
    final TestApplicationManager manager = TestApplicationManager.getInstanceIfCreated();
    return manager;
}
/**
 * Prints test-run statistics (setup/teardown costs, app/project instance
 * counts, total GC time and loaded-class count) in TeamCity
 * buildStatisticValue service-message format.
 */
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public static void reportTestExecutionStatistics() {
    System.out.println("----- TEST STATISTICS -----");
    UsefulTestCase.logSetupTeardownCosts();
    System.out.printf("##teamcity[buildStatisticValue key='ideaTests.appInstancesCreated' value='%d']%n",
                      MockApplication.INSTANCES_CREATED);
    System.out.printf("##teamcity[buildStatisticValue key='ideaTests.projectInstancesCreated' value='%d']%n",
                      TestProjectManager.Companion.getTotalCreatedProjectCount());
    // Sum collection time over every registered garbage collector.
    long gcMillis = 0;
    for (GarbageCollectorMXBean gcBean : ManagementFactory.getGarbageCollectorMXBeans()) {
        gcMillis += gcBean.getCollectionTime();
    }
    System.out.printf("##teamcity[buildStatisticValue key='ideaTests.gcTimeMs' value='%d']%n", gcMillis);
    System.out.printf("##teamcity[buildStatisticValue key='ideaTests.classesLoaded' value='%d']%n",
                      ManagementFactory.getClassLoadingMXBean().getTotalLoadedClassCount());
}
/** Nulls out all declared fields of this test instance, walking up to LightPlatformTestCase. */
protected void resetAllFields() {
  resetClassFields(getClass());
}
/**
 * Clears declared fields for {@code aClass} and each superclass up to and including
 * {@link LightPlatformTestCase} itself.
 */
private void resetClassFields(@NotNull Class<?> aClass) {
  for (Class<?> current = aClass; ; current = current.getSuperclass()) {
    try {
      UsefulTestCase.clearDeclaredFields(this, current);
    }
    catch (IllegalAccessException e) {
      throw new RuntimeException(e);
    }
    if (current == LightPlatformTestCase.class) {
      break;
    }
  }
}
// Drops all persisted VFS file contents so a freshly created light project starts clean.
private static void cleanPersistedVFSContent() {
  ((PersistentFSImpl)PersistentFS.getInstance()).cleanPersistedContents();
}
/**
 * Creates the shared light project and module from {@code descriptor}, first closing any
 * previously cached project. If setup fails, the half-initialized project is closed and
 * deleted before the failure is rethrown (with cleanup errors attached as suppressed).
 */
private static void initProject(@NotNull LightProjectDescriptor descriptor) {
  ourProjectDescriptor = descriptor;
  if (ourProject != null) {
    closeAndDeleteProject();
  }
  ApplicationManager.getApplication().runWriteAction(LightPlatformTestCase::cleanPersistedVFSContent);
  Path tempDirectory = TemporaryDirectory.generateTemporaryPath(ProjectImpl.LIGHT_PROJECT_NAME + ProjectFileType.DOT_DEFAULT_EXTENSION);
  ourProject = Objects.requireNonNull(ProjectManagerEx.getInstanceEx().newProject(tempDirectory, new OpenProjectTaskBuilder().build()));
  HeavyPlatformTestCase.synchronizeTempDirVfs(tempDirectory);
  // PSI manager belongs to the old project; re-resolve lazily via getPsiManager().
  ourPsiManager = null;
  try {
    // The descriptor reports the module/source root it creates through this callback.
    ourProjectDescriptor.setUpProject(ourProject, new LightProjectDescriptor.SetupHandler() {
      @Override
      public void moduleCreated(@NotNull Module module) {
        //noinspection AssignmentToStaticFieldFromInstanceMethod
        ourModule = module;
      }
      @Override
      public void sourceRootCreated(@NotNull VirtualFile sourceRoot) {
        //noinspection AssignmentToStaticFieldFromInstanceMethod
        ourSourceRoot = sourceRoot;
      }
    });
  }
  catch (Throwable e) {
    try {
      closeAndDeleteProject();
    }
    catch (Throwable suppressed) {
      e.addSuppressed(suppressed);
    }
    throw new RuntimeException(e);
  }
}
/**
 * @return The only source root
 */
public static VirtualFile getSourceRoot() {
  return ourSourceRoot;
}
@Override
protected void setUp() throws Exception {
  // Warm up timing statistics so performance tests measure against a stable baseline.
  if (isPerformanceTest()) {
    Timings.getStatistics();
  }
  TestApplicationManager testAppManager = TestApplicationManager.getInstance();
  // The whole setup runs on the EDT: project configuration and PSI access below are
  // only allowed from the dispatch thread.
  EdtTestUtil.runInEdtAndWait(() -> {
    super.setUp();
    testAppManager.setDataProvider(this);
    LightProjectDescriptor descriptor = getProjectDescriptor();
    doSetup(descriptor, configureLocalInspectionTools(), getTestRootDisposable(), mySdkParentDisposable);
    InjectedLanguageManagerImpl.pushInjectors(getProject());
    // Track code style settings so tests that corrupt them fail in tearDown(); disabled
    // for stress tests and when no real application is available.
    myCodeStyleSettingsTracker = new CodeStyleSettingsTracker(
      () -> isStressTest() ||
            ApplicationManager.getApplication() == null ||
            ApplicationManager.getApplication() instanceof MockApplication ? null : CodeStyle.getDefaultSettings());
    myThreadTracker = new ThreadTracker();
    // NOTE(review): roots are queried before the pointer tracker is created — presumably
    // so pointers created by that query are not later flagged as leaks; confirm.
    ModuleRootManager.getInstance(ourModule).orderEntries().getAllLibrariesAndSdkClassesRoots();
    myVirtualFilePointerTracker = new VirtualFilePointerTracker();
  });
}
/**
 * @return descriptor of the light project to (re)use for this test; built from the
 * module type and SDK supplied by {@link #getModuleTypeId()} and {@link #getProjectJDK()}.
 */
@NotNull
protected LightProjectDescriptor getProjectDescriptor() {
  String moduleTypeId = getModuleTypeId();
  Sdk sdk = getProjectJDK();
  return new SimpleLightProjectDescriptor(moduleTypeId, sdk);
}
/**
 * Opens (or reuses) the shared light project configured by {@code descriptor}, wires up
 * inspections and sanity listeners, and returns the resulting project/module pair.
 * Reuse happens when the previous test ran with an equal descriptor; a reused project
 * gets a full roots-changed event to clear caches and reindex.
 */
public static @NotNull Pair.NonNull<Project, Module> doSetup(@NotNull LightProjectDescriptor descriptor,
                                                             LocalInspectionTool @NotNull [] localInspectionTools,
                                                             @NotNull Disposable parentDisposable, @NotNull Disposable sdkParentDisposable) {
  Application app = ApplicationManager.getApplication();
  Ref<Boolean> reusedProject = new Ref<>(true);
  app.invokeAndWait(() -> {
    IdeaLogger.ourErrorsOccurred = null;
    app.assertIsDispatchThread();
    // Snapshot registered SDKs so tearDown() can detect leaked JDK table entries.
    myOldSdks = new SdkLeakTracker();
    descriptor.registerSdk(sdkParentDisposable);
    if (ourProject == null || ourProjectDescriptor == null || !ourProjectDescriptor.equals(descriptor)) {
      initProject(descriptor);
      reusedProject.set(false);
    }
  });
  Project project = ourProject;
  try {
    PlatformTestUtil.openProject(project);
  }
  catch (Throwable e) {
    // Drop the cached project so the next test starts from scratch.
    setProject(null);
    throw e;
  }
  Ref<Pair.NonNull<Project, Module>> result = new Ref<>();
  app.invokeAndWait(() -> {
    if (reusedProject.get()) {
      // clear all caches, reindex
      WriteAction.run(() -> ProjectRootManagerEx.getInstanceEx(project).makeRootsChange(EmptyRunnable.getInstance(), false, true));
    }
    // Light tests share one project; a module added mid-test would leak into later tests.
    MessageBusConnection connection = project.getMessageBus().connect(parentDisposable);
    connection.subscribe(ProjectTopics.MODULES, new ModuleListener() {
      @Override
      public void moduleAdded(@NotNull Project project, @NotNull Module module) {
        fail("Adding modules is not permitted in light tests.");
      }
    });
    clearUncommittedDocuments(project);
    InspectionsKt.configureInspections(localInspectionTools, project, parentDisposable);
    assertFalse(PsiManager.getInstance(project).isDisposed());
    Boolean passed = null;
    try {
      passed = StartupManagerEx.getInstanceEx(project).startupActivityPassed();
    }
    catch (Exception ignored) {
    }
    assertTrue("open: " + project.isOpen() +
               "; disposed:" + project.isDisposed() +
               "; startup passed:" + passed +
               "; all open projects: " + Arrays.asList(ProjectManager.getInstance().getOpenProjects()), project.isInitialized());
    CodeStyle.setTemporarySettings(project, CodeStyle.createTestSettings());
    // A previous test must not leave unsaved documents behind; drop them and fail if any.
    FileDocumentManager manager = FileDocumentManager.getInstance();
    if (manager instanceof FileDocumentManagerImpl) {
      Document[] unsavedDocuments = manager.getUnsavedDocuments();
      manager.saveAllDocuments();
      app.runWriteAction(((FileDocumentManagerImpl)manager)::dropAllUnsavedDocuments);
      assertEmpty("There are unsaved documents", Arrays.asList(unsavedDocuments));
    }
    ActionUtil.performActionDumbAwareWithCallbacks(
      new EmptyAction(true), AnActionEvent.createFromDataContext("", null, DataContext.EMPTY_CONTEXT));
    // startup activities
    PlatformTestUtil.dispatchAllInvocationEventsInIdeEventQueue();
    ((FileTypeManagerImpl)FileTypeManager.getInstance()).drainReDetectQueue();
    result.set(Pair.createNonNull(project, ourModule));
  });
  return result.get();
}
/** Enables the given inspections for the current project for the duration of the test. */
protected void enableInspectionTools(InspectionProfileEntry @NotNull ... tools) {
  InspectionsKt.enableInspectionTools(getProject(), getTestRootDisposable(), tools);
}
/** Enables a single wrapped inspection for the current project for the duration of the test. */
protected void enableInspectionTool(@NotNull InspectionToolWrapper<?,?> toolWrapper) {
  InspectionsKt.enableInspectionTool(getProject(), toolWrapper, getTestRootDisposable());
}
/** Enables a single inspection for the current project for the duration of the test. */
protected void enableInspectionTool(@NotNull InspectionProfileEntry tool) {
  InspectionsKt.enableInspectionTool(getProject(), tool, getTestRootDisposable());
}
/**
 * Override to have inspections configured automatically during {@link #setUp()}.
 * @return inspections to enable before the test runs; none by default.
 */
protected LocalInspectionTool @NotNull [] configureLocalInspectionTools() {
  return LocalInspectionTool.EMPTY_ARRAY;
}
/**
 * Tears down the test in a fixed sequence; each step runs even if earlier ones fail
 * (RunAll collects and rethrows all failures together). Steps include project/app
 * teardown, leak checks (SDKs, threads, injectors, virtual file pointers), and
 * finally resetting this instance's fields to avoid retaining heavy objects.
 */
@Override
protected void tearDown() throws Exception {
  Project project = getProject();
  // don't use method references here to make stack trace reading easier
  //noinspection Convert2MethodRef
  RunAll.runAll(
    () -> {
      if (ApplicationManager.getApplication() != null) {
        CodeStyle.dropTemporarySettings(project);
      }
    },
    () -> {
      if (myCodeStyleSettingsTracker != null) {
        myCodeStyleSettingsTracker.checkForSettingsDamage();
      }
    },
    () -> {
      if (project != null) {
        TestApplicationManagerKt.tearDownProjectAndApp(project);
      }
    },
    () -> {
      if (project != null) {
        // needed for myVirtualFilePointerTracker check below
        ((ProjectRootManagerImpl)ProjectRootManager.getInstance(project)).clearScopesCachesForModules();
      }
    },
    () -> checkEditorsReleased(),
    () -> super.tearDown(),
    () -> Disposer.dispose(mySdkParentDisposable),
    () -> myOldSdks.checkForJdkTableLeaks(),
    () -> {
      if (myThreadTracker != null) {
        myThreadTracker.checkLeak();
      }
    },
    () -> {
      if (project != null) {
        InjectedLanguageManagerImpl.checkInjectorsAreDisposed(project);
      }
    },
    () -> {
      if (myVirtualFilePointerTracker != null) {
        myVirtualFilePointerTracker.assertPointersAreDisposed();
      }
    },
    () -> {
      if (ApplicationManager.getApplication() instanceof ApplicationEx) {
        HeavyPlatformTestCase.cleanupApplicationCaches(getProject());
      }
    },
    () -> {
      resetAllFields();
    }
  );
}
/**
 * Rethrows errors that were logged during the test, but only when the test body itself
 * completed normally — otherwise the original test failure takes precedence.
 */
static void checkAssertions() throws Exception {
  if (!ourAssertionsInTestDetected && IdeaLogger.ourErrorsOccurred != null) {
    throw IdeaLogger.ourErrorsOccurred;
  }
}
/**
 * Deletes every child of the shared source root inside a write command so the next test
 * starts with an empty root. IO failures are logged and otherwise ignored.
 */
static void tearDownSourceRoot(@NotNull Project project) {
  WriteCommandAction.runWriteCommandAction(project, () -> {
    VirtualFile sourceRoot = ourSourceRoot;
    if (sourceRoot == null) return;
    try {
      for (VirtualFile child : sourceRoot.getChildren()) {
        child.delete(LightPlatformTestCase.class);
      }
    }
    catch (IOException e) {
      //noinspection CallToPrintStackTrace
      e.printStackTrace();
    }
  });
}
/** Flushes the pending document queue of the encoding manager, if that service was ever created. */
public static void clearEncodingManagerDocumentQueue() {
  EncodingManager manager = ApplicationManager.getApplication().getServiceIfCreated(EncodingManager.class);
  if (manager instanceof EncodingManagerImpl) {
    ((EncodingManagerImpl)manager).clearDocumentQueue();
  }
}
/**
 * Discards uncommitted documents in {@code project} and, when it has been initialized,
 * in the default project as well.
 */
public static void clearUncommittedDocuments(@NotNull Project project) {
  ((PsiDocumentManagerImpl)PsiDocumentManager.getInstance(project)).clearUncommittedDocuments();
  ProjectManagerEx projectManager = ProjectManagerEx.getInstanceEx();
  if (projectManager.isDefaultProjectInitialized()) {
    Project defaultProject = projectManager.getDefaultProject();
    ((PsiDocumentManagerImpl)PsiDocumentManager.getInstance(defaultProject)).clearUncommittedDocuments();
  }
}
/**
 * Fails the test if any editor was left unreleased, then force-releases the leftovers
 * so subsequent tests are not affected.
 */
public static void checkEditorsReleased() {
  // don't use method references here to make stack trace reading easier
  //noinspection Convert2MethodRef
  RunAll.runAll(
    () -> UIUtil.dispatchAllInvocationEvents(),
    () -> {
      // getAllEditors() should be called only after dispatchAllInvocationEvents(), that's why separate RunAll is used
      Application app = ApplicationManager.getApplication();
      if (app != null) {
        EditorFactory editorFactory = app.getServiceIfCreated(EditorFactory.class);
        if (editorFactory != null) {
          List<ThrowableRunnable<?>> actions = new ArrayList<>();
          for (Editor editor : editorFactory.getAllEditors()) {
            // Report the leak AND release the editor; RunAll runs both even on failure.
            actions.add(() -> EditorFactoryImpl.throwNotReleasedError(editor));
            actions.add(() -> editorFactory.releaseEditor(editor));
          }
          new RunAll(actions).run();
        }
      }
    });
}
@Override
protected void runBare(@NotNull ThrowableRunnable<Throwable> testRunnable) throws Throwable {
  super.runBare(testRunnable);
  // just to make sure all deferred Runnables to finish
  SwingUtilities.invokeAndWait(EmptyRunnable.getInstance());
  // Errors logged during the test fail the run as well.
  if (IdeaLogger.ourErrorsOccurred != null) {
    throw IdeaLogger.ourErrorsOccurred;
  }
}
// Brackets the test body with ourAssertionsInTestDetected: the flag stays true only if
// the body threw, letting checkAssertions() suppress secondary logged errors.
@SuppressWarnings("AssignmentToStaticFieldFromInstanceMethod")
@Override
protected void runTestRunnable(@NotNull ThrowableRunnable<Throwable> testRunnable) throws Throwable {
  ourAssertionsInTestDetected = true;
  super.runTestRunnable(testRunnable);
  ourAssertionsInTestDetected = false;
}
// Tear down on the EDT, mirroring setUp().
@Override
protected void invokeTearDown() throws Exception {
  EdtTestUtil.runInEdtAndWait(super::invokeTearDown);
}
@Override
public Object getData(@NotNull String dataId) {
  // Delegate to the standard test data provider while the project is alive.
  return getProject() == null || getProject().isDisposed() ? null : new TestDataProvider(getProject()).getData(dataId);
}
/**
 * @return SDK for the light project; {@code null} (no SDK) by default — override in subclasses.
 */
protected Sdk getProjectJDK() {
  return null;
}
/**
 * @return module type id for the light project's module; an empty module type by default.
 */
@NotNull
protected String getModuleTypeId() {
  return EmptyModuleType.EMPTY_MODULE;
}
/**
 * Creates dummy source file. One is not placed under source root so some PSI functions like resolve to external classes
 * may not work. Though it works significantly faster and yet can be used if you need to create some PSI structures for
 * test purposes
 *
 * @param fileName - name of the file to create. Extension is used to choose what PSI should be created like java, jsp, aj, xml etc.
 * @param text - file text.
 * @return dummy psi file.
 *
 */
@NotNull
protected PsiFile createFile(@NonNls @NotNull String fileName, @NonNls @NotNull String text) throws IncorrectOperationException {
  return createDummyFile(fileName, text, true);
}
/** Same as {@link #createFile} but with the event system disabled for the created file. */
@NotNull
protected PsiFile createLightFile(@NonNls @NotNull String fileName, @NotNull String text) throws IncorrectOperationException {
  return createDummyFile(fileName, text, false);
}
// Shared implementation: the file type is derived from the file name's extension.
@NotNull
private PsiFile createDummyFile(@NotNull String fileName, @NotNull String text, boolean eventSystemEnabled) {
  FileType fileType = FileTypeManager.getInstance().getFileTypeByFileName(fileName);
  return PsiFileFactory.getInstance(getProject())
    .createFileFromText(fileName, fileType, text, LocalTimeCounter.currentTime(), eventSystemEnabled, false);
}
/**
 * Convenient conversion of testSomeTest -> someTest | SomeTest where testSomeTest is the name of current test.
 *
 * @param lowercaseFirstLetter - whether first letter after test should be lowercased.
 */
@NotNull
@Override
protected String getTestName(boolean lowercaseFirstLetter) {
  String name = StringUtil.trimStart(getName(), "test");
  if (lowercaseFirstLetter && !name.isEmpty() && !PlatformTestUtil.isAllUppercaseName(name)) {
    name = Character.toLowerCase(name.charAt(0)) + name.substring(1);
  }
  return name;
}
/** @return code style settings currently in effect for the test project. */
@NotNull
protected CodeStyleSettings getCurrentCodeStyleSettings() {
  return CodeStyle.getSettings(getProject());
}
/** @return common code style settings for {@code language} within the current settings. */
@NotNull
protected CommonCodeStyleSettings getLanguageSettings(@NotNull Language language) {
  return getCurrentCodeStyleSettings().getCommonSettings(language);
}
/** @return custom code style settings of the given class within the current settings. */
@NotNull
protected <T extends CustomCodeStyleSettings> T getCustomSettings(@NotNull Class<T> settingsClass) {
  return getCurrentCodeStyleSettings().getCustomSettings(settingsClass);
}
/** Commits {@code document} so its PSI is in sync with the document text. */
protected void commitDocument(@NotNull Document document) {
  PsiDocumentManager.getInstance(getProject()).commitDocument(document);
}
/** Commits all documents of the test project. */
protected void commitAllDocuments() {
  PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
}
/** @return the document backing {@code file}, or {@code null} if it has none. */
protected Document getDocument(@NotNull PsiFile file) {
  return PsiDocumentManager.getInstance(getProject()).getDocument(file);
}
/**
 * Closes and deletes the cached light project, removing its files on disk, and clears
 * all cached static state even if closing fails. No-op when no project is cached.
 * Must not be called under a write action (project closing would deadlock/fail).
 */
@SuppressWarnings("NonPrivateFieldAccessedInSynchronizedContext")
public static synchronized void closeAndDeleteProject() {
  Project project = ourProject;
  if (project == null) {
    return;
  }
  if (ApplicationManager.getApplication().isWriteAccessAllowed()) {
    throw new IllegalStateException("Must not call closeAndDeleteProject from under write action");
  }
  if (!project.isDisposed()) {
    assertEquals(project, ourModule.getProject());
    @SuppressWarnings("ConstantConditions")
    Path ioFile = Paths.get(project.getProjectFilePath());
    if (Files.exists(ioFile)) {
      Path dir = ioFile.getParent();
      // Only wipe the whole directory when it is one of our own temp directories.
      if (dir.getFileName().toString().startsWith(UsefulTestCase.TEMP_DIR_MARKER)) {
        PathKt.delete(dir);
      }
      else {
        PathKt.delete(ioFile);
      }
    }
  }
  try {
    assertTrue(ProjectManagerEx.getInstanceEx().forceCloseProject(project));
    assertTrue(project.isDisposed());
    assertTrue(ourModule.isDisposed());
    if (ourPsiManager != null) {
      assertTrue(ourPsiManager.isDisposed());
    }
  } finally {
    // Always drop the cached references so the next test rebuilds the project.
    setProject(null);
    ourModule = null;
    ourPsiManager = null;
  }
}
/** Replaces the cached shared project reference (pass {@code null} to drop it). */
protected static void setProject(Project project) {
  ourProject = project;
}
/**
 * Light project descriptor defined by a module type id and an optional SDK.
 * Two descriptors are equal when their module types match and their SDKs have the same
 * name and the same class/annotation roots; {@code doSetup()} relies on this equality to
 * decide whether the cached project can be reused between tests.
 */
private static class SimpleLightProjectDescriptor extends LightProjectDescriptor {
  @NotNull private final String myModuleTypeId;
  @Nullable private final Sdk mySdk;
  SimpleLightProjectDescriptor(@NotNull String moduleTypeId, @Nullable Sdk sdk) {
    myModuleTypeId = moduleTypeId;
    mySdk = sdk;
  }
  @NotNull
  @Override
  public String getModuleTypeId() {
    return myModuleTypeId;
  }
  @Nullable
  @Override
  public Sdk getSdk() {
    return mySdk;
  }
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    SimpleLightProjectDescriptor that = (SimpleLightProjectDescriptor)o;
    if (!myModuleTypeId.equals(that.myModuleTypeId)) return false;
    return areJdksEqual(that.getSdk());
  }
  @Override
  public int hashCode() {
    // Include the SDK name to reduce collisions between descriptors that share a module
    // type but use different SDKs. Safe w.r.t. equals(): equal descriptors either both
    // have a null SDK or have SDKs with equal names (checked in areJdksEqual).
    return Objects.hash(myModuleTypeId, mySdk == null ? null : mySdk.getName());
  }
  // SDKs are considered equal when both are null, or their names match and their
  // classpath and annotation roots are equal as sets (order-insensitive).
  private boolean areJdksEqual(@Nullable Sdk newSdk) {
    if (mySdk == null || newSdk == null) return mySdk == newSdk;
    if (!mySdk.getName().equals(newSdk.getName())) return false;
    OrderRootType[] rootTypes = {OrderRootType.CLASSES, AnnotationOrderRootType.getInstance()};
    for (OrderRootType rootType : rootTypes) {
      final String[] myUrls = mySdk.getRootProvider().getUrls(rootType);
      final String[] newUrls = newSdk.getRootProvider().getUrls(rootType);
      if (!ContainerUtil.newHashSet(myUrls).equals(ContainerUtil.newHashSet(newUrls))) return false;
    }
    return true;
  }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.testing;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Closer;
import com.google.inject.Module;
import io.airlift.discovery.server.testing.TestingDiscoveryServer;
import io.airlift.log.Logger;
import io.airlift.testing.Assertions;
import io.airlift.units.Duration;
import io.prestosql.Session;
import io.prestosql.Session.SessionBuilder;
import io.prestosql.connector.CatalogName;
import io.prestosql.cost.StatsCalculator;
import io.prestosql.execution.QueryManager;
import io.prestosql.execution.warnings.WarningCollector;
import io.prestosql.metadata.AllNodes;
import io.prestosql.metadata.Catalog;
import io.prestosql.metadata.InternalNode;
import io.prestosql.metadata.Metadata;
import io.prestosql.metadata.QualifiedObjectName;
import io.prestosql.metadata.SessionPropertyManager;
import io.prestosql.metadata.SqlFunction;
import io.prestosql.plugin.base.security.AllowAllSystemAccessControl;
import io.prestosql.server.BasicQueryInfo;
import io.prestosql.server.testing.TestingPrestoServer;
import io.prestosql.spi.Plugin;
import io.prestosql.spi.QueryId;
import io.prestosql.split.PageSourceManager;
import io.prestosql.split.SplitManager;
import io.prestosql.sql.planner.NodePartitioningManager;
import io.prestosql.sql.planner.Plan;
import io.prestosql.transaction.TransactionManager;
import org.intellij.lang.annotations.Language;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;
import static com.google.common.base.Throwables.throwIfUnchecked;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.inject.util.Modules.EMPTY_MODULE;
import static io.airlift.units.Duration.nanosSince;
import static io.prestosql.testing.AbstractTestQueries.TEST_CATALOG_PROPERTIES;
import static io.prestosql.testing.AbstractTestQueries.TEST_SYSTEM_PROPERTIES;
import static io.prestosql.testing.TestingSession.TESTING_CATALOG;
import static io.prestosql.testing.TestingSession.createBogusTestingCatalog;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
public class DistributedQueryRunner
implements QueryRunner
{
private static final Logger log = Logger.get(DistributedQueryRunner.class);
private static final String ENVIRONMENT = "testing";
private final TestingDiscoveryServer discoveryServer;
private final TestingPrestoServer coordinator;
// Not final: addServers() replaces the list with an extended immutable copy.
private List<TestingPrestoServer> servers;
// Closes every server and client registered during construction/expansion.
private final Closer closer = Closer.create();
private final TestingPrestoClient prestoClient;
// Read lock: concurrent query execution; write lock: handed out via getExclusiveLock().
private final ReadWriteLock lock = new ReentrantReadWriteLock();
/** @return a new {@link Builder} initialized with the given default session. */
public static Builder builder(Session defaultSession)
{
    return new Builder(defaultSession);
}
/**
 * Starts a discovery server, {@code nodeCount - 1} workers and one coordinator, wires up
 * a test client, and installs the custom test functions and the bogus testing catalog on
 * every node. All started services are registered with {@link #closer}; if construction
 * fails, everything started so far is closed before the exception propagates.
 */
private DistributedQueryRunner(
        Session defaultSession,
        int nodeCount,
        Map<String, String> extraProperties,
        Map<String, String> coordinatorProperties,
        String environment,
        Module additionalModule,
        Optional<Path> baseDataDir,
        String systemAccessControlName,
        Map<String, String> systemAccessControlProperties)
        throws Exception
{
    requireNonNull(defaultSession, "defaultSession is null");
    try {
        long start = System.nanoTime();
        discoveryServer = new TestingDiscoveryServer(environment);
        closer.register(() -> closeUnchecked(discoveryServer));
        log.info("Created TestingDiscoveryServer in %s", nanosSince(start).convertToMostSuccinctTimeUnit());
        ImmutableList.Builder<TestingPrestoServer> servers = ImmutableList.builder();
        // one of the nodeCount nodes is the coordinator, so start nodeCount - 1 workers
        for (int i = 1; i < nodeCount; i++) {
            TestingPrestoServer worker = closer.register(createTestingPrestoServer(
                    discoveryServer.getBaseUrl(),
                    false,
                    extraProperties,
                    environment,
                    additionalModule,
                    baseDataDir,
                    systemAccessControlName,
                    systemAccessControlProperties));
            servers.add(worker);
        }
        // coordinator-specific properties override/extend the shared extraProperties
        Map<String, String> extraCoordinatorProperties = new HashMap<>();
        extraCoordinatorProperties.putAll(extraProperties);
        extraCoordinatorProperties.putAll(coordinatorProperties);
        coordinator = closer.register(createTestingPrestoServer(
                discoveryServer.getBaseUrl(),
                true,
                extraCoordinatorProperties,
                environment,
                additionalModule,
                baseDataDir,
                systemAccessControlName,
                systemAccessControlProperties));
        servers.add(coordinator);
        this.servers = servers.build();
    }
    catch (Exception e) {
        // close everything started so far; closer failures are attached to e
        try {
            throw closer.rethrow(e, Exception.class);
        }
        finally {
            closer.close();
        }
    }
    // copy session using property manager in coordinator
    defaultSession = defaultSession.toSessionRepresentation().toSession(coordinator.getMetadata().getSessionPropertyManager(), defaultSession.getIdentity().getExtraCredentials());
    this.prestoClient = closer.register(new TestingPrestoClient(coordinator, defaultSession));
    waitForAllNodesGloballyVisible();
    long start = System.nanoTime();
    for (TestingPrestoServer server : servers) {
        server.getMetadata().addFunctions(AbstractTestQueries.CUSTOM_FUNCTIONS);
    }
    log.info("Added functions in %s", nanosSince(start).convertToMostSuccinctTimeUnit());
    for (TestingPrestoServer server : servers) {
        // add bogus catalog for testing procedures and session properties
        addTestingCatalog(server);
    }
}
/**
 * Starts a single testing server (coordinator or worker) with test-friendly defaults;
 * caller-supplied {@code extraProperties} take precedence over the defaults.
 */
private static TestingPrestoServer createTestingPrestoServer(
        URI discoveryUri,
        boolean coordinator,
        Map<String, String> extraProperties,
        String environment,
        Module additionalModule,
        Optional<Path> baseDataDir,
        String systemAccessControlName,
        Map<String, String> systemAccessControlProperties)
{
    long start = System.nanoTime();
    Map<String, String> serverProperties = new HashMap<>();
    serverProperties.put("internal-communication.shared-secret", "test-secret");
    serverProperties.put("query.client.timeout", "10m");
    serverProperties.put("exchange.http-client.idle-timeout", "1h");
    serverProperties.put("task.max-index-memory", "16kB"); // causes index joins to fault load
    serverProperties.put("distributed-index-joins-enabled", "true");
    if (coordinator) {
        serverProperties.put("node-scheduler.include-coordinator", "true");
        serverProperties.put("join-distribution-type", "PARTITIONED");
    }
    // caller-supplied properties win over the defaults above
    serverProperties.putAll(extraProperties);
    TestingPrestoServer server = TestingPrestoServer.builder()
            .setCoordinator(coordinator)
            .setProperties(serverProperties)
            .setEnvironment(environment)
            .setDiscoveryUri(discoveryUri)
            .setAdditionalModule(additionalModule)
            .setBaseDataDir(baseDataDir)
            .setSystemAccessControl(systemAccessControlName, systemAccessControlProperties)
            .build();
    String nodeRole = coordinator ? "coordinator" : "worker";
    log.info("Created %s TestingPrestoServer in %s: %s", nodeRole, nanosSince(start).convertToMostSuccinctTimeUnit(), server.getBaseUrl());
    return server;
}
/**
 * Starts {@code nodeCount} additional worker nodes with default settings, installs the
 * test functions and testing catalog on each, and waits until the whole cluster sees them.
 */
public void addServers(int nodeCount)
        throws Exception
{
    ImmutableList.Builder<TestingPrestoServer> newServers = ImmutableList.<TestingPrestoServer>builder()
            .addAll(servers);
    for (int i = 0; i < nodeCount; i++) {
        TestingPrestoServer worker = closer.register(createTestingPrestoServer(
                discoveryServer.getBaseUrl(),
                false,
                ImmutableMap.of(),
                ENVIRONMENT,
                EMPTY_MODULE,
                Optional.empty(),
                AllowAllSystemAccessControl.NAME,
                ImmutableMap.of()));
        // new nodes need the same custom functions and testing catalog as the original ones
        worker.getMetadata().addFunctions(AbstractTestQueries.CUSTOM_FUNCTIONS);
        addTestingCatalog(worker);
        newServers.add(worker);
    }
    servers = newServers.build();
    waitForAllNodesGloballyVisible();
}
/**
 * Blocks until every server reports all cluster nodes as active; fails the test after
 * 10 seconds of polling at 10ms intervals.
 */
private void waitForAllNodesGloballyVisible()
        throws InterruptedException
{
    Duration timeout = new Duration(10, SECONDS);
    long start = System.nanoTime();
    while (!allNodesGloballyVisible()) {
        Assertions.assertLessThan(nanosSince(start), timeout);
        MILLISECONDS.sleep(10);
    }
    log.info("Announced servers in %s", nanosSince(start).convertToMostSuccinctTimeUnit());
}
/**
 * Registers a bogus catalog on {@code server} so procedures and catalog/system session
 * properties can be exercised in tests without a real connector.
 */
private void addTestingCatalog(TestingPrestoServer server)
{
    Catalog catalog = createBogusTestingCatalog(TESTING_CATALOG);
    server.getCatalogManager().registerCatalog(catalog);
    SessionPropertyManager propertyManager = server.getMetadata().getSessionPropertyManager();
    propertyManager.addSystemSessionProperties(TEST_SYSTEM_PROPERTIES);
    propertyManager.addConnectorSessionProperties(catalog.getConnectorCatalogName(), TEST_CATALOG_PROPERTIES);
}
/** @return true when every server sees no inactive nodes and exactly one active node per server */
private boolean allNodesGloballyVisible()
{
    int expectedActiveCount = servers.size();
    for (TestingPrestoServer server : servers) {
        AllNodes allNodes = server.refreshNodes();
        if (!allNodes.getInactiveNodes().isEmpty()) {
            return false;
        }
        if (allNodes.getActiveNodes().size() != expectedActiveCount) {
            return false;
        }
    }
    return true;
}
/** @return the client used to submit queries to the coordinator. */
public TestingPrestoClient getClient()
{
    return prestoClient;
}
@Override
public int getNodeCount()
{
    return servers.size();
}
@Override
public Session getDefaultSession()
{
    return prestoClient.getDefaultSession();
}
// The accessors below simply delegate to the coordinator node.
@Override
public TransactionManager getTransactionManager()
{
    return coordinator.getTransactionManager();
}
@Override
public Metadata getMetadata()
{
    return coordinator.getMetadata();
}
@Override
public SplitManager getSplitManager()
{
    return coordinator.getSplitManager();
}
@Override
public PageSourceManager getPageSourceManager()
{
    return coordinator.getPageSourceManager();
}
@Override
public NodePartitioningManager getNodePartitioningManager()
{
    return coordinator.getNodePartitioningManager();
}
@Override
public StatsCalculator getStatsCalculator()
{
    return coordinator.getStatsCalculator();
}
@Override
public TestingAccessControlManager getAccessControl()
{
    return coordinator.getAccessControl();
}
public TestingPrestoServer getCoordinator()
{
    return coordinator;
}
/** @return a defensive copy of all servers (workers plus coordinator). */
public List<TestingPrestoServer> getServers()
{
    return ImmutableList.copyOf(servers);
}
/** Installs {@code plugin} on every node in the cluster, not just the coordinator. */
@Override
public void installPlugin(Plugin plugin)
{
    long start = System.nanoTime();
    servers.forEach(server -> server.installPlugin(plugin));
    log.info("Installed plugin %s in %s", plugin.getClass().getSimpleName(), nanosSince(start).convertToMostSuccinctTimeUnit());
}
/** Registers the given functions in the metadata of every node. */
@Override
public void addFunctions(List<? extends SqlFunction> functions)
{
    for (TestingPrestoServer server : servers) {
        server.getMetadata().addFunctions(functions);
    }
}
/** Creates a catalog with no connector properties on every node. */
public void createCatalog(String catalogName, String connectorName)
{
    createCatalog(catalogName, connectorName, ImmutableMap.of());
}
/**
 * Creates the catalog on every node and blocks until the new connector is announced by
 * all of them (up to 100 seconds). All nodes must agree on the resulting catalog name.
 */
@Override
public void createCatalog(String catalogName, String connectorName, Map<String, String> properties)
{
    long start = System.nanoTime();
    Set<CatalogName> catalogNames = new HashSet<>();
    for (TestingPrestoServer server : servers) {
        catalogNames.add(server.createCatalog(catalogName, connectorName, properties));
    }
    // getOnlyElement fails if the servers produced different catalog names
    CatalogName catalog = getOnlyElement(catalogNames);
    log.info("Created catalog %s (%s) in %s", catalogName, catalog, nanosSince(start));
    // wait for all nodes to announce the new catalog
    start = System.nanoTime();
    while (!isConnectionVisibleToAllNodes(catalog)) {
        Assertions.assertLessThan(nanosSince(start), new Duration(100, SECONDS), "waiting for connector " + catalog + " to be initialized in every node");
        try {
            MILLISECONDS.sleep(10);
        }
        catch (InterruptedException e) {
            // restore the interrupt flag before bailing out
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }
    log.info("Announced catalog %s (%s) in %s", catalogName, catalog, nanosSince(start));
}
/** @return true once every server reports all cluster nodes as active for the given connector */
private boolean isConnectionVisibleToAllNodes(CatalogName catalogName)
{
    int expectedNodeCount = servers.size();
    for (TestingPrestoServer server : servers) {
        server.refreshNodes();
        Set<InternalNode> nodesWithConnector = server.getActiveNodesWithConnector(catalogName);
        if (nodesWithConnector.size() != expectedNodeCount) {
            return false;
        }
    }
    return true;
}
// The methods below all hold the shared read lock while talking to the cluster, so a
// holder of getExclusiveLock() can keep queries out while it mutates cluster state.
@Override
public List<QualifiedObjectName> listTables(Session session, String catalog, String schema)
{
    lock.readLock().lock();
    try {
        return prestoClient.listTables(session, catalog, schema);
    }
    finally {
        lock.readLock().unlock();
    }
}
@Override
public boolean tableExists(Session session, String table)
{
    lock.readLock().lock();
    try {
        return prestoClient.tableExists(session, table);
    }
    finally {
        lock.readLock().unlock();
    }
}
/** Executes {@code sql} with the client's default session and returns the materialized result. */
@Override
public MaterializedResult execute(@Language("SQL") String sql)
{
    lock.readLock().lock();
    try {
        return prestoClient.execute(sql).getResult();
    }
    finally {
        lock.readLock().unlock();
    }
}
@Override
public MaterializedResult execute(Session session, @Language("SQL") String sql)
{
    lock.readLock().lock();
    try {
        return prestoClient.execute(session, sql).getResult();
    }
    finally {
        lock.readLock().unlock();
    }
}
/** Like {@link #execute(Session, String)} but also returns the server-assigned query id. */
public ResultWithQueryId<MaterializedResult> executeWithQueryId(Session session, @Language("SQL") String sql)
{
    lock.readLock().lock();
    try {
        return prestoClient.execute(session, sql);
    }
    finally {
        lock.readLock().unlock();
    }
}
/** Executes {@code sql} and returns the result (converted to test types) with the query plan. */
@Override
public MaterializedResultWithPlan executeWithPlan(Session session, String sql, WarningCollector warningCollector)
{
    ResultWithQueryId<MaterializedResult> result = executeWithQueryId(session, sql);
    Plan plan = getQueryPlan(result.getQueryId());
    return new MaterializedResultWithPlan(result.getResult().toTestTypes(), plan);
}
/** Runs {@code sql} just to obtain its plan; the query itself is cancelled afterwards. */
@Override
public Plan createPlan(Session session, String sql, WarningCollector warningCollector)
{
    QueryId queryId = executeWithQueryId(session, sql).getQueryId();
    Plan plan = getQueryPlan(queryId);
    // the result is not needed; cancel to free coordinator resources
    coordinator.getQueryManager().cancelQuery(queryId);
    return plan;
}
/** @return the plan the coordinator recorded for the given query */
public Plan getQueryPlan(QueryId queryId)
{
    return coordinator.getQueryPlan(queryId);
}
/** @return the write half of the shared lock, excluding all read-locked query methods */
@Override
public Lock getExclusiveLock()
{
    return lock.writeLock();
}
/** Cancels all in-flight queries, then closes every registered server and client. */
@Override
public final void close()
{
    cancelAllQueries();
    try {
        closer.close();
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
private void cancelAllQueries()
{
    QueryManager queryManager = coordinator.getQueryManager();
    queryManager.getQueries().stream()
            .filter(queryInfo -> !queryInfo.getState().isDone())
            .forEach(queryInfo -> queryManager.cancelQuery(queryInfo.getQueryId()));
}
// Adapter so an AutoCloseable can be registered with Guava's Closer (which wants IOException).
private static void closeUnchecked(AutoCloseable closeable)
{
    try {
        closeable.close();
    }
    catch (Exception e) {
        throwIfUnchecked(e);
        throw new RuntimeException(e);
    }
}
public static class Builder
{
// Defaults: a 3-node cluster in the standard "testing" environment with
// permissive (allow-all) system access control and no extra properties.
private Session defaultSession;
private int nodeCount = 3;
private Map<String, String> extraProperties = ImmutableMap.of();
private Map<String, String> coordinatorProperties = ImmutableMap.of();
private String environment = ENVIRONMENT;
private Module additionalModule = EMPTY_MODULE;
private Optional<Path> baseDataDir = Optional.empty();
private String systemAccessControlName = AllowAllSystemAccessControl.NAME;
private Map<String, String> systemAccessControlProperties = ImmutableMap.of();
protected Builder(Session defaultSession)
{
    this.defaultSession = requireNonNull(defaultSession, "defaultSession is null");
}
/** Applies {@code amendSession} to a builder seeded from the current default session. */
public Builder amendSession(Function<SessionBuilder, SessionBuilder> amendSession)
{
    SessionBuilder builder = Session.builder(defaultSession);
    this.defaultSession = amendSession.apply(builder).build();
    return this;
}
/** Sets the total number of nodes (workers plus one coordinator). */
public Builder setNodeCount(int nodeCount)
{
    this.nodeCount = nodeCount;
    return this;
}
/** Sets properties applied to every node; overwrites any previously set extra properties. */
public Builder setExtraProperties(Map<String, String> extraProperties)
{
    this.extraProperties = extraProperties;
    return this;
}
/**
 * Sets extra properties being equal to a map containing given key and value.
 * Note, that calling this method OVERWRITES previously set property values.
 * As a result, it should only be used when only one extra property needs to be set.
 */
public Builder setSingleExtraProperty(String key, String value)
{
    return setExtraProperties(ImmutableMap.of(key, value));
}
/** Sets properties applied only to the coordinator; overwrites previously set values. */
public Builder setCoordinatorProperties(Map<String, String> coordinatorProperties)
{
    this.coordinatorProperties = coordinatorProperties;
    return this;
}
/**
 * Sets coordinator properties being equal to a map containing given key and value.
 * Note, that calling this method OVERWRITES previously set property values.
 * As a result, it should only be used when only one coordinator property needs to be set.
 */
public Builder setSingleCoordinatorProperty(String key, String value)
{
    return setCoordinatorProperties(ImmutableMap.of(key, value));
}
public Builder setEnvironment(String environment)
{
    this.environment = environment;
    return this;
}
public Builder setAdditionalModule(Module additionalModule)
{
    this.additionalModule = requireNonNull(additionalModule, "additionalModules is null");
    return this;
}
public Builder setBaseDataDir(Optional<Path> baseDataDir)
{
    this.baseDataDir = requireNonNull(baseDataDir, "baseDataDir is null");
    return this;
}
/** Replaces the system access control implementation and its configuration. */
@SuppressWarnings("unused")
public Builder setSystemAccessControl(String name, Map<String, String> properties)
{
    this.systemAccessControlName = requireNonNull(name, "name is null");
    this.systemAccessControlProperties = ImmutableMap.copyOf(requireNonNull(properties, "properties is null"));
    return this;
}
public DistributedQueryRunner build()
throws Exception
{
return new DistributedQueryRunner(
defaultSession,
nodeCount,
extraProperties,
coordinatorProperties,
environment,
additionalModule,
baseDataDir,
systemAccessControlName,
systemAccessControlProperties);
}
}
}
| |
package com.potlatch.client;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Configuration;
import android.os.Build;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceActivity;
import android.preference.PreferenceCategory;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
import java.util.List;
import com.potlatch.client.R;
/**
* A {@link PreferenceActivity} that presents a set of application settings. On
* handset devices, settings are presented as a single list. On tablets,
* settings are split by category, with category headers shown to the left of
* the list of settings.
* <p>
* See <a href="http://developer.android.com/design/patterns/settings.html">
* Android Design: Settings</a> for design guidelines and the <a
* href="http://developer.android.com/guide/topics/ui/settings.html">Settings
* API Guide</a> for more information on developing a Settings UI.
*/
public class SettingsActivity extends PreferenceActivity {
    /**
     * Determines whether to always show the simplified settings UI, where
     * settings are presented in a single list. When false, settings are shown
     * as a master/detail two-pane view on tablets. When true, a single pane is
     * shown on tablets.
     */
    private static final boolean ALWAYS_SIMPLE_PREFS = false;

    // Preference keys, shared with the rest of the application. These must
    // match the android:key attributes in the preference XML resources.
    public static final String KEY_PREF_SYNC_CONN = "sync_frequency";
    public static final String KEY_PREF_DISPLAY_NAME = "display_name";
    public static final String KEY_PREF_HIDE_FLAGGED = "hide_flagged";

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
        setupSimplePreferencesScreen();
    }

    /**
     * Shows the simplified settings UI if the device configuration dictates
     * that a simplified, single-pane UI should be shown.
     */
    private void setupSimplePreferencesScreen() {
        if (!isSimplePreferences(this)) {
            return;
        }

        // In the simplified UI, fragments are not used at all and we instead
        // use the older PreferenceActivity APIs.

        // Add 'general' preferences.
        addPreferencesFromResource(R.xml.pref_general);

        // 'Notifications' preferences are currently disabled. To re-enable,
        // add a PreferenceCategory header titled pref_header_notifications
        // and call addPreferencesFromResource(R.xml.pref_notification).

        // Add 'data and sync' preferences, and a corresponding header.
        PreferenceCategory fakeHeader = new PreferenceCategory(this);
        fakeHeader.setTitle(R.string.pref_header_data_sync);
        getPreferenceScreen().addPreference(fakeHeader);
        addPreferencesFromResource(R.xml.pref_data_sync);

        // Bind the summaries of EditText/List/Dialog/Ringtone preferences to
        // their values. When their values change, their summaries are updated
        // to reflect the new value, per the Android Design guidelines.
        bindPreferenceSummaryToValue(findPreference(KEY_PREF_DISPLAY_NAME));
        bindPreferenceSummaryToValue(findPreference(KEY_PREF_SYNC_CONN));
    }

    /** {@inheritDoc} */
    @Override
    public boolean onIsMultiPane() {
        return isXLargeTablet(this) && !isSimplePreferences(this);
    }

    /**
     * Helper method to determine if the device has an extra-large screen. For
     * example, 10" tablets are extra-large.
     */
    private static boolean isXLargeTablet(Context context) {
        return (context.getResources().getConfiguration().screenLayout
                & Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_XLARGE;
    }

    /**
     * Determines whether the simplified settings UI should be shown. This is
     * true if this is forced via {@link #ALWAYS_SIMPLE_PREFS}, or the device
     * doesn't have newer APIs like {@link PreferenceFragment}, or the device
     * doesn't have an extra-large screen. In these cases, a single-pane
     * "simplified" settings UI should be shown.
     */
    private static boolean isSimplePreferences(Context context) {
        return ALWAYS_SIMPLE_PREFS
                || Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB
                || !isXLargeTablet(context);
    }

    /** {@inheritDoc} */
    @Override
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public void onBuildHeaders(List<Header> target) {
        if (!isSimplePreferences(this)) {
            loadHeadersFromResource(R.xml.pref_headers, target);
        }
    }

    /**
     * A preference value change listener that updates the preference's summary
     * to reflect its new value.
     */
    private static final Preference.OnPreferenceChangeListener sBindPreferenceSummaryToValueListener = new Preference.OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object value) {
            String stringValue = value.toString();
            if (preference instanceof ListPreference) {
                // For list preferences, look up the correct display value in
                // the preference's 'entries' list.
                ListPreference listPreference = (ListPreference) preference;
                int index = listPreference.findIndexOfValue(stringValue);

                // Set the summary to reflect the new value; clear it when the
                // value is not one of the known entries.
                preference
                        .setSummary(index >= 0 ? listPreference.getEntries()[index]
                                : null);
            } else {
                // For all other preferences, set the summary to the value's
                // simple string representation.
                preference.setSummary(stringValue);
            }
            return true;
        }
    };

    /**
     * Binds a preference's summary to its value. More specifically, when the
     * preference's value is changed, its summary (line of text below the
     * preference title) is updated to reflect the value. The summary is also
     * immediately updated upon calling this method. The exact display format is
     * dependent on the type of preference.
     *
     * @see #sBindPreferenceSummaryToValueListener
     */
    private static void bindPreferenceSummaryToValue(Preference preference) {
        // Set the listener to watch for value changes.
        preference
                .setOnPreferenceChangeListener(sBindPreferenceSummaryToValueListener);

        // Trigger the listener immediately with the preference's
        // current value.
        sBindPreferenceSummaryToValueListener.onPreferenceChange(
                preference,
                PreferenceManager.getDefaultSharedPreferences(
                        preference.getContext()).getString(preference.getKey(),
                        ""));
    }

    /**
     * This fragment shows general preferences only. It is used when the
     * activity is showing a two-pane settings UI.
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public static class GeneralPreferenceFragment extends PreferenceFragment {
        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            addPreferencesFromResource(R.xml.pref_general);

            // Bind the summaries of EditText/List/Dialog/Ringtone preferences
            // to their values so summaries always reflect the current value.
            bindPreferenceSummaryToValue(findPreference("example_text"));
            bindPreferenceSummaryToValue(findPreference("example_list"));
            bindPreferenceSummaryToValue(findPreference(KEY_PREF_HIDE_FLAGGED));
        }
    }

    /**
     * This fragment shows notification preferences only. It is used when the
     * activity is showing a two-pane settings UI.
     */
    /*
     * @TargetApi(Build.VERSION_CODES.HONEYCOMB) public static class
     * NotificationPreferenceFragment extends PreferenceFragment {
     *
     * @Override public void onCreate(Bundle savedInstanceState) {
     * super.onCreate(savedInstanceState);
     * addPreferencesFromResource(R.xml.pref_notification);
     *
     * // Bind the summaries of EditText/List/Dialog/Ringtone preferences // to
     * their values. When their values change, their summaries are // updated to
     * reflect the new value, per the Android Design // guidelines.
     * bindPreferenceSummaryToValue
     * (findPreference("notifications_new_message_ringtone")); } }
     */

    /**
     * This fragment shows data and sync preferences only. It is used when the
     * activity is showing a two-pane settings UI.
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public static class DataSyncPreferenceFragment extends PreferenceFragment {
        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            addPreferencesFromResource(R.xml.pref_data_sync);

            // Bind the summary of the sync-frequency preference to its value.
            bindPreferenceSummaryToValue(findPreference(KEY_PREF_SYNC_CONN));
        }
    }
}
| |
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.gui.main.contactlist;
import java.util.*;
import java.util.regex.*;
import net.java.sip.communicator.impl.gui.*;
import net.java.sip.communicator.impl.gui.main.contactlist.contactsource.*;
import net.java.sip.communicator.service.contactsource.*;
import net.java.sip.communicator.service.gui.*;
import net.java.sip.communicator.service.gui.event.*;
/**
* The <tt>SearchFilter</tt> is a <tt>ContactListFilter</tt> that filters the
* contact list content by a filter string.
*
* @author Yana Stamcheva
*/
public class SearchFilter
    implements ContactListSearchFilter
{
    /**
     * The string, which we're searching.
     */
    protected String filterString;

    /**
     * The pattern to filter.
     */
    protected Pattern filterPattern;

    /**
     * The <tt>MetaContactListSource</tt> to search in. May be null when this
     * filter was created for an explicit source contact list.
     */
    private final MetaContactListSource mclSource;

    /**
     * The source contact list.
     */
    protected ContactList sourceContactList;

    /**
     * The name of the property indicating if searches in call history are
     * enabled.
     */
    protected String DISABLE_CALL_HISTORY_SEARCH_PROP
        = "net.java.sip.communicator.impl.gui"
            + ".DISABLE_CALL_HISTORY_SEARCH_IN_CONTACT_LIST";

    /**
     * If set, we are searching a phone number and will use the phone number
     * service to try matching the numbers.
     */
    private boolean isSearchingPhoneNumber = false;

    /**
     * Creates an instance of <tt>SearchFilter</tt>.
     */
    public SearchFilter(MetaContactListSource contactListSource)
    {
        this.mclSource = contactListSource;
    }

    /**
     * Creates an instance of <tt>SearchFilter</tt>.
     */
    public SearchFilter(ContactList sourceContactList)
    {
        this.mclSource = null;
        this.sourceContactList = sourceContactList;
    }

    /**
     * Applies this filter to the default contact source.
     * @param filterQuery the query that tracks this filter.
     */
    public void applyFilter(FilterQuery filterQuery)
    {
        if (sourceContactList == null)
            sourceContactList = GuiActivator.getContactList();

        Iterator<UIContactSource> filterSources
            = sourceContactList.getContactSources().iterator();

        if (sourceContactList.getDefaultFilter()
                .equals(TreeContactList.presenceFilter))
        {
            final MetaContactQuery defaultQuery = new MetaContactQuery();
            defaultQuery.addContactQueryListener(sourceContactList);

            // First add the MetaContactListSource
            filterQuery.addContactQuery(defaultQuery);
            mclSource.startQuery(defaultQuery, filterPattern);
        }
        else if (sourceContactList.getDefaultFilter()
                .equals(TreeContactList.historyFilter))
        {
            filterSources = sourceContactList.getContactSources(
                ContactSourceService.HISTORY_TYPE).iterator();
        }

        // If we have stopped filtering in the mean time we return here.
        if (filterQuery.isCanceled())
            return;

        if(sourceContactList instanceof TreeContactList)
        {
            ((TreeContactList) sourceContactList).setAutoSectionAllowed(true);
        }

        // Then we apply the filter on all its contact sources.
        while (filterSources.hasNext())
        {
            final UIContactSource filterSource
                = filterSources.next();

            // Don't search in history sources if this is disabled from the
            // corresponding configuration property.
            if (sourceContactList.getDefaultFilter()
                    .equals(TreeContactList.presenceFilter)
                && GuiActivator.getConfigurationService().getBoolean(
                    DISABLE_CALL_HISTORY_SEARCH_PROP, false)
                && filterSource.getContactSourceService().getType()
                    == ContactSourceService.HISTORY_TYPE)
                continue;

            if (sourceContactList.getDefaultFilter()
                    .equals(TreeContactList.presenceFilter))
            {
                if(filterSource.getContactSourceService().getType()
                    == ContactSourceService.CONTACT_LIST_TYPE)
                {
                    //We are setting the index from contactSourceOrder map. This
                    //index is set to reorder the sources in the contact list.
                    filterSource.setContactSourceIndex(
                        this.mclSource.getIndex() + 1);
                }
            }

            // If we have stopped filtering in the mean time we return here.
            if (filterQuery.isCanceled())
                return;

            applyFilter(filterSource, filterQuery);
        }

        // Closes this filter to indicate that we finished adding queries to it.
        if (filterQuery.isRunning())
            filterQuery.close();
    }

    /**
     * Applies this filter to the given <tt>contactSource</tt>.
     *
     * @param contactSource the <tt>ExternalContactSource</tt> to apply the
     * filter to
     * @param filterQuery the filter query object.
     * @return the <tt>ContactQuery</tt> that tracks this filter
     */
    protected ContactQuery applyFilter(UIContactSource contactSource,
                                       FilterQuery filterQuery)
    {
        ContactSourceService sourceService
            = contactSource.getContactSourceService();

        ContactQuery contactQuery;
        // Extended sources can match against the compiled pattern directly;
        // plain sources only get the raw filter string.
        if (sourceService instanceof ExtendedContactSourceService)
            contactQuery
                = ((ExtendedContactSourceService) sourceService)
                    .createContactQuery(filterPattern);
        else
            contactQuery = sourceService.createContactQuery(filterString);

        if(contactQuery == null)
            return null;

        contactQuery.addContactQueryListener(sourceContactList);

        if (contactQuery.getStatus() == ContactQuery.QUERY_IN_PROGRESS)
        {
            filterQuery.addContactQuery(contactQuery);
        }

        contactQuery.start();

        return contactQuery;
    }

    /**
     * Indicates if the given <tt>uiContact</tt> matches this filter.
     * @param uiContact the <tt>UIContact</tt> to check
     * @return <tt>true</tt> if the given <tt>uiContact</tt> matches the
     * current filter, <tt>false</tt> - otherwise
     */
    public boolean isMatching(UIContact uiContact)
    {
        Iterator<String> searchStrings = uiContact.getSearchStrings();

        if (searchStrings != null)
        {
            while (searchStrings.hasNext())
            {
                if (isMatching(searchStrings.next()))
                    return true;
            }
        }
        return false;
    }

    /**
     * For all groups we return false. If some of the child contacts of this
     * group matches this filter the group would be automatically added when
     * the contact is added in the list.
     * @param uiGroup the <tt>UIGroup</tt> to check
     * @return false
     */
    public boolean isMatching(UIGroup uiGroup)
    {
        return false;
    }

    /**
     * Creates the <tt>SearchFilter</tt> by specifying the string used for
     * filtering.
     * @param filter the String used for filtering
     */
    public void setFilterString(String filter)
    {
        // Keep the raw filter string; special characters are escaped below
        // via Pattern.quote() when the pattern is compiled.
        this.filterString = filter;

        // Then create the pattern.
        // By default, case-insensitive matching assumes that only characters
        // in the US-ASCII charset are being matched, that's why we use
        // the UNICODE_CASE flag to enable unicode case-insensitive matching.
        // Sun Bug ID: 6486934 "RegEx case_insensitive match is broken"
        this.filterPattern
            = Pattern.compile(
                    Pattern.quote(filterString),
                    Pattern.MULTILINE
                        | Pattern.CASE_INSENSITIVE
                        | Pattern.UNICODE_CASE);

        this.isSearchingPhoneNumber
            = GuiActivator.getPhoneNumberI18nService().isPhoneNumber(filter);
    }

    /**
     * Indicates if the given string matches this filter. The text matches if
     * the compiled filter pattern is found in it, or, when the filter string
     * looks like a phone number, if the phone number service considers the
     * two numbers equivalent (so differently formatted numbers still match).
     *
     * @param text the text to check
     * @return <tt>true</tt> to indicate that the given <tt>text</tt> matches
     * this filter, <tt>false</tt> - otherwise
     */
    private boolean isMatching(String text)
    {
        if (filterPattern != null && filterPattern.matcher(text).find())
            return true;

        // Fall back to phone-number matching. Previously this branch was only
        // reachable when filterPattern was null, which never happens after
        // setFilterString(), so phone number matching was effectively dead.
        if (isSearchingPhoneNumber && this.filterString != null)
            return GuiActivator.getPhoneNumberI18nService()
                .phoneNumbersMatch(this.filterString, text);

        // With no pattern at all, everything matches (original behavior).
        return filterPattern == null;
    }
}
| |
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.searchlib.rankingexpression;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.yahoo.searchlib.rankingexpression.rule.ConstantNode;
import com.yahoo.searchlib.rankingexpression.rule.ExpressionNode;
import com.yahoo.searchlib.rankingexpression.rule.FunctionNode;
import com.yahoo.searchlib.rankingexpression.rule.NameNode;
import com.yahoo.searchlib.rankingexpression.rule.NegativeNode;
import com.yahoo.searchlib.rankingexpression.rule.ReferenceNode;
import com.yahoo.searchlib.rankingexpression.rule.SerializationContext;
import com.yahoo.tensor.TensorType;
import com.yahoo.text.Utf8;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
/**
* A function defined by a ranking expression, optionally containing type information
* for inputs and outputs.
*
* Immutable, but note that ranking expressions are *not* immutable.
*
* @author Simon Thoresen Hult
* @author bratseth
*/
public class ExpressionFunction {

    private final String name;
    private final ImmutableList<String> arguments;

    /** Types of the inputs, if known. The keys here is any subset (including empty and identity) of the argument list */
    private final ImmutableMap<String, TensorType> argumentTypes;
    private final RankingExpression body;

    private final Optional<TensorType> returnType;

    /**
     * Constructs a new function with no arguments
     *
     * @param name the name of this function
     * @param body the ranking expression that defines this function
     */
    public ExpressionFunction(String name, RankingExpression body) {
        this(name, Collections.emptyList(), body);
    }

    /**
     * Constructs a new function
     *
     * @param name the name of this function
     * @param arguments its argument names
     * @param body the ranking expression that defines this function
     */
    public ExpressionFunction(String name, List<String> arguments, RankingExpression body) {
        this(name, arguments, body, ImmutableMap.of(), Optional.empty());
    }

    /**
     * Constructs a new function, fully specified.
     *
     * @param name the name of this function
     * @param arguments its argument names; null is treated as no arguments
     * @param body the ranking expression that defines this function
     * @param argumentTypes the types of a subset of the arguments
     * @param returnType the return type, or empty if not known
     * @throws IllegalArgumentException if argumentTypes has keys not in arguments
     */
    public ExpressionFunction(String name, List<String> arguments, RankingExpression body,
                              Map<String, TensorType> argumentTypes, Optional<TensorType> returnType) {
        this.name = Objects.requireNonNull(name, "name cannot be null");
        this.arguments = arguments==null ? ImmutableList.of() : ImmutableList.copyOf(arguments);
        this.body = Objects.requireNonNull(body, "body cannot be null");
        if ( ! this.arguments.containsAll(argumentTypes.keySet()))
            throw new IllegalArgumentException("Argument type keys must be a subset of the argument keys");
        this.argumentTypes = ImmutableMap.copyOf(argumentTypes);
        this.returnType = Objects.requireNonNull(returnType, "returnType cannot be null");
    }

    public String getName() { return name; }

    /** Returns an immutable list of the arguments of this */
    public List<String> arguments() { return arguments; }

    public RankingExpression getBody() { return body; }

    /** Returns the types of the arguments of this, if specified. The keys of this may be any subset of the arguments */
    public Map<String, TensorType> argumentTypes() { return argumentTypes; }

    /** Returns the return type of this, or empty if not specified */
    public Optional<TensorType> returnType() { return returnType; }

    /** Returns a copy of this with the name changed to the given value */
    public ExpressionFunction withName(String name) {
        return new ExpressionFunction(name, arguments, body, argumentTypes, returnType);
    }

    /** Returns a copy of this with the body changed to the given value */
    public ExpressionFunction withBody(RankingExpression body) {
        return new ExpressionFunction(name, arguments, body, argumentTypes, returnType);
    }

    /** Returns a copy of this with the return type changed to the given value */
    public ExpressionFunction withReturnType(TensorType returnType) {
        return new ExpressionFunction(name, arguments, body, argumentTypes, Optional.of(returnType));
    }

    /** Returns a copy of this with the given argument added (if not already present) */
    public ExpressionFunction withArgument(String argument) {
        if (arguments.contains(argument)) return this;
        List<String> arguments = new ArrayList<>(this.arguments);
        arguments.add(argument);
        return new ExpressionFunction(name, arguments, body, argumentTypes, returnType);
    }

    /** Returns a copy of this with the given argument (if not present) and argument type added */
    public ExpressionFunction withArgument(String argument, TensorType type) {
        List<String> arguments = new ArrayList<>(this.arguments);
        if ( ! arguments.contains(argument))
            arguments.add(argument);
        Map<String, TensorType> argumentTypes = new HashMap<>(this.argumentTypes);
        argumentTypes.put(argument, type);
        return new ExpressionFunction(name, arguments, body, argumentTypes, returnType);
    }

    /**
     * Creates and returns an instance of this function based on the given
     * arguments. If function calls are nested, this call may produce
     * additional functions.
     *
     * @param context the context used to expand this
     * @param argumentValues the arguments to instantiate on; extra values
     *        beyond the declared argument count are silently ignored
     * @param path the expansion path leading to this.
     * @return the script function instance created.
     */
    public Instance expand(SerializationContext context, List<ExpressionNode> argumentValues, Deque<String> path) {
        Map<String, String> argumentBindings = new HashMap<>();
        for (int i = 0; i < arguments.size() && i < argumentValues.size(); ++i) {
            String key = arguments.get(i);
            ExpressionNode expr = argumentValues.get(i);
            String binding = expr.toString(new StringBuilder(), context, path, null).toString();

            // Non-trivial argument expressions are lifted into generated
            // features so the binding stays a plain feature reference.
            if (shouldGenerateFeature(expr)) {
                String funcName = "autogenerated_ranking_feature@" + Long.toHexString(symbolCode(key + "=" + binding));
                context.addFunctionSerialization(RankingExpression.propertyName(funcName), binding);
                binding = "rankingExpression(" + funcName + ")";
            }
            argumentBindings.put(key, binding);
        }
        context = argumentBindings.isEmpty() ? context.withoutBindings() : context.withBindings(argumentBindings);
        String symbol = toSymbol(argumentBindings);
        String expressionString = body.getRoot().toString(new StringBuilder(), context, path, null).toString();
        return new Instance(symbol, expressionString);
    }

    /** Returns whether the given argument expression should be lifted into a generated feature. */
    private boolean shouldGenerateFeature(ExpressionNode expr) {
        if (expr instanceof ConstantNode) return false;
        if (expr instanceof ReferenceNode) return false;
        if (expr instanceof NameNode) return false;
        if (expr instanceof FunctionNode) return false;
        if (expr instanceof NegativeNode && ((NegativeNode) expr).getValue() instanceof ConstantNode) return false;
        return true;
    }

    /**
     * Returns a symbolic string that represents this function with a given
     * list of arguments. The arguments are mangled by hashing the string
     * representation of the argument expressions.
     *
     * @param argumentBindings the bound arguments to include in the symbolic name.
     * @return the symbolic name for an instance of this function
     */
    private String toSymbol(Map<String, String> argumentBindings) {
        if (argumentBindings.isEmpty()) return name;

        StringBuilder ret = new StringBuilder();
        ret.append(name).append("@");
        for (Map.Entry<String,String> argumentBinding : argumentBindings.entrySet()) {
            ret.append(Long.toHexString(symbolCode(argumentBinding.getKey() + "=" + argumentBinding.getValue())));
            ret.append(".");
        }
        // Strip the trailing separator appended by the loop above.
        if (ret.toString().endsWith("."))
            ret.setLength(ret.length()-1);
        return ret.toString();
    }

    /**
     * Returns a more unique hash code than what Java's own {@link
     * String#hashCode()} method would produce.
     *
     * @param str The string to hash.
     * @return A 64 bit long hash code.
     */
    private static long symbolCode(String str) {
        try {
            MessageDigest md = java.security.MessageDigest.getInstance("SHA-1");
            byte[] buf = md.digest(Utf8.toBytes(str));
            if (buf.length >= 8) {
                long ret = 0;
                // Fold the first 8 digest bytes into a big-endian long.
                for (int i = 0; i < 8; ++i) {
                    ret = (ret << 8) + (buf[i] & 0xff);
                }
                return ret;
            }
        } catch (NoSuchAlgorithmException e) {
            throw new Error("java must always support SHA-1 message digest format", e);
        }
        return str.hashCode();
    }

    @Override
    public String toString() {
        return "function '" + name + "'";
    }

    /**
     * An instance of a serialization of this function, using a particular serialization context (by {@link
     * ExpressionFunction#expand}).
     *
     * Static nested class: it carries no reference to the enclosing function,
     * so it does not pin the enclosing instance in memory.
     */
    public static class Instance {

        private final String name;
        private final String expressionString;

        public Instance(String name, String expressionString) {
            this.name = name;
            this.expressionString = expressionString;
        }

        public String getName() {
            return name;
        }

        public String getExpressionString() {
            return expressionString;
        }

    }

}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.kafka.common;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

import org.apache.kafka.common.utils.Utils;
/**
* The <code>MetricName</code> class encapsulates a metric's name, logical group and its related attributes. It should be constructed using metrics.MetricName(...).
* <p>
* This class captures the following parameters
* <pre>
* <b>name</b> The name of the metric
* <b>group</b> logical group name of the metrics to which this metric belongs.
* <b>description</b> A human-readable description to include in the metric. This is optional.
* <b>tags</b> additional key/value attributes of the metric. This is optional.
* </pre>
* group, tags parameters can be used to create unique metric names while reporting in JMX or any custom reporting.
* <p>
* Ex: standard JMX MBean can be constructed like <b>domainName:type=group,key1=val1,key2=val2</b>
* <p>
*
* Usage looks something like this:
* <pre>{@code
* // set up metrics:
*
* Map<String, String> metricTags = new LinkedHashMap<String, String>();
* metricTags.put("client-id", "producer-1");
* metricTags.put("topic", "topic");
*
* MetricConfig metricConfig = new MetricConfig().tags(metricTags);
* Metrics metrics = new Metrics(metricConfig); // this is the global repository of metrics and sensors
*
* Sensor sensor = metrics.sensor("message-sizes");
*
* MetricName metricName = metrics.metricName("message-size-avg", "producer-metrics", "average message size");
* sensor.add(metricName, new Avg());
*
* metricName = metrics.metricName("message-size-max", "producer-metrics");
* sensor.add(metricName, new Max());
*
* metricName = metrics.metricName("message-size-min", "producer-metrics", "message minimum size", "client-id", "my-client", "topic", "my-topic");
* sensor.add(metricName, new Min());
*
* // as messages are sent we record the sizes
* sensor.record(messageSize);
* }</pre>
*/
public final class MetricName {
private final String name;
private final String group;
private final String description;
private Map<String, String> tags;
private int hash = 0;
/**
* Please create MetricName by method {@link org.apache.kafka.common.metrics.Metrics#metricName(String, String, String, Map)}
*
* @param name The name of the metric
* @param group logical group name of the metrics to which this metric belongs
* @param description A human-readable description to include in the metric
* @param tags additional key/value attributes of the metric
*/
public MetricName(String name, String group, String description, Map<String, String> tags) {
this.name = Utils.notNull(name);
this.group = Utils.notNull(group);
this.description = Utils.notNull(description);
this.tags = Utils.notNull(tags);
}
/**
* @deprecated This method will be removed in a future release.
* Please create MetricName by method {@link org.apache.kafka.common.metrics.Metrics#metricName(String, String, String, String...)}
*
* @param name The name of the metric
* @param group logical group name of the metrics to which this metric belongs
* @param description A human-readable description to include in the metric
* @param keyValue additional key/value attributes of the metric (must come in pairs)
*/
@Deprecated
public MetricName(String name, String group, String description, String... keyValue) {
this(name, group, description, getTags(keyValue));
}
private static Map<String, String> getTags(String... keyValue) {
if ((keyValue.length % 2) != 0)
throw new IllegalArgumentException("keyValue needs to be specified in pairs");
Map<String, String> tags = new HashMap<String, String>();
for (int i = 0; i < keyValue.length; i += 2)
tags.put(keyValue[i], keyValue[i + 1]);
return tags;
}
/**
* @deprecated This method will be removed in a future release.
* Please create MetricName by method {@link org.apache.kafka.common.metrics.Metrics#metricName(String, String, Map)}
*
* @param name The name of the metric
* @param group logical group name of the metrics to which this metric belongs
* @param tags key/value attributes of the metric
*/
@Deprecated
public MetricName(String name, String group, Map<String, String> tags) {
this(name, group, "", tags);
}
/**
* @deprecated This method will be removed in a future release.
* Please create MetricName by method {@link org.apache.kafka.common.metrics.Metrics#metricName(String, String, String)}
*
* @param name The name of the metric
* @param group logical group name of the metrics to which this metric belongs
* @param description A human-readable description to include in the metric
*/
@Deprecated
public MetricName(String name, String group, String description) {
this(name, group, description, new HashMap<String, String>());
}
/**
* @deprecated This method will be removed in a future release.
* Please create MetricName by method {@link org.apache.kafka.common.metrics.Metrics#metricName(String, String)}
*
* @param name The name of the metric
* @param group logical group name of the metrics to which this metric belongs
*/
@Deprecated
public MetricName(String name, String group) {
this(name, group, "", new HashMap<String, String>());
}
/** @return the metric's name (without group or tags). */
public String name() {
    return this.name;
}

/** @return the logical group this metric belongs to. */
public String group() {
    return this.group;
}

/** @return the key/value tags of this metric; the stored map is returned directly, not copied. */
public Map<String, String> tags() {
    return this.tags;
}

/** @return the human-readable description; may be empty when constructed without one. */
public String description() {
    return this.description;
}
@Override
public int hashCode() {
    // Lazily cached: 0 doubles as the "not yet computed" sentinel, so an
    // instance whose hash genuinely is 0 is recomputed on every call (benign).
    if (hash != 0)
        return hash;
    final int prime = 31;
    int result = 1;
    // Only group, name, and tags participate — description is deliberately
    // excluded, matching equals() below.
    result = prime * result + ((group == null) ? 0 : group.hashCode());
    result = prime * result + ((name == null) ? 0 : name.hashCode());
    result = prime * result + ((tags == null) ? 0 : tags.hashCode());
    // Unsynchronized publish of the cache; a racing thread at worst recomputes
    // the same value, so no synchronization is needed.
    this.hash = result;
    return result;
}
@Override
public boolean equals(Object obj) {
    // Identity and exact-type guards first. Equality is defined over group,
    // name, and tags only; description is intentionally not compared,
    // consistent with hashCode().
    if (this == obj)
        return true;
    if (obj == null || getClass() != obj.getClass())
        return false;
    MetricName that = (MetricName) obj;
    boolean sameGroup = (group == null) ? that.group == null : group.equals(that.group);
    boolean sameName = (name == null) ? that.name == null : name.equals(that.name);
    boolean sameTags = (tags == null) ? that.tags == null : tags.equals(that.tags);
    return sameGroup && sameName && sameTags;
}
@Override
public String toString() {
    // %s renders nulls as "null", exactly like string concatenation does.
    return String.format("MetricName [name=%s, group=%s, description=%s, tags=%s]",
            name, group, description, tags);
}
}
| |
/* lineaksetup - Configuration Utility for the LinEAK daemon
* Copyright (C) 2005 Alex Brick
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.cabhan.lineaksetup;
import org.eclipse.swt.*;
import org.eclipse.swt.widgets.*;
import org.eclipse.swt.layout.*;
import org.eclipse.swt.events.*;
import java.io.*;
import java.util.regex.*;
import java.util.HashMap;
public class TypeWindow implements SelectionListener
{
    private Shell s;
    private Table table;
    private Button next;

    // Maps the displayed "<brand> <model>" string to the keyboard's section
    // code ([CODE]) from the definition file. Raw type kept to match the
    // file's existing pre-generics style.
    private HashMap boardToCode;

    /**
     * Generates the GUI, displays it, and runs the SWT event loop until the
     * shell is disposed.
     *
     * @param d the <code>Display</code> to attach to
     */
    public TypeWindow(Display d)
    {
        boardToCode = new HashMap();
        s = new Shell(d);
        s.setText("LinEAK Configuration Utility");
        s.setLayout(new RowLayout(SWT.VERTICAL));

        Label instructions = new Label(s, SWT.WRAP);
        instructions.setText("Please Select Your Keyboard From the List Below and press " +
                             "\"Next\"");

        makeTable();

        next = new Button(s, SWT.PUSH);
        next.setText("Next >>");
        next.addSelectionListener(this);

        s.pack();
        s.open();

        // Standard SWT dispatch loop: block until the window is closed.
        while(!s.isDisposed())
            if(!d.readAndDispatch())
                d.sleep();
        d.dispose();
    }

    /**
     * Builds the keyboard table by parsing the lineakkb definition file.
     * Each section header ([CODE]) plus its brandname/modelname lines becomes
     * one table row; the row text is mapped to the section code in boardToCode.
     * On a missing file or I/O error a message box is shown and the program exits.
     */
    private void makeTable()
    {
        File defFile = Driver.getDefFile();

        table = new Table(s, SWT.MULTI|SWT.BORDER|SWT.FULL_SELECTION);
        table.setHeaderVisible(true);
        table.setLinesVisible(true);

        RowData tData = new RowData();
        tData.height = 500;
        table.setLayoutData(tData);

        TableColumn column = new TableColumn(table, SWT.NONE);
        column.setText("Keyboards");

        try
        {
            BufferedReader bfrRead = new BufferedReader(new FileReader(defFile));
            try
            {
                String currentCode = "";
                String currentBrand = "";

                Pattern p1 = Pattern.compile("^\\s*\\[([\\w\\d\\-]+)\\].*");
                Pattern p2 = Pattern.compile("^\\s+brandname\\s+=\\s+\"(\\w+)\"$");
                Pattern p3 = Pattern.compile("^\\s+modelname\\s+=\\s+\"([\\w\\d\\s()\\-]+)\"$");

                // readLine() returning null is the reliable end-of-stream test;
                // the previous ready()-based loop could terminate early because
                // ready() only reports whether a read would block, not whether
                // the stream is exhausted.
                String theLine;
                while((theLine = bfrRead.readLine()) != null)
                {
                    Matcher m1 = p1.matcher(theLine);
                    Matcher m2 = p2.matcher(theLine);
                    Matcher m3 = p3.matcher(theLine);

                    if(m1.matches())
                    {
                        String theGroup = m1.group(1);
                        // Skip the global KEYS section and END markers; every
                        // other section header is a keyboard code.
                        if(!theGroup.equals("KEYS") && !theGroup.startsWith("END"))
                            currentCode = theGroup;
                    }
                    else if(m2.matches())
                        currentBrand = m2.group(1);
                    else if(m3.matches())
                    {
                        String fullName = currentBrand + " " + m3.group(1);
                        TableItem item = new TableItem(table, SWT.NONE);
                        item.setText(0, fullName);
                        boardToCode.put(fullName,currentCode);
                    }
                }
            }
            finally
            {
                // Close even when parsing throws; the previous code leaked the
                // reader on any exception.
                bfrRead.close();
            }
            table.getColumn(0).pack();
        }
        catch(FileNotFoundException e)
        {
            MessageBox message = new MessageBox(s, SWT.ICON_ERROR);
            message.setText("File Not Found!!");
            message.setMessage("Cannot Find " + defFile.getAbsolutePath() + "!!");
            message.open();
            System.exit(0);
        }
        catch(IOException e)
        {
            MessageBox message = new MessageBox(s, SWT.ICON_ERROR);
            message.setText("IOException!!");
            message.setMessage("Unspecified Input/Output Exception!!");
            message.open();
            System.exit(0);
        }
    }

    /**
     * A basic test program.
     * @param args commandline arguments
     */
    public static void main(String[] args)
    {
        Display d = new Display();
        TypeWindow type = new TypeWindow(d);
    }

    /**
     * Handles all <code>SelectionEvent</code>'s generated by this GUI.
     * Pressing "Next" validates the selection, asks for confirmation, and on
     * "Yes" hands the chosen keyboard off to the Driver.
     */
    public void widgetSelected(SelectionEvent e)
    {
        if(e.getSource() != next)
            return;

        TableItem[] items = table.getSelection();
        if(items.length == 0)
        {
            MessageBox message = new MessageBox(s, SWT.ICON_ERROR);
            message.setText("No Keyboard!!");
            message.setMessage("You Didn't Select A Keyboard!!");
            message.open();
            return;
        }

        // Only the first selected row is used, even though the table allows
        // multi-selection.
        TableItem theItem = items[0];
        String chosenBoard = theItem.getText(0);
        String chosenCode = (String) boardToCode.get(chosenBoard);

        MessageBox message = new MessageBox(s, SWT.ICON_QUESTION|SWT.YES|SWT.NO);
        message.setText("Keyboard Okay?");
        message.setMessage("You have chosen the following keyboard:\n" + chosenBoard +
                           "\n\nIs this the correct choice?");
        int response = message.open();
        if(response == SWT.YES)
        {
            Driver.setCode(chosenCode);
            Driver.setFullName(chosenBoard);
            s.dispose();
            Driver.setCommands();
        }
    }

    /**
     * Exists solely to satisfy the <code>SelectionListener</code> interface.
     */
    public void widgetDefaultSelected(SelectionEvent e)
    {
        // empty
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.examples.bpmn.authorization;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.flowable.engine.IdentityService;
import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.repository.ProcessDefinition;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.identitylink.api.IdentityLink;
import org.flowable.idm.api.Group;
import org.flowable.idm.api.User;
import org.junit.jupiter.api.Test;
/**
* @author Saeid Mirzaei
* @author Tijs Rademakers
*/
public class StartAuthorizationTest extends PluggableFlowableTestCase {

    IdentityService identityService;

    // Users that belong to group1..group3 respectively (one membership each).
    User userInGroup1;
    User userInGroup2;
    User userInGroup3;

    Group group1;
    Group group2;
    Group group3;

    /**
     * Creates users user1..user3 plus userInGroup1..userInGroup3, groups
     * group1..group3, and a single membership linking each userInGroupN to groupN.
     */
    protected void setUpUsersAndGroups() throws Exception {

        identityService = processEngine.getIdentityService();

        identityService.saveUser(identityService.newUser("user1"));
        identityService.saveUser(identityService.newUser("user2"));
        identityService.saveUser(identityService.newUser("user3"));

        // create users
        userInGroup1 = identityService.newUser("userInGroup1");
        identityService.saveUser(userInGroup1);
        userInGroup2 = identityService.newUser("userInGroup2");
        identityService.saveUser(userInGroup2);
        userInGroup3 = identityService.newUser("userInGroup3");
        identityService.saveUser(userInGroup3);

        // create groups
        group1 = identityService.newGroup("group1");
        identityService.saveGroup(group1);
        group2 = identityService.newGroup("group2");
        identityService.saveGroup(group2);
        group3 = identityService.newGroup("group3");
        identityService.saveGroup(group3);

        // relate users to groups
        identityService.createMembership(userInGroup1.getId(), group1.getId());
        identityService.createMembership(userInGroup2.getId(), group2.getId());
        identityService.createMembership(userInGroup3.getId(), group3.getId());
    }

    /** Deletes every membership, group, and user created by {@link #setUpUsersAndGroups()}. */
    protected void tearDownUsersAndGroups() throws Exception {
        identityService.deleteMembership(userInGroup1.getId(), group1.getId());
        identityService.deleteMembership(userInGroup2.getId(), group2.getId());
        identityService.deleteMembership(userInGroup3.getId(), group3.getId());

        identityService.deleteGroup(group1.getId());
        identityService.deleteGroup(group2.getId());
        identityService.deleteGroup(group3.getId());

        identityService.deleteUser(userInGroup1.getId());
        identityService.deleteUser(userInGroup2.getId());
        identityService.deleteUser(userInGroup3.getId());

        identityService.deleteUser("user1");
        identityService.deleteUser("user2");
        identityService.deleteUser("user3");
    }

    @Test
    @Deployment
    public void testIdentityLinks() throws Exception {

        setUpUsersAndGroups();

        try {
            // process1 declares no potential starters.
            ProcessDefinition latestProcessDef = repositoryService.createProcessDefinitionQuery().processDefinitionKey("process1").singleResult();
            assertNotNull(latestProcessDef);
            List<IdentityLink> links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
            assertEquals(0, links.size());

            // process2 declares two candidate starter users.
            latestProcessDef = repositoryService.createProcessDefinitionQuery().processDefinitionKey("process2").singleResult();
            assertNotNull(latestProcessDef);
            links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
            assertEquals(2, links.size());
            assertTrue(containsUserOrGroup("user1", null, links));
            assertTrue(containsUserOrGroup("user2", null, links));

            // process3 declares a single candidate starter user.
            latestProcessDef = repositoryService.createProcessDefinitionQuery().processDefinitionKey("process3").singleResult();
            assertNotNull(latestProcessDef);
            links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
            assertEquals(1, links.size());
            assertEquals("user1", links.get(0).getUserId());

            // process4 declares one candidate user and three candidate groups.
            latestProcessDef = repositoryService.createProcessDefinitionQuery().processDefinitionKey("process4").singleResult();
            assertNotNull(latestProcessDef);
            links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
            assertEquals(4, links.size());
            assertTrue(containsUserOrGroup("userInGroup2", null, links));
            assertTrue(containsUserOrGroup(null, "group1", links));
            assertTrue(containsUserOrGroup(null, "group2", links));
            assertTrue(containsUserOrGroup(null, "group3", links));

        } finally {
            tearDownUsersAndGroups();
        }
    }

    @Test
    @Deployment
    public void testAddAndRemoveIdentityLinks() throws Exception {

        setUpUsersAndGroups();

        try {
            ProcessDefinition latestProcessDef = repositoryService.createProcessDefinitionQuery().processDefinitionKey("potentialStarterNoDefinition").singleResult();
            assertNotNull(latestProcessDef);
            List<IdentityLink> links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
            assertEquals(0, links.size());

            repositoryService.addCandidateStarterGroup(latestProcessDef.getId(), "group1");
            links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
            assertEquals(1, links.size());
            assertEquals("group1", links.get(0).getGroupId());

            repositoryService.addCandidateStarterUser(latestProcessDef.getId(), "user1");
            links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
            assertEquals(2, links.size());
            assertTrue(containsUserOrGroup(null, "group1", links));
            assertTrue(containsUserOrGroup("user1", null, links));

            // Deleting a non-existing candidate group must be a silent no-op.
            repositoryService.deleteCandidateStarterGroup(latestProcessDef.getId(), "nonexisting");
            links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
            assertEquals(2, links.size());

            repositoryService.deleteCandidateStarterGroup(latestProcessDef.getId(), "group1");
            links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
            assertEquals(1, links.size());
            assertEquals("user1", links.get(0).getUserId());

            repositoryService.deleteCandidateStarterUser(latestProcessDef.getId(), "user1");
            links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
            assertEquals(0, links.size());

        } finally {
            tearDownUsersAndGroups();
        }
    }

    /**
     * Returns true when links contains an entry for the given user id or the
     * given group id (either argument may be null to skip that check).
     */
    private boolean containsUserOrGroup(String userId, String groupId, List<IdentityLink> links) {
        for (IdentityLink identityLink : links) {
            if (userId != null && userId.equals(identityLink.getUserId())) {
                return true;
            } else if (groupId != null && groupId.equals(identityLink.getGroupId())) {
                return true;
            }
        }
        return false;
    }

    @Test
    @Deployment
    public void testPotentialStarter() throws Exception {
        // first check an unauthorized user. An exception is expected

        setUpUsersAndGroups();

        try {

            // Authentication should not be done. So an unidentified user should
            // also be able to start the process
            identityService.setAuthenticatedUserId("unauthorizedUser");
            try {
                runtimeService.startProcessInstanceByKey("potentialStarter");

            } catch (Exception e) {
                fail("No StartAuthorizationException expected, " + e.getClass().getName() + " caught.");
            }

            // check with an authorized user obviously it should be no problem
            // starting the process
            identityService.setAuthenticatedUserId("user1");
            ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("potentialStarter");
            assertProcessEnded(processInstance.getId());
            assertTrue(processInstance.isEnded());

            // check extensionElements with : <formalExpression>group2,
            // group(group3), user(user3)</formalExpression>
            ProcessDefinition potentialStarter = repositoryService.createProcessDefinitionQuery().processDefinitionKey("potentialStarter").startableByUser("user1").latestVersion().singleResult();
            assertNotNull(potentialStarter);

            potentialStarter = repositoryService.createProcessDefinitionQuery().processDefinitionKey("potentialStarter").startableByUser("user3").latestVersion().singleResult();
            assertNotNull(potentialStarter);

            potentialStarter = repositoryService.createProcessDefinitionQuery().processDefinitionKey("potentialStarter").startableByUser("userInGroup2").latestVersion().singleResult();
            assertNotNull(potentialStarter);

            potentialStarter = repositoryService.createProcessDefinitionQuery().processDefinitionKey("potentialStarter").startableByUser("userInGroup3").latestVersion().singleResult();
            assertNotNull(potentialStarter);
        } finally {
            tearDownUsersAndGroups();
        }
    }

    /*
     * if there is no security definition, then user authorization check is not done. This ensures backward compatibility
     */
    @Test
    @Deployment
    public void testPotentialStarterNoDefinition() throws Exception {
        identityService = processEngine.getIdentityService();

        identityService.setAuthenticatedUserId("someOneFromMars");
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("potentialStarterNoDefinition");
        assertNotNull(processInstance.getId());
        assertProcessEnded(processInstance.getId());
        assertTrue(processInstance.isEnded());
    }

    // this test checks the list without user constraint
    @Test
    @Deployment
    public void testProcessDefinitionList() throws Exception {

        setUpUsersAndGroups();
        try {

            // Process 1 has no potential starters
            ProcessDefinition latestProcessDef = repositoryService.createProcessDefinitionQuery().processDefinitionKey("process1").singleResult();
            List<User> authorizedUsers = identityService.getPotentialStarterUsers(latestProcessDef.getId());
            assertEquals(0, authorizedUsers.size());

            // user1 and user2 are potential starters of Process2
            latestProcessDef = repositoryService.createProcessDefinitionQuery().processDefinitionKey("process2").singleResult();
            authorizedUsers = identityService.getPotentialStarterUsers(latestProcessDef.getId());
            assertEquals(2, authorizedUsers.size());
            // Sort by id so the assertions below are deterministic.
            Collections.sort(authorizedUsers, Comparator.comparing(User::getId));
            assertEquals("user1", authorizedUsers.get(0).getId());
            assertEquals("user2", authorizedUsers.get(1).getId());

            // Process 2 has no potential starter groups
            latestProcessDef = repositoryService.createProcessDefinitionQuery().processDefinitionKey("process2").singleResult();
            List<Group> authorizedGroups = identityService.getPotentialStarterGroups(latestProcessDef.getId());
            assertEquals(0, authorizedGroups.size());

            // Process 4 has 3 groups as authorized starter groups
            latestProcessDef = repositoryService.createProcessDefinitionQuery().processDefinitionKey("process4").singleResult();
            authorizedGroups = identityService.getPotentialStarterGroups(latestProcessDef.getId());
            assertEquals(3, authorizedGroups.size());
            Collections.sort(authorizedGroups, Comparator.comparing(Group::getId));
            assertEquals("group1", authorizedGroups.get(0).getId());
            assertEquals("group2", authorizedGroups.get(1).getId());
            assertEquals("group3", authorizedGroups.get(2).getId());

            // do not mention user, all processes should be selected
            List<ProcessDefinition> processDefinitions = repositoryService.createProcessDefinitionQuery().orderByProcessDefinitionName().asc().list();

            assertEquals(4, processDefinitions.size());

            assertEquals("process1", processDefinitions.get(0).getKey());
            assertEquals("process2", processDefinitions.get(1).getKey());
            assertEquals("process3", processDefinitions.get(2).getKey());
            assertEquals("process4", processDefinitions.get(3).getKey());

            // check user1, process3 has "user1" as only authorized starter, and
            // process2 has two authorized starters, of which one is "user1"
            processDefinitions = repositoryService.createProcessDefinitionQuery().orderByProcessDefinitionName().asc().startableByUser("user1").list();

            assertEquals(2, processDefinitions.size());
            assertEquals("process2", processDefinitions.get(0).getKey());
            assertEquals("process3", processDefinitions.get(1).getKey());

            // "user2" can only start process2
            processDefinitions = repositoryService.createProcessDefinitionQuery().startableByUser("user2").list();

            assertEquals(1, processDefinitions.size());
            assertEquals("process2", processDefinitions.get(0).getKey());

            // no process could be started with "user4"
            processDefinitions = repositoryService.createProcessDefinitionQuery().startableByUser("user4").list();
            assertEquals(0, processDefinitions.size());

            // "userInGroup3" is in "group3" and can start only process4 via group authorization
            processDefinitions = repositoryService.createProcessDefinitionQuery().startableByUser("userInGroup3").list();
            assertEquals(1, processDefinitions.size());
            assertEquals("process4", processDefinitions.get(0).getKey());

            // "userInGroup2" can start process4, via both user and group authorizations
            // but we have to be sure that process4 appears only once
            processDefinitions = repositoryService.createProcessDefinitionQuery().startableByUser("userInGroup2").list();
            assertEquals(1, processDefinitions.size());
            assertEquals("process4", processDefinitions.get(0).getKey());

        } finally {
            tearDownUsersAndGroups();
        }
    }
}
| |
package com.borqs.market;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import android.app.ActionBar;
import android.app.ActionBar.Tab;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.view.ViewPager;
import android.text.TextUtils;
import android.view.Menu;
import android.view.MenuItem;
import com.borqs.market.fragment.ProductGridFragment;
import com.borqs.market.json.Product.ProductType;
import com.borqs.market.utils.BLog;
import com.borqs.market.utils.MarketUtils;
public class MarketHomeActivity extends BasicActivity implements
        ActionBar.TabListener {
    private final String TAG = "MarketHomeActivity";

    static final String TAG_THEME = "TAG_THEME";
    static final String TAG_OBJECT = "TAG_OBJECT";

    private MyAdapter mAdapter;
    private ViewPager mPager;

    // Tab titles loaded from R.array.theme_category; also used as the keys of
    // fragmentMap and of the saved-instance-state fragment tags.
    private String[] theme_category;
    private ArrayList<ActionBar.Tab> tabList = new ArrayList<ActionBar.Tab>();
    // One cached fragment per category title so tab switches reuse instances.
    private Map<String, ProductGridFragment> fragmentMap = new HashMap<String, ProductGridFragment>();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        BLog.d(TAG, "onCreate(savedInstanceState)");
        setContentView(R.layout.activity_main);
        getActionBar().setDisplayHomeAsUpEnabled(true);
        getActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
        parseIntent();

        // One action-bar tab per category title.
        theme_category = getResources().getStringArray(R.array.theme_category);
        if (theme_category != null && theme_category.length > 0) {
            for (int i = 0; i < theme_category.length; i++) {
                ActionBar.Tab tab = getActionBar().newTab();
                tab.setText(theme_category[i]);
                tab.setTabListener(this);
                getActionBar().addTab(tab, i);
                tabList.add(i, tab);
            }
        }
        getActionBar().setSelectedNavigationItem(tab_index);

        mAdapter = new MyAdapter(getSupportFragmentManager());
        mPager = (ViewPager) findViewById(R.id.pager);
        mPager.setAdapter(mAdapter);
        mPager.setCurrentItem(tab_index);
        mPager.setOnPageChangeListener(new ViewPager.OnPageChangeListener() {

            @Override
            public void onPageSelected(int position) {
                // Keep the action-bar tab selection in sync with swipes.
                tab_index = position;
                getActionBar().setSelectedNavigationItem(position);
            }

            @Override
            public void onPageScrolled(int arg0, float arg1, int arg2) {
                // no-op
            }

            @Override
            public void onPageScrollStateChanged(int arg0) {
                // no-op
            }
        });
    }

    private int app_version = 0;
    private String package_name;
    private int tab_index = 0;

    /**
     * Reads the launch intent extras: the required package name, the optional
     * app version, and the initial category (theme/object) to select.
     *
     * @throws IllegalArgumentException if no package name extra is supplied
     */
    private void parseIntent() {
        app_version = getIntent().getIntExtra(MarketUtils.EXTRA_APP_VERSION, 0);
        package_name = getIntent().getStringExtra(
                MarketUtils.EXTRA_PACKAGE_NAME);
        if (TextUtils.isEmpty(package_name)) {
            throw new IllegalArgumentException("package name is null");
        }
        String categoryStr = getIntent().getStringExtra(
                MarketUtils.EXTRA_CATEGORY);
        if (MarketUtils.CATEGORY_OBJECT.equals(categoryStr)) {
            tab_index = 1;
        } else {
            // CATEGORY_THEME and any missing/unknown value select the first tab.
            tab_index = 0;
        }
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        BLog.d(TAG, "onSaveInstanceState(outState)");
        // Persist each cached fragment under its category-derived tag.
        if (fragmentMap != null && theme_category != null
                && theme_category.length > 0) {
            for (int index = 0; index < theme_category.length; index++) {
                // Bug fix: look up by the category name at this index. The old
                // code passed the whole theme_category array as the key, so the
                // lookup always returned null and no fragment was ever saved.
                ProductGridFragment f = fragmentMap.get(theme_category[index]);
                if (f != null) {
                    getSupportFragmentManager().putFragment(outState,
                            generateFragmentTag(theme_category[index]), f);
                }
            }
        }
        super.onSaveInstanceState(outState);
    }

    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
        BLog.d(TAG, "onRestoreInstanceState(outState)");
        // Restore the per-category fragments saved in onSaveInstanceState.
        if (savedInstanceState != null && theme_category != null
                && theme_category.length > 0) {
            if (fragmentMap == null)
                fragmentMap = new HashMap<String, ProductGridFragment>();
            fragmentMap.clear();
            for (int index = 0; index < theme_category.length; index++) {
                fragmentMap
                        .put(theme_category[index],
                                (ProductGridFragment) getSupportFragmentManager()
                                        .getFragment(
                                                savedInstanceState,
                                                generateFragmentTag(theme_category[index])));
            }
        }
    }

    /** Builds the saved-instance-state key for a category's fragment. */
    private String generateFragmentTag(String key) {
        return "TAG_" + key;
    }

    /**
     * Returns the cached fragment for the tab at the given position, creating
     * and caching a new one if necessary.
     */
    private ProductGridFragment getFragment(int position) {
        if (fragmentMap == null) {
            fragmentMap = new HashMap<String, ProductGridFragment>();
        }
        ProductGridFragment f = fragmentMap.get(theme_category[position]);
        if (f == null) {
            f = new ProductGridFragment(ProductType.getProductType(position),
                    app_version, package_name);
            fragmentMap.put(theme_category[position], f);
        }
        return f;
    }

    /** Pager adapter backed by the shared per-category fragment cache. */
    class MyAdapter extends FragmentPagerAdapter {
        public MyAdapter(FragmentManager fm) {
            super(fm);
        }

        @Override
        public int getCount() {
            return theme_category == null ? 0 : theme_category.length;
        }

        @Override
        public Fragment getItem(int position) {
            // Reuse the activity-level cache instead of duplicating its logic.
            return getFragment(position);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu optionMenu) {
        getMenuInflater().inflate(R.menu.basic_menu, optionMenu);
        return super.onCreateOptionsMenu(optionMenu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int itemId = item.getItemId();
        if (itemId == android.R.id.home) {
            finish();
        } else if (itemId == R.id.menu_refresh) {
            // Refresh only the currently visible tab's fragment.
            ProductGridFragment fragment = getFragment(tab_index);
            if (fragment != null) {
                fragment.onRefresh();
            }
        }
        return super.onOptionsItemSelected(item);
    }

    public interface ActionListener {
        void onrefresh();
    }

    @Override
    public void onTabSelected(Tab tab, android.app.FragmentTransaction ft) {
        // Keep the pager in sync when a tab is tapped.
        if (mPager != null) {
            tab_index = tab.getPosition();
            mPager.setCurrentItem(tab_index, true);
        }
    }

    @Override
    public void onTabUnselected(Tab tab, android.app.FragmentTransaction ft) {
        // no-op
    }

    @Override
    public void onTabReselected(Tab tab, android.app.FragmentTransaction ft) {
        // no-op
    }
}
| |
// Copyright 2011, Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.common.lib.soap.axis;
import com.google.api.ads.common.lib.client.RemoteCallReturn;
import com.google.api.ads.common.lib.client.RequestInfo;
import com.google.api.ads.common.lib.client.ResponseInfo;
import com.google.api.ads.common.lib.exception.ServiceException;
import com.google.api.ads.common.lib.soap.RequestInfoXPathSet;
import com.google.api.ads.common.lib.soap.ResponseInfoXPathSet;
import com.google.api.ads.common.lib.soap.SoapCall;
import com.google.api.ads.common.lib.soap.SoapClientHandler;
import com.google.api.ads.common.lib.soap.SoapClientHandlerInterface;
import com.google.api.ads.common.lib.soap.SoapServiceDescriptor;
import com.google.api.ads.common.lib.soap.compatability.AxisCompatible;
import com.google.common.base.Preconditions;
import java.lang.reflect.InvocationTargetException;
import java.util.Hashtable;
import java.util.Map;
import javax.inject.Inject;
import javax.xml.namespace.QName;
import javax.xml.soap.SOAPException;
import org.apache.axis.EngineConfiguration;
import org.apache.axis.EngineConfigurationFactory;
import org.apache.axis.MessageContext;
import org.apache.axis.client.Service;
import org.apache.axis.client.Stub;
import org.apache.axis.message.SOAPHeaderElement;
import org.apache.axis.transport.http.HTTPConstants;
import org.apache.commons.beanutils.BeanUtils;
/**
* SOAP Client Handler implementation for use with Axis 1.x.
*/
public class AxisHandler extends SoapClientHandler<Stub> {
private final EngineConfigurationFactory engineConfigurationFactory;
private final RequestInfoXPathSet requestInfoXPathSet;
private final ResponseInfoXPathSet responseInfoXPathSet;
/**
 * Constructor.
 *
 * @param engineConfigurationFactory factory that supplies Axis engine configurations
 * @param requestInfoXPathSet XPath set used to extract request information from SOAP messages
 * @param responseInfoXPathSet XPath set used to extract response information from SOAP messages
 */
@Inject
public AxisHandler(EngineConfigurationFactory engineConfigurationFactory,
    RequestInfoXPathSet requestInfoXPathSet,
    ResponseInfoXPathSet responseInfoXPathSet) {
  this.engineConfigurationFactory = engineConfigurationFactory;
  this.requestInfoXPathSet = requestInfoXPathSet;
  this.responseInfoXPathSet = responseInfoXPathSet;
}
/**
 * Sets the endpoint address of the given SOAP client by storing it under the
 * standard JAX-RPC {@code ENDPOINT_ADDRESS_PROPERTY} on the stub.
 *
 * @param soapClient the SOAP client to set the endpoint address for
 * @param endpointAddress the target endpoint address
 */
@Override
public void setEndpointAddress(Stub soapClient, String endpointAddress) {
  soapClient._setProperty(Stub.ENDPOINT_ADDRESS_PROPERTY, endpointAddress);
}
/**
 * Sets the read timeout of the given SOAP client.
 *
 * @param soapClient the SOAP client to set the read timeout for
 * @param timeout the timeout in milliseconds; delegates to the Axis stub's
 *     own timeout setting
 */
@Override
public void setRequestTimeout(Stub soapClient, int timeout) {
  soapClient.setTimeout(timeout);
}
/**
 * Returns a SOAP header from the given SOAP client, if it exists.
 *
 * @param soapClient the SOAP client to check for the given header
 * @param headerName the name of the header being looked for
 * @return the first header element whose name matches, or {@code null} if none does
 */
@Override
public Object getHeader(Stub soapClient, String headerName) {
  for (SOAPHeaderElement candidate : soapClient.getHeaders()) {
    if (candidate.getName().equals(headerName)) {
      return candidate;
    }
  }
  return null;
}
/**
 * Clears all of the SOAP headers from the given SOAP client, and resets its
 * HTTP request headers to an empty table.
 *
 * @param soapClient the client to remove the headers from
 */
@Override
public void clearHeaders(Stub soapClient) {
  // Drop the SOAP headers registered on the stub.
  soapClient.clearHeaders();
  // Replace the HTTP request headers with a fresh, empty table.
  Hashtable<String, String> emptyHttpHeaders = new Hashtable<String, String>();
  soapClient._setProperty(HTTPConstants.REQUEST_HEADERS, emptyHttpHeaders);
}
/**
* @see SoapClientHandler#setHeader(Object, String, String, Object)
*/
@Override
public void setHeader(Stub soapClient, String namespace, String headerName,
Object headerValue) {
try {
QName qName = new QName(namespace, headerName);
SOAPHeaderElement soapHeaderElement = new SOAPHeaderElement(qName);
soapHeaderElement.setObjectValue(headerValue);
soapHeaderElement.setActor(null);
soapClient.setHeader(soapHeaderElement);
} catch (SOAPException e) {
throw new ServiceException("Could not set header.", e);
}
}
  /**
   * Updates the child attribute of headerName named childName to childValue.
   *
   * <p>The parent header's object value is mutated in place via a bean property write.
   *
   * @param soapClient the stub
   * @param parentHeaderName the name of the parent header
   * @param childName the name of the child
   * @param childValue the value for the child
   *
   * @throws NullPointerException if no header exists named parentHeaderName
   */
  public void setHeaderChild(Stub soapClient, String parentHeaderName, String childName,
      Object childValue) {
    SOAPHeaderElement headerElement = (SOAPHeaderElement) getHeader(soapClient, parentHeaderName);
    Object headerObject = Preconditions.checkNotNull(headerElement,
        "Parent header named %s does not exist", parentHeaderName).getObjectValue();
    try {
      // BeanUtils resolves the setter for childName reflectively on the header object.
      BeanUtils.setProperty(headerObject, childName, childValue);
    } catch (IllegalAccessException e) {
      throw new ServiceException("Failed to set header child " + childName, e);
    } catch (InvocationTargetException e) {
      throw new ServiceException("Failed to set header child " + childName, e);
    }
  }
/**
* @see SoapClientHandler#putAllHttpHeaders(Object, Map)
*/
@Override
public void putAllHttpHeaders(Stub soapClient, Map<String, String> headersMap) {
@SuppressWarnings("unchecked")
Hashtable<String, String> headers =
(Hashtable<String, String>) soapClient._getProperty(HTTPConstants.REQUEST_HEADERS);
if (headers == null) {
headers = new Hashtable<String, String>();
}
headers.putAll(headersMap);
soapClient._setProperty(HTTPConstants.REQUEST_HEADERS, headers);
}
  /**
   * Set whether SOAP requests should use compression.
   *
   * <p>Enables/disables both accepting gzipped responses and gzipping the request body.
   *
   * @param soapClient the client to set compression settings for
   * @param compress whether or not to use compression
   */
  @Override
  public void setCompression(Stub soapClient, boolean compress) {
    soapClient._setProperty(HTTPConstants.MC_ACCEPT_GZIP, compress);
    soapClient._setProperty(HTTPConstants.MC_GZIP_REQUEST, compress);
  }
  /**
   * Creates a SOAP client using a SOAP service descriptor.
   *
   * <p>The descriptor must be {@link AxisCompatible}. Its locator class is instantiated
   * reflectively with this handler's engine configuration, and the locator's
   * {@code getPort(Class)} method is invoked to obtain the typed stub — equivalent to
   * {@code new SomeLocator(engineConfiguration).getPort(interfaceClass)}, done via
   * reflection because the locator class is only known at runtime.
   *
   * @param soapServiceDescriptor the descriptor to use for creating a client
   * @return the SOAP client for this descriptor
   * @throws ServiceException thrown if the SOAP client cannot be created
   */
  @Override
  public Stub createSoapClient(SoapServiceDescriptor soapServiceDescriptor)
      throws ServiceException {
    try {
      if (soapServiceDescriptor instanceof AxisCompatible) {
        AxisCompatible axisCompatibleService = (AxisCompatible) soapServiceDescriptor;
        EngineConfiguration engineConfiguration =
            engineConfigurationFactory.getClientEngineConfig();
        Service locator = (Service) axisCompatibleService.getLocatorClass()
            .getConstructor(new Class[] {EngineConfiguration.class})
            .newInstance(new Object[] {engineConfiguration});
        return (Stub) locator.getClass().getMethod("getPort", Class.class)
            .invoke(locator, soapServiceDescriptor.getInterfaceClass());
      }
      throw new ServiceException(
          "Service [" + soapServiceDescriptor + "] not compatible with Axis", null);
    } catch (SecurityException e) {
      throw new ServiceException("Unexpected Exception.", e);
    } catch (NoSuchMethodException e) {
      throw new ServiceException("Unexpected Exception.", e);
    } catch (IllegalArgumentException e) {
      throw new ServiceException("Unexpected Exception.", e);
    } catch (IllegalAccessException e) {
      throw new ServiceException("Unexpected Exception.", e);
    } catch (InvocationTargetException e) {
      throw new ServiceException("Unexpected Exception.", e);
    } catch (ClassNotFoundException e) {
      throw new ServiceException("Unexpected Exception.", e);
    } catch (InstantiationException e) {
      throw new ServiceException("Unexpected Exception.", e);
    }
  }
  /**
   * Invoke a SOAP call.
   *
   * <p>Synchronizes on the stub so that the call/message context read in the
   * {@code finally} block belongs to the invocation just made on this thread.
   *
   * @param soapCall the call to make to a SOAP web service
   * @return information about the SOAP response
   */
  @Override
  public RemoteCallReturn invokeSoapCall(SoapCall<Stub> soapCall) {
    Stub stub = soapCall.getSoapClient();
    RemoteCallReturn.Builder builder = new RemoteCallReturn.Builder();
    synchronized (stub) {
      Object result = null;
      try {
        result = invoke(soapCall);
      } catch (InvocationTargetException e) {
        // Unwrap to report the underlying web-service fault rather than the
        // reflection wrapper.
        builder.withException(e.getTargetException());
      } catch (Exception e) {
        builder.withException(e);
      } finally {
        // NOTE(review): assumes stub._getCall() is non-null here even when invoke()
        // failed before a call was created — confirm, otherwise this block can NPE
        // and mask the original exception.
        MessageContext messageContext = stub._getCall().getMessageContext();
        RequestInfo.Builder requestInfoBuilder = new RequestInfo.Builder()
            .withMethodName(stub._getCall().getOperationName().getLocalPart())
            .withServiceName(stub._getService().getServiceName().getLocalPart())
            .withUrl(stub._getCall().getTargetEndpointAddress());
        requestInfoXPathSet.parseMessage(requestInfoBuilder, messageContext.getRequestMessage());
        builder.withRequestInfo(requestInfoBuilder
            .build());
        ResponseInfo.Builder responseInfoBuilder = new ResponseInfo.Builder();
        responseInfoXPathSet.parseMessage(responseInfoBuilder, messageContext.getResponseMessage());
        builder.withResponseInfo(responseInfoBuilder.build());
      }
      return builder.withReturnValue(result).build();
    }
  }
  /**
   * Returns the endpoint address currently configured on the stub.
   *
   * @see SoapClientHandlerInterface#getEndpointAddress(Object)
   */
  @Override
  public String getEndpointAddress(Stub soapClient) {
    return (String) soapClient._getProperty(Stub.ENDPOINT_ADDRESS_PROPERTY);
  }
  /**
   * Creates an Axis SOAP header element for the given qualified name.
   *
   * @see SoapClientHandlerInterface#createSoapHeaderElement(QName)
   */
  @Override
  public javax.xml.soap.SOAPHeaderElement createSoapHeaderElement(QName qName) {
    return new SOAPHeaderElement(qName);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.function.mapping;
import java.time.Instant;
import java.time.format.DateTimeParseException;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.analytics.value.AnalyticsValue;
import org.apache.solr.analytics.value.AnalyticsValueStream;
import org.apache.solr.analytics.value.BooleanValue;
import org.apache.solr.analytics.value.BooleanValueStream;
import org.apache.solr.analytics.value.DateValue;
import org.apache.solr.analytics.value.DateValueStream;
import org.apache.solr.analytics.value.DoubleValue;
import org.apache.solr.analytics.value.DoubleValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestAnalyticsValue;
import org.apache.solr.analytics.value.FillableTestValue.TestAnalyticsValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestBooleanValue;
import org.apache.solr.analytics.value.FillableTestValue.TestBooleanValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestDateValue;
import org.apache.solr.analytics.value.FillableTestValue.TestDateValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestDoubleValue;
import org.apache.solr.analytics.value.FillableTestValue.TestDoubleValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestFloatValue;
import org.apache.solr.analytics.value.FillableTestValue.TestFloatValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestIntValue;
import org.apache.solr.analytics.value.FillableTestValue.TestIntValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestLongValue;
import org.apache.solr.analytics.value.FillableTestValue.TestLongValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestStringValue;
import org.apache.solr.analytics.value.FillableTestValue.TestStringValueStream;
import org.apache.solr.analytics.value.FloatValue;
import org.apache.solr.analytics.value.FloatValueStream;
import org.apache.solr.analytics.value.IntValue;
import org.apache.solr.analytics.value.IntValueStream;
import org.apache.solr.analytics.value.LongValue;
import org.apache.solr.analytics.value.LongValueStream;
import org.apache.solr.analytics.value.StringValue;
import org.apache.solr.analytics.value.StringValueStream;
import org.junit.Test;
public class RemoveFunctionTest extends SolrTestCaseJ4 {
  /** Checks the static return type of {@code RemoveFunction} for various input type pairings. */
  @Test
  public void castingTest() {
    assertTrue(
        RemoveFunction.creatorFunction.apply(
                new AnalyticsValueStream[] {new TestBooleanValue(), new TestStringValue()})
            instanceof StringValue);
    assertFalse(
        RemoveFunction.creatorFunction.apply(
                new AnalyticsValueStream[] {new TestIntValueStream(), new TestFloatValue()})
            instanceof FloatValue);
    assertFalse(
        RemoveFunction.creatorFunction.apply(
                new AnalyticsValueStream[] {new TestLongValueStream(), new TestDateValue()})
            instanceof AnalyticsValue);
    assertFalse(
        RemoveFunction.creatorFunction.apply(
                new AnalyticsValueStream[] {new TestLongValue(), new TestAnalyticsValue()})
            instanceof StringValue);
    assertTrue(
        RemoveFunction.creatorFunction.apply(
                new AnalyticsValueStream[] {new TestIntValue(), new TestLongValue()})
            instanceof DoubleValue);
    assertFalse(
        RemoveFunction.creatorFunction.apply(
                new AnalyticsValueStream[] {new TestDateValue(), new TestStringValue()})
            instanceof DateValue);
    assertFalse(
        RemoveFunction.creatorFunction.apply(
                new AnalyticsValueStream[] {new TestBooleanValueStream(), new TestStringValue()})
            instanceof BooleanValueStream);
    assertFalse(
        RemoveFunction.creatorFunction.apply(
                new AnalyticsValueStream[] {new TestDoubleValue(), new TestIntValue()})
            instanceof LongValue);
  }
  /** Single-valued booleans: the value exists only when it differs from an existing remover. */
  @Test
  public void singleValueBooleanTest() {
    TestBooleanValue val = new TestBooleanValue();
    TestBooleanValue remover = new TestBooleanValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof BooleanValue);
    BooleanValue func = (BooleanValue) uncasted;

    // Value doesn't exist
    val.setExists(false);
    remover.setExists(false);
    func.getBoolean();
    assertFalse(func.exists());

    val.setExists(false);
    remover.setValue(false).setExists(true);
    func.getBoolean();
    assertFalse(func.exists());

    // Value exists
    val.setValue(true).setExists(true);
    remover.setValue(false).setExists(true);
    assertEquals(true, func.getBoolean());
    assertTrue(func.exists());

    val.setValue(true).setExists(true);
    remover.setValue(true).setExists(true);
    func.getBoolean();
    assertFalse(func.exists());

    val.setValue(false).setExists(true);
    remover.setValue(true).setExists(true);
    assertEquals(false, func.getBoolean());
    assertTrue(func.exists());

    val.setValue(false).setExists(true);
    remover.setValue(false).setExists(true);
    func.getBoolean();
    assertFalse(func.exists());

    // A missing remover removes nothing.
    val.setValue(false).setExists(true);
    remover.setExists(false);
    assertEquals(false, func.getBoolean());
    assertTrue(func.exists());
  }
  /** Single-valued ints: the value exists only when it differs from an existing remover. */
  @Test
  public void singleValueIntTest() {
    TestIntValue val = new TestIntValue();
    TestIntValue remover = new TestIntValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof IntValue);
    IntValue func = (IntValue) uncasted;

    // Value doesn't exist
    val.setExists(false);
    remover.setExists(false);
    func.getInt();
    assertFalse(func.exists());

    val.setExists(false);
    remover.setValue(-234).setExists(true);
    func.getInt();
    assertFalse(func.exists());

    // Value exists
    val.setValue(21).setExists(true);
    remover.setValue(234).setExists(true);
    assertEquals(21, func.getInt());
    assertTrue(func.exists());

    val.setValue(-154).setExists(true);
    remover.setValue(-154).setExists(true);
    func.getInt();
    assertFalse(func.exists());

    // A missing remover removes nothing.
    val.setValue(52334).setExists(true);
    remover.setExists(false);
    assertEquals(52334, func.getInt());
    assertTrue(func.exists());
  }
  /** Single-valued longs: the value exists only when it differs from an existing remover. */
  @Test
  public void singleValueLongTest() {
    TestLongValue val = new TestLongValue();
    TestLongValue remover = new TestLongValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof LongValue);
    LongValue func = (LongValue) uncasted;

    // Value doesn't exist
    val.setExists(false);
    remover.setExists(false);
    func.getLong();
    assertFalse(func.exists());

    val.setExists(false);
    remover.setValue(234L).setExists(true);
    func.getLong();
    assertFalse(func.exists());

    // Value exists
    val.setValue(21L).setExists(true);
    remover.setValue(234L).setExists(true);
    assertEquals(21L, func.getLong());
    assertTrue(func.exists());

    val.setValue(3421L).setExists(true);
    remover.setValue(3421L).setExists(true);
    func.getLong();
    assertFalse(func.exists());

    // A missing remover removes nothing.
    val.setValue(-52334L).setExists(true);
    remover.setExists(false);
    assertEquals(-52334L, func.getLong());
    assertTrue(func.exists());
  }
  /** Single-valued floats: the value exists only when it differs from an existing remover. */
  @Test
  public void singleValueFloatTest() {
    TestFloatValue val = new TestFloatValue();
    TestFloatValue remover = new TestFloatValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof FloatValue);
    FloatValue func = (FloatValue) uncasted;

    // Value doesn't exist
    val.setExists(false);
    remover.setExists(false);
    func.getFloat();
    assertFalse(func.exists());

    val.setExists(false);
    remover.setValue(3124123.32F).setExists(true);
    func.getFloat();
    assertFalse(func.exists());

    // Value exists
    val.setValue(-21.324F).setExists(true);
    remover.setValue(23423.423342F).setExists(true);
    assertEquals(-21.324F, func.getFloat(), .00000001);
    assertTrue(func.exists());

    val.setValue(84353.452F).setExists(true);
    remover.setValue(84353.452F).setExists(true);
    func.getFloat();
    assertFalse(func.exists());

    // A missing remover removes nothing.
    val.setValue(2345.345543F).setExists(true);
    remover.setExists(false);
    assertEquals(2345.345543F, func.getFloat(), .00000001);
    assertTrue(func.exists());
  }
  /** Single-valued doubles: the value exists only when it differs from an existing remover. */
  @Test
  public void singleValueDoubleTest() {
    TestDoubleValue val = new TestDoubleValue();
    TestDoubleValue remover = new TestDoubleValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof DoubleValue);
    DoubleValue func = (DoubleValue) uncasted;

    // Value doesn't exist
    val.setExists(false);
    remover.setExists(false);
    func.getDouble();
    assertFalse(func.exists());

    val.setExists(false);
    remover.setValue(3124123.32).setExists(true);
    func.getDouble();
    assertFalse(func.exists());

    // Value exists
    val.setValue(-21.324).setExists(true);
    remover.setValue(23423.423342).setExists(true);
    assertEquals(-21.324, func.getDouble(), .00000001);
    assertTrue(func.exists());

    val.setValue(84353.452).setExists(true);
    remover.setValue(84353.452).setExists(true);
    func.getDouble();
    assertFalse(func.exists());

    // A missing remover removes nothing.
    val.setValue(2345.345543).setExists(true);
    remover.setExists(false);
    assertEquals(2345.345543, func.getDouble(), .00000001);
    assertTrue(func.exists());
  }
@Test
public void singleValueDateTest() throws DateTimeParseException {
Date date1 = Date.from(Instant.parse("1810-12-02T10:30:15Z"));
Date date2 = Date.from(Instant.parse("1950-02-23T14:54:34Z"));
Date date3 = Date.from(Instant.parse("2023-11-01T20:30:15Z"));
TestDateValue val = new TestDateValue();
TestDateValue remover = new TestDateValue();
AnalyticsValueStream uncasted =
RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
assertTrue(uncasted instanceof DateValue);
DateValue func = (DateValue) uncasted;
// Value doesn't exist
val.setExists(false);
remover.setExists(false);
func.getLong();
assertFalse(func.exists());
val.setExists(false);
remover.setValue("1950-02-23T14:54:34Z").setExists(true);
func.getLong();
assertFalse(func.exists());
// Value exists
val.setValue("1810-12-02T10:30:15Z").setExists(true);
remover.setValue("2023-11-01T20:30:15Z").setExists(true);
assertEquals(date1.getTime(), func.getLong());
assertTrue(func.exists());
val.setValue("1950-02-23T14:54:34Z").setExists(true);
remover.setValue("1950-02-23T14:54:34Z").setExists(true);
func.getLong();
assertFalse(func.exists());
val.setValue("2023-11-01T20:30:15Z").setExists(true);
remover.setExists(false);
assertEquals(date3.getTime(), func.getLong());
assertTrue(func.exists());
}
  /** Single-valued strings: the value exists only when it differs from an existing remover. */
  @Test
  public void singleValueStringTest() {
    TestStringValue val = new TestStringValue();
    TestStringValue remover = new TestStringValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof StringValue);
    StringValue func = (StringValue) uncasted;

    // Value doesn't exist
    val.setExists(false);
    remover.setExists(false);
    func.getString();
    assertFalse(func.exists());

    val.setExists(false);
    remover.setValue("abc456").setExists(true);
    func.getString();
    assertFalse(func.exists());

    // Value exists
    val.setValue("abc123").setExists(true);
    remover.setValue("def456").setExists(true);
    assertEquals("abc123", func.getString());
    assertTrue(func.exists());

    val.setValue("this will be removed").setExists(true);
    remover.setValue("this will be removed").setExists(true);
    func.getString();
    assertFalse(func.exists());

    // A missing remover removes nothing.
    val.setValue("def123").setExists(true);
    remover.setExists(false);
    assertEquals("def123", func.getString());
    assertTrue(func.exists());
  }
  /** Single-valued objects: the value exists only when it differs from an existing remover. */
  @Test
  public void singleValueObjectTest() {
    TestAnalyticsValue val = new TestAnalyticsValue();
    TestAnalyticsValue remover = new TestAnalyticsValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof AnalyticsValue);
    AnalyticsValue func = (AnalyticsValue) uncasted;

    // Value doesn't exist
    val.setExists(false);
    remover.setExists(false);
    func.getObject();
    assertFalse(func.exists());

    val.setExists(false);
    remover.setValue(Boolean.TRUE).setExists(true);
    func.getObject();
    assertFalse(func.exists());

    // Value exists; removal compares across heterogeneous object types.
    val.setValue("abc123").setExists(true);
    remover.setValue(new Date(123)).setExists(true);
    assertEquals("abc123", func.getObject());
    assertTrue(func.exists());

    val.setValue(23423.0d).setExists(true);
    remover.setValue(23423.0d).setExists(true);
    func.getObject();
    assertFalse(func.exists());

    // A missing remover removes nothing.
    val.setValue(234L).setExists(true);
    remover.setExists(false);
    assertEquals(234L, func.getObject());
    assertTrue(func.exists());
  }
  /** Multi-valued booleans: every element equal to an existing remover is dropped. */
  @Test
  public void multiValueBooleanTest() {
    TestBooleanValueStream val = new TestBooleanValueStream();
    TestBooleanValue remover = new TestBooleanValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof BooleanValueStream);
    BooleanValueStream func = (BooleanValueStream) uncasted;

    // No values
    val.setValues();
    remover.setExists(false);
    func.streamBooleans(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
    val.setValues();
    remover.setValue(true).setExists(true);
    func.streamBooleans(
        value -> {
          assertTrue("There should be no values to stream", false);
        });

    // Values exist
    val.setValues(false, true, false, true, true);
    remover.setValue(true).setExists(true);
    Iterator<Boolean> values1 = Arrays.asList(false, false).iterator();
    func.streamBooleans(
        value -> {
          assertTrue(values1.hasNext());
          assertEquals(values1.next(), value);
        });
    assertFalse(values1.hasNext());

    // A missing remover removes nothing.
    val.setValues(false, true, false, true, true);
    remover.setExists(false);
    Iterator<Boolean> values2 = Arrays.asList(false, true, false, true, true).iterator();
    func.streamBooleans(
        value -> {
          assertTrue(values2.hasNext());
          assertEquals(values2.next(), value);
        });
    assertFalse(values2.hasNext());

    val.setValues(false, true, false, true, true);
    remover.setValue(false).setExists(true);
    Iterator<Boolean> values3 = Arrays.asList(true, true, true).iterator();
    func.streamBooleans(
        value -> {
          assertTrue(values3.hasNext());
          assertEquals(values3.next(), value);
        });
    assertFalse(values3.hasNext());

    // Removing the only distinct value empties the stream.
    val.setValues(false, false, false);
    remover.setValue(false).setExists(true);
    func.streamBooleans(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
  }
  /** Multi-valued ints: every element equal to an existing remover is dropped. */
  @Test
  public void multiValueIntTest() {
    TestIntValueStream val = new TestIntValueStream();
    TestIntValue remover = new TestIntValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof IntValueStream);
    IntValueStream func = (IntValueStream) uncasted;

    // No values
    val.setValues();
    remover.setExists(false);
    func.streamInts(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
    val.setValues();
    remover.setValue(324).setExists(true);
    func.streamInts(
        value -> {
          assertTrue("There should be no values to stream", false);
        });

    // Values exist
    val.setValues(1, 234, -234, 4439, -234, -3245);
    remover.setValue(-234).setExists(true);
    Iterator<Integer> values1 = Arrays.asList(1, 234, 4439, -3245).iterator();
    func.streamInts(
        value -> {
          assertTrue(values1.hasNext());
          assertEquals(values1.next().intValue(), value);
        });
    assertFalse(values1.hasNext());

    // A missing remover removes nothing.
    val.setValues(1, 234, -234, 4439, -234, -3245);
    remover.setExists(false);
    Iterator<Integer> values2 = Arrays.asList(1, 234, -234, 4439, -234, -3245).iterator();
    func.streamInts(
        value -> {
          assertTrue(values2.hasNext());
          assertEquals(values2.next().intValue(), value);
        });
    assertFalse(values2.hasNext());

    // A remover not present in the values removes nothing.
    val.setValues(1, 234, -234, 4439, -234, -3245);
    remover.setValue(100).setExists(true);
    Iterator<Integer> values3 = Arrays.asList(1, 234, -234, 4439, -234, -3245).iterator();
    func.streamInts(
        value -> {
          assertTrue(values3.hasNext());
          assertEquals(values3.next().intValue(), value);
        });
    assertFalse(values3.hasNext());

    // Removing the only distinct value empties the stream.
    val.setValues(1, 1);
    remover.setValue(1).setExists(true);
    func.streamInts(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
  }
  /** Multi-valued longs: every element equal to an existing remover is dropped. */
  @Test
  public void multiValueLongTest() {
    TestLongValueStream val = new TestLongValueStream();
    TestLongValue remover = new TestLongValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof LongValueStream);
    LongValueStream func = (LongValueStream) uncasted;

    // No values
    val.setValues();
    remover.setExists(false);
    func.streamLongs(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
    val.setValues();
    remover.setValue(2323L).setExists(true);
    func.streamLongs(
        value -> {
          assertTrue("There should be no values to stream", false);
        });

    // Values exist
    val.setValues(323L, -9423L, -1234L, 23423L, -1234L, -1234L);
    remover.setValue(-1234L).setExists(true);
    Iterator<Long> values1 = Arrays.asList(323L, -9423L, 23423L).iterator();
    func.streamLongs(
        value -> {
          assertTrue(values1.hasNext());
          assertEquals(values1.next().longValue(), value);
        });
    assertFalse(values1.hasNext());

    // A missing remover removes nothing.
    val.setValues(323L, -9423L, -1234L, 23423L, -1234L, -1234L);
    remover.setExists(false);
    Iterator<Long> values2 = Arrays.asList(323L, -9423L, -1234L, 23423L, -1234L, -1234L).iterator();
    func.streamLongs(
        value -> {
          assertTrue(values2.hasNext());
          assertEquals(values2.next().longValue(), value);
        });
    assertFalse(values2.hasNext());

    // A remover not present in the values removes nothing.
    val.setValues(323L, -9423L, -1234L, 23423L, -1234L, -1234L);
    remover.setValue(100L).setExists(true);
    Iterator<Long> values3 = Arrays.asList(323L, -9423L, -1234L, 23423L, -1234L, -1234L).iterator();
    func.streamLongs(
        value -> {
          assertTrue(values3.hasNext());
          assertEquals(values3.next().longValue(), value);
        });
    assertFalse(values3.hasNext());

    // Removing the only value empties the stream.
    val.setValues(10L);
    remover.setValue(10L).setExists(true);
    func.streamLongs(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
  }
  /** Multi-valued floats: every element equal to an existing remover is dropped. */
  @Test
  public void multiValueFloatTest() {
    TestFloatValueStream val = new TestFloatValueStream();
    TestFloatValue remover = new TestFloatValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof FloatValueStream);
    FloatValueStream func = (FloatValueStream) uncasted;

    // No values
    val.setValues();
    remover.setExists(false);
    func.streamFloats(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
    val.setValues();
    remover.setValue(230.32F).setExists(true);
    func.streamFloats(
        value -> {
          assertTrue("There should be no values to stream", false);
        });

    // Values exist
    val.setValues(-1234.9478F, -9423.5F, -1234.9478F, 23423.324F, 942.0F);
    remover.setValue(-1234.9478F).setExists(true);
    Iterator<Float> values1 = Arrays.asList(-9423.5F, 23423.324F, 942.0F).iterator();
    func.streamFloats(
        value -> {
          assertTrue(values1.hasNext());
          assertEquals(values1.next(), value, .00000001);
        });
    assertFalse(values1.hasNext());

    // A missing remover removes nothing.
    val.setValues(-1234.9478F, -9423.5F, -1234.9478F, 23423.324F, 942.0F);
    remover.setExists(false);
    Iterator<Float> values2 =
        Arrays.asList(-1234.9478F, -9423.5F, -1234.9478F, 23423.324F, 942.0F).iterator();
    func.streamFloats(
        value -> {
          assertTrue(values2.hasNext());
          assertEquals(values2.next(), value, .00000001);
        });
    assertFalse(values2.hasNext());

    // A remover not present in the values removes nothing.
    val.setValues(-1234.9478F, -9423.5F, -1234.9478F, 23423.324F, 942.0F);
    remover.setValue(23423.5845F).setExists(true);
    Iterator<Float> values3 =
        Arrays.asList(-1234.9478F, -9423.5F, -1234.9478F, 23423.324F, 942.0F).iterator();
    func.streamFloats(
        value -> {
          assertTrue(values3.hasNext());
          assertEquals(values3.next(), value, .00000001);
        });
    assertFalse(values3.hasNext());

    // Removing the only distinct value empties the stream.
    val.setValues(23.56F, 23.56F, 23.56F);
    remover.setValue(23.56F).setExists(true);
    func.streamFloats(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
  }
@Test
public void multiValueDoubleTest() {
TestDoubleValueStream val = new TestDoubleValueStream();
TestDoubleValue remover = new TestDoubleValue();
AnalyticsValueStream uncasted =
RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
assertTrue(uncasted instanceof DoubleValueStream);
DoubleValueStream func = (DoubleValueStream) uncasted;
// No values
val.setValues();
remover.setExists(false);
func.streamDoubles(
value -> {
assertTrue("There should be no values to stream", false);
});
val.setValues();
remover.setValue(234237.67).setExists(true);
func.streamDoubles(
value -> {
assertTrue("There should be no values to stream", false);
});
// Values exist
val.setValues(323.213, -9423.5, 124544.42);
remover.setValue(124544.42).setExists(true);
Iterator<Double> values1 = Arrays.asList(323.213, -9423.5).iterator();
func.streamDoubles(
value -> {
assertTrue(values1.hasNext());
assertEquals(values1.next(), value, .00000001);
});
assertFalse(values1.hasNext());
val.setValues(323.213, -9423.5, 124544.42);
remover.setExists(false);
Iterator<Double> values2 = Arrays.asList(323.213, -9423.5, 124544.42).iterator();
func.streamDoubles(
value -> {
assertTrue(values2.hasNext());
assertEquals(values2.next(), value, .00000001);
});
assertFalse(values2.hasNext());
val.setValues(323.213, -9423.5, 124544.42);
remover.setValue(345.34).setExists(true);
Iterator<Double> values3 = Arrays.asList(323.213, -9423.5, 124544.42).iterator();
func.streamDoubles(
value -> {
assertTrue(values3.hasNext());
assertEquals(values3.next(), value, .00000001);
});
assertFalse(values1.hasNext());
val.setValues(3124.96, 3124.96);
remover.setValue(3124.96).setExists(true);
func.streamDoubles(
value -> {
assertTrue("There should be no values to stream", false);
});
}
  /** Multi-valued dates: every element equal to an existing remover is dropped. */
  @Test
  public void multiValueDateTest() throws DateTimeParseException {
    Date date1 = Date.from(Instant.parse("1810-12-02T10:30:15Z"));
    Date date2 = Date.from(Instant.parse("1931-03-16T18:15:45Z"));
    Date date3 = Date.from(Instant.parse("2023-11-01T20:30:15Z"));

    TestDateValueStream val = new TestDateValueStream();
    TestDateValue remover = new TestDateValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof DateValueStream);
    DateValueStream func = (DateValueStream) uncasted;

    // No values
    val.setValues();
    remover.setExists(false);
    func.streamLongs(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
    val.setValues();
    remover.setValue("1700-12-14").setExists(true);
    func.streamLongs(
        value -> {
          assertTrue("There should be no values to stream", false);
        });

    // Values exist
    val.setValues(
        "1810-12-02T10:30:15Z",
        "1931-03-16T18:15:45Z",
        "2023-11-01T20:30:15Z",
        "1931-03-16T18:15:45Z");
    remover.setValue("1931-03-16T18:15:45Z").setExists(true);
    Iterator<Long> values1 = Arrays.asList(date1.getTime(), date3.getTime()).iterator();
    func.streamLongs(
        value -> {
          assertTrue(values1.hasNext());
          assertEquals(values1.next().longValue(), value);
        });
    assertFalse(values1.hasNext());

    // A missing remover removes nothing.
    val.setValues(
        "1810-12-02T10:30:15Z",
        "1931-03-16T18:15:45Z",
        "2023-11-01T20:30:15Z",
        "1931-03-16T18:15:45Z");
    remover.setExists(false);
    Iterator<Long> values2 =
        Arrays.asList(date1.getTime(), date2.getTime(), date3.getTime(), date2.getTime())
            .iterator();
    func.streamLongs(
        value -> {
          assertTrue(values2.hasNext());
          assertEquals(values2.next().longValue(), value);
        });
    assertFalse(values2.hasNext());

    // A remover one second off from date1 matches nothing, so nothing is removed.
    val.setValues(
        "1810-12-02T10:30:15Z",
        "1931-03-16T18:15:45Z",
        "2023-11-01T20:30:15Z",
        "1931-03-16T18:15:45Z");
    remover.setValue("1810-12-02T10:30:16Z").setExists(true);
    Iterator<Long> values3 =
        Arrays.asList(date1.getTime(), date2.getTime(), date3.getTime(), date2.getTime())
            .iterator();
    func.streamLongs(
        value -> {
          assertTrue(values3.hasNext());
          assertEquals(values3.next().longValue(), value);
        });
    assertFalse(values3.hasNext());

    // Removing the only value empties the stream.
    val.setValues("1810-12-02T10:30:15Z");
    remover.setValue("1810-12-02T10:30:15Z").setExists(true);
    func.streamLongs(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
  }
  /** Multi-valued strings: every element equal to an existing remover is dropped. */
  @Test
  public void multiValueStringTest() {
    TestStringValueStream val = new TestStringValueStream();
    TestStringValue remover = new TestStringValue();
    AnalyticsValueStream uncasted =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});
    assertTrue(uncasted instanceof StringValueStream);
    StringValueStream func = (StringValueStream) uncasted;

    // No values
    val.setValues();
    remover.setExists(false);
    func.streamStrings(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
    val.setValues();
    remover.setValue("ads").setExists(true);
    func.streamStrings(
        value -> {
          assertTrue("There should be no values to stream", false);
        });

    // Values exist
    val.setValues("abc", "123", "456", "abc");
    remover.setValue("abc").setExists(true);
    Iterator<String> values1 = Arrays.asList("123", "456").iterator();
    func.streamStrings(
        value -> {
          assertTrue(values1.hasNext());
          assertEquals(values1.next(), value);
        });
    assertFalse(values1.hasNext());

    // A missing remover removes nothing.
    val.setValues("abc", "123", "456", "abc");
    remover.setExists(false);
    Iterator<String> values2 = Arrays.asList("abc", "123", "456", "abc").iterator();
    func.streamStrings(
        value -> {
          assertTrue(values2.hasNext());
          assertEquals(values2.next(), value);
        });
    assertFalse(values2.hasNext());

    // A remover not present in the values removes nothing.
    val.setValues("string1", "another string", "the final and last string");
    remover.setValue("not in values").setExists(true);
    Iterator<String> values3 =
        Arrays.asList("string1", "another string", "the final and last string").iterator();
    func.streamStrings(
        value -> {
          assertTrue(values3.hasNext());
          assertEquals(values3.next(), value);
        });
    assertFalse(values3.hasNext());

    // Removing the only distinct value empties the stream.
    val.setValues("abc123", "abc123", "abc123", "abc123");
    remover.setValue("abc123").setExists(true);
    func.streamStrings(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
  }
  /** Multi-valued objects: every element equal to an existing remover is dropped. */
  @Test
  public void multiValueObjectTest() {
    TestAnalyticsValueStream val = new TestAnalyticsValueStream();
    TestAnalyticsValue remover = new TestAnalyticsValue();
    AnalyticsValueStream func =
        RemoveFunction.creatorFunction.apply(new AnalyticsValueStream[] {val, remover});

    // No values
    val.setValues();
    remover.setExists(false);
    func.streamObjects(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
    val.setValues();
    remover.setValue("doesn't matter").setExists(true);
    func.streamObjects(
        value -> {
          assertTrue("There should be no values to stream", false);
        });

    // Values exist; removal compares across heterogeneous object types.
    val.setValues("asdfs", new Date(12312), 213123L, new Date(12312));
    remover.setValue(new Date(12312)).setExists(true);
    Iterator<Object> values1 = Arrays.<Object>asList("asdfs", 213123L).iterator();
    func.streamObjects(
        value -> {
          assertTrue(values1.hasNext());
          assertEquals(values1.next(), value);
        });
    assertFalse(values1.hasNext());

    // A missing remover removes nothing.
    val.setValues("asdfs", new Date(12312), 213123L, new Date(12312));
    remover.setExists(false);
    Iterator<Object> values2 =
        Arrays.<Object>asList("asdfs", new Date(12312), 213123L, new Date(12312)).iterator();
    func.streamObjects(
        value -> {
          assertTrue(values2.hasNext());
          assertEquals(values2.next(), value);
        });
    assertFalse(values2.hasNext());

    // A remover not present in the values removes nothing.
    val.setValues(new Date(3), "3", 3F);
    remover.setValue(new Date(4)).setExists(true);
    Iterator<Object> values3 = Arrays.<Object>asList(new Date(3), "3", 3F).iterator();
    func.streamObjects(
        value -> {
          assertTrue(values3.hasNext());
          assertEquals(values3.next(), value);
        });
    assertFalse(values3.hasNext());

    // Removing the only value empties the stream.
    val.setValues(new Date(4));
    remover.setValue(new Date(4)).setExists(true);
    func.streamObjects(
        value -> {
          assertTrue("There should be no values to stream", false);
        });
  }
}
| |
/**
* Appia: Group communication and protocol composition framework library
* Copyright 2006 University of Lisbon
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Initial developer(s): Alexandre Pinto and Hugo Miranda.
* Contributor(s): See Appia web page for a list of contributors.
*/
/*
* Created on Mar 16, 2004
*
* To change the template for this generated file go to
* Window - Preferences - Java - Code Generation - Code and Comments
*/
package net.sf.appia.test.broadcast.adeb;
import irdp.protocols.tutorialDA.utils.ProcessSet;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Hashtable;
import net.sf.appia.core.AppiaEventException;
import net.sf.appia.core.Channel;
import net.sf.appia.core.Direction;
import net.sf.appia.core.Event;
import net.sf.appia.core.Session;
import net.sf.appia.core.TimeProvider;
import net.sf.appia.core.events.AppiaMulticast;
import net.sf.appia.core.events.SendableEvent;
import net.sf.appia.core.events.channel.ChannelClose;
import net.sf.appia.core.events.channel.ChannelInit;
import net.sf.appia.core.message.Message;
import net.sf.appia.protocols.common.RegisterSocketEvent;
import net.sf.appia.protocols.sslcomplete.SslRegisterSocketEvent;
import net.sf.appia.test.broadcast.adeb.DeliverEvent;
import net.sf.appia.xml.interfaces.InitializableSession;
import net.sf.appia.xml.utils.SessionProperties;
/**
* Authenticated Double Echo Broadcast (algorithm 3.18)
* This class defines a ADEBSession
*
* @author EMDC
* @version 1.0
*/
public class ADEBSession extends Session implements InitializableSession {
private Channel channel;
private TimeProvider time;
private MyShell shell;
Hashtable senderMessageMap;
private boolean sentecho;
private boolean sentready;
private boolean delivered;
private ArrayList<String> echos;
private ArrayList<String> readys;
private int rank;
private final static String BOTTOM = "BOTTOM";
private int N;
private final static int f = 1;
private int sender_rank;
/*For ProcessSet*/
ProcessSet processSet;
String processfile="";
/**
 * Creates a new ADEBSession bound to the given layer.
 * State fields are initialized later, in {@link #init(SessionProperties)}.
 * @param l the ADEB layer this session belongs to
 */
public ADEBSession(ADEBLayer l) {
super(l);
}
/**
 * Initializes the session using the parameters given in the XML configuration.
 * Possible parameters:
 * <ul>
 *   <li><b>rank</b> — this process' rank inside the process set</li>
 *   <li><b>processfile</b> — file describing the full process membership</li>
 * </ul>
 *
 * @param params The parameters given in the XML configuration.
 */
public void init(SessionProperties params) {
    // Reset the per-broadcast state flags of the algorithm.
    sentecho = false;
    sentready = false; // FIX: was never reset here, unlike the other two flags
    delivered = false;
    this.rank = Integer.parseInt(params.getProperty("rank"));
    // Maps a message payload to the rank of the process that initiated it.
    // Typed instantiation instead of the raw Hashtable used before.
    senderMessageMap = new Hashtable<String, Integer>();
    // Build the static process membership from the configuration file.
    this.processfile = params.getProperty("processfile");
    this.processSet = ProcessSet.buildProcessSet(processfile, rank);
    N = this.processSet.getSize();
    // One ECHO/READY slot per process, initialized to the "no value" marker.
    echos = new ArrayList<String>(N);
    for (int i = 0; i < N; i++)
        echos.add(BOTTOM);
    readys = new ArrayList<String>(N);
    for (int i = 0; i < N; i++)
        readys.add(BOTTOM);
}
/**
 * Main event handler.
 * Dispatches each known event type to its dedicated handler; any other event
 * is forwarded unchanged along the channel.
 * @param ev the event to handle.
 *
 * @see net.sf.appia.core.Session#handle(net.sf.appia.core.Event)
 */
public void handle(Event ev) {
// NOTE: the instanceof chain order matters if the event classes are related
// by inheritance; keep the most specific checks first.
if (ev instanceof ChannelInit)
handleChannelInit((ChannelInit) ev);
else if (ev instanceof ChannelClose)
handleChannelClose((ChannelClose) ev);
else if (ev instanceof BroadcastEvent)
handleBroadcastEvent((BroadcastEvent) ev);
else if (ev instanceof RegisterSocketEvent)
handleRSE((RegisterSocketEvent) ev);
else if (ev instanceof SendEvent)
handleSendEvent((SendEvent) ev);
else if (ev instanceof EchoEvent)
handleEchoEvent((EchoEvent) ev);
else if (ev instanceof ReadyEvent)
handleReadyEvent((ReadyEvent) ev);
else
// Unknown event: let it continue on its way through the stack.
try {
ev.go();
} catch (AppiaEventException e) {
e.printStackTrace();
}
}
/**
 * Handles a READY message coming up from the network.
 * Records the first READY seen from each process, amplifies to our own READY
 * once more than f matching READYs exist, and delivers (once) when more than
 * 2f matching READYs exist.
 * @param ev the incoming READY event
 */
private void handleReadyEvent(ReadyEvent ev) {
    if (ev.getDir() == Direction.UP) {
        // Wire format (popped in order): signature, alias, initiator rank,
        // sending process rank, payload. signature/alias are unused here.
        String signature = ev.getMessage().popString();
        String alias = ev.getMessage().popString();
        Message message = ev.getMessage();
        sender_rank = message.popInt();
        int msg_rank = message.popInt();
        String recvd_msg = message.popString();
        senderMessageMap.put(recvd_msg, sender_rank);
        System.out.println("[READY_RECEIVED] Source:" + msg_rank +
                " MESSAGE: " + recvd_msg);
        // Record only the first READY from each process.
        if (readys.get(msg_rank).equals(BOTTOM)) {
            readys.set(msg_rank, recvd_msg);
            System.out.println("Ready collected from process_" + msg_rank + ": " + recvd_msg);
        }
        // Amplification step: more than f READYs for one message -> send ours.
        String msg = checkMajority_f(readys);
        if (msg != null && !sentready) {
            sentready = true;
            multicastReadyEvent(msg, "[READY2]");
        }
        // Delivery step: more than 2f READYs for one message -> deliver once.
        String msg2 = checkMajority_2f(readys);
        // BUG FIX: the condition previously tested msg instead of msg2, so a
        // null msg2 could be delivered (NPE in Deliver) or delivery skipped.
        if (msg2 != null && !delivered) {
            delivered = true;
            Deliver(msg2, this.sender_rank);
        }
    }
}
/**
 * Returns the message that appears more than f times in {@code readys},
 * or {@code null} if no message has reached that threshold.
 * BOTTOM entries (no READY received yet) are ignored.
 */
private String checkMajority_f(ArrayList<String> readys) {
    for (int i = 0; i < readys.size(); i++) {
        String current = readys.get(i);
        if (BOTTOM.equals(current)) {
            continue;
        }
        // BUG FIX: the counter must restart for every candidate message;
        // previously it accumulated across candidates (checkMajority_Nf2
        // already did this correctly).
        int msgCount = 0;
        for (int j = 0; j < readys.size(); j++) {
            if (current.equals(readys.get(j))) {
                msgCount++;
            }
        }
        if (msgCount > f) {
            return current; // this is the message
        }
    }
    return null; // no message seen more than f times
}
/**
 * Returns the message that appears more than 2f times in {@code readys},
 * or {@code null} if no message has reached the delivery threshold.
 * BOTTOM entries (no READY received yet) are ignored.
 */
private String checkMajority_2f(ArrayList<String> readys) {
    for (int i = 0; i < readys.size(); i++) {
        String current = readys.get(i);
        if (BOTTOM.equals(current)) {
            continue;
        }
        // BUG FIX: reset the counter for every candidate message; previously
        // it accumulated across candidates and could cross 2f spuriously.
        int msgCount = 0;
        for (int j = 0; j < readys.size(); j++) {
            if (current.equals(readys.get(j))) {
                msgCount++;
            }
        }
        if (msgCount > (2 * f)) {
            return current; // this is the message
        }
    }
    return null; // no message seen more than 2f times
}
// Handles an ECHO message coming up from the network: records the first ECHO
// from each process and, once more than (N+f)/2 matching ECHOs are collected,
// moves to the READY phase (at most once).
private void handleEchoEvent(EchoEvent ev) {
if (ev.getDir() == Direction.UP){
// Wire format (popped in order): signature, alias, initiator rank,
// echoing process rank, payload. signature/alias are unused here.
String signature = ev.getMessage().popString();
String alias = ev.getMessage().popString();
Message message = ev.getMessage();
sender_rank = message.popInt();
int msg_rank = message.popInt();
String recvd_msg = message.popString();
// Remember which rank initiated this payload so READY can carry it on.
senderMessageMap.put(recvd_msg, sender_rank);
// Record only the first ECHO from each process.
if(echos.get(msg_rank).equals(BOTTOM))
{
echos.set(msg_rank, recvd_msg);
System.out.println("Echo collected from process_"+msg_rank + ": "+recvd_msg);
}
// Quorum of echoes reached -> multicast READY exactly once.
String msg = checkMajority_Nf2(echos);
if(msg!=null && sentready == false) {
sentready = true;
multicastReadyEvent(msg, "[READY]");
}
}
}
// Multicasts a READY for recvd_msg to every process in the set (ourselves
// included). debug_msg is only used for the log line.
private void multicastReadyEvent(String recvd_msg, String debug_msg) {
ReadyEvent re = new ReadyEvent();
// Push order matters: receivers pop initiator rank, our rank, then payload.
re.getMessage().pushString(recvd_msg);
re.getMessage().pushInt(this.rank);
// NOTE(review): assumes recvd_msg was recorded in senderMessageMap by an
// earlier SEND/ECHO/READY — a missing entry would throw an NPE here.
re.getMessage().pushInt((Integer)senderMessageMap.get(recvd_msg));
re.setDir(Direction.DOWN);
re.setSourceSession(this);
re.setChannel(channel);
re.dest = new AppiaMulticast(null,processSet.getAllSockets());
try {
re.init();
re.go();
System.out.println(debug_msg + " Multicasting");
} catch (AppiaEventException e) {
e.printStackTrace();
}
}
// Hands the agreed message up the stack as a DeliverEvent.
// NOTE(review): the s_rank parameter is never pushed — the initiator rank is
// looked up from senderMessageMap instead; confirm that this is intended.
private void Deliver(String msg, int s_rank) {
DeliverEvent de = new DeliverEvent();
final Message messageSend = de.getMessage();
// Push order: payload, then our rank, then the initiator's rank.
messageSend.pushString(msg);
de.getMessage().pushInt(this.rank);
de.getMessage().pushInt((Integer)senderMessageMap.get(msg));
try {
de.setSourceSession(this);
de.setChannel(channel);
de.setDir(Direction.UP);
de.init();
de.go();
} catch (AppiaEventException e) {
e.printStackTrace();
}
}
/**
 * Returns the message echoed by more than (N+f)/2 processes, or {@code null}
 * when no message has yet reached that quorum. BOTTOM slots (no ECHO received
 * from that process) are skipped.
 */
private String checkMajority_Nf2(ArrayList<String> echos) {
    for (String candidate : echos) {
        if (candidate == BOTTOM) {
            continue; // no echo recorded for this process yet
        }
        int occurrences = 0;
        for (String other : echos) {
            if (candidate.equals(other)) {
                occurrences++;
            }
        }
        if (occurrences > (N + f) / 2) {
            return candidate; // quorum reached for this message
        }
    }
    return null; // no message has a quorum of echoes
}
// Handles the initiator's SEND: on the first SEND received (and only then),
// multicasts an ECHO of the payload to every process.
private void handleSendEvent(SendEvent ev) {
if (ev.getDir() == Direction.UP && sentecho == false){
// Wire format (popped in order): signature, alias, initiator rank, payload.
// signature/alias are unused here.
String signature = ev.getMessage().popString();
String alias = ev.getMessage().popString();
sentecho = true;
this.sender_rank = ev.getMessage().popInt();
EchoEvent ee = new EchoEvent();
final Message messageSend = ee.getMessage();
// Pop the payload and push it straight back: effectively a peek that
// leaves the original event's message intact.
String myString = ev.getMessage().popString();
ev.getMessage().pushString(myString);
// ECHO carries (popped in reverse on receipt): payload, our rank, the
// initiator's rank.
messageSend.pushString(myString);
ee.getMessage().pushInt(this.rank);
ee.getMessage().pushInt(sender_rank);
ee.dest = new AppiaMulticast(null,processSet.getAllSockets());
try {
//System.out.println("[EchoEvents] Multicasting");
ee.setSourceSession(this);
ee.setChannel(channel);
ee.setDir(Direction.DOWN);
ee.init();
ee.go();
} catch (AppiaEventException e) {
e.printStackTrace();
}
}
}
/*
 * ChannelInit: first event seen on the channel. Forwards it, then registers
 * the UDP socket this process listens on (address and port taken from the
 * process-set configuration).
 */
private void handleChannelInit(ChannelInit init) {
System.out.println("Channel init called");
channel = init.getChannel();
time = channel.getTimeProvider();
try {
init.go();
} catch (AppiaEventException e) {
e.printStackTrace();
}
/*
 * This event is used to register a socket on the layer that is used
 * to interface Appia with sockets.
 */
//SslRegisterSocketEvent rse;
RegisterSocketEvent rse;
try {
/*added for ProcessSet*/
InetSocketAddress selflocal = (InetSocketAddress) processSet.getSelfProcess().getSocketAddress();
rse = new RegisterSocketEvent(channel,Direction.DOWN,this,selflocal.getPort());
try {
/*Added for ProcessSet*/
rse.localHost = InetAddress.getByName(selflocal.getAddress().getHostAddress()); //this lets us use 127.0.0.1
} catch (UnknownHostException e) {
e.printStackTrace();
}
rse.go();
} catch (AppiaEventException e1) {
e1.printStackTrace();
}
}
/*
 * RegisterSocketEvent: acknowledgement of our socket registration request.
 * On failure the process aborts; on success the interactive shell is started.
 */
private void handleRSE(RegisterSocketEvent event) {
    if (event.error) {
        System.err.println("Error on the RegisterSocketEvent!!!");
        System.exit(-1);
    }
    // Socket is ready: run the command shell on its own thread.
    shell = new MyShell(channel);
    final Thread shellThread = event.getChannel().getThreadFactory().newThread(shell);
    shellThread.setName("Ecco shell");
    shellThread.start();
}
/*
 * BroadcastEvent: a local request (from the shell) to broadcast some text.
 * Only downward-travelling requests trigger a SEND multicast.
 */
private void handleBroadcastEvent(BroadcastEvent ev) {
    if (ev.getDir() != Direction.DOWN) {
        return;
    }
    multicastSendEvent(ev.getText(), "[SEND]");
}
/**
 * Multicasts the initial SEND carrying {@code recvd_msg} to every process.
 * @param recvd_msg the payload to broadcast
 * @param debug_msg log tag, e.g. "[SEND]"
 */
private void multicastSendEvent(String recvd_msg, String debug_msg) {
    SendEvent se = new SendEvent();
    final Message messageSend = se.getMessage();
    // Push order matters: receivers pop the initiator's rank first, then payload.
    messageSend.pushString(recvd_msg);
    messageSend.pushInt(this.rank); //pushing the initiators rank
    se.dest = new AppiaMulticast(null,processSet.getAllSockets());
    try {
        se.setSourceSession(this);
        se.setChannel(channel);
        se.setDir(Direction.DOWN);
        se.init();
        se.go();
        // Consistency fix: debug_msg was accepted but never used; log it the
        // same way multicastReadyEvent does.
        System.out.println(debug_msg + " Multicasting");
    } catch (AppiaEventException e) {
        e.printStackTrace();
    }
}
/*
 * ChannelClose: the channel is shutting down. Log it and pass the event on.
 */
private void handleChannelClose(ChannelClose close) {
    System.out.println("Channel close called");
    try {
        close.go();
    } catch (AppiaEventException e) {
        e.printStackTrace();
    }
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.highlighting;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.ex.ActionManagerEx;
import com.intellij.openapi.actionSystem.ex.AnActionListener;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.editor.ex.MarkupModelEx;
import com.intellij.openapi.editor.ex.RangeHighlighterEx;
import com.intellij.openapi.editor.markup.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.UserDataHolderEx;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.ui.ColorUtil;
import com.intellij.util.BitUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.util.*;
public class HighlightManagerImpl extends HighlightManager {
private final Project myProject;
/**
 * Creates the per-project highlight manager. Installs an action listener that
 * hides HIDE_BY_ANY_KEY highlights, and a document listener that removes
 * HIDE_BY_TEXT_CHANGE highlights whenever their document is edited.
 */
public HighlightManagerImpl(Project project) {
myProject = project;
ActionManagerEx.getInstanceEx().addAnActionListener(new MyAnActionListener(), myProject);
DocumentListener documentListener = new DocumentListener() {
@Override
public void documentChanged(DocumentEvent event) {
Document document = event.getDocument();
Editor[] editors = EditorFactory.getInstance().getEditors(document);
for (Editor editor : editors) {
Map<RangeHighlighter, HighlightInfo> map = getHighlightInfoMap(editor, false);
// BUG FIX: this used to be 'return', which aborted processing of all
// remaining editors; an editor with no highlight map must just be skipped.
if (map == null) continue;
ArrayList<RangeHighlighter> highlightersToRemove = new ArrayList<>();
for (RangeHighlighter highlighter : map.keySet()) {
HighlightInfo info = map.get(highlighter);
if (!info.editor.getDocument().equals(document)) continue;
if (BitUtil.isSet(info.flags, HIDE_BY_TEXT_CHANGE)) {
highlightersToRemove.add(highlighter);
}
}
// Remove outside the iteration to avoid mutating the map while iterating.
for (RangeHighlighter highlighter : highlightersToRemove) {
removeSegmentHighlighter(editor, highlighter);
}
}
}
};
EditorFactory.getInstance().getEventMulticaster().addDocumentListener(documentListener, myProject);
}
// Returns the highlighter -> info map stored on the editor's user data, or
// null when absent and toCreate is false. Injected editors delegate to their
// host editor so all highlights share a single map.
@Nullable
public Map<RangeHighlighter, HighlightInfo> getHighlightInfoMap(@NotNull Editor editor, boolean toCreate) {
if (editor instanceof EditorWindow) return getHighlightInfoMap(((EditorWindow)editor).getDelegate(), toCreate);
Map<RangeHighlighter, HighlightInfo> map = editor.getUserData(HIGHLIGHT_INFO_MAP_KEY);
if (map == null && toCreate) {
// putUserDataIfAbsent is atomic: concurrent callers all get the same map.
map = ((UserDataHolderEx)editor).putUserDataIfAbsent(HIGHLIGHT_INFO_MAP_KEY, new HashMap<>());
}
return map;
}
/**
 * Returns every highlighter this manager created for the given editor
 * (empty array when none are registered).
 */
@NotNull
public RangeHighlighter[] getHighlighters(@NotNull Editor editor) {
    Map<RangeHighlighter, HighlightInfo> infoByHighlighter = getHighlightInfoMap(editor, false);
    if (infoByHighlighter == null) return RangeHighlighter.EMPTY_ARRAY;
    List<RangeHighlighter> result = new ArrayList<>();
    for (Map.Entry<RangeHighlighter, HighlightInfo> entry : infoByHighlighter.entrySet()) {
        if (entry.getValue().editor.equals(editor)) {
            result.add(entry.getKey());
        }
    }
    return result.toArray(RangeHighlighter.EMPTY_ARRAY);
}
// Creates a highlighter just below the selection layer for [startOffset,
// endOffset) and records it in the editor's highlight-info map. For injected
// editors the info is keyed on the host editor.
private RangeHighlighter addSegmentHighlighter(@NotNull Editor editor, int startOffset, int endOffset, TextAttributes attributes, @HideFlags int flags) {
RangeHighlighter highlighter = editor.getMarkupModel()
.addRangeHighlighter(startOffset, endOffset, HighlighterLayer.SELECTION - 1, attributes, HighlighterTargetArea.EXACT_RANGE);
HighlightInfo info = new HighlightInfo(editor instanceof EditorWindow ? ((EditorWindow)editor).getDelegate() : editor, flags);
Map<RangeHighlighter, HighlightInfo> map = getHighlightInfoMap(editor, true);
map.put(highlighter, info);
return highlighter;
}
// Disposes a managed highlighter (if it is still attached to its markup
// model) and forgets it. Returns false when the highlighter was never
// registered with this manager for the given editor.
@Override
public boolean removeSegmentHighlighter(@NotNull Editor editor, @NotNull RangeHighlighter highlighter) {
Map<RangeHighlighter, HighlightInfo> map = getHighlightInfoMap(editor, false);
if (map == null) return false;
HighlightInfo info = map.get(highlighter);
if (info == null) return false;
MarkupModel markupModel = info.editor.getMarkupModel();
if (((MarkupModelEx)markupModel).containsHighlighter(highlighter)) {
highlighter.dispose();
}
map.remove(highlighter);
return true;
}
// Highlights every given reference occurrence. Each reference may live in its
// own (possibly injected) editor, so the target editor is resolved per
// occurrence; the caller's caret and scroll position are saved up front and
// restored afterwards because opening editors can move them.
@Override
public void addOccurrenceHighlights(@NotNull Editor editor,
@NotNull PsiReference[] occurrences,
@NotNull TextAttributes attributes,
boolean hideByTextChange,
Collection<RangeHighlighter> outHighlighters) {
if (occurrences.length == 0) return;
int flags = HIDE_BY_ESCAPE;
if (hideByTextChange) {
flags |= HIDE_BY_TEXT_CHANGE;
}
Color scrollmarkColor = getScrollMarkColor(attributes, editor.getColorsScheme());
// Remember position so the editor looks untouched after highlighting.
int oldOffset = editor.getCaretModel().getOffset();
int horizontalScrollOffset = editor.getScrollingModel().getHorizontalScrollOffset();
int verticalScrollOffset = editor.getScrollingModel().getVerticalScrollOffset();
for (PsiReference occurrence : occurrences) {
PsiElement element = occurrence.getElement();
// Highlight only the reference's own range inside its element.
int startOffset = element.getTextRange().getStartOffset();
int start = startOffset + occurrence.getRangeInElement().getStartOffset();
int end = startOffset + occurrence.getRangeInElement().getEndOffset();
PsiFile containingFile = element.getContainingFile();
Project project = element.getProject();
// each reference can reside in its own injected editor
Editor textEditor = InjectedLanguageUtil.openEditorFor(containingFile, project);
if (textEditor != null) {
addOccurrenceHighlight(textEditor, start, end, attributes, flags, outHighlighters, scrollmarkColor);
}
}
// Restore caret and both scroll axes.
editor.getCaretModel().moveToOffset(oldOffset);
editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
editor.getScrollingModel().scrollHorizontally(horizontalScrollOffset);
editor.getScrollingModel().scrollVertically(verticalScrollOffset);
}
// Thin delegation kept for interface compatibility: element-based
// highlighting is implemented in addOccurrenceHighlights(Editor, PsiElement[], ...).
@Override
public void addElementsOccurrenceHighlights(@NotNull Editor editor,
@NotNull PsiElement[] elements,
@NotNull TextAttributes attributes,
boolean hideByTextChange,
Collection<RangeHighlighter> outHighlighters) {
addOccurrenceHighlights(editor, elements, attributes, hideByTextChange, outHighlighters);
}
// Creates one managed highlighter for [start, end), keeps it visible when the
// range is inside a folded region, optionally collects it into outHighlighters,
// and applies the error-stripe color when one is supplied.
@Override
public void addOccurrenceHighlight(@NotNull Editor editor,
int start,
int end,
TextAttributes attributes,
int flags,
Collection<RangeHighlighter> outHighlighters,
Color scrollmarkColor) {
RangeHighlighter highlighter = addSegmentHighlighter(editor, start, end, attributes, flags);
if (highlighter instanceof RangeHighlighterEx) ((RangeHighlighterEx)highlighter).setVisibleIfFolded(true);
if (outHighlighters != null) {
outHighlighters.add(highlighter);
}
if (scrollmarkColor != null) {
highlighter.setErrorStripeMarkColor(scrollmarkColor);
}
}
// Convenience overload: same as the seven-argument variant with
// hideByAnyKey = false.
@Override
public void addRangeHighlight(@NotNull Editor editor,
int startOffset,
int endOffset,
@NotNull TextAttributes attributes,
boolean hideByTextChange,
@Nullable Collection<RangeHighlighter> highlighters) {
addRangeHighlight(editor, startOffset, endOffset, attributes, hideByTextChange, false, highlighters);
}
/**
 * Highlights a single offset range, translating the boolean hide options into
 * the internal {@code HIDE_BY_*} flag mask. ESC always hides the highlight.
 */
@Override
public void addRangeHighlight(@NotNull Editor editor,
                              int startOffset,
                              int endOffset,
                              @NotNull TextAttributes attributes,
                              boolean hideByTextChange,
                              boolean hideByAnyKey,
                              @Nullable Collection<RangeHighlighter> highlighters) {
    int hideFlags = HIDE_BY_ESCAPE
                    | (hideByTextChange ? HIDE_BY_TEXT_CHANGE : 0)
                    | (hideByAnyKey ? HIDE_BY_ANY_KEY : 0);
    Color stripeColor = getScrollMarkColor(attributes, editor.getColorsScheme());
    addOccurrenceHighlight(editor, startOffset, endOffset, attributes, hideFlags, highlighters, stripeColor);
}
// Highlights each element's full text range. Ranges of injected elements are
// mapped back to host-document offsets, the host editor is used directly, and
// offsets are clamped to the document so stale PSI ranges cannot throw.
@Override
public void addOccurrenceHighlights(@NotNull Editor editor,
@NotNull PsiElement[] elements,
@NotNull TextAttributes attributes,
boolean hideByTextChange,
Collection<RangeHighlighter> outHighlighters) {
if (elements.length == 0) return;
int flags = HIDE_BY_ESCAPE;
if (hideByTextChange) {
flags |= HIDE_BY_TEXT_CHANGE;
}
Color scrollmarkColor = getScrollMarkColor(attributes, editor.getColorsScheme());
// Work against the host editor so injected and host ranges agree.
if (editor instanceof EditorWindow) {
editor = ((EditorWindow)editor).getDelegate();
}
for (PsiElement element : elements) {
TextRange range = element.getTextRange();
range = InjectedLanguageManager.getInstance(myProject).injectedToHost(element, range);
addOccurrenceHighlight(editor,
trimOffsetToDocumentSize(editor, range.getStartOffset()),
trimOffsetToDocumentSize(editor, range.getEndOffset()),
attributes, flags, outHighlighters, scrollmarkColor);
}
}
/**
 * Clamps {@code offset} into the valid document range
 * {@code [0, document text length]}.
 */
private static int trimOffsetToDocumentSize(@NotNull Editor editor, int offset) {
// Idiomatic clamp instead of the previous manual if/ternary pair.
return Math.max(0, Math.min(offset, editor.getDocument().getTextLength()));
}
// Picks the color for the error-stripe (scrollbar) mark: an explicit stripe
// color wins; otherwise the highlight's background color is brightened on
// dark schemes / darkened on light schemes so the mark stays visible.
// Returns null when the attributes provide neither color.
@Nullable
private static Color getScrollMarkColor(@NotNull TextAttributes attributes, @NotNull EditorColorsScheme colorScheme) {
if (attributes.getErrorStripeColor() != null) return attributes.getErrorStripeColor();
if (attributes.getBackgroundColor() != null) {
boolean isDark = ColorUtil.isDark(colorScheme.getDefaultBackground());
return isDark ? attributes.getBackgroundColor().brighter() : attributes.getBackgroundColor().darker();
}
return null;
}
/**
 * Removes every highlight of this editor whose hide-flags intersect
 * {@code mask}. Returns {@code true} when at least one highlight was removed.
 */
public boolean hideHighlights(@NotNull Editor editor, @HideFlags int mask) {
    Map<RangeHighlighter, HighlightInfo> map = getHighlightInfoMap(editor, false);
    if (map == null) return false;
    // Collect first, remove after: removeSegmentHighlighter mutates the map.
    List<RangeHighlighter> toRemove = new ArrayList<>();
    for (Map.Entry<RangeHighlighter, HighlightInfo> entry : map.entrySet()) {
        HighlightInfo info = entry.getValue();
        if (info.editor.equals(editor) && (info.flags & mask) != 0) {
            toRemove.add(entry.getKey());
        }
    }
    for (RangeHighlighter highlighter : toRemove) {
        removeSegmentHighlighter(editor, highlighter);
    }
    return !toRemove.isEmpty();
}
// Hides HIDE_BY_ANY_KEY highlights as soon as the user performs any action or
// types in an editor.
private class MyAnActionListener implements AnActionListener {
@Override
public void beforeActionPerformed(AnAction action, final DataContext dataContext, AnActionEvent event) {
requestHideHighlights(dataContext);
}
@Override
public void beforeEditorTyping(char c, DataContext dataContext) {
requestHideHighlights(dataContext);
}
// No-op when the data context carries no editor (e.g. a non-editor action).
private void requestHideHighlights(final DataContext dataContext) {
final Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
if (editor == null) return;
hideHighlights(editor, HIDE_BY_ANY_KEY);
}
}
private final Key<Map<RangeHighlighter, HighlightInfo>> HIGHLIGHT_INFO_MAP_KEY = Key.create("HIGHLIGHT_INFO_MAP_KEY");
// Bookkeeping for one managed highlighter: the (host) editor it belongs to
// and the HIDE_BY_* conditions under which it should be removed. Immutable.
static class HighlightInfo {
final Editor editor;
@HideFlags final int flags;
public HighlightInfo(Editor editor, @HideFlags int flags) {
this.editor = editor;
this.flags = flags;
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.storage.blob.sas;
import com.azure.storage.blob.models.UserDelegationKey;
import com.azure.storage.blob.BlobClientBuilder;
import com.azure.storage.common.implementation.StorageImplUtils;
import com.azure.storage.common.sas.BaseSasQueryParameters;
import com.azure.storage.common.sas.SasProtocol;
import com.azure.storage.common.implementation.Constants;
import com.azure.storage.common.sas.SasIpRange;
import java.time.OffsetDateTime;
import java.util.Map;
/**
* Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly
* by the user; it is only generated by the {@link BlobServiceSasSignatureValues} type. Once generated, it can be set on
* a {@link BlobClientBuilder} object to be constructed as part of a URL or it can be encoded into a {@code String} and
* appended to a URL directly (though caution should be taken here in case there are existing query parameters, which
* might affect the appropriate means of appending these query parameters). NOTE: Instances of this class are immutable
* to ensure thread safety.
* @deprecated Please use the generateSas method on the desired blob/container client after initializing
* {@link BlobServiceSasSignatureValues}.
*/
@Deprecated
public final class BlobServiceSasQueryParameters extends BaseSasQueryParameters {
private final String identifier;
private final String keyObjectId;
private final String keyTenantId;
private final OffsetDateTime keyStart;
private final OffsetDateTime keyExpiry;
private final String keyService;
private final String keyVersion;
private final String resource;
private final String cacheControl;
private final String contentDisposition;
private final String contentEncoding;
private final String contentLanguage;
private final String contentType;
/**
 * Creates a new {@link BlobServiceSasQueryParameters} object.
 *
 * <p>Each SAS component is read from the supplied query-parameter map; components whose
 * parameter is absent are stored as {@code null}.</p>
 *
 * @param queryParamsMap All query parameters for the request as key-value pairs
 * @param removeSasParametersFromMap When {@code true}, the SAS query parameters will be removed from
 * queryParamsMap
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public BlobServiceSasQueryParameters(Map<String, String[]> queryParamsMap, boolean removeSasParametersFromMap) {
// Common SAS components (version, protocol, times, IP range, permissions,
// signature) are extracted by the superclass.
super(queryParamsMap, removeSasParametersFromMap);
this.identifier = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_SIGNED_IDENTIFIER,
removeSasParametersFromMap);
// User-delegation-key components (only present for user-delegation SAS).
this.keyObjectId = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_SIGNED_OBJECT_ID,
removeSasParametersFromMap);
this.keyTenantId = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_SIGNED_TENANT_ID,
removeSasParametersFromMap);
this.keyStart = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_SIGNED_KEY_START,
removeSasParametersFromMap, StorageImplUtils::parseDateAndFormat).getDateTime();
this.keyExpiry = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_SIGNED_KEY_EXPIRY,
removeSasParametersFromMap, StorageImplUtils::parseDateAndFormat).getDateTime();
this.keyService = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_SIGNED_KEY_SERVICE,
removeSasParametersFromMap);
this.keyVersion = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_SIGNED_KEY_VERSION,
removeSasParametersFromMap);
this.resource = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_SIGNED_RESOURCE,
removeSasParametersFromMap);
// Response-header overrides applied when the SAS is used.
this.cacheControl = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_CACHE_CONTROL,
removeSasParametersFromMap);
this.contentDisposition = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_CONTENT_DISPOSITION,
removeSasParametersFromMap);
this.contentEncoding = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_CONTENT_ENCODING,
removeSasParametersFromMap);
this.contentLanguage = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_CONTENT_LANGUAGE,
removeSasParametersFromMap);
this.contentType = getQueryParameter(queryParamsMap, Constants.UrlConstants.SAS_CONTENT_TYPE,
removeSasParametersFromMap);
}
/**
 * Creates a new {@link BlobServiceSasQueryParameters} object. These objects are only created internally by
 * SASSignatureValues classes.
 *
 * @param version A {@code String} representing the storage version.
 * @param protocol A {@code String} representing the allowed HTTP protocol(s) or {@code null}.
 * @param startTime A {@code java.util.Date} representing the start time for this SAS token or {@code null}.
 * @param expiryTime A {@code java.util.Date} representing the expiry time for this SAS token.
 * @param sasIpRange A {@link SasIpRange} representing the range of valid IP addresses for this SAS token or
 * {@code null}.
 * @param identifier A {@code String} representing the signed identifier (only for Service SAS) or {@code null}.
 * @param resource A {@code String} representing the storage container or blob (only for Service SAS).
 * @param permissions A {@code String} representing the storage permissions or {@code null}.
 * @param signature A {@code String} representing the signature for the SAS token.
 * @param cacheControl The Cache-Control response header override or {@code null}.
 * @param contentDisposition The Content-Disposition response header override or {@code null}.
 * @param contentEncoding The Content-Encoding response header override or {@code null}.
 * @param contentLanguage The Content-Language response header override or {@code null}.
 * @param contentType The Content-Type response header override or {@code null}.
 * @param key The {@link UserDelegationKey} for a user-delegation SAS, or {@code null} for a service SAS.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
BlobServiceSasQueryParameters(String version, SasProtocol protocol, OffsetDateTime startTime,
OffsetDateTime expiryTime, SasIpRange sasIpRange, String identifier, String resource, String permissions,
String signature, String cacheControl, String contentDisposition, String contentEncoding,
String contentLanguage, String contentType, UserDelegationKey key) {
super(version, protocol, startTime, expiryTime, sasIpRange, permissions, signature);
this.identifier = identifier;
this.resource = resource;
this.cacheControl = cacheControl;
this.contentDisposition = contentDisposition;
this.contentEncoding = contentEncoding;
this.contentLanguage = contentLanguage;
this.contentType = contentType;
// Key components are either all copied from the delegation key or all null.
if (key != null) {
this.keyObjectId = key.getSignedObjectId();
this.keyTenantId = key.getSignedTenantId();
this.keyStart = key.getSignedStart();
this.keyExpiry = key.getSignedExpiry();
this.keyService = key.getSignedService();
this.keyVersion = key.getSignedVersion();
} else {
this.keyObjectId = null;
this.keyTenantId = null;
this.keyStart = null;
this.keyExpiry = null;
this.keyService = null;
this.keyVersion = null;
}
}
/**
 * @return The signed identifier (only for {@link BlobServiceSasSignatureValues}) or {@code null} when the SAS
 * carried no {@code si} parameter. Please see
 * <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy">here</a>
 * for more information.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public String getIdentifier() {
return identifier;
}
/**
 * @return The storage container or blob (only for {@link BlobServiceSasSignatureValues}); {@code null} when the
 * SAS carried no signed-resource parameter.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public String getResource() {
return resource;
}
/**
 * @return The Cache-Control header value when a client accesses the resource with this sas token, or
 * {@code null} when no override was set.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public String getCacheControl() {
return cacheControl;
}
/**
 * @return The Content-Disposition header value when a client accesses the resource with this sas token, or
 * {@code null} when no override was set.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public String getContentDisposition() {
return contentDisposition;
}
/**
 * @return The Content-Encoding header value when a client accesses the resource with this sas token, or
 * {@code null} when no override was set.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public String getContentEncoding() {
return contentEncoding;
}
/**
 * @return The Content-Language header value when a client accesses the resource with this sas token, or
 * {@code null} when no override was set.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public String getContentLanguage() {
return contentLanguage;
}
/**
 * @return The Content-Type header value when a client accesses the resource with this sas token, or
 * {@code null} when no override was set.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public String getContentType() {
return contentType;
}
/**
 * @return the object ID of the user delegation key, or {@code null} for a service SAS.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public String getKeyObjectId() {
return keyObjectId;
}
/**
 * Returns the tenant ID of the user delegation key.
 *
 * @return the key's tenant ID, or {@code null} if no user delegation key was used.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public String getKeyTenantId() {
    return this.keyTenantId;
}
/**
 * Returns the datetime at which the user delegation key becomes active.
 *
 * @return the key's start time, or {@code null} if no user delegation key was used.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public OffsetDateTime getKeyStart() {
    return this.keyStart;
}
/**
 * Returns the datetime at which the user delegation key expires.
 *
 * @return the key's expiry time, or {@code null} if no user delegation key was used.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public OffsetDateTime getKeyExpiry() {
    return this.keyExpiry;
}
/**
 * Returns the services permitted by the user delegation key.
 *
 * @return the key's permitted services, or {@code null} if no user delegation key was used.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public String getKeyService() {
    return this.keyService;
}
/**
 * Returns the service version that created the user delegation key.
 *
 * @return the key's service version, or {@code null} if no user delegation key was used.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
public String getKeyVersion() {
    return this.keyVersion;
}
/**
 * Rebuilds a {@link UserDelegationKey} from the key components stored on this object.
 *
 * @return a new {@code UserDelegationKey} populated with the stored key fields.
 * @deprecated Please use {@link BlobServiceSasSignatureValues}
 */
@Deprecated
UserDelegationKey userDelegationKey() {
    // Populate a fresh key object field-by-field; each setter targets an
    // independent property, so the order does not matter.
    final UserDelegationKey key = new UserDelegationKey();
    key.setSignedObjectId(this.keyObjectId);
    key.setSignedTenantId(this.keyTenantId);
    key.setSignedStart(this.keyStart);
    key.setSignedExpiry(this.keyExpiry);
    key.setSignedService(this.keyService);
    key.setSignedVersion(this.keyVersion);
    return key;
}
/**
 * Encodes all SAS query parameters into a string that can be appended to a URL.
 *
 * @return A {@code String} representing the SAS query parameters.
 * @deprecated Please use the generateSas method on the desired blob/container client after initializing
 * {@link BlobServiceSasSignatureValues}.
 */
@Deprecated
public String encode() {
    // Strictly each key and value should be url-encoded, but every key/value used
    // here is known to encode to itself, so we cheat — only the signature value
    // needs real encoding (handled inside tryAppendQueryParameter).
    final StringBuilder query = new StringBuilder();
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_SERVICE_VERSION, this.version);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_PROTOCOL, this.protocol);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_START_TIME, formatQueryParameterDate(this.startTime));
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_EXPIRY_TIME, formatQueryParameterDate(this.expiryTime));
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_IP_RANGE, this.sasIpRange);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_SIGNED_IDENTIFIER, this.identifier);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_SIGNED_OBJECT_ID, this.keyObjectId);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_SIGNED_TENANT_ID, this.keyTenantId);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_SIGNED_KEY_START,
        formatQueryParameterDate(this.keyStart));
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_SIGNED_KEY_EXPIRY,
        formatQueryParameterDate(this.keyExpiry));
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_SIGNED_KEY_SERVICE, this.keyService);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_SIGNED_KEY_VERSION, this.keyVersion);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_SIGNED_RESOURCE, this.resource);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_SIGNED_PERMISSIONS, this.permissions);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_SIGNATURE, this.signature);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_CACHE_CONTROL, this.cacheControl);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_CONTENT_DISPOSITION, this.contentDisposition);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_CONTENT_ENCODING, this.contentEncoding);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_CONTENT_LANGUAGE, this.contentLanguage);
    tryAppendQueryParameter(query, Constants.UrlConstants.SAS_CONTENT_TYPE, this.contentType);
    return query.toString();
}
}
| |
/* First created by JCasGen Fri Nov 20 16:31:48 CET 2015 */
package types;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.impl.CASImpl;
import org.apache.uima.cas.impl.FSGenerator;
import org.apache.uima.cas.impl.FeatureImpl;
import org.apache.uima.cas.impl.TypeImpl;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.JCasRegistry;
import org.apache.uima.jcas.tcas.Annotation_Type;
/** candidate — annotation carrying frequency, tf/df/idf statistics, first/last
 * occurrence positions, its surface ("full") forms and an integer class label.
 * This is the JCasGen-generated _Type companion of {@code types.Candidate}; it holds
 * the CAS Feature handles and low-level (address-based) accessors for those features.
 * Generated code — regenerate from the type-system descriptor rather than hand-editing.
 * Updated by JCasGen Thu Dec 24 00:46:12 CET 2015
 * @generated */
public class Candidate_Type extends Annotation_Type {
/** @generated
* @return the generator for this type
*/
@Override
protected FSGenerator getFSGenerator() {return fsGenerator;}
// Maps a low-level feature-structure address to its Candidate JCas cover object.
// When useExistingInstance is set, a per-CAS cache (getJfsFromCaddr/putJfsFromCaddr)
// guarantees at most one cover object per address; otherwise a fresh cover is built.
/** @generated */
private final FSGenerator fsGenerator =
new FSGenerator() {
public FeatureStructure createFS(int addr, CASImpl cas) {
if (Candidate_Type.this.useExistingInstance) {
// Return eq fs instance if already created
FeatureStructure fs = Candidate_Type.this.jcas.getJfsFromCaddr(addr);
if (null == fs) {
fs = new Candidate(addr, Candidate_Type.this);
Candidate_Type.this.jcas.putJfsFromCaddr(addr, fs);
return fs;
}
return fs;
} else return new Candidate(addr, Candidate_Type.this);
}
};
/** @generated */
@SuppressWarnings ("hiding")
public final static int typeIndexID = Candidate.typeIndexID;
/** @generated
@modifiable */
@SuppressWarnings ("hiding")
public final static boolean featOkTst = JCasRegistry.getFeatOkTst("types.Candidate");
// Each feature below follows the same generated pattern: a Feature handle plus its
// low-level feature code, and a getter/setter pair operating on raw FS addresses via
// ll_cas. When featOkTst is enabled, accessing a feature missing from the type system
// raises an error instead of silently misbehaving.
/** @generated */
final Feature casFeat_frequency;
/** @generated */
final int casFeatCode_frequency;
/** @generated
* @param addr low level Feature Structure reference
* @return the feature value
*/
public int getFrequency(int addr) {
if (featOkTst && casFeat_frequency == null)
jcas.throwFeatMissing("frequency", "types.Candidate");
return ll_cas.ll_getIntValue(addr, casFeatCode_frequency);
}
/** @generated
* @param addr low level Feature Structure reference
* @param v value to set
*/
public void setFrequency(int addr, int v) {
if (featOkTst && casFeat_frequency == null)
jcas.throwFeatMissing("frequency", "types.Candidate");
ll_cas.ll_setIntValue(addr, casFeatCode_frequency, v);}
/** @generated */
final Feature casFeat_last_occ;
/** @generated */
final int casFeatCode_last_occ;
/** @generated
* @param addr low level Feature Structure reference
* @return the feature value
*/
public int getLast_occ(int addr) {
if (featOkTst && casFeat_last_occ == null)
jcas.throwFeatMissing("last_occ", "types.Candidate");
return ll_cas.ll_getIntValue(addr, casFeatCode_last_occ);
}
/** @generated
* @param addr low level Feature Structure reference
* @param v value to set
*/
public void setLast_occ(int addr, int v) {
if (featOkTst && casFeat_last_occ == null)
jcas.throwFeatMissing("last_occ", "types.Candidate");
ll_cas.ll_setIntValue(addr, casFeatCode_last_occ, v);}
/** @generated */
final Feature casFeat_name;
/** @generated */
final int casFeatCode_name;
/** @generated
* @param addr low level Feature Structure reference
* @return the feature value
*/
public String getName(int addr) {
if (featOkTst && casFeat_name == null)
jcas.throwFeatMissing("name", "types.Candidate");
return ll_cas.ll_getStringValue(addr, casFeatCode_name);
}
/** @generated
* @param addr low level Feature Structure reference
* @param v value to set
*/
public void setName(int addr, String v) {
if (featOkTst && casFeat_name == null)
jcas.throwFeatMissing("name", "types.Candidate");
ll_cas.ll_setStringValue(addr, casFeatCode_name, v);}
/** @generated */
final Feature casFeat_tf;
/** @generated */
final int casFeatCode_tf;
/** @generated
* @param addr low level Feature Structure reference
* @return the feature value
*/
public int getTf(int addr) {
if (featOkTst && casFeat_tf == null)
jcas.throwFeatMissing("tf", "types.Candidate");
return ll_cas.ll_getIntValue(addr, casFeatCode_tf);
}
/** @generated
* @param addr low level Feature Structure reference
* @param v value to set
*/
public void setTf(int addr, int v) {
if (featOkTst && casFeat_tf == null)
jcas.throwFeatMissing("tf", "types.Candidate");
ll_cas.ll_setIntValue(addr, casFeatCode_tf, v);}
/** @generated */
final Feature casFeat_df;
/** @generated */
final int casFeatCode_df;
/** @generated
* @param addr low level Feature Structure reference
* @return the feature value
*/
public int getDf(int addr) {
if (featOkTst && casFeat_df == null)
jcas.throwFeatMissing("df", "types.Candidate");
return ll_cas.ll_getIntValue(addr, casFeatCode_df);
}
/** @generated
* @param addr low level Feature Structure reference
* @param v value to set
*/
public void setDf(int addr, int v) {
if (featOkTst && casFeat_df == null)
jcas.throwFeatMissing("df", "types.Candidate");
ll_cas.ll_setIntValue(addr, casFeatCode_df, v);}
/** @generated */
final Feature casFeat_idf;
/** @generated */
final int casFeatCode_idf;
/** @generated
* @param addr low level Feature Structure reference
* @return the feature value
*/
public double getIdf(int addr) {
if (featOkTst && casFeat_idf == null)
jcas.throwFeatMissing("idf", "types.Candidate");
return ll_cas.ll_getDoubleValue(addr, casFeatCode_idf);
}
/** @generated
* @param addr low level Feature Structure reference
* @param v value to set
*/
public void setIdf(int addr, double v) {
if (featOkTst && casFeat_idf == null)
jcas.throwFeatMissing("idf", "types.Candidate");
ll_cas.ll_setDoubleValue(addr, casFeatCode_idf, v);}
/** @generated */
final Feature casFeat_first_occ;
/** @generated */
final int casFeatCode_first_occ;
/** @generated
* @param addr low level Feature Structure reference
* @return the feature value
*/
public int getFirst_occ(int addr) {
if (featOkTst && casFeat_first_occ == null)
jcas.throwFeatMissing("first_occ", "types.Candidate");
return ll_cas.ll_getIntValue(addr, casFeatCode_first_occ);
}
/** @generated
* @param addr low level Feature Structure reference
* @param v value to set
*/
public void setFirst_occ(int addr, int v) {
if (featOkTst && casFeat_first_occ == null)
jcas.throwFeatMissing("first_occ", "types.Candidate");
ll_cas.ll_setIntValue(addr, casFeatCode_first_occ, v);}
/** @generated */
final Feature casFeat_effective_full_form;
/** @generated */
final int casFeatCode_effective_full_form;
/** @generated
* @param addr low level Feature Structure reference
* @return the feature value
*/
public String getEffective_full_form(int addr) {
if (featOkTst && casFeat_effective_full_form == null)
jcas.throwFeatMissing("effective_full_form", "types.Candidate");
return ll_cas.ll_getStringValue(addr, casFeatCode_effective_full_form);
}
/** @generated
* @param addr low level Feature Structure reference
* @param v value to set
*/
public void setEffective_full_form(int addr, String v) {
if (featOkTst && casFeat_effective_full_form == null)
jcas.throwFeatMissing("effective_full_form", "types.Candidate");
ll_cas.ll_setStringValue(addr, casFeatCode_effective_full_form, v);}
// full_forms is declared as uima.cas.StringList, so it is stored as a reference
// (FS address), hence ll_getRefValue/ll_setRefValue rather than a string accessor.
/** @generated */
final Feature casFeat_full_forms;
/** @generated */
final int casFeatCode_full_forms;
/** @generated
* @param addr low level Feature Structure reference
* @return the feature value
*/
public int getFull_forms(int addr) {
if (featOkTst && casFeat_full_forms == null)
jcas.throwFeatMissing("full_forms", "types.Candidate");
return ll_cas.ll_getRefValue(addr, casFeatCode_full_forms);
}
/** @generated
* @param addr low level Feature Structure reference
* @param v value to set
*/
public void setFull_forms(int addr, int v) {
if (featOkTst && casFeat_full_forms == null)
jcas.throwFeatMissing("full_forms", "types.Candidate");
ll_cas.ll_setRefValue(addr, casFeatCode_full_forms, v);}
/** @generated */
final Feature casFeat_class_;
/** @generated */
final int casFeatCode_class_;
/** @generated
* @param addr low level Feature Structure reference
* @return the feature value
*/
public int getClass_(int addr) {
if (featOkTst && casFeat_class_ == null)
jcas.throwFeatMissing("class_", "types.Candidate");
return ll_cas.ll_getIntValue(addr, casFeatCode_class_);
}
/** @generated
* @param addr low level Feature Structure reference
* @param v value to set
*/
public void setClass_(int addr, int v) {
if (featOkTst && casFeat_class_ == null)
jcas.throwFeatMissing("class_", "types.Candidate");
ll_cas.ll_setIntValue(addr, casFeatCode_class_, v);}
/** initialize variables to correspond with Cas Type and Features
* Registers the cover-object generator for this type, then resolves every declared
* feature against the CAS type system. A feature that cannot be resolved leaves its
* handle null and its code INVALID_FEATURE_CODE (the featOkTst guard in each accessor
* then reports it on first use).
* @generated
* @param jcas JCas
* @param casType Type
*/
public Candidate_Type(JCas jcas, Type casType) {
super(jcas, casType);
casImpl.getFSClassRegistry().addGeneratorForType((TypeImpl)this.casType, getFSGenerator());
casFeat_frequency = jcas.getRequiredFeatureDE(casType, "frequency", "uima.cas.Integer", featOkTst);
casFeatCode_frequency = (null == casFeat_frequency) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_frequency).getCode();
casFeat_last_occ = jcas.getRequiredFeatureDE(casType, "last_occ", "uima.cas.Integer", featOkTst);
casFeatCode_last_occ = (null == casFeat_last_occ) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_last_occ).getCode();
casFeat_name = jcas.getRequiredFeatureDE(casType, "name", "uima.cas.String", featOkTst);
casFeatCode_name = (null == casFeat_name) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_name).getCode();
casFeat_tf = jcas.getRequiredFeatureDE(casType, "tf", "uima.cas.Integer", featOkTst);
casFeatCode_tf = (null == casFeat_tf) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_tf).getCode();
casFeat_df = jcas.getRequiredFeatureDE(casType, "df", "uima.cas.Integer", featOkTst);
casFeatCode_df = (null == casFeat_df) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_df).getCode();
casFeat_idf = jcas.getRequiredFeatureDE(casType, "idf", "uima.cas.Double", featOkTst);
casFeatCode_idf = (null == casFeat_idf) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_idf).getCode();
casFeat_first_occ = jcas.getRequiredFeatureDE(casType, "first_occ", "uima.cas.Integer", featOkTst);
casFeatCode_first_occ = (null == casFeat_first_occ) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_first_occ).getCode();
casFeat_effective_full_form = jcas.getRequiredFeatureDE(casType, "effective_full_form", "uima.cas.String", featOkTst);
casFeatCode_effective_full_form = (null == casFeat_effective_full_form) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_effective_full_form).getCode();
casFeat_full_forms = jcas.getRequiredFeatureDE(casType, "full_forms", "uima.cas.StringList", featOkTst);
casFeatCode_full_forms = (null == casFeat_full_forms) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_full_forms).getCode();
casFeat_class_ = jcas.getRequiredFeatureDE(casType, "class_", "uima.cas.Integer", featOkTst);
casFeatCode_class_ = (null == casFeat_class_) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_class_).getCode();
}
}
| |
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2010-2013 Oracle and/or its affiliates. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can
* obtain a copy of the License at
* http://glassfish.java.net/public/CDDL+GPL_1_1.html
* or packager/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at packager/legal/LICENSE.txt.
*
* GPL Classpath Exception:
* Oracle designates this particular file as subject to the "Classpath"
* exception as provided by Oracle in the GPL Version 2 section of the License
* file that accompanied this code.
*
* Modifications:
* If applicable, add the following below the License Header, with the fields
* enclosed by brackets [] replaced by your own identifying information:
* "Portions Copyright [year] [name of copyright owner]"
*
* Contributor(s):
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package javax.ws.rs.client;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Factory finder utility class.
 * <p>
 * Locates a provider implementation for a factory id by trying, in order: the OSGi
 * locator (if present), a {@code META-INF/services} entry on the classpath,
 * {@code $java.home/lib/jaxrs.properties}, a system property, and finally an
 * explicit fallback class name.
 *
 * @author Paul Sandoz
 * @author Marc Hadley
 * @since 1.0
 */
final class FactoryFinder {

    private static final Logger LOGGER = Logger.getLogger(FactoryFinder.class.getName());

    private FactoryFinder() {
        // prevents instantiation
    }

    /**
     * @return the current thread's context class loader, or {@code null} if it could
     *         not be obtained (e.g. under a restrictive security manager).
     */
    static ClassLoader getContextClassLoader() {
        return AccessController.doPrivileged(
                new PrivilegedAction<ClassLoader>() {
                    @Override
                    public ClassLoader run() {
                        ClassLoader cl = null;
                        try {
                            cl = Thread.currentThread().getContextClassLoader();
                        } catch (SecurityException ex) {
                            LOGGER.log(
                                    Level.WARNING,
                                    "Unable to get context classloader instance.",
                                    ex);
                        }
                        return cl;
                    }
                });
    }

    /**
     * Creates an instance of the specified class using the specified
     * <code>ClassLoader</code> object.
     *
     * @param className name of the class to be instantiated.
     * @param classLoader class loader to be used; {@code null} means the class loader
     *                    that loaded this class.
     * @return instance of the specified class.
     * @throws ClassNotFoundException if the given class could not be found
     *                                or could not be instantiated
     */
    private static Object newInstance(final String className, final ClassLoader classLoader) throws ClassNotFoundException {
        try {
            Class<?> spiClass;
            if (classLoader == null) {
                spiClass = Class.forName(className);
            } else {
                try {
                    spiClass = Class.forName(className, false, classLoader);
                } catch (ClassNotFoundException ex) {
                    // Some containers supply a loader that cannot see the provider
                    // class; retry with the current class loader before giving up.
                    LOGGER.log(
                            Level.FINE,
                            "Unable to load provider class " + className
                                    + " using custom classloader " + classLoader.getClass().getName()
                                    + " trying again with current classloader.",
                            ex);
                    spiClass = Class.forName(className);
                }
            }
            return spiClass.newInstance();
        } catch (ClassNotFoundException x) {
            throw x;
        } catch (Exception x) {
            // Wrap any other failure, preserving the original exception as the cause.
            throw new ClassNotFoundException("Provider " + className + " could not be instantiated: " + x, x);
        }
    }

    /**
     * Finds the implementation <code>Class</code> object for the given
     * factory name, or if that fails, finds the <code>Class</code> object
     * for the given fallback class name. The arguments supplied MUST be
     * used in order. If using the first argument is successful, the second
     * one will not be used.
     * <P>
     * This method is package private so that this code can be shared.
     *
     * @param factoryId the name of the factory to find, which is
     *                  a system property
     * @param fallbackClassName the implementation class name, which is
     *                          to be used only if nothing else
     *                          is found; <code>null</code> to indicate that
     *                          there is no fallback class name
     * @return the <code>Class</code> object of the specified message factory;
     *         may not be <code>null</code>
     * @throws ClassNotFoundException if the given class could not be found
     *                                or could not be instantiated
     */
    static Object find(final String factoryId, final String fallbackClassName) throws ClassNotFoundException {
        try {
            // If we are deployed into an OSGi environment, leverage it.
            Class<?> factoryClass = FactoryFinder.class.getClassLoader().loadClass(factoryId);
            Class<?> spiClass = org.apache.servicemix.specs.locator.OsgiLocator.locate(factoryClass, factoryId);
            if (spiClass != null) {
                return spiClass.newInstance();
            }
        } catch (Throwable ignored) {
            // Best effort only: OSGi support is optional; fall through to the
            // remaining lookup strategies.
        }
        ClassLoader classLoader = getContextClassLoader();
        String serviceId = "META-INF/services/" + factoryId;
        // try to find services in CLASSPATH
        try {
            InputStream is;
            if (classLoader == null) {
                is = ClassLoader.getSystemResourceAsStream(serviceId);
            } else {
                is = classLoader.getResourceAsStream(serviceId);
            }
            if (is != null) {
                // try-with-resources: the original closed the reader only on the
                // happy path, leaking it if readLine() threw.
                try (BufferedReader rd = new BufferedReader(new InputStreamReader(is, "UTF-8"))) {
                    String factoryClassName = rd.readLine();
                    if (factoryClassName != null && !"".equals(factoryClassName)) {
                        return newInstance(factoryClassName, classLoader);
                    }
                }
            }
        } catch (Exception ex) {
            LOGGER.log(Level.FINER, "Failed to load service " + factoryId + " from " + serviceId, ex);
        }
        // try to read from $java.home/lib/jaxrs.properties
        try {
            String javah = System.getProperty("java.home");
            String configFile = javah + File.separator
                    + "lib" + File.separator + "jaxrs.properties";
            File f = new File(configFile);
            if (f.exists()) {
                Properties props = new Properties();
                // try-with-resources: the original FileInputStream was never closed.
                try (InputStream in = new FileInputStream(f)) {
                    props.load(in);
                }
                String factoryClassName = props.getProperty(factoryId);
                // Guard against a missing key: fall through to the next strategy
                // instead of triggering (and logging) an NPE inside newInstance.
                if (factoryClassName != null) {
                    return newInstance(factoryClassName, classLoader);
                }
            }
        } catch (Exception ex) {
            LOGGER.log(Level.FINER, "Failed to load service " + factoryId
                    + " from $java.home/lib/jaxrs.properties", ex);
        }
        // Use the system property
        try {
            String systemProp = System.getProperty(factoryId);
            if (systemProp != null) {
                return newInstance(systemProp, classLoader);
            }
        } catch (SecurityException se) {
            LOGGER.log(Level.FINER, "Failed to load service " + factoryId
                    + " from a system property", se);
        }
        if (fallbackClassName == null) {
            throw new ClassNotFoundException(
                    "Provider for " + factoryId + " cannot be found", null);
        }
        return newInstance(fallbackClassName, classLoader);
    }
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 Eugenio Marletti
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package me.eugeniomarletti.tetheringfixer.android;
import android.animation.Animator;
import android.animation.AnimatorSet;
import android.animation.ArgbEvaluator;
import android.animation.PropertyValuesHolder;
import android.animation.TimeInterpolator;
import android.animation.TypeEvaluator;
import android.animation.ValueAnimator;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.PointF;
import android.util.AttributeSet;
import android.view.View;
import android.widget.Checkable;
import me.eugeniomarletti.tetheringfixer.R;
import static me.eugeniomarletti.tetheringfixer.Utils.getInterpolatorFromStyle;
import static me.eugeniomarletti.tetheringfixer.Utils.getResolvedSizeAndState;
import static me.eugeniomarletti.tetheringfixer.android.DrawableState.*;
public final class Bullet extends View
implements Checkable, StateHolderProvider, StateErrorHolder.StateError, StateWorkingHolder.StateWorking
{
// --- Style resolution -------------------------------------------------------------
public static final int DEFAULT_STYLE = R.style.BulletStyle; // default style
public static final int DEFAULT_STYLE_ATTR = R.attr.bulletStyle; // attribute for theme style
// --- Current drawing state (mutated by the animators) ------------------------------
private float bulletSize; // current bullet diameter (drawn at bulletSize/2 radius)
private float ringSize;
private final Paint bulletPaint = new Paint();
private final Paint ringPaint = new Paint(); // stroke-style paint for the pulsing ring
// --- Bullet size animation: activated <-> not activated ----------------------------
private final ValueAnimator bulletSizeAnimator = new ValueAnimator();
private final TimeInterpolator bulletSizeAnimationInterpolator;
private final int bulletSizeAnimationDuration;
private final float bulletSizeActivated;
private final float bulletSizeNotActivated;
// --- Bullet colour animation: normal / checked / error ------------------------------
private final ValueAnimator bulletColorAnimator = new ValueAnimator();
private final TimeInterpolator bulletColorAnimationInterpolator;
private final int bulletColorAnimationDuration;
private final int bulletColorError;
private final int bulletColorChecked;
private final int bulletColorNormal;
// --- Ring "pulse": size, thickness and alpha animate together via ringAnimatorSet ---
private final ValueAnimator ringSizeAnimator = new ValueAnimator();
private final TimeInterpolator ringSizeAnimationInterpolator;
private final float ringSizeStart;
private final float ringSizeEnd;
private final ValueAnimator ringThicknessAnimator = new ValueAnimator();
private final TimeInterpolator ringThicknessAnimationInterpolator;
private final float ringThicknessStart;
private final float ringThicknessEnd;
private final ValueAnimator ringAlphaAnimator = new ValueAnimator();
private final TimeInterpolator ringAlphaAnimationInterpolator;
private final float ringAlphaStart;
private final float ringAlphaEnd;
private final AnimatorSet ringAnimatorSet = new AnimatorSet();
private final int ringAnimationDuration;
private final int ringAnimationPauseDuration;
private boolean ringAnimationContinue = false; // keeps the pulse looping while "working"
// --- Previous values of each custom state, used to detect transitions ---------------
private boolean wasError; // red
private boolean wasWorking; // ring
private boolean wasActivated; // big
private boolean wasChecked; // blue
// --- Custom drawable-state holders (checked / error / working) ----------------------
private final StateHolder stateChecked = new StateCheckedHolder(this);
private final StateHolder stateError = new StateErrorHolder(this);
private final StateHolder stateWorking = new StateWorkingHolder(this);
private final StateHolder[] states = new StateHolder[] { stateChecked, stateError, stateWorking };
// Shared evaluator for colour interpolation (raw type comes from the framework API).
private static final TypeEvaluator ARGB_EVALUATOR = new ArgbEvaluator();
/**
 * Creates a Bullet with no XML attributes; styling resolves through the theme's
 * {@link #DEFAULT_STYLE_ATTR} attribute.
 *
 * @param context the view's context
 */
public Bullet(Context context)
{
this(context, null);
}
/**
 * Creates a Bullet from XML attributes; styling resolves through the theme's
 * {@link #DEFAULT_STYLE_ATTR} attribute.
 *
 * @param context the view's context
 * @param attrs   XML attributes, may be {@code null}
 */
public Bullet(Context context, AttributeSet attrs)
{
this(context, attrs, DEFAULT_STYLE_ATTR);
}
/**
 * Full constructor: reads every styleable attribute (animation interpolators,
 * durations, sizes, colours and the four initial states) into final fields, then
 * wires everything up via {@code initialize()}.
 *
 * @param context      the view's context
 * @param attrs        XML attributes, may be {@code null}
 * @param defStyleAttr theme attribute supplying default style values
 */
@SuppressWarnings("ConstantConditions")
public Bullet(Context context, AttributeSet attrs, int defStyleAttr)
{
super(context, attrs, defStyleAttr);
// obtain/recycle pattern: the TypedArray must be recycled even if a read throws.
TypedArray a = null;
try
{
a = context.obtainStyledAttributes(attrs, R.styleable.Bullet, defStyleAttr, DEFAULT_STYLE);
bulletSizeAnimationInterpolator =
getInterpolatorFromStyle(context, a, R.styleable.Bullet_bulletSizeAnimationInterpolator, null);
bulletSizeAnimationDuration = a.getInt(R.styleable.Bullet_bulletSizeAnimationDuration, 0);
bulletSizeActivated = a.getDimension(R.styleable.Bullet_bulletSizeActivated, 0f);
bulletSizeNotActivated = a.getDimension(R.styleable.Bullet_bulletSizeNotActivated, 0f);
bulletColorAnimationInterpolator =
getInterpolatorFromStyle(context, a, R.styleable.Bullet_bulletColorAnimationInterpolator, null);
bulletColorAnimationDuration = a.getInt(R.styleable.Bullet_bulletColorAnimationDuration, 0);
bulletColorError = a.getColor(R.styleable.Bullet_bulletColorError, 0);
bulletColorChecked = a.getColor(R.styleable.Bullet_bulletColorChecked, 0);
bulletColorNormal = a.getColor(R.styleable.Bullet_bulletColorNormal, 0);
ringSizeAnimationInterpolator =
getInterpolatorFromStyle(context, a, R.styleable.Bullet_ringSizeAnimationInterpolator, null);
ringSizeStart = a.getDimension(R.styleable.Bullet_ringSizeStart, 0f);
ringSizeEnd = a.getDimension(R.styleable.Bullet_ringSizeEnd, 0f);
ringThicknessAnimationInterpolator =
getInterpolatorFromStyle(context, a, R.styleable.Bullet_ringThicknessAnimationInterpolator, null);
ringThicknessStart = a.getDimension(R.styleable.Bullet_ringThicknessStart, 0f);
ringThicknessEnd = a.getDimension(R.styleable.Bullet_ringThicknessEnd, 0f);
ringAlphaAnimationInterpolator =
getInterpolatorFromStyle(context, a, R.styleable.Bullet_ringAlphaAnimationInterpolator, null);
ringAlphaStart = a.getFloat(R.styleable.Bullet_ringAlphaStart, 0f);
ringAlphaEnd = a.getFloat(R.styleable.Bullet_ringAlphaEnd, 0f);
ringAnimationDuration = a.getInt(R.styleable.Bullet_ringAnimationDuration, 0);
ringAnimationPauseDuration = a.getInt(R.styleable.Bullet_ringAnimationPauseDuration, 0);
// Initial values for the four custom states, applied in initialize().
wasError = a.getBoolean(R.styleable.Bullet_isError, false);
wasWorking = a.getBoolean(R.styleable.Bullet_isWorking, false);
wasActivated = a.getBoolean(R.styleable.Bullet_isActivated, false);
wasChecked = a.getBoolean(R.styleable.Bullet_isChecked, false);
}
finally
{
if (a != null) a.recycle();
}
initialize();
}
/**
 * One-time setup called from the constructor: configures the paints, applies the
 * initial size/colour/state read from the style attributes, and wires up all
 * animators and their update listeners.
 */
private void initialize()
{
bulletPaint.setAntiAlias(true);
ringPaint.setAntiAlias(true);
ringPaint.setStyle(Paint.Style.STROKE); // ring is an outline, not a filled disc
// Apply initial geometry/colour directly (no animation) from the initial states.
setBulletSize(wasActivated ? bulletSizeActivated : bulletSizeNotActivated);
setBulletColor(wasError ? bulletColorError : (wasChecked ? bulletColorChecked : bulletColorNormal));
// Bullet size animator; its target values are supplied when the animation starts.
bulletSizeAnimator.setDuration(bulletSizeAnimationDuration);
bulletSizeAnimator.setInterpolator(bulletSizeAnimationInterpolator);
bulletSizeAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener()
{
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator)
{
// Each setter returns whether the value actually changed; redraw only then.
if (setBulletSize((float)valueAnimator.getAnimatedValue())) postInvalidateOnAnimation();
}
});
// Bullet colour animator (normal / checked / error transitions).
bulletColorAnimator.setDuration(bulletColorAnimationDuration);
bulletColorAnimator.setInterpolator(bulletColorAnimationInterpolator);
bulletColorAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener()
{
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator)
{
if (setBulletColor((int)valueAnimator.getAnimatedValue())) postInvalidateOnAnimation();
}
});
// The three ring animators (size, thickness, alpha) share fixed start/end values
// and later run together inside ringAnimatorSet as one "pulse".
ringSizeAnimator.setFloatValues(ringSizeStart, ringSizeEnd);
ringSizeAnimator.setInterpolator(ringSizeAnimationInterpolator);
ringSizeAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener()
{
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator)
{
if (setRingSize((float)valueAnimator.getAnimatedValue())) postInvalidateOnAnimation();
}
});
ringThicknessAnimator.setFloatValues(ringThicknessStart, ringThicknessEnd);
ringThicknessAnimator.setInterpolator(ringThicknessAnimationInterpolator);
ringThicknessAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener()
{
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator)
{
if (setRingThickness((float)valueAnimator.getAnimatedValue())) postInvalidateOnAnimation();
}
});
ringAlphaAnimator.setFloatValues(ringAlphaStart, ringAlphaEnd);
ringAlphaAnimator.setInterpolator(ringAlphaAnimationInterpolator);
ringAlphaAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener()
{
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator)
{
if (setRingAlpha((float)valueAnimator.getAnimatedValue())) postInvalidateOnAnimation();
}
});
//noinspection ConstantConditions
ringAnimatorSet.playTogether(ringSizeAnimator, ringThicknessAnimator, ringAlphaAnimator);
ringAnimatorSet.setDuration(ringAnimationDuration);
// After each pulse, pause for ringAnimationPauseDuration and restart, but only
// while ringAnimationContinue is still set (checked again inside the Runnable
// because the state may change during the pause).
ringAnimatorSet.addListener(new SimpleAnimatorListener()
{
@Override
public void onAnimationEnd(Animator animator)
{
if (ringAnimationContinue) postDelayed(new Runnable()
{
@Override
public void run()
{
if (ringAnimationContinue) ringAnimatorSet.start();
}
}, ringAnimationPauseDuration);
}
});
// Push the initial states into the holders and the view itself.
stateError.setState(wasError);
stateWorking.setState(wasWorking);
setActivated(wasActivated);
setChecked(wasChecked);
}
/**
 * Reacts to drawable-state changes: detects transitions of the four custom states
 * (error, working, checked, activated) against the cached was* fields and triggers
 * the matching animation. Locals intentionally shadow the fields to snapshot
 * old and new values side by side.
 */
@Override
protected void drawableStateChanged()
{
super.drawableStateChanged();
final boolean isError = isError();
final boolean wasError = this.wasError;
final boolean changedError = isError != wasError;
this.wasError = isError;
final boolean isWorking = isWorking();
final boolean wasWorking = this.wasWorking;
final boolean changedWorking = isWorking != wasWorking;
this.wasWorking = isWorking;
final boolean isChecked = isChecked();
final boolean wasChecked = this.wasChecked;
final boolean changedChecked = isChecked != wasChecked;
this.wasChecked = isChecked;
final boolean isActivated = isActivated();
final boolean wasActivated = this.wasActivated;
final boolean changedActivated = isActivated != wasActivated;
this.wasActivated = isActivated;
// error / checked: choose the bullet colour with precedence error > checked > normal,
// animating only when one of those two states actually changed.
Integer newColor = null;
if (changedError && isError) newColor = bulletColorError;
else if (changedChecked && isChecked) newColor = bulletColorChecked;
else if (changedError || changedChecked) newColor = bulletColorNormal;
if (newColor != null) animateBulletColor(newColor);
// activated: animate the bullet between its large and small size
if (changedActivated) animateBulletSize(isActivated ? bulletSizeActivated : bulletSizeNotActivated);
// working: the pulsing ring runs exactly while the working state is set
if (changedWorking)
{
if (isWorking) startRingAnimation();
else stopRingAnimation();
}
}
@Override
protected int[] onCreateDrawableState(int extraSpace)
{
    // Reserve room for our custom states, then merge them into the default set.
    final int[] extra = getAdditionalState(this);
    final int[] base = super.onCreateDrawableState(extraSpace + extra.length);
    return mergeDrawableStates(base, extra);
}
@Override
public StateHolder[] getStateHolders()
{
    // Exposes the backing holders (error/working/checked) to the state framework.
    return states;
}
// Whether the view is currently in the error state.
public boolean isError()
{
    return stateError.isState();
}
// Sets the error state; a change is picked up via drawableStateChanged().
public void setError(boolean isError)
{
    stateError.setState(isError);
}
@Override
public void toggleError()
{
    // Flips the error state.
    stateError.toggleState();
}
// Whether the view is currently in the working (busy) state.
public boolean isWorking()
{
    return stateWorking.isState();
}
// Sets the working state; drives the ring animation via drawableStateChanged().
public void setWorking(boolean isWorking)
{
    stateWorking.setState(isWorking);
}
@Override
public void toggleWorking()
{
    // Flips the working state.
    stateWorking.toggleState();
}
@Override
public boolean isChecked()
{
    // Checked state is held by stateChecked, not by a superclass field.
    return stateChecked.isState();
}
@Override
public void setChecked(boolean b)
{
    // Checkable contract: store the new checked state.
    stateChecked.setState(b);
}
@Override
public void toggle()
{
    // Checkable contract: invert the checked state.
    stateChecked.toggleState();
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec)
{
    // The view must be able to contain the fully expanded ring.
    final int minRing = (int)Math.ceil(ringSizeEnd);
    final int wantedWidth = Math.max(getMinimumWidth(), minRing);
    final int wantedHeight = Math.max(getMinimumHeight(), minRing);
    setMeasuredDimension(
            getResolvedSizeAndState(wantedWidth, widthMeasureSpec),
            getResolvedSizeAndState(wantedHeight, heightMeasureSpec));
}
@Override
protected void onDraw(Canvas canvas)
{
    super.onDraw(canvas);
    // Both the bullet (filled) and the ring (stroked) are drawn around the view centre.
    final float bulletRadius = getBulletSize() / 2f;
    final float ringThickness = getRingThickness();
    // Stroke is centred on the path, so subtract one thickness to keep the outer edge within ringSize.
    final float ringRadius = (getRingSize() - ringThickness) / 2f;
    canvas.save();
    canvas.translate(canvas.getWidth() / 2f, canvas.getHeight() / 2f);
    // Skip the ring when it would be fully hidden behind an opaque bullet.
    // NOTE(review): the middle clause (ringThickness * 2f - ringSize > bulletSize) looks
    // sign-inverted for an "inner hole smaller than bullet" test — confirm intended geometry.
    if (ringThickness > 0f
            && (ringSize > bulletSize
            || ringThickness * 2f - ringSize > bulletSize
            || bulletPaint.getAlpha() < 255))
        canvas.drawCircle(0f, 0f, ringRadius, ringPaint);
    canvas.drawCircle(0f, 0f, bulletRadius, bulletPaint);
    canvas.restore();
}
// Animates the bullet from its current size so an in-flight animation doesn't jump.
private void animateBulletSize(float newSize)
{
    if (bulletSizeAnimator.isStarted()) bulletSizeAnimator.cancel();
    bulletSizeAnimator.setFloatValues(getBulletSize(), newSize);
    bulletSizeAnimator.start();
}
// Animates the bullet colour from the paint's current colour to newColor.
private void animateBulletColor(int newColor)
{
    if (bulletColorAnimator.isStarted()) bulletColorAnimator.cancel();
    // NOTE(review): empty property name — presumably the value is consumed by an
    // update listener registered elsewhere rather than a reflective setter; confirm.
    bulletColorAnimator.setValues(
            PropertyValuesHolder.ofObject("", ARGB_EVALUATOR, getBulletColor(), newColor));
    bulletColorAnimator.start();
}
// Starts the looping ring animation; the flag makes the set restart itself
// from its end-listener until stopRingAnimation() clears it.
private void startRingAnimation()
{
    ringAnimationContinue = true;
    if (!ringAnimatorSet.isStarted()) ringAnimatorSet.start();
}
// Only clears the restart flag: the current animation cycle finishes instead of
// being cancelled mid-way.
private void stopRingAnimation()
{
    ringAnimationContinue = false;
}
// Current bullet colour, read straight from the paint.
public int getBulletColor()
{
    return bulletPaint.getColor();
}
// Applies a colour to both paints, preserving the ring's alpha (setColor resets it).
// Returns true when a repaint is needed.
private boolean setBulletColor(int color)
{
    if (getBulletColor() == color) return false;
    final float preservedAlpha = getRingAlpha();
    bulletPaint.setColor(color);
    ringPaint.setColor(color);
    setRingAlpha(preservedAlpha);
    return true;
}
// Ring alpha normalised to [0, 1].
public float getRingAlpha()
{
    return ringPaint.getAlpha() / 255f;
}
// Quantises a [0, 1] alpha to the paint's 0..255 range; returns true on change
// so callers can skip redundant invalidates.
private boolean setRingAlpha(float alpha)
{
    final int quantised = Math.round(alpha * 255);
    if (ringPaint.getAlpha() == quantised) return false;
    ringPaint.setAlpha(quantised);
    return true;
}
// Ring thickness is stored as the ring paint's stroke width.
public float getRingThickness()
{
    return ringPaint.getStrokeWidth();
}
// Updates the stroke width; returns true only when the value actually changed.
private boolean setRingThickness(float thickness)
{
    if (getRingThickness() == thickness) return false;
    ringPaint.setStrokeWidth(thickness);
    return true;
}
// Current outer diameter of the ring.
public float getRingSize()
{
    return ringSize;
}
// Updates the ring diameter; returns true only when the value actually changed.
private boolean setRingSize(float size)
{
    if (getRingSize() == size) return false;
    ringSize = size;
    return true;
}
// Current diameter of the bullet.
public float getBulletSize()
{
    return bulletSize;
}
// Updates the bullet diameter; returns true only when the value actually changed.
private boolean setBulletSize(float size)
{
    if (getBulletSize() == size) return false;
    bulletSize = size;
    return true;
}
// Colour used for the bullet while in the checked state.
public int getBulletColorChecked()
{
    return bulletColorChecked;
}
// The bullet is always drawn at the view's centre (see onDraw).
public PointF getBulletCenter()
{
    return new PointF(getWidth() / 2f, getHeight() / 2f);
}
// Registers a listener on the bullet-colour animator.
public void addBulletColorAnimationListener(Animator.AnimatorListener listener)
{
    bulletColorAnimator.addListener(listener);
}
// Unregisters a listener from the bullet-colour animator.
public void removeBulletColorAnimationListener(Animator.AnimatorListener listener)
{
    bulletColorAnimator.removeListener(listener);
}
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.jps.model.java.impl;
import com.intellij.openapi.util.io.FileUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.model.java.impl.runConfiguration.JpsApplicationRunConfigurationPropertiesImpl;
import org.jetbrains.jps.model.java.runConfiguration.JpsApplicationRunConfigurationProperties;
import org.jetbrains.jps.model.java.runConfiguration.JpsApplicationRunConfigurationState;
import org.jetbrains.jps.util.JpsPathUtil;
import org.jetbrains.jps.model.JpsDummyElement;
import org.jetbrains.jps.model.JpsGlobal;
import org.jetbrains.jps.model.JpsProject;
import org.jetbrains.jps.model.java.*;
import org.jetbrains.jps.model.java.compiler.JpsJavaCompilerConfiguration;
import org.jetbrains.jps.model.java.impl.compiler.JpsJavaCompilerConfigurationImpl;
import org.jetbrains.jps.model.library.JpsOrderRootType;
import org.jetbrains.jps.model.library.JpsTypedLibrary;
import org.jetbrains.jps.model.library.sdk.JpsSdk;
import org.jetbrains.jps.model.library.sdk.JpsSdkReference;
import org.jetbrains.jps.model.module.*;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
* @author nik
*/
public class JpsJavaExtensionServiceImpl extends JpsJavaExtensionService {
    // Java project extension: create-on-demand vs. nullable lookup.
    @NotNull
    @Override
    public JpsJavaProjectExtension getOrCreateProjectExtension(@NotNull JpsProject project) {
        return project.getContainer().getOrSetChild(JavaProjectExtensionRole.INSTANCE);
    }

    @Nullable
    @Override
    public JpsJavaProjectExtension getProjectExtension(@NotNull JpsProject project) {
        return project.getContainer().getChild(JavaProjectExtensionRole.INSTANCE);
    }

    // Java module extension: create-on-demand vs. nullable lookup.
    @NotNull
    @Override
    public JpsJavaModuleExtension getOrCreateModuleExtension(@NotNull JpsModule module) {
        return module.getContainer().getOrSetChild(JavaModuleExtensionRole.INSTANCE);
    }

    // Java dependency extension (scope/exported flags): create-on-demand vs. nullable lookup.
    @NotNull
    @Override
    public JpsJavaDependencyExtension getOrCreateDependencyExtension(@NotNull JpsDependencyElement dependency) {
        return dependency.getContainer().getOrSetChild(JpsJavaDependencyExtensionRole.INSTANCE);
    }

    @Override
    public JpsJavaDependencyExtension getDependencyExtension(@NotNull JpsDependencyElement dependency) {
        return dependency.getContainer().getChild(JpsJavaDependencyExtensionRole.INSTANCE);
    }

    @Override
    @Nullable
    public JpsJavaModuleExtension getModuleExtension(@NotNull JpsModule module) {
        return module.getContainer().getChild(JavaModuleExtensionRole.INSTANCE);
    }

    // Exploded-directory extension: create-on-demand vs. nullable lookup.
    @Override
    @NotNull
    public ExplodedDirectoryModuleExtension getOrCreateExplodedDirectoryExtension(@NotNull JpsModule module) {
        return module.getContainer().getOrSetChild(ExplodedDirectoryModuleExtensionImpl.ExplodedDirectoryModuleExtensionRole.INSTANCE);
    }

    @Override
    @Nullable
    public ExplodedDirectoryModuleExtension getExplodedDirectoryExtension(@NotNull JpsModule module) {
        return module.getContainer().getChild(ExplodedDirectoryModuleExtensionImpl.ExplodedDirectoryModuleExtensionRole.INSTANCE);
    }

    // Filters the module's dependencies by classpath kind and, optionally, the exported flag.
    @NotNull
    @Override
    public List<JpsDependencyElement> getDependencies(JpsModule module, JpsJavaClasspathKind classpathKind, boolean exportedOnly) {
        final List<JpsDependencyElement> result = new ArrayList<JpsDependencyElement>();
        for (JpsDependencyElement dependencyElement : module.getDependenciesList().getDependencies()) {
            final JpsJavaDependencyExtension extension = getDependencyExtension(dependencyElement);
            // && binds tighter than ||: an element with no extension is always included,
            // even when exportedOnly is set. NOTE(review): presumably intentional — confirm.
            if (extension == null || extension.getScope().isIncludedIn(classpathKind) && (!exportedOnly || extension.isExported())) {
                result.add(dependencyElement);
            }
        }
        return result;
    }

    // Module-level language level wins; otherwise fall back to the project-level
    // setting; null when neither is configured.
    @Override
    public LanguageLevel getLanguageLevel(JpsModule module) {
        final JpsJavaModuleExtension moduleExtension = getModuleExtension(module);
        if (moduleExtension == null) return null;
        final LanguageLevel languageLevel = moduleExtension.getLanguageLevel();
        if (languageLevel != null) return languageLevel;
        final JpsJavaProjectExtension projectExtension = getProjectExtension(module.getProject());
        return projectExtension != null ? projectExtension.getLanguageLevel() : null;
    }

    // Resolves the module's output URL. When output is inherited, derives
    // <projectOutput>/(test|production)/<moduleName>; null when not configured.
    @Override
    public String getOutputUrl(JpsModule module, boolean forTests) {
        final JpsJavaModuleExtension extension = getModuleExtension(module);
        if (extension == null) return null;
        if (extension.isInheritOutput()) {
            JpsJavaProjectExtension projectExtension = getProjectExtension(module.getProject());
            if (projectExtension == null) return null;
            final String url = projectExtension.getOutputUrl();
            if (url == null) return null;
            return url + "/" + (forTests ? "test" : "production") + "/" + module.getName();
        }
        return forTests ? extension.getTestOutputUrl() : extension.getOutputUrl();
    }

    @Nullable
    @Override
    public File getOutputDirectory(JpsModule module, boolean forTests) {
        // File-system view of getOutputUrl; null when no output URL is configured.
        String url = getOutputUrl(module, forTests);
        return url != null ? JpsPathUtil.urlToFile(url) : null;
    }

    // Registers a JDK under the given name/home, detecting its version and
    // attaching the JDK class roots as compiled roots.
    @Override
    public JpsTypedLibrary<JpsSdk<JpsDummyElement>> addJavaSdk(@NotNull JpsGlobal global, @NotNull String name, @NotNull String homePath) {
        String version = JdkVersionDetector.getInstance().detectJdkVersion(homePath);
        JpsTypedLibrary<JpsSdk<JpsDummyElement>> sdk = global.addSdk(name, homePath, version, JpsJavaSdkType.INSTANCE);
        File homeDir = new File(FileUtil.toSystemDependentName(homePath));
        List<File> roots = JavaSdkUtil.getJdkClassesRoots(homeDir, false);
        for (File root : roots) {
            sdk.addRoot(root, JpsOrderRootType.COMPILED);
        }
        return sdk;
    }

    @Nullable
    @Override
    public JpsJavaCompilerConfiguration getCompilerConfiguration(@NotNull JpsProject project) {
        return project.getContainer().getChild(JpsJavaCompilerConfigurationImpl.ROLE);
    }

    // Lazily installs a default compiler configuration on first access.
    @NotNull
    @Override
    public JpsJavaCompilerConfiguration getOrCreateCompilerConfiguration(@NotNull JpsProject project) {
        JpsJavaCompilerConfiguration configuration = getCompilerConfiguration(project);
        if (configuration == null) {
            configuration = project.getContainer().setChild(JpsJavaCompilerConfigurationImpl.ROLE, new JpsJavaCompilerConfigurationImpl());
        }
        return configuration;
    }

    @NotNull
    @Override
    public JpsSdkReference<JpsDummyElement> createWrappedJavaSdkReference(@NotNull JpsJavaSdkTypeWrapper sdkType,
                                                                         @NotNull JpsSdkReference<?> wrapperReference) {
        return new JpsWrappedJavaSdkReferenceImpl(sdkType, wrapperReference);
    }

    @NotNull
    @Override
    public JpsApplicationRunConfigurationProperties createRunConfigurationProperties(JpsApplicationRunConfigurationState state) {
        return new JpsApplicationRunConfigurationPropertiesImpl(state);
    }

    @NotNull
    @Override
    public JavaSourceRootProperties createSourceRootProperties(@NotNull String packagePrefix, boolean isGenerated) {
        return new JavaSourceRootProperties(packagePrefix, isGenerated);
    }

    // Convenience overload: non-generated source root.
    @NotNull
    @Override
    public JavaSourceRootProperties createSourceRootProperties(@NotNull String packagePrefix) {
        return createSourceRootProperties(packagePrefix, false);
    }

    @Override
    @NotNull
    public JpsProductionModuleOutputPackagingElement createProductionModuleOutput(@NotNull JpsModuleReference moduleReference) {
        return new JpsProductionModuleOutputPackagingElementImpl(moduleReference);
    }

    @Override
    @NotNull
    public JpsTestModuleOutputPackagingElement createTestModuleOutput(@NotNull JpsModuleReference moduleReference) {
        return new JpsTestModuleOutputPackagingElementImpl(moduleReference);
    }

    // Dependency enumerators over an explicit module set, a whole project, or a single module.
    @Override
    public JpsJavaDependenciesEnumerator enumerateDependencies(Collection<JpsModule> modules) {
        return new JpsJavaDependenciesEnumeratorImpl(modules);
    }

    @Override
    protected JpsJavaDependenciesEnumerator enumerateDependencies(JpsProject project) {
        return new JpsJavaDependenciesEnumeratorImpl(project.getModules());
    }

    @Override
    protected JpsJavaDependenciesEnumerator enumerateDependencies(JpsModule module) {
        return new JpsJavaDependenciesEnumeratorImpl(Collections.singletonList(module));
    }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.server.services.dmn;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.Response.Status;
import javax.xml.XMLConstants;
import javax.xml.namespace.QName;
import com.fasterxml.jackson.core.type.TypeReference;
import org.kie.api.builder.ReleaseId;
import org.kie.api.runtime.KieRuntimeFactory;
import org.kie.dmn.api.core.DMNContext;
import org.kie.dmn.api.core.DMNModel;
import org.kie.dmn.api.core.DMNResult;
import org.kie.dmn.api.core.DMNRuntime;
import org.kie.dmn.api.core.ast.DecisionNode;
import org.kie.dmn.api.core.ast.DecisionServiceNode;
import org.kie.dmn.api.core.ast.InputDataNode;
import org.kie.dmn.api.core.event.DMNRuntimeEventListener;
import org.kie.dmn.backend.marshalling.v1x.DMNMarshallerFactory;
import org.kie.dmn.core.ast.InputDataNodeImpl;
import org.kie.dmn.core.ast.ItemDefNodeImpl;
import org.kie.dmn.core.internal.utils.DMNEvaluationUtils;
import org.kie.dmn.core.internal.utils.DMNEvaluationUtils.DMNEvaluationResult;
import org.kie.dmn.core.internal.utils.DynamicDMNContextBuilder;
import org.kie.dmn.model.api.BusinessKnowledgeModel;
import org.kie.dmn.model.api.DRGElement;
import org.kie.dmn.model.api.Decision;
import org.kie.dmn.model.api.Definitions;
import org.kie.dmn.model.api.InputData;
import org.kie.dmn.model.api.ItemDefinition;
import org.kie.server.api.model.ServiceResponse;
import org.kie.server.api.model.dmn.DMNContextKS;
import org.kie.server.api.model.dmn.DMNDecisionInfo;
import org.kie.server.api.model.dmn.DMNDecisionServiceInfo;
import org.kie.server.api.model.dmn.DMNInputDataInfo;
import org.kie.server.api.model.dmn.DMNItemDefinitionInfo;
import org.kie.server.api.model.dmn.DMNModelInfo;
import org.kie.server.api.model.dmn.DMNModelInfoList;
import org.kie.server.api.model.dmn.DMNQNameInfo;
import org.kie.server.api.model.dmn.DMNResultKS;
import org.kie.server.api.model.dmn.DMNUnaryTestsInfo;
import org.kie.server.services.api.KieServerRegistry;
import org.kie.server.services.dmn.modelspecific.DMNFEELComparablePeriodSerializer;
import org.kie.server.services.dmn.modelspecific.KogitoDMNResult;
import org.kie.server.services.dmn.modelspecific.MSConsts;
import org.kie.server.services.dmn.modelspecific.OASGenerator;
import org.kie.server.services.impl.KieContainerInstanceImpl;
import org.kie.server.services.impl.locator.ContainerLocatorProvider;
import org.kie.server.services.impl.marshal.MarshallerHelper;
import org.kie.server.services.prometheus.PrometheusKieServerExtension;
import org.kie.server.services.prometheus.PrometheusMetricsDMNListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Shared service logic behind the DMN KIE-Server endpoints: model listing and
 * introspection, decision evaluation, model-specific (Kogito-style) evaluation,
 * DMN XML retrieval, and OpenAPI generation.
 *
 * Fix: removed the stray {@code e.printStackTrace()} in evaluateDecisions — the
 * exception is already logged with full stack trace via SLF4J on the next line.
 */
public class ModelEvaluatorServiceBase {

    private static final Logger LOG = LoggerFactory.getLogger( ModelEvaluatorServiceBase.class );

    private KieServerRegistry context;
    private MarshallerHelper marshallerHelper;

    // Shared, thread-safe mapper configured for java.time types and FEEL
    // ComparablePeriod serialization; dates/durations serialized as ISO strings.
    private static final com.fasterxml.jackson.databind.ObjectMapper objectMapper = new com.fasterxml.jackson.databind.ObjectMapper()
            .registerModule(new com.fasterxml.jackson.datatype.jsr310.JavaTimeModule())
            .registerModule(new com.fasterxml.jackson.databind.module.SimpleModule()
                    .addSerializer(org.kie.dmn.feel.lang.types.impl.ComparablePeriod.class,
                            new DMNFEELComparablePeriodSerializer()))
            .disable(com.fasterxml.jackson.databind.SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
            .disable(com.fasterxml.jackson.databind.SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS);

    public ModelEvaluatorServiceBase(KieServerRegistry context) {
        this.context = context;
        this.marshallerHelper = new MarshallerHelper(context);
    }

    /**
     * Lists all DMN models deployed in the given container.
     */
    public ServiceResponse<DMNModelInfoList> getModels(String containerId) {
        try {
            KieContainerInstanceImpl kContainer = context.getContainer(containerId, ContainerLocatorProvider.get().getLocator());
            DMNRuntime kieRuntime = KieRuntimeFactory.of(kContainer.getKieContainer().getKieBase()).get(DMNRuntime.class);
            List<DMNModel> models = kieRuntime.getModels();
            List<DMNModelInfo> result = models.stream().map(ModelEvaluatorServiceBase::modelToInfo).collect(Collectors.toList());
            return new ServiceResponse<DMNModelInfoList>(
                    ServiceResponse.ResponseType.SUCCESS,
                    "OK models successfully retrieved from container '" + containerId + "'",
                    new DMNModelInfoList( result ) );
        } catch ( Exception e ) {
            LOG.error( "Error retrieving models from container '" + containerId + "'", e );
            return new ServiceResponse<DMNModelInfoList>(
                    ServiceResponse.ResponseType.FAILURE,
                    "Error retrieving models from container '" + containerId + "'" + e.getMessage(),
                    null );
        }
    }

    /** Converts a runtime DMNModel into its transferable info representation. */
    public static DMNModelInfo modelToInfo(DMNModel model) {
        DMNModelInfo res = new DMNModelInfo();
        res.setNamespace(model.getNamespace());
        res.setName(model.getName());
        res.setId(model.getDefinitions().getId());
        res.setDecisions(model.getDecisions().stream().map(ModelEvaluatorServiceBase::decisionToInfo).collect(Collectors.toSet()));
        res.setDecisionServices(model.getDecisionServices().stream().map(ModelEvaluatorServiceBase::decisionServiceToInfo).collect(Collectors.toSet()));
        res.setInputs(model.getInputs().stream().map(ModelEvaluatorServiceBase::inputDataToInfo).collect(Collectors.toSet()));
        res.setItemDefinitions(model.getItemDefinitions().stream().map(id -> itemDefinitionToInfo(((ItemDefNodeImpl) id).getItemDef())).collect(Collectors.toSet()));
        return res;
    }

    /** Maps a decision-service node to its name/id info. */
    public static DMNDecisionServiceInfo decisionServiceToInfo(DecisionServiceNode dsNode) {
        DMNDecisionServiceInfo res = new DMNDecisionServiceInfo();
        res.setName(dsNode.getName());
        res.setId(dsNode.getId());
        return res;
    }

    /** Maps a decision node to its name/id info. */
    public static DMNDecisionInfo decisionToInfo(DecisionNode decisionNode) {
        DMNDecisionInfo res = new DMNDecisionInfo();
        res.setName(decisionNode.getName());
        res.setId(decisionNode.getId());
        return res;
    }

    /** Maps an input-data node to its info, resolving the typeRef namespace when absent. */
    public static DMNInputDataInfo inputDataToInfo(InputDataNode inputDataNode) {
        DMNInputDataInfo res = new DMNInputDataInfo();
        res.setName(inputDataNode.getName());
        res.setId(inputDataNode.getId());
        InputData id = ((InputDataNodeImpl) inputDataNode).getInputData();
        QName typeRef = id.getVariable().getTypeRef();
        // for InputData sometimes the NS is not really valorized inside the jdk QName as internally ns are resolved by prefix directly.
        if (typeRef != null) {
            if (XMLConstants.NULL_NS_URI.equals(typeRef.getNamespaceURI())) {
                String actualNS = id.getNamespaceURI(typeRef.getPrefix());
                typeRef = new QName(actualNS, typeRef.getLocalPart(), typeRef.getPrefix());
            }
            res.setTypeRef(DMNQNameInfo.of(typeRef));
        }
        return res;
    }

    /** Recursively converts an ItemDefinition (and its components) to info form. */
    public static DMNItemDefinitionInfo itemDefinitionToInfo(ItemDefinition itemDef) {
        DMNItemDefinitionInfo res = new DMNItemDefinitionInfo();
        res.setId(itemDef.getId());
        res.setName(itemDef.getName());
        if (itemDef.getTypeRef() != null) {
            res.setTypeRef(DMNQNameInfo.of(itemDef.getTypeRef()));
        }
        if (itemDef.getAllowedValues() != null) {
            DMNUnaryTestsInfo av = new DMNUnaryTestsInfo();
            av.setText(itemDef.getAllowedValues().getText());
            av.setExpressionLanguage(itemDef.getAllowedValues().getExpressionLanguage());
            res.setAllowedValues(av);
        }
        if (itemDef.getItemComponent() != null && !itemDef.getItemComponent().isEmpty()) {
            List<DMNItemDefinitionInfo> components = itemDef.getItemComponent().stream().map(ModelEvaluatorServiceBase::itemDefinitionToInfo).collect(Collectors.toList());
            res.setItemComponent(components);
        }
        res.setTypeLanguage(itemDef.getTypeLanguage());
        res.setIsCollection(itemDef.isIsCollection());
        return res;
    }

    /**
     * Evaluates decisions in the container using a marshalled DMNContextKS payload.
     */
    public ServiceResponse<DMNResultKS> evaluateDecisions(String containerId, String contextPayload, String marshallingType) {
        try {
            KieContainerInstanceImpl kContainer = context.getContainer(containerId, ContainerLocatorProvider.get().getLocator());
            DMNRuntime dmnRuntime = KieRuntimeFactory.of(kContainer.getKieContainer().getKieBase()).get(DMNRuntime.class);
            wirePrometheus(kContainer, dmnRuntime);
            LOG.debug("Will deserialize payload: {}", contextPayload);
            DMNContextKS evalCtx = marshallerHelper.unmarshal(containerId, contextPayload, marshallingType, DMNContextKS.class);
            DMNEvaluationResult evaluationResult = DMNEvaluationUtils.evaluate(dmnRuntime,
                    evalCtx.getNamespace(),
                    evalCtx.getModelName(),
                    evalCtx.getDmnContext(),
                    evalCtx.getDecisionNames(),
                    evalCtx.getDecisionIds(),
                    evalCtx.getDecisionServiceName());
            DMNResultKS res = new DMNResultKS(evaluationResult.model.getNamespace(),
                    evaluationResult.model.getName(),
                    evalCtx.getDecisionNames(),
                    evaluationResult.result);
            return new ServiceResponse<DMNResultKS>(
                    ServiceResponse.ResponseType.SUCCESS,
                    "OK from container '" + containerId + "'",
                    res );
        } catch ( Exception e ) {
            // LOG.error already records the full stack trace; the previous
            // e.printStackTrace() duplicated it on stderr and is removed.
            LOG.error( "Error from container '" + containerId + "'", e );
            return new ServiceResponse<DMNResultKS>(
                    ServiceResponse.ResponseType.FAILURE,
                    "Error from container '" + containerId + "'" + e.getMessage(),
                    null );
        }
    }

    /**
     * Model-specific evaluation endpoint: evaluates a JSON context against a model
     * (optionally a single decision service) and returns either the full DMNResult
     * or just the resulting context / single output, Kogito-style.
     */
    public Response evaluateModel(String containerId, String modelId, String contextPayload, boolean asDmnResult, String decisionServiceId) {
        try {
            KieContainerInstanceImpl kContainer = context.getContainer(containerId, ContainerLocatorProvider.get().getLocator());
            DMNRuntime dmnRuntime = KieRuntimeFactory.of(kContainer.getKieContainer().getKieBase()).get(DMNRuntime.class);
            List<DMNModel> modelsWithID = dmnRuntime.getModels().stream().filter(m -> m.getName().equals(modelId)).collect(Collectors.toList());
            if (modelsWithID.isEmpty()) {
                return Response.status(Status.NOT_FOUND).entity("No model identifies with modelId: " + modelId).build();
            } else if (modelsWithID.size() > 1) {
                return Response.status(Status.NOT_FOUND).entity("More than one existing DMN model having modelId: " + modelId).build();
            }
            DMNModel dmnModel = modelsWithID.get(0);
            DecisionServiceNode determinedDS = null;
            if (decisionServiceId != null) {
                Optional<DecisionServiceNode> dsOpt = dmnModel.getDecisionServices().stream().filter(ds -> ds.getName().equals(decisionServiceId)).findFirst();
                if (!dsOpt.isPresent()) {
                    return Response.status(Status.NOT_FOUND).entity("No decisionService found: " + decisionServiceId).build();
                }
                determinedDS = dsOpt.get();
            }
            Map<String, Object> jsonContextMap = objectMapper.readValue(contextPayload, new TypeReference<Map<String, Object>>() {});
            DMNContext dmnContext = new DynamicDMNContextBuilder(dmnRuntime.newContext(), dmnModel).populateContextWith(jsonContextMap);
            wirePrometheus(kContainer, dmnRuntime);
            DMNResult determinedResult = null;
            if (determinedDS != null) {
                determinedResult = dmnRuntime.evaluateDecisionService(dmnModel, dmnContext, determinedDS.getName());
            } else {
                determinedResult = dmnRuntime.evaluateAll(dmnModel, dmnContext);
            }
            // at this point the DMN service has executed the evaluation, so it's full model-specific endpoint semantics.
            KogitoDMNResult result = new KogitoDMNResult(dmnModel.getNamespace(), dmnModel.getName(), determinedResult);
            if (asDmnResult) {
                return Response.ok().entity(objectMapper.writeValueAsString(result)).build();
            }
            String responseJSON = null;
            // Single-output decision service: unwrap and return just that value.
            if (determinedDS != null && determinedDS.getDecisionService().getOutputDecision().size() == 1) {
                responseJSON = objectMapper.writeValueAsString(result.getDecisionResults().get(0).getResult());
            } else {
                responseJSON = objectMapper.writeValueAsString(result.getDmnContext());
            }
            ResponseBuilder response = Response.ok();
            if (result.hasErrors()) {
                String infoWarns = result.getMessages().stream().map(m -> m.getLevel() + " " + m.getMessage()).collect(java.util.stream.Collectors.joining(", "));
                response.header(MSConsts.KOGITO_DECISION_INFOWARN_HEADER, infoWarns);
            }
            response.entity(responseJSON);
            return response.build();
        } catch (Exception e) {
            LOG.error("Error from container '" + containerId + "'", e);
            return Response.serverError().entity(e.getMessage()).build();
        }
    }

    /**
     * Attaches Prometheus DMN listeners (default + custom) to the runtime, when
     * the Prometheus extension is active; avoids re-adding custom listeners.
     */
    private void wirePrometheus(KieContainerInstanceImpl kContainer, DMNRuntime dmnRuntime) {
        PrometheusKieServerExtension extension = (PrometheusKieServerExtension) context.getServerExtension(PrometheusKieServerExtension.EXTENSION_NAME);
        if (extension != null) {
            //default handler
            PrometheusMetricsDMNListener listener = new PrometheusMetricsDMNListener(PrometheusKieServerExtension.getMetrics(), kContainer);
            dmnRuntime.addListener(listener);
            //custom handler
            List<DMNRuntimeEventListener> listeners = extension.getDMNRuntimeListeners(kContainer);
            listeners.forEach(l -> {
                if (!dmnRuntime.getListeners().contains(l)) {
                    dmnRuntime.addListener(l);
                }
            });
        }
    }

    public KieServerRegistry getKieServerRegistry() {
        return this.context;
    }

    /**
     * Returns the model's DMN XML with decision expressions and BKM encapsulated
     * logic stripped out (interface-only view).
     */
    public Response getModel(String containerId, String modelId) {
        try {
            KieContainerInstanceImpl kContainer = context.getContainer(containerId, ContainerLocatorProvider.get().getLocator());
            DMNRuntime dmnRuntime = KieRuntimeFactory.of(kContainer.getKieContainer().getKieBase()).get(DMNRuntime.class);
            List<DMNModel> modelsWithID = dmnRuntime.getModels().stream().filter(m -> m.getName().equals(modelId)).collect(Collectors.toList());
            if (modelsWithID.isEmpty()) {
                return Response.status(Status.NOT_FOUND).entity("No model identifies with modelId: " + modelId).build();
            } else if (modelsWithID.size() > 1) {
                return Response.status(Status.NOT_FOUND).entity("More than one existing DMN model having modelId: " + modelId).build();
            }
            DMNModel dmnModel = modelsWithID.get(0);
            Definitions definitions = dmnModel.getDefinitions();
            for (DRGElement drg : definitions.getDrgElement()) {
                if (drg instanceof Decision) {
                    Decision decision = (Decision) drg;
                    decision.setExpression(null);
                } else if (drg instanceof BusinessKnowledgeModel) {
                    BusinessKnowledgeModel bkm = (BusinessKnowledgeModel) drg;
                    bkm.setEncapsulatedLogic(null);
                }
            }
            String xml = DMNMarshallerFactory.newDefaultMarshaller().marshal(definitions);
            return Response.ok().entity(xml).build();
        } catch (Exception e) {
            LOG.error("Error from container '" + containerId + "'", e);
            return Response.serverError().entity(e.getMessage()).build();
        }
    }

    /** Generates the OpenAPI document (JSON or YAML) for the container's models. */
    public Response getOAS(String containerId, boolean asJSON) {
        try {
            KieContainerInstanceImpl kContainer = context.getContainer(containerId, ContainerLocatorProvider.get().getLocator());
            ReleaseId resolvedReleaseId = kContainer.getKieContainer().getResolvedReleaseId();
            DMNRuntime dmnRuntime = KieRuntimeFactory.of(kContainer.getKieContainer().getKieBase()).get(DMNRuntime.class);
            Collection<DMNModel> models = dmnRuntime.getModels();
            String content = new OASGenerator(containerId, resolvedReleaseId).generateOAS(models, asJSON);
            return Response.ok().entity(content).build();
        } catch (Exception e) {
            LOG.error("Error from container '" + containerId + "'", e);
            return Response.serverError().entity(e.getMessage()).build();
        }
    }
}
| |
/*
* Copyright 2019-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.k8snode.api;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableSet;
import org.apache.commons.lang.StringUtils;
import org.onlab.packet.IpAddress;
import org.onlab.packet.IpPrefix;
import java.util.Objects;
import java.util.Set;
import static com.google.common.base.Preconditions.checkArgument;
import static org.onosproject.k8snode.api.Constants.DEFAULT_CLUSTER_NAME;
import static org.onosproject.k8snode.api.K8sApiConfig.Mode.NORMAL;
import static org.onosproject.k8snode.api.K8sApiConfig.Scheme.HTTPS;
/**
* Default implementation of kubernetes API configuration.
*/
public final class DefaultK8sApiConfig implements K8sApiConfig {
// Guava checkArgument error templates substitute "%s" placeholders; the bare "%"
// used previously was never substituted, yielding messages like
// "API Config % cannot be null [scheme]" instead of "API Config scheme cannot be null".
private static final String NOT_NULL_MSG = "API Config %s cannot be null";
private static final int SHORT_NAME_LENGTH = 10;
private final String clusterName;
private final int segmentId;
private final IpPrefix extNetworkCidr;
private final Scheme scheme;
private final Mode mode;
private final IpAddress ipAddress;
private final int port;
private final State state;
private final String token;
private final String caCertData;
private final String clientCertData;
private final String clientKeyData;
private final Set<HostNodesInfo> infos;
private final boolean dvr;
// Instantiated only through Builder (which performs the argument validation);
// plain field assignment here, no defensive copies.
private DefaultK8sApiConfig(String clusterName, int segmentId, IpPrefix extNetworkCidr,
                            Scheme scheme, IpAddress ipAddress, int port,
                            Mode mode, State state, String token, String caCertData,
                            String clientCertData, String clientKeyData,
                            Set<HostNodesInfo> infos, boolean dvr) {
    this.clusterName = clusterName;
    this.segmentId = segmentId;
    this.extNetworkCidr = extNetworkCidr;
    this.scheme = scheme;
    this.ipAddress = ipAddress;
    this.port = port;
    this.mode = mode;
    this.state = state;
    this.token = token;
    this.caCertData = caCertData;
    this.clientCertData = clientCertData;
    this.clientKeyData = clientKeyData;
    this.infos = infos;
    this.dvr = dvr;
}
@Override
public String clusterName() {
    // Full (untruncated) cluster name.
    return clusterName;
}
@Override
public String clusterShortName() {
    // Truncated to at most SHORT_NAME_LENGTH (10) characters.
    return StringUtils.substring(clusterName, 0, SHORT_NAME_LENGTH);
}
@Override
public int segmentId() {
    // Tunnel/segment identifier for this cluster.
    return segmentId;
}
@Override
public IpPrefix extNetworkCidr() {
    // External network CIDR; may be null when not configured.
    return extNetworkCidr;
}
@Override
public Scheme scheme() {
    // HTTP or HTTPS scheme of the API server endpoint.
    return scheme;
}
@Override
public IpAddress ipAddress() {
    // API server IP address.
    return ipAddress;
}
@Override
public int port() {
    // API server TCP port.
    return port;
}
@Override
public State state() {
    // Current lifecycle state of this config.
    return state;
}
@Override
public Mode mode() {
    // Deployment mode (e.g. NORMAL or pass-through).
    return mode;
}
@Override
public K8sApiConfig updateState(State newState) {
    // Immutable update: rebuilds a complete copy with only the state replaced.
    return new Builder()
            .clusterName(clusterName)
            .segmentId(segmentId)
            .extNetworkCidr(extNetworkCidr)
            .scheme(scheme)
            .ipAddress(ipAddress)
            .port(port)
            .state(newState)
            .mode(mode)
            .token(token)
            .caCertData(caCertData)
            .clientCertData(clientCertData)
            .clientKeyData(clientKeyData)
            .infos(infos)
            .dvr(dvr)
            .build();
}
@Override
public String token() {
    // Bearer token used for API authentication; may be null.
    return token;
}
@Override
public String caCertData() {
    // CA certificate data; may be null for non-TLS setups.
    return caCertData;
}
@Override
public String clientCertData() {
    // Client certificate data; may be null for non-TLS setups.
    return clientCertData;
}
@Override
public String clientKeyData() {
    // Client private key data; may be null for non-TLS setups.
    return clientKeyData;
}
@Override
public Set<HostNodesInfo> infos() {
    // Defensive immutable copy so callers cannot mutate internal state.
    return ImmutableSet.copyOf(infos);
}
@Override
public boolean dvr() {
    // Whether distributed virtual routing is enabled.
    return dvr;
}
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    DefaultK8sApiConfig that = (DefaultK8sApiConfig) o;
    // Objects.equals is used for reference-typed members: the original compared
    // extNetworkCidr with ==, which tests identity rather than value equality,
    // and bare .equals calls would throw NPE when optional fields (token, cert
    // data) are absent. Field list must stay in sync with hashCode().
    return port == that.port &&
            scheme == that.scheme &&
            Objects.equals(clusterName, that.clusterName) &&
            segmentId == that.segmentId &&
            Objects.equals(extNetworkCidr, that.extNetworkCidr) &&
            Objects.equals(ipAddress, that.ipAddress) &&
            mode == that.mode &&
            state == that.state &&
            Objects.equals(token, that.token) &&
            Objects.equals(caCertData, that.caCertData) &&
            Objects.equals(clientCertData, that.clientCertData) &&
            Objects.equals(clientKeyData, that.clientKeyData) &&
            Objects.equals(infos, that.infos) &&
            dvr == that.dvr;
}
    // hashes all fields compared by equals (Objects.hash is null-tolerant,
    // so nullable fields such as extNetworkCidr and token are safe here)
    @Override
    public int hashCode() {
        return Objects.hash(clusterName, segmentId, extNetworkCidr, scheme, ipAddress, port,
                mode, state, token, caCertData, clientCertData, clientKeyData, infos, dvr);
    }
    // diagnostic rendering of all fields; note the label strings
    // ("segmentID", "extNetworkCIDR") intentionally differ from the field
    // names and are part of the existing log output format
    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("clusterName", clusterName)
                .add("segmentID", segmentId)
                .add("extNetworkCIDR", extNetworkCidr)
                .add("scheme", scheme)
                .add("ipAddress", ipAddress)
                .add("port", port)
                .add("mode", mode)
                .add("state", state)
                .add("token", token)
                .add("caCertData", caCertData)
                .add("clientCertData", clientCertData)
                .add("clientKeyData", clientKeyData)
                .add("infos", infos)
                .add("dvr", dvr)
                .toString();
    }
    /**
     * Returns new builder instance.
     *
     * @return kubernetes API server config builder
     */
    public static Builder builder() {
        return new Builder();
    }
    /**
     * Builder for {@link DefaultK8sApiConfig}.
     *
     * Required fields: scheme, ipAddress, state (always), plus the three
     * TLS fields when the scheme is HTTPS. clusterName, mode and infos
     * receive defaults in {@link #build()} when unset.
     */
    public static final class Builder implements K8sApiConfig.Builder {
        private String clusterName;
        private int segmentId;
        private IpPrefix extNetworkCidr;
        private Scheme scheme;
        private Mode mode;
        private IpAddress ipAddress;
        private int port;
        private State state;
        private String token;
        private String caCertData;
        private String clientCertData;
        private String clientKeyData;
        private Set<HostNodesInfo> infos;
        private boolean dvr;
        /**
         * Validates required fields, applies defaults, and constructs the
         * immutable config.
         *
         * @throws IllegalArgumentException if a required field is missing
         */
        @Override
        public K8sApiConfig build() {
            checkArgument(scheme != null, NOT_NULL_MSG, "scheme");
            checkArgument(ipAddress != null, NOT_NULL_MSG, "ipAddress");
            checkArgument(state != null, NOT_NULL_MSG, "state");
            // TLS material is mandatory only for HTTPS endpoints
            if (scheme == HTTPS) {
                checkArgument(caCertData != null, NOT_NULL_MSG, "caCertData");
                checkArgument(clientCertData != null, NOT_NULL_MSG, "clientCertData");
                checkArgument(clientKeyData != null, NOT_NULL_MSG, "clientKeyData");
            }
            // defaults for the optional fields
            if (StringUtils.isEmpty(clusterName)) {
                clusterName = DEFAULT_CLUSTER_NAME;
            }
            if (mode == null) {
                mode = NORMAL;
            }
            if (infos == null) {
                infos = ImmutableSet.of();
            }
            return new DefaultK8sApiConfig(clusterName, segmentId, extNetworkCidr, scheme, ipAddress,
                    port, mode, state, token, caCertData, clientCertData, clientKeyData, infos, dvr);
        }
        @Override
        public Builder clusterName(String clusterName) {
            this.clusterName = clusterName;
            return this;
        }
        @Override
        public Builder segmentId(int segmentId) {
            this.segmentId = segmentId;
            return this;
        }
        @Override
        public K8sApiConfig.Builder extNetworkCidr(IpPrefix extNetworkCidr) {
            this.extNetworkCidr = extNetworkCidr;
            return this;
        }
        @Override
        public Builder scheme(Scheme scheme) {
            this.scheme = scheme;
            return this;
        }
        @Override
        public Builder ipAddress(IpAddress ipAddress) {
            this.ipAddress = ipAddress;
            return this;
        }
        @Override
        public Builder port(int port) {
            this.port = port;
            return this;
        }
        @Override
        public Builder state(State state) {
            this.state = state;
            return this;
        }
        @Override
        public K8sApiConfig.Builder mode(Mode mode) {
            this.mode = mode;
            return this;
        }
        @Override
        public Builder token(String token) {
            this.token = token;
            return this;
        }
        @Override
        public Builder caCertData(String caCertData) {
            this.caCertData = caCertData;
            return this;
        }
        @Override
        public Builder clientCertData(String clientCertData) {
            this.clientCertData = clientCertData;
            return this;
        }
        @Override
        public Builder clientKeyData(String clientKeyData) {
            this.clientKeyData = clientKeyData;
            return this;
        }
        @Override
        public K8sApiConfig.Builder infos(Set<HostNodesInfo> infos) {
            this.infos = infos;
            return this;
        }
        @Override
        public K8sApiConfig.Builder dvr(boolean dvr) {
            this.dvr = dvr;
            return this;
        }
    }
}
| |
/*
*******************************************************************************
* Copyright (C) 1996-2009, International Business Machines Corporation and *
* others. All Rights Reserved. *
*******************************************************************************
*/
package com.ibm.icu.text;
/**
* A compression engine implementing the Standard Compression Scheme
* for Unicode (SCSU) as outlined in <A
* HREF="http://www.unicode.org/unicode/reports/tr6">Unicode Technical
* Report #6</A>.
*
* <P>The SCSU works by using dynamically positioned <EM>windows</EM>
* consisting of 128 consecutive characters in Unicode. During compression,
* characters within a window are encoded in the compressed stream as the bytes
* <TT>0x7F - 0xFF</TT>. The SCSU provides transparency for the characters
* (bytes) between <TT>U+0000 - U+00FF</TT>. The SCSU approximates the
* storage size of traditional character sets, for example 1 byte per
* character for ASCII or Latin-1 text, and 2 bytes per character for CJK
* ideographs.</P>
*
* <P><STRONG>USAGE</STRONG></P>
*
* <P>The static methods on <TT>UnicodeCompressor</TT> may be used in a
* straightforward manner to compress simple strings:</P>
*
* <PRE>
* String s = ... ; // get string from somewhere
* byte [] compressed = UnicodeCompressor.compress(s);
* </PRE>
*
* <P>The static methods have a fairly large memory footprint.
* For finer-grained control over memory usage,
* <TT>UnicodeCompressor</TT> offers more powerful APIs allowing
* iterative compression:</P>
*
* <PRE>
* // Compress an array "chars" of length "len" using a buffer of 512 bytes
* // to the OutputStream "out"
*
* UnicodeCompressor myCompressor = new UnicodeCompressor();
* final static int BUFSIZE = 512;
* byte [] byteBuffer = new byte [ BUFSIZE ];
* int bytesWritten = 0;
* int [] unicharsRead = new int [1];
* int totalCharsCompressed = 0;
* int totalBytesWritten = 0;
*
* do {
* // do the compression
* bytesWritten = myCompressor.compress(chars, totalCharsCompressed,
* len, unicharsRead,
* byteBuffer, 0, BUFSIZE);
*
* // do something with the current set of bytes
* out.write(byteBuffer, 0, bytesWritten);
*
* // update the no. of characters compressed
* totalCharsCompressed += unicharsRead[0];
*
* // update the no. of bytes written
* totalBytesWritten += bytesWritten;
*
* } while(totalCharsCompressed < len);
*
* myCompressor.reset(); // reuse compressor
* </PRE>
*
* @see UnicodeDecompressor
*
* @author Stephen F. Booth
* @stable ICU 2.4
*/
/*
*
* COMPRESSION STRATEGY
*
* Single Byte Mode
*
* There are three relevant cases.
* If the character is in the current window or is Latin-1 (U+0000,
* U+0009, U+000A, U+000D, U+0020 - U+007F), the character is placed
* directly in the stream as a single byte.
*
* 1. Current character is in defined, inactive window.
* 2. Current character is in undefined window.
* 3. Current character is uncompressible Unicode (U+3400 - U+DFFF).
*
* 1. Current character is in defined, inactive window
* A. Look ahead two characters
* B. If both following characters in same window as current character,
* switch to defined window
* C. If only next character is in same window as current character,
* quote defined window
* D. If neither of following characters is in same window as current,
* quote defined window
*
* 2. Current character is in undefined window
* A. Look ahead two characters
* B. If both following characters in same window as current character,
* define new window
* C. If only next character in same window as current character,
* switch to Unicode mode
* NOTE: This costs us one extra byte. However,
* since we have a limited number of windows to work with, it is
* assumed the cost will pay off later in savings from a window with
* more characters in it.
* D. If neither of following characters in same window as current,
* switch to Unicode mode. Alternative to above: just quote
* Unicode (same byte cost)
*
* 3. Current character is uncompressible Unicode (U+3400 - U+DFFF)
* A. Look ahead one character
* B. If next character in non-compressible region, switch to
* Unicode mode
* C. If next character not in non-compressible region, quote Unicode
*
*
* The following chart illustrates the bytes required for encoding characters
* in each possible way
*
*
* SINGLE BYTE MODE
* Characters in a row with same index
* tag encountered 1 2 3 4
* ---------------------------------------------------------------
* none (in current window) 1 2 3 4
*
* quote Unicode 3 6 9 12
*
* window not switch to Unicode 3 5 7 9 byte
* defined define window 3 4 5 6 cost
*
* window switch to window 2 3 4 5
* defined quote window 2 4 6 8
*
* Unicode Mode
*
* There are two relevant cases.
* If the character is in the non-compressible region
* (U+3400 - U+DFFF), the character is simply written to the
* stream as a pair of bytes.
*
* 1. Current character is in defined, inactive window.
* 2. Current character is in undefined window.
*
 * 1. Current character is in defined, inactive window
* A. Look ahead one character
* B. If next character has same index as current character,
* switch to defined window (and switch to single-byte mode)
* C. If not, just put bytes in stream
*
*
* 2. Current character is in undefined window
* A. Look ahead two characters
* B. If both in same window as current character, define window
* (and switch to single-byte mode)
* C. If only next character in same window, just put bytes in stream
* NOTE: This costs us one extra byte. However,
* since we have a limited number of windows to work with, it is
* assumed the cost will pay off later in savings from a window with
* more characters in it.
* D. If neither in same window, put bytes in stream
*
*
* The following chart illustrates the bytes required for encoding characters
* in each possible way
*
*
* UNICODE MODE
* Characters in a row with same index
* tag encountered 1 2 3 4
* ---------------------------------------------------------------
* none 2 4 6 8
*
* quote Unicode 3 6 9 12
*
* window not define window 3 4 5 6 byte
* defined cost
* window switch to window 2 3 4 5
* defined
*/
public final class UnicodeCompressor implements SCSU
{
    //==========================
    // Class variables
    //==========================
    /** For quick identification of a byte as a single-byte mode tag */
    // lookup table indexed by byte value (0x00 - 0xFF): true iff the byte
    // is an SCSU single-byte-mode tag and must be quoted before being
    // emitted as a literal
    private static boolean [] sSingleTagTable = {
        // table generated by CompressionTableGenerator
        false, true, true, true, true, true, true, true, true, false,
        false, true, true, false, true, true, true, true, true, true,
        true, true, true, true, true, true, true, true, true, true,
        true, true, false, false, false, false, false, false,false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false
    };
    /** For quick identification of a byte as a unicode mode tag */
    // lookup table indexed by byte value (0x00 - 0xFF): true iff the byte
    // is an SCSU unicode-mode tag; used on the high byte of a character
    // to decide whether a UQUOTEU prefix is needed
    private static boolean [] sUnicodeTagTable = {
        // table generated by CompressionTableGenerator
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false, false, false, false, false, false, false, true,
        true, true, true, true, true, true, true, true, true, true,
        true, true, true, true, true, true, true, true, false, false,
        false, false, false, false, false, false, false, false, false,
        false, false
    };
    //==========================
    // Instance variables
    //==========================
    /** Alias to current dynamic window */
    private int fCurrentWindow = 0;
    /** Dynamic compression window offsets */
    // one offset per window; each window covers [offset, offset + 0x80)
    private int [] fOffsets = new int [ NUMWINDOWS ];
    /** Current compression mode */
    // either SINGLEBYTEMODE or UNICODEMODE; persists between compress() calls
    private int fMode = SINGLEBYTEMODE;
    /** Keeps count of times character indices are encountered */
    private int [] fIndexCount = new int [ MAXINDEX + 1 ];
    /** The time stamps indicate when a window was last defined */
    // used by getLRDefinedWindow() to pick the window to recycle
    private int [] fTimeStamps = new int [ NUMWINDOWS ];
    /** The current time stamp */
    private int fTimeStamp = 0;
    /**
     * Create a UnicodeCompressor.
     * Sets all windows to their default values.
     * @see #reset
     * @stable ICU 2.4
     */
    public UnicodeCompressor()
    {
        reset(); // initialize to defaults
    }
/**
* Compress a string into a byte array.
* @param buffer The string to compress.
* @return A byte array containing the compressed characters.
* @see #compress(char [], int, int)
* @stable ICU 2.4
*/
public static byte [] compress(String buffer)
{
return compress(buffer.toCharArray(), 0, buffer.length());
}
/**
* Compress a Unicode character array into a byte array.
* @param buffer The character buffer to compress.
* @param start The start of the character run to compress.
* @param limit The limit of the character run to compress.
* @return A byte array containing the compressed characters.
* @see #compress(String)
* @stable ICU 2.4
*/
public static byte [] compress(char [] buffer,
int start,
int limit)
{
UnicodeCompressor comp = new UnicodeCompressor();
// use a buffer that we know will never overflow
// in the worst case, each character will take 3 bytes
// to encode: UQU, hibyte, lobyte. In this case, the
// compressed data will look like: SCU, UQU, hibyte, lobyte, ...
// buffer must be at least 4 bytes in size
int len = Math.max(4, 3 * (limit - start) + 1);
byte [] temp = new byte [len];
int byteCount = comp.compress(buffer, start, limit, null,
temp, 0, len);
byte [] result = new byte [byteCount];
System.arraycopy(temp, 0, result, 0, byteCount);
return result;
}
    /**
     * Compress a Unicode character array into a byte array.
     *
     * This function will only consume input that can be completely
     * output.
     *
     * Note: compressor state (mode, window definitions, time stamps)
     * is carried across calls, enabling iterative compression of a
     * long text in chunks.
     *
     * @param charBuffer The character buffer to compress.
     * @param charBufferStart The start of the character run to compress.
     * @param charBufferLimit The limit of the character run to compress.
     * @param charsRead A one-element array. If not null, on return
     * the number of characters read from charBuffer.
     * @param byteBuffer A buffer to receive the compressed data. This
     * buffer must be at minimum four bytes in size.
     * @param byteBufferStart The starting offset to which to write
     * compressed data.
     * @param byteBufferLimit The limiting offset for writing compressed data.
     * @return The number of bytes written to byteBuffer.
     * @stable ICU 2.4
     */
    public int compress(char [] charBuffer,
                        int charBufferStart,
                        int charBufferLimit,
                        int [] charsRead,
                        byte [] byteBuffer,
                        int byteBufferStart,
                        int byteBufferLimit)
    {
        // the current position in the target byte buffer
        int bytePos = byteBufferStart;
        // the current position in the source unicode character buffer
        int ucPos = charBufferStart;
        // the current unicode character from the source buffer
        int curUC = INVALIDCHAR;
        // the index for the current character
        int curIndex = -1;
        // look ahead
        int nextUC = INVALIDCHAR;
        int forwardUC = INVALIDCHAR;
        // temporary for window searching
        int whichWindow = 0;
        // high and low bytes of the current unicode character
        int hiByte = 0;
        int loByte = 0;
        // byteBuffer must be at least 4 bytes in size
        if(byteBuffer.length < 4 || (byteBufferLimit - byteBufferStart) < 4)
            throw new IllegalArgumentException("byteBuffer.length < 4");
        // Throughout both loops: whenever the output buffer cannot hold
        // the complete encoding for the current character, ucPos is
        // rewound by one and the main loop exits, so partially-encoded
        // characters are never emitted.
        mainLoop:
        while(ucPos < charBufferLimit && bytePos < byteBufferLimit) {
            switch(fMode) {
            // main single byte mode compression loop
            case SINGLEBYTEMODE:
                singleByteModeLoop:
                while(ucPos < charBufferLimit && bytePos < byteBufferLimit) {
                    // get current char
                    curUC = charBuffer[ucPos++];
                    // get next char
                    if(ucPos < charBufferLimit)
                        nextUC = charBuffer[ucPos];
                    else
                        nextUC = INVALIDCHAR;
                    // chars less than 0x0080 (excluding tags) go straight
                    // in stream
                    if(curUC < 0x0080) {
                        loByte = curUC & 0xFF;
                        // we need to check and make sure we don't
                        // accidentally write a single byte mode tag to
                        // the stream unless it's quoted
                        if(sSingleTagTable[loByte]) {
                            // make sure there is enough room to
                            // write both bytes if not, rewind the
                            // source stream and break out
                            if( (bytePos + 1) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            // since we know the byte is less than 0x80, SQUOTE0
                            // will use static window 0, or ASCII
                            byteBuffer[bytePos++] = (byte) SQUOTE0;
                        }
                        byteBuffer[bytePos++] = (byte) loByte;
                    }
                    // if the char belongs to current window, convert it
                    // to a byte by adding the generic compression offset
                    // and subtracting the window's offset
                    else if(inDynamicWindow(curUC, fCurrentWindow) ) {
                        byteBuffer[bytePos++] = (byte)
                            (curUC - fOffsets[ fCurrentWindow ]
                             + COMPRESSIONOFFSET);
                    }
                    // if char is not in compressible range, either switch to or
                    // quote from unicode
                    else if( ! isCompressible(curUC) ) {
                        // only check next character if it is valid
                        if(nextUC != INVALIDCHAR && isCompressible(nextUC)) {
                            // make sure there is enough room to
                            // write all three bytes if not,
                            // rewind the source stream and break
                            // out
                            if( (bytePos + 2) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            byteBuffer[bytePos++] = (byte) SQUOTEU;
                            byteBuffer[bytePos++] = (byte) (curUC >>> 8);
                            byteBuffer[bytePos++] = (byte) (curUC & 0xFF);
                        }
                        else {
                            // make sure there is enough room to
                            // write all four bytes if not, rewind
                            // the source stream and break out
                            if((bytePos + 3) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            byteBuffer[bytePos++] = (byte) SCHANGEU;
                            hiByte = curUC >>> 8;
                            loByte = curUC & 0xFF;
                            if(sUnicodeTagTable[hiByte])
                                // add quote Unicode tag
                                byteBuffer[bytePos++] = (byte) UQUOTEU;
                            byteBuffer[bytePos++] = (byte) hiByte;
                            byteBuffer[bytePos++] = (byte) loByte;
                            fMode = UNICODEMODE;
                            break singleByteModeLoop;
                        }
                    }
                    // if the char is in a currently defined dynamic
                    // window, figure out which one, and either switch to
                    // it or quote from it
                    else if((whichWindow = findDynamicWindow(curUC))
                            != INVALIDWINDOW ) {
                        // look ahead
                        if( (ucPos + 1) < charBufferLimit )
                            forwardUC = charBuffer[ucPos + 1];
                        else
                            forwardUC = INVALIDCHAR;
                        // all three chars in same window, switch to that
                        // window inDynamicWindow will return false for
                        // INVALIDCHAR
                        if(inDynamicWindow(nextUC, whichWindow)
                           && inDynamicWindow(forwardUC, whichWindow)) {
                            // make sure there is enough room to
                            // write both bytes if not, rewind the
                            // source stream and break out
                            if( (bytePos + 1) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            byteBuffer[bytePos++] = (byte)(SCHANGE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte)
                                (curUC - fOffsets[whichWindow]
                                 + COMPRESSIONOFFSET);
                            fTimeStamps [ whichWindow ] = ++fTimeStamp;
                            fCurrentWindow = whichWindow;
                        }
                        // either only next char or neither in same
                        // window, so quote
                        else {
                            // make sure there is enough room to
                            // write both bytes if not, rewind the
                            // source stream and break out
                            if((bytePos + 1) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            byteBuffer[bytePos++] = (byte) (SQUOTE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte)
                                (curUC - fOffsets[whichWindow]
                                 + COMPRESSIONOFFSET);
                        }
                    }
                    // if a static window is defined, and the following
                    // character is not in that static window, quote from
                    // the static window Note: to quote from a static
                    // window, don't add 0x80
                    else if((whichWindow = findStaticWindow(curUC))
                            != INVALIDWINDOW
                            && ! inStaticWindow(nextUC, whichWindow) ) {
                        // make sure there is enough room to write both
                        // bytes if not, rewind the source stream and
                        // break out
                        if((bytePos + 1) >= byteBufferLimit)
                            { --ucPos; break mainLoop; }
                        byteBuffer[bytePos++] = (byte) (SQUOTE0 + whichWindow);
                        byteBuffer[bytePos++] = (byte)
                            (curUC - sOffsets[whichWindow]);
                    }
                    // if a window is not defined, decide if we want to
                    // define a new one or switch to unicode mode
                    else {
                        // determine index for current char (char is compressible)
                        curIndex = makeIndex(curUC);
                        fIndexCount[curIndex]++;
                        // look ahead
                        if((ucPos + 1) < charBufferLimit)
                            forwardUC = charBuffer[ucPos + 1];
                        else
                            forwardUC = INVALIDCHAR;
                        // if we have encountered this index at least once
                        // before, define a new window
                        // OR
                        // three chars in a row with same index, define a
                        // new window (makeIndex will return RESERVEDINDEX
                        // for INVALIDCHAR)
                        if((fIndexCount[curIndex] > 1) ||
                           (curIndex == makeIndex(nextUC)
                            && curIndex == makeIndex(forwardUC))) {
                            // make sure there is enough room to write all
                            // three bytes if not, rewind the source
                            // stream and break out
                            if( (bytePos + 2) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            // get least recently defined window
                            whichWindow = getLRDefinedWindow();
                            byteBuffer[bytePos++] = (byte)(SDEFINE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte) curIndex;
                            byteBuffer[bytePos++] = (byte)
                                (curUC - sOffsetTable[curIndex]
                                 + COMPRESSIONOFFSET);
                            fOffsets[whichWindow] = sOffsetTable[curIndex];
                            fCurrentWindow = whichWindow;
                            fTimeStamps [whichWindow] = ++fTimeStamp;
                        }
                        // only two chars in a row with same index, so
                        // switch to unicode mode (makeIndex will return
                        // RESERVEDINDEX for INVALIDCHAR)
                        // OR
                        // three chars have different indices, so switch
                        // to unicode mode
                        else {
                            // make sure there is enough room to write all
                            // four bytes if not, rewind the source stream
                            // and break out
                            if((bytePos + 3) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            byteBuffer[bytePos++] = (byte) SCHANGEU;
                            hiByte = curUC >>> 8;
                            loByte = curUC & 0xFF;
                            if(sUnicodeTagTable[hiByte])
                                // add quote Unicode tag
                                byteBuffer[bytePos++] = (byte) UQUOTEU;
                            byteBuffer[bytePos++] = (byte) hiByte;
                            byteBuffer[bytePos++] = (byte) loByte;
                            fMode = UNICODEMODE;
                            break singleByteModeLoop;
                        }
                    }
                }
                break;
            case UNICODEMODE:
                // main unicode mode compression loop
                unicodeModeLoop:
                while(ucPos < charBufferLimit && bytePos < byteBufferLimit) {
                    // get current char
                    curUC = charBuffer[ucPos++];
                    // get next char
                    if( ucPos < charBufferLimit )
                        nextUC = charBuffer[ucPos];
                    else
                        nextUC = INVALIDCHAR;
                    // if we have two uncompressible chars in a row,
                    // put the current char's bytes in the stream
                    if( ! isCompressible(curUC)
                        || (nextUC != INVALIDCHAR && ! isCompressible(nextUC))) {
                        // make sure there is enough room to write all three bytes
                        // if not, rewind the source stream and break out
                        if( (bytePos + 2) >= byteBufferLimit)
                            { --ucPos; break mainLoop; }
                        hiByte = curUC >>> 8;
                        loByte = curUC & 0xFF;
                        if(sUnicodeTagTable[ hiByte ])
                            // add quote Unicode tag
                            byteBuffer[bytePos++] = (byte) UQUOTEU;
                        byteBuffer[bytePos++] = (byte) hiByte;
                        byteBuffer[bytePos++] = (byte) loByte;
                    }
                    // bytes less than 0x80 can go straight in the stream,
                    // but in single-byte mode
                    else if(curUC < 0x0080) {
                        loByte = curUC & 0xFF;
                        // if two chars in a row below 0x80 and the
                        // current char is not a single-byte mode tag,
                        // switch to single-byte mode
                        if(nextUC != INVALIDCHAR
                           && nextUC < 0x0080 && ! sSingleTagTable[ loByte ] ) {
                            // make sure there is enough room to
                            // write both bytes if not, rewind the
                            // source stream and break out
                            if( (bytePos + 1) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            // use the last-active window
                            whichWindow = fCurrentWindow;
                            byteBuffer[bytePos++] = (byte)(UCHANGE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte) loByte;
                            //fCurrentWindow = 0;
                            fTimeStamps [whichWindow] = ++fTimeStamp;
                            fMode = SINGLEBYTEMODE;
                            break unicodeModeLoop;
                        }
                        // otherwise, just write the bytes to the stream
                        // (this will cover the case of only 1 char less than 0x80
                        // and single-byte mode tags)
                        else {
                            // make sure there is enough room to
                            // write both bytes if not, rewind the
                            // source stream and break out
                            if((bytePos + 1) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            // since the character is less than 0x80, the
                            // high byte is always 0x00 - no need for
                            // (curUC >>> 8)
                            byteBuffer[bytePos++] = (byte) 0x00;
                            byteBuffer[bytePos++] = (byte) loByte;
                        }
                    }
                    // figure out if the current char is in a defined window
                    else if((whichWindow = findDynamicWindow(curUC))
                            != INVALIDWINDOW ) {
                        // if two chars in a row in the same window,
                        // switch to that window and go to single-byte mode
                        // inDynamicWindow will return false for INVALIDCHAR
                        if(inDynamicWindow(nextUC, whichWindow)) {
                            // make sure there is enough room to
                            // write both bytes if not, rewind the
                            // source stream and break out
                            if((bytePos + 1) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            byteBuffer[bytePos++] = (byte)(UCHANGE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte)
                                (curUC - fOffsets[whichWindow]
                                 + COMPRESSIONOFFSET);
                            fTimeStamps [ whichWindow ] = ++fTimeStamp;
                            fCurrentWindow = whichWindow;
                            fMode = SINGLEBYTEMODE;
                            break unicodeModeLoop;
                        }
                        // otherwise, just quote the unicode for the char
                        else {
                            // make sure there is enough room to
                            // write all three bytes if not,
                            // rewind the source stream and break
                            // out
                            if((bytePos + 2) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            hiByte = curUC >>> 8;
                            loByte = curUC & 0xFF;
                            if(sUnicodeTagTable[ hiByte ])
                                // add quote Unicode tag
                                byteBuffer[bytePos++] = (byte) UQUOTEU;
                            byteBuffer[bytePos++] = (byte) hiByte;
                            byteBuffer[bytePos++] = (byte) loByte;
                        }
                    }
                    // char is not in a defined window
                    else {
                        // determine index for current char (char is compressible)
                        curIndex = makeIndex(curUC);
                        fIndexCount[curIndex]++;
                        // look ahead
                        if( (ucPos + 1) < charBufferLimit )
                            forwardUC = charBuffer[ucPos + 1];
                        else
                            forwardUC = INVALIDCHAR;
                        // if we have encountered this index at least once
                        // before, define a new window for it that hasn't
                        // previously been redefined
                        // OR
                        // if three chars in a row with the same index,
                        // define a new window (makeIndex will return
                        // RESERVEDINDEX for INVALIDCHAR)
                        if((fIndexCount[curIndex] > 1) ||
                           (curIndex == makeIndex(nextUC)
                            && curIndex == makeIndex(forwardUC))) {
                            // make sure there is enough room to
                            // write all three bytes if not,
                            // rewind the source stream and break
                            // out
                            if((bytePos + 2) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            // get least recently defined window
                            whichWindow = getLRDefinedWindow();
                            byteBuffer[bytePos++] = (byte)(UDEFINE0 + whichWindow);
                            byteBuffer[bytePos++] = (byte) curIndex;
                            byteBuffer[bytePos++] = (byte)
                                (curUC - sOffsetTable[curIndex]
                                 + COMPRESSIONOFFSET);
                            fOffsets[whichWindow] = sOffsetTable[curIndex];
                            fCurrentWindow = whichWindow;
                            fTimeStamps [whichWindow] = ++fTimeStamp;
                            fMode = SINGLEBYTEMODE;
                            break unicodeModeLoop;
                        }
                        // otherwise just quote the unicode, and save our
                        // windows for longer runs
                        else {
                            // make sure there is enough room to
                            // write all three bytes if not,
                            // rewind the source stream and break
                            // out
                            if((bytePos + 2) >= byteBufferLimit)
                                { --ucPos; break mainLoop; }
                            hiByte = curUC >>> 8;
                            loByte = curUC & 0xFF;
                            if(sUnicodeTagTable[ hiByte ])
                                // add quote Unicode tag
                                byteBuffer[bytePos++] = (byte) UQUOTEU;
                            byteBuffer[bytePos++] = (byte) hiByte;
                            byteBuffer[bytePos++] = (byte) loByte;
                        }
                    }
                }
            } // end switch
        }
        // fill in output parameter
        if(charsRead != null)
            charsRead [0] = (ucPos - charBufferStart);
        // return # of bytes written
        return (bytePos - byteBufferStart);
    }
/**
* Reset the compressor to its initial state.
* @stable ICU 2.4
*/
public void reset()
{
int i;
// reset dynamic windows
fOffsets[0] = 0x0080; // Latin-1
fOffsets[1] = 0x00C0; // Latin-1 Supplement + Latin Extended-A
fOffsets[2] = 0x0400; // Cyrillic
fOffsets[3] = 0x0600; // Arabic
fOffsets[4] = 0x0900; // Devanagari
fOffsets[5] = 0x3040; // Hiragana
fOffsets[6] = 0x30A0; // Katakana
fOffsets[7] = 0xFF00; // Fullwidth ASCII
// reset time stamps
for(i = 0; i < NUMWINDOWS; i++) {
fTimeStamps[i] = 0;
}
// reset count of seen indices
for(i = 0; i <= MAXINDEX; i++ ) {
fIndexCount[i] = 0;
}
fTimeStamp = 0; // Reset current time stamp
fCurrentWindow = 0; // Make current window Latin-1
fMode = SINGLEBYTEMODE; // Always start in single-byte mode
}
    //==========================
    // Determine the index for a character
    //==========================
    /**
     * Create the index value for a character.
     * For more information on this function, refer to table X-3
     * <A HREF="http://www.unicode.org/unicode/reports/tr6">UTR6</A>.
     * Note: the order of these range checks is significant — the named
     * script ranges overlap the generic calculated ranges below and must
     * be tested first.
     * @param c The character in question.
     * @return An index for c
     */
    private static int makeIndex(int c)
    {
        // check the predefined indices
        if(c >= 0x00C0 && c < 0x0140)
            return LATININDEX;
        else if(c >= 0x0250 && c < 0x02D0)
            return IPAEXTENSIONINDEX;
        else if(c >= 0x0370 && c < 0x03F0)
            return GREEKINDEX;
        else if(c >= 0x0530 && c < 0x0590)
            return ARMENIANINDEX;
        else if(c >= 0x3040 && c < 0x30A0)
            return HIRAGANAINDEX;
        else if(c >= 0x30A0 && c < 0x3120)
            return KATAKANAINDEX;
        else if(c >= 0xFF60 && c < 0xFF9F)
            return HALFWIDTHKATAKANAINDEX;
        // calculate index
        else if(c >= 0x0080 && c < 0x3400)
            return (c / 0x80) & 0xFF;
        // the 0xAC00 bias maps U+E000..U+FFFF onto the upper index range
        // per UTR #6 table X-3
        else if(c >= 0xE000 && c <= 0xFFFF)
            return ((c - 0xAC00) / 0x80) & 0xFF;
        // should never happen
        else {
            return RESERVEDINDEX;
        }
    }
//==========================
// Check if a given character fits in a window
//==========================
/**
* Determine if a character is in a dynamic window.
* @param c The character to test
* @param whichWindow The dynamic window the test
* @return true if <TT>c</TT> will fit in <TT>whichWindow</TT>,
* false otherwise.
*/
private boolean inDynamicWindow(int c,
int whichWindow)
{
return (c >= fOffsets[whichWindow]
&& c < (fOffsets[whichWindow] + 0x80));
}
/**
* Determine if a character is in a static window.
* @param c The character to test
* @param whichWindow The static window the test
* @return true if <TT>c</TT> will fit in <TT>whichWindow</TT>,
* false otherwise.
*/
private static boolean inStaticWindow(int c,
int whichWindow)
{
return (c >= sOffsets[whichWindow]
&& c < (sOffsets[whichWindow] + 0x80));
}
//==========================
// Check if a given character is compressible
//==========================
/**
* Determine if a character is compressible.
* @param c The character to test.
* @return true if the <TT>c</TT> is compressible, false otherwise.
*/
private static boolean isCompressible(int c)
{
return (c < 0x3400 || c >= 0xE000);
}
//==========================
// Check if a window is defined for a given character
//==========================
/**
* Determine if a dynamic window for a certain character is defined
* @param c The character in question
* @return The dynamic window containing <TT>c</TT>, or
* INVALIDWINDOW if not defined.
*/
private int findDynamicWindow(int c)
{
// supposedly faster to count down
//for(int i = 0; i < NUMWINDOWS; i++) {
for(int i = NUMWINDOWS - 1; i >= 0; --i) {
if(inDynamicWindow(c, i)) {
++fTimeStamps[i];
return i;
}
}
return INVALIDWINDOW;
}
/**
* Determine if a static window for a certain character is defined
* @param c The character in question
* @return The static window containing <TT>c</TT>, or
* INVALIDWINDOW if not defined.
*/
private static int findStaticWindow(int c)
{
// supposedly faster to count down
//for(int i = 0; i < NUMSTATICWINDOWS; i++) {
for(int i = NUMSTATICWINDOWS - 1; i >= 0; --i) {
if(inStaticWindow(c, i)) {
return i;
}
}
return INVALIDWINDOW;
}
//==========================
// Find the least-recently used window
//==========================
/** Find the least-recently defined window */
private int getLRDefinedWindow()
{
int leastRU = Integer.MAX_VALUE;
int whichWindow = INVALIDWINDOW;
// find least recently used window
// supposedly faster to count down
//for( int i = 0; i < NUMWINDOWS; i++ ) {
for(int i = NUMWINDOWS - 1; i >= 0; --i ) {
if( fTimeStamps[i] < leastRU ) {
leastRU = fTimeStamps[i];
whichWindow = i;
}
}
return whichWindow;
}
}
| |
/*
* Copyright 2001-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.java.generate.element;
import org.jetbrains.java.generate.psi.PsiAdapter;
import consulo.logging.Logger;
import consulo.java.module.util.JavaClassNames;
import com.intellij.psi.JavaPsiFacade;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElementFactory;
import com.intellij.psi.PsiField;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiModifier;
import com.intellij.psi.PsiModifierList;
import com.intellij.psi.PsiType;
import com.intellij.psi.util.PropertyUtil;
/**
 * Factory for creating {@link FieldElement} or {@link ClassElement} objects.
 */
public class ElementFactory
{
    private static final Logger log = Logger.getInstance("#ElementFactory");

    /** Static factory — never instantiated. */
    private ElementFactory()
    {
    }

    /**
     * Creates a new {@link ClassElement} object.
     *
     * @param clazz class information.
     * @return a new {@link ClassElement} object.
     */
    public static ClassElement newClassElement(PsiClass clazz)
    {
        ClassElement element = new ClassElement();

        // names
        element.setName(clazz.getName());
        element.setQualifiedName(clazz.getQualifiedName());

        // superclass (omitted when it is plain java.lang.Object)
        PsiClass superClass = clazz.getSuperClass();
        if(superClass != null && !JavaClassNames.JAVA_LANG_OBJECT.equals(superClass.getQualifiedName()))
        {
            element.setSuperName(superClass.getName());
        }

        // implemented interfaces
        element.setImplementNames(PsiAdapter.getImplementsClassnames(clazz));

        // misc flags
        element.setEnum(clazz.isEnum());
        element.setDeprecated(clazz.isDeprecated());
        element.setException(PsiAdapter.isExceptionClass(clazz));
        element.setAbstract(clazz.hasModifierProperty(PsiModifier.ABSTRACT));
        element.setTypeParams(clazz.getTypeParameters().length);

        return element;
    }

    /**
     * Create a new {@link FieldElement} object.
     *
     * @param field       the {@link com.intellij.psi.PsiField} to get the information from.
     * @param useAccessor when true, prefer the field's getter (if any) as the accessor expression.
     * @return a new {@link FieldElement} object.
     */
    public static FieldElement newFieldElement(PsiField field, boolean useAccessor)
    {
        FieldElement element = new FieldElement();
        element.setName(field.getName());

        // accessor is either "getFoo()" or the raw field name
        PsiMethod getter = useAccessor ? PropertyUtil.findGetterForField(field) : null;
        element.setAccessor(getter != null ? getter.getName() + "()" : field.getName());

        if(PsiAdapter.isConstantField(field))
        {
            element.setConstant(true);
        }
        if(PsiAdapter.isEnumField(field))
        {
            element.setEnum(true);
        }

        PsiModifierList modifiers = field.getModifierList();
        if(modifiers != null)
        {
            if(modifiers.hasModifierProperty(PsiModifier.TRANSIENT))
            {
                element.setModifierTransient(true);
            }
            if(modifiers.hasModifierProperty(PsiModifier.VOLATILE))
            {
                element.setModifierVolatile(true);
            }
        }

        PsiElementFactory factory = JavaPsiFacade.getInstance(field.getProject()).getElementFactory();
        setElementInfo(element, factory, field.getType(), modifiers);

        return element;
    }

    /**
     * Creates a new {@link MethodElement} object.
     *
     * @param method the PSI method object.
     * @return a new {@link MethodElement} object.
     * @since 2.15
     */
    public static MethodElement newMethodElement(PsiMethod method)
    {
        MethodElement element = new MethodElement();
        PsiType returnType = method.getReturnType();
        PsiModifierList modifiers = method.getModifierList();

        // if something is wrong:
        // http://www.intellij.net/forums/thread.jsp?nav=false&forum=18&thread=88676&start=0&msRange=15
        if(returnType == null)
        {
            log.warn("This method does not have a valid return type: " + method.getName() + ", returnType=" + returnType);
            return element;
        }

        PsiElementFactory factory = JavaPsiFacade.getInstance(method.getProject()).getElementFactory();
        setElementInfo(element, factory, returnType, modifiers);

        // names: prefer the getter's field name, fall back to the method name
        String fieldName = PsiAdapter.getGetterFieldName(method);
        element.setName(fieldName == null ? method.getName() : fieldName);
        element.setFieldName(fieldName);
        element.setMethodName(method.getName());

        // getter flag
        element.setGetter(PsiAdapter.isGetterMethod(method));

        // misc flags
        element.setDeprecated(method.isDeprecated());
        element.setReturnTypeVoid(PsiAdapter.isTypeOfVoid(method.getReturnType()));

        // modifiers
        if(modifiers.hasModifierProperty(PsiModifier.ABSTRACT))
        {
            element.setModifierAbstract(true);
        }
        if(modifiers.hasModifierProperty(PsiModifier.SYNCHRONIZED))
        {
            element.setModifierSynchronized(true);
        }

        return element;
    }

    /**
     * Sets the basic element information from the given type.
     *
     * @param element   the element to set information from the type
     * @param factory   PSI element factory used for type queries
     * @param type      the type
     * @param modifiers modifier list (may be null)
     * @since 2.15
     */
    private static void setElementInfo(AbstractElement element, PsiElementFactory factory, PsiType type, PsiModifierList modifiers)
    {
        // type names
        element.setTypeName(PsiAdapter.getTypeClassName(type));
        element.setTypeQualifiedName(PsiAdapter.getTypeQualifiedClassName(type));
        element.setType(type.getCanonicalText());

        // array kinds (object arrays may additionally be string arrays)
        if(PsiAdapter.isObjectArrayType(type))
        {
            element.setObjectArray(true);
            element.setArray(true);
            if(PsiAdapter.isStringArrayType(type))
            {
                element.setStringArray(true);
            }
        }
        else if(PsiAdapter.isPrimitiveArrayType(type))
        {
            element.setPrimitiveArray(true);
            element.setArray(true);
        }

        // collection family
        if(PsiAdapter.isCollectionType(factory, type))
        {
            element.setCollection(true);
        }
        if(PsiAdapter.isListType(factory, type))
        {
            element.setList(true);
        }
        if(PsiAdapter.isSetType(factory, type))
        {
            element.setSet(true);
        }
        if(PsiAdapter.isMapType(factory, type))
        {
            element.setMap(true);
        }

        // common JDK types
        if(PsiAdapter.isPrimitiveType(type))
        {
            element.setPrimitive(true);
        }
        if(PsiAdapter.isObjectType(factory, type))
        {
            element.setObject(true);
        }
        if(PsiAdapter.isStringType(factory, type))
        {
            element.setString(true);
        }
        if(PsiAdapter.isNumericType(factory, type))
        {
            element.setNumeric(true);
        }
        if(PsiAdapter.isDateType(factory, type))
        {
            element.setDate(true);
        }
        if(PsiAdapter.isCalendarType(factory, type))
        {
            element.setCalendar(true);
        }
        if(PsiAdapter.isBooleanType(factory, type))
        {
            element.setBoolean(true);
        }

        // specific primitives
        if(PsiType.VOID.equals(type))
        {
            element.setVoid(true);
        }
        if(PsiType.LONG.equals(type))
        {
            element.setLong(true);
        }
        if(PsiType.FLOAT.equals(type))
        {
            element.setFloat(true);
        }
        if(PsiType.DOUBLE.equals(type))
        {
            element.setDouble(true);
        }
        if(PsiType.BYTE.equals(type))
        {
            element.setByte(true);
        }
        if(PsiType.CHAR.equals(type))
        {
            element.setChar(true);
        }
        if(PsiType.SHORT.equals(type))
        {
            element.setShort(true);
        }
        element.setNestedArray(PsiAdapter.isNestedArray(type));

        // access and other modifiers (public/protected/package/private are
        // mutually exclusive, hence the else-if chain)
        if(modifiers != null)
        {
            if(modifiers.hasModifierProperty(PsiModifier.STATIC))
            {
                element.setModifierStatic(true);
            }
            if(modifiers.hasModifierProperty(PsiModifier.FINAL))
            {
                element.setModifierFinal(true);
            }
            if(modifiers.hasModifierProperty(PsiModifier.PUBLIC))
            {
                element.setModifierPublic(true);
            }
            else if(modifiers.hasModifierProperty(PsiModifier.PROTECTED))
            {
                element.setModifierProtected(true);
            }
            else if(modifiers.hasModifierProperty(PsiModifier.PACKAGE_LOCAL))
            {
                element.setModifierPackageLocal(true);
            }
            else if(modifiers.hasModifierProperty(PsiModifier.PRIVATE))
            {
                element.setModifierPrivate(true);
            }
        }
    }
}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.preferences.privacy;
import android.os.Bundle;
import android.preference.CheckBoxPreference;
import android.preference.Preference;
import android.preference.Preference.OnPreferenceChangeListener;
import android.preference.Preference.OnPreferenceClickListener;
import android.preference.PreferenceFragment;
import android.preference.PreferenceScreen;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.preferences.ButtonPreference;
import org.chromium.chrome.browser.preferences.ChromeBaseCheckBoxPreference;
import org.chromium.chrome.browser.preferences.ManagedPreferenceDelegate;
import org.chromium.chrome.browser.preferences.NetworkPredictionOptions;
import org.chromium.chrome.browser.preferences.PrefServiceBridge;
import org.chromium.chrome.browser.preferences.Preferences;
/**
 * Fragment to keep track of all the privacy related preferences.
 */
public class PrivacyPreferences extends PreferenceFragment
        implements OnPreferenceChangeListener {
    /**
     * Set to true in the {@link Preferences#EXTRA_SHOW_FRAGMENT_ARGUMENTS} bundle to
     * trigger the clear browsing data dialog when showing the privacy preferences.
     */
    public static final String SHOW_CLEAR_BROWSING_DATA_EXTRA =
            "ShowClearBrowsingData";
    // Preference keys; these must match the keys declared in R.xml.privacy_preferences.
    private static final String PREF_NAVIGATION_ERROR = "navigation_error";
    private static final String PREF_SEARCH_SUGGESTIONS = "search_suggestions";
    private static final String PREF_CONTEXTUAL_SEARCH = "contextual_search";
    private static final String PREF_NETWORK_PREDICTIONS = "network_predictions";
    private static final String PREF_NETWORK_PREDICTIONS_NO_CELLULAR =
            "network_predictions_no_cellular";
    private static final String PREF_CRASH_DUMP_UPLOAD = "crash_dump_upload";
    private static final String PREF_CRASH_DUMP_UPLOAD_NO_CELLULAR =
            "crash_dump_upload_no_cellular";
    private static final String PREF_DO_NOT_TRACK = "do_not_track";
    private static final String PREF_CLEAR_BROWSING_DATA = "clear_browsing_data";
    private static final String PREF_USAGE_AND_CRASH_REPORTING = "usage_and_crash_reports";
    // Kept as a field so onDestroy() can dismiss a still-showing progress dialog.
    private ClearBrowsingDataDialogFragment mClearBrowsingDataDialogFragment;
    private ManagedPreferenceDelegate mManagedPreferenceDelegate;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        PrivacyPreferencesManager privacyPrefManager =
                PrivacyPreferencesManager.getInstance(getActivity());
        // Migrate legacy network prediction settings before they are read below.
        privacyPrefManager.migrateNetworkPredictionPreferences();
        addPreferencesFromResource(R.xml.privacy_preferences);
        getActivity().setTitle(R.string.prefs_privacy);
        setHasOptionsMenu(true);
        mManagedPreferenceDelegate = createManagedPreferenceDelegate();
        NetworkPredictionPreference networkPredictionPref =
                (NetworkPredictionPreference) findPreference(PREF_NETWORK_PREDICTIONS);
        ChromeBaseCheckBoxPreference networkPredictionNoCellularPref =
                (ChromeBaseCheckBoxPreference) findPreference(PREF_NETWORK_PREDICTIONS_NO_CELLULAR);
        NetworkPredictionOptions networkPredictionOptions = PrefServiceBridge.getInstance()
                .getNetworkPredictionOptions();
        PreferenceScreen preferenceScreen = getPreferenceScreen();
        boolean isMobileNetworkCapable = privacyPrefManager.isMobileNetworkCapable();
        // Devices with a cellular radio show the multi-choice prediction preference;
        // others show a plain on/off checkbox. The unused variant is removed.
        if (isMobileNetworkCapable) {
            preferenceScreen.removePreference(networkPredictionNoCellularPref);
            networkPredictionPref.setValue(networkPredictionOptions.enumToString());
            networkPredictionPref.setOnPreferenceChangeListener(this);
            networkPredictionPref.setManagedPreferenceDelegate(mManagedPreferenceDelegate);
        } else {
            preferenceScreen.removePreference(networkPredictionPref);
            networkPredictionNoCellularPref.setChecked(
                    networkPredictionOptions != NetworkPredictionOptions.NETWORK_PREDICTION_NEVER);
            networkPredictionNoCellularPref.setOnPreferenceChangeListener(this);
            networkPredictionNoCellularPref.setManagedPreferenceDelegate(
                    mManagedPreferenceDelegate);
        }
        // Display the correct settings fragment according to the user experiment group and to type
        // of the device, by removing not applicable preference fragments.
        CrashDumpUploadPreference uploadCrashDumpPref =
                (CrashDumpUploadPreference) findPreference(PREF_CRASH_DUMP_UPLOAD);
        ChromeBaseCheckBoxPreference uploadCrashDumpNoCellularPref =
                (ChromeBaseCheckBoxPreference) findPreference(PREF_CRASH_DUMP_UPLOAD_NO_CELLULAR);
        if (privacyPrefManager.isCellularExperimentEnabled()) {
            // Experiment group: the combined "usage and crash reports" item is used instead.
            preferenceScreen.removePreference(uploadCrashDumpNoCellularPref);
            preferenceScreen.removePreference(uploadCrashDumpPref);
        } else {
            preferenceScreen.removePreference(findPreference(PREF_USAGE_AND_CRASH_REPORTING));
            if (isMobileNetworkCapable) {
                preferenceScreen.removePreference(uploadCrashDumpNoCellularPref);
                uploadCrashDumpPref.setOnPreferenceChangeListener(this);
                uploadCrashDumpPref.setManagedPreferenceDelegate(mManagedPreferenceDelegate);
            } else {
                preferenceScreen.removePreference(uploadCrashDumpPref);
                uploadCrashDumpNoCellularPref.setOnPreferenceChangeListener(this);
                uploadCrashDumpNoCellularPref.setManagedPreferenceDelegate(
                        mManagedPreferenceDelegate);
            }
        }
        ChromeBaseCheckBoxPreference navigationErrorPref =
                (ChromeBaseCheckBoxPreference) findPreference(PREF_NAVIGATION_ERROR);
        navigationErrorPref.setOnPreferenceChangeListener(this);
        navigationErrorPref.setManagedPreferenceDelegate(mManagedPreferenceDelegate);
        ChromeBaseCheckBoxPreference searchSuggestionsPref =
                (ChromeBaseCheckBoxPreference) findPreference(PREF_SEARCH_SUGGESTIONS);
        searchSuggestionsPref.setOnPreferenceChangeListener(this);
        searchSuggestionsPref.setManagedPreferenceDelegate(mManagedPreferenceDelegate);
        // Contextual search is only listed when the hosting activity enables it.
        if (!((Preferences) getActivity()).isContextualSearchEnabled()) {
            preferenceScreen.removePreference(findPreference(PREF_CONTEXTUAL_SEARCH));
        }
        ButtonPreference clearBrowsingData =
                (ButtonPreference) findPreference(PREF_CLEAR_BROWSING_DATA);
        clearBrowsingData.setOnPreferenceClickListener(new OnPreferenceClickListener() {
            @Override
            public boolean onPreferenceClick(Preference preference) {
                showClearBrowsingDialog();
                return true;
            }
        });
        // Callers may ask for the clear-browsing-data dialog to open immediately.
        if (getArguments() != null) {
            boolean showClearBrowsingData =
                    getArguments().getBoolean(SHOW_CLEAR_BROWSING_DATA_EXTRA, false);
            if (showClearBrowsingData) showClearBrowsingDialog();
        }
        updateSummaries();
    }
    @Override
    public boolean onPreferenceChange(Preference preference, Object newValue) {
        // CrashDumpUploadPreference listens to its own PreferenceChanged to update its text.
        // We have replaced the listener. If we do run into a CrashDumpUploadPreference change,
        // we will call onPreferenceChange to change the displayed text.
        if (preference instanceof CrashDumpUploadPreference) {
            ((CrashDumpUploadPreference) preference).onPreferenceChange(preference, newValue);
        }
        // NetworkPredictionPreference listens to its own PreferenceChanged to update its text.
        // We have replaced the listener. If we do run into a NetworkPredictionPreference change,
        // we will call onPreferenceChange to change the displayed text.
        if (preference instanceof NetworkPredictionPreference) {
            ((NetworkPredictionPreference) preference).onPreferenceChange(preference, newValue);
        }
        // Push the new value into the native preference store.
        String key = preference.getKey();
        if (PREF_SEARCH_SUGGESTIONS.equals(key)) {
            PrefServiceBridge.getInstance().setSearchSuggestEnabled((boolean) newValue);
        } else if (PREF_NETWORK_PREDICTIONS.equals(key)) {
            PrefServiceBridge.getInstance().setNetworkPredictionOptions(
                    NetworkPredictionOptions.stringToEnum((String) newValue));
            ((Preferences) getActivity()).updatePrecachingEnabled();
        } else if (PREF_NETWORK_PREDICTIONS_NO_CELLULAR.equals(key)) {
            PrefServiceBridge.getInstance().setNetworkPredictionOptions((boolean) newValue
                    ? NetworkPredictionOptions.NETWORK_PREDICTION_ALWAYS
                    : NetworkPredictionOptions.NETWORK_PREDICTION_NEVER);
            ((Preferences) getActivity()).updatePrecachingEnabled();
        } else if (PREF_NAVIGATION_ERROR.equals(key)) {
            PrefServiceBridge.getInstance().setResolveNavigationErrorEnabled((boolean) newValue);
        } else if (PREF_CRASH_DUMP_UPLOAD_NO_CELLULAR.equals(key)) {
            PrefServiceBridge.getInstance().setCrashReporting((boolean) newValue);
        } else if (PREF_CRASH_DUMP_UPLOAD.equals(key)) {
            PrivacyPreferencesManager.getInstance(getActivity()).setUploadCrashDump(
                    (String) newValue);
        }
        return true;
    }
    @Override
    public void onResume() {
        super.onResume();
        // Preferences may have changed while the fragment was paused.
        updateSummaries();
    }
    /**
     * Updates the summaries for several preferences.
     */
    public void updateSummaries() {
        PrefServiceBridge prefServiceBridge = PrefServiceBridge.getInstance();
        CheckBoxPreference navigationErrorPref = (CheckBoxPreference) findPreference(
                PREF_NAVIGATION_ERROR);
        navigationErrorPref.setChecked(prefServiceBridge.isResolveNavigationErrorEnabled());
        CheckBoxPreference searchSuggestionsPref = (CheckBoxPreference) findPreference(
                PREF_SEARCH_SUGGESTIONS);
        searchSuggestionsPref.setChecked(prefServiceBridge.isSearchSuggestEnabled());
        Preference doNotTrackPref = findPreference(PREF_DO_NOT_TRACK);
        if (prefServiceBridge.isDoNotTrackEnabled()) {
            doNotTrackPref.setSummary(getActivity().getResources().getText(R.string.text_on));
        } else {
            doNotTrackPref.setSummary(getActivity().getResources().getText(R.string.text_off));
        }
        // The contextual search preference may have been removed in onCreate().
        Preference contextualPref = findPreference(PREF_CONTEXTUAL_SEARCH);
        if (contextualPref != null) {
            if (prefServiceBridge.isContextualSearchDisabled()) {
                contextualPref.setSummary(getActivity().getResources().getText(R.string.text_off));
            } else {
                contextualPref.setSummary(getActivity().getResources().getText(R.string.text_on));
            }
        }
    }
    /**
     * Creates the delegate that reports whether each preference is force-controlled
     * by enterprise policy.
     */
    private ManagedPreferenceDelegate createManagedPreferenceDelegate() {
        return new ManagedPreferenceDelegate() {
            @Override
            public boolean isPreferenceControlledByPolicy(Preference preference) {
                String key = preference.getKey();
                PrefServiceBridge prefs = PrefServiceBridge.getInstance();
                if (PREF_NAVIGATION_ERROR.equals(key)) {
                    return prefs.isResolveNavigationErrorManaged();
                }
                if (PREF_SEARCH_SUGGESTIONS.equals(key)) {
                    return prefs.isSearchSuggestManaged();
                }
                if (PREF_NETWORK_PREDICTIONS_NO_CELLULAR.equals(key)
                        || PREF_NETWORK_PREDICTIONS.equals(key)) {
                    return prefs.isNetworkPredictionManaged();
                }
                if (PREF_CRASH_DUMP_UPLOAD.equals(key)
                        || PREF_CRASH_DUMP_UPLOAD_NO_CELLULAR.equals(key)) {
                    return prefs.isCrashReportManaged();
                }
                return false;
            }
        };
    }
    /** Shows the clear-browsing-data dialog and keeps a reference for cleanup. */
    private void showClearBrowsingDialog() {
        mClearBrowsingDataDialogFragment = new ClearBrowsingDataDialogFragment();
        mClearBrowsingDataDialogFragment.show(
                getFragmentManager(), ClearBrowsingDataDialogFragment.FRAGMENT_TAG);
    }
    @Override
    public void onDestroy() {
        super.onDestroy();
        if (mClearBrowsingDataDialogFragment != null) {
            // In case the progress dialog is still showing and waiting for a callback, dismiss it.
            // See bug http://b/13396757.
            mClearBrowsingDataDialogFragment.dismissProgressDialog();
        }
        mClearBrowsingDataDialogFragment = null;
    }
    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // Replace any inherited menu items with a single "help" action.
        menu.clear();
        MenuItem help = menu.add(
                Menu.NONE, R.id.menu_id_help_privacy, Menu.NONE, R.string.menu_help);
        help.setIcon(R.drawable.ic_help_and_feedback);
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == R.id.menu_id_help_privacy) {
            ((Preferences) getActivity()).showPrivacyPreferencesHelp();
            return true;
        }
        return false;
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.dataFlow;
import com.intellij.codeInspection.dataFlow.value.*;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Stream;
/**
* Representation of {@link MethodContract} return value. It may represent the concrete value (e.g. "false") or pose some constraints
* to the method return value (e.g. "!null").
*/
public abstract class ContractReturnValue {
  // "paramN" return values are serialized as PARAMETER_ORDINAL_BASE + zero-based parameter index.
  private static final int PARAMETER_ORDINAL_BASE = 10;
  private static final int MAX_SUPPORTED_PARAMETER = 100;
  // Signature validators: each maps a method to a human-readable problem message,
  // or to null when the method is compatible with the return value.
  private static final Function<PsiMethod, String> NOT_CONSTRUCTOR =
    method -> method.isConstructor() ? "not applicable for constructor" : null;
  private static final Function<PsiMethod, String> NOT_STATIC =
    method -> method.hasModifierProperty(PsiModifier.STATIC) ? "not applicable for static method" : null;
  private static final Function<PsiMethod, String> NOT_PRIMITIVE_RETURN =
    method -> {
      PsiType returnType = method.getReturnType();
      return returnType instanceof PsiPrimitiveType
             ? "not applicable for primitive return type '" + returnType.getPresentableText() + "'"
             : null;
    };
  private static final Function<PsiMethod, String> BOOLEAN_RETURN =
    method -> PsiType.BOOLEAN.equals(method.getReturnType()) ? null : "method return type must be 'boolean'";
  // Text used by toString() and the stable index used by ordinal().
  private final @NotNull String myName;
  private final int myOrdinal;
private ContractReturnValue(@NotNull String name, int ordinal) {
myName = name;
myOrdinal = ordinal;
}
/**
* @return a hashcode which is stable across VM restart
*/
@Override
public int hashCode() {
return myOrdinal;
}
/**
* @return a string which is used to represent this return value inside {@link org.jetbrains.annotations.Contract} annotation.
*/
@Override
public String toString() {
return myName;
}
/**
* Checks whether this return value makes sense for the specified method signature. The method body is not checked.
* E.g. "true" contract value makes sense for method returning {@code boolean}, but does not make sense for method returning {@code int}.
* This method can be used to check the contract correctness.
*
* @param method
* @return null if this contract return value makes sense for the supplied return type.
* Otherwise the human-readable error message is returned.
*/
public final String getMethodCompatibilityProblem(PsiMethod method) {
return validators().map(fn -> fn.apply(method)).filter(Objects::nonNull).findFirst()
.map(("Contract return value '" + this + "': ")::concat)
.orElse(null);
}
/**
* Checks whether this return value makes sense for the specified method signature. The method body is not checked.
* E.g. "true" contract value makes sense for method returning {@code boolean}, but does not make sense for method returning {@code int}.
* This method can be used to check the contract correctness.
*
* @param method
* @return true if this contract return value makes sense for the supplied return type.
*/
public final boolean isMethodCompatible(PsiMethod method) {
return validators().map(fn -> fn.apply(method)).allMatch(Objects::isNull);
}
  /**
   * Returns the signature validators applicable to this return value;
   * each validator yields a problem message, or null when the method is compatible.
   */
  abstract Stream<Function<PsiMethod, String>> validators();
public ContractReturnValue intersect(ContractReturnValue other) {
if (this.equals(other) || other == ANY_VALUE) return this;
if (this == ANY_VALUE) return other;
if (this.isNotNull() && other.isNotNull()) return NOT_NULL_VALUE;
return FAIL_VALUE;
}
  // Combines the contract-derived value (newValue) with the default method return
  // value (defaultValue), keeping as many facts as possible.
  // NOTE(review): semantics inferred from the visible DfaFactMap usage; confirm against callers.
  static DfaValue merge(DfaValue defaultValue, DfaValue newValue, DfaMemoryState memState) {
    // An absent/unknown side contributes nothing: take the other one as-is.
    if (defaultValue == null || defaultValue == DfaUnknownValue.getInstance()) return newValue;
    if (newValue == null || newValue == DfaUnknownValue.getInstance()) return defaultValue;
    if (defaultValue instanceof DfaFactMapValue) {
      DfaFactMap defaultFacts = ((DfaFactMapValue)defaultValue).getFacts();
      if (newValue instanceof DfaFactMapValue) {
        // Both sides are fact maps: use their intersection when it is non-contradictory.
        DfaFactMap intersection = defaultFacts.intersect(((DfaFactMapValue)newValue).getFacts());
        if (intersection != null) {
          return defaultValue.getFactory().getFactFactory().createValue(intersection);
        }
      }
      if (newValue instanceof DfaVariableValue) {
        // Record the default facts onto the variable inside the given memory state.
        defaultFacts.facts(Pair::create).forEach(fact -> memState.applyFact(newValue, fact.getFirst(), fact.getSecond()));
      }
    }
    return newValue;
  }
  /**
   * Converts this return value to the most suitable {@link DfaValue} which represents the same constraints.
   *
   * <p>Implementations may additionally update {@code callState.myMemoryState} as a side effect
   * (the "!null" implementation records a nullability fact on variable values this way).
   *
   * @param factory a {@link DfaValueFactory} which can be used to create new values if necessary
   * @param defaultValue a default method return type value in the absence of the contracts (may contain method type information)
   * @param callState call state
   * @return a value which represents the constraints of this contract return value.
   */
  public abstract DfaValue getDfaValue(DfaValueFactory factory, DfaValue defaultValue, DfaCallState callState);
  /**
   * Returns true if the supplied {@link DfaValue} could be compatible with this return value. If false is returned, then
   * returning given {@link DfaValue} would violate the contract with this return value. This method can be used
   * to check the contract correctness.
   *
   * <p>Compatibility is approximate: true means a violation cannot be proven from {@code state}.
   *
   * @param state memory state to use
   * @param value value to check
   * @return whether the supplied value could be compatible with this return value.
   */
  public abstract boolean isValueCompatible(DfaMemoryState state, DfaValue value);
/**
* Returns a unique non-negative number which identifies this return value. Can be used for serialization. For two return values
* {@code a} and {@code b} the following holds:
* {@code a.ordinal() == b.ordinal() <=> a.equals(b)}.
*
* @return a unique non-negative number which identifies this return value.
*/
public int ordinal() {
return myOrdinal;
}
  /**
   * @return true if this return value represents a non-null object value (possibly with some other restrictions)
   */
  public boolean isNotNull() {
    // overridden to return true by the "!null" and "new" values
    return false;
  }
/**
* @return true if this return value represents a null
*/
public boolean isNull() {
return this == NULL_VALUE;
}
/**
* @return true if this return value represents a failure (that is, method must throw to fulfill this contract)
*/
public boolean isFail() {
return this == FAIL_VALUE;
}
  /**
   * @return true if this return value represents a boolean value (either "true" or "false")
   */
  public boolean isBoolean() {
    // both TRUE_VALUE and FALSE_VALUE are instances of BooleanReturnValue
    return this instanceof BooleanReturnValue;
  }
/**
* Returns a {@code ContractReturnValue} which corresponds to given ordinal index. For any return value {@code x}
* the following holds: {@code ContractReturnValue.valueOf(x.ordinal()).equals(x)}.
*
* @param ordinal ordinal to create a ContractReturnValue object from
* @return a ContractReturnValue object. Returns an object which represents any possible value if the supplied ordinal does not
* correspond to any valid ContractReturnValue.
*/
@NotNull
public static ContractReturnValue valueOf(int ordinal) {
switch (ordinal) {
case 0:
case 1:
return returnNull();
case 2:
return returnNotNull();
case 3:
return returnTrue();
case 4:
return returnFalse();
case 5:
return fail();
case 6:
return returnNew();
case 7:
return returnThis();
default:
if (ordinal >= PARAMETER_ORDINAL_BASE && ordinal <= PARAMETER_ORDINAL_BASE + MAX_SUPPORTED_PARAMETER) {
return returnParameter(ordinal - PARAMETER_ORDINAL_BASE);
}
return returnAny();
}
}
/**
* Returns a {@code ContractReturnValue} which corresponds to given string representation. For any return value {@code x}
* the following holds: {@code ContractReturnValue.valueOf(x.toString()).equals(x)} and for string {@code str} the following holds:
* {@code ContractReturnValue.valueOf(str) == null || ContractReturnValue.valueOf(str).toString().equals(str)}.
*
* @param value string representation of return value
* @return ContractReturnValue object which corresponds to given string representation; null if given value is not supported.
*/
@Nullable
public static ContractReturnValue valueOf(@NotNull String value) {
switch (value) {
case "_":
return returnAny();
case "fail":
return fail();
case "true":
return returnTrue();
case "false":
return returnFalse();
case "null":
return returnNull();
case "!null":
return returnNotNull();
case "new":
return returnNew();
case "this":
return returnThis();
}
if (value.startsWith("param")) {
String suffix = value.substring("param".length());
try {
int paramNumber = Integer.parseInt(suffix) - 1;
if (paramNumber >= 0 && paramNumber <= MAX_SUPPORTED_PARAMETER) {
return new ParameterReturnValue(paramNumber);
}
}
catch (NumberFormatException ignored) {
// unexpected non-integer suffix: ignore
}
}
return null;
}
  /**
   * @return any possible return value ("top" element); shared singleton
   */
  public static ContractReturnValue returnAny() {
    return ANY_VALUE;
  }
  /**
   * @return return value indicating that the method throws an exception ("bottom" element);
   * shared singleton
   */
  public static ContractReturnValue fail() {
    return FAIL_VALUE;
  }
/**
* @param value a boolean value to return
* @return the corresponding boolean return value
*/
public static BooleanReturnValue returnBoolean(boolean value) {
return value ? returnTrue() : returnFalse();
}
  /**
   * @return boolean "true" return value; shared singleton
   */
  public static BooleanReturnValue returnTrue() {
    return BooleanReturnValue.TRUE_VALUE;
  }
  /**
   * @return boolean "false" return value; shared singleton
   */
  public static BooleanReturnValue returnFalse() {
    return BooleanReturnValue.FALSE_VALUE;
  }
  /**
   * @return "null" return value; shared singleton
   */
  public static ContractReturnValue returnNull() {
    return NULL_VALUE;
  }
  /**
   * @return non-null return value; shared singleton
   */
  public static ContractReturnValue returnNotNull() {
    return NOT_NULL_VALUE;
  }
  /**
   * @return non-null, newly allocated object return value; shared singleton
   */
  public static ContractReturnValue returnNew() {
    return NEW_VALUE;
  }
  /**
   * @return non-null "this" return value (the call qualifier); shared singleton
   */
  public static ContractReturnValue returnThis() {
    return THIS_VALUE;
  }
/**
* @return non-null parameter return value (parameter number is zero-based)
*/
public static ContractReturnValue returnParameter(int n) {
if (n < 0) {
throw new IllegalArgumentException("Negative parameter: " + n);
}
if (n > MAX_SUPPORTED_PARAMETER) return ANY_VALUE;
return new ParameterReturnValue(n);
}
private static final ContractReturnValue ANY_VALUE = new ContractReturnValue("_", 0) {
@Override
Stream<Function<PsiMethod, String>> validators() {
return Stream.of(NOT_CONSTRUCTOR);
}
@Override
public DfaValue getDfaValue(DfaValueFactory factory, DfaValue defaultValue, DfaCallState callState) {
return defaultValue;
}
@Override
public boolean isValueCompatible(DfaMemoryState state, DfaValue value) {
return true;
}
};
private static final ContractReturnValue FAIL_VALUE = new ContractReturnValue("fail", 5) {
@Override
Stream<Function<PsiMethod, String>> validators() {
return Stream.empty();
}
@Override
public DfaValue getDfaValue(DfaValueFactory factory, DfaValue defaultValue, DfaCallState callState) {
return factory.getConstFactory().getContractFail();
}
@Override
public boolean isValueCompatible(DfaMemoryState state, DfaValue value) {
return false;
}
};
private static final ContractReturnValue NULL_VALUE = new ContractReturnValue("null", 1) {
@Override
Stream<Function<PsiMethod, String>> validators() {
return Stream.of(NOT_CONSTRUCTOR, NOT_PRIMITIVE_RETURN);
}
@Override
public DfaValue getDfaValue(DfaValueFactory factory, DfaValue defaultValue, DfaCallState callState) {
return factory.getConstFactory().getNull();
}
@Override
public boolean isValueCompatible(DfaMemoryState state, DfaValue value) {
return !state.isNotNull(value);
}
};
  /** "!null" — the returned reference is known to be non-null. */
  private static final ContractReturnValue NOT_NULL_VALUE = new ContractReturnValue("!null", 2) {
    @Override
    Stream<Function<PsiMethod, String>> validators() {
      return Stream.of(NOT_CONSTRUCTOR, NOT_PRIMITIVE_RETURN);
    }

    @Override
    public boolean isNotNull() {
      return true;
    }

    @Override
    public DfaValue getDfaValue(DfaValueFactory factory, DfaValue defaultValue, DfaCallState callState) {
      if (defaultValue instanceof DfaVariableValue) {
        // Returning a variable: record the non-null fact directly in the memory state.
        callState.myMemoryState.forceVariableFact((DfaVariableValue)defaultValue, DfaFactType.NULLABILITY, DfaNullability.NOT_NULL);
        return defaultValue;
      }
      // Otherwise attach the non-null fact to the value itself.
      return factory.withFact(defaultValue, DfaFactType.NULLABILITY, DfaNullability.NOT_NULL);
    }

    @Override
    public boolean isValueCompatible(DfaMemoryState state, DfaValue value) {
      // compatible unless the state proves the value is definitely null
      return !state.isNull(value);
    }
  };
/** "new" return value: the method returns a freshly created (hence non-null) object. */
private static final ContractReturnValue NEW_VALUE = new ContractReturnValue("new", 6) {
  @Override
  Stream<Function<PsiMethod, String>> validators() {
    return Stream.of(NOT_CONSTRUCTOR, NOT_PRIMITIVE_RETURN);
  }

  @Override
  public DfaValue getDfaValue(DfaValueFactory factory, DfaValue defaultValue, DfaCallState callState) {
    if (defaultValue instanceof DfaVariableValue) {
      // Detach from the variable: rebuild a value from the facts currently known about it.
      defaultValue = factory.getFactFactory().createValue(callState.myMemoryState.getFacts((DfaVariableValue)defaultValue));
    }
    // A newly created object is never null.
    DfaValue value = factory.withFact(defaultValue, DfaFactType.NULLABILITY, DfaNullability.NOT_NULL);
    if (callState.myCallArguments.myPure) {
      boolean unmodifiableView =
        value instanceof DfaFactMapValue && ((DfaFactMapValue)value).get(DfaFactType.MUTABILITY) == Mutability.UNMODIFIABLE_VIEW;
      // Unmodifiable view methods like Collections.unmodifiableList create new object, but their special field "size" is
      // actually a delegate, so we cannot trust it if the original value is not local
      if (!unmodifiableView) {
        value = factory.withFact(value, DfaFactType.LOCALITY, true);
      }
    }
    return value;
  }

  @Override
  public boolean isNotNull() {
    return true;
  }

  @Override
  public boolean isValueCompatible(DfaMemoryState state, DfaValue value) {
    // Compatible unless the memory state already proves the value is null.
    return !state.isNull(value);
  }
};
/** "this" return value: the method returns its own qualifier (builder-style chaining). */
private static final ContractReturnValue THIS_VALUE = new ContractReturnValue("this", 7) {
  @Override
  Stream<Function<PsiMethod, String>> validators() {
    // Requires an instance method whose return type can actually hold the containing class.
    return Stream.of(NOT_CONSTRUCTOR, NOT_STATIC, NOT_PRIMITIVE_RETURN, method -> {
      PsiType returnType = method.getReturnType();
      if (returnType instanceof PsiClassType) {
        PsiClass aClass = method.getContainingClass();
        if (aClass != null && JavaPsiFacade.getElementFactory(method.getProject()).createType(aClass).isConvertibleFrom(returnType)) {
          return null;
        }
      }
      return "method return type should be compatible with method containing class";
    });
  }

  @Override
  public boolean isNotNull() {
    // A method cannot return "this" of a null qualifier.
    return true;
  }

  @Override
  public DfaValue getDfaValue(DfaValueFactory factory, DfaValue defaultValue, DfaCallState callState) {
    DfaValue qualifier = callState.myCallArguments.myQualifier;
    if (qualifier != null && qualifier != DfaUnknownValue.getInstance()) {
      // The result is the qualifier itself: merge what is known about both.
      return merge(defaultValue, qualifier, callState.myMemoryState);
    }
    return factory.withFact(defaultValue, DfaFactType.NULLABILITY, DfaNullability.NOT_NULL);
  }

  @Override
  public boolean isValueCompatible(DfaMemoryState state, DfaValue value) {
    return !state.isNull(value);
  }
};
/**
 * Boolean return value (either "true" or "false").
 */
public static final class BooleanReturnValue extends ContractReturnValue {
  // Interned instances: the private constructor guarantees only these two ever exist.
  static final BooleanReturnValue TRUE_VALUE = new BooleanReturnValue(true, 3);
  static final BooleanReturnValue FALSE_VALUE = new BooleanReturnValue(false, 4);

  // The boolean constant this contract promises to return.
  private final boolean myValue;

  private BooleanReturnValue(boolean value, int ordinal) {
    super(String.valueOf(value), ordinal);
    myValue = value;
  }

  public boolean getValue() {
    return myValue;
  }

  /**
   * @return the return value opposite to this return value
   */
  public BooleanReturnValue negate() {
    return myValue ? FALSE_VALUE : TRUE_VALUE;
  }

  @Override
  Stream<Function<PsiMethod, String>> validators() {
    // Applicable only to non-constructors returning boolean.
    return Stream.of(NOT_CONSTRUCTOR, BOOLEAN_RETURN);
  }

  @Override
  public DfaValue getDfaValue(DfaValueFactory factory, DfaValue defaultValue, DfaCallState callState) {
    return factory.getBoolean(myValue);
  }

  @Override
  public boolean isValueCompatible(DfaMemoryState state, DfaValue value) {
    // Compatible unless the state proves the value is the opposite boolean constant.
    DfaConstValue dfaConst = state.getConstantValue(value);
    return dfaConst == null || Boolean.valueOf(myValue).equals(dfaConst.getValue());
  }
}
/** Parameter return value: the method returns one of its arguments unchanged. */
public static final class ParameterReturnValue extends ContractReturnValue {
  /** Zero-based number of the parameter this contract returns. */
  private final int myParamNumber;

  public ParameterReturnValue(int n) {
    super("param" + (n + 1), n + PARAMETER_ORDINAL_BASE);
    myParamNumber = n;
  }

  /**
   * @return zero-based number of the returned parameter
   */
  public int getParameterNumber() {
    return myParamNumber;
  }

  @Override
  Stream<Function<PsiMethod, String>> validators() {
    // The method must actually have the parameter, and its return type must accept it.
    return Stream.of(NOT_CONSTRUCTOR, method -> {
      PsiParameter[] parameters = method.getParameterList().getParameters();
      if (parameters.length <= myParamNumber) {
        return "not applicable for method which has " + parameters.length +
               " parameter" + (parameters.length == 1 ? "" : "s");
      }
      PsiType parameterType = parameters[myParamNumber].getType();
      PsiType returnType = method.getReturnType();
      if (returnType != null && !returnType.isAssignableFrom(parameterType)) {
        return "return type '" +
               returnType.getPresentableText() +
               "' must be assignable from parameter type '" +
               parameterType.getPresentableText() +
               "'";
      }
      return null;
    });
  }

  @Override
  public boolean equals(Object obj) {
    return this == obj || (obj instanceof ParameterReturnValue && ((ParameterReturnValue)obj).myParamNumber == myParamNumber);
  }

  @Override
  public int hashCode() {
    // FIX: equals() was overridden without hashCode(), violating the Object contract —
    // two instances for the same parameter number compared equal yet could land in
    // different hash buckets. Hashing by parameter number restores consistency.
    return myParamNumber;
  }

  @Override
  public DfaValue getDfaValue(DfaValueFactory factory, DfaValue defaultValue, DfaCallState callState) {
    if (callState.myCallArguments.myArguments != null && callState.myCallArguments.myArguments.length > myParamNumber) {
      // The result is the argument itself: merge what is known about both values.
      DfaValue argument = callState.myCallArguments.myArguments[myParamNumber];
      return merge(defaultValue, argument, callState.myMemoryState);
    }
    return defaultValue;
  }

  @Override
  public boolean isValueCompatible(DfaMemoryState state, DfaValue value) {
    // Nothing constrains the argument's value here, so any returned value is compatible.
    return true;
  }
}
}
| |
/**
*
*/
package org.weekendsoft.mpa.entity;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Query;
import javax.persistence.Table;
import javax.persistence.TypedQuery;
/**
 * Entity Bean for accounts.
 *
 * <p>Also hosts static persistence helpers that operate on the shared
 * {@code em} EntityManager, presumably inherited from {@code BaseEntity}
 * together with the lazy {@code init}/{@code init()} pair — TODO confirm.
 *
 * @author Vivek Kant
 */
@Entity
@Table(name = "account")
public class Account extends BaseEntity implements Comparable<Account> {

    private static final long serialVersionUID = 1L;

    /** Name of the pre-defined account used for default income postings. */
    public static final String DEFAULT_INCOME_ACCOUNT = "DEFAULT_INCOME";

    /** Name of the pre-defined account used for default expense postings. */
    public static final String DEFAULT_EXPENSE_ACCOUNT = "DEFAULT_EXPENSE";

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "account_id")
    private int id;

    @Column(name = "account_name")
    private String name;

    @Column(name = "instance_id")
    private String instanceId;

    @Column(name = "account_type_id")
    private Integer accountTypeId;

    @Column(name = "bank_id")
    private Integer bankId;

    @Column(name = "currency_id")
    private String currencyId;

    @Column(name = "internal")
    private boolean internal;

    @Column(name = "initial_balance")
    private double initialBalance;

    /**
     * Creates an account with the given database id.
     *
     * @param id the account primary key
     */
    public Account(int id) {
        super();
        this.id = id;
    }

    /** Default constructor, required by JPA. */
    public Account() {
        // Default Constructor
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getInstanceId() {
        return instanceId;
    }

    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    public Integer getAccountTypeId() {
        return accountTypeId;
    }

    public void setAccountTypeId(Integer accountTypeId) {
        this.accountTypeId = accountTypeId;
    }

    public Integer getBankId() {
        return bankId;
    }

    public void setBankId(Integer bankId) {
        this.bankId = bankId;
    }

    public String getCurrencyId() {
        return currencyId;
    }

    public void setCurrencyId(String currencyId) {
        this.currencyId = currencyId;
    }

    public boolean isInternal() {
        return internal;
    }

    public void setInternal(boolean internal) {
        this.internal = internal;
    }

    public double getInitialBalance() {
        return initialBalance;
    }

    public void setInitialBalance(double initialBalance) {
        this.initialBalance = initialBalance;
    }

    /**
     * Orders accounts alphabetically by name.
     * NOTE(review): throws NullPointerException if either name is null — confirm
     * names are always populated before sorting.
     */
    public int compareTo(Account account2) {
        return this.name.compareTo(account2.name);
    }

    @Override
    public String toString() {
        return "Account [id=" + id + ", name=" + name + ", instanceId=" + instanceId + ", accountTypeId="
                + accountTypeId + ", bankId=" + bankId + ", currencyId=" + currencyId + ", internal=" + internal
                + ", initialBalance=" + initialBalance + "]";
    }

    /**
     * Looks up an account by primary key.
     *
     * @param id account primary key
     * @return the account, or null if not found
     * @throws Exception on persistence errors
     */
    public static Account get(int id) throws Exception {
        if (!init) init();
        return em.find(Account.class, id);
    }

    /**
     * Looks up an account by name within an instance.
     *
     * @param name       the account name
     * @param instanceId the owning instance id
     * @return the first matching account
     * @throws Exception on persistence errors (including javax.persistence.NoResultException
     *                   when no matching account exists)
     */
    public static Account getAccountByName(String name, String instanceId) throws Exception {
        if (!init) init();
        // TypedQuery removes the unchecked cast that the previous raw Query required.
        TypedQuery<Account> q = em.createQuery("select a from Account a where "
                + "a.instanceId = :instanceId and "
                + "a.name = :name", Account.class);
        q.setParameter("instanceId", instanceId);
        q.setParameter("name", name);
        q.setMaxResults(1);
        return q.getSingleResult();
    }

    /** @return the pre-defined default income account for the given instance */
    public static Account getIncomeAccount(String instanceId) throws Exception {
        return getAccountByName(DEFAULT_INCOME_ACCOUNT, instanceId);
    }

    /** @return the pre-defined default expense account for the given instance */
    public static Account getExpenseAccount(String instanceId) throws Exception {
        return getAccountByName(DEFAULT_EXPENSE_ACCOUNT, instanceId);
    }

    /**
     * Lists all accounts belonging to the given instance.
     *
     * @param instanceId the owning instance id
     * @return matching accounts (possibly empty)
     * @throws Exception on persistence errors
     */
    public static List<Account> getAll(String instanceId) throws Exception {
        if (!init) init();
        TypedQuery<Account> q = em.createQuery("select a from Account a where "
                + "a.instanceId = :instanceId", Account.class);
        q.setParameter("instanceId", instanceId);
        return q.getResultList();
    }

    /**
     * Persists a new account inside a transaction.
     *
     * @param account the account to store
     * @throws Exception on persistence errors
     */
    public static void create(Account account) throws Exception {
        if (!init) init();
        em.getTransaction().begin();
        em.persist(account);
        em.flush();
        em.getTransaction().commit();
    }

    /**
     * Updates an existing account: loads the managed entity by id and copies the
     * editable fields; the dirty managed entity is flushed on commit.
     *
     * @param account carries the id of the account to update and the new field values
     * @throws Exception on persistence errors
     */
    public static void modify(Account account) throws Exception {
        if (!init) init();
        Account original = em.find(Account.class, account.id);
        em.getTransaction().begin();
        copy(original, account);
        em.getTransaction().commit();
    }

    /**
     * Removes an account inside a transaction.
     *
     * @param account the account to delete
     * @throws Exception on persistence errors
     */
    public static void delete(Account account) throws Exception {
        // FIX: this was the only persistence helper missing the lazy-init guard,
        // so a delete() as the first operation ran against an uninitialized EntityManager.
        if (!init) init();
        em.getTransaction().begin();
        em.remove(account);
        em.getTransaction().commit();
    }

    // Copies the editable fields (everything except the id) onto the managed entity.
    private static void copy(Account to, Account from) {
        to.accountTypeId = from.accountTypeId;
        to.name = from.name;
        to.bankId = from.bankId;
        to.currencyId = from.currencyId;
        to.initialBalance = from.initialBalance;
        to.instanceId = from.instanceId;
        to.internal = from.internal;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search;
import org.apache.lucene.index.PrefixCodedTerms;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
/** Utility class to filter parent and children clauses when building nested
 * queries. */
public final class NestedHelper {

    // Used to resolve field names and to walk object mappers up the path hierarchy.
    private final MapperService mapperService;

    public NestedHelper(MapperService mapperService) {
        this.mapperService = mapperService;
    }

    /** Returns true if the given query might match nested documents. */
    public boolean mightMatchNestedDocs(Query query) {
        if (query instanceof ConstantScoreQuery) {
            // Wrapper queries: the answer is determined by the wrapped query.
            return mightMatchNestedDocs(((ConstantScoreQuery) query).getQuery());
        } else if (query instanceof BoostQuery) {
            return mightMatchNestedDocs(((BoostQuery) query).getQuery());
        } else if (query instanceof MatchAllDocsQuery) {
            return true;
        } else if (query instanceof MatchNoDocsQuery) {
            return false;
        } else if (query instanceof TermQuery) {
            // We only handle term(s) queries and range queries, which should already
            // cover a high majority of use-cases
            return mightMatchNestedDocs(((TermQuery) query).getTerm().field());
        } else if (query instanceof TermInSetQuery) {
            PrefixCodedTerms terms = ((TermInSetQuery) query).getTermData();
            if (terms.size() > 0) {
                // All terms of a TermInSetQuery target the same field: inspecting the first suffices.
                PrefixCodedTerms.TermIterator it = terms.iterator();
                it.next();
                return mightMatchNestedDocs(it.field());
            } else {
                return false;
            }
        } else if (query instanceof PointRangeQuery) {
            return mightMatchNestedDocs(((PointRangeQuery) query).getField());
        } else if (query instanceof IndexOrDocValuesQuery) {
            return mightMatchNestedDocs(((IndexOrDocValuesQuery) query).getIndexQuery());
        } else if (query instanceof BooleanQuery) {
            final BooleanQuery bq = (BooleanQuery) query;
            final boolean hasRequiredClauses = bq.clauses().stream().anyMatch(BooleanClause::isRequired);
            if (hasRequiredClauses) {
                // Conjunction: every required clause must be able to match nested docs.
                return bq.clauses().stream()
                        .filter(BooleanClause::isRequired)
                        .map(BooleanClause::getQuery)
                        .allMatch(this::mightMatchNestedDocs);
            } else {
                // Disjunction: one SHOULD clause matching nested docs is enough.
                return bq.clauses().stream()
                        .filter(c -> c.getOccur() == Occur.SHOULD)
                        .map(BooleanClause::getQuery)
                        .anyMatch(this::mightMatchNestedDocs);
            }
        } else if (query instanceof ESToParentBlockJoinQuery) {
            return ((ESToParentBlockJoinQuery) query).getPath() != null;
        } else {
            // Unknown query type: assuming it may match nested docs is always safe.
            return true;
        }
    }

    /** Returns true if a query on the given field might match nested documents. */
    boolean mightMatchNestedDocs(String field) {
        if (field.startsWith("_")) {
            // meta field. Every meta field behaves differently, eg. nested
            // documents have the same _uid as their parent, put their path in
            // the _type field but do not have _field_names. So we just ignore
            // meta fields and return true, which is always safe, it just means
            // we might add a nested filter when it is not required.
            return true;
        }
        if (mapperService.fullName(field) == null) {
            // field does not exist
            return false;
        }
        // Walk up the object-path hierarchy looking for any nested ancestor.
        for (String parent = parentObject(field); parent != null; parent = parentObject(parent)) {
            ObjectMapper mapper = mapperService.getObjectMapper(parent);
            if (mapper != null && mapper.nested().isNested()) {
                return true;
            }
        }
        return false;
    }

    /** Returns true if the given query might match parent documents or documents
     * that are nested under a different path. */
    public boolean mightMatchNonNestedDocs(Query query, String nestedPath) {
        if (query instanceof ConstantScoreQuery) {
            return mightMatchNonNestedDocs(((ConstantScoreQuery) query).getQuery(), nestedPath);
        } else if (query instanceof BoostQuery) {
            return mightMatchNonNestedDocs(((BoostQuery) query).getQuery(), nestedPath);
        } else if (query instanceof MatchAllDocsQuery) {
            return true;
        } else if (query instanceof MatchNoDocsQuery) {
            return false;
        } else if (query instanceof TermQuery) {
            return mightMatchNonNestedDocs(((TermQuery) query).getTerm().field(), nestedPath);
        } else if (query instanceof TermInSetQuery) {
            PrefixCodedTerms terms = ((TermInSetQuery) query).getTermData();
            if (terms.size() > 0) {
                // All terms target the same field: inspecting the first suffices.
                PrefixCodedTerms.TermIterator it = terms.iterator();
                it.next();
                return mightMatchNonNestedDocs(it.field(), nestedPath);
            } else {
                return false;
            }
        } else if (query instanceof PointRangeQuery) {
            return mightMatchNonNestedDocs(((PointRangeQuery) query).getField(), nestedPath);
        } else if (query instanceof IndexOrDocValuesQuery) {
            return mightMatchNonNestedDocs(((IndexOrDocValuesQuery) query).getIndexQuery(), nestedPath);
        } else if (query instanceof BooleanQuery) {
            final BooleanQuery bq = (BooleanQuery) query;
            final boolean hasRequiredClauses = bq.clauses().stream().anyMatch(BooleanClause::isRequired);
            if (hasRequiredClauses) {
                // Conjunction: every required clause must allow non-nested matches.
                return bq.clauses().stream()
                        .filter(BooleanClause::isRequired)
                        .map(BooleanClause::getQuery)
                        .allMatch(q -> mightMatchNonNestedDocs(q, nestedPath));
            } else {
                // Disjunction: one SHOULD clause allowing non-nested matches is enough.
                return bq.clauses().stream()
                        .filter(c -> c.getOccur() == Occur.SHOULD)
                        .map(BooleanClause::getQuery)
                        .anyMatch(q -> mightMatchNonNestedDocs(q, nestedPath));
            }
        } else {
            // Unknown query type: assuming it may match non-nested docs is always safe.
            return true;
        }
    }

    /** Returns true if a query on the given field might match parent documents
     * or documents that are nested under a different path. */
    boolean mightMatchNonNestedDocs(String field, String nestedPath) {
        if (field.startsWith("_")) {
            // meta field. Every meta field behaves differently, eg. nested
            // documents have the same _uid as their parent, put their path in
            // the _type field but do not have _field_names. So we just ignore
            // meta fields and return true, which is always safe, it just means
            // we might add a nested filter when it is not required.
            return true;
        }
        if (mapperService.fullName(field) == null) {
            // field does not exist
            return false;
        }
        // Find the closest nested ancestor of the field, if any.
        for (String parent = parentObject(field); parent != null; parent = parentObject(parent)) {
            ObjectMapper mapper = mapperService.getObjectMapper(parent);
            if (mapper!= null && mapper.nested().isNested()) {
                if (mapper.fullPath().equals(nestedPath)) {
                    // If the mapper does not include in its parent or in the root object then
                    // the query might only match nested documents with the given path
                    return mapper.nested().isIncludeInParent() || mapper.nested().isIncludeInRoot();
                } else {
                    // the first parent nested mapper does not have the expected path
                    // It might be misconfiguration or a sub nested mapper
                    return true;
                }
            }
        }
        return true; // the field is not a sub field of the nested path
    }

    // Returns the dotted path of the enclosing object, or null for a top-level field.
    private static String parentObject(String field) {
        int lastDot = field.lastIndexOf('.');
        if (lastDot == -1) {
            return null;
        }
        return field.substring(0, lastDot);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.taeconfigurator.editors.ui.dialogs;
import org.apache.uima.taeconfigurator.editors.ui.AbstractSection;
import org.apache.uima.taeconfigurator.editors.ui.Utility;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.swt.widgets.Widget;
/**
 * The Class AbstractDialogMultiColTable.
 *
 * Base class for dialogs whose tree rows carry toggleable "In"/"Out"
 * check indicators in columns 1 and 2. Tracks how many cells are
 * checked so subclasses can enable OK only when at least one is set.
 */
public abstract class AbstractDialogMultiColTable extends AbstractDialog {

  /**
   * Checked indicator.
   *
   * @param col
   *          the col (1 = input column, 2 = output column)
   * @return the string shown in a checked cell of that column
   */
  protected String checkedIndicator(int col) {
    if (col == 1)
      return "In";
    else
      return "Out";
  }

  /** The Constant UNCHECKED. Empty text marks an unchecked cell. */
  protected static final String UNCHECKED = "";

  /** The tree whose rows hold the multi-column check indicators. */
  Tree f_tree;

  /** Whether column 1 (input) may be toggled. */
  protected boolean enableCol1 = true;

  /** Whether column 2 (output) may be toggled. */
  protected boolean enableCol2 = true;

  /** The number of currently checked cells, across both columns. */
  protected int numberChecked = 0;

  /**
   * Instantiates a new abstract dialog multi col table.
   *
   * @param aSection
   *          the a section
   * @param title
   *          the title
   * @param description
   *          the description
   */
  protected AbstractDialogMultiColTable(AbstractSection aSection, String title,
          String description) {
    super(aSection, title, description);
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.uima.taeconfigurator.editors.ui.dialogs.AbstractDialog#handleEvent(org.eclipse.swt.
   * widgets.Event)
   */
  @Override
  public void handleEvent(Event event) {
    if (event.type == SWT.MouseDown && event.widget == f_tree) {
      Point mousePosition = new Point(event.x, event.y);
      TreeItem item = f_tree.getItem(mousePosition);
      if (null == item) {
        // Click outside any row: explain how to use the dialog instead of toggling.
        jitHowTo(event.widget);
        return;
      }
      int col = getHitColumn(item, mousePosition);
      if (col != 1 && col != 2) {
        jitHowTo(event.widget);
        return;
      }
      if (col == 1 && !enableCol1) {
        setErrorMessage("This resource can't be marked as input");
        return;
      }
      if (col == 2 && !enableCol2) {
        setErrorMessage("This resource can't be marked as output");
        return;
      }
      errorMessageUI.setText("");
      toggleValue(item, col);
    }
    super.handleEvent(event);
  }

  /**
   * Jit how to.
   *
   * Pops an informational message telling the user where to click.
   *
   * @param w
   *          the w
   */
  private void jitHowTo(Widget w) {
    Utility.popMessage(w, "Where to mouse click",
            "Please click the mouse in the input or output columns to toggle the selection.",
            MessageDialog.INFORMATION);
  }

  /**
   * Toggle value.
   *
   * Flips the cell between checked and unchecked, keeping numberChecked in sync.
   *
   * @param item
   *          the item
   * @param col
   *          the col
   */
  protected void toggleValue(TableItem item, int col) {
    item.setText(col,
            item.getText(col).equals(checkedIndicator(col)) ? UNCHECKED : checkedIndicator(col));
    if (item.getText(col).equals(checkedIndicator(col)))
      numberChecked++;
    else
      numberChecked--;
  }

  /**
   * Toggle value.
   *
   * Tree-item variant of the toggle above (SWT TableItem and TreeItem share no
   * common setText(int, String) ancestor, hence the duplication).
   *
   * @param item
   *          the item
   * @param col
   *          the col
   */
  protected void toggleValue(TreeItem item, int col) {
    item.setText(col,
            item.getText(col).equals(checkedIndicator(col)) ? UNCHECKED : checkedIndicator(col));
    if (item.getText(col).equals(checkedIndicator(col)))
      numberChecked++;
    else
      numberChecked--;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.uima.taeconfigurator.editors.ui.dialogs.AbstractDialog#isValid()
   */
  @Override
  public boolean isValid() {
    return true;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.uima.taeconfigurator.editors.ui.dialogs.AbstractDialog#enableOK()
   */
  @Override
  public void enableOK() {
    errorMessageUI.setText("");
    // OK is allowed only when the user has checked at least one cell.
    okButton.setEnabled(numberChecked > 0);
  }

  /**
   * Sets the checked.
   *
   * Sets the cell state explicitly (as opposed to toggling), adjusting
   * numberChecked only when the state actually changes.
   *
   * @param item
   *          the item
   * @param col
   *          the col
   * @param value
   *          the value
   */
  protected void setChecked(TableItem item, int col, boolean value) {
    boolean prevChecked = checkedIndicator(col).equals(item.getText(col));
    item.setText(col, value ? checkedIndicator(col) : UNCHECKED);
    if (value && !prevChecked)
      numberChecked++;
    else if (!value && prevChecked)
      numberChecked--;
  }

  /**
   * Sets the checked.
   *
   * Tree-item variant of the setter above.
   *
   * @param item
   *          the item
   * @param col
   *          the col
   * @param value
   *          the value
   */
  protected void setChecked(TreeItem item, int col, boolean value) {
    boolean prevChecked = checkedIndicator(col).equals(item.getText(col));
    item.setText(col, value ? checkedIndicator(col) : UNCHECKED);
    if (value && !prevChecked)
      numberChecked++;
    else if (!value && prevChecked)
      numberChecked--;
  }
}
| |
/**
* Copyright (C) 2015 - present by McLeod Moores Software Limited
* Modified from APLv2 code Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
* Please see distribution for license.
*/
package com.opengamma.web.portfolio;
import java.net.URI;
import java.util.List;
import java.util.concurrent.ExecutorService;
import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.commons.lang.BooleanUtils;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.impl.flexi.FlexiBean;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.security.SecuritySource;
import com.opengamma.id.ObjectId;
import com.opengamma.id.UniqueId;
import com.opengamma.master.DocumentVisibility;
import com.opengamma.master.portfolio.ManageablePortfolio;
import com.opengamma.master.portfolio.PortfolioDocument;
import com.opengamma.master.portfolio.PortfolioHistoryRequest;
import com.opengamma.master.portfolio.PortfolioHistoryResult;
import com.opengamma.master.portfolio.PortfolioMaster;
import com.opengamma.master.portfolio.PortfolioSearchRequest;
import com.opengamma.master.portfolio.PortfolioSearchResult;
import com.opengamma.master.portfolio.PortfolioSearchSortOrder;
import com.opengamma.master.position.PositionMaster;
import com.opengamma.util.paging.PagingRequest;
import com.opengamma.web.WebPaging;
import com.opengamma.web.analytics.rest.MasterType;
import com.opengamma.web.analytics.rest.Subscribe;
import com.opengamma.web.analytics.rest.SubscribeMaster;
/**
 * RESTful resource for all portfolios.
 * <p>
 * The portfolios resource represents the whole of a portfolio master.
 */
@Path("/portfolios")
public class MinimalWebPortfoliosResource extends AbstractMinimalWebPortfolioResource {

  /**
   * Creates the resource.
   * @param portfolioMaster the portfolio master, not null
   * @param positionMaster the position master, not null
   * @param securitySource the security source, not null
   * @param executor the executor service, not null
   */
  public MinimalWebPortfoliosResource(final PortfolioMaster portfolioMaster, final PositionMaster positionMaster, final SecuritySource securitySource,
      final ExecutorService executor) {
    super(portfolioMaster, positionMaster, securitySource, executor);
  }

  //-------------------------------------------------------------------------
  /**
   * Searches portfolios and renders the result as HTML.
   * @param pgIdx the paging first-item index, may be null
   * @param pgNum the paging page number, may be null
   * @param pgSze the paging page size, may be null
   * @param sort the sort order name, may be null
   * @param name the portfolio name filter, may be null
   * @param portfolioIdStrs portfolio object ids to restrict to
   * @param nodeIdStrs node object ids to restrict to
   * @param includeHidden whether to include hidden documents
   * @return the rendered page, not null
   */
  @GET
  @Produces(MediaType.TEXT_HTML)
  @SubscribeMaster(MasterType.PORTFOLIO)
  public String getHTML(
      @QueryParam("pgIdx") final Integer pgIdx,
      @QueryParam("pgNum") final Integer pgNum,
      @QueryParam("pgSze") final Integer pgSze,
      @QueryParam("sort") final String sort,
      @QueryParam("name") final String name,
      @QueryParam("portfolioId") final List<String> portfolioIdStrs,
      @QueryParam("nodeId") final List<String> nodeIdStrs,
      @QueryParam("includeHidden") final Boolean includeHidden) {
    final PagingRequest pr = buildPagingRequest(pgIdx, pgNum, pgSze);
    final PortfolioSearchSortOrder so = buildSortOrder(sort, PortfolioSearchSortOrder.NAME_ASC);
    final FlexiBean out = createSearchResultData(pr, so, name, portfolioIdStrs, nodeIdStrs, includeHidden);
    return getFreemarker().build(HTML_DIR + "portfolios.ftl", out);
  }

  /**
   * Searches portfolios and renders the result as JSON.
   * Parameters are as for {@link #getHTML}.
   * @return the rendered JSON, not null
   */
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  @SubscribeMaster(MasterType.PORTFOLIO)
  public String getJSON(
      @QueryParam("pgIdx") final Integer pgIdx,
      @QueryParam("pgNum") final Integer pgNum,
      @QueryParam("pgSze") final Integer pgSze,
      @QueryParam("sort") final String sort,
      @QueryParam("name") final String name,
      @QueryParam("portfolioId") final List<String> portfolioIdStrs,
      @QueryParam("nodeId") final List<String> nodeIdStrs,
      @QueryParam("includeHidden") final Boolean includeHidden) {
    final PagingRequest pr = buildPagingRequest(pgIdx, pgNum, pgSze);
    final PortfolioSearchSortOrder so = buildSortOrder(sort, PortfolioSearchSortOrder.NAME_ASC);
    final FlexiBean out = createSearchResultData(pr, so, name, portfolioIdStrs, nodeIdStrs, includeHidden);
    return getFreemarker().build(JSON_DIR + "portfolios.ftl", out);
  }

  // Builds the Freemarker data model for a search; only executes the search
  // when at least one query parameter was supplied.
  private FlexiBean createSearchResultData(final PagingRequest pr, final PortfolioSearchSortOrder sort, final String name,
      final List<String> portfolioIdStrs, final List<String> nodeIdStrs, final Boolean includeHidden) {
    final FlexiBean out = createRootData();
    final PortfolioSearchRequest searchRequest = new PortfolioSearchRequest();
    searchRequest.setPagingRequest(pr);
    searchRequest.setSortOrder(sort);
    searchRequest.setName(StringUtils.trimToNull(name));
    searchRequest.setDepth(1); // see PLAT-1733, also, depth is set to 1 for knowing # of childNodes for UI tree
    searchRequest.setIncludePositions(true); // initially false because of PLAT-2012, now true for portfolio tree
    if (BooleanUtils.isTrue(includeHidden)) {
      searchRequest.setVisibility(DocumentVisibility.HIDDEN);
    }
    for (final String portfolioIdStr : portfolioIdStrs) {
      searchRequest.addPortfolioObjectId(ObjectId.parse(portfolioIdStr));
    }
    for (final String nodeIdStr : nodeIdStrs) {
      searchRequest.addNodeObjectId(ObjectId.parse(nodeIdStr));
    }
    out.put("searchRequest", searchRequest);
    if (data().getUriInfo().getQueryParameters().size() > 0) {
      final PortfolioSearchResult searchResult = data().getPortfolioMaster().search(searchRequest);
      out.put("searchResult", searchResult);
      out.put("paging", new WebPaging(searchResult.getPaging(), data().getUriInfo()));
    }
    return out;
  }

  //-------------------------------------------------------------------------
  /**
   * Creates a portfolio from an HTML form post; re-renders the add page with an
   * error flag when the name is missing.
   * @param name the portfolio name
   * @return the response, not null
   */
  @POST
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  @Produces(MediaType.TEXT_HTML)
  public Response postHTML(@FormParam("name") final String name) {
    final String trimmedName = StringUtils.trimToNull(name);
    if (trimmedName == null) {
      final FlexiBean out = createRootData();
      out.put("err_nameMissing", true);
      final String html = getFreemarker().build(HTML_DIR + "portfolios-add.ftl", out);
      return Response.ok(html).build();
    }
    final URI uri = createPortfolio(trimmedName);
    return Response.seeOther(uri).build();
  }

  // Adds a new portfolio to the master and returns the URI of the added document.
  private URI createPortfolio(final String name) {
    final ManageablePortfolio portfolio = new ManageablePortfolio(name);
    final PortfolioDocument doc = new PortfolioDocument(portfolio);
    final PortfolioDocument added = data().getPortfolioMaster().add(doc);
    return data().getUriInfo().getAbsolutePathBuilder().path(added.getUniqueId().toLatest().toString()).build();
  }

  /**
   * Creates a portfolio from a JSON post.
   * @param name the portfolio name
   * @return 201 Created with the new portfolio's URI, or 400 if the name is missing
   */
  @POST
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  @Produces(MediaType.APPLICATION_JSON)
  public Response postJSON(@FormParam("name") final String name) {
    final String trimmedName = StringUtils.trimToNull(name);
    if (trimmedName == null) {
      // FIX: previously a missing name fell straight through to createPortfolio(null);
      // mirror the validation done by postHTML and reject the request instead.
      return Response.status(Response.Status.BAD_REQUEST).build();
    }
    final URI uri = createPortfolio(trimmedName);
    return Response.created(uri).build();
  }

  //-------------------------------------------------------------------------
  /**
   * Locates the sub-resource for a single portfolio, falling back to the most
   * recent historic version when the document has been deleted.
   * @param idStr the portfolio unique id string, not null
   * @return the portfolio sub-resource, not null
   */
  @Path("{portfolioId}")
  public MinimalWebPortfolioResource findPortfolio(@Subscribe @PathParam("portfolioId") final String idStr) {
    data().setUriPortfolioId(idStr);
    final UniqueId oid = UniqueId.parse(idStr);
    try {
      final PortfolioDocument doc = data().getPortfolioMaster().get(oid);
      data().setPortfolio(doc);
      data().setNode(doc.getPortfolio().getRootNode());
    } catch (final DataNotFoundException ex) {
      // Document no longer current: serve the latest historic version if one exists.
      final PortfolioHistoryRequest historyRequest = new PortfolioHistoryRequest(oid);
      historyRequest.setPagingRequest(PagingRequest.ONE);
      final PortfolioHistoryResult historyResult = data().getPortfolioMaster().history(historyRequest);
      if (historyResult.getDocuments().size() == 0) {
        throw ex;
      }
      data().setPortfolio(historyResult.getFirstDocument());
      data().setNode(historyResult.getFirstDocument().getPortfolio().getRootNode());
    }
    return new MinimalWebPortfolioResource(this);
  }

  //-------------------------------------------------------------------------
  /**
   * Creates the output root data.
   * @return the output root data, not null
   */
  @Override
  protected FlexiBean createRootData() {
    final FlexiBean out = super.createRootData();
    final PortfolioSearchRequest searchRequest = new PortfolioSearchRequest();
    out.put("searchRequest", searchRequest);
    return out;
  }

  //-------------------------------------------------------------------------
  /**
   * Builds a URI for portfolios.
   * @param data the data, not null
   * @return the URI, not null
   */
  public static URI uri(final WebPortfoliosData data) {
    return data.getUriInfo().getBaseUriBuilder().path(MinimalWebPortfoliosResource.class).build();
  }
}
| |
/*
* Copyright (c) 2010-2014 William Bittle http://www.dyn4j.org/
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions
* and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions
* and the following disclaimer in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of dyn4j nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.dyn4j.sandbox.panels;
import java.awt.Dimension;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import javax.swing.GroupLayout;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JTextField;
import org.dyn4j.geometry.Convex;
import org.dyn4j.geometry.Polygon;
import org.dyn4j.geometry.Vector2;
import org.dyn4j.geometry.hull.HullGenerator;
import org.dyn4j.sandbox.dialogs.SampleFileDialog;
import org.dyn4j.sandbox.icons.Icons;
import org.dyn4j.sandbox.resources.Messages;
/**
 * Panel used to create a polygon from a file.
 * <p>
 * The file is expected to contain one point per line as two whitespace
 * separated numbers ("x y"); empty lines and lines starting with '#' are
 * skipped. The point cloud is reduced to a convex polygon with the
 * configured {@link HullGenerator}.
 * @author William Bittle
 * @version 1.0.1
 * @since 1.0.0
 */
public class FromFileConvexHullPolygonPanel extends ConvexHullShapePanel implements InputPanel, ActionListener {
	/** The version id */
	private static final long serialVersionUID = -8005377092903516752L;
	
	/** The polygon read in; null until a valid file has been loaded */
	private Polygon polygon;
	
	/** The point cloud read in; null until a valid file has been loaded */
	private Vector2[] points;
	
	/** The text field to show the selected file path */
	private JTextField txtFile;
	
	/** Panel used to preview the current shape */
	private PreviewPanel pnlPreview;
	
	/** The convex hull algorithm */
	private HullGenerator hullGenerator = null;
	
	/**
	 * Full constructor.
	 * @param hullGenerator the convex hull generation algorithm
	 */
	public FromFileConvexHullPolygonPanel(HullGenerator hullGenerator) {
		this.hullGenerator = hullGenerator;
		GroupLayout layout = new GroupLayout(this);
		this.setLayout(layout);
		// file selection row
		JLabel lblFile = new JLabel(Messages.getString("panel.hull.file"), Icons.INFO, JLabel.LEFT);
		lblFile.setToolTipText(Messages.getString("panel.hull.file.tooltip"));
		this.txtFile = new JTextField();
		this.txtFile.setEditable(false);
		JButton btnBrowse = new JButton(Messages.getString("button.browse"));
		btnBrowse.setToolTipText(Messages.getString("button.browse.tooltip"));
		btnBrowse.setActionCommand("browse");
		btnBrowse.addActionListener(this);
		// sample file button ("generate" command shows the sample dialog)
		JButton btnGenerate = new JButton(Messages.getString("panel.hull.viewSample"));
		btnGenerate.setToolTipText(Messages.getString("panel.hull.viewSample.tooltip"));
		btnGenerate.setActionCommand("generate");
		btnGenerate.addActionListener(this);
		// preview of the computed hull
		JLabel lblPreview = new JLabel(Messages.getString("panel.preview"), Icons.INFO, JLabel.LEFT);
		lblPreview.setToolTipText(Messages.getString("panel.preview.tooltip"));
		this.pnlPreview = new PreviewPanel(new Dimension(250, 225), null, null);
		layout.setAutoCreateGaps(true);
		layout.setHorizontalGroup(layout.createSequentialGroup()
				.addGroup(layout.createParallelGroup()
						.addComponent(lblFile)
						.addComponent(lblPreview))
				.addGroup(layout.createParallelGroup()
						.addGroup(layout.createSequentialGroup()
								.addComponent(this.txtFile)
								.addComponent(btnBrowse))
						.addComponent(this.pnlPreview)
						.addComponent(btnGenerate)));
		layout.setVerticalGroup(layout.createSequentialGroup()
				.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
						.addComponent(lblFile)
						.addComponent(this.txtFile, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
						.addComponent(btnBrowse, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
				.addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING)
						.addComponent(lblPreview)
						.addComponent(this.pnlPreview))
				.addComponent(btnGenerate, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE));
	}
	
	/* (non-Javadoc)
	 * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
	 */
	@Override
	public void actionPerformed(ActionEvent event) {
		if ("browse".equals(event.getActionCommand())) {
			// show a jfile chooser to choose a file
			JFileChooser fileBrowser = new JFileChooser();
			int option = fileBrowser.showOpenDialog(this);
			// check the option
			if (option == JFileChooser.APPROVE_OPTION) {
				// a file was chosen so we need to read it in and parse it
				File file = fileBrowser.getSelectedFile();
				// load it up
				BufferedReader br = null;
				try {
					List<Vector2> points = new ArrayList<Vector2>();
					br = new BufferedReader(new FileReader(file));
					String line;
					while ((line = br.readLine()) != null) {
						// check for comment line
						if (!line.startsWith("#") && !line.isEmpty()) {
							// split by any white space character
							String[] coords = line.split("\\s+");
							points.add(new Vector2(
									Double.parseDouble(coords[0]),
									Double.parseDouble(coords[1])));
						}
					}
					// create the polygon
					this.points = new Vector2[points.size()];
					points.toArray(this.points);
					try {
						this.polygon = new Polygon(this.hullGenerator.generate(this.points));
					} catch (IllegalArgumentException e) {
						// the polygon is not valid
						JOptionPane.showMessageDialog(this,
								MessageFormat.format(Messages.getString("panel.hull.invalid.message"), e.getMessage()),
								Messages.getString("panel.invalid.title"),
								JOptionPane.INFORMATION_MESSAGE);
						// set the current polygon to null
						this.polygon = null;
						this.points = null;
					}
					// set the preview panel to the new points
					this.pnlPreview.setHull(this.polygon, this.points);
					// set the text of the text field to the file path
					this.txtFile.setText(file.getAbsolutePath());
				} catch (NumberFormatException e) {
					// file data incorrect
					JOptionPane.showMessageDialog(this, Messages.getString("panel.pointFile.nonNumericValue"), Messages.getString("panel.invalid.title"), JOptionPane.ERROR_MESSAGE);
				} catch (ArrayIndexOutOfBoundsException e) {
					// file format not correct
					JOptionPane.showMessageDialog(this, Messages.getString("panel.pointFile.invalidFormat"), Messages.getString("panel.invalid.title"), JOptionPane.ERROR_MESSAGE);
				} catch (FileNotFoundException e) {
					// file not found
					JOptionPane.showMessageDialog(this, MessageFormat.format(Messages.getString("panel.fileNotFound"), file.getAbsolutePath()), Messages.getString("panel.invalid.title"), JOptionPane.ERROR_MESSAGE);
				} catch (IOException e) {
					// failure to read
					JOptionPane.showMessageDialog(this, Messages.getString("panel.ioError"), Messages.getString("panel.invalid.title"), JOptionPane.ERROR_MESSAGE);
				} finally {
					// FIX: previously the reader was only closed on the success path,
					// leaking the file handle whenever reading or parsing failed
					if (br != null) {
						try {
							br.close();
						} catch (IOException e) {
							// nothing more we can do about a failed close
						}
					}
				}
			}
		} else {
			SampleFileDialog.show(this, Messages.getString("panel.hull.sample"));
		}
	}
	
	/**
	 * Returns the hull generator currently being used.
	 * @return HullGenerator
	 */
	public HullGenerator getHullGenerator() {
		return this.hullGenerator;
	}
	
	/**
	 * Sets the hull generator currently being used.
	 * @param hullGenerator the hull generator
	 */
	public void setHullGenerator(HullGenerator hullGenerator) {
		this.hullGenerator = hullGenerator;
	}
	
	/* (non-Javadoc)
	 * @see org.dyn4j.sandbox.panels.ShapePanel#getShape()
	 */
	@Override
	public Convex getShape() {
		return this.polygon;
	}
	
	/* (non-Javadoc)
	 * @see org.dyn4j.sandbox.panels.InputPanel#isValidInput()
	 */
	@Override
	public boolean isValidInput() {
		// input is valid once a polygon was successfully built from a file
		return this.polygon != null;
	}
	
	/* (non-Javadoc)
	 * @see org.dyn4j.sandbox.panels.InputPanel#showInvalidInputMessage(java.awt.Window)
	 */
	@Override
	public void showInvalidInputMessage(Window owner) {
		if (!this.isValidInput()) {
			JOptionPane.showMessageDialog(this, Messages.getString("panel.hull.invalid"), Messages.getString("panel.invalid.title"), JOptionPane.ERROR_MESSAGE);
		}
	}
}
| |
package com.fsck.k9.mail.transport.smtp;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.SocketException;
import java.nio.charset.StandardCharsets;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Queue;
import android.support.annotation.VisibleForTesting;
import android.text.TextUtils;
import com.fsck.k9.mail.Address;
import com.fsck.k9.mail.AuthType;
import com.fsck.k9.mail.Authentication;
import com.fsck.k9.mail.AuthenticationFailedException;
import com.fsck.k9.mail.CertificateValidationException;
import com.fsck.k9.mail.ConnectionSecurity;
import com.fsck.k9.mail.K9MailLib;
import com.fsck.k9.mail.Message;
import com.fsck.k9.mail.Message.RecipientType;
import com.fsck.k9.mail.MessagingException;
import com.fsck.k9.mail.ServerSettings;
import com.fsck.k9.mail.ServerSettings.Type;
import com.fsck.k9.mail.Transport;
import com.fsck.k9.mail.TransportUris;
import com.fsck.k9.mail.filter.Base64;
import com.fsck.k9.mail.filter.EOLConvertingOutputStream;
import com.fsck.k9.mail.filter.LineWrapOutputStream;
import com.fsck.k9.mail.filter.PeekableInputStream;
import com.fsck.k9.mail.filter.SmtpDataStuffing;
import com.fsck.k9.mail.internet.CharsetSupport;
import com.fsck.k9.mail.oauth.OAuth2AuthorizationCodeFlowTokenProvider;
import com.fsck.k9.mail.oauth.OAuth2TokenProvider;
import com.fsck.k9.mail.oauth.XOAuth2ChallengeParser;
import com.fsck.k9.mail.ssl.TrustedSocketFactory;
import com.fsck.k9.mail.store.StoreConfig;
import javax.net.ssl.SSLException;
import org.apache.commons.io.IOUtils;
import timber.log.Timber;
import static com.fsck.k9.mail.CertificateValidationException.Reason.MissingCapability;
import static com.fsck.k9.mail.K9MailLib.DEBUG_PROTOCOL_SMTP;
public class SmtpTransport extends Transport {
private static final int SMTP_CONTINUE_REQUEST = 334;
private static final int SMTP_AUTHENTICATION_FAILURE_ERROR_CODE = 535;
private final TrustedSocketFactory trustedSocketFactory;
private final OAuth2TokenProvider oauthTokenProvider;
private final String host;
private final int port;
private final String username;
private final String password;
private final String clientCertificateAlias;
private final AuthType authType;
private final ConnectionSecurity connectionSecurity;
private Socket socket;
private PeekableInputStream inputStream;
private OutputStream outputStream;
private boolean is8bitEncodingAllowed;
private boolean isEnhancedStatusCodesProvided;
private int largestAcceptableMessage;
private boolean retryXoauthWithNewToken;
private boolean isPipeliningSupported;
private boolean shouldHideHostname;
/**
 * Creates a transport from the store's transport URI.
 *
 * @throws MessagingException if the transport URI cannot be decoded
 * @throws IllegalArgumentException if the URI does not describe an SMTP server
 */
public SmtpTransport(StoreConfig storeConfig, TrustedSocketFactory trustedSocketFactory,
        OAuth2TokenProvider oAuth2TokenProvider) throws MessagingException {
    ServerSettings serverSettings;
    try {
        serverSettings = TransportUris.decodeTransportUri(storeConfig.getTransportUri());
    } catch (IllegalArgumentException e) {
        throw new MessagingException("Error while decoding transport URI", e);
    }
    if (serverSettings.type != Type.SMTP) {
        throw new IllegalArgumentException("Expected SMTP StoreConfig!");
    }
    this.host = serverSettings.host;
    this.port = serverSettings.port;
    this.connectionSecurity = serverSettings.connectionSecurity;
    this.authType = serverSettings.authenticationType;
    this.username = serverSettings.username;
    this.password = serverSettings.password;
    this.clientCertificateAlias = serverSettings.clientCertificateAlias;
    this.trustedSocketFactory = trustedSocketFactory;
    this.shouldHideHostname = storeConfig.shouldHideHostname();
    this.oauthTokenProvider = oAuth2TokenProvider;
}
/**
 * Opens the SMTP connection: resolves the host, connects (directly or over
 * SSL/TLS, upgrading via STARTTLS if so configured), reads the EHLO/HELO
 * extension list, and authenticates using the configured {@link AuthType}.
 * On any failure the connection is closed before the exception propagates.
 *
 * @throws MessagingException on connection, TLS, or authentication failure
 */
@Override
public void open() throws MessagingException {
    try {
        boolean secureConnection = false;
        InetAddress[] addresses = InetAddress.getAllByName(host);
        // Try each resolved address in turn; only fail once all have been tried.
        for (int i = 0; i < addresses.length; i++) {
            try {
                SocketAddress socketAddress = new InetSocketAddress(addresses[i], port);
                if (connectionSecurity == ConnectionSecurity.SSL_TLS_REQUIRED) {
                    socket = trustedSocketFactory.createSocket(null, host, port, clientCertificateAlias);
                    socket.connect(socketAddress, SOCKET_CONNECT_TIMEOUT);
                    secureConnection = true;
                } else {
                    socket = new Socket();
                    socket.connect(socketAddress, SOCKET_CONNECT_TIMEOUT);
                }
            } catch (SocketException e) {
                if (i < (addresses.length - 1)) {
                    // there are still other addresses for that host to try
                    continue;
                }
                throw new MessagingException("Cannot connect to host", e);
            }
            break; // connection success
        }
        // RFC 1047
        socket.setSoTimeout(SOCKET_READ_TIMEOUT);
        inputStream = new PeekableInputStream(new BufferedInputStream(socket.getInputStream(), 1024));
        outputStream = new BufferedOutputStream(socket.getOutputStream(), 1024);
        // Eat the banner
        executeCommand(null);
        String hostnameToReportInHelo = buildHostnameToReport();
        Map<String, String> extensions = sendHello(hostnameToReportInHelo);
        // Record the capabilities advertised in the EHLO response.
        is8bitEncodingAllowed = extensions.containsKey("8BITMIME");
        isEnhancedStatusCodesProvided = extensions.containsKey("ENHANCEDSTATUSCODES");
        isPipeliningSupported = extensions.containsKey("PIPELINING");
        if (connectionSecurity == ConnectionSecurity.STARTTLS_REQUIRED) {
            if (extensions.containsKey("STARTTLS")) {
                executeCommand("STARTTLS");
                // Wrap the existing plain socket in TLS and rebuild the streams.
                socket = trustedSocketFactory.createSocket(
                        socket,
                        host,
                        port,
                        clientCertificateAlias);
                inputStream = new PeekableInputStream(new BufferedInputStream(socket.getInputStream(),
                        1024));
                outputStream = new BufferedOutputStream(socket.getOutputStream(), 1024);
                /*
                 * Now resend the EHLO. Required by RFC2487 Sec. 5.2, and more specifically,
                 * Exim.
                 */
                extensions = sendHello(hostnameToReportInHelo);
                secureConnection = true;
            } else {
                /*
                 * This exception triggers a "Certificate error"
                 * notification that takes the user to the incoming
                 * server settings for review. This might be needed if
                 * the account was configured with an obsolete
                 * "STARTTLS (if available)" setting.
                 */
                throw new CertificateValidationException(
                        "STARTTLS connection security not available");
            }
        }
        // Parse the SASL mechanisms offered via the AUTH extension.
        boolean authLoginSupported = false;
        boolean authPlainSupported = false;
        boolean authCramMD5Supported = false;
        boolean authExternalSupported = false;
        boolean authXoauth2Supported = false;
        if (extensions.containsKey("AUTH")) {
            List<String> saslMech = Arrays.asList(extensions.get("AUTH").split(" "));
            authLoginSupported = saslMech.contains("LOGIN");
            authPlainSupported = saslMech.contains("PLAIN");
            authCramMD5Supported = saslMech.contains("CRAM-MD5");
            authExternalSupported = saslMech.contains("EXTERNAL");
            authXoauth2Supported = saslMech.contains("XOAUTH2");
        }
        parseOptionalSizeValue(extensions);
        // Authenticate only when credentials (or a credential-less auth type) are configured.
        if (!TextUtils.isEmpty(username)
                && (!TextUtils.isEmpty(password) ||
                AuthType.EXTERNAL == authType ||
                AuthType.XOAUTH2 == authType)) {
            switch (authType) {
                /*
                 * LOGIN is an obsolete option which is unavailable to users,
                 * but it still may exist in a user's settings from a previous
                 * version, or it may have been imported.
                 */
                case LOGIN:
                case PLAIN:
                    // try saslAuthPlain first, because it supports UTF-8 explicitly
                    if (authPlainSupported) {
                        saslAuthPlain();
                    } else if (authLoginSupported) {
                        saslAuthLogin();
                    } else {
                        throw new MessagingException(
                                "Authentication methods SASL PLAIN and LOGIN are unavailable.");
                    }
                    break;
                case CRAM_MD5:
                    if (authCramMD5Supported) {
                        saslAuthCramMD5();
                    } else {
                        throw new MessagingException("Authentication method CRAM-MD5 is unavailable.");
                    }
                    break;
                case XOAUTH2:
                    if (authXoauth2Supported && oauthTokenProvider != null) {
                        saslXoauth2();
                    } else {
                        throw new MessagingException("Authentication method XOAUTH2 is unavailable.");
                    }
                    break;
                case EXTERNAL:
                    if (authExternalSupported) {
                        saslAuthExternal();
                    } else {
                        /*
                         * Some SMTP servers are known to provide no error
                         * indication when a client certificate fails to
                         * validate, other than to not offer the AUTH EXTERNAL
                         * capability.
                         *
                         * So, we treat it is an error to not offer AUTH
                         * EXTERNAL when using client certificates. That way, the
                         * user can be notified of a problem during account setup.
                         */
                        throw new CertificateValidationException(MissingCapability);
                    }
                    break;
                /*
                 * AUTOMATIC is an obsolete option which is unavailable to users,
                 * but it still may exist in a user's settings from a previous
                 * version, or it may have been imported.
                 */
                case AUTOMATIC:
                    if (secureConnection) {
                        // try saslAuthPlain first, because it supports UTF-8 explicitly
                        if (authPlainSupported) {
                            saslAuthPlain();
                        } else if (authLoginSupported) {
                            saslAuthLogin();
                        } else if (authCramMD5Supported) {
                            saslAuthCramMD5();
                        } else {
                            throw new MessagingException("No supported authentication methods available.");
                        }
                    } else {
                        if (authCramMD5Supported) {
                            saslAuthCramMD5();
                        } else {
                            /*
                             * We refuse to insecurely transmit the password
                             * using the obsolete AUTOMATIC setting because of
                             * the potential for a MITM attack. Affected users
                             * must choose a different setting.
                             */
                            throw new MessagingException(
                                    "Update your outgoing server authentication setting. AUTOMATIC auth. is unavailable.");
                        }
                    }
                    break;
                default:
                    throw new MessagingException(
                            "Unhandled authentication method found in the server settings (bug).");
            }
        }
    } catch (MessagingException e) {
        close();
        throw e;
    } catch (SSLException e) {
        close();
        throw new CertificateValidationException(e.getMessage(), e);
    } catch (GeneralSecurityException gse) {
        close();
        throw new MessagingException(
                "Unable to open connection to SMTP server due to security error.", gse);
    } catch (IOException ioe) {
        close();
        throw new MessagingException("Unable to open connection to SMTP server.", ioe);
    }
}
/**
 * Chooses the identity string sent in EHLO/HELO: "localhost" when hostname
 * hiding is enabled, otherwise the local FQDN, falling back to an IP literal
 * or "android" when no usable hostname is available.
 */
private String buildHostnameToReport() {
    if (shouldHideHostname) {
        return "localhost";
    }
    InetAddress localAddress = socket.getLocalAddress();
    String hostname = getCanonicalHostName(localAddress);
    String ip = getHostAddress(localAddress);
    boolean hostnameUsable = !hostname.equals("") && !hostname.equals(ip) && !hostname.contains("_");
    if (hostnameUsable) {
        return hostname;
    }
    // We don't have a FQDN or the hostname contains invalid
    // characters (see issue 2143), so use IP address.
    if (ip.equals("")) {
        // If the IP address is no good, set a sane default
        return "android";
    }
    return (localAddress instanceof Inet6Address) ? "[IPv6:" + ip + "]" : "[" + ip + "]";
}
/**
 * Records the server's advertised SIZE limit (if any) into
 * {@code largestAcceptableMessage}; unparsable values are logged and ignored.
 */
private void parseOptionalSizeValue(Map<String, String> extensions) {
    String sizeParameter = extensions.get("SIZE");
    if (sizeParameter == null || sizeParameter.isEmpty()) {
        return;
    }
    try {
        largestAcceptableMessage = Integer.parseInt(sizeParameter);
    } catch (NumberFormatException e) {
        if (K9MailLib.isDebug() && DEBUG_PROTOCOL_SMTP) {
            Timber.d(e, "Tried to parse %s and get an int", sizeParameter);
        }
    }
}
/**
 * Send the client "identity" using the EHLO or HELO command.
 *
 * <p>
 * EHLO is tried first; if the server rejects it, the older HELO command is
 * used instead. If HELO fails as well, the failure is logged and ignored.
 * </p>
 *
 * @param host
 *         The EHLO/HELO parameter as defined by the RFC.
 *
 * @return A (possibly empty) {@code Map<String,String>} of extension keywords
 *         (upper case) to their parameters (possibly 0 length) as returned by
 *         the EHLO command
 *
 * @throws IOException
 *         In case of a network error.
 * @throws MessagingException
 *         In case of a malformed response.
 */
private Map<String, String> sendHello(String host) throws IOException, MessagingException {
    Map<String, String> extensions = new HashMap<>();
    try {
        List<String> lines = executeCommand("EHLO %s", host).results;
        // Drop the greeting line; the remaining lines are extension keywords.
        lines.remove(0);
        for (String line : lines) {
            String[] keywordAndParams = line.split(" ", 2);
            String keyword = keywordAndParams[0].toUpperCase(Locale.US);
            extensions.put(keyword, keywordAndParams.length == 1 ? "" : keywordAndParams[1]);
        }
    } catch (NegativeSmtpReplyException e) {
        if (K9MailLib.isDebug()) {
            Timber.v("Server doesn't support the EHLO command. Trying HELO...");
        }
        try {
            executeCommand("HELO %s", host);
        } catch (NegativeSmtpReplyException e2) {
            Timber.w("Server doesn't support the HELO command. Continuing anyway.");
        }
    }
    return extensions;
}
/**
 * Sends the message to all TO/CC/BCC recipients, stripping the BCC header
 * first and transmitting one copy per address-charset group.
 */
@Override
public void sendMessage(Message message) throws MessagingException {
    List<Address> recipients = new ArrayList<>();
    recipients.addAll(Arrays.asList(message.getRecipients(RecipientType.TO)));
    recipients.addAll(Arrays.asList(message.getRecipients(RecipientType.CC)));
    recipients.addAll(Arrays.asList(message.getRecipients(RecipientType.BCC)));
    // Remove the BCC header before the message body is transmitted.
    message.setRecipients(RecipientType.BCC, null);
    // Group recipient addresses by the charset needed to encode them.
    Map<String, List<String>> addressesByCharset = new HashMap<>();
    for (Address recipient : recipients) {
        String addressString = recipient.getAddress();
        String charset = CharsetSupport.getCharsetFromAddress(addressString);
        List<String> bucket = addressesByCharset.get(charset);
        if (bucket == null) {
            bucket = new ArrayList<>();
            addressesByCharset.put(charset, bucket);
        }
        bucket.add(addressString);
    }
    // Send one copy of the message per charset group.
    for (Map.Entry<String, List<String>> entry : addressesByCharset.entrySet()) {
        message.setCharset(entry.getKey());
        sendMessageTo(entry.getValue(), message);
    }
}
/**
 * Sends {@code message} to the given addresses over a freshly opened
 * connection; the connection is always closed afterwards. Uses PIPELINING
 * for the envelope commands when the server advertised it.
 *
 * @throws MessagingException if the server rejects the message, the message
 *         exceeds the server's advertised SIZE limit, or an I/O error occurs;
 *         the failure is marked permanent once the message body was sent.
 */
private void sendMessageTo(List<String> addresses, Message message)
        throws MessagingException {
    close();
    open();
    // If the message has attachments and our server has told us about a limit on
    // the size of messages, count the message's size before sending it
    if (largestAcceptableMessage > 0 && message.hasAttachments()) {
        if (message.calculateSize() > largestAcceptableMessage) {
            throw new MessagingException("Message too large for server", true);
        }
    }
    boolean entireMessageSent = false;
    try {
        String mailFrom = constructSmtpMailFromCommand(message.getFrom(), is8bitEncodingAllowed);
        if (isPipeliningSupported) {
            // Batch MAIL FROM / RCPT TO / DATA into one round trip.
            Queue<String> pipelinedCommands = new LinkedList<>();
            pipelinedCommands.add(mailFrom);
            for (String address : addresses) {
                pipelinedCommands.add(String.format("RCPT TO:<%s>", address));
            }
            pipelinedCommands.add("DATA");
            executePipelinedCommands(pipelinedCommands);
            readPipelinedResponse(pipelinedCommands);
        } else {
            executeCommand(mailFrom);
            for (String address : addresses) {
                executeCommand("RCPT TO:<%s>", address);
            }
            executeCommand("DATA");
        }
        // Body goes through dot-stuffing, line wrapping at 1000 chars and EOL conversion.
        EOLConvertingOutputStream msgOut = new EOLConvertingOutputStream(
                new LineWrapOutputStream(new SmtpDataStuffing(outputStream), 1000));
        message.writeTo(msgOut);
        msgOut.endWithCrLfAndFlush();
        entireMessageSent = true; // After the "\r\n." is attempted, we may have sent the message
        executeCommand(".");
    } catch (NegativeSmtpReplyException e) {
        throw e;
    } catch (Exception e) {
        MessagingException me = new MessagingException("Unable to send message", e);
        me.setPermanentFailure(entireMessageSent);
        throw me;
    } finally {
        close();
    }
}
/**
 * Builds the MAIL FROM command for the first sender address, requesting
 * BODY=8BITMIME when the server advertised 8-bit support.
 */
private static String constructSmtpMailFromCommand(Address[] from, boolean is8bitEncodingAllowed) {
    String fromAddress = from[0].getAddress();
    if (!is8bitEncodingAllowed) {
        Timber.d("Server does not support 8bit transfer encoding");
        return String.format("MAIL FROM:<%s>", fromAddress);
    }
    return String.format("MAIL FROM:<%s> BODY=8BITMIME", fromAddress);
}
/**
 * Closes the connection: sends a best-effort QUIT, then quietly releases
 * the streams and socket.
 */
@Override
public void close() {
    try {
        executeCommand("QUIT");
    } catch (Exception ignored) {
        // Best effort only - the connection is being torn down regardless.
    }
    IOUtils.closeQuietly(inputStream);
    IOUtils.closeQuietly(outputStream);
    IOUtils.closeQuietly(socket);
    inputStream = null;
    outputStream = null;
    socket = null;
}
/**
 * Reads one server line up to LF, dropping CR characters, and logs it when
 * protocol debugging is enabled. End-of-stream also terminates the line.
 */
private String readLine() throws IOException {
    StringBuilder lineBuffer = new StringBuilder();
    for (int read = inputStream.read(); read != -1; read = inputStream.read()) {
        char c = (char) read;
        if (c == '\n') {
            break;
        }
        if (c != '\r') {
            lineBuffer.append(c);
        }
    }
    String line = lineBuffer.toString();
    if (K9MailLib.isDebug() && DEBUG_PROTOCOL_SMTP) {
        Timber.d("SMTP <<< %s", line);
    }
    return line;
}
/**
 * Sends one command line (CRLF appended) to the server, hiding the payload
 * from the debug log when {@code sensitive} is set and sensitive debugging
 * is disabled.
 */
private void writeLine(String s, boolean sensitive) throws IOException {
    if (K9MailLib.isDebug() && DEBUG_PROTOCOL_SMTP) {
        final String commandToLog;
        if (sensitive && !K9MailLib.isDebugSensitive()) {
            commandToLog = "SMTP >>> *sensitive*";
        } else {
            commandToLog = "SMTP >>> " + s;
        }
        Timber.d(commandToLog);
    }
    // Encode explicitly instead of relying on the platform default charset,
    // so the bytes on the wire do not depend on the runtime's locale setup.
    byte[] data = s.concat("\r\n").getBytes(StandardCharsets.UTF_8);
    /*
     * Important: Send command + CRLF using just one write() call. Using
     * multiple calls will likely result in multiple TCP packets and some
     * SMTP servers misbehave if CR and LF arrive in separate packets.
     * See issue 799.
     */
    outputStream.write(data);
    outputStream.flush();
}
/** Immutable holder for a parsed SMTP reply: numeric code plus text lines. */
private static class CommandResponse {
    // Three-digit SMTP reply code, or -1 if it could not be parsed.
    private final int replyCode;
    // The reply's text lines with the "NNN-"/"NNN " prefix stripped.
    private final List<String> results;
    CommandResponse(int replyCode, List<String> results) {
        this.replyCode = replyCode;
        this.results = results;
    }
}
/**
 * Executes a command whose payload must not appear in the debug log
 * (passwords, auth tokens).
 */
private CommandResponse executeSensitiveCommand(String format, Object... args)
        throws IOException, MessagingException {
    return executeCommand(true, format, args);
}
/** Executes a command whose text may be logged verbatim during debugging. */
private CommandResponse executeCommand(String format, Object... args) throws IOException, MessagingException {
    return executeCommand(false, format, args);
}
/**
 * Sends a single command (unless {@code format} is null, in which case only
 * a response is read, e.g. for the connection banner) and parses the reply.
 *
 * @throws MessagingException for empty replies and for 4xx/5xx reply codes.
 */
private CommandResponse executeCommand(boolean sensitive, String format, Object... args)
        throws IOException, MessagingException {
    List<String> results = new ArrayList<>();
    if (format != null) {
        String command = String.format(Locale.ROOT, format, args);
        writeLine(command, sensitive);
    }
    String line = readCommandResponseLine(results);
    // Delegate to the shared reply parser so the pipelined and non-pipelined
    // paths interpret replies identically (previously duplicated inline).
    return responseLineToCommandResponse(line, results);
}
/**
 * Builds an exception carrying the RFC 3463 enhanced status code (class,
 * subject, detail) parsed from the first reply line, plus the concatenated
 * human-readable text of all lines. Malformed lines (no space separator, or
 * a status code without three dot-separated parts) no longer crash with
 * ArrayIndexOutOfBoundsException; the corresponding fields stay null/empty.
 */
private MessagingException buildEnhancedNegativeSmtpReplyException(int replyCode, List<String> results) {
    StatusCodeClass statusCodeClass = null;
    StatusCodeSubject statusCodeSubject = null;
    StatusCodeDetail statusCodeDetail = null;
    StringBuilder messageBuilder = new StringBuilder();
    for (String resultLine : results) {
        // Each line is "<status-code> <text>"; tolerate lines without a text part.
        String[] parts = resultLine.split(" ", 2);
        if (parts.length > 1) {
            messageBuilder.append(parts[1]);
        }
        messageBuilder.append(' ');
    }
    if (results.size() > 0) {
        String[] statusCodeParts = results.get(0).split(" ", 2)[0].split("\\.");
        if (statusCodeParts.length == 3) {
            statusCodeClass = StatusCodeClass.parse(statusCodeParts[0]);
            statusCodeSubject = StatusCodeSubject.parse(statusCodeParts[1]);
            statusCodeDetail = StatusCodeDetail.parse(statusCodeSubject, statusCodeParts[2]);
        }
    }
    return new EnhancedNegativeSmtpReplyException(replyCode, statusCodeClass, statusCodeSubject, statusCodeDetail,
            messageBuilder.toString().trim());
}
/*
 * Read lines as long as the length is 4 or larger, e.g. "220-banner text here".
 * Shorter lines are either errors or contain only a reply code.
 */
private String readCommandResponseLine(List<String> results) throws IOException {
    String line;
    for (line = readLine(); line.length() >= 4; line = readLine()) {
        if (line.length() > 4) {
            // Text after the 4-character "NNN-"/"NNN " prefix goes into results.
            results.add(line.substring(4));
        }
        if (line.charAt(3) != '-') {
            // A space (not '-') after the code marks the final line of the reply.
            break;
        }
    }
    return line;
}
/** Writes every queued command to the server without waiting for responses. */
private void executePipelinedCommands(Queue<String> pipelinedCommands) throws IOException {
    for (String pipelinedCommand : pipelinedCommands) {
        writeLine(pipelinedCommand, false);
    }
}
/**
 * Reads one response per previously pipelined command, in order. A negative
 * reply to DATA aborts immediately; a negative reply to an RCPT command is
 * remembered while the remaining responses are drained, after which the
 * DATA phase is terminated with "." and the recipient failure is rethrown.
 */
private void readPipelinedResponse(Queue<String> pipelinedCommands) throws IOException, MessagingException {
    String responseLine;
    List<String> results = new ArrayList<>();
    NegativeSmtpReplyException negativeRecipient = null;
    for (String command : pipelinedCommands) {
        results.clear();
        responseLine = readCommandResponseLine(results);
        try {
            responseLineToCommandResponse(responseLine, results);
        } catch (MessagingException exception) {
            if (command.equals("DATA")) {
                throw exception;
            }
            if (command.startsWith("RCPT")) {
                // Remember a failed recipient but keep draining responses.
                negativeRecipient = (NegativeSmtpReplyException) exception;
            }
        }
    }
    if (negativeRecipient != null) {
        try {
            // Terminate the DATA phase before reporting the recipient failure.
            executeCommand(".");
            throw negativeRecipient;
        } catch (NegativeSmtpReplyException e) {
            throw negativeRecipient;
        }
    }
}
/**
 * Parses a final SMTP reply line (plus the accumulated multi-line results)
 * into a {@link CommandResponse}.
 *
 * @throws MessagingException for an empty reply, or a
 *         {@link NegativeSmtpReplyException} (enhanced when the server
 *         advertised ENHANCEDSTATUSCODES) for 4xx/5xx replies.
 */
private CommandResponse responseLineToCommandResponse(String line, List<String> results) throws MessagingException {
    int length = line.length();
    if (length < 1) {
        throw new MessagingException("SMTP response to line is 0 length");
    }
    // The reply code is the leading three digits; -1 if unparsable.
    int replyCode = -1;
    if (length >= 3) {
        try {
            replyCode = Integer.parseInt(line.substring(0, 3));
        } catch (NumberFormatException e) { /* ignore */ }
    }
    // Leading '4' = transient failure, '5' = permanent failure.
    char replyCodeCategory = line.charAt(0);
    boolean isReplyCodeErrorCategory = (replyCodeCategory == '4') || (replyCodeCategory == '5');
    if (isReplyCodeErrorCategory) {
        if (isEnhancedStatusCodesProvided) {
            throw buildEnhancedNegativeSmtpReplyException(replyCode, results);
        } else {
            String replyText = TextUtils.join(" ", results);
            throw new NegativeSmtpReplyException(replyCode, replyText);
        }
    }
    return new CommandResponse(replyCode, results);
}
/**
 * Authenticates via the obsolete SASL LOGIN mechanism (base64 username then
 * password on separate lines). A 535 reply becomes an
 * {@link AuthenticationFailedException}.
 */
private void saslAuthLogin() throws MessagingException, IOException {
    try {
        executeCommand("AUTH LOGIN");
        executeSensitiveCommand(Base64.encode(username));
        executeSensitiveCommand(Base64.encode(password));
    } catch (NegativeSmtpReplyException exception) {
        if (exception.getReplyCode() == SMTP_AUTHENTICATION_FAILURE_ERROR_CODE) {
            // Preserve the server reply as the cause (was previously dropped),
            // consistent with saslAuthCramMD5().
            throw new AuthenticationFailedException(
                    "AUTH LOGIN failed (" + exception.getMessage() + ")", exception);
        } else {
            throw exception;
        }
    }
}
/**
 * Authenticates via SASL PLAIN: one base64 blob of NUL + username + NUL +
 * password. A 535 reply becomes an {@link AuthenticationFailedException}.
 */
private void saslAuthPlain() throws MessagingException, IOException {
    String data = Base64.encode("\000" + username + "\000" + password);
    try {
        executeSensitiveCommand("AUTH PLAIN %s", data);
    } catch (NegativeSmtpReplyException exception) {
        if (exception.getReplyCode() == SMTP_AUTHENTICATION_FAILURE_ERROR_CODE) {
            // Preserve the server reply as the cause (was previously dropped),
            // consistent with saslAuthCramMD5().
            throw new AuthenticationFailedException("AUTH PLAIN failed ("
                    + exception.getMessage() + ")", exception);
        } else {
            throw exception;
        }
    }
}
/**
 * Authenticates via SASL CRAM-MD5: reads the server's base64 nonce and
 * replies with the computed digest. A 535 reply becomes an
 * {@link AuthenticationFailedException} with the server reply as cause.
 */
private void saslAuthCramMD5() throws MessagingException, IOException {
    List<String> challengeLines = executeCommand("AUTH CRAM-MD5").results;
    if (challengeLines.size() != 1) {
        throw new MessagingException("Unable to negotiate CRAM-MD5");
    }
    String base64Nonce = challengeLines.get(0);
    String base64Response = Authentication.computeCramMd5(username, password, base64Nonce);
    try {
        executeSensitiveCommand(base64Response);
    } catch (NegativeSmtpReplyException exception) {
        if (exception.getReplyCode() != SMTP_AUTHENTICATION_FAILURE_ERROR_CODE) {
            throw exception;
        }
        throw new AuthenticationFailedException(exception.getMessage(), exception);
    }
}
    /**
     * Authenticates using XOAUTH2. On an authentication failure the cached token is
     * invalidated and, depending on the server's challenge (parsed during the attempt,
     * see attemptXoauth2), the failure is treated as temporary (retry with a fresh
     * token) or permanent.
     */
    private void saslXoauth2() throws MessagingException, IOException {
        // Default to retrying; attemptXoauth2 may clear this based on the server's challenge.
        retryXoauthWithNewToken = true;
        try {
            attemptXoauth2(username);
        } catch (NegativeSmtpReplyException negativeResponse) {
            // Non-authentication failures are propagated unchanged.
            if (negativeResponse.getReplyCode() != SMTP_AUTHENTICATION_FAILURE_ERROR_CODE) {
                throw negativeResponse;
            }
            // The token was rejected; drop it from the provider's cache either way.
            oauthTokenProvider.invalidateToken(username);
            if (!retryXoauthWithNewToken) {
                handlePermanentFailure(negativeResponse);
            } else {
                handleTemporaryFailure(username, negativeResponse);
            }
        }
    }
    /**
     * Converts a rejected XOAUTH2 attempt into an {@link AuthenticationFailedException},
     * preserving the server's negative reply as the cause. Always throws.
     */
    private void handlePermanentFailure(NegativeSmtpReplyException negativeResponse) throws AuthenticationFailedException {
        throw new AuthenticationFailedException(negativeResponse.getMessage(), negativeResponse);
    }
    /**
     * Retries XOAUTH2 once with a freshly fetched token after the first token was
     * rejected. If the retry is also rejected with an authentication failure, the
     * failure is treated as permanent.
     */
    private void handleTemporaryFailure(String username, NegativeSmtpReplyException negativeResponseFromOldToken)
            throws IOException, MessagingException {
        // Token was invalid.
        // We could avoid this double check if we had a reasonable chance of knowing
        // if a token was invalid before use (e.g. due to expiry). But we don't.
        // This is the intended behaviour per AccountManager.
        Timber.v(negativeResponseFromOldToken, "Authentication exception, re-trying with new token");
        try {
            attemptXoauth2(username);
        } catch (NegativeSmtpReplyException negativeResponseFromNewToken) {
            // Non-authentication failures are propagated unchanged.
            if (negativeResponseFromNewToken.getReplyCode() != SMTP_AUTHENTICATION_FAILURE_ERROR_CODE) {
                throw negativeResponseFromNewToken;
            }
            // Okay, we failed on a new token.
            // Invalidate the token anyway but assume it's permanent.
            Timber.v(negativeResponseFromNewToken, "Authentication exception for new token, permanent error assumed");
            oauthTokenProvider.invalidateToken(username);
            handlePermanentFailure(negativeResponseFromNewToken);
        }
    }
    /**
     * Performs a single AUTH XOAUTH2 exchange with the current token for the user.
     * If the server responds with a continue request (an error challenge in the
     * XOAUTH2 protocol), the challenge is parsed to decide whether a retry with a
     * new token could help, and the mandatory empty response is sent.
     */
    private void attemptXoauth2(String username) throws MessagingException, IOException {
        String token = oauthTokenProvider.getToken(username, OAuth2AuthorizationCodeFlowTokenProvider.OAUTH2_TIMEOUT);
        String authString = Authentication.computeXoauth(username, token);
        CommandResponse response = executeSensitiveCommand("AUTH XOAUTH2 %s", authString);
        if (response.replyCode == SMTP_CONTINUE_REQUEST) {
            String replyText = TextUtils.join("", response.results);
            // The challenge body determines whether a fresh token is worth trying.
            retryXoauthWithNewToken = XOAuth2ChallengeParser.shouldRetry(replyText, host);
            // Per Google spec, respond to challenge with empty response.
            executeCommand("");
        }
    }
    /**
     * Authenticates using the SASL EXTERNAL mechanism (identity established outside
     * of SMTP, e.g. by a client TLS certificate); only the username is sent.
     */
    private void saslAuthExternal() throws MessagingException, IOException {
        executeCommand("AUTH EXTERNAL %s", Base64.encode(username));
    }
    // Test seam: overridable wrapper so tests can stub out reverse-DNS resolution.
    @VisibleForTesting
    protected String getCanonicalHostName(InetAddress localAddress) {
        return localAddress.getCanonicalHostName();
    }
    // Test seam: overridable wrapper so tests can stub out the literal IP address.
    @VisibleForTesting
    protected String getHostAddress(InetAddress localAddress) {
        return localAddress.getHostAddress();
    }
}
| |
/*
* Copyright 2015 herd contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finra.herd.service.activiti.task;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.JAXBException;
import org.activiti.bpmn.model.FieldExtension;
import org.activiti.engine.history.HistoricVariableInstance;
import org.activiti.engine.runtime.ProcessInstance;
import org.junit.Assert;
import org.junit.Test;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.finra.herd.dao.impl.MockJdbcOperations;
import org.finra.herd.model.api.xml.JdbcExecutionRequest;
import org.finra.herd.model.api.xml.JdbcExecutionResponse;
import org.finra.herd.model.api.xml.JdbcStatement;
import org.finra.herd.model.api.xml.JdbcStatementStatus;
import org.finra.herd.model.api.xml.Parameter;
import org.finra.herd.service.activiti.ActivitiRuntimeHelper;
/**
 * Tests for the {@code ExecuteJdbc} Activiti service task: success, request-validation
 * failure, statement-level SQL failure, and asynchronous execution via a receive task.
 */
public class ExecuteJdbcTest extends HerdActivitiServiceTaskTest
{
    private static final String JAVA_DELEGATE_CLASS_NAME = ExecuteJdbc.class.getCanonicalName();

    /**
     * Asserts that a successful JDBC execution puts the expected JSON response into the
     * workflow variables.
     */
    @Test
    public void testExecuteJdbcSuccess()
    {
        JdbcExecutionRequest jdbcExecutionRequest = createDefaultUpdateJdbcExecutionRequest();
        List<FieldExtension> fieldExtensionList = new ArrayList<>();
        List<Parameter> parameters = new ArrayList<>();
        populateParameters(jdbcExecutionRequest, fieldExtensionList, parameters);
        try
        {
            JdbcExecutionResponse expectedJdbcExecutionResponse = new JdbcExecutionResponse();
            expectedJdbcExecutionResponse.setStatements(jdbcExecutionRequest.getStatements());
            expectedJdbcExecutionResponse.getStatements().get(0).setStatus(JdbcStatementStatus.SUCCESS);
            expectedJdbcExecutionResponse.getStatements().get(0).setResult("1");
            String expectedJdbcExecutionResponseJson = jsonHelper.objectToJson(expectedJdbcExecutionResponse);
            Map<String, Object> variableValuesToValidate = new HashMap<>();
            variableValuesToValidate.put(BaseJavaDelegate.VARIABLE_JSON_RESPONSE, expectedJdbcExecutionResponseJson);
            testActivitiServiceTaskSuccess(JAVA_DELEGATE_CLASS_NAME, fieldExtensionList, parameters, variableValuesToValidate);
        }
        catch (Exception e)
        {
            // Fail with the exception details so the cause shows up in the test report
            // instead of being printed to stderr and lost (Assert.fail() without a
            // message hides why the test failed).
            Assert.fail("Unexpected exception: " + e);
        }
    }

    /**
     * Asserts that a request with a missing JDBC connection fails validation and surfaces
     * the validation message in the workflow error variable.
     */
    @Test
    public void testExecuteJdbcErrorValidation()
    {
        JdbcExecutionRequest jdbcExecutionRequest = createDefaultUpdateJdbcExecutionRequest();
        jdbcExecutionRequest.setConnection(null);
        List<FieldExtension> fieldExtensionList = new ArrayList<>();
        List<Parameter> parameters = new ArrayList<>();
        populateParameters(jdbcExecutionRequest, fieldExtensionList, parameters);
        try
        {
            Map<String, Object> variableValuesToValidate = new HashMap<>();
            variableValuesToValidate.put(BaseJavaDelegate.VARIABLE_JSON_RESPONSE, VARIABLE_VALUE_IS_NULL);
            variableValuesToValidate.put(ActivitiRuntimeHelper.VARIABLE_ERROR_MESSAGE, "JDBC connection is required");
            testActivitiServiceTaskFailure(JAVA_DELEGATE_CLASS_NAME, fieldExtensionList, parameters, variableValuesToValidate);
        }
        catch (Exception e)
        {
            // Fail with the exception details so the cause shows up in the test report.
            Assert.fail("Unexpected exception: " + e);
        }
    }

    /**
     * Asserts that a statement which fails at the SQL level is reported as an ERROR in the
     * JSON response and that the task itself is marked as failed.
     */
    @Test
    public void testExecuteJdbcErrorStatement()
    {
        JdbcExecutionRequest jdbcExecutionRequest = createDefaultUpdateJdbcExecutionRequest();
        jdbcExecutionRequest.getStatements().get(0).setSql(MockJdbcOperations.CASE_2_SQL);
        List<FieldExtension> fieldExtensionList = new ArrayList<>();
        List<Parameter> parameters = new ArrayList<>();
        populateParameters(jdbcExecutionRequest, fieldExtensionList, parameters);
        try
        {
            JdbcExecutionResponse expectedJdbcExecutionResponse = new JdbcExecutionResponse();
            expectedJdbcExecutionResponse.setStatements(jdbcExecutionRequest.getStatements());
            expectedJdbcExecutionResponse.getStatements().get(0).setStatus(JdbcStatementStatus.ERROR);
            expectedJdbcExecutionResponse.getStatements().get(0).setErrorMessage("java.sql.SQLException: test DataIntegrityViolationException cause");
            String expectedJdbcExecutionResponseString = jsonHelper.objectToJson(expectedJdbcExecutionResponse);
            Map<String, Object> variableValuesToValidate = new HashMap<>();
            variableValuesToValidate.put(BaseJavaDelegate.VARIABLE_JSON_RESPONSE, expectedJdbcExecutionResponseString);
            variableValuesToValidate.put(ActivitiRuntimeHelper.VARIABLE_ERROR_MESSAGE, "There are failed executions. See JSON response for details.");
            testActivitiServiceTaskFailure(JAVA_DELEGATE_CLASS_NAME, fieldExtensionList, parameters, variableValuesToValidate);
        }
        catch (Exception e)
        {
            // Fail with the exception details so the cause shows up in the test report.
            Assert.fail("Unexpected exception: " + e);
        }
    }

    /**
     * Asserts that the task executes asynchronously when receiveTaskId is specified.
     * <p/>
     * This is a very special test case which involves multithreading and transactions, therefore we cannot use the standard test methods we have. The
     * transaction MUST BE DISABLED for this test to work correctly - since we have 2 threads which both access the database, if we run transactionally, the
     * threads cannot share information.
     * <p/>
     * TODO this test could be made generic once we have async support for other tasks.
     */
    @Test
    @Transactional(propagation = Propagation.NOT_SUPPORTED)
    public void testExecuteJdbcWithReceiveTask() throws Exception
    {
        // Read workflow XML from classpath and deploy it.
        activitiRepositoryService.createDeployment()
            .addClasspathResource("org/finra/herd/service/testActivitiWorkflowExecuteJdbcTaskWithReceiveTask.bpmn20.xml").deploy();
        JdbcExecutionRequest jdbcExecutionRequest = createDefaultUpdateJdbcExecutionRequest();
        // Set workflow variables.
        Map<String, Object> variables = new HashMap<>();
        variables.put("contentType", "xml");
        variables.put("jdbcExecutionRequest", xmlHelper.objectToXml(jdbcExecutionRequest));
        // Execute workflow.
        ProcessInstance processInstance = activitiRuntimeService.startProcessInstanceByKey("test", variables);
        // Wait for the process to finish.
        waitUntilAllProcessCompleted();
        // Assert output.
        Map<String, Object> outputVariables = getProcessInstanceHistoryVariables(processInstance);
        JdbcExecutionResponse expectedJdbcExecutionResponse = new JdbcExecutionResponse();
        JdbcStatement originalJdbcStatement = jdbcExecutionRequest.getStatements().get(0);
        JdbcStatement expectedJdbcStatement = new JdbcStatement();
        expectedJdbcStatement.setType(originalJdbcStatement.getType());
        expectedJdbcStatement.setSql(originalJdbcStatement.getSql());
        expectedJdbcStatement.setStatus(JdbcStatementStatus.SUCCESS);
        expectedJdbcStatement.setResult("1");
        expectedJdbcExecutionResponse.setStatements(Arrays.asList(expectedJdbcStatement));
        String actualJdbcExecutionResponseString = (String) outputVariables.get("service_jsonResponse");
        JdbcExecutionResponse actualJdbcExecutionResponse = jsonHelper.unmarshallJsonToObject(JdbcExecutionResponse.class, actualJdbcExecutionResponseString);
        Assert.assertEquals("service_jsonResponse", expectedJdbcExecutionResponse, actualJdbcExecutionResponse);
        Assert.assertEquals("service_taskStatus", "SUCCESS", outputVariables.get("service_taskStatus"));
    }

    /**
     * Retrieves the historic instance variables of the given process instance.
     *
     * @param processInstance The process instance which owns the history
     *
     * @return A map of name-value
     */
    private Map<String, Object> getProcessInstanceHistoryVariables(ProcessInstance processInstance)
    {
        Map<String, Object> outputVariables = new HashMap<>();
        List<HistoricVariableInstance> historicVariableInstances =
            activitiHistoryService.createHistoricVariableInstanceQuery().processInstanceId(processInstance.getId()).list();
        for (HistoricVariableInstance historicVariableInstance : historicVariableInstances)
        {
            String name = historicVariableInstance.getVariableName();
            Object value = historicVariableInstance.getValue();
            outputVariables.put(name, value);
        }
        return outputVariables;
    }

    /**
     * Serializes the given request to XML and adds the matching field extensions and
     * workflow parameters expected by the service task under test.
     *
     * @throws IllegalArgumentException if the request cannot be marshalled to XML
     */
    private void populateParameters(JdbcExecutionRequest jdbcExecutionRequest, List<FieldExtension> fieldExtensionList, List<Parameter> parameters)
    {
        try
        {
            String jdbcExecutionRequestString = xmlHelper.objectToXml(jdbcExecutionRequest);
            fieldExtensionList.add(buildFieldExtension("contentType", "${contentType}"));
            fieldExtensionList.add(buildFieldExtension("jdbcExecutionRequest", "${jdbcExecutionRequest}"));
            parameters.add(buildParameter("contentType", "xml"));
            parameters.add(buildParameter("jdbcExecutionRequest", jdbcExecutionRequestString));
        }
        catch (JAXBException e)
        {
            // Preserve the cause; a marshalling failure here is a programming error in the test.
            throw new IllegalArgumentException(e);
        }
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.impl;
import com.intellij.ide.DataManager;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.ProhibitAWTEvents;
import com.intellij.ide.impl.dataRules.GetDataRule;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.keymap.impl.IdeKeyEventDispatcher;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.KeyedExtensionCollector;
import com.intellij.openapi.util.UserDataHolder;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.openapi.wm.impl.FloatingDecorator;
import com.intellij.reference.SoftReference;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.SwingHelper;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.concurrency.AsyncPromise;
import org.jetbrains.concurrency.Promise;
import javax.swing.*;
import java.awt.*;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.util.List;
import java.util.*;
import java.util.stream.Stream;
import static com.intellij.ide.impl.DataValidators.validOrNull;
public class DataManagerImpl extends DataManager {
  private static final Logger LOG = Logger.getInstance(DataManagerImpl.class);
  // Per-thread re-entrancy depth of getDataFromProvider(); used by getDataContext(Component)
  // to detect (and report) DataContext creation from inside another getData() call.
  private static final ThreadLocal<int[]> ourGetDataLevel = ThreadLocal.withInitial(() -> new int[1]);
  private final KeyedExtensionCollector<GetDataRule, String> myDataRuleCollector = new KeyedExtensionCollector<>(GetDataRule.EP_NAME);
  public DataManagerImpl() {
  }
  /**
   * Walks up the Swing parent chain from {@code focusedComponent} and returns the first
   * non-null value any {@link DataProvider} along the chain supplies for {@code dataId}.
   * AWT event dispatch is prohibited for the duration of the walk.
   */
  private @Nullable Object getData(@NotNull String dataId, final Component focusedComponent) {
    GetDataRule rule = getDataRule(dataId);
    try (AccessToken ignored = ProhibitAWTEvents.start("getData")) {
      for (Component c = focusedComponent; c != null; c = c.getParent()) {
        DataProvider dataProvider = getDataProviderEx(c);
        if (dataProvider == null) continue;
        Object data = getDataFromProvider(dataProvider, dataId, null, rule);
        if (data != null) return data;
      }
    }
    return null;
  }
  @ApiStatus.Internal
  public @Nullable Object getDataFromProvider(final @NotNull DataProvider provider, @NotNull String dataId, @Nullable Set<String> alreadyComputedIds) {
    return getDataFromProvider(provider, dataId, alreadyComputedIds, getDataRule(dataId));
  }
  /**
   * Queries {@code provider} directly, then falls back to {@code dataRule} if the provider
   * returned null. {@code alreadyComputedIds} guards against rule recursion: an id currently
   * being computed is not recomputed. Results are filtered through
   * {@link DataValidators#validOrNull}.
   */
  @ApiStatus.Internal
  public @Nullable Object getDataFromProvider(@NotNull DataProvider provider,
                                              @NotNull String dataId,
                                              @Nullable Set<String> alreadyComputedIds,
                                              @Nullable GetDataRule dataRule) {
    ProgressManager.checkCanceled();
    if (alreadyComputedIds != null && alreadyComputedIds.contains(dataId)) {
      return null;
    }
    int[] depth = ourGetDataLevel.get();
    try {
      depth[0]++;
      Object data = provider.getData(dataId);
      if (data != null) return validOrNull(data, dataId, provider);
      if (dataRule != null) {
        final Set<String> ids = alreadyComputedIds == null ? new HashSet<>() : alreadyComputedIds;
        ids.add(dataId);
        data = dataRule.getData(id -> getDataFromProvider(provider, id, ids));
        if (data != null) return validOrNull(data, dataId, provider);
      }
      return null;
    }
    finally {
      depth[0]--;
      // Remove only ids we may have added for this call; the caller owns the set.
      if (alreadyComputedIds != null) alreadyComputedIds.remove(dataId);
    }
  }
  /**
   * Resolves the {@link DataProvider} for an arbitrary object: the object itself, a
   * TypeSafeDataProvider adapter, or the provider installed on a JComponent; a
   * BackgroundableDataProvider is unwrapped to its background counterpart.
   */
  public static @Nullable DataProvider getDataProviderEx(@Nullable Object component) {
    DataProvider dataProvider = null;
    if (component instanceof DataProvider) {
      dataProvider = (DataProvider)component;
    }
    else if (component instanceof TypeSafeDataProvider) {
      dataProvider = new TypeSafeDataProviderAdapter((TypeSafeDataProvider) component);
    }
    else if (component instanceof JComponent) {
      dataProvider = getDataProvider((JComponent)component);
    }
    if (dataProvider instanceof BackgroundableDataProvider) {
      dataProvider = ((BackgroundableDataProvider)dataProvider).createBackgroundDataProvider();
    }
    return dataProvider;
  }
  /**
   * Builds the composite rule for {@code dataId}: the implicit "slow data providers" rule
   * first, then registered rules for the id, then rules for the uninjected id (queried with
   * re-injected ids). Returns just the slow rule when no explicit rules are registered.
   */
  public @Nullable GetDataRule getDataRule(@NotNull String dataId) {
    String uninjectedId = AnActionEvent.uninjectedId(dataId);
    GetDataRule slowRule = dataProvider -> getSlowData(dataId, dataProvider);
    List<GetDataRule> rules1 = myDataRuleCollector.forKey(dataId);
    List<GetDataRule> rules2 = dataId.equals(uninjectedId) ? Collections.emptyList() : myDataRuleCollector.forKey(uninjectedId);
    if (rules1.size() + rules2.size() == 0) return slowRule;
    return dataProvider -> {
      Object data = slowRule.getData(dataProvider);
      if (data != null) return data;
      for (GetDataRule rule : rules1) {
        data = rule.getData(dataProvider);
        if (data != null) return data;
      }
      for (GetDataRule rule : rules2) {
        data = rule.getData(id -> dataProvider.getData(AnActionEvent.injectedId(id)));
        if (data != null) return data;
      }
      return null;
    };
  }
  // Queries each provider registered under SLOW_DATA_PROVIDERS, first non-null wins.
  private static @Nullable Object getSlowData(@NotNull String dataId, @NotNull DataProvider dataProvider) {
    Iterable<DataProvider> asyncProviders = PlatformDataKeys.SLOW_DATA_PROVIDERS.getData(dataProvider);
    if (asyncProviders == null) return null;
    for (DataProvider provider : asyncProviders) {
      Object data = provider.getData(dataId);
      if (data != null) {
        return data;
      }
    }
    return null;
  }
  @Override
  public @NotNull DataContext getDataContext(Component component) {
    // Optional assertion (registry-gated): contexts must be created on EDT and never
    // from within a getData() call already in progress on this thread.
    if (Registry.is("actionSystem.dataContextAssertions")) {
      ApplicationManager.getApplication().assertIsDispatchThread();
      if (ourGetDataLevel.get()[0] > 0) {
        LOG.error("DataContext shall not be created and queried inside another getData() call.");
      }
    }
    return new MyDataContext(component);
  }
  @Override
  public @NotNull DataContext getDataContext(@NotNull Component component, int x, int y) {
    if (x < 0 || x >= component.getWidth() || y < 0 || y >= component.getHeight()) {
      throw new IllegalArgumentException("wrong point: x=" + x + "; y=" + y);
    }
    // Point inside JTabbedPane has special meaning. If point is inside tab bounds then
    // we construct DataContext by the component which corresponds to the (x, y) tab.
    if (component instanceof JTabbedPane) {
      JTabbedPane tabbedPane = (JTabbedPane)component;
      int index = tabbedPane.getUI().tabForCoordinate(tabbedPane, x, y);
      return getDataContext(index != -1 ? tabbedPane.getComponentAt(index) : tabbedPane);
    }
    else {
      return getDataContext(component);
    }
  }
  @Override
  public @NotNull DataContext getDataContext() {
    Component component = null;
    // Registry-gated: prefer the component under the most recent mouse event, if any.
    if (Registry.is("actionSystem.getContextByRecentMouseEvent")) {
      component = SwingHelper.getComponentFromRecentMouseEvent();
    }
    return getDataContext(component != null ? component : getFocusedComponent());
  }
  @Override
  public @NotNull Promise<DataContext> getDataContextFromFocusAsync() {
    // Resolves once focus has settled; runs regardless of modality.
    AsyncPromise<DataContext> result = new AsyncPromise<>();
    IdeFocusManager.getGlobalInstance()
      .doWhenFocusSettlesDown(() -> result.setResult(getDataContext()), ModalityState.any());
    return result;
  }
  /**
   * Best-effort lookup of the currently focused component: starts from the most recently
   * focused window (falling back to the AWT active/focused window), compensates for
   * floating tool windows, then walks window owners until a focused component is found.
   */
  private static @Nullable Component getFocusedComponent() {
    WindowManager windowManager = WindowManager.getInstance();
    if (!(windowManager instanceof WindowManagerEx)) {
      return null;
    }
    WindowManagerEx windowManagerEx = (WindowManagerEx)windowManager;
    Window activeWindow = windowManagerEx.getMostRecentFocusedWindow();
    if (activeWindow == null) {
      activeWindow = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
      if (activeWindow == null) {
        activeWindow = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusedWindow();
        if (activeWindow == null) return null;
      }
    }
    // In case we have an active floating toolwindow and some component in another window focused,
    // we want this other component to receive key events.
    // Walking up the window ownership hierarchy from the floating toolwindow would have led us to the main IdeFrame
    // whereas we want to be able to type in other frames as well.
    if (activeWindow instanceof FloatingDecorator) {
      IdeFocusManager ideFocusManager = IdeFocusManager.findInstanceByComponent(activeWindow);
      IdeFrame lastFocusedFrame = ideFocusManager.getLastFocusedFrame();
      JComponent frameComponent = lastFocusedFrame != null ? lastFocusedFrame.getComponent() : null;
      Window lastFocusedWindow = frameComponent != null ? SwingUtilities.getWindowAncestor(frameComponent) : null;
      boolean toolWindowIsNotFocused = windowManagerEx.getFocusedComponent(activeWindow) == null;
      if (toolWindowIsNotFocused && lastFocusedWindow != null) {
        activeWindow = lastFocusedWindow;
      }
    }
    // try to find first parent window that has focus
    Window window = activeWindow;
    Component focusedComponent = null;
    while (window != null) {
      focusedComponent = windowManagerEx.getFocusedComponent(window);
      if (focusedComponent != null) {
        break;
      }
      window = window.getOwner();
    }
    if (focusedComponent == null) {
      focusedComponent = activeWindow;
    }
    return focusedComponent;
  }
  @Override
  public <T> void saveInDataContext(DataContext dataContext, @NotNull Key<T> dataKey, @Nullable T data) {
    // Only contexts that are UserDataHolders (e.g. MyDataContext) can store user data.
    if (dataContext instanceof UserDataHolder) {
      ((UserDataHolder)dataContext).putUserData(dataKey, data);
    }
  }
  @Override
  public @Nullable <T> T loadFromDataContext(@NotNull DataContext dataContext, @NotNull Key<T> dataKey) {
    return dataContext instanceof UserDataHolder ? ((UserDataHolder)dataContext).getUserData(dataKey) : null;
  }
  /**
   * Returns {@code editor} unless the context component opted out via the
   * HIDE_EDITOR_FROM_DATA_CONTEXT_PROPERTY client property, in which case null.
   */
  public static @Nullable Editor validateEditor(Editor editor, Component contextComponent) {
    if (contextComponent instanceof JComponent) {
      final JComponent jComponent = (JComponent)contextComponent;
      if (jComponent.getClientProperty(UIUtil.HIDE_EDITOR_FROM_DATA_CONTEXT_PROPERTY) != null) return null;
    }
    return editor;
  }
  // Sentinel cached in MyDataContext.myCachedData to distinguish "computed as null"
  // from "not computed yet".
  private static final class NullResult {
    public static final NullResult INSTANCE = new NullResult();
  }
  // Data keys considered safe to cache in MyDataContext even when the general
  // "actionSystem.cache.data" registry flag is off.
  private static final Set<String> ourSafeKeys = ContainerUtil.set(
    CommonDataKeys.PROJECT.getName(),
    CommonDataKeys.EDITOR.getName(),
    PlatformDataKeys.IS_MODAL_CONTEXT.getName(),
    PlatformDataKeys.CONTEXT_COMPONENT.getName(),
    PlatformDataKeys.MODALITY_STATE.getName()
  );
  /**
   * todo make private in 2020
   * @see DataManager#loadFromDataContext(DataContext, Key)
   * @see DataManager#saveInDataContext(DataContext, Key, Object)
   * @deprecated use {@link DataManager#getDataContext(Component)} instead
   */
  @Deprecated
  @ApiStatus.ScheduledForRemoval(inVersion = "2021.1")
  public static class MyDataContext implements DataContext, UserDataHolder {
    // AWT event count at creation time; -1 means "not pinned to an event".
    private int myEventCount;
    // To prevent memory leak we have to wrap passed component into
    // the weak reference. For example, Swing often remembers menu items
    // that have DataContext as a field.
    private final Reference<Component> myRef;
    private Map<Key<?>, Object> myUserData;
    // Weak-valued per-dataId cache; cleared when the event count changes.
    private final Map<String, Object> myCachedData = ContainerUtil.createWeakValueMap();
    public MyDataContext(@Nullable Component component) {
      myEventCount = -1;
      myRef = component == null ? null : new WeakReference<>(component);
    }
    public void setEventCount(int eventCount) {
      assert ReflectionUtil.getCallerClass(3) == IdeKeyEventDispatcher.class :
        "This method might be accessible from " + IdeKeyEventDispatcher.class.getName() + " only";
      myCachedData.clear();
      myEventCount = eventCount;
    }
    @Override
    public Object getData(@NotNull String dataId) {
      ProgressManager.checkCanceled();
      boolean cacheable = Registry.is("actionSystem.cache.data") || ourSafeKeys.contains(dataId);
      if (ApplicationManager.getApplication().isDispatchThread()) {
        // A context pinned to one Swing event must not be reused for a later event;
        // if it is, report it and disable caching for this query.
        int currentEventCount = IdeEventQueue.getInstance().getEventCount();
        if (myEventCount != -1 && myEventCount != currentEventCount) {
          LOG.error("cannot share data context between Swing events; initial event count = " + myEventCount + "; current event count = " +
                    currentEventCount);
          cacheable = false;
        }
      }
      Object answer = cacheable ? myCachedData.get(dataId) : null;
      if (answer != null) {
        return answer != NullResult.INSTANCE ? answer : null;
      }
      answer = doGetData(dataId);
      // Streams are one-shot and must not be cached.
      if (cacheable && !(answer instanceof Stream)) {
        myCachedData.put(dataId, answer == null ? NullResult.INSTANCE : answer);
      }
      return answer;
    }
    private @Nullable Object doGetData(@NotNull String dataId) {
      Component component = SoftReference.dereference(myRef);
      // A few keys are answered directly from the captured component.
      if (PlatformDataKeys.IS_MODAL_CONTEXT.is(dataId)) {
        if (component == null) {
          return null;
        }
        return IdeKeyEventDispatcher.isModalContext(component);
      }
      if (PlatformDataKeys.CONTEXT_COMPONENT.is(dataId)) {
        return component;
      }
      if (PlatformDataKeys.MODALITY_STATE.is(dataId)) {
        return component != null ? ModalityState.stateForComponent(component) : ModalityState.NON_MODAL;
      }
      Object data = calcData(dataId, component);
      if (CommonDataKeys.EDITOR.is(dataId) || CommonDataKeys.HOST_EDITOR.is(dataId)) {
        return validateEditor((Editor)data, component);
      }
      return data;
    }
    protected Object calcData(@NotNull String dataId, Component component) {
      return ((DataManagerImpl)DataManager.getInstance()).getData(dataId, component);
    }
    @Override
    @NonNls
    public String toString() {
      return "component=" + SoftReference.dereference(myRef);
    }
    @Override
    public <T> T getUserData(@NotNull Key<T> key) {
      //noinspection unchecked
      return (T)getOrCreateMap().get(key);
    }
    @Override
    public <T> void putUserData(@NotNull Key<T> key, @Nullable T value) {
      getOrCreateMap().put(key, value);
    }
    // Lazily creates the weak-valued user-data map.
    private @NotNull Map<Key<?>, Object> getOrCreateMap() {
      Map<Key<?>, Object> userData = myUserData;
      if (userData == null) {
        myUserData = userData = ContainerUtil.createWeakValueMap();
      }
      return userData;
    }
  }
}
| |
package us.ridiculousbakery.espressoexpress.ChooseItemFlow_Teddy.Fragments;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.CoordinatorLayout;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.Html;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.Button;
import android.widget.ExpandableListView;
import android.widget.RelativeLayout;
import com.melnykov.fab.FloatingActionButton;
import com.parse.GetCallback;
import com.parse.ParseException;
import com.parse.ParseObject;
import com.parse.ParseQuery;
import com.parse.ParseRelation;
import com.parse.ParseUser;
import org.json.JSONException;
import org.json.JSONObject;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.util.ArrayList;
import us.ridiculousbakery.espressoexpress.Checkout.CartActivity;
import us.ridiculousbakery.espressoexpress.ChooseItemFlow_Teddy.Adapters.MenuAdapter;
import us.ridiculousbakery.espressoexpress.MD5Util;
import us.ridiculousbakery.espressoexpress.Model.Item;
import us.ridiculousbakery.espressoexpress.Model.LineItem;
import us.ridiculousbakery.espressoexpress.Model.Order;
import us.ridiculousbakery.espressoexpress.Model.Store;
import us.ridiculousbakery.espressoexpress.Model.StoreMenu;
import us.ridiculousbakery.espressoexpress.Model.TempOrder;
import us.ridiculousbakery.espressoexpress.Model.TempStore;
import us.ridiculousbakery.espressoexpress.R;
/**
* Created by teddywyly on 6/6/15.
*/
public class MenuFragment extends Fragment implements CustomizeItemDialog.CustomizeItemDialogListener {
private MenuAdapter aMenu;
private StoreMenu storeMenu;
private ExpandableListView elvMenu;
private FloatingActionButton btnCart;
private ArrayList<LineItem> lineItems;
private CustomizeItemDialog customizeDialog;
private Store store;
//================================================================================
// Constructors
//================================================================================
public static MenuFragment newInstance(String storeId) {
MenuFragment fragment = new MenuFragment();
Bundle args = new Bundle();
args.putString("storeId", storeId);
// args.putSerializable("store", store);
fragment.setArguments(args);
return fragment;
}
//================================================================================
// Lifecycle
//================================================================================
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ParseQuery<ParseObject> query = ParseQuery.getQuery("Category");
query.getInBackground("xWMyZ4YEGZ", new GetCallback<ParseObject>() {
public void done(ParseObject object, ParseException e) {
if (e == null) {
// object will be your game score
} else {
// something went wrong
}
}
});
// ParseObject post = ...;
//
// ParseObject category = ParseObject.getIn;
// ParseRelation relation = user.getRelation("posts");
// relation.add(post);
// user.saveInBackground();
// String storeID = getArguments().getString("storeId");
//
//
// store = (Store) getArguments().getSerializable("store");
// storeMenu = store.getStoreMenu();
// aMenu = new MenuAdapter(getActivity(), storeMenu);
// // Create an initializer from aMenu
// lineItems = new ArrayList<>();
}
@Override
public View onCreateView(final LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
final View v = inflater.inflate(R.layout.fragment_menu_list, null, false);
String storeID = getArguments().getString("storeId");
storeID = "1NoCwWrzM5";
lineItems = new ArrayList<>();
elvMenu = (ExpandableListView) v.findViewById(R.id.elvMenu);
elvMenu.setDivider(null);
elvMenu.setDividerHeight(0);
btnCart = (FloatingActionButton) v.findViewById(R.id.btnCart);
setCartButtonHeight();
Store.getInBackground(storeID, new GetCallback<Store>() {
@Override
public void done(final Store store, ParseException e) {
if (e == null) {
Log.d("Success in findg store", "Success in finding stre");
String menuString = (String) store.get("menu");
JSONObject menuJSON = null;
try {
menuJSON = new JSONObject(menuString);
} catch (JSONException e1) {
e1.printStackTrace();
}
storeMenu = StoreMenu.fromJSON(menuJSON);
//storeMenu = store.getStoreMenu();
aMenu = new MenuAdapter(getActivity(), storeMenu);
Log.d("DEBUG", "LISTVIEWEXPAND is " + elvMenu.toString());
elvMenu.setAdapter(aMenu);
elvMenu.setOnChildClickListener(new ExpandableListView.OnChildClickListener() {
@Override
public boolean onChildClick(ExpandableListView parent, View v, int groupPosition, int childPosition, long id) {
Item item = (Item) aMenu.getChild(groupPosition, childPosition);
Log.d("ITEMNAME", item.getName());
showCustomizeItemDialog(item);
return true;
}
});
final View header = inflater.inflate(R.layout.menu_header, null, false);
String imageURL = (String)store.get("imageURL");
//Log.d("IMAGE", imageURL);
FragmentTransaction ft = getChildFragmentManager().beginTransaction();
final MenuHeaderFragment menuHeaderFragment = MenuHeaderFragment.newInstance(store.getName(), imageURL);
ft.replace(R.id.flContainer, menuHeaderFragment);
ft.commit();
elvMenu.addHeaderView(header);
for(int i=0; i < aMenu.getGroupCount(); i++) {
elvMenu.expandGroup(i);
}
elvMenu.setOnScrollListener(new AbsListView.OnScrollListener() {
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
}
@Override
public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
final float headerHeight = header.getHeight() - ((AppCompatActivity) getActivity()).getSupportActionBar().getHeight();
// Log.d("HEIGHT", headerHeight + "");
// Log.d("HEIGHT", header.getTop() + "");
float head = header.getTop() * -1;
float base = (float) (headerHeight / 2.5);
float scalingFactor = 1 - (head / base);
// Log.d("FACTOR", scalingFactor + "");
// Log.d("HEAD", head + "");
//
//
AppCompatActivity act = (AppCompatActivity) getActivity();
ActionBar bar = act.getSupportActionBar();
if (headerHeight - head < bar.getHeight() / 6) {
bar.setBackgroundDrawable(getResources().getDrawable(R.color.colorPrimary));
bar.setTitle(Html.fromHtml("<font color='#ffffff'>" + store.getName() + "</font>"));
bar.setTitle(store.getName());
} else {
bar.setBackgroundDrawable(getResources().getDrawable(android.R.color.transparent));
bar.setTitle("");
}
float magic = headerHeight / 2;
if (head > magic) {
menuHeaderFragment.translateTitleTextVertical(head - magic);
}
float alphaScale = head / headerHeight;
if (alphaScale < 0.4) {
alphaScale = 0.4f;
}
menuHeaderFragment.setOverlayAlpha(alphaScale);
if (scalingFactor > 0.5f) {
menuHeaderFragment.scaleTitleText(scalingFactor);
}
}
});
btnCart.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// Launch Intent!
if (lineItems.size() > 0) {
Intent i = new Intent(getActivity(), CartActivity.class);
TempOrder order = new TempOrder();
TempStore tempStore = new TempStore(store);
order.setStore(tempStore);
order.setLineItems(lineItems);
i.putExtra("order", order);
startActivity(i);
}
}
});
}
}
});
return v;
}
//
// Reveals the cart button with a pop-in scale animation once the cart has
// items; collapses it (scale 0) when the cart is empty.
private void setCartButtonHeight() {
    if (lineItems.isEmpty()) {
        // Cart emptied: hide the button immediately, no animation.
        btnCart.setScaleX(0);
        btnCart.setScaleY(0);
        return;
    }
    if (btnCart.getScaleX() != 0.0f) {
        // Button already visible; nothing to animate.
        return;
    }
    ObjectAnimator growX = ObjectAnimator.ofFloat(btnCart, "scaleX", 0.0f, 1.0f);
    growX.setDuration(500);
    ObjectAnimator growY = ObjectAnimator.ofFloat(btnCart, "scaleY", 0.0f, 1.0f);
    growY.setDuration(500);
    AnimatorSet popIn = new AnimatorSet();
    popIn.playTogether(growX, growY);
    popIn.start();
}
//================================================================================
// Navigation
//================================================================================
// Opens the customization dialog for the tapped menu item. The dialog is kept
// in a field so onFinishCustomizingLineItem() can dismiss it later.
public void showCustomizeItemDialog(Item item) {
    customizeDialog = CustomizeItemDialog.newInstance(item);
    customizeDialog.show(getChildFragmentManager(), "fragment_customize_item");
}
//================================================================================
// CustomizeItemDialogListener
//================================================================================
// CustomizeItemDialogListener callback: records the finished line item,
// closes the dialog, and updates the cart button's visibility.
@Override
public void onFinishCustomizingLineItem(LineItem lineItem) {
    lineItems.add(lineItem);
    customizeDialog.dismiss();
    setCartButtonHeight();
}
}
| |
package shadow.interpreter;
import shadow.interpreter.InterpreterException.Error;
import shadow.typecheck.type.ArrayType;
import shadow.typecheck.type.ModifiedType;
import shadow.typecheck.type.Modifiers;
import shadow.typecheck.type.Type;
import java.math.BigInteger;
/**
* An abstract class that is the base of all values (array, boolean, code, number, object, string)
* in Shadow.
*/
public abstract class ShadowValue implements ModifiedType {
  // Exposed here for convenience
  @SuppressWarnings("StaticInitializerReferencesSubClass")
  public static final ShadowInvalid INVALID = ShadowInvalid.INVALID;

  // Modifiers (e.g. immutable) attached to this value.
  private final Modifiers modifiers;

  protected ShadowValue() {
    this.modifiers = new Modifiers();
  }

  protected ShadowValue(int modifiers) {
    this.modifiers = new Modifiers(modifiers);
  }

  @Override
  public final Modifiers getModifiers() {
    return modifiers;
  }

  /** A value's type is intrinsic to its concrete class; overwriting it always fails. */
  @Override
  public final void setType(Type type) {
    throw new UnsupportedOperationException();
  }

  /**
   * Applies the specified binary operation to this {@link ShadowValue} and another.
   *
   * <p>If either operand is {@link ShadowInvalid}, {@link #INVALID} is returned instead of
   * throwing, so invalidity propagates quietly through evaluation.
   *
   * @throws InterpreterException if the concrete value does not support the operation, or the
   *     operator is unrecognized
   */
  public final ShadowValue apply(BinaryOperator operator, ShadowValue right)
      throws InterpreterException {
    if (this instanceof ShadowInvalid || right instanceof ShadowInvalid) {
      return INVALID;
    }
    switch (operator) {
      case COALESCE:
        return coalesce(right);
      case OR:
        return or(right);
      case XOR:
        return xor(right);
      case AND:
        return and(right);
      case BITWISE_OR:
        return bitwiseOr(right);
      case BITWISE_XOR:
        return bitwiseXor(right);
      case BITWISE_AND:
        return bitwiseAnd(right);
      case EQUAL:
        return equal(right);
      case NOT_EQUAL:
        return notEqual(right);
      case REFERENCE_EQUAL:
        return referenceEqual(right);
      case REFERENCE_NOT_EQUAL:
        return referenceNotEqual(right);
      case LESS_THAN:
        return lessThan(right);
      case GREATER_THAN:
        return greaterThan(right);
      case LESS_THAN_OR_EQUAL:
        return lessThanOrEqual(right);
      case GREATER_THAN_OR_EQUAL:
        return greaterThanOrEqual(right);
      case CAT:
        return cat(right);
      case RIGHT_SHIFT:
        return bitShiftRight(right);
      case LEFT_SHIFT:
        return bitShiftLeft(right);
      case RIGHT_ROTATE:
        return bitRotateRight(right);
      case LEFT_ROTATE:
        return bitRotateLeft(right);
      case ADD:
        return add(right);
      case SUBTRACT:
        return subtract(right);
      case MULTIPLY:
        return multiply(right);
      case DIVIDE:
        return divide(right);
      case MODULUS:
        return modulus(right);
      default:
        throw new InterpreterException(
            Error.UNSUPPORTED_OPERATION, "Unexpected binary operator " + operator.getName());
    }
  }

  /**
   * Applies the specified unary operation to this {@link ShadowValue}.
   *
   * <p>If this value is {@link ShadowInvalid}, {@link #INVALID} is returned instead of throwing.
   *
   * @throws InterpreterException if the concrete value does not support the operation, or the
   *     operator is unrecognized
   */
  public final ShadowValue apply(UnaryOperator operator) throws InterpreterException {
    if (this instanceof ShadowInvalid) {
      return INVALID;
    }
    switch (operator) {
      case CAT:
        return unaryCat();
      case BITWISE_COMPLEMENT:
        return bitwiseComplement();
      case NOT:
        return not();
      case NEGATE:
        return negate();
      default:
        // Consistent with the binary apply() above: an unrecognized operator is reported
        // through InterpreterException rather than a raw UnsupportedOperationException.
        throw new InterpreterException(
            Error.UNSUPPORTED_OPERATION, "Unexpected unary operator " + operator.getName());
    }
  }

  /** Unary concatenation: the value rendered as a {@link ShadowString}. */
  public ShadowString unaryCat() {
    return new ShadowString(toString());
  }

  // Unary operations: subclasses override the ones their type supports;
  // the defaults below all report UNSUPPORTED_OPERATION.

  public ShadowValue negate() throws InterpreterException {
    throw new InterpreterException(Error.UNSUPPORTED_OPERATION, "Negate operation not supported");
  }

  public ShadowValue bitwiseComplement() throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Bitwise complement operation not supported");
  }

  public ShadowBoolean not() throws InterpreterException {
    throw new InterpreterException(Error.UNSUPPORTED_OPERATION, "Not operation not supported");
  }

  /** Null coalescing: yields {@code value} when this is {@link ShadowNull}, otherwise this. */
  public final ShadowValue coalesce(ShadowValue value) throws InterpreterException {
    return (this instanceof ShadowNull) ? value : this;
  }

  /** Binary concatenation of the string forms of both operands. */
  public ShadowValue cat(ShadowValue value) throws InterpreterException {
    return new ShadowString(this + value.toString());
  }

  // binary operations
  public ShadowValue add(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(Error.UNSUPPORTED_OPERATION, "Add operation not supported");
  }

  public ShadowValue subtract(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(Error.UNSUPPORTED_OPERATION, "Subtract operation not supported");
  }

  public ShadowValue multiply(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(Error.UNSUPPORTED_OPERATION, "Multiply operation not supported");
  }

  public ShadowValue divide(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(Error.UNSUPPORTED_OPERATION, "Divide operation not supported");
  }

  public ShadowValue modulus(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(Error.UNSUPPORTED_OPERATION, "Modulus operation not supported");
  }

  public ShadowValue bitShiftLeft(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Left shift operation not supported");
  }

  public ShadowValue bitShiftRight(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Right shift operation not supported");
  }

  public ShadowValue bitRotateLeft(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Left rotate operation not supported");
  }

  public ShadowValue bitRotateRight(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Right rotate operation not supported");
  }

  public ShadowBoolean equal(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(Error.UNSUPPORTED_OPERATION, "Equal operation not supported");
  }

  /** Defined as the boolean negation of {@link #equal(ShadowValue)}. */
  public final ShadowBoolean notEqual(ShadowValue value) throws InterpreterException {
    try {
      final ShadowBoolean result = equal(value);
      return new ShadowBoolean(!result.getValue());
    } catch (Exception e) {
      // Any failure in equal() (including its own InterpreterException) is surfaced
      // uniformly as "not supported" here.
      throw new InterpreterException(
          Error.UNSUPPORTED_OPERATION, "Not equal operation not supported");
    }
  }

  @SuppressWarnings("unused")
  public ShadowBoolean referenceEqual(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Reference equal operation not supported");
  }

  /** Defined as the boolean negation of {@link #referenceEqual(ShadowValue)}. */
  public final ShadowBoolean referenceNotEqual(ShadowValue value) throws InterpreterException {
    try {
      ShadowBoolean result = this.referenceEqual(value);
      return new ShadowBoolean(!result.getValue());
    } catch (Exception e) {
      throw new InterpreterException(
          Error.UNSUPPORTED_OPERATION, "Reference not equal operation not supported");
    }
  }

  public ShadowBoolean lessThan(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Less than operation not supported");
  }

  public ShadowBoolean lessThanOrEqual(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Less than or equal operation not supported");
  }

  public ShadowBoolean greaterThan(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Greater than operation not supported");
  }

  public ShadowBoolean greaterThanOrEqual(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Greater than or equal operation not supported");
  }

  @SuppressWarnings("unused")
  public ShadowBoolean or(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(Error.UNSUPPORTED_OPERATION, "Or operation not supported");
  }

  @SuppressWarnings("unused")
  public ShadowBoolean xor(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Exclusive or operation not supported");
  }

  @SuppressWarnings("unused")
  public ShadowBoolean and(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(Error.UNSUPPORTED_OPERATION, "And operation not supported");
  }

  public ShadowValue bitwiseAnd(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Bitwise and operation not supported");
  }

  public ShadowValue bitwiseOr(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Bitwise or operation not supported");
  }

  public ShadowValue bitwiseXor(ShadowValue value) throws InterpreterException {
    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION, "Bitwise xor operation not supported");
  }

  /** True when this value's type is a subtype of {@code other}'s type. */
  public final boolean isSubtype(ShadowValue other) {
    return getType().isSubtype(other.getType());
  }

  /** True when this value's type is a strict (proper) subtype of {@code other}'s type. */
  public final boolean isStrictSubtype(ShadowValue other) {
    return getType().isStrictSubtype(other.getType());
  }

  /** Converts this value to the given type, if the concrete value supports it. */
  public abstract ShadowValue cast(Type type) throws InterpreterException;

  /** Produces an independent copy of this value. */
  public abstract ShadowValue copy() throws InterpreterException;

  /**
   * Sets the {@link Modifiers} immutable flag on the value.
   *
   * @return a copy of the value, or this if it is already immutable.
   * @throws InterpreterException thrown if impossible to determine value
   */
  public ShadowValue freeze() throws InterpreterException {
    if (getModifiers().isImmutable()) return this;
    // One might ask: Why make a copy here?
    // Answer: Because an immutable value can't be changed
    // which would be possible when there are other references to the original
    final ShadowValue copy = copy();
    copy.getModifiers().addModifier(Modifiers.IMMUTABLE);
    return copy;
  }

  @Override
  public String toString() {
    return getType().toString(Type.PACKAGES | Type.TYPE_PARAMETERS);
  }

  /**
   * Returns a "default" value for a type if it's supported: array or nullable
   *
   * @param type the {@link ModifiedType} to get the default value for.
   * @return the default value.
   * @throws InterpreterException thrown if no default value is supported
   */
  public static ShadowValue getDefault(ModifiedType type) throws InterpreterException {
    if (type.getModifiers().isNullable()) return new ShadowNull(type.getType());

    if (type instanceof ArrayType) {
      final ArrayType arrayType = (ArrayType) type;
      return new ShadowArray(arrayType, 0);
    }

    throw new InterpreterException(Error.INVALID_TYPE, "Unsupported type " + type.getType());
  }

  public ShadowInteger hash() throws InterpreterException {
    throw new InterpreterException(Error.UNSUPPORTED_OPERATION, "Hash not supported");
  }

  /**
   * Checks to see if two values are equal.
   *
   * <p>Note: this is an overload taking {@link ShadowValue}, not an override of
   * {@link Object#equals(Object)}; it may throw and first casts the narrower operand
   * to the wider type.
   *
   * @param second the other value to compare.
   * @return true if they are "equal", or false otherwise.
   * @throws InterpreterException thrown if comparison is impossible
   */
  public boolean equals(ShadowValue second) throws InterpreterException {
    ShadowValue first = this;
    // Promote the strictly-narrower operand so both sides share a type.
    if (first.isStrictSubtype(second)) first = first.cast(second.getType());
    else if (second.isStrictSubtype(first)) second = second.cast(first.getType());

    if ((first instanceof ShadowUndefined) != (second instanceof ShadowUndefined)) return false;

    if (first.getType().equals(second.getType())) {
      if (first instanceof ShadowInteger) {
        BigInteger value1 = ((ShadowInteger) first).getValue();
        BigInteger value2 = ((ShadowInteger) second).getValue();
        return value1.equals(value2);
      } else if (first instanceof ShadowFloat) {
        float value1 = ((ShadowFloat) first).getValue();
        float value2 = ((ShadowFloat) second).getValue();
        return value1 == value2;
      } else if (first instanceof ShadowDouble) {
        double value1 = ((ShadowDouble) first).getValue();
        double value2 = ((ShadowDouble) second).getValue();
        return value1 == value2;
      } else if (first instanceof ShadowString) {
        String value1 = ((ShadowString) first).getValue();
        String value2 = ((ShadowString) second).getValue();
        return value1.equals(value2);
      } else if (first instanceof ShadowUndefined) return true;
      else if (first instanceof ShadowNull) return second instanceof ShadowNull;
    }

    return false;
  }

  /**
   * Compares one value to another.
   *
   * @param second the other value to compare to.
   * @return same values as Java's compareTo
   * @throws InterpreterException thrown if comparison is impossible
   */
  public int compareTo(ShadowValue second) throws InterpreterException {
    ShadowValue first = this;
    // Promote the strictly-narrower operand so both sides share a type.
    if (first.isStrictSubtype(second)) first = first.cast(second.getType());
    else if (second.isStrictSubtype(first)) second = second.cast(first.getType());

    if (first.getType().equals(second.getType())) {
      if (first instanceof ShadowInteger) {
        BigInteger value1 = ((ShadowInteger) first).getValue();
        BigInteger value2 = ((ShadowInteger) second).getValue();
        return value1.compareTo(value2);
      } else if (first instanceof ShadowFloat) {
        float value1 = ((ShadowFloat) first).getValue();
        float value2 = ((ShadowFloat) second).getValue();
        return Float.compare(value1, value2);
      } else if (first instanceof ShadowDouble) {
        double value1 = ((ShadowDouble) first).getValue();
        double value2 = ((ShadowDouble) second).getValue();
        return Double.compare(value1, value2);
      } else if (first instanceof ShadowString) {
        String value1 = ((ShadowString) first).getValue();
        String value2 = ((ShadowString) second).getValue();
        return value1.compareTo(value2);
      }
    }

    throw new InterpreterException(
        Error.MISMATCHED_TYPE,
        "Cannot compare types " + first.getType() + " and " + second.getType());
  }

  /** Returns a valid Shadow literal representation of the value */
  public abstract String toLiteral();

  /**
   * Dispatches a named Shadow method call to the corresponding operation on this value.
   *
   * @throws InterpreterException if the method/arity pair is not recognized
   */
  public ShadowValue callMethod(String method, ShadowValue... arguments)
      throws InterpreterException {
    if (arguments.length == 0) {
      switch (method) {
        case "bitwiseComplement":
          return bitwiseComplement();
        case "hash":
          return hash();
        case "negate":
          return negate();
        case "not":
          return not();
        case "toString":
          return new ShadowString(toLiteral());
      }
    } else if (arguments.length == 1) {
      ShadowValue value = arguments[0];
      switch (method) {
        case "add":
          return add(value);
        case "and":
          return and(value);
        case "bitRotateLeft":
          return bitRotateLeft(value);
        case "bitRotateRight":
          return bitRotateRight(value);
        case "bitShiftLeft":
          return bitShiftLeft(value);
        case "bitShiftRight":
          return bitShiftRight(value);
        case "bitwiseAnd":
          return bitwiseAnd(value);
        case "bitwiseOr":
          return bitwiseOr(value);
        case "bitwiseXor":
          return bitwiseXor(value);
        case "compareTo":
          return new ShadowInteger(compareTo(value));
        case "divide":
          return divide(value);
        case "equal":
          return equal(value);
        case "greaterThan":
          return greaterThan(value);
        case "greaterThanOrEqual":
          return greaterThanOrEqual(value);
        case "lessThan":
          return lessThan(value);
        case "lessThanOrEqual":
          return lessThanOrEqual(value);
        case "modulus":
          return modulus(value);
        case "multiply":
          return multiply(value);
        case "notEqual":
          return notEqual(value);
        case "or":
          return or(value);
        case "referenceEqual":
          return referenceEqual(value);
        case "referenceNotEqual":
          return referenceNotEqual(value);
        case "subtract":
          return subtract(value);
        case "xor":
          return xor(value);
      }
    }

    // Unknown method or arity: report the argument list in the error message.
    StringBuilder builder = new StringBuilder("(");
    boolean first = true;
    for (ShadowValue value : arguments) {
      if (first) first = false;
      else builder.append(", ");
      builder.append(value);
    }
    builder.append(")");

    throw new InterpreterException(
        Error.UNSUPPORTED_OPERATION,
        "Method " + method + " not supported with arguments " + builder);
  }
}
| |
/*
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Bob Jervis
* Google Inc.
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package com.google.javascript.rhino.jstype;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.collect.ImmutableList;
import com.google.javascript.rhino.jstype.JSType.EqCache;
import com.google.javascript.rhino.jstype.JSType.MatchStatus;
import com.google.javascript.rhino.jstype.JSType.SubtypingMode;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Set;
import javax.annotation.Nullable;
/**
* Manages a mapping from TemplateType to its resolved JSType. Provides utility
* methods for cloning/extending the map.
*
* @author izaakr@google.com (Izaak Rubin)
*/
public class TemplateTypeMap implements Serializable {
  // The TemplateType keys of the map.
  private final ImmutableList<TemplateType> templateKeys;

  // The JSType values, which are index-aligned with their corresponding keys.
  // These values are left as specified in the TemplateTypeMap constructor; they
  // may refer to TemplateTypes that are keys in this TemplateTypeMap, requiring
  // iterative type resolution to find their true, resolved type.
  private final ImmutableList<JSType> templateValues;

  // The JSType values, which are index-aligned with their corresponding keys.
  // These values have been iteratively type-resolved using this TemplateTypeMap
  // instance. These fully-resolved values are necessary for determining the
  // equivalence of two TemplateTypeMap instances.
  private final JSType[] resolvedTemplateValues;

  private final JSTypeRegistry registry;

  static final TemplateTypeMap createEmpty(JSTypeRegistry registry) {
    // This method should only be called during registry initialization.
    checkArgument(registry.getEmptyTemplateTypeMap() == null);
    return new TemplateTypeMap(registry, ImmutableList.of(), ImmutableList.of());
  }

  private TemplateTypeMap(
      JSTypeRegistry registry,
      ImmutableList<TemplateType> templateKeys,
      ImmutableList<JSType> templateValues) {
    checkNotNull(templateKeys);
    checkNotNull(templateValues);
    // Values are allowed to be shorter than keys; the remainder is "unfilled".
    checkArgument(templateValues.size() <= templateKeys.size());

    this.registry = registry;
    this.templateKeys = templateKeys;
    this.templateValues = templateValues;

    // Iteratively resolve any JSType values that refer to the TemplateType keys
    // of this TemplateTypeMap.
    TemplateTypeReplacer replacer = TemplateTypeReplacer.forTotalReplacement(registry, this);

    int nValues = this.templateValues.size();
    int nKeys = this.templateKeys.size();
    JSType[] resolvedValues = new JSType[nKeys];
    for (int i = 0; i < nKeys; i++) {
      if (i < nValues) {
        TemplateType templateKey = this.templateKeys.get(i);
        replacer.setKeyType(templateKey);
        JSType templateValue = this.templateValues.get(i);
        resolvedValues[i] = templateValue.visit(replacer);
      } else {
        // Unfilled keys resolve to their declared upper bound.
        resolvedValues[i] = this.templateKeys.get(i).getBound();
      }
    }
    this.resolvedTemplateValues = resolvedValues;
  }

  /**
   * Create a new map in which any unfilled values in this map have been filled with {@code values}.
   *
   * <p>If there are fewer {@code values} than unfilled values, `?` will be used to fill the rest.
   */
  TemplateTypeMap copyFilledWithValues(ImmutableList<JSType> values) {
    int requiredUnknownCount = numUnfilledTemplateKeys() - values.size();
    checkArgument(requiredUnknownCount >= 0, requiredUnknownCount);

    if (numUnfilledTemplateKeys() == 0) {
      return this; // Nothing will change.
    }

    ArrayList<JSType> extendedValues = new ArrayList<>();
    extendedValues.addAll(this.templateValues);
    extendedValues.addAll(values);
    padToSameLength(this.templateKeys, extendedValues);

    return new TemplateTypeMap(
        this.registry, this.templateKeys, ImmutableList.copyOf(extendedValues));
  }

  /**
   * Create a new map in which the keys and values have been extended by {@code extension}.
   *
   * <p>Before extension, any unfilled values in the initial map will be filled with `?`.
   */
  public TemplateTypeMap copyWithExtension(TemplateTypeMap extension) {
    return copyWithExtension(extension.templateKeys, extension.templateValues);
  }

  /**
   * Create a new map in which the keys and values have been extended by {@code keys} and {@code
   * values} respectively.
   *
   * <p>Before extension, any unfilled values in the initial map will be filled with `?`.
   */
  public TemplateTypeMap copyWithExtension(
      ImmutableList<TemplateType> keys, ImmutableList<JSType> values) {
    int extendedUnfilledCount = keys.size() - values.size();
    checkArgument(extendedUnfilledCount >= 0, extendedUnfilledCount);

    if (numUnfilledTemplateKeys() == 0 && keys.isEmpty()) {
      return this; // Nothing will change.
    }

    ImmutableList<TemplateType> extendedKeys =
        ImmutableList.<TemplateType>builder().addAll(this.templateKeys).addAll(keys).build();

    ArrayList<JSType> extendedValues = new ArrayList<>();
    extendedValues.addAll(this.templateValues);
    // Pad BEFORE appending the new values so they align with the new keys.
    padToSameLength(this.templateKeys, extendedValues);
    extendedValues.addAll(values);

    return new TemplateTypeMap(this.registry, extendedKeys, ImmutableList.copyOf(extendedValues));
  }

  /**
   * Create a new map in which keys contained in {@code removals} are eliminated.
   *
   * <p>The keys in {@code removals} will only be removed if they are unfilled.
   */
  TemplateTypeMap copyWithoutKeys(Set<TemplateType> removals) {
    ImmutableList.Builder<TemplateType> keys = ImmutableList.builder();
    // Filled keys are always retained; only unfilled keys are eligible for removal.
    keys.addAll(templateKeys.subList(0, templateValues.size()));
    for (int i = templateValues.size(); i < templateKeys.size(); i++) {
      TemplateType key = templateKeys.get(i);
      if (!removals.contains(key)) {
        keys.add(key);
      }
    }

    // There are some checks we could do for this before calculating the removal, but it was less
    // error prone to only check in one place.
    if (keys.build().size() == templateKeys.size()) {
      return this; // Nothing will change.
    }

    return new TemplateTypeMap(this.registry, keys.build(), this.templateValues);
  }

  /** Returns the number of template keys in this map. */
  public int size() {
    return this.templateKeys.size();
  }

  /**
   * Returns true if the map is empty; false otherwise.
   */
  public boolean isEmpty() {
    return templateKeys.isEmpty();
  }

  /** Returns a list of all template keys. */
  public ImmutableList<TemplateType> getTemplateKeys() {
    return templateKeys;
  }

  /** Returns the (possibly shorter than keys) list of unresolved template values. */
  public ImmutableList<JSType> getTemplateValues() {
    return templateValues;
  }

  /**
   * Returns true if this map contains the specified template key, false
   * otherwise.
   */
  @SuppressWarnings("ReferenceEquality")
  public boolean hasTemplateKey(TemplateType templateKey) {
    // Note: match by identity, not equality
    for (TemplateType entry : templateKeys) {
      if (entry == templateKey) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns the number of template keys in this map that do not have a
   * corresponding JSType value.
   */
  int numUnfilledTemplateKeys() {
    return templateKeys.size() - templateValues.size();
  }

  /** Returns true when every template key has a corresponding value. */
  boolean isFull() {
    return numUnfilledTemplateKeys() == 0;
  }

  /** Returns true when at least one template key has a corresponding value. */
  boolean isPartiallyFull() {
    return !this.templateValues.isEmpty();
  }

  /**
   * Returns a list of template keys in this map that do not have corresponding
   * JSType values.
   */
  ImmutableList<TemplateType> getUnfilledTemplateKeys() {
    return templateKeys.subList(templateValues.size(), templateKeys.size());
  }

  /**
   * Returns true if there is a JSType value associated with the specified
   * template key; false otherwise.
   */
  public boolean hasTemplateType(TemplateType key) {
    return getTemplateTypeIndex(key) != -1;
  }

  /** Returns the raw (pre-resolution) value for {@code key}, or UNKNOWN if absent. */
  JSType getUnresolvedOriginalTemplateType(TemplateType key) {
    int index = getTemplateTypeIndex(key);
    return (index == -1)
        ? registry.getNativeType(JSTypeNative.UNKNOWN_TYPE)
        : templateValues.get(index);
  }

  /** Returns the template key with the given reference name, or null if none matches. */
  public TemplateType getTemplateTypeKeyByName(String keyName) {
    for (TemplateType key : templateKeys) {
      if (key.getReferenceName().equals(keyName)) {
        return key;
      }
    }
    return null;
  }

  /**
   * Returns the index of the JSType value associated with the specified
   * template key. If no JSType value is associated, returns -1.
   */
  private int getTemplateTypeIndex(TemplateType key) {
    int maxIndex = Math.min(templateKeys.size(), templateValues.size());
    // Scan backwards so the most recently added binding for a key wins.
    for (int i = maxIndex - 1; i >= 0; i--) {
      if (JSType.areIdentical(templateKeys.get(i), key)) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Returns the JSType value associated with the specified template key. If no
   * JSType value is associated, returns the upper bound for generic, UNKNOWN_TYPE if unspecified.
   */
  public JSType getResolvedTemplateType(TemplateType key) {
    int index = getTemplateTypeIndex(key);
    return (index == -1)
        ? unknownIfUnbounded(key)
        : resolvedTemplateValues[index];
  }

  /**
   * An enum tracking the three different equivalence match states for a
   * template key-value pair.
   */
  private enum EquivalenceMatch {
    NO_KEY_MATCH, VALUE_MISMATCH, VALUE_MATCH
  }

  /**
   * Determines if this map and the specified map have equivalent template
   * types.
   */
  public boolean checkEquivalenceHelper(
      TemplateTypeMap that, EquivalenceMethod eqMethod, SubtypingMode subtypingMode) {
    return checkEquivalenceHelper(that, eqMethod, EqCache.create(), subtypingMode);
  }

  public boolean checkEquivalenceHelper(TemplateTypeMap that,
      EquivalenceMethod eqMethod, EqCache eqCache, SubtypingMode subtypingMode) {
    @Nullable MatchStatus status = eqCache.checkCache(this, that);
    if (status == null) {
      // Check both directions so the relation is symmetric, then memoize.
      boolean result =
          checkEquivalenceHelper(eqMethod, this, that, eqCache, subtypingMode)
              && checkEquivalenceHelper(eqMethod, that, this, eqCache, subtypingMode);
      eqCache.updateCache(this, that, MatchStatus.valueOf(result));
      return result;
    } else {
      return status.subtypeValue();
    }
  }

  private static boolean checkEquivalenceHelper(EquivalenceMethod eqMethod,
      TemplateTypeMap thisMap, TemplateTypeMap thatMap,
      EqCache eqCache, SubtypingMode subtypingMode) {
    ImmutableList<TemplateType> thisKeys = thisMap.getTemplateKeys();
    ImmutableList<TemplateType> thatKeys = thatMap.getTemplateKeys();

    for (int i = 0; i < thisKeys.size(); i++) {
      TemplateType thisKey = thisKeys.get(i);
      JSType thisType = thisMap.getResolvedTemplateType(thisKey);
      EquivalenceMatch thisMatch = EquivalenceMatch.NO_KEY_MATCH;

      for (int j = 0; j < thatKeys.size(); j++) {
        TemplateType thatKey = thatKeys.get(j);
        JSType thatType = thatMap.getResolvedTemplateType(thatKey);

        // Cross-compare every key-value pair in this TemplateTypeMap with
        // those in that TemplateTypeMap. Update the Equivalence match for both
        // key-value pairs involved.
        if (JSType.areIdentical(thisKey, thatKey)) {
          EquivalenceMatch newMatchType = EquivalenceMatch.VALUE_MISMATCH;
          if (thisType.checkEquivalenceHelper(thatType, eqMethod, eqCache)
              || (subtypingMode == SubtypingMode.IGNORE_NULL_UNDEFINED
                  && thisType.isSubtype(thatType, subtypingMode)
                  // Bug fix: this previously compared thatType against itself
                  // (always true); the intent is mutual subtyping.
                  && thatType.isSubtype(thisType, subtypingMode))) {
            newMatchType = EquivalenceMatch.VALUE_MATCH;
          }

          if (thisMatch != EquivalenceMatch.VALUE_MATCH) {
            thisMatch = newMatchType;
          }
        }
      }

      if (failedEquivalenceCheck(thisMatch, eqMethod)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Determines if the specified EquivalenceMatch is considered a failing
   * condition for an equivalence check, given the EquivalenceMethod used for
   * the check.
   */
  private static boolean failedEquivalenceCheck(
      EquivalenceMatch eqMatch, EquivalenceMethod eqMethod) {
    return eqMatch == EquivalenceMatch.VALUE_MISMATCH
        || (eqMatch == EquivalenceMatch.NO_KEY_MATCH && eqMethod != EquivalenceMethod.INVARIANT);
  }

  /** Returns true if any resolved template value itself contains template types. */
  boolean hasAnyTemplateTypesInternal() {
    if (resolvedTemplateValues != null) {
      for (JSType templateValue : resolvedTemplateValues) {
        if (templateValue.hasAnyTemplateTypes()) {
          return true;
        }
      }
    }
    return false;
  }

  @Override
  public String toString() {
    // Use a StringBuilder rather than repeated String concatenation in the loop.
    StringBuilder s = new StringBuilder("{ ");
    int len = templateKeys.size();
    for (int i = 0; i < len; i++) {
      s.append("(");
      s.append(templateKeys.get(i));
      s.append(",");
      s.append((i < templateValues.size()) ? templateValues.get(i) : "");
      s.append(",");
      s.append(
          (resolvedTemplateValues != null && i < resolvedTemplateValues.length)
              ? resolvedTemplateValues[i]
              : "");
      s.append(") ");
    }
    s.append("}");
    return s.toString();
  }

  /** Pads {@code builder} with per-key defaults until it is as long as {@code keys}. */
  private void padToSameLength(ImmutableList<TemplateType> keys, ArrayList<JSType> builder) {
    checkArgument(builder.size() <= keys.size());
    for (int i = builder.size(); i < keys.size(); i++) {
      builder.add(unknownIfUnbounded(keys.get(i)));
    }
  }

  /** Returns UNKNOWN for an unbounded template type, otherwise the type itself. */
  private JSType unknownIfUnbounded(TemplateType type) {
    return type.getBound().isUnknownType()
        ? this.registry.getNativeType(JSTypeNative.UNKNOWN_TYPE)
        : type;
  }
}
| |
package org.zalando.logbook.okhttp2;
import com.squareup.okhttp.MediaType;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.RequestBody;
import lombok.AllArgsConstructor;
import lombok.Getter;
import okio.Buffer;
import org.zalando.logbook.HttpHeaders;
import org.zalando.logbook.HttpRequest;
import org.zalando.logbook.Origin;
import javax.annotation.Nullable;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicReference;
import static com.squareup.okhttp.HttpUrl.defaultPort;
import static com.squareup.okhttp.RequestBody.create;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.zalando.fauxpas.FauxPas.throwingUnaryOperator;
@AllArgsConstructor
final class LocalRequest implements HttpRequest {
private final AtomicReference<State> state = new AtomicReference<>(new Unbuffered());
private final Request request;
/**
 * Buffering state machine for the request body: {@code with()}/{@code without()}
 * toggle whether the body is wanted, {@code buffer()} materializes it.
 * All transitions default to "stay in this state".
 */
private interface State {

    /** Transition taken when body capture is requested. Default: no-op. */
    default State with() {
        return this;
    }

    /** Transition taken when body capture is withdrawn. Default: no-op. */
    default State without() {
        return this;
    }

    /** Transition that actually captures the body, if any. Default: no-op. */
    default State buffer() throws IOException {
        return this;
    }

    /** The request to execute (possibly rebuilt around a buffered body). */
    Request getRequest();

    /** The captured body bytes; empty unless a buffered state holds them. */
    default byte[] getBody() {
        return new byte[0];
    }
}
/** Base for inner (non-static) states, which expose the enclosing request unchanged. */
@AllArgsConstructor
private abstract class AbstractState implements State {

    @Override
    public Request getRequest() {
        return request;
    }
}
/** Initial state: no body capture requested and nothing buffered yet. */
private final class Unbuffered extends AbstractState {

    @Override
    public State with() {
        return new Offering();
    }
}
/** Body capture has been requested but not yet performed. */
private final class Offering extends AbstractState {

    @Override
    public State without() {
        return new Unbuffered();
    }

    @Override
    public State buffer() throws IOException {
        @Nullable final RequestBody requestBody = request.body();

        if (requestBody == null) {
            // Nothing to capture; let the request pass through untouched.
            return new Passing();
        }

        // Drain the body into memory, then rebuild the request around a
        // repeatable copy of the same bytes so it can still be sent.
        final Buffer sink = new Buffer();
        requestBody.writeTo(sink);
        final byte[] bytes = sink.readByteArray();

        final Request rebuilt = request.newBuilder()
                .method(request.method(), create(requestBody.contentType(), bytes))
                .build();

        return new Buffering(rebuilt, bytes);
    }
}
@AllArgsConstructor
private static final class Buffering implements State {
@Getter
private final Request request;
private final byte[] body;
@Override
public State without() {
return new Ignoring(request, body);
}
@Override
public byte[] getBody() {
return body;
}
}
@AllArgsConstructor
private static final class Ignoring implements State {
@Getter
private final Request request;
private final byte[] body;
@Override
public State with() {
return new Buffering(request, body);
}
}
private final class Passing extends AbstractState {
}
@Override
public String getRemote() {
return "localhost";
}
@Override
public String getMethod() {
return request.method();
}
@Override
public String getScheme() {
return request.httpUrl().scheme();
}
@Override
public String getHost() {
return request.httpUrl().host();
}
@Override
public Optional<Integer> getPort() {
final int port = request.httpUrl().port();
final int defaultPort = defaultPort(request.httpUrl().scheme());
return port == defaultPort ? Optional.empty() : Optional.of(port);
}
@Override
public String getPath() {
return request.url().getPath();
}
@Override
public String getQuery() {
return Optional.ofNullable(request.httpUrl().query()).orElse("");
}
@Override
public String getProtocolVersion() {
// TODO find the real thing
return "HTTP/1.1";
}
@Override
public Origin getOrigin() {
return Origin.LOCAL;
}
@Override
public HttpHeaders getHeaders() {
return HttpHeaders.of(request.headers().toMultimap());
}
@Override
public String getContentType() {
return contentType().map(MediaType::toString).orElse("");
}
@Override
public Charset getCharset() {
return contentType().map(MediaType::charset).orElse(UTF_8);
}
private Optional<MediaType> contentType() {
return Optional.ofNullable(request.body())
.map(RequestBody::contentType);
}
@Override
public HttpRequest withBody() {
state.updateAndGet(State::with);
return this;
}
@Override
public HttpRequest withoutBody() {
state.updateAndGet(State::without);
return this;
}
Request toRequest() {
return buffer().getRequest();
}
@Override
public byte[] getBody() {
return buffer().getBody();
}
private State buffer() {
return state.updateAndGet(throwingUnaryOperator(State::buffer));
}
}
| |
/*L
* Copyright Washington University in St. Louis
* Copyright SemanticBits
* Copyright Persistent Systems
* Copyright Krishagni
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/wustl-common-utilities/LICENSE.txt for details.
*/
/* Generated By:JavaCC: Do not edit this line. SqlFormatterTokenManager.java */
package edu.wustl.common.util;
import java.io.*;
/** Token Manager. */
public class SqlFormatterTokenManager implements SqlFormatterConstants
{
// Output buffer the formatted SQL is accumulated into (assigned by the caller).
StringBuilder res;
// Current indent depth, in units of INDENT, used by NLI()/indent().
private int indent = 0;
private static final String INDENT = " ";
// Parenthesis nesting depth within the current clause; drives state switches.
private int nesting = 0;
// Appends the current token image to the output, then inserts a
// newline+indent immediately before the keyword that ends the image. `cons`
// is the token-kind constant whose tokenImage entry locates the keyword; the
// +2 offset compensates for tokenImage's surrounding quote characters.
private void updateRes(int cons) {
res.append(image);
res.insert(res.length()-tokenImage[cons].length()+2, NLI());
}
// Newline followed by the current indentation prefix.
private String NLI() {
return "\n" + indent();
}
// Builds the current indentation prefix: `indent` copies of INDENT.
// Uses a StringBuilder instead of repeated String concatenation, which was
// O(n^2) in the indent depth.
private String indent() {
StringBuilder sb = new StringBuilder(indent * INDENT.length());
for (int i = 0; i < indent; i++) {
sb.append(INDENT);
}
return sb.toString();
}
/** Debug output. */
public java.io.PrintStream debugStream = System.out;
/** Set debug output. */
public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
// DEFAULT state has no single-char string literals; go straight to the NFA
// starting at NFA state 6.
private int jjMoveStringLiteralDfa0_0()
{
return jjMoveNfa_0(6, 0);
}
// Bit vector covering characters 128..255: any non-ASCII character matches.
static final long[] jjbitVec0 = {
0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
};
// NFA simulation for the DEFAULT lexical state (generated by JavaCC).
// Maintains the set of live NFA states in jjstateSet while consuming input;
// the best match found so far is recorded in jjmatchedKind/jjmatchedPos.
// Returns the number of characters consumed. All jjMoveNfa_* methods share
// this structure and differ only in their transition tables.
private int jjMoveNfa_0(int startState, int curPos)
{
int startsAt = 0;
jjnewStateCnt = 8;
int i = 1;
jjstateSet[0] = startState;
int kind = 0x7fffffff;
for (;;)
{
if (++jjround == 0x7fffffff)
ReInitRounds();
if (curChar < 64)
{
// ASCII 0..63: `l` is the bit for curChar within a 64-bit mask.
long l = 1L << curChar;
do
{
switch(jjstateSet[--i])
{
case 6:
if (kind > 9)
kind = 9;
break;
case 0:
if (curChar == 32 && kind > 8)
kind = 8;
break;
default : break;
}
} while(i != startsAt);
}
else if (curChar < 128)
{
// ASCII 64..127: masks below encode letter sets (upper+lower case pairs).
long l = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 6:
if (kind > 9)
kind = 9;
if ((0x8000000080000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 5;
break;
case 1:
if ((0x10000000100000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 0;
break;
case 2:
if ((0x800000008L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 1;
break;
case 3:
if ((0x2000000020L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 2;
break;
case 4:
if ((0x100000001000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 3;
break;
case 5:
if ((0x2000000020L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 4;
break;
case 7:
if (kind > 9)
kind = 9;
break;
default : break;
}
} while(i != startsAt);
}
else
{
// Non-ASCII: jjbitVec0 accepts any character >= 128.
int i2 = (curChar & 0xff) >> 6;
long l2 = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 6:
if ((jjbitVec0[i2] & l2) != 0L && kind > 9)
kind = 9;
break;
default : break;
}
} while(i != startsAt);
}
if (kind != 0x7fffffff)
{
// A (better) match ended at this position; remember it and reset.
jjmatchedKind = kind;
jjmatchedPos = curPos;
kind = 0x7fffffff;
}
++curPos;
// Double-buffer trick: swap the "current" and "next" halves of jjstateSet;
// stop when no states remain live.
if ((i = jjnewStateCnt) == (startsAt = 8 - (jjnewStateCnt = startsAt)))
return curPos;
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) { return curPos; }
}
}
// WHERE_ST: no literal prefix can be resumed as an NFA match; always -1.
private final int jjStopStringLiteralDfa_5(int pos, long active0)
{
switch (pos)
{
default :
return -1;
}
}
private final int jjStartNfa_5(int pos, long active0)
{
return jjMoveNfa_5(jjStopStringLiteralDfa_5(pos, active0), pos + 1);
}
// Records a definite match of `kind` ending at `pos` and reports pos+1
// characters consumed.
private int jjStopAtPos(int pos, int kind)
{
jjmatchedKind = kind;
jjmatchedPos = pos;
return pos + 1;
}
// WHERE_ST literals: '(' (40) -> token 31, ')' (41) -> token 32; anything
// else falls through to the NFA.
private int jjMoveStringLiteralDfa0_5()
{
switch(curChar)
{
case 40:
return jjStopAtPos(0, 31);
case 41:
return jjStopAtPos(0, 32);
default :
return jjMoveNfa_5(0, 0);
}
}
// NFA for WHERE_ST: a single state that matches any character as token 33.
// Same generated structure as jjMoveNfa_0; see the comments there.
private int jjMoveNfa_5(int startState, int curPos)
{
int startsAt = 0;
jjnewStateCnt = 1;
int i = 1;
jjstateSet[0] = startState;
int kind = 0x7fffffff;
for (;;)
{
if (++jjround == 0x7fffffff)
ReInitRounds();
if (curChar < 64)
{
long l = 1L << curChar;
do
{
switch(jjstateSet[--i])
{
case 0:
kind = 33;
break;
default : break;
}
} while(i != startsAt);
}
else if (curChar < 128)
{
long l = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 0:
kind = 33;
break;
default : break;
}
} while(i != startsAt);
}
else
{
int i2 = (curChar & 0xff) >> 6;
long l2 = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 0:
if ((jjbitVec0[i2] & l2) != 0L && kind > 33)
kind = 33;
break;
default : break;
}
} while(i != startsAt);
}
if (kind != 0x7fffffff)
{
jjmatchedKind = kind;
jjmatchedPos = curPos;
kind = 0x7fffffff;
}
++curPos;
if ((i = jjnewStateCnt) == (startsAt = 1 - (jjnewStateCnt = startsAt)))
return curPos;
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) { return curPos; }
}
}
// JOIN_ST: no literal prefix can be resumed as an NFA match; always -1.
private final int jjStopStringLiteralDfa_3(int pos, long active0)
{
switch (pos)
{
default :
return -1;
}
}
private final int jjStartNfa_3(int pos, long active0)
{
return jjMoveNfa_3(jjStopStringLiteralDfa_3(pos, active0), pos + 1);
}
// JOIN_ST literals: '(' (40) -> token 21; anything else goes to the NFA
// starting at NFA state 3.
private int jjMoveStringLiteralDfa0_3()
{
switch(curChar)
{
case 40:
return jjStopAtPos(0, 21);
default :
return jjMoveNfa_3(3, 0);
}
}
private int jjMoveNfa_3(int startState, int curPos)
{
int startsAt = 0;
jjnewStateCnt = 5;
int i = 1;
jjstateSet[0] = startState;
int kind = 0x7fffffff;
for (;;)
{
if (++jjround == 0x7fffffff)
ReInitRounds();
if (curChar < 64)
{
long l = 1L << curChar;
do
{
switch(jjstateSet[--i])
{
case 3:
if (kind > 23)
kind = 23;
if (curChar == 32)
jjstateSet[jjnewStateCnt++] = 2;
break;
case 0:
if (curChar == 32 && kind > 22)
kind = 22;
break;
case 4:
if (kind > 23)
kind = 23;
break;
default : break;
}
} while(i != startsAt);
}
else if (curChar < 128)
{
long l = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 3:
if (kind > 23)
kind = 23;
break;
case 1:
if ((0x400000004000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 0;
break;
case 2:
if ((0x800000008000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 1;
break;
default : break;
}
} while(i != startsAt);
}
else
{
int i2 = (curChar & 0xff) >> 6;
long l2 = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 3:
if ((jjbitVec0[i2] & l2) != 0L && kind > 23)
kind = 23;
break;
default : break;
}
} while(i != startsAt);
}
if (kind != 0x7fffffff)
{
jjmatchedKind = kind;
jjmatchedPos = curPos;
kind = 0x7fffffff;
}
++curPos;
if ((i = jjnewStateCnt) == (startsAt = 5 - (jjnewStateCnt = startsAt)))
return curPos;
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) { return curPos; }
}
}
// SELECT_ST: no literal prefix can be resumed as an NFA match; always -1.
private final int jjStopStringLiteralDfa_1(int pos, long active0)
{
switch (pos)
{
default :
return -1;
}
}
private final int jjStartNfa_1(int pos, long active0)
{
return jjMoveNfa_1(jjStopStringLiteralDfa_1(pos, active0), pos + 1);
}
// SELECT_ST literals: '(' (40) -> token 11, ')' (41) -> token 12,
// ',' (44) -> token 10; anything else goes to the NFA at state 5.
private int jjMoveStringLiteralDfa0_1()
{
switch(curChar)
{
case 40:
return jjStopAtPos(0, 11);
case 41:
return jjStopAtPos(0, 12);
case 44:
return jjStopAtPos(0, 10);
default :
return jjMoveNfa_1(5, 0);
}
}
private int jjMoveNfa_1(int startState, int curPos)
{
int startsAt = 0;
jjnewStateCnt = 7;
int i = 1;
jjstateSet[0] = startState;
int kind = 0x7fffffff;
for (;;)
{
if (++jjround == 0x7fffffff)
ReInitRounds();
if (curChar < 64)
{
long l = 1L << curChar;
do
{
switch(jjstateSet[--i])
{
case 5:
if (kind > 14)
kind = 14;
if (curChar == 32)
jjstateSet[jjnewStateCnt++] = 4;
break;
case 0:
if (curChar == 32 && kind > 13)
kind = 13;
break;
case 6:
if (kind > 14)
kind = 14;
break;
default : break;
}
} while(i != startsAt);
}
else if (curChar < 128)
{
long l = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 5:
if (kind > 14)
kind = 14;
break;
case 1:
if ((0x200000002000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 0;
break;
case 2:
if ((0x800000008000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 1;
break;
case 3:
if ((0x4000000040000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 2;
break;
case 4:
if ((0x4000000040L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 3;
break;
default : break;
}
} while(i != startsAt);
}
else
{
int i2 = (curChar & 0xff) >> 6;
long l2 = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 5:
if ((jjbitVec0[i2] & l2) != 0L && kind > 14)
kind = 14;
break;
default : break;
}
} while(i != startsAt);
}
if (kind != 0x7fffffff)
{
jjmatchedKind = kind;
jjmatchedPos = curPos;
kind = 0x7fffffff;
}
++curPos;
if ((i = jjnewStateCnt) == (startsAt = 7 - (jjnewStateCnt = startsAt)))
return curPos;
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) { return curPos; }
}
}
// ON_ST: no literal prefix can be resumed as an NFA match; always -1.
private final int jjStopStringLiteralDfa_4(int pos, long active0)
{
switch (pos)
{
default :
return -1;
}
}
private final int jjStartNfa_4(int pos, long active0)
{
return jjMoveNfa_4(jjStopStringLiteralDfa_4(pos, active0), pos + 1);
}
// ON_ST literals: '(' (40) -> token 24, ')' (41) -> token 25; anything else
// goes to the NFA.
private int jjMoveStringLiteralDfa0_4()
{
switch(curChar)
{
case 40:
return jjStopAtPos(0, 24);
case 41:
return jjStopAtPos(0, 25);
default :
return jjMoveNfa_4(0, 0);
}
}
private int jjMoveNfa_4(int startState, int curPos)
{
int startsAt = 0;
jjnewStateCnt = 32;
int i = 1;
jjstateSet[0] = startState;
int kind = 0x7fffffff;
for (;;)
{
if (++jjround == 0x7fffffff)
ReInitRounds();
if (curChar < 64)
{
long l = 1L << curChar;
do
{
switch(jjstateSet[--i])
{
case 0:
if (kind > 27)
kind = 27;
if (curChar == 32)
jjAddStates(0, 3);
break;
case 1:
if (curChar == 32)
jjAddStates(0, 3);
break;
case 2:
if (curChar == 32 && kind > 26)
kind = 26;
break;
case 5:
if (curChar == 32 && kind > 28)
kind = 28;
break;
case 10:
if (curChar == 32)
jjstateSet[jjnewStateCnt++] = 9;
break;
case 16:
if (curChar == 32 && kind > 29)
kind = 29;
break;
case 21:
if (curChar == 32)
jjstateSet[jjnewStateCnt++] = 20;
break;
case 26:
if (curChar == 32 && kind > 30)
kind = 30;
break;
default : break;
}
} while(i != startsAt);
}
else if (curChar < 128)
{
long l = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 0:
if (kind > 27)
kind = 27;
break;
case 3:
if ((0x400000004000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 2;
break;
case 4:
if ((0x800000008000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 3;
break;
case 6:
if ((0x400000004000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 5;
break;
case 7:
if ((0x20000000200L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 6;
break;
case 8:
if ((0x800000008000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 7;
break;
case 9:
if ((0x40000000400L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 8;
break;
case 11:
if ((0x4000000040000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 10;
break;
case 12:
if ((0x2000000020L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 11;
break;
case 13:
if ((0x400000004000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 12;
break;
case 14:
if ((0x400000004000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 13;
break;
case 15:
if ((0x20000000200L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 14;
break;
case 17:
if ((0x400000004000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 16;
break;
case 18:
if ((0x20000000200L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 17;
break;
case 19:
if ((0x800000008000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 18;
break;
case 20:
if ((0x40000000400L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 19;
break;
case 22:
if ((0x10000000100000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 21;
break;
case 23:
if ((0x4000000040L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 22;
break;
case 24:
if ((0x2000000020L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 23;
break;
case 25:
if ((0x100000001000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 24;
break;
case 27:
if ((0x2000000020L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 26;
break;
case 28:
if ((0x4000000040000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 27;
break;
case 29:
if ((0x2000000020L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 28;
break;
case 30:
if ((0x10000000100L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 29;
break;
case 31:
if ((0x80000000800000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 30;
break;
default : break;
}
} while(i != startsAt);
}
else
{
int i2 = (curChar & 0xff) >> 6;
long l2 = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 0:
if ((jjbitVec0[i2] & l2) != 0L && kind > 27)
kind = 27;
break;
default : break;
}
} while(i != startsAt);
}
if (kind != 0x7fffffff)
{
jjmatchedKind = kind;
jjmatchedPos = curPos;
kind = 0x7fffffff;
}
++curPos;
if ((i = jjnewStateCnt) == (startsAt = 32 - (jjnewStateCnt = startsAt)))
return curPos;
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) { return curPos; }
}
}
// FROM_ST: no literal prefix can be resumed as an NFA match; always -1.
private final int jjStopStringLiteralDfa_2(int pos, long active0)
{
switch (pos)
{
default :
return -1;
}
}
private final int jjStartNfa_2(int pos, long active0)
{
return jjMoveNfa_2(jjStopStringLiteralDfa_2(pos, active0), pos + 1);
}
// FROM_ST literals: '(' (40) -> token 18, ')' (41) -> token 19; anything
// else goes to the NFA.
private int jjMoveStringLiteralDfa0_2()
{
switch(curChar)
{
case 40:
return jjStopAtPos(0, 18);
case 41:
return jjStopAtPos(0, 19);
default :
return jjMoveNfa_2(0, 0);
}
}
private int jjMoveNfa_2(int startState, int curPos)
{
int startsAt = 0;
jjnewStateCnt = 29;
int i = 1;
jjstateSet[0] = startState;
int kind = 0x7fffffff;
for (;;)
{
if (++jjround == 0x7fffffff)
ReInitRounds();
if (curChar < 64)
{
long l = 1L << curChar;
do
{
switch(jjstateSet[--i])
{
case 0:
if (kind > 20)
kind = 20;
if (curChar == 32)
jjAddStates(4, 6);
break;
case 1:
if (curChar == 32)
jjAddStates(4, 6);
break;
case 2:
if (curChar == 32 && kind > 15)
kind = 15;
break;
case 7:
if (curChar == 32)
jjstateSet[jjnewStateCnt++] = 6;
break;
case 13:
if (curChar == 32 && kind > 16)
kind = 16;
break;
case 18:
if (curChar == 32)
jjstateSet[jjnewStateCnt++] = 17;
break;
case 23:
if (curChar == 32 && kind > 17)
kind = 17;
break;
default : break;
}
} while(i != startsAt);
}
else if (curChar < 128)
{
long l = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 0:
if (kind > 20)
kind = 20;
break;
case 3:
if ((0x400000004000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 2;
break;
case 4:
if ((0x20000000200L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 3;
break;
case 5:
if ((0x800000008000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 4;
break;
case 6:
if ((0x40000000400L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 5;
break;
case 8:
if ((0x4000000040000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 7;
break;
case 9:
if ((0x2000000020L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 8;
break;
case 10:
if ((0x400000004000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 9;
break;
case 11:
if ((0x400000004000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 10;
break;
case 12:
if ((0x20000000200L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 11;
break;
case 14:
if ((0x400000004000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 13;
break;
case 15:
if ((0x20000000200L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 14;
break;
case 16:
if ((0x800000008000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 15;
break;
case 17:
if ((0x40000000400L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 16;
break;
case 19:
if ((0x10000000100000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 18;
break;
case 20:
if ((0x4000000040L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 19;
break;
case 21:
if ((0x2000000020L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 20;
break;
case 22:
if ((0x100000001000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 21;
break;
case 24:
if ((0x2000000020L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 23;
break;
case 25:
if ((0x4000000040000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 24;
break;
case 26:
if ((0x2000000020L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 25;
break;
case 27:
if ((0x10000000100L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 26;
break;
case 28:
if ((0x80000000800000L & l) != 0L)
jjstateSet[jjnewStateCnt++] = 27;
break;
default : break;
}
} while(i != startsAt);
}
else
{
int i2 = (curChar & 0xff) >> 6;
long l2 = 1L << (curChar & 077);
do
{
switch(jjstateSet[--i])
{
case 0:
if ((jjbitVec0[i2] & l2) != 0L && kind > 20)
kind = 20;
break;
default : break;
}
} while(i != startsAt);
}
if (kind != 0x7fffffff)
{
jjmatchedKind = kind;
jjmatchedPos = curPos;
kind = 0x7fffffff;
}
++curPos;
if ((i = jjnewStateCnt) == (startsAt = 29 - (jjnewStateCnt = startsAt)))
return curPos;
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) { return curPos; }
}
}
// Flattened successor lists used by jjAddStates(start, end).
static final int[] jjnextStates = {
4, 15, 25, 31, 12, 22, 28,
};
/** Token literal values. */
public static final String[] jjstrLiteralImages = {
"", null, null, null, null, null, null, null, null, null, null, null, null,
null, null, null, null, null, "\50", "\51", null, null, null, null, "\50", "\51",
null, null, null, null, null, "\50", "\51", null, };
/** Lexer state names. */
public static final String[] lexStateNames = {
"DEFAULT",
"SELECT_ST",
"FROM_ST",
"JOIN_ST",
"ON_ST",
"WHERE_ST",
};
/** Lex State array. */
// Per-token-kind lexical state to switch into after a match (-1 = no change).
public static final int[] jjnewLexState = {
-1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, 3, 3, 5, 0, -1, -1, 0, 4, -1, -1,
-1, -1, -1, 3, 3, 5, -1, -1, -1,
};
// Bitmasks over token kinds: TOKEN (returned), SKIP (discarded), MORE (kept
// as a prefix of the next match).
static final long[] jjtoToken = {
0x38f3c0001L,
};
static final long[] jjtoSkip = {
0x70438300L,
};
static final long[] jjtoMore = {
0x807c00L,
};
protected SimpleCharStream input_stream;
// Round stamps marking which NFA states were already added this round.
private final int[] jjrounds = new int[32];
// Double-buffered live-state set: current half and next half.
private final int[] jjstateSet = new int[64];
// Accumulates the image of the token being built (MORE tokens span matches).
private final StringBuffer jjimage = new StringBuffer();
private StringBuffer image = jjimage;
private int jjimageLen;
private int lengthOfMatch;
protected char curChar;
/** Constructor. */
public SqlFormatterTokenManager(SimpleCharStream stream){
// The generated stream class may be built in static mode, which cannot be
// paired with a per-instance token manager.
if (SimpleCharStream.staticFlag)
throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
input_stream = stream;
}
/** Constructor. */
public SqlFormatterTokenManager(SimpleCharStream stream, int lexState){
this(stream);
SwitchTo(lexState);
}
/** Reinitialise parser. */
public void ReInit(SimpleCharStream stream)
{
jjmatchedPos = jjnewStateCnt = 0;
curLexState = defaultLexState;
input_stream = stream;
ReInitRounds();
}
// Resets the per-state round stamps so jjCheckNAdd sees every state as new.
private void ReInitRounds()
{
int i;
jjround = 0x80000001;
for (i = 32; i-- > 0;)
jjrounds[i] = 0x80000000;
}
/** Reinitialise parser. */
public void ReInit(SimpleCharStream stream, int lexState)
{
ReInit(stream);
SwitchTo(lexState);
}
/** Switch to specified lex state. */
public void SwitchTo(int lexState)
{
// Valid states are 0..5 (see lexStateNames).
if (lexState >= 6 || lexState < 0)
throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
else
curLexState = lexState;
}
// Materializes a Token for the current match: uses the fixed literal image
// when the kind has one, otherwise the text consumed from the stream, and
// copies the stream's begin/end line+column positions onto the token.
protected Token jjFillToken()
{
final Token t;
final String curTokenImage;
final int beginLine;
final int endLine;
final int beginColumn;
final int endColumn;
String im = jjstrLiteralImages[jjmatchedKind];
curTokenImage = (im == null) ? input_stream.GetImage() : im;
beginLine = input_stream.getBeginLine();
beginColumn = input_stream.getBeginColumn();
endLine = input_stream.getEndLine();
endColumn = input_stream.getEndColumn();
t = Token.newToken(jjmatchedKind, curTokenImage);
t.beginLine = beginLine;
t.endLine = endLine;
t.beginColumn = beginColumn;
t.endColumn = endColumn;
return t;
}
// Current and default lexical state (indexes into lexStateNames).
int curLexState = 0;
int defaultLexState = 0;
// NFA bookkeeping: next free slot in jjstateSet, current round stamp.
int jjnewStateCnt;
int jjround;
// Position (offset within the current attempt) and kind of the best match.
int jjmatchedPos;
int jjmatchedKind;
/** Get the next Token. */
// Main tokenizer loop: dispatches to the per-state DFA entry point, then
// classifies the match as TOKEN (return it), SKIP (run skip actions and
// restart), or MORE (accumulate image and keep matching). Throws
// TokenMgrError when no rule matches the input.
public Token getNextToken()
{
Token matchedToken;
int curPos = 0;
EOFLoop :
for (;;)
{
try
{
curChar = input_stream.BeginToken();
}
catch(java.io.IOException e)
{
// End of input: synthesize the EOF token (kind 0).
jjmatchedKind = 0;
matchedToken = jjFillToken();
TokenLexicalActions(matchedToken);
return matchedToken;
}
image = jjimage;
image.setLength(0);
jjimageLen = 0;
for (;;)
{
switch(curLexState)
{
case 0:
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_0();
break;
case 1:
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_1();
break;
case 2:
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_2();
break;
case 3:
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_3();
break;
case 4:
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_4();
break;
case 5:
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_5();
break;
}
if (jjmatchedKind != 0x7fffffff)
{
// Push back any characters read past the end of the match.
if (jjmatchedPos + 1 < curPos)
input_stream.backup(curPos - jjmatchedPos - 1);
if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
{
matchedToken = jjFillToken();
TokenLexicalActions(matchedToken);
if (jjnewLexState[jjmatchedKind] != -1)
curLexState = jjnewLexState[jjmatchedKind];
return matchedToken;
}
else if ((jjtoSkip[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
{
SkipLexicalActions(null);
if (jjnewLexState[jjmatchedKind] != -1)
curLexState = jjnewLexState[jjmatchedKind];
continue EOFLoop;
}
// Neither TOKEN nor SKIP: a MORE rule — keep the image and continue.
MoreLexicalActions();
if (jjnewLexState[jjmatchedKind] != -1)
curLexState = jjnewLexState[jjmatchedKind];
curPos = 0;
jjmatchedKind = 0x7fffffff;
try {
curChar = input_stream.readChar();
continue;
}
catch (java.io.IOException e1) { }
}
// No rule matched (or EOF mid-MORE): build a lexical-error report.
int error_line = input_stream.getEndLine();
int error_column = input_stream.getEndColumn();
String error_after = null;
boolean EOFSeen = false;
try { input_stream.readChar(); input_stream.backup(1); }
catch (java.io.IOException e1) {
EOFSeen = true;
error_after = curPos <= 1 ? "" : input_stream.GetImage();
if (curChar == '\n' || curChar == '\r') {
error_line++;
error_column = 0;
}
else
error_column++;
}
if (!EOFSeen) {
input_stream.backup(1);
error_after = curPos <= 1 ? "" : input_stream.GetImage();
}
throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
}
}
}
// Actions for SKIP rules: the matched keyword is not returned as a token,
// but its text is folded into the formatted output (res) here.
void SkipLexicalActions(Token matchedToken)
{
switch(jjmatchedKind)
{
case 8 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
// Leading SELECT keyword of the statement.
res.append("select ");
break;
case 15 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
updateRes(INJOIN);
break;
case 16 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
updateRes(LJOIN);
break;
case 17 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
// Entering the WHERE clause resets parenthesis nesting.
updateRes(WHERE); nesting=0;
break;
case 22 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
updateRes(ON);
break;
case 28 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
updateRes(INJOIN);
break;
case 29 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
updateRes(LJOIN);
break;
case 30 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
updateRes(WHERE);
break;
default :
break;
}
}
// Actions for MORE rules inside the SELECT clause: track commas, parenthesis
// nesting, and the transition into the FROM clause.
void MoreLexicalActions()
{
jjimageLen += (lengthOfMatch = jjmatchedPos + 1);
switch(jjmatchedKind)
{
case 10 :
image.append(input_stream.GetSuffix(jjimageLen));
jjimageLen = 0;
// Comma in the select list: break the line in the accumulated image.
image.append(NLI());
break;
case 11 :
image.append(input_stream.GetSuffix(jjimageLen));
jjimageLen = 0;
nesting++;
break;
case 12 :
image.append(input_stream.GetSuffix(jjimageLen));
jjimageLen = 0;
nesting--;
break;
case 13 :
image.append(input_stream.GetSuffix(jjimageLen));
jjimageLen = 0;
// FROM at the top nesting level ends the select list.
if(nesting == 0) {updateRes(FROM); SwitchTo(FROM_ST);}
break;
default :
break;
}
}
// Actions for TOKEN rules: copy matched text into the formatted output and
// maintain indent/nesting across parentheses and clause boundaries.
void TokenLexicalActions(Token matchedToken)
{
switch(jjmatchedKind)
{
case 0 :
// EOF: flush whatever image remains (cleared first, so a no-op append).
image.setLength(0);
res.append(image);
break;
case 18 :
image.append(jjstrLiteralImages[18]);
lengthOfMatch = jjstrLiteralImages[18].length();
// '(' in FROM_ST opens an indented block.
indent++; res.append("("+NLI());
break;
case 19 :
image.append(jjstrLiteralImages[19]);
lengthOfMatch = jjstrLiteralImages[19].length();
// Unbalanced ')' closes the block and returns to the ON clause.
res.append(")"); nesting--; if(nesting < 0) {indent--; nesting=0; SwitchTo(ON_ST);}
break;
case 20 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
res.append(image);
break;
case 21 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
indent++; res.append("("+NLI());
break;
case 24 :
image.append(jjstrLiteralImages[24]);
lengthOfMatch = jjstrLiteralImages[24].length();
res.append("("); nesting++;
break;
case 25 :
image.append(jjstrLiteralImages[25]);
lengthOfMatch = jjstrLiteralImages[25].length();
res.append(")"); nesting--; if(nesting < 0) {indent--; nesting=0;}
break;
case 26 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
updateRes(ON);
break;
case 27 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
res.append(image);
break;
case 31 :
image.append(jjstrLiteralImages[31]);
lengthOfMatch = jjstrLiteralImages[31].length();
res.append("("); nesting++;
break;
case 32 :
image.append(jjstrLiteralImages[32]);
lengthOfMatch = jjstrLiteralImages[32].length();
res.append(")"); nesting--; if(nesting < 0) {indent--; nesting=0; SwitchTo(ON_ST);}
break;
case 33 :
image.append(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));
res.append(image);
break;
default :
break;
}
}
// Adds an NFA state to the live set unless it was already added this round
// (jjrounds[state] acts as a visited stamp).
private void jjCheckNAdd(int state)
{
if (jjrounds[state] != jjround)
{
jjstateSet[jjnewStateCnt++] = state;
jjrounds[state] = jjround;
}
}
// Adds the successor states jjnextStates[start..end] (inclusive) to the live set.
private void jjAddStates(int start, int end)
{
do {
jjstateSet[jjnewStateCnt++] = jjnextStates[start];
} while (start++ != end);
}
private void jjCheckNAddTwoStates(int state1, int state2)
{
jjCheckNAdd(state1);
jjCheckNAdd(state2);
}
}
| |
/*
* Copyright 2005-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.ldap.core;
import javax.naming.Name;
import javax.naming.directory.BasicAttributes;
import javax.naming.directory.DirContext;
import javax.naming.ldap.LdapContext;
import junit.framework.TestCase;
import org.easymock.MockControl;
import org.springframework.ldap.NameNotFoundException;
public class LdapTemplateLookupTest extends TestCase {
private static final String DEFAULT_BASE_STRING = "o=example.com";
// EasyMock 1.x style: each collaborator has a MockControl plus the mock it produces.
private MockControl contextSourceControl;
private ContextSource contextSourceMock;
private MockControl dirContextControl;
private DirContext dirContextMock;
private MockControl attributesMapperControl;
private AttributesMapper attributesMapperMock;
private MockControl nameControl;
private Name nameMock;
private MockControl contextMapperControl;
private ContextMapper contextMapperMock;
// Object under test, wired with the mocked ContextSource.
private LdapTemplate tested;
// Creates fresh mocks for every collaborator and wires the LdapTemplate
// under test to the mocked ContextSource.
protected void setUp() throws Exception {
super.setUp();
// Setup ContextSource mock
contextSourceControl = MockControl.createControl(ContextSource.class);
contextSourceMock = (ContextSource) contextSourceControl.getMock();
// Setup LdapContext mock
dirContextControl = MockControl.createControl(LdapContext.class);
dirContextMock = (LdapContext) dirContextControl.getMock();
// Setup Name mock
nameControl = MockControl.createControl(Name.class);
nameMock = (Name) nameControl.getMock();
contextMapperControl = MockControl.createControl(ContextMapper.class);
contextMapperMock = (ContextMapper) contextMapperControl.getMock();
attributesMapperControl = MockControl
.createControl(AttributesMapper.class);
attributesMapperMock = (AttributesMapper) attributesMapperControl
.getMock();
tested = new LdapTemplate(contextSourceMock);
}
// Clears all mock references so each test starts from setUp()'s fresh set.
// NOTE(review): `tested` is not nulled here, unlike the mocks — presumably
// harmless since setUp() reassigns it, but worth confirming it's intentional.
protected void tearDown() throws Exception {
super.tearDown();
contextSourceControl = null;
contextSourceMock = null;
dirContextControl = null;
dirContextMock = null;
nameControl = null;
nameMock = null;
contextMapperControl = null;
contextMapperMock = null;
attributesMapperControl = null;
attributesMapperMock = null;
}
// Switches every mock from record mode to replay mode.
protected void replay() {
contextSourceControl.replay();
dirContextControl.replay();
nameControl.replay();
contextMapperControl.replay();
attributesMapperControl.replay();
}
// Verifies that every recorded expectation was satisfied.
protected void verify() {
contextSourceControl.verify();
dirContextControl.verify();
nameControl.verify();
contextMapperControl.verify();
attributesMapperControl.verify();
}
// Records the common expectation: the template asks the ContextSource for a
// read-only context and receives the mocked DirContext.
private void expectGetReadOnlyContext() {
contextSourceControl.expectAndReturn(contextSourceMock
.getReadOnlyContext(), dirContextMock);
}
// Tests for lookup(name)
// Plain lookup by Name returns the object found and closes the context.
public void testLookup() throws Exception {
expectGetReadOnlyContext();
Object expected = new Object();
dirContextControl.expectAndReturn(dirContextMock.lookup(nameMock),
expected);
dirContextMock.close();
replay();
Object actual = tested.lookup(nameMock);
verify();
assertSame(expected, actual);
}
// Same as above, using the String-based overload.
public void testLookup_String() throws Exception {
expectGetReadOnlyContext();
Object expected = new Object();
dirContextControl.expectAndReturn(dirContextMock
.lookup(DEFAULT_BASE_STRING), expected);
dirContextMock.close();
replay();
Object actual = tested.lookup(DEFAULT_BASE_STRING);
verify();
assertSame(expected, actual);
}
// A javax.naming.NameNotFoundException from JNDI is translated to Spring's
// unchecked NameNotFoundException; the context is still closed.
public void testLookup_NamingException() throws Exception {
expectGetReadOnlyContext();
javax.naming.NameNotFoundException ne = new javax.naming.NameNotFoundException();
dirContextControl.expectAndThrow(dirContextMock.lookup(nameMock), ne);
dirContextMock.close();
replay();
try {
tested.lookup(nameMock);
fail("NameNotFoundException expected");
} catch (NameNotFoundException expected) {
assertTrue(true);
}
verify();
}
// Tests for lookup(name, AttributesMapper)
public void testLookup_AttributesMapper() throws Exception {
expectGetReadOnlyContext();
BasicAttributes expectedAttributes = new BasicAttributes();
dirContextControl.expectAndReturn(dirContextMock
.getAttributes(nameMock), expectedAttributes);
dirContextMock.close();
Object expected = new Object();
attributesMapperControl.expectAndReturn(attributesMapperMock
.mapFromAttributes(expectedAttributes), expected);
replay();
Object actual = tested.lookup(nameMock, attributesMapperMock);
verify();
assertSame(expected, actual);
}
public void testLookup_String_AttributesMapper() throws Exception {
    // given: attributes are fetched for the String DN and mapped
    expectGetReadOnlyContext();
    BasicAttributes attributes = new BasicAttributes();
    dirContextControl.expectAndReturn(
            dirContextMock.getAttributes(DEFAULT_BASE_STRING), attributes);
    dirContextMock.close();
    Object mapped = new Object();
    attributesMapperControl.expectAndReturn(
            attributesMapperMock.mapFromAttributes(attributes), mapped);
    replay();

    // when
    Object result = tested.lookup(DEFAULT_BASE_STRING, attributesMapperMock);

    // then
    verify();
    assertSame(mapped, result);
}
public void testLookup_AttributesMapper_NamingException() throws Exception {
    // given: getAttributes fails; the context must still be closed and the
    // mapper must never be invoked.
    expectGetReadOnlyContext();
    javax.naming.NameNotFoundException cause = new javax.naming.NameNotFoundException();
    dirContextControl.expectAndThrow(
            dirContextMock.getAttributes(nameMock), cause);
    dirContextMock.close();
    replay();

    // when/then: the JNDI exception is translated to the unchecked variant
    try {
        tested.lookup(nameMock, attributesMapperMock);
        fail("NameNotFoundException expected");
    } catch (NameNotFoundException e) {
        assertTrue(true);
    }
    verify();
}
// Tests for lookup(name, ContextMapper)
public void testLookup_ContextMapper() throws Exception {
    // given: the looked-up context object is handed to the ContextMapper
    expectGetReadOnlyContext();
    Object mapped = new Object();
    Object lookedUpObject = new Object();
    dirContextControl.expectAndReturn(
            dirContextMock.lookup(nameMock), lookedUpObject);
    dirContextMock.close();
    contextMapperControl.expectAndReturn(
            contextMapperMock.mapFromContext(lookedUpObject), mapped);
    replay();

    // when
    Object result = tested.lookup(nameMock, contextMapperMock);

    // then
    verify();
    assertSame(mapped, result);
}
public void testLookup_String_ContextMapper() throws Exception {
    // given: lookup by String DN, result handed to the ContextMapper
    expectGetReadOnlyContext();
    Object mapped = new Object();
    Object lookedUpObject = new Object();
    dirContextControl.expectAndReturn(
            dirContextMock.lookup(DEFAULT_BASE_STRING), lookedUpObject);
    dirContextMock.close();
    contextMapperControl.expectAndReturn(
            contextMapperMock.mapFromContext(lookedUpObject), mapped);
    replay();

    // when
    Object result = tested.lookup(DEFAULT_BASE_STRING, contextMapperMock);

    // then
    verify();
    assertSame(mapped, result);
}
public void testLookup_ContextMapper_NamingException() throws Exception {
    // given: the lookup fails; the context must still be closed and the
    // mapper must never be invoked.
    expectGetReadOnlyContext();
    javax.naming.NameNotFoundException cause = new javax.naming.NameNotFoundException();
    dirContextControl.expectAndThrow(dirContextMock.lookup(nameMock), cause);
    dirContextMock.close();
    replay();

    // when/then: the JNDI exception is translated to the unchecked variant
    try {
        tested.lookup(nameMock, contextMapperMock);
        fail("NameNotFoundException expected");
    } catch (NameNotFoundException e) {
        assertTrue(true);
    }
    verify();
}
// Tests for lookup(name, attributes, AttributesMapper)
public void testLookup_ReturnAttributes_AttributesMapper() throws Exception {
    // given: only the requested attribute ids are fetched, then mapped
    expectGetReadOnlyContext();
    String[] returningAttributes = { "cn" };
    BasicAttributes attributes = new BasicAttributes();
    attributes.put("cn", "Some Name");
    dirContextControl.expectAndReturn(
            dirContextMock.getAttributes(nameMock, returningAttributes), attributes);
    dirContextMock.close();
    Object mapped = new Object();
    attributesMapperControl.expectAndReturn(
            attributesMapperMock.mapFromAttributes(attributes), mapped);
    replay();

    // when
    Object result = tested.lookup(nameMock, returningAttributes,
            attributesMapperMock);

    // then
    verify();
    assertSame(mapped, result);
}
public void testLookup_String_ReturnAttributes_AttributesMapper()
        throws Exception {
    // given: attribute-restricted fetch by String DN, then mapped
    expectGetReadOnlyContext();
    String[] returningAttributes = { "cn" };
    BasicAttributes attributes = new BasicAttributes();
    attributes.put("cn", "Some Name");
    dirContextControl.expectAndReturn(
            dirContextMock.getAttributes(DEFAULT_BASE_STRING, returningAttributes),
            attributes);
    dirContextMock.close();
    Object mapped = new Object();
    attributesMapperControl.expectAndReturn(
            attributesMapperMock.mapFromAttributes(attributes), mapped);
    replay();

    // when
    Object result = tested.lookup(DEFAULT_BASE_STRING, returningAttributes,
            attributesMapperMock);

    // then
    verify();
    assertSame(mapped, result);
}
// Tests for lookup(name, attributes, ContextMapper)
public void testLookup_ReturnAttributes_ContextMapper() throws Exception {
    // given: the template is expected to wrap the fetched attributes in a
    // DirContextAdapter (matched by equality) before invoking the mapper.
    expectGetReadOnlyContext();
    String[] returningAttributes = { "cn" };
    BasicAttributes attributes = new BasicAttributes();
    attributes.put("cn", "Some Name");
    DistinguishedName dn = new DistinguishedName(DEFAULT_BASE_STRING);
    DirContextAdapter expectedAdapter = new DirContextAdapter(attributes, dn);
    dirContextControl.expectAndReturn(
            dirContextMock.getAttributes(dn, returningAttributes), attributes);
    dirContextMock.close();
    Object mapped = new Object();
    contextMapperControl.expectAndReturn(
            contextMapperMock.mapFromContext(expectedAdapter), mapped);
    replay();

    // when
    Object result = tested.lookup(dn, returningAttributes, contextMapperMock);

    // then
    verify();
    assertSame(mapped, result);
}
public void testLookup_String_ReturnAttributes_ContextMapper()
        throws Exception {
    // given: attribute-restricted fetch by String DN; the template wraps the
    // result in a DirContextAdapter (matched by equality) for the mapper.
    expectGetReadOnlyContext();
    String[] returningAttributes = { "cn" };
    BasicAttributes attributes = new BasicAttributes();
    attributes.put("cn", "Some Name");
    dirContextControl.expectAndReturn(
            dirContextMock.getAttributes(DEFAULT_BASE_STRING, returningAttributes),
            attributes);
    dirContextMock.close();
    DistinguishedName dn = new DistinguishedName(DEFAULT_BASE_STRING);
    DirContextAdapter expectedAdapter = new DirContextAdapter(attributes, dn);
    Object mapped = new Object();
    contextMapperControl.expectAndReturn(
            contextMapperMock.mapFromContext(expectedAdapter), mapped);
    replay();

    // when
    Object result = tested.lookup(DEFAULT_BASE_STRING, returningAttributes,
            contextMapperMock);

    // then
    verify();
    assertSame(mapped, result);
}
}
| |
// Generated from MetaModelParser.g4 by ANTLR 4.5.1
package metadslx.compiler;
import metadslx.core.*;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ErrorNode;
import org.antlr.v4.runtime.tree.TerminalNode;
/**
* This class provides an empty implementation of {@link MetaModelParserListener},
* which can be extended to create a listener which only needs to handle a subset
* of the available methods.
*/
public class MetaModelParserBaseListener implements MetaModelParserListener {
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMain(MetaModelParser.MainContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMain(MetaModelParser.MainContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterQualifiedName(MetaModelParser.QualifiedNameContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitQualifiedName(MetaModelParser.QualifiedNameContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIdentifierList(MetaModelParser.IdentifierListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIdentifierList(MetaModelParser.IdentifierListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterQualifiedNameList(MetaModelParser.QualifiedNameListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitQualifiedNameList(MetaModelParser.QualifiedNameListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterAnnotation(MetaModelParser.AnnotationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitAnnotation(MetaModelParser.AnnotationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterAnnotationParams(MetaModelParser.AnnotationParamsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitAnnotationParams(MetaModelParser.AnnotationParamsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterAnnotationParamList(MetaModelParser.AnnotationParamListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitAnnotationParamList(MetaModelParser.AnnotationParamListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterAnnotationParam(MetaModelParser.AnnotationParamContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitAnnotationParam(MetaModelParser.AnnotationParamContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterNamespaceDeclaration(MetaModelParser.NamespaceDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitNamespaceDeclaration(MetaModelParser.NamespaceDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMetamodelDeclaration(MetaModelParser.MetamodelDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMetamodelDeclaration(MetaModelParser.MetamodelDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMetamodelPropertyList(MetaModelParser.MetamodelPropertyListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMetamodelPropertyList(MetaModelParser.MetamodelPropertyListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMetamodelProperty(MetaModelParser.MetamodelPropertyContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMetamodelProperty(MetaModelParser.MetamodelPropertyContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterDeclaration(MetaModelParser.DeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitDeclaration(MetaModelParser.DeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEnumDeclaration(MetaModelParser.EnumDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEnumDeclaration(MetaModelParser.EnumDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEnumValues(MetaModelParser.EnumValuesContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEnumValues(MetaModelParser.EnumValuesContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEnumValue(MetaModelParser.EnumValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEnumValue(MetaModelParser.EnumValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEnumMemberDeclaration(MetaModelParser.EnumMemberDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEnumMemberDeclaration(MetaModelParser.EnumMemberDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterClassDeclaration(MetaModelParser.ClassDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitClassDeclaration(MetaModelParser.ClassDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterClassAncestors(MetaModelParser.ClassAncestorsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitClassAncestors(MetaModelParser.ClassAncestorsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterClassAncestor(MetaModelParser.ClassAncestorContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitClassAncestor(MetaModelParser.ClassAncestorContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterClassMemberDeclaration(MetaModelParser.ClassMemberDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitClassMemberDeclaration(MetaModelParser.ClassMemberDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFieldDeclaration(MetaModelParser.FieldDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFieldDeclaration(MetaModelParser.FieldDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFieldModifier(MetaModelParser.FieldModifierContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFieldModifier(MetaModelParser.FieldModifierContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterRedefinitions(MetaModelParser.RedefinitionsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitRedefinitions(MetaModelParser.RedefinitionsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSubsettings(MetaModelParser.SubsettingsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSubsettings(MetaModelParser.SubsettingsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterNameUseList(MetaModelParser.NameUseListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitNameUseList(MetaModelParser.NameUseListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterConstDeclaration(MetaModelParser.ConstDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitConstDeclaration(MetaModelParser.ConstDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFunctionDeclaration(MetaModelParser.FunctionDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFunctionDeclaration(MetaModelParser.FunctionDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterReturnType(MetaModelParser.ReturnTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitReturnType(MetaModelParser.ReturnTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTypeOfReference(MetaModelParser.TypeOfReferenceContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTypeOfReference(MetaModelParser.TypeOfReferenceContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTypeReference(MetaModelParser.TypeReferenceContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTypeReference(MetaModelParser.TypeReferenceContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSimpleType(MetaModelParser.SimpleTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSimpleType(MetaModelParser.SimpleTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterClassType(MetaModelParser.ClassTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitClassType(MetaModelParser.ClassTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterObjectType(MetaModelParser.ObjectTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitObjectType(MetaModelParser.ObjectTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPrimitiveType(MetaModelParser.PrimitiveTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPrimitiveType(MetaModelParser.PrimitiveTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterVoidType(MetaModelParser.VoidTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitVoidType(MetaModelParser.VoidTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterInvisibleType(MetaModelParser.InvisibleTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitInvisibleType(MetaModelParser.InvisibleTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterNullableType(MetaModelParser.NullableTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitNullableType(MetaModelParser.NullableTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterCollectionType(MetaModelParser.CollectionTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitCollectionType(MetaModelParser.CollectionTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterCollectionKind(MetaModelParser.CollectionKindContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitCollectionKind(MetaModelParser.CollectionKindContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterOperationDeclaration(MetaModelParser.OperationDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitOperationDeclaration(MetaModelParser.OperationDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterParameterList(MetaModelParser.ParameterListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitParameterList(MetaModelParser.ParameterListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterParameter(MetaModelParser.ParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitParameter(MetaModelParser.ParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterConstructorDeclaration(MetaModelParser.ConstructorDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitConstructorDeclaration(MetaModelParser.ConstructorDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterInitializerDeclaration(MetaModelParser.InitializerDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitInitializerDeclaration(MetaModelParser.InitializerDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSynthetizedPropertyInitializer(MetaModelParser.SynthetizedPropertyInitializerContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSynthetizedPropertyInitializer(MetaModelParser.SynthetizedPropertyInitializerContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterInheritedPropertyInitializer(MetaModelParser.InheritedPropertyInitializerContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitInheritedPropertyInitializer(MetaModelParser.InheritedPropertyInitializerContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionList(MetaModelParser.ExpressionListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionList(MetaModelParser.ExpressionListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionOrNewExpressionList(MetaModelParser.ExpressionOrNewExpressionListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionOrNewExpressionList(MetaModelParser.ExpressionOrNewExpressionListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionOrNewExpression(MetaModelParser.ExpressionOrNewExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionOrNewExpression(MetaModelParser.ExpressionOrNewExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIndexerExpression(MetaModelParser.IndexerExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIndexerExpression(MetaModelParser.IndexerExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterAdditiveExpression(MetaModelParser.AdditiveExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitAdditiveExpression(MetaModelParser.AdditiveExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIdentifierExpression(MetaModelParser.IdentifierExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIdentifierExpression(MetaModelParser.IdentifierExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterComparisonExpression(MetaModelParser.ComparisonExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitComparisonExpression(MetaModelParser.ComparisonExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPostExpression(MetaModelParser.PostExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPostExpression(MetaModelParser.PostExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBracketExpression(MetaModelParser.BracketExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBracketExpression(MetaModelParser.BracketExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterCastExpression(MetaModelParser.CastExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitCastExpression(MetaModelParser.CastExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBitwiseAndExpression(MetaModelParser.BitwiseAndExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBitwiseAndExpression(MetaModelParser.BitwiseAndExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLogicalAndExpression(MetaModelParser.LogicalAndExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLogicalAndExpression(MetaModelParser.LogicalAndExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFunctionCallExpression(MetaModelParser.FunctionCallExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFunctionCallExpression(MetaModelParser.FunctionCallExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMemberAccessExpression(MetaModelParser.MemberAccessExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMemberAccessExpression(MetaModelParser.MemberAccessExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTypeConversionExpression(MetaModelParser.TypeConversionExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTypeConversionExpression(MetaModelParser.TypeConversionExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterUnaryExpression(MetaModelParser.UnaryExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitUnaryExpression(MetaModelParser.UnaryExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBitwiseXorExpression(MetaModelParser.BitwiseXorExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBitwiseXorExpression(MetaModelParser.BitwiseXorExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterConstantExpression(MetaModelParser.ConstantExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitConstantExpression(MetaModelParser.ConstantExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterShiftExpression(MetaModelParser.ShiftExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitShiftExpression(MetaModelParser.ShiftExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterConditionalExpression(MetaModelParser.ConditionalExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitConditionalExpression(MetaModelParser.ConditionalExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterAssignmentExpression(MetaModelParser.AssignmentExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitAssignmentExpression(MetaModelParser.AssignmentExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMultiplicativeExpression(MetaModelParser.MultiplicativeExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMultiplicativeExpression(MetaModelParser.MultiplicativeExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLogicalOrExpression(MetaModelParser.LogicalOrExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLogicalOrExpression(MetaModelParser.LogicalOrExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPreExpression(MetaModelParser.PreExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
	// NOTE(review): ANTLR-generated base-listener boilerplate. Every callback below is an
	// intentionally empty default so that subclasses override only the rules they care about.
	// Do not hand-edit logic here — regenerate from the grammar instead.
	@Override public void exitPreExpression(MetaModelParser.PreExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterBitwiseOrExpression(MetaModelParser.BitwiseOrExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitBitwiseOrExpression(MetaModelParser.BitwiseOrExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterTypeofExpression(MetaModelParser.TypeofExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitTypeofExpression(MetaModelParser.TypeofExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterTypeCheckExpression(MetaModelParser.TypeCheckExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitTypeCheckExpression(MetaModelParser.TypeCheckExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterThisExpression(MetaModelParser.ThisExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitThisExpression(MetaModelParser.ThisExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterEqualityExpression(MetaModelParser.EqualityExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitEqualityExpression(MetaModelParser.EqualityExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterNullCoalescingExpression(MetaModelParser.NullCoalescingExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitNullCoalescingExpression(MetaModelParser.NullCoalescingExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterLiteralExpression(MetaModelParser.LiteralExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitLiteralExpression(MetaModelParser.LiteralExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterNewObjectExpression(MetaModelParser.NewObjectExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitNewObjectExpression(MetaModelParser.NewObjectExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterNewCollectionExpression(MetaModelParser.NewCollectionExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitNewCollectionExpression(MetaModelParser.NewCollectionExpressionContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterNewPropertyInitList(MetaModelParser.NewPropertyInitListContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitNewPropertyInitList(MetaModelParser.NewPropertyInitListContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterNewPropertyInit(MetaModelParser.NewPropertyInitContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitNewPropertyInit(MetaModelParser.NewPropertyInitContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterPostOperator(MetaModelParser.PostOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitPostOperator(MetaModelParser.PostOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterPreOperator(MetaModelParser.PreOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitPreOperator(MetaModelParser.PreOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterUnaryOperator(MetaModelParser.UnaryOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitUnaryOperator(MetaModelParser.UnaryOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterMultiplicativeOperator(MetaModelParser.MultiplicativeOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitMultiplicativeOperator(MetaModelParser.MultiplicativeOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterAdditiveOperator(MetaModelParser.AdditiveOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitAdditiveOperator(MetaModelParser.AdditiveOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterShiftOperator(MetaModelParser.ShiftOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitShiftOperator(MetaModelParser.ShiftOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterComparisonOperator(MetaModelParser.ComparisonOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitComparisonOperator(MetaModelParser.ComparisonOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterEqualityOperator(MetaModelParser.EqualityOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitEqualityOperator(MetaModelParser.EqualityOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterAssignmentOperator(MetaModelParser.AssignmentOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitAssignmentOperator(MetaModelParser.AssignmentOperatorContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterAssociationDeclaration(MetaModelParser.AssociationDeclarationContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitAssociationDeclaration(MetaModelParser.AssociationDeclarationContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterIdentifier(MetaModelParser.IdentifierContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitIdentifier(MetaModelParser.IdentifierContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterLiteral(MetaModelParser.LiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitLiteral(MetaModelParser.LiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterNullLiteral(MetaModelParser.NullLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitNullLiteral(MetaModelParser.NullLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterBooleanLiteral(MetaModelParser.BooleanLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitBooleanLiteral(MetaModelParser.BooleanLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterIntegerLiteral(MetaModelParser.IntegerLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitIntegerLiteral(MetaModelParser.IntegerLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterDecimalLiteral(MetaModelParser.DecimalLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitDecimalLiteral(MetaModelParser.DecimalLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterScientificLiteral(MetaModelParser.ScientificLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitScientificLiteral(MetaModelParser.ScientificLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterStringLiteral(MetaModelParser.StringLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitStringLiteral(MetaModelParser.StringLiteralContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void enterEveryRule(ParserRuleContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void exitEveryRule(ParserRuleContext ctx) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void visitTerminal(TerminalNode node) { }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation does nothing.</p>
	 */
	@Override public void visitErrorNode(ErrorNode node) { }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.repair;
import java.net.InetAddress;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.rows.UnfilteredRowIterator;
import org.apache.cassandra.db.rows.UnfilteredRowIterators;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.repair.messages.ValidationComplete;
import org.apache.cassandra.streaming.PreviewKind;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.MerkleTree;
import org.apache.cassandra.utils.MerkleTree.RowHash;
import org.apache.cassandra.utils.MerkleTrees;
/**
* Handles the building of a merkle tree for a column family.
*
* Lifecycle:
* 1. prepare() - Initialize tree with samples.
* 2. add() - 0 or more times, to add hashes to the tree.
* 3. complete() - Enqueues any operations that were blocked waiting for a valid tree.
*/
public class Validator implements Runnable
{
    private static final Logger logger = LoggerFactory.getLogger(Validator.class);
    // Identifies the repair session/job/ranges this validation belongs to.
    public final RepairJobDesc desc;
    // Node that requested this validation; results are sent back to it in run()/fail().
    public final InetAddress initiator;
    public final int nowInSec;
    // When true, prepare() skips key sampling and initialises an evenly split tree.
    private final boolean evenTreeDistribution;
    public final boolean isIncremental;
    // Number of partitions hashed so far (incremented in rowHash(), reported in complete()).
    private long validated;
    // Merkle trees being built for this validation; assigned in prepare().
    private MerkleTrees trees;
    // current range being updated
    private MerkleTree.TreeRange range;
    // iterator for iterating sub ranges (MT's leaves)
    private MerkleTrees.TreeRangeIterator ranges;
    // last key seen; used to assert partitions arrive in token order in add()
    private DecoratedKey lastKey;
    private final PreviewKind previewKind;
    /** Convenience constructor: non-incremental, sampled (uneven) tree distribution. */
    public Validator(RepairJobDesc desc, InetAddress initiator, int nowInSec, PreviewKind previewKind)
    {
        this(desc, initiator, nowInSec, false, false, previewKind);
    }
    /** Convenience constructor: sampled (uneven) tree distribution. */
    public Validator(RepairJobDesc desc, InetAddress initiator, int nowInSec, boolean isIncremental, PreviewKind previewKind)
    {
        this(desc, initiator, nowInSec, false, isIncremental, previewKind);
    }
    public Validator(RepairJobDesc desc, InetAddress initiator, int nowInSec, boolean evenTreeDistribution, boolean isIncremental, PreviewKind previewKind)
    {
        this.desc = desc;
        this.initiator = initiator;
        this.nowInSec = nowInSec;
        this.isIncremental = isIncremental;
        this.previewKind = previewKind;
        validated = 0;
        range = null;
        ranges = null;
        this.evenTreeDistribution = evenTreeDistribution;
    }
    /**
     * Initialises the given trees, either evenly (when the partitioner does not preserve
     * order, or even distribution was requested) or by splitting at randomly chosen key
     * samples from the column family, then positions {@code ranges} at the tree's leaves.
     *
     * @param cfs  store used to obtain key samples per range
     * @param tree trees covering the ranges to validate; retained in {@code this.trees}
     */
    public void prepare(ColumnFamilyStore cfs, MerkleTrees tree)
    {
        this.trees = tree;
        if (!tree.partitioner().preservesOrder() || evenTreeDistribution)
        {
            // You can't beat an even tree distribution for md5
            tree.init();
        }
        else
        {
            List<DecoratedKey> keys = new ArrayList<>();
            Random random = new Random();
            for (Range<Token> range : tree.ranges())
            {
                for (DecoratedKey sample : cfs.keySamples(range))
                {
                    assert range.contains(sample.getToken()) : "Token " + sample.getToken() + " is not within range " + desc.ranges;
                    keys.add(sample);
                }
                if (keys.isEmpty())
                {
                    // no samples for this range: fall back to an even tree distribution
                    tree.init(range);
                }
                else
                {
                    int numKeys = keys.size();
                    // sample the column family using random keys from the index
                    // NOTE(review): split() returning false presumably means the tree refuses
                    // further subdivision (e.g. max depth reached) — confirm in MerkleTrees.
                    while (true)
                    {
                        DecoratedKey dk = keys.get(random.nextInt(numKeys));
                        if (!tree.split(dk.getToken()))
                            break;
                    }
                    // reuse the sample buffer for the next range
                    keys.clear();
                }
            }
        }
        logger.debug("Prepared AEService trees of size {} for {}", trees.size(), desc);
        ranges = tree.invalids();
    }
    /**
     * Called (in order) for every row present in the CF.
     * Hashes the row, and adds it to the tree being built.
     *
     * Partitions must arrive in strictly increasing token order; prepare() must have
     * been called first (ranges/trees initialised).
     *
     * @param partition Partition to add hash
     */
    public void add(UnfilteredRowIterator partition)
    {
        assert Range.isInRanges(partition.partitionKey().getToken(), desc.ranges) : partition.partitionKey().getToken() + " is not contained in " + desc.ranges;
        assert lastKey == null || lastKey.compareTo(partition.partitionKey()) < 0
               : "partition " + partition.partitionKey() + " received out of order wrt " + lastKey;
        lastKey = partition.partitionKey();
        if (range == null)
            range = ranges.next();
        // generate new ranges as long as case 1 is true
        if (!findCorrectRange(lastKey.getToken()))
        {
            // current iterator exhausted without a match: restart from the tree's
            // invalid ranges and locate the range for this token
            ranges = trees.invalids();
            findCorrectRange(lastKey.getToken());
        }
        assert range.contains(lastKey.getToken()) : "Token not in MerkleTree: " + lastKey.getToken();
        // case 3 must be true: mix in the hashed row
        RowHash rowHash = rowHash(partition);
        if (rowHash != null)
        {
            range.addHash(rowHash);
        }
    }
    /**
     * Advances {@code range} through the current iterator until it contains {@code t}.
     *
     * @return true if the (possibly advanced) current range contains the token
     */
    public boolean findCorrectRange(Token t)
    {
        while (!range.contains(t) && ranges.hasNext())
        {
            range = ranges.next();
        }
        return range.contains(t);
    }
    /**
     * MessageDigest decorator that counts how many bytes were fed into the underlying
     * digest. A zero count after digesting a partition means the digest was never
     * updated, which rowHash() uses to skip empty partitions (see CASSANDRA-8979).
     */
    static class CountingDigest extends MessageDigest
    {
        // total bytes passed to update(); read by rowHash()
        private long count;
        private MessageDigest underlying;
        public CountingDigest(MessageDigest underlying)
        {
            super(underlying.getAlgorithm());
            this.underlying = underlying;
        }
        @Override
        protected void engineUpdate(byte input)
        {
            underlying.update(input);
            count += 1;
        }
        @Override
        protected void engineUpdate(byte[] input, int offset, int len)
        {
            underlying.update(input, offset, len);
            count += len;
        }
        @Override
        protected byte[] engineDigest()
        {
            return underlying.digest();
        }
        @Override
        protected void engineReset()
        {
            underlying.reset();
        }
    }
    /**
     * Hashes one partition and increments the validated-partition counter.
     *
     * @return a RowHash for the partition, or null if the digest was never updated
     *         (i.e. the partition contributed no bytes)
     */
    private MerkleTree.RowHash rowHash(UnfilteredRowIterator partition)
    {
        validated++;
        // MerkleTree uses XOR internally, so we want lots of output bits here
        CountingDigest digest = new CountingDigest(FBUtilities.newMessageDigest("SHA-256"));
        UnfilteredRowIterators.digest(partition, digest, MessagingService.current_version);
        // only return new hash for merkle tree in case digest was updated - see CASSANDRA-8979
        return digest.count > 0
             ? new MerkleTree.RowHash(partition.partitionKey().getToken(), digest.digest(), digest.count)
             : null;
    }
    /**
     * Registers the newly created tree for rendezvous in Stage.ANTIENTROPY.
     * Finalises the tree (completeTree) and schedules run() on the anti-entropy stage.
     */
    public void complete()
    {
        completeTree();
        StageManager.getStage(Stage.ANTI_ENTROPY).execute(this);
        if (logger.isDebugEnabled())
        {
            // log distribution of rows in tree
            logger.debug("Validated {} partitions for {}. Partitions per leaf are:", validated, desc.sessionId);
            trees.logRowCountPerLeaf(logger);
            logger.debug("Validated {} partitions for {}. Partition sizes are:", validated, desc.sessionId);
            trees.logRowSizePerLeaf(logger);
        }
    }
    /**
     * Ensures every remaining invalid range in the trees has an initialised hash,
     * so the trees are safe to serialise and compare. Requires prepare() was called.
     */
    @VisibleForTesting
    public void completeTree()
    {
        assert ranges != null : "Validator was not prepared()";
        ranges = trees.invalids();
        while (ranges.hasNext())
        {
            range = ranges.next();
            range.ensureHashInitialised();
        }
    }
    /**
     * Called when some error during the validation happened.
     * This sends RepairStatus to inform the initiator that the validation has failed.
     * The actual reason for failure should be looked up in the log of the host calling this function.
     */
    public void fail()
    {
        logger.error("Failed creating a merkle tree for {}, {} (see log for details)", desc, initiator);
        // send fail message only to nodes >= version 2.0
        MessagingService.instance().sendOneWay(new ValidationComplete(desc).createMessage(), initiator);
    }
    /**
     * Called after the validation lifecycle to respond with the now valid tree. Runs in Stage.ANTIENTROPY.
     */
    public void run()
    {
        // respond to the request that triggered this validation
        if (!initiator.equals(FBUtilities.getBroadcastAddress()))
        {
            logger.info("{} Sending completed merkle tree to {} for {}.{}", previewKind.logPrefix(desc.sessionId), initiator, desc.keyspace, desc.columnFamily);
            Tracing.traceRepair("Sending completed merkle tree to {} for {}.{}", initiator, desc.keyspace, desc.columnFamily);
        }
        MessagingService.instance().sendOneWay(new ValidationComplete(desc, trees).createMessage(), initiator);
    }
}
| |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.taobao.android.dx.io.instructions;
import com.taobao.android.dex.DexException;
import com.taobao.android.dx.io.IndexType;
import com.taobao.android.dx.io.OpcodeInfo;
import com.taobao.android.dx.io.Opcodes;
import com.taobao.android.dx.util.Hex;
import java.io.EOFException;
/**
* Representation of an instruction format, which knows how to decode into
* and encode from instances of {@link DecodedInstruction}.
*/
public enum InstructionCodec {
FORMAT_00X() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
return new ZeroRegisterDecodedInstruction(
this, opcodeUnit, 0, null,
0, 0L);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(insn.getOpcodeUnit());
}
},
FORMAT_10X() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int literal = byte1(opcodeUnit); // should be zero
return new ZeroRegisterDecodedInstruction(
this, opcode, 0, null,
0, literal);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(insn.getOpcodeUnit());
}
},
FORMAT_12X() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = nibble2(opcodeUnit);
int b = nibble3(opcodeUnit);
return new TwoRegisterDecodedInstruction(
this, opcode, 0, null,
0, 0L,
a, b);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(
codeUnit(insn.getOpcodeUnit(),
makeByte(insn.getA(), insn.getB())));
}
},
FORMAT_11N() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = nibble2(opcodeUnit);
int literal = (nibble3(opcodeUnit) << 28) >> 28; // sign-extend
return new OneRegisterDecodedInstruction(
this, opcode, 0, null,
0, literal,
a);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(
codeUnit(insn.getOpcodeUnit(),
makeByte(insn.getA(), insn.getLiteralNibble())));
}
},
FORMAT_11X() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = byte1(opcodeUnit);
return new OneRegisterDecodedInstruction(
this, opcode, 0, null,
0, 0L,
a);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(codeUnit(insn.getOpcode(), insn.getA()));
}
},
FORMAT_10T() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int baseAddress = in.cursor() - 1;
int opcode = byte0(opcodeUnit);
int target = (byte) byte1(opcodeUnit); // sign-extend
return new ZeroRegisterDecodedInstruction(
this, opcode, 0, null,
baseAddress + target, 0L);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
int relativeTarget = insn.getTargetByte(out.cursor());
out.write(codeUnit(insn.getOpcode(), relativeTarget));
}
},
FORMAT_20T() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int baseAddress = in.cursor() - 1;
int opcode = byte0(opcodeUnit);
int literal = byte1(opcodeUnit); // should be zero
int target = (short) in.read(); // sign-extend
return new ZeroRegisterDecodedInstruction(
this, opcode, 0, null,
baseAddress + target, literal);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
short relativeTarget = insn.getTargetUnit(out.cursor());
out.write(insn.getOpcodeUnit(), relativeTarget);
}
},
FORMAT_20BC() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
// Note: We use the literal field to hold the decoded AA value.
int opcode = byte0(opcodeUnit);
int literal = byte1(opcodeUnit);
int index = in.read();
return new ZeroRegisterDecodedInstruction(
this, opcode, index, IndexType.VARIES,
0, literal);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(
codeUnit(insn.getOpcode(), insn.getLiteralByte()),
insn.getIndexUnit());
}
},
FORMAT_22X() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = byte1(opcodeUnit);
int b = in.read();
return new TwoRegisterDecodedInstruction(
this, opcode, 0, null,
0, 0L,
a, b);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(
codeUnit(insn.getOpcode(), insn.getA()),
insn.getBUnit());
}
},
FORMAT_21T() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int baseAddress = in.cursor() - 1;
int opcode = byte0(opcodeUnit);
int a = byte1(opcodeUnit);
int target = (short) in.read(); // sign-extend
return new OneRegisterDecodedInstruction(
this, opcode, 0, null,
baseAddress + target, 0L,
a);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
short relativeTarget = insn.getTargetUnit(out.cursor());
out.write(codeUnit(insn.getOpcode(), insn.getA()), relativeTarget);
}
},
FORMAT_21S() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = byte1(opcodeUnit);
int literal = (short) in.read(); // sign-extend
return new OneRegisterDecodedInstruction(
this, opcode, 0, null,
0, literal,
a);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(
codeUnit(insn.getOpcode(), insn.getA()),
insn.getLiteralUnit());
}
},
FORMAT_21H() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = byte1(opcodeUnit);
long literal = (short) in.read(); // sign-extend
/*
* Format 21h decodes differently depending on the opcode,
* because the "signed hat" might represent either a 32-
* or 64- bit value.
*/
literal <<= (opcode == Opcodes.CONST_HIGH16) ? 16 : 48;
return new OneRegisterDecodedInstruction(
this, opcode, 0, null,
0, literal,
a);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
// See above.
int opcode = insn.getOpcode();
int shift = (opcode == Opcodes.CONST_HIGH16) ? 16 : 48;
short literal = (short) (insn.getLiteral() >> shift);
out.write(codeUnit(opcode, insn.getA()), literal);
}
},
FORMAT_21C() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = byte1(opcodeUnit);
int index = in.read();
IndexType indexType = OpcodeInfo.getIndexType(opcode);
return new OneRegisterDecodedInstruction(
this, opcode, index, indexType,
0, 0L,
a);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(
codeUnit(insn.getOpcode(), insn.getA()),
insn.getIndexUnit());
}
},
FORMAT_23X() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = byte1(opcodeUnit);
int bc = in.read();
int b = byte0(bc);
int c = byte1(bc);
return new ThreeRegisterDecodedInstruction(
this, opcode, 0, null,
0, 0L,
a, b, c);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(
codeUnit(insn.getOpcode(), insn.getA()),
codeUnit(insn.getB(), insn.getC()));
}
},
FORMAT_22B() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = byte1(opcodeUnit);
int bc = in.read();
int b = byte0(bc);
int literal = (byte) byte1(bc); // sign-extend
return new TwoRegisterDecodedInstruction(
this, opcode, 0, null,
0, literal,
a, b);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(
codeUnit(insn.getOpcode(), insn.getA()),
codeUnit(insn.getB(),
insn.getLiteralByte()));
}
},
FORMAT_22T() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int baseAddress = in.cursor() - 1;
int opcode = byte0(opcodeUnit);
int a = nibble2(opcodeUnit);
int b = nibble3(opcodeUnit);
int target = (short) in.read(); // sign-extend
return new TwoRegisterDecodedInstruction(
this, opcode, 0, null,
baseAddress + target, 0L,
a, b);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
short relativeTarget = insn.getTargetUnit(out.cursor());
out.write(
codeUnit(insn.getOpcode(),
makeByte(insn.getA(), insn.getB())),
relativeTarget);
}
},
FORMAT_22S() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = nibble2(opcodeUnit);
int b = nibble3(opcodeUnit);
int literal = (short) in.read(); // sign-extend
return new TwoRegisterDecodedInstruction(
this, opcode, 0, null,
0, literal,
a, b);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(
codeUnit(insn.getOpcode(),
makeByte(insn.getA(), insn.getB())),
insn.getLiteralUnit());
}
},
FORMAT_22C() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = nibble2(opcodeUnit);
int b = nibble3(opcodeUnit);
int index = in.read();
IndexType indexType = OpcodeInfo.getIndexType(opcode);
return new TwoRegisterDecodedInstruction(
this, opcode, index, indexType,
0, 0L,
a, b);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(
codeUnit(insn.getOpcode(),
makeByte(insn.getA(), insn.getB())),
insn.getIndexUnit());
}
},
FORMAT_22CS() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = nibble2(opcodeUnit);
int b = nibble3(opcodeUnit);
int index = in.read();
return new TwoRegisterDecodedInstruction(
this, opcode, index, IndexType.FIELD_OFFSET,
0, 0L,
a, b);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(
codeUnit(insn.getOpcode(),
makeByte(insn.getA(), insn.getB())),
insn.getIndexUnit());
}
},
FORMAT_30T() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int baseAddress = in.cursor() - 1;
int opcode = byte0(opcodeUnit);
int literal = byte1(opcodeUnit); // should be zero
int target = in.readInt();
return new ZeroRegisterDecodedInstruction(
this, opcode, 0, null,
baseAddress + target, literal);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
int relativeTarget = insn.getTarget(out.cursor());
out.write(insn.getOpcodeUnit(),
unit0(relativeTarget), unit1(relativeTarget));
}
},
FORMAT_32X() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int literal = byte1(opcodeUnit); // should be zero
int a = in.read();
int b = in.read();
return new TwoRegisterDecodedInstruction(
this, opcode, 0, null,
0, literal,
a, b);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
out.write(insn.getOpcodeUnit(), insn.getAUnit(), insn.getBUnit());
}
},
FORMAT_31I() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = byte1(opcodeUnit);
int literal = in.readInt();
return new OneRegisterDecodedInstruction(
this, opcode, 0, null,
0, literal,
a);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
int literal = insn.getLiteralInt();
out.write(
codeUnit(insn.getOpcode(), insn.getA()),
unit0(literal),
unit1(literal));
}
},
FORMAT_31T() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int baseAddress = in.cursor() - 1;
int opcode = byte0(opcodeUnit);
int a = byte1(opcodeUnit);
int target = baseAddress + in.readInt();
/*
* Switch instructions need to "forward" their addresses to their
* payload target instructions.
*/
switch (opcode) {
case Opcodes.PACKED_SWITCH:
case Opcodes.SPARSE_SWITCH: {
in.setBaseAddress(target, baseAddress);
break;
}
}
return new OneRegisterDecodedInstruction(
this, opcode, 0, null,
target, 0L,
a);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
int relativeTarget = insn.getTarget(out.cursor());
out.write(
codeUnit(insn.getOpcode(), insn.getA()),
unit0(relativeTarget), unit1(relativeTarget));
}
},
FORMAT_31C() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
int opcode = byte0(opcodeUnit);
int a = byte1(opcodeUnit);
int index = in.readInt();
IndexType indexType = OpcodeInfo.getIndexType(opcode);
return new OneRegisterDecodedInstruction(
this, opcode, index, indexType,
0, 0L,
a);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
int index = insn.getIndex();
out.write(
codeUnit(insn.getOpcode(), insn.getA()),
unit0(index),
unit1(index));
}
},
FORMAT_35C() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
return decodeRegisterList(this, opcodeUnit, in);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
encodeRegisterList(insn, out);
}
},
FORMAT_35MS() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
return decodeRegisterList(this, opcodeUnit, in);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
encodeRegisterList(insn, out);
}
},
FORMAT_35MI() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
return decodeRegisterList(this, opcodeUnit, in);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
encodeRegisterList(insn, out);
}
},
FORMAT_3RC() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
return decodeRegisterRange(this, opcodeUnit, in);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
encodeRegisterRange(insn, out);
}
},
FORMAT_3RMS() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
return decodeRegisterRange(this, opcodeUnit, in);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
encodeRegisterRange(insn, out);
}
},
FORMAT_3RMI() {
@Override public DecodedInstruction decode(int opcodeUnit,
CodeInput in) throws EOFException {
return decodeRegisterRange(this, opcodeUnit, in);
}
@Override public void encode(DecodedInstruction insn, CodeOutput out) {
encodeRegisterRange(insn, out);
}
},
/**
 * Format with one register and a full 64-bit literal. The literal
 * occupies the four code units following the opcode unit (see the
 * matching {@code unit0..unit3} writes in {@code encode}).
 */
FORMAT_51L() {
    @Override public DecodedInstruction decode(int opcodeUnit,
            CodeInput in) throws EOFException {
        int opcode = byte0(opcodeUnit);
        int a = byte1(opcodeUnit);     // target register, high byte of the opcode unit
        long literal = in.readLong();  // consumes the next four code units
        // This format carries no index, hence index 0 / null indexType.
        return new OneRegisterDecodedInstruction(
                this, opcode, 0, null,
                0, literal,
                a);
    }

    @Override public void encode(DecodedInstruction insn, CodeOutput out) {
        long literal = insn.getLiteral();
        // Opcode unit first, then the literal split little-end-first
        // into four 16-bit units.
        out.write(
                codeUnit(insn.getOpcode(), insn.getA()),
                unit0(literal),
                unit1(literal),
                unit2(literal),
                unit3(literal));
    }
},
/**
 * Packed-switch payload pseudo-instruction: a target count, a first key,
 * and {@code size} branch targets. On the wire the targets are relative
 * to the payload's base address; this codec converts them to absolute
 * addresses on decode and back to relative on encode.
 */
FORMAT_PACKED_SWITCH_PAYLOAD() {
    @Override public DecodedInstruction decode(int opcodeUnit,
            CodeInput in) throws EOFException {
        // Base address is the position of the opcode unit itself,
        // which the caller has already consumed (hence the -1).
        int baseAddress = in.baseAddressForCursor() - 1; // already read opcode
        int size = in.read();
        int firstKey = in.readInt();
        int[] targets = new int[size];

        for (int i = 0; i < size; i++) {
            targets[i] = baseAddress + in.readInt(); // relative -> absolute
        }

        return new PackedSwitchPayloadDecodedInstruction(
                this, opcodeUnit, firstKey, targets);
    }

    @Override public void encode(DecodedInstruction insn, CodeOutput out) {
        PackedSwitchPayloadDecodedInstruction payload =
            (PackedSwitchPayloadDecodedInstruction) insn;
        int[] targets = payload.getTargets();
        int baseAddress = out.baseAddressForCursor();

        out.write(payload.getOpcodeUnit());
        out.write(asUnsignedUnit(targets.length));
        out.writeInt(payload.getFirstKey());

        for (int target : targets) {
            out.writeInt(target - baseAddress); // absolute -> relative
        }
    }
},
/**
 * Sparse-switch payload pseudo-instruction: a count, then {@code size}
 * keys followed by {@code size} branch targets (two parallel arrays on
 * the wire). Targets are stored relative to the payload's base address
 * and converted to/from absolute addresses here.
 */
FORMAT_SPARSE_SWITCH_PAYLOAD() {
    @Override public DecodedInstruction decode(int opcodeUnit,
            CodeInput in) throws EOFException {
        // Base address is the position of the opcode unit itself,
        // which the caller has already consumed (hence the -1).
        int baseAddress = in.baseAddressForCursor() - 1; // already read opcode
        int size = in.read();
        int[] keys = new int[size];
        int[] targets = new int[size];

        // Keys come first on the wire, then the targets.
        for (int i = 0; i < size; i++) {
            keys[i] = in.readInt();
        }

        for (int i = 0; i < size; i++) {
            targets[i] = baseAddress + in.readInt(); // relative -> absolute
        }

        return new SparseSwitchPayloadDecodedInstruction(
                this, opcodeUnit, keys, targets);
    }

    @Override public void encode(DecodedInstruction insn, CodeOutput out) {
        SparseSwitchPayloadDecodedInstruction payload =
            (SparseSwitchPayloadDecodedInstruction) insn;
        int[] keys = payload.getKeys();
        int[] targets = payload.getTargets();
        int baseAddress = out.baseAddressForCursor();

        out.write(payload.getOpcodeUnit());
        out.write(asUnsignedUnit(targets.length));

        for (int key : keys) {
            out.writeInt(key);
        }

        for (int target : targets) {
            out.writeInt(target - baseAddress); // absolute -> relative
        }
    }
},
/**
 * Fill-array-data payload pseudo-instruction: an element width (1, 2, 4
 * or 8 bytes), an element count, and the packed element data. Any other
 * width is rejected with a {@link DexException}.
 */
FORMAT_FILL_ARRAY_DATA_PAYLOAD() {
    @Override public DecodedInstruction decode(int opcodeUnit,
            CodeInput in) throws EOFException {
        int elementWidth = in.read();
        int size = in.readInt();

        switch (elementWidth) {
            case 1: {
                /*
                 * Bytes are packed two per 16-bit code unit: a fresh
                 * unit is read on every even index, its low byte is
                 * consumed first, and the shift exposes the high byte
                 * for the odd index.
                 */
                byte[] array = new byte[size];
                boolean even = true;
                for (int i = 0, value = 0; i < size; i++, even = !even) {
                    if (even) {
                        value = in.read();
                    }
                    array[i] = (byte) (value & 0xff);
                    value >>= 8;
                }
                return new FillArrayDataPayloadDecodedInstruction(
                        this, opcodeUnit, array);
            }
            case 2: {
                // One element per code unit.
                short[] array = new short[size];
                for (int i = 0; i < size; i++) {
                    array[i] = (short) in.read();
                }
                return new FillArrayDataPayloadDecodedInstruction(
                        this, opcodeUnit, array);
            }
            case 4: {
                // One element per two code units.
                int[] array = new int[size];
                for (int i = 0; i < size; i++) {
                    array[i] = in.readInt();
                }
                return new FillArrayDataPayloadDecodedInstruction(
                        this, opcodeUnit, array);
            }
            case 8: {
                // One element per four code units.
                long[] array = new long[size];
                for (int i = 0; i < size; i++) {
                    array[i] = in.readLong();
                }
                return new FillArrayDataPayloadDecodedInstruction(
                        this, opcodeUnit, array);
            }
        }

        throw new DexException("bogus element_width: "
                + Hex.u2(elementWidth));
    }

    @Override public void encode(DecodedInstruction insn, CodeOutput out) {
        FillArrayDataPayloadDecodedInstruction payload =
            (FillArrayDataPayloadDecodedInstruction) insn;
        short elementWidth = payload.getElementWidthUnit();
        Object data = payload.getData();

        out.write(payload.getOpcodeUnit());
        out.write(elementWidth);
        out.writeInt(payload.getSize());

        // The data object's array type corresponds to the element width.
        switch (elementWidth) {
            case 1: out.write((byte[]) data); break;
            case 2: out.write((short[]) data); break;
            case 4: out.write((int[]) data); break;
            case 8: out.write((long[]) data); break;
            default: {
                throw new DexException("bogus element_width: "
                        + Hex.u2(elementWidth));
            }
        }
    }
};
/**
 * Decodes an instruction specified by the given opcode unit, reading
 * any required additional code units from the given input source.
 *
 * @param opcodeUnit the first code unit of the instruction
 * @param in source for any further code units the format requires
 * @return the decoded instruction
 * @throws EOFException if the input is exhausted mid-instruction
 */
public abstract DecodedInstruction decode(int opcodeUnit, CodeInput in)
    throws EOFException;

/**
 * Encodes the given instruction, writing its code units to the given
 * output sink.
 */
public abstract void encode(DecodedInstruction insn, CodeOutput out);
/**
 * Helper method that decodes any of the register-list formats.
 *
 * <p>Wire layout (three code units including {@code opcodeUnit}): the
 * opcode unit carries the opcode in its low byte, register E in nibble 2
 * and the register count in nibble 3; the second unit is the index; the
 * third packs registers A-D one nibble each.
 */
private static DecodedInstruction decodeRegisterList(
        InstructionCodec format, int opcodeUnit, CodeInput in)
        throws EOFException {
    int opcode = byte0(opcodeUnit);
    int e = nibble2(opcodeUnit);
    int registerCount = nibble3(opcodeUnit);
    int index = in.read();  // second code unit: the index
    int abcd = in.read();   // third code unit: registers A-D, one nibble each
    int a = nibble0(abcd);
    int b = nibble1(abcd);
    int c = nibble2(abcd);
    int d = nibble3(abcd);
    IndexType indexType = OpcodeInfo.getIndexType(opcode);

    // Choose the DecodedInstruction subclass matching the register count;
    // counts outside 0..5 cannot be represented by this format family.
    // TODO: Having to switch like this is less than ideal.
    switch (registerCount) {
        case 0:
            return new ZeroRegisterDecodedInstruction(
                    format, opcode, index, indexType,
                    0, 0L);
        case 1:
            return new OneRegisterDecodedInstruction(
                    format, opcode, index, indexType,
                    0, 0L,
                    a);
        case 2:
            return new TwoRegisterDecodedInstruction(
                    format, opcode, index, indexType,
                    0, 0L,
                    a, b);
        case 3:
            return new ThreeRegisterDecodedInstruction(
                    format, opcode, index, indexType,
                    0, 0L,
                    a, b, c);
        case 4:
            return new FourRegisterDecodedInstruction(
                    format, opcode, index, indexType,
                    0, 0L,
                    a, b, c, d);
        case 5:
            return new FiveRegisterDecodedInstruction(
                    format, opcode, index, indexType,
                    0, 0L,
                    a, b, c, d, e);
    }

    throw new DexException("bogus registerCount: "
            + Hex.uNibble(registerCount));
}
/**
 * Helper method that encodes any of the register-list formats: the
 * opcode unit (opcode in the low byte, E and register count packed into
 * the high byte), the index unit, and registers A-D one nibble each —
 * the mirror image of {@link #decodeRegisterList}.
 */
private static void encodeRegisterList(DecodedInstruction insn,
        CodeOutput out) {
    out.write(codeUnit(insn.getOpcode(),
                    makeByte(insn.getE(), insn.getRegisterCount())),
            insn.getIndexUnit(),
            codeUnit(insn.getA(), insn.getB(), insn.getC(), insn.getD()));
}
/**
 * Helper method that decodes any of the three-unit register-range
 * formats: opcode in the low byte and register count in the high byte of
 * the opcode unit, then the index unit, then the first register (A).
 */
private static DecodedInstruction decodeRegisterRange(
        InstructionCodec format, int opcodeUnit, CodeInput in)
        throws EOFException {
    int opcode = byte0(opcodeUnit);
    int registerCount = byte1(opcodeUnit);
    int index = in.read();  // second code unit: the index
    int a = in.read();      // third code unit: first register of the range
    IndexType indexType = OpcodeInfo.getIndexType(opcode);
    return new RegisterRangeDecodedInstruction(
            format, opcode, index, indexType,
            0, 0L,
            a, registerCount);
}
/**
 * Helper method that encodes any of the three-unit register-range
 * formats — the mirror image of {@link #decodeRegisterRange}.
 */
private static void encodeRegisterRange(DecodedInstruction insn,
        CodeOutput out) {
    out.write(codeUnit(insn.getOpcode(), insn.getRegisterCount()),
            insn.getIndexUnit(),
            insn.getAUnit());
}
/**
 * Combines two byte-sized values into one 16-bit code unit, with
 * {@code lowByte} in bits 0-7 and {@code highByte} in bits 8-15.
 *
 * @param lowByte low-order byte; must fit in eight bits
 * @param highByte high-order byte; must fit in eight bits
 * @return the packed code unit
 * @throws IllegalArgumentException if either argument has bits set
 * outside the low eight
 */
private static short codeUnit(int lowByte, int highByte) {
    if (lowByte != (lowByte & 0xff)) {
        throw new IllegalArgumentException("bogus lowByte");
    }

    if (highByte != (highByte & 0xff)) {
        throw new IllegalArgumentException("bogus highByte");
    }

    return (short) ((highByte << 8) | lowByte);
}
/**
 * Packs four nibble-sized values into one 16-bit code unit, with
 * {@code nibble0} in the lowest four bits and {@code nibble3} in the
 * highest.
 *
 * @return the packed code unit
 * @throws IllegalArgumentException if any argument has bits set outside
 * the low four
 */
private static short codeUnit(int nibble0, int nibble1, int nibble2,
        int nibble3) {
    if (nibble0 != (nibble0 & 0xf)) {
        throw new IllegalArgumentException("bogus nibble0");
    }

    if (nibble1 != (nibble1 & 0xf)) {
        throw new IllegalArgumentException("bogus nibble1");
    }

    if (nibble2 != (nibble2 & 0xf)) {
        throw new IllegalArgumentException("bogus nibble2");
    }

    if (nibble3 != (nibble3 & 0xf)) {
        throw new IllegalArgumentException("bogus nibble3");
    }

    // Assemble high-to-low, shifting in one nibble at a time.
    int unit = nibble3;
    unit = (unit << 4) | nibble2;
    unit = (unit << 4) | nibble1;
    unit = (unit << 4) | nibble0;
    return (short) unit;
}
/**
 * Packs two nibble-sized values into one byte, with {@code lowNibble}
 * in bits 0-3 and {@code highNibble} in bits 4-7.
 *
 * @return the packed byte value
 * @throws IllegalArgumentException if either argument has bits set
 * outside the low four
 */
private static int makeByte(int lowNibble, int highNibble) {
    if (lowNibble != (lowNibble & 0xf)) {
        throw new IllegalArgumentException("bogus lowNibble");
    }

    if (highNibble != (highNibble & 0xf)) {
        throw new IllegalArgumentException("bogus highNibble");
    }

    return (highNibble << 4) | lowNibble;
}
/**
 * Narrows a value known to fit in 16 unsigned bits down to a code unit.
 *
 * @param value value in the range 0..0xffff
 * @return the value reinterpreted as a (possibly negative) short
 * @throws IllegalArgumentException if {@code value} does not fit in 16
 * unsigned bits
 */
private static short asUnsignedUnit(int value) {
    if (value != (value & 0xffff)) {
        throw new IllegalArgumentException("bogus unsigned code unit");
    }

    return (short) value;
}
/** Extracts the low code unit (bits 0-15) of the given int. */
private static short unit0(int value) {
    return (short) value;
}

/** Extracts the high code unit (bits 16-31) of the given int. */
private static short unit1(int value) {
    return (short) (value >> 16);
}

/** Extracts code unit 0 (bits 0-15) of the given long. */
private static short unit0(long value) {
    return (short) value;
}

/** Extracts code unit 1 (bits 16-31) of the given long. */
private static short unit1(long value) {
    return (short) (value >> 16);
}

/** Extracts code unit 2 (bits 32-47) of the given long. */
private static short unit2(long value) {
    return (short) (value >> 32);
}

/** Extracts code unit 3 (bits 48-63) of the given long. */
private static short unit3(long value) {
    return (short) (value >> 48);
}
/** Extracts byte 0 (bits 0-7) of the given value. */
private static int byte0(int value) {
    return value & 0xff;
}

/** Extracts byte 1 (bits 8-15) of the given value. */
private static int byte1(int value) {
    return (value >> 8) & 0xff;
}

/** Extracts byte 2 (bits 16-23) of the given value. */
private static int byte2(int value) {
    return (value >> 16) & 0xff;
}

/** Extracts byte 3 (bits 24-31) of the given value. */
private static int byte3(int value) {
    return value >>> 24;
}

/** Extracts nibble 0 (bits 0-3) of the given value. */
private static int nibble0(int value) {
    return value & 0xf;
}

/** Extracts nibble 1 (bits 4-7) of the given value. */
private static int nibble1(int value) {
    return (value >> 4) & 0xf;
}

/** Extracts nibble 2 (bits 8-11) of the given value. */
private static int nibble2(int value) {
    return (value >> 8) & 0xf;
}

/** Extracts nibble 3 (bits 12-15) of the given value. */
private static int nibble3(int value) {
    return (value >> 12) & 0xf;
}
}
| |
/*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.http;
import com.bedatadriven.jackson.datatype.jts.JtsModule;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.BeanDescription;
import com.fasterxml.jackson.databind.SerializationConfig;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.BeanPropertyWriter;
import com.fasterxml.jackson.databind.ser.BeanSerializerModifier;
import com.fasterxml.jackson.databind.util.StdDateFormat;
import com.graphhopper.GraphHopper;
import com.graphhopper.GraphHopperAPI;
import com.graphhopper.http.health.GraphHopperHealthCheck;
import com.graphhopper.http.health.GraphHopperStorageHealthCheck;
import com.graphhopper.isochrone.algorithm.DelaunayTriangulationIsolineBuilder;
import com.graphhopper.jackson.GraphHopperModule;
import com.graphhopper.reader.gtfs.GraphHopperGtfs;
import com.graphhopper.reader.gtfs.GtfsStorage;
import com.graphhopper.reader.gtfs.PtFlagEncoder;
import com.graphhopper.reader.gtfs.RealtimeFeed;
import com.graphhopper.resources.*;
import com.graphhopper.routing.util.CarFlagEncoder;
import com.graphhopper.routing.util.EncodingManager;
import com.graphhopper.routing.util.FootFlagEncoder;
import com.graphhopper.storage.GHDirectory;
import com.graphhopper.storage.GraphHopperStorage;
import com.graphhopper.storage.index.LocationIndex;
import com.graphhopper.util.CmdArgs;
import com.graphhopper.util.TranslationMap;
import io.dropwizard.ConfiguredBundle;
import io.dropwizard.lifecycle.Managed;
import io.dropwizard.setup.Bootstrap;
import io.dropwizard.setup.Environment;
import org.glassfish.hk2.api.Factory;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import javax.inject.Inject;
import javax.ws.rs.ext.WriterInterceptor;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Dropwizard bundle that wires a GraphHopper instance into the application:
 * Jackson configuration, HK2 dependency bindings, JAX-RS resource
 * registration, lifecycle management and health checks. Depending on whether
 * a "gtfs.file" is configured it sets up either the GTFS (public transit)
 * flavor or the regular routing flavor.
 */
public class GraphHopperBundle implements ConfiguredBundle<GraphHopperBundleConfiguration> {

    /** HK2 factory resolving the TranslationMap from the injected GraphHopper. */
    static class TranslationMapFactory implements Factory<TranslationMap> {

        @Inject
        GraphHopper graphHopper;

        @Override
        public TranslationMap provide() {
            return graphHopper.getTranslationMap();
        }

        @Override
        public void dispose(TranslationMap instance) {
            // Owned by GraphHopper; nothing to release here.
        }
    }

    /** HK2 factory resolving the storage from the injected GraphHopper. */
    static class GraphHopperStorageFactory implements Factory<GraphHopperStorage> {

        @Inject
        GraphHopper graphHopper;

        @Override
        public GraphHopperStorage provide() {
            return graphHopper.getGraphHopperStorage();
        }

        @Override
        public void dispose(GraphHopperStorage instance) {
            // Owned by GraphHopper; nothing to release here.
        }
    }

    /** HK2 factory resolving the EncodingManager from the injected GraphHopper. */
    static class EncodingManagerFactory implements Factory<EncodingManager> {

        @Inject
        GraphHopper graphHopper;

        @Override
        public EncodingManager provide() {
            return graphHopper.getEncodingManager();
        }

        @Override
        public void dispose(EncodingManager instance) {
            // Owned by GraphHopper; nothing to release here.
        }
    }

    /** HK2 factory resolving the LocationIndex from the injected GraphHopper. */
    static class LocationIndexFactory implements Factory<LocationIndex> {

        @Inject
        GraphHopper graphHopper;

        @Override
        public LocationIndex provide() {
            return graphHopper.getLocationIndex();
        }

        @Override
        public void dispose(LocationIndex instance) {
            // Owned by GraphHopper; nothing to release here.
        }
    }

    /** HK2 factory exposing the GraphHopper instance's elevation flag. */
    static class HasElevation implements Factory<Boolean> {

        @Inject
        GraphHopper graphHopper;

        @Override
        public Boolean provide() {
            return graphHopper.hasElevation();
        }

        @Override
        public void dispose(Boolean instance) {
            // Nothing to release.
        }
    }

    /** HK2 factory handing out a single shared isoline builder instance. */
    static class RasterHullBuilderFactory implements Factory<DelaunayTriangulationIsolineBuilder> {

        // One instance shared by all injection sites of this factory.
        DelaunayTriangulationIsolineBuilder builder = new DelaunayTriangulationIsolineBuilder();

        @Override
        public DelaunayTriangulationIsolineBuilder provide() {
            return builder;
        }

        @Override
        public void dispose(DelaunayTriangulationIsolineBuilder delaunayTriangulationIsolineBuilder) {
            // Shared instance; intentionally never disposed.
        }
    }

    /**
     * Configures the application-wide ObjectMapper: standard date format,
     * JTS geometry support, GraphHopper (de)serializers, and omission of
     * null fields from responses.
     */
    @Override
    public void initialize(Bootstrap<?> bootstrap) {
        bootstrap.getObjectMapper().setDateFormat(new StdDateFormat());
        bootstrap.getObjectMapper().registerModule(new JtsModule());
        bootstrap.getObjectMapper().registerModule(new GraphHopperModule());
        bootstrap.getObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL);
        // Because VirtualEdgeIteratorState has getters which throw Exceptions.
        // http://stackoverflow.com/questions/35359430/how-to-make-jackson-ignore-properties-if-the-getters-throw-exceptions
        // Each property writer is wrapped so a throwing getter simply skips
        // the field instead of failing the whole serialization.
        bootstrap.getObjectMapper().registerModule(new SimpleModule().setSerializerModifier(new BeanSerializerModifier() {
            @Override
            public List<BeanPropertyWriter> changeProperties(SerializationConfig config, BeanDescription beanDesc, List<BeanPropertyWriter> beanProperties) {
                return beanProperties.stream().map(bpw -> new BeanPropertyWriter(bpw) {
                    @Override
                    public void serializeAsField(Object bean, JsonGenerator gen, SerializerProvider prov) throws Exception {
                        try {
                            super.serializeAsField(bean, gen, prov);
                        } catch (Exception e) {
                            // Ignoring expected exception, see above.
                        }
                    }
                }).collect(Collectors.toList());
            }
        }));
    }

    /**
     * Registers common filters and exception mappers, then delegates to the
     * GTFS or regular setup depending on whether "gtfs.file" is configured.
     */
    @Override
    public void run(GraphHopperBundleConfiguration configuration, Environment environment) {
        // System properties may override values from the configuration file.
        configuration.getGraphHopperConfiguration().merge(CmdArgs.readFromSystemProperties());

        // If the "?type=gpx" parameter is present, sets a corresponding media type header
        environment.jersey().register(new TypeGPXFilter());

        // Together, these two take care that MultiExceptions thrown from RouteResource
        // come out as JSON or GPX, depending on the media type
        environment.jersey().register(new MultiExceptionMapper());
        environment.jersey().register(new MultiExceptionGPXMessageBodyWriter());

        environment.jersey().register(new IllegalArgumentExceptionMapper());
        environment.jersey().register(new GHPointConverterProvider());

        if (configuration.getGraphHopperConfiguration().has("gtfs.file")) {
            // switch to different API implementation when using Pt
            runPtGraphHopper(configuration.getGraphHopperConfiguration(), environment);
        } else {
            runRegularGraphHopper(configuration.getGraphHopperConfiguration(), environment);
        }
    }

    /**
     * Sets up the GTFS-based (public transit) flavor: builds storage and
     * index from the configured GTFS/OSM inputs, binds the concrete
     * components for injection, registers the resources, and closes the
     * index and storage on shutdown.
     */
    private void runPtGraphHopper(CmdArgs configuration, Environment environment) {
        final PtFlagEncoder ptFlagEncoder = new PtFlagEncoder();
        final GHDirectory ghDirectory = GraphHopperGtfs.createGHDirectory(configuration.get("graph.location", "target/tmp"));
        final GtfsStorage gtfsStorage = GraphHopperGtfs.createGtfsStorage();
        final EncodingManager encodingManager = new EncodingManager.Builder(8).add(ptFlagEncoder).add(new FootFlagEncoder()).add(new CarFlagEncoder()).build();
        // "gtfs.file" and "datareader.file" may each be a comma-separated list.
        final GraphHopperStorage graphHopperStorage = GraphHopperGtfs.createOrLoad(ghDirectory, encodingManager, ptFlagEncoder, gtfsStorage,
                configuration.has("gtfs.file") ? Arrays.asList(configuration.get("gtfs.file", "").split(",")) : Collections.emptyList(),
                configuration.has("datareader.file") ? Arrays.asList(configuration.get("datareader.file", "").split(",")) : Collections.emptyList());
        final TranslationMap translationMap = GraphHopperGtfs.createTranslationMap();
        final LocationIndex locationIndex = GraphHopperGtfs.createOrLoadIndex(ghDirectory, graphHopperStorage);
        final GraphHopperAPI graphHopper = new GraphHopperGtfs(ptFlagEncoder, translationMap, graphHopperStorage, locationIndex, gtfsStorage, RealtimeFeed.empty(gtfsStorage));
        // Components were built eagerly above, so they are bound directly
        // rather than via the lazy factories used in the regular setup.
        environment.jersey().register(new AbstractBinder() {
            @Override
            protected void configure() {
                bind(configuration).to(CmdArgs.class);
                bind(graphHopper).to(GraphHopperAPI.class);
                bind(false).to(Boolean.class).named("hasElevation");
                bind(locationIndex).to(LocationIndex.class);
                bind(translationMap).to(TranslationMap.class);
                bind(encodingManager).to(EncodingManager.class);
                bind(graphHopperStorage).to(GraphHopperStorage.class);
                bindFactory(RasterHullBuilderFactory.class).to(DelaunayTriangulationIsolineBuilder.class);
            }
        });
        environment.jersey().register(NearestResource.class);
        environment.jersey().register(RouteResource.class);
        environment.jersey().register(new PtIsochroneResource(gtfsStorage, encodingManager, graphHopperStorage, locationIndex));
        environment.jersey().register(I18NResource.class);
        environment.jersey().register(InfoResource.class);
        // Say we only support pt, even though we now have several flag encoders. Yes, I know, we're almost there.
        environment.jersey().register((WriterInterceptor) context -> {
            if (context.getEntity() instanceof InfoResource.Info) {
                InfoResource.Info info = (InfoResource.Info) context.getEntity();
                info.supported_vehicles = new String[]{"pt"};
                info.features.remove("car");
                info.features.remove("foot");
                context.setEntity(info);
            }
            context.proceed();
        });
        environment.lifecycle().manage(new Managed() {
            @Override
            public void start() throws Exception {
                // Nothing to start; everything was created eagerly above.
            }

            @Override
            public void stop() throws Exception {
                locationIndex.close();
                graphHopperStorage.close();
            }
        });
        environment.healthChecks().register("graphhopper-storage", new GraphHopperStorageHealthCheck(graphHopperStorage));
    }

    /**
     * Sets up the regular (non-transit) flavor: a lifecycle-managed
     * GraphHopper, lazy factories that resolve its components at injection
     * time, resource registration and a health check.
     */
    private void runRegularGraphHopper(CmdArgs configuration, Environment environment) {
        final GraphHopperManaged graphHopperManaged = new GraphHopperManaged(configuration, environment.getObjectMapper());
        environment.lifecycle().manage(graphHopperManaged);
        environment.jersey().register(new AbstractBinder() {
            @Override
            protected void configure() {
                bind(configuration).to(CmdArgs.class);
                bind(graphHopperManaged).to(GraphHopperManaged.class);
                bind(graphHopperManaged.getGraphHopper()).to(GraphHopper.class);
                bind(graphHopperManaged.getGraphHopper()).to(GraphHopperAPI.class);
                // Factories resolve these lazily from the GraphHopper instance.
                bindFactory(HasElevation.class).to(Boolean.class).named("hasElevation");
                bindFactory(LocationIndexFactory.class).to(LocationIndex.class);
                bindFactory(TranslationMapFactory.class).to(TranslationMap.class);
                bindFactory(EncodingManagerFactory.class).to(EncodingManager.class);
                bindFactory(GraphHopperStorageFactory.class).to(GraphHopperStorage.class);
                bindFactory(RasterHullBuilderFactory.class).to(DelaunayTriangulationIsolineBuilder.class);
            }
        });
        if (configuration.getBool("web.change_graph.enabled", false)) {
            environment.jersey().register(ChangeGraphResource.class);
        }
        environment.jersey().register(NearestResource.class);
        environment.jersey().register(RouteResource.class);
        environment.jersey().register(IsochroneResource.class);
        environment.jersey().register(I18NResource.class);
        environment.jersey().register(InfoResource.class);
        environment.healthChecks().register("graphhopper", new GraphHopperHealthCheck(graphHopperManaged.getGraphHopper()));
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.sql.v2014_04_01.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.management.sql.v2014_04_01.ElasticPoolUpdate;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.HTTP;
import retrofit2.http.PATCH;
import retrofit2.http.Path;
import retrofit2.http.PUT;
import retrofit2.http.Query;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in ElasticPools.
*/
public class ElasticPoolsInner {
/** The Retrofit service to perform REST calls. */
private ElasticPoolsService service;
/** The service client containing this operation class. */
private SqlManagementClientImpl client;

/**
 * Initializes an instance of ElasticPoolsInner.
 *
 * @param retrofit the Retrofit instance built from a Retrofit Builder;
 * used to create the {@link ElasticPoolsService} proxy.
 * @param client the instance of the service client containing this operation class.
 */
public ElasticPoolsInner(Retrofit retrofit, SqlManagementClientImpl client) {
    this.service = retrofit.create(ElasticPoolsService.class);
    this.client = client;
}
/**
 * The interface defining all the services for ElasticPools to be
 * used by Retrofit to perform the actual REST calls. The path templates
 * and headers mirror the Azure SQL 2014-04-01 elastic pool endpoints.
 */
interface ElasticPoolsService {
    // PUT create-or-update (tracked by the caller as a long-running operation).
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.sql.v2014_04_01.ElasticPools createOrUpdate" })
    @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/elasticPools/{elasticPoolName}")
    Observable<Response<ResponseBody>> createOrUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("serverName") String serverName, @Path("elasticPoolName") String elasticPoolName, @Query("api-version") String apiVersion, @Body ElasticPoolInner parameters, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // PUT create-or-update, single request without polling.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.sql.v2014_04_01.ElasticPools beginCreateOrUpdate" })
    @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/elasticPools/{elasticPoolName}")
    Observable<Response<ResponseBody>> beginCreateOrUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("serverName") String serverName, @Path("elasticPoolName") String elasticPoolName, @Query("api-version") String apiVersion, @Body ElasticPoolInner parameters, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // PATCH update (tracked by the caller as a long-running operation).
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.sql.v2014_04_01.ElasticPools update" })
    @PATCH("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/elasticPools/{elasticPoolName}")
    Observable<Response<ResponseBody>> update(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("serverName") String serverName, @Path("elasticPoolName") String elasticPoolName, @Query("api-version") String apiVersion, @Body ElasticPoolUpdate parameters, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // PATCH update, single request without polling.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.sql.v2014_04_01.ElasticPools beginUpdate" })
    @PATCH("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/elasticPools/{elasticPoolName}")
    Observable<Response<ResponseBody>> beginUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("serverName") String serverName, @Path("elasticPoolName") String elasticPoolName, @Query("api-version") String apiVersion, @Body ElasticPoolUpdate parameters, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // DELETE an elastic pool. @HTTP is used (rather than @DELETE) so a body is permitted.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.sql.v2014_04_01.ElasticPools delete" })
    @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/elasticPools/{elasticPoolName}", method = "DELETE", hasBody = true)
    Observable<Response<ResponseBody>> delete(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("serverName") String serverName, @Path("elasticPoolName") String elasticPoolName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // GET a single elastic pool.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.sql.v2014_04_01.ElasticPools get" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/elasticPools/{elasticPoolName}")
    Observable<Response<ResponseBody>> get(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("serverName") String serverName, @Path("elasticPoolName") String elasticPoolName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // GET all elastic pools on a server.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.sql.v2014_04_01.ElasticPools listByServer" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/elasticPools")
    Observable<Response<ResponseBody>> listByServer(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("serverName") String serverName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

}
/**
 * Creates a new elastic pool or updates an existing elastic pool,
 * blocking until the operation has run to completion.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be operated on (updated or created).
 * @param parameters The required parameters for creating or updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the ElasticPoolInner object if successful.
 */
public ElasticPoolInner createOrUpdate(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolInner parameters) {
    // Block on the polling observable; its final emission carries the
    // completed operation's response.
    ServiceResponse<ElasticPoolInner> finalResponse =
        createOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters)
            .toBlocking()
            .last();
    return finalResponse.body();
}
/**
 * Creates a new elastic pool or updates an existing elastic pool,
 * reporting the outcome through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be operated on (updated or created).
 * @param parameters The required parameters for creating or updating an elastic pool.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<ElasticPoolInner> createOrUpdateAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolInner parameters, final ServiceCallback<ElasticPoolInner> serviceCallback) {
    return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters), serviceCallback);
}
/**
 * Creates a new elastic pool or updates an existing elastic pool.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be operated on (updated or created).
 * @param parameters The required parameters for creating or updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ElasticPoolInner> createOrUpdateAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolInner parameters) {
    // Unwrap the ServiceResponse envelope so subscribers see only the body.
    return createOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters).map(new Func1<ServiceResponse<ElasticPoolInner>, ElasticPoolInner>() {
        @Override
        public ElasticPoolInner call(ServiceResponse<ElasticPoolInner> response) {
            return response.body();
        }
    });
}
/**
 * Creates a new elastic pool or updates an existing elastic pool.
 * Validates all required inputs, issues the PUT request, and hands the
 * raw response to the Azure client's PUT/PATCH result handling.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be operated on (updated or created).
 * @param parameters The required parameters for creating or updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ServiceResponse<ElasticPoolInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolInner parameters) {
    // Fail fast on anything the service would reject anyway.
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serverName == null) {
        throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
    }
    if (elasticPoolName == null) {
        throw new IllegalArgumentException("Parameter elasticPoolName is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    if (parameters == null) {
        throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
    }
    // Structural validation of the request body (annotated constraints).
    Validator.validate(parameters);
    Observable<Response<ResponseBody>> observable = service.createOrUpdate(this.client.subscriptionId(), resourceGroupName, serverName, elasticPoolName, this.client.apiVersion(), parameters, this.client.acceptLanguage(), this.client.userAgent());
    // The TypeToken tells the client how to deserialize the final result.
    return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<ElasticPoolInner>() { }.getType());
}
/**
 * Creates a new elastic pool or updates an existing elastic pool,
 * blocking on the single initial response (no polling).
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be operated on (updated or created).
 * @param parameters The required parameters for creating or updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the ElasticPoolInner object if successful.
 */
public ElasticPoolInner beginCreateOrUpdate(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolInner parameters) {
    // Exactly one response is expected here, hence single() rather than last().
    ServiceResponse<ElasticPoolInner> response =
        beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters)
            .toBlocking()
            .single();
    return response.body();
}
/**
 * Creates a new elastic pool or updates an existing one, reporting the outcome through
 * the supplied callback.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be operated on (updated or created).
 * @param parameters The required parameters for creating or updating an elastic pool.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<ElasticPoolInner> beginCreateOrUpdateAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolInner parameters, final ServiceCallback<ElasticPoolInner> serviceCallback) {
    Observable<ServiceResponse<ElasticPoolInner>> responseObservable =
            beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Creates a new elastic pool or updates an existing one, emitting only the response body.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be operated on (updated or created).
 * @param parameters The required parameters for creating or updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ElasticPoolInner object
 */
public Observable<ElasticPoolInner> beginCreateOrUpdateAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolInner parameters) {
    // Strip the ServiceResponse wrapper and surface just the payload.
    final Func1<ServiceResponse<ElasticPoolInner>, ElasticPoolInner> extractBody =
            new Func1<ServiceResponse<ElasticPoolInner>, ElasticPoolInner>() {
                @Override
                public ElasticPoolInner call(ServiceResponse<ElasticPoolInner> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters).map(extractBody);
}
/**
 * Creates a new elastic pool or updates an existing one. This is the non-polling
 * "begin" variant: it sends the request once and converts the raw HTTP response.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be operated on (updated or created).
 * @param parameters The required parameters for creating or updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ElasticPoolInner object
 */
public Observable<ServiceResponse<ElasticPoolInner>> beginCreateOrUpdateWithServiceResponseAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolInner parameters) {
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serverName == null) {
        throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
    }
    if (elasticPoolName == null) {
        throw new IllegalArgumentException("Parameter elasticPoolName is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    if (parameters == null) {
        throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
    }
    Validator.validate(parameters);
    return service.beginCreateOrUpdate(this.client.subscriptionId(), resourceGroupName, serverName, elasticPoolName, this.client.apiVersion(), parameters, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ElasticPoolInner>>>() {
                @Override
                public Observable<ServiceResponse<ElasticPoolInner>> call(Response<ResponseBody> restResponse) {
                    try {
                        // Deserialization / status-code mapping failures become onError notifications.
                        return Observable.just(beginCreateOrUpdateDelegate(restResponse));
                    } catch (Throwable error) {
                        return Observable.error(error);
                    }
                }
            });
}
// Converts the raw Retrofit response into a typed ServiceResponse:
// 200/201 deserialize an ElasticPoolInner body, 202 (accepted, operation still in
// progress) carries no body, and any other status is raised as a CloudException.
private ServiceResponse<ElasticPoolInner> beginCreateOrUpdateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<ElasticPoolInner, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<ElasticPoolInner>() { }.getType())
.register(201, new TypeToken<ElasticPoolInner>() { }.getType())
.register(202, new TypeToken<Void>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
 * Updates an existing elastic pool, blocking until the long-running operation finishes.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be updated.
 * @param parameters The required parameters for updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the ElasticPoolInner object if successful.
 */
public ElasticPoolInner update(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolUpdate parameters) {
    // last() waits for the final polling result of the long-running operation.
    ServiceResponse<ElasticPoolInner> finalResponse =
            updateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters)
                    .toBlocking()
                    .last();
    return finalResponse.body();
}
/**
 * Updates an existing elastic pool, reporting the outcome through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be updated.
 * @param parameters The required parameters for updating an elastic pool.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<ElasticPoolInner> updateAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolUpdate parameters, final ServiceCallback<ElasticPoolInner> serviceCallback) {
    Observable<ServiceResponse<ElasticPoolInner>> responseObservable =
            updateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Updates an existing elastic pool, emitting only the response body.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be updated.
 * @param parameters The required parameters for updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ElasticPoolInner> updateAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolUpdate parameters) {
    // Strip the ServiceResponse wrapper and surface just the payload.
    final Func1<ServiceResponse<ElasticPoolInner>, ElasticPoolInner> extractBody =
            new Func1<ServiceResponse<ElasticPoolInner>, ElasticPoolInner>() {
                @Override
                public ElasticPoolInner call(ServiceResponse<ElasticPoolInner> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return updateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters).map(extractBody);
}
/**
 * Updates an existing elastic pool. Starts the long-running PATCH and returns an
 * observable that completes when the Azure runtime finishes polling the operation.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be updated.
 * @param parameters The required parameters for updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ServiceResponse<ElasticPoolInner>> updateWithServiceResponseAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolUpdate parameters) {
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serverName == null) {
        throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
    }
    if (elasticPoolName == null) {
        throw new IllegalArgumentException("Parameter elasticPoolName is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    if (parameters == null) {
        throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
    }
    Validator.validate(parameters);
    final Observable<Response<ResponseBody>> initialRequest = service.update(this.client.subscriptionId(), resourceGroupName, serverName, elasticPoolName, this.client.apiVersion(), parameters, this.client.acceptLanguage(), this.client.userAgent());
    return client.getAzureClient().getPutOrPatchResultAsync(initialRequest, new TypeToken<ElasticPoolInner>() { }.getType());
}
/**
 * Updates an existing elastic pool, blocking until the initial request has completed.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be updated.
 * @param parameters The required parameters for updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the ElasticPoolInner object if successful.
 */
public ElasticPoolInner beginUpdate(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolUpdate parameters) {
    ServiceResponse<ElasticPoolInner> serviceResponse =
            beginUpdateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters)
                    .toBlocking()
                    .single();
    return serviceResponse.body();
}
/**
 * Updates an existing elastic pool, reporting the outcome through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be updated.
 * @param parameters The required parameters for updating an elastic pool.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<ElasticPoolInner> beginUpdateAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolUpdate parameters, final ServiceCallback<ElasticPoolInner> serviceCallback) {
    Observable<ServiceResponse<ElasticPoolInner>> responseObservable =
            beginUpdateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Updates an existing elastic pool, emitting only the response body.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be updated.
 * @param parameters The required parameters for updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ElasticPoolInner object
 */
public Observable<ElasticPoolInner> beginUpdateAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolUpdate parameters) {
    // Strip the ServiceResponse wrapper and surface just the payload.
    final Func1<ServiceResponse<ElasticPoolInner>, ElasticPoolInner> extractBody =
            new Func1<ServiceResponse<ElasticPoolInner>, ElasticPoolInner>() {
                @Override
                public ElasticPoolInner call(ServiceResponse<ElasticPoolInner> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return beginUpdateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters).map(extractBody);
}
/**
 * Updates an existing elastic pool. This is the non-polling "begin" variant: it sends
 * the request once and converts the raw HTTP response.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be updated.
 * @param parameters The required parameters for updating an elastic pool.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ElasticPoolInner object
 */
public Observable<ServiceResponse<ElasticPoolInner>> beginUpdateWithServiceResponseAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolUpdate parameters) {
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serverName == null) {
        throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
    }
    if (elasticPoolName == null) {
        throw new IllegalArgumentException("Parameter elasticPoolName is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    if (parameters == null) {
        throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
    }
    Validator.validate(parameters);
    return service.beginUpdate(this.client.subscriptionId(), resourceGroupName, serverName, elasticPoolName, this.client.apiVersion(), parameters, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ElasticPoolInner>>>() {
                @Override
                public Observable<ServiceResponse<ElasticPoolInner>> call(Response<ResponseBody> restResponse) {
                    try {
                        // Deserialization / status-code mapping failures become onError notifications.
                        return Observable.just(beginUpdateDelegate(restResponse));
                    } catch (Throwable error) {
                        return Observable.error(error);
                    }
                }
            });
}
// Converts the raw Retrofit response into a typed ServiceResponse:
// 200 deserializes an ElasticPoolInner body, 202 (accepted, operation still in
// progress) carries no body, and any other status is raised as a CloudException.
private ServiceResponse<ElasticPoolInner> beginUpdateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<ElasticPoolInner, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<ElasticPoolInner>() { }.getType())
.register(202, new TypeToken<Void>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
 * Deletes the elastic pool, blocking until the request has completed.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be deleted.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void delete(String resourceGroupName, String serverName, String elasticPoolName) {
    ServiceResponse<Void> serviceResponse =
            deleteWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName)
                    .toBlocking()
                    .single();
    serviceResponse.body();
}
/**
 * Deletes the elastic pool, reporting the outcome through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be deleted.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> deleteAsync(String resourceGroupName, String serverName, String elasticPoolName, final ServiceCallback<Void> serviceCallback) {
    Observable<ServiceResponse<Void>> responseObservable =
            deleteWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Deletes the elastic pool, emitting a single {@code Void} item on completion.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be deleted.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable that completes when the delete request has been processed
 */
public Observable<Void> deleteAsync(String resourceGroupName, String serverName, String elasticPoolName) {
    // Strip the ServiceResponse wrapper; the body of a delete response is always Void.
    final Func1<ServiceResponse<Void>, Void> extractBody =
            new Func1<ServiceResponse<Void>, Void>() {
                @Override
                public Void call(ServiceResponse<Void> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return deleteWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName).map(extractBody);
}
/**
 * Deletes the elastic pool and converts the raw HTTP response into a typed
 * {@link ServiceResponse}.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be deleted.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceResponse} object if successful.
 */
public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String serverName, String elasticPoolName) {
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serverName == null) {
        throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
    }
    if (elasticPoolName == null) {
        throw new IllegalArgumentException("Parameter elasticPoolName is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    return service.delete(this.client.subscriptionId(), resourceGroupName, serverName, elasticPoolName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> restResponse) {
                    try {
                        // Deserialization / status-code mapping failures become onError notifications.
                        return Observable.just(deleteDelegate(restResponse));
                    } catch (Throwable error) {
                        return Observable.error(error);
                    }
                }
            });
}
// Converts the raw Retrofit response into a typed ServiceResponse:
// 200 (deleted) and 204 (no content) are both success with no body; any other
// status is raised as a CloudException.
private ServiceResponse<Void> deleteDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<Void, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.register(204, new TypeToken<Void>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
 * Gets an elastic pool, blocking until the request has completed.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be retrieved.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the ElasticPoolInner object if successful.
 */
public ElasticPoolInner get(String resourceGroupName, String serverName, String elasticPoolName) {
    ServiceResponse<ElasticPoolInner> serviceResponse =
            getWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName)
                    .toBlocking()
                    .single();
    return serviceResponse.body();
}
/**
 * Gets an elastic pool, reporting the outcome through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be retrieved.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<ElasticPoolInner> getAsync(String resourceGroupName, String serverName, String elasticPoolName, final ServiceCallback<ElasticPoolInner> serviceCallback) {
    Observable<ServiceResponse<ElasticPoolInner>> responseObservable =
            getWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Gets an elastic pool, emitting only the response body.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be retrieved.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ElasticPoolInner object
 */
public Observable<ElasticPoolInner> getAsync(String resourceGroupName, String serverName, String elasticPoolName) {
    // Strip the ServiceResponse wrapper and surface just the payload.
    final Func1<ServiceResponse<ElasticPoolInner>, ElasticPoolInner> extractBody =
            new Func1<ServiceResponse<ElasticPoolInner>, ElasticPoolInner>() {
                @Override
                public ElasticPoolInner call(ServiceResponse<ElasticPoolInner> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return getWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName).map(extractBody);
}
/**
 * Gets an elastic pool and converts the raw HTTP response into a typed
 * {@link ServiceResponse}.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param elasticPoolName The name of the elastic pool to be retrieved.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ElasticPoolInner object
 */
public Observable<ServiceResponse<ElasticPoolInner>> getWithServiceResponseAsync(String resourceGroupName, String serverName, String elasticPoolName) {
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serverName == null) {
        throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
    }
    if (elasticPoolName == null) {
        throw new IllegalArgumentException("Parameter elasticPoolName is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    return service.get(this.client.subscriptionId(), resourceGroupName, serverName, elasticPoolName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ElasticPoolInner>>>() {
                @Override
                public Observable<ServiceResponse<ElasticPoolInner>> call(Response<ResponseBody> restResponse) {
                    try {
                        // Deserialization / status-code mapping failures become onError notifications.
                        return Observable.just(getDelegate(restResponse));
                    } catch (Throwable error) {
                        return Observable.error(error);
                    }
                }
            });
}
// Converts the raw Retrofit response into a typed ServiceResponse:
// only 200 is a success (with an ElasticPoolInner body); any other status is
// raised as a CloudException.
private ServiceResponse<ElasticPoolInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<ElasticPoolInner, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<ElasticPoolInner>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
 * Returns a list of elastic pools in a server, blocking until the request has completed.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the List&lt;ElasticPoolInner&gt; object if successful.
 */
public List<ElasticPoolInner> listByServer(String resourceGroupName, String serverName) {
    ServiceResponse<List<ElasticPoolInner>> serviceResponse =
            listByServerWithServiceResponseAsync(resourceGroupName, serverName)
                    .toBlocking()
                    .single();
    return serviceResponse.body();
}
/**
 * Returns a list of elastic pools in a server, reporting the outcome through the
 * supplied callback.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<ElasticPoolInner>> listByServerAsync(String resourceGroupName, String serverName, final ServiceCallback<List<ElasticPoolInner>> serviceCallback) {
    Observable<ServiceResponse<List<ElasticPoolInner>>> responseObservable =
            listByServerWithServiceResponseAsync(resourceGroupName, serverName);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Returns a list of elastic pools in a server, emitting only the response body.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the List&lt;ElasticPoolInner&gt; object
 */
public Observable<List<ElasticPoolInner>> listByServerAsync(String resourceGroupName, String serverName) {
    // Strip the ServiceResponse wrapper and surface just the payload.
    final Func1<ServiceResponse<List<ElasticPoolInner>>, List<ElasticPoolInner>> extractBody =
            new Func1<ServiceResponse<List<ElasticPoolInner>>, List<ElasticPoolInner>>() {
                @Override
                public List<ElasticPoolInner> call(ServiceResponse<List<ElasticPoolInner>> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return listByServerWithServiceResponseAsync(resourceGroupName, serverName).map(extractBody);
}
/**
 * Returns a list of elastic pools in a server. The service responds with a single
 * page, which is unwrapped into a plain list before being emitted.
 *
 * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
 * @param serverName The name of the server.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the List&lt;ElasticPoolInner&gt; object
 */
public Observable<ServiceResponse<List<ElasticPoolInner>>> listByServerWithServiceResponseAsync(String resourceGroupName, String serverName) {
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serverName == null) {
        throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    return service.listByServer(this.client.subscriptionId(), resourceGroupName, serverName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<ElasticPoolInner>>>>() {
                @Override
                public Observable<ServiceResponse<List<ElasticPoolInner>>> call(Response<ResponseBody> restResponse) {
                    try {
                        ServiceResponse<PageImpl<ElasticPoolInner>> pagedResult = listByServerDelegate(restResponse);
                        // Unwrap the single page into a plain list; a null body yields a null list.
                        List<ElasticPoolInner> items = pagedResult.body() == null ? null : pagedResult.body().items();
                        return Observable.just(new ServiceResponse<List<ElasticPoolInner>>(items, pagedResult.response()));
                    } catch (Throwable error) {
                        return Observable.error(error);
                    }
                }
            });
}
// Converts the raw Retrofit response into a typed ServiceResponse:
// only 200 is a success (with a single PageImpl of ElasticPoolInner items); any
// other status is raised as a CloudException.
private ServiceResponse<PageImpl<ElasticPoolInner>> listByServerDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<PageImpl<ElasticPoolInner>, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<PageImpl<ElasticPoolInner>>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
}
| |
package org.aml.typesystem.java;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;
import org.aml.typesystem.AbstractType;
import org.aml.typesystem.BuiltIns;
import org.aml.typesystem.IAnnotationModel;
import org.aml.typesystem.IFieldModel;
import org.aml.typesystem.IMember;
import org.aml.typesystem.IMethodModel;
import org.aml.typesystem.ITypeModel;
import org.aml.typesystem.ITypeRegistry;
import org.aml.typesystem.TypeOps;
import org.aml.typesystem.TypeRegistryImpl;
import org.aml.typesystem.meta.facets.Annotation;
import org.aml.typesystem.meta.facets.Default;
import org.aml.typesystem.meta.restrictions.ComponentShouldBeOfType;
import org.aml.typesystem.reflection.AnnotationModel;
/**
* <p>JavaTypeBuilder class.</p>
*
* @author kor
* @version $Id: $Id
*/
public class JavaTypeBuilder {
// Registry for regular (non-annotation) Java types, seeded with the built-in types.
protected TypeRegistryImpl typeRegistry = new TypeRegistryImpl(BuiltIns.getBuiltInTypes());
// Separate registry for types derived from Java annotations; getType() picks one of
// the two registries based on ITypeModel.isAnnotation().
protected TypeRegistryImpl annotationsTypeRegistry = new TypeRegistryImpl(BuiltIns.getBuiltInTypes());
// Builder configuration; supplies the naming convention used when deriving type names.
protected TypeBuilderConfig config = new TypeBuilderConfig();
/**
 * Returns the configuration currently used by this builder.
 *
 * @return the active {@link org.aml.typesystem.java.TypeBuilderConfig} object.
 */
public TypeBuilderConfig getConfig() {
    return this.config;
}
/**
 * Replaces the configuration used by this builder.
 *
 * @param config a {@link org.aml.typesystem.java.TypeBuilderConfig} object.
 */
public void setConfig(TypeBuilderConfig config) {
    this.config = config;
}
// Mutable holder used while processing annotation-driven configuration for a single
// member. Field semantics are inferred from names and not fully visible here —
// TODO(review): confirm against the callers that populate this struct.
static class AnnotationsConfigInput {
// presumably whether the member is required — confirm at call sites
protected boolean required;
// presumably whether the member accepts null/nil — confirm at call sites
protected boolean nullable;
// the resolved type for the member
protected AbstractType type;
public boolean isProperty;
// when true the member is skipped entirely — confirm at call sites
public boolean skipMember;
// overriding property name, if any
public String propName;
}
/**
* <p>getType.</p>
*
* @param mdl a {@link org.aml.typesystem.ITypeModel} object.
* @return a {@link org.aml.typesystem.AbstractType} object.
*/
public AbstractType getType(ITypeModel mdl) {
if (mdl==null){
return null;
}
if (mdl.getFullyQualifiedName().equals(Object.class.getName())) {
return BuiltIns.OBJECT;
}
if (mdl.getFullyQualifiedName().equals(String.class.getName())) {
return BuiltIns.STRING;
}
if (mdl.getFullyQualifiedName().equals(boolean.class.getName())) {
return BuiltIns.BOOLEAN;
}
String name = config.getNamingConvention().name(mdl);
TypeRegistryImpl typeRegistry = mdl.isAnnotation() ? this.annotationsTypeRegistry : this.typeRegistry;
AbstractType existingType = typeRegistry.getType(name);
if (existingType != null) {
return existingType;
}
if (mdl.isAnnotation()) {
if (mdl.getMethods().length == 0) {
AbstractType tp = TypeOps.derive(name, BuiltIns.NIL);
typeRegistry.registerType(tp);
return tp;
}
if (mdl.getMethods().length == 1) {
IMethodModel iMethodModel = mdl.getMethods()[0];
if (iMethodModel.getName().equals("value")) {
ITypeModel type = iMethodModel.getType();
AbstractType tp = TypeOps.derive(name, getType(type));
if (iMethodModel.defaultValue() != null) {
tp.addMeta(new Default(iMethodModel.defaultValue()));
}
typeRegistry.registerType(tp);
return tp;
}
}
}
AbstractType type = BuiltinsBuilder.getInstance().getType(mdl);
if (type != null) {
typeRegistry.registerType(type);
return type;
}
IFieldModel[] fields = mdl.getFields();
if (mdl.isEnum()) {
AbstractType superType = BuiltIns.STRING;
AbstractType tp = TypeOps.derive(name, superType);
typeRegistry.registerType(tp);
ArrayList<String> enumValues = new ArrayList<>();
for (IFieldModel f : fields) {
if (f.isStatic() && f.isPublic()) {
enumValues.add(f.getName());
}
}
tp.addMeta(new org.aml.typesystem.meta.restrictions.Enum(enumValues));
return tp;
}
ITypeModel componentType = mdl.getComponentType();
if (componentType != null) {
AbstractType superType = BuiltIns.ARRAY;
AbstractType tp = TypeOps.derive("", superType);
if (componentType.isAnnotation()){
componentType=new MaskedAnnotationType(componentType);
}
tp.addMeta(new ComponentShouldBeOfType(getType(componentType)));
return tp;
}
AbstractType superType = BuiltIns.OBJECT;
ITypeModel superClass = mdl.getSuperClass();
if (superClass != null) {
superType = getType(superClass);
}
AbstractType tp = TypeOps.derive(name, superType);
typeRegistry.registerType(tp);
IMemberFilter memberFilter = config.getMemberFilter();
if (mdl.isAnnotation()||mdl instanceof MaskedAnnotationType) {
memberFilter = new IMemberFilter() {
@Override
public boolean accept(IMember member) {
return member instanceof IMethodModel;
}
};
}
ArrayList<IMember> members = new ArrayList<>();
for (IFieldModel f : fields) {
if (f.isStatic()) {
continue;
}
if (memberFilter.accept(f)) {
members.add(f);
}
}
for (IMethodModel m : mdl.getMethods()) {
if (m.isStatic()) {
continue;
}
if (memberFilter.accept(m)) {
members.add(m);
}
}
LinkedHashMap<String, IMember> props = new LinkedHashMap<>();
for (IMember m : members) {
String pName = config.getPropertyNameBuilder().buildName(m);
props.put(pName, m);
}
for (String p : props.keySet()) {
IMember iMember = props.get(p);
boolean optional = config.getCheckNullable().isOptional(iMember);
ArrayList<IAnnotationModel> toProcess = new ArrayList<>();
AbstractType buildType = buildType(iMember);
AnnotationsConfigInput cfg = new AnnotationsConfigInput();
boolean hasDefault = iMember.defaultValue() != null;
if (hasDefault) {
if (!buildType.isAnonimous()) {
buildType = TypeOps.derive("", buildType);
}
buildType.addMeta(new Default(iMember.defaultValue()));
}
cfg.isProperty = true;
cfg.type = buildType;
cfg.required = !optional;
cfg.propName=p;
cfg.nullable=config.getCheckNullable().isNullable(iMember);
for (IAnnotationModel annotation : iMember.getAnnotations()) {
if (!config.getAnnotationsProcessingConfig().process(cfg, annotation)) {
if (config.getAnnotationsFilter().preserve(annotation)) {
toProcess.add(annotation);
}
}
}
cfg.type = appendAnnotations(cfg.type, toProcess, true);
if (cfg.nullable){
if (!cfg.type.isAnonimous()) {
cfg.type = TypeOps.derive("", cfg.type);
}
cfg.type.setNullable(true);
}
if (!cfg.skipMember){
tp.declareProperty(cfg.propName, cfg.type, !cfg.required);
}
}
ArrayList<IAnnotationModel> toProcess = new ArrayList<>();
AnnotationsConfigInput cfg = new AnnotationsConfigInput();
cfg.type = tp;
for (IAnnotationModel annotation : mdl.getAnnotations()) {
if (!config.getAnnotationsProcessingConfig().process(cfg, annotation)) {
if (config.getAnnotationsFilter().preserve(annotation)) {
toProcess.add(annotation);
}
}
}
tp = appendAnnotations(tp, toProcess, false);
return tp;
}
private AbstractType appendAnnotations(AbstractType type, ArrayList<IAnnotationModel> toProcess, boolean prop) {
if (toProcess.isEmpty()) {
return type;
}
if (!type.isAnonimous() && prop) {
type = TypeOps.derive("", type);
}
for (IAnnotationModel m : toProcess) {
ITypeModel at = m.getType();
AbstractType type2 = getType(at);
Object vl = dumpValue(m);
type.addMeta(new Annotation(m.getName(), vl, type2));
}
return type;
}
private Object dumpValue(IAnnotationModel m) {
LinkedHashMap<String, Object> res = new LinkedHashMap<>();
Map<String, Object> allValues = m.allValues();
for (String s : allValues.keySet()) {
Object value = allValues.get(s);
if (value instanceof Annotation) {
value = dumpValue(new AnnotationModel((java.lang.annotation.Annotation) value));
}
if (value instanceof Object[]) {
Object[] arr = (Object[]) value;
Object[] newVals = new Object[arr.length];
for (int a = 0; a < arr.length; a++) {
Object object = arr[a];
newVals[a] = object;
Class<?>[] interfaces = object.getClass().getInterfaces();
if (interfaces.length == 1) {
Class<?> clz = interfaces[0];
if (clz.isAnnotation()) {
AnnotationModel m2 = new AnnotationModel((java.lang.annotation.Annotation) object);
ITypeModel type = m2.getType();
newVals[a] = dumpValue(m2);
}
}
}
value = newVals;
}
res.put(s, value);
}
if (res.size() == 1 && res.containsKey("value") && m.getType().getMethods().length == 1) {
return res.get("value");
}
return res;
}
private AbstractType buildType(IMember iMember) {
if (iMember.isCollection()) {
ITypeModel collectionMemberType = iMember.getCollectionMemberType();
return TypeOps.array(getType(collectionMemberType));
}
if (iMember.getType().isAnnotation()){
AbstractType type = getType(new MaskedAnnotationType(iMember.getType()));
return type;
}
AbstractType type = getType(iMember.getType());
return type;
}
/**
* <p>getRegistry.</p>
*
* @return a {@link org.aml.typesystem.ITypeRegistry} object.
*/
public ITypeRegistry getRegistry() {
return this.typeRegistry;
}
/**
* <p>getAnnotationTypeRegistry.</p>
*
* @return a {@link org.aml.typesystem.ITypeRegistry} object.
*/
public ITypeRegistry getAnnotationTypeRegistry() {
return this.annotationsTypeRegistry;
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* SpotInstanceRequestIdSetItemType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
 * SpotInstanceRequestIdSetItemType bean class.
 *
 * Axis2 ADB (Axis Data Binding) bean wrapping a single required
 * {@code <spotInstanceRequestId>} element in the EC2 2009-10-31 namespace.
 * Auto-generated from WSDL — the serialization/parsing order is part of the
 * wire format; edit with care.
 */
public class SpotInstanceRequestIdSetItemType
implements org.apache.axis2.databinding.ADBBean{
    /* This type was generated from the piece of schema that had
    name = SpotInstanceRequestIdSetItemType
    Namespace URI = http://ec2.amazonaws.com/doc/2009-10-31/
    Namespace Prefix = ns1
    */

    /**
     * Returns the conventional prefix ("ns1") for the EC2 namespace;
     * any other namespace gets a fresh unique prefix from BeanUtil.
     */
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://ec2.amazonaws.com/doc/2009-10-31/")){
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for SpotInstanceRequestId
     * (required by the schema: serialization throws if it is still null)
     */
    protected java.lang.String localSpotInstanceRequestId ;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getSpotInstanceRequestId(){
        return localSpotInstanceRequestId;
    }

    /**
     * Auto generated setter method
     * @param param SpotInstanceRequestId
     */
    public void setSpotInstanceRequestId(java.lang.String param){
        this.localSpotInstanceRequestId=param;
    }

    /**
     * isReaderMTOMAware
     * Probes the reader's IS_DATA_HANDLERS_AWARE property; readers that do not
     * recognize the property throw IllegalArgumentException, treated as "not aware".
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try{
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OMElement; actual XML is produced
     * by {@link #serialize} only when the element is consumed.
     *
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
            public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                SpotInstanceRequestIdSetItemType.this.serialize(parentQName,factory,xmlWriter);
            }
        };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName,factory,dataSource);
    }

    /** Convenience overload: serialize without writing an xsi:type attribute. */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }

    /**
     * Writes this bean as XML: the parent element, an optional xsi:type attribute
     * (when serializeType is true), and the mandatory spotInstanceRequestId child.
     * @throws ADBException if spotInstanceRequestId is null (element is required)
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        java.lang.String prefix = null;
        java.lang.String namespace = null;
        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();
        // Open the parent element, reusing an already-bound prefix when possible.
        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }
        // Optionally emit xsi:type for polymorphic payloads.
        if (serializeType){
            java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2009-10-31/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        namespacePrefix+":SpotInstanceRequestIdSetItemType",
                        xmlWriter);
            } else {
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        "SpotInstanceRequestIdSetItemType",
                        xmlWriter);
            }
        }
        // Child element <spotInstanceRequestId> in the EC2 namespace.
        namespace = "http://ec2.amazonaws.com/doc/2009-10-31/";
        if (! namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null) {
                prefix = generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix,"spotInstanceRequestId", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace,"spotInstanceRequestId");
            }
        } else {
            xmlWriter.writeStartElement("spotInstanceRequestId");
        }
        if (localSpotInstanceRequestId==null){
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("spotInstanceRequestId cannot be null!!");
        }else{
            xmlWriter.writeCharacters(localSpotInstanceRequestId);
        }
        xmlWriter.writeEndElement();
        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals(""))
        {
            xmlWriter.writeAttribute(attName,attValue);
        }
        else
        {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute (value rendered as prefix:localPart).
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }
            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    /** Writes a space-separated list of QNames as character data. */
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;
            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }
                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     * Returns the prefix already bound to the namespace, or binds a new one,
     * retrying with unique prefixes until an unused prefix is found.
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     * (pull-parser view over the element/attribute lists; throws if the
     * required spotInstanceRequestId is null)
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException{
        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();
        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                "spotInstanceRequestId"));
        if (localSpotInstanceRequestId != null){
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localSpotInstanceRequestId));
        } else {
            throw new org.apache.axis2.databinding.ADBException("spotInstanceRequestId cannot be null!!");
        }
        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory{
        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static SpotInstanceRequestIdSetItemType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
            SpotInstanceRequestIdSetItemType object =
                    new SpotInstanceRequestIdSetItemType();
            // The locals below are part of the generated-code template; several are unused here.
            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix ="";
            java.lang.String namespaceuri ="";
            try {
                // Skip to the first start/end element event.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                // An xsi:type naming a different type delegates to the extension mapper.
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                    java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName!=null){
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1){
                            nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix==null?"":nsPrefix;
                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                        if (!"SpotInstanceRequestIdSetItemType".equals(type)){
                            //find namespace for the prefix
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (SpotInstanceRequestIdSetItemType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                                    nsUri,type,reader);
                        }
                    }
                }
                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();
                reader.next();
                // Expect exactly one <spotInstanceRequestId> child.
                while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
                if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","spotInstanceRequestId").equals(reader.getName())){
                    java.lang.String content = reader.getElementText();
                    object.setSpotInstanceRequestId(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                    reader.next();
                } // End of if for expected property start element
                else{
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement())
                    // A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }
            return object;
        }
    }//end of factory class
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.