gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package org.springframework.social.spotify.api.operations;

import java.util.Arrays;
import java.util.Collection;
import java.util.Map;

import org.junit.Assert;
import org.junit.Test;
import org.mockito.ArgumentMatcher;
import org.mockito.Mockito;
import org.springframework.social.spotify.api.Artist;
import org.springframework.social.spotify.api.CursorPaging;

import feign.Client;
import feign.Request;
import feign.Response;

/**
 * Tests for {@link FollowOperations}: each test verifies that the operation under
 * test issues the expected HTTP request (method, path/query string, headers and —
 * for the follow-playlist calls — body) against a mocked feign {@link Client},
 * and that JSON responses are deserialized as expected.
 */
public class FollowOperationsTest extends AbstractOperationsTest<FollowOperations> {

    public FollowOperationsTest() {
        super(FollowOperations.class);
    }

    /**
     * Builds a mocked feign {@link Client} that answers {@code response} for any
     * request accepted by {@code request} (combined with the shared options matcher).
     */
    private Client mockClient(ArgumentMatcher<Request> request, Response response) throws Exception {
        Client client = Mockito.mock(Client.class);
        Mockito.when(client.execute(Mockito.argThat(request), Mockito.argThat(optionsMatcher())))
                .thenReturn(response);
        return client;
    }

    /** Verifies that exactly one request accepted by {@code request} was executed. */
    private void verifyExecuted(Client client, ArgumentMatcher<Request> request) throws Exception {
        Mockito.verify(client).execute(Mockito.argThat(request), Mockito.argThat(optionsMatcher()));
    }

    @Test
    public void testFollowingArtists() throws Exception {
        Map<String, Collection<String>> headers = headers();
        ArgumentMatcher<Request> request =
                super.requestMatcher("GET", "/v1/me/following?type=artist", headers);
        Client client = mockClient(request, response("followingArtists.json"));

        FollowOperations followOperations = builder(client);
        CursorPaging<Artist> actual = followOperations.followingArtists();

        Assert.assertNotNull(actual);
        verifyExecuted(client, request);
    }

    @Test
    public void testFollowingArtistsLimitAfter() throws Exception {
        Map<String, Collection<String>> headers = headers();
        ArgumentMatcher<Request> request = super.requestMatcher("GET",
                "/v1/me/following?type=artist&limit=20&after=0aV6DOiouImYTqrR5YlIqx", headers);
        Client client = mockClient(request, response("followingArtists.json"));

        FollowOperations followOperations = builder(client);
        String after = "0aV6DOiouImYTqrR5YlIqx";
        CursorPaging<Artist> actual = followOperations.followingArtists(20, after);

        Assert.assertNotNull(actual);
        verifyExecuted(client, request);
    }

    @Test
    public void testFollowArtist() throws Exception {
        Map<String, Collection<String>> headers = headers();
        // ids are comma-separated and URL-encoded (%2C) in the query string.
        ArgumentMatcher<Request> request = super.requestMatcher("PUT",
                "/v1/me/following?type=artist&ids=74ASZWbe4lXaubB36ztrGX%2C08td7MxkoHQkXnWAYD8d6Q", headers);
        Client client = mockClient(request, noContentResponse());

        FollowOperations followOperations = builder(client);
        String id1 = "74ASZWbe4lXaubB36ztrGX";
        String id2 = "08td7MxkoHQkXnWAYD8d6Q";
        followOperations.followArtist(id1, id2);

        verifyExecuted(client, request);
    }

    @Test
    public void testUnfollowArtist() throws Exception {
        Map<String, Collection<String>> headers = headers();
        ArgumentMatcher<Request> request = super.requestMatcher("DELETE",
                "/v1/me/following?type=artist&ids=74ASZWbe4lXaubB36ztrGX%2C08td7MxkoHQkXnWAYD8d6Q", headers);
        Client client = mockClient(request, noContentResponse());

        FollowOperations followOperations = builder(client);
        String id1 = "74ASZWbe4lXaubB36ztrGX";
        String id2 = "08td7MxkoHQkXnWAYD8d6Q";
        followOperations.unfollowArtist(id1, id2);

        verifyExecuted(client, request);
    }

    @Test
    public void testIsFollowingArtist() throws Exception {
        Map<String, Collection<String>> headers = headers();
        ArgumentMatcher<Request> request = super.requestMatcher("GET",
                "/v1/me/following/contains?type=artist&ids=74ASZWbe4lXaubB36ztrGX%2C08td7MxkoHQkXnWAYD8d6Q",
                headers);
        Client client = mockClient(request, response("isFollowingArtist.json"));

        FollowOperations followOperations = builder(client);
        String id1 = "74ASZWbe4lXaubB36ztrGX";
        String id2 = "08td7MxkoHQkXnWAYD8d6Q";
        Boolean[] actual = followOperations.isFollowingArtist(id1, id2);

        Boolean[] expected = expected("isFollowingArtist.json", Boolean[].class);
        Assert.assertNotNull(actual);
        Assert.assertArrayEquals(expected, actual);
        verifyExecuted(client, request);
    }

    @Test
    public void testFollowUser() throws Exception {
        Map<String, Collection<String>> headers = headers();
        ArgumentMatcher<Request> request = super.requestMatcher("PUT",
                "/v1/me/following?type=user&ids=exampleuser01", headers);
        Client client = mockClient(request, noContentResponse());

        FollowOperations followOperations = builder(client);
        String id1 = "exampleuser01";
        followOperations.followUser(id1);

        verifyExecuted(client, request);
    }

    @Test
    public void testUnfollowUser() throws Exception {
        Map<String, Collection<String>> headers = headers();
        ArgumentMatcher<Request> request = super.requestMatcher("DELETE",
                "/v1/me/following?type=user&ids=exampleuser01", headers);
        Client client = mockClient(request, noContentResponse());

        FollowOperations followOperations = builder(client);
        String id1 = "exampleuser01";
        followOperations.unfollowUser(id1);

        verifyExecuted(client, request);
    }

    @Test
    public void testIsFollowingUser() throws Exception {
        Map<String, Collection<String>> headers = headers();
        ArgumentMatcher<Request> request = super.requestMatcher("GET",
                "/v1/me/following/contains?type=user&ids=exampleuser01%2Cexampleuser02", headers);
        Client client = mockClient(request, response("isFollowingUser.json"));

        FollowOperations followOperations = builder(client);
        String id1 = "exampleuser01";
        String id2 = "exampleuser02";
        Boolean[] actual = followOperations.isFollowingUser(id1, id2);

        Boolean[] expected = expected("isFollowingUser.json", Boolean[].class);
        Assert.assertNotNull(actual);
        Assert.assertArrayEquals(expected, actual);
        verifyExecuted(client, request);
    }

    @Test
    public void testFollowPlaylistPublic() throws Exception {
        Map<String, Collection<String>> headers = headers();
        // Following a playlist sends a JSON body, so content headers are expected too.
        headers.put("Content-Type", Arrays.asList("application/json"));
        headers.put("Content-Length", Arrays.asList("17"));
        byte[] body = loadContent("followPlaylistRequestPublic.json");
        ArgumentMatcher<Request> request = super.requestMatcher("PUT",
                "/v1/users/jmperezperez/playlists/2v3iNvBX8Ay1Gt2uXtUKUT/followers", headers, body);
        Client client = mockClient(request, noContentResponse());

        FollowOperations followOperations = builder(client);
        String ownerId = "jmperezperez";
        String playlistId = "2v3iNvBX8Ay1Gt2uXtUKUT";
        followOperations.followPlaylist(ownerId, playlistId, true);

        verifyExecuted(client, request);
    }

    @Test
    public void testFollowPlaylistPrivate() throws Exception {
        Map<String, Collection<String>> headers = headers();
        headers.put("Content-Type", Arrays.asList("application/json"));
        // Private payload is one byte longer ("false" vs "true").
        headers.put("Content-Length", Arrays.asList("18"));
        byte[] body = loadContent("followPlaylistRequestPrivate.json");
        ArgumentMatcher<Request> request = super.requestMatcher("PUT",
                "/v1/users/jmperezperez/playlists/2v3iNvBX8Ay1Gt2uXtUKUT/followers", headers, body);
        Client client = mockClient(request, noContentResponse());

        FollowOperations followOperations = builder(client);
        String ownerId = "jmperezperez";
        String playlistId = "2v3iNvBX8Ay1Gt2uXtUKUT";
        followOperations.followPlaylist(ownerId, playlistId, false);

        verifyExecuted(client, request);
    }

    @Test
    public void testUnfollowPlaylist() throws Exception {
        Map<String, Collection<String>> headers = headers();
        ArgumentMatcher<Request> request = super.requestMatcher("DELETE",
                "/v1/users/jmperezperez/playlists/2v3iNvBX8Ay1Gt2uXtUKUT/followers", headers);
        Client client = mockClient(request, noContentResponse());

        FollowOperations followOperations = builder(client);
        String ownerId = "jmperezperez";
        String playlistId = "2v3iNvBX8Ay1Gt2uXtUKUT";
        followOperations.unfollowPlaylist(ownerId, playlistId);

        verifyExecuted(client, request);
    }

    @Test
    public void testIsFollowingPlaylist() throws Exception {
        Map<String, Collection<String>> headers = headers();
        ArgumentMatcher<Request> request = super.requestMatcher("GET",
                "/v1/users/jmperezperez/playlists/2v3iNvBX8Ay1Gt2uXtUKUT/followers/contains?ids=possan%2Celogain",
                headers);
        Client client = mockClient(request, response("isFollowingPlaylist.json"));

        FollowOperations followOperations = builder(client);
        String id1 = "possan";
        String id2 = "elogain";
        String ownerId = "jmperezperez";
        String playlistId = "2v3iNvBX8Ay1Gt2uXtUKUT";
        Boolean[] actual = followOperations.isFollowingPlaylist(ownerId, playlistId, id1, id2);

        Boolean[] expected = expected("isFollowingPlaylist.json", Boolean[].class);
        Assert.assertNotNull(actual);
        Assert.assertArrayEquals(expected, actual);
        verifyExecuted(client, request);
    }
}
package gudusoft.gsqlparser.sql2xml.model;

import org.simpleframework.xml.Element;

/**
 * SimpleXML model for the SQL grammar production
 * {@code <nonparenthesized value expression primary>}: exactly one of the
 * alternative child elements below is expected to be populated, so every
 * field is declared {@code @Element(required = false)}.
 *
 * NOTE(review): naming follows the grammar (snake_case class/field names),
 * apparently generated to mirror the BNF — kept as-is for serializer compatibility.
 */
public class nonparenthesized_value_expression_primary {

    // One field per grammar alternative; at most one is non-null after deserialization.
    @Element(required = false)
    private unsigned_value_specification unsigned_value_specification;

    @Element(required = false)
    private column_reference column_reference;

    @Element(required = false)
    private set_function_specification set_function_specification;

    @Element(required = false)
    private window_function window_function;

    @Element(required = false)
    private scalar_subquery scalar_subquery;

    @Element(required = false)
    private case_expression case_expression;

    @Element(required = false)
    private cast_specification cast_specification;

    @Element(required = false)
    private field_reference field_reference;

    @Element(required = false)
    private subtype_treatment subtype_treatment;

    @Element(required = false)
    private method_invocation method_invocation;

    @Element(required = false)
    private static_method_invocation static_method_invocation;

    @Element(required = false)
    private new_specification new_specification;

    @Element(required = false)
    private attribute_or_method_reference attribute_or_method_reference;

    @Element(required = false)
    private reference_resolution reference_resolution;

    @Element(required = false)
    private collection_value_constructor collection_value_constructor;

    @Element(required = false)
    private array_element_reference array_element_reference;

    @Element(required = false)
    private multiset_element_reference multiset_element_reference;

    @Element(required = false)
    private next_value_expression next_value_expression;

    @Element(required = false)
    private routine_invocation routine_invocation;

    // Plain bean accessors below; no additional logic.

    public unsigned_value_specification getUnsigned_value_specification( ) {
        return unsigned_value_specification;
    }

    public void setUnsigned_value_specification( unsigned_value_specification unsigned_value_specification ) {
        this.unsigned_value_specification = unsigned_value_specification;
    }

    public column_reference getColumn_reference( ) {
        return column_reference;
    }

    public void setColumn_reference( column_reference column_reference ) {
        this.column_reference = column_reference;
    }

    public set_function_specification getSet_function_specification( ) {
        return set_function_specification;
    }

    public void setSet_function_specification( set_function_specification set_function_specification ) {
        this.set_function_specification = set_function_specification;
    }

    public window_function getWindow_function( ) {
        return window_function;
    }

    public void setWindow_function( window_function window_function ) {
        this.window_function = window_function;
    }

    public scalar_subquery getScalar_subquery( ) {
        return scalar_subquery;
    }

    public void setScalar_subquery( scalar_subquery scalar_subquery ) {
        this.scalar_subquery = scalar_subquery;
    }

    public case_expression getCase_expression( ) {
        return case_expression;
    }

    public void setCase_expression( case_expression case_expression ) {
        this.case_expression = case_expression;
    }

    public cast_specification getCast_specification( ) {
        return cast_specification;
    }

    public void setCast_specification( cast_specification cast_specification ) {
        this.cast_specification = cast_specification;
    }

    public field_reference getField_reference( ) {
        return field_reference;
    }

    public void setField_reference( field_reference field_reference ) {
        this.field_reference = field_reference;
    }

    public subtype_treatment getSubtype_treatment( ) {
        return subtype_treatment;
    }

    public void setSubtype_treatment( subtype_treatment subtype_treatment ) {
        this.subtype_treatment = subtype_treatment;
    }

    public method_invocation getMethod_invocation( ) {
        return method_invocation;
    }

    public void setMethod_invocation( method_invocation method_invocation ) {
        this.method_invocation = method_invocation;
    }

    public static_method_invocation getStatic_method_invocation( ) {
        return static_method_invocation;
    }

    public void setStatic_method_invocation( static_method_invocation static_method_invocation ) {
        this.static_method_invocation = static_method_invocation;
    }

    public new_specification getNew_specification( ) {
        return new_specification;
    }

    public void setNew_specification( new_specification new_specification ) {
        this.new_specification = new_specification;
    }

    public attribute_or_method_reference getAttribute_or_method_reference( ) {
        return attribute_or_method_reference;
    }

    public void setAttribute_or_method_reference( attribute_or_method_reference attribute_or_method_reference ) {
        this.attribute_or_method_reference = attribute_or_method_reference;
    }

    public reference_resolution getReference_resolution( ) {
        return reference_resolution;
    }

    public void setReference_resolution( reference_resolution reference_resolution ) {
        this.reference_resolution = reference_resolution;
    }

    public collection_value_constructor getCollection_value_constructor( ) {
        return collection_value_constructor;
    }

    public void setCollection_value_constructor( collection_value_constructor collection_value_constructor ) {
        this.collection_value_constructor = collection_value_constructor;
    }

    public array_element_reference getArray_element_reference( ) {
        return array_element_reference;
    }

    public void setArray_element_reference( array_element_reference array_element_reference ) {
        this.array_element_reference = array_element_reference;
    }

    public multiset_element_reference getMultiset_element_reference( ) {
        return multiset_element_reference;
    }

    public void setMultiset_element_reference( multiset_element_reference multiset_element_reference ) {
        this.multiset_element_reference = multiset_element_reference;
    }

    public next_value_expression getNext_value_expression( ) {
        return next_value_expression;
    }

    public void setNext_value_expression( next_value_expression next_value_expression ) {
        this.next_value_expression = next_value_expression;
    }

    public routine_invocation getRoutine_invocation( ) {
        return routine_invocation;
    }

    public void setRoutine_invocation( routine_invocation routine_invocation ) {
        this.routine_invocation = routine_invocation;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.syncope.core.logic.report;

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.syncope.common.lib.SyncopeConstants;
import org.apache.syncope.common.lib.report.GroupReportletConf;
import org.apache.syncope.common.lib.report.GroupReportletConf.Feature;
import org.apache.syncope.common.lib.report.ReportletConf;
import org.apache.syncope.common.lib.to.AnyTO;
import org.apache.syncope.common.lib.to.AttrTO;
import org.apache.syncope.common.lib.to.GroupTO;
import org.apache.syncope.common.lib.types.AnyTypeKind;
import org.apache.syncope.core.persistence.api.dao.GroupDAO;
import org.apache.syncope.core.persistence.api.dao.search.OrderByClause;
import org.apache.syncope.core.persistence.api.entity.group.Group;
import org.apache.syncope.core.persistence.api.search.SearchCondConverter;
import org.apache.syncope.core.persistence.api.dao.AnySearchDAO;
import org.apache.syncope.core.persistence.api.dao.ReportletConfClass;
import org.apache.syncope.core.persistence.api.entity.user.UMembership;
import org.apache.syncope.core.provisioning.api.data.GroupDataBinder;
import org.springframework.beans.factory.annotation.Autowired;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;

/**
 * Reportlet emitting an XML fragment describing groups: configured features
 * (key, name, owners), plain / derived / virtual attributes, associated
 * resources and member users. Groups are either all of them or those matching
 * the configured search condition, fetched page by page.
 */
@ReportletConfClass(GroupReportletConf.class)
public class GroupReportlet extends AbstractReportlet {

    private static final int PAGE_SIZE = 10;

    @Autowired
    private GroupDAO groupDAO;

    @Autowired
    private AnySearchDAO searchDAO;

    @Autowired
    private GroupDataBinder groupDataBinder;

    private GroupReportletConf conf;

    /**
     * Writes a {@code <resources>} element listing the external resources
     * assigned to the given any object; writes nothing when there are none.
     */
    private void doExtractResources(final ContentHandler handler, final AnyTO anyTO)
            throws SAXException {

        if (anyTO.getResources().isEmpty()) {
            LOG.debug("No resources found for {}[{}]", anyTO.getClass().getSimpleName(), anyTO.getKey());
        } else {
            AttributesImpl atts = new AttributesImpl();
            handler.startElement("", "", "resources", null);

            for (String resourceName : anyTO.getResources()) {
                atts.clear();
                atts.addAttribute("", "", ReportXMLConst.ATTR_NAME, ReportXMLConst.XSD_STRING, resourceName);
                handler.startElement("", "", "resource", atts);
                handler.endElement("", "", "resource");
            }

            handler.endElement("", "", "resources");
        }
    }

    /**
     * Writes one attribute list ({@code listElement}) containing an
     * {@code itemElement} per requested attribute name, each with its values;
     * names missing from {@code attrMap} are only logged at debug level.
     */
    private void doExtractAttrValues(
            final ContentHandler handler,
            final AnyTO anyTO,
            final Collection<String> attrNames,
            final Map<String, AttrTO> attrMap,
            final String listElement,
            final String itemElement)
            throws SAXException {

        AttributesImpl atts = new AttributesImpl();
        handler.startElement("", "", listElement, null);

        for (String attrName : attrNames) {
            atts.clear();
            atts.addAttribute("", "", ReportXMLConst.ATTR_NAME, ReportXMLConst.XSD_STRING, attrName);
            handler.startElement("", "", itemElement, atts);

            if (attrMap.containsKey(attrName)) {
                for (String value : attrMap.get(attrName).getValues()) {
                    handler.startElement("", "", "value", null);
                    handler.characters(value.toCharArray(), 0, value.length());
                    handler.endElement("", "", "value");
                }
            } else {
                LOG.debug("{} not found for {}[{}]",
                        attrName, anyTO.getClass().getSimpleName(), anyTO.getKey());
            }

            handler.endElement("", "", itemElement);
        }

        handler.endElement("", "", listElement);
    }

    /**
     * Writes the plain, derived and virtual attribute sections requested by
     * configuration; empty request sets produce no output at all.
     */
    private void doExtractAttributes(
            final ContentHandler handler,
            final AnyTO anyTO,
            final Collection<String> attrs,
            final Collection<String> derAttrs,
            final Collection<String> virAttrs)
            throws SAXException {

        if (!attrs.isEmpty()) {
            doExtractAttrValues(handler, anyTO, attrs, anyTO.getPlainAttrMap(),
                    "attributes", "attribute");
        }

        if (!derAttrs.isEmpty()) {
            doExtractAttrValues(handler, anyTO, derAttrs, anyTO.getDerAttrMap(),
                    "derivedAttributes", "derivedAttribute");
        }

        if (!virAttrs.isEmpty()) {
            doExtractAttrValues(handler, anyTO, virAttrs, anyTO.getVirAttrMap(),
                    "virtualAttributes", "virtualAttribute");
        }
    }

    /**
     * Writes a {@code <group>} element per given group, carrying the configured
     * features as XML attributes plus nested attribute / resource / member sections.
     */
    private void doExtract(final ContentHandler handler, final List<Group> groups)
            throws SAXException {

        AttributesImpl atts = new AttributesImpl();
        for (Group group : groups) {
            atts.clear();

            for (Feature feature : conf.getFeatures()) {
                String type = null;
                String value = null;
                switch (feature) {
                    case key:
                        type = ReportXMLConst.XSD_STRING;
                        value = group.getKey();
                        break;

                    case name:
                        type = ReportXMLConst.XSD_STRING;
                        value = String.valueOf(group.getName());
                        break;

                    case groupOwner:
                        type = ReportXMLConst.XSD_STRING;
                        // guard: a group may have no owner — previously this NPEd
                        value = group.getGroupOwner() == null ? null : group.getGroupOwner().getKey();
                        break;

                    case userOwner:
                        type = ReportXMLConst.XSD_STRING;
                        value = group.getUserOwner() == null ? null : group.getUserOwner().getKey();
                        break;

                    default:
                }

                // features without a value (e.g. missing owner) are simply skipped
                if (type != null && value != null) {
                    atts.addAttribute("", "", feature.name(), type, value);
                }
            }

            handler.startElement("", "", "group", atts);

            // Using GroupTO for attribute values, since the conversion logic of
            // values to String is already encapsulated there
            GroupTO groupTO = groupDataBinder.getGroupTO(group, true);

            doExtractAttributes(handler, groupTO, conf.getPlainAttrs(), conf.getDerAttrs(), conf.getVirAttrs());

            // to get resources associated to a group
            if (conf.getFeatures().contains(Feature.resources)) {
                doExtractResources(handler, groupTO);
            }

            // to get users associated to a group, GroupDAO is preferred to GroupTO
            if (conf.getFeatures().contains(Feature.users)) {
                handler.startElement("", "", "users", null);

                for (UMembership memb : groupDAO.findUMemberships(group)) {
                    atts.clear();
                    atts.addAttribute("", "", "key", ReportXMLConst.XSD_STRING,
                            memb.getLeftEnd().getKey());
                    atts.addAttribute("", "", "username", ReportXMLConst.XSD_STRING,
                            String.valueOf(memb.getLeftEnd().getUsername()));
                    handler.startElement("", "", "user", atts);
                    handler.endElement("", "", "user");
                }

                handler.endElement("", "", "users");
            }

            handler.endElement("", "", "group");
        }
    }

    /**
     * Writes the {@code <configurations>} section echoing the reportlet
     * configuration (features and requested attribute names).
     */
    private void doExtractConf(final ContentHandler handler) throws SAXException {
        if (conf == null) {
            LOG.debug("Report configuration is not present");
            // FIX: previously fell through and dereferenced conf below -> NPE
            return;
        }

        AttributesImpl atts = new AttributesImpl();
        handler.startElement("", "", "configurations", null);
        handler.startElement("", "", "groupAttributes", atts);

        for (Feature feature : conf.getFeatures()) {
            atts.clear();
            handler.startElement("", "", "feature", atts);
            handler.characters(feature.name().toCharArray(), 0, feature.name().length());
            handler.endElement("", "", "feature");
        }

        for (String attr : conf.getPlainAttrs()) {
            atts.clear();
            handler.startElement("", "", "attribute", atts);
            handler.characters(attr.toCharArray(), 0, attr.length());
            handler.endElement("", "", "attribute");
        }

        for (String derAttr : conf.getDerAttrs()) {
            atts.clear();
            handler.startElement("", "", "derAttribute", atts);
            handler.characters(derAttr.toCharArray(), 0, derAttr.length());
            handler.endElement("", "", "derAttribute");
        }

        for (String virAttr : conf.getVirAttrs()) {
            atts.clear();
            handler.startElement("", "", "virAttribute", atts);
            handler.characters(virAttr.toCharArray(), 0, virAttr.length());
            handler.endElement("", "", "virAttribute");
        }

        handler.endElement("", "", "groupAttributes");
        handler.endElement("", "", "configurations");
    }

    /** Counts all groups, or only those matching the configured condition. */
    private int count() {
        return StringUtils.isBlank(conf.getMatchingCond())
                ? groupDAO.count()
                : searchDAO.count(SyncopeConstants.FULL_ADMIN_REALMS,
                        SearchCondConverter.convert(conf.getMatchingCond()), AnyTypeKind.GROUP);
    }

    @Override
    protected void doExtract(final ReportletConf conf, final ContentHandler handler) throws SAXException {
        if (conf instanceof GroupReportletConf) {
            this.conf = GroupReportletConf.class.cast(conf);
        } else {
            throw new ReportException(new IllegalArgumentException("Invalid configuration provided"));
        }

        doExtractConf(handler);
        if (StringUtils.isBlank(this.conf.getMatchingCond())) {
            doExtract(handler, groupDAO.findAll());
        } else {
            // page through the search results to bound memory usage
            for (int page = 1; page <= (count() / PAGE_SIZE) + 1; page++) {
                List<Group> groups = searchDAO.search(
                        SyncopeConstants.FULL_ADMIN_REALMS,
                        SearchCondConverter.convert(this.conf.getMatchingCond()),
                        page,
                        PAGE_SIZE,
                        Collections.<OrderByClause>emptyList(),
                        AnyTypeKind.GROUP);
                doExtract(handler, groups);
            }
        }
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.memberPushDown;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.*;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.refactoring.util.RefactoringConflictsUtil;
import com.intellij.refactoring.util.RefactoringUIUtil;
import com.intellij.refactoring.util.classMembers.ClassMemberReferencesVisitor;
import com.intellij.refactoring.util.classMembers.MemberInfo;
import com.intellij.util.containers.MultiMap;

import java.util.*;

/**
 * Collects conflicts for the "Push Members Down" refactoring: problems in the
 * source class (members left behind that use pushed-down ones, broken
 * functional interface, super calls resolving differently) and in each target
 * class (name clashes, visibility, static placement).
 *
 * Conflicts are accumulated into the {@link MultiMap} supplied at construction.
 */
public class PushDownConflicts {
  private final PsiClass myClass;
  // Members selected for push-down (classes chosen as "overrides" are excluded).
  private final Set<PsiMember> myMovedMembers;
  // Subset of moved methods that will be kept abstract in the source class.
  private final Set<PsiMethod> myAbstractMembers;
  private final MultiMap<PsiElement, String> myConflicts;

  public PushDownConflicts(PsiClass aClass, MemberInfo[] memberInfos, MultiMap<PsiElement, String> conflicts) {
    myClass = aClass;

    myMovedMembers = new HashSet<>();
    myAbstractMembers = new HashSet<>();
    for (MemberInfo memberInfo : memberInfos) {
      final PsiMember member = memberInfo.getMember();
      // Only checked entries count; inner classes picked via "overrides" are skipped.
      if (memberInfo.isChecked() && (!(memberInfo.getMember() instanceof PsiClass) || memberInfo.getOverrides() == null)) {
        myMovedMembers.add(member);
        if (memberInfo.isToAbstract()) {
          myAbstractMembers.add((PsiMethod)member);
        }
      }
    }

    myConflicts = conflicts;
  }

  public boolean isAnyConflicts() {
    return !myConflicts.isEmpty();
  }

  public MultiMap<PsiElement, String> getConflicts() {
    return myConflicts;
  }

  public Set<PsiMember> getMovedMembers() {
    return myMovedMembers;
  }

  public Set<PsiMethod> getAbstractMembers() {
    return myAbstractMembers;
  }

  /**
   * Finds conflicts caused in the source class itself: remaining members that
   * reference pushed-down ones, a @FunctionalInterface losing its single
   * abstract method, super calls that will re-resolve, and missing
   * implementations / unrelated default methods inherited by moved methods.
   */
  public void checkSourceClassConflicts() {
    // Members staying behind must not reference members being pushed down.
    final PsiElement[] children = myClass.getChildren();
    for (PsiElement child : children) {
      if (child instanceof PsiMember && !myMovedMembers.contains(child)) {
        child.accept(new UsedMovedMembersConflictsCollector(child));
      }
    }

    // Pushing down the SAM of a @FunctionalInterface breaks the annotation contract.
    final PsiAnnotation annotation = AnnotationUtil.findAnnotation(myClass, CommonClassNames.JAVA_LANG_FUNCTIONAL_INTERFACE);
    if (annotation != null && myMovedMembers.contains(LambdaUtil.getFunctionalInterfaceMethod(myClass))) {
      myConflicts.putValue(annotation, RefactoringBundle.message("functional.interface.broken"));
    }

    boolean isAbstract = myClass.hasModifierProperty(PsiModifier.ABSTRACT);
    for (PsiMember member : myMovedMembers) {
      if (!member.hasModifierProperty(PsiModifier.STATIC)) {
        // A super.m() call inside a moved member may resolve to a different
        // override once the member lives in a subclass.
        member.accept(new JavaRecursiveElementWalkingVisitor() {
          @Override
          public void visitMethodCallExpression(PsiMethodCallExpression expression) {
            super.visitMethodCallExpression(expression);
            if (expression.getMethodExpression().getQualifierExpression() instanceof PsiSuperExpression) {
              final PsiMethod resolvedMethod = expression.resolveMethod();
              if (resolvedMethod != null) {
                final PsiClass resolvedClass = resolvedMethod.getContainingClass();
                if (resolvedClass != null && myClass.isInheritor(resolvedClass, true)) {
                  final PsiMethod methodBySignature = myClass.findMethodBySignature(resolvedMethod, false);
                  if (methodBySignature != null && !myMovedMembers.contains(methodBySignature)) {
                    myConflicts.putValue(expression, "Super method call will resolve to another method");
                  }
                }
              }
            }
          }
        });
      }

      if (!member.hasModifierProperty(PsiModifier.STATIC) && member instanceof PsiMethod && !myAbstractMembers.contains(member)) {
        Set<PsiClass> unrelatedDefaults = new LinkedHashSet<>();
        for (PsiMethod superMethod : ((PsiMethod)member).findSuperMethods()) {
          // Removing the only implementation from a non-abstract class leaves
          // an abstract super method unimplemented.
          if (!isAbstract && superMethod.hasModifierProperty(PsiModifier.ABSTRACT)) {
            myConflicts.putValue(member, "Non abstract " + RefactoringUIUtil.getDescription(myClass, false) +
                                         " will miss implementation of " + RefactoringUIUtil.getDescription(superMethod, false));
            break;
          }
          // Two+ default methods from unrelated interfaces become ambiguous
          // once this overriding method is pushed down.
          if (superMethod.hasModifierProperty(PsiModifier.DEFAULT)) {
            unrelatedDefaults.add(superMethod.getContainingClass());
            if (unrelatedDefaults.size() > 1) {
              List<PsiClass> supers = new ArrayList<>(unrelatedDefaults);
              supers.sort(Comparator.comparing(PsiClass::getName));
              myConflicts.putValue(member, CommonRefactoringUtil.capitalize(RefactoringUIUtil.getDescription(myClass, false) +
                                                                            " will inherit unrelated defaults from " +
                                                                            StringUtil.join(supers, aClass -> RefactoringUIUtil.getDescription(aClass, false)," and ")));
              break;
            }
          }
        }
      }
    }
  }

  /**
   * Finds conflicts in one push-down target: member placement clashes, plus
   * call sites whose qualifier type will no longer see the moved member.
   * Also delegates accessibility analysis to {@link RefactoringConflictsUtil}.
   */
  public void checkTargetClassConflicts(final PsiElement targetElement, final PsiElement context) {
    // A functional expression cannot receive pushed-down members at all.
    if (targetElement instanceof PsiFunctionalExpression) {
      myConflicts.putValue(targetElement, RefactoringBundle.message("functional.interface.broken"));
      return;
    }
    final PsiClass targetClass = targetElement instanceof PsiClass ? (PsiClass)targetElement : null;
    if (targetClass != null) {
      for (final PsiMember movedMember : myMovedMembers) {
        checkMemberPlacementInTargetClassConflict(targetClass, movedMember);
      }
    }
    // One visibility conflict is enough to report: labeled break exits both loops.
    Members:
    for (PsiMember member : myMovedMembers) {
      if (member.hasModifierProperty(PsiModifier.STATIC)) continue;
      for (PsiReference ref : ReferencesSearch.search(member, member.getResolveScope(), false)) {
        final PsiElement element = ref.getElement();
        if (element instanceof PsiReferenceExpression) {
          if (myConflicts.containsKey(element)) continue;
          final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)element;
          final PsiExpression qualifier = referenceExpression.getQualifierExpression();
          // super.m() calls that will be inlined into the target are fine.
          if (qualifier instanceof PsiSuperExpression && isSuperCallToBeInlined(member, targetClass, myClass, element)) continue;
          if (qualifier != null) {
            final PsiType qualifierType = qualifier.getType();
            PsiClass aClass = null;
            if (qualifierType instanceof PsiClassType) {
              aClass = ((PsiClassType)qualifierType).resolve();
            }
            else {
              // Qualifier may be a class reference (static-style access).
              if (qualifier instanceof PsiReferenceExpression) {
                final PsiElement resolved = ((PsiReferenceExpression)qualifier).resolve();
                if (resolved instanceof PsiClass) {
                  aClass = (PsiClass)resolved;
                }
              }
            }

            // The call site only keeps seeing the member if its qualifier type
            // is (or inherits from) the target class.
            if (!InheritanceUtil.isInheritorOrSelf(aClass, targetClass, true)) {
              myConflicts.putValue(referenceExpression, RefactoringBundle.message("pushed.members.will.not.be.visible.from.certain.call.sites"));
              break Members;
            }
          }
        }
      }
    }
    RefactoringConflictsUtil.analyzeAccessibilityConflicts(myMovedMembers, targetClass, myConflicts, null, context, myAbstractMembers);
  }

  /**
   * Reports clashes of one moved member inside one target class: duplicate
   * field name, already-overridden method, duplicate inner class name, and
   * static members landing in a non-static context.
   */
  public void checkMemberPlacementInTargetClassConflict(final PsiClass targetClass, final PsiMember movedMember) {
    if (movedMember instanceof PsiField) {
      String name = movedMember.getName();
      final PsiField field = targetClass.findFieldByName(name, false);
      if (field != null) {
        String message = RefactoringBundle.message("0.already.contains.field.1",
                                                   RefactoringUIUtil.getDescription(targetClass, false),
                                                   CommonRefactoringUtil.htmlEmphasize(name));
        myConflicts.putValue(field, CommonRefactoringUtil.capitalize(message));
      }
    }
    else if (movedMember instanceof PsiMethod) {
      final PsiModifierList modifierList = movedMember.getModifierList();
      assert modifierList != null;
      if (!modifierList.hasModifierProperty(PsiModifier.ABSTRACT)) {
        PsiMethod method = (PsiMethod)movedMember;
        final PsiMethod overrider = MethodSignatureUtil.findMethodBySuperMethod(targetClass, method, false);
        // size() != 1: the override conflicts unless its only reference is the
        // super call that push-down will inline — TODO confirm intent.
        if (overrider != null && ReferencesSearch.search(method, new LocalSearchScope(overrider)).findAll().size() != 1) {
          String message = RefactoringBundle.message("0.is.already.overridden.in.1",
                                                     RefactoringUIUtil.getDescription(method, true),
                                                     RefactoringUIUtil.getDescription(targetClass, false));
          myConflicts.putValue(overrider, CommonRefactoringUtil.capitalize(message));
        }
      }
    }
    else if (movedMember instanceof PsiClass) {
      PsiClass aClass = (PsiClass)movedMember;
      final String name = aClass.getName();
      final PsiClass[] allInnerClasses = targetClass.getAllInnerClasses();
      for (PsiClass innerClass : allInnerClasses) {
        if (innerClass.equals(movedMember)) continue;

        if (name.equals(innerClass.getName())) {
          String message = RefactoringBundle.message("0.already.contains.inner.class.named.1",
                                                     RefactoringUIUtil.getDescription(targetClass, false),
                                                     CommonRefactoringUtil.htmlEmphasize(name));
          myConflicts.putValue(innerClass, message);
        }
      }
    }

    if (movedMember.hasModifierProperty(PsiModifier.STATIC) &&
        PsiUtil.getEnclosingStaticElement(targetClass, null) == null &&
        !(targetClass.getParent() instanceof PsiFile)) {
      myConflicts.putValue(movedMember, "Static " + RefactoringUIUtil.getDescription(movedMember, false) +
                                        " can't be pushed to non-static " + RefactoringUIUtil.getDescription(targetClass, false));
    }
  }

  /**
   * Returns true when a {@code super.m()} reference will be inlined during the
   * push-down: the target class overrides the method and the reference sits
   * inside that override.
   */
  public static boolean isSuperCallToBeInlined(PsiMember member, PsiClass targetClass, PsiClass sourceClass, PsiElement referenceOnSuper) {
    if (member instanceof PsiMethod) {
      PsiSubstitutor substitutor = TypeConversionUtil.getSuperClassSubstitutor(sourceClass, targetClass, PsiSubstitutor.EMPTY);
      PsiMethod methodInTarget = MethodSignatureUtil.findMethodBySuperSignature(targetClass, ((PsiMethod)member).getSignature(substitutor), true);
      return methodInTarget != null && PsiTreeUtil.isAncestor(methodInTarget, referenceOnSuper, false);
    }
    return false;
  }

  /**
   * Flags members remaining in the source class that reference a pushed-down
   * member (and thus would break after the refactoring).
   */
  private class UsedMovedMembersConflictsCollector extends ClassMemberReferencesVisitor {
    private final PsiElement mySource;

    UsedMovedMembersConflictsCollector(PsiElement source) {
      super(myClass);
      mySource = source;
    }

    @Override
    protected void visitClassMemberReferenceElement(PsiMember classMember, PsiJavaCodeReferenceElement classMemberReference) {
      if(myMovedMembers.contains(classMember) && !myAbstractMembers.contains(classMember)) {
        String message = RefactoringBundle.message("0.uses.1.which.is.pushed.down", RefactoringUIUtil.getDescription(mySource, false),
                                                   RefactoringUIUtil.getDescription(classMember, false));
        message = CommonRefactoringUtil.capitalize(message);
        myConflicts.putValue(mySource, message);
      }
    }
  }
}
/* * Copyright 2015 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.server.http.file; import static java.util.Objects.requireNonNull; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.net.URLConnection; import java.nio.file.Path; import java.nio.file.Paths; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.Unpooled; /** * A virtual file system that provides the files requested by {@link HttpFileService}. */ @FunctionalInterface public interface HttpVfs { /** * Creates a new {@link HttpVfs} with the specified {@code rootDir} in an O/S file system. */ static HttpVfs ofFileSystem(String rootDir) { return new FileSystemHttpVfs(Paths.get(requireNonNull(rootDir, "rootDir"))); } /** * Creates a new {@link HttpVfs} with the specified {@code rootDir} in an O/S file system. */ static HttpVfs ofFileSystem(Path rootDir) { return new FileSystemHttpVfs(rootDir); } /** * Creates a new {@link HttpVfs} with the specified {@code rootDir} in the current class path. */ static HttpVfs ofClassPath(String rootDir) { return ofClassPath(HttpVfs.class.getClassLoader(), rootDir); } /** * Creates a new {@link HttpVfs} with the specified {@code rootDir} in the current class path. 
*/ static HttpVfs ofClassPath(ClassLoader classLoader, String rootDir) { return new ClassPathHttpVfs(classLoader, rootDir); } /** * Finds the file at the specified {@code path}. * * @param path an absolute path whose component separator is {@code '/'} * * @return the {@link Entry} of the file at the specified {@code path} if found. * {@link Entry#NONE} if not found. */ Entry get(String path); /** * A file entry in an {@link HttpVfs}. */ interface Entry { /** * A non-existent entry. */ Entry NONE = new Entry() { @Override public String mimeType() { throw new IllegalStateException(); } @Override public long lastModifiedMillis() { return 0; } @Override public ByteBuf readContent(ByteBufAllocator alloc) throws IOException { throw new FileNotFoundException(); } @Override public String toString() { return "none"; } }; /** * Returns the MIME type of the entry. */ String mimeType(); /** * Returns the modification time of the entry. * * @return {@code 0} if the entry does not exist. */ long lastModifiedMillis(); /** * Reads the content of the entry into a new buffer. */ ByteBuf readContent(ByteBufAllocator alloc) throws IOException; } /** * A skeletal {@link Entry} implementation. */ abstract class AbstractEntry implements Entry { private final String path; private final String mimeType; /** * Creates a new instance with the specified {@code path}. */ protected AbstractEntry(String path) { this.path = requireNonNull(path, "path"); mimeType = URLConnection.guessContentTypeFromName(path); } @Override public String mimeType() { return mimeType; } @Override public String toString() { return path; } /** * Reads the content of the entry into a new buffer. * Use {@link #readContent(ByteBufAllocator, InputStream, int)} when the length of the stream is known. 
*/ protected ByteBuf readContent(ByteBufAllocator alloc, InputStream in) throws IOException { ByteBuf buf = null; boolean success = false; try { buf = alloc.directBuffer(); for (;;) { if (buf.writeBytes(in, 8192) < 0) { break; } } success = true; if (buf.isReadable()) { return buf; } else { buf.release(); return Unpooled.EMPTY_BUFFER; } } finally { if (!success && buf != null) { buf.release(); } } } /** * Reads the content of the entry into a new buffer. * Use {@link #readContent(ByteBufAllocator, InputStream)} when the length of the stream is unknown. */ protected ByteBuf readContent(ByteBufAllocator alloc, InputStream in, int length) throws IOException { if (length == 0) { return Unpooled.EMPTY_BUFFER; } ByteBuf buf = null; boolean success = false; try { buf = alloc.directBuffer(length); int remaining = length; for (;;) { final int readBytes = buf.writeBytes(in, remaining); if (readBytes < 0) { break; } remaining -= readBytes; if (remaining <= 0) { break; } } success = true; return buf; } finally { if (!success && buf != null) { buf.release(); } } } } /** * An {@link Entry} whose content is backed by a byte array. */ final class ByteArrayEntry extends AbstractEntry { private final long lastModifiedMillis = System.currentTimeMillis(); private final byte[] content; /** * Creates a new instance with the specified {@code path} and byte array. */ public ByteArrayEntry(String path, byte[] content) { super(path); this.content = requireNonNull(content, "content"); } @Override public long lastModifiedMillis() { return lastModifiedMillis; } @Override public ByteBuf readContent(ByteBufAllocator alloc) { return Unpooled.wrappedBuffer(content); } } }
package org.apache.cassandra.index.internal; import java.nio.ByteBuffer; import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.Future; import java.util.function.BiFunction; import java.util.stream.Collectors; import java.util.stream.StreamSupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.config.ColumnDefinition; import org.apache.cassandra.cql3.Operator; import org.apache.cassandra.cql3.statements.IndexTarget; import org.apache.cassandra.db.*; import org.apache.cassandra.db.compaction.CompactionManager; import org.apache.cassandra.db.filter.RowFilter; import org.apache.cassandra.db.lifecycle.SSTableSet; import org.apache.cassandra.db.lifecycle.View; import org.apache.cassandra.db.marshal.AbstractType; import org.apache.cassandra.db.marshal.CollectionType; import org.apache.cassandra.db.partitions.PartitionIterator; import org.apache.cassandra.db.partitions.PartitionUpdate; import org.apache.cassandra.db.rows.*; import org.apache.cassandra.dht.LocalPartitioner; import org.apache.cassandra.exceptions.InvalidRequestException; import org.apache.cassandra.index.Index; import org.apache.cassandra.index.IndexRegistry; import org.apache.cassandra.index.SecondaryIndexBuilder; import org.apache.cassandra.index.internal.composites.CompositesSearcher; import org.apache.cassandra.index.internal.keys.KeysSearcher; import org.apache.cassandra.index.transactions.IndexTransaction; import org.apache.cassandra.index.transactions.UpdateTransaction; import org.apache.cassandra.io.sstable.ReducingKeyIterator; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.schema.IndexMetadata; import org.apache.cassandra.utils.FBUtilities; import org.apache.cassandra.utils.concurrent.OpOrder; import org.apache.cassandra.utils.concurrent.Refs; /** * Clone of KeysIndex used in CassandraIndexTest#testCustomIndexWithCFS to verify * behaviour of 
flushing CFS backed CUSTOM indexes */
public class CustomCassandraIndex implements Index
{
    // Fix: was LoggerFactory.getLogger(CassandraIndex.class), a copy-paste left over from
    // the class this clone was derived from; log under this class's own category.
    private static final Logger logger = LoggerFactory.getLogger(CustomCassandraIndex.class);

    // The base table this index is attached to.
    public final ColumnFamilyStore baseCfs;
    protected IndexMetadata metadata;
    // The hidden table backing the index itself; created in setMetadata().
    protected ColumnFamilyStore indexCfs;
    protected ColumnDefinition indexedColumn;
    protected CassandraIndexFunctions functions;

    public CustomCassandraIndex(ColumnFamilyStore baseCfs, IndexMetadata indexDef)
    {
        this.baseCfs = baseCfs;
        setMetadata(indexDef);
    }

    /**
     * Returns true if an index of this type can support search predicates of the form [column] OPERATOR [value].
     * Only equality is supported here.
     * @param indexedColumn the indexed column
     * @param operator the relational operator in the predicate
     * @return true iff the operator is EQ
     */
    protected boolean supportsOperator(ColumnDefinition indexedColumn, Operator operator)
    {
        return operator.equals(Operator.EQ);
    }

    public ColumnDefinition getIndexedColumn()
    {
        return indexedColumn;
    }

    public ClusteringComparator getIndexComparator()
    {
        return indexCfs.metadata.comparator;
    }

    public ColumnFamilyStore getIndexCfs()
    {
        return indexCfs;
    }

    public void register(IndexRegistry registry)
    {
        registry.registerIndex(this);
    }

    public Callable<?> getInitializationTask()
    {
        // if we're just linking in the index on an already-built index post-restart
        // we've nothing to do. Otherwise, submit for building via SecondaryIndexBuilder
        return isBuilt() ? null : getBuildIndexTask();
    }

    public IndexMetadata getIndexMetadata()
    {
        return metadata;
    }

    public String getIndexName()
    {
        // should return metadata.name, see CASSANDRA-10127
        return indexCfs.name;
    }

    public Optional<ColumnFamilyStore> getBackingTable()
    {
        return indexCfs == null ?
Optional.empty() : Optional.of(indexCfs);
    }

    // Flushes the backing index table synchronously.
    public Callable<Void> getBlockingFlushTask()
    {
        return () -> {
            indexCfs.forceBlockingFlush();
            return null;
        };
    }

    // Marks the index removed in the system keyspace, then tears down the backing CFS.
    public Callable<?> getInvalidateTask()
    {
        return () -> {
            markRemoved();
            invalidate();
            return null;
        };
    }

    // Re-reads index options and reloads the backing table after a schema change.
    public Callable<?> getMetadataReloadTask(IndexMetadata indexDef)
    {
        setMetadata(indexDef);
        return () -> {
            indexCfs.metadata.reloadIndexMetadataProperties(baseCfs.metadata);
            indexCfs.reload();
            return null;
        };
    }

    // Rebuilds all metadata-derived state, including the hidden index table.
    private void setMetadata(IndexMetadata indexDef)
    {
        metadata = indexDef;
        functions = getFunctions(baseCfs.metadata, indexDef);
        CFMetaData cfm = indexCfsMetadata(baseCfs.metadata, indexDef);
        indexCfs = ColumnFamilyStore.createColumnFamilyStore(baseCfs.keyspace,
                                                             cfm.cfName,
                                                             cfm,
                                                             baseCfs.getTracker().loadsstables);
        assert indexDef.columns.size() == 1 : "Build in indexes on multiple target columns are not supported";
        indexedColumn = indexDef.indexedColumn(baseCfs.metadata);
    }

    // Drops index SSTables covering data truncated from the base table.
    public Callable<?> getTruncateTask(final long truncatedAt)
    {
        return () -> {
            indexCfs.discardSSTables(truncatedAt);
            return null;
        };
    }

    public boolean indexes(PartitionColumns columns)
    {
        // if we have indexes on the partition key or clustering columns, return true
        return isPrimaryKeyIndex() || columns.contains(indexedColumn);
    }

    // True when this index can serve a predicate on the given column with the given operator.
    public boolean supportsExpression(ColumnDefinition column, Operator operator)
    {
        return indexedColumn.name.equals(column.name)
               && supportsOperator(indexedColumn, operator);
    }

    private boolean supportsExpression(RowFilter.Expression expression)
    {
        return supportsExpression(expression.column(), expression.operator());
    }

    public long getEstimatedResultRows()
    {
        return indexCfs.getMeanColumns();
    }

    /**
     * No post processing of query results, just return them unchanged
     */
    public BiFunction<PartitionIterator, ReadCommand, PartitionIterator> postProcessorFor(ReadCommand command)
    {
        return (partitionIterator, readCommand) -> partitionIterator;
    }

    // Strips the expression this index serves from the filter; the rest must be
    // re-checked against the rows the index returns.
    public RowFilter getPostIndexQueryFilter(RowFilter filter)
    {
        return getTargetExpression(filter.getExpressions()).map(filter::without)
                                                           .orElse(filter);
    }

    // First expression in the filter that this index can serve, if any.
    private Optional<RowFilter.Expression> getTargetExpression(List<RowFilter.Expression> expressions)
    {
        return expressions.stream().filter(this::supportsExpression).findFirst();
    }

    // Deliberately returns null: this test clone never serves reads.
    public Index.Searcher searcherFor(ReadCommand command)
    {
        return null;
        /*
        Optional<RowFilter.Expression> target = getTargetExpression(command.rowFilter().getExpressions());

        if (target.isPresent())
        {
            target.get().validateForIndexing();
            switch (getIndexMetadata().indexType)
            {
                case COMPOSITES:
                    return new CompositesSearcher(command, target.get(), this);
                case KEYS:
                    return new KeysSearcher(command, target.get(), this);

                default:
                    throw new IllegalStateException(String.format("Unsupported index type %s for index %s on %s",
                                                                  metadata.indexType,
                                                                  metadata.name,
                                                                  indexedColumn.name.toString()));
            }
        }

        return null;
        */
    }

    // Dispatches validation to the routine matching the kind of the indexed column.
    public void validate(PartitionUpdate update) throws InvalidRequestException
    {
        switch (indexedColumn.kind)
        {
            case PARTITION_KEY:
                validatePartitionKey(update.partitionKey());
                break;
            case CLUSTERING:
                validateClusterings(update);
                break;
            case REGULAR:
                validateRows(update);
                break;
            case STATIC:
                validateRows(Collections.singleton(update.staticRow()));
                break;
        }
    }

    // KEYS-style layout: the index row is clustered only by the base partition key.
    protected CBuilder buildIndexClusteringPrefix(ByteBuffer partitionKey,
                                                  ClusteringPrefix prefix,
                                                  CellPath path)
    {
        CBuilder builder = CBuilder.create(getIndexComparator());
        builder.add(partitionKey);
        return builder;
    }

    // For a regular-column index the indexed value is simply the cell value.
    protected ByteBuffer getIndexedValue(ByteBuffer partitionKey,
                                         Clustering clustering,
                                         CellPath path,
                                         ByteBuffer cellValue)
    {
        return cellValue;
    }

    public IndexEntry decodeEntry(DecoratedKey indexedValue, Row indexEntry)
    {
        throw new UnsupportedOperationException("KEYS indexes do not use a specialized index entry format");
    }

    // An index entry is stale when the base row no longer carries a live cell
    // with the indexed value.
    public boolean isStale(Row row, ByteBuffer indexValue, int nowInSec)
    {
        if (row == null)
            return true;

        Cell cell = row.getCell(indexedColumn);

        return (cell == null
                || !cell.isLive(nowInSec)
                || indexedColumn.type.compare(indexValue,
cell.value()) != 0);
    }

    // Returns the per-update Indexer that mirrors base-table mutations into the index table.
    // `key`, `nowInSec` and `opGroup` are captured for the lifetime of the transaction.
    public Indexer indexerFor(final DecoratedKey key,
                              final int nowInSec,
                              final OpOrder.Group opGroup,
                              final IndexTransaction.Type transactionType)
    {
        return new Indexer()
        {
            public void begin()
            {
            }

            public void partitionDelete(DeletionTime deletionTime)
            {
            }

            public void rangeTombstone(RangeTombstone tombstone)
            {
            }

            public void insertRow(Row row)
            {
                if (isPrimaryKeyIndex())
                {
                    // PK indexes derive liveness from the whole row, not a single cell.
                    indexPrimaryKey(row.clustering(),
                                    getPrimaryKeyIndexLiveness(row),
                                    row.deletion());
                }
                else
                {
                    if (indexedColumn.isComplex())
                        indexCells(row.clustering(), row.getComplexColumnData(indexedColumn));
                    else
                        indexCell(row.clustering(), row.getCell(indexedColumn));
                }
            }

            public void removeRow(Row row)
            {
                if (isPrimaryKeyIndex())
                    indexPrimaryKey(row.clustering(), row.primaryKeyLivenessInfo(), row.deletion());

                // For a PK index the indexed column has no cells, so these are no-ops.
                if (indexedColumn.isComplex())
                    removeCells(row.clustering(), row.getComplexColumnData(indexedColumn));
                else
                    removeCell(row.clustering(), row.getCell(indexedColumn));
            }

            public void updateRow(Row oldRow, Row newRow)
            {
                if (isPrimaryKeyIndex())
                    indexPrimaryKey(newRow.clustering(),
                                    newRow.primaryKeyLivenessInfo(),
                                    newRow.deletion());

                // Index new values, then drop the entries for the replaced ones.
                if (indexedColumn.isComplex())
                {
                    indexCells(newRow.clustering(), newRow.getComplexColumnData(indexedColumn));
                    removeCells(oldRow.clustering(), oldRow.getComplexColumnData(indexedColumn));
                }
                else
                {
                    indexCell(newRow.clustering(), newRow.getCell(indexedColumn));
                    removeCell(oldRow.clustering(), oldRow.getCell(indexedColumn));
                }
            }

            public void finish()
            {
            }

            private void indexCells(Clustering clustering, Iterable<Cell> cells)
            {
                if (cells == null)
                    return;

                for (Cell cell : cells)
                    indexCell(clustering, cell);
            }

            private void indexCell(Clustering clustering, Cell cell)
            {
                // Dead or absent cells produce no index entry.
                if (cell == null || !cell.isLive(nowInSec))
                    return;

                insert(key.getKey(),
                       clustering,
                       cell,
                       LivenessInfo.create(cell.timestamp(), cell.ttl(), cell.localDeletionTime()),
                       opGroup);
            }

            private void removeCells(Clustering clustering, Iterable<Cell> cells)
            {
                if (cells == null)
                    return;

                for (Cell cell : cells)
                    removeCell(clustering, cell);
            }

            private void removeCell(Clustering clustering, Cell cell)
            {
                if (cell == null || !cell.isLive(nowInSec))
                    return;

                delete(key.getKey(), clustering, cell, opGroup, nowInSec);
            }

            // Writes (and/or tombstones) the index entry derived from a primary-key column.
            private void indexPrimaryKey(final Clustering clustering,
                                         final LivenessInfo liveness,
                                         final Row.Deletion deletion)
            {
                if (liveness.timestamp() != LivenessInfo.NO_TIMESTAMP)
                    insert(key.getKey(), clustering, null, liveness, opGroup);

                if (!deletion.isLive())
                    delete(key.getKey(), clustering, deletion.time(), opGroup);
            }

            // The index entry must stay live as long as any cell of the row is live,
            // so take the max timestamp (and its ttl) across the row.
            private LivenessInfo getPrimaryKeyIndexLiveness(Row row)
            {
                long timestamp = row.primaryKeyLivenessInfo().timestamp();
                int ttl = row.primaryKeyLivenessInfo().ttl();
                for (Cell cell : row.cells())
                {
                    long cellTimestamp = cell.timestamp();
                    if (cell.isLive(nowInSec))
                    {
                        if (cellTimestamp > timestamp)
                        {
                            timestamp = cellTimestamp;
                            ttl = cell.ttl();
                        }
                    }
                }
                return LivenessInfo.create(baseCfs.metadata, timestamp, ttl, nowInSec);
            }
        };
    }

    /**
     * Specific to internal indexes, this is called by a
     * searcher when it encounters a stale entry in the index
     * @param indexKey the partition key in the index table
     * @param indexClustering the clustering in the index table
     * @param deletion deletion timestamp etc
     * @param opGroup the operation under which to perform the deletion
     */
    public void deleteStaleEntry(DecoratedKey indexKey,
                                 Clustering indexClustering,
                                 DeletionTime deletion,
                                 OpOrder.Group opGroup)
    {
        doDelete(indexKey, indexClustering, deletion, opGroup);
        logger.debug("Removed index entry for stale value {}", indexKey);
    }

    /**
     * Called when adding a new entry to the index
     */
    private void insert(ByteBuffer rowKey,
                        Clustering clustering,
                        Cell cell,
                        LivenessInfo info,
                        OpOrder.Group opGroup)
    {
        // The indexed value becomes the partition key of the index table.
        DecoratedKey valueKey = getIndexKeyFor(getIndexedValue(rowKey,
                                                               clustering,
                                                               cell));
        Row row = BTreeRow.noCellLiveRow(buildIndexClustering(rowKey, clustering, cell), info);
        PartitionUpdate upd = partitionUpdate(valueKey, row);
        indexCfs.apply(upd, UpdateTransaction.NO_OP, opGroup, null);
logger.debug("Inserted entry into index for value {}", valueKey);
    }

    /**
     * Called when deleting entries on non-primary key columns
     */
    private void delete(ByteBuffer rowKey,
                        Clustering clustering,
                        Cell cell,
                        OpOrder.Group opGroup,
                        int nowInSec)
    {
        DecoratedKey valueKey = getIndexKeyFor(getIndexedValue(rowKey,
                                                               clustering,
                                                               cell));
        doDelete(valueKey,
                 buildIndexClustering(rowKey, clustering, cell),
                 // Tombstone carries the deleted cell's timestamp.
                 new DeletionTime(cell.timestamp(), nowInSec),
                 opGroup);
    }

    /**
     * Called when deleting entries from indexes on primary key columns
     */
    private void delete(ByteBuffer rowKey,
                        Clustering clustering,
                        DeletionTime deletion,
                        OpOrder.Group opGroup)
    {
        DecoratedKey valueKey = getIndexKeyFor(getIndexedValue(rowKey,
                                                               clustering,
                                                               null));
        doDelete(valueKey,
                 buildIndexClustering(rowKey, clustering, null),
                 deletion,
                 opGroup);
    }

    // Applies a row tombstone to the index table for the given entry.
    private void doDelete(DecoratedKey indexKey,
                          Clustering indexClustering,
                          DeletionTime deletion,
                          OpOrder.Group opGroup)
    {
        Row row = BTreeRow.emptyDeletedRow(indexClustering, Row.Deletion.regular(deletion));
        PartitionUpdate upd = partitionUpdate(indexKey, row);
        indexCfs.apply(upd, UpdateTransaction.NO_OP, opGroup, null);
        logger.debug("Removed index entry for value {}", indexKey);
    }

    private void validatePartitionKey(DecoratedKey partitionKey) throws InvalidRequestException
    {
        assert indexedColumn.isPartitionKey();
        validateIndexedValue(getIndexedValue(partitionKey.getKey(), null, null ));
    }

    private void validateClusterings(PartitionUpdate update) throws InvalidRequestException
    {
        assert indexedColumn.isClusteringColumn();
        for (Row row : update)
            validateIndexedValue(getIndexedValue(null, row.clustering(), null));
    }

    // Validates every indexed cell value in the update (complex columns cell-by-cell).
    private void validateRows(Iterable<Row> rows)
    {
        assert !indexedColumn.isPrimaryKeyColumn();
        for (Row row : rows)
        {
            if (indexedColumn.isComplex())
            {
                ComplexColumnData data = row.getComplexColumnData(indexedColumn);
                if (data != null)
                {
                    for (Cell cell : data)
                    {
                        validateIndexedValue(getIndexedValue(null, null, cell.path(), cell.value()));
                    }
                }
            }
            else
            {
                validateIndexedValue(getIndexedValue(null, null, row.getCell(indexedColumn)));
            }
        }
    }

    // Rejects values too large to serve as an index partition key (2-byte length limit).
    private void validateIndexedValue(ByteBuffer value)
    {
        if (value != null && value.remaining() >= FBUtilities.MAX_UNSIGNED_SHORT)
            throw new InvalidRequestException(String.format(
                                                           "Cannot index value of size %d for index %s on %s.%s(%s) (maximum allowed size=%d)",
                                                           value.remaining(),
                                                           getIndexName(),
                                                           baseCfs.metadata.ksName,
                                                           baseCfs.metadata.cfName,
                                                           indexedColumn.name.toString(),
                                                           FBUtilities.MAX_UNSIGNED_SHORT));
    }

    private ByteBuffer getIndexedValue(ByteBuffer rowKey,
                                       Clustering clustering,
                                       Cell cell)
    {
        return getIndexedValue(rowKey,
                               clustering,
                               cell == null ? null : cell.path(),
                               cell == null ? null : cell.value()
        );
    }

    private Clustering buildIndexClustering(ByteBuffer rowKey,
                                            Clustering clustering,
                                            Cell cell)
    {
        return buildIndexClusteringPrefix(rowKey,
                                          clustering,
                                          cell == null ? null : cell.path()).build();
    }

    private DecoratedKey getIndexKeyFor(ByteBuffer value)
    {
        return indexCfs.decorateKey(value);
    }

    private PartitionUpdate partitionUpdate(DecoratedKey valueKey, Row row)
    {
        return PartitionUpdate.singleRowUpdate(indexCfs.metadata, valueKey, row);
    }

    // Quiesces and drops the backing index table.
    private void invalidate()
    {
        // interrupt in-progress compactions
        Collection<ColumnFamilyStore> cfss = Collections.singleton(indexCfs);
        CompactionManager.instance.interruptCompactionForCFs(cfss, true);
        CompactionManager.instance.waitForCessation(cfss);
        // Wait for in-flight writes, flush, wait for in-flight reads, then invalidate.
        indexCfs.keyspace.writeOrder.awaitNewBarrier();
        indexCfs.forceBlockingFlush();
        indexCfs.readOrdering.awaitNewBarrier();
        indexCfs.invalidate();
    }

    // Build status is persisted in the system keyspace across restarts.
    private boolean isBuilt()
    {
        return SystemKeyspace.isIndexBuilt(baseCfs.keyspace.getName(), getIndexName());
    }

    private void markBuilt()
    {
        SystemKeyspace.setIndexBuilt(baseCfs.keyspace.getName(), getIndexName());
    }

    private void markRemoved()
    {
        SystemKeyspace.setIndexRemoved(baseCfs.keyspace.getName(), getIndexName());
    }

    private boolean isPrimaryKeyIndex()
    {
        return indexedColumn.isPrimaryKeyColumn();
    }

    private Callable<?> getBuildIndexTask()
    {
        return ()
-> {
            buildBlocking();
            return null;
        };
    }

    // Synchronously (re)builds the index from all canonical SSTables of the base table.
    private void buildBlocking()
    {
        baseCfs.forceBlockingFlush();

        // Reference the selected SSTables so they cannot be deleted mid-build.
        try (ColumnFamilyStore.RefViewFragment viewFragment = baseCfs.selectAndReference(View.select(SSTableSet.CANONICAL));
             Refs<SSTableReader> sstables = viewFragment.refs)
        {
            if (sstables.isEmpty())
            {
                logger.info("No SSTable data for {}.{} to build index {} from, marking empty index as built",
                            baseCfs.metadata.ksName,
                            baseCfs.metadata.cfName,
                            getIndexName());
                markBuilt();
                return;
            }

            logger.info("Submitting index build of {} for data in {}",
                        getIndexName(),
                        getSSTableNames(sstables));

            SecondaryIndexBuilder builder = new SecondaryIndexBuilder(baseCfs,
                                                                      Collections.singleton(this),
                                                                      new ReducingKeyIterator(sstables));
            Future<?> future = CompactionManager.instance.submitIndexBuild(builder);
            FBUtilities.waitOnFuture(future);
            indexCfs.forceBlockingFlush();
            markBuilt();
        }
        logger.info("Index build of {} complete", getIndexName());
    }

    // Human-readable list of SSTables, for logging only.
    private static String getSSTableNames(Collection<SSTableReader> sstables)
    {
        return StreamSupport.stream(sstables.spliterator(), false)
                            .map(SSTableReader::toString)
                            .collect(Collectors.joining(", "));
    }

    /**
     * Construct the CFMetadata for an index table, the clustering columns in the index table
     * vary dependent on the kind of the indexed value.
     * @param baseCfsMetadata metadata of the base table
     * @param indexMetadata definition of the index being backed
     * @return metadata for the hidden index table
     */
    public static final CFMetaData indexCfsMetadata(CFMetaData baseCfsMetadata, IndexMetadata indexMetadata)
    {
        CassandraIndexFunctions utils = getFunctions(baseCfsMetadata, indexMetadata);
        ColumnDefinition indexedColumn = indexMetadata.indexedColumn(baseCfsMetadata);
        AbstractType<?> indexedValueType = utils.getIndexedValueType(indexedColumn);
        // The index table is partitioned by the indexed value itself (LocalPartitioner),
        // sharing the base table's cfId.
        CFMetaData.Builder builder = CFMetaData.Builder.create(baseCfsMetadata.ksName,
                                                               baseCfsMetadata.indexColumnFamilyName(indexMetadata))
                                                       .withId(baseCfsMetadata.cfId)
                                                       .withPartitioner(new LocalPartitioner(indexedValueType))
                                                       .addPartitionKey(indexedColumn.name, indexedColumn.type);

        builder.addClusteringColumn("partition_key", baseCfsMetadata.partitioner.partitionOrdering());
        builder = utils.addIndexClusteringColumns(builder, baseCfsMetadata, indexedColumn);
        return builder.build().reloadIndexMetadataProperties(baseCfsMetadata);
    }

    /**
     * Factory method for new CassandraIndex instances
     * @param baseCfs the base table
     * @param indexMetadata definition of the index
     * @return a concrete CassandraIndex for the indexed column's kind
     */
    public static final CassandraIndex newIndex(ColumnFamilyStore baseCfs, IndexMetadata indexMetadata)
    {
        return getFunctions(baseCfs.metadata, indexMetadata).newIndexInstance(baseCfs, indexMetadata);
    }

    // Selects the strategy object matching the index type and the indexed column's kind.
    private static CassandraIndexFunctions getFunctions(CFMetaData baseCfMetadata,
                                                        IndexMetadata indexDef)
    {
        if (indexDef.isKeys())
            return CassandraIndexFunctions.KEYS_INDEX_FUNCTIONS;

        ColumnDefinition indexedColumn = indexDef.indexedColumn(baseCfMetadata);
        if (indexedColumn.type.isCollection() && indexedColumn.type.isMultiCell())
        {
            switch (((CollectionType)indexedColumn.type).kind)
            {
                case LIST:
                    return CassandraIndexFunctions.COLLECTION_VALUE_INDEX_FUNCTIONS;
                case SET:
                    return CassandraIndexFunctions.COLLECTION_KEY_INDEX_FUNCTIONS;
                case MAP:
                    // Maps may be indexed on keys, entries or values, chosen by index options.
                    if (indexDef.options.containsKey(IndexTarget.INDEX_KEYS_OPTION_NAME))
                        return CassandraIndexFunctions.COLLECTION_KEY_INDEX_FUNCTIONS;
                    else if (indexDef.options.containsKey(IndexTarget.INDEX_ENTRIES_OPTION_NAME))
                        return CassandraIndexFunctions.COLLECTION_ENTRY_INDEX_FUNCTIONS;
                    else
                        return CassandraIndexFunctions.COLLECTION_VALUE_INDEX_FUNCTIONS;
            }
        }

        switch (indexedColumn.kind)
        {
            case CLUSTERING:
                return CassandraIndexFunctions.CLUSTERING_COLUMN_INDEX_FUNCTIONS;
            case REGULAR:
                return CassandraIndexFunctions.REGULAR_COLUMN_INDEX_FUNCTIONS;
            case PARTITION_KEY:
                return CassandraIndexFunctions.PARTITION_KEY_INDEX_FUNCTIONS;
            //case COMPACT_VALUE:
            //    return new CompositesIndexOnCompactValue();
        }
        throw new AssertionError();
    }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.notification; import com.intellij.execution.filters.HyperlinkInfo; import com.intellij.execution.impl.ConsoleViewUtil; import com.intellij.execution.impl.EditorHyperlinkSupport; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.icons.AllIcons; import com.intellij.notification.impl.NotificationSettings; import com.intellij.notification.impl.NotificationsConfigurationImpl; import com.intellij.notification.impl.NotificationsManagerImpl; import com.intellij.notification.impl.ui.NotificationsUtil; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.*; import com.intellij.openapi.editor.actions.ScrollToTheEndToolbarAction; import com.intellij.openapi.editor.colors.EditorColorsListener; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.colors.TextAttributesKey; import com.intellij.openapi.editor.colors.impl.DelegateColorScheme; import com.intellij.openapi.editor.event.EditorMouseEvent; import com.intellij.openapi.editor.ex.*; import com.intellij.openapi.editor.ex.util.EditorUtil; import com.intellij.openapi.editor.impl.EditorImpl; import com.intellij.openapi.editor.markup.*; import com.intellij.openapi.project.DumbAwareAction; import 
com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ProjectManagerListener;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.ColorUtil;
import com.intellij.ui.Gray;
import com.intellij.ui.JBColor;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.EditorPopupHandler;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.DateFormatUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.List;

/**
 * Read-only editor-based console that renders the Event Log tool window.
 *
 * @author peter
 */
class EventLogConsole {
  // User data keys attached to range highlighters so a highlighter can be traced
  // back to the notification (and its group) that produced it.
  private static final Key<String> GROUP_ID = Key.create("GROUP_ID");
  private static final Key<String> NOTIFICATION_ID = Key.create("NOTIFICATION_ID");

  // Editor is created lazily on first access (see createLogEditor()).
  private final NotNullLazyValue<Editor> myLogEditor = new NotNullLazyValue<Editor>() {
    @NotNull
    @Override
    protected Editor compute() {
      return createLogEditor();
    }
  };

  private final NotNullLazyValue<EditorHyperlinkSupport> myHyperlinkSupport = new NotNullLazyValue<EditorHyperlinkSupport>() {
    @NotNull
    @Override
    protected EditorHyperlinkSupport compute() {
      return new EditorHyperlinkSupport(getConsoleEditor(), myProjectModel.getProject());
    }
  };
  private final LogModel myProjectModel;

  // Last date header printed, so a new header is emitted only when the day changes.
  private String myLastDate;

  private List<RangeHighlighter> myNMoreHighlighters;

  EventLogConsole(LogModel model) {
    myProjectModel = model;
  }

  // Builds the console editor: console scheme/fonts, error stripe, custom popup menu,
  // and a listener that releases the editor when its project closes.
  private Editor createLogEditor() {
    Project project = myProjectModel.getProject();
    final EditorEx editor = ConsoleViewUtil.setupConsoleEditor(project, false, false);
    editor.getSettings().setWhitespacesShown(false);
    installNotificationsFont(editor);
    // NOTE(review): connect() without a parent disposable — presumably tied to the
    // application bus lifetime; the projectClosed callback releases the editor.
    myProjectModel.getProject().getMessageBus().connect().subscribe(ProjectManager.TOPIC, new ProjectManagerListener() {
      @Override
      public void projectClosed(Project project) {
        if (project == myProjectModel.getProject()) {
          EditorFactory.getInstance().releaseEditor(editor);
        }
      }
    });

    ((EditorMarkupModel)editor.getMarkupModel()).setErrorStripeVisible(true);

    final ClearLogAction clearLog = new ClearLogAction(this);
    clearLog.registerCustomShortcutSet(ActionManager.getInstance().getAction(IdeActions.CONSOLE_CLEAR_ALL).getShortcutSet(),
                                       editor.getContentComponent());

    editor.setContextMenuGroupId(null); // disabling default context menu
    editor.addEditorMouseListener(new EditorPopupHandler() {
      @Override
      public void invokePopup(final EditorMouseEvent event) {
        final ActionManager actionManager = ActionManager.getInstance();
        DefaultActionGroup actions = createPopupActions(actionManager, clearLog, editor, event);
        final ActionPopupMenu menu = actionManager.createActionPopupMenu(ActionPlaces.EDITOR_POPUP, actions);
        final MouseEvent mouseEvent = event.getMouseEvent();
        menu.getComponent().show(mouseEvent.getComponent(), mouseEvent.getX(), mouseEvent.getY());
      }
    });
    return editor;
  }

  // Makes the editor use the Notifications font settings, tracking global scheme changes.
  private void installNotificationsFont(@NotNull final EditorEx editor) {
    // Delegate scheme that overrides only the font name/size; setters are swallowed
    // so the editor cannot mutate the shared global scheme.
    final DelegateColorScheme globalScheme = new DelegateColorScheme(EditorColorsManager.getInstance().getGlobalScheme()) {
      @Override
      public String getEditorFontName() {
        return getConsoleFontName();
      }

      @Override
      public int getEditorFontSize() {
        return getConsoleFontSize();
      }

      @Override
      public String getConsoleFontName() {
        return NotificationsUtil.getFontName();
      }

      @Override
      public int getConsoleFontSize() {
        Pair<String, Integer> data = NotificationsUtil.getFontData();
        return data == null ?
super.getConsoleFontSize() : data.second; } @Override public void setEditorFontName(String fontName) { } @Override public void setConsoleFontName(String fontName) { } @Override public void setEditorFontSize(int fontSize) { } @Override public void setConsoleFontSize(int fontSize) { } }; ApplicationManager.getApplication().getMessageBus().connect(myProjectModel).subscribe(EditorColorsManager.TOPIC, new EditorColorsListener() { @Override public void globalSchemeChange(EditorColorsScheme scheme) { globalScheme.setDelegate(EditorColorsManager.getInstance().getGlobalScheme()); editor.reinitSettings(); } }); editor.setColorsScheme(ConsoleViewUtil.updateConsoleColorScheme(editor.createBoundColorSchemeDelegate(globalScheme))); if (editor instanceof EditorImpl) { ((EditorImpl)editor).setUseEditorAntialiasing(false); } } private static DefaultActionGroup createPopupActions(ActionManager actionManager, ClearLogAction action, EditorEx editor, EditorMouseEvent event) { AnAction[] children = ((ActionGroup)actionManager.getAction(IdeActions.GROUP_CONSOLE_EDITOR_POPUP)).getChildren(null); DefaultActionGroup group = new DefaultActionGroup(); group.add(new EventLogToolWindowFactory.ToggleSoftWraps(editor)); group.add(new ScrollToTheEndToolbarAction(editor)); group.addSeparator(); addConfigureNotificationAction(editor, event, group); group.addAll(children); group.addSeparator(); group.add(action); return group; } private static void addConfigureNotificationAction(@NotNull EditorEx editor, @NotNull EditorMouseEvent event, @NotNull DefaultActionGroup actions) { LogicalPosition position = editor.xyToLogicalPosition(event.getMouseEvent().getPoint()); if (EditorUtil.inVirtualSpace(editor, position)) { return; } int offset = editor.logicalPositionToOffset(position); editor.getMarkupModel().processRangeHighlightersOverlappingWith(offset, offset, rangeHighlighter -> { String groupId = GROUP_ID.get(rangeHighlighter); if (groupId != null) { addConfigureNotificationAction(actions, groupId); 
return false; } return true; }); } private static void addConfigureNotificationAction(@NotNull DefaultActionGroup actions, @NotNull String groupId) { DefaultActionGroup displayTypeGroup = new DefaultActionGroup("Notification Display Type", true); NotificationSettings settings = NotificationsConfigurationImpl.getSettings(groupId); NotificationDisplayType current = settings.getDisplayType(); for (NotificationDisplayType type : NotificationDisplayType.values()) { if (type != NotificationDisplayType.TOOL_WINDOW || NotificationsConfigurationImpl.getInstanceImpl().hasToolWindowCapability(groupId)) { displayTypeGroup.add(new DisplayTypeAction(settings, type, current)); } } actions.add(displayTypeGroup); actions.addSeparator(); } private static class DisplayTypeAction extends ToggleAction { private final NotificationSettings mySettings; private final NotificationDisplayType myType; private final NotificationDisplayType myCurrent; public DisplayTypeAction(@NotNull NotificationSettings settings, @NotNull NotificationDisplayType type, @NotNull NotificationDisplayType current) { super(type.getTitle()); mySettings = settings; myType = type; myCurrent = current; } @Override public boolean isSelected(AnActionEvent e) { return myType == myCurrent; } @Override public void setSelected(AnActionEvent e, boolean state) { if (state) { NotificationsConfigurationImpl.getInstanceImpl().changeSettings(mySettings.withDisplayType(myType)); } } } void doPrintNotification(final Notification notification) { Editor editor = getConsoleEditor(); if (editor.isDisposed()) { return; } Document document = editor.getDocument(); boolean scroll = document.getTextLength() == editor.getCaretModel().getOffset() || !editor.getContentComponent().hasFocus(); if (document.getTextLength() > 0) { append(document, "\n"); } String lastDate = DateFormatUtil.formatDate(notification.getTimestamp()); if (document.getTextLength() == 0 || !lastDate.equals(myLastDate)) { myLastDate = lastDate; append(document, lastDate + 
"\n"); } int startDateOffset = document.getTextLength(); String date = DateFormatUtil.formatTime(notification.getTimestamp()) + "\t"; append(document, date); int tabs = calculateTabs(editor, startDateOffset); int titleStartOffset = document.getTextLength(); int startLine = document.getLineCount() - 1; EventLog.LogEntry pair = EventLog.formatForLog(notification, StringUtil.repeatSymbol('\t', tabs)); final NotificationType type = notification.getType(); TextAttributesKey key = type == NotificationType.ERROR ? ConsoleViewContentType.LOG_ERROR_OUTPUT_KEY : type == NotificationType.INFORMATION ? ConsoleViewContentType.NORMAL_OUTPUT_KEY : ConsoleViewContentType.LOG_WARNING_OUTPUT_KEY; int msgStart = document.getTextLength(); append(document, pair.message); TextAttributes attributes = EditorColorsManager.getInstance().getGlobalScheme().getAttributes(key); int layer = HighlighterLayer.CARET_ROW + 1; RangeHighlighter highlighter = editor.getMarkupModel() .addRangeHighlighter(msgStart, document.getTextLength(), layer, attributes, HighlighterTargetArea.LINES_IN_RANGE); GROUP_ID.set(highlighter, notification.getGroupId()); NOTIFICATION_ID.set(highlighter, notification.id); for (Pair<TextRange, HyperlinkInfo> link : pair.links) { final RangeHighlighter rangeHighlighter = myHyperlinkSupport.getValue() .createHyperlink(link.first.getStartOffset() + msgStart, link.first.getEndOffset() + msgStart, null, link.second); if (link.second instanceof EventLog.ShowBalloon) { ((EventLog.ShowBalloon)link.second).setRangeHighlighter(rangeHighlighter); } } append(document, "\n"); if (scroll) { editor.getCaretModel().moveToOffset(document.getTextLength()); editor.getScrollingModel().scrollToCaret(ScrollType.MAKE_VISIBLE); } if (notification.isImportant()) { highlightNotification(notification, pair.status, startLine, document.getLineCount() - 1, titleStartOffset, pair.titleLength); } } private static int calculateTabs(@NotNull Editor editor, int startDateOffset) { Document document = 
editor.getDocument(); int startOffset = document.getTextLength(); Point dateStartPoint = editor.logicalPositionToXY(editor.offsetToLogicalPosition(startDateOffset)); Point dateEndPoint = editor.logicalPositionToXY(editor.offsetToLogicalPosition(startOffset)); int width = dateEndPoint.x - dateStartPoint.x; document.insertString(startOffset, "\n"); Point startPoint = editor.logicalPositionToXY(editor.offsetToLogicalPosition(startOffset + 1)); for (int count = 1; ; count++) { document.insertString(startOffset + count, "\t"); Point endPoint = editor.logicalPositionToXY(editor.offsetToLogicalPosition(document.getTextLength())); int tabWidth = endPoint.x - startPoint.x; if (width <= tabWidth) { document.deleteString(startOffset, document.getTextLength()); return count; } } } private void highlightNotification(final Notification notification, String message, final int startLine, final int endLine, int titleOffset, int titleLength) { final MarkupModel markupModel = getConsoleEditor().getMarkupModel(); TextAttributes bold = new TextAttributes(null, null, null, null, Font.BOLD); final RangeHighlighter colorHighlighter = markupModel .addRangeHighlighter(titleOffset, titleOffset + titleLength, HighlighterLayer.CARET_ROW + 1, bold, HighlighterTargetArea.EXACT_RANGE); Color color = notification.getType() == NotificationType.ERROR ? JBColor.RED : notification.getType() == NotificationType.WARNING ? 
JBColor.YELLOW : JBColor.GREEN; colorHighlighter.setErrorStripeMarkColor(color); colorHighlighter.setErrorStripeTooltip(message); final Runnable removeHandler = () -> { if (colorHighlighter.isValid()) { markupModel.removeHighlighter(colorHighlighter); } TextAttributes italic = new TextAttributes(Gray.x80, null, null, null, Font.PLAIN); for (int line = startLine; line < endLine; line++) { for (RangeHighlighter highlighter : myHyperlinkSupport.getValue().findAllHyperlinksOnLine(line)) { markupModel .addRangeHighlighter(highlighter.getStartOffset(), highlighter.getEndOffset(), HighlighterLayer.CARET_ROW + 2, italic, HighlighterTargetArea.EXACT_RANGE); myHyperlinkSupport.getValue().removeHyperlink(highlighter); } } }; if (!notification.isExpired()) { myProjectModel.removeHandlers.put(notification, removeHandler); } else { removeHandler.run(); } } public Editor getConsoleEditor() { return myLogEditor.getValue(); } public void clearNMore() { if (myNMoreHighlighters != null) { MarkupModel model = getConsoleEditor().getMarkupModel(); for (RangeHighlighter highlighter : myNMoreHighlighters) { model.removeHighlighter(highlighter); } myNMoreHighlighters = null; } } public void showNotification(@NotNull final List<String> ids) { clearNMore(); myNMoreHighlighters = new ArrayList<>(); EditorEx editor = (EditorEx)getConsoleEditor(); List<RangeHighlighterEx> highlighters = ContainerUtil.mapNotNull(ids, this::findHighlighter); if (!highlighters.isEmpty()) { editor.getCaretModel().moveToOffset(highlighters.get(0).getStartOffset()); editor.getScrollingModel().scrollToCaret(ScrollType.CENTER_UP); List<Point> ranges = new ArrayList<>(); Point currentRange = null; DocumentEx document = editor.getDocument(); for (RangeHighlighterEx highlighter : highlighters) { int startLine = document.getLineNumber(highlighter.getStartOffset()); int endLine = document.getLineNumber(highlighter.getEndOffset()) + 1; if (currentRange != null && startLine - 1 == currentRange.y) { currentRange.y = endLine; } 
else { ranges.add(currentRange = new Point(startLine, endLine)); } } //noinspection UseJBColor TextAttributes attributes = new TextAttributes(null, ColorUtil.mix(editor.getBackgroundColor(), new Color(0x808080), 0.1), null, EffectType.BOXED, Font.PLAIN); MarkupModelEx markupModel = editor.getMarkupModel(); for (Point range : ranges) { int start = document.getLineStartOffset(range.x); int end = document.getLineStartOffset(range.y); myNMoreHighlighters .add(markupModel.addRangeHighlighter(start, end, HighlighterLayer.CARET_ROW + 2, attributes, HighlighterTargetArea.EXACT_RANGE)); } } } @Nullable private RangeHighlighterEx findHighlighter(@NotNull final String id) { EditorEx editor = (EditorEx)getConsoleEditor(); final Ref<RangeHighlighterEx> highlighter = new Ref<>(); editor.getMarkupModel() .processRangeHighlightersOverlappingWith(0, editor.getDocument().getTextLength(), rangeHighlighter -> { if (id.equals(NOTIFICATION_ID.get(rangeHighlighter))) { highlighter.set(rangeHighlighter); return false; } return true; }); return highlighter.get(); } @Nullable public RelativePoint getRangeHighlighterLocation(RangeHighlighter range) { Editor editor = getConsoleEditor(); Project project = editor.getProject(); Window window = NotificationsManagerImpl.findWindowForBalloon(project); if (range != null && window != null) { Point point = editor.visualPositionToXY(editor.offsetToVisualPosition(range.getStartOffset())); return new RelativePoint(window, SwingUtilities.convertPoint(editor.getContentComponent(), point, window)); } return null; } private static void append(Document document, String s) { document.insertString(document.getTextLength(), s); } public static class ClearLogAction extends DumbAwareAction { private final EventLogConsole myConsole; public ClearLogAction(EventLogConsole console) { super("Clear All", "Clear the contents of the Event Log", AllIcons.Actions.GC); myConsole = console; } @Override public void update(AnActionEvent e) { Editor editor = 
e.getData(CommonDataKeys.EDITOR); e.getPresentation().setEnabled(editor != null && editor.getDocument().getTextLength() > 0); } @Override public void actionPerformed(final AnActionEvent e) { LogModel model = myConsole.myProjectModel; for (Notification notification : model.getNotifications()) { notification.expire(); model.removeNotification(notification); } model.setStatusMessage(null, 0); final Editor editor = e.getData(CommonDataKeys.EDITOR); if (editor != null) { editor.getDocument().deleteString(0, editor.getDocument().getTextLength()); } } } }
package de.terrestris.shoguncore.service; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import javax.servlet.http.HttpServletRequest; import org.hibernate.HibernateException; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.crypto.password.PasswordEncoder; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import de.terrestris.shoguncore.dao.RegistrationTokenDao; import de.terrestris.shoguncore.dao.RoleDao; import de.terrestris.shoguncore.dao.UserDao; import de.terrestris.shoguncore.helper.IdHelper; import de.terrestris.shoguncore.model.Role; import de.terrestris.shoguncore.model.User; import de.terrestris.shoguncore.model.token.RegistrationToken; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = {"classpath*:META-INF/spring/test-encoder-bean.xml"}) public class UserServiceTest extends PermissionAwareCrudServiceTest<User, UserDao<User>, UserService<User, UserDao<User>>> { @Mock private RegistrationTokenService<RegistrationToken, RegistrationTokenDao<RegistrationToken>> registrationTokenService; @Mock private RoleService<Role, 
RoleDao<Role>> roleService; @Mock private Role defaultUserRole; /** * The autowired PasswordEncoder */ @Autowired private PasswordEncoder passwordEncoder; @Override @Before public void setUp() { super.setUp(); // set the pw encoder crudService.setPasswordEncoder(passwordEncoder); } /** * @throws Exception */ public void setUpImplToTest() throws Exception { implToTest = new User(); } @Override protected UserService<User, UserDao<User>> getCrudService() { return new UserService<User, UserDao<User>>(); } @SuppressWarnings("unchecked") @Override protected Class<UserDao<User>> getDaoClass() { return (Class<UserDao<User>>) new UserDao<User>().getClass(); } @Test public void findByAccountName_shouldFindAsExpected() { String accountName = "testaccount"; User expectedUser = new User("Test", "User", accountName); // mock the dao when(dao.findByAccountName(accountName)).thenReturn(expectedUser); User actualUser = crudService.findByAccountName(accountName); verify(dao, times(1)).findByAccountName(accountName); verifyNoMoreInteractions(dao); assertEquals(expectedUser, actualUser); } @Test public void findByAccountName_shouldFindNothing() { String accountName = "nonexistingaccount"; User expectedUser = null; // mock the dao when(dao.findByAccountName(accountName)).thenReturn(expectedUser); User actualUser = crudService.findByAccountName(accountName); verify(dao, times(1)).findByAccountName(accountName); verifyNoMoreInteractions(dao); assertEquals(expectedUser, actualUser); } @Test(expected = HibernateException.class) public void findByAccountName_shouldThrowHibernateException() { String accountName = "erroraccount"; // mock the dao doThrow(new HibernateException("errormsg")) .when(dao).findByAccountName(accountName); crudService.findByAccountName(accountName); verify(dao, times(1)).findByAccountName(accountName); verifyNoMoreInteractions(dao); } @Test public void findByEmail_shouldFindAsExpected() { String eMail = "mail@example.com"; User expectedUser = new User(); 
expectedUser.setEmail(eMail); // mock the dao when(dao.findByEmail(eMail)).thenReturn(expectedUser); User actualUser = crudService.findByEmail(eMail); verify(dao, times(1)).findByEmail(eMail); verifyNoMoreInteractions(dao); assertEquals(expectedUser, actualUser); } @Test public void findByEmail_shouldFindNothing() { String eMail = "nonexisting@example.com"; User expectedUser = null; // mock the dao when(dao.findByEmail(eMail)).thenReturn(expectedUser); User actualUser = crudService.findByEmail(eMail); verify(dao, times(1)).findByEmail(eMail); verifyNoMoreInteractions(dao); assertEquals(expectedUser, actualUser); } @Test(expected = HibernateException.class) public void findByEmail_shouldThrowHibernateException() { String email = "errormail@example.com"; // mock the dao doThrow(new HibernateException("errormsg")) .when(dao).findByEmail(email); crudService.findByEmail(email); verify(dao, times(1)).findByEmail(email); verifyNoMoreInteractions(dao); } @Test public void registerUser_shouldRegisterNonExistingUserAsExpected() throws Exception { String email = "test@example.com"; String rawPassword = "p@sSw0rd"; boolean isActive = false; // mock the dao // there is no existing user -> return null (in the findByEmail method) when(dao.findByEmail(email)).thenReturn(null); // the saveOrUpdate will be called in the persistNewUser method doNothing().when(dao).saveOrUpdate(any(User.class)); // mock the registrationTokenService (which sends the mail) doNothing().when(registrationTokenService) .sendRegistrationActivationMail( any(HttpServletRequest.class), any(User.class)); HttpServletRequest requestMock = mock(HttpServletRequest.class); // create user instance User user = new User(); user.setEmail(email); user.setAccountName(email); user.setPassword(rawPassword); user.setActive(isActive); // finally call the method that is tested here User registeredUser = crudService.registerUser(user, requestMock); verify(dao, times(1)).findByEmail(email); verify(dao, 
times(1)).saveOrUpdate(any(User.class)); verifyNoMoreInteractions(dao); verify(registrationTokenService, times(1)) .sendRegistrationActivationMail( any(HttpServletRequest.class), any(User.class)); verifyNoMoreInteractions(registrationTokenService); assertTrue(passwordEncoder.matches(rawPassword, registeredUser.getPassword())); assertEquals(email, registeredUser.getAccountName()); assertEquals(email, registeredUser.getEmail()); assertEquals(isActive, registeredUser.isActive()); } @Test public void registerUser_shouldThrowExceptionIfUserAlreadyExists() { String email = "test@example.com"; String rawPassword = "p@sSw0rd"; boolean isActive = false; User existingUser = new User(); existingUser.setEmail(email); // there is an existing user -> return null (in the findByEmail method) when(dao.findByEmail(email)).thenReturn(existingUser); HttpServletRequest requestMock = mock(HttpServletRequest.class); // finally call the method that is tested here try { // create user instance User user = new User(); user.setEmail(email); user.setAccountName(email); user.setPassword(rawPassword); user.setActive(isActive); crudService.registerUser(user, requestMock); fail("Should have thrown Exception, but did not!"); } catch (Exception e) { final String msg = e.getMessage(); assertEquals("User with eMail '" + email + "' already exists.", msg); } } @Test public void activateUser_shouldActivateUserAsExpected() throws Exception { // an inactive user that is assigend to a token User user = new User(); user.setActive(false); // create a token that is associated with the user RegistrationToken token = new RegistrationToken(user); // the token value that will be used for the call of // activateUser(tokenValue) String tokenValue = token.getToken(); // mock the registrationTokenService when(registrationTokenService.findByTokenValue(tokenValue)).thenReturn(token); doNothing().when(registrationTokenService).validateToken(token); 
doNothing().when(registrationTokenService).deleteTokenAfterActivation(token); //mock the role service final String defaultUserRoleName = "ROLE_USER"; when(defaultUserRole.getName()).thenReturn(defaultUserRoleName); when(roleService.findByRoleName(defaultUserRoleName)).thenReturn(defaultUserRole); // mock the dao doNothing().when(dao).saveOrUpdate(any(User.class)); // be sure that the user is not active before activating assertFalse(user.isActive()); // finally call the method that is tested here crudService.activateUser(tokenValue); // check first if user is active now assertTrue(user.isActive()); // check if user has at least one role assertFalse(user.getRoles().isEmpty()); // verify method invocations verify(registrationTokenService, times(1)).findByTokenValue(tokenValue); verify(registrationTokenService, times(1)).validateToken(token); verify(registrationTokenService, times(1)).deleteTokenAfterActivation(token); verifyNoMoreInteractions(registrationTokenService); verify(defaultUserRole, times(1)).getName(); verifyNoMoreInteractions(defaultUserRole); verify(roleService, times(1)).findByRoleName(defaultUserRoleName); verifyNoMoreInteractions(roleService); verify(dao, times(1)).saveOrUpdate(any(User.class)); verifyNoMoreInteractions(dao); } @Test public void activateUser_shouldThrowExceptionIfTokenCouldNotBeValidated() throws Exception { // an inactive user that is assigend to a token User user = new User(); user.setActive(false); // create a token that is associated with the user RegistrationToken token = new RegistrationToken(user); // the token value that will be used for the call of // activateUser(tokenValue) String tokenValue = token.getToken(); // mock the registrationTokenService final String expectedErrorMsg = "invalid token"; when(registrationTokenService.findByTokenValue(tokenValue)).thenReturn(token); doThrow(new Exception(expectedErrorMsg)).when(registrationTokenService).validateToken(token); // finally call the method that is tested here try { 
crudService.activateUser(tokenValue); fail("Should have thrown Exception, but did not!"); } catch (Exception e) { final String actualErrorMsg = e.getMessage(); assertEquals(expectedErrorMsg, actualErrorMsg); // verify method invocations verify(registrationTokenService, times(1)).findByTokenValue(tokenValue); verify(registrationTokenService, times(1)).validateToken(token); verifyNoMoreInteractions(registrationTokenService); verifyNoMoreInteractions(dao); } } @Test public void persistNewUser_shouldPersistAndEncrypt() { String rawPassword = "p@sSw0rd"; boolean encryptPassword = true; User unpersistedUser = new User(); unpersistedUser.setPassword(rawPassword); // mock the dao doNothing().when(dao).saveOrUpdate(any(User.class)); // finally call the method that is tested here User persistedUser = crudService.persistNewUser(unpersistedUser, encryptPassword); // verify method invocations verify(dao, times(1)).saveOrUpdate(any(User.class)); verifyNoMoreInteractions(dao); // check if the password has been encrypted assertTrue(passwordEncoder.matches(rawPassword, persistedUser.getPassword())); } @Test public void persistNewUser_shouldPersistButNotEncrypt() { String password = "p@sSw0rd"; boolean encryptPassword = false; User unpersistedUser = new User(); unpersistedUser.setPassword(password); // mock the dao doNothing().when(dao).saveOrUpdate(any(User.class)); // finally call the method that is tested here User persistedUser = crudService.persistNewUser(unpersistedUser, encryptPassword); // verify method invocations verify(dao, times(1)).saveOrUpdate(any(User.class)); verifyNoMoreInteractions(dao); // verify that the password is the same as before and was not encrypted assertEquals(password, persistedUser.getPassword()); } @Test public void persistNewUser_doesNothingIfUserHasId() throws NoSuchFieldException, IllegalAccessException { String rawPassword = "p@sSw0rd"; boolean encryptPassword = true; Integer userId = 42; User unpersistedUser = new User("Dummy", "User", 
"dummyuser"); unpersistedUser.setPassword(rawPassword); IdHelper.setIdOnPersistentObject(unpersistedUser, userId); // finally call the method that is tested here User persistedUser = crudService.persistNewUser(unpersistedUser, encryptPassword); // verify method invocations verifyNoMoreInteractions(dao); // verify that nothing else happened (i.e. no password encryption) assertEquals(unpersistedUser, persistedUser); } @Test public void updatePassword_shouldUpdatePasswordAsExpected() throws Exception { String oldPassword = "eNcrYpt3dOldP4ssw0rd"; String newPassword = "r4Wn3Wp@sSw0rd"; Integer userId = 42; User user = new User(); user.setPassword(oldPassword); IdHelper.setIdOnPersistentObject(user, userId); // mock the dao doNothing().when(dao).saveOrUpdate(any(User.class)); // finally call the method that is tested here crudService.updatePassword(user, newPassword); // verify method invocations verify(dao, times(1)).saveOrUpdate(any(User.class)); verifyNoMoreInteractions(dao); // verify password String encryptedNewPassword = user.getPassword(); assertFalse(oldPassword.equals(encryptedNewPassword)); assertTrue(passwordEncoder.matches(newPassword, encryptedNewPassword)); } @Test public void updatePassword_shouldThrowIfUserHasNoId() { String newPassword = "r4Wn3Wp@sSw0rd"; // user without id User user = new User(); // call the method that is tested here try { crudService.updatePassword(user, newPassword); fail("Should have thrown Exception, but did not!"); } catch (Exception e) { assertEquals("The ID of the user object is null.", e.getMessage()); // verify method invocations verifyNoMoreInteractions(dao); } } @Test public void getUserBySession_shouldReturnUserFromSession() throws NoSuchFieldException, IllegalAccessException { // mock a user for the security context User incompleteSecurityContextUser = new User(); Integer userId = 42; IdHelper.setIdOnPersistentObject(incompleteSecurityContextUser, userId); // mock a "complete" user equivalent coming from db String 
accountName = "someUser"; String firstName = "John"; User completeUserFromDatabase = new User(); IdHelper.setIdOnPersistentObject(completeUserFromDatabase, userId); completeUserFromDatabase.setAccountName(accountName); completeUserFromDatabase.setFirstName(firstName); // mock the security context Authentication authentication = new UsernamePasswordAuthenticationToken(incompleteSecurityContextUser, "somePw"); SecurityContextHolder.getContext().setAuthentication(authentication); // mock the dao when(dao.findById(userId)).thenReturn(completeUserFromDatabase); // finally test/call the method User fullUserBySession = crudService.getUserBySession(); assertEquals(userId, fullUserBySession.getId()); assertEquals(accountName, fullUserBySession.getAccountName()); assertEquals(firstName, fullUserBySession.getFirstName()); verify(dao, times(1)).findById(userId); verifyNoMoreInteractions(dao); } }
package de.peeeq.eclipsewurstplugin;

import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_ANNOTATION;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_BOLD;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_COLOR;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_COMMENT;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_CONSTRUCTOR;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_DATATYPE;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_FIELD;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_FUNCTION;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_HOTDOC;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_INTERFACE;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_ITALIC;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_JASSTYPE;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_KEYWORD;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_PARAM;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_ANNOTATION;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_COMMENT;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_CONSTRUCTOR;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_DATATYPE;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_FIELD;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_FUNCTION;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_HOTDOC;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_INTERFACE;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_JASSTYPE;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_KEYWORD;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_PARAM;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_STRING;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_TEXT;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_RGB_VAR;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_STRIKETHROUGH;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_STRING;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_TEXT;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_UNDERLINE;
import static de.peeeq.eclipsewurstplugin.WurstConstants.SYNTAXCOLOR_VAR;

import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;

import org.eclipse.core.runtime.ILog;
import org.eclipse.core.runtime.Status;
import org.eclipse.jdt.annotation.Nullable;
import org.eclipse.jface.dialogs.IDialogSettings;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.preference.PreferenceConverter;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.IEditorReference;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.BundleContext;

import de.peeeq.eclipsewurstplugin.editor.WurstEditor;
import de.peeeq.eclipsewurstplugin.editor.highlighting.ScannerFactory;
import de.peeeq.eclipsewurstplugin.ui.WurstPerspective;
import de.peeeq.wurstscript.WLogger;
import de.peeeq.wurstscript.utils.WinRegistry;

/**
 * The activator class controls the plug-in life cycle
 */
public class WurstPlugin extends AbstractUIPlugin {

    /**
     * Bridges java.util.logging records from the Wurst compiler (WLogger)
     * into the Eclipse platform log.
     */
    private final class PluginLogHandler extends Handler {
        private final ILog log;

        private PluginLogHandler(ILog log) {
            this.log = log;
        }

        @Override
        public void publish(LogRecord record) {
            // Map JUL levels to Eclipse status severities; anything that is
            // neither SEVERE nor INFO is reported as a warning.
            int level = Status.WARNING;
            if (record.getLevel() == Level.SEVERE) {
                level = Status.ERROR;
            } else if (record.getLevel() == Level.INFO) {
                level = Status.INFO;
            }
            log.log(new Status(level, PLUGIN_ID, Status.OK, record.getMessage(), record.getThrown()));
        }

        @Override
        public void flush() {
            // nothing buffered; the platform log writes immediately
        }

        @Override
        public void close() throws SecurityException {
            // nothing to release
        }
    }

    // The plug-in ID
    public static final String PLUGIN_ID = "EclipseWurstPlugin"; //$NON-NLS-1$

    public static final String WURST_PERSPECTIVE_ID = "de.peeeq.eclipsewurstplugin.wurstperspective";

    // The shared instance
    private static WurstPlugin plugin;

    // lazily created; see scanners()
    private @Nullable ScannerFactory scanners;

    /**
     * The constructor
     */
    public WurstPlugin() {
    }

    /*
     * (non-Javadoc)
     * @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
     */
    @Override
    public void start(BundleContext context) throws Exception {
        super.start(context);
        // route compiler logging into the Eclipse error log
        WLogger.setHandler(new PluginLogHandler(getLog()));
        WLogger.setLevel(Level.INFO);
        plugin = this;
        // TODO not sure where to load the console
        Display.getDefault().asyncExec(() -> {
            try {
                WurstPerspective.findConsole();
            } catch (Throwable t) {
                // ignore error: console creation is best-effort during startup
                t.printStackTrace();
            }
        });
    }

    /*
     * (non-Javadoc)
     * @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
     */
    @Override
    public void stop(BundleContext context) throws Exception {
        plugin = null;
        super.stop(context);
    }

    /**
     * Returns the shared instance
     *
     * @return the shared instance (null while the plug-in is stopped)
     */
    public static WurstPlugin getDefault() {
        return plugin;
    }

    /**
     * Returns an image descriptor for the image file at the given
     * plug-in relative path
     *
     * @param path the path
     * @return the image descriptor
     */
    public static ImageDescriptor getImageDescriptor(String path) {
        return imageDescriptorFromPlugin(PLUGIN_ID, path);
    }

    @Override
    protected void initializeDefaultPreferences(IPreferenceStore store) {
        // NOTE(review): the passed store is ignored; defaults are written to
        // this plug-in's own preference store, which is the same store for
        // this activator. Kept as-is to preserve behavior.
        initializePreferenceStore();
    }

    /** Initializes default values of the preference store. */
    private void initializePreferenceStore() {
        // Colors for syntax highlighting
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_TEXT, SYNTAXCOLOR_RGB_TEXT);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_KEYWORD, SYNTAXCOLOR_RGB_KEYWORD);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_JASSTYPE, SYNTAXCOLOR_RGB_JASSTYPE);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_STRING, SYNTAXCOLOR_RGB_STRING);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_ANNOTATION, SYNTAXCOLOR_RGB_ANNOTATION);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_COMMENT, SYNTAXCOLOR_RGB_COMMENT);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_HOTDOC, SYNTAXCOLOR_RGB_HOTDOC);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_FUNCTION, SYNTAXCOLOR_RGB_FUNCTION);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_DATATYPE, SYNTAXCOLOR_RGB_DATATYPE);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_VAR, SYNTAXCOLOR_RGB_VAR);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_PARAM, SYNTAXCOLOR_RGB_PARAM);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_FIELD, SYNTAXCOLOR_RGB_FIELD);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_INTERFACE, SYNTAXCOLOR_RGB_INTERFACE);
        setDefaultValue(SYNTAXCOLOR_COLOR + SYNTAXCOLOR_CONSTRUCTOR, SYNTAXCOLOR_RGB_CONSTRUCTOR);

        // Style for syntax highlighting: bold
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_KEYWORD, true);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_JASSTYPE, false);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_STRING, false);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_ANNOTATION, false);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_COMMENT, false);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_HOTDOC, false);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_FUNCTION, false);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_DATATYPE, false);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_VAR, false);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_PARAM, false);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_FIELD, false);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_INTERFACE, false);
        setDefaultValue(SYNTAXCOLOR_BOLD + SYNTAXCOLOR_CONSTRUCTOR, false);

        // italic
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_KEYWORD, false);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_JASSTYPE, false);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_STRING, false);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_ANNOTATION, false);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_COMMENT, false);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_HOTDOC, false);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_FUNCTION, true);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_DATATYPE, false);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_VAR, false);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_PARAM, false);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_FIELD, false);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_INTERFACE, false);
        setDefaultValue(SYNTAXCOLOR_ITALIC + SYNTAXCOLOR_CONSTRUCTOR, true);

        // underline
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_KEYWORD, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_JASSTYPE, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_STRING, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_ANNOTATION, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_COMMENT, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_HOTDOC, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_FUNCTION, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_DATATYPE, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_VAR, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_PARAM, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_FIELD, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_INTERFACE, false);
        setDefaultValue(SYNTAXCOLOR_UNDERLINE + SYNTAXCOLOR_CONSTRUCTOR, false);

        // strikethrough
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_KEYWORD, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_JASSTYPE, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_STRING, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_ANNOTATION, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_COMMENT, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_HOTDOC, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_FUNCTION, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_DATATYPE, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_VAR, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_PARAM, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_FIELD, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_INTERFACE, false);
        setDefaultValue(SYNTAXCOLOR_STRIKETHROUGH + SYNTAXCOLOR_CONSTRUCTOR, false);

        // Editor behavior defaults
        setDefaultValue(WurstConstants.WURST_ENABLE_AUTOCOMPLETE, true);
        setDefaultValue(WurstConstants.WURST_AUTOCOMPLETION_DELAY, "0.5");
        setDefaultValue(WurstConstants.WURST_ENABLE_RECONCILING, true);
        setDefaultValue(WurstConstants.WURST_RECONCILATION_DELAY, "0.5");
        setDefaultValue(WurstConstants.WURST_IGNORE_ERRORS, false);
        setDefaultValue(WurstConstants.WURST_WC3_PATH, "C:\\Warcraft III\\");
        try {
            // try to use the registry to find wc3 path
            WinRegistry reg = new WinRegistry();
            String installPath = reg.readString(WinRegistry.HKEY_CURRENT_USER,
                    "Software\\Blizzard Entertainment\\Warcraft III", "InstallPath");
            if (installPath != null) {
                setDefaultValue(WurstConstants.WURST_WC3_PATH, installPath);
            }
        } catch (NoSuchMethodException e) {
            // ignore, registry not supported (non-Windows JVM)
            WLogger.info("Registry not supported");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private void setDefaultValue(String name, boolean value) {
        getDefaultPreferenceStore().setDefault(name, value);
    }

    private void setDefaultValue(String name, String value) {
        getDefaultPreferenceStore().setDefault(name, value);
    }

    private void setDefaultValue(String name, RGB value) {
        PreferenceConverter.setDefault(getDefaultPreferenceStore(), name, value);
    }

    /** Returns the preference store of the shared plug-in instance. */
    public static IPreferenceStore getDefaultPreferenceStore() {
        return WurstPlugin.getDefault().getPreferenceStore();
    }

    /** Returns the lazily created scanner factory for syntax highlighting. */
    public ScannerFactory scanners() {
        ScannerFactory result = scanners;
        if (result == null) {
            result = scanners = new ScannerFactory();
        }
        return result;
    }

    public static WurstEclipseConfig config() {
        return new WurstEclipseConfig(getDefaultPreferenceStore());
    }

    /** Refreshes all open Wurst editors in the active workbench page. */
    public static void refreshEditors() {
        IWorkbenchWindow wb = PlatformUI.getWorkbench().getActiveWorkbenchWindow();
        // FIX: getActiveWorkbenchWindow() is documented to return null when
        // there is no active window (e.g. when called from a non-UI thread);
        // the same holds for getActivePage(). Guard against NPE.
        if (wb == null || wb.getActivePage() == null) {
            return;
        }
        for (IEditorReference ref : wb.getActivePage().getEditorReferences()) {
            if (ref.getEditor(false) instanceof WurstEditor) {
                WurstEditor ed = (WurstEditor) ref.getEditor(false);
                ed.refresh();
            }
        }
    }

    /**
     * Returns the dialog-settings section with the given name, creating it
     * if it does not exist yet.
     */
    public IDialogSettings getDialogSettingsSection(String name) {
        IDialogSettings dialogSettings = getDialogSettings();
        IDialogSettings section = dialogSettings.getSection(name);
        if (section == null) {
            section = dialogSettings.addNewSection(name);
        }
        return section;
    }
}
/** * SIX OVAL - https://nakamura5akihito.github.io/ * Copyright (C) 2010 Akihito Nakamura * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.opensec.six.oval.model.macos; import io.opensec.six.oval.model.ComponentType; import io.opensec.six.oval.model.Family; import io.opensec.six.oval.model.sc.EntityItemStringType; import io.opensec.six.oval.model.sc.ItemType; import io.opensec.six.oval.model.sc.StatusEnumeration; /** * The diskutil item holds verification information about an individual disk on a Mac OS system. * * @author Akihito Nakamura, AIST * @see <a href="http://oval.mitre.org/language/">OVAL Language</a> */ public class DiskUtilItem extends ItemType { //{0..1} private EntityItemStringType device; private EntityItemStringType filepath; private EntityItemPermissionCompareType uread; private EntityItemPermissionCompareType uwrite; private EntityItemPermissionCompareType uexec; private EntityItemPermissionCompareType gread; private EntityItemPermissionCompareType gwrite; private EntityItemPermissionCompareType gexec; private EntityItemPermissionCompareType oread; private EntityItemPermissionCompareType owrite; private EntityItemPermissionCompareType oexec; /** * Constructor. 
*/ public DiskUtilItem() { this( 0 ); } public DiskUtilItem( final int id ) { this( id, null ); } public DiskUtilItem( final int id, final StatusEnumeration status ) { super( id, status ); _oval_family = Family.MACOS; _oval_component = ComponentType.DISKUTIL; } /** */ public void setDevice( final EntityItemStringType device ) { this.device = device; } public EntityItemStringType getDevice() { return device; } /** */ public void setFilepath( final EntityItemStringType filepath ) { this.filepath = filepath; } public EntityItemStringType getFilepath() { return filepath; } /** */ public void setUread( final EntityItemPermissionCompareType uread ) { this.uread = uread; } public EntityItemPermissionCompareType getUread() { return uread; } /** */ public void setUwrite( final EntityItemPermissionCompareType uwrite ) { this.uwrite = uwrite; } public EntityItemPermissionCompareType getUwrite() { return uwrite; } /** */ public void setUexec( final EntityItemPermissionCompareType uexec ) { this.uexec = uexec; } public EntityItemPermissionCompareType getUexec() { return uexec; } /** */ public void setGread( final EntityItemPermissionCompareType gread ) { this.gread = gread; } public EntityItemPermissionCompareType getGread() { return gread; } /** */ public void setGwrite( final EntityItemPermissionCompareType gwrite ) { this.gwrite = gwrite; } public EntityItemPermissionCompareType getGwrite() { return gwrite; } /** */ public void setGexec( final EntityItemPermissionCompareType gexec ) { this.gexec = gexec; } public EntityItemPermissionCompareType getGexec() { return gexec; } /** */ public void setOread( final EntityItemPermissionCompareType oread ) { this.oread = oread; } public EntityItemPermissionCompareType getOread() { return oread; } /** */ public void setOwrite( final EntityItemPermissionCompareType owrite ) { this.owrite = owrite; } public EntityItemPermissionCompareType getOwrite() { return owrite; } /** */ public void setOexec( final EntityItemPermissionCompareType 
oexec ) { this.oexec = oexec; } public EntityItemPermissionCompareType getOexec() { return oexec; } //************************************************************** // java.lang.Object //************************************************************** @Override public int hashCode() { return super.hashCode(); } @Override public boolean equals( final Object obj ) { if (!(obj instanceof DiskUtilItem)) { return false; } return super.equals( obj ); } @Override public String toString() { return "diskutil_item[" + super.toString() + ", device=" + getDevice() + ", filepath=" + getFilepath() + ", uread=" + getUread() + ", uwrite=" + getUwrite() + ", uexec=" + getUexec() + ", gread=" + getGread() + ", gwrite=" + getGwrite() + ", gexec=" + getGexec() + ", oread=" + getOread() + ", owrite=" + getOwrite() + ", oexec=" + getOexec() + "]"; } } //
/**
 * Copyright 2011-2012 @WalmartLabs, a division of Wal-Mart Stores, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.walmartlabs.mupd8.network.client;

import java.net.InetSocketAddress;
import java.util.concurrent.*;

import org.jboss.netty.bootstrap.ClientBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFactory;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import com.walmartlabs.mupd8.network.common.*;
import org.jboss.netty.handler.codec.oneone.OneToOneEncoder;
import org.jboss.netty.handler.codec.replay.ReplayingDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Netty-based client that lazily opens and caches one connection per host
 * in the cluster and sends packets over them.
 */
public class Client {

    /** Immutable (host, port) pair for a cluster member. */
    // FIX: made static — a non-static inner class would keep a hidden
    // reference to the enclosing Client for no benefit.
    private static class Endpoint {
        public String _host;
        public int _port;

        public Endpoint(String host, int port) {
            _host = host;
            _port = port;
        }

        @Override
        public String toString() {
            return "(" + _host + ", " + _port + ")";
        }
    }

    private final Listener listener;
    private ClientBootstrap bootstrap;
    private ClientHandler handler;
    // Store connections to all hosts in cluster
    private ConcurrentHashMap<String, Channel> connectors;
    // Store endpoints for all hosts in cluster
    private ConcurrentHashMap<String, Endpoint> endpoints;
    private Executor bossPool;
    private ThreadPoolExecutor workerPool;
    private OneToOneEncoder encoder;
    private Callable<ReplayingDecoder<Decoder.DecodingState>> decoderFactory;

    private static final Logger logger = LoggerFactory.getLogger(Client.class);

    /**
     * @param listener        receives events from the channel handler
     * @param pencoder        encoder appended to every pipeline
     * @param pdecoderFactory produces a fresh decoder per pipeline
     */
    public Client(Listener listener, OneToOneEncoder pencoder,
                  Callable<ReplayingDecoder<Decoder.DecodingState>> pdecoderFactory) {
        this.listener = listener;
        this.encoder = pencoder;
        this.decoderFactory = pdecoderFactory;
        connectors = new ConcurrentHashMap<String, Channel>();
        endpoints = new ConcurrentHashMap<String, Endpoint>();
    }

    /** Sets up the Netty bootstrap; must be called before send(). */
    public void init() {
        // Standard netty bootstrapping stuff.
        bossPool = Executors.newCachedThreadPool();
        workerPool = (ThreadPoolExecutor) Executors.newCachedThreadPool();
        ChannelFactory factory = new NioClientSocketChannelFactory(bossPool, workerPool);
        handler = new ClientHandler(listener);
        bootstrap = new ClientBootstrap(factory);
        bootstrap.setOption("reuseAddress", true);
        bootstrap.setOption("tcpNoDelay", true);
        bootstrap.setOption("keepAlive", true);
        bootstrap.setOption("sendBufferSize", 1048576);
        bootstrap.setOption("receiveBufferSize", 1048576);
        bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
            @Override
            public ChannelPipeline getPipeline() throws Exception {
                ChannelPipeline pipeline = Channels.pipeline();
                pipeline.addLast("encoder", encoder);
                pipeline.addLast("decoder", decoderFactory.call());
                pipeline.addLast("handler", handler);
                return pipeline;
            }
        });
    }

    /** Registers (or replaces) the endpoint for a host. */
    public void addEndpoint(String host, int port) {
        endpoints.put(host, new Endpoint(host, port));
    }

    /** Unregisters a host and closes its connection, if any. */
    public void removeEndpoint(String host) {
        // FIX: remove() is null-safe; the containsKey pre-check was a
        // redundant check-then-act race on the concurrent map.
        endpoints.remove(host);
        disconnect(host);
    }

    /**
     * Connects to host:port, retrying up to 3 times.
     *
     * @return true if the channel ended up connected
     */
    private boolean connect(String host, int port) {
        InetSocketAddress remoteAddr = new InetSocketAddress(host, port);
        ChannelFuture future = null;
        // retry 3 times for now
        int i = 0;
        for (; i < 3; i++) {
            future = bootstrap.connect(remoteAddr);
            if (!future.awaitUninterruptibly().isSuccess()) {
                logger.error("CLIENT - Failed to connect to server at "
                        + remoteAddr.getHostName() + ":" + remoteAddr.getPort());
                try {
                    Thread.sleep(500);
                } catch (InterruptedException e) {
                    // FIX: restore the interrupt status instead of silently
                    // swallowing it, so callers can observe the interruption.
                    Thread.currentThread().interrupt();
                }
            } else {
                break;
            }
        }
        if (i >= 3) {
            return false;
        }
        logger.info("CLIENT - Connected to " + remoteAddr.getHostName() + ":" + remoteAddr.getPort());
        Channel connector = future.getChannel();
        connectors.put(host, connector);
        return connector.isConnected();
    }

    /** @return true iff there is a cached, currently connected channel for connId */
    public boolean isConnected(String connId) {
        // FIX: single get() instead of containsKey()+get() — the old pattern
        // could NPE if another thread removed the entry in between.
        Channel connector = connectors.get(connId);
        return connector != null && connector.isConnected();
    }

    /** Closes and forgets the connection to host, if one exists. */
    private void disconnect(String host) {
        // FIX: atomically remove, then close — avoids the check-then-act
        // window of the old containsKey()/get()/remove() sequence.
        Channel connector = connectors.remove(host);
        if (connector != null) {
            connector.close().awaitUninterruptibly();
        }
    }

    // close all the connections and shut down the client
    public void stop() {
        int largestPoolSize = workerPool.getLargestPoolSize();
        logger.info("Largest pool size for client worker pool: " + largestPoolSize);
        for (Channel connector : connectors.values()) {
            if (connector != null) {
                connector.close().awaitUninterruptibly();
            }
        }
        connectors.clear();
        this.bootstrap.releaseExternalResources();
        logger.info("CLIENT stopped...");
    }

    /**
     * Sends a packet to connId, lazily connecting (and reconnecting once)
     * as needed.
     *
     * @return true if the packet was written to a connected channel
     */
    public boolean send(String connId, Object packet) {
        // FIX: read the endpoint once instead of repeated containsKey()/get()
        // on the concurrent map.
        Endpoint endpoint = endpoints.get(connId);
        if (endpoint == null) {
            logger.warn("CLIENT - endpoint of " + connId + " doesn't exist.");
            return false;
        }
        // Make connect only when it is used.
        if (!connectors.containsKey(connId)) {
            logger.warn("CLIENT - connection to (" + connId + ", " + endpoint
                    + ") doesn't exist; going to make connection.");
            if (!connect(endpoint._host, endpoint._port)) {
                logger.error("CLIENT - connecting to " + connId + " failed.");
                return false;
            }
        }
        Channel connector = connectors.get(connId);
        if (connector.isConnected()) {
            connector.write(packet);
            return true;
        } else {
            logger.error("CLIENT - " + connId + " is not connected!");
            // one reconnect attempt before giving up
            if (!connect(endpoint._host, endpoint._port)) {
                logger.error("CLIENT - reconnecting to " + connId + " failed.");
                return false;
            }
            connector = connectors.get(connId);
            if (connector.isConnected()) {
                connector.write(packet);
                return true;
            } else {
                logger.error("CLIENT - " + connId + " still is not connected!");
                return false;
            }
        }
    }
}
/*
 * Copyright 2013 Netherlands eScience Center
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package nl.esciencecenter.amuse.distributed.resources;

import ibis.ipl.server.Server;

import java.io.InputStream;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

import nl.esciencecenter.amuse.distributed.AmuseConfiguration;
import nl.esciencecenter.amuse.distributed.DistributedAmuseException;
import nl.esciencecenter.xenon.XenonException;
import nl.esciencecenter.xenon.adaptors.schedulers.ssh.SshSchedulerAdaptor;
import nl.esciencecenter.xenon.credentials.Credential;
import nl.esciencecenter.xenon.credentials.DefaultCredential;
import nl.esciencecenter.xenon.filesystems.FileSystem;
import nl.esciencecenter.xenon.utils.LocalFileSystemUtils;
import nl.esciencecenter.xenon.filesystems.Path;
import nl.esciencecenter.xenon.schedulers.Scheduler;
import nl.esciencecenter.xenon.adaptors.schedulers.slurm.SlurmSchedulerAdaptor;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Manages a resource. Copies files, possibly starts a hub, etc.
 *
 * @author Niels Drost
 *
 */
public class ResourceManager {

    public static final String WHITESPACE_REGEX = ";";

    public static final String EQUALS_REGEX = "\\s*=\\s*";

    private static final Logger logger = LoggerFactory.getLogger(ResourceManager.class);

    // monotonically increasing resource id; NOTE(review): not thread-safe —
    // assumes resources are created from a single thread, confirm with callers
    private static int nextID = 0;

    private static int getNextID() {
        return nextID++;
    }

    private final int id;
    private final String name;
    private final String username;
    private final String location;
    private final String gateway;
    private final String amuseDir;
    private final String tmpDir;
    private final String schedulerType;
    private final String hubQueueName;
    private final int hubTimeMinutes;
    private final Map<String, String> properties = new HashMap<String, String>();
    private final Credential credential;
    private final AmuseConfiguration configuration;
    private final boolean startHub;
    private final Scheduler scheduler;
    private final Path home;
    private final FileSystem filesystem;
    private final String keyfile;
    private final Hub hub;

    /**
     * Polls the local IPL server until the given remote hub address shows up
     * in its hub list (max ~20s, 40 x 500ms).
     *
     * @throws DistributedAmuseException if the hub never appears
     */
    private static void waitUntilHubStarted(Server iplServer, String hubAddress, String name)
            throws DistributedAmuseException {
        logger.info("waiting for new remote hub on {} at {} to connect to the local hub", name, hubAddress);
        for (int i = 0; i < 40; i++) {
            String[] knownHubAddresses = iplServer.getHubs();
            // FIX: log the addresses we already fetched instead of calling
            // iplServer.getHubs() a second time per iteration.
            logger.debug("ipl hub addresses now " + Arrays.toString(knownHubAddresses));
            for (String knownHub : knownHubAddresses) {
                if (knownHub.equals(hubAddress)) {
                    logger.info("new hub at {} now connected to local hub", name);
                    return;
                }
            }
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                // FIX: restore the interrupt status instead of discarding it;
                // the polling loop still continues as before.
                Thread.currentThread().interrupt();
            }
        }
        throw new DistributedAmuseException("Local and new remote Hub at " + name + " not able to communicate");
    }

    /**
     * Creates a manager for a (possibly remote) resource: opens its
     * filesystem, reads the AMUSE configuration, creates a scheduler and
     * optionally starts a communication hub.
     */
    public ResourceManager(String name, String username, String location, String gateway, String amuseDir, String tmpDir,
            String schedulerType, String hubQueueName, int hubTimeMinutes, boolean startHub, Server iplServer)
            throws DistributedAmuseException {
        this.id = getNextID();
        this.name = name;
        this.location = location;
        this.gateway = gateway;
        this.amuseDir = amuseDir;
        this.tmpDir = tmpDir;
        this.schedulerType = schedulerType;
        this.hubQueueName = hubQueueName;
        this.hubTimeMinutes = hubTimeMinutes;
        this.username = username;
        this.keyfile = "id_rsa"; // hardcoded and non functional atm

        //local resources _never_ have a hub
        this.startHub = (schedulerType.equals("local")) ? false : startHub;

        if (gateway != null && !gateway.isEmpty()) {
            properties.put(SshSchedulerAdaptor.GATEWAY, gateway);
        }

        //~ if(getSchedulerType().equals("slurm")) {
        //~     properties.put(SlurmSchedulerAdaptor.IGNORE_VERSION_PROPERTY, "true");
        //~ }

        if (username != null) {
            credential = new DefaultCredential(username);
        } else {
            credential = new DefaultCredential();
        }

        filesystem = _getFileSystem();
        home = _getHome();

        logger.info("found home of resource {} to be {}", name, home);

        this.configuration = downloadConfiguration(filesystem);

        if (!configuration.isJavaEnabled()) {
            throw new DistributedAmuseException("Resource " + name
                    + " not suitable as target for distributed AMUSE, java not enabled in configuration");
        }

        scheduler = createScheduler();

        if (this.startHub) {
            this.hub = new Hub(this, this.configuration, iplServer.getHubs());
            iplServer.addHubs(this.hub.getAddress());
            String hubAddress = this.hub.getAddress();
            logger.debug("just added new hub " + hubAddress);
            waitUntilHubStarted(iplServer, hubAddress, name);
        } else {
            this.hub = null;
        }

        logger.info("Created new resource {}", this);
    }

    /** Creates the Xenon scheduler for this resource (local or remote). */
    private Scheduler createScheduler() throws DistributedAmuseException {
        try {
            if (isLocal()) {
                return Scheduler.create("local");
            }
            return Scheduler.create(getSchedulerType(), getLocation(), getCredential(), getProperties());
        } catch (XenonException e) {
            throw new DistributedAmuseException("cannot create scheduler connection for resource " + this.name, e);
        }
    }

    private Path _getHome() throws DistributedAmuseException {
        return getFileSystem().getWorkingDirectory();
    }

    /** Opens the local filesystem, or an sftp filesystem for remote resources. */
    private FileSystem _getFileSystem() throws DistributedAmuseException {
        try {
            if (isLocal()) {
                // FIX: renamed local variable — it shadowed the (not yet
                // assigned) 'filesystem' field.
                FileSystem localFs = LocalFileSystemUtils.getLocalFileSystems()[0];
                localFs.setWorkingDirectory(new Path(System.getProperty("user.home")));
                return localFs;
            }
            logger.info("trying with {} {} {}", username, getLocation(), getProperties());
            return FileSystem.create("sftp", getLocation(), getCredential(), getProperties());
        } catch (XenonException e) {
            throw new DistributedAmuseException("cannot open filesystem for resource " + this.name, e);
        }
    }

    /**
     * Downloads and parses "config.mk" from the resource's AMUSE directory
     * (absolute, or resolved against the filesystem's working directory).
     */
    private AmuseConfiguration downloadConfiguration(FileSystem filesystem) throws DistributedAmuseException {
        try {
            Path amuseHome;
            if (this.amuseDir.startsWith("/")) {
                amuseHome = new Path(this.amuseDir);
            } else {
                Path userHome = filesystem.getWorkingDirectory();
                amuseHome = userHome.resolve(this.amuseDir);
            }
            Path amuseConfig = amuseHome.resolve("config.mk");
            logger.debug("Downloading amuse config for " + getName() + " from " + amuseConfig);
            InputStream in = filesystem.readFromFile(amuseConfig);
            return new AmuseConfiguration(amuseHome.toAbsolutePath().toString(), in);
        } catch (Exception e) {
            throw new DistributedAmuseException("cannot download configuration file for resource " + this.name, e);
        }
    }

    public int getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public String getLocation() {
        return location;
    }

    public String getGateway() {
        return gateway;
    }

    public String getAmuseDir() {
        return amuseDir;
    }

    public String getTmpDir() {
        return tmpDir;
    }

    public String getSchedulerType() {
        return schedulerType;
    }

    public AmuseConfiguration getConfiguration() {
        return configuration;
    }

    @Override
    public int hashCode() {
        // FIX: avoid the deprecated Integer(int) constructor; produces the
        // same value (the int itself) as new Integer(id).hashCode().
        return Integer.hashCode(id);
    }

    @Override
    public boolean equals(Object other) {
        if (other == null) {
            return false;
        }
        if (!(other instanceof ResourceManager)) {
            return false;
        }
        return id == ((ResourceManager) other).id;
    }

    /** Stops the hub (if any) and closes the scheduler and filesystem. */
    public void stop() {
        logger.debug("Stopping resource {}", this);
        if (hub != null) {
            hub.stopHub();
        }
        try {
            scheduler.close();
        } catch (XenonException e) {
            logger.warn("Error while closing scheduler for " + this, e);
        }
        try {
            filesystem.close();
        } catch (XenonException e) {
            logger.warn("Error while closing filesystem for " + this, e);
        }
    }

    public Hub getHub() {
        return hub;
    }

    public boolean hasHub() {
        return hub != null;
    }

    /** A resource is local if it has no location or points at localhost. */
    public boolean isLocal() {
        return location == null || location.equals("localhost") || location.equals("local");
    }

    @Override
    public String toString() {
        return "Resource [id=" + id + ", name=" + name + ", username=" + username + ", location=" + location + ", amuseDir="
                + amuseDir + ", schedulerType=" + schedulerType + ", configuration=" + configuration + ", startHub=" + startHub
                + ", hub=" + hub + "]";
    }

    /** Returns a human-readable status map (insertion-ordered) for UIs. */
    public Map<String, String> getStatusMap() throws DistributedAmuseException {
        Map<String, String> result = new LinkedHashMap<String, String>();
        result.put("ID", Integer.toString(id));
        result.put("Name", name);
        result.put("Username", username);
        result.put("Location", location);
        result.put("Gateway", gateway);
        result.put("Amuse dir", amuseDir);
        result.put("Scheduler type", schedulerType);
        result.put("Java path", configuration.getJava());
        result.put("MPI enabled", Boolean.toString(configuration.isMPIEnabled()));
        result.put("Mpiexec", configuration.getMpiexec());
        return result;
    }

    public Path getHome() {
        return home;
    }

    public Scheduler getScheduler() {
        return scheduler;
    }

    public int getHubTimeMinutes() {
        return this.hubTimeMinutes;
    }

    public String getHubQueueName() {
        return this.hubQueueName;
    }

    public FileSystem getFileSystem() {
        return filesystem;
    }

    public Map<String, String> getProperties() {
        return properties;
    }

    public Credential getCredential() {
        return credential;
    }

    public String getKeyfile() {
        return keyfile;
    }
}
package com.github.tkurz.sparqlmm; import com.github.tkurz.media.fragments.exceptions.MediaFragmentURISyntaxException; import com.github.tkurz.sparqlmm.function.temporal.relation.*; import org.junit.*; import org.openrdf.query.*; import org.openrdf.query.algebra.evaluation.function.FunctionRegistry; import org.openrdf.repository.Repository; import org.openrdf.repository.RepositoryConnection; import org.openrdf.repository.RepositoryException; import org.openrdf.repository.sail.SailRepository; import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFParseException; import org.openrdf.sail.memory.MemoryStore; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; /** * ... * <p/> * Author: Thomas Kurz (tkurz@apache.org) */ public class TemporalRelationTest { private final String TEST1 = "/test1.ttl"; private final String TEST2 = "/test2.ttl"; private final String BASE_URI = "http://test.org/resource/"; private RepositoryConnection connection; @BeforeClass public static void beforeClass() { FunctionRegistry.getInstance().add(new PrecededByFunction()); FunctionRegistry.getInstance().add(new PrecedesFunction()); FunctionRegistry.getInstance().add(new ContainsFunction()); FunctionRegistry.getInstance().add(new EqualFunction()); FunctionRegistry.getInstance().add(new FinishesFunction()); FunctionRegistry.getInstance().add(new MeetsFunction()); FunctionRegistry.getInstance().add(new OverlapsFunction()); FunctionRegistry.getInstance().add(new StartsFunction()); } private void importFile(String filename) throws RepositoryException, RDFParseException, IOException { //import file InputStream in = this.getClass().getResourceAsStream(filename); connection.add(in,BASE_URI, RDFFormat.TURTLE); } @Before public void before() throws IOException, RepositoryException, RDFParseException { Repository repo = new SailRepository(new MemoryStore()); repo.initialize(); connection = repo.getConnection(); } @After public void after() throws RepositoryException { 
if(connection!=null) connection.close(); } @Test public void testAfterFunction() throws MalformedQueryException, RepositoryException, QueryEvaluationException, MediaFragmentURISyntaxException, RDFParseException, IOException { importFile(TEST1); String query = "PREFIX ma: <http://www.w3.org/ns/ma-ont#>" + "PREFIX mm: <" + Constants.NAMESPACE + ">" + "SELECT ?t1 ?t2 WHERE {" + " ?f1 rdfs:label ?t1." + " ?f2 rdfs:label ?t2." + " FILTER mm:after(?f1,?f2)" + "} ORDER BY ?t1 ?t2"; TupleQuery q = connection.prepareTupleQuery(QueryLanguage.SPARQL,query); TupleQueryResult r = q.evaluate(); ArrayList<BindingSet> list = new ArrayList<BindingSet>(); while(r.hasNext()) { list.add(r.next()); } Assert.assertTrue(list.size() == 11); for(BindingSet set : list) { String t1 = set.getBinding("t1").getValue().stringValue(); String t2 = set.getBinding("t2").getValue().stringValue(); Assert.assertEquals(t1.charAt(0),t2.charAt(0)); Assert.assertTrue( Integer.valueOf(t1.charAt(2)) > Integer.valueOf(t2.charAt(2)) ); } } @Test public void testBeforeFunction() throws MalformedQueryException, RepositoryException, QueryEvaluationException, MediaFragmentURISyntaxException, RDFParseException, IOException { importFile(TEST1); String query = "PREFIX ma: <http://www.w3.org/ns/ma-ont#>" + "PREFIX mm: <" + Constants.NAMESPACE + ">" + "SELECT ?t1 ?t2 WHERE {" + " ?f1 rdfs:label ?t1." + " ?f2 rdfs:label ?t2." 
+ " FILTER mm:precedes(?f1,?f2)" + "} ORDER BY ?t1 ?t2"; TupleQuery q = connection.prepareTupleQuery(QueryLanguage.SPARQL,query); TupleQueryResult r = q.evaluate(); ArrayList<BindingSet> list = new ArrayList<BindingSet>(); while(r.hasNext()) { list.add(r.next()); } Assert.assertTrue(list.size() == 11); for(BindingSet set : list) { String t1 = set.getBinding("t1").getValue().stringValue(); String t2 = set.getBinding("t2").getValue().stringValue(); Assert.assertEquals(t1.charAt(0),t2.charAt(0)); Assert.assertTrue( Integer.valueOf(t1.charAt(2)) < Integer.valueOf(t2.charAt(2)) ); } } @Test public void testContainsFunction() throws MalformedQueryException, RepositoryException, QueryEvaluationException, MediaFragmentURISyntaxException, RDFParseException, IOException { importFile(TEST1); String query = "PREFIX ma: <http://www.w3.org/ns/ma-ont#>" + "PREFIX mm: <" + Constants.NAMESPACE + ">" + "SELECT ?t1 ?t2 WHERE {" + " ?f1 rdfs:label ?t1." + " ?f2 rdfs:label ?t2." + " FILTER mm:temporalContains(?f1,?f2)" + "} ORDER BY ?t1 ?t2"; TupleQuery q = connection.prepareTupleQuery(QueryLanguage.SPARQL,query); TupleQueryResult r = q.evaluate(); Assert.assertTrue(r.hasNext()); BindingSet set = r.next(); Assert.assertEquals("2_4",set.getBinding("t1").getValue().stringValue()); Assert.assertEquals("2_5",set.getBinding("t2").getValue().stringValue()); Assert.assertFalse(r.hasNext()); /* String query1 = "PREFIX ma: <http://www.w3.org/ns/ma-ont#>" + "PREFIX mm: <" + Constants.NAMESPACE + ">" + "SELECT ?t1 ?t2 WHERE {" + " ?f1 rdfs:label ?t1." + " ?f2 rdfs:label ?t2." 
+ " FILTER mm:temporalContains(?f1,?f2,true)" + "} ORDER BY ?t1 ?t2"; TupleQuery q1 = connection.prepareTupleQuery(QueryLanguage.SPARQL,query1); TupleQueryResult r1 = q1.evaluate(); Assert.assertTrue(r1.hasNext()); int size = 0; while(r1.hasNext()) { size++; r1.next(); } Assert.assertEquals(10, size); */ } @Test public void testEqualFunction() throws MalformedQueryException, RepositoryException, QueryEvaluationException, MediaFragmentURISyntaxException, RDFParseException, IOException { importFile(TEST1); String query = "PREFIX ma: <http://www.w3.org/ns/ma-ont#>" + "PREFIX mm: <" + Constants.NAMESPACE + ">" + "SELECT ?t1 ?t2 WHERE {" + " ?f1 rdfs:label ?t1." + " ?f2 rdfs:label ?t2." + " FILTER mm:temporalEquals(?f1,?f2)" + "} ORDER BY ?t1 ?t2"; TupleQuery q = connection.prepareTupleQuery(QueryLanguage.SPARQL,query); TupleQueryResult r = q.evaluate(); ArrayList<BindingSet> list = new ArrayList<BindingSet>(); while(r.hasNext()) { list.add(r.next()); } Assert.assertTrue(list.size() == 9); for(BindingSet set : list) { String t1 = set.getBinding("t1").getValue().stringValue(); String t2 = set.getBinding("t2").getValue().stringValue(); Assert.assertEquals(t1,t2); } } @Test public void testFinishesFunction() throws MalformedQueryException, RepositoryException, QueryEvaluationException, MediaFragmentURISyntaxException, RDFParseException, IOException { importFile(TEST2); String query = "PREFIX ma: <http://www.w3.org/ns/ma-ont#>" + "PREFIX mm: <" + Constants.NAMESPACE + ">" + "SELECT ?t1 ?t2 WHERE {" + " ?f1 rdfs:label ?t1." + " ?f2 rdfs:label ?t2." 
+ " FILTER mm:finishes(?f1,?f2)" + "} ORDER BY ?t1 ?t2"; TupleQuery q = connection.prepareTupleQuery(QueryLanguage.SPARQL,query); TupleQueryResult r = q.evaluate(); ArrayList<BindingSet> list = new ArrayList<BindingSet>(); while(r.hasNext()) { list.add(r.next()); } Assert.assertTrue(list.size() == 1); String t1 = list.get(0).getBinding("t1").getValue().stringValue(); String t2 = list.get(0).getBinding("t2").getValue().stringValue(); Assert.assertEquals("1_4",t1); Assert.assertEquals("1_3",t2); } @Test public void testMeetsFunction() throws MalformedQueryException, RepositoryException, QueryEvaluationException, MediaFragmentURISyntaxException, RDFParseException, IOException { importFile(TEST1); String query = "PREFIX ma: <http://www.w3.org/ns/ma-ont#>" + "PREFIX mm: <" + Constants.NAMESPACE + ">" + "SELECT ?t1 ?t2 WHERE {" + " ?f1 rdfs:label ?t1." + " ?f2 rdfs:label ?t2." + " FILTER mm:temporalMeets(?f1,?f2)" + "} ORDER BY ?t1 ?t2"; TupleQuery q = connection.prepareTupleQuery(QueryLanguage.SPARQL,query); TupleQueryResult r = q.evaluate(); ArrayList<BindingSet> list = new ArrayList<BindingSet>(); while(r.hasNext()) { list.add(r.next()); } Assert.assertTrue(list.size() == 2); String t1 = list.get(0).getBinding("t1").getValue().stringValue(); String t2 = list.get(0).getBinding("t2").getValue().stringValue(); Assert.assertEquals("1_1",t1); Assert.assertEquals("1_2",t2); String t5 = list.get(1).getBinding("t1").getValue().stringValue(); String t6 = list.get(1).getBinding("t2").getValue().stringValue(); Assert.assertEquals("2_1",t5); Assert.assertEquals("2_2",t6); } @Test public void testOverlapsFunction() throws MalformedQueryException, RepositoryException, QueryEvaluationException, MediaFragmentURISyntaxException, RDFParseException, IOException { importFile(TEST1); String query = "PREFIX ma: <http://www.w3.org/ns/ma-ont#>" + "PREFIX mm: <" + Constants.NAMESPACE + ">" + "SELECT ?t1 ?t2 WHERE {" + " ?f1 rdfs:label ?t1." + " ?f2 rdfs:label ?t2." 
+ " FILTER mm:temporalOverlaps(?f1,?f2)" + "} ORDER BY ?t1 ?t2"; TupleQuery q = connection.prepareTupleQuery(QueryLanguage.SPARQL,query); TupleQueryResult r = q.evaluate(); ArrayList<BindingSet> list = new ArrayList<BindingSet>(); while(r.hasNext()) { list.add(r.next()); } Assert.assertTrue(list.size() == 2); String t1 = list.get(0).getBinding("t1").getValue().stringValue(); String t2 = list.get(0).getBinding("t2").getValue().stringValue(); Assert.assertEquals("1_3",t1); Assert.assertEquals("1_4",t2); String t3 = list.get(1).getBinding("t1").getValue().stringValue(); String t4 = list.get(1).getBinding("t2").getValue().stringValue(); Assert.assertEquals("2_3",t3); Assert.assertEquals("2_4",t4); } @Test public void testStartsFunction() throws MalformedQueryException, RepositoryException, QueryEvaluationException, MediaFragmentURISyntaxException, RDFParseException, IOException { importFile(TEST2); String query = "PREFIX ma: <http://www.w3.org/ns/ma-ont#>" + "PREFIX mm: <" + Constants.NAMESPACE + ">" + "SELECT ?t1 ?t2 WHERE {" + " ?f1 rdfs:label ?t1." + " ?f2 rdfs:label ?t2." + " FILTER mm:starts(?f1,?f2)" + "} ORDER BY ?t1 ?t2"; TupleQuery q = connection.prepareTupleQuery(QueryLanguage.SPARQL,query); TupleQueryResult r = q.evaluate(); ArrayList<BindingSet> list = new ArrayList<BindingSet>(); while(r.hasNext()) { list.add(r.next()); } Assert.assertTrue(list.size() == 2); String t1 = list.get(0).getBinding("t1").getValue().stringValue(); String t2 = list.get(0).getBinding("t2").getValue().stringValue(); Assert.assertEquals("1_2",t1); Assert.assertEquals("1_1",t2); String t3 = list.get(1).getBinding("t1").getValue().stringValue(); String t4 = list.get(1).getBinding("t2").getValue().stringValue(); Assert.assertEquals("2_2",t3); Assert.assertEquals("2_1",t4); } }
/** * Copyright (C) 2014-2017 Xavier Witdouck * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.zavtech.morpheus.util; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.Spliterator; import java.util.Spliterators; import java.util.TreeMap; import java.util.TreeSet; import java.util.function.Consumer; import java.util.stream.Stream; import java.util.stream.StreamSupport; /** * A convenience factory class for building collections of various kinds * * <p><strong>This is open source software released under the <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache 2.0 License</a></strong></p> * * @author Xavier Witdouck */ public class Collect { /** * Private constructor */ private Collect() { super(); } /** * Returns a new array List of the values specified * @param values the values to create a new Set from * @param <T> the element type * @return the newly created set */ @SafeVarargs public static <T> List<T> asList(T... 
values) { return Collect.asList(false, values); } /** * Returns a new array List of the values specified * @param values the values to create a new Set from * @param <T> the element type * @return the newly created set */ public static <T> List<T> asList(Iterable<T> values) { return Collect.asList(false, values); } /** * Returns a new Stream of the values from the Iterator * @param values the values to create a new Set from * @param <T> the element type * @return the newly created set */ public static <T> Stream<T> asStream(Iterator<T> values) { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(values, Spliterator.ORDERED), false); } /** * Returns a new Stream of the values from the Iterable * @param values the values to create a new Set from * @param <T> the element type * @return the newly created set */ public static <T> Stream<T> asStream(Iterable<T> values) { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(values.iterator(), Spliterator.ORDERED), false); } /** * Returns a new linked List of the values specified * @param values the values to create a new Set from * @param <T> the element type * @return the newly created set */ @SafeVarargs public static <T> List<T> asList(boolean linked, T... 
values) { if (linked) { final List<T> result = new LinkedList<>(); for (T value : values) result.add(value); return result; } else { final List<T> result = new ArrayList<>(values.length); for (T value : values) result.add(value); return result; } } /** * Returns a new linked List of the values specified * @param values the values to create a new Set from * @param <T> the element type * @return the newly created set */ public static <T> List<T> asList(boolean linked, Iterable<T> values) { if (linked) { final List<T> result = new LinkedList<>(); for (T value : values) result.add(value); return result; } else { final List<T> result = new ArrayList<>(); for (T value : values) result.add(value); return result; } } /** * Returns a new Set of the values specified * @param values the values to create a new Set from * @param <T> the element type * @return the newly created set */ @SafeVarargs public static <T> Set<T> asSet(T... values) { final Set<T> result = new HashSet<>(values.length); for (T value : values) result.add(value); return result; } /** * Returns a new Set of the values specified * @param values the values to create a new Set from * @param <T> the element type * @return the newly created set */ @SafeVarargs public static <T> SortedSet<T> asSortedSet(T... 
values) { final SortedSet<T> result = new TreeSet<>(); for (T value : values) result.add(value); return result; } /** * Returns a new created Map initialized with whatever the consumer does * @param mapper the consumer that sets up mappings * @param <K> the key type * @param <V> the value type * @return the newly created map */ public static <K,V> Map<K,V> asMap(Consumer<Map<K,V>> mapper) { final Map<K,V> map = new HashMap<>(); mapper.accept(map); return map; } /** * Returns a new created Map initialized with whatever the consumer does * @param initialCapacity the initial capacity for apply * @param mapper the consumer that sets up mappings * @param <K> the key type * @param <V> the value type * @return the newly created map */ public static <K,V> Map<K,V> asMap(int initialCapacity, Consumer<Map<K,V>> mapper) { final Map<K,V> map = new HashMap<>(initialCapacity); mapper.accept(map); return map; } /** * Returns a new created Map initialized with whatever the consumer does * @param mapper the consumer that sets up mappings * @param <K> the key type * @param <V> the value type * @return the newly created map */ public static <K,V> SortedMap<K,V> asSortedMap(Consumer<Map<K,V>> mapper) { final SortedMap<K,V> map = new TreeMap<>(); mapper.accept(map); return map; } /** * Returns a new created Map initialized with whatever the consumer does * @param mapper the consumer that sets up mappings * @param <K> the key type * @param <V> the value type * @return the newly created map */ public static <K,V> Map<K,V> asOrderedMap(Consumer<Map<K,V>> mapper) { final Map<K,V> map = new LinkedHashMap<>(); mapper.accept(map); return map; } /** * Returns a new Iterable wrapper of the stream * @param stream the stream to wrap * @param <T> the entity type * @return the newly created iterable */ public static <T> Iterable<T> asIterable(Stream<T> stream) { return new Iterable<T>() { @Override public Iterator<T> iterator() { return stream.iterator(); } }; } /** * Returns a apply that reverses 
the input apply * @param map the apply reference to reverse * @param <K> the type for key * @param <V> the type for value * @return the reverse mapped */ public static <K,V> Map<V,K> reverse(Map<K,V> map) { if (map instanceof SortedMap) { final Map<V,K> result = new TreeMap<>(); map.forEach((key, value) -> result.put(value, key)); return result; } else if (map instanceof LinkedHashMap) { final Map<V,K> result = new LinkedHashMap<>(map.size()); map.forEach((key, value) -> result.put(value, key)); return result; } else { final Map<V,K> result = new HashMap<>(map.size()); map.forEach((key, value) -> result.put(value, key)); return result; } } }
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.uberfire.client.workbench.panels.impl; import com.google.gwt.event.shared.EventBus; import com.google.gwt.user.client.ui.HasWidgets; import org.jboss.errai.ioc.client.QualifierUtil; import org.jboss.errai.ioc.client.container.IOC; import org.jboss.errai.ioc.client.container.SyncBeanManagerImpl; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InOrder; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.invocation.InvocationOnMock; import org.mockito.runners.MockitoJUnitRunner; import org.mockito.stubbing.Answer; import org.uberfire.backend.vfs.ObservablePath; import org.uberfire.backend.vfs.Path; import org.uberfire.client.mvp.*; import org.uberfire.client.util.MockIOCBeanDef; import org.uberfire.client.workbench.LayoutSelection; import org.uberfire.client.workbench.PanelManager; import org.uberfire.client.workbench.WorkbenchLayout; import org.uberfire.client.workbench.events.*; import org.uberfire.mvp.Command; import org.uberfire.mvp.ParameterizedCommand; import org.uberfire.mvp.PlaceRequest; import org.uberfire.mvp.impl.ConditionalPlaceRequest; import org.uberfire.mvp.impl.DefaultPlaceRequest; import org.uberfire.mvp.impl.PathPlaceRequest; import org.uberfire.workbench.model.ActivityResourceType; import org.uberfire.workbench.model.PanelDefinition; import 
org.uberfire.workbench.model.PerspectiveDefinition; import org.uberfire.workbench.model.Position; import org.uberfire.workbench.model.impl.PanelDefinitionImpl; import org.uberfire.workbench.model.impl.PartDefinitionImpl; import org.uberfire.workbench.model.impl.PerspectiveDefinitionImpl; import org.uberfire.workbench.model.menu.Menus; import org.uberfire.workbench.type.ResourceTypeDefinition; import javax.enterprise.context.Dependent; import javax.enterprise.event.Event; import java.lang.annotation.Annotation; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import static java.util.Collections.singleton; import static org.junit.Assert.*; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Matchers.isNull; import static org.mockito.Matchers.refEq; import static org.mockito.Mockito.*; @RunWith( MockitoJUnitRunner.class ) public class PlaceManagerTest { @Mock Event<BeforeClosePlaceEvent> workbenchPartBeforeCloseEvent; @Mock Event<ClosePlaceEvent> workbenchPartCloseEvent; @Mock Event<PlaceLostFocusEvent> workbenchPartLostFocusEvent; @Mock Event<NewSplashScreenActiveEvent> newSplashScreenActiveEvent; @Mock ActivityManager activityManager; @Mock PlaceHistoryHandler placeHistoryHandler; @Mock Event<SelectPlaceEvent> selectWorkbenchPartEvent; @Mock PanelManager panelManager; @Mock PerspectiveManager perspectiveManager; @Mock WorkbenchLayout workbenchLayout; @Mock LayoutSelection layoutSelection; /** * This is the thing we're testing. Weeee! */ @InjectMocks PlaceManagerImpl placeManager; /** * Returned by the mock activityManager for the special "workbench.activity.notfound" place. */ private final Activity notFoundActivity = mock( Activity.class ); /** * The setup method makes this the current place. */ private final PlaceRequest kansas = new DefaultPlaceRequest( "kansas" ); /** * The setup method links this activity with the kansas PlaceRequest. 
*/ private final WorkbenchScreenActivity kansasActivity = mock( WorkbenchScreenActivity.class ); /** * This panel will always be returned from panelManager.getRoot(). */ private final PanelDefinition rootPanel = new PanelDefinitionImpl( MultiListWorkbenchPanelPresenter.class.getName() ); @Before public void setup() { ( ( SyncBeanManagerImpl ) IOC.getBeanManager() ).reset(); when( activityManager.getActivities( any( PlaceRequest.class ) ) ).thenReturn( singleton( notFoundActivity ) ); // for now (and this will have to change for UF-61), PathPlaceRequest performs an IOC lookup for ObservablePath in its constructor // as part of UF-61, we'll need to refactor ObservablePath and PathFactory so they ask for any beans they need as constructor params. final ObservablePath mockObservablePath = mock( ObservablePath.class ); when( mockObservablePath.wrap( any( Path.class ) ) ).thenReturn( mockObservablePath ); IOC.getBeanManager().registerBean( new MockIOCBeanDef<ObservablePath, ObservablePath>( mockObservablePath, ObservablePath.class, Dependent.class, new HashSet<Annotation>( Arrays.asList( QualifierUtil.DEFAULT_QUALIFIERS ) ), "ObservablePath", true ) ); // every test starts in Kansas, with no side effect interactions recorded when( activityManager.getActivities( kansas ) ).thenReturn( singleton( ( Activity ) kansasActivity ) ); setupPanelManagerMock(); when( kansasActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true ); when( kansasActivity.isDynamic() ).thenReturn( false ); placeManager.goTo( kansas, ( PanelDefinition ) null ); resetInjectedMocks(); reset( kansasActivity ); when( kansasActivity.onMayClose() ).thenReturn( true ); when( kansasActivity.preferredWidth() ).thenReturn( 123 ); when( kansasActivity.preferredHeight() ).thenReturn( 456 ); // arrange for the mock PerspectiveManager to invoke the doWhenFinished callbacks doAnswer( new Answer<Void>() { @SuppressWarnings( {"rawtypes", "unchecked"} ) @Override public Void answer( InvocationOnMock 
invocation ) throws Throwable { ParameterizedCommand callback = ( ParameterizedCommand ) invocation.getArguments()[2]; PerspectiveActivity perspectiveActivity = ( PerspectiveActivity ) invocation.getArguments()[1]; callback.execute( perspectiveActivity.getDefaultPerspectiveLayout() ); return null; } } ).when( perspectiveManager ).switchToPerspective( any( PlaceRequest.class ), any( PerspectiveActivity.class ), any( ParameterizedCommand.class ) ); doAnswer( new Answer<Void>() { @Override public Void answer( InvocationOnMock invocation ) throws Throwable { Command callback = ( Command ) invocation.getArguments()[0]; callback.execute(); return null; } } ).when( perspectiveManager ).savePerspectiveState( any( Command.class ) ); } /** * Resets all the mocks that were injected into the PlaceManager under test. This should probably only be used in * the setup method. */ @SuppressWarnings( "unchecked" ) private void resetInjectedMocks() { reset( workbenchPartBeforeCloseEvent ); reset( workbenchPartCloseEvent ); reset( workbenchPartLostFocusEvent ); reset( newSplashScreenActiveEvent ); reset( activityManager ); reset( placeHistoryHandler ); reset( selectWorkbenchPartEvent ); reset( panelManager ); reset( perspectiveManager ); reset( workbenchLayout ); setupPanelManagerMock(); } private void setupPanelManagerMock() { when( panelManager.getRoot() ).thenReturn( rootPanel ); when( panelManager.addWorkbenchPanel( any( PanelDefinition.class ), any( Position.class ), any( Integer.class ), any( Integer.class ), any( Integer.class ), any( Integer.class ) ) ) .thenAnswer( new Answer<PanelDefinition>() { @Override public PanelDefinition answer( InvocationOnMock invocation ) throws Throwable { return ( PanelDefinition ) invocation.getArguments()[0]; } } ); } @Test public void testPlaceManagerGetsInitializedToADefaultPlace() throws Exception { placeManager.initPlaceHistoryHandler(); verify( placeHistoryHandler ).register( any( PlaceManager.class ), any( EventBus.class ), any( 
PlaceRequest.class ) ); } @Test public void testGoToConditionalPlaceById() throws Exception { PlaceRequest dora = new ConditionalPlaceRequest( "dora" ).when( p -> true ) .orElse( new DefaultPlaceRequest( "other" ) ); WorkbenchScreenActivity doraActivity = mock( WorkbenchScreenActivity.class ); when( doraActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true ); when( activityManager.getActivities( dora ) ).thenReturn( singleton( ( Activity ) doraActivity ) ); placeManager.goTo( dora ); verifyActivityLaunchSideEffects( dora, doraActivity, null ); } @Test public void testGoToConditionalPlaceByIdOrElse() throws Exception { DefaultPlaceRequest other = new DefaultPlaceRequest( "other" ); PlaceRequest dora = new ConditionalPlaceRequest( "dora" ).when( p -> false ) .orElse( other ); WorkbenchScreenActivity doraActivity = mock( WorkbenchScreenActivity.class ); WorkbenchScreenActivity otherActivity = mock( WorkbenchScreenActivity.class ); when( doraActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true ); when( otherActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true ); when( activityManager.getActivities( dora ) ).thenReturn( singleton( ( Activity ) doraActivity ) ); when( activityManager.getActivities( other ) ).thenReturn( singleton( ( Activity ) otherActivity ) ); placeManager.goTo( dora ); verify( doraActivity, never() ).onOpen(); verify( otherActivity ).onOpen(); verifyActivityLaunchSideEffects( other, otherActivity, null ); } @Test public void testGoToNewPlaceById() throws Exception { PlaceRequest oz = new DefaultPlaceRequest( "oz" ); WorkbenchScreenActivity ozActivity = mock( WorkbenchScreenActivity.class ); when( ozActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true ); when( ozActivity.isDynamic() ).thenReturn( false ); when( ozActivity.preferredWidth() ).thenReturn( -1 ); when( ozActivity.preferredHeight() ).thenReturn( -1 ); when( activityManager.getActivities( oz ) ).thenReturn( 
singleton( ( Activity ) ozActivity ) ); placeManager.goTo( oz, ( PanelDefinition ) null ); verifyActivityLaunchSideEffects( oz, ozActivity, null ); } @Test public void testGoToPlaceWeAreAlreadyAt() throws Exception { when( kansasActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true ); placeManager.goTo( kansas, ( PanelDefinition ) null ); // note "refEq" tests equality field by field using reflection. don't read it as "reference equals!" :) verify( selectWorkbenchPartEvent ).fire( refEq( new SelectPlaceEvent( kansas ) ) ); verifyNoActivityLaunchSideEffects( kansas, kansasActivity ); } @Test public void testGoToNowhereDoesNothing() throws Exception { placeManager.goTo( PlaceRequest.NOWHERE, ( PanelDefinition ) null ); verifyNoActivityLaunchSideEffects( kansas, kansasActivity ); } // XXX would like to remove this behaviour (should throw NPE) but too many things are up in the air right now @Test public void testGoToNullDoesNothing() throws Exception { placeManager.goTo( ( PlaceRequest ) null, ( PanelDefinition ) null ); verifyNoActivityLaunchSideEffects( kansas, kansasActivity ); } @Test public void testGoToPlaceByPath() throws Exception { PathPlaceRequest yellowBrickRoad = new FakePathPlaceRequest( mock( ObservablePath.class ) ); WorkbenchScreenActivity ozActivity = mock( WorkbenchScreenActivity.class ); when( ozActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true ); when( activityManager.getActivities( yellowBrickRoad ) ).thenReturn( singleton( ( Activity ) ozActivity ) ); placeManager.goTo( yellowBrickRoad, ( PanelDefinition ) null ); verifyActivityLaunchSideEffects( yellowBrickRoad, ozActivity, null ); // special contract just for path-type place requests (subject to preference) verify( yellowBrickRoad.getPath(), never() ).onDelete( any( Command.class ) ); } class FakePathPlaceRequest extends PathPlaceRequest { final ObservablePath path; FakePathPlaceRequest( ObservablePath path ) { this.path = path; } @Override public 
// NOTE(review): this region arrived collapsed onto a few very long lines; the code
// below is token-for-token identical to the original, reformatted for readability
// with review comments added. No executable token was changed.

// Tail of the fake PathPlaceRequest test double (its declaration starts above this chunk).
ObservablePath getPath() {
    return path;
}

@Override
public int hashCode() {
    return 42; // constant hash is acceptable for a test fake; equality is not exercised here
}
}

// Closing an open screen whose activity agrees to close: both close events fire,
// the activity is closed and destroyed, and the part is removed from its panel.
@Test
public void testNormalCloseExistingScreenActivity() throws Exception {
    when( kansasActivity.onMayClose() ).thenReturn( true );
    when( kansasActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );

    placeManager.closePlace( kansas );

    verify( workbenchPartBeforeCloseEvent ).fire( refEq( new BeforeClosePlaceEvent( kansas, false, true ) ) );
    verify( workbenchPartCloseEvent ).fire( refEq( new ClosePlaceEvent( kansas ) ) );
    verify( kansasActivity ).onMayClose();
    verify( kansasActivity ).onClose();
    verify( kansasActivity, never() ).onShutdown(); // shutdown is ActivityManager's job, not PlaceManager's
    verify( activityManager ).destroyActivity( kansasActivity );
    verify( panelManager ).removePartForPlace( kansas );
    assertEquals( PlaceStatus.CLOSE, placeManager.getStatus( kansas ) );
    assertNull( placeManager.getActivity( kansas ) );
    assertFalse( placeManager.getActivePlaceRequests().contains( kansas ) );
}

// When the activity vetoes the close (onMayClose() == false), nothing else happens:
// no close event, no destruction, and the place stays OPEN and registered.
@Test
public void testCanceledCloseExistingScreenActivity() throws Exception {
    when( kansasActivity.onMayClose() ).thenReturn( false );
    when( kansasActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );

    placeManager.closePlace( kansas );

    verify( workbenchPartBeforeCloseEvent ).fire( refEq( new BeforeClosePlaceEvent( kansas, false, true ) ) );
    verify( workbenchPartCloseEvent, never() ).fire( refEq( new ClosePlaceEvent( kansas ) ) );
    verify( kansasActivity ).onMayClose();
    verify( kansasActivity, never() ).onClose();
    verify( kansasActivity, never() ).onShutdown();
    verify( activityManager, never() ).destroyActivity( kansasActivity );
    verify( panelManager, never() ).removePartForPlace( kansas );
    assertEquals( PlaceStatus.OPEN, placeManager.getStatus( kansas ) );
    assertSame( kansasActivity, placeManager.getActivity( kansas ) );
    assertTrue( placeManager.getActivePlaceRequests().contains( kansas ) );
}

// Force-close skips the onMayClose() veto entirely (note the BeforeClosePlaceEvent's
// "force" flag is true) but otherwise behaves like a normal close.
@Test
public void testForceCloseExistingScreenActivity() throws Exception {
    when( kansasActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );

    placeManager.forceClosePlace( kansas );

    verify( workbenchPartBeforeCloseEvent ).fire( refEq( new BeforeClosePlaceEvent( kansas, true, true ) ) );
    verify( workbenchPartCloseEvent ).fire( refEq( new ClosePlaceEvent( kansas ) ) );
    verify( kansasActivity, never() ).onMayClose(); // the whole point of forceClosePlace: no veto opportunity
    verify( kansasActivity ).onClose();
    verify( kansasActivity, never() ).onShutdown();
    verify( activityManager ).destroyActivity( kansasActivity );
    verify( panelManager ).removePartForPlace( kansas );
    assertEquals( PlaceStatus.CLOSE, placeManager.getStatus( kansas ) );
    assertNull( placeManager.getActivity( kansas ) );
    assertFalse( placeManager.getActivePlaceRequests().contains( kansas ) );
}

/**
 * Tests the basics of launching a perspective. We call it "empty" because this perspective doesn't have any panels
 * or parts in its definition.
 */
@Test
public void testLaunchingEmptyPerspective() throws Exception {
    PerspectiveActivity ozPerspectiveActivity = mock( PerspectiveActivity.class );
    PlaceRequest ozPerspectivePlace = new DefaultPlaceRequest( "oz_perspective" );
    PerspectiveDefinition ozPerspectiveDef = new PerspectiveDefinitionImpl();
    when( activityManager.getActivities( ozPerspectivePlace ) )
            .thenReturn( singleton( ( Activity ) ozPerspectiveActivity ) );
    when( ozPerspectiveActivity.getDefaultPerspectiveLayout() ).thenReturn( ozPerspectiveDef );
    when( ozPerspectiveActivity.getPlace() ).thenReturn( ozPerspectivePlace );
    when( ozPerspectiveActivity.isType( ActivityResourceType.PERSPECTIVE.name() ) ).thenReturn( true );

    placeManager.goTo( ozPerspectivePlace );

    // verify perspective changed to oz
    verify( perspectiveManager ).savePerspectiveState( any( Command.class ) );
    verify( perspectiveManager ).switchToPerspective( any( PlaceRequest.class ), eq( ozPerspectiveActivity ), any( ParameterizedCommand.class ) );
    verify( ozPerspectiveActivity ).onOpen();
    assertEquals( PlaceStatus.OPEN, placeManager.getStatus( ozPerspectivePlace ) );
    assertTrue( placeManager.getActivePlaceRequests().contains( ozPerspectivePlace ) );
    assertEquals( ozPerspectiveActivity, placeManager.getActivity( ozPerspectivePlace ) );
    verify( workbenchLayout ).onResize();
}

// Switching perspectives must tear down the old perspective's screens BEFORE the
// perspective itself, and resize the layout afterwards — verified via InOrder.
@Test
public void testSwitchingPerspectives() throws Exception {
    PerspectiveActivity ozPerspectiveActivity = mock( PerspectiveActivity.class );
    PlaceRequest ozPerspectivePlace = new DefaultPlaceRequest( "oz_perspective" );
    PerspectiveDefinition ozPerspectiveDef = new PerspectiveDefinitionImpl();
    when( activityManager.getActivities( ozPerspectivePlace ) )
            .thenReturn( singleton( ( Activity ) ozPerspectiveActivity ) );
    when( ozPerspectiveActivity.getDefaultPerspectiveLayout() ).thenReturn( ozPerspectiveDef );
    when( ozPerspectiveActivity.getPlace() ).thenReturn( ozPerspectivePlace );
    when( ozPerspectiveActivity.isType( ActivityResourceType.PERSPECTIVE.name() ) ).thenReturn( true );
    when( kansasActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );

    // we'll pretend we started in kansas
    PerspectiveActivity kansasPerspectiveActivity = mock( PerspectiveActivity.class );
    when( perspectiveManager.getCurrentPerspective() ).thenReturn( kansasPerspectiveActivity );

    placeManager.goTo( ozPerspectivePlace );

    // verify proper shutdown of kansasPerspective and its contents
    InOrder inOrder = inOrder( activityManager, kansasPerspectiveActivity, kansasActivity, workbenchLayout );

    // shut down the screens first
    inOrder.verify( kansasActivity ).onClose();
    inOrder.verify( activityManager ).destroyActivity( kansasActivity );

    // then the perspective
    inOrder.verify( kansasPerspectiveActivity ).onClose();
    inOrder.verify( activityManager ).destroyActivity( kansasPerspectiveActivity );
    inOrder.verify( workbenchLayout ).onResize();
}

// Navigating to the perspective we are already in must be a no-op: no save,
// no switch, no re-open.
@Test
public void testSwitchingFromPerspectiveToSelf() throws Exception {
    PerspectiveActivity ozPerspectiveActivity = mock( PerspectiveActivity.class );
    PlaceRequest ozPerspectivePlace = new DefaultPlaceRequest( "oz_perspective" );
    PerspectiveDefinition ozPerspectiveDef = new PerspectiveDefinitionImpl();
    when( activityManager.getActivities( ozPerspectivePlace ) )
            .thenReturn( singleton( ( Activity ) ozPerspectiveActivity ) );
    when( ozPerspectiveActivity.getDefaultPerspectiveLayout() ).thenReturn( ozPerspectiveDef );
    when( ozPerspectiveActivity.getPlace() ).thenReturn( ozPerspectivePlace );

    // we'll pretend we started in oz
    when( perspectiveManager.getCurrentPerspective() ).thenReturn( ozPerspectiveActivity );

    placeManager.goTo( ozPerspectivePlace );

    // verify no side effects (should stay put)
    verify( ozPerspectiveActivity, never() ).onOpen();
    verify( perspectiveManager, never() ).savePerspectiveState( any( Command.class ) );
    verify( perspectiveManager, never() )
            .switchToPerspective( any( PlaceRequest.class ), any( PerspectiveActivity.class ), any( ParameterizedCommand.class ) );
}

/**
 * This test verifies that when launching a screen which is "owned by" a perspective other than the current one, the
 * PlaceManager first switches to the owning perspective and then launches the requested screen.
 */
@Test
public void testLaunchingActivityTiedToDifferentPerspective() throws Exception {
    PerspectiveActivity ozPerspectiveActivity = mock( PerspectiveActivity.class );
    PlaceRequest ozPerspectivePlace = new DefaultPlaceRequest( "oz_perspective" );
    PerspectiveDefinition ozPerspectiveDef = new PerspectiveDefinitionImpl();
    when( activityManager.getActivities( ozPerspectivePlace ) )
            .thenReturn( singleton( ( Activity ) ozPerspectiveActivity ) );
    when( ozPerspectiveActivity.getDefaultPerspectiveLayout() ).thenReturn( ozPerspectiveDef );
    when( ozPerspectiveActivity.getPlace() ).thenReturn( ozPerspectivePlace );
    when( ozPerspectiveActivity.isType( ActivityResourceType.PERSPECTIVE.name() ) ).thenReturn( true );

    PlaceRequest emeraldCityPlace = new DefaultPlaceRequest( "emerald_city" );
    WorkbenchScreenActivity emeraldCityActivity = mock( WorkbenchScreenActivity.class );
    when( activityManager.getActivities( emeraldCityPlace ) )
            .thenReturn( singleton( ( Activity ) emeraldCityActivity ) );
    when( emeraldCityActivity.getOwningPlace() ).thenReturn( ozPerspectivePlace );
    when( emeraldCityActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );

    placeManager.goTo( emeraldCityPlace, ( PanelDefinition ) null );

    // verify perspective changed to oz
    verify( perspectiveManager ).savePerspectiveState( any( Command.class ) );
    verify( perspectiveManager ).switchToPerspective( any( PlaceRequest.class ), eq( ozPerspectiveActivity ), any( ParameterizedCommand.class ) );
    assertEquals( PlaceStatus.OPEN, placeManager.getStatus( ozPerspectivePlace ) );

    // verify perspective opened before the activity that launches inside it
    InOrder inOrder = inOrder( ozPerspectiveActivity, emeraldCityActivity );
    inOrder.verify( ozPerspectiveActivity ).onOpen();
    inOrder.verify( emeraldCityActivity ).onOpen();

    // and the workbench activity should have launched (after the perspective change)
    verifyActivityLaunchSideEffects( emeraldCityPlace, emeraldCityActivity, null );
}

// Launching a perspective that has a splash-screen interceptor: the splash screen
// opens (and its "active" event fires, in that order) alongside the perspective.
@Test
public void testPerspectiveLaunchWithSplashScreen() throws Exception {
    final PlaceRequest perspectivePlace = new DefaultPlaceRequest( "Somewhere" );
    final PerspectiveActivity perspectiveActivity = mock( PerspectiveActivity.class );
    final PerspectiveDefinition perspectiveDef = new PerspectiveDefinitionImpl( SimpleWorkbenchPanelPresenter.class.getName() );
    when( perspectiveActivity.getDefaultPerspectiveLayout() ).thenReturn( perspectiveDef );
    when( activityManager.getActivities( perspectivePlace ) )
            .thenReturn( singleton( ( Activity ) perspectiveActivity ) );
    final SplashScreenActivity splashScreenActivity = mock( SplashScreenActivity.class );
    when( activityManager.getSplashScreenInterceptor( perspectivePlace ) ).thenReturn( splashScreenActivity );
    when( perspectiveActivity.isType( ActivityResourceType.PERSPECTIVE.name() ) ).thenReturn( true );
    when( splashScreenActivity.isType( ActivityResourceType.SPLASH.name() ) ).thenReturn( true );

    placeManager.goTo( perspectivePlace );

    // splash screen should be open and registered as an active splash screen
    verify( splashScreenActivity, never() ).onStartup( any( PlaceRequest.class ) );
    InOrder inOrder = inOrder( splashScreenActivity, newSplashScreenActiveEvent );
    inOrder.verify( splashScreenActivity ).onOpen();
    inOrder.verify( newSplashScreenActiveEvent ).fire( any( NewSplashScreenActiveEvent.class ) );
    assertTrue( placeManager.getActiveSplashScreens().contains( splashScreenActivity ) );

    // perspective should be open, and should be the activity registered for its own place
    verify( perspectiveActivity, never() ).onStartup( any( PlaceRequest.class ) );
    verify( perspectiveActivity ).onOpen();
    assertEquals( PlaceStatus.OPEN, placeManager.getStatus( perspectivePlace ) );
    assertSame( perspectiveActivity, placeManager.getActivity( perspectivePlace ) );
}

// When switching away from a perspective, BOTH kinds of active splash screens
// (perspective-linked and screen-linked) must be closed and destroyed.
@Test
public void testProperSplashScreenShutdownOnPerspectiveSwitch() throws Exception {
    final PlaceRequest perspectivePlace = new DefaultPlaceRequest( "Somewhere" );
    final PerspectiveActivity perspectiveActivity = mock( PerspectiveActivity.class );
    final PerspectiveDefinition perspectiveDef = new PerspectiveDefinitionImpl( SimpleWorkbenchPanelPresenter.class.getName() );
    when( perspectiveActivity.getDefaultPerspectiveLayout() ).thenReturn( perspectiveDef );
    when( perspectiveActivity.isType( ActivityResourceType.PERSPECTIVE.name() ) ).thenReturn( true );
    when( activityManager.getActivities( perspectivePlace ) )
            .thenReturn( singleton( ( Activity ) perspectiveActivity ) );

    // first splash screen: linked to the perspective itself
    final SplashScreenActivity splashScreenActivity1 = mock( SplashScreenActivity.class );
    when( activityManager.getSplashScreenInterceptor( perspectivePlace ) ).thenReturn( splashScreenActivity1 );
    when( splashScreenActivity1.isType( ActivityResourceType.SPLASH.name() ) ).thenReturn( true );

    // second splash screen: linked to a screen that we will display in the perspective
    final SplashScreenActivity splashScreenActivity2 = mock( SplashScreenActivity.class );
    when( activityManager.getSplashScreenInterceptor( kansas ) ).thenReturn( splashScreenActivity2 );
    when( activityManager.getActivities( kansas ) ).thenReturn( singleton( ( Activity ) kansasActivity ) );
    when( splashScreenActivity2.isType( ActivityResourceType.SPLASH.name() ) ).thenReturn( true );
    when( kansasActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );

    placeManager.goTo( perspectivePlace );
    placeManager.goTo( kansas );
    assertTrue( placeManager.getActiveSplashScreens().contains( splashScreenActivity1 ) );
    assertTrue( placeManager.getActiveSplashScreens().contains( splashScreenActivity2 ) );

    // now switch to another perspective and ensure both kinds of splash screens got closed
    final PlaceRequest otherPerspectivePlace = new DefaultPlaceRequest( "Elsewhere" );
    final PerspectiveActivity otherPerspectiveActivity = mock( PerspectiveActivity.class );
    final PerspectiveDefinition otherPerspectiveDef = new PerspectiveDefinitionImpl( SimpleWorkbenchPanelPresenter.class.getName() );
    when( otherPerspectiveActivity.getDefaultPerspectiveLayout() ).thenReturn( otherPerspectiveDef );
    when( otherPerspectiveActivity.isType( ActivityResourceType.PERSPECTIVE.name() ) ).thenReturn( true );
    when( activityManager.getActivities( otherPerspectivePlace ) )
            .thenReturn( singleton( ( Activity ) otherPerspectiveActivity ) );

    placeManager.goTo( otherPerspectivePlace );

    assertTrue( placeManager.getActiveSplashScreens().isEmpty() );
    verify( splashScreenActivity1 ).closeIfOpen();
    verify( splashScreenActivity2 ).closeIfOpen();

    // splash screens are Application Scoped, but we still "destroy" them (activity manager will call their onShutdown)
    verify( activityManager ).destroyActivity( splashScreenActivity1 );
    verify( activityManager ).destroyActivity( splashScreenActivity2 );
}

// Launching a screen (part) that has a splash-screen interceptor: the splash
// screen opens and its "active" event fires, in that order.
@Test
public void testPartLaunchWithSplashScreen() throws Exception {
    PlaceRequest oz = new DefaultPlaceRequest( "oz" );
    WorkbenchScreenActivity ozActivity = mock( WorkbenchScreenActivity.class );
    when( activityManager.getActivities( oz ) ).thenReturn( singleton( ( Activity ) ozActivity ) );
    when( ozActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );
    final SplashScreenActivity lollipopGuildActivity = mock( SplashScreenActivity.class );
    when( activityManager.getSplashScreenInterceptor( oz ) ).thenReturn( lollipopGuildActivity );
    when( lollipopGuildActivity.isType( ActivityResourceType.SPLASH.name() ) ).thenReturn( true );

    placeManager.goTo( oz, ( PanelDefinition ) null );

    assertTrue( placeManager.getActiveSplashScreens().contains( lollipopGuildActivity ) );
    verify( lollipopGuildActivity, never() ).onStartup( any( PlaceRequest.class ) );
    InOrder inOrder = inOrder( lollipopGuildActivity, newSplashScreenActiveEvent );
    inOrder.verify( lollipopGuildActivity ).onOpen();
    inOrder.verify( newSplashScreenActiveEvent ).fire( any( NewSplashScreenActiveEvent.class ) );
}

// Closing a part must also close and destroy its associated splash screen.
@Test
public void testProperSplashScreenShutdownOnPartClose() throws Exception {
    PlaceRequest oz = new DefaultPlaceRequest( "oz" );
    WorkbenchScreenActivity ozActivity = mock( WorkbenchScreenActivity.class );
    when( activityManager.getActivities( oz ) ).thenReturn( singleton( ( Activity ) ozActivity ) );
    final SplashScreenActivity lollipopGuildActivity = mock( SplashScreenActivity.class );
    when( lollipopGuildActivity.isType( ActivityResourceType.SPLASH.name() ) ).thenReturn( true );
    when( activityManager.getSplashScreenInterceptor( oz ) ).thenReturn( lollipopGuildActivity );
    when( ozActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );
    placeManager.goTo( oz, ( PanelDefinition ) null );

    placeManager.closePlace( oz );

    assertTrue( placeManager.getActiveSplashScreens().isEmpty() );
    verify( lollipopGuildActivity ).closeIfOpen();

    // splash screens are Application Scoped, but we still "destroy" them (activity manager will call their onShutdown)
    verify( activityManager ).destroyActivity( lollipopGuildActivity );
}

/**
 * Ensures that splash screens can't be launched on their own (they should only launch as a side effect of launching
 * a place that they intercept). This test came from the original test suite, and may not be all that relevant
 * anymore: it assumes that the ActivityManager might resolve a PlaceRequest to a SplashScreenActivity, and this is
 * currently not in the ActivityManager contract.
 */
@Test
public void testSplashScreenActivityShouldNotLaunchOnItsOwn() throws Exception {
    final PlaceRequest somewhere = new DefaultPlaceRequest( "Somewhere" );
    final SplashScreenActivity splashScreenActivity = mock( SplashScreenActivity.class );
    when( activityManager.getActivities( somewhere ) ).thenReturn( singleton( ( Activity ) splashScreenActivity ) );

    placeManager.goTo( somewhere );

    verify( splashScreenActivity, never() ).onStartup( eq( somewhere ) );
    verify( splashScreenActivity, never() ).onOpen();
    verify( newSplashScreenActiveEvent, never() ).fire( any( NewSplashScreenActiveEvent.class ) );
    assertFalse( placeManager.getActiveSplashScreens().contains( splashScreenActivity ) );
}

/**
 * Ensures that context activities can't be launched on their own (they should only launch as a side effect of launching
 * a place that they relate to). This test was moved here from the original test suite.
 */
@Test
public void testContextActivityShouldNotLaunchOnItsOwn() throws Exception {
    final PlaceRequest somewhere = new DefaultPlaceRequest( "Somewhere" );
    final ContextActivity activity = mock( ContextActivity.class );
    when( activityManager.getActivities( somewhere ) ).thenReturn( singleton( ( Activity ) activity ) );

    placeManager.goTo( somewhere );

    verify( activity, never() ).onStartup( eq( somewhere ) );
    verify( activity, never() ).onOpen();
}

// A popup launch opens the popup exactly once, records it in history, and marks
// the place OPEN (onStartup is never PlaceManager's responsibility).
@Test
public void testLaunchingPopup() throws Exception {
    final PlaceRequest popupPlace = new DefaultPlaceRequest( "Somewhere" );
    final AbstractPopupActivity popupActivity = mock( AbstractPopupActivity.class );
    when( activityManager.getActivities( popupPlace ) ).thenReturn( singleton( ( Activity ) popupActivity ) );
    when( popupActivity.isType( ActivityResourceType.POPUP.name() ) ).thenReturn( true );

    placeManager.goTo( popupPlace );

    verify( popupActivity, never() ).onStartup( any( PlaceRequest.class ) );
    verify( popupActivity, times( 1 ) ).onOpen();
    verify( placeHistoryHandler, times( 1 ) ).onPlaceChange( popupPlace );
    assertEquals( PlaceStatus.OPEN, placeManager.getStatus( popupPlace ) );

    // TODO this test was moved here from the old test suite. it may not verify all required side effects of launching a popup.
}

// Re-navigating to an already-open popup is a no-op: onOpen and the history
// change still happened only once.
@Test
public void testLaunchingPopupThatIsAlreadyOpen() throws Exception {
    final PlaceRequest popupPlace = new DefaultPlaceRequest( "Somewhere" );
    final AbstractPopupActivity popupActivity = mock( AbstractPopupActivity.class );
    when( activityManager.getActivities( popupPlace ) ).thenReturn( singleton( ( Activity ) popupActivity ) );
    when( popupActivity.isType( ActivityResourceType.POPUP.name() ) ).thenReturn( true );

    placeManager.goTo( popupPlace );
    placeManager.goTo( popupPlace );

    verify( popupActivity, never() ).onStartup( any( PlaceRequest.class ) );
    verify( popupActivity, times( 1 ) ).onOpen();
    verify( placeHistoryHandler, times( 1 ) ).onPlaceChange( popupPlace );
    assertEquals( PlaceStatus.OPEN, placeManager.getStatus( popupPlace ) );
}

// Open -> close -> open again: the popup sees two onOpen calls, one onClose,
// and ends up OPEN.
@Test
public void testReLaunchingClosedPopup() throws Exception {
    final PlaceRequest popupPlace = new DefaultPlaceRequest( "Somewhere" );
    final AbstractPopupActivity popupActivity = mock( AbstractPopupActivity.class );
    when( popupActivity.onMayClose() ).thenReturn( true );
    when( popupActivity.isType( ActivityResourceType.POPUP.name() ) ).thenReturn( true );
    when( activityManager.getActivities( popupPlace ) ).thenReturn( singleton( ( Activity ) popupActivity ) );

    placeManager.goTo( popupPlace );
    placeManager.closePlace( popupPlace );
    placeManager.goTo( popupPlace );

    verify( popupActivity, times( 2 ) ).onOpen();
    verify( popupActivity, times( 1 ) ).onClose();
    assertEquals( PlaceStatus.OPEN, placeManager.getStatus( popupPlace ) );
}

// A popup may veto its own close via onMayClose() == false; it then stays OPEN.
@Test
public void testPopupCancelsClose() throws Exception {
    final PlaceRequest popupPlace = new DefaultPlaceRequest( "Somewhere" );
    final AbstractPopupActivity popupActivity = mock( AbstractPopupActivity.class );
    when( popupActivity.onMayClose() ).thenReturn( false );
    when( popupActivity.isType( ActivityResourceType.POPUP.name() ) ).thenReturn( true );
    when( activityManager.getActivities( popupPlace ) ).thenReturn( singleton( ( Activity ) popupActivity ) );

    placeManager.goTo( popupPlace );
    placeManager.closePlace( popupPlace );

    verify( popupActivity, never() ).onClose();
    assertEquals( PlaceStatus.OPEN, placeManager.getStatus( popupPlace ) );
}

// Launching into a caller-supplied widget container creates an unanchored custom
// panel (no parent) and adds the part to it with the activity's preferred size.
@Test
public void testLaunchActivityInCustomPanel() throws Exception {
    PanelDefinition customPanelDef = new PanelDefinitionImpl( UnanchoredStaticWorkbenchPanelPresenter.class.getName() );
    when( panelManager.addCustomPanel( any( HasWidgets.class ), eq( UnanchoredStaticWorkbenchPanelPresenter.class.getName() ) ) )
            .thenReturn( customPanelDef );

    PlaceRequest emeraldCityPlace = new DefaultPlaceRequest( "emerald_city" );
    WorkbenchScreenActivity emeraldCityActivity = mock( WorkbenchScreenActivity.class );
    when( emeraldCityActivity.preferredWidth() ).thenReturn( 555 );
    when( emeraldCityActivity.preferredHeight() ).thenReturn( -1 );
    when( activityManager.getActivities( emeraldCityPlace ) )
            .thenReturn( singleton( ( Activity ) emeraldCityActivity ) );
    when( emeraldCityActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );
    HasWidgets customContainer = mock( HasWidgets.class );

    placeManager.goTo( emeraldCityPlace, customContainer );

    verifyActivityLaunchSideEffects( emeraldCityPlace, emeraldCityActivity, customPanelDef );
    verify( panelManager ).addWorkbenchPart( eq( emeraldCityPlace ),
                                             eq( new PartDefinitionImpl( emeraldCityPlace ) ),
                                             eq( customPanelDef ),
                                             isNull( Menus.class ),
                                             any( UIPart.class ),
                                             isNull( String.class ),
                                             isNull( Integer.class ),
                                             isNull( Integer.class ) );
    assertNull( customPanelDef.getParent() );
}

// Going to an already-open place with a custom container does NOT create a new
// panel; the existing part is simply selected.
@Test
public void testLaunchExistingActivityInCustomPanel() throws Exception {
    HasWidgets customContainer = mock( HasWidgets.class );
    when( kansasActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );

    placeManager.goTo( kansas, customContainer );

    verify( panelManager, never() ) .addCustomPanel( customContainer, StaticWorkbenchPanelPresenter.class.getName() );
    verifyNoActivityLaunchSideEffects( kansas, kansasActivity );
    verify( selectWorkbenchPartEvent ).fire( refEq( new SelectPlaceEvent( kansas ) ) );
}

// Closing a place hosted in a custom panel removes the part from the panel and
// removes the panel itself from the workbench.
@Test
public void testClosingActivityInCustomPanel() throws Exception {
    PanelDefinition customPanelDef = new PanelDefinitionImpl( UnanchoredStaticWorkbenchPanelPresenter.class.getName() );
    when( panelManager.addCustomPanel( any( HasWidgets.class ), eq( UnanchoredStaticWorkbenchPanelPresenter.class.getName() ) ) )
            .thenReturn( customPanelDef );

    PlaceRequest emeraldCityPlace = new DefaultPlaceRequest( "emerald_city" );
    WorkbenchScreenActivity emeraldCityActivity = mock( WorkbenchScreenActivity.class );
    when( emeraldCityActivity.onMayClose() ).thenReturn( true );
    when( emeraldCityActivity.preferredWidth() ).thenReturn( 555 );
    when( emeraldCityActivity.preferredHeight() ).thenReturn( -1 );
    when( emeraldCityActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );
    when( activityManager.getActivities( emeraldCityPlace ) )
            .thenReturn( singleton( ( Activity ) emeraldCityActivity ) );
    HasWidgets customContainer = mock( HasWidgets.class );

    placeManager.goTo( emeraldCityPlace, customContainer );
    placeManager.closePlace( emeraldCityPlace );

    assertTrue( customPanelDef.getParts().isEmpty() );
    verify( panelManager ).removeWorkbenchPanel( customPanelDef );
}

// closeAllPlaces() must also sweep places that live in custom panels.
@Test
public void testClosingAllPlacesIncludesCustomPanels() throws Exception {
    PanelDefinition customPanelDef = new PanelDefinitionImpl( UnanchoredStaticWorkbenchPanelPresenter.class.getName() );
    when( panelManager.addCustomPanel( any( HasWidgets.class ), eq( UnanchoredStaticWorkbenchPanelPresenter.class.getName() ) ) )
            .thenReturn( customPanelDef );

    PlaceRequest emeraldCityPlace = new DefaultPlaceRequest( "emerald_city" );
    WorkbenchScreenActivity emeraldCityActivity = mock( WorkbenchScreenActivity.class );
    when( emeraldCityActivity.onMayClose() ).thenReturn( true );
    when( emeraldCityActivity.preferredWidth() ).thenReturn( 555 );
    when( emeraldCityActivity.preferredHeight() ).thenReturn( -1 );
    when( emeraldCityActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );
    when( activityManager.getActivities( emeraldCityPlace ) )
            .thenReturn( singleton( ( Activity ) emeraldCityActivity ) );
    HasWidgets customContainer = mock( HasWidgets.class );

    placeManager.goTo( emeraldCityPlace, customContainer );
    placeManager.closeAllPlaces();

    assertTrue( customPanelDef.getParts().isEmpty() );
    verify( panelManager ).removeWorkbenchPanel( customPanelDef );
}

// A resource type that accepts no open path yields an empty result.
@Test
public void testGetActivitiesForResourceType_NoMatches() throws Exception {
    final ObservablePath path = mock( ObservablePath.class );
    final PathPlaceRequest yellowBrickRoad = new FakePathPlaceRequest( path );
    final WorkbenchScreenActivity ozActivity = mock( WorkbenchScreenActivity.class );
    when( activityManager.getActivities( yellowBrickRoad ) ).thenReturn( singleton( ( Activity ) ozActivity ) );
    when( ozActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );

    placeManager.goTo( yellowBrickRoad );

    verifyActivityLaunchSideEffects( yellowBrickRoad, ozActivity, null );

    final ResourceTypeDefinition resourceType = mock( ResourceTypeDefinition.class );
    when( resourceType.accept( path ) ).thenReturn( false );

    final Collection<PathPlaceRequest> resolvedActivities = placeManager .getActivitiesForResourceType( resourceType );
    assertNotNull( resolvedActivities );
    assertEquals( 0, resolvedActivities.size() );
}

// A matching resource type yields the open PathPlaceRequest, and the returned
// collection must be unmodifiable.
@Test
public void testGetActivitiesForResourceType_Matches() throws Exception {
    final ObservablePath path = mock( ObservablePath.class );
    final PathPlaceRequest yellowBrickRoad = new FakePathPlaceRequest( path );
    final WorkbenchScreenActivity ozActivity = mock( WorkbenchScreenActivity.class );
    when( activityManager.getActivities( yellowBrickRoad ) ).thenReturn( singleton( ( Activity ) ozActivity ) );
    when( ozActivity.isType( ActivityResourceType.SCREEN.name() ) ).thenReturn( true );

    placeManager.goTo( yellowBrickRoad );

    verifyActivityLaunchSideEffects( yellowBrickRoad, ozActivity, null );

    final ResourceTypeDefinition resourceType = mock( ResourceTypeDefinition.class );
    when( resourceType.accept( path ) ).thenReturn( true );

    final Collection<PathPlaceRequest> resolvedActivities = placeManager .getActivitiesForResourceType( resourceType );
    assertNotNull( resolvedActivities );
    assertEquals( 1, resolvedActivities.size() );

    try {
        resolvedActivities.clear();
        fail( "PlaceManager.getActivitiesForResourceType() should return an unmodifiable collection." );
    } catch ( UnsupportedOperationException uoe ) {
        //This is correct. The result should be an unmodifiable collection
    }
}

// TODO test going to an unresolvable/unknown place
// TODO test going to a place with a specific target panel (part of the PerspectiveManager/PlaceManager contract)
// TODO test closing all panels when there are a variety of different types of panels open
// TODO compare/contrast closeAllPlaces with closeAllCurrentPanels (former is public API; latter is called before launching a new perspective)

/**
 * Verifies that all the expected side effects of a screen or editor activity launch have happened.
 *
 * @param placeRequest The place request that was passed to some variant of PlaceManager.goTo().
 * @param activity <b>A Mockito mock</b> of the activity that was resolved for <tt>placeRequest</tt>.
 */
private void verifyActivityLaunchSideEffects( PlaceRequest placeRequest, WorkbenchActivity activity, PanelDefinition expectedPanel ) {

    // as of UberFire 0.4. this event only happens if the place is already visible.
    // it might be better if the event was fired unconditionally. needs investigation.
    verify( selectWorkbenchPartEvent, never() ).fire( any( SelectPlaceEvent.class ) );

    // we know the activity was created (or we wouldn't be here), but should verify that only happened one time
    verify( activityManager, times( 1 ) ).getActivities( placeRequest );

    // contract between PlaceManager and PanelManager
    Integer preferredWidth = activity.preferredWidth();
    Integer preferredHeight = activity.preferredHeight();
    Integer expectedPartWidth;
    Integer expectedPartHeight;
    if ( expectedPanel == null ) {
        // no explicit panel: the part lands in a panel added under the root, sized by the activity's preferences
        PanelDefinition rootPanel = panelManager.getRoot();
        verify( panelManager ).addWorkbenchPanel( rootPanel, null, preferredHeight, preferredWidth, null, null );
        expectedPartWidth = null;
        expectedPartHeight = null;
    } else {
        expectedPartWidth = expectedPanel.getWidth();
        expectedPartHeight = expectedPanel.getHeight();
    }
    verify( panelManager ).addWorkbenchPart( eq( placeRequest ),
                                             eq( new PartDefinitionImpl( placeRequest ) ),
                                             expectedPanel == null ? any( PanelDefinition.class ) : eq( expectedPanel ),
                                             isNull( Menus.class ),
                                             any( UIPart.class ),
                                             isNull( String.class ),
                                             eq( expectedPartWidth ),
                                             eq( expectedPartHeight ) );

    // contract between PlaceManager and PlaceHistoryHandler
    verify( placeHistoryHandler ).onPlaceChange( placeRequest );

    // state changes in PlaceManager itself (contract between PlaceManager and everyone)
    assertTrue( "Actual place requests: " + placeManager.getActivePlaceRequests(),
                placeManager.getActivePlaceRequests().contains( placeRequest ) );
    assertSame( activity, placeManager.getActivity( placeRequest ) );
    assertEquals( PlaceStatus.OPEN, placeManager.getStatus( placeRequest ) );

    // contract between PlaceManager and Activity
    verify( activity, never() ).onStartup( any( PlaceRequest.class ) ); // this is ActivityManager's job
    verify( activity, times( 1 ) ).onOpen();
}

/**
 * Verifies that the "place change" side effects have not happened, and that the given activity is still current.
 *
 * @param expectedCurrentPlace The place request that placeManager should still consider "current."
 * @param activity <b>A Mockito mock</b> of the activity tied to <tt>expectedCurrentPlace</tt>.
 */
private void verifyNoActivityLaunchSideEffects( PlaceRequest expectedCurrentPlace, WorkbenchScreenActivity activity ) {

    // contract between PlaceManager and PanelManager
    verify( panelManager, never() ).addWorkbenchPanel( eq( panelManager.getRoot() ),
                                                       any( Position.class ),
                                                       any( Integer.class ),
                                                       any( Integer.class ),
                                                       any( Integer.class ),
                                                       any( Integer.class ) );
    verify( panelManager, never() ).addWorkbenchPanel( eq( panelManager.getRoot() ),
                                                       any( PanelDefinition.class ),
                                                       any( Position.class ) );

    // contract between PlaceManager and PlaceHistoryHandler
    verify( placeHistoryHandler, never() ).onPlaceChange( any( PlaceRequest.class ) );

    // state changes in PlaceManager itself (contract between PlaceManager and everyone)
    assertTrue( "Actual place requests: " + placeManager.getActivePlaceRequests(),
                placeManager.getActivePlaceRequests().contains( expectedCurrentPlace ) );
    assertSame( activity, placeManager.getActivity( expectedCurrentPlace ) );
    assertEquals( PlaceStatus.OPEN, placeManager.getStatus( expectedCurrentPlace ) );

    // contract between PlaceManager and Activity
    verify( activity, never() ).onStartup( any( PlaceRequest.class ) );
    verify( activity, never() ).onOpen();
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.orm.entities; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import javax.persistence.Basic; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.JoinColumns; import javax.persistence.ManyToOne; import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; import javax.persistence.OneToMany; import javax.persistence.OneToOne; import javax.persistence.Table; import javax.persistence.TableGenerator; import javax.persistence.Transient; import javax.persistence.UniqueConstraint; import org.apache.ambari.server.controller.spi.Resource; import org.apache.ambari.server.security.SecurityHelper; import org.apache.ambari.server.security.SecurityHelperImpl; import org.apache.ambari.server.security.authorization.AmbariAuthorizationFilter; import org.apache.ambari.server.view.ViewContextImpl; 
import org.apache.ambari.server.view.ViewRegistry; import org.apache.ambari.server.view.configuration.InstanceConfig; import org.apache.ambari.server.view.validation.InstanceValidationResultImpl; import org.apache.ambari.server.view.validation.ValidationException; import org.apache.ambari.server.view.validation.ValidationResultImpl; import org.apache.ambari.view.ClusterType; import org.apache.ambari.view.ResourceProvider; import org.apache.ambari.view.ViewContext; import org.apache.ambari.view.ViewDefinition; import org.apache.ambari.view.ViewInstanceDefinition; import org.apache.ambari.view.migration.ViewDataMigrationContext; import org.apache.ambari.view.migration.ViewDataMigrator; import org.apache.ambari.view.validation.ValidationResult; import org.apache.ambari.view.validation.Validator; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Injector; /** * Represents an instance of a View. */ @Table(name = "viewinstance", uniqueConstraints = @UniqueConstraint( name = "UQ_viewinstance_name", columnNames = {"view_name", "name"} ) ) @NamedQueries({ @NamedQuery( name = "allViewInstances", query = "SELECT viewInstance FROM ViewInstanceEntity viewInstance"), @NamedQuery( name = "viewInstanceByResourceId", query = "SELECT viewInstance FROM ViewInstanceEntity viewInstance " + "WHERE viewInstance.resource.id=:resourceId"), @NamedQuery( name = "getResourceIdByViewInstance", query = "SELECT viewInstance.resource FROM ViewInstanceEntity viewInstance " + "WHERE viewInstance.viewName = :viewName AND viewInstance.name = :instanceName"), }) @TableGenerator(name = "view_instance_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value" , pkColumnValue = "view_instance_id_seq" , initialValue = 1 ) @Entity public class ViewInstanceEntity implements ViewInstanceDefinition { @Id @Column(name = "view_instance_id", nullable = false) @GeneratedValue(strategy = GenerationType.TABLE, 
generator = "view_instance_id_generator") private Long viewInstanceId; @Column(name = "view_name", nullable = false, insertable = false, updatable = false) private String viewName; /** * The instance name. */ @Column(name = "name", nullable = false, insertable = true, updatable = false) private String name; /** * The public view instance name. */ @Column @Basic private String label; /** * The description. */ @Column @Basic private String description; /** * The associated cluster handle. */ @Column(name = "cluster_handle", nullable = true) private Long clusterHandle; /** * Cluster Type for cluster Handle */ @Enumerated(value = EnumType.STRING) @Column(name = "cluster_type", nullable = false, length = 100) private ClusterType clusterType = ClusterType.LOCAL_AMBARI; /** * Visible flag. */ @Column @Basic private char visible; /** * The icon path. */ @Column @Basic private String icon; @OneToOne(cascade = CascadeType.ALL) @JoinColumns({ @JoinColumn(name = "short_url", referencedColumnName = "url_id", nullable = true) }) private ViewURLEntity viewUrl; /** * The big icon path. */ @Column @Basic private String icon64; /** * The XML driven instance flag. */ @Column(name="xml_driven") @Basic private char xmlDriven = 'N'; /** * Indicates whether or not to alter the names of the data store entities to * avoid db reserved word conflicts. */ @Column(name = "alter_names", nullable = false) @Basic private Integer alterNames; /** * The instance properties. */ @OneToMany(cascade = CascadeType.ALL, mappedBy = "viewInstance") private Collection<ViewInstancePropertyEntity> properties = new HashSet<>(); /** * The instance data. */ @OneToMany(cascade = CascadeType.ALL, mappedBy = "viewInstance") private Collection<ViewInstanceDataEntity> data = new HashSet<>(); /** * The list of view entities. 
*/ @OneToMany(cascade = CascadeType.ALL, mappedBy = "viewInstance") private Collection<ViewEntityEntity> entities = new HashSet<>(); @ManyToOne @JoinColumn(name = "view_name", referencedColumnName = "view_name", nullable = false) private ViewEntity view; @OneToOne(cascade = CascadeType.ALL) @JoinColumns({ @JoinColumn(name = "resource_id", referencedColumnName = "resource_id", nullable = false) }) private ResourceEntity resource; // ----- transient data ---------------------------------------------------- /** * The associated configuration. This will be null if the instance was not * defined in the archive. */ @Transient private final InstanceConfig instanceConfig; /** * The mapping of resource type to resource provider. Calculated when the * instance is added. */ @Transient private final Map<Resource.Type, ResourceProvider> resourceProviders = new HashMap<>(); /** * The mapping of the resource plural name to service. Calculated when the * instance is added. */ @Transient private final Map<String, Object> services = new HashMap<>(); /** * Helper class. */ // TODO : we should @Inject this. @Transient private SecurityHelper securityHelper = SecurityHelperImpl.getInstance(); /** * The view data migrator. */ @Transient private ViewDataMigrator dataMigrator; // ----- Constructors ------------------------------------------------------ public ViewInstanceEntity() { instanceConfig = null; alterNames = 1; } /** * Construct a view instance definition. * * @param view the parent view definition * @param instanceConfig the associated configuration */ public ViewInstanceEntity(ViewEntity view, InstanceConfig instanceConfig) { name = instanceConfig.getName(); this.instanceConfig = instanceConfig; this.view = view; viewName = view.getName(); description = instanceConfig.getDescription(); clusterHandle = null; visible = instanceConfig.isVisible() ? 
'Y' : 'N'; alterNames = 1; clusterType = ClusterType.LOCAL_AMBARI; String label = instanceConfig.getLabel(); this.label = (label == null || label.length() == 0) ? view.getLabel() : label; String icon = instanceConfig.getIcon(); this.icon = (icon == null || icon.length() == 0) ? view.getIcon() : icon; String icon64 = instanceConfig.getIcon64(); this.icon64 = (icon64 == null || icon64.length() == 0) ? view.getIcon64() : icon64; } /** * Construct a view instance definition. * * @param view the parent view definition * @param name the instance name */ public ViewInstanceEntity(ViewEntity view, String name) { this(view, name, view.getLabel()); } /** * Construct a view instance definition. * * @param view the parent view definition * @param name the instance name * @param label the instance label */ public ViewInstanceEntity(ViewEntity view, String name, String label) { this.name = name; instanceConfig = null; this.view = view; viewName = view.getName(); description = null; clusterHandle = null; visible = 'Y'; alterNames = 1; this.label = label; } // ----- ViewInstanceDefinition -------------------------------------------- @Override public String getInstanceName() { return name; } @Override public String getViewName() { return viewName; } @Override public Map<String, String> getPropertyMap() { Map<String, String> propertyMap = new HashMap<>(); for (ViewInstancePropertyEntity viewInstancePropertyEntity : getProperties()) { propertyMap.put(viewInstancePropertyEntity.getName(), viewInstancePropertyEntity.getValue()); } for (ViewParameterEntity viewParameterEntity : view.getParameters()) { String parameterName = viewParameterEntity.getName(); if (!propertyMap.containsKey(parameterName)) { propertyMap.put(parameterName, viewParameterEntity.getDefaultValue()); } } return propertyMap; } @Override public Map<String, String> getInstanceDataMap() { Map<String, String> applicationData = new HashMap<>(); String user = getCurrentUserName(); for (ViewInstanceDataEntity 
viewInstanceDataEntity : data) { if (viewInstanceDataEntity.getUser().equals(user)) { applicationData.put(viewInstanceDataEntity.getName(), viewInstanceDataEntity.getValue()); } } return applicationData; } @Override public ViewDefinition getViewDefinition() { return view; } @Override public String getLabel() { return label; } @Override public String getDescription() { return description; } @Override public Long getClusterHandle() { return clusterHandle; } @Override public boolean isVisible() { return visible == 'y' || visible == 'Y'; } // ----- ViewInstanceEntity ------------------------------------------------ /** * Get the view instance id. * * @return the instance id */ public Long getViewInstanceId() { return viewInstanceId; } /** * Set the given view instance id. * * @param viewInstanceId the instance id */ public void setViewInstanceId(Long viewInstanceId) { this.viewInstanceId = viewInstanceId; } /** * Set the view name. * * @param viewName the view name */ public void setViewName(String viewName) { this.viewName = viewName; } /** * Get the name of this instance. * * @return the instance name */ public String getName() { return name; } /** * Set the name of this instance. * * @param name the instance name */ public void setName(String name) { this.name = name; } /** * Set the label. * * @param label the label */ public void setLabel(String label) { this.label = label; } /** * Set the description. * * @param description the description */ public void setDescription(String description) { this.description = description; } /** * Set a cluster association for this view instance with the Ambari cluster * identified by the given cluster handle. For a local cluster reference, * the cluster handle is simply the unique cluster id. 
 * @param clusterHandle the cluster identifier
 */
public void setClusterHandle(Long clusterHandle) {
  this.clusterHandle = clusterHandle;
}

/**
 * Get the type of cluster the view instance is attached to.
 *
 * @return the type of cluster for the cluster handle
 */
@Override
public ClusterType getClusterType() {
  return clusterType;
}

/**
 * Set the type of cluster for the cluster handle.
 *
 * @param clusterType the cluster type
 */
public void setClusterType(ClusterType clusterType) {
  this.clusterType = clusterType;
}

/**
 * Set the visible flag.
 *
 * @param visible visible flag
 */
public void setVisible(boolean visible) {
  // persisted as a single char column: 'Y' for visible, 'N' for hidden
  this.visible = (visible ? 'Y' : 'N');
}

/**
 * Get the icon path.
 *
 * @return the icon path
 */
public String getIcon() {
  return icon;
}

/**
 * Set the icon path.
 *
 * @param icon the icon path
 */
public void setIcon(String icon) {
  this.icon = icon;
}

/**
 * Get the big icon path.
 *
 * @return the big icon path
 */
public String getIcon64() {
  return icon64;
}

/**
 * Set the big icon path.
 *
 * @param icon64 the big icon path
 */
public void setIcon64(String icon64) {
  this.icon64 = icon64;
}

/**
 * Get the xml driven flag.
 *
 * @return the xml driven flag
 */
public boolean isXmlDriven() {
  // accept either case since the flag is stored as a raw char
  return xmlDriven == 'y' || xmlDriven == 'Y';
}

/**
 * Set the xml driven flag.
 *
 * @param xmlDriven the xml driven flag
 */
public void setXmlDriven(boolean xmlDriven) {
  this.xmlDriven = (xmlDriven) ? 'Y' : 'N';
}

/**
 * Determine whether or not to alter the names of the
 * data store entities to avoid db reserved word conflicts.
 *
 * @return true if the data store entity names should be altered
 */
public boolean alterNames() {
  return alterNames == 1;
}

/**
 * Set the flag which indicates whether or not to alter the names of the
 * data store entities to avoid db reserved word conflicts.
 *
 * @param alterNames the alterNames flag; true if the data store names should be altered
 */
public void setAlterNames(boolean alterNames) {
  // stored in the column as 1/0 rather than a boolean
  this.alterNames = alterNames ? 1 : 0;
}

/**
 * Get the instance properties.
* * @return the instance properties */ public Collection<ViewInstancePropertyEntity> getProperties() { return properties; } /** * Add a property value to this instance. * * @param key the property key * @param value the property value */ public void putProperty(String key, String value) { removeProperty(key); ViewInstancePropertyEntity viewInstancePropertyEntity = new ViewInstancePropertyEntity(); viewInstancePropertyEntity.setViewName(viewName); viewInstancePropertyEntity.setViewInstanceName(name); viewInstancePropertyEntity.setName(key); viewInstancePropertyEntity.setValue(value); viewInstancePropertyEntity.setViewInstanceEntity(this); properties.add(viewInstancePropertyEntity); } /** * Remove the property identified by the given key from this instance. * * @param key the key */ public void removeProperty(String key) { ViewInstancePropertyEntity entity = getProperty(key); if (entity != null) { properties.remove(entity); } } /** * Get the instance property entity for the given key. * * @param key the key * @return the instance property entity identified by the given key */ public ViewInstancePropertyEntity getProperty(String key) { for (ViewInstancePropertyEntity viewInstancePropertyEntity : properties) { if (viewInstancePropertyEntity.getName().equals(key)) { return viewInstancePropertyEntity; } } return null; } /** * Set the collection of instance property entities. * * @param properties the collection of instance property entities */ public void setProperties(Collection<ViewInstancePropertyEntity> properties) { this.properties = properties; } /** * Get the instance data. * * @return the instance data */ public Collection<ViewInstanceDataEntity> getData() { return data; } /** * Set the collection of instance data entities. * * @param data the collection of instance data entities */ public void setData(Collection<ViewInstanceDataEntity> data) { this.data = data; } /** * Get the view entities. 
* * @return the view entities */ public Collection<ViewEntityEntity> getEntities() { return entities; } /** * Set the view entities. * * @param entities the view entities */ public void setEntities(Collection<ViewEntityEntity> entities) { this.entities = entities; } /** * Associate the given instance data value with the given key. * * @param key the key * @param value the value */ public void putInstanceData(String key, String value) { removeInstanceData(key); ViewInstanceDataEntity viewInstanceDataEntity = new ViewInstanceDataEntity(); viewInstanceDataEntity.setViewName(viewName); viewInstanceDataEntity.setViewInstanceName(name); viewInstanceDataEntity.setName(key); viewInstanceDataEntity.setUser(getCurrentUserName()); viewInstanceDataEntity.setValue(value); viewInstanceDataEntity.setViewInstanceEntity(this); data.add(viewInstanceDataEntity); } /** * Remove the instance data entity associated with the given key. * * @param key the key */ public void removeInstanceData(String key) { ViewInstanceDataEntity entity = getInstanceData(key); if (entity != null) { data.remove(entity); } } /** * Get the instance data entity for the given key. * * @param key the key * @return the instance data entity associated with the given key */ public ViewInstanceDataEntity getInstanceData(String key) { String user = getCurrentUserName(); for (ViewInstanceDataEntity viewInstanceDataEntity : data) { if (viewInstanceDataEntity.getName().equals(key) && viewInstanceDataEntity.getUser().equals(user)) { return viewInstanceDataEntity; } } return null; } /** * Get the parent view entity. * * @return the parent view entity */ public ViewEntity getViewEntity() { return view; } /** * Set the parent view entity. * * @param view the parent view entity */ public void setViewEntity(ViewEntity view) { this.view = view; } /** * Get the associated configuration. 
* * @return the configuration */ public InstanceConfig getConfiguration() { return instanceConfig; } /** * Add a resource provider for the given resource type. * * @param type the resource type * @param provider the resource provider */ public void addResourceProvider(Resource.Type type, ResourceProvider provider) { resourceProviders.put(type, provider); } /** * Get the resource provider for the given resource type. * * @param type the resource type * @return the resource provider */ public ResourceProvider getResourceProvider(Resource.Type type) { return resourceProviders.get(type); } /** * Get the resource provider for the given resource type name (scoped to this view). * * @param type the resource type name * @return the resource provider */ public ResourceProvider getResourceProvider(String type) { String typeName = view.getQualifiedResourceTypeName(type); return resourceProviders.get(Resource.Type.valueOf(typeName)); } /** * Add a service for the given plural resource name. * * @param pluralName the plural resource name * @param service the service */ public void addService(String pluralName, Object service) { services.put(pluralName, service); } /** * Get the service associated with the given plural resource name. * * @param pluralName the plural resource name * @return the service associated with the given name */ public Object getService(String pluralName) { return services.get(pluralName); } /** * Get the context path for the UI for this view. * * @return the context path */ public String getContextPath() { return getContextPath(view.getCommonName(), view.getVersion(), getName()); } /** * Get the context path for a view instance with the given names. 
* * @param viewName the view name * @param viewInstanceName the instance name * @return the context path */ public static String getContextPath(String viewName, String version, String viewInstanceName) { return AmbariAuthorizationFilter.VIEWS_CONTEXT_PATH_PREFIX + viewName + "/" + version + "/" + viewInstanceName; } /** * Get the current user name. * * @return the current user name; empty String if user is not known */ public String getUsername() { return securityHelper.getCurrentUserName(); } /** * Get the admin resource entity. * * @return the resource entity */ public ResourceEntity getResource() { return resource; } /** * Set the admin resource entity. * * @param resource the resource entity */ public void setResource(ResourceEntity resource) { this.resource = resource; } /** * Get the data migrator instance for view instance. * * @param dataMigrationContext the data migration context to inject into migrator instance. * @return the data migrator. * @throws ClassNotFoundException if class defined in the archive could not be loaded */ public ViewDataMigrator getDataMigrator(ViewDataMigrationContext dataMigrationContext) throws ClassNotFoundException { if (view != null) { if (dataMigrator == null && view.getConfiguration().getDataMigrator() != null) { ClassLoader cl = view.getClassLoader(); dataMigrator = getDataMigrator(view.getConfiguration().getDataMigratorClass(cl), new ViewContextImpl(view, ViewRegistry.getInstance()), dataMigrationContext); } } return dataMigrator; } // get the data migrator class; inject a migration and view contexts private static ViewDataMigrator getDataMigrator(Class<? 
extends ViewDataMigrator> clazz, final ViewContext viewContext, final ViewDataMigrationContext dataMigrationContext) { Injector viewInstanceInjector = Guice.createInjector(new AbstractModule() { @Override protected void configure() { bind(ViewContext.class) .toInstance(viewContext); bind(ViewDataMigrationContext.class) .toInstance(dataMigrationContext); } }); return viewInstanceInjector.getInstance(clazz); } /** * Validate the state of the instance. * * @param viewEntity the view entity to which this instance will be bound * @param context the validation context * * @throws ValidationException if the instance can not be validated */ public void validate(ViewEntity viewEntity, Validator.ValidationContext context) throws ValidationException { InstanceValidationResultImpl result = getValidationResult(viewEntity, context); if (!result.isValid()) { throw new ValidationException(result.toJson()); } } /** * Get the validation the state of the instance. * * @param viewEntity the view entity to which this instance will be bound * @param context the validation context * * @return the instance validation result */ public InstanceValidationResultImpl getValidationResult(ViewEntity viewEntity, Validator.ValidationContext context) throws IllegalStateException { Map<String, ValidationResult> propertyResults = new HashMap<>(); if (context.equals(Validator.ValidationContext.PRE_CREATE) || context.equals(Validator.ValidationContext.PRE_UPDATE)) { // make sure that there is an instance property value defined // for each required view parameter Set<String> requiredParameterNames = new HashSet<>(); for (ViewParameterEntity parameter : viewEntity.getParameters()) { if (parameter.isRequired()) { // Don't enforce 'required' validation for cluster config parameters since // the value will be obtained through cluster association, not user input if (parameter.getClusterConfig()== null) { requiredParameterNames.add(parameter.getName()); } } } Map<String, String> propertyMap = 
getPropertyMap(); for (Map.Entry<String, String> entry : propertyMap.entrySet()) { if (entry.getValue() != null) { requiredParameterNames.remove(entry.getKey()); } } // required but missing instance properties... for (String requiredParameterName : requiredParameterNames) { propertyResults.put(requiredParameterName, new ValidationResultImpl(false, "No property values exist for the required parameter " + requiredParameterName + ".")); } } ValidationResult instanceResult = null; Validator validator = viewEntity.getValidator(); // if the view provides its own validator, run it if (validator != null) { instanceResult = validator.validateInstance(this, context); for ( String property : getPropertyMap().keySet()) { if (!propertyResults.containsKey(property)) { propertyResults.put(property, ValidationResultImpl.create(validator.validateProperty(property, this, context))); } } } return new InstanceValidationResultImpl(ValidationResultImpl.create(instanceResult), propertyResults); } // ----- helper methods ---------------------------------------------------- // get the current user name public String getCurrentUserName() { String currentUserName = getUsername(); return currentUserName == null || currentUserName.length() == 0 ? " " : currentUserName; } /** * Set the security helper. 
 * @param securityHelper the helper
 */
protected void setSecurityHelper(SecurityHelper securityHelper) {
  this.securityHelper = securityHelper;
}

// ----- Object overrides --------------------------------------------------

/**
 * Equality is based on the (viewName, name) pair, which uniquely identifies
 * an instance (matches the UQ_viewinstance_name unique constraint).
 */
@Override
public boolean equals(Object o) {
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }

  ViewInstanceEntity that = (ViewInstanceEntity) o;

  return name.equals(that.name) && viewName.equals(that.viewName);
}

/**
 * Hash code consistent with {@link #equals(Object)}: combines viewName and name.
 */
@Override
public int hashCode() {
  int result = viewName.hashCode();
  result = 31 * result + name.hashCode();
  return result;
}

/**
 * Get the view URL associated with the instance.
 *
 * @return the associated URL entity; null when no URL is associated
 */
public ViewURLEntity getViewUrl() {
  return viewUrl;
}

/**
 * Set the view URL associated with the instance.
 *
 * @param viewUrl the URL entity to associate
 */
public void setViewUrl(ViewURLEntity viewUrl) {
  this.viewUrl = viewUrl;
}

/**
 * Remove the URL associated with this entity.
 */
public void clearUrl() {
  viewUrl = null;
}

//----- ViewInstanceVersionDTO inner class --------------------------------------------------

/**
 * Keeps information about view name, version and instance name.
 */
public static class ViewInstanceVersionDTO {

  /**
   * View name.
   */
  private final String viewName;

  /**
   * View version.
   */
  private final String version;

  /**
   * View instance name.
   */
  private final String instanceName;

  /**
   * Constructor.
   *
   * @param viewName view name
   * @param version view version
   * @param instanceName view instance name
   */
  public ViewInstanceVersionDTO(String viewName, String version, String instanceName) {
    this.viewName = viewName;
    this.version = version;
    this.instanceName = instanceName;
  }

  /**
   * Get the view name.
   *
   * @return the view name
   */
  public String getViewName() {
    return viewName;
  }

  /**
   * Get the view version.
   *
   * @return the view version
   */
  public String getVersion() {
    return version;
  }

  /**
   * Get the view instance name.
   * @return the view instance name
   */
  public String getInstanceName() {
    return instanceName;
  }
}

/**
 * Human-readable summary: id, view name, instance name and label.
 */
@Override
public String toString() {
  return "ViewInstanceEntity{" +
      "viewInstanceId=" + viewInstanceId +
      ", viewName='" + viewName + '\'' +
      ", name='" + name + '\'' +
      ", label='" + label + '\'' +
      '}';
}
}
/* * * * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. * See the NOTICE file distributed with this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use * this file except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. * * */ package eu.amidst.flinklink.core.conceptdrift; import eu.amidst.core.distribution.Normal_MultinomialNormalParents; import eu.amidst.core.variables.Variable; import eu.amidst.dynamic.datastream.DynamicDataInstance; import eu.amidst.dynamic.io.DynamicBayesianNetworkLoader; import eu.amidst.dynamic.io.DynamicBayesianNetworkWriter; import eu.amidst.dynamic.models.DynamicBayesianNetwork; import eu.amidst.dynamic.models.DynamicDAG; import eu.amidst.dynamic.variables.DynamicVariables; import eu.amidst.flinklink.Main; import eu.amidst.flinklink.core.data.DataFlink; import eu.amidst.flinklink.core.io.DataFlinkLoader; import eu.amidst.flinklink.core.io.DataFlinkWriter; import eu.amidst.flinklink.core.utils.DBNSampler; import junit.framework.TestCase; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.configuration.Configuration; import java.util.HashSet; import java.util.List; import java.util.Random; /** * Created by andresmasegosa on 9/12/15. 
*/ public class IDAConceptDriftDetectorDBNTest extends TestCase { public static int NSETS = 15; public static int SAMPLESIZE = 1000; public static int BATCHSIZE = 500; public static void createDataSets(String networkName, List<String> hiddenVars, List<String> noisyVars) throws Exception { //Set-up Flink session. Configuration conf = new Configuration(); conf.setInteger("taskmanager.network.numberOfBuffers", 12000); final ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment(conf); env.getConfig().disableSysoutLogging(); env.setParallelism(Main.PARALLELISM); DynamicBayesianNetwork dbn = DynamicBayesianNetworkLoader.loadFromFile("networks/simulated/" + networkName + ".dbn"); dbn.randomInitialization(new Random(1)); if (Main.VERBOSE) System.out.println(dbn.toString()); DBNSampler sampler = new DBNSampler(dbn); sampler.setNSamples(SAMPLESIZE); sampler.setBatchSize(BATCHSIZE); sampler.setSeed(0); if (hiddenVars!=null) { for (String hiddenVar : hiddenVars) { sampler.setHiddenVar(dbn.getDynamicVariables().getVariableByName(hiddenVar)); } } if (noisyVars!=null){ for (String noisyVar : noisyVars) { sampler.setMARVar(dbn.getDynamicVariables().getVariableByName(noisyVar), 0.1); } } DataFlink<DynamicDataInstance> data0 = sampler.cascadingSample(env,null); DataFlinkWriter.writeDataToARFFFolder(data0, "../datasets/simulated/conceptdrift/data0.arff"); data0 = DataFlinkLoader.loadDynamicDataFromFolder(env, "../datasets/simulated//conceptdrift/data0.arff", false); List<Long> list = data0.getDataSet().map(d -> d.getSequenceID()).collect(); if (Main.VERBOSE) System.out.println(list); HashSet<Long> noDupSet = new HashSet(); noDupSet.addAll(list); assertEquals(SAMPLESIZE, noDupSet.size()); if (Main.VERBOSE) System.out.println(noDupSet); DataFlink<DynamicDataInstance> dataPrev = data0; for (int i = 1; i < NSETS; i++) { if (Main.VERBOSE) System.out.println("--------------- DATA " + i + " --------------------------"); DataFlink<DynamicDataInstance> dataNew = 
sampler.cascadingSampleConceptDrift(env,dataPrev, i%4==1); DataFlinkWriter.writeDataToARFFFolder(dataNew, "../datasets/simulated/conceptdrift/data" + i + ".arff"); dataNew = DataFlinkLoader.loadDynamicDataFromFolder(env, "../datasets/simulated/conceptdrift/data" + i + ".arff", false); dataPrev = dataNew; } } public static void createDBN1(int nvars, boolean connect) throws Exception { DynamicVariables dynamicVariables = new DynamicVariables(); Variable classVar = dynamicVariables.newMultinomialDynamicVariable("C", 2); for (int i = 0; i < nvars; i++) { dynamicVariables.newGaussianDynamicVariable("A" + i); } DynamicDAG dag = new DynamicDAG(dynamicVariables); for (int i = 0; i < nvars; i++) { dag.getParentSetTimeT(dynamicVariables.getVariableByName("A" + i)).addParent(classVar); if (connect) dag.getParentSetTimeT(dynamicVariables.getVariableByName("A" + i)).addParent(dynamicVariables.getVariableByName("A" + i).getInterfaceVariable()); } //dag.getParentSetTimeT(classVar).addParent(classVar.getInterfaceVariable()); dag.setName("dbn1"); DynamicBayesianNetwork dbn = new DynamicBayesianNetwork(dag); dbn.randomInitialization(new Random(0)); if (Main.VERBOSE) System.out.println(dbn.toString()); DynamicBayesianNetworkWriter.save(dbn, "../networks/simulated/dbn1.dbn"); } public static void testUpdateN(String networkName, double threshold) throws Exception { //Set-up Flink session. 
// NOTE(review): the statements below are the tail of a method whose signature lies
// above this chunk (presumably testUpdateN); variables such as networkName and the
// NSETS constant are declared there — confirm against the full file.
Configuration conf = new Configuration();
// The local Flink mini-cluster needs an enlarged network buffer pool for this job.
conf.setInteger("taskmanager.network.numberOfBuffers", 12000);
final ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment(conf);
env.getConfig().disableSysoutLogging();
env.setParallelism(Main.PARALLELISM);
// Load the dynamic Bayesian network and give it reproducible random parameters.
DynamicBayesianNetwork dbn = DynamicBayesianNetworkLoader.loadFromFile("../networks/simulated/" + networkName + ".dbn");
dbn.randomInitialization(new Random(0));
if (Main.VERBOSE) System.out.println(dbn.toString());
DataFlink<DynamicDataInstance> data0 = DataFlinkLoader.loadDynamicDataFromFolder(env, "../datasets/simulated/conceptdrift/data0.arff", false);
dbn.getDynamicVariables().setAttributes(data0.getAttributes());
// Configure the concept-drift detector: one global hidden variable, class at index 0,
// fixed seed so results are deterministic.
IDAConceptDriftDetectorDBN learn = new IDAConceptDriftDetectorDBN();
learn.setBatchSize(100);
learn.setClassIndex(0);
learn.setAttributes(data0.getAttributes());
learn.setNumberOfGlobalVars(1);
learn.setTransitionVariance(0.1);
learn.setSeed(0);
learn.initLearning();
// output[i] records the first component returned for time slice i (presumably the
// expected value of the global hidden variable — TODO confirm against the detector API).
double[] output = new double[NSETS];
if (Main.VERBOSE) System.out.println("--------------- DATA " + 0 + " --------------------------");
double[] out = learn.updateModelWithNewTimeSlice(0, data0);
output[0] = out[0];
// Feed the remaining data sets to the detector, one time slice at a time.
for (int i = 1; i < NSETS; i++) {
    if (Main.VERBOSE) System.out.println("--------------- DATA " + i + " --------------------------");
    DataFlink<DynamicDataInstance> dataNew = DataFlinkLoader.loadDynamicDataFromFolder(env, "../datasets/simulated/conceptdrift/data" + i + ".arff", false);
    out = learn.updateModelWithNewTimeSlice(i, dataNew);
    output[i] = out[0];
    if (Main.VERBOSE) System.out.println(learn.getLearntDynamicBayesianNetwork());
}
if (Main.VERBOSE) System.out.println(learn.getLearntDynamicBayesianNetwork());
for (int i = 0; i < NSETS; i++) {
    if (Main.VERBOSE) System.out.println("E(H_" + i + ") =\t" + output[i]);
}
}

/**
 * Generates the ARFF data sets used by the concept-drift tests by sampling from the
 * given network. Concept drift is injected by changing the intercept of every
 * variable whose name starts with "A": intercept 10 for slices 0-4, 0 from slice 5,
 * and -10 from slice 10. Each slice is written to disk and read back so that later
 * tests consume exactly what the loader produces.
 *
 * @param networkName name of the .dbn file (without extension) under ../networks/simulated/
 * @param hiddenVars  names of variables to hide in the sample, or null for none
 * @param noisyVars   names of variables made missing-at-random (rate 0.1), or null for none
 * @throws Exception if sampling or file I/O fails
 */
public static void createDataSetsDBN2(String networkName, List<String> hiddenVars, List<String> noisyVars) throws Exception {
    // Set-up Flink session.
    Configuration conf = new Configuration();
    conf.setInteger("taskmanager.network.numberOfBuffers", 12000);
    final ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment(conf);
    env.getConfig().disableSysoutLogging();
    env.setParallelism(Main.PARALLELISM);
    DynamicBayesianNetwork dbn = DynamicBayesianNetworkLoader.loadFromFile("../networks/simulated/" + networkName + ".dbn");
    dbn.randomInitialization(new Random(0));
    // Pre-drift regime: every "A*" variable starts with intercept 10.
    for (Variable variable : dbn.getDynamicVariables()) {
        if (!variable.getName().startsWith("A")) continue;
        Normal_MultinomialNormalParents dist = dbn.getConditionalDistributionTimeT(variable);
        dist.getNormal_NormalParentsDistribution(0).setCoeffParents(new double[]{1.0});
        dist.getNormal_NormalParentsDistribution(0).setIntercept(10);
        dist.getNormal_NormalParentsDistribution(1).setCoeffParents(new double[]{1.0});
        dist.getNormal_NormalParentsDistribution(1).setIntercept(10);
    }
    if (Main.VERBOSE) System.out.println(dbn.toString());
    DBNSampler sampler = new DBNSampler(dbn);
    sampler.setNSamples(SAMPLESIZE);
    sampler.setBatchSize(BATCHSIZE);
    sampler.setSeed(1);
    if (hiddenVars != null) {
        for (String hiddenVar : hiddenVars) {
            sampler.setHiddenVar(dbn.getDynamicVariables().getVariableByName(hiddenVar));
        }
    }
    if (noisyVars != null) {
        for (String noisyVar : noisyVars) {
            sampler.setMARVar(dbn.getDynamicVariables().getVariableByName(noisyVar), 0.1);
        }
    }
    // Sample slice 0, write it out, then read it back through the loader.
    DataFlink<DynamicDataInstance> data0 = sampler.cascadingSample(env, null);
    DataFlinkWriter.writeDataToARFFFolder(data0, "../datasets/simulated/conceptdrift/data0.arff");
    data0 = DataFlinkLoader.loadDynamicDataFromFolder(env, "../datasets/simulated/conceptdrift/data0.arff", false);
    // Sanity check: every sequence id in the reloaded data must be unique.
    List<Long> list = data0.getDataSet().map(d -> d.getSequenceID()).collect();
    if (Main.VERBOSE) System.out.println(list);
    HashSet<Long> noDupSet = new HashSet();
    noDupSet.addAll(list);
    assertEquals(SAMPLESIZE, noDupSet.size());
    if (Main.VERBOSE) System.out.println(noDupSet);
    DataFlink<DynamicDataInstance> dataPrev = data0;
    for (int i = 1; i < NSETS; i++) {
        if (Main.VERBOSE) System.out.println("--------------- DATA " + i + " --------------------------");
        // First drift point: intercepts of the "A*" variables drop from 10 to 0.
        if (i == 5) {
            for (Variable variable : dbn.getDynamicVariables()) {
                if (!variable.getName().startsWith("A")) continue;
                Normal_MultinomialNormalParents dist = dbn.getConditionalDistributionTimeT(variable);
                dist.getNormal_NormalParentsDistribution(0).setCoeffParents(new double[]{1.0});
                dist.getNormal_NormalParentsDistribution(0).setIntercept(0);
                dist.getNormal_NormalParentsDistribution(1).setCoeffParents(new double[]{1.0});
                dist.getNormal_NormalParentsDistribution(1).setIntercept(0);
            }
            if (Main.VERBOSE) System.out.println(dbn);
            sampler.setDBN(dbn);
        }
        // Second drift point: intercepts drop further, from 0 to -10.
        if (i == 10) {
            for (Variable variable : dbn.getDynamicVariables()) {
                if (!variable.getName().startsWith("A")) continue;
                Normal_MultinomialNormalParents dist = dbn.getConditionalDistributionTimeT(variable);
                dist.getNormal_NormalParentsDistribution(0).setCoeffParents(new double[]{1.0});
                dist.getNormal_NormalParentsDistribution(0).setIntercept(-10);
                dist.getNormal_NormalParentsDistribution(1).setCoeffParents(new double[]{1.0});
                dist.getNormal_NormalParentsDistribution(1).setIntercept(-10);
            }
            if (Main.VERBOSE) System.out.println(dbn);
            sampler.setDBN(dbn);
        }
        // Sample slice i conditioned on the previous slice, then round-trip it to disk.
        DataFlink<DynamicDataInstance> dataNew = sampler.cascadingSample(env, dataPrev); //i%4==1);
        DataFlinkWriter.writeDataToARFFFolder(dataNew, "../datasets/simulated/conceptdrift/data" + i + ".arff");
        dataNew = DataFlinkLoader.loadDynamicDataFromFolder(env, "../datasets/simulated/conceptdrift/data" + i + ".arff", false);
        dataPrev = dataNew;
    }
}

/**
 * End-to-end driver: builds the "dbn1" network, generates the drifting data sets,
 * then runs the drift detector over them.
 */
public static void test1() throws Exception {
    String networkName = "dbn1";
    createDBN1(10, true);
    createDataSetsDBN2(networkName, null, null);
    testUpdateN(networkName, 0.1);
}
}
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.pzexoplayer.text.eia608;

import com.google.android.pzexoplayer.C;
import com.google.android.pzexoplayer.ExoPlaybackException;
import com.google.android.pzexoplayer.MediaFormatHolder;
import com.google.android.pzexoplayer.SampleHolder;
import com.google.android.pzexoplayer.SampleSource;
import com.google.android.pzexoplayer.SampleSource.SampleSourceReader;
import com.google.android.pzexoplayer.TrackRenderer;
import com.google.android.pzexoplayer.text.Cue;
import com.google.android.pzexoplayer.text.TextRenderer;
import com.google.android.pzexoplayer.util.Assertions;
import com.google.android.pzexoplayer.util.Util;

import android.os.Handler;
import android.os.Handler.Callback;
import android.os.Looper;
import android.os.Message;

import java.io.IOException;
import java.util.Collections;
import java.util.TreeSet;

/**
 * A {@link TrackRenderer} for EIA-608 closed captions in a media stream.
 */
public final class Eia608TrackRenderer extends TrackRenderer implements Callback {

  // Message type used to hand rendered caption text to the textRendererHandler.
  private static final int MSG_INVOKE_RENDERER = 0;

  // Caption display modes, set by control codes in the stream.
  private static final int CC_MODE_UNKNOWN = 0;
  private static final int CC_MODE_ROLL_UP = 1;
  private static final int CC_MODE_POP_ON = 2;
  private static final int CC_MODE_PAINT_ON = 3;

  // The default number of rows to display in roll-up captions mode.
  private static final int DEFAULT_CAPTIONS_ROW_COUNT = 4;

  // The maximum duration that captions are parsed ahead of the current position.
  private static final int MAX_SAMPLE_READAHEAD_US = 5000000;

  private final SampleSourceReader source;
  private final Eia608Parser eia608Parser;
  private final TextRenderer textRenderer;
  // Handler on the caller-supplied looper, or null to invoke the renderer directly.
  private final Handler textRendererHandler;
  private final MediaFormatHolder formatHolder;
  private final SampleHolder sampleHolder;
  // Working memory into which caption text is accumulated before display.
  private final StringBuilder captionStringBuilder;
  // Parsed caption lists not yet consumed, ordered by timestamp.
  private final TreeSet<ClosedCaptionList> pendingCaptionLists;

  private int trackIndex;
  private boolean inputStreamEnded;

  private int captionMode;
  private int captionRowCount;
  // The caption text most recently committed for display (may be null).
  private String caption;
  // The last caption actually delivered to the renderer, used to suppress no-op updates.
  private String lastRenderedCaption;

  /**
   * @param source A source from which samples containing EIA-608 closed captions can be read.
   * @param textRenderer The text renderer.
   * @param textRendererLooper The looper associated with the thread on which textRenderer should be
   *     invoked. If the renderer makes use of standard Android UI components, then this should
   *     normally be the looper associated with the applications' main thread, which can be
   *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
   *     renderer should be invoked directly on the player's internal rendering thread.
   */
  public Eia608TrackRenderer(SampleSource source, TextRenderer textRenderer,
      Looper textRendererLooper) {
    this.source = source.register();
    this.textRenderer = Assertions.checkNotNull(textRenderer);
    textRendererHandler = textRendererLooper == null ? null : new Handler(textRendererLooper, this);
    eia608Parser = new Eia608Parser();
    formatHolder = new MediaFormatHolder();
    sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
    captionStringBuilder = new StringBuilder();
    pendingCaptionLists = new TreeSet<>();
  }

  // Prepares the source and selects the first track whose mime type the parser supports.
  @Override
  protected int doPrepare(long positionUs) {
    boolean sourcePrepared = source.prepare(positionUs);
    if (!sourcePrepared) {
      return TrackRenderer.STATE_UNPREPARED;
    }
    int trackCount = source.getTrackCount();
    for (int i = 0; i < trackCount; i++) {
      if (eia608Parser.canParse(source.getTrackInfo(i).mimeType)) {
        trackIndex = i;
        return TrackRenderer.STATE_PREPARED;
      }
    }
    // No parseable caption track: this renderer has nothing to do.
    return TrackRenderer.STATE_IGNORE;
  }

  @Override
  protected void onEnabled(long positionUs, boolean joining) {
    source.enable(trackIndex, positionUs);
    seekToInternal();
  }

  @Override
  protected void seekTo(long positionUs) throws ExoPlaybackException {
    source.seekToUs(positionUs);
    seekToInternal();
  }

  // Resets all caption state after a seek or (re-)enable, and clears the display.
  private void seekToInternal() {
    inputStreamEnded = false;
    pendingCaptionLists.clear();
    clearPendingSample();
    captionRowCount = DEFAULT_CAPTIONS_ROW_COUNT;
    setCaptionMode(CC_MODE_UNKNOWN);
    invokeRenderer(null);
  }

  @Override
  protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
    source.continueBuffering(trackIndex, positionUs);
    if (isSamplePending()) {
      maybeParsePendingSample(positionUs);
    }
    // Read samples ahead of the playback position until one is held back by the
    // readahead limit or the stream ends.
    int result = inputStreamEnded ? SampleSource.END_OF_STREAM : SampleSource.SAMPLE_READ;
    while (!isSamplePending() && result == SampleSource.SAMPLE_READ) {
      result = source.readData(trackIndex, positionUs, formatHolder, sampleHolder, false);
      if (result == SampleSource.SAMPLE_READ) {
        maybeParsePendingSample(positionUs);
      } else if (result == SampleSource.END_OF_STREAM) {
        inputStreamEnded = true;
      }
    }
    // Consume caption lists whose timestamps have been reached.
    while (!pendingCaptionLists.isEmpty()) {
      if (pendingCaptionLists.first().timeUs > positionUs) {
        // We're too early to render any of the pending caption lists.
        return;
      }
      // Remove and consume the next caption list.
      ClosedCaptionList nextCaptionList = pendingCaptionLists.pollFirst();
      consumeCaptionList(nextCaptionList);
      // Update the renderer, unless the caption list was marked for decoding only.
      if (!nextCaptionList.decodeOnly) {
        invokeRenderer(caption);
      }
    }
  }

  @Override
  protected void onDisabled() {
    source.disable(trackIndex);
  }

  @Override
  protected void maybeThrowError() throws ExoPlaybackException {
    try {
      source.maybeThrowError();
    } catch (IOException e) {
      throw new ExoPlaybackException(e);
    }
  }

  @Override
  protected long getDurationUs() {
    return source.getTrackInfo(trackIndex).durationUs;
  }

  @Override
  protected long getBufferedPositionUs() {
    // Captions never gate playback, so report the track as fully buffered.
    return TrackRenderer.END_OF_TRACK_US;
  }

  @Override
  protected boolean isEnded() {
    return inputStreamEnded;
  }

  @Override
  protected boolean isReady() {
    return true;
  }

  // Delivers text to the renderer, skipping the call if it is unchanged, and posting
  // to the text renderer's thread when a handler was configured.
  private void invokeRenderer(String text) {
    if (Util.areEqual(lastRenderedCaption, text)) {
      // No change.
      return;
    }
    this.lastRenderedCaption = text;
    if (textRendererHandler != null) {
      textRendererHandler.obtainMessage(MSG_INVOKE_RENDERER, text).sendToTarget();
    } else {
      invokeRendererInternal(text);
    }
  }

  @SuppressWarnings("unchecked")
  @Override
  public boolean handleMessage(Message msg) {
    switch (msg.what) {
      case MSG_INVOKE_RENDERER:
        invokeRendererInternal((String) msg.obj);
        return true;
    }
    return false;
  }

  private void invokeRendererInternal(String cueText) {
    if (cueText == null) {
      textRenderer.onCues(Collections.<Cue>emptyList());
    } else {
      textRenderer.onCues(Collections.singletonList(new Cue(cueText)));
    }
  }

  // Parses the held sample once it falls within the readahead window, queueing any
  // caption list it produced.
  private void maybeParsePendingSample(long positionUs) {
    if (sampleHolder.timeUs > positionUs + MAX_SAMPLE_READAHEAD_US) {
      // We're too early to parse the sample.
      return;
    }
    ClosedCaptionList holder = eia608Parser.parse(sampleHolder);
    clearPendingSample();
    if (holder != null) {
      pendingCaptionLists.add(holder);
    }
  }

  // Applies every caption element in the list to the current caption state.
  private void consumeCaptionList(ClosedCaptionList captionList) {
    int captionBufferSize = captionList.captions.length;
    if (captionBufferSize == 0) {
      return;
    }
    for (int i = 0; i < captionBufferSize; i++) {
      ClosedCaption caption = captionList.captions[i];
      if (caption.type == ClosedCaption.TYPE_CTRL) {
        ClosedCaptionCtrl captionCtrl = (ClosedCaptionCtrl) caption;
        if (captionCtrl.isMiscCode()) {
          handleMiscCode(captionCtrl);
        } else if (captionCtrl.isPreambleAddressCode()) {
          handlePreambleAddressCode();
        }
      } else {
        handleText((ClosedCaptionText) caption);
      }
    }
    // Roll-up and paint-on captions are displayed as they arrive; pop-on captions
    // wait for an explicit END_OF_CAPTION control code.
    if (captionMode == CC_MODE_ROLL_UP || captionMode == CC_MODE_PAINT_ON) {
      caption = getDisplayCaption();
    }
  }

  private void handleText(ClosedCaptionText captionText) {
    // Text received before any mode-establishing control code is discarded.
    if (captionMode != CC_MODE_UNKNOWN) {
      captionStringBuilder.append(captionText.text);
    }
  }

  private void handleMiscCode(ClosedCaptionCtrl captionCtrl) {
    // Mode-establishing codes are honored regardless of the current mode.
    switch (captionCtrl.cc2) {
      case ClosedCaptionCtrl.ROLL_UP_CAPTIONS_2_ROWS:
        captionRowCount = 2;
        setCaptionMode(CC_MODE_ROLL_UP);
        return;
      case ClosedCaptionCtrl.ROLL_UP_CAPTIONS_3_ROWS:
        captionRowCount = 3;
        setCaptionMode(CC_MODE_ROLL_UP);
        return;
      case ClosedCaptionCtrl.ROLL_UP_CAPTIONS_4_ROWS:
        captionRowCount = 4;
        setCaptionMode(CC_MODE_ROLL_UP);
        return;
      case ClosedCaptionCtrl.RESUME_CAPTION_LOADING:
        setCaptionMode(CC_MODE_POP_ON);
        return;
      case ClosedCaptionCtrl.RESUME_DIRECT_CAPTIONING:
        setCaptionMode(CC_MODE_PAINT_ON);
        return;
    }
    // All remaining codes require an established mode.
    if (captionMode == CC_MODE_UNKNOWN) {
      return;
    }
    switch (captionCtrl.cc2) {
      case ClosedCaptionCtrl.ERASE_DISPLAYED_MEMORY:
        caption = null;
        if (captionMode == CC_MODE_ROLL_UP || captionMode == CC_MODE_PAINT_ON) {
          captionStringBuilder.setLength(0);
        }
        return;
      case ClosedCaptionCtrl.ERASE_NON_DISPLAYED_MEMORY:
        captionStringBuilder.setLength(0);
        return;
      case ClosedCaptionCtrl.END_OF_CAPTION:
        // Pop-on: commit the working memory for display and clear it.
        caption = getDisplayCaption();
        captionStringBuilder.setLength(0);
        return;
      case ClosedCaptionCtrl.CARRIAGE_RETURN:
        maybeAppendNewline();
        return;
      case ClosedCaptionCtrl.BACKSPACE:
        if (captionStringBuilder.length() > 0) {
          captionStringBuilder.setLength(captionStringBuilder.length() - 1);
        }
        return;
    }
  }

  private void handlePreambleAddressCode() {
    // TODO: Add better handling of this with specific positioning.
    maybeAppendNewline();
  }

  private void setCaptionMode(int captionMode) {
    if (this.captionMode == captionMode) {
      return;
    }
    this.captionMode = captionMode;
    // Clear the working memory.
    captionStringBuilder.setLength(0);
    if (captionMode == CC_MODE_ROLL_UP || captionMode == CC_MODE_UNKNOWN) {
      // When switching to roll-up or unknown, we also need to clear the caption.
      caption = null;
    }
  }

  // Appends a newline unless the working memory is empty or already ends with one.
  private void maybeAppendNewline() {
    int buildLength = captionStringBuilder.length();
    if (buildLength > 0 && captionStringBuilder.charAt(buildLength - 1) != '\n') {
      captionStringBuilder.append('\n');
    }
  }

  // Builds the display string from the working memory. In roll-up mode only the
  // last captionRowCount rows are kept, and older rows are dropped from the buffer.
  private String getDisplayCaption() {
    int buildLength = captionStringBuilder.length();
    if (buildLength == 0) {
      return null;
    }
    boolean endsWithNewline = captionStringBuilder.charAt(buildLength - 1) == '\n';
    if (buildLength == 1 && endsWithNewline) {
      return null;
    }
    // Exclude a single trailing newline from the displayed text.
    int endIndex = endsWithNewline ? buildLength - 1 : buildLength;
    if (captionMode != CC_MODE_ROLL_UP) {
      return captionStringBuilder.substring(0, endIndex);
    }
    int startIndex = 0;
    int searchBackwardFromIndex = endIndex;
    // Walk back captionRowCount newlines to find where the visible window starts.
    for (int i = 0; i < captionRowCount && searchBackwardFromIndex != -1; i++) {
      searchBackwardFromIndex = captionStringBuilder.lastIndexOf("\n",
          searchBackwardFromIndex - 1);
    }
    if (searchBackwardFromIndex != -1) {
      startIndex = searchBackwardFromIndex + 1;
    }
    // Rows scrolled out of the window are discarded permanently.
    captionStringBuilder.delete(0, startIndex);
    return captionStringBuilder.substring(0, endIndex - startIndex);
  }

  private void clearPendingSample() {
    sampleHolder.timeUs = C.UNKNOWN_TIME_US;
    sampleHolder.clearData();
  }

  // A sample is pending iff its timestamp has been set by a successful read.
  private boolean isSamplePending() {
    return sampleHolder.timeUs != C.UNKNOWN_TIME_US;
  }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.ocm.mapper; import java.util.Collection; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.apache.jackrabbit.ocm.exception.JcrMappingException; import org.apache.jackrabbit.ocm.mapper.impl.digester.DigesterMapperImpl; import org.apache.jackrabbit.ocm.mapper.model.BeanDescriptor; import org.apache.jackrabbit.ocm.mapper.model.ClassDescriptor; import org.apache.jackrabbit.ocm.mapper.model.CollectionDescriptor; import org.apache.jackrabbit.ocm.mapper.model.FieldDescriptor; import org.apache.jackrabbit.ocm.testmodel.A; import org.apache.jackrabbit.ocm.testmodel.B; import org.apache.jackrabbit.ocm.testmodel.C; import org.apache.jackrabbit.ocm.testmodel.OcmTestProperty; import org.apache.jackrabbit.ocm.testmodel.inheritance.Ancestor; import org.apache.jackrabbit.ocm.testmodel.inheritance.AnotherDescendant; import org.apache.jackrabbit.ocm.testmodel.inheritance.Descendant; import org.apache.jackrabbit.ocm.testmodel.inheritance.SubDescendant; import org.apache.jackrabbit.ocm.testmodel.inheritance.impl.CmsObjectImpl; import org.apache.jackrabbit.ocm.testmodel.inheritance.impl.DocumentImpl; import 
org.apache.jackrabbit.ocm.testmodel.interfaces.CmsObject; import org.apache.jackrabbit.ocm.testmodel.interfaces.Document; import org.apache.jackrabbit.ocm.testmodel.interfaces.Interface; import org.apache.jackrabbit.ocm.testmodel.proxy.Main; /** * Test Digester Mapper * * @author <a href="mailto:christophe.lombart@sword-technologies.com">Christophe Lombart</a> */ public class DigesterMapperImplTest extends TestCase { /** * <p>Defines the test case name for junit.</p> * @param testName The test case name. */ public DigesterMapperImplTest(String testName) { super(testName); } public static Test suite() { // All methods starting with "test" will be executed in the test suite. return new TestSuite(DigesterMapperImplTest.class); } /** * Simple test mapper * */ public void testMapper() { try { Mapper mapper = new DigesterMapperImpl( "./src/test/test-config/jcrmapping-testdigester.xml"); assertNotNull("Mapper is null", mapper); ClassDescriptor classDescriptor = mapper.getClassDescriptorByClass(A.class); assertNotNull("ClassDescriptor is null", classDescriptor); assertTrue("Invalid classname", classDescriptor.getClassName().equals(A.class.getName())); assertTrue("Invalid path field", classDescriptor.getPathFieldDescriptor().getFieldName().equals("path")); assertEquals("Invalid mixins", "mixin:a", classDescriptor.getJcrMixinTypes()[0]); FieldDescriptor fieldDescriptor = classDescriptor .getFieldDescriptor("a1"); assertNotNull("FieldDescriptor is null", fieldDescriptor); assertTrue("Invalid jcrName for field a1", fieldDescriptor.getJcrName().equals("a1")); BeanDescriptor beanDescriptor = classDescriptor.getBeanDescriptor("b"); assertNotNull("BeanDescriptor is null", beanDescriptor); assertTrue("Invalid jcrName for field b", beanDescriptor .getJcrName().equals("b")); assertNotNull("Invalid bean default converter", beanDescriptor.getConverter()); CollectionDescriptor collectionDescriptor = classDescriptor.getCollectionDescriptor("collection"); 
assertNotNull("CollectionDescriptor is null", collectionDescriptor); assertTrue("Invalid jcrName for field collection",collectionDescriptor.getJcrName().equals("collection")); } catch (JcrMappingException e) { e.printStackTrace(); fail("Impossible to retrieve the converter " + e); } } /** * Simple test mapper * */ public void testUuid() { try { Mapper mapper = new DigesterMapperImpl( "./src/test/test-config/jcrmapping-testdigester.xml"); assertNotNull("Mapper is null", mapper); ClassDescriptor classDescriptor = mapper.getClassDescriptorByClass(org.apache.jackrabbit.ocm.testmodel.uuid.A.class); assertNotNull("ClassDescriptor is null", classDescriptor); assertTrue("Invalid uuid field", classDescriptor.getUuidFieldDescriptor().getFieldName().equals("uuid")); } catch (JcrMappingException e) { e.printStackTrace(); fail("Impossible to retrieve the converter " + e); } } /** * Simple test mapper * */ public void testDiscriminatorSetting() { try { Mapper mapper = new DigesterMapperImpl("./src/test/test-config/jcrmapping-testdigester.xml"); assertNotNull("Mapper is null", mapper); ClassDescriptor classDescriptor = mapper.getClassDescriptorByNodeType("ocm:C"); //ClassDescriptor classDescriptor = mapper.getClassDescriptorByClass(C.class); assertNotNull("ClassDescriptor is null", classDescriptor); assertTrue("Invalid classname", classDescriptor.getClassName().equals(C.class.getName())); } catch (JcrMappingException e) { e.printStackTrace(); fail("Impossible to retrieve the converter " + e); } } /** * Test optional mapping properties * */ public void testMapperOptionalProperties() { try { String[] files = { "./src/test/test-config/jcrmapping.xml", "./src/test/test-config/jcrmapping-jcrnodetypes.xml"}; Mapper mapper = new DigesterMapperImpl(files); assertNotNull("Mapper is null", mapper); ClassDescriptor classDescriptor = mapper.getClassDescriptorByClass(B.class); assertNotNull("ClassDescriptor is null", classDescriptor); assertTrue("Invalid classname", 
classDescriptor.getClassName() .equals(B.class.getName())); assertEquals(classDescriptor.getJcrSuperTypes(), "nt:base"); FieldDescriptor b1Field = classDescriptor.getFieldDescriptor("b1"); assertNotNull("FieldDescriptor is null", b1Field); assertEquals(b1Field.getFieldName(), "b1"); assertEquals(b1Field.getJcrType(), "String"); assertFalse(b1Field.isJcrAutoCreated()); assertFalse(b1Field.isJcrMandatory()); assertFalse(b1Field.isJcrProtected()); assertFalse(b1Field.isJcrMultiple()); assertEquals(b1Field.getJcrOnParentVersion(), "IGNORE"); FieldDescriptor b2Field = classDescriptor.getFieldDescriptor("b2"); assertNotNull("FieldDescriptor is null", b2Field); assertEquals(b2Field.getFieldName(), "b2"); assertEquals(b2Field.getJcrType(), "String"); assertFalse(b2Field.isJcrAutoCreated()); assertFalse(b2Field.isJcrMandatory()); assertFalse(b2Field.isJcrProtected()); assertFalse(b2Field.isJcrMultiple()); assertEquals(b2Field.getJcrOnParentVersion(), "IGNORE"); ClassDescriptor classDescriptor2 = mapper .getClassDescriptorByClass(A.class); assertNotNull("ClassDescriptor is null", classDescriptor2); assertTrue("Invalid classname", classDescriptor2.getClassName() .equals(A.class.getName())); BeanDescriptor beanDescriptor = classDescriptor2 .getBeanDescriptor("b"); assertNotNull(beanDescriptor); assertEquals(beanDescriptor.getFieldName(), "b"); assertEquals(beanDescriptor.getJcrType(), "nt:unstructured"); assertFalse(beanDescriptor.isJcrAutoCreated()); assertFalse(beanDescriptor.isJcrMandatory()); assertFalse(beanDescriptor.isJcrProtected()); assertFalse(beanDescriptor.isJcrSameNameSiblings()); assertEquals(beanDescriptor.getJcrOnParentVersion(), "IGNORE"); CollectionDescriptor collectionDescriptor = classDescriptor2 .getCollectionDescriptor("collection"); assertNotNull(collectionDescriptor); assertEquals(collectionDescriptor.getJcrType(), "nt:unstructured"); assertFalse(collectionDescriptor.isJcrAutoCreated()); assertFalse(collectionDescriptor.isJcrMandatory()); 
assertFalse(collectionDescriptor.isJcrProtected()); assertFalse(collectionDescriptor.isJcrSameNameSiblings()); assertEquals(collectionDescriptor.getJcrOnParentVersion(), "IGNORE"); classDescriptor = mapper.getClassDescriptorByClass(OcmTestProperty.class); assertNotNull(classDescriptor); FieldDescriptor fieldDescriptor = classDescriptor.getFieldDescriptor("requiredWithConstraintsProp"); assertNotNull(fieldDescriptor.getJcrValueConstraints()); assertTrue("Invalid constaint", fieldDescriptor.getJcrValueConstraints()[0].equals("abc") ); assertTrue("Invalid constaint", fieldDescriptor.getJcrValueConstraints()[1].equals("def") ); assertTrue("Invalid constaint", fieldDescriptor.getJcrValueConstraints()[2].equals("ghi") ); fieldDescriptor = classDescriptor.getFieldDescriptor("autoCreatedProp"); assertNotNull(fieldDescriptor.getJcrDefaultValue()); assertTrue("Invalid default value", fieldDescriptor.getJcrDefaultValue().equals("aaa") ); } catch (JcrMappingException e) { e.printStackTrace(); fail("Impossible to retrieve the converter " + e); } } /** * * Test Node Type per hierarchy setting */ public void testMapperNtHierarchy() { try { String[] files = { "./src/test/test-config/jcrmapping.xml", "./src/test/test-config/jcrmapping-atomic.xml", "./src/test/test-config/jcrmapping-beandescriptor.xml", "./src/test/test-config/jcrmapping-inheritance.xml" }; Mapper mapper = new DigesterMapperImpl(files); assertNotNull("Mapper is null", mapper); ClassDescriptor classDescriptor = mapper .getClassDescriptorByClass(Ancestor.class); assertNotNull("Classdescriptor is null", classDescriptor); assertEquals("Incorrect path field", classDescriptor .getPathFieldDescriptor().getFieldName(), "path"); assertTrue("The ancestor class has no discriminator", classDescriptor.hasDiscriminator()); assertTrue("The ancestor class is not abstract", classDescriptor .isAbstract()); assertNull("The ancestor class has an ancestor", classDescriptor .getSuperClassDescriptor()); assertTrue( "Ancestor class doesn't 
have a node type per hierarchy strategy", classDescriptor.usesNodeTypePerHierarchyStrategy()); assertFalse( "Ancestor class have a node type per hierarchy strategy", classDescriptor.usesNodeTypePerConcreteClassStrategy()); Collection descendandDescriptors = classDescriptor .getDescendantClassDescriptors(); assertEquals("Invalid number of descendants", descendandDescriptors .size(), 2); classDescriptor = mapper.getClassDescriptorByClass(Descendant.class); assertNotNull("Classdescriptor is null", classDescriptor); assertEquals("Incorrect path field", classDescriptor .getPathFieldDescriptor().getFieldName(), "path"); assertTrue("The descendant class has no discriminator", classDescriptor.hasDiscriminator()); assertNotNull("ancerstorField is null in the descendant class", classDescriptor.getFieldDescriptor("ancestorField")); assertFalse("The descendant class is abstract", classDescriptor .isAbstract()); assertNotNull("The descendant class has not an ancestor", classDescriptor.getSuperClassDescriptor()); assertEquals("Invalid ancestor class for the descendant class", classDescriptor.getSuperClassDescriptor().getClassName(), "org.apache.jackrabbit.ocm.testmodel.inheritance.Ancestor"); descendandDescriptors = classDescriptor .getDescendantClassDescriptors(); assertEquals("Invalid number of descendants", descendandDescriptors .size(), 1); assertTrue( "Descendant class doesn't have a node type per hierarchy strategy", classDescriptor.usesNodeTypePerHierarchyStrategy()); assertFalse( "Descendant class have a node type per hierarchy strategy", classDescriptor.usesNodeTypePerConcreteClassStrategy()); classDescriptor = mapper.getClassDescriptorByClass(SubDescendant.class); assertNotNull("Classdescriptor is null", classDescriptor); assertEquals("Incorrect path field", classDescriptor .getPathFieldDescriptor().getFieldName(), "path"); assertTrue("The subdescendant class has no discriminator", classDescriptor.hasDiscriminator()); assertNotNull("ancestorField is null in the 
descendant class", classDescriptor.getFieldDescriptor("ancestorField")); assertFalse("The subdescendant class is abstract", classDescriptor .isAbstract()); assertNotNull("The subdescendant class has not an ancestor", classDescriptor.getSuperClassDescriptor()); assertEquals("Invalid ancestor class for the descendant class", classDescriptor.getSuperClassDescriptor().getClassName(), "org.apache.jackrabbit.ocm.testmodel.inheritance.Descendant"); descendandDescriptors = classDescriptor .getDescendantClassDescriptors(); assertEquals("Invalid number of descendants", descendandDescriptors .size(), 0); assertTrue( "SubDescendant class doesn't have a node type per hierarchy strategy", classDescriptor.usesNodeTypePerHierarchyStrategy()); assertFalse( "SubDescendant class have a node type per hierarchy strategy", classDescriptor.usesNodeTypePerConcreteClassStrategy()); } catch (JcrMappingException e) { e.printStackTrace(); fail("Impossible to retrieve the converter " + e); } } /** * * Test Node Type per concrete class setting */ public void testMapperNtConcreteClass() { try { String[] files = { "./src/test/test-config/jcrmapping.xml", "./src/test/test-config/jcrmapping-atomic.xml", "./src/test/test-config/jcrmapping-beandescriptor.xml", "./src/test/test-config/jcrmapping-inheritance.xml" }; // String[] files = { "./src/test/test-config/jcrmapping-inheritance.xml"}; Mapper mapper = new DigesterMapperImpl(files); assertNotNull("Mapper is null", mapper); ClassDescriptor classDescriptor = mapper.getClassDescriptorByClass(CmsObjectImpl.class); assertNotNull("Classdescriptor is null", classDescriptor); assertEquals("Incorrect path field", classDescriptor .getPathFieldDescriptor().getFieldName(), "path"); assertFalse("The cms object class has discriminator", classDescriptor.hasDiscriminator()); assertTrue("The cmsobject class is not abstract", classDescriptor .isAbstract()); assertNull("The cmsobject class has an ancestor", classDescriptor .getSuperClassDescriptor()); assertFalse( 
"The cmsobject class have a node type per hierarchy strategy", classDescriptor.usesNodeTypePerHierarchyStrategy()); assertTrue( "The cmsobject class have not a node type per hierarchy strategy", classDescriptor.usesNodeTypePerConcreteClassStrategy()); assertTrue("The cmsobject class has no descendant ", classDescriptor.hasDescendants()); assertEquals("Invalid number of descendants", classDescriptor .getDescendantClassDescriptors().size(), 2); classDescriptor = mapper.getClassDescriptorByClass(DocumentImpl.class); assertNotNull("Classdescriptor is null", classDescriptor); assertEquals("Incorrect path field", classDescriptor .getPathFieldDescriptor().getFieldName(), "path"); assertFalse("The document class has discriminator", classDescriptor.hasDiscriminator()); assertFalse("The document class is abstract", classDescriptor .isAbstract()); assertNotNull("The document class has not an ancestor", classDescriptor.getSuperClassDescriptor()); assertEquals("The document class has an invalid ancestor ancestor", classDescriptor.getSuperClassDescriptor().getClassName(), "org.apache.jackrabbit.ocm.testmodel.inheritance.impl.ContentImpl"); assertFalse( "The document class have a node type per hierarchy strategy", classDescriptor.usesNodeTypePerHierarchyStrategy()); assertTrue( "The document class have not a node type per hierarchy strategy", classDescriptor.usesNodeTypePerConcreteClassStrategy()); assertFalse("The document class has no descendant ", classDescriptor.hasDescendants()); assertEquals("Invalid number of descendants", classDescriptor .getDescendantClassDescriptors().size(), 0); } catch (JcrMappingException e) { e.printStackTrace(); fail("Impossible to retrieve the converter " + e); } } /** * Test interface setting */ public void testInterfaceWithDiscriminator() { try { String[] files = {"./src/test/test-config/jcrmapping-inheritance.xml"}; Mapper mapper = new DigesterMapperImpl(files); assertNotNull("Mapper is null", mapper); ClassDescriptor classDescriptor = 
mapper.getClassDescriptorByClass(Interface.class); assertNotNull("Classdescriptor is null", classDescriptor); assertTrue("Interface is not an interface", classDescriptor.isInterface()); assertTrue("Interface has not a discriminator", classDescriptor.hasDiscriminator()); String[] mixinTypes = classDescriptor.getJcrMixinTypes(); assertEquals("Invalid mixin type for the interface",mixinTypes.length , 0); assertNull("The interface has an ancestor", classDescriptor.getSuperClassDescriptor()); assertTrue("The interface has not implementation/descendant", classDescriptor.hasDescendants()); Collection descendants = classDescriptor.getDescendantClassDescriptors(); assertEquals("Invalid number of implementation/descendants", descendants.size(), 1); assertEquals("Invalid interface implementation",( (ClassDescriptor) descendants.iterator().next()).getClassName(), "org.apache.jackrabbit.ocm.testmodel.inheritance.AnotherDescendant"); assertTrue("Invalid extend strategy", classDescriptor.usesNodeTypePerHierarchyStrategy()); assertFalse("Incalid extend strategy", classDescriptor.usesNodeTypePerConcreteClassStrategy()); classDescriptor = mapper.getClassDescriptorByClass(AnotherDescendant.class); assertNotNull("Classdescriptor is null", classDescriptor); assertFalse("Interface is an interface", classDescriptor.isInterface()); assertTrue("AnotherDescendant has not a discriminator", classDescriptor.hasDiscriminator()); assertEquals("Invalid number of implemented interface", classDescriptor.getImplements().size(), 1); assertEquals("Invalid interface name", classDescriptor.getImplements().iterator().next(), "org.apache.jackrabbit.ocm.testmodel.interfaces.Interface"); assertTrue("Invalid extend strategy", classDescriptor.usesNodeTypePerHierarchyStrategy()); assertFalse("Invalid extend strategy", classDescriptor.usesNodeTypePerConcreteClassStrategy()); } catch (JcrMappingException e) { e.printStackTrace(); fail("Impossible to retrieve the converter " + e); } } /** * Test interface setting 
*/ public void testInterfaceWithoutDiscriminator() { try { String[] files = {"./src/test/test-config/jcrmapping-inheritance.xml"}; Mapper mapper = new DigesterMapperImpl(files); assertNotNull("Mapper is null", mapper); ClassDescriptor classDescriptor = mapper.getClassDescriptorByClass(CmsObject.class); assertNotNull("Classdescriptor is null", classDescriptor); assertTrue("CmsObject is not an interface", classDescriptor.isInterface()); assertFalse("Interface has a discriminator", classDescriptor.hasDiscriminator()); String[] mixinTypes = classDescriptor.getJcrMixinTypes(); assertEquals("Invalid mixin type for the interface",mixinTypes.length , 0); assertNull("The interface has an ancestor", classDescriptor.getSuperClassDescriptor()); assertTrue("The interface has not implementation/descendant", classDescriptor.hasDescendants()); Collection descendants = classDescriptor.getDescendantClassDescriptors(); assertEquals("Invalid number of implementation/descendants", descendants.size(),3); assertFalse("Invalid extend strategy", classDescriptor.usesNodeTypePerHierarchyStrategy()); assertTrue("Invalid extend strategy", classDescriptor.usesNodeTypePerConcreteClassStrategy()); classDescriptor = mapper.getClassDescriptorByClass(Document.class); assertNotNull("Classdescriptor is null", classDescriptor); assertTrue("Document is not an interface", classDescriptor.isInterface()); assertFalse("Document has a discriminator", classDescriptor.hasDiscriminator()); assertEquals("Invalid number of implemented interface", classDescriptor.getImplements().size(), 0); assertFalse("Invalid extend strategy", classDescriptor.usesNodeTypePerHierarchyStrategy()); assertTrue("Invalid extend strategy", classDescriptor.usesNodeTypePerConcreteClassStrategy()); descendants = classDescriptor.getDescendantClassDescriptors(); assertEquals("Invalid number of implementation/descendants", descendants.size(),1); classDescriptor = mapper.getClassDescriptorByClass(DocumentImpl.class); 
assertNotNull("Classdescriptor is null", classDescriptor); assertFalse("DocumentImpl is an interface", classDescriptor.isInterface()); assertFalse("DocumentImpl has a discriminator", classDescriptor.hasDiscriminator()); assertTrue("DocumentImpl has not interface", classDescriptor.hasInterfaces()); assertEquals("Invalid number of implemented interface", classDescriptor.getImplements().size(), 1); assertFalse("Invalid extend strategy", classDescriptor.usesNodeTypePerHierarchyStrategy()); assertTrue("Invalid extend strategy", classDescriptor.usesNodeTypePerConcreteClassStrategy()); } catch (JcrMappingException e) { e.printStackTrace(); fail("Impossible to retrieve the converter " + e); } } /** * * Test Node Type per concrete class setting */ public void testProxy() { try { String[] files = { "./src/test/test-config/jcrmapping-proxy.xml" }; Mapper mapper = new DigesterMapperImpl(files); assertNotNull("Mapper is null", mapper); ClassDescriptor classDescriptor = mapper.getClassDescriptorByClass(Main.class); assertNotNull("ClassDescriptor is null", classDescriptor); assertTrue("Invalid proxy setting for bean field proxyDetail ", classDescriptor.getBeanDescriptor("proxyDetail").isProxy()); assertFalse("Invalid proxy setting for bean field detail ", classDescriptor.getBeanDescriptor("detail").isProxy()); assertTrue("Invalid proxy setting for collection field proxyDetail ", classDescriptor.getCollectionDescriptor("proxyCollection").isProxy()); } catch (JcrMappingException e) { e.printStackTrace(); fail("Impossible to retrieve the converter " + e); } } }
package com.yummy.common.utils.vcode;

import java.awt.*;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

/**
 * Class AnimatedGifEncoder - Encodes a GIF file consisting of one or
 * more frames.
 * <pre>
 * Example:
 *    AnimatedGifEncoder e = new AnimatedGifEncoder();
 *    e.start(outputFileName);
 *    e.setDelay(1000);   // 1 frame per sec
 *    e.addFrame(image1);
 *    e.addFrame(image2);
 *    e.finish();
 * </pre>
 * No copyright asserted on the source code of this class. May be used
 * for any purpose, however, refer to the Unisys LZW patent for restrictions
 * on use of the associated Encoder class. Please forward any corrections
 * to questions at fmsware.com.
 *
 * @author yummy
 * @version 1.03 November 2003
 */
public class GifEncoder {

    protected int width; // image size
    protected int height;
    protected Color transparent = null; // transparent color if given
    protected int transIndex; // transparent index in color table
    protected int repeat = -1; // no repeat
    protected int delay = 0; // frame delay (hundredths of a second)
    protected boolean started = false; // ready to output frames
    protected OutputStream out;
    protected BufferedImage image; // current frame
    protected byte[] pixels; // BGR byte array from frame
    protected byte[] indexedPixels; // converted frame indexed to palette
    protected int colorDepth; // number of bit planes
    protected byte[] colorTab; // RGB palette
    protected boolean[] usedEntry = new boolean[256]; // active palette entries
    protected int palSize = 7; // color table size field (bits - 1), i.e. 256 entries
    protected int dispose = -1; // disposal code (-1 = use default)
    protected boolean closeStream = false; // close stream when finished
    protected boolean firstFrame = true;
    protected boolean sizeSet = false; // if false, get size from first frame
    protected int sample = 10; // default sample interval for quantizer

    /**
     * Sets the delay time between each frame, or changes it
     * for subsequent frames (applies to last frame added).
     *
     * @param ms int delay time in milliseconds (stored internally in 1/100 s units)
     */
    public void setDelay(int ms) {
        delay = Math.round(ms / 10.0f);
    }

    /**
     * Sets the GIF frame disposal code for the last added frame
     * and any subsequent frames. Default is 0 if no transparent
     * color has been set, otherwise 2.
     *
     * @param code int disposal code; negative values are ignored.
     */
    public void setDispose(int code) {
        if (code >= 0) {
            dispose = code;
        }
    }

    /**
     * Sets the number of times the set of GIF frames
     * should be played; 0 means play indefinitely.
     * Default is -1, meaning no Netscape looping extension is written
     * (the animation plays once). Must be invoked before the first
     * image is added.
     *
     * @param iter int number of iterations; negative values are ignored.
     */
    public void setRepeat(int iter) {
        if (iter >= 0) {
            repeat = iter;
        }
    }

    /**
     * Sets the transparent color for the last added frame
     * and any subsequent frames.
     * Since all colors are subject to modification
     * in the quantization process, the color in the final
     * palette for each frame closest to the given color
     * becomes the transparent color for that frame.
     * May be set to null to indicate no transparent color.
     *
     * @param c Color to be treated as transparent on display.
     */
    public void setTransparent(Color c) {
        transparent = c;
    }

    /**
     * Adds next GIF frame. The frame is not written immediately, but is
     * actually deferred until the next frame is received so that timing
     * data can be inserted. Invoking <code>finish()</code> flushes all
     * frames. If <code>setSize</code> was not invoked, the size of the
     * first image is used for all subsequent frames.
     *
     * @param im BufferedImage containing frame to write.
     * @return true if successful (false if im is null, start() was not
     *         called, or an I/O error occurred).
     */
    public boolean addFrame(BufferedImage im) {
        if ((im == null) || !started) {
            return false;
        }
        boolean ok = true;
        try {
            if (!sizeSet) {
                // use first frame's size
                setSize(im.getWidth(), im.getHeight());
            }
            image = im;
            getImagePixels(); // convert to correct format if necessary
            analyzePixels(); // build color table & map pixels
            if (firstFrame) {
                writeLSD(); // logical screen descriptor
                writePalette(); // global color table
                if (repeat >= 0) {
                    // use NS app extension to indicate reps
                    writeNetscapeExt();
                }
            }
            writeGraphicCtrlExt(); // write graphic control extension
            writeImageDesc(); // image descriptor
            if (!firstFrame) {
                writePalette(); // local color table
            }
            writePixels(); // encode and write pixel data
            firstFrame = false;
        } catch (IOException e) {
            ok = false;
        }
        return ok;
    }

    /**
     * Flushes the underlying output stream without ending the GIF.
     * Added by alvaro.
     *
     * @return true if the flush succeeded, false on I/O error.
     */
    public boolean outFlush() {
        boolean ok = true;
        try {
            out.flush();
            return ok;
        } catch (IOException e) {
            ok = false;
        }
        return ok;
    }

    /**
     * Returns the bytes written so far.
     * NOTE(review): assumes start() was given a ByteArrayOutputStream —
     * the cast fails with ClassCastException for any other stream type.
     *
     * @return the encoded GIF bytes accumulated in the stream.
     */
    public byte[] getFrameByteArray() {
        return ((ByteArrayOutputStream) out).toByteArray();
    }

    /**
     * Flushes any pending data and closes output file.
     * If writing to an OutputStream, the stream is not
     * closed.
     *
     * @return true if the trailer was written and the flush/close succeeded.
     */
    public boolean finish() {
        if (!started) return false;
        boolean ok = true;
        started = false;
        try {
            out.write(0x3b); // gif trailer
            out.flush();
            if (closeStream) {
                out.close();
            }
        } catch (IOException e) {
            ok = false;
        }
        return ok;
    }

    /**
     * Resets per-encoding state so the encoder can be reused.
     * Does not reset size, delay, repeat, transparent color or quality.
     */
    public void reset() {
        // reset for subsequent use
        transIndex = 0;
        out = null;
        image = null;
        pixels = null;
        indexedPixels = null;
        colorTab = null;
        closeStream = false;
        firstFrame = true;
    }

    /**
     * Sets frame rate in frames per second. Equivalent to
     * <code>setDelay(1000/fps)</code>.
     *
     * @param fps float frame rate (frames per second)
     */
    public void setFrameRate(float fps) {
        if (fps != 0f) {
            delay = Math.round(100f / fps);
        }
    }

    /**
     * Sets quality of color quantization (conversion of images
     * to the maximum 256 colors allowed by the GIF specification).
     * Lower values (minimum = 1) produce better colors, but slow
     * processing significantly. 10 is the default, and produces
     * good color mapping at reasonable speeds. Values greater
     * than 20 do not yield significant improvements in speed.
     *
     * @param quality int greater than 0 (values below 1 are clamped to 1).
     */
    public void setQuality(int quality) {
        if (quality < 1) quality = 1;
        sample = quality;
    }

    /**
     * Sets the GIF frame size. The default size is the
     * size of the first frame added if this method is
     * not invoked. Ignored once the first frame has been written.
     *
     * @param w int frame width.
     * @param h int frame height.
     */
    public void setSize(int w, int h) {
        if (started && !firstFrame) return;
        width = w;
        height = h;
        if (width < 1) width = 320;
        if (height < 1) height = 240;
        sizeSet = true;
    }

    /**
     * Initiates GIF file creation on the given stream. The stream
     * is not closed automatically.
     *
     * @param os OutputStream on which GIF images are written.
     * @return false if initial write failed.
     */
    public boolean start(OutputStream os) {
        if (os == null) return false;
        boolean ok = true;
        closeStream = false;
        out = os;
        try {
            writeString("GIF89a"); // header
        } catch (IOException e) {
            ok = false;
        }
        return started = ok;
    }

    /**
     * Initiates writing of a GIF file with the specified name.
     * The created file stream is closed by finish().
     *
     * @param file String containing output file name.
     * @return false if open or initial write failed.
     */
    public boolean start(String file) {
        boolean ok = true;
        try {
            out = new BufferedOutputStream(new FileOutputStream(file));
            ok = start(out);
            closeStream = true;
        } catch (IOException e) {
            ok = false;
        }
        return started = ok;
    }

    /**
     * Analyzes image colors and creates color map: quantizes the BGR
     * pixel bytes to a 256-entry palette and fills indexedPixels with
     * one palette index per pixel.
     */
    protected void analyzePixels() {
        int len = pixels.length;
        int nPix = len / 3;
        indexedPixels = new byte[nPix];
        Quant nq = new Quant(pixels, len, sample); // initialize quantizer
        colorTab = nq.process(); // create reduced palette
        // convert map from BGR to RGB
        for (int i = 0; i < colorTab.length; i += 3) {
            byte temp = colorTab[i];
            colorTab[i] = colorTab[i + 2];
            colorTab[i + 2] = temp;
            usedEntry[i / 3] = false;
        }
        // map image pixels to new palette
        int k = 0;
        for (int i = 0; i < nPix; i++) {
            int index = nq.map(pixels[k++] & 0xff, pixels[k++] & 0xff, pixels[k++] & 0xff);
            usedEntry[index] = true;
            indexedPixels[i] = (byte) index;
        }
        pixels = null;
        colorDepth = 8;
        palSize = 7;
        // get closest match to transparent color if specified
        if (transparent != null) {
            transIndex = findClosest(transparent);
        }
    }

    /**
     * Returns index of palette color closest to c (squared RGB distance),
     * considering only palette entries actually used by the current frame.
     * Returns -1 if no palette has been built yet.
     */
    protected int findClosest(Color c) {
        if (colorTab == null) return -1;
        int r = c.getRed();
        int g = c.getGreen();
        int b = c.getBlue();
        int minpos = 0;
        int dmin = 256 * 256 * 256;
        int len = colorTab.length;
        for (int i = 0; i < len;) {
            int dr = r - (colorTab[i++] & 0xff);
            int dg = g - (colorTab[i++] & 0xff);
            int db = b - (colorTab[i] & 0xff);
            int d = dr * dr + dg * dg + db * db;
            int index = i / 3;
            if (usedEntry[index] && (d < dmin)) {
                dmin = d;
                minpos = index;
            }
            i++;
        }
        return minpos;
    }

    /**
     * Extracts image pixels into byte array "pixels". If the frame does
     * not match the target size or is not TYPE_3BYTE_BGR, it is first
     * redrawn into a correctly sized/typed image.
     */
    protected void getImagePixels() {
        int w = image.getWidth();
        int h = image.getHeight();
        int type = image.getType();
        if ((w != width) || (h != height) || (type != BufferedImage.TYPE_3BYTE_BGR)) {
            // create new image with right size/format
            BufferedImage temp = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
            Graphics2D g = temp.createGraphics();
            g.drawImage(image, 0, 0, null);
            image = temp;
        }
        pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    }

    /**
     * Writes Graphic Control Extension
     */
    protected void writeGraphicCtrlExt() throws IOException {
        out.write(0x21); // extension introducer
        out.write(0xf9); // GCE label
        out.write(4); // data block size
        int transp, disp;
        if (transparent == null) {
            transp = 0;
            disp = 0; // dispose = no action
        } else {
            transp = 1;
            disp = 2; // force clear if using transparent color
        }
        if (dispose >= 0) {
            disp = dispose & 7; // user override
        }
        disp <<= 2;
        // packed fields
        out.write(0 | // 1:3 reserved
                disp | // 4:6 disposal
                0 | // 7 user input - 0 = none
                transp); // 8 transparency flag
        writeShort(delay); // delay x 1/100 sec
        out.write(transIndex); // transparent color index
        out.write(0); // block terminator
    }

    /**
     * Writes Image Descriptor
     */
    protected void writeImageDesc() throws IOException {
        out.write(0x2c); // image separator
        writeShort(0); // image position x,y = 0,0
        writeShort(0);
        writeShort(width); // image size
        writeShort(height);
        // packed fields
        if (firstFrame) {
            // no LCT - GCT is used for first (or only) frame
            out.write(0);
        } else {
            // specify normal LCT
            out.write(0x80 | // 1 local color table 1=yes
                    0 | // 2 interlace - 0=no
                    0 | // 3 sorted - 0=no
                    0 | // 4-5 reserved
                    palSize); // 6-8 size of color table
        }
    }

    /**
     * Writes Logical Screen Descriptor
     */
    protected void writeLSD() throws IOException {
        // logical screen size
        writeShort(width);
        writeShort(height);
        // packed fields
        out.write((0x80 | // 1 : global color table flag = 1 (gct used)
                0x70 | // 2-4 : color resolution = 7
                0x00 | // 5 : gct sort flag = 0
                palSize)); // 6-8 : gct size
        out.write(0); // background color index
        out.write(0); // pixel aspect ratio - assume 1:1
    }

    /**
     * Writes Netscape application extension to define
     * repeat count.
     */
    protected void writeNetscapeExt() throws IOException {
        out.write(0x21); // extension introducer
        out.write(0xff); // app extension label
        out.write(11); // block size
        writeString("NETSCAPE" + "2.0"); // app id + auth code
        out.write(3); // sub-block size
        out.write(1); // loop sub-block id
        writeShort(repeat); // loop count (extra iterations, 0=repeat forever)
        out.write(0); // block terminator
    }

    /**
     * Writes color table, zero-padded to the full 256 RGB entries.
     */
    protected void writePalette() throws IOException {
        out.write(colorTab, 0, colorTab.length);
        int n = (3 * 256) - colorTab.length;
        for (int i = 0; i < n; i++) {
            out.write(0);
        }
    }

    /**
     * Encodes and writes pixel data (LZW, via the associated Encoder class).
     */
    protected void writePixels() throws IOException {
        Encoder encoder = new Encoder(width, height, indexedPixels, colorDepth);
        encoder.encode(out);
    }

    /**
     * Write 16-bit value to output stream, LSB first
     */
    protected void writeShort(int value) throws IOException {
        out.write(value & 0xff);
        out.write((value >> 8) & 0xff);
    }

    /**
     * Writes string to output stream, one byte per character
     * (low byte only; correct for the ASCII signatures used here).
     */
    protected void writeString(String s) throws IOException {
        for (int i = 0; i < s.length(); i++) {
            out.write((byte) s.charAt(i));
        }
    }
}
/** * * Copyright (c) 2005, European Commission project OneLab under contract 034819 * (http://www.one-lab.org) * * All rights reserved. * Redistribution and use in source and binary forms, with or * without modification, are permitted provided that the following * conditions are met: * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the distribution. * - Neither the name of the University Catholique de Louvain - UCL * nor the names of its contributors may be used to endorse or * promote products derived from this software without specific prior * written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. 
 * The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.util.bloom;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.hash.Hash;

/**
 * Defines the general behavior of a filter.
 * <p>
 * A filter is a data structure which aims at offering a lossy summary of a set
 * <code>A</code>. The key idea is to map entries of <code>A</code> (also called
 * <i>keys</i>) into several positions in a vector through the use of several
 * hash functions.
 * <p>
 * Typically, a filter will be implemented as a Bloom filter (or a Bloom filter
 * extension).
 * <p>
 * It must be extended in order to define the real behavior.
 *
 * @see Key The general behavior of a key
 * @see HashFunction A hash function
 */
public abstract class Filter implements Writable {
    // Serialization version marker; negative so it can be distinguished from
    // the old unversioned format, whose first int was the (positive) nbHash.
    private static final int VERSION = -1; // negative to accommodate for old
    // format

    /** The vector size of <i>this</i> filter. */
    protected int vectorSize;

    /** The hash function used to map a key to several positions in the vector. */
    protected HashFunction hash;

    /** The number of hash function to consider. */
    protected int nbHash;

    /** Type of hashing function to use. */
    protected int hashType;

    /** No-arg constructor for deserialization via {@link #readFields(DataInput)}. */
    protected Filter() {
    }

    /**
     * Constructor.
     *
     * @param vectorSize
     *            The vector size of <i>this</i> filter.
     * @param nbHash
     *            The number of hash functions to consider.
     * @param hashType
     *            type of the hashing function (see {@link Hash}).
     */
    protected Filter(int vectorSize, int nbHash, int hashType) {
        this.vectorSize = vectorSize;
        this.nbHash = nbHash;
        this.hashType = hashType;
        this.hash = new HashFunction(this.vectorSize, this.nbHash, this.hashType);
    }

    /**
     * Adds a key to <i>this</i> filter.
     *
     * @param key
     *            The key to add.
     */
    public abstract void add(Key key);

    /**
     * Determines whether a specified key belongs to <i>this</i> filter.
     *
     * @param key
     *            The key to test.
     * @return boolean True if the specified key belongs to <i>this</i> filter.
     *         False otherwise.
     */
    public abstract boolean membershipTest(Key key);

    /**
     * Performs a logical AND between <i>this</i> filter and a specified filter.
     * <p>
     * <b>Invariant</b>: The result is assigned to <i>this</i> filter.
     *
     * @param filter
     *            The filter to AND with.
     */
    public abstract void and(Filter filter);

    /**
     * Performs a logical OR between <i>this</i> filter and a specified filter.
     * <p>
     * <b>Invariant</b>: The result is assigned to <i>this</i> filter.
     *
     * @param filter
     *            The filter to OR with.
     */
    public abstract void or(Filter filter);

    /**
     * Performs a logical XOR between <i>this</i> filter and a specified filter.
     * <p>
     * <b>Invariant</b>: The result is assigned to <i>this</i> filter.
     *
     * @param filter
     *            The filter to XOR with.
     */
    public abstract void xor(Filter filter);

    /**
     * Performs a logical NOT on <i>this</i> filter.
     * <p>
     * The result is assigned to <i>this</i> filter.
     */
    public abstract void not();

    /**
     * Adds a list of keys to <i>this</i> filter.
     *
     * @param keys
     *            The list of keys.
     */
    public void add(List<Key> keys) {
        if (keys == null) {
            throw new IllegalArgumentException("ArrayList<Key> may not be null");
        }
        for (Key key : keys) {
            add(key);
        }
    }// end add()

    /**
     * Adds a collection of keys to <i>this</i> filter.
     *
     * @param keys
     *            The collection of keys.
     */
    public void add(Collection<Key> keys) {
        if (keys == null) {
            throw new IllegalArgumentException(
                    "Collection<Key> may not be null");
        }
        for (Key key : keys) {
            add(key);
        }
    }// end add()

    /**
     * Adds an array of keys to <i>this</i> filter.
     *
     * @param keys
     *            The array of keys.
     */
    public void add(Key[] keys) {
        if (keys == null) {
            throw new IllegalArgumentException("Key[] may not be null");
        }
        for (int i = 0; i < keys.length; i++) {
            add(keys[i]);
        }
    }// end add()

    // Writable interface

    // Wire format: VERSION marker, nbHash (int), hashType (byte), vectorSize (int).
    public void write(DataOutput out) throws IOException {
        out.writeInt(VERSION);
        out.writeInt(this.nbHash);
        out.writeByte(this.hashType);
        out.writeInt(this.vectorSize);
    }

    // Accepts both the versioned format above and the legacy unversioned
    // format, where the first int is nbHash itself (always positive) and the
    // hash type is implicitly JENKINS_HASH.
    public void readFields(DataInput in) throws IOException {
        int ver = in.readInt();
        if (ver > 0) { // old unversioned format
            this.nbHash = ver;
            this.hashType = Hash.JENKINS_HASH;
        } else if (ver == VERSION) {
            this.nbHash = in.readInt();
            this.hashType = in.readByte();
        } else {
            throw new IOException("Unsupported version: " + ver);
        }
        this.vectorSize = in.readInt();
        this.hash = new HashFunction(this.vectorSize, this.nbHash, this.hashType);
    }
}// end class
/* * Copyright 2018 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.zuul; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.netflix.config.DynamicIntProperty; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import javax.inject.Inject; import javax.inject.Singleton; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class manages the directory polling for changes and new Groovy filters. * Polling interval and directories are specified in the initialization of the class, and a poller will check * for changes and additions. 
 *
 * @author Mikey Cohen
 * Date: 12/7/11
 * Time: 12:09 PM
 */
@Singleton
public class FilterFileManager {

    private static final Logger LOG = LoggerFactory.getLogger(FilterFileManager.class);

    // Size of the pool used to compile/load filter files in parallel.
    private static final DynamicIntProperty FILE_PROCESSOR_THREADS = new DynamicIntProperty("zuul.filterloader.threads", 1);
    // Upper bound (seconds) on one batch of filter-load tasks.
    private static final DynamicIntProperty FILE_PROCESSOR_TASKS_TIMEOUT_SECS = new DynamicIntProperty("zuul.filterloader.tasks.timeout", 120);

    Thread poller;
    // Polling loop flag; set false by stopPoller(). NOTE(review): not volatile,
    // so the poller thread may observe the update with a delay — confirm intended.
    boolean bRunning = true;

    private final FilterFileManagerConfig config;
    private final FilterLoader filterLoader;
    private final ExecutorService processFilesService;

    @Inject
    public FilterFileManager(FilterFileManagerConfig config, FilterLoader filterLoader) {
        this.config = config;
        this.filterLoader = filterLoader;
        ThreadFactory tf = new ThreadFactoryBuilder().setDaemon(true).setNameFormat("FilterFileManager_ProcessFiles-%d").build();
        this.processFilesService = Executors.newFixedThreadPool(FILE_PROCESSOR_THREADS.get(), tf);
    }

    /**
     * Initialized the GroovyFileManager: loads the statically configured filter
     * classes, performs one synchronous scan of the filter directories, then
     * starts the background poller. Annotated @Inject so the DI container
     * invokes it after construction.
     *
     * @throws Exception
     */
    @Inject
    public void init() throws Exception {
        long startTime = System.currentTimeMillis();

        filterLoader.putFiltersForClasses(config.getClassNames());
        manageFiles();
        startPoller();

        LOG.warn("Finished loading all zuul filters. Duration = " + (System.currentTimeMillis() - startTime) + " ms.");
    }

    /**
     * Shuts down the poller
     */
    public void shutdown() {
        stopPoller();
    }

    // Signals the poller loop to exit after its current sleep/scan cycle.
    void stopPoller() {
        bRunning = false;
    }

    // Spawns a daemon thread that rescans the filter directories every
    // pollingIntervalSeconds until stopPoller() is called.
    void startPoller() {
        poller = new Thread("GroovyFilterFileManagerPoller") {
            {
                setDaemon(true);
            }

            public void run() {
                while (bRunning) {
                    try {
                        sleep(config.getPollingIntervalSeconds() * 1000);
                        manageFiles();
                    } catch (Exception e) {
                        LOG.error("Error checking and/or loading filter files from Poller thread.", e);
                    }
                }
            }
        };
        poller.start();
    }

    /**
     * Returns the directory File for a path. Falls back to resolving the path
     * against the classloader when it is not a filesystem directory.
     * A Runtime Exception is thrown if the directory is invalid.
     *
     * @param sPath
     * @return a File representing the directory path
     */
    public File getDirectory(String sPath) {
        File directory = new File(sPath);
        if (!directory.isDirectory()) {
            URL resource = FilterFileManager.class.getClassLoader().getResource(sPath);
            try {
                directory = new File(resource.toURI());
            } catch (Exception e) {
                LOG.error("Error accessing directory in classloader. path=" + sPath, e);
            }
            if (!directory.isDirectory()) {
                throw new RuntimeException(directory.getAbsolutePath() + " is not a valid directory");
            }
        }
        return directory;
    }

    /**
     * Returns a List<File> of all Files from all polled directories,
     * filtered by the configured FilenameFilter.
     *
     * @return
     */
    List<File> getFiles() {
        List<File> list = new ArrayList<File>();
        for (String sDirectory : config.getDirectories()) {
            if (sDirectory != null) {
                File directory = getDirectory(sDirectory);
                File[] aFiles = directory.listFiles(config.getFilenameFilter());
                if (aFiles != null) {
                    list.addAll(Arrays.asList(aFiles));
                }
            }
        }
        return list;
    }

    /**
     * puts files into the FilterLoader. The FilterLoader will only add new or changed filters.
     * Each file is loaded as a task on the processFilesService pool; a failed
     * load is logged and does not abort the batch.
     *
     * @param aFiles a List<File>
     * @throws IOException
     * @throws InstantiationException
     * @throws IllegalAccessException
     */
    void processGroovyFiles(List<File> aFiles) throws Exception {

        List<Callable<Boolean>> tasks = new ArrayList<>();
        for (File file : aFiles) {
            tasks.add(() -> {
                try {
                    return filterLoader.putFilter(file);
                }
                catch(Exception e) {
                    LOG.error("Error loading groovy filter from disk! file = " + String.valueOf(file), e);
                    return false;
                }
            });
        }
        processFilesService.invokeAll(tasks, FILE_PROCESSOR_TASKS_TIMEOUT_SECS.get(), TimeUnit.SECONDS);
    }

    // One scan: list candidate files and hand them to the filter loader.
    // Any failure is logged and rethrown (fails fast during init()).
    void manageFiles() {
        try {
            List<File> aFiles = getFiles();
            processGroovyFiles(aFiles);
        } catch (Exception e) {
            String msg = "Error updating groovy filters from disk!";
            LOG.error(msg, e);
            throw new RuntimeException(msg, e);
        }
    }

    /** Immutable configuration holder for the manager: directories to poll,
     *  statically registered filter class names, polling interval, and the
     *  filename filter used to select filter files. */
    public static class FilterFileManagerConfig {
        private String[] directories;
        private String[] classNames;
        private int pollingIntervalSeconds;
        private FilenameFilter filenameFilter;

        public FilterFileManagerConfig(String[] directories, String[] classNames, int pollingIntervalSeconds, FilenameFilter filenameFilter) {
            this.directories = directories;
            this.classNames = classNames;
            this.pollingIntervalSeconds = pollingIntervalSeconds;
            this.filenameFilter = filenameFilter;
        }

        public String[] getDirectories() {
            return directories;
        }

        public String[] getClassNames() {
            return classNames;
        }

        public int getPollingIntervalSeconds() {
            return pollingIntervalSeconds;
        }

        public FilenameFilter getFilenameFilter() {
            return filenameFilter;
        }
    }
}
package io.katharsis.dispatcher.controller.resource; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.JsonNode; import io.katharsis.dispatcher.controller.BaseControllerTest; import io.katharsis.queryParams.QueryParams; import io.katharsis.request.dto.DataBody; import io.katharsis.request.dto.RequestBody; import io.katharsis.request.dto.ResourceRelationships; import io.katharsis.request.path.JsonPath; import io.katharsis.request.path.ResourcePath; import io.katharsis.resource.exception.RequestBodyException; import io.katharsis.resource.mock.models.Memorandum; import io.katharsis.resource.mock.models.Task; import io.katharsis.resource.mock.models.ComplexPojo; import io.katharsis.response.BaseResponseContext; import io.katharsis.response.ResourceResponseContext; import org.junit.Assert; import org.junit.Test; import static org.assertj.core.api.Assertions.assertThat; public class ResourcePatchTest extends BaseControllerTest { private static final String REQUEST_TYPE = "PATCH"; @Test public void onGivenRequestCollectionGetShouldDenyIt() { // GIVEN JsonPath jsonPath = pathBuilder.buildPath("/tasks/"); ResourcePatch sut = new ResourcePatch(resourceRegistry, typeParser, objectMapper); // WHEN boolean result = sut.isAcceptable(jsonPath, REQUEST_TYPE); // THEN Assert.assertEquals(result, false); } @Test public void onGivenRequestResourceGetShouldAcceptIt() { // GIVEN JsonPath jsonPath = pathBuilder.buildPath("/tasks/1"); ResourcePatch sut = new ResourcePatch(resourceRegistry, typeParser, objectMapper); // WHEN boolean result = sut.isAcceptable(jsonPath, REQUEST_TYPE); // THEN Assert.assertEquals(result, true); } @Test public void onNoBodyResourceShouldThrowException() throws Exception { // GIVEN ResourcePost sut = new ResourcePost(resourceRegistry, typeParser, objectMapper); // THEN expectedException.expect(RuntimeException.class); // WHEN sut.handle(new ResourcePath("fridges"), new QueryParams(), null, null); } @Test public void 
onGivenRequestResourceGetShouldHandleIt() throws Exception { // GIVEN RequestBody newTaskBody = new RequestBody(); DataBody data = new DataBody(); newTaskBody.setData(data); data.setType("tasks"); data.setAttributes(objectMapper.createObjectNode() .put("name", "sample task")); JsonPath taskPath = pathBuilder.buildPath("/tasks"); // WHEN ResourcePost resourcePost = new ResourcePost(resourceRegistry, typeParser, objectMapper); ResourceResponseContext taskResponse = resourcePost.handle(taskPath, new QueryParams(), null, newTaskBody); assertThat(taskResponse.getResponse().getEntity()).isExactlyInstanceOf(Task.class); Long taskId = ((Task) (taskResponse.getResponse().getEntity())).getId(); assertThat(taskId).isNotNull(); // GIVEN RequestBody taskPatch = new RequestBody(); data = new DataBody(); taskPatch.setData(data); data.setType("tasks"); data.setAttributes(objectMapper.createObjectNode() .put("name", "task updated")); JsonPath jsonPath = pathBuilder.buildPath("/tasks/" + taskId); ResourcePatch sut = new ResourcePatch(resourceRegistry, typeParser, objectMapper); // WHEN BaseResponseContext response = sut.handle(jsonPath, new QueryParams(), null, taskPatch); // THEN Assert.assertNotNull(response); assertThat(response.getResponse().getEntity()).isExactlyInstanceOf(Task.class); assertThat(((Task) (response.getResponse().getEntity())).getName()).isEqualTo("task updated"); } @Test public void onGivenRequestResourceShouldThrowException() throws Exception { // GIVEN RequestBody newTaskBody = new RequestBody(); DataBody data = new DataBody(); newTaskBody.setData(data); data.setType("tasks"); data.setAttributes(objectMapper.createObjectNode() .put("name", "sample task")); JsonPath taskPath = pathBuilder.buildPath("/tasks"); // WHEN ResourcePost resourcePost = new ResourcePost(resourceRegistry, typeParser, objectMapper); ResourceResponseContext taskResponse = resourcePost.handle(taskPath, new QueryParams(), null, newTaskBody); 
assertThat(taskResponse.getResponse().getEntity()).isExactlyInstanceOf(Task.class); Long taskId = ((Task) (taskResponse.getResponse().getEntity())).getId(); assertThat(taskId).isNotNull(); // GIVEN RequestBody taskPatch = new RequestBody(); data = new DataBody(); taskPatch.setData(data); data.setType("WRONG_AND_MISSING_TYPE"); data.setAttributes(objectMapper.createObjectNode() .put("name", "task updated")); JsonPath jsonPath = pathBuilder.buildPath("/tasks/" + taskId); ResourcePatch sut = new ResourcePatch(resourceRegistry, typeParser, objectMapper); // WHEN BaseResponseContext response = null; try { response = sut.handle(jsonPath, new QueryParams(), null, taskPatch); Assert.fail("Should have recieved exception."); } catch (RequestBodyException rbe) { // Got correct exception } catch (Error ex) { Assert.fail("Got bad exception: " + ex); } } @Test public void onInheritedResourceShouldUpdateInheritedResource() throws Exception { // GIVEN RequestBody memorandumBody = new RequestBody(); DataBody data = new DataBody(); memorandumBody.setData(data); data.setType("memoranda"); ObjectNode attributes = objectMapper.createObjectNode() .put("title", "sample title") .put("body", "sample body"); data.setAttributes(attributes); JsonPath documentsPath = pathBuilder.buildPath("/documents"); ResourcePost resourcePost = new ResourcePost(resourceRegistry, typeParser, objectMapper); // WHEN ResourceResponseContext taskResponse = resourcePost.handle(documentsPath, new QueryParams(), null, memorandumBody); // THEN assertThat(taskResponse.getResponse().getEntity()).isExactlyInstanceOf(Memorandum.class); Long memorandumId = ((Memorandum) (taskResponse.getResponse().getEntity())).getId(); assertThat(memorandumId).isNotNull(); // -------------------------- // GIVEN memorandumBody = new RequestBody(); data = new DataBody(); memorandumBody.setData(data); data.setType("memoranda"); data.setAttributes(objectMapper.createObjectNode() .put("title", "new title") .put("body", "new body")); JsonPath 
documentPath = pathBuilder.buildPath("/documents/" + memorandumId); ResourcePatch sut = new ResourcePatch(resourceRegistry, typeParser, objectMapper); // WHEN BaseResponseContext memorandumResponse = sut.handle(documentPath, new QueryParams(), null, memorandumBody); // THEN assertThat(memorandumResponse.getResponse().getEntity()).isExactlyInstanceOf(Memorandum.class); Memorandum persistedMemorandum = (Memorandum) (memorandumResponse.getResponse().getEntity()); assertThat(persistedMemorandum.getId()).isNotNull(); assertThat(persistedMemorandum.getTitle()).isEqualTo("new title"); assertThat(persistedMemorandum.getBody()).isEqualTo("new body"); } @Test public void onResourceRelationshipNullifiedShouldSaveIt() throws Exception { // GIVEN RequestBody newTaskBody = new RequestBody(); DataBody data = new DataBody(); newTaskBody.setData(data); data.setType("tasks"); data.setAttributes(objectMapper.createObjectNode() .put("name", "sample task")); JsonPath taskPath = pathBuilder.buildPath("/tasks"); // WHEN ResourcePost resourcePost = new ResourcePost(resourceRegistry, typeParser, objectMapper); ResourceResponseContext taskResponse = resourcePost.handle(taskPath, new QueryParams(), null, newTaskBody); assertThat(taskResponse.getResponse().getEntity()).isExactlyInstanceOf(Task.class); Long taskId = ((Task) (taskResponse.getResponse().getEntity())).getId(); assertThat(taskId).isNotNull(); // GIVEN RequestBody taskPatch = new RequestBody(); data = new DataBody(); taskPatch.setData(data); data.setType("tasks"); data.setAttributes(objectMapper.createObjectNode() .put("name", "task updated")); data.setRelationships(new ResourceRelationships()); data.getRelationships() .setAdditionalProperty("project", null); JsonPath jsonPath = pathBuilder.buildPath("/tasks/" + taskId); ResourcePatch sut = new ResourcePatch(resourceRegistry, typeParser, objectMapper); // WHEN BaseResponseContext response = sut.handle(jsonPath, new QueryParams(), null, taskPatch); // THEN 
Assert.assertNotNull(response); assertThat(response.getResponse().getEntity()).isExactlyInstanceOf(Task.class); assertThat(((Task) (response.getResponse().getEntity())).getName()).isEqualTo("task updated"); assertThat(((Task) (response.getResponse().getEntity())).getProject()).isNull(); } @Test public void onGivenRequestResourcePatchShouldHandleMissingFields() throws Exception { JsonPath complexPojoPath = pathBuilder.buildPath("/complexpojos/1"); // WHEN ResourceGet resourceGet = new ResourceGet(resourceRegistry, typeParser, includeFieldSetter); BaseResponseContext complexPojoResponse = resourceGet.handle(complexPojoPath, new QueryParams(), null, null); assertThat(complexPojoResponse.getResponse().getEntity()).isExactlyInstanceOf(ComplexPojo.class); Long complexPojoId = ((ComplexPojo) (complexPojoResponse.getResponse().getEntity())).getId(); assertThat(complexPojoId).isNotNull(); assertThat(((ComplexPojo) (complexPojoResponse.getResponse().getEntity())).getContainedPojo().getUpdateableProperty1()).isEqualTo("value from repository mock"); // GIVEN RequestBody complexPojoPatch = new RequestBody(); DataBody data = new DataBody(); complexPojoPatch.setData(data); data.setType("complexpojos"); String rawPatchData = "" + "{" + " 'containedPojo':{" + " 'updateableProperty1':'updated value'" + " }," + " 'updateableProperty':'wasNullBefore'" + "}"; rawPatchData = rawPatchData.replaceAll("'", "\""); JsonNode patchAttributes = objectMapper.readTree(rawPatchData); data.setAttributes(patchAttributes); JsonPath jsonPath = pathBuilder.buildPath("/complexpojos/" + complexPojoId); ResourcePatch sut = new ResourcePatch(resourceRegistry, typeParser, objectMapper); // WHEN BaseResponseContext response = sut.handle(jsonPath, new QueryParams(), null, complexPojoPatch); // THEN Assert.assertNotNull(response); assertThat(response.getResponse().getEntity()).isExactlyInstanceOf(ComplexPojo.class); assertThat(((ComplexPojo) 
(response.getResponse().getEntity())).getContainedPojo().getUpdateableProperty1()).isEqualTo("updated value"); assertThat(((ComplexPojo) (response.getResponse().getEntity())).getContainedPojo().getUpdateableProperty2()).isEqualTo("value from repository mock"); assertThat(((ComplexPojo) (response.getResponse().getEntity())).getUpdateableProperty()).isEqualTo("wasNullBefore"); } }
package org.Json;

/*
 Copyright (c) 2002 JSON.org

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:

 The above copyright notice and this permission notice shall be included in all
 copies or substantial portions of the Software.

 The Software shall be used for Good, not Evil.

 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
 */

/**
 * This provides static methods to convert comma delimited text into a
 * JSONArray, and to convert a JSONArray into comma delimited text. Comma
 * delimited text is a very popular format for data interchange. It is
 * understood by most database, spreadsheet, and organizer programs.
 * <p/>
 * Each row of text represents a row in a table or a data record. Each row
 * ends with a NEWLINE character. Each row contains one or more values.
 * Values are separated by commas. A value can contain any character except
 * for comma, unless it is wrapped in single quotes or double quotes.
 * <p/>
 * The first row usually contains the names of the columns.
 * <p/>
 * A comma delimited list can be converted into a JSONArray of JSONObjects.
 * The names for the elements in the JSONObjects can be taken from the names
 * in the first row.
 *
 * @author JSON.org
 * @version 2014-05-03
 */
public class CDL {

    /**
     * Get the next value. The value can be wrapped in quotes. The value can
     * be empty.
     *
     * @param x A JSONTokener of the source text.
     * @return The value string, or null if empty.
     * @throws JSONException if the quoted string is badly formed.
     */
    private static String getValue(JSONTokener x) throws JSONException {
        char c;
        char q;
        StringBuilder sb; // unsynchronized builder: this is single-threaded local work
        // Skip leading spaces and tabs before the value.
        do {
            c = x.next();
        } while (c == ' ' || c == '\t');
        switch (c) {
            case 0:
                // End of input: there is no value at all.
                return null;
            case '"':
            case '\'':
                // Quoted value: accumulate characters up to the matching quote.
                q = c;
                sb = new StringBuilder();
                for (; ; ) {
                    c = x.next();
                    if (c == q) {
                        break;
                    }
                    // A quoted value may not span lines or run off the input.
                    if (c == 0 || c == '\n' || c == '\r') {
                        throw x.syntaxError("Missing close quote '" + q + "'.");
                    }
                    sb.append(c);
                }
                return sb.toString();
            case ',':
                // Empty value: push the comma back so the caller can consume it.
                x.back();
                return "";
            default:
                // Unquoted value: everything up to (but excluding) the next comma.
                x.back();
                return x.nextTo(',');
        }
    }

    /**
     * Produce a JSONArray of strings from a row of comma delimited values.
     *
     * @param x A JSONTokener of the source text.
     * @return A JSONArray of strings, or null if there is no further row.
     * @throws JSONException if the row contains an illegal character.
     */
    public static JSONArray rowToJSONArray(JSONTokener x) throws JSONException {
        JSONArray ja = new JSONArray();
        for (; ; ) {
            String value = getValue(x);
            char c = x.next();
            // End of input, or an empty first field with no following comma,
            // means there is no (further) row.
            if (value == null ||
                    (ja.length() == 0 && value.length() == 0 && c != ',')) {
                return null;
            }
            ja.put(value);
            // Consume trailing spaces up to the separator or end of row.
            for (; ; ) {
                if (c == ',') {
                    break;
                }
                if (c != ' ') {
                    if (c == '\n' || c == '\r' || c == 0) {
                        return ja;
                    }
                    throw x.syntaxError("Bad character '" + c + "' (" +
                            (int) c + ").");
                }
                c = x.next();
            }
        }
    }

    /**
     * Produce a JSONObject from a row of comma delimited text, using a
     * parallel JSONArray of strings to provides the names of the elements.
     *
     * @param names A JSONArray of names. This is commonly obtained from the
     *              first row of a comma delimited text file using the
     *              rowToJSONArray method.
     * @param x A JSONTokener of the source text.
     * @return A JSONObject combining the names and values, or null if the
     *         row is exhausted.
     * @throws JSONException if the row is malformed.
     */
    public static JSONObject rowToJSONObject(JSONArray names, JSONTokener x) throws JSONException {
        JSONArray ja = rowToJSONArray(x);
        return ja != null ? ja.toJSONObject(names) : null;
    }

    /**
     * Produce a comma delimited text row from a JSONArray. Values containing
     * the comma character will be quoted. Troublesome characters may be
     * removed: inside a quoted value, embedded double quotes and control
     * characters are dropped rather than escaped.
     *
     * @param ja A JSONArray of strings.
     * @return A string ending in NEWLINE.
     */
    public static String rowToString(JSONArray ja) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < ja.length(); i += 1) {
            if (i > 0) {
                sb.append(',');
            }
            Object object = ja.opt(i);
            if (object != null) {
                String string = object.toString();
                // Quote when the value contains a comma, line break, NUL,
                // or itself starts with a double quote.
                if (string.length() > 0 && (string.indexOf(',') >= 0 ||
                        string.indexOf('\n') >= 0 || string.indexOf('\r') >= 0 ||
                        string.indexOf(0) >= 0 || string.charAt(0) == '"')) {
                    sb.append('"');
                    int length = string.length();
                    for (int j = 0; j < length; j += 1) {
                        char c = string.charAt(j);
                        // Silently drop control chars and embedded quotes.
                        if (c >= ' ' && c != '"') {
                            sb.append(c);
                        }
                    }
                    sb.append('"');
                } else {
                    sb.append(string);
                }
            }
        }
        sb.append('\n');
        return sb.toString();
    }

    /**
     * Produce a JSONArray of JSONObjects from a comma delimited text string,
     * using the first row as a source of names.
     *
     * @param string The comma delimited text.
     * @return A JSONArray of JSONObjects, or null if there is no data.
     * @throws JSONException if the text is malformed.
     */
    public static JSONArray toJSONArray(String string) throws JSONException {
        return toJSONArray(new JSONTokener(string));
    }

    /**
     * Produce a JSONArray of JSONObjects from a comma delimited text string,
     * using the first row as a source of names.
     *
     * @param x The JSONTokener containing the comma delimited text.
     * @return A JSONArray of JSONObjects, or null if there is no data.
     * @throws JSONException if the text is malformed.
     */
    public static JSONArray toJSONArray(JSONTokener x) throws JSONException {
        return toJSONArray(rowToJSONArray(x), x);
    }

    /**
     * Produce a JSONArray of JSONObjects from a comma delimited text string
     * using a supplied JSONArray as the source of element names.
     *
     * @param names A JSONArray of strings.
     * @param string The comma delimited text.
     * @return A JSONArray of JSONObjects, or null if names is empty or there
     *         is no data.
     * @throws JSONException if the text is malformed.
     */
    public static JSONArray toJSONArray(JSONArray names, String string) throws JSONException {
        return toJSONArray(names, new JSONTokener(string));
    }

    /**
     * Produce a JSONArray of JSONObjects from a comma delimited text string
     * using a supplied JSONArray as the source of element names.
     *
     * @param names A JSONArray of strings.
     * @param x A JSONTokener of the source text.
     * @return A JSONArray of JSONObjects, or null if names is empty or no
     *         rows were produced.
     * @throws JSONException if the text is malformed.
     */
    public static JSONArray toJSONArray(JSONArray names, JSONTokener x) throws JSONException {
        if (names == null || names.length() == 0) {
            return null;
        }
        JSONArray ja = new JSONArray();
        for (; ; ) {
            JSONObject jo = rowToJSONObject(names, x);
            if (jo == null) {
                break;
            }
            ja.put(jo);
        }
        if (ja.length() == 0) {
            return null;
        }
        return ja;
    }

    /**
     * Produce a comma delimited text from a JSONArray of JSONObjects. The
     * first row will be a list of names obtained by inspecting the first
     * JSONObject.
     *
     * @param ja A JSONArray of JSONObjects.
     * @return A comma delimited text, or null if the array is empty or the
     *         first object has no names.
     * @throws JSONException if a row cannot be produced.
     */
    public static String toString(JSONArray ja) throws JSONException {
        JSONObject jo = ja.optJSONObject(0);
        if (jo != null) {
            JSONArray names = jo.names();
            if (names != null) {
                // Header row first, then one row per object.
                return rowToString(names) + toString(names, ja);
            }
        }
        return null;
    }

    /**
     * Produce a comma delimited text from a JSONArray of JSONObjects using
     * a provided list of names. The list of names is not included in the
     * output.
     *
     * @param names A JSONArray of strings.
     * @param ja A JSONArray of JSONObjects.
     * @return A comma delimited text, or null if names is empty.
     * @throws JSONException if a row cannot be produced.
     */
    public static String toString(JSONArray names, JSONArray ja) throws JSONException {
        if (names == null || names.length() == 0) {
            return null;
        }
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < ja.length(); i += 1) {
            JSONObject jo = ja.optJSONObject(i);
            if (jo != null) {
                // Non-object entries are skipped silently.
                sb.append(rowToString(jo.toJSONArray(names)));
            }
        }
        return sb.toString();
    }
}
package org.springframework.security.core.authority.mapping;

import static org.junit.Assert.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;

import org.junit.Test;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.GrantedAuthorityImpl;
import org.springframework.security.core.authority.mapping.Attributes2GrantedAuthoritiesMapper;
import org.springframework.security.core.authority.mapping.MapBasedAttributes2GrantedAuthoritiesMapper;

/**
 * Tests for {@link MapBasedAttributes2GrantedAuthoritiesMapper}: validation of
 * the configured attribute map in afterPropertiesSet(), and mapping of role
 * attributes to granted authorities for every supported value shape (String,
 * GrantedAuthority, comma-separated String, arrays and Lists — possibly
 * nested — plus null/empty entries).
 *
 * NOTE: raw types and the class-level @SuppressWarnings are deliberate here —
 * several tests intentionally put invalid key/value types into the map.
 *
 * @author Ruud Senden
 */
@SuppressWarnings("unchecked")
public class MapBasedAttributes2GrantedAuthoritiesMapperTest {

    // A mapper with no map configured must be rejected.
    @Test(expected=IllegalArgumentException.class)
    public void testAfterPropertiesSetNoMap() throws Exception {
        MapBasedAttributes2GrantedAuthoritiesMapper mapper = new MapBasedAttributes2GrantedAuthoritiesMapper();
        mapper.afterPropertiesSet();
    }

    // An empty map is also invalid configuration.
    @Test(expected=IllegalArgumentException.class)
    public void testAfterPropertiesSetEmptyMap() throws Exception {
        MapBasedAttributes2GrantedAuthoritiesMapper mapper = new MapBasedAttributes2GrantedAuthoritiesMapper();
        mapper.setAttributes2grantedAuthoritiesMap(new HashMap());
        mapper.afterPropertiesSet();
    }

    // Keys must be Strings; an arbitrary Object key must be rejected.
    @Test(expected=IllegalArgumentException.class)
    public void testAfterPropertiesSetInvalidKeyTypeMap() throws Exception {
        MapBasedAttributes2GrantedAuthoritiesMapper mapper = new MapBasedAttributes2GrantedAuthoritiesMapper();
        HashMap m = new HashMap();
        m.put(new Object(),"ga1");
        mapper.setAttributes2grantedAuthoritiesMap(m);
        mapper.afterPropertiesSet();
    }

    // A plain Object value (not String/GrantedAuthority/array/collection) is invalid.
    @Test(expected=IllegalArgumentException.class)
    public void testAfterPropertiesSetInvalidValueTypeMap1() throws Exception {
        MapBasedAttributes2GrantedAuthoritiesMapper mapper = new MapBasedAttributes2GrantedAuthoritiesMapper();
        HashMap m = new HashMap();
        m.put("role1",new Object());
        mapper.setAttributes2grantedAuthoritiesMap(m);
        mapper.afterPropertiesSet();
    }

    // An invalid element nested inside an otherwise valid array is also rejected.
    @Test(expected=IllegalArgumentException.class)
    public void testAfterPropertiesSetInvalidValueTypeMap2() throws Exception {
        MapBasedAttributes2GrantedAuthoritiesMapper mapper = new MapBasedAttributes2GrantedAuthoritiesMapper();
        HashMap m = new HashMap();
        m.put("role1",new Object[]{new String[]{"ga1","ga2"}, new Object()});
        mapper.setAttributes2grantedAuthoritiesMap(m);
        mapper.afterPropertiesSet();
    }

    // The reference map built below must pass validation.
    @Test
    public void testAfterPropertiesSetValidMap() throws Exception {
        MapBasedAttributes2GrantedAuthoritiesMapper mapper = new MapBasedAttributes2GrantedAuthoritiesMapper();
        HashMap m = getValidAttributes2GrantedAuthoritiesMap();
        mapper.setAttributes2grantedAuthoritiesMap(m);
        mapper.afterPropertiesSet();
    }

    // role1 -> plain String value
    @Test
    public void testMapping1() throws Exception {
        String[] roles = { "role1" };
        String[] expectedGas = { "ga1" };
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // role2 -> single GrantedAuthority value
    @Test
    public void testMapping2() throws Exception {
        String[] roles = { "role2" };
        String[] expectedGas = { "ga2" };
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // role3 -> List mixing String and GrantedAuthority
    @Test
    public void testMapping3() throws Exception {
        String[] roles = { "role3" };
        String[] expectedGas = { "ga3", "ga4" };
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // role4 -> comma-separated String is split into two authorities
    @Test
    public void testMapping4() throws Exception {
        String[] roles = { "role4" };
        String[] expectedGas = { "ga5", "ga6" };
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // role5 -> List containing a nested array; flattened
    @Test
    public void testMapping5() throws Exception {
        String[] roles = { "role5" };
        String[] expectedGas = { "ga7", "ga8", "ga9" };
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // role6 -> Object[] containing a nested array; flattened
    @Test
    public void testMapping6() throws Exception {
        String[] roles = { "role6" };
        String[] expectedGas = { "ga10", "ga11", "ga12" };
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // role7 -> String[] value
    @Test
    public void testMapping7() throws Exception {
        String[] roles = { "role7" };
        String[] expectedGas = { "ga13", "ga14" };
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // role8 -> String[] with a trailing null element; null is ignored
    @Test
    public void testMapping8() throws Exception {
        String[] roles = { "role8" };
        String[] expectedGas = { "ga13", "ga14" };
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // role9 -> null value yields no authorities
    @Test
    public void testMapping9() throws Exception {
        String[] roles = { "role9" };
        String[] expectedGas = {};
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // role10 -> empty array yields no authorities
    @Test
    public void testMapping10() throws Exception {
        String[] roles = { "role10" };
        String[] expectedGas = {};
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // role11 -> List containing only null yields no authorities
    @Test
    public void testMapping11() throws Exception {
        String[] roles = { "role11" };
        String[] expectedGas = {};
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // An attribute with no mapping at all produces no authorities.
    @Test
    public void testNonExistingMapping() throws Exception {
        String[] roles = { "nonExisting" };
        String[] expectedGas = {};
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // All roles at once: the union of every per-role expectation above.
    @Test
    public void testMappingCombination() throws Exception {
        String[] roles = { "role1", "role2", "role3", "role4", "role5", "role6", "role7", "role8", "role9", "role10", "role11" };
        String[] expectedGas = { "ga1", "ga2", "ga3", "ga4", "ga5", "ga6", "ga7", "ga8", "ga9", "ga10", "ga11", "ga12", "ga13", "ga14"};
        Attributes2GrantedAuthoritiesMapper mapper = getDefaultMapper();
        testGetGrantedAuthorities(mapper, roles, expectedGas);
    }

    // Builds the reference map exercising every supported value shape.
    private HashMap getValidAttributes2GrantedAuthoritiesMap() {
        HashMap m = new HashMap();
        m.put("role1","ga1");
        m.put("role2",new GrantedAuthorityImpl("ga2"));
        m.put("role3",Arrays.asList(new Object[]{"ga3",new GrantedAuthorityImpl("ga4")}));
        m.put("role4","ga5,ga6");
        m.put("role5",Arrays.asList(new Object[]{"ga7","ga8",new Object[]{new GrantedAuthorityImpl("ga9")}}));
        m.put("role6",new Object[]{"ga10","ga11",new Object[]{new GrantedAuthorityImpl("ga12")}});
        m.put("role7",new String[]{"ga13","ga14"});
        m.put("role8",new String[]{"ga13","ga14",null});
        m.put("role9",null);
        m.put("role10",new Object[]{});
        m.put("role11",Arrays.asList(new Object[]{null}));
        return m;
    }

    // Fully configured, validated mapper used by all the mapping tests.
    private MapBasedAttributes2GrantedAuthoritiesMapper getDefaultMapper() throws Exception {
        MapBasedAttributes2GrantedAuthoritiesMapper mapper = new MapBasedAttributes2GrantedAuthoritiesMapper();
        mapper.setAttributes2grantedAuthoritiesMap(getValidAttributes2GrantedAuthoritiesMap());
        mapper.afterPropertiesSet();
        return mapper;
    }

    // Asserts the mapped authorities equal expectedGas as a set (order-insensitive,
    // compared via mutual containsAll).
    private void testGetGrantedAuthorities(Attributes2GrantedAuthoritiesMapper mapper, String[] roles, String[] expectedGas) {
        List<GrantedAuthority> result = mapper.getGrantedAuthorities(Arrays.asList(roles));
        Collection resultColl = new ArrayList(result.size());
        for (int i = 0; i < result.size(); i++) {
            resultColl.add(result.get(i).getAuthority());
        }
        Collection expectedColl = Arrays.asList(expectedGas);
        assertTrue("Role collections should match; result: " + resultColl + ", expected: " + expectedColl,
                expectedColl.containsAll(resultColl) && resultColl.containsAll(expectedColl));
    }
}
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.android.inspections.lint;

import com.android.ide.common.res2.ResourceItem;
import com.android.ide.common.resources.ResourceUrl;
import com.android.ide.common.resources.configuration.FolderConfiguration;
import com.android.resources.ResourceFolderType;
import com.android.resources.ResourceType;
import com.android.tools.idea.rendering.LocalResourceRepository;
import com.android.tools.idea.rendering.ProjectResourceRepository;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlAttributeValue;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.refactoring.psi.SearchUtils;
import org.jetbrains.android.util.AndroidResourceUtil;
import org.jetbrains.annotations.NotNull;
import org.must.android.module.extension.AndroidModuleExtension;

import java.util.List;
import java.util.Set;

import static com.android.SdkConstants.*;

/**
 * Quickfix which migrates a drawable resource into a mipmap resource, moving bitmap and drawable XML
 * folders into mipmap folders (created if necessary) as well as updating resource references in XML
 * and Java files
 */
class MigrateDrawableToMipmapFix implements AndroidLintQuickFix {

  // The @drawable/... URL being migrated; supplies both the resource type and name.
  private final ResourceUrl myUrl;

  MigrateDrawableToMipmapFix(@NotNull ResourceUrl url) {
    myUrl = url;
  }

  /**
   * Performs the migration: collects every file defining the drawable plus every
   * XML/Java reference to it, then inside a single write command moves the files
   * into the corresponding mipmap-* folders (or rewrites value aliases in place)
   * and rewrites all references from drawable to mipmap.
   */
  @Override
  public void apply(@NotNull PsiElement startElement, @NotNull PsiElement endElement, @NotNull AndroidQuickfixContexts.Context context) {
    Project project = startElement.getProject();
    AndroidModuleExtension facet = ModuleUtilCore.getExtension(startElement, AndroidModuleExtension.class);
    if (facet == null) {
      // Not inside an Android module; nothing to migrate.
      return;
    }

    // Files that define the resource, and every PSI element referencing it.
    final List<PsiFile> bitmaps = Lists.newArrayList();
    final Set<PsiElement> references = Sets.newHashSet();

    GlobalSearchScope useScope = GlobalSearchScope.projectScope(project);
    // Gather all resource items declaring this drawable (one per folder qualifier).
    ProjectResourceRepository projectResources = facet.getProjectResources(true);
    List<ResourceItem> resourceItems = projectResources.getResourceItem(myUrl.type, myUrl.name);
    if (resourceItems != null) {
      for (ResourceItem item : resourceItems) {
        PsiFile file = LocalResourceRepository.getItemPsiFile(project, item);
        if (file == null) {
          continue;
        }
        bitmaps.add(file);

        // References to the file itself (e.g. from XML).
        Iterable<PsiReference> allReferences = SearchUtils.findAllReferences(file, useScope);
        for (PsiReference next : allReferences) {
          PsiElement element = next.getElement();
          if (element != null) {
            references.add(element);
          }
        }
      }
    }

    // References to the generated R.drawable.<name> field (Java code).
    PsiField[] resourceFields = AndroidResourceUtil.findResourceFields(facet, ResourceType.DRAWABLE.getName(), myUrl.name, true);
    if (resourceFields.length == 1) {
      Iterable<PsiReference> allReferences = SearchUtils.findAllReferences(resourceFields[0], useScope);
      for (PsiReference next : allReferences) {
        PsiElement element = next.getElement();
        if (element != null) {
          references.add(element);
        }
      }
    }

    // Every file the write command may touch: the resource files themselves
    // plus each file containing a reference.
    Set<PsiFile> applicableFiles = Sets.newHashSet();
    applicableFiles.addAll(bitmaps);
    for (PsiElement element : references) {
      PsiFile containingFile = element.getContainingFile();
      if (containingFile != null) {
        applicableFiles.add(containingFile);
      }
    }

    // NOTE(review): command title says "Bitmap" but the fix migrates to mipmap —
    // looks like it should read "Migrate Drawable to Mipmap"; confirm intent.
    WriteCommandAction<Void> action = new WriteCommandAction<Void>(project, "Migrate Drawable to Bitmap",
                                                                   applicableFiles.toArray(new PsiFile[applicableFiles.size()])) {
      @Override
      protected void run(@NotNull Result<Void> result) throws Throwable {
        // Move each drawable bitmap from drawable-my-qualifiers to bitmap-my-qualifiers
        for (PsiFile bitmap : bitmaps) {
          VirtualFile file = bitmap.getVirtualFile();
          if (file == null) {
            continue;
          }
          VirtualFile parent = file.getParent();
          if (parent == null) {
            // shouldn't happen for bitmaps found in the resource repository
            continue;
          }

          if (file.getFileType() == XmlFileType.INSTANCE && parent.getName().startsWith(FD_RES_VALUES)) {
            // Resource alias rather than an actual drawable XML file: update the type reference instead
            XmlFile xmlFile = (XmlFile)bitmap;
            XmlTag root = xmlFile.getRootTag();
            if (root != null) {
              for (XmlTag item : root.getSubTags()) {
                String name = item.getAttributeValue(ATTR_NAME);
                if (myUrl.name.equals(name)) {
                  // Alias may be declared either as <drawable name="..."> or
                  // as <item type="drawable" ...>; handle both forms.
                  if (ResourceType.DRAWABLE.getName().equals(item.getName())) {
                    item.setName(ResourceType.MIPMAP.getName());
                  } else if (ResourceType.DRAWABLE.getName().equals(item.getAttributeValue(ATTR_TYPE))) {
                    item.setAttribute(ATTR_TYPE, ResourceType.MIPMAP.getName());
                  }
                }
              }
            }
            continue; // Don't move the file
          }

          VirtualFile res = parent.getParent();
          if (res == null) {
            // shouldn't happen for bitmaps found in the resource repository
            continue;
          }

          // Derive the mipmap folder name with the same qualifiers
          // (e.g. drawable-hdpi -> mipmap-hdpi), creating it on demand.
          FolderConfiguration configuration = FolderConfiguration.getConfigForFolder(parent.getName());
          if (configuration == null) {
            continue;
          }
          String targetFolderName = configuration.getFolderName(ResourceFolderType.MIPMAP);
          VirtualFile targetFolder = res.findChild(targetFolderName);
          if (targetFolder == null) {
            targetFolder = res.createChildDirectory(this, targetFolderName);
          }
          file.move(this, targetFolder);
        }

        // Update references
        for (PsiElement reference : references) {
          if (reference instanceof XmlAttributeValue) {
            // Convert @drawable/foo references to @mipmap/foo
            XmlAttributeValue value = (XmlAttributeValue)reference;
            XmlAttribute attribute = (XmlAttribute)value.getParent();
            attribute.setValue(ResourceUrl.create(ResourceType.MIPMAP, myUrl.name, false, false).toString());
          } else if (reference instanceof PsiReferenceExpression) {
            // Convert R.drawable.foo references to R.mipmap.foo
            PsiReferenceExpression inner = (PsiReferenceExpression)reference;
            PsiExpression qualifier = inner.getQualifierExpression();
            if (qualifier instanceof PsiReferenceExpression) {
              PsiReferenceExpression outer = (PsiReferenceExpression)qualifier;
              if (outer.getReferenceNameElement() instanceof PsiIdentifier) {
                PsiIdentifier identifier = (PsiIdentifier)outer.getReferenceNameElement();
                if (ResourceType.DRAWABLE.getName().equals(identifier.getText())) {
                  // NOTE(review): this local shadows the enclosing method's
                  // `project` variable; presumably the same project instance.
                  Project project = reference.getProject();
                  final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(project);
                  // Replace the "drawable" identifier in R.drawable.foo with "mipmap".
                  PsiIdentifier newIdentifier = elementFactory.createIdentifier(ResourceType.MIPMAP.getName());
                  identifier.replace(newIdentifier);
                }
              }
            }
          }
        }
      }
    };
    action.execute();
  }

  // The fix is always offered; applicability is decided by the inspection.
  @Override
  public boolean isApplicable(@NotNull PsiElement startElement, @NotNull PsiElement endElement, @NotNull AndroidQuickfixContexts.ContextType contextType) {
    return true;
  }

  // User-visible quickfix label, e.g. "Convert @drawable/foo to @mipmap/foo".
  @NotNull
  @Override
  public String getName() {
    return "Convert " + myUrl + " to @mipmap/" + myUrl.name;
  }
}
package com.dreamteam.paca; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.animation.ObjectAnimator; import android.annotation.TargetApi; import android.content.Context; import android.os.Build; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.Gravity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.ViewTreeObserver; import android.view.animation.AccelerateInterpolator; import android.view.animation.DecelerateInterpolator; import android.view.animation.OvershootInterpolator; import android.widget.Button; import android.widget.FrameLayout; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.TextSwitcher; import com.android.volley.AuthFailureError; import com.android.volley.Request; import com.android.volley.RequestQueue; import com.android.volley.Response; import com.android.volley.VolleyError; import com.android.volley.toolbox.NetworkImageView; import com.android.volley.toolbox.StringRequest; import com.android.volley.toolbox.Volley; import org.json.JSONException; import org.json.JSONObject; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import butterknife.ButterKnife; import butterknife.InjectView; /** * Created by froger_mcs on 05.11.14. 
* Edited by: * Nicky Thai * Dan Flanagan */ public class FeedAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> implements View.OnClickListener { private static final int VIEW_TYPE_DEFAULT = 1; private static final int VIEW_TYPE_LOADER = 2; private static final String ADDRESS_URI = "http://nthai.cs.trincoll.edu/Pictures/"; private static final DecelerateInterpolator DECCELERATE_INTERPOLATOR = new DecelerateInterpolator(); private static final AccelerateInterpolator ACCELERATE_INTERPOLATOR = new AccelerateInterpolator(); private static final OvershootInterpolator OVERSHOOT_INTERPOLATOR = new OvershootInterpolator(4); private static final int ANIMATED_ITEMS_COUNT = 2; private Context mContext; private ArrayList<JSONObject> feedItems; private int lastAnimatedPosition = -1; private int itemsCount = 0; private boolean animateItems = false; private final Map<RecyclerView.ViewHolder, AnimatorSet> likeAnimations = new HashMap<>(); private final ArrayList<Integer> likedPositions = new ArrayList<>(); private boolean showLoadingView = false; private int loadingViewSize = Utils.dpToPx(200); public FeedAdapter(Context context) { new FeedAdapter(context, new ArrayList<JSONObject>()); } public FeedAdapter(Context context, ArrayList<JSONObject> items) { this.mContext = context; feedItems = items; } @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { final View view = LayoutInflater.from(mContext).inflate(R.layout.item_feed, parent, false); final CellFeedViewHolder cellFeedViewHolder = new CellFeedViewHolder(view); if (viewType == VIEW_TYPE_DEFAULT) { cellFeedViewHolder.btnDisLike.setOnClickListener(this); cellFeedViewHolder.btnLike.setOnClickListener(this); } else if (viewType == VIEW_TYPE_LOADER) { View bgView = new View(mContext); bgView.setLayoutParams(new FrameLayout.LayoutParams( ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT )); bgView.setBackgroundColor(0x77ffffff); 
cellFeedViewHolder.vImageRoot.addView(bgView); cellFeedViewHolder.vProgressBg = bgView; FrameLayout.LayoutParams params = new FrameLayout.LayoutParams(loadingViewSize, loadingViewSize); params.gravity = Gravity.CENTER; SendingProgressView sendingProgressView = new SendingProgressView(mContext); sendingProgressView.setLayoutParams(params); cellFeedViewHolder.vImageRoot.addView(sendingProgressView); cellFeedViewHolder.vSendingProgress = sendingProgressView; } return cellFeedViewHolder; } @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) private void runEnterAnimation(View view, int position) { if (!animateItems || position >= ANIMATED_ITEMS_COUNT - 1) { return; } if (position > lastAnimatedPosition) { lastAnimatedPosition = position; view.setTranslationY(Utils.getScreenHeight(mContext)); view.animate() .translationY(0) .setInterpolator(new DecelerateInterpolator(3.f)) .setDuration(700) .start(); } } @Override public void onBindViewHolder(RecyclerView.ViewHolder viewHolder, int position) { runEnterAnimation(viewHolder.itemView, position); final CellFeedViewHolder holder = (CellFeedViewHolder) viewHolder; if (getItemViewType(position) == VIEW_TYPE_DEFAULT) { bindDefaultFeedItem(position, holder); } else if (getItemViewType(position) == VIEW_TYPE_LOADER) { bindLoadingFeedItem(position, holder); } } @TargetApi(Build.VERSION_CODES.HONEYCOMB) private void bindDefaultFeedItem(int position, CellFeedViewHolder holder) { position = position % feedItems.size(); String uri; try { uri = ADDRESS_URI + feedItems.get(position).getString("address"); } catch (JSONException e) { uri = GalleryActivity.TOKEN_IMAGE_ADDESS; } holder.ivFeedCenter.setImageUrl(uri, ((GalleryActivity) mContext).getImageLoader()); updateLikesCounter(holder); updateHeartButton(holder, false); holder.btnDisLike.setTag(holder); holder.ivFeedCenter.setTag(holder); holder.btnLike.setTag(holder); if (likeAnimations.containsKey(holder)) { likeAnimations.get(holder).cancel(); } resetLikeAnimationState(holder); } 
private void bindLoadingFeedItem(int position, final CellFeedViewHolder holder) { holder.ivFeedCenter.setImageResource(R.drawable.img_feed_center_1); //holder.ivFeedBottom.setImageResource(R.drawable.img_feed_bottom_1); holder.vSendingProgress.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { @Override public boolean onPreDraw() { holder.vSendingProgress.getViewTreeObserver().removeOnPreDrawListener(this); holder.vSendingProgress.simulateProgress(); return true; } }); holder.vSendingProgress.setOnLoadingFinishedListener(new SendingProgressView.OnLoadingFinishedListener() { @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public void onLoadingFinished() { holder.vSendingProgress.animate().scaleY(0).scaleX(0).setDuration(200).setStartDelay(100); holder.vProgressBg.animate().alpha(0.f).setDuration(200).setStartDelay(100) .setListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { holder.vSendingProgress.setScaleX(1); holder.vSendingProgress.setScaleY(1); holder.vProgressBg.setAlpha(1); showLoadingView = false; notifyItemChanged(0); } }) .start(); } }); } @Override public int getItemViewType(int position) { if (showLoadingView && position == 0) { return VIEW_TYPE_LOADER; } else { return VIEW_TYPE_DEFAULT; } } @Override public int getItemCount() { return itemsCount; } public ArrayList<JSONObject> getFeedItems() { return feedItems; } public void setFeedItems(ArrayList<JSONObject> feedItems) { this.feedItems = feedItems; } private void updateLikesCounter(CellFeedViewHolder holder) { int currentLikesCount; try { currentLikesCount = feedItems.get(holder.getPosition()).getInt("likes"); } catch (JSONException e) { currentLikesCount = 0; } String likesCountText = mContext.getResources().getQuantityString( R.plurals.likes_count, currentLikesCount, currentLikesCount ); holder.tsLikesCounter.setCurrentText(likesCountText); } private void updateLikesCounter(CellFeedViewHolder holder, 
boolean animated, boolean like) { int currentLikesCount; try { currentLikesCount = feedItems.get(holder.getPosition()).getInt("likes"); } catch (JSONException e) { currentLikesCount = 0; } currentLikesCount += like ? 1 : -1; String likesCountText = mContext.getResources().getQuantityString( R.plurals.likes_count, currentLikesCount, currentLikesCount ); if (animated) { holder.tsLikesCounter.setText(likesCountText); } else { holder.tsLikesCounter.setCurrentText(likesCountText); } RequestQueue requestQueue = Volley.newRequestQueue(mContext); final JSONObject pictureObject = feedItems.get(holder.getPosition()); final String likes = Integer.toString(currentLikesCount); StringRequest stringRequest = new StringRequest(Request.Method.POST, UploadPhotoTask.DATABASE_OPERATION_URI, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(GalleryActivity.TAG, "success: " + response); //notifyUploadSuccess(); } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.d(GalleryActivity.TAG, "failed: " + error.getMessage()); //notifyUploadFailure(); } }) { @Override protected Map<String, String> getParams() throws AuthFailureError { Map<String, String> postParams = new HashMap<>(); postParams.put("request_code", "1"); postParams.put("id", pictureObject.optString("id", "0")); postParams.put("likes", likes); return postParams; } @Override public Map<String, String> getHeaders() throws AuthFailureError { Map<String,String> params = new HashMap<>(); params.put("Content-Type", "application/x-www-form-urlencoded"); return params; } }; requestQueue.add(stringRequest); } @TargetApi(Build.VERSION_CODES.HONEYCOMB) private void updateHeartButton(final CellFeedViewHolder holder, boolean animated) { if (animated) { if (!likeAnimations.containsKey(holder)) { AnimatorSet animatorSet = new AnimatorSet(); likeAnimations.put(holder, animatorSet); ObjectAnimator rotationAnim = ObjectAnimator.ofFloat(holder.btnLike, 
"rotation", 0f, 360f); rotationAnim.setDuration(300); rotationAnim.setInterpolator(ACCELERATE_INTERPOLATOR); ObjectAnimator bounceAnimX = ObjectAnimator.ofFloat(holder.btnLike, "scaleX", 0.2f, 1f); bounceAnimX.setDuration(300); bounceAnimX.setInterpolator(OVERSHOOT_INTERPOLATOR); ObjectAnimator bounceAnimY = ObjectAnimator.ofFloat(holder.btnLike, "scaleY", 0.2f, 1f); bounceAnimY.setDuration(300); bounceAnimY.setInterpolator(OVERSHOOT_INTERPOLATOR); bounceAnimY.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { holder.btnLike.setImageResource(R.drawable.ic_heart_red); } }); animatorSet.play(rotationAnim); animatorSet.play(bounceAnimX).with(bounceAnimY).after(rotationAnim); animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { resetLikeAnimationState(holder); } }); animatorSet.start(); } } else { if (likedPositions.contains(holder.getPosition())) { holder.btnLike.setImageResource(R.drawable.ic_heart_red); } else { holder.btnLike.setImageResource(R.drawable.ic_heart_outline_grey); } } } @TargetApi(Build.VERSION_CODES.HONEYCOMB) private void updateHeartButton2(final CellFeedViewHolder holder, boolean animated) { if (animated) { if (!likeAnimations.containsKey(holder)) { AnimatorSet animatorSet = new AnimatorSet(); likeAnimations.put(holder, animatorSet); ObjectAnimator rotationAnim = ObjectAnimator.ofFloat(holder.btnDisLike, "rotation", 0f, 360f); rotationAnim.setDuration(300); rotationAnim.setInterpolator(ACCELERATE_INTERPOLATOR); ObjectAnimator bounceAnimX = ObjectAnimator.ofFloat(holder.btnDisLike, "scaleX", 0.2f, 1f); bounceAnimX.setDuration(300); bounceAnimX.setInterpolator(OVERSHOOT_INTERPOLATOR); ObjectAnimator bounceAnimY = ObjectAnimator.ofFloat(holder.btnDisLike, "scaleY", 0.2f, 1f); bounceAnimY.setDuration(300); bounceAnimY.setInterpolator(OVERSHOOT_INTERPOLATOR); bounceAnimY.addListener(new AnimatorListenerAdapter() { @Override public void 
onAnimationStart(Animator animation) { holder.btnDisLike.setImageResource(R.drawable.ic_heart_red); } }); animatorSet.play(rotationAnim); animatorSet.play(bounceAnimX).with(bounceAnimY).after(rotationAnim); animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { resetLikeAnimationState(holder); } }); animatorSet.start(); } } else { if (likedPositions.contains(holder.getPosition())) { holder.btnDisLike.setImageResource(R.drawable.ic_heart_red); } else { holder.btnDisLike.setImageResource(R.drawable.ic_heart_outline_grey); } } } @Override public void onClick(View view) { final int viewId = view.getId(); switch (viewId) { case R.id.btn_dislike: CellFeedViewHolder holderD = (CellFeedViewHolder) view.getTag(); if (!likedPositions.contains(holderD.getPosition())) { likedPositions.add(holderD.getPosition()); updateLikesCounter(holderD, true, false); updateHeartButton2(holderD, true); } break; case R.id.btn_like: CellFeedViewHolder holderL = (CellFeedViewHolder) view.getTag(); if (!likedPositions.contains(holderL.getPosition())) { likedPositions.add(holderL.getPosition()); updateLikesCounter(holderL, true, true); updateHeartButton(holderL, true); } break; } } private void resetLikeAnimationState(CellFeedViewHolder holder) { likeAnimations.remove(holder); holder.vBgLike.setVisibility(View.GONE); holder.ivLike.setVisibility(View.GONE); } public void updateItems(boolean animated) { itemsCount = 10; animateItems = animated; notifyDataSetChanged(); } public void showLoadingView() { showLoadingView = true; notifyItemChanged(0); } public static class CellFeedViewHolder extends RecyclerView.ViewHolder { @InjectView(R.id.ivFeedCenter) NetworkImageView ivFeedCenter; @InjectView(R.id.btn_dislike) ImageButton btnDisLike; @InjectView(R.id.btn_like) ImageButton btnLike; @InjectView(R.id.vBgLike) View vBgLike; @InjectView(R.id.ivLike) ImageView ivLike; @InjectView(R.id.tsLikesCounter) TextSwitcher tsLikesCounter; 
@InjectView(R.id.vImageRoot) FrameLayout vImageRoot; SendingProgressView vSendingProgress; View vProgressBg; public CellFeedViewHolder(View view) { super(view); ButterKnife.inject(this, view); } } }
/**
 * Copyright (c) 2011, Ben Fortuna
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *  o Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *
 *  o Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 *
 *  o Neither the name of Ben Fortuna nor the names of any other contributors
 * may be used to endorse or promote products derived from this software
 * without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package net.fortuna.ical4j.model;

import java.net.URISyntaxException;

import net.fortuna.ical4j.util.Strings;

import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;

/**
 * Defines an iCalendar parameter. Subclasses of this class provide additional validation and typed values for specific
 * iCalendar parameters.
 *
 * Note that subclasses must provide a reference to the factory used to create the
 * parameter to support parameter cloning (copy). If no factory is specified an
 * {@link UnsupportedOperationException} will be thrown by the {@link #copy()} method.
 *
 * @author Ben Fortuna
 *
 * $Id: Parameter.java,v 1.27 2011/02/20 03:12:32 fortuna Exp $ [Apr 5, 2004]
 */
public abstract class Parameter extends Content {

    private static final long serialVersionUID = -2058497904769713528L;

    /**
     * Region abbreviation.
     */
    public static final String ABBREV = "ABBREV";

    /**
     * Alternate text representation.
     */
    public static final String ALTREP = "ALTREP";

    /**
     * Common name.
     */
    public static final String CN = "CN";

    /**
     * Calendar user type.
     */
    public static final String CUTYPE = "CUTYPE";

    /**
     * Delegator.
     */
    public static final String DELEGATED_FROM = "DELEGATED-FROM";

    /**
     * Delegatee.
     */
    public static final String DELEGATED_TO = "DELEGATED-TO";

    /**
     * Directory entry.
     */
    public static final String DIR = "DIR";

    /**
     * Inline encoding.
     */
    public static final String ENCODING = "ENCODING";

    /**
     * Format type.
     */
    public static final String FMTTYPE = "FMTTYPE";

    /**
     * Free/busy time type.
     */
    public static final String FBTYPE = "FBTYPE";

    /**
     * Language for text.
     */
    public static final String LANGUAGE = "LANGUAGE";

    /**
     * Group or list membership.
     */
    public static final String MEMBER = "MEMBER";

    /**
     * Participation status.
     */
    public static final String PARTSTAT = "PARTSTAT";

    /**
     * Voter public comment.
     */
    public static final String PUBLIC_COMMENT = "PUBLIC-COMMENT";

    /**
     * Recurrence identifier range.
     */
    public static final String RANGE = "RANGE";

    /**
     * Alarm trigger relationship.
     */
    public static final String RELATED = "RELATED";

    /**
     * Relationship type.
     */
    public static final String RELTYPE = "RELTYPE";

    /**
     * Participation role.
     */
    public static final String ROLE = "ROLE";

    /**
     * RSVP expectation.
     */
    public static final String RSVP = "RSVP";

    /**
     * Schedule agent.
     */
    public static final String SCHEDULE_AGENT = "SCHEDULE-AGENT";

    /**
     * Schedule status.
     */
    public static final String SCHEDULE_STATUS = "SCHEDULE-STATUS";

    /**
     * Sent by.
     */
    public static final String SENT_BY = "SENT-BY";

    /**
     * Type.
     */
    public static final String STAY_INFORMED = "STAY-INFORMED";

    /**
     * Type.
     */
    public static final String TYPE = "TYPE";

    /**
     * Reference to time zone object.
     */
    public static final String TZID = "TZID";

    /**
     * Property value data type.
     */
    public static final String VALUE = "VALUE";

    /**
     * Reference to vvenue component.
     */
    public static final String VVENUE = "VVENUE";

    /**
     * Prefix to all experimental parameters.
     */
    public static final String EXPERIMENTAL_PREFIX = "X-";

    // Parameter identifier; immutable after construction.
    private final String name;

    // Factory used by copy(); may be null, in which case copy() throws.
    private final ParameterFactory factory;

    /**
     * @param aName the parameter identifier
     * @param factory the factory used to create the parameter
     */
    public Parameter(final String aName, ParameterFactory factory) {
        this.name = aName;
        this.factory = factory;
    }

    /**
     * {@inheritDoc}
     */
    public final String toString() {
        // StringBuilder is preferred over StringBuffer for a method-local buffer
        // (no synchronization is needed).
        final StringBuilder b = new StringBuilder();
        b.append(getName());
        b.append('=');
        if (isQuotable()) {
            b.append(Strings.quote(Strings.valueOf(getValue())));
        }
        else {
            b.append(Strings.valueOf(getValue()));
        }
        return b.toString();
    }

    /**
     * Indicates whether the current parameter value should be quoted.
     * @return true if the value should be quoted, otherwise false
     */
    protected boolean isQuotable() {
        return Strings.PARAM_QUOTE_PATTERN.matcher(Strings.valueOf(getValue()))
                .find();
    }

    /**
     * @return Returns the name.
     */
    public final String getName() {
        return name;
    }

    /**
     * {@inheritDoc}
     */
    public final boolean equals(final Object arg0) {
        if (arg0 instanceof Parameter) {
            final Parameter p = (Parameter) arg0;
            return new EqualsBuilder().append(getName(), p.getName())
                    .append(getValue(), p.getValue()).isEquals();
        }
        return super.equals(arg0);
    }

    /**
     * {@inheritDoc}
     */
    public final int hashCode() {
        // as parameter name is case-insensitive generate hash for uppercase..
        return new HashCodeBuilder().append(getName().toUpperCase()).append(
                getValue()).toHashCode();
    }

    /**
     * Deep copy of parameter.
     * @return new parameter
     * @throws URISyntaxException where an invalid URI is encountered
     */
    public final Parameter copy() throws URISyntaxException {
        if (factory == null) {
            throw new UnsupportedOperationException("No factory specified");
        }
        return factory.createParameter(getName(), getValue());
    }
}
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.ui.trans.steps.getxmldata;

import java.io.InputStream;
import java.io.StringReader;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.swt.widgets.Shell;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.steps.getxmldata.GetXMLDataMeta;
import org.pentaho.di.trans.steps.getxmldata.IgnoreDTDEntityResolver;
import org.pentaho.di.ui.core.dialog.ErrorDialog;

/**
 * Takes care of displaying a dialog that will handle the wait while we're finding out loop nodes for an XML file
 *
 * @author Samatar
 * @since 07-apr-2010
 */
public class LoopNodesImportProgressDialog {
  private static Class<?> PKG = GetXMLDataMeta.class; // for i18n purposes, needed by Translator2!!

  // Parent shell used to host the modal progress dialog.
  private Shell shell;

  // Step metadata; only isValidating() is consulted here.
  private GetXMLDataMeta meta;

  // Result of the scan: every distinct node path found, in discovery order.
  private String[] Xpaths;

  // XML source selectors: exactly one of filename, xml, or url is non-null
  // (which one depends on the constructor used).
  private String filename;

  private String xml;

  private String url;

  // Character encoding used when reading from filename; null for xml/url sources.
  private String encoding;

  // Accumulates unique node paths during the scan.
  private ArrayList<String> listpath;

  // Running count of discovered nodes, used only in progress sub-task messages.
  private int nr;

  /**
   * Creates a new dialog that will handle the wait while we're finding out loop nodes for an XML file
   */
  public LoopNodesImportProgressDialog( Shell shell, GetXMLDataMeta meta, String filename, String encoding ) {
    this.shell = shell;
    this.meta = meta;
    this.Xpaths = null;
    this.filename = filename;
    this.encoding = encoding;
    this.listpath = new ArrayList<String>();
    this.nr = 0;
    this.xml = null;
    this.url = null;
  }

  /**
   * Creates a dialog that scans an in-memory XML string or a URL instead of a file.
   *
   * @param xmlSource raw XML text, or a URL string when {@code useUrl} is true
   * @param useUrl    true to treat {@code xmlSource} as a URL, false as literal XML
   */
  public LoopNodesImportProgressDialog( Shell shell, GetXMLDataMeta meta, String xmlSource, boolean useUrl ) {
    this.shell = shell;
    this.meta = meta;
    this.Xpaths = null;
    this.filename = null;
    this.encoding = null;
    this.listpath = new ArrayList<String>();
    this.nr = 0;
    if ( useUrl ) {
      this.xml = null;
      this.url = xmlSource;
    } else {
      this.xml = xmlSource;
      this.url = null;
    }
  }

  /**
   * Runs the scan inside a modal ProgressMonitorDialog and returns the node
   * paths found, or null when the scan was cancelled or failed.
   */
  public String[] open() {
    IRunnableWithProgress op = new IRunnableWithProgress() {
      public void run( IProgressMonitor monitor ) throws InvocationTargetException, InterruptedException {
        try {
          Xpaths = doScan( monitor );
        } catch ( Exception e ) {
          e.printStackTrace();
          throw new InvocationTargetException( e, BaseMessages.getString( PKG,
            "GetXMLDateLoopNodesImportProgressDialog.Exception.ErrorScanningFile", filename, e.toString() ) );
        }
      }
    };

    try {
      ProgressMonitorDialog pmd = new ProgressMonitorDialog( shell );
      // fork=true, cancelable=true: the scan runs off the UI thread and the
      // user can cancel (doScan polls monitor.isCanceled()).
      pmd.run( true, true, op );
    } catch ( InvocationTargetException e ) {
      new ErrorDialog( shell, BaseMessages.getString( PKG,
        "GetXMLDateLoopNodesImportProgressDialog.ErrorScanningFile.Title" ), BaseMessages.getString( PKG,
        "GetXMLDateLoopNodesImportProgressDialog.ErrorScanningFile.Message" ), e );
    } catch ( InterruptedException e ) {
      new ErrorDialog( shell, BaseMessages.getString( PKG,
        "GetXMLDateLoopNodesImportProgressDialog.ErrorScanningFile.Title" ), BaseMessages.getString( PKG,
        "GetXMLDateLoopNodesImportProgressDialog.ErrorScanningFile.Message" ), e );
    }

    return Xpaths;
  }

  /**
   * Parses the XML source and collects the path of every element (recursively,
   * via addLoopXPath), reporting progress and honoring cancellation.
   *
   * @return the distinct node paths found, or null when cancelled
   */
  @SuppressWarnings( "unchecked" )
  private String[] doScan( IProgressMonitor monitor ) throws Exception {
    monitor.beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.ScanningFile",
      filename ), 1 );

    SAXReader reader = new SAXReader();
    monitor.worked( 1 );
    if ( monitor.isCanceled() ) {
      return null;
    }
    // Validate XML against specified schema?
    if ( meta.isValidating() ) {
      reader.setValidation( true );
      reader.setFeature( "http://apache.org/xml/features/validation/schema", true );
    } else {
      // Ignore DTD
      // NOTE(review): this skips DTD fetching but does not explicitly disable
      // external entity expansion — confirm XXE exposure for untrusted input.
      reader.setEntityResolver( new IgnoreDTDEntityResolver() );
    }
    monitor.worked( 1 );
    monitor
      .beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.ReadingDocument" ), 1 );
    if ( monitor.isCanceled() ) {
      return null;
    }
    InputStream is = null;
    try {
      Document document = null;
      // Source precedence: filename, then literal xml, then url.
      if ( !Utils.isEmpty( filename ) ) {
        is = KettleVFS.getInputStream( filename );
        document = reader.read( is, encoding );
      } else {
        if ( !Utils.isEmpty( xml ) ) {
          document = reader.read( new StringReader( xml ) );
        } else {
          document = reader.read( new URL( url ) );
        }
      }
      monitor.worked( 1 );
      monitor.beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.DocumentOpened" ),
        1 );
      monitor.worked( 1 );
      monitor.beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.ReadingNode" ), 1 );
      if ( monitor.isCanceled() ) {
        return null;
      }
      // XPath here is the root element's name, i.e. the root element itself.
      List<Node> nodes = document.selectNodes( document.getRootElement().getName() );
      monitor.worked( 1 );
      monitor.subTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.FetchNodes" ) );
      if ( monitor.isCanceled() ) {
        return null;
      }
      for ( Node node : nodes ) {
        if ( monitor.isCanceled() ) {
          return null;
        }
        if ( !listpath.contains( node.getPath() ) ) {
          nr++;
          monitor.subTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.FetchNodes",
            String.valueOf( nr ) ) );
          monitor.subTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.AddingNode",
            node.getPath() ) );
          listpath.add( node.getPath() );
          // Recurse into the node's children to collect nested paths.
          addLoopXPath( node, monitor );
        }
      }
      monitor.worked( 1 );
    } finally {
      try {
        if ( is != null ) {
          is.close();
        }
      } catch ( Exception e ) { /* Ignore */
      }
    }
    String[] list_xpath = listpath.toArray( new String[listpath.size()] );

    monitor.setTaskName( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.NodesReturned" ) );

    monitor.done();

    return list_xpath;
  }

  /**
   * Recursively walks the children of {@code node}, adding each named child's
   * path to listpath (once) and descending into children that themselves have
   * more than one child node.
   */
  private void addLoopXPath( Node node, IProgressMonitor monitor ) {
    Element ce = (Element) node;
    monitor.worked( 1 );
    // List child
    for ( int j = 0; j < ce.nodeCount(); j++ ) {
      if ( monitor.isCanceled() ) {
        return;
      }
      Node cnode = ce.node( j );

      // Unnamed nodes (e.g. text content) are skipped.
      if ( !Utils.isEmpty( cnode.getName() ) ) {
        Element cce = (Element) cnode;
        if ( !listpath.contains( cnode.getPath() ) ) {
          nr++;
          monitor.subTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.FetchNodes",
            String.valueOf( nr ) ) );
          monitor.subTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.AddingNode",
            cnode.getPath() ) );
          listpath.add( cnode.getPath() );
        }
        // let's get child nodes
        if ( cce.nodeCount() > 1 ) {
          addLoopXPath( cnode, monitor );
        }
      }
    }
  }
}
/*
 * Copyright (C) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.google.cloud.dataflow.sdk.util.state;

import com.google.cloud.dataflow.sdk.annotations.Experimental;
import com.google.cloud.dataflow.sdk.annotations.Experimental.Kind;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.transforms.Combine.CombineFn;
import com.google.cloud.dataflow.sdk.transforms.windowing.BoundedWindow;
import com.google.cloud.dataflow.sdk.transforms.windowing.OutputTimeFn;
import com.google.cloud.dataflow.sdk.util.state.StateTag.StateBinder;

import org.joda.time.Instant;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

import javax.annotation.Nullable;

/**
 * In-memory implementation of {@link StateInternals}. Used in {@code BatchModeExecutionContext}
 * and for running tests that need state.
 */
@Experimental(Kind.STATE)
public class InMemoryStateInternals extends MergingStateInternals {

  // Marker interface: every in-memory state cell can report whether it is
  // logically empty, which the test framework uses to verify cleanup.
  private interface InMemoryState {
    boolean isEmptyForTesting();
  }

  // Lazily creates one in-memory cell per (namespace, tag). The binder ignores
  // the coders — values are held as live Java objects, never serialized.
  protected final StateTable inMemoryState = new StateTable() {
    @Override
    protected StateBinder binderForNamespace(final StateNamespace namespace) {
      return new StateBinder() {
        @Override
        public <T> ValueState<T> bindValue(StateTag<ValueState<T>> address, Coder<T> coder) {
          return new InMemoryValue<T>();
        }

        @Override
        public <T> BagState<T> bindBag(final StateTag<BagState<T>> address, Coder<T> elemCoder) {
          return new InMemoryBag<T>();
        }

        @Override
        public <InputT, AccumT, OutputT> CombiningValueStateInternal<InputT, AccumT, OutputT>
            bindCombiningValue(
                StateTag<CombiningValueStateInternal<InputT, AccumT, OutputT>> address,
                Coder<AccumT> accumCoder, final CombineFn<InputT, AccumT, OutputT> combineFn) {
          return new InMemoryCombiningValue<InputT, AccumT, OutputT>(combineFn);
        }

        @Override
        public <W extends BoundedWindow> WatermarkStateInternal bindWatermark(
            StateTag<WatermarkStateInternal> address,
            OutputTimeFn<? super W> outputTimeFn) {
          return new WatermarkStateInternalImplementation(outputTimeFn);
        }
      };
    }
  };

  // Drops every cell in every namespace.
  public void clear() {
    inMemoryState.clear();
  }

  /**
   * Return true if the given state is empty. This is used by the test framework to make sure
   * that the state has been properly cleaned up.
   */
  protected boolean isEmptyForTesting(State state) {
    return ((InMemoryState) state).isEmptyForTesting();
  }

  @Override
  public <T extends State> T state(StateNamespace namespace, StateTag<T> address) {
    return inMemoryState.get(namespace, address);
  }

  // Single mutable value cell. isCleared distinguishes "never set / cleared"
  // from "explicitly set to null".
  private final class InMemoryValue<T> implements ValueState<T>, InMemoryState {
    private boolean isCleared = true;
    private T value = null;

    @Override
    public void clear() {
      // Even though we're clearing we can't remove this from the in-memory state map, since
      // other users may already have a handle on this Value.
      value = null;
      isCleared = true;
    }

    @Override
    public StateContents<T> get() {
      // Read is deferred: the returned StateContents observes the value at
      // read() time, not at get() time.
      return new StateContents<T>() {
        @Override
        public T read() {
          return value;
        }
      };
    }

    @Override
    public void set(T input) {
      isCleared = false;
      this.value = input;
    }

    @Override
    public boolean isEmptyForTesting() {
      return isCleared;
    }
  }

  // Watermark-hold cell: folds added output times into a single combined hold
  // using the supplied OutputTimeFn.
  private final class WatermarkStateInternalImplementation
      implements WatermarkStateInternal, InMemoryState {

    private final OutputTimeFn<?> outputTimeFn;

    // null means "no hold"; doubles as the emptiness indicator.
    @Nullable
    private Instant combinedHold = null;

    public WatermarkStateInternalImplementation(OutputTimeFn<?> outputTimeFn) {
      this.outputTimeFn = outputTimeFn;
    }

    @Override
    public void clear() {
      // Even though we're clearing we can't remove this from the in-memory state map, since
      // other users may already have a handle on this WatermarkBagInternal.
      combinedHold = null;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Does nothing. There is only one hold and it is not extraneous.
     * See {@link MergedWatermarkStateInternal} for a nontrivial implementation.
     */
    @Override
    public void releaseExtraneousHolds() { }

    @Override
    public StateContents<Instant> get() {
      return new StateContents<Instant>() {
        @Override
        public Instant read() {
          return combinedHold;
        }
      };
    }

    @Override
    public void add(Instant outputTime) {
      // First add seeds the hold; later adds are combined via outputTimeFn.
      combinedHold = combinedHold == null ? outputTime
          : outputTimeFn.combine(combinedHold, outputTime);
    }

    @Override
    public boolean isEmptyForTesting() {
      return combinedHold == null;
    }

    @Override
    public StateContents<Boolean> isEmpty() {
      return new StateContents<Boolean>() {
        @Override
        public Boolean read() {
          return combinedHold == null;
        }
      };
    }

    @Override
    public String toString() {
      return Objects.toString(combinedHold);
    }
  }

  // Combining cell: maintains a single accumulator, merged/extracted through
  // the user's CombineFn.
  private final class InMemoryCombiningValue<InputT, AccumT, OutputT>
      implements CombiningValueStateInternal<InputT, AccumT, OutputT>, InMemoryState {
    private boolean isCleared = true;
    private final CombineFn<InputT, AccumT, OutputT> combineFn;
    private AccumT accum;

    private InMemoryCombiningValue(CombineFn<InputT, AccumT, OutputT> combineFn) {
      this.combineFn = combineFn;
      accum = combineFn.createAccumulator();
    }

    @Override
    public void clear() {
      // Even though we're clearing we can't remove this from the in-memory state map, since
      // other users may already have a handle on this CombiningValue.
      accum = combineFn.createAccumulator();
      isCleared = true;
    }

    @Override
    public StateContents<OutputT> get() {
      return new StateContents<OutputT>() {
        @Override
        public OutputT read() {
          // Output is derived from the accumulator at read() time.
          return combineFn.extractOutput(accum);
        }
      };
    }

    @Override
    public void add(InputT input) {
      isCleared = false;
      accum = combineFn.addInput(accum, input);
    }

    @Override
    public StateContents<AccumT> getAccum() {
      return new StateContents<AccumT>() {
        @Override
        public AccumT read() {
          return accum;
        }
      };
    }

    @Override
    public StateContents<Boolean> isEmpty() {
      return new StateContents<Boolean>() {
        @Override
        public Boolean read() {
          return isCleared;
        }
      };
    }

    @Override
    public void addAccum(AccumT accum) {
      isCleared = false;
      this.accum = combineFn.mergeAccumulators(Arrays.asList(this.accum, accum));
    }

    @Override
    public boolean isEmptyForTesting() {
      return isCleared;
    }
  }

  // Bag cell: append-only list of elements.
  private static final class InMemoryBag<T> implements BagState<T>, InMemoryState {
    private List<T> contents = new ArrayList<>();

    @Override
    public void clear() {
      // Even though we're clearing we can't remove this from the in-memory state map, since
      // other users may already have a handle on this Bag.
      // The result of get/read below must be stable for the lifetime of the bundle within which it
      // was generated. In batch and direct runners the bundle lifetime can be
      // greater than the window lifetime, in which case this method can be called while
      // the result is still in use. We protect against this by hot-swapping instead of
      // clearing the contents.
      contents = new ArrayList<>();
    }

    @Override
    public StateContents<Iterable<T>> get() {
      return new StateContents<Iterable<T>>() {
        @Override
        public Iterable<T> read() {
          return contents;
        }
      };
    }

    @Override
    public void add(T input) {
      contents.add(input);
    }

    @Override
    public boolean isEmptyForTesting() {
      return contents.isEmpty();
    }

    @Override
    public StateContents<Boolean> isEmpty() {
      return new StateContents<Boolean>() {
        @Override
        public Boolean read() {
          return contents.isEmpty();
        }
      };
    }
  }
}
package io.dropwizard.flyway; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.flywaydb.core.Flyway; import org.hibernate.validator.constraints.NotEmpty; import javax.sql.DataSource; import javax.validation.constraints.NotNull; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; public class FlywayFactory { @JsonProperty @NotEmpty private String encoding = StandardCharsets.UTF_8.name(); @JsonProperty @NotNull private List<String> schemas = ImmutableList.of(); @JsonProperty @NotNull private List<String> callbacks = ImmutableList.of(); @JsonProperty @NotEmpty private String metaDataTableName = "schema_version"; @JsonProperty @NotEmpty private String sqlMigrationPrefix = "V"; @JsonProperty @NotEmpty private String sqlMigrationSeparator = "__"; @JsonProperty @NotNull private String sqlMigrationSuffix = ".sql"; @JsonProperty @NotEmpty private String placeholderPrefix = "${"; @JsonProperty @NotEmpty private String placeholderSuffix = "}"; @JsonProperty @NotNull private Map<String, String> placeholders = ImmutableMap.of(); @JsonProperty @NotEmpty private List<String> locations = ImmutableList.of("db/migration"); @JsonProperty @NotNull private List<String> resolvers = ImmutableList.of(); @JsonIgnore private ClassLoader classLoader = null; @JsonProperty @NotNull private boolean outOfOrder = false; @JsonProperty @NotNull private String baselineDescription = "<< Flyway Baseline >>"; @JsonProperty @NotNull private boolean baselineOnMigrate = false; @JsonProperty @NotNull private boolean validateOnMigrate = true; public String getEncoding() { return encoding; } public void setEncoding(final String encoding) { this.encoding = encoding; } public List<String> getSchemas() { return schemas; } public void setSchemas(final List<String> schemas) { this.schemas = schemas; } public 
List<String> getCallbacks() { return callbacks; } public void setCallbacks(final List<String> callbacks) { this.callbacks = callbacks; } public String getMetaDataTableName() { return metaDataTableName; } public void setMetaDataTableName(final String metaDataTableName) { this.metaDataTableName = metaDataTableName; } public String getSqlMigrationPrefix() { return sqlMigrationPrefix; } public void setSqlMigrationPrefix(final String sqlMigrationPrefix) { this.sqlMigrationPrefix = sqlMigrationPrefix; } public String getSqlMigrationSuffix() { return sqlMigrationSuffix; } public void setSqlMigrationSuffix(final String sqlMigrationSuffix) { this.sqlMigrationSuffix = sqlMigrationSuffix; } public String getSqlMigrationSeparator() { return sqlMigrationSeparator; } public void setSqlMigrationSeparator(final String sqlMigrationSeparator) { this.sqlMigrationSeparator = sqlMigrationSeparator; } public String getPlaceholderPrefix() { return placeholderPrefix; } public void setPlaceholderPrefix(final String placeholderPrefix) { this.placeholderPrefix = placeholderPrefix; } public String getPlaceholderSuffix() { return placeholderSuffix; } public void setPlaceholderSuffix(final String placeholderSuffix) { this.placeholderSuffix = placeholderSuffix; } public Map<String, String> getPlaceholders() { return placeholders; } public void setPlaceholders(final Map<String, String> placeholders) { this.placeholders = placeholders; } public List<String> getLocations() { return locations; } public void setLocations(final List<String> locations) { this.locations = locations; } public List<String> getResolvers() { return resolvers; } public void setResolvers(final List<String> resolvers) { this.resolvers = resolvers; } public ClassLoader getClassLoader() { return classLoader; } public void setClassLoader(ClassLoader classLoader) { this.classLoader = classLoader; } public boolean getOutOfOrder() { return outOfOrder; } public void setOutOfOrder(boolean outOfOrder) { this.outOfOrder = outOfOrder; } 
/** @return description recorded when baselining an existing schema. */
public String getBaselineDescription() {
    return baselineDescription;
}

public void setBaselineDescription(String baselineDescription) {
    this.baselineDescription = baselineDescription;
}

/** @return whether Flyway should baseline a non-empty schema on first migrate. */
public boolean getBaselineOnMigrate() {
    return baselineOnMigrate;
}

public void setBaselineOnMigrate(boolean baselineOnMigrate) {
    this.baselineOnMigrate = baselineOnMigrate;
}

/** @return whether applied migrations are validated against available ones on migrate. */
public boolean getValidateOnMigrate() {
    return validateOnMigrate;
}

public void setValidateOnMigrate(boolean validateOnMigrate) {
    this.validateOnMigrate = validateOnMigrate;
}

/**
 * Builds a fully configured {@link Flyway} instance from the properties held
 * by this object.
 *
 * @param dataSource the data source the returned Flyway instance will migrate
 * @return a new, configured {@link Flyway} instance; this method has no side
 *         effects on the builder itself and may be called repeatedly
 */
public Flyway build(final DataSource dataSource) {
    final Flyway flyway = new Flyway();
    // Only override the class loader when one was explicitly configured,
    // otherwise Flyway keeps its own default.
    if (null != classLoader) {
        flyway.setClassLoader(classLoader);
    }
    flyway.setDataSource(dataSource);
    flyway.setEncoding(encoding);
    flyway.setTable(metaDataTableName);
    flyway.setSqlMigrationPrefix(sqlMigrationPrefix);
    flyway.setSqlMigrationSeparator(sqlMigrationSeparator);
    flyway.setSqlMigrationSuffix(sqlMigrationSuffix);
    // toArray(new String[0]) is the idiomatic list-to-array form; it replaces
    // the four manually pre-sized scratch arrays of the previous version.
    flyway.setSchemas(schemas.toArray(new String[0]));
    flyway.setCallbacksAsClassNames(callbacks.toArray(new String[0]));
    flyway.setPlaceholderPrefix(placeholderPrefix);
    flyway.setPlaceholderSuffix(placeholderSuffix);
    flyway.setPlaceholders(placeholders);
    flyway.setLocations(locations.toArray(new String[0]));
    flyway.setResolversAsClassNames(resolvers.toArray(new String[0]));
    flyway.setOutOfOrder(outOfOrder);
    flyway.setBaselineDescription(baselineDescription);
    flyway.setBaselineOnMigrate(baselineOnMigrate);
    flyway.setValidateOnMigrate(validateOnMigrate);
    return flyway;
}
}
package com.mongodb.orm.builder;

import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.FactoryConfigurationError;
import javax.xml.parsers.ParserConfigurationException;

import org.springframework.core.NestedIOException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.EntityResolver;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;

/**
 * XML node parser: walks a DOM tree and dispatches registered {@link Nodelet}
 * callbacks keyed by a simplified XPath of the current element.
 *
 * @author: xiangping_yu
 * @data : 2014-3-6
 * @since : 1.5
 */
public class NodeletParser {

    /** Registered callbacks, keyed by the simplified XPath they match. */
    private Map<String, Nodelet> letMap = new HashMap<String, Nodelet>();
    /** Whether the underlying JAXP parser validates against a DTD/schema. */
    private boolean validation;
    /** Resolver handed to the JAXP builder (e.g. to serve local DTD copies). */
    private EntityResolver entityResolver;

    /**
     * Registers a nodelet for the specified XPath. Current XPaths supported
     * are:
     * <ul>
     * <li> Attribute Path - /rootElement/childElement/@theAttribute
     * <li> Element Path - /rootElement/childElement/theElement
     * <li> All Elements Named - //theElement
     * </ul>
     */
    public void addNodelet(String xpath, Nodelet nodelet) {
        letMap.put(xpath, nodelet);
    }

    /**
     * Begins parsing from the provided Reader.
     *
     * @throws NestedIOException wrapping any parse/configuration failure
     */
    public void parse(Reader reader) throws NestedIOException {
        try {
            Document doc = createDocument(reader);
            parse(doc.getLastChild());
        } catch (Exception e) {
            throw new NestedIOException("Error parsing XML. Cause: " + e, e);
        }
    }

    /**
     * Begins parsing from the provided InputStream (decoded as UTF-8).
     *
     * @throws NestedIOException wrapping any parse/configuration failure
     */
    public void parse(InputStream inputStream) throws NestedIOException {
        try {
            Document doc = createDocument(inputStream);
            parse(doc.getLastChild());
        } catch (Exception e) {
            throw new NestedIOException("Error parsing XML. Cause: " + e, e);
        }
    }

    /**
     * Begins parsing from the provided Node.
     */
    public void parse(Node node) {
        Path path = new Path();
        process(node, path);
    }

    /**
     * A recursive method that walks the DOM tree, registers XPaths and
     * calls Nodelets registered under those XPaths. Each element fires both
     * its absolute path and the "//elementName" wildcard form.
     */
    private void process(Node node, Path path) {
        if (node instanceof Element) {
            // Element
            String elementName = node.getNodeName();
            path.add(elementName);
            processNodelet(node, path.toString());
            processNodelet(node, new StringBuffer("//").append(elementName).toString());

            // Children
            NodeList children = node.getChildNodes();
            for (int i = 0; i < children.getLength(); i++) {
                process(children.item(i), path);
            }
            path.remove();
        }
    }

    /** Invokes the nodelet registered for pathString, if any; wraps failures. */
    private void processNodelet(Node node, String pathString) {
        Nodelet nodelet = (Nodelet) letMap.get(pathString);
        if (nodelet != null) {
            try {
                nodelet.process(node);
            } catch (Exception e) {
                throw new RuntimeException("Error parsing XPath '" + pathString + "'. Cause: " + e, e);
            }
        }
    }

    /**
     * Creates and configures a JAXP DocumentBuilder. Shared by both
     * createDocument overloads, which previously duplicated this setup.
     *
     * NOTE(review): DTD resolution / external entities are not disabled here;
     * if this parser ever consumes untrusted XML, XXE hardening
     * (FEATURE_SECURE_PROCESSING, disallow-doctype-decl) should be added.
     */
    private DocumentBuilder newDocumentBuilder() throws ParserConfigurationException, FactoryConfigurationError {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setValidating(validation);
        factory.setIgnoringComments(true);
        factory.setIgnoringElementContentWhitespace(true);

        DocumentBuilder builder = factory.newDocumentBuilder();
        builder.setEntityResolver(entityResolver);
        builder.setErrorHandler(new ErrorHandler() {
            // Errors and fatal errors abort the parse; warnings are ignored.
            public void error(SAXParseException exception) throws SAXException {
                throw exception;
            }

            public void fatalError(SAXParseException exception) throws SAXException {
                throw exception;
            }

            public void warning(SAXParseException exception) throws SAXException {
            }
        });
        return builder;
    }

    /**
     * Creates a JAXP Document from a reader. The Reader supplies its own
     * character decoding, so no encoding is forced here.
     */
    private Document createDocument(Reader reader) throws ParserConfigurationException, FactoryConfigurationError, SAXException, IOException {
        return newDocumentBuilder().parse(new InputSource(reader));
    }

    /**
     * Creates a JAXP Document from an InputStream, decoded as UTF-8.
     */
    private Document createDocument(InputStream inputStream) throws ParserConfigurationException, FactoryConfigurationError, SAXException, IOException {
        InputSource input = new InputSource(inputStream);
        input.setEncoding("UTF-8");
        // Bug fix: the previous implementation built this UTF-8-configured
        // InputSource but then parsed a *fresh* InputSource wrapping the raw
        // stream, so the declared encoding was silently discarded.
        return newDocumentBuilder().parse(input);
    }

    public void setValidation(boolean validation) {
        this.validation = validation;
    }

    public void setEntityResolver(EntityResolver resolver) {
        this.entityResolver = resolver;
    }

    /**
     * Inner helper class that assists with building XPath paths.
     * <p/>
     * Note: Currently this is a bit slow and could be optimized.
     */
    private static class Path {

        private List<String> nodeList = new ArrayList<String>();

        public void add(String node) {
            nodeList.add(node);
        }

        public void remove() {
            nodeList.remove(nodeList.size() - 1);
        }

        public String toString() {
            StringBuilder builder = new StringBuilder("/");
            for (int i = 0; i < nodeList.size(); i++) {
                builder.append(nodeList.get(i));
                if (i < nodeList.size() - 1) {
                    builder.append("/");
                }
            }
            return builder.toString();
        }
    }
}
package nl.bennyjacobs.aapbridge.dbus.message;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

import android.util.Log;

import nl.bennyjacobs.aapbridge.dbus.RemoteDbusException;
import nl.bennyjacobs.aapbridge.dbus.RemotePayloadException;
import nl.bennyjacobs.aapbridge.dbus.RemoteException;
import nl.bennyjacobs.aapbridge.dbus.message.types.DbusArray;
import nl.bennyjacobs.aapbridge.dbus.message.types.DbusObjectPath;
import nl.bennyjacobs.aapbridge.dbus.message.types.DbusSignature;
import nl.bennyjacobs.aapbridge.dbus.message.types.DbusStruct;
import nl.bennyjacobs.aapbridge.dbus.message.types.DbusVariant;

import static nl.bennyjacobs.aapbridge.TAG.TAG;

/**
 * <p>Represents a DbusMessage and may contain an array of d-bus values converted to java types.</p>
 *
 * <p>You can access the values by calling {@link #getValues()} and casting the Objects to the expected type; this method might throw a {@link RemoteException}</p>
 */
public class DbusMessage
{
    // Header fields, populated by parseHeader() from the marshalled header
    // array. int fields use -1 as "not present" sentinel; object fields use null.
    private DbusObjectPath objectPath;
    private String interfaceName;
    private String member;
    private String errorName;
    private int reply_serial = -1;
    private String destinationBusname;
    private String senderBusname;
    private DbusSignature signature;
    private int unix_fds = -1;
    // Message body arguments, wrapped in a struct of the header signature.
    private DbusStruct arguments;

    /**
     * Decodes the d-bus header array "a(yv)": each entry is a struct of
     * (header-type byte, variant value). Header type codes follow the d-bus
     * specification (1 = path, 2 = interface, ... 9 = unix fds).
     */
    private void parseHeader(DbusArray arr)
    {
        for(Object o : arr)
        {
            DbusStruct struct = (DbusStruct) o;
            byte headertype = (Byte) struct.getContent()[0];
            DbusVariant variant = (DbusVariant) struct.getContent()[1];
            switch(headertype){
            case 1:
                this.objectPath = (DbusObjectPath) variant.getEmbeddedThing();
                break;
            case 2:
                this.interfaceName = (String) variant.getEmbeddedThing();
                break;
            case 3:
                this.member = (String) variant.getEmbeddedThing();
                break;
            case 4:
                this.errorName = (String) variant.getEmbeddedThing();
                break;
            case 5:
                this.reply_serial = (Integer) variant.getEmbeddedThing();
                break;
            case 6:
                this.destinationBusname = (String) variant.getEmbeddedThing();
                break;
            case 7:
                this.senderBusname = (String) variant.getEmbeddedThing();
                break;
            case 8:
                this.signature = (DbusSignature) variant.getEmbeddedThing();
                break;
            case 9:
                this.unix_fds = (Integer) variant.getEmbeddedThing();
                break;
            default:
                // Unknown header types are tolerated and only logged.
                Log.w(TAG, "Warning unknwon dbus message header "+headertype+" : "+variant);
            }
        }
    }

    /**
     * Parses a complete marshalled d-bus message from the buffer. The reads
     * below are strictly sequential and must stay in wire order:
     * endianness flag, message type, flags, protocol version, body size,
     * serial, header array, then (if a signature header was present) the body.
     */
    public DbusMessage(ByteBuffer bb)
    {
        ByteOrder endian = null;
        {
            // First byte selects the byte order for everything that follows.
            byte endian_marshalled = bb.get();
            if(endian_marshalled == 'l')
            {
                endian = ByteOrder.LITTLE_ENDIAN;
            }
            else if(endian_marshalled == 'B')
            {
                endian = ByteOrder.BIG_ENDIAN;
            }
            else
            {
                throw new Error("Invalid endian: "+endian_marshalled);
            }
            bb.order(endian);
        }

        // Fixed header fields; read (and currently unused beyond advancing
        // the buffer) in wire order.
        byte msg_type = bb.get();
        byte flags = bb.get();
        byte protocol_version = bb.get();
        int msg_size = bb.getInt();
        int serial = bb.getInt();

        // Header fields array, signature "a(yv)".
        DbusArray arr = new DbusArray("a(yv)",bb);
        parseHeader(arr);

        // Body is only present when the header declared a signature.
        if(signature != null)
            arguments = new DbusStruct("("+signature.getSignatureString()+")",bb);
    }

    /**
     * <p>
     * The d-bus data is converted to java data types. This is done according to the following table:
     * </p>
     *
     * <table>
     * <tr><th>D-bus type</th><th>Java type</th></tr>
     * <tr><td>Byte</td><td>{@link java.lang.Byte}</td></tr>
     * <tr><td>Boolean</td><td>{@link java.lang.Boolean}</td></tr>
     * <tr><td>INT16</td><td>{@link java.lang.Short}</td></tr>
     * <tr><td>UINT16</td><td><code>Undefined</code></td></tr>
     * <tr><td>INT32</td><td>{@link java.lang.Integer}</td></tr>
     * <tr><td>UINT32</td><td><code>Undefined</code></td></tr>
     * <tr><td>INT64</td><td>{@link java.lang.Long}</td></tr>
     * <tr><td>UINT64</td><td><code>Undefined</code></td></tr>
     * <tr><td>DOUBLE</td><td>{@link java.lang.Double}</td></tr>
     * <tr><td>STRING</td><td>{@link java.lang.String}</td></tr>
     * <tr><td>OBJECT_PATH</td><td><code>Undefined</code></td></tr>
     * <tr><td>SIGNATURE</td><td><code>Undefined</code></td></tr>
     * <tr><td>ARRAY</td><td><code>Undefined</code></td></tr>
     * <tr><td>STRUCT</td><td><code>Undefined</code></td></tr>
     * <tr><td>VARIANT</td><td><code>Undefined</code></td></tr>
     * <tr><td>DICT</td><td><code>Undefined</code></td></tr>
     * <tr><td>UNIX_FD</td><td><code>Undefined</code></td></tr>
     * </table>
     *
     * <p>Undefined in the table above could be any kind of Object or null.</p>
     *
     * <p>
     * Only you know what return values a method might return. You should cast the values to the correct type during runtime.
     * </p>
     *
     * <p>This method throws a {@link RemoteException} if an error occurred on the remote device. This can be related
     * to d-bus (Can't find the bus-name), the payload d-bus interface (incorrect d-bus signature) or the payload itself
     * (payload generated exceptions). Remote d-bus exceptions should never actually happen; you may consider it a severe
     * error and terminate any connection, as the remote payload might not be reachable. Remote payload exceptions on the
     * other hand are generated by the remote payload; the programmer who wrote the payload should decide himself
     * what should happen on the Android side</p>
     *
     * @return The top level d-bus values or null if this message didn't contain any values
     * @throws RemoteDbusException If the remote d-bus daemon reported an exception in this d-bus message
     * @throws RemotePayloadException If the remote payload reported an error/exception in this d-bus message
     */
    public Object[] getValues() throws RemotePayloadException, RemoteDbusException
    {
        if(errorName != null)
        {
            // First body argument conventionally carries the error text.
            String errString;
            try
            {
                errString = arguments.getContent()[0].toString();
            }
            catch(Exception e)
            {
                errString = "No error description";
            }

            // Try to reflectively instantiate a matching local exception class
            // named after errorName; fall back to the generic remote exception
            // type when the class is missing or not a suitable subclass.
            if(errorName.startsWith("org.freedesktop.DBus.Error") || errorName.startsWith("org.freedesktop.DBus.Exceptions"))
            {
                try
                {
                    throw Class.forName(errorName).asSubclass(RemoteDbusException.class).getConstructor(String.class).newInstance(errString);
                }
                catch (RemoteDbusException e)
                {
                    throw e;
                }
                catch (Exception e)
                {
                    throw new RemoteDbusException(errorName, errString);
                }
            }
            else
            {
                try
                {
                    throw Class.forName(errorName).asSubclass(RemotePayloadException.class).getConstructor(String.class).newInstance(errString);
                }
                catch (RemotePayloadException e)
                {
                    throw e;
                }
                catch (Exception e)
                {
                    throw new RemotePayloadException(errorName, errString);
                }
            }
        }
        if(arguments == null)
            return null;
        return arguments.getContent();
    }

    /**
     * NOT part of public API.
     */
    DbusStruct getValuesStruct()
    {
        return arguments;
    }

    /**
     * @return the object path
     */
    public DbusObjectPath getObjectPath() {
        return objectPath;
    }

    /**
     * @return the interface name
     */
    public String getInterfaceName() {
        return interfaceName;
    }

    /**
     * @return the member name (This is a signal name or a method name)
     */
    public String getMember() {
        return member;
    }

    /**
     * @return the destination Busname
     */
    public String getDestinationBusname() {
        return destinationBusname;
    }

    /**
     * @return the sender Busname
     */
    public String getSenderBusname() {
        return senderBusname;
    }

    @Override
    public String toString()
    {
        // Emits one "Label : value" line per header field that is present.
        StringBuilder sb = new StringBuilder();
        if(objectPath != null)
        {
            sb.append("Object path         : ");
            sb.append(objectPath);
            sb.append('\n');
        }
        if(interfaceName != null)
        {
            sb.append("DbusInterface name  : ");
            sb.append(interfaceName);
            sb.append('\n');
        }
        if(member != null)
        {
            sb.append("Member name         : ");
            sb.append(member);
            sb.append('\n');
        }
        if(errorName != null)
        {
            sb.append("Error name          : ");
            sb.append(errorName);
            sb.append('\n');
        }
        if(reply_serial != -1)
        {
            sb.append("Reply serial        : ");
            sb.append(reply_serial);
            sb.append('\n');
        }
        if(destinationBusname != null)
        {
            sb.append("Destination busname : ");
            sb.append(destinationBusname);
            sb.append('\n');
        }
        if(senderBusname != null)
        {
            sb.append("Sender busname      : ");
            sb.append(senderBusname);
            sb.append('\n');
        }
        if(signature != null)
        {
            sb.append("Signature           : ");
            sb.append(signature);
            sb.append('\n');
        }
        if(unix_fds != -1)
        {
            sb.append("Unix FD             : ");
            sb.append(unix_fds);
            sb.append('\n');
        }
        if(arguments != null)
        {
            sb.append(arguments.toString());
            sb.append('\n');
        }
        return sb.toString();
    }
}
/*
 * Copyright (C) 2012, 2013 the diamond:dogs|group
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package at.diamonddogs.builder;

import java.net.URI;
import java.net.URL;

import android.net.Uri;
import at.diamonddogs.data.dataobjects.Authentication;
import at.diamonddogs.data.dataobjects.WebRequest;
import at.diamonddogs.data.dataobjects.WebRequest.Type;
import at.diamonddogs.service.processor.ServiceProcessor;

/**
 * This builder enables programmers to create uniform {@link WebRequest}s based
 * on the provided configuration. The builder's purpose is not to micro-manage
 * {@link WebRequest} creation but to provide a configured {@link WebRequest}
 * template.
 */
public class WebRequestBuilder {

	/**
	 * The configuration used for {@link WebRequest} construction
	 */
	private final WebRequestBuilderConfiguration configuration;

	/**
	 * The {@link WebRequest} that is currently built; null between
	 * {@link #getWebRequest()} and the next {@link #newWebRequest()}
	 */
	private WebRequest webRequest;

	/** Optional base url every new {@link WebRequest} starts from */
	private String baseUrl;

	/**
	 * This enum is an abstraction for read timeout durations. The exact
	 * duration is controlled by the provided
	 * {@link WebRequestBuilderConfiguration}
	 */
	// @formatter:off
	@SuppressWarnings("javadoc")
	public enum ReadTimeout {
		SHORT, MEDIUM, LONG, VERYLONG
	}
	// @formatter:on

	/**
	 * This enum is an abstraction for connection timeout durations. The exact
	 * duration is controlled by the provided
	 * {@link WebRequestBuilderConfiguration}
	 */
	// @formatter:off
	@SuppressWarnings("javadoc")
	public enum ConnectionTimeout {
		SHORT, MEDIUM, LONG, VERYLONG
	}
	// @formatter:on

	/**
	 * Constructor with configuration option
	 *
	 * @param configuration
	 *            the {@link WebRequestBuilderConfiguration} to use when
	 *            building {@link WebRequest}s
	 */
	public WebRequestBuilder(WebRequestBuilderConfiguration configuration) {
		this.configuration = configuration;
	}

	/**
	 * Constructor with configuration option and baseUrl
	 *
	 * @param configuration
	 *            the {@link WebRequestBuilderConfiguration} to use when
	 *            building {@link WebRequest}s
	 * @param baseUrl
	 *            the base url of the webservice
	 */
	public WebRequestBuilder(WebRequestBuilderConfiguration configuration, String baseUrl) {
		this.configuration = configuration;
		this.baseUrl = baseUrl;
	}

	/**
	 * This constructor utilizes {@link WebRequestBuilderDefaultConfig} to
	 * configure {@link WebRequest}s
	 */
	public WebRequestBuilder() {
		this.configuration = new WebRequestBuilderDefaultConfig();
	}

	/**
	 * This constructor utilizes {@link WebRequestBuilderDefaultConfig} to
	 * configure {@link WebRequest}s
	 *
	 * @param baseUrl
	 *            the base url of the webservice
	 */
	public WebRequestBuilder(String baseUrl) {
		this.configuration = new WebRequestBuilderDefaultConfig();
		this.baseUrl = baseUrl;
	}

	/**
	 * Creates a new {@link WebRequest}; must be called before attempting to do
	 * anything else. Throws an {@link IllegalStateException} if a
	 * {@link WebRequest} is already under construction (i.e.
	 * {@link #getWebRequest()} has not been called yet).
	 * (Doc fix: previously this claimed to "reset" an existing request, which
	 * contradicts the actual behavior.)
	 *
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 */
	public WebRequestBuilder newWebRequest() {
		return newWebRequest(new WebRequest());
	}

	/**
	 * Configures a {@link WebRequest} with the defaults supplied by the
	 * {@link WebRequestBuilderConfiguration} (and the baseUrl, when set).
	 *
	 * @param wr
	 *            the {@link WebRequest} instance to configure
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 * @throws IllegalStateException
	 *             if a {@link WebRequest} is already under construction
	 */
	public WebRequestBuilder newWebRequest(WebRequest wr) {
		if (webRequest != null) {
			throw new IllegalStateException("webRequest already initialized");
		}
		webRequest = wr;
		if (baseUrl != null) {
			webRequest.setUrl(baseUrl);
		}
		webRequest.setFollowRedirects(configuration.isFollowRedirectEnabled());
		webRequest.setUseOfflineCache(configuration.isOfflineCachingEnabled());
		webRequest.setCheckConnectivity(configuration.isConnectivityCheckEnabled());
		webRequest.setCheckConnectivityPing(configuration.isConnectivityPingEnabled());
		webRequest.setRetryInterval(configuration.getRetryInterval());
		webRequest.setNumberOfRetries(configuration.getRetryCount());
		webRequest.setCacheTime(configuration.getDefaultCacheTime());
		return this;
	}

	/**
	 * Attaches basic auth data to a {@link WebRequest}
	 *
	 * @param user
	 *            the user
	 * @param password
	 *            the password
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 */
	public WebRequestBuilder attachBasicAuthData(String user, String password) {
		throwOnError();
		webRequest.setAuthentication(new Authentication(user, password));
		return this;
	}

	/**
	 * Sets the target url of the {@link WebRequest}
	 *
	 * @param url
	 *            the url
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 */
	public WebRequestBuilder setUrl(String url) {
		throwOnError();
		webRequest.setUrl(url);
		return this;
	}

	/**
	 * Sets the target url of the {@link WebRequest}
	 *
	 * @param url
	 *            the url
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 */
	public WebRequestBuilder setUrl(URL url) {
		throwOnError();
		webRequest.setUrl(url);
		return this;
	}

	/**
	 * Sets the target url of the {@link WebRequest}
	 *
	 * @param url
	 *            the url
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 */
	public WebRequestBuilder setUrl(URI url) {
		throwOnError();
		webRequest.setUrl(url);
		return this;
	}

	/**
	 * Sets the target url of the {@link WebRequest}
	 *
	 * @param url
	 *            the url
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 */
	public WebRequestBuilder setUrl(Uri url) {
		throwOnError();
		webRequest.setUrl(url);
		return this;
	}

	/**
	 * Set the type of {@link WebRequest}
	 *
	 * @param type
	 *            the type
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 */
	public WebRequestBuilder setType(Type type) {
		throwOnError();
		webRequest.setRequestType(type);
		return this;
	}

	/**
	 * Sets the abstracted read timeout
	 *
	 * @param timeout
	 *            the read timeout to be used in this {@link WebRequest}
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 */
	public WebRequestBuilder setReadTimeout(ReadTimeout timeout) {
		throwOnError();
		webRequest.setReadTimeout(getReadTimeout(timeout));
		return this;
	}

	/** Maps the abstract read timeout to the configured milliseconds value. */
	private int getReadTimeout(ReadTimeout timeout) {
		switch (timeout) {
		case SHORT:
			return configuration.getReadTimeoutShort();
		case MEDIUM:
			return configuration.getReadTimeoutMedium();
		case LONG:
			return configuration.getReadTimeoutLong();
		case VERYLONG:
			return configuration.getReadTimeoutVeryLong();
		default:
			// unreachable for the current enum; conservative fallback
			return 1000;
		}
	}

	/**
	 * Sets the abstracted connection timeout
	 *
	 * @param timeout
	 *            the connection timeout to be used in this {@link WebRequest}
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 */
	public WebRequestBuilder setConnectionTimeout(ConnectionTimeout timeout) {
		throwOnError();
		webRequest.setConnectionTimeout(getConnectionTimeout(timeout));
		return this;
	}

	/** Maps the abstract connection timeout to the configured milliseconds value. */
	private int getConnectionTimeout(ConnectionTimeout timeout) {
		switch (timeout) {
		case SHORT:
			return configuration.getConnectionTimeoutShort();
		case MEDIUM:
			return configuration.getConnectionTimeoutMedium();
		case LONG:
			return configuration.getConnectionTimeoutLong();
		case VERYLONG:
			return configuration.getConnectionTimeoutVeryLong();
		default:
			// unreachable for the current enum; conservative fallback
			return 1000;
		}
	}

	/**
	 * Sets the ID of the {@link ServiceProcessor} that should handle the
	 * {@link WebRequest}
	 *
	 * @param processorid
	 *            the processor's id
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 */
	public WebRequestBuilder withProcessorId(int processorid) {
		throwOnError();
		webRequest.setProcessorId(processorid);
		return this;
	}

	/**
	 * Sets a custom cache time. This method allows overriding the default cache
	 * time which is provided by the {@link WebRequestBuilderConfiguration} used
	 * by this {@link WebRequestBuilder}
	 *
	 * @param time
	 *            the cachetime
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 */
	public WebRequestBuilder setCacheTime(long time) {
		throwOnError();
		webRequest.setCacheTime(time);
		return this;
	}

	/**
	 * Append path to the baseUrl of the specific request. Requires the user to
	 * set a baseUrl using the correct constructor.
	 *
	 * @param path
	 *            the path to append
	 * @return the {@link WebRequestBuilder} instance (allows chaining)
	 * @throws NullPointerException
	 *             if no baseUrl has been set
	 */
	public WebRequestBuilder appendToUrl(String path) {
		// Bug fix: this was the only mutator that skipped the state guard, so
		// calling it without an active request produced a bare NPE on
		// webRequest.getUrl() instead of the explicit "request is null" error.
		throwOnError();
		if (baseUrl == null) {
			throw new NullPointerException("No baseUrl set!");
		}
		String url = webRequest.getUrl().toString();
		webRequest.setUrl(Uri.withAppendedPath(Uri.parse(url), path));
		return this;
	}

	/**
	 * Returns the {@link WebRequest} that has been constructed so far and
	 * resets the {@link WebRequest}
	 *
	 * @return a {@link WebRequest}
	 */
	public WebRequest getWebRequest() {
		throwOnError();
		WebRequest ret = webRequest;
		webRequest = null;
		return ret;
	}

	/** Guards every mutator: fails fast when no request is under construction. */
	private void throwOnError() {
		if (configuration == null) {
			throw new IllegalStateException("configuration is null");
		}
		if (webRequest == null) {
			throw new IllegalStateException("request is null");
		}
	}
}
package org.gluu.service.cache;

import com.couchbase.client.java.cluster.User;
import com.google.common.collect.Maps;
import org.gluu.persist.annotation.AttributeName;
import org.gluu.persist.annotation.DN;
import org.gluu.persist.annotation.JsonObject;

import javax.annotation.Nonnull;
import javax.persistence.Transient;
import java.io.Serializable;
import java.util.Date;
import java.util.Map;

/**
 * Sample persisted session entry used by the cache tests. Identity (equals /
 * hashCode) is based solely on {@link #id}.
 *
 * @author Yuriy Zabrovarnyy
 */
public class SampleSessionId implements Serializable {

    private static final long serialVersionUID = -137476411915686378L;

    @DN
    private String dn;

    @AttributeName(name = "oxAuthSessionId")
    private String id;

    @AttributeName(name = "oxLastAccessTime")
    private Date lastUsedAt;

    @AttributeName(name = "oxAuthUserDN")
    private String userDn;

    @AttributeName(name = "authnTime")
    private Date authenticationTime;

    @AttributeName(name = "oxState")
    private SessionIdState state;

    @AttributeName(name = "oxAuthSessionState")
    private String sessionState;

    @AttributeName(name = "oxAuthPermissionGranted")
    private Boolean permissionGranted;

    @AttributeName(name = "oxAsJwt")
    private Boolean isJwt = false;

    @AttributeName(name = "oxJwt")
    private String jwt;

    @JsonObject
    @AttributeName(name = "oxAuthPermissionGrantedMap")
    private SessionIdAccessMap permissionGrantedMap;

    @JsonObject
    @AttributeName(name = "oxInvolvedClients")
    private SessionIdAccessMap involvedClients;

    @JsonObject
    @AttributeName(name = "oxAuthSessionAttribute")
    private Map<String, String> sessionAttributes;

    // Runtime-only flags, never persisted.
    @Transient
    private transient boolean persisted;

    @Transient
    private User user;

    public SampleSessionId() {
    }

    public String getDn() {
        return dn;
    }

    public void setDn(String p_dn) {
        dn = p_dn;
    }

    public String getJwt() {
        return jwt;
    }

    public void setJwt(String jwt) {
        this.jwt = jwt;
    }

    public Boolean getIsJwt() {
        return isJwt;
    }

    public void setIsJwt(Boolean isJwt) {
        this.isJwt = isJwt;
    }

    /** Lazily initialized; never returns null. */
    public SessionIdAccessMap getInvolvedClients() {
        if (involvedClients == null) {
            involvedClients = new SessionIdAccessMap();
        }
        return involvedClients;
    }

    public void setInvolvedClients(SessionIdAccessMap involvedClients) {
        this.involvedClients = involvedClients;
    }

    public SessionIdState getState() {
        return state;
    }

    public void setState(SessionIdState state) {
        this.state = state;
    }

    public String getSessionState() {
        return sessionState;
    }

    public void setSessionState(String sessionState) {
        this.sessionState = sessionState;
    }

    /**
     * @return the OP browser state attribute ("opbs"), or null when absent.
     */
    public String getOPBrowserState() {
        // Bug fix: previously read sessionAttributes directly and threw an NPE
        // when the map had not been initialized; the null-safe lazy accessor
        // is used instead (absent key now yields null, as callers expect).
        return getSessionAttributes().get("opbs");
    }

    public String getId() {
        return id;
    }

    public void setId(String p_id) {
        id = p_id;
    }

    public Date getLastUsedAt() {
        return lastUsedAt;
    }

    public void setLastUsedAt(Date p_lastUsedAt) {
        lastUsedAt = p_lastUsedAt;
    }

    public String getUserDn() {
        return userDn;
    }

    /** Stores the empty string for a null argument, matching original behavior. */
    public void setUserDn(String p_userDn) {
        userDn = p_userDn != null ? p_userDn : "";
    }

    public User getUser() {
        return user;
    }

    public void setUser(User user) {
        this.user = user;
    }

    public Date getAuthenticationTime() {
        return authenticationTime;
    }

    public void setAuthenticationTime(Date authenticationTime) {
        this.authenticationTime = authenticationTime;
    }

    public Boolean getPermissionGranted() {
        return permissionGranted;
    }

    public void setPermissionGranted(Boolean permissionGranted) {
        this.permissionGranted = permissionGranted;
    }

    public SessionIdAccessMap getPermissionGrantedMap() {
        return permissionGrantedMap;
    }

    public void setPermissionGrantedMap(SessionIdAccessMap permissionGrantedMap) {
        this.permissionGrantedMap = permissionGrantedMap;
    }

    /**
     * @return true only when the map explicitly records a granted permission
     *         for clientId; false when the map is absent or has no entry.
     */
    public Boolean isPermissionGrantedForClient(String clientId) {
        // Robustness fix: Boolean.TRUE.equals(...) avoids the NPE the previous
        // direct unboxing produced when the map returned null for an unknown
        // client. (Assumes SessionIdAccessMap.get returns a Boolean - if it
        // returns a primitive boolean, autoboxing keeps this equivalent.)
        return permissionGrantedMap != null && Boolean.TRUE.equals(permissionGrantedMap.get(clientId));
    }

    public void addPermission(String clientId, Boolean granted) {
        if (permissionGrantedMap == null) {
            permissionGrantedMap = new SessionIdAccessMap();
        }
        permissionGrantedMap.put(clientId, granted);
    }

    /** Lazily initialized; never returns null. */
    @Nonnull
    public Map<String, String> getSessionAttributes() {
        if (sessionAttributes == null) {
            sessionAttributes = Maps.newHashMap();
        }
        return sessionAttributes;
    }

    public void setSessionAttributes(Map<String, String> sessionAttributes) {
        this.sessionAttributes = sessionAttributes;
    }

    public boolean isPersisted() {
        return persisted;
    }

    public void setPersisted(boolean persisted) {
        this.persisted = persisted;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;

        SampleSessionId id1 = (SampleSessionId) o;

        return !(id != null ? !id.equals(id1.id) : id1.id != null);
    }

    @Override
    public int hashCode() {
        return id != null ? id.hashCode() : 0;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append("SessionState {");
        sb.append("dn='").append(dn).append('\'');
        sb.append(", id='").append(id).append('\'');
        sb.append(", lastUsedAt=").append(lastUsedAt);
        sb.append(", userDn='").append(userDn).append('\'');
        sb.append(", authenticationTime=").append(authenticationTime);
        sb.append(", state=").append(state);
        sb.append(", sessionState='").append(sessionState).append('\'');
        sb.append(", permissionGranted=").append(permissionGranted);
        sb.append(", isJwt=").append(isJwt);
        sb.append(", jwt=").append(jwt);
        sb.append(", permissionGrantedMap=").append(permissionGrantedMap);
        sb.append(", involvedClients=").append(involvedClients);
        sb.append(", sessionAttributes=").append(sessionAttributes);
        sb.append(", persisted=").append(persisted);
        sb.append("}");
        return sb.toString();
    }
}
/*
 * Copyright (c) 2008-2013, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.examples.model;

import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.DataSerializable;

import java.io.IOException;

/**
 * Example data object describing a US state / territory, serialized with
 * Hazelcast's {@link DataSerializable}. writeData and readData below must
 * read and write the fields in exactly the same order; keep them in sync
 * when adding or removing fields.
 *
 * NOTE(review): "censusDevisionName" (sic) is kept as-is - renaming the
 * accessor pair would break the public bean interface.
 */
public class State implements DataSerializable {

    private int id;
    private String name;
    private String abbreviation;
    private String country;
    private String type;
    private int sort;
    private String status;
    private String occupied;
    private String notes;
    private int fipsState;
    private String assocPress;
    private String standardFederalRegion;
    private int censusRegion;
    private String censusRegionName;
    private int censusDivision;
    private String censusDevisionName;
    private int circuitCourt;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getAbbreviation() {
        return abbreviation;
    }

    public void setAbbreviation(String abbreviation) {
        this.abbreviation = abbreviation;
    }

    public String getCountry() {
        return country;
    }

    public void setCountry(String country) {
        this.country = country;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public int getSort() {
        return sort;
    }

    public void setSort(int sort) {
        this.sort = sort;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getOccupied() {
        return occupied;
    }

    public void setOccupied(String occupied) {
        this.occupied = occupied;
    }

    public String getNotes() {
        return notes;
    }

    public void setNotes(String notes) {
        this.notes = notes;
    }

    public int getFipsState() {
        return fipsState;
    }

    public void setFipsState(int fipsState) {
        this.fipsState = fipsState;
    }

    public String getAssocPress() {
        return assocPress;
    }

    public void setAssocPress(String assocPress) {
        this.assocPress = assocPress;
    }

    public String getStandardFederalRegion() {
        return standardFederalRegion;
    }

    public void setStandardFederalRegion(String standardFederalRegion) {
        this.standardFederalRegion = standardFederalRegion;
    }

    public int getCensusRegion() {
        return censusRegion;
    }

    public void setCensusRegion(int censusRegion) {
        this.censusRegion = censusRegion;
    }

    public String getCensusRegionName() {
        return censusRegionName;
    }

    public void setCensusRegionName(String censusRegionName) {
        this.censusRegionName = censusRegionName;
    }

    public int getCensusDivision() {
        return censusDivision;
    }

    public void setCensusDivision(int censusDivision) {
        this.censusDivision = censusDivision;
    }

    public String getCensusDevisionName() {
        return censusDevisionName;
    }

    public void setCensusDevisionName(String censusDevisionName) {
        this.censusDevisionName = censusDevisionName;
    }

    public int getCircuitCourt() {
        return circuitCourt;
    }

    public void setCircuitCourt(int circuitCourt) {
        this.circuitCourt = circuitCourt;
    }

    // Serializes every field in declaration order; readData must mirror this.
    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        out.writeInt(id);
        out.writeUTF(name);
        out.writeUTF(abbreviation);
        out.writeUTF(country);
        out.writeUTF(type);
        out.writeInt(sort);
        out.writeUTF(status);
        out.writeUTF(occupied);
        out.writeUTF(notes);
        out.writeInt(fipsState);
        out.writeUTF(assocPress);
        out.writeUTF(standardFederalRegion);
        out.writeInt(censusRegion);
        out.writeUTF(censusRegionName);
        out.writeInt(censusDivision);
        out.writeUTF(censusDevisionName);
        out.writeInt(circuitCourt);
    }

    // Deserializes fields in the exact order writeData emitted them.
    @Override
    public void readData(ObjectDataInput in) throws IOException {
        id = in.readInt();
        name = in.readUTF();
        abbreviation = in.readUTF();
        country = in.readUTF();
        type = in.readUTF();
        sort = in.readInt();
        status = in.readUTF();
        occupied = in.readUTF();
        notes = in.readUTF();
        fipsState = in.readInt();
        assocPress = in.readUTF();
        standardFederalRegion = in.readUTF();
        censusRegion = in.readInt();
        censusRegionName = in.readUTF();
        censusDivision = in.readInt();
        censusDevisionName = in.readUTF();
        circuitCourt = in.readInt();
    }

    @Override
    public String toString() {
        return "State{"
                + "id=" + id
                + ", name='" + name + '\''
                + ", abbreviation='" + abbreviation + '\''
                + ", country='" + country + '\''
                + ", type='" + type + '\''
                + ", sort=" + sort
                + ", status='" + status + '\''
                + ", occupied='" + occupied + '\''
                + ", notes='" + notes + '\''
                + ", fipsState=" + fipsState
                + ", assocPress='" + assocPress + '\''
                + ", standardFederalRegion='" + standardFederalRegion + '\''
                + ", censusRegion=" + censusRegion
                + ", censusRegionName='" + censusRegionName + '\''
                + ", censusDivision=" + censusDivision
                + ", censusDevisionName='" + censusDevisionName + '\''
                + ", circuitCourt=" + circuitCourt
                + '}';
    }
}
package com.weinyc.sa.core.reflect;

import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.mail.internet.InternetAddress;
import javax.servlet.http.HttpServletRequest;

import org.apache.commons.lang.StringUtils;

import com.weinyc.sa.core.annotation.ParseMethodType;
import com.weinyc.sa.core.annotation.RequestParse;

/**
 * Reflection helpers: class loading, declared-field discovery, and
 * getter/setter/parser resolution driven by {@link RequestParse} annotations.
 *
 * <p>All lookup methods return {@code null} (rather than throwing) when the
 * requested member cannot be found; failures are logged.
 */
public class ReflectUtils {

    private static final Logger logger = Logger.getLogger(ReflectUtils.class.getName());

    /** When true, failed method lookups in {@link #findMethod} are logged. */
    protected static boolean _SHOW_EXCEPTION = true;

    /**
     * Loads a class by name, trying {@link Class#forName(String)} first and
     * falling back to the current thread's context class loader (useful in
     * container environments where the caller's loader cannot see the class).
     *
     * @param className fully qualified class name; may be null (returns null)
     * @return the loaded class, or null when {@code className} is null
     * @throws ClassNotFoundException if neither loader can resolve the name
     */
    public static Class<?> classForName(String className) throws ClassNotFoundException {
        if (className == null) {
            return null;
        }
        className = className.trim();
        try {
            return Class.forName(className);
        } catch (ClassNotFoundException cnfe) {
            ClassLoader threadClassLoader = Thread.currentThread().getContextClassLoader();
            return Class.forName(className, false, threadClassLoader);
        }
    }

    /**
     * Collects declared fields of {@code type} into a name-keyed map.
     * Subclass fields win over shadowed superclass fields of the same name.
     *
     * @param fields   accumulator; a new map is created when null
     * @param type     class whose declared fields are collected
     * @param allSuper when true, walk up the superclass chain as well
     * @return the populated map
     */
    public static Map<String, Field> getDeclaredFields(Map<String, Field> fields, Class<?> type,
            boolean allSuper) {
        if (fields == null) {
            fields = new HashMap<>();
        }
        for (Field field : type.getDeclaredFields()) {
            // First occurrence (most-derived class) wins for shadowed names.
            if (!fields.containsKey(field.getName())) {
                fields.put(field.getName(), field);
            }
        }
        if (allSuper && type.getSuperclass() != null) {
            fields = getDeclaredFields(fields, type.getSuperclass(), allSuper);
        }
        return fields;
    }

    /**
     * Collects declared fields of {@code type} into a list, subclass fields
     * first; unlike the map overload, shadowed fields appear multiple times.
     *
     * @param fields   accumulator; a new list is created when null
     * @param type     class whose declared fields are collected
     * @param allSuper when true, walk up the superclass chain as well
     * @return the populated list
     */
    public static List<Field> getDeclaredFields(List<Field> fields, Class<?> type, boolean allSuper) {
        if (fields == null) {
            fields = new ArrayList<>();
        }
        fields.addAll(Arrays.asList(type.getDeclaredFields()));
        if (allSuper && type.getSuperclass() != null) {
            fields = getDeclaredFields(fields, type.getSuperclass(), allSuper);
        }
        return fields;
    }

    /** Convenience overload of {@link #findGetter(Class, Field, Map, RequestParse)}. */
    public static final Method findGetter(Object instance, Field field,
            Map<String, PropertyDescriptor> name2Property, RequestParse annotation) {
        return findGetter(instance.getClass(), field, name2Property, annotation);
    }

    /**
     * Resolves a public no-arg getter for {@code field}, trying in order:
     * the annotation-named getter (or the bean {@link PropertyDescriptor}
     * read method when no annotation getter is given), then
     * {@code getFieldName()}, then for boolean fields {@code isFieldName()},
     * and finally, for boolean fields already named {@code isXxx}, a method
     * with the exact field name.
     *
     * @return the getter, or null when none is found
     */
    public static final Method findGetter(Class<?> instanceClass, Field field,
            Map<String, PropertyDescriptor> name2Property, RequestParse annotation) {
        String fieldName = field.getName();
        if (annotation == null || StringUtils.isEmpty(annotation.getter())) {
            PropertyDescriptor pro = name2Property == null ? null : name2Property.get(fieldName);
            if (pro != null && pro.getReadMethod() != null) {
                return pro.getReadMethod();
            }
        } else {
            Method getter = findMethod(instanceClass, annotation.getter(), new Class<?>[] {});
            if (getter != null) {
                return getter;
            }
        }
        String capitalized = fieldName.substring(0, 1).toUpperCase()
                + (fieldName.length() > 1 ? fieldName.substring(1) : "");
        Method getter = findMethod(instanceClass, "get" + capitalized, new Class<?>[] {});
        if (getter != null) {
            return getter;
        }
        // BUGFIX: was Boolean.class.equals(field.getClass()), which compares
        // against java.lang.reflect.Field and never matches; use getType().
        boolean isBooleanField = Boolean.TYPE.equals(field.getType())
                || Boolean.class.equals(field.getType());
        if (isBooleanField) {
            getter = findMethod(instanceClass, "is" + capitalized, new Class<?>[] {});
            if (getter != null) {
                return getter;
            }
            // Field itself already named "isXxx": try that name verbatim.
            // BUGFIX: the char right after the "is" prefix is index 2 (was
            // checked at index 3, so names like "isActive" never matched).
            if (fieldName.startsWith("is") && fieldName.length() > 2
                    && fieldName.charAt(2) >= 'A' && fieldName.charAt(2) <= 'Z') {
                getter = findMethod(instanceClass, fieldName, new Class<?>[] {});
                if (getter != null) {
                    return getter;
                }
            }
        }
        return null;
    }

    /**
     * Looks up a public method via {@link Class#getMethod}; returns null and
     * logs (instead of throwing) when absent or inaccessible.
     */
    public static final Method findMethod(Class<?> instanceClazz, String name, Class<?>[] clazz) {
        try {
            return instanceClazz.getMethod(name, clazz);
        } catch (SecurityException e) {
            logger.log(Level.WARNING, null, e);
        } catch (NoSuchMethodException e) {
            if (_SHOW_EXCEPTION) {
                // Diagnostic only; lookup misses are expected during probing.
                logger.log(Level.WARNING, "findMethod: no method \"{0}\": {1}",
                        new Object[] { name, e });
            }
        }
        return null;
    }

    /** Convenience overload of {@link #findMethod(Class, String, Class[])}. */
    public static final Method findMethod(Object instance, String name, Class<?>[] clazz) {
        return findMethod(instance.getClass(), name, clazz);
    }

    /**
     * Builds a map from bean property name to {@link PropertyDescriptor} for
     * {@code ownerClass} (excluding {@link Object}'s properties).
     *
     * @return the map; empty when introspection fails (failure is logged)
     */
    public static final Map<String, PropertyDescriptor> findFieldName2PropertyDescriptor(
            Class<?> ownerClass) {
        Map<String, PropertyDescriptor> fieldName2PropertyDescriptor = new HashMap<>();
        try {
            BeanInfo info = Introspector.getBeanInfo(ownerClass, Object.class);
            for (PropertyDescriptor prop : info.getPropertyDescriptors()) {
                fieldName2PropertyDescriptor.put(prop.getName(), prop);
            }
        } catch (IntrospectionException e) {
            logger.log(Level.WARNING, "{0}", e);
        }
        return fieldName2PropertyDescriptor;
    }

    /** Convenience overload of {@link #findSetter(Class, Field, Map, RequestParse)}. */
    public static final Method findSetter(Object instance, Field field,
            Map<String, PropertyDescriptor> name2Property, RequestParse annotation) {
        return findSetter(instance.getClass(), field, name2Property, annotation);
    }

    /**
     * Resolves a public one-arg setter for {@code field}, trying in order:
     * the annotation-named setter (first with the field's type, then with
     * {@code String}) — or the bean write method when no annotation setter is
     * given — then the conventional {@code setFieldName} with the field's
     * type and with {@code String}. The conventional lookup is skipped when
     * the annotation declares a custom parser (the parser supplies values).
     *
     * @return the setter, or null when none is found
     */
    public static final Method findSetter(Class<?> instanceClass, Field field,
            Map<String, PropertyDescriptor> name2Property, RequestParse annotation) {
        String fieldName = field.getName();
        if (annotation == null || StringUtils.isEmpty(annotation.setter())) {
            PropertyDescriptor pro = name2Property == null ? null : name2Property.get(fieldName);
            if (pro != null && pro.getWriteMethod() != null) {
                return pro.getWriteMethod();
            }
        } else {
            Method setter = findMethod(instanceClass, annotation.setter(),
                    new Class[] { field.getType() });
            if (setter != null) {
                return setter;
            }
            setter = findMethod(instanceClass, annotation.setter(), new Class[] { String.class });
            if (setter != null) {
                return setter;
            }
        }
        // With a custom parser the value is produced elsewhere; skip the
        // conventional setXxx probing in that case.
        if (annotation == null || StringUtils.isEmpty(annotation.parser())) {
            String setname = "set" + fieldName.substring(0, 1).toUpperCase()
                    + (fieldName.length() > 1 ? fieldName.substring(1) : "");
            Method setter = findMethod(instanceClass, setname, new Class[] { field.getType() });
            if (setter != null) {
                return setter;
            }
            setter = findMethod(instanceClass, setname, new Class[] { String.class });
            if (setter != null) {
                return setter;
            }
        }
        return null;
    }

    /**
     * Resolves the custom parser method named by the annotation. Depending on
     * {@link RequestParse#parseType()}, the parser takes either
     * {@code (HttpServletRequest, String)} or {@code (HttpServletRequest)}.
     *
     * @return the parser method, or null when absent or not annotated
     */
    public static final Method findParser(Object instance, Field field, RequestParse annotation) {
        if (annotation != null && StringUtils.isNotEmpty(annotation.parser())) {
            try {
                Class<?>[] signature =
                        annotation.parseType() == ParseMethodType.ParseRequestWithAction
                                ? new Class[] { HttpServletRequest.class, String.class }
                                : new Class[] { HttpServletRequest.class };
                Method parser = findMethod(instance, annotation.parser(), signature);
                if (parser != null) {
                    return parser;
                }
            } catch (SecurityException e) {
                logger.log(Level.WARNING, null, e);
            }
        }
        return null;
    }

    /**
     * Reads the current value of {@code field} on {@code instance} via
     * direct (accessible) field access.
     *
     * @param setterMethod unused; kept for interface compatibility
     * @return the field value, or null when the field is null or unreadable
     */
    public static final Object value(Object instance, Field field, Method setterMethod) {
        Object value = null;
        if (field != null) {
            try {
                field.setAccessible(true);
                value = field.get(instance);
            } catch (Exception e) {
                logger.log(Level.WARNING, null, e);
            }
        }
        return value;
    }

    /**
     * Writes {@code value} into {@code instance}, preferring the setter and
     * falling back to direct field access.
     *
     * <p>NOTE: reflective failures are logged and swallowed (the declared
     * exceptions are kept for interface compatibility but not thrown here).
     */
    public static void updateFieldValue(Object instance, Field field, Method setterMethod,
            Object value)
            throws IllegalArgumentException, IllegalAccessException, InvocationTargetException {
        try {
            if (setterMethod != null) {
                setterMethod.invoke(instance, value);
            } else {
                field.setAccessible(true);
                field.set(instance, value);
            }
        } catch (IllegalAccessException | IllegalArgumentException | SecurityException
                | InvocationTargetException e) {
            // Was e.printStackTrace(); route through the class logger instead.
            logger.log(Level.WARNING, null, e);
        }
    }

    /**
     * Writes a primitive boolean into {@code instance}, preferring the setter
     * and falling back to direct field access. Unlike the Object overload,
     * failures propagate to the caller.
     */
    public static void updateFieldValue(Object instance, Field field, Method setterMethod,
            boolean value)
            throws IllegalArgumentException, IllegalAccessException, InvocationTargetException {
        if (setterMethod != null) {
            setterMethod.invoke(instance, value);
        } else {
            field.setAccessible(true);
            field.setBoolean(instance, value);
        }
    }

    /**
     * Determines the property type: the setter's single parameter type when a
     * one-arg setter is given, otherwise the field's declared type.
     *
     * @return the type, or null when neither source is usable
     */
    public static Class<?> findPropertyType(Field field, Method setterMethod) {
        if (setterMethod != null) {
            Class<?>[] parameterTypes = setterMethod.getParameterTypes();
            if (parameterTypes != null && parameterTypes.length == 1) {
                return parameterTypes[0];
            }
        }
        if (field != null) {
            return field.getType();
        }
        return null;
    }

    /**
     * Returns true for JVM primitives and the common "simple" value classes
     * this framework treats as scalars (String, wrappers, Date,
     * InternetAddress).
     */
    public static boolean isPrimitive(Class<?> clazz) {
        return clazz.isPrimitive()
                || clazz.equals(String.class)
                || clazz.equals(Boolean.class)
                || clazz.equals(Byte.class)
                || clazz.equals(Character.class)
                || clazz.equals(Double.class)
                || clazz.equals(Float.class)
                || clazz.equals(Integer.class)
                || clazz.equals(Long.class)
                || clazz.equals(Short.class)
                || clazz.equals(Date.class)
                || clazz.equals(InternetAddress.class);
    }

    /**
     * Finds a declared field by name on {@code ownerClass} or any superclass.
     * Misses on intermediate classes are logged and the search continues up
     * the hierarchy.
     *
     * @return the field, or null when no class in the chain declares it
     */
    public static Field getField(Class<?> ownerClass, String fieldName) {
        try {
            return ownerClass.getDeclaredField(fieldName);
        } catch (NoSuchFieldException | SecurityException e) {
            logger.log(Level.WARNING, null, e);
        }
        if (ownerClass.getSuperclass() != null) {
            return getField(ownerClass.getSuperclass(), fieldName);
        }
        logger.log(Level.WARNING, "getField: no such field: {0}", fieldName);
        return null;
    }
}
// **********************************************************************
//
// <copyright>
//
//  BBN Technologies
//  10 Moulton Street
//  Cambridge, MA 02138
//  (617) 873-8000
//
//  Copyright (C) BBNT Solutions LLC. All rights reserved.
//
// </copyright>
// **********************************************************************
//
// $Source: /cvs/distapps/openmap/src/openmap/com/bbn/openmap/gui/dock/DockLayout.java,v $
// $RCSfile: DockLayout.java,v $
// $Revision: 1.5 $
// $Date: 2005/08/09 17:50:51 $
// $Author: dietrick $
//
// **********************************************************************

package com.bbn.openmap.gui.dock;

import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Insets;
import java.awt.LayoutManager2;
import java.awt.Rectangle;
import java.util.Iterator;

import javax.swing.JComponent;

import com.bbn.openmap.util.Debug;

/**
 * A Layout manager used for the docking component...
 * 
 * @author Ben Lubin
 * @version $Revision: 1.5 $ on $Date: 2005/08/09 17:50:51 $
 * @since 12/5/02
 */
public class DockLayout implements LayoutManager2 {

    private BasicDockPanel p;

    /**
     * Bounds still available after docked components that may occlude the
     * background have been placed (shrunk as each wrapper is laid out).
     */
    protected Rectangle occludingBounds;

    /** Bounds remaining for the background component itself. */
    protected Rectangle backgroundBounds;

    public DockLayout(BasicDockPanel p) {
        this.p = p;
    }

    //Layout Query Functions:
    /////////////////////////

    /** Account for the extra size of the cardinal DockWrapper. */
    protected int getHeightAtLeftCardinal(DockWrapper dw) {
        return getHeightAtLeft(dw) - p.getOverlapTolerance();
    }

    /** Account for the extra size of the cardinal DockWrapper. */
    protected int getHeightAtRightCardinal(DockWrapper dw) {
        return getHeightAtRight(dw) - p.getOverlapTolerance();
    }

    /** Account for the extra size of the cardinal DockWrapper. */
    protected int getWidthAtYCardinal(DockWrapper dw, int y) {
        return getWidthAtY(dw, y) - p.getOverlapTolerance();
    }

    /**
     * Get the height at the left point for a north or south DockWrapper:
     * the wrapper's preferred height plus, recursively, that of its first
     * docked child.
     */
    protected int getHeightAtLeft(DockWrapper dw) {
        int ret = dw.getPreferredSize().height;
        java.util.List l = dw.getDockedWrappers();
        if (!l.isEmpty()) {
            ret += getHeightAtLeft((DockWrapper) l.get(0));
        }
        return ret;
    }

    /**
     * Get the height at the right point for a north or south DockWrapper:
     * the wrapper's preferred height plus, recursively, that of its last
     * docked child.
     */
    protected int getHeightAtRight(DockWrapper dw) {
        int ret = dw.getPreferredSize().height;
        java.util.List l = dw.getDockedWrappers();
        if (!l.isEmpty()) {
            ret += getHeightAtRight((DockWrapper) l.get(l.size() - 1));
        }
        return ret;
    }

    /**
     * Get the width at the given y coordinate for an east or west
     * DockWrapper: this wrapper's width when its bounds span y, plus the
     * widths of all docked children at that y.
     */
    protected int getWidthAtY(DockWrapper dw, int y) {
        Rectangle bounds = dw.getBounds();
        int ret = 0;
        if ((bounds.y <= y) && (bounds.y + bounds.height >= y)) {
            ret = bounds.width;
        }
        for (Iterator iter = dw.getDockedWrappers().iterator(); iter.hasNext();) {
            DockWrapper c = (DockWrapper) iter.next();
            ret += getWidthAtY(c, y);
        }
        return ret;
    }

    //Layout Functions:
    ///////////////////

    /**
     * Layout the entire container: shrink the working bounds by the insets,
     * place the cardinal wrappers, lay out east/west before north/south
     * (the horizontal sides constrain the horizontal extent of the
     * top/bottom strips), then size the background.
     */
    protected void layoutContainer() {
        Rectangle inBounds = p.getBounds();
        Insets insets = p.getInsets();
        inBounds.x += insets.left;
        inBounds.width -= insets.left;
        inBounds.width -= insets.right;
        inBounds.y += insets.top;
        inBounds.height -= insets.top;
        inBounds.height -= insets.bottom;

        backgroundBounds = (Rectangle) inBounds.clone();
        occludingBounds = (Rectangle) inBounds.clone();

        layoutCardinals();

        layoutEast(p.getEast(),
                occludingBounds.x + occludingBounds.width,
                occludingBounds.y,
                occludingBounds.width,
                occludingBounds.height);
        layoutWest(p.getWest(),
                occludingBounds.x,
                occludingBounds.y,
                occludingBounds.width,
                occludingBounds.height);

        int southLeft = inBounds.x
                + getWidthAtYCardinal(p.getWest(),
                        inBounds.y + inBounds.height - getHeightAtLeftCardinal(p.getSouth()));
        int southRight = inBounds.x + inBounds.width
                - getWidthAtYCardinal(p.getEast(),
                        inBounds.y + inBounds.height - getHeightAtRightCardinal(p.getSouth()));
        layoutSouth(p.getSouth(),
                southLeft,
                occludingBounds.y + occludingBounds.height,
                southRight - southLeft,
                occludingBounds.height);

        int northLeft = inBounds.x
                + getWidthAtYCardinal(p.getWest(),
                        inBounds.y + getHeightAtLeftCardinal(p.getNorth()));
        int northRight = inBounds.x + inBounds.width
                - getWidthAtYCardinal(p.getEast(),
                        inBounds.y + getHeightAtRightCardinal(p.getNorth()));
        layoutNorth(p.getNorth(),
                northLeft,
                occludingBounds.y,
                northRight - northLeft,
                occludingBounds.height);

        layoutBackground();
    }

    /**
     * Place the four cardinal (edge) wrappers as thin strips of
     * overlap-tolerance thickness along each side of the panel.
     */
    protected void layoutCardinals() {
        p.getNorth().setBounds(0, 0, p.getWidth(), p.getOverlapTolerance());
        p.getSouth().setBounds(0,
                p.getHeight() - p.getOverlapTolerance(),
                p.getWidth(),
                p.getOverlapTolerance());
        // NOTE(review): the east/west strips use p.getWidth() for their
        // height (last argument); p.getHeight() looks intended here —
        // confirm against BasicDockPanel before changing.
        p.getEast().setBounds(p.getWidth() - p.getOverlapTolerance(),
                0,
                p.getOverlapTolerance(),
                p.getWidth());
        p.getWest().setBounds(0, 0, p.getOverlapTolerance(), p.getWidth());
    }

    /**
     * Layout east side.
     * 
     * @param x right most edge.
     * @param y top most edge.
     */
    protected void layoutEast(DockWrapper base, int x, int y, int maxwidth, int maxheight) {
        for (Iterator iter = base.getDockedWrappers().iterator(); iter.hasNext();) {
            DockWrapper dw = (DockWrapper) iter.next();
            Dimension d = dw.getPreferredSize();
            int w = min(d.width, maxwidth);
            int h = min(d.height, maxheight);
            dw.setBounds(x - w, y, w, h);
            if (!dw.canOcclude()) {
                updateRight(backgroundBounds, x - w);
            }
            updateRight(occludingBounds, x - w);
            layoutEast(dw, x - w, y, maxwidth - w, maxheight);
            y += h;
            maxheight -= h;
        }
    }

    /**
     * Layout west side.
     * 
     * @param x left most edge.
     * @param y top most edge.
     */
    protected void layoutWest(DockWrapper base, int x, int y, int maxwidth, int maxheight) {
        for (Iterator iter = base.getDockedWrappers().iterator(); iter.hasNext();) {
            DockWrapper dw = (DockWrapper) iter.next();
            Dimension d = dw.getPreferredSize();
            int w = min(d.width, maxwidth);
            int h = min(d.height, maxheight);
            dw.setBounds(x, y, w, h);
            if (!dw.canOcclude()) {
                updateLeft(backgroundBounds, x + w);
            }
            updateLeft(occludingBounds, x + w);
            layoutWest(dw, x + w, y, maxwidth - w, maxheight);
            y += h;
            maxheight -= h;
        }
    }

    /**
     * Layout north side.
     * 
     * @param x left most edge.
     * @param y top most edge.
     */
    protected void layoutNorth(DockWrapper base, int x, int y, int maxwidth, int maxheight) {
        for (Iterator iter = base.getDockedWrappers().iterator(); iter.hasNext();) {
            DockWrapper dw = (DockWrapper) iter.next();
            Dimension d = dw.getPreferredSize();
            int w = min(d.width, maxwidth);
            int h = min(d.height, maxheight);
            dw.setBounds(x, y, w, h);
            if (!dw.canOcclude()) {
                updateTop(backgroundBounds, y + h);
            }
            updateTop(occludingBounds, y + h);
            layoutNorth(dw, x, y + h, maxwidth, maxheight - h);
            x += w;
            maxwidth -= w;
        }
    }

    /**
     * Layout south side.
     * 
     * @param x left most edge.
     * @param y bottom most edge.
     */
    protected void layoutSouth(DockWrapper base, int x, int y, int maxwidth, int maxheight) {
        for (Iterator iter = base.getDockedWrappers().iterator(); iter.hasNext();) {
            DockWrapper dw = (DockWrapper) iter.next();
            Dimension d = dw.getPreferredSize();
            int w = min(d.width, maxwidth);
            int h = min(d.height, maxheight);
            dw.setBounds(x, y - h, w, h);
            // System.out.println("RES: "+ dw.getBounds());
            if (!dw.canOcclude()) {
                updateBottom(backgroundBounds, y - h);
            }
            updateBottom(occludingBounds, y - h);
            layoutSouth(dw, x, y - h, maxwidth, maxheight - h);
            x += w;
            maxwidth -= w;
        }
    }

    /** Size the background component to fill whatever space remains. */
    protected void layoutBackground() {
        JComponent background = p.getBackgroundComponent();
        if (background != null) {
            /*
             * backgroundBounds.x += 10; backgroundBounds.y += 10;
             * backgroundBounds.width -= 20; backgroundBounds.height -=
             * 20;
             */
            background.setBounds(backgroundBounds);
        }
    }

    // Utilities:
    /////////////

    protected int min(int a, int b) {
        return Math.min(a, b);
    }

    protected int max(int a, int b) {
        return Math.max(a, b);
    }

    /** Move the rectangle's left edge right to {@code left} (never left). */
    protected void updateLeft(Rectangle r, int left) {
        int tmp = left - r.x;
        if (tmp > 0) {
            r.x += tmp;
            r.width -= tmp;
        }
    }

    /** Clamp the rectangle's right edge to at most {@code right}. */
    protected void updateRight(Rectangle r, int right) {
        r.width = min(r.x + r.width, right);
    }

    /** Move the rectangle's top edge down to {@code top} (never up). */
    protected void updateTop(Rectangle r, int top) {
        int tmp = top - r.y;
        if (tmp > 0) {
            r.y += tmp;
            r.height -= tmp;
        }
    }

    /** Clamp the rectangle's bottom edge to at most {@code bottom}. */
    protected void updateBottom(Rectangle r, int bottom) {
        r.height = min(r.y + r.height, bottom);
    }

    // From LayoutManager2:
    ///////////////////////

    public void layoutContainer(Container parent) {
        if (parent != p) {
            Debug.error("DockLayout: Asked to layout unexpected container");
            return;
        }
        layoutContainer();
    }

    public void addLayoutComponent(String name, Component comp) {}

    public void addLayoutComponent(Component comp, Object constraints) {}

    public void removeLayoutComponent(Component comp) {}

    public Dimension preferredLayoutSize(Container parent) {
        JComponent background = p.getBackgroundComponent();
        if (background != null) {
            return background.getPreferredSize();
        }
        return new Dimension(0, 0);
    }

    public Dimension minimumLayoutSize(Container parent) {
        JComponent background = p.getBackgroundComponent();
        if (background != null) {
            return background.getMinimumSize();
        }
        return new Dimension(0, 0);
    }

    public Dimension maximumLayoutSize(Container parent) {
        JComponent background = p.getBackgroundComponent();
        if (background != null) {
            return background.getMaximumSize();
        }
        return new Dimension(0, 0);
    }

    public float getLayoutAlignmentX(Container target) {
        return .5f;
    }

    public float getLayoutAlignmentY(Container target) {
        return .5f;
    }

    public void invalidateLayout(Container target) {}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.cache.hibernate; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.OneToMany; import javax.persistence.OneToOne; import javax.persistence.PersistenceException; import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.IgniteKernal; import org.apache.ignite.internal.processors.cache.IgniteCacheProxy; import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.hibernate.ObjectNotFoundException; import org.hibernate.Query; import 
org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.Transaction; import org.hibernate.annotations.NaturalId; import org.hibernate.annotations.NaturalIdCache; import org.hibernate.boot.Metadata; import org.hibernate.boot.MetadataSources; import org.hibernate.boot.registry.StandardServiceRegistry; import org.hibernate.boot.registry.StandardServiceRegistryBuilder; import org.hibernate.cache.spi.GeneralDataRegion; import org.hibernate.cache.spi.TransactionalDataRegion; import org.hibernate.cache.spi.access.AccessType; import org.hibernate.cfg.Environment; import org.hibernate.exception.ConstraintViolationException; import org.hibernate.mapping.PersistentClass; import org.hibernate.mapping.RootClass; import org.hibernate.stat.NaturalIdCacheStatistics; import org.hibernate.stat.SecondLevelCacheStatistics; import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC; import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL; import static org.apache.ignite.cache.CacheMode.PARTITIONED; import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC; import static org.apache.ignite.cache.hibernate.HibernateRegionFactory.DFLT_ACCESS_TYPE_PROPERTY; import static org.apache.ignite.cache.hibernate.HibernateRegionFactory.IGNITE_INSTANCE_NAME_PROPERTY; import static org.apache.ignite.cache.hibernate.HibernateRegionFactory.REGION_CACHE_PROPERTY; import static org.hibernate.cfg.Environment.CACHE_REGION_FACTORY; import static org.hibernate.cfg.Environment.GENERATE_STATISTICS; import static org.hibernate.cfg.Environment.HBM2DDL_AUTO; import static org.hibernate.cfg.Environment.RELEASE_CONNECTIONS; import static org.hibernate.cfg.Environment.USE_QUERY_CACHE; import static org.hibernate.cfg.Environment.USE_SECOND_LEVEL_CACHE; /** * * Tests Hibernate L2 cache. 
*/ public class HibernateL2CacheSelfTest extends GridCommonAbstractTest { /** */ private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true); /** */ public static final String CONNECTION_URL = "jdbc:h2:mem:example;DB_CLOSE_DELAY=-1"; /** */ public static final String ENTITY_NAME = Entity.class.getName(); /** */ public static final String ENTITY2_NAME = Entity2.class.getName(); /** */ public static final String VERSIONED_ENTITY_NAME = VersionedEntity.class.getName(); /** */ public static final String CHILD_ENTITY_NAME = ChildEntity.class.getName(); /** */ public static final String PARENT_ENTITY_NAME = ParentEntity.class.getName(); /** */ public static final String CHILD_COLLECTION_REGION = ENTITY_NAME + ".children"; /** */ public static final String NATURAL_ID_REGION = "org.apache.ignite.cache.hibernate.HibernateL2CacheSelfTest$Entity##NaturalId"; /** */ public static final String NATURAL_ID_REGION2 = "org.apache.ignite.cache.hibernate.HibernateL2CacheSelfTest$Entity2##NaturalId"; /** */ private SessionFactory sesFactory1; /** */ private SessionFactory sesFactory2; /** * First Hibernate test entity. */ @javax.persistence.Entity @NaturalIdCache @SuppressWarnings({"PublicInnerClass", "UnnecessaryFullyQualifiedName"}) public static class Entity { /** */ private int id; /** */ private String name; /** */ private Collection<ChildEntity> children; /** * Default constructor required by Hibernate. */ public Entity() { // No-op. } /** * @param id ID. * @param name Name. */ public Entity(int id, String name) { this.id = id; this.name = name; } /** * @return ID. */ @Id public int getId() { return id; } /** * @param id ID. */ public void setId(int id) { this.id = id; } /** * @return Name. */ @NaturalId(mutable = true) public String getName() { return name; } /** * @param name Name. */ public void setName(String name) { this.name = name; } /** * @return Children. 
*/ @OneToMany(cascade=javax.persistence.CascadeType.ALL, fetch=FetchType.LAZY) @JoinColumn(name="ENTITY_ID") public Collection<ChildEntity> getChildren() { return children; } /** * @param children Children. */ public void setChildren(Collection<ChildEntity> children) { this.children = children; } } /** * Second Hibernate test entity. */ @javax.persistence.Entity @NaturalIdCache @SuppressWarnings({"PublicInnerClass", "UnnecessaryFullyQualifiedName"}) public static class Entity2 { /** */ private int id; /** */ private String name; /** */ private Collection<ChildEntity> children; /** * Default constructor required by Hibernate. */ public Entity2() { // No-op. } /** * @param id ID. * @param name Name. */ public Entity2(int id, String name) { this.id = id; this.name = name; } /** * @return ID. */ @Id public int getId() { return id; } /** * @param id ID. */ public void setId(int id) { this.id = id; } /** * @return Name. */ @NaturalId(mutable = true) public String getName() { return name; } /** * @param name Name. */ public void setName(String name) { this.name = name; } } /** * Hibernate child entity referenced by {@link Entity}. */ @javax.persistence.Entity @SuppressWarnings("PublicInnerClass") public static class ChildEntity { /** */ private int id; /** * Default constructor required by Hibernate. */ public ChildEntity() { // No-op. } /** * @param id ID. */ public ChildEntity(int id) { this.id = id; } /** * @return ID. */ @Id @GeneratedValue public int getId() { return id; } /** * @param id ID. */ public void setId(int id) { this.id = id; } } /** * Hibernate entity referencing {@link Entity}. */ @javax.persistence.Entity @SuppressWarnings("PublicInnerClass") public static class ParentEntity { /** */ private int id; /** */ private Entity entity; /** * Default constructor required by Hibernate. */ public ParentEntity() { // No-op. } /** * @param id ID. * @param entity Referenced entity. 
*/ public ParentEntity(int id, Entity entity) { this.id = id; this.entity = entity; } /** * @return ID. */ @Id public int getId() { return id; } /** * @param id ID. */ public void setId(int id) { this.id = id; } /** * @return Referenced entity. */ @OneToOne public Entity getEntity() { return entity; } /** * @param entity Referenced entity. */ public void setEntity(Entity entity) { this.entity = entity; } } /** * Hibernate entity. */ @javax.persistence.Entity @SuppressWarnings({"PublicInnerClass", "UnnecessaryFullyQualifiedName"}) public static class VersionedEntity { /** */ private int id; /** */ private long ver; /** * Default constructor required by Hibernate. */ public VersionedEntity() { } /** * @param id ID. */ public VersionedEntity(int id) { this.id = id; } /** * @return ID. */ @Id public int getId() { return id; } /** * @param id ID. */ public void setId(int id) { this.id = id; } /** * @return Version. */ @javax.persistence.Version public long getVersion() { return ver; } /** * @param ver Version. 
*/ public void setVersion(long ver) { this.ver = ver; } } /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); TcpDiscoverySpi discoSpi = new TcpDiscoverySpi(); discoSpi.setIpFinder(IP_FINDER); cfg.setDiscoverySpi(discoSpi); cfg.setCacheConfiguration(generalRegionConfiguration("org.hibernate.cache.spi.UpdateTimestampsCache"), generalRegionConfiguration("org.hibernate.cache.internal.StandardQueryCache"), transactionalRegionConfiguration(ENTITY_NAME), transactionalRegionConfiguration(ENTITY2_NAME), transactionalRegionConfiguration(VERSIONED_ENTITY_NAME), transactionalRegionConfiguration(PARENT_ENTITY_NAME), transactionalRegionConfiguration(CHILD_ENTITY_NAME), transactionalRegionConfiguration(CHILD_COLLECTION_REGION), transactionalRegionConfiguration(NATURAL_ID_REGION), transactionalRegionConfiguration(NATURAL_ID_REGION2)); return cfg; } /** * @param regionName Region name. * @return Cache configuration for {@link GeneralDataRegion}. */ private CacheConfiguration generalRegionConfiguration(String regionName) { CacheConfiguration cfg = new CacheConfiguration(); cfg.setName(regionName); cfg.setCacheMode(PARTITIONED); cfg.setAtomicityMode(ATOMIC); cfg.setWriteSynchronizationMode(FULL_SYNC); cfg.setBackups(1); cfg.setAffinity(new RendezvousAffinityFunction(false, 10)); return cfg; } /** * @param regionName Region name. * @return Cache configuration for {@link TransactionalDataRegion}. */ protected CacheConfiguration transactionalRegionConfiguration(String regionName) { CacheConfiguration cfg = new CacheConfiguration(); cfg.setName(regionName); cfg.setCacheMode(PARTITIONED); cfg.setAtomicityMode(TRANSACTIONAL); cfg.setWriteSynchronizationMode(FULL_SYNC); cfg.setBackups(1); cfg.setAffinity(new RendezvousAffinityFunction(false, 10)); return cfg; } /** * @return Hibernate registry builder. 
*/ protected StandardServiceRegistryBuilder registryBuilder() { StandardServiceRegistryBuilder builder = new StandardServiceRegistryBuilder(); builder.applySetting("hibernate.connection.url", CONNECTION_URL); return builder; } /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { startGrids(2); } /** {@inheritDoc} */ @Override protected void afterTestsStopped() throws Exception { stopAllGrids(); } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { cleanup(); } /** * @return Hibernate L2 cache access types to test. */ protected AccessType[] accessTypes() { return new AccessType[]{AccessType.READ_ONLY, AccessType.NONSTRICT_READ_WRITE, AccessType.READ_WRITE}; } /** * @throws Exception If failed. */ public void testCollectionCache() throws Exception { for (AccessType accessType : accessTypes()) testCollectionCache(accessType); } /** * @param accessType Cache access type. * @throws Exception If failed. */ @SuppressWarnings("unchecked") private void testCollectionCache(AccessType accessType) throws Exception { createSessionFactories(accessType); Map<Integer, Integer> idToChildCnt = new HashMap<>(); try { Session ses = sesFactory1.openSession(); try { Transaction tx = ses.beginTransaction(); for (int i = 0; i < 3; i++) { Entity e = new Entity(i, "name-" + i); Collection<ChildEntity> children = new ArrayList<>(); for (int j = 0; j < 3; j++) children.add(new ChildEntity()); e.setChildren(children); idToChildCnt.put(e.getId(), e.getChildren().size()); ses.save(e); } tx.commit(); } finally { ses.close(); } // Load children, this should populate cache. 
ses = sesFactory1.openSession(); try { List<Entity> list = ses.createCriteria(ENTITY_NAME).list(); assertEquals(idToChildCnt.size(), list.size()); for (Entity e : list) assertEquals((int)idToChildCnt.get(e.getId()), e.getChildren().size()); } finally { ses.close(); } assertCollectionCache(sesFactory2, idToChildCnt, 3, 0); assertCollectionCache(sesFactory1, idToChildCnt, 3, 0); if (accessType == AccessType.READ_ONLY) return; // Update children for one entity. ses = sesFactory1.openSession(); try { Transaction tx = ses.beginTransaction(); Entity e1 = (Entity)ses.load(Entity.class, 1); e1.getChildren().remove(e1.getChildren().iterator().next()); ses.update(e1); idToChildCnt.put(e1.getId(), e1.getChildren().size()); tx.commit(); } finally { ses.close(); } assertCollectionCache(sesFactory2, idToChildCnt, 2, 1); // After update collection cache entry is removed. assertCollectionCache(sesFactory1, idToChildCnt, 3, 0); // 'assertCollectionCache' loads children in cache. // Update children for the same entity using another SessionFactory. ses = sesFactory2.openSession(); try { Transaction tx = ses.beginTransaction(); Entity e1 = (Entity)ses.load(Entity.class, 1); e1.getChildren().remove(e1.getChildren().iterator().next()); ses.update(e1); idToChildCnt.put(e1.getId(), e1.getChildren().size()); tx.commit(); } finally { ses.close(); } assertCollectionCache(sesFactory2, idToChildCnt, 2, 1); // After update collection cache entry is removed. assertCollectionCache(sesFactory1, idToChildCnt, 3, 0); // 'assertCollectionCache' loads children in cache. } finally { cleanup(); } } /** * @throws Exception If failed. */ public void testEntityCache() throws Exception { for (AccessType accessType : accessTypes()) testEntityCache(accessType); } /** * @param accessType Cache access type. * @throws Exception If failed. 
*/
private void testEntityCache(AccessType accessType) throws Exception {
    createSessionFactories(accessType);

    // Expected entity name per ID, kept in sync with every DB mutation below.
    Map<Integer, String> idToName = new HashMap<>();

    try {
        Session ses = sesFactory1.openSession();

        try {
            Transaction tx = ses.beginTransaction();

            for (int i = 0; i < 2; i++) {
                String name = "name-" + i;

                ses.save(new Entity(i, name));

                idToName.put(i, name);
            }

            tx.commit();
        }
        finally {
            ses.close();
        }

        // ID 100 does not exist, so it must register as a cache miss on both factories.
        assertEntityCache(ENTITY_NAME, sesFactory2, idToName, 100);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName, 100);

        // READ_ONLY caches cannot be updated, so the mutation part is skipped.
        if (accessType == AccessType.READ_ONLY)
            return;

        ses = sesFactory1.openSession();

        try {
            // Updates and inserts in single transaction.
            Transaction tx = ses.beginTransaction();

            Entity e0 = (Entity)ses.load(Entity.class, 0);

            e0.setName("name-0-changed1");

            ses.update(e0);

            idToName.put(0, e0.getName());

            ses.save(new Entity(2, "name-2"));

            idToName.put(2, "name-2");

            Entity e1 = (Entity)ses.load(Entity.class, 1);

            e1.setName("name-1-changed1");

            ses.update(e1);

            idToName.put(1, e1.getName());

            ses.save(new Entity(3, "name-3"));

            idToName.put(3, "name-3");

            tx.commit();
        }
        finally {
            ses.close();
        }

        assertEntityCache(ENTITY_NAME, sesFactory2, idToName);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName);

        ses = sesFactory1.openSession();

        try {
            // Updates, inserts and deletes in single transaction.
            Transaction tx = ses.beginTransaction();

            ses.save(new Entity(4, "name-4"));

            idToName.put(4, "name-4");

            Entity e0 = (Entity)ses.load(Entity.class, 0);

            e0.setName("name-0-changed2");

            ses.update(e0);

            idToName.put(e0.getId(), e0.getName());

            ses.delete(ses.load(Entity.class, 1));

            idToName.remove(1);

            Entity e2 = (Entity)ses.load(Entity.class, 2);

            e2.setName("name-2-changed1");

            ses.update(e2);

            idToName.put(e2.getId(), e2.getName());

            ses.delete(ses.load(Entity.class, 3));

            idToName.remove(3);

            ses.save(new Entity(5, "name-5"));

            idToName.put(5, "name-5");

            tx.commit();
        }
        finally {
            ses.close();
        }

        // IDs 1 and 3 were deleted above and must now miss in the cache on both factories.
        assertEntityCache(ENTITY_NAME, sesFactory2, idToName, 1, 3);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName, 1, 3);

        // Try to update the same entity using another SessionFactory.
        ses = sesFactory2.openSession();

        try {
            Transaction tx = ses.beginTransaction();

            Entity e0 = (Entity)ses.load(Entity.class, 0);

            e0.setName("name-0-changed3");

            ses.update(e0);

            idToName.put(e0.getId(), e0.getName());

            tx.commit();
        }
        finally {
            ses.close();
        }

        assertEntityCache(ENTITY_NAME, sesFactory2, idToName);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName);
    }
    finally {
        cleanup();
    }
}

/**
 * Runs the shared-cache-region scenario (Entity and Entity2 in one region) for every
 * supported access type.
 *
 * @throws Exception If failed.
 */
public void testTwoEntitiesSameCache() throws Exception {
    for (AccessType accessType : accessTypes())
        testTwoEntitiesSameCache(accessType);
}

/**
 * @param accessType Cache access type.
 * @throws Exception If failed.
*/
private void testTwoEntitiesSameCache(AccessType accessType) throws Exception {
    createSessionFactories(accessType);

    try {
        Session ses = sesFactory1.openSession();

        // Expected names per ID, one map per entity type; both types share one cache region
        // (see startHibernate, which maps ENTITY2_NAME onto ENTITY_NAME's cache).
        Map<Integer, String> idToName1 = new HashMap<>();
        Map<Integer, String> idToName2 = new HashMap<>();

        try {
            Transaction tx = ses.beginTransaction();

            for (int i = 0; i < 2; i++) {
                String name = "name-" + i;

                ses.save(new Entity(i, name));
                ses.save(new Entity2(i, name));

                idToName1.put(i, name);
                idToName2.put(i, name);
            }

            tx.commit();
        }
        finally {
            ses.close();
        }

        assertEntityCache(ENTITY_NAME, sesFactory2, idToName1, 100);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName1, 100);
        assertEntityCache(ENTITY2_NAME, sesFactory2, idToName2, 100);
        assertEntityCache(ENTITY2_NAME, sesFactory1, idToName2, 100);

        // READ_ONLY caches cannot be updated, so the mutation part is skipped.
        if (accessType == AccessType.READ_ONLY)
            return;

        ses = sesFactory1.openSession();

        try {
            // Updates both entities in single transaction.
            Transaction tx = ses.beginTransaction();

            Entity e = (Entity)ses.load(Entity.class, 0);

            e.setName("name-0-changed1");

            ses.update(e);

            Entity2 e2 = (Entity2)ses.load(Entity2.class, 0);

            e2.setName("name-e2-0-changed1");

            ses.update(e2);

            idToName1.put(0, e.getName());
            idToName2.put(0, e2.getName());

            tx.commit();
        }
        finally {
            ses.close();
        }

        assertEntityCache(ENTITY_NAME, sesFactory2, idToName1, 100);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName1, 100);
        assertEntityCache(ENTITY2_NAME, sesFactory2, idToName2, 100);
        assertEntityCache(ENTITY2_NAME, sesFactory1, idToName2, 100);

        ses = sesFactory1.openSession();

        try {
            // Remove entity1 and insert entity2 in single transaction.
            Transaction tx = ses.beginTransaction();

            Entity e = (Entity)ses.load(Entity.class, 0);

            ses.delete(e);

            Entity2 e2 = new Entity2(2, "name-2");

            ses.save(e2);

            idToName1.remove(0);
            idToName2.put(2, e2.getName());

            tx.commit();
        }
        finally {
            ses.close();
        }

        assertEntityCache(ENTITY_NAME, sesFactory2, idToName1, 0, 100);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName1, 0, 100);
        assertEntityCache(ENTITY2_NAME, sesFactory2, idToName2, 100);
        assertEntityCache(ENTITY2_NAME, sesFactory1, idToName2, 100);

        ses = sesFactory1.openSession();

        Transaction tx = ses.beginTransaction();

        try {
            // Update, remove, insert in single transaction, transaction fails.
            Entity e = (Entity)ses.load(Entity.class, 1);

            e.setName("name-1-changed1");

            ses.update(e); // Valid update.

            ses.save(new Entity(2, "name-2")); // Valid insert.

            ses.delete(ses.load(Entity2.class, 0)); // Valid delete.

            Entity2 e2 = (Entity2)ses.load(Entity2.class, 1);

            e2.setName("name-2"); // Invalid update, not-unique name.

            ses.update(e2);

            tx.commit();

            fail("Commit must fail.");
        }
        catch (PersistenceException e) {
            log.info("Expected exception: " + e);

            tx.rollback();
        }
        finally {
            ses.close();
        }

        // None of the changes from the failed transaction may be visible in the cache.
        assertEntityCache(ENTITY_NAME, sesFactory2, idToName1, 0, 2, 100);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName1, 0, 2, 100);
        assertEntityCache(ENTITY2_NAME, sesFactory2, idToName2, 100);
        assertEntityCache(ENTITY2_NAME, sesFactory1, idToName2, 100);

        ses = sesFactory2.openSession();

        try {
            // Update, remove, insert in single transaction.
            tx = ses.beginTransaction();

            Entity e = (Entity)ses.load(Entity.class, 1);

            e.setName("name-1-changed1");

            ses.update(e);

            idToName1.put(1, e.getName());

            ses.save(new Entity(2, "name-2"));

            idToName1.put(2, "name-2");

            ses.delete(ses.load(Entity2.class, 0));

            idToName2.remove(0);

            Entity2 e2 = (Entity2)ses.load(Entity2.class, 1);

            e2.setName("name-e2-2-changed");

            ses.update(e2);

            idToName2.put(1, e2.getName());

            tx.commit();
        }
        finally {
            ses.close();
        }

        assertEntityCache(ENTITY_NAME, sesFactory2, idToName1, 0, 100);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName1, 0, 100);
        assertEntityCache(ENTITY2_NAME, sesFactory2, idToName2, 0, 100);
        assertEntityCache(ENTITY2_NAME, sesFactory1, idToName2, 0, 100);
    }
    finally {
        cleanup();
    }
}

/**
 * Runs the versioned-entity scenario for every supported access type.
 *
 * @throws Exception If failed.
 */
public void testVersionedEntity() throws Exception {
    for (AccessType accessType : accessTypes())
        testVersionedEntity(accessType);
}

/**
 * Checks that an optimistic-version conflict does not corrupt the shared L2 cache.
 *
 * @param accessType Cache access type.
 * @throws Exception If failed.
 */
private void testVersionedEntity(AccessType accessType) throws Exception {
    createSessionFactories(accessType);

    try {
        Session ses = sesFactory1.openSession();

        VersionedEntity e0 = new VersionedEntity(0);

        try {
            Transaction tx = ses.beginTransaction();

            ses.save(e0);

            tx.commit();
        }
        finally {
            ses.close();
        }

        ses = sesFactory1.openSession();

        long ver;

        try {
            ver = ((VersionedEntity)ses.load(VersionedEntity.class, 0)).getVersion();
        }
        finally {
            ses.close();
        }

        SecondLevelCacheStatistics stats1 = sesFactory1.getStatistics().getSecondLevelCacheStatistics(VERSIONED_ENTITY_NAME);
        SecondLevelCacheStatistics stats2 = sesFactory2.getStatistics().getSecondLevelCacheStatistics(VERSIONED_ENTITY_NAME);

        assertEquals(1, stats1.getElementCountInMemory());
        assertEquals(1, stats2.getElementCountInMemory());

        ses = sesFactory2.openSession();

        try {
            // Second factory must see the same version from the shared cache.
            assertEquals(ver, ((VersionedEntity)ses.load(VersionedEntity.class, 0)).getVersion());
        }
        finally {
            ses.close();
        }

        assertEquals(1, stats2.getElementCountInMemory());
        assertEquals(1, stats2.getHitCount());

        // READ_ONLY caches cannot be updated, so the conflicting-update part is skipped.
        if (accessType == AccessType.READ_ONLY)
            return;

        // Make the detached instance stale to provoke an optimistic-lock failure on update.
        e0.setVersion(ver - 1);

        ses = sesFactory1.openSession();

        Transaction tx = ses.beginTransaction();

        try {
            ses.update(e0);

            tx.commit();

            fail("Commit must fail.");
        }
        catch (PersistenceException e) {
            log.info("Expected exception: " + e);
        }
        finally {
            tx.rollback();

            ses.close();
        }

        sesFactory1.getStatistics().clear();

        stats1 = sesFactory1.getStatistics().getSecondLevelCacheStatistics(VERSIONED_ENTITY_NAME);

        ses = sesFactory1.openSession();

        try {
            // Failed update must not have changed the cached version.
            assertEquals(ver, ((VersionedEntity)ses.load(VersionedEntity.class, 0)).getVersion());
        }
        finally {
            ses.close();
        }

        assertEquals(1, stats1.getElementCountInMemory());
        assertEquals(1, stats1.getHitCount());
        assertEquals(1, stats2.getElementCountInMemory());
        assertEquals(1, stats2.getHitCount());
    }
    finally {
        cleanup();
    }
}

/**
 * Runs the natural-ID cache scenario for every supported access type.
 *
 * @throws Exception If failed.
 */
public void testNaturalIdCache() throws Exception {
    // Test is muted; remove this 'fail' once the referenced issue is resolved.
    fail("https://issues.apache.org/jira/browse/IGNITE-1084");

    for (AccessType accessType : accessTypes())
        testNaturalIdCache(accessType);
}

/**
 * @param accessType Cache access type.
 * @throws Exception If failed.
 */
private void testNaturalIdCache(AccessType accessType) throws Exception {
    createSessionFactories(accessType);

    // Expected entity ID per natural ID (name), kept in sync with every DB mutation below.
    Map<String, Integer> nameToId = new HashMap<>();

    try {
        Session ses = sesFactory1.openSession();

        try {
            Transaction tx = ses.beginTransaction();

            for (int i = 0; i < 3; i++) {
                String name = "name-" + i;

                ses.save(new Entity(i, name));

                nameToId.put(name, i);
            }

            tx.commit();
        }
        finally {
            ses.close();
        }

        ses = sesFactory1.openSession();

        try {
            // Loading by natural ID populates the natural-ID cache region.
            for (Map.Entry<String, Integer> e : nameToId.entrySet())
                ((Entity)ses.bySimpleNaturalId(Entity.class).load(e.getKey())).getId();
        }
        finally {
            ses.close();
        }

        assertNaturalIdCache(sesFactory2, nameToId, "name-100");
        assertNaturalIdCache(sesFactory1, nameToId, "name-100");

        // READ_ONLY caches cannot be updated, so the mutation part is skipped.
        if (accessType == AccessType.READ_ONLY)
            return;

        // Update naturalId.
        ses = sesFactory1.openSession();

        try {
            Transaction tx = ses.beginTransaction();

            Entity e1 = (Entity)ses.load(Entity.class, 1);

            nameToId.remove(e1.getName());

            e1.setName("name-1-changed1");

            nameToId.put(e1.getName(), e1.getId());

            tx.commit();
        }
        finally {
            ses.close();
        }

        // The old name must now miss on both factories.
        assertNaturalIdCache(sesFactory2, nameToId, "name-1");
        assertNaturalIdCache(sesFactory1, nameToId, "name-1");

        // Update entity using another SessionFactory.
        ses = sesFactory2.openSession();

        try {
            Transaction tx = ses.beginTransaction();

            Entity e1 = (Entity)ses.load(Entity.class, 1);

            nameToId.remove(e1.getName());

            e1.setName("name-1-changed2");

            nameToId.put(e1.getName(), e1.getId());

            tx.commit();
        }
        finally {
            ses.close();
        }

        assertNaturalIdCache(sesFactory2, nameToId, "name-1-changed1");
        assertNaturalIdCache(sesFactory1, nameToId, "name-1-changed1");

        // Try invalid NaturalId update.
        ses = sesFactory1.openSession();

        Transaction tx = ses.beginTransaction();

        try {
            Entity e1 = (Entity)ses.load(Entity.class, 1);

            e1.setName("name-0"); // Invalid update (duplicated name).

            tx.commit();

            fail("Commit must fail.");
        }
        catch (ConstraintViolationException e) {
            log.info("Expected exception: " + e);

            tx.rollback();
        }
        finally {
            ses.close();
        }

        // Failed rename must not have touched the natural-ID cache.
        assertNaturalIdCache(sesFactory2, nameToId);
        assertNaturalIdCache(sesFactory1, nameToId);

        // Delete entity.
        ses = sesFactory2.openSession();

        try {
            tx = ses.beginTransaction();

            Entity e2 = (Entity)ses.load(Entity.class, 2);

            ses.delete(e2);

            nameToId.remove(e2.getName());

            tx.commit();
        }
        finally {
            ses.close();
        }

        assertNaturalIdCache(sesFactory2, nameToId, "name-2");
        assertNaturalIdCache(sesFactory1, nameToId, "name-2");
    }
    finally {
        cleanup();
    }
}

/**
 * Runs the failing-transaction scenario for every supported access type.
 *
 * @throws Exception If failed.
 */
public void testEntityCacheTransactionFails() throws Exception {
    for (AccessType accessType : accessTypes())
        testEntityCacheTransactionFails(accessType);
}

/**
 * @param accessType Cache access type.
 * @throws Exception If failed.
*/
private void testEntityCacheTransactionFails(AccessType accessType) throws Exception {
    createSessionFactories(accessType);

    // Expected entity name per ID, kept in sync with every *successful* DB mutation below.
    Map<Integer, String> idToName = new HashMap<>();

    try {
        Session ses = sesFactory1.openSession();

        try {
            Transaction tx = ses.beginTransaction();

            for (int i = 0; i < 3; i++) {
                String name = "name-" + i;

                ses.save(new Entity(i, name));

                idToName.put(i, name);
            }

            tx.commit();
        }
        finally {
            ses.close();
        }

        assertEntityCache(ENTITY_NAME, sesFactory2, idToName, 100);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName, 100);

        ses = sesFactory1.openSession();

        Transaction tx = ses.beginTransaction();

        try {
            ses.save(new Entity(3, "name-3")); // Valid insert.

            ses.save(new Entity(0, "name-0")); // Invalid insert (duplicated ID).

            tx.commit();

            fail("Commit must fail.");
        }
        catch (PersistenceException e) {
            log.info("Expected exception: " + e);

            tx.rollback();
        }
        finally {
            ses.close();
        }

        // The rolled-back insert must not leak into the cache: ID 3 still misses.
        assertEntityCache(ENTITY_NAME, sesFactory2, idToName, 3);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName, 3);

        // READ_ONLY caches cannot be updated, so the update/delete part is skipped.
        if (accessType == AccessType.READ_ONLY)
            return;

        ses = sesFactory1.openSession();

        tx = ses.beginTransaction();

        try {
            Entity e0 = (Entity)ses.load(Entity.class, 0);
            Entity e1 = (Entity)ses.load(Entity.class, 1);

            e0.setName("name-10"); // Valid update.
            e1.setName("name-2"); // Invalid update (violates unique constraint).

            ses.update(e0);
            ses.update(e1);

            tx.commit();

            fail("Commit must fail.");
        }
        catch (PersistenceException e) {
            log.info("Expected exception: " + e);

            tx.rollback();
        }
        finally {
            ses.close();
        }

        // Even the 'valid' update from the failed transaction must be rolled back in cache.
        assertEntityCache(ENTITY_NAME, sesFactory2, idToName);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName);

        ses = sesFactory1.openSession();

        try {
            // Create parent entity referencing Entity with ID = 0.
            tx = ses.beginTransaction();

            ses.save(new ParentEntity(0, (Entity) ses.load(Entity.class, 0)));

            tx.commit();
        }
        finally {
            ses.close();
        }

        ses = sesFactory1.openSession();

        tx = ses.beginTransaction();

        try {
            ses.save(new Entity(3, "name-3")); // Valid insert.

            Entity e1 = (Entity)ses.load(Entity.class, 1);

            e1.setName("name-10"); // Valid update.

            ses.delete(ses.load(Entity.class, 0)); // Invalid delete (there is a parent entity referencing it).

            tx.commit();

            fail("Commit must fail.");
        }
        catch (PersistenceException e) {
            log.info("Expected exception: " + e);

            tx.rollback();
        }
        finally {
            ses.close();
        }

        assertEntityCache(ENTITY_NAME, sesFactory2, idToName, 3);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName, 3);

        ses = sesFactory1.openSession();

        tx = ses.beginTransaction();

        try {
            ses.delete(ses.load(Entity.class, 1)); // Valid delete.

            idToName.remove(1);

            ses.delete(ses.load(Entity.class, 0)); // Invalid delete (there is a parent entity referencing it).

            tx.commit();

            fail("Commit must fail.");
        }
        catch (PersistenceException e) {
            log.info("Expected exception: " + e);

            tx.rollback();
        }
        finally {
            ses.close();
        }

        assertEntityCache(ENTITY_NAME, sesFactory2, idToName);
        assertEntityCache(ENTITY_NAME, sesFactory1, idToName);
    }
    finally {
        cleanup();
    }
}

/**
 * Runs the query-cache scenario for every supported access type.
 *
 * @throws Exception If failed.
 */
public void testQueryCache() throws Exception {
    for (AccessType accessType : accessTypes())
        testQueryCache(accessType);
}

/**
 * @param accessType Cache access type.
 * @throws Exception If failed.
 */
private void testQueryCache(AccessType accessType) throws Exception {
    createSessionFactories(accessType);

    try {
        Session ses = sesFactory1.openSession();

        try {
            Transaction tx = ses.beginTransaction();

            for (int i = 0; i < 5; i++)
                ses.save(new Entity(i, "name-" + i));

            tx.commit();
        }
        finally {
            ses.close();
        }

        // Run some queries.
        ses = sesFactory1.openSession();

        try {
            Query qry1 = ses.createQuery("from " + ENTITY_NAME + " where id > 2");

            qry1.setCacheable(true);

            assertEquals(2, qry1.list().size());

            Query qry2 = ses.createQuery("from " + ENTITY_NAME + " where name = 'name-0'");

            qry2.setCacheable(true);

            assertEquals(1, qry2.list().size());
        }
        finally {
            ses.close();
        }

        // First execution of each query: miss + put, no hits yet.
        assertEquals(0, sesFactory1.getStatistics().getQueryCacheHitCount());
        assertEquals(2, sesFactory1.getStatistics().getQueryCacheMissCount());
        assertEquals(2, sesFactory1.getStatistics().getQueryCachePutCount());

        // Run queries using another SessionFactory.
        ses = sesFactory2.openSession();

        try {
            Query qry1 = ses.createQuery("from " + ENTITY_NAME + " where id > 2");

            qry1.setCacheable(true);

            assertEquals(2, qry1.list().size());

            Query qry2 = ses.createQuery("from " + ENTITY_NAME + " where name = 'name-0'");

            qry2.setCacheable(true);

            assertEquals(1, qry2.list().size());

            Query qry3 = ses.createQuery("from " + ENTITY_NAME + " where id > 1");

            qry3.setCacheable(true);

            assertEquals(3, qry3.list().size());
        }
        finally {
            ses.close();
        }

        // The two queries already cached by factory1 hit; only the new query misses.
        assertEquals(2, sesFactory2.getStatistics().getQueryCacheHitCount());
        assertEquals(1, sesFactory2.getStatistics().getQueryCacheMissCount());
        assertEquals(1, sesFactory2.getStatistics().getQueryCachePutCount());

        // Update entity, it should invalidate query cache.
        ses = sesFactory1.openSession();

        try {
            Transaction tx = ses.beginTransaction();

            ses.save(new Entity(5, "name-5"));

            tx.commit();
        }
        finally {
            ses.close();
        }

        // Run queries.
        ses = sesFactory1.openSession();

        sesFactory1.getStatistics().clear();

        try {
            Query qry1 = ses.createQuery("from " + ENTITY_NAME + " where id > 2");

            qry1.setCacheable(true);

            assertEquals(3, qry1.list().size());

            Query qry2 = ses.createQuery("from " + ENTITY_NAME + " where name = 'name-0'");

            qry2.setCacheable(true);

            assertEquals(1, qry2.list().size());
        }
        finally {
            ses.close();
        }

        // Insert invalidated cached results, so both queries miss again.
        assertEquals(0, sesFactory1.getStatistics().getQueryCacheHitCount());
        assertEquals(2, sesFactory1.getStatistics().getQueryCacheMissCount());
        assertEquals(2, sesFactory1.getStatistics().getQueryCachePutCount());

        // Clear query cache using another SessionFactory.
        sesFactory2.getCache().evictDefaultQueryRegion();

        ses = sesFactory1.openSession();

        // Run queries again.
        sesFactory1.getStatistics().clear();

        try {
            Query qry1 = ses.createQuery("from " + ENTITY_NAME + " where id > 2");

            qry1.setCacheable(true);

            assertEquals(3, qry1.list().size());

            Query qry2 = ses.createQuery("from " + ENTITY_NAME + " where name = 'name-0'");

            qry2.setCacheable(true);

            assertEquals(1, qry2.list().size());
        }
        finally {
            ses.close();
        }

        // Eviction via factory2 must also be visible to factory1: misses again.
        assertEquals(0, sesFactory1.getStatistics().getQueryCacheHitCount());
        assertEquals(2, sesFactory1.getStatistics().getQueryCacheMissCount());
        assertEquals(2, sesFactory1.getStatistics().getQueryCachePutCount());
    }
    finally {
        cleanup();
    }
}

/**
 * Runs the region-clear scenario for every supported access type.
 *
 * @throws Exception If failed.
 */
public void testRegionClear() throws Exception {
    for (AccessType accessType : accessTypes())
        testRegionClear(accessType);
}

/**
 * @param accessType Cache access type.
 * @throws Exception If failed.
*/
private void testRegionClear(AccessType accessType) throws Exception {
    createSessionFactories(accessType);

    try {
        final int ENTITY_CNT = 100;

        Session ses = sesFactory1.openSession();

        try {
            Transaction tx = ses.beginTransaction();

            for (int i = 0; i < ENTITY_CNT; i++) {
                Entity e = new Entity(i, "name-" + i);

                Collection<ChildEntity> children = new ArrayList<>();

                for (int j = 0; j < 3; j++)
                    children.add(new ChildEntity());

                e.setChildren(children);

                ses.save(e);
            }

            tx.commit();
        }
        finally {
            ses.close();
        }

        // Populate entity, natural-ID and collection regions via the second factory.
        loadEntities(sesFactory2, ENTITY_CNT);

        SecondLevelCacheStatistics stats1 = sesFactory1.getStatistics().getSecondLevelCacheStatistics(ENTITY_NAME);
        SecondLevelCacheStatistics stats2 = sesFactory2.getStatistics().getSecondLevelCacheStatistics(ENTITY_NAME);

        NaturalIdCacheStatistics idStats1 = sesFactory1.getStatistics().getNaturalIdCacheStatistics(NATURAL_ID_REGION);
        NaturalIdCacheStatistics idStats2 = sesFactory2.getStatistics().getNaturalIdCacheStatistics(NATURAL_ID_REGION);

        SecondLevelCacheStatistics colStats1 = sesFactory1.getStatistics().getSecondLevelCacheStatistics(CHILD_COLLECTION_REGION);
        SecondLevelCacheStatistics colStats2 = sesFactory2.getStatistics().getSecondLevelCacheStatistics(CHILD_COLLECTION_REGION);

        assertEquals(ENTITY_CNT, stats1.getElementCountInMemory());
        assertEquals(ENTITY_CNT, stats2.getElementCountInMemory());

        assertEquals(ENTITY_CNT, idStats1.getElementCountInMemory());
        assertEquals(ENTITY_CNT, idStats2.getElementCountInMemory());

        assertEquals(ENTITY_CNT, colStats1.getElementCountInMemory());
        assertEquals(ENTITY_CNT, colStats2.getElementCountInMemory());

        // Test cache is cleared after update query.
        ses = sesFactory1.openSession();

        try {
            Transaction tx = ses.beginTransaction();

            // Bulk DML matches no rows, but must still invalidate the affected regions.
            ses.createQuery("delete from " + ENTITY_NAME + " where name='no such name'").executeUpdate();
            ses.createQuery("delete from " + ChildEntity.class.getName() + " where id=-1").executeUpdate();

            tx.commit();
        }
        finally {
            ses.close();
        }

        assertEquals(0, stats1.getElementCountInMemory());
        assertEquals(0, stats2.getElementCountInMemory());

        assertEquals(0, idStats1.getElementCountInMemory());
        assertEquals(0, idStats2.getElementCountInMemory());

        assertEquals(0, colStats1.getElementCountInMemory());
        assertEquals(0, colStats2.getElementCountInMemory());

        // Load some data in cache.
        loadEntities(sesFactory1, 10);

        assertEquals(10, stats1.getElementCountInMemory());
        assertEquals(10, stats2.getElementCountInMemory());

        assertEquals(10, idStats1.getElementCountInMemory());
        assertEquals(10, idStats2.getElementCountInMemory());

        // Test evictAll method.
        sesFactory2.getCache().evictEntityRegion(ENTITY_NAME);

        assertEquals(0, stats1.getElementCountInMemory());
        assertEquals(0, stats2.getElementCountInMemory());

        sesFactory2.getCache().evictNaturalIdRegion(ENTITY_NAME);

        assertEquals(0, idStats1.getElementCountInMemory());
        assertEquals(0, idStats2.getElementCountInMemory());

        sesFactory2.getCache().evictCollectionRegion(CHILD_COLLECTION_REGION);

        assertEquals(0, colStats1.getElementCountInMemory());
        assertEquals(0, colStats2.getElementCountInMemory());
    }
    finally {
        cleanup();
    }
}

/**
 * Checks natural-ID cache contents and hit/miss statistics for the given factory.
 *
 * @param sesFactory Session factory.
 * @param nameToId Name-ID mapping.
 * @param absentNames Absent entities' names.
 */
private void assertNaturalIdCache(SessionFactory sesFactory, Map<String, Integer> nameToId, String... absentNames) {
    sesFactory.getStatistics().clear();

    NaturalIdCacheStatistics stats = sesFactory.getStatistics().getNaturalIdCacheStatistics(NATURAL_ID_REGION);

    // Baseline counters, so the deltas below are independent of prior activity.
    long hitBefore = stats.getHitCount();
    long missBefore = stats.getMissCount();

    final Session ses = sesFactory.openSession();

    try {
        // Every known name must resolve to its expected ID...
        for (Map.Entry<String, Integer> e : nameToId.entrySet())
            assertEquals((int)e.getValue(), ((Entity)ses.bySimpleNaturalId(Entity.class).load(e.getKey())).getId());

        // ...and every absent name must resolve to null.
        for (String name : absentNames)
            assertNull((ses.bySimpleNaturalId(Entity.class).load(name)));

        assertEquals(nameToId.size() + hitBefore, stats.getHitCount());
        assertEquals(absentNames.length + missBefore, stats.getMissCount());
    }
    finally {
        ses.close();
    }
}

/**
 * Checks collection cache hit/miss statistics for the given factory.
 *
 * @param sesFactory Session factory.
 * @param idToChildCnt Number of children per entity.
 * @param expHit Expected cache hits.
 * @param expMiss Expected cache misses.
 */
@SuppressWarnings("unchecked")
private void assertCollectionCache(SessionFactory sesFactory, Map<Integer, Integer> idToChildCnt, int expHit,
    int expMiss) {
    sesFactory.getStatistics().clear();

    Session ses = sesFactory.openSession();

    try {
        for(Map.Entry<Integer, Integer> e : idToChildCnt.entrySet()) {
            Entity entity = (Entity)ses.load(Entity.class, e.getKey());

            assertEquals((int)e.getValue(), entity.getChildren().size());
        }
    }
    finally {
        ses.close();
    }

    SecondLevelCacheStatistics stats = sesFactory.getStatistics().getSecondLevelCacheStatistics(CHILD_COLLECTION_REGION);

    assertEquals(expHit, stats.getHitCount());
    assertEquals(expMiss, stats.getMissCount());
}

/**
 * @param sesFactory Session factory.
 * @param cnt Number of entities to load.
*/
private void loadEntities(SessionFactory sesFactory, int cnt) {
    Session ses = sesFactory.openSession();

    try {
        for (int i = 0; i < cnt; i++) {
            Entity e = (Entity)ses.load(Entity.class, i);

            assertEquals("name-" + i, e.getName());

            assertFalse(e.getChildren().isEmpty());

            // Also load by natural ID so the natural-ID cache region gets populated too.
            ses.bySimpleNaturalId(Entity.class).load(e.getName());
        }
    }
    finally {
        ses.close();
    }
}

/**
 * Checks entity cache contents and hit/miss statistics for the given factory.
 *
 * @param entityName Entity name.
 * @param sesFactory Session factory.
 * @param idToName ID to name mapping.
 * @param absentIds Absent entities' IDs.
 */
private void assertEntityCache(String entityName, SessionFactory sesFactory, Map<Integer, String> idToName,
    Integer... absentIds) {
    assert entityName.equals(ENTITY_NAME) || entityName.equals(ENTITY2_NAME) : entityName;

    sesFactory.getStatistics().clear();

    final Session ses = sesFactory.openSession();

    final boolean entity1 = entityName.equals(ENTITY_NAME);

    try {
        if (entity1) {
            for (Map.Entry<Integer, String> e : idToName.entrySet())
                assertEquals(e.getValue(), ((Entity)ses.load(Entity.class, e.getKey())).getName());
        }
        else {
            for (Map.Entry<Integer, String> e : idToName.entrySet())
                assertEquals(e.getValue(), ((Entity2)ses.load(Entity2.class, e.getKey())).getName());
        }

        // Loading an absent ID must throw once the lazy proxy is actually touched.
        for (final int id : absentIds) {
            GridTestUtils.assertThrows(log, new Callable<Void>() {
                @Override public Void call() throws Exception {
                    if (entity1)
                        ((Entity)ses.load(Entity.class, id)).getName();
                    else
                        ((Entity2)ses.load(Entity2.class, id)).getName();

                    return null;
                }
            }, ObjectNotFoundException.class, null);
        }

        SecondLevelCacheStatistics stats = sesFactory.getStatistics().getSecondLevelCacheStatistics(entityName);

        assertEquals(idToName.size(), stats.getHitCount());
        assertEquals(absentIds.length, stats.getMissCount());
    }
    finally {
        ses.close();
    }
}

/**
 * Creates session factories.
 *
 * @param accessType Cache access type.
*/
private void createSessionFactories(AccessType accessType) {
    // One SessionFactory per Ignite node; tests verify cache coherence between them.
    sesFactory1 = startHibernate(accessType, getTestIgniteInstanceName(0));
    sesFactory2 = startHibernate(accessType, getTestIgniteInstanceName(1));
}

/**
 * Starts Hibernate.
 *
 * @param accessType Cache access type.
 * @param igniteInstanceName Ignite instance name.
 * @return Session factory.
 */
private SessionFactory startHibernate(org.hibernate.cache.spi.access.AccessType accessType, String igniteInstanceName) {
    StandardServiceRegistryBuilder builder = registryBuilder();

    builder.applySetting(HBM2DDL_AUTO, "create");
    builder.applySetting(GENERATE_STATISTICS, "true"); // Tests assert on Hibernate statistics counters.
    builder.applySetting(USE_SECOND_LEVEL_CACHE, "true");
    builder.applySetting(USE_QUERY_CACHE, "true");
    builder.applySetting(CACHE_REGION_FACTORY, HibernateRegionFactory.class.getName());
    builder.applySetting(RELEASE_CONNECTIONS, "on_close");
    builder.applySetting(IGNITE_INSTANCE_NAME_PROPERTY, igniteInstanceName);

    // Use the same cache for Entity and Entity2.
    builder.applySetting(REGION_CACHE_PROPERTY + ENTITY2_NAME, ENTITY_NAME);

    builder.applySetting(DFLT_ACCESS_TYPE_PROPERTY, accessType.name());
    builder.applySetting(Environment.DIALECT, "org.hibernate.dialect.H2Dialect");
    builder.applySetting("hibernate.show_sql", false);

    StandardServiceRegistry srvcRegistry = builder.build();

    MetadataSources metadataSources = new MetadataSources(srvcRegistry);

    for (Class entityClass : getAnnotatedClasses())
        metadataSources.addAnnotatedClass(entityClass);

    Metadata metadata = metadataSources.buildMetadata();

    // Apply the requested concurrency strategy to every root entity and collection binding.
    for (PersistentClass entityBinding : metadata.getEntityBindings()) {
        if (!entityBinding.isInherited())
            ((RootClass)entityBinding).setCacheConcurrencyStrategy(accessType.getExternalName());
    }

    for (org.hibernate.mapping.Collection collectionBinding : metadata.getCollectionBindings())
        collectionBinding.setCacheConcurrencyStrategy(accessType.getExternalName());

    return metadata.buildSessionFactory();
}

/**
 * @return Entities classes.
*/
private Class[] getAnnotatedClasses() {
    return new Class[]{Entity.class, Entity2.class, VersionedEntity.class, ChildEntity.class, ParentEntity.class};
}

/**
 * Closes session factories and clears data from caches.
 *
 * @throws Exception If failed.
 */
private void cleanup() throws Exception {
    if (sesFactory1 != null)
        sesFactory1.close();

    sesFactory1 = null;

    if (sesFactory2 != null)
        sesFactory2.close();

    sesFactory2 = null;

    // Clear every Ignite cache so the next access-type iteration starts from an empty state.
    for (IgniteCacheProxy<?, ?> cache : ((IgniteKernal)grid(0)).caches())
        cache.clear();
}
}
package stsc.general.statistic;

import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.util.Date;

import org.joda.time.LocalDate;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import stsc.common.Day;
import stsc.common.Settings;
import stsc.common.Side;
import stsc.common.stocks.Stock;
import stsc.common.storage.StockStorage;
import stsc.general.trading.BrokerImpl;
import stsc.general.trading.TradingLog;
import stsc.storage.mocks.StockStorageMock;

/**
 * Tests for {@link StatisticsProcessor}: feeds it a scripted sequence of stock
 * days and trade records, then checks the calculated {@link Metrics} against
 * expected (partly golden) values.
 *
 * <p>NOTE(review): the tests rely on pre-increment of the day indexes
 * (e.g. {@code gd(aapl, ++aaplIndex)}) so that each {@code setStockDay} call
 * advances exactly one trading day; the order of these statements is
 * significant and must not be reordered.
 */
public class StatisticsProcessorTest {

	// Shared mock storage with historical data for "aapl", "adm" and "spy".
	private static final StockStorage stockStorage = StockStorageMock.getStockStorage();

	@Rule
	public TemporaryFolder testFolder = new TemporaryFolder();

	/** Shorthand: day at index {@code i} of stock {@code s}. */
	private Day gd(Stock s, int i) {
		return s.getDays().get(i);
	}

	/** Shorthand: date of the day at index {@code i} of stock {@code s}. */
	private Date gdd(Stock s, int i) {
		return gd(s, i).getDate();
	}

	/**
	 * Opens a LONG aapl position and a SHORT adm position, closes both one day
	 * later, and verifies period, max win/loss and the final equity value
	 * against a commission-adjusted recomputation of the price differences.
	 */
	@Test
	public void testStatistics() throws Exception {
		final Stock aapl = stockStorage.getStock("aapl").get();
		final Stock adm = stockStorage.getStock("adm").get();
		int aaplIndex = aapl.findDayIndex(new LocalDate(2013, 9, 4).toDate());
		int admIndex = adm.findDayIndex(new LocalDate(2013, 9, 4).toDate());

		final TradingLog tradingLog = new BrokerImpl(stockStorage).getTradingLog();
		final StatisticsProcessor statistics = new StatisticsProcessor(tradingLog);

		// Day 1: open positions.
		statistics.setStockDay("aapl", gd(aapl, ++aaplIndex));
		statistics.setStockDay("adm", gd(adm, ++admIndex));
		final int aaplLongSize = 100;
		final int admShortSize = 200;
		tradingLog.addBuyRecord(gdd(aapl, aaplIndex), "aapl", Side.LONG, aaplLongSize);
		tradingLog.addBuyRecord(gdd(adm, admIndex), "adm", Side.SHORT, admShortSize);
		statistics.processEod();

		// Day 2: record entry prices (open of the day after the buy) and sell.
		statistics.setStockDay("aapl", gd(aapl, ++aaplIndex));
		statistics.setStockDay("adm", gd(adm, ++admIndex));
		final double aaplLongIn = gd(aapl, aaplIndex).getPrices().getOpen();
		final double admShortIn = gd(adm, admIndex).getPrices().getOpen();
		tradingLog.addSellRecord(gdd(aapl, aaplIndex), "aapl", Side.LONG, aaplLongSize);
		tradingLog.addSellRecord(gdd(adm, admIndex), "adm", Side.SHORT, admShortSize);
		statistics.processEod();

		// Day 3: exit prices are the next day's open.
		statistics.setStockDay("aapl", gd(aapl, ++aaplIndex));
		statistics.setStockDay("adm", gd(adm, ++admIndex));
		final double aaplLongOut = gd(aapl, aaplIndex).getPrices().getOpen();
		final double admShortOut = gd(adm, admIndex).getPrices().getOpen();
		statistics.processEod();

		final Metrics metrics = statistics.calculate();

		// Recompute commission-adjusted P&L; short P&L is negated.
		final double aaplPriceDiff = aaplLongSize * aaplLongOut * (1.0 - statistics.getCommision()) - aaplLongIn * aaplLongSize * (1.0 + statistics.getCommision());
		final double admPriceDiff = -(admShortSize * admShortOut * (1.0 - statistics.getCommision()) - admShortSize * admShortIn * (1.0 + statistics.getCommision()));

		Assert.assertEquals(3, metrics.getIntegerMetric(MetricType.period).intValue());
		Assert.assertEquals(Math.max(0.0, Math.max(aaplPriceDiff, admPriceDiff)), metrics.getDoubleMetric(MetricType.maxWin), Settings.doubleEpsilon);
		Assert.assertEquals(-Math.min(0.0, Math.min(admPriceDiff, aaplPriceDiff)), metrics.getDoubleMetric(MetricType.maxLoss), Settings.doubleEpsilon);
		Assert.assertEquals(aaplPriceDiff + admPriceDiff, metrics.getEquityCurveInMoney().getLastElement().value, Settings.doubleEpsilon);
	}

	/**
	 * Mirror of {@link #testStatistics()} with the sides swapped (SHORT aapl,
	 * LONG adm); the expected price differences are negated accordingly.
	 */
	@Test
	public void testReverseStatistics() throws Exception {
		final Stock aapl = stockStorage.getStock("aapl").get();
		final Stock adm = stockStorage.getStock("adm").get();
		int aaplIndex = aapl.findDayIndex(new LocalDate(2013, 9, 4).toDate());
		int admIndex = adm.findDayIndex(new LocalDate(2013, 9, 4).toDate());

		TradingLog tradingLog = new BrokerImpl(stockStorage).getTradingLog();
		StatisticsProcessor statistics = new StatisticsProcessor(tradingLog);

		// Day 1: open positions (sides reversed relative to testStatistics).
		statistics.setStockDay("aapl", gd(aapl, ++aaplIndex));
		statistics.setStockDay("adm", gd(adm, ++admIndex));
		final int aaplLongSize = 100;
		final int admShortSize = 200;
		tradingLog.addBuyRecord(gdd(aapl, aaplIndex), "aapl", Side.SHORT, aaplLongSize);
		tradingLog.addBuyRecord(gdd(adm, admIndex), "adm", Side.LONG, admShortSize);
		statistics.processEod();

		// Day 2: entry prices, then close both positions.
		statistics.setStockDay("aapl", gd(aapl, ++aaplIndex));
		statistics.setStockDay("adm", gd(adm, ++admIndex));
		final double aaplLongIn = gd(aapl, aaplIndex).getPrices().getOpen();
		final double admShortIn = gd(adm, admIndex).getPrices().getOpen();
		tradingLog.addSellRecord(gdd(aapl, aaplIndex), "aapl", Side.SHORT, aaplLongSize);
		tradingLog.addSellRecord(gdd(adm, admIndex), "adm", Side.LONG, admShortSize);
		statistics.processEod();

		// Day 3: exit prices.
		statistics.setStockDay("aapl", gd(aapl, ++aaplIndex));
		statistics.setStockDay("adm", gd(adm, ++admIndex));
		final double aaplLongOut = gd(aapl, aaplIndex).getPrices().getOpen();
		final double admShortOut = gd(adm, admIndex).getPrices().getOpen();
		statistics.processEod();

		Metrics metrics = statistics.calculate();

		// Signs are flipped relative to testStatistics because sides swapped.
		final double aaplPriceDiff = -(aaplLongSize * aaplLongOut * (1.0 - statistics.getCommision()) - aaplLongIn * aaplLongSize * (1.0 + statistics.getCommision()));
		final double admPriceDiff = admShortSize * admShortOut * (1.0 - statistics.getCommision()) - admShortSize * admShortIn * (1.0 + statistics.getCommision());

		Assert.assertEquals(3.0, metrics.getMetric(MetricType.period), Settings.doubleEpsilon);
		Assert.assertEquals(Math.max(0.0, Math.max(aaplPriceDiff, admPriceDiff)), metrics.getDoubleMetric(MetricType.maxWin), Settings.doubleEpsilon);
		Assert.assertEquals(-Math.min(0.0, Math.min(admPriceDiff, aaplPriceDiff)), metrics.getDoubleMetric(MetricType.maxLoss), Settings.doubleEpsilon);
		Assert.assertEquals(aaplPriceDiff + admPriceDiff, metrics.getEquityCurveInMoney().getLastElement().value, Settings.doubleEpsilon);
	}

	/**
	 * Scripted multi-position scenario over three stocks; checks probability
	 * metrics (freq, winProb, avWin/avLoss, kelly) against golden values and
	 * avGain/equity against a commission-adjusted recomputation.
	 */
	@Test
	public void testProbabilityStatistics() throws IOException {
		final Stock aapl = stockStorage.getStock("aapl").get();
		final Stock adm = stockStorage.getStock("adm").get();
		final Stock spy = stockStorage.getStock("spy").get();
		int aaplIndex = aapl.findDayIndex(new LocalDate(2013, 9, 4).toDate());
		int admIndex = adm.findDayIndex(new LocalDate(2013, 9, 4).toDate());
		int spyIndex = spy.findDayIndex(new LocalDate(2013, 9, 4).toDate());

		final TradingLog tradingLog = new BrokerImpl(stockStorage).getTradingLog();
		final StatisticsProcessor statistics = new StatisticsProcessor(tradingLog);

		// Day 1: open initial positions in all three stocks.
		statistics.setStockDay("aapl", gd(aapl, ++aaplIndex));
		statistics.setStockDay("adm", gd(adm, ++admIndex));
		statistics.setStockDay("spy", gd(spy, ++spyIndex));
		tradingLog.addBuyRecord(gdd(aapl, aaplIndex), "aapl", Side.LONG, 100);
		tradingLog.addBuyRecord(gdd(adm, admIndex), "adm", Side.LONG, 200);
		tradingLog.addBuyRecord(gdd(spy, spyIndex), "spy", Side.SHORT, 30);
		statistics.processEod();

		// Day 2: add to the aapl/adm positions; spy day is skipped on purpose
		// (spyIndex advanced manually without a setStockDay call).
		statistics.setStockDay("aapl", gd(aapl, ++aaplIndex));
		statistics.setStockDay("adm", gd(adm, ++admIndex));
		final double aaplLongIn1 = gd(aapl, aaplIndex).getPrices().getOpen();
		final double admLongIn1 = gd(adm, admIndex).getPrices().getOpen();
		final double spyShortIn = gd(spy, spyIndex).getPrices().getOpen();
		spyIndex++;
		tradingLog.addBuyRecord(gdd(aapl, aaplIndex), "aapl", Side.LONG, 100);
		tradingLog.addBuyRecord(gdd(adm, admIndex), "adm", Side.LONG, 500);
		statistics.processEod();

		// Day 3: no trades, just end-of-day processing.
		statistics.setStockDay("aapl", gd(aapl, ++aaplIndex));
		statistics.setStockDay("adm", gd(adm, ++admIndex));
		statistics.setStockDay("spy", gd(spy, ++spyIndex));
		statistics.processEod();

		// Day 4: close everything.
		final double aaplLongIn2 = gd(aapl, aaplIndex).getPrices().getOpen();
		final double admLongIn2 = gd(adm, admIndex).getPrices().getOpen();
		tradingLog.addSellRecord(gdd(aapl, aaplIndex), "aapl", Side.LONG, 200);
		tradingLog.addSellRecord(gdd(adm, admIndex), "adm", Side.LONG, 700);
		tradingLog.addSellRecord(gdd(spy, spyIndex), "spy", Side.SHORT, 30);
		statistics.processEod();

		final double aaplLongOut = gd(aapl, aaplIndex).getPrices().getOpen();
		final double admLongOut = gd(adm, admIndex).getPrices().getOpen();
		final double spyShortOut = gd(spy, spyIndex).getPrices().getOpen();

		final Metrics metrics = statistics.calculate();

		// Recompute commission-adjusted P&L for each stock.
		final double c = statistics.getCommision();
		final double aaplDiff = aaplLongOut * 200 * (1 - c) - (aaplLongIn2 + aaplLongIn1) * 100 * (1 + c);
		final double admDiff = admLongOut * 700 * (1 - c) - admLongIn1 * 200 * (1 + c) - admLongIn2 * 500 * (1 + c);
		final double spyDiff = -spyShortOut * 30 * (1 - c) + spyShortIn * 30 * (1 + c);
		final double lastResult = aaplDiff + admDiff + spyDiff;
		final double expectedAvGain = 100.0 * lastResult / metrics.getMetric(MetricType.maxSpentMoney);

		Assert.assertEquals(4.0, metrics.getMetric(MetricType.period), Settings.doubleEpsilon);
		Assert.assertEquals(lastResult, metrics.getEquityCurveInMoney().getLastElement().value, Settings.doubleEpsilon);
		Assert.assertEquals(expectedAvGain, metrics.getMetric(MetricType.avGain), Settings.doubleEpsilon);
		Assert.assertEquals(0.75, metrics.getMetric(MetricType.freq), Settings.doubleEpsilon);
		Assert.assertEquals(0.333333, metrics.getMetric(MetricType.winProb), Settings.doubleEpsilon);
		Assert.assertEquals(62.450142, metrics.getMetric(MetricType.avWin), Settings.doubleEpsilon);
		Assert.assertEquals(67.199905, metrics.getMetric(MetricType.avLoss), Settings.doubleEpsilon);
		Assert.assertEquals(62.450142, aaplDiff, Settings.doubleEpsilon);
		Assert.assertEquals(-admDiff, metrics.getMetric(MetricType.maxLoss), Settings.doubleEpsilon);
		Assert.assertEquals(0.929318, metrics.getMetric(MetricType.avWinAvLoss), Settings.doubleEpsilon);
		Assert.assertEquals(-0.384037, metrics.getMetric(MetricType.kelly), Settings.doubleEpsilon);
	}

	// NOTE(review): the @Test annotation is deliberately commented out — this
	// golden-value test is disabled; do not re-enable without re-baselining.
	// @Test
	public void testEquityCurveOn518DaysStatistics() throws IOException {
		final Metrics stats = testTradingHelper(518, true);

		Assert.assertEquals(-16.710421, stats.getMetric(MetricType.avGain), Settings.doubleEpsilon);
		Assert.assertEquals(0.301158, stats.getMetric(MetricType.freq), Settings.doubleEpsilon);
		Assert.assertEquals(430.313881, stats.getMetric(MetricType.avWin), Settings.doubleEpsilon);
		Assert.assertEquals(-0.091019, stats.getMetric(MetricType.kelly), Settings.doubleEpsilon);
		Assert.assertEquals(-0.765763, stats.getMetric(MetricType.sharpeRatio), Settings.doubleEpsilon);
		Assert.assertEquals(-0.668416, stats.getMetric(MetricType.startMonthAvGain), Settings.doubleEpsilon);
		Assert.assertEquals(4.930223, stats.getMetric(MetricType.startMonthStDevGain), Settings.doubleEpsilon);
		Assert.assertEquals(15.162020, stats.getMetric(MetricType.startMonthMax), Settings.doubleEpsilon);
		Assert.assertEquals(-7.577820, stats.getMetric(MetricType.startMonthMin), Settings.doubleEpsilon);
		Assert.assertEquals(-21.378450, stats.getMetric(MetricType.month12AvGain), Settings.doubleEpsilon);
		Assert.assertEquals(9.683782, stats.getMetric(MetricType.month12StDevGain), Settings.doubleEpsilon);
		Assert.assertEquals(0.0, stats.getMetric(MetricType.month12Max), Settings.doubleEpsilon);
		Assert.assertEquals(-34.239761, stats.getMetric(MetricType.month12Min), Settings.doubleEpsilon);
		Assert.assertEquals(145, stats.getMetric(MetricType.ddDurationAverage), Settings.doubleEpsilon);
		Assert.assertEquals(675.0, stats.getMetric(MetricType.ddDurationMax), Settings.doubleEpsilon);
		Assert.assertEquals(12.568085, stats.getMetric(MetricType.ddValueAverage), Settings.doubleEpsilon);
		Assert.assertEquals(48.826069, stats.getMetric(MetricType.ddValueMax), Settings.doubleEpsilon);
	}

	// NOTE(review): disabled golden-value test (annotation commented out).
	// @Test
	public void testEquityCurveOn251DaysStatistics() throws IOException {
		final Metrics stats = testTradingHelper(251, true);

		Assert.assertEquals(-9.350847, stats.getMetric(MetricType.avGain), Settings.doubleEpsilon);
		Assert.assertEquals(0.298804, stats.getMetric(MetricType.freq), Settings.doubleEpsilon);
		Assert.assertEquals(499.964045, stats.getMetric(MetricType.avWin), Settings.doubleEpsilon);
		Assert.assertEquals(-0.077171, stats.getMetric(MetricType.kelly), Settings.doubleEpsilon);
		Assert.assertEquals(-0.611352, stats.getMetric(MetricType.sharpeRatio), Settings.doubleEpsilon);
		Assert.assertEquals(-0.779237, stats.getMetric(MetricType.startMonthAvGain), Settings.doubleEpsilon);
		Assert.assertEquals(6.554998, stats.getMetric(MetricType.startMonthStDevGain), Settings.doubleEpsilon);
		Assert.assertEquals(17.914611, stats.getMetric(MetricType.startMonthMax), Settings.doubleEpsilon);
		Assert.assertEquals(-6.845838, stats.getMetric(MetricType.startMonthMin), Settings.doubleEpsilon);
		Assert.assertEquals(-9.350847, stats.getMetric(MetricType.month12AvGain), Settings.doubleEpsilon);
		Assert.assertEquals(0.0, stats.getMetric(MetricType.month12StDevGain), Settings.doubleEpsilon);
		Assert.assertEquals(0.0, stats.getMetric(MetricType.month12Max), Settings.doubleEpsilon);
		Assert.assertEquals(-9.350847, stats.getMetric(MetricType.month12Min), Settings.doubleEpsilon);
		Assert.assertEquals(67.2, stats.getMetric(MetricType.ddDurationAverage), Settings.doubleEpsilon);
		Assert.assertEquals(286.0, stats.getMetric(MetricType.ddDurationMax), Settings.doubleEpsilon);
		Assert.assertEquals(10.581057, stats.getMetric(MetricType.ddValueAverage), Settings.doubleEpsilon);
		Assert.assertEquals(36.346692, stats.getMetric(MetricType.ddValueMax), Settings.doubleEpsilon);
	}

	// NOTE(review): disabled test (annotation commented out); also asserts an
	// exact on-disk CSV size (456 bytes), which is brittle across platforms.
	// @Test
	public void testStatisticsOnLastClose() throws IOException, IllegalArgumentException, IllegalAccessException {
		final Path testPath = FileSystems.getDefault().getPath(testFolder.getRoot().getAbsolutePath());
		final Metrics stats = testTradingHelper(3, false);
		stats.print(testPath.resolve("out.csv"));
		Assert.assertEquals(0.0, stats.getMetric(MetricType.ddValueMax), Settings.doubleEpsilon);
		final File file = testPath.resolve("out.csv").toFile();
		Assert.assertTrue(file.exists());
		Assert.assertEquals(456.0, file.length(), Settings.doubleEpsilon);
		file.delete();
	}

	/**
	 * Runs a scripted trading loop over aapl/adm/spy starting 2008-09-04:
	 * opens positions every {@code 2 * buySellEach} days, closes them
	 * {@code buySellEach} days later, optionally closing any open position on
	 * the last day.
	 *
	 * @param daysCount number of trading days to simulate
	 * @param closeOnExit whether to force-close open positions on the last day
	 * @return calculated metrics for the simulated period
	 */
	private Metrics testTradingHelper(int daysCount, boolean closeOnExit) throws IOException {
		final Stock aapl = stockStorage.getStock("aapl").get();
		final Stock adm = stockStorage.getStock("adm").get();
		final Stock spy = stockStorage.getStock("spy").get();
		int aaplIndex = aapl.findDayIndex(new LocalDate(2008, 9, 4).toDate());
		int admIndex = adm.findDayIndex(new LocalDate(2008, 9, 4).toDate());
		int spyIndex = spy.findDayIndex(new LocalDate(2008, 9, 4).toDate());

		TradingLog tradingLog = new BrokerImpl(stockStorage).getTradingLog();
		StatisticsProcessor statisticsProcessor = new StatisticsProcessor(tradingLog);

		final int buySellEach = 5;
		boolean opened = false;

		for (int i = 0; i < daysCount; ++i) {
			statisticsProcessor.setStockDay("aapl", gd(aapl, ++aaplIndex));
			statisticsProcessor.setStockDay("adm", gd(adm, ++admIndex));
			statisticsProcessor.setStockDay("spy", gd(spy, ++spyIndex));

			// Open on even multiples of buySellEach, close on odd multiples.
			if (i % buySellEach == 0 && i % (buySellEach * 2) == 0) {
				tradingLog.addBuyRecord(gdd(aapl, aaplIndex), "aapl", Side.SHORT, 100);
				tradingLog.addBuyRecord(gdd(adm, admIndex), "adm", Side.LONG, 200);
				tradingLog.addBuyRecord(gdd(spy, spyIndex), "spy", Side.SHORT, 100);
				opened = true;
			}
			if (i % buySellEach == 0 && i % (buySellEach * 2) != 0) {
				tradingLog.addSellRecord(gdd(aapl, aaplIndex), "aapl", Side.SHORT, 100);
				tradingLog.addSellRecord(gdd(adm, admIndex), "adm", Side.LONG, 200);
				tradingLog.addSellRecord(gdd(spy, spyIndex), "spy", Side.SHORT, 100);
				opened = false;
			}
			// Optionally flatten any open position on the final simulated day.
			if ((i == (daysCount - 1)) && opened && closeOnExit) {
				tradingLog.addSellRecord(gdd(aapl, aaplIndex), "aapl", Side.SHORT, 100);
				tradingLog.addSellRecord(gdd(adm, admIndex), "adm", Side.LONG, 200);
				tradingLog.addSellRecord(gdd(spy, spyIndex), "spy", Side.SHORT, 100);
				opened = false;
			}
			statisticsProcessor.processEod();
		}

		final Metrics metrics = statisticsProcessor.calculate();
		Assert.assertEquals(daysCount, metrics.getIntegerMetric(MetricType.period).intValue());

		return metrics;
	}
}
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package io.flutter.plugin.common;

import android.util.Log;
import io.flutter.BuildConfig;
import java.io.ByteArrayOutputStream;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

/**
 * MessageCodec using the Flutter standard binary encoding.
 *
 * <p>This codec is guaranteed to be compatible with the corresponding <a
 * href="https://docs.flutter.io/flutter/services/StandardMessageCodec-class.html">StandardMessageCodec</a>
 * on the Dart side. These parts of the Flutter SDK are evolved synchronously.
 *
 * <p>Supported messages are acyclic values of these forms:
 *
 * <ul>
 *   <li>null
 *   <li>Booleans
 *   <li>Bytes, Shorts, Integers, Longs
 *   <li>BigIntegers (see below)
 *   <li>Floats, Doubles
 *   <li>Strings
 *   <li>byte[], int[], long[], double[]
 *   <li>Lists of supported values
 *   <li>Maps with supported keys and values
 * </ul>
 *
 * <p>On the Dart side, these values are represented as follows:
 *
 * <ul>
 *   <li>null: null
 *   <li>Boolean: bool
 *   <li>Byte, Short, Integer, Long: int
 *   <li>Float, Double: double
 *   <li>String: String
 *   <li>byte[]: Uint8List
 *   <li>int[]: Int32List
 *   <li>long[]: Int64List
 *   <li>double[]: Float64List
 *   <li>List: List
 *   <li>Map: Map
 * </ul>
 *
 * <p>BigIntegers are represented in Dart as strings with the hexadecimal representation of the
 * integer's value.
 *
 * <p>To extend the codec, overwrite the writeValue and readValueOfType methods.
 */
public class StandardMessageCodec implements MessageCodec<Object> {
  private static final String TAG = "StandardMessageCodec#";
  public static final StandardMessageCodec INSTANCE = new StandardMessageCodec();

  @Override
  public ByteBuffer encodeMessage(Object message) {
    if (message == null) {
      return null;
    }
    // Serialize into a growable stream first, then copy once into a direct
    // buffer of the exact final size.
    final ExposedByteArrayOutputStream stream = new ExposedByteArrayOutputStream();
    writeValue(stream, message);
    final ByteBuffer buffer = ByteBuffer.allocateDirect(stream.size());
    buffer.put(stream.buffer(), 0, stream.size());
    return buffer;
  }

  @Override
  public Object decodeMessage(ByteBuffer message) {
    if (message == null) {
      return null;
    }
    message.order(ByteOrder.nativeOrder());
    final Object value = readValue(message);
    // Trailing bytes after a complete value indicate a corrupt message.
    if (message.hasRemaining()) {
      throw new IllegalArgumentException("Message corrupted");
    }
    return value;
  }

  private static final boolean LITTLE_ENDIAN = ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN;
  // Fix: use the canonical StandardCharsets constant instead of
  // Charset.forName("UTF8") — avoids a runtime alias lookup and cannot throw.
  private static final Charset UTF8 = StandardCharsets.UTF_8;

  // Type discriminator bytes; must stay in sync with the Dart-side codec.
  private static final byte NULL = 0;
  private static final byte TRUE = 1;
  private static final byte FALSE = 2;
  private static final byte INT = 3;
  private static final byte LONG = 4;
  private static final byte BIGINT = 5;
  private static final byte DOUBLE = 6;
  private static final byte STRING = 7;
  private static final byte BYTE_ARRAY = 8;
  private static final byte INT_ARRAY = 9;
  private static final byte LONG_ARRAY = 10;
  private static final byte DOUBLE_ARRAY = 11;
  private static final byte LIST = 12;
  private static final byte MAP = 13;

  /**
   * Writes an int representing a size to the specified stream. Uses an expanding code of 1 to 5
   * bytes to optimize for small values.
   */
  protected static final void writeSize(ByteArrayOutputStream stream, int value) {
    if (BuildConfig.DEBUG && 0 > value) {
      Log.e(TAG, "Attempted to write a negative size.");
    }
    if (value < 254) {
      stream.write(value);
    } else if (value <= 0xffff) {
      stream.write(254);
      writeChar(stream, value);
    } else {
      stream.write(255);
      writeInt(stream, value);
    }
  }

  /** Writes the least significant two bytes of the specified int to the specified stream. */
  protected static final void writeChar(ByteArrayOutputStream stream, int value) {
    if (LITTLE_ENDIAN) {
      stream.write(value);
      stream.write(value >>> 8);
    } else {
      stream.write(value >>> 8);
      stream.write(value);
    }
  }

  /** Writes the specified int as 4 bytes to the specified stream. */
  protected static final void writeInt(ByteArrayOutputStream stream, int value) {
    if (LITTLE_ENDIAN) {
      stream.write(value);
      stream.write(value >>> 8);
      stream.write(value >>> 16);
      stream.write(value >>> 24);
    } else {
      stream.write(value >>> 24);
      stream.write(value >>> 16);
      stream.write(value >>> 8);
      stream.write(value);
    }
  }

  /** Writes the specified long as 8 bytes to the specified stream. */
  protected static final void writeLong(ByteArrayOutputStream stream, long value) {
    if (LITTLE_ENDIAN) {
      stream.write((byte) value);
      stream.write((byte) (value >>> 8));
      stream.write((byte) (value >>> 16));
      stream.write((byte) (value >>> 24));
      stream.write((byte) (value >>> 32));
      stream.write((byte) (value >>> 40));
      stream.write((byte) (value >>> 48));
      stream.write((byte) (value >>> 56));
    } else {
      stream.write((byte) (value >>> 56));
      stream.write((byte) (value >>> 48));
      stream.write((byte) (value >>> 40));
      stream.write((byte) (value >>> 32));
      stream.write((byte) (value >>> 24));
      stream.write((byte) (value >>> 16));
      stream.write((byte) (value >>> 8));
      stream.write((byte) value);
    }
  }

  /** Writes the specified double as 8 bytes to the specified stream. */
  protected static final void writeDouble(ByteArrayOutputStream stream, double value) {
    writeLong(stream, Double.doubleToLongBits(value));
  }

  /** Writes the length and then the actual bytes of the specified array to the specified stream. */
  protected static final void writeBytes(ByteArrayOutputStream stream, byte[] bytes) {
    writeSize(stream, bytes.length);
    stream.write(bytes, 0, bytes.length);
  }

  /**
   * Writes a number of padding bytes to the specified stream to ensure that the next value is
   * aligned to a whole multiple of the specified alignment. An example usage with alignment = 8 is
   * to ensure doubles are word-aligned in the stream.
   */
  protected static final void writeAlignment(ByteArrayOutputStream stream, int alignment) {
    final int mod = stream.size() % alignment;
    if (mod != 0) {
      for (int i = 0; i < alignment - mod; i++) {
        stream.write(0);
      }
    }
  }

  /**
   * Writes a type discriminator byte and then a byte serialization of the specified value to the
   * specified stream.
   *
   * <p>Subclasses can extend the codec by overriding this method, calling super for values that the
   * extension does not handle.
   */
  protected void writeValue(ByteArrayOutputStream stream, Object value) {
    // value.equals(null) additionally catches objects that claim equality
    // with null (e.g. JSON-style null sentinels).
    if (value == null || value.equals(null)) {
      stream.write(NULL);
    } else if (value == Boolean.TRUE) {
      stream.write(TRUE);
    } else if (value == Boolean.FALSE) {
      stream.write(FALSE);
    } else if (value instanceof Number) {
      if (value instanceof Integer || value instanceof Short || value instanceof Byte) {
        stream.write(INT);
        writeInt(stream, ((Number) value).intValue());
      } else if (value instanceof Long) {
        stream.write(LONG);
        writeLong(stream, (long) value);
      } else if (value instanceof Float || value instanceof Double) {
        stream.write(DOUBLE);
        writeAlignment(stream, 8);
        writeDouble(stream, ((Number) value).doubleValue());
      } else if (value instanceof BigInteger) {
        // BigIntegers travel as hex strings (see class javadoc).
        stream.write(BIGINT);
        writeBytes(stream, ((BigInteger) value).toString(16).getBytes(UTF8));
      } else {
        throw new IllegalArgumentException("Unsupported Number type: " + value.getClass());
      }
    } else if (value instanceof String) {
      stream.write(STRING);
      writeBytes(stream, ((String) value).getBytes(UTF8));
    } else if (value instanceof byte[]) {
      stream.write(BYTE_ARRAY);
      writeBytes(stream, (byte[]) value);
    } else if (value instanceof int[]) {
      stream.write(INT_ARRAY);
      final int[] array = (int[]) value;
      writeSize(stream, array.length);
      writeAlignment(stream, 4);
      for (final int n : array) {
        writeInt(stream, n);
      }
    } else if (value instanceof long[]) {
      stream.write(LONG_ARRAY);
      final long[] array = (long[]) value;
      writeSize(stream, array.length);
      writeAlignment(stream, 8);
      for (final long n : array) {
        writeLong(stream, n);
      }
    } else if (value instanceof double[]) {
      stream.write(DOUBLE_ARRAY);
      final double[] array = (double[]) value;
      writeSize(stream, array.length);
      writeAlignment(stream, 8);
      for (final double d : array) {
        writeDouble(stream, d);
      }
    } else if (value instanceof List) {
      stream.write(LIST);
      final List<?> list = (List) value;
      writeSize(stream, list.size());
      for (final Object o : list) {
        writeValue(stream, o);
      }
    } else if (value instanceof Map) {
      stream.write(MAP);
      final Map<?, ?> map = (Map) value;
      writeSize(stream, map.size());
      for (final Entry<?, ?> entry : map.entrySet()) {
        writeValue(stream, entry.getKey());
        writeValue(stream, entry.getValue());
      }
    } else {
      throw new IllegalArgumentException("Unsupported value: " + value);
    }
  }

  /** Reads an int representing a size as written by writeSize. */
  protected static final int readSize(ByteBuffer buffer) {
    if (!buffer.hasRemaining()) {
      throw new IllegalArgumentException("Message corrupted");
    }
    final int value = buffer.get() & 0xff;
    if (value < 254) {
      return value;
    } else if (value == 254) {
      return buffer.getChar();
    } else {
      return buffer.getInt();
    }
  }

  /** Reads a byte array as written by writeBytes. */
  protected static final byte[] readBytes(ByteBuffer buffer) {
    final int length = readSize(buffer);
    final byte[] bytes = new byte[length];
    buffer.get(bytes);
    return bytes;
  }

  /** Reads alignment padding bytes as written by writeAlignment. */
  protected static final void readAlignment(ByteBuffer buffer, int alignment) {
    final int mod = buffer.position() % alignment;
    if (mod != 0) {
      buffer.position(buffer.position() + alignment - mod);
    }
  }

  /** Reads a value as written by writeValue. */
  protected final Object readValue(ByteBuffer buffer) {
    if (!buffer.hasRemaining()) {
      throw new IllegalArgumentException("Message corrupted");
    }
    final byte type = buffer.get();
    return readValueOfType(type, buffer);
  }

  /**
   * Reads a value of the specified type.
   *
   * <p>Subclasses may extend the codec by overriding this method, calling super for types that the
   * extension does not handle.
   */
  protected Object readValueOfType(byte type, ByteBuffer buffer) {
    final Object result;
    switch (type) {
      case NULL:
        result = null;
        break;
      case TRUE:
        result = true;
        break;
      case FALSE:
        result = false;
        break;
      case INT:
        result = buffer.getInt();
        break;
      case LONG:
        result = buffer.getLong();
        break;
      case BIGINT:
        {
          final byte[] hex = readBytes(buffer);
          result = new BigInteger(new String(hex, UTF8), 16);
          break;
        }
      case DOUBLE:
        readAlignment(buffer, 8);
        result = buffer.getDouble();
        break;
      case STRING:
        {
          final byte[] bytes = readBytes(buffer);
          result = new String(bytes, UTF8);
          break;
        }
      case BYTE_ARRAY:
        {
          result = readBytes(buffer);
          break;
        }
      case INT_ARRAY:
        {
          // asIntBuffer() shares position but reads through a view, so the
          // backing buffer's position must be advanced manually afterwards.
          final int length = readSize(buffer);
          final int[] array = new int[length];
          readAlignment(buffer, 4);
          buffer.asIntBuffer().get(array);
          result = array;
          buffer.position(buffer.position() + 4 * length);
          break;
        }
      case LONG_ARRAY:
        {
          final int length = readSize(buffer);
          final long[] array = new long[length];
          readAlignment(buffer, 8);
          buffer.asLongBuffer().get(array);
          result = array;
          buffer.position(buffer.position() + 8 * length);
          break;
        }
      case DOUBLE_ARRAY:
        {
          final int length = readSize(buffer);
          final double[] array = new double[length];
          readAlignment(buffer, 8);
          buffer.asDoubleBuffer().get(array);
          result = array;
          buffer.position(buffer.position() + 8 * length);
          break;
        }
      case LIST:
        {
          final int size = readSize(buffer);
          final List<Object> list = new ArrayList<>(size);
          for (int i = 0; i < size; i++) {
            list.add(readValue(buffer));
          }
          result = list;
          break;
        }
      case MAP:
        {
          final int size = readSize(buffer);
          final Map<Object, Object> map = new HashMap<>();
          for (int i = 0; i < size; i++) {
            map.put(readValue(buffer), readValue(buffer));
          }
          result = map;
          break;
        }
      default:
        throw new IllegalArgumentException("Message corrupted");
    }
    return result;
  }

  /** Exposes the internal buffer of a ByteArrayOutputStream to avoid a defensive copy. */
  static final class ExposedByteArrayOutputStream extends ByteArrayOutputStream {
    byte[] buffer() {
      return buf;
    }
  }
}
/*
 * Copyright (c) 2012 Guidewire Software, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package gw.vark;

import gw.internal.gosu.parser.IGosuAnnotation;
import gw.lang.Gosu;
import gw.lang.launch.IDefaultProgramSource;
import gw.lang.launch.IProgramSource;
import gw.lang.mode.GosuMode;
import gw.lang.mode.RequiresInit;
import gw.lang.parser.IDynamicFunctionSymbol;
import gw.lang.parser.exceptions.ParseResultsException;
import gw.lang.parser.statements.IFunctionStatement;
import gw.lang.reflect.IMethodInfo;
import gw.lang.reflect.IType;
import gw.lang.reflect.TypeSystem;
import gw.util.GosuExceptionUtil;
import gw.util.StreamUtil;
import gw.vark.typeloader.AntlibTypeLoader;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.BuildLogger;
import org.apache.tools.ant.DefaultLogger;
import org.apache.tools.ant.ExitStatusException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.util.ClasspathUtils;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.util.List;
import java.util.Properties;

// TODO - test that the project base dir is right if we're using a URL-based program source
/**
 * Gosu launcher mode that locates, parses and runs an Aardvark vark buildfile
 * on top of an Ant {@link Project}.
 */
@RequiresInit
public class Aardvark extends GosuMode {
  public static final String DEFAULT_BUILD_FILE_NAME = "build.vark";

  public static final int GOSU_MODE_PRIORITY_AARDVARK_HELP = 0;
  public static final int GOSU_MODE_PRIORITY_AARDVARK_VERSION = 1;
  public static final int GOSU_MODE_PRIORITY_AARDVARK_INTERACTIVE = 2;
  public static final int GOSU_MODE_PRIORITY_AARDVARK_EDITOR = 3;
  public static final int GOSU_MODE_PRIORITY_AARDVARK = 4;

  private static BuildLogger _logger;
  private static Project _antProjectInstance;

  static final int EXITCODE_VARKFILE_NOT_FOUND = 4;
  static final int EXITCODE_GOSU_VERIFY_FAILED = 8;

  private static String RAW_VARK_FILE_PATH = "";

  /**
   * @return the current Ant project.
   * @throws NoProjectInstanceException if no project has been set yet.
   */
  public static Project getProject() {
    if (_antProjectInstance == null) {
      throw new NoProjectInstanceException();
    }
    return _antProjectInstance;
  }

  /**
   * Installs the given project as the current one, optionally swapping in a
   * new build logger (the previous logger is removed from the project first).
   */
  public static void setProject(Project project, BuildLogger logger) {
    _antProjectInstance = project;
    if (logger != null) {
      project.removeBuildListener(_logger);
      _logger = logger;
      logger.setMessageOutputLevel(Project.MSG_INFO);
      logger.setOutputPrintStream(System.out);
      logger.setErrorPrintStream(System.err);
      project.addBuildListener(logger);
    }
  }

  @SuppressWarnings("UnusedDeclaration")
  public static String getRawVarkFilePath() {
    return RAW_VARK_FILE_PATH;
  }

  // this is a convenience when working in a dev environment when we might not want to use the Launcher
  public static void main( String... args ) throws Exception {
    Gosu.main(args);
  }

  private AardvarkOptions _options;

  public Aardvark() {
    this(new DefaultLogger());
  }

  Aardvark(BuildLogger logger) {
    logger.setMessageOutputLevel( Project.MSG_INFO );
    logger.setOutputPrintStream(System.out);
    logger.setErrorPrintStream(System.err);
    _logger = logger;
  }

  @Override
  public int getPriority() {
    return GOSU_MODE_PRIORITY_AARDVARK;
  }

  @Override
  public boolean accept() {
    _options = new AardvarkOptions(_argInfo);
    return true;
  }

  /**
   * Parses the vark buildfile and runs either the help listing or the
   * requested targets.
   *
   * @return 0 on success, {@link #EXITCODE_VARKFILE_NOT_FOUND} if the
   *     buildfile is missing, {@link #EXITCODE_GOSU_VERIFY_FAILED} on parse
   *     errors, otherwise the build's exit status.
   */
  @Override
  public int run() throws Exception {
    RAW_VARK_FILE_PATH = _argInfo.getProgramSource().getRawPath();
    AardvarkProgram aardvarkProject;

    Project antProject = new Project();
    setProject(antProject, _logger);

    // NOTE(review): a logger created here replaces _logger but is not
    // registered as a build listener on antProject (setProject already ran
    // with the old logger) — confirm this is intentional.
    if (_options.getLogger() != null) {
      _logger = newLogger(_options.getLogger());
    }
    _logger.setMessageOutputLevel(_options.getLogLevel().getLevel());

    if ("true".equals(System.getProperty("aardvark.dev"))) {
      System.err.println("aardvark.dev is on");
      pushAntlibTypeloader();
    }

    IProgramSource programSource = _argInfo.getProgramSource();
    InputStream in = null;
    try {
      in = programSource.openInputStream();
      log("Buildfile: " + programSource.getRawPath());
      aardvarkProject = AardvarkProgram.parseWithTimer(antProject, programSource.getFile(), in);
    }
    catch (FileNotFoundException e) {
      if (programSource instanceof IDefaultProgramSource) {
        logErr("Default vark buildfile " + Aardvark.DEFAULT_BUILD_FILE_NAME + " doesn't exist");
      }
      else {
        logErr("Specified vark buildfile " + programSource.getRawPath() + " doesn't exist");
      }
      return EXITCODE_VARKFILE_NOT_FOUND;
    }
    catch (ParseResultsException e) {
      logErr(e.getMessage());
      return EXITCODE_GOSU_VERIFY_FAILED;
    }
    finally {
      try {
        StreamUtil.close(in);
      }
      catch (IOException ignored) {
        // best-effort close; the program source has already been consumed
      }
    }

    int exitCode = 1;
    try {
      try {
        if (_options.isHelp()) {
          aardvarkProject.printProjectHelp();
        }
        else {
          aardvarkProject.runBuild(_options.getTargetCalls());
        }
        exitCode = 0;
      }
      catch (ExitStatusException ese) {
        exitCode = ese.getStatus();
        if (exitCode != 0) {
          throw ese;
        }
      }
    }
    catch (BuildException e) {
      //printMessage(e); // (logger should have displayed the message along with "BUILD FAILED"
    }
    catch (Throwable e) {
      e.printStackTrace();
      printMessage(e);
    }
    return exitCode;
  }

  /** Registers the Antlib type loader with the current Gosu module. */
  public static void pushAntlibTypeloader() {
    AntlibTypeLoader loader = new AntlibTypeLoader(TypeSystem.getCurrentModule());
    TypeSystem.pushTypeLoader(TypeSystem.getCurrentModule(), loader);
    loader.init();
  }

  private void printMessage(Throwable t) {
    String message = t.getMessage();
    if (message != null) {
      logErr(message);
    }
  }

  /**
   * A method is a vark target if it is public and either carries the
   * {@code @Target} annotation or is a no-arg method declared directly on the
   * program type.
   */
  public static boolean isTargetMethod(IType gosuProgram, IMethodInfo methodInfo) {
    return methodInfo.isPublic()
            && (methodInfo.hasAnnotation(TypeSystem.get(gw.vark.annotations.Target.class))
                || (methodInfo.getParameters().length == 0 && methodInfo.getOwnersType().equals( gosuProgram )));
  }

  /** Parse-tree flavor of {@link #isTargetMethod(IType, IMethodInfo)}. */
  public static boolean isTargetMethod(IFunctionStatement target) {
    if (target != null && target.getDynamicFunctionSymbol() != null && target.getDynamicFunctionSymbol().getModifierInfo() != null) {
      IDynamicFunctionSymbol dfs = target.getDynamicFunctionSymbol();
      return isPublic(dfs.getModifiers())
              && (findAnnotation(dfs.getModifierInfo().getAnnotations(), TypeSystem.get(gw.vark.annotations.Target.class))
                  || dfs.getArgs().size() == 0);
    }
    return false;
  }

  // Gosu default visibility counts as public here: only explicit
  // private/protected exclude a method.
  private static boolean isPublic(int mod) {
    return !Modifier.isPrivate(mod) && !Modifier.isProtected(mod);
  }

  private static boolean findAnnotation(List<IGosuAnnotation> annotations, IType annotationType) {
    for (IGosuAnnotation annotation : annotations) {
      if (annotation.getExpression().getType().equals(annotationType)) {
        return true;
      }
    }
    return false;
  }

  private BuildLogger newLogger(String loggerClassName) {
    try {
      return (BuildLogger) ClasspathUtils.newInstance(loggerClassName, Aardvark.class.getClassLoader(), BuildLogger.class);
    }
    catch (BuildException e) {
      logErr("The specified logger class " + loggerClassName + " could not be used because " + e.getMessage());
      throw e;
    }
  }

  private void log(String message) {
    getProject().log(message);
  }

  private void logErr(String message) {
    getProject().log(message, Project.MSG_ERR);
  }

  /**
   * Reads the Aardvark version from the embedded Maven pom.properties.
   *
   * @return a human-readable version string, or a placeholder for dev builds.
   */
  public static String getVersion() {
    URL versionResource = Aardvark.class.getResource("/META-INF/maven/org.gosu-lang.aardvark/aardvark-core/pom.properties");
    if (versionResource == null) {
      return "Aardvark version (unknown development build)";
    }
    InputStream in = null;
    try {
      in = versionResource.openStream();
      Properties props = new Properties();
      props.load(in);
      // Fix: removed an unused Reader that opened a second stream from the
      // same resource and was never read or closed (resource leak).
      return "Aardvark version " + props.getProperty("version");
    }
    catch (IOException e) {
      throw GosuExceptionUtil.forceThrow(e);
    }
    finally {
      StreamUtil.closeNoThrow(in);
    }
  }
}
package com.vaadin.data.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.util.Collection;

import com.vaadin.data.Container;
import com.vaadin.data.Container.Hierarchical;
import com.vaadin.data.Container.Sortable;
import com.vaadin.data.Item;

/**
 * Base class for tests of {@link Container.Hierarchical} implementations.
 * Provides generic validation of the hierarchy invariants (root/parent/child
 * consistency) plus reusable test scenarios operating on the sample class-name
 * data set inherited from {@link AbstractContainerTestBase}.
 */
public abstract class AbstractHierarchicalContainerTestBase extends
        AbstractContainerTestBase {

    /**
     * Validates the given hierarchical container: first runs the flat
     * container checks from the superclass, then verifies root and
     * parent/child invariants, and finally checks that traversing from the
     * roots reaches exactly {@code expectedSize} nodes.
     *
     * @param container
     *            The container to validate
     * @param expectedFirstItemId
     *            Expected first item id
     * @param expectedLastItemId
     *            Expected last item id
     * @param itemIdInSet
     *            An item id that is in the container
     * @param itemIdNotInSet
     *            An item id that is not in the container
     * @param checkGetItemNull
     *            true if getItem() should return null for itemIdNotInSet,
     *            false to skip the check (container.containsId() is checked in
     *            any case)
     * @param expectedSize
     *            Expected number of items in the container. Not related to
     *            hierarchy.
     * @param expectedRootSize
     *            Expected number of root items
     * @param rootsHaveChildren
     *            true if all roots have children, false otherwise (skips some
     *            asserts)
     */
    protected void validateHierarchicalContainer(Hierarchical container,
            Object expectedFirstItemId, Object expectedLastItemId,
            Object itemIdInSet, Object itemIdNotInSet,
            boolean checkGetItemNull, int expectedSize, int expectedRootSize,
            boolean rootsHaveChildren) {
        validateContainer(container, expectedFirstItemId, expectedLastItemId,
                itemIdInSet, itemIdNotInSet, checkGetItemNull, expectedSize);

        // rootItemIds
        Collection<?> rootIds = container.rootItemIds();
        assertEquals(expectedRootSize, rootIds.size());

        for (Object rootId : rootIds) {
            // All roots must be in container
            assertTrue(container.containsId(rootId));

            // All roots must have no parent
            assertNull(container.getParent(rootId));

            // all roots must be roots
            assertTrue(container.isRoot(rootId));

            if (rootsHaveChildren) {
                // all roots have children allowed in this case
                assertTrue(container.areChildrenAllowed(rootId));

                // all roots have children in this case
                Collection<?> children = container.getChildren(rootId);
                assertNotNull(rootId + " should have children", children);
                assertTrue(rootId + " should have children",
                        (children.size() > 0));
                // getParent must point back to the root for each child
                for (Object childId : children) {
                    assertEquals(container.getParent(childId), rootId);
                }
            }
        }

        // isRoot should return false for unknown items
        assertFalse(container.isRoot(itemIdNotInSet));

        // hasChildren should return false for unknown items
        assertFalse(container.hasChildren(itemIdNotInSet));

        // areChildrenAllowed should return false for unknown items
        assertFalse(container.areChildrenAllowed(itemIdNotInSet));

        // removeItem of unknown items should return false
        assertFalse(container.removeItem(itemIdNotInSet));

        // Every item must be reachable by traversing from the roots
        assertEquals(expectedSize, countNodes(container));

        validateHierarchy(container);
    }

    /**
     * Counts all nodes reachable by traversing from the container's roots.
     */
    private int countNodes(Hierarchical container) {
        int totalNodes = 0;
        for (Object rootId : container.rootItemIds()) {
            totalNodes += countNodes(container, rootId);
        }

        return totalNodes;
    }

    /**
     * Counts the nodes in the subtree rooted at {@code itemId}, including the
     * item itself.
     */
    private int countNodes(Hierarchical container, Object itemId) {
        int nodes = 1; // This node

        Collection<?> children = container.getChildren(itemId);
        if (children != null) {
            for (Object id : children) {
                nodes += countNodes(container, id);
            }
        }

        return nodes;
    }

    /**
     * Validates parent/child consistency for every subtree in the container.
     */
    private void validateHierarchy(Hierarchical container) {
        for (Object rootId : container.rootItemIds()) {
            validateHierarchy(container, rootId, null);
        }
    }

    /**
     * Recursively validates that {@code itemId} reports {@code parentId} as
     * its parent and that the children-allowed flag agrees with the actual
     * children.
     */
    private void validateHierarchy(Hierarchical container, Object itemId,
            Object parentId) {
        Collection<?> children = container.getChildren(itemId);

        // getParent
        assertEquals(container.getParent(itemId), parentId);

        if (!container.areChildrenAllowed(itemId)) {
            // If no children is allowed the item should have no children
            assertFalse(container.hasChildren(itemId));
            assertTrue(children == null || children.size() == 0);
            return;
        }
        if (children != null) {
            for (Object id : children) {
                validateHierarchy(container, id, itemId);
            }
        }
    }

    /**
     * Populates the container with the sample data and validates the
     * resulting hierarchy.
     */
    protected void testHierarchicalContainer(Container.Hierarchical container) {
        initializeContainer(container);

        // 21 "com.*" packages + 3 "org.*" packages in the sample data
        int packages = 21 + 3;
        int expectedSize = sampleData.length + packages;
        validateHierarchicalContainer(container, "com",
                "org.vaadin.test.LastClass",
                "com.vaadin.server.ApplicationResource", "blah", true,
                expectedSize, 2, true);
    }

    /**
     * Verifies that sorting on the fully qualified name (and its reverse)
     * keeps the hierarchy valid and orders items as expected.
     */
    protected void testHierarchicalSorting(Container.Hierarchical container) {
        Container.Sortable sortable = (Sortable) container;

        initializeContainer(container);

        // Must be able to sort based on PROP1 and PROP2 for this test
        assertTrue(sortable.getSortableContainerPropertyIds().contains(
                FULLY_QUALIFIED_NAME));
        assertTrue(sortable.getSortableContainerPropertyIds().contains(
                REVERSE_FULLY_QUALIFIED_NAME));

        sortable.sort(new Object[] { FULLY_QUALIFIED_NAME },
                new boolean[] { true });

        int packages = 21 + 3;
        int expectedSize = sampleData.length + packages;
        validateHierarchicalContainer(container, "com",
                "org.vaadin.test.LastClass",
                "com.vaadin.server.ApplicationResource", "blah", true,
                expectedSize, 2, true);

        sortable.sort(new Object[] { REVERSE_FULLY_QUALIFIED_NAME },
                new boolean[] { true });

        validateHierarchicalContainer(container,
                "com.vaadin.server.ApplicationPortlet2",
                "com.vaadin.data.util.ObjectProperty",
                "com.vaadin.server.ApplicationResource", "blah", true,
                expectedSize, 2, true);
    }

    /**
     * Clears the container and fills it with the sample class names, creating
     * one item per package segment (the hierarchy) and one leaf item per
     * class.
     */
    protected void initializeContainer(Container.Hierarchical container) {
        container.removeAllItems();
        Object[] propertyIds = container.getContainerPropertyIds().toArray();
        for (Object propertyId : propertyIds) {
            container.removeContainerProperty(propertyId);
        }

        container.addContainerProperty(FULLY_QUALIFIED_NAME, String.class, "");
        container.addContainerProperty(SIMPLE_NAME, String.class, "");
        container.addContainerProperty(REVERSE_FULLY_QUALIFIED_NAME,
                String.class, null);
        container.addContainerProperty(ID_NUMBER, Integer.class, null);

        for (int i = 0; i < sampleData.length; i++) {
            String id = sampleData[i];

            // Add path as parent
            String[] paths = id.split("\\.");
            String path = paths[0];
            // Adds "com" and other items multiple times so should return null
            // for all but the first time
            if (container.addItem(path) != null) {
                assertTrue(container.setChildrenAllowed(path, false));
                Item item = container.getItem(path);
                item.getItemProperty(FULLY_QUALIFIED_NAME).setValue(path);
                item.getItemProperty(SIMPLE_NAME).setValue(getSimpleName(path));
                item.getItemProperty(REVERSE_FULLY_QUALIFIED_NAME).setValue(
                        reverse(path));
                item.getItemProperty(ID_NUMBER).setValue(1);
            }
            for (int j = 1; j < paths.length; j++) {
                String parent = path;
                path = path + "." + paths[j];

                // Adds "com" and other items multiple times so should return
                // null for all but the first time
                if (container.addItem(path) != null) {
                    assertTrue(container.setChildrenAllowed(path, false));

                    Item item = container.getItem(path);
                    item.getItemProperty(FULLY_QUALIFIED_NAME).setValue(path);
                    item.getItemProperty(SIMPLE_NAME).setValue(
                            getSimpleName(path));
                    item.getItemProperty(REVERSE_FULLY_QUALIFIED_NAME)
                            .setValue(reverse(path));
                    item.getItemProperty(ID_NUMBER).setValue(1);
                }
                assertTrue(container.setChildrenAllowed(parent, true));
                assertTrue(
                        "Failed to set " + parent + " as parent for " + path,
                        container.setParent(path, parent));
            }

            // The class itself becomes a leaf under its package item
            Item item = container.getItem(id);
            assertNotNull(item);
            String parent = id.substring(0, id.lastIndexOf('.'));
            assertTrue(container.setParent(id, parent));
            item.getItemProperty(FULLY_QUALIFIED_NAME).setValue(sampleData[i]);
            item.getItemProperty(SIMPLE_NAME).setValue(
                    getSimpleName(sampleData[i]));
            item.getItemProperty(REVERSE_FULLY_QUALIFIED_NAME).setValue(
                    reverse(sampleData[i]));
            item.getItemProperty(ID_NUMBER).setValue(i % 2);
        }
    }

    /**
     * Verifies that recursively removing the "org" subtree leaves the
     * container with the expected items and a single remaining root.
     */
    protected void testRemoveHierarchicalWrapperSubtree(
            Container.Hierarchical container) {
        initializeContainer(container);

        // remove root item
        removeItemRecursively(container, "org");

        // 3 "org.*" packages and one "org.*" class were removed
        int packages = 21 + 3 - 3;
        int expectedSize = sampleData.length + packages - 1;

        validateContainer(container, "com", "com.vaadin.util.SerializerHelper",
                "com.vaadin.server.ApplicationResource", "blah", true,
                expectedSize);

        // rootItemIds
        Collection<?> rootIds = container.rootItemIds();
        assertEquals(1, rootIds.size());
    }

    /**
     * Removes {@code itemId} and its whole subtree from the container, using
     * the wrapper's own recursive removal when available.
     */
    private void removeItemRecursively(Container.Hierarchical container,
            Object itemId) {
        if (container instanceof ContainerHierarchicalWrapper) {
            // Fixed: previously removed the hardcoded id "org" here,
            // silently ignoring the itemId parameter. Remove the requested
            // subtree instead (behavior is unchanged for the only current
            // caller, which passes "org").
            ((ContainerHierarchicalWrapper) container)
                    .removeItemRecursively(itemId);
        } else {
            HierarchicalContainer.removeItemRecursively(container, itemId);
        }
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.devtools.build.lib.rules.cpp;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.actions.ActionExecutionContext;
import com.google.devtools.build.lib.actions.ActionOwner;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ResourceSet;
import com.google.devtools.build.lib.analysis.actions.AbstractFileWriteAction;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.util.Fingerprint;
import com.google.devtools.build.lib.vfs.PathFragment;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;

/**
 * Creates C++ module map artifact genfiles. These are then passed to Clang to
 * do dependency checking.
 */
@Immutable
public final class CppModuleMapAction extends AbstractFileWriteAction {

  // Guards the action cache key against changes in this class's output format;
  // bump when the written module map text changes.
  private static final String GUID = "4f407081-1951-40c1-befc-d6b4daff5de3";

  // C++ module map of the current target
  private final CppModuleMap cppModuleMap;

  /**
   * If set, the paths in the module map are relative to the current working directory instead
   * of relative to the module map file's location.
   */
  private final boolean moduleMapHomeIsCwd;

  // Data required to build the actual module map.
  // NOTE: If you add a field here, you'll likely need to add it to the cache key in computeKey().
  private final ImmutableList<Artifact> privateHeaders;
  private final ImmutableList<Artifact> publicHeaders;
  private final ImmutableList<CppModuleMap> dependencies;
  private final ImmutableList<PathFragment> additionalExportedHeaders;
  private final boolean compiledModule;
  private final boolean generateSubmodules;
  private final boolean externDependencies;

  /**
   * Creates the action. Iterables are snapshotted into immutable lists, so
   * later mutation of the arguments does not affect the action. The sole
   * output is {@code cppModuleMap.getArtifact()}; the action has no inputs.
   */
  public CppModuleMapAction(
      ActionOwner owner,
      CppModuleMap cppModuleMap,
      Iterable<Artifact> privateHeaders,
      Iterable<Artifact> publicHeaders,
      Iterable<CppModuleMap> dependencies,
      Iterable<PathFragment> additionalExportedHeaders,
      boolean compiledModule,
      boolean moduleMapHomeIsCwd,
      boolean generateSubmodules,
      boolean externDependencies) {
    super(owner, ImmutableList.<Artifact>of(), cppModuleMap.getArtifact(),
        /*makeExecutable=*/false);
    this.cppModuleMap = cppModuleMap;
    this.moduleMapHomeIsCwd = moduleMapHomeIsCwd;
    this.privateHeaders = ImmutableList.copyOf(privateHeaders);
    this.publicHeaders = ImmutableList.copyOf(publicHeaders);
    this.dependencies = ImmutableList.copyOf(dependencies);
    this.additionalExportedHeaders = ImmutableList.copyOf(additionalExportedHeaders);
    this.compiledModule = compiledModule;
    this.generateSubmodules = generateSubmodules;
    this.externDependencies = externDependencies;
  }

  /**
   * Returns a writer that renders the module map text. The output depends only
   * on this action's immutable fields, so repeated writes are deterministic.
   */
  @Override
  public DeterministicWriter newDeterministicWriter(ActionExecutionContext ctx)  {
    return new DeterministicWriter() {
      @Override
      public void writeOutputFile(OutputStream out) throws IOException {
        StringBuilder content = new StringBuilder();
        PathFragment fragment = cppModuleMap.getArtifact().getExecPath();
        int segmentsToExecPath = fragment.segmentCount() - 1;

        // For details about the different header types, see:
        // http://clang.llvm.org/docs/Modules.html#header-declaration
        // Unless moduleMapHomeIsCwd, header paths are prefixed with one ".."
        // per directory segment of the module map file to make them relative
        // to its location.
        String leadingPeriods = moduleMapHomeIsCwd ? "" : Strings.repeat("../", segmentsToExecPath);
        content.append("module \"").append(cppModuleMap.getName()).append("\" {\n");
        content.append("  export *\n");

        // Tracks already-emitted header paths so a header listed both publicly
        // and privately (or twice) is only declared once.
        HashSet<PathFragment> deduper = new HashSet<>();
        for (Artifact artifact : publicHeaders) {
          appendHeader(
              content, "", artifact.getExecPath(), leadingPeriods, /*canCompile=*/ true, deduper);
        }
        for (Artifact artifact : privateHeaders) {
          appendHeader(content, "private", artifact.getExecPath(), leadingPeriods,
              /*canCompile=*/ true, deduper);
        }
        for (PathFragment additionalExportedHeader : additionalExportedHeaders) {
          appendHeader(
              content, "", additionalExportedHeader, leadingPeriods, /*canCompile*/ false, deduper);
        }
        for (CppModuleMap dep : dependencies) {
          content.append("  use \"").append(dep.getName()).append("\"\n");
        }
        content.append("}");
        if (externDependencies) {
          // Declare where each dependency's own module map file lives so Clang
          // can resolve the "use" declarations above.
          for (CppModuleMap dep : dependencies) {
            content
                .append("\nextern module \"")
                .append(dep.getName())
                .append("\" \"")
                .append(leadingPeriods)
                .append(dep.getArtifact().getExecPath())
                .append("\"");
          }
        }
        // NOTE(review): written as Latin-1 bytes; assumes exec paths and module
        // names contain no characters outside ISO-8859-1 — confirm.
        out.write(content.toString().getBytes(StandardCharsets.ISO_8859_1));
      }
    };
  }

  /**
   * Appends a single header declaration to {@code content}, skipping paths
   * already present in {@code deduper}. When {@code generateSubmodules} is
   * set, each header is wrapped in its own submodule named after the path.
   * Headers that cannot or should not be compiled are declared "textual".
   */
  private void appendHeader(StringBuilder content, String visibilitySpecifier, PathFragment path,
      String leadingPeriods, boolean canCompile, HashSet<PathFragment> deduper) {
    if (deduper.contains(path)) {
      return;
    }
    deduper.add(path);
    if (generateSubmodules) {
      content.append("  module \"").append(path).append("\" {\n");
      content.append("    export *\n  ");
    }
    content.append("  ");
    if (!visibilitySpecifier.isEmpty()) {
      content.append(visibilitySpecifier).append(" ");
    }
    if (!canCompile || !shouldCompileHeader(path)) {
      content.append("textual ");
    }
    content.append("header \"").append(leadingPeriods).append(path).append("\"");
    if (generateSubmodules) {
      content.append("\n  }");
    }
    content.append("\n");
  }

  /**
   * Returns whether the header at {@code path} should be compiled into the
   * module: only when this map describes a compiled module and the file is not
   * a textual include.
   */
  private boolean shouldCompileHeader(PathFragment path) {
    return compiledModule && !CppFileTypes.CPP_TEXTUAL_INCLUDE.matches(path);
  }

  @Override
  public String getMnemonic() {
    return "CppModuleMap";
  }

  /**
   * Computes the action cache key from every field that influences the written
   * output (see the NOTE on the field declarations above).
   */
  @Override
  protected String computeKey() {
    Fingerprint f = new Fingerprint();
    f.addString(GUID);
    f.addInt(privateHeaders.size());
    for (Artifact artifact : privateHeaders) {
      f.addPath(artifact.getExecPath());
    }
    f.addInt(publicHeaders.size());
    for (Artifact artifact : publicHeaders) {
      f.addPath(artifact.getExecPath());
    }
    f.addInt(dependencies.size());
    for (CppModuleMap dep : dependencies) {
      f.addPath(dep.getArtifact().getExecPath());
    }
    f.addInt(additionalExportedHeaders.size());
    for (PathFragment path : additionalExportedHeaders) {
      f.addPath(path);
    }
    f.addPath(cppModuleMap.getArtifact().getExecPath());
    f.addString(cppModuleMap.getName());
    f.addBoolean(moduleMapHomeIsCwd);
    f.addBoolean(compiledModule);
    f.addBoolean(generateSubmodules);
    f.addBoolean(externDependencies);
    return f.hexDigestAndReset();
  }

  /** Writing a small text file is I/O-bound; no memory/CPU cost is charged. */
  @Override
  public ResourceSet estimateResourceConsumptionLocal() {
    return ResourceSet.createWithRamCpuIo(/*memoryMb=*/0, /*cpuUsage=*/0, /*ioUsage=*/0.02);
  }

  @VisibleForTesting
  public Collection<Artifact> getPublicHeaders() {
    return publicHeaders;
  }

  @VisibleForTesting
  public Collection<Artifact> getPrivateHeaders() {
    return privateHeaders;
  }

  @VisibleForTesting
  public ImmutableList<PathFragment> getAdditionalExportedHeaders() {
    return additionalExportedHeaders;
  }

  /** Returns the module map artifacts of all direct dependencies. */
  @VisibleForTesting
  public Collection<Artifact> getDependencyArtifacts() {
    List<Artifact> artifacts = new ArrayList<>();
    for (CppModuleMap map : dependencies) {
      artifacts.add(map.getArtifact());
    }
    return artifacts;
  }
}
package org.apache.lucene.search.similarities;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.SmallFloat;

/**
 * A subclass of {@code Similarity} that provides a simplified API for its
 * descendants. Subclasses are only required to implement the {@link #score}
 * and {@link #toString()} methods. Implementing
 * {@link #explain(Explanation, BasicStats, int, float, float)} is optional,
 * inasmuch as SimilarityBase already provides a basic explanation of the score
 * and the term frequency. However, implementers of a subclass are encouraged to
 * include as much detail about the scoring method as possible.
 * <p>
 * Note: multi-word queries such as phrase queries are scored in a different way
 * than Lucene's default ranking algorithm: whereas it "fakes" an IDF value for
 * the phrase as a whole (since it does not know it), this class instead scores
 * phrases as a summation of the individual term scores.
 * @lucene.experimental
 */
public abstract class SimilarityBase extends Similarity {
  /** For {@link #log2(double)}. Precomputed for efficiency reasons. */
  private static final double LOG_2 = Math.log(2);

  /**
   * True if overlap tokens (tokens with a position of increment of zero) are
   * discounted from the document's length.
   */
  protected boolean discountOverlaps = true;

  /**
   * Sole constructor. (For invocation by subclass
   * constructors, typically implicit.)
   */
  public SimilarityBase() {}

  /** Determines whether overlap tokens (Tokens with
   *  0 position increment) are ignored when computing
   *  norm.  By default this is true, meaning overlap
   *  tokens do not count when computing norms.
   *
   *  @lucene.experimental
   *
   *  @see #computeNorm
   */
  public void setDiscountOverlaps(boolean v) {
    discountOverlaps = v;
  }

  /**
   * Returns true if overlap tokens are discounted from the document's length.
   * @see #setDiscountOverlaps
   */
  public boolean getDiscountOverlaps() {
    return discountOverlaps;
  }

  /**
   * Builds one {@link BasicStats} per term; for a single term the stats object
   * itself is returned, for multi-term queries (e.g. phrases) the per-term
   * stats are bundled into a {@link MultiSimilarity.MultiStats}.
   */
  @Override
  public final SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats,
      TermStatistics... termStats) {
    BasicStats stats[] = new BasicStats[termStats.length];
    for (int i = 0; i < termStats.length; i++) {
      stats[i] = newStats(collectionStats.field(), queryBoost);
      fillBasicStats(stats[i], collectionStats, termStats[i]);
    }
    return stats.length == 1 ? stats[0] : new MultiSimilarity.MultiStats(stats);
  }

  /** Factory method to return a custom stats object */
  protected BasicStats newStats(String field, float queryBoost) {
    return new BasicStats(field, queryBoost);
  }

  /** Fills all member fields defined in {@code BasicStats} in {@code stats}.
   *  Subclasses can override this method to fill additional stats. */
  protected void fillBasicStats(BasicStats stats, CollectionStatistics collectionStats,
      TermStatistics termStats) {
    // #positions(field) must be >= #positions(term)
    assert collectionStats.sumTotalTermFreq() == -1
        || collectionStats.sumTotalTermFreq() >= termStats.totalTermFreq();
    long numberOfDocuments = collectionStats.maxDoc();

    long docFreq = termStats.docFreq();
    long totalTermFreq = termStats.totalTermFreq();

    // codec does not supply totalTermFreq: substitute docFreq
    if (totalTermFreq == -1) {
      totalTermFreq = docFreq;
    }

    final long numberOfFieldTokens;
    final float avgFieldLength;

    long sumTotalTermFreq = collectionStats.sumTotalTermFreq();

    if (sumTotalTermFreq <= 0) {
      // field does not exist;
      // We have to provide something if codec doesnt supply these measures,
      // or if someone omitted frequencies for the field... negative values cause
      // NaN/Inf for some scorers.
      numberOfFieldTokens = docFreq;
      avgFieldLength = 1;
    } else {
      numberOfFieldTokens = sumTotalTermFreq;
      avgFieldLength = (float)numberOfFieldTokens / numberOfDocuments;
    }

    // TODO: add sumDocFreq for field (numberOfFieldPostings)
    stats.setNumberOfDocuments(numberOfDocuments);
    stats.setNumberOfFieldTokens(numberOfFieldTokens);
    stats.setAvgFieldLength(avgFieldLength);
    stats.setDocFreq(docFreq);
    stats.setTotalTermFreq(totalTermFreq);
  }

  /**
   * Scores the document {@code doc}.
   * <p>Subclasses must apply their scoring formula in this class.</p>
   * @param stats the corpus level statistics.
   * @param freq the term frequency.
   * @param docLen the document length.
   * @return the score.
   */
  protected abstract float score(BasicStats stats, float freq, float docLen);

  /**
   * Subclasses should implement this method to explain the score. {@code expl}
   * already contains the score, the name of the class and the doc id, as well
   * as the term frequency and its explanation; subclasses can add additional
   * clauses to explain details of their scoring formulae.
   * <p>The default implementation does nothing.</p>
   *
   * @param expl the explanation to extend with details.
   * @param stats the corpus level statistics.
   * @param doc the document id.
   * @param freq the term frequency.
   * @param docLen the document length.
   */
  protected void explain(
      Explanation expl, BasicStats stats, int doc, float freq, float docLen) {}

  /**
   * Explains the score. The implementation here provides a basic explanation
   * in the format <em>score(name-of-similarity, doc=doc-id,
   * freq=term-frequency), computed from:</em>, and
   * attaches the score (computed via the {@link #score(BasicStats, float, float)}
   * method) and the explanation for the term frequency. Subclasses content with
   * this format may add additional details in
   * {@link #explain(Explanation, BasicStats, int, float, float)}.
   *
   * @param stats the corpus level statistics.
   * @param doc the document id.
   * @param freq the term frequency and its explanation.
   * @param docLen the document length.
   * @return the explanation.
   */
  protected Explanation explain(
      BasicStats stats, int doc, Explanation freq, float docLen) {
    Explanation result = new Explanation();
    result.setValue(score(stats, freq.getValue(), docLen));
    result.setDescription("score(" + getClass().getSimpleName() +
        ", doc=" + doc + ", freq=" + freq.getValue() +"), computed from:");
    result.addDetail(freq);

    explain(result, stats, doc, freq.getValue(), docLen);

    return result;
  }

  /**
   * Creates the exact-frequency scorer; for multi-term stats one sub-scorer is
   * built per term and the results are summed by the multi scorer.
   */
  @Override
  public ExactSimScorer exactSimScorer(SimWeight stats, AtomicReaderContext context)
      throws IOException {
    if (stats instanceof MultiSimilarity.MultiStats) {
      // a multi term query (e.g. phrase). return the summation,
      // scoring almost as if it were boolean query
      SimWeight subStats[] = ((MultiSimilarity.MultiStats) stats).subStats;
      ExactSimScorer subScorers[] = new ExactSimScorer[subStats.length];
      for (int i = 0; i < subScorers.length; i++) {
        BasicStats basicstats = (BasicStats) subStats[i];
        subScorers[i] = new BasicExactDocScorer(basicstats,
            context.reader().getNormValues(basicstats.field));
      }
      return new MultiSimilarity.MultiExactDocScorer(subScorers);
    } else {
      BasicStats basicstats = (BasicStats) stats;
      return new BasicExactDocScorer(basicstats,
          context.reader().getNormValues(basicstats.field));
    }
  }

  /**
   * Creates the sloppy (positional) scorer; mirrors
   * {@link #exactSimScorer(SimWeight, AtomicReaderContext)}.
   */
  @Override
  public SloppySimScorer sloppySimScorer(SimWeight stats, AtomicReaderContext context)
      throws IOException {
    if (stats instanceof MultiSimilarity.MultiStats) {
      // a multi term query (e.g. phrase). return the summation,
      // scoring almost as if it were boolean query
      SimWeight subStats[] = ((MultiSimilarity.MultiStats) stats).subStats;
      SloppySimScorer subScorers[] = new SloppySimScorer[subStats.length];
      for (int i = 0; i < subScorers.length; i++) {
        BasicStats basicstats = (BasicStats) subStats[i];
        subScorers[i] = new BasicSloppyDocScorer(basicstats,
            context.reader().getNormValues(basicstats.field));
      }
      return new MultiSimilarity.MultiSloppyDocScorer(subScorers);
    } else {
      BasicStats basicstats = (BasicStats) stats;
      return new BasicSloppyDocScorer(basicstats,
          context.reader().getNormValues(basicstats.field));
    }
  }

  /**
   * Subclasses must override this method to return the name of the Similarity
   * and preferably the values of parameters (if any) as well.
   */
  @Override
  public abstract String toString();

  // ------------------------------ Norm handling ------------------------------

  /** Norm -> document length map. */
  private static final float[] NORM_TABLE = new float[256];

  static {
    // Decode table for the byte-encoded norms; index 0 maps through
    // byte315ToFloat like every other entry.
    for (int i = 0; i < 256; i++) {
      float floatNorm = SmallFloat.byte315ToFloat((byte)i);
      NORM_TABLE[i] = 1.0f / (floatNorm * floatNorm);
    }
  }

  /** Encodes the document length in the same way as {@link TFIDFSimilarity}. */
  @Override
  public long computeNorm(FieldInvertState state) {
    final float numTerms;
    if (discountOverlaps)
      numTerms = state.getLength() - state.getNumOverlap();
    else
      // NOTE(review): dividing the raw length by the boost here (rather than
      // using the plain length) looks odd, but nothing in this file shows the
      // intent — confirm against the matching decode path before changing.
      numTerms = state.getLength() / state.getBoost();
    return encodeNormValue(state.getBoost(), numTerms);
  }

  /** Decodes a normalization factor (document length) stored in an index.
   * @see #encodeNormValue(float,float)
   */
  protected float decodeNormValue(byte norm) {
    return NORM_TABLE[norm & 0xFF];  // & 0xFF maps negative bytes to positive above 127
  }

  /** Encodes the length to a byte via SmallFloat. */
  protected byte encodeNormValue(float boost, float length) {
    return SmallFloat.floatToByte315((boost / (float) Math.sqrt(length)));
  }

  // ----------------------------- Static methods ------------------------------

  /** Returns the base two logarithm of {@code x}. */
  public static double log2(double x) {
    // Put this to a 'util' class if we need more of these.
    return Math.log(x) / LOG_2;
  }

  // --------------------------------- Classes ---------------------------------

  /** Delegates the {@link #score(int, int)} and
   * {@link #explain(int, Explanation)} methods to
   * {@link SimilarityBase#score(BasicStats, float, float)} and
   * {@link SimilarityBase#explain(BasicStats, int, Explanation, float)},
   * respectively.
   */
  private class BasicExactDocScorer extends ExactSimScorer {
    private final BasicStats stats;
    private final NumericDocValues norms;

    BasicExactDocScorer(BasicStats stats, NumericDocValues norms) throws IOException {
      this.stats = stats;
      this.norms = norms;
    }

    @Override
    public float score(int doc, int freq) {
      // We have to supply something in case norms are omitted
      return SimilarityBase.this.score(stats, freq,
          norms == null ? 1F : decodeNormValue((byte)norms.get(doc)));
    }

    @Override
    public Explanation explain(int doc, Explanation freq) {
      return SimilarityBase.this.explain(stats, doc, freq,
          norms == null ? 1F : decodeNormValue((byte)norms.get(doc)));
    }
  }

  /** Delegates the {@link #score(int, float)} and
   * {@link #explain(int, Explanation)} methods to
   * {@link SimilarityBase#score(BasicStats, float, float)} and
   * {@link SimilarityBase#explain(BasicStats, int, Explanation, float)},
   * respectively.
   */
  private class BasicSloppyDocScorer extends SloppySimScorer {
    private final BasicStats stats;
    private final NumericDocValues norms;

    BasicSloppyDocScorer(BasicStats stats, NumericDocValues norms) throws IOException {
      this.stats = stats;
      this.norms = norms;
    }

    @Override
    public float score(int doc, float freq) {
      // We have to supply something in case norms are omitted
      return SimilarityBase.this.score(stats, freq,
          norms == null ? 1F : decodeNormValue((byte)norms.get(doc)));
    }

    @Override
    public Explanation explain(int doc, Explanation freq) {
      return SimilarityBase.this.explain(stats, doc, freq,
          norms == null ? 1F : decodeNormValue((byte)norms.get(doc)));
    }

    /** Sloppy-phrase factor: closer terms (smaller edit distance) score higher. */
    @Override
    public float computeSlopFactor(int distance) {
      return 1.0f / (distance + 1);
    }

    /** Payloads are ignored by this similarity family. */
    @Override
    public float computePayloadFactor(int doc, int start, int end, BytesRef payload) {
      return 1f;
    }
  }
}
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ehcache.clustered.client.internal.store;

import org.ehcache.clustered.client.config.ClusteredResourcePool;
import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder;
import org.ehcache.clustered.client.internal.store.ServerStoreProxy.ServerCallback;
import org.ehcache.clustered.common.Consistency;
import org.ehcache.clustered.common.internal.ServerStoreConfiguration;
import org.ehcache.clustered.common.internal.store.Chain;
import org.ehcache.config.units.MemoryUnit;
import org.ehcache.impl.serialization.LongSerializer;
import org.junit.Test;

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import static org.ehcache.clustered.common.internal.store.Util.chainsEqual;
import static org.ehcache.clustered.common.internal.store.Util.createPayload;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;

/**
 * Tests for {@link StrongServerStoreProxy} under {@code Consistency.STRONG}:
 * verifies that hash/all invalidation callbacks fire on the *other* client when
 * one client mutates the store, that concurrent invalidations are delivered
 * serially, and that a pending invalidation wait is unblocked when the
 * originating client disconnects.
 *
 * <p>NOTE(review): each test creates two client entities against the same store
 * name; constructing a {@code StrongServerStoreProxy} appears to register its
 * {@link ServerCallback} with the entity as a side effect — the second proxy is
 * often otherwise unused, which is intentional here.
 */
public class StrongServerStoreProxyTest extends AbstractServerStoreProxyTest {

  /**
   * Creates (or fetches, when {@code create} is false) a clustered tier client
   * entity configured with a 4MB dedicated pool, Long key/value types, and
   * STRONG consistency.
   */
  private static SimpleClusterTierClientEntity createClientEntity(String name, boolean create) throws Exception {
    ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(4L, MemoryUnit.MB);
    ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(),
        Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class
        .getName(), Consistency.STRONG, false);
    return createClientEntity(name, serverStoreConfiguration, create);
  }

  /**
   * Fills the 4MB pool with 40 x 512KB payloads to force server-side eviction,
   * then checks both clients observed consistent chains and received the
   * expected number of invalidation callbacks.
   */
  @Test
  public void testServerSideEvictionFiresInvalidations() throws Exception {
    SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testServerSideEvictionFiresInvalidations", true);
    SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testServerSideEvictionFiresInvalidations", false);

    final List<Long> store1InvalidatedHashes = new CopyOnWriteArrayList<>();
    final List<Long> store2InvalidatedHashes = new CopyOnWriteArrayList<>();

    StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity1, new ServerCallback() {
      @Override
      public void onInvalidateHash(long hash) {
        store1InvalidatedHashes.add(hash);
      }

      @Override
      public void onInvalidateAll() {
        fail("should not be called");
      }

      @Override
      public Chain compact(Chain chain) {
        throw new AssertionError();
      }
    });
    StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testServerSideEvictionFiresInvalidations", clientEntity2, new ServerCallback() {
      @Override
      public void onInvalidateHash(long hash) {
        store2InvalidatedHashes.add(hash);
      }

      @Override
      public void onInvalidateAll() {
        fail("should not be called");
      }

      @Override
      public Chain compact(Chain chain) {
        // client 2 may be asked to compact; returning the chain unchanged is a no-op
        return chain;
      }
    });

    final int ITERATIONS = 40;
    for (int i = 0; i < ITERATIONS; i++) {
      // 40 * 512KB = 20MB into a 4MB pool guarantees evictions
      serverStoreProxy1.append(i, createPayload(i, 512 * 1024));
    }

    int evictionCount = 0;
    int entryCount = 0;
    for (int i = 0; i < ITERATIONS; i++) {
      Chain elements1 = serverStoreProxy1.get(i);
      Chain elements2 = serverStoreProxy2.get(i);
      assertThat(chainsEqual(elements1, elements2), is(true));
      if (!elements1.isEmpty()) {
        entryCount++;
      } else {
        evictionCount++;
      }
    }

    // there has to be server-side evictions, otherwise this test is useless
    assertThat(store1InvalidatedHashes.size(), greaterThan(0));
    // test that each time the server evicted, the originating client got notified
    assertThat(store1InvalidatedHashes.size(), is(ITERATIONS - entryCount));
    // test that each time the server evicted, the other client got notified on top of normal invalidations
    assertThat(store2InvalidatedHashes.size(), is(ITERATIONS + evictionCount));
  }

  /** An append on client 1 must invalidate the same hash on client 2. */
  @Test
  public void testHashInvalidationListenerWithAppend() throws Exception {
    SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testHashInvalidationListenerWithAppend", true);
    SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testHashInvalidationListenerWithAppend", false);

    final AtomicReference<Long> invalidatedHash = new AtomicReference<>();

    StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testHashInvalidationListenerWithAppend", clientEntity1, mock(ServerCallback.class));
    StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testHashInvalidationListenerWithAppend", clientEntity2, new ServerCallback() {
      @Override
      public void onInvalidateHash(long hash) {
        invalidatedHash.set(hash);
      }

      @Override
      public void onInvalidateAll() {
        throw new AssertionError("Should not be called");
      }

      @Override
      public Chain compact(Chain chain) {
        throw new AssertionError();
      }
    });

    serverStoreProxy1.append(1L, createPayload(1L));

    // STRONG consistency: append() does not return until invalidation completed,
    // so the reference is guaranteed to be set here
    assertThat(invalidatedHash.get(), is(1L));
  }

  /**
   * Two concurrent appends from client 1: client 2's listener must never be
   * entered by two threads at once (checked via the compare-and-set guard).
   */
  @Test
  public void testConcurrentHashInvalidationListenerWithAppend() throws Exception {
    SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testConcurrentHashInvalidationListenerWithAppend", true);
    SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testConcurrentHashInvalidationListenerWithAppend", false);

    final AtomicBoolean invalidating = new AtomicBoolean();
    final CountDownLatch latch = new CountDownLatch(2);

    StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testConcurrentHashInvalidationListenerWithAppend", clientEntity1, mock(ServerCallback.class));
    StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testConcurrentHashInvalidationListenerWithAppend", clientEntity2, new ServerCallback() {
      @Override
      public void onInvalidateHash(long hash) {
        if (!invalidating.compareAndSet(false, true)) {
          fail("Both threads entered the listener concurrently");
        }
        try {
          // widen the race window so overlapping delivery would be detected
          Thread.sleep(100);
        } catch (InterruptedException ie) {
          throw new AssertionError(ie);
        }
        invalidating.set(false);
        latch.countDown();
      }

      @Override
      public void onInvalidateAll() {
        throw new AssertionError("Should not be called");
      }

      @Override
      public Chain compact(Chain chain) {
        throw new AssertionError();
      }
    });

    ExecutorService executor = Executors.newCachedThreadPool();
    try {
      executor.submit(() -> {
        serverStoreProxy1.append(1L, createPayload(1L));
        return null;
      });
      executor.submit(() -> {
        serverStoreProxy1.append(1L, createPayload(1L));
        return null;
      });

      if (!latch.await(5, TimeUnit.SECONDS)) {
        fail("Both listeners were not called");
      }
    } finally {
      executor.shutdown();
    }
  }

  /** Same as the append test, but via getAndAppend(). */
  @Test
  public void testHashInvalidationListenerWithGetAndAppend() throws Exception {
    SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testHashInvalidationListenerWithGetAndAppend", true);
    SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testHashInvalidationListenerWithGetAndAppend", false);

    final AtomicReference<Long> invalidatedHash = new AtomicReference<>();

    StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testHashInvalidationListenerWithGetAndAppend", clientEntity1, mock(ServerCallback.class));
    StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testHashInvalidationListenerWithGetAndAppend", clientEntity2, new ServerCallback() {
      @Override
      public void onInvalidateHash(long hash) {
        invalidatedHash.set(hash);
      }

      @Override
      public void onInvalidateAll() {
        throw new AssertionError("Should not be called");
      }

      @Override
      public Chain compact(Chain chain) {
        throw new AssertionError();
      }
    });

    serverStoreProxy1.getAndAppend(1L, createPayload(1L));

    assertThat(invalidatedHash.get(), is(1L));
  }

  /** A clear() on client 1 must trigger onInvalidateAll() on client 2. */
  @Test
  public void testAllInvalidationListener() throws Exception {
    SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testAllInvalidationListener", true);
    SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testAllInvalidationListener", false);

    final AtomicBoolean invalidatedAll = new AtomicBoolean();

    StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testAllInvalidationListener", clientEntity1, mock(ServerCallback.class));
    StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testAllInvalidationListener", clientEntity2, new ServerCallback() {
      @Override
      public void onInvalidateHash(long hash) {
        throw new AssertionError("Should not be called");
      }

      @Override
      public void onInvalidateAll() {
        invalidatedAll.set(true);
      }

      @Override
      public Chain compact(Chain chain) {
        throw new AssertionError();
      }
    });

    serverStoreProxy1.clear();

    assertThat(invalidatedAll.get(), is(true));
  }

  /** Two concurrent clear() calls: the all-invalidation listener must be serialized. */
  @Test
  public void testConcurrentAllInvalidationListener() throws Exception {
    SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testConcurrentAllInvalidationListener", true);
    SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testConcurrentAllInvalidationListener", false);

    final AtomicBoolean invalidating = new AtomicBoolean();
    final CountDownLatch latch = new CountDownLatch(2);

    StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testConcurrentAllInvalidationListener", clientEntity1, mock(ServerCallback.class));
    StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testConcurrentAllInvalidationListener", clientEntity2, new ServerCallback() {
      @Override
      public void onInvalidateHash(long hash) {
        throw new AssertionError("Should not be called");
      }

      @Override
      public void onInvalidateAll() {
        if (!invalidating.compareAndSet(false, true)) {
          fail("Both threads entered the listener concurrently");
        }
        try {
          // widen the race window so overlapping delivery would be detected
          Thread.sleep(100);
        } catch (InterruptedException ie) {
          throw new AssertionError(ie);
        }
        invalidating.set(false);
        latch.countDown();
      }

      @Override
      public Chain compact(Chain chain) {
        throw new AssertionError();
      }
    });

    ExecutorService executor = Executors.newCachedThreadPool();
    try {
      executor.submit(() -> {
        serverStoreProxy1.clear();
        return null;
      });
      executor.submit(() -> {
        serverStoreProxy1.clear();
        return null;
      });

      if (!latch.await(5, TimeUnit.SECONDS)) {
        fail("Both listeners were not called");
      }
    } finally {
      executor.shutdown();
    }
  }

  /**
   * If client 1 disconnects while waiting for client 2 to acknowledge an append
   * invalidation, the blocked append must fail with a RuntimeException whose
   * cause is an IllegalStateException rather than hang forever.
   */
  @Test
  public void testAppendInvalidationUnblockedByDisconnection() throws Exception {
    SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testAppendInvalidationUnblockedByDisconnection", true);
    SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testAppendInvalidationUnblockedByDisconnection", false);

    StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testAppendInvalidationUnblockedByDisconnection", clientEntity1, mock(ServerCallback.class));
    StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testAppendInvalidationUnblockedByDisconnection", clientEntity2, new ServerCallback() {
      @Override
      public void onInvalidateHash(long hash) {
        // simulate client 1 dropping its connection mid-invalidation
        clientEntity1.fireDisconnectionEvent();
      }

      @Override
      public void onInvalidateAll() {
        throw new AssertionError("Should not be called");
      }

      @Override
      public Chain compact(Chain chain) {
        throw new AssertionError();
      }
    });

    try {
      serverStoreProxy1.append(1L, createPayload(1L));
      fail("expected RuntimeException");
    } catch (RuntimeException re) {
      assertThat(re.getCause(), instanceOf(IllegalStateException.class));
    }
  }

  /** Same disconnection scenario as above, but for a blocked clear(). */
  @Test
  public void testClearInvalidationUnblockedByDisconnection() throws Exception {
    SimpleClusterTierClientEntity clientEntity1 = createClientEntity("testClearInvalidationUnblockedByDisconnection", true);
    SimpleClusterTierClientEntity clientEntity2 = createClientEntity("testClearInvalidationUnblockedByDisconnection", false);

    StrongServerStoreProxy serverStoreProxy1 = new StrongServerStoreProxy("testClearInvalidationUnblockedByDisconnection", clientEntity1, mock(ServerCallback.class));
    StrongServerStoreProxy serverStoreProxy2 = new StrongServerStoreProxy("testClearInvalidationUnblockedByDisconnection", clientEntity2, new ServerCallback() {
      @Override
      public void onInvalidateHash(long hash) {
        throw new AssertionError("Should not be called");
      }

      @Override
      public void onInvalidateAll() {
        // simulate client 1 dropping its connection mid-invalidation
        clientEntity1.fireDisconnectionEvent();
      }

      @Override
      public Chain compact(Chain chain) {
        throw new AssertionError();
      }
    });

    try {
      serverStoreProxy1.clear();
      fail("expected RuntimeException");
    } catch (RuntimeException re) {
      assertThat(re.getCause(), instanceOf(IllegalStateException.class));
    }
  }
}
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.config; import com.hazelcast.nio.ObjectDataInput; import com.hazelcast.nio.ObjectDataOutput; import com.hazelcast.nio.serialization.IdentifiedDataSerializable; import java.io.IOException; import java.util.ArrayList; import java.util.List; import static com.hazelcast.internal.serialization.impl.SerializationUtil.readNullableList; import static com.hazelcast.internal.serialization.impl.SerializationUtil.writeNullableList; import static com.hazelcast.util.Preconditions.checkAsyncBackupCount; import static com.hazelcast.util.Preconditions.checkBackupCount; /** * Provides configuration service for Collection. * * @param <T> Type of Collection such as List, Set */ public abstract class CollectionConfig<T extends CollectionConfig> implements IdentifiedDataSerializable { /** * Default maximum size for the Configuration. 
*/ public static final int DEFAULT_MAX_SIZE = 0; /** * The default number of synchronous backups */ public static final int DEFAULT_SYNC_BACKUP_COUNT = 1; /** * The default number of asynchronous backups */ public static final int DEFAULT_ASYNC_BACKUP_COUNT = 0; private String name; private List<ItemListenerConfig> listenerConfigs; private int backupCount = DEFAULT_SYNC_BACKUP_COUNT; private int asyncBackupCount = DEFAULT_ASYNC_BACKUP_COUNT; private int maxSize = DEFAULT_MAX_SIZE; private boolean statisticsEnabled = true; protected CollectionConfig() { } protected CollectionConfig(CollectionConfig config) { this.name = config.name; this.listenerConfigs = new ArrayList<ItemListenerConfig>(config.getItemListenerConfigs()); this.backupCount = config.backupCount; this.asyncBackupCount = config.asyncBackupCount; this.maxSize = config.maxSize; this.statisticsEnabled = config.statisticsEnabled; } public abstract T getAsReadOnly(); /** * Gets the name of this collection. * * @return the name of this collection */ public String getName() { return name; } /** * Sets the name of this collection. * * @param name the name of this collection * @return the updated collection configuration */ public T setName(String name) { this.name = name; return (T) this; } /** * Gets the list of ItemListenerConfigs. * * @return the list of ItemListenerConfigs */ public List<ItemListenerConfig> getItemListenerConfigs() { if (listenerConfigs == null) { listenerConfigs = new ArrayList<ItemListenerConfig>(); } return listenerConfigs; } /** * Sets the list of ItemListenerConfigs. * * @param listenerConfigs the list of ItemListenerConfigs to set * @return this collection configuration */ public T setItemListenerConfigs(List<ItemListenerConfig> listenerConfigs) { this.listenerConfigs = listenerConfigs; return (T) this; } /** * Gets the total number of synchronous and asynchronous backups for this collection. 
* * @return the total number of synchronous and asynchronous backups for this collection */ public int getTotalBackupCount() { return backupCount + asyncBackupCount; } /** * Gets the number of synchronous backups for this collection. * * @return the number of synchronous backups for this collection */ public int getBackupCount() { return backupCount; } /** * Sets the number of synchronous backups for this collection. * * @param backupCount the number of synchronous backups to set for this collection * @return the current CollectionConfig * @throws IllegalArgumentException if backupCount smaller than 0, * or larger than the maximum number of backup * or the sum of the backups and async backups is larger than the maximum number of backups * @see #setAsyncBackupCount(int) */ public T setBackupCount(int backupCount) { this.backupCount = checkBackupCount(backupCount, asyncBackupCount); return (T) this; } /** * Gets the number of asynchronous backups. * * @return the number of asynchronous backups */ public int getAsyncBackupCount() { return asyncBackupCount; } /** * Sets the number of asynchronous backups. * * @param asyncBackupCount the number of asynchronous synchronous backups to set * @return the updated CollectionConfig * @throws IllegalArgumentException if asyncBackupCount is smaller than 0, * or larger than the maximum number of backups, * or the sum of the backups and async backups is larger than the maximum number of backups. * @see #setBackupCount(int) * @see #getAsyncBackupCount() */ public T setAsyncBackupCount(int asyncBackupCount) { this.asyncBackupCount = checkAsyncBackupCount(asyncBackupCount, asyncBackupCount); return (T) this; } /** * Gets the maximum size for the Configuration. * * @return the maximum size for the Configuration */ public int getMaxSize() { return maxSize == 0 ? Integer.MAX_VALUE : maxSize; } /** * Sets the maximum size for the collection. 
* * @return the current CollectionConfig */ public T setMaxSize(int maxSize) { this.maxSize = maxSize; return (T) this; } /** * Checks if collection statistics are enabled. * * @return {@code true} if collection statistics are enabled, {@code false} otherwise */ public boolean isStatisticsEnabled() { return statisticsEnabled; } /** * Sets collection statistics to enabled or disabled. * * @param statisticsEnabled {@code true} to enable collection statistics, {@code false} to disable * @return the current collection config instance */ public T setStatisticsEnabled(boolean statisticsEnabled) { this.statisticsEnabled = statisticsEnabled; return (T) this; } /** * Adds an item listener to this collection (listens for when items are added or removed). * * @param itemListenerConfig the item listener to add to this collection */ public void addItemListenerConfig(ItemListenerConfig itemListenerConfig) { getItemListenerConfigs().add(itemListenerConfig); } @Override public int getFactoryId() { return ConfigDataSerializerHook.F_ID; } @Override public void writeData(ObjectDataOutput out) throws IOException { out.writeUTF(name); writeNullableList(listenerConfigs, out); out.writeInt(backupCount); out.writeInt(asyncBackupCount); out.writeInt(maxSize); out.writeBoolean(statisticsEnabled); } @Override public void readData(ObjectDataInput in) throws IOException { name = in.readUTF(); listenerConfigs = readNullableList(in); backupCount = in.readInt(); asyncBackupCount = in.readInt(); maxSize = in.readInt(); statisticsEnabled = in.readBoolean(); } @Override @SuppressWarnings({"checkstyle:cyclomaticcomplexity", "checkstyle:npathcomplexity"}) public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } CollectionConfig<?> that = (CollectionConfig<?>) o; if (backupCount != that.backupCount) { return false; } if (asyncBackupCount != that.asyncBackupCount) { return false; } if (getMaxSize() != that.getMaxSize()) { return 
false; } if (statisticsEnabled != that.statisticsEnabled) { return false; } if (name != null ? !name.equals(that.name) : that.name != null) { return false; } return getItemListenerConfigs().equals(that.getItemListenerConfigs()); } @Override public int hashCode() { int result = name != null ? name.hashCode() : 0; result = 31 * result + getItemListenerConfigs().hashCode(); result = 31 * result + backupCount; result = 31 * result + asyncBackupCount; result = 31 * result + getMaxSize(); result = 31 * result + (statisticsEnabled ? 1 : 0); return result; } }
/**
 * pims-web org.pimslims.servlet.location CreateLocation.java
 *
 * @author Marc Savitsky
 * @date 9 Apr 2008
 *
 * Protein Information Management System
 * @version: 1.3
 *
 * Copyright (c) 2008 Marc Savitsky
 *
 *
 *
 */
package org.pimslims.servlet.location;

/**
 * CreateLocation — servlet that renders the "create a Location" form (GET)
 * and persists a new Location from the submitted form (POST).
 */
/*
 * Created on 18.07.2005 TODO Error Messages passing
 */
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import org.pimslims.dao.ReadableVersion;
import org.pimslims.dao.WritableVersion;
import org.pimslims.exception.AbortedException;
import org.pimslims.exception.AccessException;
import org.pimslims.exception.ConstraintException;
import org.pimslims.lab.ContainerUtility;
import org.pimslims.metamodel.MetaClass;
import org.pimslims.metamodel.ModelObject;
import org.pimslims.model.core.LabNotebook;
import org.pimslims.model.holder.Holder;
import org.pimslims.model.location.Location;
import org.pimslims.model.sample.Sample;
import org.pimslims.presentation.LocationBeanWriter;
import org.pimslims.presentation.ModelObjectBean;
import org.pimslims.presentation.ServletUtil;
import org.pimslims.presentation.sample.LocationBean;
import org.pimslims.servlet.Create;
import org.pimslims.servlet.PIMSServlet;

@Deprecated // obsolete
public class CreateLocation extends Create {

    public static final long serialVersionUID = 123243546;

    /*
     * (non-Javadoc)
     *
     * @see org.pimslims.servlet.PIMSServlet#getServletInfo()
     */
    @Override
    public String getServletInfo() {
        return "Create a model object";
    }

    /**
     * Default constructor; delegates to {@link Create}.
     */
    public CreateLocation() {
        super();
    }

    /**
     * Creates a new Location from the submitted form parameters, optionally
     * re-parents an existing "hostobject" (Sample, Holder or Location) into it,
     * commits, and redirects to the View page of the created/updated object.
     */
    @Override
    public void doPost(final HttpServletRequest request, final HttpServletResponse response)
        throws ServletException, IOException {
        // Get a WritableVersion
        final WritableVersion version = this.getWritableVersion(request, response);
        // hook identifies the object the final redirect will display
        String hook = null;
        try {
            // populate the presentation bean from the form fields
            final LocationBean lb = new LocationBean();
            lb.setName(request.getParameter("locationName"));
            lb.setType(request.getParameter("locationType"));
            lb.setTemperature(request.getParameter("locationTemperature"));
            lb.setTemperatureDisplayUnit(request.getParameter("locationTemperatureUnit"));
            lb.setPressure(request.getParameter("locationPressure"));
            lb.setPressureDisplayUnit(request.getParameter("locationPressureUnit"));

            final LabNotebook project = version.get(request.getParameter(PIMSServlet.LAB_NOTEBOOK_ID));
            assert project != null : "project should not be null";
            lb.setAccess(new ModelObjectBean(project));

            // optional parent location: only set when the parameter is non-empty
            final String locationParent = request.getParameter("locationParent");
            if (null != locationParent && locationParent.length() > 0) {
                final ModelObject object = version.get(request.getParameter("locationParent"));
                lb.setParentLocation((Location) object);
            }

            final ModelObject modelObject = LocationBeanWriter.createNewLocation(version, lb);
            hook = modelObject.get_Hook();

            // if a host object was supplied, move it into the new location;
            // the redirect then targets the host object instead of the location
            if (this.validString(request.getParameter("hostobject"))) {
                final String hostObjectHook = request.getParameter("hostobject");
                final ModelObject hostObject = version.get(hostObjectHook);
                if (hostObject instanceof org.pimslims.model.sample.Sample) {
                    ContainerUtility.move((Sample) hostObject, (Location) modelObject);
                }
                if (hostObject instanceof org.pimslims.model.holder.Holder) {
                    ContainerUtility.move((Holder) hostObject, (Location) modelObject);
                }
                if (hostObject instanceof org.pimslims.model.location.Location) {
                    final Location thisLocation = (Location) hostObject;
                    thisLocation.setLocation((Location) modelObject);
                }
                hook = hostObject.get_Hook();
            }
            version.commit();
        } catch (final ConstraintException e) {
            throw new ServletException(e);
        } catch (final AccessException e) {
            throw new ServletException(e);
        } catch (final AbortedException e) {
            throw new ServletException(e);
        } finally {
            // roll back if commit() was not reached or failed
            if (!version.isCompleted()) {
                version.abort();
            }
        }
        // now show the new target
        PIMSServlet.redirectPost(response, request.getContextPath() + "/View/" + hook);
    }

    /**
     * {@inheritDoc}
     *
     * Renders the Location creation form: resolves the MetaClass being created
     * (hard-wired to org.pimslims.model.location.Location), restores any error
     * messages / previously entered values from the session, and forwards to
     * /JSP/location/CreateLocation.jsp.
     *
     * @baseURL org.pimslims.model.target.Target:molecule?molecule=hook,hook,hook&OtherMolecules=hook,hook
     */
    @Override
    public void doGet(final HttpServletRequest request, final HttpServletResponse response)
        throws ServletException, IOException {
        if (!this.checkStarted(request, response)) {
            return;
        }
        response.getWriter();

        // get the class we are creating,
        // Path is e.g. Create/org.pimslims.model.people.organisation
        // NOTE(review): pathInfo is hard-coded here, so the null/empty guard
        // below can never fire — kept for parity with the generic Create servlet
        String pathInfo = "/org.pimslims.model.location.Location";
        if (pathInfo == null || pathInfo.length() < 1) {
            request.setAttribute("message", "The type to create has not been specified cannot proceed");
            final RequestDispatcher dispatcher = request.getRequestDispatcher("/JSP/OneMessage.jsp");
            dispatcher.forward(request, response);
            return;
        }

        final ReadableVersion rv = this.getReadableVersion(request, response);
        if (rv == null) {
            return;
        }

        // optional object the new location will be attached to
        ModelObject hostObject = null;
        if (null != request.getParameter("hostobject")) {
            final String hostObjectHook = request.getParameter("hostobject");
            hostObject = rv.get(hostObjectHook);
        }

        // pull (and clear) any validation state left by a previous failed POST
        Map errorMessages = null;
        Map formValues = null;
        final HttpSession session = request.getSession(false);
        if (session != null) {
            errorMessages = (Map) session.getAttribute("errorMessages");
            formValues = (Map) session.getAttribute("formValues");
            final Map parms = formValues;
            if (parms != null) {
                for (final Iterator iter = parms.entrySet().iterator(); iter.hasNext();) {
                    final Map.Entry elem = (Map.Entry) iter.next();
                    System.out.println(elem.getKey() + " VAL: " + elem.getValue());
                }
            }
            session.removeAttribute("errorMessages");
            session.removeAttribute("formValues");
        }
        if (errorMessages == null) {
            errorMessages = Collections.synchronizedMap(new HashMap());
        }

        // TODO parse attributes names from request line and create html only
        // for requested attributes
        // hook=org.pimslims.model.target.Target:9879 = 9878(?)
        // org.pimslims.model.target.Target?molecule=hook,hook,hook&OtherMolecules=hook,hook
        /**
         * Note: org.pimslims.model.target.Target:molecule?molecule=hook,hook,hook&OtherMolecules=hook,hook
         * org.pimslims.model.target.Target:molecule - means create molecule for a Target
         * org.pimslims.model.target.Target:molecule:NaturalSource - means create NaturalSource for a molecule
         * for a Target the rest of hooks for roles are to the Target only!
         */
        //String cancelURL = ""; // default is front page

        pathInfo = pathInfo.substring(1);
        MetaClass metaClass = null;
        MetaClass mainMetaClass = null;
        final int columnp = pathInfo.indexOf(":");
        // a ':' in the path means we are creating a role object of a main class
        final boolean isRoleObject = columnp >= 0;
        String roleName = null;
        if (isRoleObject) {
            // URL was like org.pimslims.model.target.Target:molecule...
            // making a new molecule for a new target etc
            final String mainMetaClassName = pathInfo.substring(0, columnp);
            mainMetaClass = this.getModel().getMetaClass(mainMetaClassName);
            if (mainMetaClass == null) {
                request.setAttribute("message", "Unknown object type. The metaclass was not set.");
                final RequestDispatcher dispatcher = request.getRequestDispatcher("/JSP/OneMessage.jsp");
                dispatcher.forward(request, response);
                return;
            }
            // Means that role represented by Abstract class, the concrite class
            // is defined by given index in the Set of subclasses
            final int star = pathInfo.indexOf("*");
            int setIndx;
            if (star >= 0) {
                setIndx = Integer.parseInt(pathInfo.substring(columnp + 1, star));
                roleName = pathInfo.substring(star + 1);
                metaClass = ServletUtil.getMetaClassForAbstractRole(mainMetaClass, roleName, setIndx);
            } else {
                roleName = pathInfo.substring(columnp + 1);
                metaClass = ServletUtil.getMetaClassForRole(mainMetaClass, roleName);
                //cancelURL = "Create/" + mainMetaClass.getMetaClassName();
            }
        } else {
            mainMetaClass = this.getModel().getMetaClass(pathInfo);
            if (mainMetaClass == null) {
                request.setAttribute("message", "Unknown object type. The metaclass was not set.");
                final RequestDispatcher dispatcher = request.getRequestDispatcher("/JSP/OneMessage.jsp");
                dispatcher.forward(request, response);
                return;
            }
            metaClass = mainMetaClass;
        }

        // abstract classes cannot be instantiated directly: let the user pick a subclass
        if (mainMetaClass.isAbstract()) {
            request.setAttribute("metaClass", mainMetaClass);
            final RequestDispatcher dispatcher = request.getRequestDispatcher("/JSP/create/CreateAbstract.jsp");
            dispatcher.forward(request, response);
            return;
        }
        if (metaClass == null) {
            request.setAttribute("message", "Unknown object type. The metaclass was not set.");
            final RequestDispatcher dispatcher = request.getRequestDispatcher("/JSP/OneMessage.jsp");
            dispatcher.forward(request, response);
            return;
        }

        try {
            //final Collection objects = rv.getAll(org.pimslims.model.location.Location.class);
            final Collection objects = PIMSServlet.getAll(rv, org.pimslims.model.location.Location.class);
            final Map parms = request.getParameterMap();
            /*
            RoleHooksHolder hooks = null;
            if (parms != null && parms.size() > 0) {
                hooks = new RoleHooksHolder(mainMetaClass);
                hooks.parse(parms);
            }
            */
            /*final AttributeValueMap attributeValues = new AttributeValueMap(mainMetaClass, parms);
            if (null != formValues) {
                attributeValues.putAll(formValues);
            }*/
            // TODO rewrite as a page hidden parameter
            //final MetaClass pmeta = metaClass;
            /*
            final CustomGetter customget = new CustomGetter(rv, pmeta, errorMessages,
                ServletUtil.getSortedAttributes(attributes), attributeValues, roles, mainMetaClass, hooks,
                request.getContextPath());
            //customget.getMissed(errorMessages);
            //request.setAttribute("helptext", customget.getHelpText());
            // request.setAttribute("missedErrorFields",
            // customget.getMissed(errorMessages));
            //request.setAttribute("headerTitle", customget.getTitle());
            //request.setAttribute("reqAttr", customget.reqAttrhtml);
            //request.setAttribute("optAttr", customget.optAttrhtml);
            //request.setAttribute("optRoles", customget.getOptRoleshtml());
            //request.setAttribute("reqRoles", customget.getReqRoleshtml());
            //request.setAttribute("readOnly", customget.readOnly ? "disabled" : "");
            //request.setAttribute("resetAction", customget.resetAction);
            //request.setAttribute("javascript", customget.javascript);
            //request.setAttribute("cancelURL", cancelURL); // relative URL for page if cancel clicked
            */

            // expose everything the JSP needs to render the form
            request.setAttribute("locations", objects);
            request.setAttribute("accessObjects", PIMSServlet.getPossibleCreateOwners(rv));
            request.setAttribute("errorMessages", errorMessages);
            //request.setAttribute("owner", rv.getDefaultOwner(mainMetaClass, null));
            request.setAttribute("mainclass", mainMetaClass);
            request.setAttribute("roleName", roleName);
            request.setAttribute("hostObject", hostObject);
            request.setAttribute("metaclass", metaClass);
            request.setAttribute("pathInfo", pathInfo);

            // mirror every single-valued request parameter as a request attribute
            for (final Iterator it = parms.entrySet().iterator(); it.hasNext();) {
                final Map.Entry e = (Map.Entry) it.next();
                final String[] values = (String[]) e.getValue();
                request.setAttribute((String) e.getKey(), values[0]);
            }

            final RequestDispatcher dispatcher = request.getRequestDispatcher("/JSP/location/CreateLocation.jsp");
            dispatcher.forward(request, response);
            rv.commit();
        } catch (final AbortedException e) {
            throw new ServletException(e);
        } catch (final ConstraintException e) {
            throw new ServletException(e);
        } finally {
            // release the read version if commit() was not reached
            if (!rv.isCompleted()) {
                rv.abort();
            }
        }
    }

    /**
     * Returns true when {@code s} is non-null and non-empty.
     */
    private boolean validString(final String s) {
        if (null != s && s.length() > 0) {
            return true;
        }
        return false;
    }
}
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.devkit.dom.impl;

import com.intellij.ide.plugins.PluginManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PropertyUtil;
import com.intellij.psi.util.PsiTypesUtil;
import com.intellij.psi.xml.XmlElement;
import com.intellij.psi.xml.XmlFile;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.LinkedMultiMap;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.xml.*;
import com.intellij.util.xml.reflect.DomExtender;
import com.intellij.util.xml.reflect.DomExtension;
import com.intellij.util.xml.reflect.DomExtensionsRegistrar;
import com.intellij.util.xmlb.Constants;
import com.intellij.util.xmlb.annotations.AbstractCollection;
import com.intellij.util.xmlb.annotations.Attribute;
import com.intellij.util.xmlb.annotations.Property;
import com.intellij.util.xmlb.annotations.Tag;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.devkit.dom.*;

import java.lang.annotation.Annotation;
import java.util.*;

/**
 * Registers dynamic DOM children under an {@code <extensions>} element of a plugin.xml file:
 * for every extension point declared by the plugin itself or by a plugin it depends on,
 * a collection child is registered, and the shape of each extension element (attributes,
 * sub-tags) is derived reflectively from the EP's interface/bean class via its
 * {@code @Attribute}/{@code @Tag}/{@code @Property}/{@code @AbstractCollection} annotations.
 *
 * @author mike
 */
public class ExtensionDomExtender extends DomExtender<Extensions> {
  // Shared converter resolving class-name strings to PsiClass references.
  private static final PsiClassConverter CLASS_CONVERTER = new PluginPsiClassConverter();

  /** Synthetic {@link Required} annotation instance marking the "implementation" attribute as mandatory and non-empty. */
  private static class MyRequired implements Required {
    @Override
    public boolean value() {
      return true;
    }

    @Override
    public boolean nonEmpty() {
      return true;
    }

    @Override
    public boolean identifier() {
      return false;
    }

    @Override
    public Class<? extends Annotation> annotationType() {
      return Required.class;
    }
  }

  /**
   * Synthetic ExtendClass annotation restricting a class-valued attribute to concrete
   * (non-abstract, non-interface, non-enum) implementations of the EP's interface.
   */
  private static class MyExtendClass extends ExtendClassImpl {
    private final String myInterfaceName;

    private MyExtendClass(String interfaceName) {
      myInterfaceName = interfaceName;
    }

    @Override
    public boolean allowAbstract() {
      return false;
    }

    @Override
    public boolean allowInterface() {
      return false;
    }

    @Override
    public boolean allowEnum() {
      return false;
    }

    @Override
    public String value() {
      return myInterfaceName;
    }
  }

  /**
   * Extender applied to each registered extension element. If the EP declares an
   * {@code interface}, a required "implementation" attribute is registered (plus the
   * interface class's own xmlb-annotated fields); otherwise the EP's {@code beanClass}
   * fields define the element's attributes/sub-tags.
   */
  private static final DomExtender EXTENSION_EXTENDER = new DomExtender() {
    public void registerExtensions(@NotNull final DomElement domElement, @NotNull final DomExtensionsRegistrar registrar) {
      // The declaring ExtensionPoint was attached via setDeclaringElement() in registerExtensionPoint().
      final ExtensionPoint extensionPoint = (ExtensionPoint)domElement.getChildDescription().getDomDeclaration();
      assert extensionPoint != null;

      final String interfaceName = extensionPoint.getInterface().getStringValue();
      final Project project = extensionPoint.getManager().getProject();

      if (interfaceName != null) {
        // interface-style EP: extension supplies an implementation class
        registrar.registerGenericAttributeValueChildExtension(new XmlName("implementation"), PsiClass.class)
          .setConverter(CLASS_CONVERTER)
          .addCustomAnnotation(new MyExtendClass(interfaceName))
          .addCustomAnnotation(new MyRequired());

        registerXmlb(registrar, JavaPsiFacade.getInstance(project).findClass(interfaceName, GlobalSearchScope.allScope(project)),
                     Collections.<With>emptyList());
      }
      else {
        // bean-style EP: extension element is deserialized into the bean class
        final String beanClassName = extensionPoint.getBeanClass().getStringValue();
        if (beanClassName != null) {
          registerXmlb(registrar, JavaPsiFacade.getInstance(project).findClass(beanClassName, GlobalSearchScope.allScope(project)),
                       extensionPoint.getWithElements());
        }
      }
    }
  };

  /**
   * Entry point: registers one collection child per visible extension point whose
   * (qualified) name matches the {@code defaultExtensionNs}/{@code xmlns} prefix of
   * this {@code <extensions>} element.
   */
  public void registerExtensions(@NotNull final Extensions extensions, @NotNull final DomExtensionsRegistrar registrar) {
    final XmlElement xmlElement = extensions.getXmlElement();
    if (xmlElement == null) return;

    IdeaPlugin ideaPlugin = extensions.getParentOfType(IdeaPlugin.class, true);
    if (ideaPlugin == null) return;

    String prefix = getEpPrefix(extensions);
    for (IdeaPlugin plugin : getVisiblePlugins(ideaPlugin)) {
      // EPs declared without an explicit qualified name get the owning plugin's id as prefix;
      // plugins without an id default to "com.intellij".
      final String pluginId = StringUtil.notNullize(plugin.getPluginId(), "com.intellij");
      for (ExtensionPoints points : plugin.getExtensionPoints()) {
        for (ExtensionPoint point : points.getExtensionPoints()) {
          registerExtensionPoint(registrar, point, prefix, pluginId);
        }
      }
    }
  }

  /**
   * Namespace prefix for this element: {@code defaultExtensionNs}, falling back to
   * {@code xmlns}, with a trailing dot; empty string when neither is set.
   */
  private static String getEpPrefix(Extensions extensions) {
    String prefix = extensions.getDefaultExtensionNs().getStringValue();
    if (prefix == null) prefix = extensions.getXmlns().getStringValue();
    return prefix != null ? prefix + "." : "";
  }

  /**
   * The plugin itself, its transitive dependencies, plus all plugins that have no
   * plugin id (those are grouped under the {@code null} key in the id map).
   */
  private static Set<IdeaPlugin> getVisiblePlugins(IdeaPlugin ideaPlugin) {
    Set<IdeaPlugin> result = ContainerUtil.newHashSet();
    MultiMap<String, IdeaPlugin> byId = getPluginMap(ideaPlugin.getManager().getProject());
    collectDependencies(ideaPlugin, result, byId);
    //noinspection NullableProblems
    result.addAll(byId.get(null));
    return result;
  }

  /** Maps plugin id -> plugin DOM elements for every plugin descriptor in the project. */
  private static MultiMap<String, IdeaPlugin> getPluginMap(final Project project) {
    MultiMap<String, IdeaPlugin> byId = new LinkedMultiMap<String, IdeaPlugin>();
    for (IdeaPlugin each : IdeaPluginConverter.getAllPlugins(project)) {
      byId.putValue(each.getPluginId(), each);
    }
    return byId;
  }

  /**
   * Depth-first walk over the plugin dependency graph; {@code result} doubles as the
   * visited set, so cycles terminate.
   */
  private static void collectDependencies(final IdeaPlugin ideaPlugin, Set<IdeaPlugin> result, final MultiMap<String, IdeaPlugin> byId) {
    if (!result.add(ideaPlugin)) {
      return;
    }

    for (String id : getDependencies(ideaPlugin)) {
      for (IdeaPlugin dep : byId.get(id)) {
        collectDependencies(dep, result, byId);
      }
    }
  }

  /**
   * Registers one collection child for {@code extensionPoint} when its effective name
   * (pluginId-qualified {@code name}, or explicit {@code qualifiedName}) starts with
   * {@code prefix}; the prefix is stripped from the registered XML name.
   */
  private static void registerExtensionPoint(final DomExtensionsRegistrar registrar,
                                             final ExtensionPoint extensionPoint,
                                             String prefix,
                                             @Nullable String pluginId) {
    String epName = extensionPoint.getName().getStringValue();
    if (epName != null && StringUtil.isNotEmpty(pluginId)) epName = pluginId + "." + epName;
    if (epName == null) epName = extensionPoint.getQualifiedName().getStringValue();
    if (epName == null) return;
    if (!epName.startsWith(prefix)) return;

    final DomExtension domExtension = registrar.registerCollectionChildrenExtension(new XmlName(epName.substring(prefix.length())), Extension.class);
    domExtension.setDeclaringElement(extensionPoint);
    domExtension.addExtender(EXTENSION_EXTENDER);
  }

  /** Registers a DOM child for every xmlb-bound field of {@code beanClass} (including inherited fields). */
  private static void registerXmlb(final DomExtensionsRegistrar registrar, @Nullable final PsiClass beanClass, @NotNull List<With> elements) {
    if (beanClass == null) return;

    for (PsiField field : beanClass.getAllFields()) {
      registerField(registrar, field, findWithElement(elements, field));
    }
  }

  /** Finds the {@code <with>} element whose {@code attribute} matches the field's name, if any. */
  @Nullable
  public static With findWithElement(List<With> elements, PsiField field) {
    for (With element : elements) {
      if (field.getName().equals(element.getAttribute().getStringValue())) {
        return element;
      }
    }
    return null;
  }

  /**
   * Registers the DOM child corresponding to one bean field, honoring xmlb annotations:
   * {@code @Attribute} -> attribute child; {@code @Tag}/{@code @Property(surroundWithTag)}
   * -> fixed tag child; {@code @AbstractCollection} -> nested collection children.
   * Fields are only considered when public or exposed through a getter/setter pair.
   */
  private static void registerField(final DomExtensionsRegistrar registrar, @NotNull final PsiField field, With withElement) {
    final PsiMethod getter = PropertyUtil.findGetterForField(field);
    final PsiMethod setter = PropertyUtil.findSetterForField(field);
    if (!field.hasModifierProperty(PsiModifier.PUBLIC) && (getter == null || setter == null)) {
      return;
    }

    final String fieldName = field.getName();
    final PsiConstantEvaluationHelper evalHelper = JavaPsiFacade.getInstance(field.getProject()).getConstantEvaluationHelper();
    final PsiAnnotation attrAnno = findAnnotation(Attribute.class, field, getter, setter);
    if (attrAnno != null) {
      final String attrName = getStringAttribute(attrAnno, "value", evalHelper);
      if (attrName != null) {
        // Attribute value type: class reference for class-ish/with-bound fields,
        // Boolean for boolean fields, plain String otherwise.
        Class clazz = String.class;
        if (withElement != null || isClassField(fieldName)) {
          clazz = PsiClass.class;
        }
        else if (field.getType() == PsiType.BOOLEAN) {
          clazz = Boolean.class;
        }
        final DomExtension extension =
          registrar.registerGenericAttributeValueChildExtension(new XmlName(attrName), clazz).setDeclaringElement(field);
        markAsClass(extension, fieldName, withElement);
      }
      return;
    }

    final PsiAnnotation tagAnno = findAnnotation(Tag.class, field, getter, setter);
    final PsiAnnotation propAnno = findAnnotation(Property.class, field, getter, setter);
    final PsiAnnotation absColAnno = findAnnotation(AbstractCollection.class, field, getter, setter);
    //final PsiAnnotation colAnno = modifierList.findAnnotation(Collection.class.getName()); // todo
    final String tagName = tagAnno != null ? getStringAttribute(tagAnno, "value", evalHelper) :
                           propAnno != null && getBooleanAttribute(propAnno, "surroundWithTag", evalHelper) ? Constants.OPTION : null;
    if (tagName != null) {
      if (absColAnno == null) {
        // simple tag with text content
        final DomExtension extension =
          registrar.registerFixedNumberChildExtension(new XmlName(tagName), SimpleTagValue.class).setDeclaringElement(field);
        markAsClass(extension, fieldName, withElement);
      }
      else {
        // collection surrounded by the tag: collection items are registered lazily inside the tag
        registrar.registerFixedNumberChildExtension(new XmlName(tagName), DomElement.class).addExtender(new DomExtender() {
          @Override
          public void registerExtensions(@NotNull DomElement domElement, @NotNull DomExtensionsRegistrar registrar) {
            registerCollectionBinding(field.getType(), registrar, absColAnno, evalHelper);
          }
        });
      }
    }
    else if (absColAnno != null) {
      // un-surrounded collection: items appear directly under the extension element
      registerCollectionBinding(field.getType(), registrar, absColAnno, evalHelper);
    }
  }

  /**
   * Attaches class-reference semantics to the extension: an ExtendClass constraint from
   * the matching {@code <with implements="...">} element, and the class converter for
   * with-bound or class-named fields.
   */
  private static void markAsClass(DomExtension extension, String fieldName, @Nullable With withElement) {
    if (withElement != null) {
      final String withClassName = withElement.getImplements().getStringValue();
      extension.addCustomAnnotation(new ExtendClassImpl() {
        @Override
        public String value() {
          return withClassName;
        }
      });
    }
    if (isClassField(fieldName) || withElement != null) {
      extension.setConverter(CLASS_CONVERTER);
    }
  }

  /** Heuristic: field names ending in "Class" (except "forClass") or named "implementation" hold class names. */
  public static boolean isClassField(String fieldName) {
    return (fieldName.endsWith("Class") && !fieldName.equals("forClass")) || fieldName.equals("implementation");
  }

  /** Returns the first occurrence of {@code annotationClass} on any of the given members (nulls skipped). */
  @Nullable
  static PsiAnnotation findAnnotation(final Class<?> annotationClass, PsiMember... members) {
    for (PsiMember member : members) {
      if (member != null) {
        final PsiModifierList modifierList = member.getModifierList();
        if (modifierList != null) {
          final PsiAnnotation annotation = modifierList.findAnnotation(annotationClass.getName());
          if (annotation != null) {
            return annotation;
          }
        }
      }
    }
    return null;
  }

  /**
   * Registers the children produced by an {@code @AbstractCollection} binding:
   * {@code elementTag} without {@code elementValueAttribute} -> simple tag items;
   * both present -> tag items carrying the value attribute; otherwise the item element
   * is derived from the collection's element class ({@code @Tag} or class name).
   */
  private static void registerCollectionBinding(PsiType type,
                                                DomExtensionsRegistrar registrar,
                                                PsiAnnotation anno,
                                                PsiConstantEvaluationHelper evalHelper) {
    final boolean surroundWithTag = getBooleanAttribute(anno, "surroundWithTag", evalHelper);
    if (surroundWithTag) return; // todo Set, List, Array

    final String tagName = getStringAttribute(anno, "elementTag", evalHelper);
    final String attrName = getStringAttribute(anno, "elementValueAttribute", evalHelper);
    final PsiClass psiClass = getElementType(type);
    if (tagName != null && attrName == null) {
      registrar.registerCollectionChildrenExtension(new XmlName(tagName), SimpleTagValue.class);
    }
    else if (tagName != null) {
      registrar.registerCollectionChildrenExtension(new XmlName(tagName), DomElement.class).addExtender(new DomExtender() {
        @Override
        public void registerExtensions(@NotNull DomElement domElement, @NotNull DomExtensionsRegistrar registrar) {
          registrar.registerGenericAttributeValueChildExtension(new XmlName(attrName), String.class);
        }
      });
    }
    else if (psiClass != null) {
      final PsiModifierList modifierList = psiClass.getModifierList();
      final PsiAnnotation tagAnno = modifierList == null ? null : modifierList.findAnnotation(Tag.class.getName());
      final String classTagName = tagAnno == null ? psiClass.getName() : getStringAttribute(tagAnno, "value", evalHelper);
      if (classTagName != null) {
        registrar.registerCollectionChildrenExtension(new XmlName(classTagName), DomElement.class).addExtender(new DomExtender() {
          @Override
          public void registerExtensions(@NotNull DomElement domElement, @NotNull DomExtensionsRegistrar registrar) {
            // item element structure comes from the element class's own xmlb annotations
            registerXmlb(registrar, psiClass, Collections.<With>emptyList());
          }
        });
      }
    }
  }

  /**
   * Reads a String-valued annotation attribute: literal value first, then constant
   * expression evaluation; returns null for missing/empty values.
   */
  @Nullable
  static String getStringAttribute(final PsiAnnotation annotation,
                                   final String name,
                                   final PsiConstantEvaluationHelper evalHelper) {
    String value = getAttributeValue(annotation, name);
    if (value != null) return value;
    final Object o = evalHelper.computeConstantExpression(annotation.findAttributeValue(name), false);
    return o instanceof String && StringUtil.isNotEmpty((String)o) ? (String)o : null;
  }

  /** Reads a boolean-valued annotation attribute; false when absent or non-constant. */
  private static boolean getBooleanAttribute(final PsiAnnotation annotation,
                                             final String name,
                                             final PsiConstantEvaluationHelper evalHelper) {
    String value = getAttributeValue(annotation, name);
    if (value != null) return Boolean.parseBoolean(value);
    final Object o = evalHelper.computeConstantExpression(annotation.findAttributeValue(name), false);
    return o instanceof Boolean && ((Boolean)o).booleanValue();
  }

  /** Literal text of the named annotation attribute, or null when absent or not a literal. */
  @Nullable
  private static String getAttributeValue(PsiAnnotation annotation, String name) {
    for (PsiNameValuePair attribute : annotation.getParameterList().getAttributes()) {
      if (name.equals(attribute.getName())) {
        return attribute.getLiteralValue();
      }
    }
    return null;
  }

  /**
   * Element class of a collection-like type: array component type, or the single type
   * argument of a class type; null otherwise (e.g. raw or multi-parameter types).
   */
  @Nullable
  public static PsiClass getElementType(final PsiType psiType) {
    final PsiType elementType;
    if (psiType instanceof PsiArrayType) elementType = ((PsiArrayType)psiType).getComponentType();
    else if (psiType instanceof PsiClassType) {
      final PsiType[] types = ((PsiClassType)psiType).getParameters();
      elementType = types.length == 1 ? types[0] : null;
    }
    else elementType = null;
    return PsiTypesUtil.getPsiClass(elementType);
  }

  /**
   * Ids of plugins this plugin depends on: the core plugin id, all {@code <depends>}
   * values, and — for id-less descriptors that are not plugin.xml themselves — the id
   * and dependencies of the sibling main plugin.xml (covers optional-dependency
   * descriptor files living next to the main descriptor).
   */
  public static Collection<String> getDependencies(IdeaPlugin ideaPlugin) {
    Set<String> result = new HashSet<String>();

    result.add(PluginManager.CORE_PLUGIN_ID);

    for (Dependency dependency : ideaPlugin.getDependencies()) {
      ContainerUtil.addIfNotNull(dependency.getStringValue(), result);
    }

    if (ideaPlugin.getPluginId() == null) {
      final VirtualFile file = DomUtil.getFile(ideaPlugin).getOriginalFile().getVirtualFile();
      if (file != null) {
        final String fileName = file.getName();
        if (!"plugin.xml".equals(fileName)) {
          final VirtualFile mainPluginXml = file.findFileByRelativePath("../plugin.xml");
          if (mainPluginXml != null) {
            final PsiFile psiFile = PsiManager.getInstance(ideaPlugin.getManager().getProject()).findFile(mainPluginXml);
            if (psiFile instanceof XmlFile) {
              final XmlFile xmlFile = (XmlFile)psiFile;
              final DomFileElement<IdeaPlugin> fileElement = ideaPlugin.getManager().getFileElement(xmlFile, IdeaPlugin.class);
              if (fileElement != null) {
                final IdeaPlugin mainPlugin = fileElement.getRootElement();
                ContainerUtil.addIfNotNull(mainPlugin.getPluginId(), result);
                for (Dependency dependency : mainPlugin.getDependencies()) {
                  ContainerUtil.addIfNotNull(dependency.getStringValue(), result);
                }
              }
            }
          }
        }
      }
    }

    return result;
  }

  /** DOM view of a tag whose only content is its text value. */
  interface SimpleTagValue extends DomElement {
    @SuppressWarnings("UnusedDeclaration")
    @TagValue
    String getTagValue();
  }
}
/*
 * Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.switchyard.component.sca;

import javax.transaction.Transaction;
import javax.xml.namespace.QName;

import org.jboss.jbossts.txbridge.outbound.OutboundBridge;
import org.jboss.jbossts.txbridge.outbound.OutboundBridgeManager;
import org.jboss.logging.Logger;
import org.oasis_open.docs.ws_tx.wscoor._2006._06.CoordinationContextType;
import org.switchyard.Context;
import org.switchyard.Exchange;
import org.switchyard.ExchangePattern;
import org.switchyard.ExchangeState;
import org.switchyard.HandlerException;
import org.switchyard.Message;
import org.switchyard.Scope;
import org.switchyard.ServiceReference;
import org.switchyard.SwitchYardException;
import org.switchyard.common.type.Classes;
import org.switchyard.component.common.SynchronousInOutHandler;
import org.switchyard.config.model.composite.SCABindingModel;
import org.switchyard.deploy.BaseServiceHandler;
import org.switchyard.deploy.internal.Deployment;
import org.switchyard.label.BehaviorLabel;
import org.switchyard.remote.RemoteMessage;
import org.switchyard.remote.RemoteRegistry;
import org.switchyard.remote.cluster.ClusteredInvoker;
import org.switchyard.remote.cluster.LoadBalanceStrategy;
import org.switchyard.remote.cluster.RandomStrategy;
import org.switchyard.remote.cluster.RoundRobinStrategy;
import org.switchyard.remote.http.HttpInvokerLabel;
import org.switchyard.runtime.event.ExchangeCompletionEvent;

import com.arjuna.mw.wst11.TransactionManagerFactory;
import com.arjuna.mwlabs.wst11.at.context.TxContextImple;

/**
 * Handles outbound communication to an SCA service endpoint.
 * <p>
 * Invocations are dispatched either to a service in the local domain or, when the
 * binding is clustered, to a remote instance via a {@link ClusteredInvoker}. Remote
 * invocations optionally propagate the active JTA transaction through the WS-AT
 * transaction bridge.
 */
public class SCAInvoker extends BaseServiceHandler {

    /** prefix for the context property. */
    public static final String CONTEXT_PROPERTY_PREFIX = "org.switchyard.component.sca.";
    /** key for endpoint service name. */
    public static final String KEY_TARGET_SERVICE = "targetService";
    /** key for namespace of the endpoint service. */
    public static final String KEY_TARGET_NAMESPACE = "targetNamespace";

    private static final Logger _log = Logger.getLogger(SCAInvoker.class);

    private final String _bindingName;
    private final String _referenceName;
    private final String _targetService;
    private final String _targetNamespace;
    private final boolean _clustered;
    private final boolean _preferLocal;
    private ClusteredInvoker _invoker;
    private boolean _disableRemoteTransaction = false;
    private final TransactionContextSerializer _txSerializer = new TransactionContextSerializer();

    /**
     * Create a new SCAInvoker for invoking local endpoints.
     * @param config binding configuration model
     */
    public SCAInvoker(SCABindingModel config) {
        _bindingName = config.getName();
        _referenceName = config.getReference().getName();
        _targetService = config.getTarget();
        _targetNamespace = config.getTargetNamespace();
        _clustered = config.isClustered();
        _preferLocal = config.isPreferLocal();
    }

    /**
     * Create a new SCAInvoker capable of invoking remote service endpoints.
     * @param config binding configuration model
     * @param registry registry of remote services
     */
    public SCAInvoker(SCABindingModel config, RemoteRegistry registry) {
        this(config);
        if (config.isLoadBalanced()) {
            LoadBalanceStrategy loadBalancer = createLoadBalancer(config.getLoadBalance());
            _invoker = new ClusteredInvoker(registry, loadBalancer);
        } else {
            _invoker = new ClusteredInvoker(registry);
        }
    }

    /**
     * Routes the exchange to its target service: locally when possible (or required),
     * remotely when the binding is clustered and no preferred local endpoint exists.
     * @param exchange the exchange to dispatch
     * @throws HandlerException if the binding is not started, the target service is
     *         not found in the local domain, or the invocation fails
     */
    @Override
    public void handleMessage(Exchange exchange) throws HandlerException {
        // identify ourselves
        exchange.getContext().setProperty(ExchangeCompletionEvent.GATEWAY_NAME, _bindingName, Scope.EXCHANGE)
                .addLabels(BehaviorLabel.TRANSIENT.label());
        if (getState() != State.STARTED) {
            throw SCAMessages.MESSAGES.referenceBindingNotStarted(_referenceName, _bindingName);
        }
        try {
            // Figure out the QName for the service we're invoking
            QName serviceName = getTargetServiceName(exchange);
            // Get a handle for the reference and use a copy of the exchange to invoke it
            ServiceReference ref = exchange.getProvider().getDomain().getServiceReference(serviceName);
            if (_clustered) {
                // check to see if local is preferred and available
                if (_preferLocal && ref != null) {
                    invokeLocal(exchange, ref);
                } else {
                    invokeRemote(exchange, serviceName);
                }
            } else {
                if (ref == null) {
                    throw SCAMessages.MESSAGES.serviceReferenceNotFoundInDomain(serviceName.toString(),
                            exchange.getProvider().getDomain().getName().toString());
                }
                invokeLocal(exchange, ref);
            }
        } catch (SwitchYardException syEx) {
            // wrap the exception itself (not just its message) so the original
            // stack trace and cause chain are preserved for callers
            throw new HandlerException(syEx);
        }
    }

    /**
     * Set if remote transaction bridging should be disabled.
     * @param disable true if it disables remote transaction
     * @return this SCAInvoker instance (useful for method chaining)
     */
    public SCAInvoker setDisableRemoteTransaction(boolean disable) {
        _disableRemoteTransaction = disable;
        return this;
    }

    // This method exists for test purposes and should not be used at runtime.  Initialization
    // of the invoker instance occurs in the constructor for SCAInvoker.
    void setInvoker(ClusteredInvoker invoker) {
        _invoker = invoker;
    }

    /**
     * Invokes a service in the local domain using a copied exchange, then relays the
     * reply (or fault) back onto the original exchange.
     * @param exchange the originating exchange
     * @param targetRef reference to the local target service
     * @throws HandlerException on a runtime fault for an in-only exchange
     */
    private void invokeLocal(Exchange exchange, ServiceReference targetRef) throws HandlerException {
        SynchronousInOutHandler replyHandler = new SynchronousInOutHandler();
        Exchange ex = targetRef.createExchange(exchange.getContract().getProviderOperation().getName(), replyHandler);

        // Can't send same message twice, so make a copy
        Message invokeMsg = exchange.getMessage().copy();
        exchange.getContext().mergeInto(invokeMsg.getContext());

        // Since this invocation may cross application boundaries, we need to set the TCCL
        // based on the target service's application class loader
        ClassLoader origCL = null;
        try {
            ClassLoader targetCL = (ClassLoader) targetRef.getDomain().getProperty(Deployment.CLASSLOADER_PROPERTY);
            origCL = Classes.setTCCL(targetCL);
            ex.send(invokeMsg);
        } finally {
            if (origCL != null) {
                Classes.setTCCL(origCL);
            }
        }

        if (ExchangePattern.IN_OUT.equals(ex.getPattern())) {
            replyHandler.waitForOut();
            if (ex.getMessage() != null) {
                Message replyMsg = ex.getMessage().copy();
                ex.getContext().mergeInto(replyMsg.getContext());
                if (ExchangeState.FAULT.equals(ex.getState())) {
                    exchange.sendFault(replyMsg);
                } else {
                    exchange.send(replyMsg);
                }
            }
        } else if (ExchangeState.FAULT.equals(ex.getState())) {
            // Even though this is in-only, we need to report a runtime fault on send
            throw createHandlerException(ex.getMessage());
        }
    }

    /**
     * Invokes a remote instance of the target service through the clustered invoker,
     * bridging the active JTA transaction over WS-AT when enabled.
     * @param exchange the originating exchange
     * @param serviceName the fully-qualified target service name
     * @throws HandlerException on transaction-bridging failures or remote runtime faults
     */
    private void invokeRemote(Exchange exchange, QName serviceName) throws HandlerException {
        RemoteMessage request = new RemoteMessage()
                .setDomain(exchange.getProvider().getDomain().getName())
                .setService(serviceName)
                .setOperation(exchange.getContract().getConsumerOperation().getName())
                .setContent(exchange.getMessage().getContent());
        exchange.getContext().mergeInto(request.getContext());

        boolean transactionPropagated = bridgeOutgoingTransaction(request);

        try {
            RemoteMessage reply = _invoker.invoke(request);
            if (transactionPropagated) {
                bridgeIncomingTransaction();
            }

            if (reply == null) {
                return;
            }

            if (ExchangePattern.IN_OUT.equals(exchange.getPattern())) {
                Message msg = exchange.createMessage();
                msg.setContent(reply.getContent());
                Context replyCtx = reply.getContext();
                if (replyCtx != null) {
                    replyCtx.mergeInto(exchange.getContext());
                }
                if (reply.isFault()) {
                    exchange.sendFault(msg);
                } else {
                    exchange.send(msg);
                }
            } else {
                // still need to account for runtime exceptions on in-only
                if (reply.isFault()) {
                    throw createHandlerException(reply.getContent());
                }
            }
        } catch (java.io.IOException ioEx) {
            // log through the component logger (with stack trace) rather than
            // printStackTrace(), then surface the failure as a fault
            _log.error("Remote invocation of service " + serviceName + " failed", ioEx);
            exchange.sendFault(exchange.createMessage().setContent(ioEx));
        }
    }

    /**
     * Resolves the target service QName: context properties override the binding's
     * configured target/targetNamespace, which in turn default to the provider's name.
     * @param exchange the exchange being dispatched
     * @return the effective target service QName
     */
    private QName getTargetServiceName(Exchange exchange) {
        // Figure out the QName for the service we're invoking.
        QName service = exchange.getProvider().getName();

        // Overriding target service name if it's specified in context property.
        String targetName = exchange.getContext().getPropertyValue(CONTEXT_PROPERTY_PREFIX + KEY_TARGET_SERVICE);
        if (targetName == null) {
            targetName = _targetService != null ? _targetService : service.getLocalPart();
        }
        String targetNS = exchange.getContext().getPropertyValue(CONTEXT_PROPERTY_PREFIX + KEY_TARGET_NAMESPACE);
        if (targetNS == null) {
            targetNS = _targetNamespace != null ? _targetNamespace : service.getNamespaceURI();
        }
        return new QName(targetNS, targetName);
    }

    /**
     * Starts the outbound JTA-to-WS-AT bridge for the current transaction (if any) and
     * embeds the serialized WS-AT coordination context in the request header.
     * @param request the outgoing remote message
     * @return true if a transaction context was propagated, false otherwise
     * @throws HandlerException if the bridge cannot be started or the context serialized
     */
    private boolean bridgeOutgoingTransaction(RemoteMessage request) throws HandlerException {
        if (_disableRemoteTransaction) {
            return false;
        }

        Transaction currentTransaction = null;
        try {
            currentTransaction = com.arjuna.ats.jta.TransactionManager.transactionManager().getTransaction();
        } catch (Throwable t) {
            // transaction manager may be unavailable in this runtime - treat as "no transaction"
            if (_log.isDebugEnabled()) {
                _log.debug(t);
            }
        }
        if (currentTransaction == null) {
            return false;
        }

        try {
            // create/resume subordinate WS-AT transaction
            OutboundBridge txOutboundBridge = OutboundBridgeManager.getOutboundBridge();
            if (txOutboundBridge == null) {
                return false;
            }
            txOutboundBridge.start();

            // embed WS-AT transaction context into request header
            final com.arjuna.mw.wst11.TransactionManager wsatManager = TransactionManagerFactory.transactionManager();
            CoordinationContextType coordinationContext = null;
            if (wsatManager != null) {
                final TxContextImple txContext = (TxContextImple)wsatManager.currentTransaction();
                if (txContext != null) {
                    coordinationContext = txContext.context().getCoordinationContext();
                }
            }
            if (coordinationContext != null) {
                String txContextString = _txSerializer.serialise(coordinationContext);
                if (_log.isDebugEnabled()) {
                    _log.debug("Embedding transaction context into request header: " + txContextString);
                }
                request.getContext()
                       .setProperty(TransactionContextSerializer.HEADER_TXCONTEXT, txContextString)
                       .addLabels(BehaviorLabel.TRANSIENT.label(), HttpInvokerLabel.HEADER.label());
            }
            return true;
        } catch (final Throwable th) {
            throw createHandlerException(th);
        }
    }

    /**
     * Disassociates the subordinate WS-AT transaction started by
     * {@link #bridgeOutgoingTransaction(RemoteMessage)} once the reply has arrived.
     * @throws HandlerException if stopping the bridge fails
     */
    private void bridgeIncomingTransaction() throws HandlerException {
        // disassociate subordinate WS-AT transaction
        OutboundBridge txOutboundBridge = OutboundBridgeManager.getOutboundBridge();
        if (txOutboundBridge != null) {
            try {
                txOutboundBridge.stop();
            } catch (Exception e) {
                throw createHandlerException(e);
            }
        }
    }

    private HandlerException createHandlerException(Message message) {
        return createHandlerException(message == null ? null : message.getContent());
    }

    /**
     * Normalizes arbitrary fault content into a HandlerException, preserving the
     * original exception (and its cause chain) when the content is a Throwable.
     */
    private HandlerException createHandlerException(Object content) {
        HandlerException ex;
        if (content == null) {
            ex = SCAMessages.MESSAGES.runtimeFaultOccurredWithoutExceptionDetails();
        } else if (content instanceof HandlerException) {
            ex = (HandlerException)content;
        } else if (content instanceof Throwable) {
            ex = new HandlerException((Throwable)content);
        } else {
            ex = new HandlerException(content.toString());
        }
        return ex;
    }

    /**
     * Creates the load-balance strategy named in the binding config: the simple name of
     * a built-in strategy, or a fully-qualified class implementing LoadBalanceStrategy.
     */
    LoadBalanceStrategy createLoadBalancer(String strategy) {
        if (RoundRobinStrategy.class.getSimpleName().equals(strategy)) {
            return new RoundRobinStrategy();
        } else if (RandomStrategy.class.getSimpleName().equals(strategy)) {
            return new RandomStrategy();
        } else {
            try {
                Class<?> strategyClass = Class.forName(strategy);
                if (!LoadBalanceStrategy.class.isAssignableFrom(strategyClass)) {
                    throw SCAMessages.MESSAGES.loadBalanceClassDoesNotImplementLoadBalanceStrategy(strategy);
                }
                return (LoadBalanceStrategy)strategyClass.newInstance();
            } catch (Exception ex) {
                throw SCAMessages.MESSAGES.unableToInstantiateStrategyClass(strategy, ex);
            }
        }
    }
}
package org.jboss.resteasy.specimpl;

import org.jboss.resteasy.core.Headers;
import org.jboss.resteasy.spi.HttpRequest;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.jboss.resteasy.util.HeaderHelper;
import org.jboss.resteasy.util.HttpHeaderNames;

import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.EntityTag;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Link;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.NewCookie;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Variant;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.TimeZone;

/**
 * Default {@link Response.ResponseBuilder} implementation. Accumulates a
 * status code, an entity (with optional annotations) and response headers,
 * then assembles them into a {@link BuiltResponse}.
 * <p>
 * Header-mutating methods follow the JAX-RS convention: passing {@code null}
 * removes the header, a non-null value sets (or, for {@code header()} and
 * {@code cookie()}, appends) it.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class ResponseBuilderImpl extends Response.ResponseBuilder
{
   protected Object entity;
   protected Annotation[] entityAnnotations;
   protected int status = -1;
   protected Headers<Object> metadata = new Headers<Object>();

   /**
    * Builds the response. When no status was set explicitly, defaults to
    * 204 (No Content) if there is no entity and 200 (OK) otherwise.
    */
   @Override
   public Response build()
   {
      if (status == -1 && entity == null) status = 204;
      else if (status == -1) status = 200;
      return new BuiltResponse(status, metadata, entity, entityAnnotations);
   }

   /** Shallow copy: headers map is copied, entity/annotations are shared. */
   @Override
   public Response.ResponseBuilder clone()
   {
      ResponseBuilderImpl impl = new ResponseBuilderImpl();
      impl.metadata.putAll(metadata);
      impl.entity = entity;
      impl.status = status;
      impl.entityAnnotations = entityAnnotations;
      return impl;
   }

   @Override
   public Response.ResponseBuilder status(int status)
   {
      this.status = status;
      return this;
   }

   @Override
   public Response.ResponseBuilder entity(Object entity)
   {
      this.entity = entity;
      return this;
   }

   @Override
   public Response.ResponseBuilder entity(Object entity, Annotation[] annotations)
   {
      this.entity = entity;
      this.entityAnnotations = annotations;
      return this;
   }

   @Override
   public Response.ResponseBuilder type(MediaType type)
   {
      if (type == null)
      {
         metadata.remove(HttpHeaderNames.CONTENT_TYPE);
         return this;
      }
      metadata.putSingle(HttpHeaderNames.CONTENT_TYPE, type);
      return this;
   }

   @Override
   public Response.ResponseBuilder type(String type)
   {
      if (type == null)
      {
         metadata.remove(HttpHeaderNames.CONTENT_TYPE);
         return this;
      }
      metadata.putSingle(HttpHeaderNames.CONTENT_TYPE, type);
      return this;
   }

   /**
    * Sets Content-Type, Content-Language and Content-Encoding from the
    * variant; a null variant clears all three.
    */
   @Override
   public Response.ResponseBuilder variant(Variant variant)
   {
      if (variant == null)
      {
         type((String) null);
         language((String) null);
         metadata.remove(HttpHeaderNames.CONTENT_ENCODING);
         return this;
      }
      type(variant.getMediaType());
      language(variant.getLanguage());
      if (variant.getEncoding() != null) metadata.putSingle(HttpHeaderNames.CONTENT_ENCODING, variant.getEncoding());
      else metadata.remove(HttpHeaderNames.CONTENT_ENCODING);
      return this;
   }

   /**
    * Sets the Vary header from the attributes present across the given
    * variants; a null list clears it.
    */
   @Override
   public Response.ResponseBuilder variants(List<Variant> variants)
   {
      if (variants == null)
      {
         metadata.remove(HttpHeaderNames.VARY);
         return this;
      }
      String vary = createVaryHeader(variants);
      // Fix: when no variant carries any attribute, remove the header instead
      // of storing a null value under Vary.
      if (vary == null) metadata.remove(HttpHeaderNames.VARY);
      else metadata.putSingle(HttpHeaderNames.VARY, vary);
      return this;
   }

   /**
    * Computes the Vary header value from the attributes present in the given
    * variants: Accept, Accept-Language and/or Accept-Encoding.
    *
    * @param variants variants to inspect (must not be null)
    * @return comma-separated header value, or null if no variant carries a
    *         media type, language or encoding
    */
   public static String createVaryHeader(List<Variant> variants)
   {
      boolean accept = false;
      boolean acceptLanguage = false;
      boolean acceptEncoding = false;

      for (Variant variant : variants)
      {
         if (variant.getMediaType() != null) accept = true;
         if (variant.getLanguage() != null) acceptLanguage = true;
         if (variant.getEncoding() != null) acceptEncoding = true;
      }

      StringBuilder vary = new StringBuilder();
      if (accept) vary.append(HttpHeaderNames.ACCEPT);
      if (acceptLanguage)
      {
         if (vary.length() > 0) vary.append(", ");
         vary.append(HttpHeaderNames.ACCEPT_LANGUAGE);
      }
      if (acceptEncoding)
      {
         if (vary.length() > 0) vary.append(", ");
         vary.append(HttpHeaderNames.ACCEPT_ENCODING);
      }
      return vary.length() == 0 ? null : vary.toString();
   }

   @Override
   public Response.ResponseBuilder language(String language)
   {
      if (language == null)
      {
         metadata.remove(HttpHeaderNames.CONTENT_LANGUAGE);
         return this;
      }
      metadata.putSingle(HttpHeaderNames.CONTENT_LANGUAGE, language);
      return this;
   }

   /**
    * Sets Location. A relative URI is resolved against the current request's
    * base URI when an HttpRequest is available in the thread context.
    */
   @Override
   public Response.ResponseBuilder location(URI location)
   {
      if (location == null)
      {
         metadata.remove(HttpHeaderNames.LOCATION);
         return this;
      }
      if (!location.isAbsolute() && ResteasyProviderFactory.getContextData(HttpRequest.class) != null)
      {
         String path = location.toString();
         if (path.startsWith("/")) path = path.substring(1);
         URI baseUri = ResteasyProviderFactory.getContextData(HttpRequest.class).getUri().getBaseUri();
         location = baseUri.resolve(path);
      }
      metadata.putSingle(HttpHeaderNames.LOCATION, location);
      return this;
   }

   /**
    * Sets Content-Location, resolving relative URIs the same way as
    * {@link #location(URI)}.
    */
   @Override
   public Response.ResponseBuilder contentLocation(URI location)
   {
      if (location == null)
      {
         metadata.remove(HttpHeaderNames.CONTENT_LOCATION);
         return this;
      }
      if (!location.isAbsolute() && ResteasyProviderFactory.getContextData(HttpRequest.class) != null)
      {
         String path = location.toString();
         if (path.startsWith("/")) path = path.substring(1);
         URI baseUri = ResteasyProviderFactory.getContextData(HttpRequest.class).getUri().getBaseUri();
         location = baseUri.resolve(path);
      }
      metadata.putSingle(HttpHeaderNames.CONTENT_LOCATION, location);
      return this;
   }

   @Override
   public Response.ResponseBuilder tag(EntityTag tag)
   {
      if (tag == null)
      {
         metadata.remove(HttpHeaderNames.ETAG);
         return this;
      }
      metadata.putSingle(HttpHeaderNames.ETAG, tag);
      return this;
   }

   @Override
   public Response.ResponseBuilder tag(String tag)
   {
      if (tag == null)
      {
         metadata.remove(HttpHeaderNames.ETAG);
         return this;
      }
      metadata.putSingle(HttpHeaderNames.ETAG, tag);
      return this;
   }

   @Override
   public Response.ResponseBuilder lastModified(Date lastModified)
   {
      if (lastModified == null)
      {
         metadata.remove(HttpHeaderNames.LAST_MODIFIED);
         return this;
      }
      metadata.putSingle(HttpHeaderNames.LAST_MODIFIED, lastModified);
      return this;
   }

   @Override
   public Response.ResponseBuilder cacheControl(CacheControl cacheControl)
   {
      if (cacheControl == null)
      {
         metadata.remove(HttpHeaderNames.CACHE_CONTROL);
         return this;
      }
      metadata.putSingle(HttpHeaderNames.CACHE_CONTROL, cacheControl);
      return this;
   }

   /**
    * Appends an arbitrary header value; null removes all values for the
    * header. Note: appends (add), does not replace existing values.
    */
   @Override
   public Response.ResponseBuilder header(String name, Object value)
   {
      if (value == null)
      {
         metadata.remove(name);
         return this;
      }
      metadata.add(name, value);
      return this;
   }

   @Override
   public Response.ResponseBuilder cookie(NewCookie... cookies)
   {
      if (cookies == null)
      {
         metadata.remove(HttpHeaderNames.SET_COOKIE);
         return this;
      }
      for (NewCookie cookie : cookies)
      {
         metadata.add(HttpHeaderNames.SET_COOKIE, cookie);
      }
      return this;
   }

   public Response.ResponseBuilder language(Locale language)
   {
      if (language == null)
      {
         metadata.remove(HttpHeaderNames.CONTENT_LANGUAGE);
         return this;
      }
      metadata.putSingle(HttpHeaderNames.CONTENT_LANGUAGE, language);
      return this;
   }

   /**
    * Returns a fresh RFC 822 date formatter in GMT. A new instance is created
    * per call because SimpleDateFormat is not thread-safe.
    */
   public static SimpleDateFormat getDateFormatRFC822()
   {
      SimpleDateFormat dateFormatRFC822 = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z", Locale.US);
      dateFormatRFC822.setTimeZone(TimeZone.getTimeZone("GMT"));
      return dateFormatRFC822;
   }

   /** Sets Expires as an RFC 822 formatted string; null removes it. */
   public Response.ResponseBuilder expires(Date expires)
   {
      if (expires == null)
      {
         metadata.remove(HttpHeaderNames.EXPIRES);
         return this;
      }
      metadata.putSingle(HttpHeaderNames.EXPIRES, getDateFormatRFC822().format(expires));
      return this;
   }

   // spec
   public Response.ResponseBuilder allow(String... methods)
   {
      if (methods == null)
      {
         return allow((Set<String>) null);
      }
      return allow(new HashSet<String>(Arrays.asList(methods)));
   }

   public Response.ResponseBuilder allow(Set<String> methods)
   {
      HeaderHelper.setAllow(this.metadata, methods);
      return this;
   }

   @Override
   public Response.ResponseBuilder encoding(String encoding)
   {
      if (encoding == null)
      {
         metadata.remove(HttpHeaders.CONTENT_ENCODING);
         return this;
      }
      metadata.putSingle(HttpHeaders.CONTENT_ENCODING, encoding);
      return this;
   }

   @Override
   public Response.ResponseBuilder variants(Variant... variants)
   {
      return this.variants(Arrays.asList(variants));
   }

   /** Replaces all Link headers with the given links. */
   @Override
   public Response.ResponseBuilder links(Link... links)
   {
      metadata.remove(HttpHeaders.LINK);
      for (Link link : links)
      {
         metadata.add(HttpHeaders.LINK, link);
      }
      return this;
   }

   @Override
   public Response.ResponseBuilder link(URI uri, String rel)
   {
      Link link = Link.fromUri(uri).rel(rel).build();
      metadata.add(HttpHeaders.LINK, link);
      return this;
   }

   @Override
   public Response.ResponseBuilder link(String uri, String rel)
   {
      Link link = Link.fromUri(uri).rel(rel).build();
      metadata.add(HttpHeaders.LINK, link);
      return this;
   }

   /** Replaces the whole header map; null clears all headers. */
   @Override
   public Response.ResponseBuilder replaceAll(MultivaluedMap<String, Object> headers)
   {
      metadata.clear();
      if (headers == null) return this;
      metadata.putAll(headers);
      return this;
   }
}
package de.hub.clickwatch.ui.modelactions; import java.util.ArrayList; import java.util.List; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.util.EcoreUtil; import org.eclipse.jface.action.IAction; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Dialog; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; import org.eclipse.ui.IActionDelegate; import org.eclipse.ui.IEditorPart; import org.eclipse.ui.IObjectActionDelegate; import org.eclipse.ui.IViewPart; import org.eclipse.ui.IViewReference; import org.eclipse.ui.IWorkbenchPart; import com.jcraft.jsch.Session; import de.hub.clickwatch.connection.INodeConnection; import de.hub.clickwatch.model.Node; import de.hub.clickwatch.ui.util.SshConnectionFactory; import de.hub.clickwatch.ui.views.ResultView; import de.hub.emfxml.util.EmfXmlUtil; /** * Executes shell commands in parallel on each node using SSH. 
* * @author zubow */ class ExecWorkerThread extends Thread { public String iNodeAddr; public String cmd; public String result; public Exception exception; public long latency; // in ms public ExecWorkerThread(String iNodeAddr, String cmd) { this.iNodeAddr = iNodeAddr; this.cmd = cmd; } public void run() { // remote execute System.out.println("cmd " + cmd + " on node " + iNodeAddr + " called."); try { result = execRemote(iNodeAddr, cmd); } catch (Exception e) { System.err.println("ErrorMsg:" + e.getMessage()); exception = e; } } private String execRemote(String host, String command) throws Exception { // init ssh long start = System.currentTimeMillis(); Session session = SshConnectionFactory.getInstance().createSession(SSHParams.SSH_USER, host); StringBuffer logMsg = SshConnectionFactory.getInstance().execRemote(session, command); log2Sout(logMsg); // close session SshConnectionFactory.getInstance().closeSession(session); latency = System.currentTimeMillis() - start; return logMsg.toString(); } private void log2Sout(StringBuffer sb) { System.out.println(sb.toString()); } } /** * Exec remote code via ssh. 
* @author zubow
*/
public class Execute extends AbstractNodeAction {

    // Result tree from the previous run; deleted when replaced by new results.
    private EObject currentResult = null;

    /**
     * GUI dialog for entering a command to be remotely executed
     */
    public class InputDialog extends Dialog {

        private String cmdMessage; // label shown above the input field
        private String cmdInput;   // entered command; null when cancelled

        /**
         * InputDialog constructor
         */
        public InputDialog(Shell parent) {
            // Pass the default styles here
            this(parent, SWT.DIALOG_TRIM | SWT.APPLICATION_MODAL);
        }

        /**
         * InputDialog constructor
         */
        public InputDialog(Shell parent, int style) {
            // Let users override the default styles
            super(parent, style);
            setText("Node configuration");
            setCmdMessage("Command to execute:");
            setCmdInput("");
        }

        public String getCmdMessage() {
            return cmdMessage;
        }

        public void setCmdMessage(String cmdMessage) {
            this.cmdMessage = cmdMessage;
        }

        public String getCmdInput() {
            return cmdInput;
        }

        public void setCmdInput(String cmdInput) {
            this.cmdInput = cmdInput;
        }

        /**
         * Opens the dialog and blocks, spinning the SWT event loop, until
         * the shell is disposed (Exec or Cancel pressed).
         */
        public void open() {
            // Create the dialog window
            Shell shell = new Shell(getParent(), getStyle());
            shell.setText(getText());
            createContents(shell);
            shell.pack();
            shell.open();
            Display display = getParent().getDisplay();
            while (!shell.isDisposed()) {
                if (!display.readAndDispatch()) {
                    display.sleep();
                }
            }
        }

        /**
         * Creates the dialog's contents: label, command text field, and the
         * Exec/Cancel buttons that close the shell.
         */
        private void createContents(final Shell shell) {
            shell.setLayout(new GridLayout(2, true));
            //
            // command
            Label label = new Label(shell, SWT.NONE);
            label.setText(cmdMessage);
            GridData data = new GridData();
            data.horizontalSpan = 2;
            label.setLayoutData(data);
            // input box
            final Text cmdText = new Text(shell, SWT.BORDER);
            //cmdText.setSize(64, 32);
            cmdText.setText("cat /proc/cpuinfo ");
            data = new GridData(GridData.FILL_HORIZONTAL);
            data.horizontalSpan = 2;
            cmdText.setLayoutData(data);
            // Create the OK button and add a handler
            // so that pressing it will set input
            // to the entered value
            Button ok = new Button(shell, SWT.PUSH);
            ok.setText("Exec");
            data = new GridData(GridData.FILL_HORIZONTAL);
            ok.setLayoutData(data);
            ok.addSelectionListener(new SelectionAdapter() {
                public void widgetSelected(SelectionEvent event) {
                    cmdInput = cmdText.getText();
                    shell.close();
                }
            });
            // Create the cancel button and add a handler
            // so that pressing it will set input to null
            Button cancel = new Button(shell, SWT.PUSH);
            cancel.setText("Cancel");
            data = new GridData(GridData.FILL_HORIZONTAL);
            cancel.setLayoutData(data);
            cancel.addSelectionListener(new SelectionAdapter() {
                public void widgetSelected(SelectionEvent event) {
                    cmdInput = null;
                    shell.close();
                }
            });
            // Set the OK button as the default, so
            // user can type input and press Enter
            // to dismiss
            shell.setDefaultButton(ok);
        }
    }

    /**
     * Constructor for Execute action.
     */
    public Execute() {
        super();
    }

    /**
     * @see IObjectActionDelegate#setActivePart(IAction, IWorkbenchPart)
     */
    @Override
    public void setActivePart(IAction action, IWorkbenchPart targetPart) {
        if (targetPart instanceof IEditorPart) {
            editor = (IEditorPart)targetPart;
            shell = targetPart.getSite().getShell();
        }
    }

    /**
     * Prompts for a command, runs it in parallel (one worker thread per
     * selected node), waits for all workers, then reports results and errors.
     *
     * @see IActionDelegate#run(IAction)
     */
    @Override
    public void run(IAction action) {
        if (selectedObjects == null || selectedObjects.isEmpty()) {
            return;
        }
        // ask user for command to execute
        InputDialog diag = new InputDialog(editor.getSite().getShell());
        diag.open();
        final String cmd = diag.getCmdInput();
        if (cmd == null) {
            // user cancelled the dialog
            return;
        }
        // show exec result directly via popup only if a single node was selected otherwise go into batch mode
        // final boolean show_log = (node_lst.size() == 1) ? true : false;
        // create n parallel execution threads
        ExecWorkerThread[] workerThreads = new ExecWorkerThread[selectedObjects.size()];
        for (int idx=0; idx<selectedObjects.size(); idx++) {
            final Node node = selectedObjects.get(idx);
            // disconnect if connected
            if (node.getConnection() != null) {
                INodeConnection oldConnection = (INodeConnection)node.getConnection();
                node.setConnection(null);
                oldConnection.close();
            }
            workerThreads[idx] = new ExecWorkerThread(node.getINetAddress(), cmd);
            workerThreads[idx].start();
        }
        // sync point: wait until all worker threads are finished
        for (int i=0; i<workerThreads.length; i++) {
            try {
                workerThreads[i].join();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        // collect per-node outcome (parallel lists, same index = same node)
        final List<String> results = new ArrayList<String>();
        final List<Long> latency = new ArrayList<Long>();
        final List<String> nodeNames = new ArrayList<String>();
        final List<Exception> exceptions = new ArrayList<Exception>();
        for (int i=0; i<workerThreads.length; i++) {
            results.add(workerThreads[i].result);
            latency.add(workerThreads[i].latency);
            nodeNames.add(workerThreads[i].iNodeAddr);
            exceptions.add(workerThreads[i].exception);
        }
        // show results in treeview
        showResults(results, nodeNames, latency);
        // show exceptions in popup window
        showExceptions(nodeNames, exceptions);
    }

    /**
     * Update results in resultview: wraps the per-node outputs into a single
     * XML document, deserializes it to EMF, and pushes it into the ResultView.
     * The previously displayed result tree is deleted.
     */
    private void showResults(List<String> results, List<String> nodeNames, List<Long> latency) {
        // create xml from results string
        StringBuffer xmlResults = new StringBuffer();
        xmlResults.append("<network>");
        for (int i=0; i<results.size(); i++) {
            xmlResults.append("<node id='" + nodeNames.get(i) + "' latency='" + latency.get(i) + "'>");
            xmlResults.append(results.get(i));
            xmlResults.append("</node>");
        }
        xmlResults.append("</network>");
        System.out.println("XMl results to display: " + xmlResults.toString());
        EObject result = EmfXmlUtil.deserializeXml(xmlResults.toString());
        for(IViewReference viewRef: editor.getEditorSite().getPage().getViewReferences()) {
            IViewPart view = viewRef.getView(false);
            if (view instanceof ResultView) {
                ((ResultView)view).setInput(result);
                if (currentResult != null) {
                    EcoreUtil.delete(currentResult, true);
                }
                currentResult = result;
            }
        }
    }

    /**
     * Shows a per-node status summary ("OK" or the exception message) in an
     * error popup.
     */
    private void showExceptions(List<String> nodeNames, List<Exception> exceptions) {
        StringBuffer txtExc = new StringBuffer();
        for (int i=0; i<exceptions.size(); i++) {
            txtExc.append("Node ").append(nodeNames.get(i)).append(" -> ");
            if (exceptions.get(i) != null) {
                txtExc.append(exceptions.get(i).getMessage());
            } else {
                txtExc.append("OK");
            }
            txtExc.append("\n");
        }
        MessageDialog.openError(editor.getSite().getShell(), "Result stats", txtExc.toString());
    }
}
// BridgeDb,
// An abstraction layer for identifier mapping services, both local and online.
// Copyright 2006-2009 BridgeDb developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.bridgedb;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/** contains information about a certain DataSource, such as
<ul>
<li>Its full name ("Ensembl")
<li>Its system code ("En")
<li>Its main url ("http://www.ensembl.org")
<li>Id-specific URLs ("http://www.ensembl.org/Homo_sapiens/Gene/Summary?g=" + id)
</ul>

The DataSource class uses the extensible enum pattern.
You can't instantiate DataSources directly, instead you have to use one of
the constants from the org.bridgedb.bio module such as BioDataSource.ENSEMBL,
or the "getBySystemcode" or "getByFullname" methods.
These methods return a predefined DataSource object if it exists.
If a predefined DataSource for a requested SystemCode doesn't exist,
a new one springs to life automatically. This can be used when
the user requests new, unknown data sources. If you call getBySystemCode
twice with the same argument, it is guaranteed that you get the same return
object. However, there is no way to combine a new DataSource with a
new FullName unless you use the "register" method.
<p>
This way any number of pre-defined DataSources can be used,
but plugins can define new ones and you can handle unknown
data sources in the same way as predefined ones.
<p>
Definitions for common DataSources can be found in
{@link org.bridgedb.bio.BioDataSource}.
*/
public final class DataSource
{
	// Global registries. Every DataSource ever created is indexed here so that
	// repeated lookups by code/name/alias/urn base return the same instance.
	private static Map<String, DataSource> bySysCode = new HashMap<String, DataSource>();
	private static Map<String, DataSource> byFullName = new HashMap<String, DataSource>();
	private static Set<DataSource> registry = new HashSet<DataSource>();
	private static Map<String, DataSource> byAlias = new HashMap<String, DataSource>();
	private static Map<String, DataSource> byMiriamBase = new HashMap<String, DataSource>();

	private String sysCode = null;
	private String fullName = null;
	private String mainUrl = null;
	private String prefix = "";   // url pattern part before the id
	private String postfix = "";  // url pattern part after the id
	private Object organism = null;
	private String idExample = null;
	private boolean isPrimary = true;
	private String type = "unknown";
	private String urnBase = "";

	/**
	 * Constructor is private, so that we don't
	 * get any standalone DataSources.
	 * DataSources should be obtained from
	 * {@link getByFullName} or {@link getBySystemCode}. Information about
	 * DataSources can be added with {@link register}
	 */
	private DataSource () {}

	/**
	 * Turn id into url pointing to info page on the web, e.g. "http://www.ensembl.org/get?id=ENSG..."
	 * @param id identifier to use in url
	 * @return Url
	 */
	public String getUrl(String id)
	{
		return prefix + id + postfix;
	}

	/**
	 * returns full name of DataSource e.g. "Ensembl".
	 * May return null if only the system code is known.
	 * Also used as identifier in GPML
	 * @return full name of DataSource
	 */
	public String getFullName()
	{
		return fullName;
	}

	/**
	 * returns GenMAPP SystemCode, e.g. "En". May return null,
	 * if only the full name is known.
	 * Also used as identifier in
	 * <ol>
	 * <li>Gdb databases,
	 * <li>Gex databases.
	 * <li>Imported data
	 * <li>the Mapp format.
	 * </ol>
	 * We should try not to use the system code anywhere outside
	 * these 4 uses.
	 * @return systemcode, a short unique code.
	 */
	public String getSystemCode()
	{
		return sysCode;
	}

	/**
	 * Return the main Url for this datasource,
	 * that can be used to refer to the datasource in general.
	 * (e.g. http://www.ensembl.org/)
	 *
	 * May return null in case the main url is unknown.
	 * @return main url
	 */
	public String getMainUrl()
	{
		return mainUrl;
	}

	/**
	 * @return type of entity that this DataSource describes, for example
	 * "metabolite", "gene", "protein" or "probe"
	 */
	public String getType()
	{
		return type;
	}

	/**
	 * Creates a global identifier.
	 * It uses the MIRIAM data type list
	 * to create a MIRIAM URI like "urn:miriam:uniprot:P12345",
	 * or if this DataSource is not included
	 * in the MIRIAM data types list, a bridgedb URI.
	 * The id is URL-encoded; if encoding fails it is used verbatim.
	 * @param id Id to generate URN from.
	 * @return the URN.
	 */
	public String getURN(String id)
	{
		String idPart = "";
		try
		{
			idPart = URLEncoder.encode(id, "UTF-8");
		}
		catch (UnsupportedEncodingException ex)
		{
			idPart = id;
		}
		return urnBase + ":" + idPart;
	}

	/**
	 * Uses builder pattern to set optional attributes for a DataSource. For example, this allows you to use the
	 * following code:
	 * <pre>
	 * DataSource.register("X", "Affymetrix")
	 *     .mainUrl("http://www.affymetrix.com")
	 *     .type("probe")
	 *     .primary(false);
	 * </pre>
	 */
	public static final class Builder
	{
		private final DataSource current;

		/**
		 * Create a Builder for a DataSource. Note that an existing DataSource is
		 * modified rather than creating a new one.
		 * This constructor should only be called by the register method.
		 * @param current the DataSource to be modified
		 */
		private Builder(DataSource current)
		{
			this.current = current;
		}

		/**
		 * @return the DataSource under construction
		 */
		public DataSource asDataSource()
		{
			return current;
		}

		/**
		 * @param urlPattern is a template for generating valid URL's for identifiers.
		 * 	The pattern should contain the substring "$ID", which will be replaced by the actual identifier.
		 * @return the same Builder object so you can chain setters
		 */
		public Builder urlPattern (String urlPattern)
		{
			if (urlPattern == null || "".equals (urlPattern))
			{
				current.prefix = "";
				current.postfix = "";
			}
			else
			{
				// Split the pattern around the literal "$id" marker.
				int pos = urlPattern.indexOf("$id");
				if (pos == -1) throw new IllegalArgumentException("Url maker pattern for " + current + "' should have $id in it");
				current.prefix = urlPattern.substring(0, pos);
				current.postfix = urlPattern.substring(pos + 3);
			}
			return this;
		}

		/**
		 * @param mainUrl url of homepage
		 * @return the same Builder object so you can chain setters
		 */
		public Builder mainUrl (String mainUrl)
		{
			current.mainUrl = mainUrl;
			return this;
		}

		/**
		 * @param idExample an example id from this system
		 * @return the same Builder object so you can chain setters
		 */
		public Builder idExample (String idExample)
		{
			current.idExample = idExample;
			return this;
		}

		/**
		 * @param isPrimary secondary id's such as EC numbers, Gene Ontology or vendor-specific systems occur in data or linkouts,
		 * 	but their use in pathways is discouraged
		 * @return the same Builder object so you can chain setters
		 */
		public Builder primary (boolean isPrimary)
		{
			current.isPrimary = isPrimary;
			return this;
		}

		/**
		 * @param type the type of datasource, for example "protein", "gene", "metabolite"
		 * @return the same Builder object so you can chain setters
		 */
		public Builder type (String type)
		{
			current.type = type;
			return this;
		}

		/**
		 * @param organism organism for which this system code is suitable, or null for any / not applicable
		 * @return the same Builder object so you can chain setters
		 */
		public Builder organism (Object organism)
		{
			current.organism = organism;
			return this;
		}

		/**
		 * @param base for urn generation, for example "urn:miriam:uniprot"
		 * @return the same Builder object so you can chain setters
		 */
		public Builder urnBase (String base)
		{
			current.urnBase = base;
			return this;
		}
	}

	/**
	 * Register a new DataSource with (optional) detailed information.
	 * This can be used by other modules to define new DataSources.
	 * Re-registering an existing sysCode or fullName modifies the existing
	 * DataSource instance rather than creating a new one.
	 * @param sysCode short unique code between 1-4 letters, originally used by GenMAPP
	 * @param fullName full name used in GPML. Must be 20 or less characters
	 * @return Builder that can be used for adding detailed information.
	 */
	public static Builder register(String sysCode, String fullName)
	{
		DataSource current = null;
		if (fullName == null && sysCode == null) throw new NullPointerException();
//		if (fullName != null && fullName.length() > 20)
//		{
//			throw new IllegalArgumentException("full Name '" + fullName + "' must be 20 or less characters");
//		}
		if (byFullName.containsKey(fullName))
		{
			current = byFullName.get(fullName);
		}
		else if (bySysCode.containsKey(sysCode))
		{
			current = bySysCode.get(sysCode);
		}
		else
		{
			current = new DataSource ();
			registry.add (current);
		}
		// NOTE(review): this indexes the *current* urnBase — which is "" (never
		// null) for a brand-new DataSource — before the Builder gets a chance to
		// set it, so an empty-string key can end up in byMiriamBase. Confirm
		// whether this should be guarded with isSuitableKey(current.urnBase).
		if (current.urnBase != null)
		{
			byMiriamBase.put (current.urnBase, current);
		}
		current.sysCode = sysCode;
		current.fullName = fullName;
		if (isSuitableKey(sysCode)) bySysCode.put(sysCode, current);
		if (isSuitableKey(fullName)) byFullName.put(fullName, current);
		return new Builder(current);
	}

	/** Registers an additional lookup name for this DataSource (see {@link getByAlias}). */
	public void registerAlias(String alias)
	{
		byAlias.put (alias, this);
	}

	/**
	 * Helper method to determine if a String is allowed as key for bySysCode and byFullname hashes.
	 * Null values and empty strings are not allowed.
	 * @param key key to check.
	 * @return true if the key is allowed
	 */
	private static boolean isSuitableKey(String key)
	{
		return !(key == null || "".equals(key));
	}

	/**
	 * @param systemCode short unique code to query for
	 * @return pre-existing DataSource object by system code,
	 * 	if it exists, or creates a new one.
	 */
	public static DataSource getBySystemCode(String systemCode)
	{
		if (!bySysCode.containsKey(systemCode) && isSuitableKey(systemCode))
		{
			register (systemCode, null);
		}
		return bySysCode.get(systemCode);
	}

	/**
	 * returns pre-existing DataSource object by
	 * full name, if it exists,
	 * or creates a new one.
	 * @param fullName full name to query for
	 * @return DataSource
	 */
	public static DataSource getByFullName(String fullName)
	{
		if (!byFullName.containsKey(fullName) && isSuitableKey(fullName))
		{
			register (null, fullName);
		}
		return byFullName.get(fullName);
	}

	/** @return DataSource previously registered under the given alias, or null. */
	public static DataSource getByAlias(String alias)
	{
		return byAlias.get(alias);
	}

	/**
	 * get all registered datasoures as a set.
	 * @return set of all registered DataSources
	 */
	static public Set<DataSource> getDataSources()
	{
		return registry;
	}

	/**
	 * returns a filtered subset of available datasources.
	 * @param primary Filter for specified primary-ness. If null, don't filter on primary-ness.
	 * @param metabolite Filter for specified metabolite-ness. If null, don't filter on metabolite-ness.
	 * @param o Filter for specified organism. If null, don't filter on organism.
	 * @return filtered set.
	 */
	static public Set<DataSource> getFilteredSet (Boolean primary, Boolean metabolite, Object o)
	{
		final Set<DataSource> result = new HashSet<DataSource>();
		for (DataSource ds : registry)
		{
			// A DataSource with organism == null matches any requested organism.
			if ( (primary == null || ds.isPrimary() == primary) &&
				 (metabolite == null || ds.isMetabolite() == metabolite) &&
				 (o == null || ds.organism == null || o == ds.organism))
			{
				result.add (ds);
			}
		}
		return result;
	}

	/**
	 * Get a list of all non-null full names.
	 * <p>
	 * Warning: the ordering of this list is undefined.
	 * Two subsequent calls may give different results.
	 * @return List of full names
	 */
	static public List<String> getFullNames()
	{
		final List<String> result = new ArrayList<String>();
		result.addAll (byFullName.keySet());
		return result;
	}

	/**
	 * The string representation of a DataSource is equal to
	 * it's full name. (e.g. "Ensembl")
	 * @return String representation
	 */
	public String toString()
	{
		return fullName;
	}

	/**
	 * @return example Xref, mostly for testing purposes
	 */
	public Xref getExample ()
	{
		return new Xref (idExample, this);
	}

	/**
	 * @return if this is a primary DataSource or not. Primary DataSources
	 * 	are preferred when annotating models.
	 *
	 * A DataSource is primary if it is not of type probe,
	 * so that means e.g. Affymetrix or Agilent probes are not primary. All
	 * gene, protein and metabolite identifiers are primary.
	 */
	public boolean isPrimary()
	{
		return isPrimary;
	}

	/**
	 * @return if this DataSource describes metabolites or not.
	 */
	public boolean isMetabolite()
	{
		return type.equals ("metabolite");
	}

	/**
	 * @return Organism that this DataSource describes, or null if multiple / not applicable.
	 */
	public Object getOrganism()
	{
		return organism;
	}

	/** @return DataSource registered with the given urn base (e.g. "urn:miriam:uniprot"), or null. */
	public static DataSource getByUrnBase(String base)
	{
		return byMiriamBase.get(base);
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.distributedlog.lock; import static org.apache.distributedlog.lock.ZKSessionLock.areLockWaitersInSameSession; import static org.apache.distributedlog.lock.ZKSessionLock.asyncParseClientID; import static org.apache.distributedlog.lock.ZKSessionLock.getLockIdFromPath; import static org.apache.distributedlog.lock.ZKSessionLock.getLockPathPrefixV1; import static org.apache.distributedlog.lock.ZKSessionLock.getLockPathPrefixV2; import static org.apache.distributedlog.lock.ZKSessionLock.getLockPathPrefixV3; import static org.apache.distributedlog.lock.ZKSessionLock.parseMemberID; import static org.apache.distributedlog.lock.ZKSessionLock.serializeClientId; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import 
org.apache.bookkeeper.common.util.OrderedScheduler; import org.apache.bookkeeper.stats.NullStatsLogger; import org.apache.commons.lang3.tuple.Pair; import org.apache.distributedlog.DLMTestUtil; import org.apache.distributedlog.ZooKeeperClient; import org.apache.distributedlog.ZooKeeperClientBuilder; import org.apache.distributedlog.ZooKeeperClientUtils; import org.apache.distributedlog.ZooKeeperClusterTestCase; import org.apache.distributedlog.exceptions.LockingException; import org.apache.distributedlog.exceptions.OwnershipAcquireFailedException; import org.apache.distributedlog.lock.ZKSessionLock.State; import org.apache.distributedlog.util.FailpointUtils; import org.apache.distributedlog.util.Utils; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.ZooKeeper; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Distributed Lock Tests. 
*/
public class TestZKSessionLock extends ZooKeeperClusterTestCase {

    @Rule
    public TestName testNames = new TestName();

    private static final Logger logger = LoggerFactory.getLogger(TestZKSessionLock.class);

    // Short session timeout so the session-expiry tests complete quickly.
    private static final int sessionTimeoutMs = 2000;

    // Two independent clients: locks created on zkc vs zkc0 live in different
    // ZK sessions, which several tests rely on.
    private ZooKeeperClient zkc;
    private ZooKeeperClient zkc0; // used for checking
    private OrderedScheduler lockStateExecutor;

    @Before
    public void setup() throws Exception {
        zkc = ZooKeeperClientBuilder.newBuilder()
                .name("zkc")
                .uri(DLMTestUtil.createDLMURI(zkPort, "/"))
                .sessionTimeoutMs(sessionTimeoutMs)
                .zkServers(zkServers)
                .zkAclId(null)
                .build();
        zkc0 = ZooKeeperClientBuilder.newBuilder()
                .name("zkc0")
                .uri(DLMTestUtil.createDLMURI(zkPort, "/"))
                .sessionTimeoutMs(sessionTimeoutMs)
                .zkServers(zkServers)
                .zkAclId(null)
                .build();
        // Single-threaded scheduler: lock state transitions for a given path
        // are executed in submission order.
        lockStateExecutor = OrderedScheduler.newSchedulerBuilder()
                .numThreads(1)
                .build();
    }

    @After
    public void teardown() throws Exception {
        zkc.close();
        zkc0.close();
        lockStateExecutor.shutdown();
    }

    /** Create the persistent parent znode that a lock lives under. */
    private static void createLockPath(ZooKeeper zk, String lockPath) throws Exception {
        zk.create(lockPath, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
    }

    /** Create a v1-format ephemeral-sequential member znode for {@code clientId}. */
    private static String createLockNodeV1(ZooKeeper zk, String lockPath, String clientId) throws Exception {
        return zk.create(getLockPathPrefixV1(lockPath), serializeClientId(clientId),
                ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
    }

    /** Create a v2-format member znode (client id encoded in the node name). */
    private static String createLockNodeV2(ZooKeeper zk, String lockPath, String clientId) throws Exception {
        return zk.create(getLockPathPrefixV2(lockPath, clientId), serializeClientId(clientId),
                ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
    }

    /** Create a v3-format member znode (client id and session id encoded in the node name). */
    private static String createLockNodeV3(ZooKeeper zk, String lockPath, String clientId) throws Exception {
        return zk.create(getLockPathPrefixV3(lockPath, clientId, zk.getSessionId()), serializeClientId(clientId),
                ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
    }

    /** Create a member znode with an arbitrary (possibly malformed) name, for parser tests. */
    private static String createLockNodeWithBadNodeName(ZooKeeper zk, String lockPath, String clientId,
                                                        String badNodeName) throws Exception {
        return zk.create(lockPath + "/" + badNodeName, serializeClientId(clientId),
                ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
    }

    /** Return the lock's member znodes sorted by member sequence number. */
    private static List<String> getLockWaiters(ZooKeeperClient zkc, String lockPath) throws Exception {
        List<String> children = zkc.get().getChildren(lockPath, false);
        Collections.sort(children, ZKSessionLock.MEMBER_COMPARATOR);
        return children;
    }

    @Test(timeout = 60000)
    public void testParseClientID() throws Exception {
        ZooKeeper zk = zkc.get();

        String lockPath = "/test-parse-clientid";
        String clientId = "test-parse-clientid-" + System.currentTimeMillis();
        Pair<String, Long> lockId = Pair.of(clientId, zk.getSessionId());

        createLockPath(zk, lockPath);

        // Correct data
        String node1 = getLockIdFromPath(createLockNodeV1(zk, lockPath, clientId));
        String node2 = getLockIdFromPath(createLockNodeV2(zk, lockPath, clientId));
        String node3 = getLockIdFromPath(createLockNodeV3(zk, lockPath, clientId));

        assertEquals(lockId, Utils.ioResult(asyncParseClientID(zk, lockPath, node1)));
        assertEquals(lockId, Utils.ioResult(asyncParseClientID(zk, lockPath, node2)));
        assertEquals(lockId, Utils.ioResult(asyncParseClientID(zk, lockPath, node3)));

        // Bad Lock Node Name: parser falls back to reading the znode data.
        String node4 = getLockIdFromPath(createLockNodeWithBadNodeName(zk, lockPath, clientId, "member"));
        String node5 = getLockIdFromPath(createLockNodeWithBadNodeName(zk, lockPath, clientId, "member_badnode"));
        String node6 = getLockIdFromPath(
                createLockNodeWithBadNodeName(zk, lockPath, clientId, "member_badnode_badnode"));
        String node7 = getLockIdFromPath(
                createLockNodeWithBadNodeName(zk, lockPath, clientId, "member_badnode_badnode_badnode"));
        String node8 = getLockIdFromPath(
                createLockNodeWithBadNodeName(zk, lockPath, clientId, "member_badnode_badnode_badnode_badnode"));

        assertEquals(lockId, Utils.ioResult(asyncParseClientID(zk, lockPath, node4)));
        assertEquals(lockId, Utils.ioResult(asyncParseClientID(zk, lockPath, node5)));
        assertEquals(lockId, Utils.ioResult(asyncParseClientID(zk, lockPath, node6)));
        assertEquals(lockId, Utils.ioResult(asyncParseClientID(zk, lockPath, node7)));
        assertEquals(lockId, Utils.ioResult(asyncParseClientID(zk, lockPath, node8)));

        // Malformed Node Name: id and session are parsed out of the name itself.
        String node9 = getLockIdFromPath(
                createLockNodeWithBadNodeName(zk, lockPath, clientId, "member_malformed_s12345678_999999"));
        assertEquals(Pair.of("malformed", 12345678L), Utils.ioResult(asyncParseClientID(zk, lockPath, node9)));
    }

    @Test(timeout = 60000)
    public void testParseMemberID() throws Exception {
        // Unparseable names sort last (Integer.MAX_VALUE).
        assertEquals(Integer.MAX_VALUE, parseMemberID("badnode"));
        assertEquals(Integer.MAX_VALUE, parseMemberID("badnode_badnode"));
        assertEquals(0, parseMemberID("member_000000"));
        assertEquals(123, parseMemberID("member_000123"));
    }

    @Test(timeout = 60000)
    public void testAreLockWaitersInSameSession() throws Exception {
        ZooKeeper zk = zkc.get();

        String lockPath = "/test-are-lock-waiters-in-same-session";
        String clientId1 = "test-are-lock-waiters-in-same-session-1";
        String clientId2 = "test-are-lock-waiters-in-same-session-2";

        createLockPath(zk, lockPath);

        // node1 and node3 share clientId1; node2 uses clientId2. All three use
        // the same ZK session here, so session equality is judged by the
        // client-id portion encoded in the v3 node name.
        String node1 = getLockIdFromPath(createLockNodeV3(zk, lockPath, clientId1));
        String node2 = getLockIdFromPath(createLockNodeV3(zk, lockPath, clientId2));
        String node3 = getLockIdFromPath(createLockNodeV3(zk, lockPath, clientId1));

        assertEquals(node1 + " and " + node3 + " should be in same session.",
                true, areLockWaitersInSameSession(node1, node3));
        assertEquals(node1 + " and " + node2 + " should be not in same session.",
                false, areLockWaitersInSameSession(node1, node2));
        assertEquals(node3 + " and " + node2 + " should be not in same session.",
                false, areLockWaitersInSameSession(node3, node2));
    }

    @Test(timeout = 60000)
    public void testExecuteLockAction() throws Exception {
        String lockPath = "/test-execute-lock-action";
        String clientId = "test-execute-lock-action-" + System.currentTimeMillis();

        ZKSessionLock lock = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor);

        final AtomicInteger counter = new AtomicInteger(0);

        // lock action would be executed in same epoch
        final CountDownLatch latch1 = new CountDownLatch(1);
        lock.executeLockAction(lock.getEpoch(), new LockAction() {
            @Override
            public void execute() {
                counter.incrementAndGet();
                latch1.countDown();
            }

            @Override
            public String getActionName() {
                return "increment1";
            }
        });
        latch1.await();
        assertEquals("counter should be increased in same epoch", 1, counter.get());

        // lock action would not be executed in same epoch
        final CountDownLatch latch2 = new CountDownLatch(1);
        lock.executeLockAction(lock.getEpoch() + 1, new LockAction() {
            @Override
            public void execute() {
                counter.incrementAndGet();
            }

            @Override
            public String getActionName() {
                return "increment2";
            }
        });
        // the countdown action runs in the current epoch, strictly after the
        // stale-epoch action above was submitted (single-threaded executor),
        // so reaching it proves the stale action was skipped.
        lock.executeLockAction(lock.getEpoch(), new LockAction() {
            @Override
            public void execute() {
                latch2.countDown();
            }

            @Override
            public String getActionName() {
                return "countdown";
            }
        });
        latch2.await();
        assertEquals("counter should not be increased in different epochs", 1, counter.get());

        // lock action would not be executed in same epoch and promise would be satisfied with exception
        CompletableFuture<Void> promise = new CompletableFuture<Void>();
        lock.executeLockAction(lock.getEpoch() + 1, new LockAction() {
            @Override
            public void execute() {
                counter.incrementAndGet();
            }

            @Override
            public String getActionName() {
                return "increment3";
            }
        }, promise);
        try {
            Utils.ioResult(promise);
            fail("Should satisfy promise with epoch changed exception.");
        } catch (EpochChangedException ece) {
            // expected
        }
        assertEquals("counter should not be increased in different epochs", 1, counter.get());

        // NOTE(review): shuts down the shared executor inside the test; safe
        // because setup() re-creates it per test, but teardown() will shut it
        // down again — confirm the double shutdown is intended.
        lockStateExecutor.shutdown();
    }

    /**
     * Test lock after unlock is called.
* * @throws Exception */ @Test(timeout = 60000) public void testLockAfterUnlock() throws Exception { String lockPath = "/test-lock-after-unlock"; String clientId = "test-lock-after-unlock"; ZKSessionLock lock = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor); lock.unlock(); assertEquals(State.CLOSED, lock.getLockState()); try { lock.tryLock(0, TimeUnit.MILLISECONDS); fail("Should fail on tryLock since lock state has changed."); } catch (LockStateChangedException lsce) { // expected } assertEquals(State.CLOSED, lock.getLockState()); try { lock.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS); fail("Should fail on tryLock immediately if lock state has changed."); } catch (LockStateChangedException lsce) { // expected } assertEquals(State.CLOSED, lock.getLockState()); } class DelayFailpointAction extends FailpointUtils.AbstractFailPointAction { long timeout; DelayFailpointAction(long timeout) { this.timeout = timeout; } @Override public boolean checkFailPoint() throws IOException { try { Thread.sleep(timeout); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); } return true; } } /** * Test unlock timeout. * * @throws Exception */ @Test(timeout = 60000) public void testUnlockTimeout() throws Exception { String name = testNames.getMethodName(); String lockPath = "/" + name; String clientId = name; createLockPath(zkc.get(), lockPath); ZKSessionLock lock = new ZKSessionLock( zkc, lockPath, clientId, lockStateExecutor, 1 * 1000 /* op timeout */, NullStatsLogger.INSTANCE, new DistributedLockContext()); lock.tryLock(0, TimeUnit.MILLISECONDS); assertEquals(State.CLAIMED, lock.getLockState()); try { FailpointUtils.setFailpoint(FailpointUtils.FailPointName.FP_LockUnlockCleanup, new DelayFailpointAction(60 * 60 * 1000)); lock.unlock(); assertEquals(State.CLOSING, lock.getLockState()); } finally { FailpointUtils.removeFailpoint(FailpointUtils.FailPointName.FP_LockUnlockCleanup); } } /** * Test try-create after close race condition. 
* * @throws Exception */ @Test(timeout = 60000) public void testTryCloseRaceCondition() throws Exception { String name = testNames.getMethodName(); String lockPath = "/" + name; String clientId = name; createLockPath(zkc.get(), lockPath); ZKSessionLock lock = new ZKSessionLock( zkc, lockPath, clientId, lockStateExecutor, 1 * 1000 /* op timeout */, NullStatsLogger.INSTANCE, new DistributedLockContext()); try { FailpointUtils.setFailpoint(FailpointUtils.FailPointName.FP_LockTryCloseRaceCondition, FailpointUtils.DEFAULT_ACTION); lock.tryLock(0, TimeUnit.MILLISECONDS); } catch (LockClosedException ex) { } finally { FailpointUtils.removeFailpoint(FailpointUtils.FailPointName.FP_LockTryCloseRaceCondition); } assertEquals(State.CLOSED, lock.getLockState()); List<String> children = getLockWaiters(zkc, lockPath); assertEquals(0, children.size()); } /** * Test try acquire timeout. * * @throws Exception */ @Test(timeout = 60000) public void testTryAcquireTimeout() throws Exception { String name = testNames.getMethodName(); String lockPath = "/" + name; String clientId = name; createLockPath(zkc.get(), lockPath); ZKSessionLock lock = new ZKSessionLock( zkc, lockPath, clientId, lockStateExecutor, 1 /* op timeout */, NullStatsLogger.INSTANCE, new DistributedLockContext()); try { FailpointUtils.setFailpoint(FailpointUtils.FailPointName.FP_LockTryAcquire, new DelayFailpointAction(60 * 60 * 1000)); lock.tryLock(0, TimeUnit.MILLISECONDS); assertEquals(State.CLOSED, lock.getLockState()); } catch (LockingException le) { } catch (Exception e) { fail("expected locking exception"); } finally { FailpointUtils.removeFailpoint(FailpointUtils.FailPointName.FP_LockTryAcquire); } } @Test(timeout = 60000) public void testBasicLockUnlock0() throws Exception { testBasicLockUnlock(0); } @Test(timeout = 60000) public void testBasicLockUnlock1() throws Exception { testBasicLockUnlock(Long.MAX_VALUE); } /** * Test Basic Lock and Unlock. 
* - lock should succeed if there is no lock held * - lock should fail on a success lock * - unlock should release the held lock * * @param timeout * timeout to wait for the lock * @throws Exception */ private void testBasicLockUnlock(long timeout) throws Exception { String lockPath = "/test-basic-lock-unlock-" + timeout + System.currentTimeMillis(); String clientId = "test-basic-lock-unlock"; createLockPath(zkc.get(), lockPath); ZKSessionLock lock = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor); // lock lock.tryLock(timeout, TimeUnit.MILLISECONDS); // verification after lock assertEquals(State.CLAIMED, lock.getLockState()); List<String> children = getLockWaiters(zkc, lockPath); assertEquals(1, children.size()); assertEquals(lock.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(0)))); // lock should fail on a success lock try { lock.tryLock(timeout, TimeUnit.MILLISECONDS); fail("Should fail on locking a failure lock."); } catch (LockStateChangedException lsce) { // expected } assertEquals(State.CLAIMED, lock.getLockState()); children = getLockWaiters(zkc, lockPath); assertEquals(1, children.size()); assertEquals(lock.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(0)))); // unlock lock.unlock(); // verification after unlock assertEquals(State.CLOSED, lock.getLockState()); assertEquals(0, getLockWaiters(zkc, lockPath).size()); } /** * Test lock on non existed lock. * - lock should fail on a non existed lock. 
* * @throws Exception */ @Test(timeout = 60000) public void testLockOnNonExistedLock() throws Exception { String lockPath = "/test-lock-on-non-existed-lock"; String clientId = "test-lock-on-non-existed-lock"; ZKSessionLock lock = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor); // lock try { lock.tryLock(0, TimeUnit.MILLISECONDS); fail("Should fail on locking a non-existed lock."); } catch (LockingException le) { Throwable cause = le.getCause(); assertTrue(cause instanceof KeeperException); assertEquals(KeeperException.Code.NONODE, ((KeeperException) cause).code()); } assertEquals(State.CLOSED, lock.getLockState()); // lock should failed on a failure lock try { lock.tryLock(0, TimeUnit.MILLISECONDS); fail("Should fail on locking a failure lock."); } catch (LockStateChangedException lsce) { // expected } assertEquals(State.CLOSED, lock.getLockState()); } @Test(timeout = 60000) public void testLockWhenSomeoneHeldLock0() throws Exception { testLockWhenSomeoneHeldLock(0); } @Test(timeout = 60000) public void testLockWhenSomeoneHeldLock1() throws Exception { testLockWhenSomeoneHeldLock(500); } /** * Test lock if the lock is already held by someone else. Any lock in this situation will * fail with current owner. 
*
     * @param timeout
     *          timeout to wait for the lock
     * @throws Exception
     */
    private void testLockWhenSomeoneHeldLock(long timeout) throws Exception {
        String lockPath = "/test-lock-nowait-" + timeout + "-" + System.currentTimeMillis();
        String clientId0 = "test-lock-nowait-0-" + System.currentTimeMillis();
        String clientId1 = "test-lock-nowait-1-" + System.currentTimeMillis();
        String clientId2 = "test-lock-nowait-2-" + System.currentTimeMillis();

        createLockPath(zkc.get(), lockPath);

        // lock0 is held on a different ZK session (zkc0) than lock1/lock2 (zkc).
        ZKSessionLock lock0 = new ZKSessionLock(zkc0, lockPath, clientId0, lockStateExecutor);
        ZKSessionLock lock1 = new ZKSessionLock(zkc, lockPath, clientId1, lockStateExecutor);

        lock0.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);

        // verification after lock0 lock
        assertEquals(State.CLAIMED, lock0.getLockState());
        List<String> children = getLockWaiters(zkc0, lockPath);
        assertEquals(1, children.size());
        assertEquals(lock0.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));

        // lock1 cannot acquire within the timeout and reports the current owner.
        try {
            lock1.tryLock(timeout, TimeUnit.MILLISECONDS);
            fail("lock1 should fail on locking since lock0 is holding the lock.");
        } catch (OwnershipAcquireFailedException oafe) {
            assertEquals(lock0.getLockId().getLeft(), oafe.getCurrentOwner());
        }

        // verification after lock1 tryLock: lock1 closed itself and left no znode behind
        assertEquals(State.CLAIMED, lock0.getLockState());
        assertEquals(State.CLOSED, lock1.getLockState());
        children = getLockWaiters(zkc0, lockPath);
        assertEquals(1, children.size());
        assertEquals(lock0.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));

        lock0.unlock();
        // verification after unlock lock0
        assertEquals(State.CLOSED, lock0.getLockState());
        assertEquals(0, getLockWaiters(zkc, lockPath).size());

        // with the lock free again, a fresh lock acquires immediately
        ZKSessionLock lock2 = new ZKSessionLock(zkc, lockPath, clientId2, lockStateExecutor);
        lock2.tryLock(timeout, TimeUnit.MILLISECONDS);

        // verification after lock2 lock
        assertEquals(State.CLOSED, lock0.getLockState());
        assertEquals(State.CLOSED, lock1.getLockState());
        assertEquals(State.CLAIMED, lock2.getLockState());
        children = getLockWaiters(zkc, lockPath);
        assertEquals(1, children.size());
        assertEquals(lock2.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(0))));

        lock2.unlock();
    }

    @Test(timeout = 60000)
    public void testLockWhenPreviousLockZnodeStillExists() throws Exception {
        String lockPath = "/test-lock-when-previous-lock-znode-still-exists-" + System.currentTimeMillis();
        String clientId = "client-id";

        ZooKeeper zk = zkc.get();

        createLockPath(zk, lockPath);

        final ZKSessionLock lock0 = new ZKSessionLock(zkc0, lockPath, clientId, lockStateExecutor);
        // lock0 lock
        lock0.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);

        // simulate lock0 expires but znode still exists: registering lock0's id in
        // the context lets a new lock treat the stale znode as its own.
        final DistributedLockContext context1 = new DistributedLockContext();
        context1.addLockId(lock0.getLockId());

        final ZKSessionLock lock1 = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor,
                60000, NullStatsLogger.INSTANCE, context1);
        lock1.tryLock(0L, TimeUnit.MILLISECONDS);
        assertEquals(State.CLAIMED, lock1.getLockState());
        lock1.unlock();

        // same scenario, but with a waiting (non-immediate) tryLock
        final DistributedLockContext context2 = new DistributedLockContext();
        context2.addLockId(lock0.getLockId());

        final ZKSessionLock lock2 = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor,
                60000, NullStatsLogger.INSTANCE, context2);
        lock2.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
        assertEquals(State.CLAIMED, lock2.getLockState());
        lock2.unlock();

        lock0.unlock();
    }

    @Test(timeout = 60000)
    public void testWaitForLockUnlock() throws Exception {
        testWaitForLockReleased("/test-wait-for-lock-unlock", true);
    }

    @Test(timeout = 60000)
    public void testWaitForLockExpired() throws Exception {
        testWaitForLockReleased("/test-wait-for-lock-expired", false);
    }

    /**
     * Test lock wait for the lock owner to release the lock. The lock waiter should acquire lock successfully
     * if the lock owner unlock or it is expired.
* * @param lockPath * lock path * @param isUnlock * whether to unlock or expire the lock * @throws Exception */ private void testWaitForLockReleased(String lockPath, boolean isUnlock) throws Exception { String clientId0 = "test-wait-for-lock-released-0-" + System.currentTimeMillis(); String clientId1 = "test-wait-for-lock-released-1-" + System.currentTimeMillis(); createLockPath(zkc.get(), lockPath); final ZKSessionLock lock0 = new ZKSessionLock(zkc0, lockPath, clientId0, lockStateExecutor); final ZKSessionLock lock1 = new ZKSessionLock(zkc, lockPath, clientId1, lockStateExecutor); lock0.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS); // verification after lock0 lock assertEquals(State.CLAIMED, lock0.getLockState()); List<String> children = getLockWaiters(zkc0, lockPath); assertEquals(1, children.size()); assertEquals(lock0.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0)))); final CountDownLatch lock1DoneLatch = new CountDownLatch(1); Thread lock1Thread = new Thread(new Runnable() { @Override public void run() { try { lock1.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS); lock1DoneLatch.countDown(); } catch (LockingException e) { logger.error("Failed on locking lock1 : ", e); } } }, "lock1-thread"); lock1Thread.start(); // ensure lock1 is waiting for lock0 children = awaitWaiters(2, zkc, lockPath); if (isUnlock) { lock0.unlock(); } else { ZooKeeperClientUtils.expireSession(zkc0, zkServers, sessionTimeoutMs); } lock1DoneLatch.await(); lock1Thread.join(); // verification after lock2 lock if (isUnlock) { assertEquals(State.CLOSED, lock0.getLockState()); } else { assertEquals(State.EXPIRED, lock0.getLockState()); } assertEquals(State.CLAIMED, lock1.getLockState()); children = getLockWaiters(zkc, lockPath); assertEquals(1, children.size()); assertEquals(lock1.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(0)))); lock1.unlock(); } /** * Test session expired after claimed the lock: lock state should be 
changed to expired and notify
     * the lock listener about expiry.
     *
     * @throws Exception
     */
    @Test(timeout = 60000)
    public void testLockListenerOnExpired() throws Exception {
        String lockPath = "/test-lock-listener-on-expired";
        String clientId = "test-lock-listener-on-expired-" + System.currentTimeMillis();

        createLockPath(zkc.get(), lockPath);

        final CountDownLatch expiredLatch = new CountDownLatch(1);
        LockListener listener = new LockListener() {
            @Override
            public void onExpired() {
                expiredLatch.countDown();
            }
        };
        final ZKSessionLock lock =
                new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor).setLockListener(listener);
        lock.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);

        // verification after lock
        assertEquals(State.CLAIMED, lock.getLockState());
        List<String> children = getLockWaiters(zkc, lockPath);
        assertEquals(1, children.size());
        assertEquals(lock.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(0))));

        // force session expiry; the listener must be invoked and the state move to EXPIRED
        ZooKeeperClientUtils.expireSession(zkc, zkServers, sessionTimeoutMs);
        expiredLatch.await();
        assertEquals(State.EXPIRED, lock.getLockState());
        // the ephemeral member znode is gone with the session
        children = getLockWaiters(zkc, lockPath);
        assertEquals(0, children.size());

        // an expired lock can no longer be re-acquired
        try {
            lock.tryLock(0, TimeUnit.MILLISECONDS);
            fail("Should fail on tryLock since lock state has changed.");
        } catch (LockStateChangedException lsce) {
            // expected
        }

        lock.unlock();
    }

    @Test(timeout = 60000)
    public void testSessionExpiredBeforeLock0() throws Exception {
        testSessionExpiredBeforeLock(0);
    }

    @Test(timeout = 60000)
    public void testSessionExpiredBeforeLock1() throws Exception {
        testSessionExpiredBeforeLock(Long.MAX_VALUE);
    }

    /**
     * Test Session Expired Before Lock does locking. The lock should be closed since
     * all zookeeper operations would be failed.
*
     * @param timeout
     *          timeout to wait for the lock
     * @throws Exception
     */
    private void testSessionExpiredBeforeLock(long timeout) throws Exception {
        String lockPath = "/test-session-expired-before-lock-" + timeout + "-" + System.currentTimeMillis();
        String clientId = "test-session-expired-before-lock-" + System.currentTimeMillis();

        createLockPath(zkc.get(), lockPath);

        final AtomicInteger expireCounter = new AtomicInteger(0);
        final CountDownLatch expiredLatch = new CountDownLatch(1);
        LockListener listener = new LockListener() {
            @Override
            public void onExpired() {
                expireCounter.incrementAndGet();
            }
        };
        final ZKSessionLock lock = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor)
                .setLockListener(listener);
        // expire session
        ZooKeeperClientUtils.expireSession(zkc, zkServers, sessionTimeoutMs);

        // submit a runnable to lock state executor to ensure any state changes happened when session expired
        lockStateExecutor.executeOrdered(lockPath, () -> expiredLatch.countDown());
        expiredLatch.await();

        // no watcher was registered if never acquired lock successfully
        assertEquals(State.INIT, lock.getLockState());

        try {
            lock.tryLock(timeout, TimeUnit.MILLISECONDS);
            fail("Should fail locking using an expired lock");
        } catch (LockingException le) {
            assertTrue(le.getCause() instanceof KeeperException.SessionExpiredException);
        }
        assertEquals(State.CLOSED, lock.getLockState());
        List<String> children = getLockWaiters(zkc, lockPath);
        assertEquals(0, children.size());
    }

    @Test(timeout = 60000)
    public void testSessionExpiredForLockWaiter() throws Exception {
        String lockPath = "/test-session-expired-for-lock-waiter";
        String clientId0 = "test-session-expired-for-lock-waiter-0";
        String clientId1 = "test-session-expired-for-lock-waiter-1";

        createLockPath(zkc.get(), lockPath);

        // owner acquires on the zkc0 session
        final ZKSessionLock lock0 = new ZKSessionLock(zkc0, lockPath, clientId0, lockStateExecutor);
        lock0.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
        assertEquals(State.CLAIMED, lock0.getLockState());
        List<String> children = getLockWaiters(zkc0, lockPath);
        assertEquals(1, children.size());
        assertEquals(lock0.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));

        // waiter queues up on the zkc session
        final ZKSessionLock lock1 = new ZKSessionLock(zkc, lockPath, clientId1, lockStateExecutor);
        final CountDownLatch lock1DoneLatch = new CountDownLatch(1);
        Thread lock1Thread = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    lock1.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
                } catch (OwnershipAcquireFailedException oafe) {
                    lock1DoneLatch.countDown();
                } catch (LockingException e) {
                    logger.error("Failed on locking lock1 : ", e);
                }
            }
        }, "lock1-thread");
        lock1Thread.start();

        // check lock1 is waiting for lock0
        children = awaitWaiters(2, zkc, lockPath);
        assertEquals(2, children.size());
        assertEquals(State.CLAIMED, lock0.getLockState());
        assertEquals(lock0.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));
        awaitState(State.WAITING, lock1);
        assertEquals(lock1.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(1))));

        // expire lock1
        ZooKeeperClientUtils.expireSession(zkc, zkServers, sessionTimeoutMs);
        // NOTE(review): this countDown() from the main thread makes the latch
        // always pass; synchronization actually relies on the join() below —
        // confirm whether an await() was intended here instead.
        lock1DoneLatch.countDown();
        lock1Thread.join();

        // the waiter's session expired: owner still holds the lock, waiter closed
        assertEquals(State.CLAIMED, lock0.getLockState());
        assertEquals(State.CLOSED, lock1.getLockState());
        children = getLockWaiters(zkc0, lockPath);
        assertEquals(1, children.size());
        assertEquals(lock0.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));
    }

    /** Poll (50ms interval) until the lock reaches the expected state. */
    public void awaitState(State state, ZKSessionLock lock) throws InterruptedException {
        while (lock.getLockState() != state) {
            Thread.sleep(50);
        }
    }

    /** Poll (50ms interval) until at least {@code waiters} member znodes exist; returns them sorted. */
    public List<String> awaitWaiters(int waiters, ZooKeeperClient zkc, String lockPath) throws Exception {
        List<String> children = getLockWaiters(zkc, lockPath);
        while (children.size() < waiters) {
            Thread.sleep(50);
            children = getLockWaiters(zkc, lockPath);
        }
        return children;
    }

    @Test(timeout = 60000)
    public void testLockUseSameClientIdButDifferentSessions0() throws Exception {
        testLockUseSameClientIdButDifferentSessions(true);
    }

    @Test(timeout = 60000)
    public void testLockUseSameClientIdButDifferentSessions1() throws Exception {
        testLockUseSameClientIdButDifferentSessions(false);
    }

    private void testLockUseSameClientIdButDifferentSessions(boolean isUnlock) throws Exception {
        String lockPath = "/test-lock-use-same-client-id-but-different-sessions-" + isUnlock
                + System.currentTimeMillis();
        String clientId = "test-lock-use-same-client-id-but-different-sessions";

        createLockPath(zkc.get(), lockPath);

        final ZKSessionLock lock0 = new ZKSessionLock(zkc0, lockPath, clientId, lockStateExecutor);

        lock0.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);

        // lock1_0 couldn't claim ownership since owner is in a different zk session.
        final ZKSessionLock lock1_0 = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor);
        try {
            lock1_0.tryLock(0, TimeUnit.MILLISECONDS);
            fail("Should fail locking since the lock is held in a different zk session.");
        } catch (OwnershipAcquireFailedException oafe) {
            assertEquals(clientId, oafe.getCurrentOwner());
        }
        assertEquals(State.CLOSED, lock1_0.getLockState());
        List<String> children = getLockWaiters(zkc0, lockPath);
        assertEquals(1, children.size());
        assertEquals(lock0.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));

        // lock1_1 would wait the ownership
        final ZKSessionLock lock1_1 = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor);
        final CountDownLatch lock1DoneLatch = new CountDownLatch(1);
        Thread lock1Thread = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    lock1_1.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
                    lock1DoneLatch.countDown();
                } catch (LockingException e) {
                    logger.error("Failed on locking lock1 : ", e);
                }
            }
        }, "lock1-thread");
        lock1Thread.start();

        // check lock1 is waiting for lock0
        children = awaitWaiters(2, zkc, lockPath);
        logger.info("Found {} lock waiters : {}", children.size(), children);

        assertEquals(2, children.size());
        assertEquals(State.CLAIMED, lock0.getLockState());
        assertEquals(lock0.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));
        awaitState(State.WAITING, lock1_1);
        assertEquals(lock1_1.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(1))));

        // hand over the lock either by unlocking or by expiring the owner's session
        if (isUnlock) {
            lock0.unlock();
        } else {
            ZooKeeperClientUtils.expireSession(zkc0, zkServers, sessionTimeoutMs);
        }
        lock1DoneLatch.await();
        lock1Thread.join();

        // verification
        if (isUnlock) {
            assertEquals(State.CLOSED, lock0.getLockState());
        } else {
            assertEquals(State.EXPIRED, lock0.getLockState());
        }
        assertEquals(State.CLAIMED, lock1_1.getLockState());
        children = getLockWaiters(zkc, lockPath);
        assertEquals(1, children.size());
        assertEquals(lock1_1.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(0))));

        lock1_1.unlock();
    }

    @Test(timeout = 60000)
    public void testLockWithMultipleSiblingWaiters() throws Exception {
        String lockPath = "/test-lock-with-multiple-sibling-waiters";
        String clientId = "client-id";

        createLockPath(zkc.get(), lockPath);

        // same client id, same session: all three siblings may hold the lock together
        final ZKSessionLock lock0 = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor);
        final ZKSessionLock lock1 = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor);
        final ZKSessionLock lock2 = new ZKSessionLock(zkc, lockPath, clientId, lockStateExecutor);

        lock0.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
        lock1.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
        lock2.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);

        List<String> children = awaitWaiters(3, zkc, lockPath);
        assertEquals(3, children.size());
        assertEquals(State.CLAIMED, lock0.getLockState());
        assertEquals(State.CLAIMED, lock1.getLockState());
        assertEquals(State.CLAIMED, lock2.getLockState());

        lock0.unlock();
        lock1.unlock();
        lock2.unlock();
    }

    /**
     * Immediate lock and unlock first lock.
* @throws Exception
     */
    @Test(timeout = 60000)
    public void testLockWhenSiblingUseDifferentLockId0() throws Exception {
        testLockWhenSiblingUseDifferentLockId(0, true);
    }

    /**
     * Immediate lock and expire first lock.
     * @throws Exception
     */
    @Test(timeout = 60000)
    public void testLockWhenSiblingUseDifferentLockId1() throws Exception {
        testLockWhenSiblingUseDifferentLockId(0, false);
    }

    /**
     * Wait Lock and unlock lock0_0 and lock1.
     * @throws Exception
     */
    @Test(timeout = 60000)
    public void testLockWhenSiblingUseDifferentLockId2() throws Exception {
        testLockWhenSiblingUseDifferentLockId(Long.MAX_VALUE, true);
    }

    /**
     * Wait Lock and expire first & third lock.
     * @throws Exception
     */
    @Test(timeout = 60000)
    public void testLockWhenSiblingUseDifferentLockId3() throws Exception {
        testLockWhenSiblingUseDifferentLockId(Long.MAX_VALUE, false);
    }

    private void testLockWhenSiblingUseDifferentLockId(long timeout, final boolean isUnlock) throws Exception {
        String lockPath = "/test-lock-when-sibling-use-different-lock-id-" + timeout
                + "-" + isUnlock + "-" + System.currentTimeMillis();
        String clientId0 = "client-id-0";
        String clientId1 = "client-id-1";

        createLockPath(zkc.get(), lockPath);

        // lock0_0 and lock0_1 share a client id and a session; lock1 uses a different id/session.
        final ZKSessionLock lock0_0 = new ZKSessionLock(zkc0, lockPath, clientId0, lockStateExecutor);
        final ZKSessionLock lock0_1 = new ZKSessionLock(zkc0, lockPath, clientId0, lockStateExecutor);
        final ZKSessionLock lock1 = new ZKSessionLock(zkc, lockPath, clientId1, lockStateExecutor);

        lock0_0.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);

        // lock1 wait for the lock ownership.
        final CountDownLatch lock1DoneLatch = new CountDownLatch(1);
        Thread lock1Thread = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    lock1.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
                    lock1DoneLatch.countDown();
                } catch (LockingException e) {
                    logger.error("Failed on locking lock1 : ", e);
                }
            }
        }, "lock1-thread");
        lock1Thread.start();

        // check lock1 is waiting for lock0_0
        List<String> children = awaitWaiters(2, zkc, lockPath);
        assertEquals(2, children.size());
        assertEquals(State.CLAIMED, lock0_0.getLockState());
        assertEquals(lock0_0.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));
        awaitState(State.WAITING, lock1);
        assertEquals(lock1.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(1))));

        final CountDownLatch lock0DoneLatch = new CountDownLatch(1);
        final AtomicReference<String> ownerFromLock0 = new AtomicReference<String>(null);
        Thread lock0Thread = null;
        if (timeout == 0) {
            // immediate attempt: a sibling behind a different-lock-id waiter must fail fast
            try {
                lock0_1.tryLock(0, TimeUnit.MILLISECONDS);
                fail("Should fail on locking if sibling is using differnt lock id.");
            } catch (OwnershipAcquireFailedException oafe) {
                assertEquals(clientId0, oafe.getCurrentOwner());
            }
            assertEquals(State.CLOSED, lock0_1.getLockState());
            children = getLockWaiters(zkc, lockPath);
            assertEquals(2, children.size());
            assertEquals(State.CLAIMED, lock0_0.getLockState());
            assertEquals(lock0_0.getLockId(),
                    Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));
            assertEquals(State.WAITING, lock1.getLockState());
            assertEquals(lock1.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(1))));
        } else {
            // waiting attempt: the sibling queues up as a third waiter
            lock0Thread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        lock0_1.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
                        if (isUnlock) {
                            lock0DoneLatch.countDown();
                        }
                    } catch (OwnershipAcquireFailedException oafe) {
                        if (!isUnlock) {
                            ownerFromLock0.set(oafe.getCurrentOwner());
                            lock0DoneLatch.countDown();
                        }
                    } catch (LockingException le) {
                        logger.error("Failed on locking lock0_1 : ", le);
                    }
                }
            }, "lock0-thread");
            lock0Thread.start();

            // check lock1 is waiting for lock0_0
            children = awaitWaiters(3, zkc, lockPath);
            assertEquals(3, children.size());
            assertEquals(State.CLAIMED, lock0_0.getLockState());
            assertEquals(lock0_0.getLockId(),
                    Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));
            awaitState(State.WAITING, lock1);
            assertEquals(lock1.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(1))));
            awaitState(State.WAITING, lock0_1);
            assertEquals(lock0_1.getLockId(),
                    Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(2))));
        }

        // release ownership by unlock or by expiring lock0_0's session (which also expires lock0_1's)
        if (isUnlock) {
            lock0_0.unlock();
        } else {
            ZooKeeperClientUtils.expireSession(zkc0, zkServers, sessionTimeoutMs);
        }

        lock1DoneLatch.await();
        lock1Thread.join();

        // check the state of lock0_0
        if (isUnlock) {
            assertEquals(State.CLOSED, lock0_0.getLockState());
        } else {
            assertEquals(State.EXPIRED, lock0_0.getLockState());
        }

        if (timeout == 0) {
            children = getLockWaiters(zkc, lockPath);
            assertEquals(1, children.size());
            assertEquals(State.CLAIMED, lock1.getLockState());
            assertEquals(lock1.getLockId(), Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(0))));
        } else {
            assertNotNull(lock0Thread);
            if (!isUnlock) {
                // both lock0_0 and lock0_1 would be expired
                lock0DoneLatch.await();
                lock0Thread.join();

                assertEquals(clientId0, ownerFromLock0.get());
                assertEquals(State.CLOSED, lock0_1.getLockState());

                children = getLockWaiters(zkc, lockPath);
                assertEquals(1, children.size());
                assertEquals(State.CLAIMED, lock1.getLockState());
                assertEquals(lock1.getLockId(),
                        Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(0))));
            } else {
                // lock1 now owns the lock and lock0_1 is still queued behind it
                children = getLockWaiters(zkc, lockPath);
                assertEquals(2, children.size());
                assertEquals(State.CLAIMED, lock1.getLockState());
                assertEquals(lock1.getLockId(),
                        Utils.ioResult(asyncParseClientID(zkc.get(), lockPath, children.get(0))));
                assertEquals(State.WAITING, lock0_1.getLockState());
                assertEquals(lock0_1.getLockId(),
                        Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(1))));
            }
        }

        lock1.unlock();

        if (timeout != 0 && isUnlock) {
            // once lock1 releases, lock0_1 finally acquires the lock
            lock0DoneLatch.await();
            lock0Thread.join();

            children = getLockWaiters(zkc, lockPath);
            assertEquals(1, children.size());
            assertEquals(State.CLAIMED, lock0_1.getLockState());
            assertEquals(lock0_1.getLockId(),
                    Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));
        }
    }

    @Test(timeout = 60000)
    public void testLockWhenSiblingUseSameLockId0() throws Exception {
        testLockWhenSiblingUseSameLockId(0, true);
    }

    @Test(timeout = 60000)
    public void testLockWhenSiblingUseSameLockId1() throws Exception {
        testLockWhenSiblingUseSameLockId(0, false);
    }

    @Test(timeout = 60000)
    public void testLockWhenSiblingUseSameLockId2() throws Exception {
        testLockWhenSiblingUseSameLockId(Long.MAX_VALUE, true);
    }

    @Test(timeout = 60000)
    public void testLockWhenSiblingUseSameLockId3() throws Exception {
        testLockWhenSiblingUseSameLockId(Long.MAX_VALUE, false);
    }

    private void testLockWhenSiblingUseSameLockId(long timeout, final boolean isUnlock) throws Exception {
        String lockPath = "/test-lock-when-sibling-use-same-lock-id-" + timeout
                + "-" + isUnlock + "-" + System.currentTimeMillis();
        String clientId = "client-id";

        createLockPath(zkc.get(), lockPath);

        // both locks share the same client id AND the same ZK session (zkc0)
        final ZKSessionLock lock0 = new ZKSessionLock(zkc0, lockPath, clientId, lockStateExecutor);
        final ZKSessionLock lock1 = new ZKSessionLock(zkc0, lockPath, clientId, lockStateExecutor);

        lock0.tryLock(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
        List<String> children = getLockWaiters(zkc0, lockPath);
        assertEquals(1, children.size());
        assertEquals(State.CLAIMED, lock0.getLockState());
        assertEquals(lock0.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0))));

        lock1.tryLock(timeout, TimeUnit.MILLISECONDS);
        children = getLockWaiters(zkc0, lockPath);
        assertEquals(2, children.size());
        assertEquals(State.CLAIMED, lock0.getLockState());
assertEquals(lock0.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0)))); assertEquals(State.CLAIMED, lock1.getLockState()); assertEquals(lock1.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(1)))); if (isUnlock) { lock0.unlock(); assertEquals(State.CLOSED, lock0.getLockState()); children = getLockWaiters(zkc0, lockPath); assertEquals(1, children.size()); assertEquals(State.CLAIMED, lock1.getLockState()); assertEquals(lock1.getLockId(), Utils.ioResult(asyncParseClientID(zkc0.get(), lockPath, children.get(0)))); lock1.unlock(); } else { ZooKeeperClientUtils.expireSession(zkc0, zkServers, sessionTimeoutMs); final CountDownLatch latch = new CountDownLatch(1); lockStateExecutor.executeOrdered(lockPath, () -> latch.countDown()); latch.await(); children = getLockWaiters(zkc, lockPath); assertEquals(0, children.size()); assertEquals(State.EXPIRED, lock0.getLockState()); assertEquals(State.EXPIRED, lock1.getLockState()); } } }
/* * Copyright 2007 The Kuali Foundation * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krad.inquiry; import org.apache.commons.lang.StringUtils; import org.kuali.rice.core.api.CoreApiServiceLocator; import org.kuali.rice.core.api.config.property.ConfigurationService; import org.kuali.rice.core.api.encryption.EncryptionService; import org.kuali.rice.krad.bo.BusinessObject; import org.kuali.rice.krad.bo.DataObjectRelationship; import org.kuali.rice.krad.bo.DocumentHeader; import org.kuali.rice.krad.bo.ExternalizableBusinessObject; import org.kuali.rice.krad.datadictionary.exception.UnknownBusinessClassAttributeException; import org.kuali.rice.krad.service.BusinessObjectService; import org.kuali.rice.krad.service.DataDictionaryService; import org.kuali.rice.krad.service.DataObjectAuthorizationService; import org.kuali.rice.krad.service.DataObjectMetaDataService; import org.kuali.rice.krad.service.KRADServiceLocator; import org.kuali.rice.krad.service.KRADServiceLocatorWeb; import org.kuali.rice.krad.service.KualiModuleService; import org.kuali.rice.krad.service.ModuleService; import org.kuali.rice.krad.uif.service.impl.ViewHelperServiceImpl; import org.kuali.rice.krad.uif.widget.Inquiry; import org.kuali.rice.krad.util.ExternalizableBusinessObjectUtils; import org.kuali.rice.krad.util.KRADConstants; import org.kuali.rice.krad.util.ObjectUtils; import java.security.GeneralSecurityException; import java.util.ArrayList; import 
java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Implementation of the <code>Inquirable</code> interface that uses metadata * from the data dictionary and performs a query against the database to retrieve * the data object for inquiry * * <p> * More advanced lookup operations or alternate ways of retrieving metadata can * be implemented by extending this base implementation and configuring * </p> * * @author Kuali Rice Team (rice.collab@kuali.org) */ public class InquirableImpl extends ViewHelperServiceImpl implements Inquirable { private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(InquirableImpl.class); protected Class<?> dataObjectClass; /** * A list that can be used to define classes that are superclasses or * superinterfaces of kuali objects where those objects' inquiry URLs need * to use the name of the superclass or superinterface as the business * object class attribute */ public static List<Class<?>> SUPER_CLASS_TRANSLATOR_LIST = new ArrayList<Class<?>>(); /** * Finds primary and alternate key sets configured for the configured data object class and * then attempts to find a set with matching key/value pairs from the request, if a set is * found then calls the module service (for EBOs) or business object service to retrieve * the data object * * <p> * Note at this point on business objects are supported by the default implementation * </p> * * @see Inquirable#retrieveDataObject(java.util.Map<java.lang.String,java.lang.String>) */ @Override public Object retrieveDataObject(Map<String, String> parameters) { if (dataObjectClass == null) { LOG.error("Data object class must be set in inquirable before retrieving the object"); throw new RuntimeException("Data object class must be set in inquirable before retrieving the object"); } // build list of key values from the map parameters List<String> pkPropertyNames = 
getDataObjectMetaDataService().listPrimaryKeyFieldNames(dataObjectClass); // some classes might have alternate keys defined for retrieving List<List<String>> alternateKeyNames = this.getAlternateKeysForClass(dataObjectClass); // add pk set as beginning so it will be checked first for match alternateKeyNames.add(0, pkPropertyNames); List<String> dataObjectKeySet = retrieveKeySetFromMap(alternateKeyNames, parameters); if ((dataObjectKeySet == null) || dataObjectKeySet.isEmpty()) { LOG.warn("Matching key set not found in request for class: " + getDataObjectClass()); return null; } // found key set, now build map of key values pairs we can use to retrieve the object Map<String, Object> keyPropertyValues = new HashMap<String, Object>(); for (String keyPropertyName : dataObjectKeySet) { String keyPropertyValue = parameters.get(keyPropertyName); // uppercase value if needed Boolean forceUppercase = Boolean.FALSE; try { forceUppercase = getDataDictionaryService().getAttributeForceUppercase(dataObjectClass, keyPropertyName); } catch (UnknownBusinessClassAttributeException ex) { // swallowing exception because this check for ForceUppercase would // require a DD entry for the attribute, and we will just set force uppercase to false LOG.warn("Data object class " + dataObjectClass + " property " + keyPropertyName + " should probably have a DD definition.", ex); } if (forceUppercase.booleanValue()) { keyPropertyName = keyPropertyName.toUpperCase(); } // check security on key field if (getDataObjectAuthorizationService().attributeValueNeedsToBeEncryptedOnFormsAndLinks(dataObjectClass, keyPropertyName)) { try { keyPropertyValue = getEncryptionService().decrypt(keyPropertyValue); } catch (GeneralSecurityException e) { LOG.error("Data object class " + dataObjectClass + " property " + keyPropertyName + " should have been encrypted, but there was a problem decrypting it.", e); throw new RuntimeException("Data object class " + dataObjectClass + " property " + keyPropertyName + " should 
have been encrypted, but there was a problem decrypting it.", e); } } keyPropertyValues.put(keyPropertyName, keyPropertyValue); } // now retrieve the object based on the key set Object dataObject = null; ModuleService moduleService = KRADServiceLocatorWeb.getKualiModuleService().getResponsibleModuleService( getDataObjectClass()); if (moduleService != null && moduleService.isExternalizable(getDataObjectClass())) { dataObject = moduleService.getExternalizableBusinessObject(getDataObjectClass().asSubclass( ExternalizableBusinessObject.class), keyPropertyValues); } else if (BusinessObject.class.isAssignableFrom(getDataObjectClass())) { dataObject = getBusinessObjectService().findByPrimaryKey(getDataObjectClass().asSubclass( BusinessObject.class), keyPropertyValues); } return dataObject; } /** * Iterates through the list of key sets looking for a set where the given map of parameters has * all the key names and values are non-blank, first matched set is returned * * @param potentialKeySets - List of key sets to check for match * @param parameters - map of parameter name/value pairs for matching key set * @return List<String> key set that was matched, or null if none were matched */ protected List<String> retrieveKeySetFromMap(List<List<String>> potentialKeySets, Map<String, String> parameters) { List<String> foundKeySet = null; for (List<String> potentialKeySet : potentialKeySets) { boolean keySetMatch = true; for (String keyName : potentialKeySet) { if (!parameters.containsKey(keyName) || StringUtils.isBlank(parameters.get(keyName))) { keySetMatch = false; } } if (keySetMatch) { foundKeySet = potentialKeySet; break; } } return foundKeySet; } /** * Invokes the module service to retrieve any alternate keys that have been * defined for the given class * * @param clazz - class to find alternate keys for * @return List<List<String>> list of alternate key sets, or empty list if none are found */ protected List<List<String>> getAlternateKeysForClass(Class<?> clazz) { 
// Ask the module responsible for this class; fall back to an empty list so callers can always iterate.
KualiModuleService kualiModuleService = getKualiModuleService(); ModuleService moduleService = kualiModuleService.getResponsibleModuleService(clazz); List<List<String>> altKeys = null; if (moduleService != null) { altKeys = moduleService.listAlternatePrimaryKeyFieldNames(clazz); } return altKeys != null ? altKeys : new ArrayList<List<String>>(); } /** * @see Inquirable#buildInquirableLink(java.lang.Object, * java.lang.String, org.kuali.rice.krad.uif.widget.Inquiry) */ @Override public void buildInquirableLink(Object dataObject, String propertyName, Inquiry inquiry) { Class<?> inquiryObjectClass = null; // inquiry into data object class if property is title attribute Class<?> objectClass = ObjectUtils.materializeClassForProxiedObject(dataObject); if (propertyName.equals(getDataObjectMetaDataService().getTitleAttribute(objectClass))) { inquiryObjectClass = objectClass; } else if (ObjectUtils.isNestedAttribute(propertyName)) { String nestedPropertyName = ObjectUtils.getNestedAttributePrefix(propertyName); Object nestedPropertyObject = ObjectUtils.getNestedValue(dataObject, nestedPropertyName); if (ObjectUtils.isNotNull(nestedPropertyObject)) { String nestedPropertyPrimitive = ObjectUtils.getNestedAttributePrimitive(propertyName); Class<?> nestedPropertyObjectClass = ObjectUtils.materializeClassForProxiedObject(nestedPropertyObject); if (nestedPropertyPrimitive.equals(getDataObjectMetaDataService().getTitleAttribute( nestedPropertyObjectClass))) { inquiryObjectClass = nestedPropertyObjectClass; } } } // if not title, then get primary relationship DataObjectRelationship relationship = null; if (inquiryObjectClass == null) { relationship = getDataObjectMetaDataService().getDataObjectRelationship(dataObject, objectClass, propertyName, "", true, false, true); if (relationship != null) { inquiryObjectClass = relationship.getRelatedClass(); } } // if haven't found inquiry class, then no inquiry can be rendered if (inquiryObjectClass == null) { inquiry.setRender(false); 
// Below: DocumentHeader properties link to the workflow doc handler URL instead of a standard inquiry.
return; } if (DocumentHeader.class.isAssignableFrom(inquiryObjectClass)) { String documentNumber = (String) ObjectUtils.getPropertyValue(dataObject, propertyName); if (StringUtils.isNotBlank(documentNumber)) { inquiry.getInquiryLinkField().setHrefText(getConfigurationService().getPropertyValueAsString( KRADConstants.WORKFLOW_URL_KEY) + KRADConstants.DOCHANDLER_DO_URL + documentNumber + KRADConstants.DOCHANDLER_URL_CHUNK); inquiry.getInquiryLinkField().setLinkLabel(documentNumber); inquiry.setRender(true); } return; } synchronized (SUPER_CLASS_TRANSLATOR_LIST) { for (Class<?> clazz : SUPER_CLASS_TRANSLATOR_LIST) { if (clazz.isAssignableFrom(inquiryObjectClass)) { inquiryObjectClass = clazz; break; } } } if (!inquiryObjectClass.isInterface() && ExternalizableBusinessObject.class.isAssignableFrom( inquiryObjectClass)) { inquiryObjectClass = ExternalizableBusinessObjectUtils.determineExternalizableBusinessObjectSubInterface( inquiryObjectClass); } // listPrimaryKeyFieldNames returns an unmodifiable list. So a copy is // necessary. List<String> keys = new ArrayList<String>(getDataObjectMetaDataService().listPrimaryKeyFieldNames( inquiryObjectClass)); if (keys == null) { keys = Collections.emptyList(); } // build inquiry parameter mappings Map<String, String> inquiryParameters = new HashMap<String, String>(); for (String keyName : keys) { String keyConversion = keyName; if (relationship != null) { keyConversion = relationship.getParentAttributeForChildAttribute(keyName); } else if (ObjectUtils.isNestedAttribute(propertyName)) { String nestedAttributePrefix = ObjectUtils.getNestedAttributePrefix(propertyName); keyConversion = nestedAttributePrefix + "." 
+ keyName; } inquiryParameters.put(keyConversion, keyName); } inquiry.buildInquiryLink(dataObject, propertyName, inquiryObjectClass, inquiryParameters); } /** * @see Inquirable#setDataObjectClass(java.lang.Class) */ @Override public void setDataObjectClass(Class<?> dataObjectClass) { this.dataObjectClass = dataObjectClass; } /** * Retrieves the data object class configured for this inquirable * * @return Class<?> of configured data object, or null if data object class not configured */ protected Class<?> getDataObjectClass() { return this.dataObjectClass; } protected ConfigurationService getConfigurationService() { return KRADServiceLocator.getKualiConfigurationService(); } protected DataObjectMetaDataService getDataObjectMetaDataService() { return KRADServiceLocatorWeb.getDataObjectMetaDataService(); } protected KualiModuleService getKualiModuleService() { return KRADServiceLocatorWeb.getKualiModuleService(); } protected DataDictionaryService getDataDictionaryService() { return KRADServiceLocatorWeb.getDataDictionaryService(); } protected DataObjectAuthorizationService getDataObjectAuthorizationService() { return KRADServiceLocatorWeb.getDataObjectAuthorizationService(); } protected EncryptionService getEncryptionService() { return CoreApiServiceLocator.getEncryptionService(); } protected BusinessObjectService getBusinessObjectService() { return KRADServiceLocator.getBusinessObjectService(); } }
/* * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.bitcoin.protocols.channels; import com.google.bitcoin.core.*; import com.google.bitcoin.protocols.channels.PaymentChannelCloseException.CloseReason; import com.google.bitcoin.utils.Threading; import com.google.common.annotations.VisibleForTesting; import com.google.common.util.concurrent.*; import com.google.protobuf.ByteString; import net.jcip.annotations.GuardedBy; import org.bitcoin.paymentchannel.Protos; import org.slf4j.LoggerFactory; import java.math.BigInteger; import java.util.concurrent.locks.ReentrantLock; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; /** * <p>A class which handles most of the complexity of creating a payment channel connection by providing a * simple in/out interface which is provided with protobufs from the server and which generates protobufs which should * be sent to the server.</p> * * <p>Does all required verification of server messages and properly stores state objects in the wallet-attached * {@link StoredPaymentChannelClientStates} so that they are automatically closed when necessary and refund * transactions are not lost if the application crashes before it unlocks.</p> * * <p>Though this interface is largely designed with stateful protocols (eg simple TCP connections) in mind, it is also * possible to use it with stateless protocols (eg sending protobufs when 
required over HTTP headers). In this case, the * "connection" translates roughly into the server-client relationship. See the javadocs for specific functions for more * details.</p> */ public class PaymentChannelClient implements IPaymentChannelClient { private static final org.slf4j.Logger log = LoggerFactory.getLogger(PaymentChannelClient.class); protected final ReentrantLock lock = Threading.lock("channelclient"); @GuardedBy("lock") private final ClientConnection conn; // Used to keep track of whether or not the "socket" ie connection is open and we can generate messages @VisibleForTesting @GuardedBy("lock") boolean connectionOpen = false; // The state object used to step through initialization and pay the server @GuardedBy("lock") private PaymentChannelClientState state; // The step we are at in initialization, this is partially duplicated in the state object private enum InitStep { WAITING_FOR_CONNECTION_OPEN, WAITING_FOR_VERSION_NEGOTIATION, WAITING_FOR_INITIATE, WAITING_FOR_REFUND_RETURN, WAITING_FOR_CHANNEL_OPEN, CHANNEL_OPEN, WAITING_FOR_CHANNEL_CLOSE, CHANNEL_CLOSED, } @GuardedBy("lock") private InitStep step = InitStep.WAITING_FOR_CONNECTION_OPEN; // Will either hold the StoredClientChannel of this channel or null after connectionOpen private StoredClientChannel storedChannel; // An arbitrary hash which identifies this channel (specified by the API user) private final Sha256Hash serverId; // The wallet associated with this channel private final Wallet wallet; // Information used during channel initialization to send to the server or check what the server sends to us private final ECKey myKey; private final BigInteger maxValue; @GuardedBy("lock") SettableFuture<BigInteger> increasePaymentFuture; @GuardedBy("lock") BigInteger lastPaymentActualAmount; /** * <p>The maximum amount of time for which we will accept the server locking up our funds for the multisig * contract.</p> * * <p>Note that though this is not final, it is in all caps because it should 
generally not be modified unless you * have some guarantee that the server will not request at least this (channels will fail if this is too small).</p> * * <p>24 hours is the default as it is expected that clients limit risk exposure by limiting channel size instead of * limiting lock time when dealing with potentially malicious servers.</p> */ public long MAX_TIME_WINDOW = 24*60*60; /** * Constructs a new channel manager which waits for {@link PaymentChannelClient#connectionOpen()} before acting. * * @param wallet The wallet which will be paid from, and where completed transactions will be committed. * Must already have a {@link StoredPaymentChannelClientStates} object in its extensions set. * @param myKey A freshly generated keypair used for the multisig contract and refund output. * @param maxValue The maximum value the server is allowed to request that we lock into this channel until the * refund transaction unlocks. Note that if there is a previously open channel, the refund * transaction used in this channel may be larger than maxValue. Thus, maxValue is not a method for * limiting the amount payable through this channel. * @param serverId An arbitrary hash representing this channel. This must uniquely identify the server. If an * existing stored channel exists in the wallet's {@link StoredPaymentChannelClientStates}, then an * attempt will be made to resume that channel. 
* @param conn A callback listener which represents the connection to the server (forwards messages we generate to * the server) */ public PaymentChannelClient(Wallet wallet, ECKey myKey, BigInteger maxValue, Sha256Hash serverId, ClientConnection conn) { this.wallet = checkNotNull(wallet); this.myKey = checkNotNull(myKey); this.maxValue = checkNotNull(maxValue); this.serverId = checkNotNull(serverId); this.conn = checkNotNull(conn); } @GuardedBy("lock") private void receiveInitiate(Protos.Initiate initiate, BigInteger contractValue) throws VerificationException, ValueOutOfRangeException { log.info("Got INITIATE message, providing refund transaction"); state = new PaymentChannelClientState(wallet, myKey, new ECKey(null, initiate.getMultisigKey().toByteArray()), contractValue, initiate.getExpireTimeSecs()); state.initiate(); step = InitStep.WAITING_FOR_REFUND_RETURN; Protos.ProvideRefund.Builder provideRefundBuilder = Protos.ProvideRefund.newBuilder() .setMultisigKey(ByteString.copyFrom(myKey.getPubKey())) .setTx(ByteString.copyFrom(state.getIncompleteRefundTransaction().bitcoinSerialize())); conn.sendToServer(Protos.TwoWayChannelMessage.newBuilder() .setProvideRefund(provideRefundBuilder) .setType(Protos.TwoWayChannelMessage.MessageType.PROVIDE_REFUND) .build()); } @GuardedBy("lock") private void receiveRefund(Protos.TwoWayChannelMessage msg) throws VerificationException { checkState(step == InitStep.WAITING_FOR_REFUND_RETURN && msg.hasReturnRefund()); log.info("Got RETURN_REFUND message, providing signed contract"); Protos.ReturnRefund returnedRefund = msg.getReturnRefund(); state.provideRefundSignature(returnedRefund.getSignature().toByteArray()); step = InitStep.WAITING_FOR_CHANNEL_OPEN; // Before we can send the server the contract (ie send it to the network), we must ensure that our refund // transaction is safely in the wallet - thus we store it (this also keeps it up-to-date when we pay) state.storeChannelInWallet(serverId); Protos.ProvideContract.Builder 
provideContractBuilder = Protos.ProvideContract.newBuilder() .setTx(ByteString.copyFrom(state.getMultisigContract().bitcoinSerialize())); conn.sendToServer(Protos.TwoWayChannelMessage.newBuilder() .setProvideContract(provideContractBuilder) .setType(Protos.TwoWayChannelMessage.MessageType.PROVIDE_CONTRACT) .build()); } @GuardedBy("lock") private void receiveChannelOpen() throws VerificationException { checkState(step == InitStep.WAITING_FOR_CHANNEL_OPEN || (step == InitStep.WAITING_FOR_INITIATE && storedChannel != null), step); log.info("Got CHANNEL_OPEN message, ready to pay"); if (step == InitStep.WAITING_FOR_INITIATE) state = new PaymentChannelClientState(storedChannel, wallet); step = InitStep.CHANNEL_OPEN; // channelOpen should disable timeouts, but // TODO accomodate high latency between PROVIDE_CONTRACT and here conn.channelOpen(); } /** * {@inheritDoc} */ @Override public void receiveMessage(Protos.TwoWayChannelMessage msg) throws ValueOutOfRangeException { lock.lock(); try { checkState(connectionOpen); // If we generate an error, we set errorBuilder and closeReason and break, otherwise we return Protos.Error.Builder errorBuilder; CloseReason closeReason; try { switch (msg.getType()) { case SERVER_VERSION: checkState(step == InitStep.WAITING_FOR_VERSION_NEGOTIATION && msg.hasServerVersion()); // Server might send back a major version lower than our own if they want to fallback to a // lower version. We can't handle that, so we just close the channel. 
// Only protocol major version 0 is accepted here; any other major version aborts with NO_ACCEPTABLE_VERSION.
if (msg.getServerVersion().getMajor() != 0) { errorBuilder = Protos.Error.newBuilder() .setCode(Protos.Error.ErrorCode.NO_ACCEPTABLE_VERSION); closeReason = CloseReason.NO_ACCEPTABLE_VERSION; break; } log.info("Got version handshake, awaiting INITIATE or resume CHANNEL_OPEN"); step = InitStep.WAITING_FOR_INITIATE; return; case INITIATE: checkState(step == InitStep.WAITING_FOR_INITIATE && msg.hasInitiate()); Protos.Initiate initiate = msg.getInitiate(); checkState(initiate.getExpireTimeSecs() > 0 && initiate.getMinAcceptedChannelSize() >= 0); if (initiate.getExpireTimeSecs() > Utils.now().getTime()/1000 + MAX_TIME_WINDOW) { errorBuilder = Protos.Error.newBuilder() .setCode(Protos.Error.ErrorCode.TIME_WINDOW_TOO_LARGE); closeReason = CloseReason.TIME_WINDOW_TOO_LARGE; break; } BigInteger minChannelSize = BigInteger.valueOf(initiate.getMinAcceptedChannelSize()); if (maxValue.compareTo(minChannelSize) < 0) { errorBuilder = Protos.Error.newBuilder() .setCode(Protos.Error.ErrorCode.CHANNEL_VALUE_TOO_LARGE); closeReason = CloseReason.SERVER_REQUESTED_TOO_MUCH_VALUE; log.error("Server requested too much value"); break; } receiveInitiate(initiate, maxValue); return; case RETURN_REFUND: receiveRefund(msg); return; case CHANNEL_OPEN: receiveChannelOpen(); return; case PAYMENT_ACK: receivePaymentAck(); return; case CLOSE: receiveClose(msg); return; case ERROR: checkState(msg.hasError()); log.error("Server sent ERROR {} with explanation {}", msg.getError().getCode().name(), msg.getError().hasExplanation() ? 
msg.getError().getExplanation() : ""); conn.destroyConnection(CloseReason.REMOTE_SENT_ERROR); return; default: log.error("Got unknown message type or type that doesn't apply to clients."); errorBuilder = Protos.Error.newBuilder() .setCode(Protos.Error.ErrorCode.SYNTAX_ERROR); closeReason = CloseReason.REMOTE_SENT_INVALID_MESSAGE; break; } } catch (VerificationException e) { log.error("Caught verification exception handling message from server", e); errorBuilder = Protos.Error.newBuilder() .setCode(Protos.Error.ErrorCode.BAD_TRANSACTION) .setExplanation(e.getMessage()); closeReason = CloseReason.REMOTE_SENT_INVALID_MESSAGE; } catch (IllegalStateException e) { log.error("Caught illegal state exception handling message from server", e); errorBuilder = Protos.Error.newBuilder() .setCode(Protos.Error.ErrorCode.SYNTAX_ERROR); closeReason = CloseReason.REMOTE_SENT_INVALID_MESSAGE; } conn.sendToServer(Protos.TwoWayChannelMessage.newBuilder() .setError(errorBuilder) .setType(Protos.TwoWayChannelMessage.MessageType.ERROR) .build()); conn.destroyConnection(closeReason); } finally { lock.unlock(); } } @GuardedBy("lock") private void receiveClose(Protos.TwoWayChannelMessage msg) throws VerificationException { checkState(lock.isHeldByCurrentThread()); if (msg.hasClose()) { Transaction closeTx = new Transaction(wallet.getParams(), msg.getClose().getTx().toByteArray()); log.info("CLOSE message received with final contract {}", closeTx.getHash()); // TODO: set source if (state != null && state().isCloseTransaction(closeTx)) { // The wallet has a listener on it that the state object will use to do the right thing at this // point (like watching it for confirmations). The tx has been checked by now for syntactical validity // and that it correctly spends the multisig contract. 
// Hand the verified close tx to the wallet so its listeners can react (e.g. watch for confirmations).
wallet.receivePending(closeTx, null); } } else { log.info("CLOSE message received without final contract"); } if (step == InitStep.WAITING_FOR_CHANNEL_CLOSE) conn.destroyConnection(CloseReason.CLIENT_REQUESTED_CLOSE); else conn.destroyConnection(CloseReason.SERVER_REQUESTED_CLOSE); step = InitStep.CHANNEL_CLOSED; } /** * <p>Called when the connection terminates. Notifies the {@link StoredClientChannel} object that we can attempt to * resume this channel in the future and stops generating messages for the server.</p> * * <p>For stateless protocols, this translates to a client not using the channel for the immediate future, but * intending to reopen the channel later. There is likely little reason to use this in a stateless protocol.</p> * * <p>Note that this <b>MUST</b> still be called even after either * {@link ClientConnection#destroyConnection(com.google.bitcoin.protocols.channels.PaymentChannelCloseException.CloseReason)} or * {@link PaymentChannelClient#close()} is called to actually handle the connection close logic.</p> */ @Override public void connectionClosed() { lock.lock(); try { connectionOpen = false; if (state != null) state.disconnectFromChannel(); } finally { lock.unlock(); } } /** * <p>Closes the connection, notifying the server it should close the channel by broadcasting the most recent payment * transaction.</p> * * <p>Note that this only generates a CLOSE message for the server and calls * {@link ClientConnection#destroyConnection(CloseReason)} to close the connection, it does not * actually handle connection close logic, and {@link PaymentChannelClient#connectionClosed()} must still be called * after the connection fully closes.</p> * * @throws IllegalStateException If the connection is not currently open (ie the CLOSE message cannot be sent) */ @Override public void close() throws IllegalStateException { lock.lock(); try { checkState(connectionOpen); step = InitStep.WAITING_FOR_CHANNEL_CLOSE; log.info("Sending a CLOSE message to the server and 
waiting for response indicating successful propagation."); conn.sendToServer(Protos.TwoWayChannelMessage.newBuilder() .setType(Protos.TwoWayChannelMessage.MessageType.CLOSE) .build()); } finally { lock.unlock(); } } /** * <p>Called to indicate the connection has been opened and messages can now be generated for the server.</p> * * <p>Attempts to find a channel to resume and generates a CLIENT_VERSION message for the server based on the * result.</p> */ @Override public void connectionOpen() { lock.lock(); try { connectionOpen = true; StoredPaymentChannelClientStates channels = (StoredPaymentChannelClientStates) wallet.getExtensions().get(StoredPaymentChannelClientStates.EXTENSION_ID); if (channels != null) storedChannel = channels.getUsableChannelForServerID(serverId); step = InitStep.WAITING_FOR_VERSION_NEGOTIATION; Protos.ClientVersion.Builder versionNegotiationBuilder = Protos.ClientVersion.newBuilder() .setMajor(0).setMinor(1); if (storedChannel != null) { versionNegotiationBuilder.setPreviousChannelContractHash(ByteString.copyFrom(storedChannel.contract.getHash().getBytes())); log.info("Begun version handshake, attempting to reopen channel with contract hash {}", storedChannel.contract.getHash()); } else log.info("Begun version handshake creating new channel"); conn.sendToServer(Protos.TwoWayChannelMessage.newBuilder() .setType(Protos.TwoWayChannelMessage.MessageType.CLIENT_VERSION) .setClientVersion(versionNegotiationBuilder) .build()); } finally { lock.unlock(); } } /** * <p>Gets the {@link PaymentChannelClientState} object which stores the current state of the connection with the * server.</p> * * <p>Note that if you call any methods which update state directly the server will not be notified and channel * initialization logic in the connection may fail unexpectedly.</p> */ public PaymentChannelClientState state() { lock.lock(); try { return state; } finally { lock.unlock(); } } /** * Increments the total value which we pay the server. 
Note that the amount of money sent may not be the same as the * amount of money actually requested. It can be larger if the amount left over in the channel would be too small to * be accepted by the Bitcoin network. ValueOutOfRangeException will be thrown, however, if there's not enough money * left in the channel to make the payment at all. Only one payment can be in-flight at once. You have to ensure * you wait for the previous increase payment future to complete before incrementing the payment again. * * @param size How many satoshis to increment the payment by (note: not the new total). * @throws ValueOutOfRangeException If the size is negative or would pay more than this channel's total value * ({@link PaymentChannelClientConnection#state()}.getTotalValue()) * @throws IllegalStateException If the channel has been closed or is not yet open * (see {@link PaymentChannelClientConnection#getChannelOpenFuture()} for the second) * @return a future that completes when the server acknowledges receipt and acceptance of the payment. 
*/ @Override public ListenableFuture<BigInteger> incrementPayment(BigInteger size) throws ValueOutOfRangeException, IllegalStateException { lock.lock(); try { if (state() == null || !connectionOpen || step != InitStep.CHANNEL_OPEN) throw new IllegalStateException("Channel is not fully initialized/has already been closed"); if (increasePaymentFuture != null) throw new IllegalStateException("Already incrementing paying, wait for previous payment to complete."); PaymentChannelClientState.IncrementedPayment payment = state().incrementPaymentBy(size); Protos.UpdatePayment.Builder updatePaymentBuilder = Protos.UpdatePayment.newBuilder() .setSignature(ByteString.copyFrom(payment.signature.encodeToBitcoin())) .setClientChangeValue(state.getValueRefunded().longValue()); increasePaymentFuture = SettableFuture.create(); increasePaymentFuture.addListener(new Runnable() { @Override public void run() { lock.lock(); increasePaymentFuture = null; lock.unlock(); } }, MoreExecutors.sameThreadExecutor()); conn.sendToServer(Protos.TwoWayChannelMessage.newBuilder() .setUpdatePayment(updatePaymentBuilder) .setType(Protos.TwoWayChannelMessage.MessageType.UPDATE_PAYMENT) .build()); lastPaymentActualAmount = payment.amount; return increasePaymentFuture; } finally { lock.unlock(); } } private void receivePaymentAck() { SettableFuture<BigInteger> future; BigInteger value; lock.lock(); try { if (increasePaymentFuture == null) return; checkNotNull(increasePaymentFuture, "Server sent a PAYMENT_ACK with no outstanding payment"); log.info("Received a PAYMENT_ACK from the server"); future = increasePaymentFuture; value = lastPaymentActualAmount; } finally { lock.unlock(); } // Ensure the future runs without the client lock held. future.set(value); } }
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * JFlex 1.6.1 * * Copyright (C) 1998-2015 Gerwin Klein <lsf@jflex.de> * * All rights reserved. * * * * License: BSD * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ package jflex; /** * A set of NFA states (= integers). * * Very similar to java.util.BitSet, but is faster and doesn't crash * * @author Gerwin Klein * @version JFlex 1.6.1 */ final public class StateSet { private final boolean DEBUG = false; public final static StateSet EMPTY = new StateSet(); final static int BITS = 6; final static int MASK = (1<<BITS)-1; long bits[]; public StateSet() { this(256); } public StateSet(int size) { bits = new long[size2nbits(size)]; } public StateSet(int size, int state) { this(size); addState(state); } public StateSet(StateSet set) { bits = new long[set.bits.length]; System.arraycopy(set.bits, 0, bits, 0, set.bits.length); } public void addState(int state) { if (DEBUG) { Out.dump("StateSet.addState("+state+") start"); //$NON-NLS-1$ //$NON-NLS-2$ Out.dump("Set is : "+this); //$NON-NLS-1$ } int index = state >> BITS; if (index >= bits.length) resize(state); bits[index] |= (1L << (state & MASK)); if (DEBUG) { Out.dump("StateSet.addState("+state+") end"); //$NON-NLS-1$ //$NON-NLS-2$ Out.dump("Set is : "+this); //$NON-NLS-1$ } } private int size2nbits (int size) { return ((size >> BITS) + 1); } private void resize(int size) { int needed = size2nbits(size); // if (needed < bits.length) return; long newbits[] = new long[Math.max(bits.length*4,needed)]; System.arraycopy(bits, 0, newbits, 0, bits.length); bits = newbits; } public void clear() { int l = bits.length; for (int i = 0; i < l; i++) bits[i] = 0; } public boolean isElement(int state) { int index = state >> BITS; if (index >= bits.length) return false; return (bits[index] & (1L << (state & MASK))) != 0; } /** * Returns one element of the set and removes it. * * Precondition: the set is not empty. 
*/ public int getAndRemoveElement() { int i = 0; int o = 0; long m = 1; while (bits[i] == 0) i++; while ( (bits[i] & m) == 0 ) { m<<= 1; o++; } bits[i]&= ~m; return (i << BITS) + o; } public void remove(int state) { int index = state >> BITS; if (index >= bits.length) return; bits[index] &= ~(1L << (state & MASK)); } /** * Returns the set of elements that contained are in the specified set * but are not contained in this set. */ public StateSet complement(StateSet set) { if (set == null) return null; StateSet result = new StateSet(); result.bits = new long[set.bits.length]; int i; int m = Math.min(bits.length, set.bits.length); for (i = 0; i < m; i++) { result.bits[i] = ~bits[i] & set.bits[i]; } if (bits.length < set.bits.length) System.arraycopy(set.bits, m, result.bits, m, result.bits.length-m); if (DEBUG) Out.dump("Complement of "+this+Out.NL+"and "+set+Out.NL+" is :"+result); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ return result; } public void add(StateSet set) { if (DEBUG) Out.dump("StateSet.add("+set+") start"); //$NON-NLS-1$ //$NON-NLS-2$ if (set == null) return; long tbits[]; long sbits[] = set.bits; int sbitsl = sbits.length; if (bits.length < sbitsl) { tbits = new long[sbitsl]; System.arraycopy(bits, 0, tbits, 0, bits.length); } else { tbits = this.bits; } for (int i = 0; i < sbitsl; i++) { tbits[i] |= sbits[i]; } this.bits = tbits; if (DEBUG) { Out.dump("StateSet.add("+set+") end"); //$NON-NLS-1$ //$NON-NLS-2$ Out.dump("Set is : "+this); //$NON-NLS-1$ } } public boolean containsSet(StateSet set) { if (DEBUG) Out.dump("StateSet.containsSet("+set+"), this="+this); //$NON-NLS-1$ //$NON-NLS-2$ int i; int min = Math.min(bits.length, set.bits.length); for (i = 0; i < min; i++) if ( (bits[i] & set.bits[i]) != set.bits[i] ) return false; for (i = min; i < set.bits.length; i++) if ( set.bits[i] != 0 ) return false; return true; } /** * @throws ClassCastException if b is not a StateSet * @throws NullPointerException if b is null */ public boolean equals(Object 
b) { int i = 0; int l1,l2; StateSet set = (StateSet) b; if (DEBUG) Out.dump("StateSet.equals("+set+"), this="+this); //$NON-NLS-1$ //$NON-NLS-2$ l1 = bits.length; l2 = set.bits.length; if (l1 <= l2) { while (i < l1) { if (bits[i] != set.bits[i]) return false; i++; } while (i < l2) if (set.bits[i++] != 0) return false; } else { while (i < l2) { if (bits[i] != set.bits[i]) return false; i++; } while (i < l1) if (bits[i++] != 0) return false; } return true; } public int hashCode() { long h = 1234; long [] _bits = bits; int i = bits.length-1; // ignore zero high bits while (i >= 0 && _bits[i] == 0) i--; while (i >= 0) h ^= _bits[i--] * i; return (int)((h >> 32) ^ h); } public StateSetEnumerator states() { return new StateSetEnumerator(this); } public boolean containsElements() { for (long bit : bits) if (bit != 0) return true; return false; } public StateSet copy() { StateSet set = new StateSet(); set.bits = new long[bits.length]; System.arraycopy(bits, 0, set.bits, 0, bits.length); return set; } /** * Copy specified StateSet into this. * * @param set the state set to copy. */ public void copy(StateSet set) { if (DEBUG) Out.dump("StateSet.copy("+set+") start"); //$NON-NLS-1$ //$NON-NLS-2$ if (set == null) { for (int i = 0; i < bits.length; i++) bits[i] = 0; return; } if (bits.length < set.bits.length) { bits = new long[set.bits.length]; } else { for (int i = set.bits.length; i < bits.length; i++) bits[i] = 0; } System.arraycopy(set.bits, 0, bits, 0, bits.length); if (DEBUG) { Out.dump("StateSet.copy("+set+") end"); //$NON-NLS-1$ //$NON-NLS-2$ Out.dump("Set is : "+this); //$NON-NLS-1$ } } public String toString() { StateSetEnumerator set = states(); StringBuilder result = new StringBuilder("{"); //$NON-NLS-1$ if ( set.hasMoreElements() ) result.append(""+set.nextElement()); //$NON-NLS-1$ while ( set.hasMoreElements() ) { int i = set.nextElement(); result.append(", ").append(i); //$NON-NLS-1$ } result.append("}"); //$NON-NLS-1$ return result.toString(); } }
/* * Copyright 2015 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.cellbase.mongodb.impl; import com.mongodb.BulkWriteException; import com.mongodb.QueryBuilder; import com.mongodb.bulk.BulkWriteResult; import com.mongodb.client.model.Filters; import com.mongodb.client.model.Projections; import org.bson.Document; import org.bson.conversions.Bson; import org.opencb.biodata.models.core.Region; import org.opencb.biodata.models.variant.Variant; import org.opencb.biodata.models.variant.avro.Score; import org.opencb.cellbase.core.api.VariantDBAdaptor; import org.opencb.cellbase.mongodb.MongoDBCollectionConfiguration; import org.opencb.cellbase.mongodb.VariantMongoIterator; import org.opencb.commons.datastore.core.Query; import org.opencb.commons.datastore.core.QueryOptions; import org.opencb.commons.datastore.core.QueryResult; import org.opencb.commons.datastore.mongodb.MongoDBCollection; import org.opencb.commons.datastore.mongodb.MongoDataStore; import java.util.*; import java.util.function.Consumer; import java.util.regex.Pattern; /** * Created by imedina on 26/11/15. 
*/ public class VariantMongoDBAdaptor extends MongoDBAdaptor implements VariantDBAdaptor<Variant> { private static final String POP_FREQUENCIES_FIELD = "annotation.populationFrequencies"; private static final float DECIMAL_RESOLUTION = 100f; private MongoDBCollection caddDBCollection; public VariantMongoDBAdaptor(String species, String assembly, MongoDataStore mongoDataStore) { super(species, assembly, mongoDataStore); mongoDBCollection = mongoDataStore.getCollection("variation"); caddDBCollection = mongoDataStore.getCollection("variation_functional_score"); logger.debug("VariationMongoDBAdaptor: in 'constructor'"); } @Override public QueryResult startsWith(String id, QueryOptions options) { Bson regex = Filters.regex("ids", Pattern.compile("^" + id)); Bson include = Projections.include("ids", "chromosome", "start", "end"); return mongoDBCollection.find(regex, include, options); } @Override public QueryResult<Variant> next(Query query, QueryOptions options) { return null; } @Override public QueryResult nativeNext(Query query, QueryOptions options) { return null; } @Override public QueryResult getIntervalFrequencies(Query query, int intervalSize, QueryOptions options) { if (query.getString("region") != null) { Region region = Region.parseRegion(query.getString("region")); Bson bsonDocument = parseQuery(query); return getIntervalFrequencies(bsonDocument, region, intervalSize, options); } return null; } @Override public QueryResult<Long> update(List objectList, String field) { QueryResult<Long> nLoadedObjects = null; switch (field) { case POP_FREQUENCIES_FIELD: nLoadedObjects = updatePopulationFrequencies((List<Document>) objectList); break; default: logger.error("Invalid field {}: no action implemented for updating this field.", field); break; } return nLoadedObjects; } @Override public QueryResult<Long> count(Query query) { Bson document = parseQuery(query); return mongoDBCollection.count(document); } @Override public QueryResult distinct(Query query, String field) 
{ Bson document = parseQuery(query); return mongoDBCollection.distinct(field, document); } @Override public QueryResult stats(Query query) { return null; } @Override public QueryResult<Variant> get(Query query, QueryOptions options) { Bson bson = parseQuery(query); // options.put(MongoDBCollection.SKIP_COUNT, true); options = addPrivateExcludeOptions(options); return mongoDBCollection.find(bson, null, Variant.class, options); } @Override public QueryResult nativeGet(Query query, QueryOptions options) { Bson bson = parseQuery(query); // options.put(MongoDBCollection.SKIP_COUNT, true); return mongoDBCollection.find(bson, options); } @Override public Iterator<Variant> iterator(Query query, QueryOptions options) { Bson bson = parseQuery(query); options = addPrivateExcludeOptions(options); return new VariantMongoIterator(mongoDBCollection.nativeQuery().find(bson, options).iterator()); } @Override public Iterator nativeIterator(Query query, QueryOptions options) { Bson bson = parseQuery(query); return mongoDBCollection.nativeQuery().find(bson, options).iterator(); } @Override public void forEach(Query query, Consumer<? 
super Object> action, QueryOptions options) { Objects.requireNonNull(action); Iterator iterator = nativeIterator(query, options); while (iterator.hasNext()) { action.accept(iterator.next()); } } @Override public QueryResult rank(Query query, String field, int numResults, boolean asc) { return null; } @Override public QueryResult groupBy(Query query, String field, QueryOptions options) { Bson bsonQuery = parseQuery(query); return groupBy(bsonQuery, field, "name", options); } @Override public QueryResult groupBy(Query query, List<String> fields, QueryOptions options) { Bson bsonQuery = parseQuery(query); return groupBy(bsonQuery, fields, "name", options); } private Bson parseQuery(Query query) { List<Bson> andBsonList = new ArrayList<>(); createRegionQuery(query, VariantMongoDBAdaptor.QueryParams.REGION.key(), MongoDBCollectionConfiguration.VARIATION_CHUNK_SIZE, andBsonList); createOrQuery(query, VariantMongoDBAdaptor.QueryParams.ID.key(), "ids", andBsonList); createOrQuery(query, VariantMongoDBAdaptor.QueryParams.GENE.key(), "annotation.consequenceTypes.ensemblGeneId", andBsonList); createOrQuery(query, QueryParams.CHROMOSOME.key(), "chromosome", andBsonList); createOrQuery(query, QueryParams.REFERENCE.key(), "reference", andBsonList); createOrQuery(query, QueryParams.ALTERNATE.key(), "alternate", andBsonList); createOrQuery(query, VariantMongoDBAdaptor.QueryParams.CONSEQUENCE_TYPE.key(), "consequenceTypes.sequenceOntologyTerms.name", andBsonList); // createOrQuery(query, VariantMongoDBAdaptor.QueryParams.XREFS.key(), "transcripts.xrefs.id", andBsonList); if (andBsonList.size() > 0) { return Filters.and(andBsonList); } else { return new Document(); } } private QueryResult<Long> updatePopulationFrequencies(List<Document> variantDocumentList) { List<Bson> queries = new ArrayList<>(variantDocumentList.size()); List<Bson> updates = new ArrayList<>(variantDocumentList.size()); // QueryResult<Long> longQueryResult = null; for (Document variantDBObject : 
variantDocumentList) { Document annotationDBObject = (Document) variantDBObject.get("annotation"); Document push = new Document(POP_FREQUENCIES_FIELD, annotationDBObject.get("populationFrequencies")); // Remove annotation object from the DBObject so that push and setOnInsert do not update the same fields: // i.e. annotation.populationFrequencies and annotation variantDBObject.remove("annotation"); addChunkId(variantDBObject); Document update = new Document() .append("$pushAll", push) .append("$setOnInsert", variantDBObject); updates.add(update); String chunkId = getChunkIdPrefix((String) variantDBObject.get("chromosome"), (int) variantDBObject.get("start"), MongoDBCollectionConfiguration.VARIATION_CHUNK_SIZE); queries.add(new Document("_chunkIds", chunkId) .append("chromosome", variantDBObject.get("chromosome")) .append("start", variantDBObject.get("start")) .append("end", variantDBObject.get("end")) .append("reference", variantDBObject.get("reference")) .append("alternate", variantDBObject.get("alternate"))); } QueryResult<BulkWriteResult> bulkWriteResult; if (!queries.isEmpty()) { logger.info("updating object"); QueryOptions options = new QueryOptions("upsert", true); options.put("multi", false); try { bulkWriteResult = mongoDBCollection.update(queries, updates, options); } catch (BulkWriteException e) { throw e; } logger.info("{} object updated", bulkWriteResult.first().getUpserts().size() + bulkWriteResult.first().getModifiedCount()); QueryResult<Long> longQueryResult = new QueryResult<>(bulkWriteResult.getId(), bulkWriteResult.getDbTime(), bulkWriteResult .getNumResults(), bulkWriteResult.getNumTotalResults(), bulkWriteResult.getWarningMsg(), bulkWriteResult.getErrorMsg(), Collections.singletonList((long) (bulkWriteResult.first().getUpserts().size() + bulkWriteResult.first().getModifiedCount()))); // return bulkWriteResult.first().getUpserts().size() + bulkWriteResult.first().getModifiedCount(); return longQueryResult; } logger.info("no object updated"); 
return null; } // Method copied from MongoDBCellbaseLoader. In a near future only this one will stay. Insert work currently done // by MongoDBCellbaseLoader must be replaced by an appropriate method in this adaptor private void addChunkId(Document dbObject) { List<String> chunkIds = new ArrayList<>(); int chunkStart = (Integer) dbObject.get("start") / MongoDBCollectionConfiguration.VARIATION_CHUNK_SIZE; int chunkEnd = (Integer) dbObject.get("end") / MongoDBCollectionConfiguration.VARIATION_CHUNK_SIZE; String chunkIdSuffix = MongoDBCollectionConfiguration.VARIATION_CHUNK_SIZE / 1000 + "k"; for (int i = chunkStart; i <= chunkEnd; i++) { if (dbObject.containsKey("chromosome")) { chunkIds.add(dbObject.get("chromosome") + "_" + i + "_" + chunkIdSuffix); } else { chunkIds.add(dbObject.get("sequenceName") + "_" + i + "_" + chunkIdSuffix); } } dbObject.put("_chunkIds", chunkIds); } @Override public QueryResult<Score> getFunctionalScoreVariant(Variant variant, QueryOptions queryOptions) { String chromosome = variant.getChromosome(); int position = variant.getStart(); String reference = variant.getReference(); String alternate = variant.getAlternate(); String chunkId = getChunkIdPrefix(chromosome, position, MongoDBCollectionConfiguration.VARIATION_FUNCTIONAL_SCORE_CHUNK_SIZE); QueryBuilder builder = QueryBuilder.start("_chunkIds").is(chunkId); // .and("chromosome").is(chromosome) // .and("start").is(position); // System.out.println(chunkId); QueryResult result = executeQuery(chromosome + "_" + position + "_" + reference + "_" + alternate, new Document(builder.get().toMap()), queryOptions, caddDBCollection); // System.out.println("result = " + result); List<Score> scores = new ArrayList<>(); for (Object object : result.getResult()) { // System.out.println("object = " + object); Document dbObject = (Document) object; int chunkStart = dbObject.getInteger("start"); int chunkEnd = dbObject.getInteger("end"); // CADD positions are not continuous through the whole chromosome. 
Several documents may be associated with // the same chunk id: we have to be sure that current document contains queried position. Only two documents // will contain queried position - one for raw and one for scaled values if (position >= chunkStart && position <= chunkEnd) { int offset = (position - chunkStart); ArrayList basicDBList = dbObject.get("values", ArrayList.class); // long l1 = 0L; // TODO: delete // try { // TODO: delete long l1 = Long.parseLong(basicDBList.get(offset).toString()); // l1 = (Long) basicDBList.get(offset); // } catch (Exception e) { // TODO: delete // logger.error("problematic variant: {}", variant.toString()); // throw e; // } if (dbObject.getString("source").equalsIgnoreCase("cadd_raw")) { float value = 0f; switch (alternate.toLowerCase()) { case "a": // value = ((short) (l1 >> 48) - 10000) / DECIMAL_RESOLUTION; value = (((short) (l1 >> 48)) / DECIMAL_RESOLUTION) - 10; break; case "c": value = (((short) (l1 >> 32)) / DECIMAL_RESOLUTION) - 10; break; case "g": value = (((short) (l1 >> 16)) / DECIMAL_RESOLUTION) - 10; break; case "t": value = (((short) (l1 >> 0)) / DECIMAL_RESOLUTION) - 10; break; default: break; } scores.add(Score.newBuilder() .setScore(value) .setSource(dbObject.getString("source")) .setDescription(null) // .setDescription("") .build()); } if (dbObject.getString("source").equalsIgnoreCase("cadd_scaled")) { float value = 0f; switch (alternate.toLowerCase()) { case "a": value = ((short) (l1 >> 48)) / DECIMAL_RESOLUTION; break; case "c": value = ((short) (l1 >> 32)) / DECIMAL_RESOLUTION; break; case "g": value = ((short) (l1 >> 16)) / DECIMAL_RESOLUTION; break; case "t": value = ((short) (l1 >> 0)) / DECIMAL_RESOLUTION; break; default: break; } scores.add(Score.newBuilder() .setScore(value) .setSource(dbObject.getString("source")) .setDescription(null) // .setDescription("") .build()); } } } result.setResult(scores); return result; } }
/*
 * Copyright (c) 2015 PocketHub
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.pockethub.ui.ref;

import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.LoaderManager.LoaderCallbacks;
import android.support.v4.content.Loader;
import android.support.v7.app.ActionBar;
import android.text.TextUtils;
import android.util.Base64;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.webkit.WebView;
import android.widget.ProgressBar;

import com.alorma.github.sdk.bean.dto.response.GitBlob;
import com.alorma.github.sdk.bean.dto.response.Repo;
import com.github.kevinsawicki.wishlist.ViewUtils;
import com.github.pockethub.Intents.Builder;
import com.github.pockethub.R;
import com.github.pockethub.core.code.RefreshBlobTask;
import com.github.pockethub.core.commit.CommitUtils;
import com.github.pockethub.ui.BaseActivity;
import com.github.pockethub.ui.MarkdownLoader;
import com.github.pockethub.util.AvatarLoader;
import com.github.pockethub.util.HttpImageGetter;
import com.github.pockethub.util.InfoUtils;
import com.github.pockethub.util.MarkdownUtils;
import com.github.pockethub.util.PreferenceUtils;
import com.github.pockethub.util.ShareUtils;
import com.github.pockethub.util.SourceEditor;
import com.github.pockethub.util.ToastUtils;
import com.google.inject.Inject;

import static com.github.pockethub.Intents.EXTRA_BASE;
import static com.github.pockethub.Intents.EXTRA_HEAD;
import static com.github.pockethub.Intents.EXTRA_PATH;
import static com.github.pockethub.Intents.EXTRA_REPOSITORY;
import static com.github.pockethub.util.PreferenceUtils.RENDER_MARKDOWN;
import static com.github.pockethub.util.PreferenceUtils.WRAP;

/**
 * Activity to view a file on a branch
 */
public class BranchFileViewActivity extends BaseActivity implements
    LoaderCallbacks<CharSequence> {

    private static final String TAG = "BranchFileViewActivity";

    // Loader-args key for the raw markdown text passed to MarkdownLoader.
    private static final String ARG_TEXT = "text";

    // Loader-args key for the repository passed to MarkdownLoader.
    private static final String ARG_REPO = "repo";

    /**
     * Create intent to show file in commit
     *
     * @param repository repository containing the file
     * @param branch branch (stored as EXTRA_HEAD) the file is viewed on
     * @param file path of the file within the repository (stored as EXTRA_PATH)
     * @param blobSha SHA of the blob to load (stored as EXTRA_BASE)
     * @return intent
     */
    public static Intent createIntent(Repo repository, String branch,
        String file, String blobSha) {
        Builder builder = new Builder("branch.file.VIEW");
        builder.repo(repository);
        builder.add(EXTRA_BASE, blobSha);
        builder.add(EXTRA_PATH, file);
        builder.add(EXTRA_HEAD, branch);
        return builder.toIntent();
    }

    private Repo repo;

    // Blob SHA to fetch (from EXTRA_BASE).
    private String sha;

    // Full path of the file within the repository.
    private String path;

    // File name only (last segment of path), used for titles and markdown detection.
    private String file;

    private String branch;

    private boolean isMarkdownFile;

    // Cached HTML produced by MarkdownLoader; null until first render.
    private String renderedMarkdown;

    // Loaded blob content; null until RefreshBlobTask succeeds.
    private GitBlob blob;

    private ProgressBar loadingBar;

    private WebView codeView;

    private SourceEditor editor;

    // "Render markdown" menu item; kept so it can be enabled once the blob loads.
    private MenuItem markdownItem;

    @Inject
    private AvatarLoader avatars;

    @Inject
    private HttpImageGetter imageGetter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_commit_file_view);

        // Unpack the extras written by createIntent()
        repo = getParcelableExtra(EXTRA_REPOSITORY);
        sha = getStringExtra(EXTRA_BASE);
        path = getStringExtra(EXTRA_PATH);
        branch = getStringExtra(EXTRA_HEAD);

        loadingBar = finder.find(R.id.pb_loading);
        codeView = finder.find(R.id.wv_code);
        codeView.getSettings().setBuiltInZoomControls(true);
        codeView.getSettings().setUseWideViewPort(true);

        file = CommitUtils.getName(path);
        isMarkdownFile = MarkdownUtils.isMarkdown(file);
        editor = new SourceEditor(codeView);
        // Restore the user's line-wrap preference
        editor.setWrap(PreferenceUtils.getCodePreferences(this).getBoolean(
            WRAP, false));

        setSupportActionBar((android.support.v7.widget.Toolbar) findViewById(R.id.toolbar));
        // NOTE(review): assumes getSupportActionBar() is non-null right after
        // setSupportActionBar() — confirm the layout always provides the toolbar.
        ActionBar actionBar = getSupportActionBar();
        actionBar.setTitle(file);
        actionBar.setSubtitle(branch);
        avatars.bind(actionBar, repo.owner);

        loadContent();
    }

    @Override
    public boolean onCreateOptionsMenu(final Menu optionsMenu) {
        getMenuInflater().inflate(R.menu.activity_file_view, optionsMenu);

        // Wrap item title reflects the saved preference
        MenuItem wrapItem = optionsMenu.findItem(R.id.m_wrap);
        if (PreferenceUtils.getCodePreferences(this).getBoolean(WRAP, false))
            wrapItem.setTitle(R.string.disable_wrapping);
        else
            wrapItem.setTitle(R.string.enable_wrapping);

        // Markdown toggle is only shown for markdown files and is disabled
        // until the blob has actually loaded
        markdownItem = optionsMenu.findItem(R.id.m_render_markdown);
        if (isMarkdownFile) {
            markdownItem.setEnabled(blob != null);
            markdownItem.setVisible(true);
            if (PreferenceUtils.getCodePreferences(this).getBoolean(
                RENDER_MARKDOWN, true))
                markdownItem.setTitle(R.string.show_raw_markdown);
            else
                markdownItem.setTitle(R.string.render_markdown);
        }

        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.m_wrap:
            // Flip wrapping, update the menu label, and persist the choice
            if (editor.getWrap())
                item.setTitle(R.string.enable_wrapping);
            else
                item.setTitle(R.string.disable_wrapping);
            editor.toggleWrap();
            PreferenceUtils.save(PreferenceUtils.getCodePreferences(this)
                .edit().putBoolean(WRAP, editor.getWrap()));
            return true;

        case R.id.m_share:
            shareFile();
            return true;

        case R.id.m_render_markdown:
            // Toggle between raw source and rendered markdown; render lazily
            // and cache the result in renderedMarkdown
            if (editor.isMarkdown()) {
                item.setTitle(R.string.render_markdown);
                editor.toggleMarkdown();
                editor.setSource(file, blob);
            } else {
                item.setTitle(R.string.show_raw_markdown);
                editor.toggleMarkdown();
                if (renderedMarkdown != null)
                    editor.setSource(file, renderedMarkdown, false);
                else
                    loadMarkdown();
            }
            PreferenceUtils.save(PreferenceUtils.getCodePreferences(this)
                .edit().putBoolean(RENDER_MARKDOWN, editor.isMarkdown()));
            return true;

        default:
            return super.onOptionsItemSelected(item);
        }
    }

    @Override
    public Loader<CharSequence> onCreateLoader(int loader, Bundle args) {
        // Build a markdown-to-HTML loader from the args set in loadMarkdown()
        final String raw = args.getString(ARG_TEXT);
        final Repo repo = (Repo) args
            .getParcelable(ARG_REPO);
        return new MarkdownLoader(this, repo, raw, imageGetter, false);
    }

    @Override
    public void onLoadFinished(Loader<CharSequence> loader,
        CharSequence rendered) {
        if (rendered == null)
            ToastUtils.show(this, R.string.error_rendering_markdown);

        ViewUtils.setGone(loadingBar, true);
        ViewUtils.setGone(codeView, false);

        if (!TextUtils.isEmpty(rendered)) {
            // Cache render result and enable the markdown toggle
            renderedMarkdown = rendered.toString();
            if (markdownItem != null)
                markdownItem.setEnabled(true);
            editor.setMarkdown(true).setSource(file, renderedMarkdown, false);
        }
    }

    @Override
    public void onLoaderReset(Loader<CharSequence> loader) {
    }

    // Launch a share chooser with a GitHub URL for this file on this branch.
    private void shareFile() {
        String id = InfoUtils.createRepoId(repo);
        startActivity(ShareUtils.create(path + " at " + branch + " on " + id,
            "https://github.com/" + id + "/blob/" + branch + '/' + path));
    }

    // Decode the blob's Base64 content and hand it to MarkdownLoader via the
    // support LoaderManager (restartLoader re-renders on repeated calls).
    private void loadMarkdown() {
        ViewUtils.setGone(loadingBar, false);
        ViewUtils.setGone(codeView, true);

        String markdown = new String(Base64.decode(blob.content, Base64.DEFAULT));
        Bundle args = new Bundle();
        args.putCharSequence(ARG_TEXT, markdown);
        args.putParcelable(ARG_REPO, repo);
        getSupportLoaderManager().restartLoader(0, args, this);
    }

    // Fetch the blob for (repo, sha) asynchronously, then display it either as
    // rendered markdown (if preferred) or as raw source.
    private void loadContent() {
        ViewUtils.setGone(loadingBar, false);
        ViewUtils.setGone(codeView, true);

        new RefreshBlobTask(repo, sha, this) {

            @Override
            protected void onSuccess(GitBlob blob) throws Exception {
                super.onSuccess(blob);

                BranchFileViewActivity.this.blob = blob;
                if (markdownItem != null)
                    markdownItem.setEnabled(true);

                if (isMarkdownFile
                    && PreferenceUtils.getCodePreferences(
                        BranchFileViewActivity.this).getBoolean(
                        RENDER_MARKDOWN, true))
                    loadMarkdown();
                else {
                    ViewUtils.setGone(loadingBar, true);
                    ViewUtils.setGone(codeView, false);

                    editor.setMarkdown(false).setSource(file, blob);
                }
            }

            @Override
            protected void onException(Exception e) throws RuntimeException {
                super.onException(e);

                Log.d(TAG, "Loading file contents failed", e);

                ViewUtils.setGone(loadingBar, true);
                ViewUtils.setGone(codeView, false);

                ToastUtils.show(BranchFileViewActivity.this, e,
                    R.string.error_file_load);
            }
        }.execute();
    }
}
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.xmlinput; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleValueException; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.w3c.dom.Node; /** * Describes an XML field and the position in an XML file * * @author Matt * @since 16-12-2005 * */ public class XMLInputField implements Cloneable { private static Class<?> PKG = XMLInputMeta.class; // for i18n purposes, needed by Translator2!! 
$NON-NLS-1$ public final static int TYPE_TRIM_NONE = 0; public final static int TYPE_TRIM_LEFT = 1; public final static int TYPE_TRIM_RIGHT = 2; public final static int TYPE_TRIM_BOTH = 3; public final static String trimTypeCode[] = { "none", "left", "right", "both" }; public final static String trimTypeDesc[] = { BaseMessages.getString(PKG, "XMLInputField.TrimType.None"), BaseMessages.getString(PKG, "XMLInputField.TrimType.Left"), BaseMessages.getString(PKG, "XMLInputField.TrimType.Right"), BaseMessages.getString(PKG, "XMLInputField.TrimType.Both") }; public final static String POSITION_MARKER = ","; private String name; private XMLInputFieldPosition[] fieldPosition; private int type; private int length; private String format; private int trimtype; private int precision; private String currencySymbol; private String decimalSymbol; private String groupSymbol; private boolean repeat; private String samples[]; public XMLInputField(String fieldname, XMLInputFieldPosition[] xmlInputFieldPositions) { this.name = fieldname; this.fieldPosition = xmlInputFieldPositions; this.length = -1; this.type = ValueMeta.TYPE_STRING; this.format = ""; this.trimtype = TYPE_TRIM_NONE; this.groupSymbol = ""; this.decimalSymbol = ""; this.currencySymbol= ""; this.precision = -1; this.repeat = false; } public XMLInputField() { this(null, null); } public String getXML() { String retval=""; retval+=" <field>"+Const.CR; retval+=" "+XMLHandler.addTagValue("name", getName()); retval+=" "+XMLHandler.addTagValue("type", getTypeDesc()); retval+=" "+XMLHandler.addTagValue("format", getFormat()); retval+=" "+XMLHandler.addTagValue("currency", getCurrencySymbol()); retval+=" "+XMLHandler.addTagValue("decimal", getDecimalSymbol()); retval+=" "+XMLHandler.addTagValue("group", getGroupSymbol()); retval+=" "+XMLHandler.addTagValue("length", getLength()); retval+=" "+XMLHandler.addTagValue("precision", getPrecision()); retval+=" "+XMLHandler.addTagValue("trim_type", getTrimTypeCode()); retval+=" 
"+XMLHandler.addTagValue("repeat", isRepeated()); retval+=" <positions>"; for (int i=0;i<fieldPosition.length;i++) { retval+=XMLHandler.addTagValue("position", fieldPosition[i].toString(), false); } retval+=" </positions>"+Const.CR; retval+=" </field>"+Const.CR; return retval; } public XMLInputField(Node fnode) throws KettleValueException { setName( XMLHandler.getTagValue(fnode, "name") ); setType( ValueMeta.getType(XMLHandler.getTagValue(fnode, "type")) ); setFormat( XMLHandler.getTagValue(fnode, "format") ); setCurrencySymbol( XMLHandler.getTagValue(fnode, "currency") ); setDecimalSymbol( XMLHandler.getTagValue(fnode, "decimal") ); setGroupSymbol( XMLHandler.getTagValue(fnode, "group") ); setLength( Const.toInt(XMLHandler.getTagValue(fnode, "length"), -1) ); setPrecision( Const.toInt(XMLHandler.getTagValue(fnode, "precision"), -1) ); setTrimType( getTrimTypeByCode(XMLHandler.getTagValue(fnode, "trim_type")) ); setRepeated( !"N".equalsIgnoreCase(XMLHandler.getTagValue(fnode, "repeat")) ); Node positions = XMLHandler.getSubNode(fnode, "positions"); int nrPositions = XMLHandler.countNodes(positions, "position"); fieldPosition = new XMLInputFieldPosition[nrPositions]; for (int i=0;i<nrPositions;i++) { Node positionnode = XMLHandler.getSubNodeByNr(positions, "position", i); String encoded = XMLHandler.getNodeValue(positionnode); fieldPosition[i] = new XMLInputFieldPosition(encoded); } } public final static int getTrimTypeByCode(String tt) { if (tt==null) return 0; for (int i=0;i<trimTypeCode.length;i++) { if (trimTypeCode[i].equalsIgnoreCase(tt)) return i; } return 0; } public final static int getTrimTypeByDesc(String tt) { if (tt==null) return 0; for (int i=0;i<trimTypeDesc.length;i++) { if (trimTypeDesc[i].equalsIgnoreCase(tt)) return i; } return 0; } public final static String getTrimTypeCode(int i) { if (i<0 || i>=trimTypeCode.length) return trimTypeCode[0]; return trimTypeCode[i]; } public final static String getTrimTypeDesc(int i) { if (i<0 || 
i>=trimTypeDesc.length) return trimTypeDesc[0]; return trimTypeDesc[i]; } public Object clone() { try { XMLInputField retval = (XMLInputField) super.clone(); if (fieldPosition!=null) { retval.setFieldPosition( new XMLInputFieldPosition[fieldPosition.length] ); for (int i=0;i<fieldPosition.length;i++) { retval.getFieldPosition()[i] = (XMLInputFieldPosition)fieldPosition[i].clone(); } } return retval; } catch(CloneNotSupportedException e) { return null; } } /** * @return Returns the xmlInputFieldPositions. */ public XMLInputFieldPosition[] getFieldPosition() { return fieldPosition; } /** * @param xmlInputFieldPositions The xmlInputFieldPositions to set. */ public void setFieldPosition(XMLInputFieldPosition[] xmlInputFieldPositions) { this.fieldPosition = xmlInputFieldPositions; } public int getLength() { return length; } public void setLength(int length) { this.length = length; } public String getName() { return name; } public void setName(String fieldname) { this.name = fieldname; } public int getType() { return type; } public String getTypeDesc() { return ValueMeta.getTypeDesc(type); } public void setType(int type) { this.type = type; } public String getFormat() { return format; } public void setFormat(String format) { this.format = format; } public void setSamples(String samples[]) { this.samples = samples; } public String[] getSamples() { return samples; } public int getTrimType() { return trimtype; } public String getTrimTypeCode() { return getTrimTypeCode(trimtype); } public String getTrimTypeDesc() { return getTrimTypeDesc(trimtype); } public void setTrimType(int trimtype) { this.trimtype= trimtype; } public String getGroupSymbol() { return groupSymbol; } public void setGroupSymbol(String group_symbol) { this.groupSymbol = group_symbol; } public String getDecimalSymbol() { return decimalSymbol; } public void setDecimalSymbol(String decimal_symbol) { this.decimalSymbol = decimal_symbol; } public String getCurrencySymbol() { return currencySymbol; } public void 
setCurrencySymbol(String currency_symbol) { this.currencySymbol = currency_symbol; } public int getPrecision() { return precision; } public void setPrecision(int precision) { this.precision = precision; } public boolean isRepeated() { return repeat; } public void setRepeated(boolean repeat) { this.repeat = repeat; } public void flipRepeated() { repeat = !repeat; } public String getFieldPositionsCode() { String enc=""; for (int i=0;i<fieldPosition.length;i++) { XMLInputFieldPosition pos = fieldPosition[i]; if (i>0) enc+=POSITION_MARKER; enc+=pos.toString(); } return enc; } public void guess() { } public void setFieldPosition(String encoded) throws KettleException { try { String codes[] = encoded.split(POSITION_MARKER); fieldPosition = new XMLInputFieldPosition[codes.length]; for (int i=0;i<codes.length;i++) { fieldPosition[i] = new XMLInputFieldPosition(codes[i]); } } catch(Exception e) { throw new KettleException("Unable to parse the field positions because of an error"+Const.CR+"Please use E=element or A=attribute in a comma separated list (code: "+encoded+")", e); } } }
package controllers;

import com.crazycabo.drivers.DriverManager;
import com.crazycabo.drivers.saucelabs.SauceAPI;
import com.crazycabo.drivers.testrail.APIException;
import com.crazycabo.drivers.testrail.APIMethods;
import com.crazycabo.drivers.testrail.TestRailCaseID;
import com.crazycabo.models.ServerStaticGlobals;
import com.crazycabo.models.TestRailInfo;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.After;
import org.junit.AssumptionViolatedException;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.openqa.selenium.Dimension;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import com.crazycabo.models.Screenshot;
import ru.yandex.qatools.allure.annotations.Parameter;

import java.io.IOException;

/**
 * Created By: Brian Smith on 10/8/14.
 * Package: test/java
 *
 * Base class for Selenium tests: opens/closes the browser, captures
 * screenshots, and reports results to Sauce Labs and TestRail via a
 * {@link TestWatcher} rule.
 */
public class TestDefault {

    public WebDriver localDriver;
    private final Logger logger = LogManager.getLogger("test-default");
    private String testName;
    private Long timeStart;
    private Long timeEnd;

    @Parameter("Session ID")
    private String sessionID;

    @Parameter("TestRail Case ID")
    private String testRailCaseID;

    @Parameter("Configuration")
    private String configuration;

    /**
     * Fix: the original called sessionID.isEmpty() directly in the watcher
     * callbacks, which throws NullPointerException when setUp() failed before
     * the Sauce session ID was assigned. This helper is null-safe.
     *
     * @return true when running against Sauce Labs AND a session ID was captured
     */
    private boolean sauceSessionAvailable() {
        return System.getProperty("saucelabs", "false").equals("true")
                && sessionID != null && !sessionID.isEmpty();
    }

    @Rule
    public TestRule watcher = new TestWatcher() {

        @Override
        protected void starting(Description description) {
            testName = description.getDisplayName();
            configuration = System.getProperty("browser") + " "
                    + System.getProperty("browser_version") + " on "
                    + System.getProperty("platform");
            timeStart = System.currentTimeMillis();

            // Tests without a @TestRailCaseID annotation are reported as "unknown".
            try {
                testRailCaseID = description.getAnnotation(TestRailCaseID.class).value();
            } catch (NullPointerException e) {
                testRailCaseID = "unknown";
            }
        }

        @Override
        protected void succeeded(Description description) {
            timeEnd = System.currentTimeMillis();

            if (sauceSessionAvailable()) {
                SauceAPI.flagTestPassed(sessionID);
            }

            addTestRailAPIResult(APIMethods.RESULT.PASSED,
                    (timeEnd - timeStart) / 1000.0, sessionID, configuration, testName, "");
        }

        @Override
        protected void failed(Throwable error, Description description) {
            timeEnd = System.currentTimeMillis();

            if (sauceSessionAvailable()) {
                SauceAPI.flagTestFailed(sessionID);
            }

            addTestRailAPIResult(APIMethods.RESULT.FAILED,
                    (timeEnd - timeStart) / 1000.0, sessionID, configuration, testName,
                    error.getMessage());
        }

        @Override
        protected void skipped(AssumptionViolatedException e, Description description) {
            timeEnd = System.currentTimeMillis();

            // Skipped tests are flagged as passed on Sauce (original behavior kept).
            if (sauceSessionAvailable()) {
                SauceAPI.flagTestPassed(sessionID);
            }

            addTestRailAPIResult(APIMethods.RESULT.SKIPPED,
                    (timeEnd - timeStart) / 1000.0, sessionID, configuration, testName,
                    e.getMessage());
        }
    };

    /**
     * Opens the browser and, when running on Sauce Labs, captures the remote
     * session ID and tags the session with the test name.
     */
    @Before
    public void setUp() throws Exception {
        openBrowser();
        logger.info("Browser window initiated.");

        if (System.getProperty("saucelabs", "false").equals("true")) {
            sessionID = ((RemoteWebDriver) localDriver).getSessionId().toString();

            // Fix: the original also checked Objects.equals(sessionID, ""),
            // which is redundant with isEmpty().
            if (sessionID.isEmpty()) {
                throw new Error("A Sauce session ID could not be found.");
            }

            SauceAPI.addTestName(sessionID, testName);
            logger.info("Assigned Sauce session ID.");
        }
    }

    /** Captures a final screenshot and closes the browser after every test. */
    @After
    public void tearDown() {
        getScreenShot();
        quit();
    }

    /**
     * Captures a screenshot of the current browser state.
     *
     * @return true when a screenshot was taken, false when no browser is available
     */
    public Boolean getScreenShot() {
        // Fix: explicit null check instead of relying solely on catching NPE.
        if (localDriver == null) {
            System.out.println("Cannot take screenshot as the browser is not available.");
            return false;
        }
        try {
            Screenshot screenshot = new Screenshot(localDriver);
            screenshot.getScreenShot();
            return true;
        } catch (NullPointerException e) {
            // Kept for parity with the original: Screenshot internals may still NPE.
            System.out.println("Cannot take screenshot as the browser is not available.");
            return false;
        }
    }

    /**
     * Hides the given element via JavaScript and waits (up to 15s) until it is
     * no longer visible.
     */
    public void hideElement(WebElement element) {
        ((JavascriptExecutor) localDriver)
                .executeScript("arguments[0].style.visibility='hidden'", element);
        new WebDriverWait(localDriver, 15).until(
                ExpectedConditions.not(ExpectedConditions.visibilityOf(element)));
    }

    /**
     * Sleeps for the given number of seconds.
     *
     * @param seconds whole seconds to pause the current thread
     */
    public void manualWait(Integer seconds) {
        try {
            Thread.sleep(seconds * 1000);
        } catch (InterruptedException e) {
            // Fix: restore the interrupt flag instead of swallowing it with
            // printStackTrace(), so callers can observe the interruption.
            logger.warn("manualWait interrupted", e);
            Thread.currentThread().interrupt();
        }
    }

    /** Navigates to a path under the configured server base URL. */
    public void navigateToURLPath(String page) {
        localDriver.navigate().to(
                System.getProperty(ServerStaticGlobals.serverURL, "http://no_valid_url")
                        + "/" + page);
    }

    /**
     * Acquires a WebDriver from the pool; when running locally (not Sauce),
     * sizes the window from the "resolution" system property (WxH, default
     * 1280x1024).
     */
    public void openBrowser() {
        localDriver = DriverManager.get();
        logger.info("Local driver object assigned.");

        if (System.getProperty("saucelabs", "false").equals("false")) {
            String res = System.getProperty("resolution", "1280x1024");
            String[] sRes = res.split("x");
            setBrowserDimensions(Integer.parseInt(sRes[0]), Integer.parseInt(sRes[1]));
        }
    }

    /** Quits the browser if one is open. */
    public void quit() {
        // Fix: explicit null check instead of using NPE for control flow.
        if (localDriver == null) {
            System.out.println("Cannot quit as the browser is not available.");
            return;
        }
        localDriver.quit();
    }

    private void setBrowserDimensions(Integer w, Integer h) {
        Dimension browserWindow = new Dimension(w, h);
        localDriver.manage().window().setSize(browserWindow);
    }

    /**
     * Send results to the TestRail API
     *
     * @param result        PASSED, FAILED, or SKIPPED
     * @param runTime       Total time in seconds
     * @param sessionID     The ID of the current session
     * @param configuration Name of the browser used
     * @param testName      String from description.getDisplayName()
     * @param error         Error message from test if available
     */
    private void addTestRailAPIResult(APIMethods.RESULT result, Double runTime,
            String sessionID, String configuration, String testName, String error) {

        if (!System.getProperty("testrailUser", "").equals("")
                && !System.getProperty("testrailPassword", "").equals("")) {
            APIMethods testRailAPI = new APIMethods();

            try {
                // Pull class and method names out of the test name.
                // Fix: the original assumed "(" is always present; a display
                // name without parentheses would throw StringIndexOutOfBounds.
                int parenIndex = testName.indexOf("(");
                String testClass = parenIndex >= 0
                        ? (testName.substring(parenIndex)).replaceAll("[(-)]", "")
                        : "";
                String testMethod = parenIndex >= 0
                        ? testName.substring(0, parenIndex)
                        : testName;

                // Construct a comment with all necessary information for suites traceability
                String message = "Session ID: " + sessionID + "\n"
                        + "Configuration: " + configuration + "\n"
                        + "Suite Class: " + testClass + "\n"
                        + "Suite Method: " + testMethod + "\n\n" + error;

                testRailAPI.addResult(TestRailInfo.getRunID(), testRailCaseID, result,
                        runTime.intValue(), message);
            } catch (APIException e) {
                System.out.println("TestRail API Exception: " + e);
            } catch (IOException e) {
                System.out.println("Java IO Exception: " + e);
            }
        } else {
            System.out.println("NOTICE: TestRail credentials not set. Ignoring API requests.");
        }
    }
}
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.servlet.spec; import io.undertow.security.api.SecurityContext; import io.undertow.security.idm.Account; import io.undertow.server.HttpServerExchange; import io.undertow.server.RequestTooBigException; import io.undertow.server.handlers.form.FormData; import io.undertow.server.handlers.form.FormDataParser; import io.undertow.server.handlers.form.MultiPartParserDefinition; import io.undertow.server.protocol.http.HttpAttachments; import io.undertow.server.session.Session; import io.undertow.server.session.SessionConfig; import io.undertow.servlet.UndertowServletMessages; import io.undertow.servlet.api.AuthorizationManager; import io.undertow.servlet.api.Deployment; import io.undertow.servlet.api.InstanceFactory; import io.undertow.servlet.api.InstanceHandle; import io.undertow.servlet.core.ManagedServlet; import io.undertow.servlet.core.ServletUpgradeListener; import io.undertow.servlet.handlers.ServletChain; import io.undertow.servlet.handlers.ServletPathMatch; import io.undertow.servlet.handlers.ServletRequestContext; import io.undertow.servlet.util.EmptyEnumeration; import io.undertow.servlet.util.IteratorEnumeration; import io.undertow.util.AttachmentKey; import io.undertow.util.CanonicalPathUtils; import io.undertow.util.DateUtils; import io.undertow.util.HeaderMap; 
import io.undertow.util.HeaderValues; import io.undertow.util.Headers; import io.undertow.util.HttpString; import io.undertow.util.LocaleUtils; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.nio.charset.UnsupportedCharsetException; import java.security.AccessController; import java.security.Principal; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.Deque; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import javax.servlet.AsyncContext; import javax.servlet.DispatcherType; import javax.servlet.MultipartConfigElement; import javax.servlet.RequestDispatcher; import javax.servlet.ServletException; import javax.servlet.ServletInputStream; import javax.servlet.ServletRequest; import javax.servlet.ServletRequestWrapper; import javax.servlet.ServletResponse; import javax.servlet.ServletResponseWrapper; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletMapping; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.servlet.http.HttpUpgradeHandler; import javax.servlet.http.Part; import javax.servlet.http.PushBuilder; /** * The http servlet request implementation. 
This class is not thread safe * * @author Stuart Douglas */ public final class HttpServletRequestImpl implements HttpServletRequest { @Deprecated public static final AttachmentKey<Boolean> SECURE_REQUEST = HttpServerExchange.SECURE_REQUEST; static final AttachmentKey<Boolean> REQUESTED_SESSION_ID_SET = AttachmentKey.create(Boolean.class); static final AttachmentKey<String> REQUESTED_SESSION_ID = AttachmentKey.create(String.class); private final HttpServerExchange exchange; private final ServletContextImpl originalServletContext; private ServletContextImpl servletContext; private Map<String, Object> attributes = null; private ServletInputStream servletInputStream; private BufferedReader reader; private Cookie[] cookies; private List<Part> parts = null; private volatile boolean asyncStarted = false; private volatile AsyncContextImpl asyncContext = null; private Map<String, Deque<String>> queryParameters; private FormData parsedFormData; private RuntimeException formParsingException; private Charset characterEncoding; private boolean readStarted; private SessionConfig.SessionCookieSource sessionCookieSource; public HttpServletRequestImpl(final HttpServerExchange exchange, final ServletContextImpl servletContext) { this.exchange = exchange; this.servletContext = servletContext; this.originalServletContext = servletContext; } public HttpServerExchange getExchange() { return exchange; } @Override public String getAuthType() { SecurityContext securityContext = exchange.getSecurityContext(); return securityContext != null ? 
securityContext.getMechanismName() : null; } @Override public Cookie[] getCookies() { if (cookies == null) { Map<String, io.undertow.server.handlers.Cookie> cookies = exchange.getRequestCookies(); if (cookies.isEmpty()) { return null; } int count = cookies.size(); Cookie[] value = new Cookie[count]; int i = 0; for (Map.Entry<String, io.undertow.server.handlers.Cookie> entry : cookies.entrySet()) { io.undertow.server.handlers.Cookie cookie = entry.getValue(); try { Cookie c = new Cookie(cookie.getName(), cookie.getValue()); if (cookie.getDomain() != null) { c.setDomain(cookie.getDomain()); } c.setHttpOnly(cookie.isHttpOnly()); if (cookie.getMaxAge() != null) { c.setMaxAge(cookie.getMaxAge()); } if (cookie.getPath() != null) { c.setPath(cookie.getPath()); } c.setSecure(cookie.isSecure()); c.setVersion(cookie.getVersion()); value[i++] = c; } catch (IllegalArgumentException e) { // Ignore bad cookie } } if( i < count ) { Cookie[] shrunkCookies = new Cookie[i]; System.arraycopy(value, 0, shrunkCookies, 0, i); value = shrunkCookies; } this.cookies = value; } return cookies; } @Override public long getDateHeader(final String name) { String header = exchange.getRequestHeaders().getFirst(name); if (header == null) { return -1; } Date date = DateUtils.parseDate(header); if (date == null) { throw UndertowServletMessages.MESSAGES.headerCannotBeConvertedToDate(header); } return date.getTime(); } @Override public String getHeader(final String name) { HeaderMap headers = exchange.getRequestHeaders(); return headers.getFirst(name); } public String getHeader(final HttpString name) { HeaderMap headers = exchange.getRequestHeaders(); return headers.getFirst(name); } @Override public Enumeration<String> getHeaders(final String name) { List<String> headers = exchange.getRequestHeaders().get(name); if (headers == null) { return EmptyEnumeration.instance(); } return new IteratorEnumeration<>(headers.iterator()); } @Override public Enumeration<String> getHeaderNames() { final Set<String> 
headers = new HashSet<>(); for (final HttpString i : exchange.getRequestHeaders().getHeaderNames()) { headers.add(i.toString()); } return new IteratorEnumeration<>(headers.iterator()); } @Override public HttpServletMapping getHttpServletMapping() { ServletRequestContext src = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY); ServletPathMatch match = src.getOriginalServletPathMatch(); if(getDispatcherType() == DispatcherType.FORWARD) { match = src.getServletPathMatch(); } String matchValue; switch (match.getMappingMatch()) { case EXACT: matchValue = match.getMatched(); if(matchValue.startsWith("/")) { matchValue = matchValue.substring(1); } break; case DEFAULT: case CONTEXT_ROOT: matchValue = ""; break; case PATH: matchValue = match.getRemaining(); if(matchValue.startsWith("/")) { matchValue = matchValue.substring(1); } break; case EXTENSION: matchValue = match.getMatched().substring(0, match.getMatched().length() - match.getMatchString().length() + 1); if(matchValue.startsWith("/")) { matchValue = matchValue.substring(1); } break; default: matchValue = match.getRemaining(); } return new MappingImpl(matchValue, match.getMatchString(), match.getMappingMatch(), match.getServletChain().getManagedServlet().getServletInfo().getName()); } @Override public int getIntHeader(final String name) { String header = getHeader(name); if (header == null) { return -1; } return Integer.parseInt(header); } @Override public String getMethod() { return exchange.getRequestMethod().toString(); } @Override public String getPathInfo() { ServletPathMatch match = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).getServletPathMatch(); if (match != null) { return match.getRemaining(); } return null; } @Override public String getPathTranslated() { return getRealPath(getPathInfo()); } @Override public String getContextPath() { return servletContext.getContextPath(); } @Override public String getQueryString() { return exchange.getQueryString().isEmpty() ? 
null : exchange.getQueryString(); } @Override public String getRemoteUser() { Principal userPrincipal = getUserPrincipal(); return userPrincipal != null ? userPrincipal.getName() : null; } @Override public boolean isUserInRole(final String role) { if (role == null) { return false; } //according to the servlet spec this aways returns false if (role.equals("*")) { return false; } SecurityContext sc = exchange.getSecurityContext(); Account account = sc.getAuthenticatedAccount(); if (account == null) { return false; } ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY); if (role.equals("**")) { Set<String> roles = servletRequestContext.getDeployment().getDeploymentInfo().getSecurityRoles(); if (!roles.contains("**")) { return true; } } final ServletChain servlet = servletRequestContext.getCurrentServlet(); final Deployment deployment = servletContext.getDeployment(); final AuthorizationManager authorizationManager = deployment.getDeploymentInfo().getAuthorizationManager(); return authorizationManager.isUserInRole(role, account, servlet.getManagedServlet().getServletInfo(), this, deployment); } @Override public Principal getUserPrincipal() { SecurityContext securityContext = exchange.getSecurityContext(); Principal result = null; Account account = null; if (securityContext != null && (account = securityContext.getAuthenticatedAccount()) != null) { result = account.getPrincipal(); } return result; } @Override public String getRequestedSessionId() { Boolean isRequestedSessionIdSaved = exchange.getAttachment(REQUESTED_SESSION_ID_SET); if (isRequestedSessionIdSaved != null && isRequestedSessionIdSaved) { return exchange.getAttachment(REQUESTED_SESSION_ID); } SessionConfig config = originalServletContext.getSessionConfig(); if(config instanceof ServletContextImpl.ServletContextSessionConfig) { return ((ServletContextImpl.ServletContextSessionConfig)config).getDelegate().findSessionId(exchange); } return 
config.findSessionId(exchange); } @Override public String changeSessionId() { HttpSessionImpl session = servletContext.getSession(originalServletContext, exchange, false); if (session == null) { throw UndertowServletMessages.MESSAGES.noSession(); } String oldId = session.getId(); Session underlyingSession; if(System.getSecurityManager() == null) { underlyingSession = session.getSession(); } else { underlyingSession = AccessController.doPrivileged(new HttpSessionImpl.UnwrapSessionAction(session)); } String newId = underlyingSession.changeSessionId(exchange, originalServletContext.getSessionConfig()); servletContext.getDeployment().getApplicationListeners().httpSessionIdChanged(session, oldId); return newId; } @Override public String getRequestURI() { //we need the non-decoded string, which means we need to use exchange.getRequestURI() if(exchange.isHostIncludedInRequestURI()) { //we need to strip out the host part String uri = exchange.getRequestURI(); int slashes =0; for(int i = 0; i < uri.length(); ++i) { if(uri.charAt(i) == '/') { if(++slashes == 3) { return uri.substring(i); } } } return "/"; } else { return exchange.getRequestURI(); } } @Override public StringBuffer getRequestURL() { return new StringBuffer(exchange.getRequestURL()); } @Override public String getServletPath() { ServletPathMatch match = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).getServletPathMatch(); if (match != null) { return match.getMatched(); } return ""; } @Override public HttpSession getSession(final boolean create) { return servletContext.getSession(originalServletContext, exchange, create); } @Override public HttpSession getSession() { return getSession(true); } @Override public boolean isRequestedSessionIdValid() { HttpSessionImpl session = servletContext.getSession(originalServletContext, exchange, false); if(session == null) { return false; } if(session.isInvalid()) { return false; } return session.getId().equals(getRequestedSessionId()); } @Override public boolean 
isRequestedSessionIdFromCookie() { return sessionCookieSource() == SessionConfig.SessionCookieSource.COOKIE; } @Override public boolean isRequestedSessionIdFromURL() { return sessionCookieSource() == SessionConfig.SessionCookieSource.URL; } @Override public boolean isRequestedSessionIdFromUrl() { return isRequestedSessionIdFromURL(); } @Override public boolean authenticate(final HttpServletResponse response) throws IOException, ServletException { if (response.isCommitted()) { throw UndertowServletMessages.MESSAGES.responseAlreadyCommited(); } SecurityContext sc = exchange.getSecurityContext(); sc.setAuthenticationRequired(); // TODO: this will set the status code and headers without going through any potential // wrappers, is this a problem? if (sc.authenticate()) { if (sc.isAuthenticated()) { return true; } else { throw UndertowServletMessages.MESSAGES.authenticationFailed(); } } else { if(!exchange.isResponseStarted() && exchange.getStatusCode() == 200) { throw UndertowServletMessages.MESSAGES.authenticationFailed(); } else { return false; } } } @Override public void login(final String username, final String password) throws ServletException { if (username == null || password == null) { throw UndertowServletMessages.MESSAGES.loginFailed(); } SecurityContext sc = exchange.getSecurityContext(); if (sc.isAuthenticated()) { throw UndertowServletMessages.MESSAGES.userAlreadyLoggedIn(); } boolean login = false; try { login = sc.login(username, password); } catch (SecurityException se) { if (se.getCause() instanceof ServletException) throw (ServletException) se.getCause(); throw new ServletException(se); } if (!login) { throw UndertowServletMessages.MESSAGES.loginFailed(); } } @Override public void logout() throws ServletException { SecurityContext sc = exchange.getSecurityContext(); sc.logout(); if(servletContext.getDeployment().getDeploymentInfo().isInvalidateSessionOnLogout()) { HttpSession session = getSession(false); if(session != null) { session.invalidate(); } } 
} @Override public Collection<Part> getParts() throws IOException, ServletException { verifyMultipartServlet(); if (parts == null) { loadParts(); } return parts; } private void verifyMultipartServlet() { ServletRequestContext src = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY); MultipartConfigElement multipart = src.getServletPathMatch().getServletChain().getManagedServlet().getMultipartConfig(); if(multipart == null) { throw UndertowServletMessages.MESSAGES.multipartConfigNotPresent(); } } @Override public Part getPart(final String name) throws IOException, ServletException { verifyMultipartServlet(); if (parts == null) { loadParts(); } for (Part part : parts) { if (part.getName().equals(name)) { return part; } } return null; } @Override public <T extends HttpUpgradeHandler> T upgrade(final Class<T> handlerClass) throws IOException { try { InstanceFactory<T> factory = servletContext.getDeployment().getDeploymentInfo().getClassIntrospecter().createInstanceFactory(handlerClass); final InstanceHandle<T> instance = factory.createInstance(); exchange.upgradeChannel(new ServletUpgradeListener<>(instance, servletContext.getDeployment(), exchange)); return instance.getInstance(); } catch (InstantiationException e) { throw new RuntimeException(e); } catch (NoSuchMethodException e) { throw new RuntimeException(e); } } private void loadParts() throws IOException, ServletException { final ServletRequestContext requestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY); if (parts == null) { final List<Part> parts = new ArrayList<>(); String mimeType = exchange.getRequestHeaders().getFirst(Headers.CONTENT_TYPE); if (mimeType != null && mimeType.startsWith(MultiPartParserDefinition.MULTIPART_FORM_DATA)) { FormData formData = parseFormData(); if(formData != null) { for (final String namedPart : formData) { for (FormData.FormValue part : formData.get(namedPart)) { parts.add(new PartImpl(namedPart, part, 
// NOTE(review): this excerpt begins mid-method — it is the tail of the multipart
// parts-loading logic whose opening lies above this chunk; left byte-identical.
requestContext.getOriginalServletPathMatch().getServletChain().getManagedServlet().getMultipartConfig(), servletContext, this)); } } } } else { throw UndertowServletMessages.MESSAGES.notAMultiPartRequest(); } this.parts = parts; } }

// Returns the named request attribute, or null when no attribute map was ever created.
@Override
public Object getAttribute(final String name) { if (attributes == null) { return null; } return attributes.get(name); }

// Enumerates the attribute names; the attribute map is created lazily and may be null.
@Override
public Enumeration<String> getAttributeNames() { if (attributes == null) { return EmptyEnumeration.instance(); } return new IteratorEnumeration<>(attributes.keySet().iterator()); }

// Resolution order: explicit setCharacterEncoding() value, then the Content-Type
// header's charset parameter, then the deployment's default request encoding,
// then the container-wide default encoding, otherwise null.
@Override
public String getCharacterEncoding() { if (characterEncoding != null) { return characterEncoding.name(); } String characterEncodingFromHeader = getCharacterEncodingFromHeader(); if (characterEncodingFromHeader != null) { return characterEncodingFromHeader; }
    // first check, web-app context level default request encoding
    if (servletContext.getDeployment().getDeploymentInfo().getDefaultRequestEncoding() != null) { return servletContext.getDeployment().getDeploymentInfo().getDefaultRequestEncoding(); }
    // now check the container level default encoding
    if (servletContext.getDeployment().getDeploymentInfo().getDefaultEncoding() != null) { return servletContext.getDeployment().getDeploymentInfo().getDefaultEncoding(); }
    return null; }

// Extracts the "charset" parameter from the Content-Type header, or null when absent.
private String getCharacterEncodingFromHeader() { String contentType = exchange.getRequestHeaders().getFirst(Headers.CONTENT_TYPE); if (contentType == null) { return null; } return Headers.extractQuotedValueFromHeader(contentType, "charset"); }

// Silently ignored once body reading has started; otherwise records the charset and
// propagates it to the form parser (if one applies to this request).
// Throws UnsupportedEncodingException for an unknown charset name.
@Override
public void setCharacterEncoding(final String env) throws UnsupportedEncodingException { if (readStarted) { return; } try { characterEncoding = Charset.forName(env); final ManagedServlet originalServlet = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).getOriginalServletPathMatch().getServletChain().getManagedServlet(); final FormDataParser parser = originalServlet.getFormParserFactory().createParser(exchange); if (parser != null) { parser.setCharacterEncoding(env); } } catch (UnsupportedCharsetException e) { throw new UnsupportedEncodingException(); } }

// int view of the content length; lengths above Integer.MAX_VALUE are reported as -1.
@Override
public int getContentLength() { long length = getContentLengthLong(); if(length > Integer.MAX_VALUE) { return -1; } return (int)length; }

// -1 when the Content-Length header is missing or empty.
@Override
public long getContentLengthLong() { final String contentLength = getHeader(Headers.CONTENT_LENGTH); if (contentLength == null || contentLength.isEmpty()) { return -1; } return Long.parseLong(contentLength); }

@Override
public String getContentType() { return getHeader(Headers.CONTENT_TYPE); }

// Lazily creates the stream; mutually exclusive with getReader(). Marks the body read
// as started so later setCharacterEncoding()/parseFormData() calls become no-ops.
@Override
public ServletInputStream getInputStream() throws IOException { if (reader != null) { throw UndertowServletMessages.MESSAGES.getReaderAlreadyCalled(); } if(servletInputStream == null) { servletInputStream = new ServletInputStreamImpl(this); } readStarted = true; return servletInputStream; }

// Closes the reader (if any) and closes the servlet input stream, creating it first
// if necessary — presumably so closing drains any unread request body; confirm against
// ServletInputStreamImpl.close().
public void closeAndDrainRequest() throws IOException { if(reader != null) { reader.close(); } if(servletInputStream == null) { servletInputStream = new ServletInputStreamImpl(this); } servletInputStream.close(); }

/**
 * Frees any resources (namely buffers) that may be associated with this request,
 * by closing the reader and/or input stream if they were created.
 */
public void freeResources() throws IOException { if(reader != null) { reader.close(); } if(servletInputStream != null) { servletInputStream.close(); } }

// Query-string parameters win; otherwise falls back to the first non-file form field
// of the same name (parsed lazily), else null.
@Override
public String getParameter(final String name) { if(queryParameters == null) { queryParameters = exchange.getQueryParameters(); } Deque<String> params = queryParameters.get(name); if (params == null) { final FormData parsedFormData = parseFormData(); if (parsedFormData != null) { FormData.FormValue res = parsedFormData.getFirst(name); if (res == null || res.isFileItem()) { return null; } else { return res.getValue(); } } return null; } return params.getFirst(); }

// Union of query-parameter names and names of non-file form fields.
@Override
public Enumeration<String> getParameterNames() { if (queryParameters == null) { queryParameters = exchange.getQueryParameters(); } final Set<String> parameterNames = new HashSet<>(queryParameters.keySet()); final FormData parsedFormData = parseFormData(); if (parsedFormData != null) { Iterator<String> it = parsedFormData.iterator(); while (it.hasNext()) { String name = it.next(); for(FormData.FormValue param : parsedFormData.get(name)) { if(!param.isFileItem()) { parameterNames.add(name); break; } } } } return new IteratorEnumeration<>(parameterNames.iterator()); }

// Query values first, then non-file form values; null (not an empty array) when none.
@Override
public String[] getParameterValues(final String name) { if (queryParameters == null) { queryParameters = exchange.getQueryParameters(); } final List<String> ret = new ArrayList<>(); Deque<String> params = queryParameters.get(name); if (params != null) { for (String param : params) { ret.add(param); } } final FormData parsedFormData = parseFormData(); if (parsedFormData != null) { Deque<FormData.FormValue> res = parsedFormData.get(name); if (res != null) { for (FormData.FormValue value : res) { if(!value.isFileItem()) { ret.add(value.getValue()); } } } } if (ret.isEmpty()) { return null; } return ret.toArray(new String[ret.size()]); }

// Merges query parameters with non-file form fields into a name -> values map.
@Override
public Map<String, String[]> getParameterMap() { if (queryParameters == null) { queryParameters = exchange.getQueryParameters(); } final Map<String, ArrayList<String>> arrayMap = new HashMap<>(); for (Map.Entry<String, Deque<String>> entry : queryParameters.entrySet()) { arrayMap.put(entry.getKey(), new ArrayList<>(entry.getValue())); } final FormData parsedFormData = parseFormData(); if (parsedFormData != null) { Iterator<String> it = parsedFormData.iterator(); while (it.hasNext()) { final String name = it.next(); Deque<FormData.FormValue> val = parsedFormData.get(name); if (arrayMap.containsKey(name)) { ArrayList<String> existing = arrayMap.get(name); for (final FormData.FormValue v : val) { if(!v.isFileItem()) { existing.add(v.getValue()); } } } else { final ArrayList<String> values = new ArrayList<>(); for (final FormData.FormValue v : val) { if(!v.isFileItem()) { values.add(v.getValue()); } } arrayMap.put(name, values); } } } final Map<String, String[]> ret = new HashMap<>(); for(Map.Entry<String, ArrayList<String>> entry : arrayMap.entrySet()) { ret.put(entry.getKey(), entry.getValue().toArray(new String[entry.getValue().size()])); } return ret; }

// Parses the request body as form data exactly once, caching the result. Returns null
// when the body read already started or no parser applies. A previous parse failure is
// remembered and rethrown on every later call; size-limit violations surface as
// IllegalStateException.
private FormData parseFormData() { if(formParsingException != null) { throw formParsingException; } if (parsedFormData == null) { if (readStarted) { return null; } final ManagedServlet originalServlet = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).getCurrentServlet().getManagedServlet(); final FormDataParser parser = originalServlet.getFormParserFactory().createParser(exchange); if (parser == null) { return null; } readStarted = true; try { return parsedFormData = parser.parseBlocking(); } catch (RequestTooBigException | MultiPartParserDefinition.FileTooLargeException e) { throw formParsingException = new IllegalStateException(e); } catch (RuntimeException e) { throw formParsingException = e; } catch (IOException e) { throw formParsingException = new RuntimeException(e); } } return parsedFormData; }

@Override
public String getProtocol() { return exchange.getProtocol().toString(); }

@Override
public String getScheme() { return exchange.getRequestScheme(); }

@Override
public String getServerName() { return exchange.getHostName(); }

@Override
public int getServerPort() { return exchange.getHostPort(); }

// Lazily creates a reader over the request body; mutually exclusive with
// getInputStream(). Charset resolution mirrors getCharacterEncoding(), falling
// back to ISO-8859-1 when nothing is configured.
@Override
public BufferedReader getReader() throws IOException { if (reader == null) { if (servletInputStream != null) { throw UndertowServletMessages.MESSAGES.getInputStreamAlreadyCalled(); } Charset charSet = null; if (this.characterEncoding != null) { charSet = this.characterEncoding; } else { final String c = getCharacterEncoding(); if (c != null) { try { charSet = Charset.forName(c); } catch (UnsupportedCharsetException e) { throw new UnsupportedEncodingException(e.getMessage()); } } } reader = new BufferedReader(charSet == null ? new InputStreamReader(exchange.getInputStream(), StandardCharsets.ISO_8859_1) : new InputStreamReader(exchange.getInputStream(), charSet)); } readStarted = true; return reader; }

@Override
public String getRemoteAddr() { InetSocketAddress sourceAddress = exchange.getSourceAddress(); if(sourceAddress == null) { return ""; } InetAddress address = sourceAddress.getAddress();
    if(address == null) {
        //this is unresolved, so we just return the host name
        //not exactly spec, but if the name should be resolved then a PeerNameResolvingHandler should be used
        //and this is probably better than just returning null
        return sourceAddress.getHostString(); }
    return address.getHostAddress(); }

@Override
public String getRemoteHost() { InetSocketAddress sourceAddress = exchange.getSourceAddress(); if(sourceAddress == null) { return ""; } return sourceAddress.getHostString(); }

// A null value removes the attribute; otherwise stores it and fires the appropriate
// attributeReplaced/attributeAdded listener event.
@Override
public void setAttribute(final String name, final Object object) { if(object == null) { removeAttribute(name); return; } if (attributes == null) { attributes = new HashMap<>(); } Object existing = attributes.put(name, object); if (existing != null) { servletContext.getDeployment().getApplicationListeners().servletRequestAttributeReplaced(this, name, existing); } else { servletContext.getDeployment().getApplicationListeners().servletRequestAttributeAdded(this, name, object); } }

// Removes the attribute and notifies listeners (fired even if it was not present,
// once the map exists).
@Override
public void removeAttribute(final String name) { if (attributes == null) { return; } Object exiting = attributes.remove(name); servletContext.getDeployment().getApplicationListeners().servletRequestAttributeRemoved(this, name, exiting); }

@Override
public Locale getLocale() { return getLocales().nextElement(); }

// Locales from the Accept-Language header; falls back to the JVM default locale
// so the enumeration is never empty.
@Override
public Enumeration<Locale> getLocales() { final List<String> acceptLanguage = exchange.getRequestHeaders().get(Headers.ACCEPT_LANGUAGE); List<Locale> ret = LocaleUtils.getLocalesFromHeader(acceptLanguage); if(ret.isEmpty()) { return new IteratorEnumeration<>(Collections.singletonList(Locale.getDefault()).iterator()); } return new IteratorEnumeration<>(ret.iterator()); }

@Override
public boolean isSecure() { return exchange.isSecure(); }

// Absolute paths are used as-is; relative paths are resolved against the current
// request's directory and canonicalized (removing "." / "..").
@Override
public RequestDispatcher getRequestDispatcher(final String path) { String realPath; if (path.startsWith("/")) { realPath = path; } else { String current = exchange.getRelativePath(); int lastSlash = current.lastIndexOf("/"); if (lastSlash != -1) { current = current.substring(0, lastSlash + 1); } realPath = CanonicalPathUtils.canonicalize(current + path); } return new RequestDispatcherImpl(realPath, servletContext); }

@Override
public String getRealPath(final String path) { return servletContext.getRealPath(path); }

@Override
public int getRemotePort() { return exchange.getSourceAddress().getPort(); }

/**
 * Returns the host name of the local (destination) address. Note that
 * {@link InetAddress#getHostName} may perform a reverse name lookup when the
 * address was not created with a host name; if a security manager denies the
 * lookup, the textual IP address is returned instead.
 */
@Override
public String getLocalName() { return exchange.getDestinationAddress().getHostName(); }

@Override
public String getLocalAddr() { InetSocketAddress destinationAddress = exchange.getDestinationAddress(); if (destinationAddress == null) { return ""; } InetAddress address = destinationAddress.getAddress();
    if (address == null) {
        //this is unresolved, so we just return the host name
        return destinationAddress.getHostString(); }
    return address.getHostAddress(); }

@Override
public int getLocalPort() { return exchange.getDestinationAddress().getPort(); }

@Override
public ServletContextImpl getServletContext() { return servletContext; }

// Starts async processing with the original request/response pair; rejects a second
// start and requests whose chain does not support async.
@Override
public AsyncContext startAsync() throws IllegalStateException { if (!isAsyncSupported()) { throw UndertowServletMessages.MESSAGES.startAsyncNotAllowed(); } else if (asyncStarted) { throw UndertowServletMessages.MESSAGES.asyncAlreadyStarted(); } asyncStarted = true; final ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY); return asyncContext = new AsyncContextImpl(exchange, servletRequestContext.getServletRequest(), servletRequestContext.getServletResponse(), servletRequestContext, false, asyncContext); }

// Starts async processing with (possibly wrapped) request/response objects. Unless the
// deployment allows non-standard wrappers, the supplied objects must be the originals
// or ServletRequestWrapper/ServletResponseWrapper instances.
@Override
public AsyncContext startAsync(final ServletRequest servletRequest, final ServletResponse servletResponse) throws IllegalStateException { final ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY); if (!servletContext.getDeployment().getDeploymentInfo().isAllowNonStandardWrappers()) { if (servletRequestContext.getOriginalRequest() != servletRequest) { if (!(servletRequest instanceof ServletRequestWrapper)) { throw UndertowServletMessages.MESSAGES.requestWasNotOriginalOrWrapper(servletRequest); } } if (servletRequestContext.getOriginalResponse() != servletResponse) { if (!(servletResponse instanceof ServletResponseWrapper)) { throw UndertowServletMessages.MESSAGES.responseWasNotOriginalOrWrapper(servletResponse); } } } if (!isAsyncSupported()) { throw UndertowServletMessages.MESSAGES.startAsyncNotAllowed(); } else if (asyncStarted) { throw UndertowServletMessages.MESSAGES.asyncAlreadyStarted(); } asyncStarted = true; servletRequestContext.setServletRequest(servletRequest); servletRequestContext.setServletResponse(servletResponse); return asyncContext = new AsyncContextImpl(exchange, servletRequest, servletResponse, servletRequestContext, true, asyncContext); }

@Override
public boolean isAsyncStarted() { return asyncStarted; }

@Override
public boolean isAsyncSupported() { return exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).isAsyncSupported(); }

@Override
public AsyncContextImpl getAsyncContext() { if (!isAsyncStarted()) { throw UndertowServletMessages.MESSAGES.asyncNotStarted(); } return asyncContext; }

// Internal accessor that skips the isAsyncStarted() check above.
public AsyncContextImpl getAsyncContextInternal() { return asyncContext; }

@Override
public DispatcherType getDispatcherType() { return exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).getDispatcherType(); }

public Map<String, Deque<String>> getQueryParameters() { if (queryParameters == null) { queryParameters = exchange.getQueryParameters(); } return queryParameters; }

public void setQueryParameters(final Map<String, Deque<String>> queryParameters) { this.queryParameters = queryParameters; }

public void setServletContext(final ServletContextImpl servletContext) { this.servletContext = servletContext; }

// Clears the async-started flag when an async dispatch takes place.
void asyncRequestDispatched() { asyncStarted = false; }

// The getOriginal* family prefers the FORWARD_* attribute, then the ASYNC_* attribute,
// then the live request value — i.e. the values as seen before any dispatch.
public String getOriginalRequestURI() { String uri = (String) getAttribute(RequestDispatcher.FORWARD_REQUEST_URI); if(uri != null) { return uri; } uri = (String) getAttribute(AsyncContext.ASYNC_REQUEST_URI); if(uri != null) { return uri; } return getRequestURI(); }

public String getOriginalServletPath() { String uri = (String) getAttribute(RequestDispatcher.FORWARD_SERVLET_PATH); if(uri != null) { return uri; } uri = (String) getAttribute(AsyncContext.ASYNC_SERVLET_PATH); if(uri != null) { return uri; } return getServletPath(); }

public String getOriginalPathInfo() { String uri = (String) getAttribute(RequestDispatcher.FORWARD_PATH_INFO); if(uri != null) { return uri; } uri = (String) getAttribute(AsyncContext.ASYNC_PATH_INFO); if(uri != null) { return uri; } return getPathInfo(); }

public String getOriginalContextPath() { String uri = (String) getAttribute(RequestDispatcher.FORWARD_CONTEXT_PATH); if(uri != null) { return uri; } uri = (String) getAttribute(AsyncContext.ASYNC_CONTEXT_PATH); if(uri != null) { return uri; } return getContextPath(); }

public String getOriginalQueryString() { String uri = (String) getAttribute(RequestDispatcher.FORWARD_QUERY_STRING); if(uri != null) { return uri; } uri = (String) getAttribute(AsyncContext.ASYNC_QUERY_STRING); if(uri != null) { return uri; } return getQueryString(); }

// Caches where the session id came from (cookie/URL/none); NONE when no session exists.
private SessionConfig.SessionCookieSource sessionCookieSource() { HttpSession session = getSession(false); if(session == null) { return SessionConfig.SessionCookieSource.NONE; } if(sessionCookieSource == null) { sessionCookieSource = originalServletContext.getSessionConfig().sessionCookieSource(exchange); } return sessionCookieSource; }

@Override
public String toString() { return "HttpServletRequestImpl [ " + getMethod() + ' ' + getRequestURI() + " ]"; }

// Returns a push builder only when the underlying connection supports HTTP/2 push.
@Override
public PushBuilder newPushBuilder() { if(exchange.getConnection().isPushSupported()) { return new PushBuilderImpl(this); } return null; }

// Trailer headers received after the body, keyed by lower-cased header name
// (first value only); empty map when no trailers were attached.
@Override
public Map<String, String> getTrailerFields() { HeaderMap trailers = exchange.getAttachment(HttpAttachments.REQUEST_TRAILERS); if(trailers == null) { return Collections.emptyMap(); } Map<String, String> ret = new HashMap<>(); for(HeaderValues entry : trailers) { ret.put(entry.getHeaderName().toString().toLowerCase(Locale.ENGLISH), entry.getFirst()); } return ret; }

// Ready when the request body is fully read, or when the connection can never
// deliver trailers anyway.
@Override
public boolean isTrailerFieldsReady() { if(exchange.isRequestComplete()) { return true; } return !exchange.getConnection().isRequestTrailerFieldsSupported(); } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.connect.model;

import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for updating an hours-of-operation resource on an Amazon Connect instance.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/connect-2017-08-08/UpdateHoursOfOperation" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateHoursOfOperationRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The identifier of the Amazon Connect instance (the instanceId in the instance ARN). */
    private String instanceId;
    /** The identifier of the hours of operation. */
    private String hoursOfOperationId;
    /** The name of the hours of operation. */
    private String name;
    /** The description of the hours of operation. */
    private String description;
    /** The time zone of the hours of operation. */
    private String timeZone;
    /** Configuration information of the hours of operation. */
    private java.util.List<HoursOfOperationConfig> config;

    /**
     * Sets the identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance.
     *
     * @param instanceId the instance identifier
     */
    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    /** @return the identifier of the Amazon Connect instance */
    public String getInstanceId() {
        return this.instanceId;
    }

    /**
     * Fluent variant of {@link #setInstanceId(String)}.
     *
     * @return this request, so calls can be chained
     */
    public UpdateHoursOfOperationRequest withInstanceId(String instanceId) {
        setInstanceId(instanceId);
        return this;
    }

    /**
     * Sets the identifier of the hours of operation.
     *
     * @param hoursOfOperationId the hours-of-operation identifier
     */
    public void setHoursOfOperationId(String hoursOfOperationId) {
        this.hoursOfOperationId = hoursOfOperationId;
    }

    /** @return the identifier of the hours of operation */
    public String getHoursOfOperationId() {
        return this.hoursOfOperationId;
    }

    /**
     * Fluent variant of {@link #setHoursOfOperationId(String)}.
     *
     * @return this request, so calls can be chained
     */
    public UpdateHoursOfOperationRequest withHoursOfOperationId(String hoursOfOperationId) {
        setHoursOfOperationId(hoursOfOperationId);
        return this;
    }

    /**
     * Sets the name of the hours of operation.
     *
     * @param name the display name
     */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the name of the hours of operation */
    public String getName() {
        return this.name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @return this request, so calls can be chained
     */
    public UpdateHoursOfOperationRequest withName(String name) {
        setName(name);
        return this;
    }

    /**
     * Sets the description of the hours of operation.
     *
     * @param description the description text
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /** @return the description of the hours of operation */
    public String getDescription() {
        return this.description;
    }

    /**
     * Fluent variant of {@link #setDescription(String)}.
     *
     * @return this request, so calls can be chained
     */
    public UpdateHoursOfOperationRequest withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * Sets the time zone of the hours of operation.
     *
     * @param timeZone the time-zone name
     */
    public void setTimeZone(String timeZone) {
        this.timeZone = timeZone;
    }

    /** @return the time zone of the hours of operation */
    public String getTimeZone() {
        return this.timeZone;
    }

    /**
     * Fluent variant of {@link #setTimeZone(String)}.
     *
     * @return this request, so calls can be chained
     */
    public UpdateHoursOfOperationRequest withTimeZone(String timeZone) {
        setTimeZone(timeZone);
        return this;
    }

    /** @return the configuration entries, or null when none have been set */
    public java.util.List<HoursOfOperationConfig> getConfig() {
        return config;
    }

    /**
     * Replaces the configuration list with a copy of the given collection; a null
     * argument clears the list.
     *
     * @param config the configuration entries
     */
    public void setConfig(java.util.Collection<HoursOfOperationConfig> config) {
        this.config = (config == null) ? null : new java.util.ArrayList<HoursOfOperationConfig>(config);
    }

    /**
     * Appends the given entries to the configuration list, creating it on first use.
     * Use {@link #setConfig(java.util.Collection)} or {@link #withConfig(java.util.Collection)}
     * to replace the existing values instead.
     *
     * @param config configuration entries to append
     * @return this request, so calls can be chained
     */
    public UpdateHoursOfOperationRequest withConfig(HoursOfOperationConfig... config) {
        if (this.config == null) {
            setConfig(new java.util.ArrayList<HoursOfOperationConfig>(config.length));
        }
        java.util.Collections.addAll(this.config, config);
        return this;
    }

    /**
     * Fluent variant of {@link #setConfig(java.util.Collection)}.
     *
     * @return this request, so calls can be chained
     */
    public UpdateHoursOfOperationRequest withConfig(java.util.Collection<HoursOfOperationConfig> config) {
        setConfig(config);
        return this;
    }

    /**
     * Returns a string representation of this object, listing every non-null field.
     * Useful for testing and debugging; sensitive data would be shown as a
     * placeholder value.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getInstanceId() != null) {
            buf.append("InstanceId: ").append(getInstanceId()).append(",");
        }
        if (getHoursOfOperationId() != null) {
            buf.append("HoursOfOperationId: ").append(getHoursOfOperationId()).append(",");
        }
        if (getName() != null) {
            buf.append("Name: ").append(getName()).append(",");
        }
        if (getDescription() != null) {
            buf.append("Description: ").append(getDescription()).append(",");
        }
        if (getTimeZone() != null) {
            buf.append("TimeZone: ").append(getTimeZone()).append(",");
        }
        if (getConfig() != null) {
            buf.append("Config: ").append(getConfig());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof UpdateHoursOfOperationRequest)) {
            return false;
        }
        UpdateHoursOfOperationRequest other = (UpdateHoursOfOperationRequest) obj;
        // Two requests are equal when every field pair is null-safely equal.
        return java.util.Objects.equals(getInstanceId(), other.getInstanceId())
                && java.util.Objects.equals(getHoursOfOperationId(), other.getHoursOfOperationId())
                && java.util.Objects.equals(getName(), other.getName())
                && java.util.Objects.equals(getDescription(), other.getDescription())
                && java.util.Objects.equals(getTimeZone(), other.getTimeZone())
                && java.util.Objects.equals(getConfig(), other.getConfig());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // Same 31-based accumulation (and therefore the same hash values) as the
        // generator's unrolled form.
        for (Object field : new Object[] { getInstanceId(), getHoursOfOperationId(), getName(), getDescription(),
                getTimeZone(), getConfig() }) {
            result = prime * result + ((field == null) ? 0 : field.hashCode());
        }
        return result;
    }

    @Override
    public UpdateHoursOfOperationRequest clone() {
        return (UpdateHoursOfOperationRequest) super.clone();
    }
}
/*
 * XMLParser.java
 *
 * Created on June 17, 2005, 10:12 PM
 */
package org.xmlactions.common.xml;

import java.util.Vector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Lightweight pull-style XML parser built on top of {@link XMLReader}'s byte
 * cursor ({@code curPos}). "Peek" methods save and restore the cursor so they
 * do not consume input; other methods advance it.
 *
 * Fixes vs. previous revision: legacy {@code StringBuffer} replaced with
 * {@code StringBuilder} (no shared state, so no synchronization is needed),
 * redundant {@code == true/false} comparisons removed, and the error message in
 * {@link #getNameOfNode()} now names the correct method.
 *
 * @author MMURPHY
 */
public class XMLParser extends XMLReader {

    // Retained for ad-hoc debugging (currently only referenced from commented-out traces).
    private static Logger log = LoggerFactory.getLogger(XMLParser.class);

    /** Creates a parser with no buffer attached. */
    public XMLParser() {
        super();
    }

    /** Creates a parser over {@code buffer}, optionally enabling debug tracing. */
    public XMLParser(byte[] buffer, boolean debug) {
        super(buffer);
        super.debug = debug;
    }

    /** Creates a parser over {@code buffer}. */
    public XMLParser(byte[] buffer) {
        super(buffer);
    }

    /**
     * Peeks at the name of the node following the current position without
     * consuming input.
     *
     * @return the next node's name, or null if none is found
     */
    public String getNextNodeNameAsString() {
        int savedPos = this.curPos;
        this.curPos++;
        String name = getNameOfNode();
        this.curPos = savedPos; // restore — this is a peek
        return name;
    }

    /**
     * Peeks at the name of the node at the current position without consuming
     * input.
     *
     * @return the node name, or null if none is found
     */
    public String getNodeNameAsString() {
        int savedPos = this.curPos;
        String name = getNameOfNode();
        this.curPos = savedPos; // restore — this is a peek
        return name;
    }

    /**
     * Reads the element name at/after the cursor. Closing tags ("/") and
     * processing instructions ("?") encountered at the start of a tag are
     * skipped and the search continues at the next start element.
     *
     * @return the element name, or null on end of buffer or parse error
     */
    protected String getNameOfNode() {
        if (this.findStartElement() <= -1) {
            return null;
        }
        StringBuilder sb = new StringBuilder();
        this.skipWhiteSpace();
        int charsRead = 0;
        while (true) {
            try {
                byte b = read();
                if (this.isWhiteSpace(b)) {
                    break;
                } else if (charsRead == 0 && (b == '/' || b == '?')) {
                    // Closing tag or PI — restart the scan at the next start element.
                    this.findStartElement();
                    this.skipWhiteSpace();
                    charsRead = 0;
                    continue;
                } else if (b == '/' || b == '>') {
                    break;
                }
                sb.append((char) b);
                charsRead++;
            } catch (EndOfBufferException e) {
                return null;
            } catch (Exception ex) {
                // fixed: message previously referred to a non-existent "getNextNode"
                error.append(this.getClass().getName() + ".getNameOfNode Exception:" + ex.getMessage());
                return null;
            }
        }
        return sb.toString();
    }

    /**
     * @return the next attribute name (cursor is restored on failure), or null
     */
    public String getAttributeNameAsString() {
        return getNameOfAttribute();
    }

    /**
     * Reads the next attribute name. Assumes the cursor is at either the element
     * name (preceded by '&lt;') or the next attribute name; the cursor is
     * restored when nothing is found.
     *
     * @return the attribute name, or null if none remains on this element
     */
    protected String getNameOfAttribute() {
        int markPos = this.getCurPos();
        StringBuilder sb = new StringBuilder();
        this.skipWhiteSpace();
        while (true) { // read until we get an attribute or the end of the element
            try {
                byte b = read();
                if (b == '<') {
                    // We are at the beginning of the element.
                    this.skipWhiteSpace(); // move to element name
                    this.skipXMLName();    // skip element name
                    this.skipWhiteSpace(); // move to attribute name if one exists
                } else if (!this.isXMLNameChar(b)) {
                    curPos--; // push the terminator back
                    break;
                } else {
                    sb.append((char) b);
                }
            } catch (EndOfBufferException e) {
                this.setCurPos(markPos);
                return null;
            } catch (Exception ex) {
                this.setCurPos(markPos);
                error.append(this.getClass().getName() + ".getNameOfAttribute Exception:" + ex.getMessage());
                return null;
            }
        }
        if (sb.length() > 0) {
            return sb.toString();
        }
        this.setCurPos(markPos);
        return null;
    }

    /**
     * Reads the next attribute name/value pair, advancing the cursor.
     *
     * @return the attribute, or null when no further attribute exists (also
     *         returned defensively when the cursor moved backwards, to avoid an
     *         infinite loop on duplicate attribute names)
     */
    public XMLAttribute getNextAttribute() {
        String name = getNameOfNextAttribute();
        int posAfterName = this.curPos;
        if (name == null || name.length() == 0) {
            return null;
        }
        // TODO: must fix this to get the value for this named attribute when two
        // attributes with the same name exist.
        Object value = this.getAttributeValue();
        if (posAfterName > this.curPos) {
            // Kludge: the cursor went backwards, which would otherwise make the
            // caller loop forever when two attributes share the same name.
            return null;
        }
        return new XMLAttribute(name, value);
    }

    /**
     * Reads the next attribute name without restoring the cursor on failure.
     * Assumes the cursor is at either the element name (preceded by '&lt;') or
     * the next attribute name.
     *
     * @return the attribute name, or null if none remains on this element
     */
    protected String getNameOfNextAttribute() {
        StringBuilder sb = new StringBuilder();
        this.skipWhiteSpace();
        while (true) { // read until we get an attribute or the end of the element
            try {
                byte b = read();
                if (b == '<') {
                    // We are at the beginning of the element.
                    this.skipWhiteSpace(); // move to element name
                    this.skipXMLName();    // skip element name
                    this.skipWhiteSpace(); // move to attribute name if one exists
                } else if (!this.isXMLNameChar(b)) {
                    curPos--; // push the terminator back
                    break;
                } else {
                    sb.append((char) b);
                }
            } catch (EndOfBufferException e) {
                return null;
            } catch (Exception ex) {
                error.append(this.getClass().getName() + ".getNameOfAttribute Exception:" + ex.getMessage());
                return null;
            }
        }
        if (sb.length() > 0) {
            return sb.toString();
        }
        return null;
    }

    /**
     * @return the attributes of the current node formatted as
     *         {@code name="value" att1="value1" ...} (note the trailing space),
     *         or "" when the node has no attributes
     */
    public String getAttributeList() {
        StringBuilder sb = new StringBuilder();
        String attName;
        while ((attName = getAttributeNameAsString()) != null) {
            String value = getAttributeValue(attName);
            if (value != null) {
                sb.append(attName);
                sb.append("=\"");
                sb.append(value);
                sb.append("\" ");
            }
        }
        if (sb.length() > 0) {
            return sb.toString();
        }
        return "";
    }

    /**
     * @return a Vector of {@link XMLAttribute} entries for the current node
     *         (empty when the node has no attributes)
     */
    public Vector<XMLAttribute> getAttributes() {
        Vector<XMLAttribute> attributes = new Vector<XMLAttribute>();
        XMLAttribute att;
        while ((att = this.getNextAttribute()) != null) {
            attributes.add(att);
        }
        return attributes;
    }
}
/*
 * Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.gcardone.junidecode;

/**
 * Character map for Unicode characters with codepoint U+73xx.
 * Index i in {@link #map} holds the ASCII transliteration of codepoint
 * U+73<i>ii</i>; "[?] " marks codepoints with no transliteration.
 * @author Giuseppe Cardone
 * @version 0.1
 */
class X73 {

    // 256 entries, one per low byte of the codepoint; comments give the range.
    public static final String[] map = new String[]{
        "Sha ", "Li ", "Han ", "Xian ", "Jing ", "Pai ", "Fei ", "Yao ",          // 0x00-0x07
        "Ba ", "Qi ", "Ni ", "Biao ", "Yin ", "Lai ", "Xi ", "Jian ",             // 0x08-0x0f
        "Qiang ", "Kun ", "Yan ", "Guo ", "Zong ", "Mi ", "Chang ", "Yi ",        // 0x10-0x17
        "Zhi ", "Zheng ", "Ya ", "Meng ", "Cai ", "Cu ", "She ", "Kari ",         // 0x18-0x1f
        "Cen ", "Luo ", "Hu ", "Zong ", "Ji ", "Wei ", "Feng ", "Wo ",            // 0x20-0x27
        "Yuan ", "Xing ", "Zhu ", "Mao ", "Wei ", "Yuan ", "Xian ", "Tuan ",      // 0x28-0x2f
        "Ya ", "Nao ", "Xie ", "Jia ", "Hou ", "Bian ", "You ", "You ",           // 0x30-0x37
        "Mei ", "Zha ", "Yao ", "Sun ", "Bo ", "Ming ", "Hua ", "Yuan ",          // 0x38-0x3f
        "Sou ", "Ma ", "Yuan ", "Dai ", "Yu ", "Shi ", "Hao ", "[?] ",            // 0x40-0x47
        "Yi ", "Zhen ", "Chuang ", "Hao ", "Man ", "Jing ", "Jiang ", "Mu ",      // 0x48-0x4f
        "Zhang ", "Chan ", "Ao ", "Ao ", "Hao ", "Cui ", "Fen ", "Jue ",          // 0x50-0x57
        "Bi ", "Bi ", "Huang ", "Pu ", "Lin ", "Yu ", "Tong ", "Yao ",            // 0x58-0x5f
        "Liao ", "Shuo ", "Xiao ", "Swu ", "Ton ", "Xi ", "Ge ", "Juan ",         // 0x60-0x67
        "Du ", "Hui ", "Kuai ", "Xian ", "Xie ", "Ta ", "Xian ", "Xun ",          // 0x68-0x6f
        "Ning ", "Pin ", "Huo ", "Nou ", "Meng ", "Lie ", "Nao ", "Guang ",       // 0x70-0x77
        "Shou ", "Lu ", "Ta ", "Xian ", "Mi ", "Rang ", "Huan ", "Nao ",          // 0x78-0x7f
        "Luo ", "Xian ", "Qi ", "Jue ", "Xuan ", "Miao ", "Zi ", "Lu ",           // 0x80-0x87
        "Lu ", "Yu ", "Su ", "Wang ", "Qiu ", "Ga ", "Ding ", "Le ",              // 0x88-0x8f
        "Ba ", "Ji ", "Hong ", "Di ", "Quan ", "Gan ", "Jiu ", "Yu ",             // 0x90-0x97
        "Ji ", "Yu ", "Yang ", "Ma ", "Gong ", "Wu ", "Fu ", "Wen ",              // 0x98-0x9f
        "Jie ", "Ya ", "Fen ", "Bian ", "Beng ", "Yue ", "Jue ", "Yun ",          // 0xa0-0xa7
        "Jue ", "Wan ", "Jian ", "Mei ", "Dan ", "Pi ", "Wei ", "Huan ",          // 0xa8-0xaf
        "Xian ", "Qiang ", "Ling ", "Dai ", "Yi ", "An ", "Ping ", "Dian ",       // 0xb0-0xb7
        "Fu ", "Xuan ", "Xi ", "Bo ", "Ci ", "Gou ", "Jia ", "Shao ",             // 0xb8-0xbf
        "Po ", "Ci ", "Ke ", "Ran ", "Sheng ", "Shen ", "Yi ", "Zu ",             // 0xc0-0xc7
        "Jia ", "Min ", "Shan ", "Liu ", "Bi ", "Zhen ", "Zhen ", "Jue ",         // 0xc8-0xcf
        "Fa ", "Long ", "Jin ", "Jiao ", "Jian ", "Li ", "Guang ", "Xian ",       // 0xd0-0xd7
        "Zhou ", "Gong ", "Yan ", "Xiu ", "Yang ", "Xu ", "Luo ", "Su ",          // 0xd8-0xdf
        "Zhu ", "Qin ", "Ken ", "Xun ", "Bao ", "Er ", "Xiang ", "Yao ",          // 0xe0-0xe7
        "Xia ", "Heng ", "Gui ", "Chong ", "Xu ", "Ban ", "Pei ", "[?] ",         // 0xe8-0xef
        "Dang ", "Ei ", "Hun ", "Wen ", "E ", "Cheng ", "Ti ", "Wu ",             // 0xf0-0xf7
        "Wu ", "Cheng ", "Jun ", "Mei ", "Bei ", "Ting ", "Xian ", "Chuo "        // 0xf8-0xff
    };
}
/**
 * Copyright (C) 2015 - present McLeod Moores Software Limited. All rights reserved.
 *
 * Please see distribution for license.
 */
package com.mcleodmoores.starling.client.marketdata;

import static com.opengamma.core.value.MarketDataRequirementNames.MARKET_VALUE;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.testng.annotations.Test;
import org.threeten.bp.Instant;
import org.threeten.bp.LocalDate;
import org.threeten.bp.OffsetTime;
import org.threeten.bp.Period;
import org.threeten.bp.ZonedDateTime;

import com.mcleodmoores.starling.client.portfolio.PortfolioKey;
import com.mcleodmoores.starling.client.portfolio.PortfolioManager;
import com.mcleodmoores.starling.client.results.ViewKey;
import com.mcleodmoores.starling.client.testutils.StarlingTestUtils;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.analytics.math.interpolation.factory.LinearInterpolator1dAdapter;
import com.opengamma.core.config.impl.ConfigItem;
import com.opengamma.core.historicaltimeseries.HistoricalTimeSeries;
import com.opengamma.core.historicaltimeseries.HistoricalTimeSeriesSource;
import com.opengamma.core.id.ExternalSchemes;
import com.opengamma.core.position.impl.SimpleCounterparty;
import com.opengamma.core.position.impl.SimplePortfolio;
import com.opengamma.core.position.impl.SimplePortfolioNode;
import com.opengamma.core.position.impl.SimplePosition;
import com.opengamma.core.position.impl.SimpleTrade;
import com.opengamma.core.security.impl.SimpleSecurityLink;
import com.opengamma.core.value.MarketDataRequirementNames;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.function.config.FunctionConfigurationDefinition;
import com.opengamma.engine.function.config.ParameterizedFunctionConfiguration;
import com.opengamma.engine.function.config.StaticFunctionConfiguration;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.engine.view.ViewCalculationConfiguration;
import com.opengamma.engine.view.ViewDefinition;
import com.opengamma.financial.analytics.curve.CurveDefinition;
import com.opengamma.financial.analytics.curve.CurveMarketDataFunction;
import com.opengamma.financial.analytics.curve.CurveNodeIdMapper;
import com.opengamma.financial.analytics.curve.InterpolatedCurveDefinition;
import com.opengamma.financial.analytics.ircurve.CurveInstrumentProvider;
import com.opengamma.financial.analytics.ircurve.StaticCurveInstrumentProvider;
import com.opengamma.financial.analytics.ircurve.strips.CashNode;
import com.opengamma.financial.analytics.ircurve.strips.CurveNode;
import com.opengamma.financial.analytics.model.simpleinstrument.SimpleFuturePresentValueFunction;
import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesFunction;
import com.opengamma.financial.security.future.EnergyFutureSecurity;
import com.opengamma.financial.security.future.FutureSecurity;
import com.opengamma.financial.tool.ToolContext;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.id.UniqueId;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMasterUtils;
import com.opengamma.master.position.ManageableTrade;
import com.opengamma.timeseries.date.localdate.ImmutableLocalDateDoubleTimeSeries;
import com.opengamma.timeseries.date.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;
import com.opengamma.util.test.TestGroup;
import com.opengamma.util.time.Expiry;
import com.opengamma.util.time.Tenor;

/**
 * Unit tests for {@link MarketDataManager}.
 */
@Test(groups = TestGroup.UNIT, singleThreaded = true)
public class MarketDataManagerTest {
  /** Scheme for market data used in this test */
  private static final String TEST_SCHEME = "MY_SCHEME";

  /**
   * Creates a test market data set: three scalar FX rates (AUDUSD, NZDUSD,
   * GBPUSD) and one two-point time series (GBP1Y), all keyed by MARKET_VALUE.
   * @return the market data set
   */
  private static MarketDataSet createTestMarketData() {
    final MarketDataSet dataSet = MarketDataSet.empty();
    dataSet.put(MarketDataKey.of(ExternalId.of(TEST_SCHEME, "AUDUSD").toBundle(),
        DataField.of(MarketDataRequirementNames.MARKET_VALUE)), 1.8);
    dataSet.put(MarketDataKey.of(ExternalId.of(TEST_SCHEME, "NZDUSD").toBundle(),
        DataField.of(MarketDataRequirementNames.MARKET_VALUE)), 2.2);
    dataSet.put(MarketDataKey.of(ExternalId.of(TEST_SCHEME, "GBPUSD").toBundle(),
        DataField.of(MarketDataRequirementNames.MARKET_VALUE)), 1.5);
    dataSet.put(MarketDataKey.of(ExternalId.of(TEST_SCHEME, "GBP1Y").toBundle(),
        DataField.of(MarketDataRequirementNames.MARKET_VALUE)),
        ImmutableLocalDateDoubleTimeSeries.builder()
            .putAll(new LocalDate[] {LocalDate.of(2016, 1, 1), LocalDate.of(2016, 1, 2)},
                new double[] {0.01, 0.02}).build());
    return dataSet;
  }

  /**
   * Tests saving and updating market data using the manager. Scalars are
   * stored as single-point time series dated "today"; updates overwrite
   * only the series present in the second data set.
   */
  @Test
  public void testSaveOrUpdate() {
    // note these configurations should be used as time series are actually stored and retrieved from the source
    final ToolContext toolContext = StarlingTestUtils.getToolContext("/inmemory/marketdata-test.properties");
    final LocalDate today = LocalDate.now();
    final MarketDataManager marketDataManager = new MarketDataManager(toolContext);
    // saves the data
    marketDataManager.saveOrUpdate(createTestMarketData(), today);
    final HistoricalTimeSeriesSource source = toolContext.getHistoricalTimeSeriesSource();
    HistoricalTimeSeries historicalTimeSeries1 = source.getHistoricalTimeSeries(ExternalId.of(TEST_SCHEME, "AUDUSD").toBundle(),
        DataSource.DEFAULT.getName(), DataProvider.DEFAULT.getName(), MARKET_VALUE);
    LocalDateDoubleTimeSeries timeSeries1 = historicalTimeSeries1.getTimeSeries();
    assertEquals(timeSeries1.size(), 1);
    assertEquals(timeSeries1.getValue(today), 1.8);
    HistoricalTimeSeries historicalTimeSeries2 = source.getHistoricalTimeSeries(ExternalId.of(TEST_SCHEME, "NZDUSD").toBundle(),
        DataSource.DEFAULT.getName(), DataProvider.DEFAULT.getName(), MARKET_VALUE);
    LocalDateDoubleTimeSeries timeSeries2 = historicalTimeSeries2.getTimeSeries();
    assertEquals(timeSeries2.size(), 1);
    assertEquals(timeSeries2.getValue(today), 2.2);
    HistoricalTimeSeries historicalTimeSeries3 = source.getHistoricalTimeSeries(ExternalId.of(TEST_SCHEME, "GBPUSD").toBundle(),
        DataSource.DEFAULT.getName(), DataProvider.DEFAULT.getName(), MARKET_VALUE);
    LocalDateDoubleTimeSeries timeSeries3 = historicalTimeSeries3.getTimeSeries();
    assertEquals(timeSeries3.size(), 1);
    assertEquals(timeSeries3.getValue(today), 1.5);
    HistoricalTimeSeries historicalTimeSeries4 = source.getHistoricalTimeSeries(ExternalId.of(TEST_SCHEME, "GBP1Y").toBundle(),
        DataSource.DEFAULT.getName(), DataProvider.DEFAULT.getName(), MARKET_VALUE);
    LocalDateDoubleTimeSeries timeSeries4 = historicalTimeSeries4.getTimeSeries();
    assertEquals(timeSeries4.size(), 2);
    assertEquals(timeSeries4.getValue(LocalDate.of(2016, 1, 1)), 0.01);
    assertEquals(timeSeries4.getValue(LocalDate.of(2016, 1, 2)), 0.02);
    // updates the data for some of the series
    final MarketDataSet updatedData = MarketDataSet.empty();
    updatedData.put(MarketDataKey.of(ExternalId.of(TEST_SCHEME, "AUDUSD").toBundle(),
        DataField.of(MarketDataRequirementNames.MARKET_VALUE)), 1.9);
    updatedData.put(MarketDataKey.of(ExternalId.of(TEST_SCHEME, "GBP1Y").toBundle(),
        DataField.of(MarketDataRequirementNames.MARKET_VALUE)),
        ImmutableLocalDateDoubleTimeSeries.builder().putAll(new LocalDate[] {LocalDate.of(2016, 1, 1),
            LocalDate.of(2016, 1, 2)}, new double[] {0.01, 0.03}).build());
    marketDataManager.saveOrUpdate(updatedData, today);
    // AUDUSD was updated
    historicalTimeSeries1 = source.getHistoricalTimeSeries(ExternalId.of(TEST_SCHEME, "AUDUSD").toBundle(),
        DataSource.DEFAULT.getName(), DataProvider.DEFAULT.getName(), MARKET_VALUE);
    timeSeries1 = historicalTimeSeries1.getTimeSeries();
    assertEquals(timeSeries1.size(), 1);
    assertEquals(timeSeries1.getValue(today), 1.9);
    // NZDUSD was not in the update set and is unchanged
    historicalTimeSeries2 = source.getHistoricalTimeSeries(ExternalId.of(TEST_SCHEME, "NZDUSD").toBundle(),
        DataSource.DEFAULT.getName(), DataProvider.DEFAULT.getName(), MARKET_VALUE);
    timeSeries2 = historicalTimeSeries2.getTimeSeries();
    assertEquals(timeSeries2.size(), 1);
    assertEquals(timeSeries2.getValue(today), 2.2);
    // GBPUSD was not in the update set and is unchanged
    historicalTimeSeries3 = source.getHistoricalTimeSeries(ExternalId.of(TEST_SCHEME, "GBPUSD").toBundle(),
        DataSource.DEFAULT.getName(), DataProvider.DEFAULT.getName(), MARKET_VALUE);
    timeSeries3 = historicalTimeSeries3.getTimeSeries();
    assertEquals(timeSeries3.size(), 1);
    assertEquals(timeSeries3.getValue(today), 1.5);
    // GBP1Y had its second point updated
    historicalTimeSeries4 = source.getHistoricalTimeSeries(ExternalId.of(TEST_SCHEME, "GBP1Y").toBundle(),
        DataSource.DEFAULT.getName(), DataProvider.DEFAULT.getName(), MARKET_VALUE);
    timeSeries4 = historicalTimeSeries4.getTimeSeries();
    assertEquals(timeSeries4.size(), 2);
    assertEquals(timeSeries4.getValue(LocalDate.of(2016, 1, 1)), 0.01);
    assertEquals(timeSeries4.getValue(LocalDate.of(2016, 1, 2)), 0.03);
  }

  /**
   * Tests that the save or update method can handle an input that is neither a scalar nor time series.
   * Such values are ignored, so nothing should be stored.
   */
  @Test
  public void testSaveWrongDataType() {
    final MarketDataSet dataSet = MarketDataSet.empty();
    dataSet.put(MarketDataKey.of(ExternalId.of(TEST_SCHEME, "JPYUSD").toBundle(),
        DataField.of(MarketDataRequirementNames.MARKET_VALUE)), "NaN");
    final ToolContext toolContext = StarlingTestUtils.getToolContext("/inmemory/marketdata-test.properties");
    final LocalDate today = LocalDate.now();
    final MarketDataManager marketDataManager = new MarketDataManager(toolContext);
    // saves the data
    marketDataManager.saveOrUpdate(dataSet, today);
    final HistoricalTimeSeriesSource source = toolContext.getHistoricalTimeSeriesSource();
    assertNull(source.getHistoricalTimeSeries(ExternalId.of(TEST_SCHEME, "JPYUSD").toBundle(),
        DataSource.DEFAULT.getName(), DataProvider.DEFAULT.getName(), MARKET_VALUE));
  }

  /**
   * Tests the behaviour when required market data is requested for a view that is not stored in the master.
   */
  @Test(expectedExceptions = OpenGammaRuntimeException.class)
  public void testNoViewInMaster() {
    final ToolContext toolContext = StarlingTestUtils.getToolContext("/inmemory/marketdata-test.properties");
    final ZonedDateTime today = ZonedDateTime.now();
    final Instant now = Instant.from(today);
    final MarketDataManager marketDataManager = new MarketDataManager(toolContext);
    final FunctionConfigurationDefinition functionDefinition = new FunctionConfigurationDefinition("TEST_FUNCTIONS",
        Arrays.asList("TEST_FUNCTIONS"), Collections.<StaticFunctionConfiguration>emptyList(),
        Collections.<ParameterizedFunctionConfiguration>emptyList());
    ConfigMasterUtils.storeByName(toolContext.getConfigMaster(),
        ConfigItem.of(functionDefinition, functionDefinition.getName(), FunctionConfigurationDefinition.class));
    // this view key does not correspond to anything in the config master
    final ViewKey viewKey = ViewKey.of("TEST", UniqueId.of("TEST", "1"));
    marketDataManager.getRequiredDataForView(viewKey, now);
  }

  /**
   * Tests that all of the required market data is identified for a view. In this test, the market data
   * required to produce the {@link ValueRequirementNames#CURVE_MARKET_DATA} for a single curve is all
   * of the ids returned from the curve node id mapper referenced in the curve nodes and does not include
   * any time series data.
   */
  @Test
  public void testGetRequiredDataNoTimeSeries() {
    // the always available live market data provider in this configuration means that the graph will build
    final ToolContext toolContext = StarlingTestUtils.getToolContext("/inmemory/marketdata-test.properties");
    final Set<CurveNode> nodes = new HashSet<>();
    final Map<Tenor, CurveInstrumentProvider> ids = new HashMap<>();
    final Set<ExternalId> expectedIds = new HashSet<>();
    final ExternalId conventionId = ExternalId.of("TEST", "Deposit convention");
    final String curveNodeIdMapperName = "Ids";
    // build ten cash nodes with tenors 0M..9M, each with its own synthetic ticker
    for (int i = 0; i < 10; i++) {
      final Tenor tenor = Tenor.ofMonths(i);
      final ExternalId id = ExternalSchemes.syntheticSecurityId(i + "MCASH");
      expectedIds.add(id);
      nodes.add(new CashNode(Tenor.of(Period.ZERO), tenor, conventionId, curveNodeIdMapperName));
      ids.put(tenor, new StaticCurveInstrumentProvider(id));
    }
    final CurveNodeIdMapper cnim = CurveNodeIdMapper.builder()
        .name(curveNodeIdMapperName)
        .cashNodeIds(ids)
        .build();
    final String curveName = "Curve";
    final CurveDefinition curveDefinition = new InterpolatedCurveDefinition(curveName, nodes, LinearInterpolator1dAdapter.NAME);
    final ZonedDateTime today = ZonedDateTime.now();
    final Instant now = Instant.from(today);
    final MarketDataManager marketDataManager = new MarketDataManager(toolContext);
    final FunctionConfigurationDefinition functionDefinition = new FunctionConfigurationDefinition("TEST_FUNCTIONS",
        Arrays.asList("TEST_FUNCTIONS"), Collections.<StaticFunctionConfiguration>emptyList(),
        Collections.singletonList(new ParameterizedFunctionConfiguration(CurveMarketDataFunction.class.getName(),
            Collections.singletonList(curveName))));
    ConfigMasterUtils.storeByName(toolContext.getConfigMaster(),
        ConfigItem.of(functionDefinition, functionDefinition.getName(), FunctionConfigurationDefinition.class));
    ConfigMasterUtils.storeByName(toolContext.getConfigMaster(),
        ConfigItem.of(curveDefinition, curveDefinition.getName(), InterpolatedCurveDefinition.class));
    ConfigMasterUtils.storeByName(toolContext.getConfigMaster(),
        ConfigItem.of(cnim, cnim.getName(), CurveNodeIdMapper.class));
    final ViewDefinition viewDefinition = createCurveDataView(toolContext, curveDefinition.getName());
    final ViewKey viewKey = ViewKey.of(viewDefinition.getName(), viewDefinition.getUniqueId());
    final MarketDataInfo requiredData = marketDataManager.getRequiredDataForView(viewKey, now);
    // one scalar per curve node, no time series
    assertEquals(requiredData.getScalars().size(), nodes.size());
    assertTrue(requiredData.getTimeSeries().isEmpty());
    for (final Map.Entry<MarketDataKey, ? extends MarketDataMetaData> entry : requiredData.getScalars().entrySet()) {
      final MarketDataKey key = entry.getKey();
      final ExternalIdBundle idBundle = key.getExternalIdBundle();
      assertEquals(idBundle.size(), 1);
      final Set<ExternalId> idsForScheme = idBundle.getExternalIds(ExternalSchemes.OG_SYNTHETIC_TICKER);
      assertEquals(idsForScheme.size(), 1);
      assertTrue(expectedIds.contains(idsForScheme.iterator().next()));
    }
  }

  /**
   * Tests that all of the required market data is identified for a view requesting the mark-to-market price of
   * a single future. In this case, the pricing function requires a "live" market data value for the future and
   * a time series looking back seven days to get the previous close price.
   */
  @Test
  public void testGetRequiredDataWithTimeSeries() {
    // the always available historical time series resolver in this configuration means that the graph will build
    final ToolContext toolContext = StarlingTestUtils.getToolContext("/inmemory/marketdata-test.properties");
    final ExternalId futureTicker = ExternalSchemes.syntheticSecurityId("ABH6 Index");
    final ExternalId tradeId = ExternalId.of("TEST_ID", "FUTURE");
    final PortfolioKey portfolioKey = createSingleFuturePortfolio(toolContext, futureTicker, tradeId);
    final ViewDefinition viewDefinition = createSimpleFutureView(toolContext, portfolioKey.getUniqueId());
    final FunctionConfigurationDefinition functionDefinition = new FunctionConfigurationDefinition("TEST_FUNCTIONS",
        Arrays.asList("TEST_FUNCTIONS"), Arrays.asList(
            new StaticFunctionConfiguration(SimpleFuturePresentValueFunction.class.getName()),
            new StaticFunctionConfiguration(HistoricalTimeSeriesFunction.class.getName())),
        Collections.<ParameterizedFunctionConfiguration>emptyList());
    ConfigMasterUtils.storeByName(toolContext.getConfigMaster(),
        ConfigItem.of(functionDefinition, functionDefinition.getName(), FunctionConfigurationDefinition.class));
    final ViewKey viewKey = ViewKey.of(viewDefinition.getName(), viewDefinition.getUniqueId());
    final ZonedDateTime today = ZonedDateTime.now();
    final Instant now = Instant.from(today);
    final MarketDataManager marketDataManager = new MarketDataManager(toolContext);
    final MarketDataInfo requiredData = marketDataManager.getRequiredDataForView(viewKey, now);
    // test the scalar data (i.e. the live future price)
    assertEquals(requiredData.getScalars().size(), 1);
    final Map.Entry<MarketDataKey, ? extends MarketDataMetaData> scalarEntry = requiredData.getScalars().entrySet().iterator().next();
    final MarketDataKey scalarKey = scalarEntry.getKey();
    final ExternalIdBundle scalarIdBundle = scalarKey.getExternalIdBundle();
    // two ids in request - the future ticker referenced in the security and the trade id
    assertEquals(scalarIdBundle.size(), 2);
    assertEquals(scalarIdBundle.getExternalIds(ExternalSchemes.OG_SYNTHETIC_TICKER).size(), 1);
    assertEquals(scalarIdBundle.getExternalIds(ExternalSchemes.OG_SYNTHETIC_TICKER).iterator().next(), futureTicker);
    assertEquals(scalarIdBundle.getExternalIds(tradeId.getScheme()).size(), 1);
    assertEquals(scalarIdBundle.getExternalIds(tradeId.getScheme()).iterator().next(), tradeId);
    // test the time series data (i.e. the historical prices of the future)
    assertEquals(requiredData.getTimeSeries().size(), 1);
    final Map.Entry<MarketDataKey, ? extends MarketDataMetaData> tsEntry = requiredData.getTimeSeries().entrySet().iterator().next();
    final MarketDataKey tsKey = tsEntry.getKey();
    final ExternalIdBundle tsIdBundle = tsKey.getExternalIdBundle();
    // two ids in request - the future ticker referenced in the security and the trade id
    assertEquals(tsIdBundle.size(), 2);
    assertEquals(tsIdBundle.getExternalIds(ExternalSchemes.OG_SYNTHETIC_TICKER).size(), 1);
    assertEquals(tsIdBundle.getExternalIds(ExternalSchemes.OG_SYNTHETIC_TICKER).iterator().next(), futureTicker);
    assertEquals(tsIdBundle.getExternalIds(tradeId.getScheme()).size(), 1);
    assertEquals(tsIdBundle.getExternalIds(tradeId.getScheme()).iterator().next(), tradeId);
  }

  //TODO multi-equity portfolio with multiple currencies that are converted

  /**
   * Creates a {@link ViewDefinition} that requests the market data required to construct a single curve.
   * The view is stored in the config master as a side effect.
   * @param toolContext the tool context
   * @param curveName the curve name
   * @return the view definition
   */
  private static ViewDefinition createCurveDataView(final ToolContext toolContext, final String curveName) {
    final ConfigMaster configMaster = ArgumentChecker.notNull(toolContext.getConfigMaster(), "configMaster");
    final String viewName = "Curve Data Test View";
    final ViewDefinition viewDefinition = new ViewDefinition(viewName, null, UserPrincipal.getLocalUser());
    viewDefinition.setDefaultCurrency(Currency.USD);
    viewDefinition.setMaxDeltaCalculationPeriod(500L);
    viewDefinition.setMaxFullCalculationPeriod(500L);
    viewDefinition.setMinDeltaCalculationPeriod(500L);
    viewDefinition.setMinFullCalculationPeriod(500L);
    final ViewCalculationConfiguration defaultCalculationConfig = new ViewCalculationConfiguration(viewDefinition, "Test");
    defaultCalculationConfig.addSpecificRequirement(new ValueRequirement(ValueRequirementNames.CURVE_MARKET_DATA,
        ComputationTargetSpecification.NULL, ValueProperties.builder().with(ValuePropertyNames.CURVE, curveName).get()));
    viewDefinition.addViewCalculationConfiguration(defaultCalculationConfig);
    ConfigMasterUtils.storeByName(configMaster, ConfigItem.of(viewDefinition, viewName));
    return viewDefinition;
  }

  /**
   * Creates a {@link ViewDefinition} that requests the market data and time series required to produce a
   * mark-to-market price for an energy future. The view is stored in the config master as a side effect.
   * @param toolContext the tool context
   * @param portfolioId the portfolio id
   * @return the view definition
   */
  private static ViewDefinition createSimpleFutureView(final ToolContext toolContext, final UniqueId portfolioId) {
    final ConfigMaster configMaster = ArgumentChecker.notNull(toolContext.getConfigMaster(), "configMaster");
    final String viewName = "Simple Future Test View";
    final ViewDefinition viewDefinition = new ViewDefinition(viewName, portfolioId, UserPrincipal.getLocalUser());
    viewDefinition.setDefaultCurrency(Currency.USD);
    viewDefinition.setMaxDeltaCalculationPeriod(500L);
    viewDefinition.setMaxFullCalculationPeriod(500L);
    viewDefinition.setMinDeltaCalculationPeriod(500L);
    viewDefinition.setMinFullCalculationPeriod(500L);
    final ViewCalculationConfiguration defaultCalculationConfig = new ViewCalculationConfiguration(viewDefinition, "Test");
    defaultCalculationConfig.addPortfolioRequirement(FutureSecurity.SECURITY_TYPE,
        ValueRequirementNames.PRESENT_VALUE, ValueProperties.none());
    viewDefinition.addViewCalculationConfiguration(defaultCalculationConfig);
    ConfigMasterUtils.storeByName(configMaster, ConfigItem.of(viewDefinition, viewName));
    return viewDefinition;
  }

  /**
   * Creates and saves a portfolio containing a single energy future trade.
   * @param toolContext the tool context
   * @param futureTicker the future ticker
   * @param tradeId the future trade id
   * @return the portfolio key
   */
  private static PortfolioKey createSingleFuturePortfolio(final ToolContext toolContext, final ExternalId futureTicker,
      final ExternalId tradeId) {
    final List<SimplePosition> positions = new ArrayList<>();
    final ExternalId marketDataId = futureTicker;
    final long tradeQuantity = 1;
    final long positionQuantity = 1;
    final FutureSecurity security = new EnergyFutureSecurity(new Expiry(ZonedDateTime.now().plusMonths(3)),
        "EXCH", "EXCH", Currency.USD, 1000, "Energy");
    security.addExternalId(marketDataId);
    security.addExternalId(tradeId);
    final SimpleTrade trade = new SimpleTrade(security, BigDecimal.valueOf(tradeQuantity),
        new SimpleCounterparty(ExternalId.of("Test", "Ctpty")), LocalDate.now().minusDays(7), OffsetTime.now());
    trade.addAttribute(ManageableTrade.meta().providerId().name(), tradeId.toString());
    final SimplePosition position = new SimplePosition();
    position.addAttribute(ManageableTrade.meta().providerId().name(), tradeId.toString());
    position.setSecurityLink(SimpleSecurityLink.of(security));
    position.setQuantity(BigDecimal.valueOf(positionQuantity));
    position.addTrade(trade);
    positions.add(position);
    final SimplePortfolio portfolio = new SimplePortfolio("Test");
    final SimplePortfolioNode node = new SimplePortfolioNode("Test");
    portfolio.getRootNode().addChildNode(node);
    node.addPositions(positions);
    return new PortfolioManager(toolContext).savePortfolio(portfolio);
  }
}
/* * Copyright 2014 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.samples.apps.iosched.model; import android.content.Context; import android.content.CursorLoader; import android.database.Cursor; import android.provider.BaseColumns; import android.text.TextUtils; import com.google.samples.apps.iosched.Config; import com.google.samples.apps.iosched.archframework.QueryEnum; import com.google.samples.apps.iosched.provider.ScheduleContract; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; public class TagMetadata { // List of tags in each category, sorted by the category sort order. private HashMap<String, ArrayList<Tag>> mTagsInCategory = new HashMap<String, ArrayList<Tag>>(); // Hash map from tag ID to tag. private HashMap<String, Tag> mTagsById = new HashMap<String, Tag>(); // Hash map from tag name to tag id. private HashMap<String, String> mTagsByName = new HashMap<String, String>(); public static CursorLoader createCursorLoader(Context context) { return new CursorLoader(context, ScheduleContract.Tags.CONTENT_URI, TagsQueryEnum.TAG.getProjection(), null, null, null); } protected TagMetadata() { } public TagMetadata(Cursor cursor) { // Not using while(cursor.moveToNext()) because it would lead to issues when writing tests. 
// Either we would mock cursor.moveToNext() to return true and the test would have infinite // loop, or we would mock cursor.moveToNext() to return false, and the test would be for an // empty cursor. int count = cursor.getCount(); for (int i = 0; i < count; i++) { cursor.moveToPosition(i); Tag tag = new Tag(cursor.getString(cursor.getColumnIndex(ScheduleContract.Tags.TAG_ID)), cursor.getString(cursor.getColumnIndex(ScheduleContract.Tags.TAG_NAME)), cursor.getString(cursor.getColumnIndex(ScheduleContract.Tags.TAG_CATEGORY)), cursor.getInt( cursor.getColumnIndex(ScheduleContract.Tags.TAG_ORDER_IN_CATEGORY)), cursor.getString(cursor.getColumnIndex(ScheduleContract.Tags.TAG_ABSTRACT)), cursor.getInt(cursor.getColumnIndex(ScheduleContract.Tags.TAG_COLOR)), cursor.getString(cursor.getColumnIndex(ScheduleContract.Tags.TAG_PHOTO_URL))); mTagsById.put(tag.getId(), tag); mTagsByName.put(tag.getName(), tag.getId()); if (!mTagsInCategory.containsKey(tag.getCategory())) { mTagsInCategory.put(tag.getCategory(), new ArrayList<Tag>()); } mTagsInCategory.get(tag.getCategory()).add(tag); } for (ArrayList<Tag> list : mTagsInCategory.values()) { Collections.sort(list); } } /** * @return the tag with the {@code tagId}, if found. */ public Tag getTagById(String tagId) { return mTagsById.containsKey(tagId) ? mTagsById.get(tagId) : null; } /** * @return the tag with the {@code tagName} if found. */ private Tag getTagByName(String tagName) { String tagId = mTagsByName.containsKey(tagName) ? mTagsByName.get(tagName) : null; return tagId != null ? getTagById(tagId) : null; } /** * @return the tag with the id matching the {@code searchString}, if found; if not found, * returns the tag with the name matching the {@code searchString}, if found. 
*/ public Tag getTag(String searchString) { Tag tagById = getTagById(searchString); if (tagById != null) { return tagById; } else { return getTagByName(searchString); } } public List<Tag> getTagsInCategory(String category) { return mTagsInCategory.containsKey(category) ? Collections.unmodifiableList(mTagsInCategory.get(category)) : null; } /** * Given the set of tags on a session, returns its group label. */ public Tag getSessionGroupTag(String[] sessionTags) { int bestOrder = Integer.MAX_VALUE; Tag bestTag = null; for (String tagId : sessionTags) { Tag tag = getTagById(tagId); if (tag != null && Config.Tags.SESSION_GROUPING_TAG_CATEGORY.equals(tag.getCategory()) && tag.getOrderInCategory() < bestOrder) { bestOrder = tag.getOrderInCategory(); bestTag = tag; } } return bestTag; } @Override public String toString() { return "Tag Metadata has " + mTagsById.size() + " tags in " + mTagsInCategory.size() + " categories"; } public static Comparator<Tag> TAG_DISPLAY_ORDER_COMPARATOR = new Comparator<Tag>() { @Override public int compare(Tag tag, Tag tag2) { if (!TextUtils.equals(tag.getCategory(), tag2.getCategory())) { return Config.getCategoryDisplayOrder(tag.getCategory()) - Config.getCategoryDisplayOrder(tag2.getCategory()); } else if (tag.getOrderInCategory() != tag2.getOrderInCategory()) { return tag.getOrderInCategory() - tag2.getOrderInCategory(); } return tag.getName().compareTo(tag2.getName()); } }; public enum TagsQueryEnum implements QueryEnum { TAG(0, new String[]{ BaseColumns._ID, ScheduleContract.Tags.TAG_ID, ScheduleContract.Tags.TAG_NAME, ScheduleContract.Tags.TAG_CATEGORY, ScheduleContract.Tags.TAG_ORDER_IN_CATEGORY, ScheduleContract.Tags.TAG_ABSTRACT, ScheduleContract.Tags.TAG_COLOR, ScheduleContract.Tags.TAG_PHOTO_URL }); private int id; private String[] projection; TagsQueryEnum(int id, String[] projection) { this.id = id; this.projection = projection; } @Override public int getId() { return id; } @Override public String[] getProjection() { return 
projection; } } static public class Tag implements Comparable<Tag> { private String mId; private String mName; private String mCategory; private int mOrderInCategory; private String mAbstract; private int mColor; private String mPhotoUrl; public Tag(String id, String name, String category, int orderInCategory, String _abstract, int color, String photoUrl) { mId = id; mName = name; mCategory = category; mOrderInCategory = orderInCategory; mAbstract = _abstract; mColor = color; mPhotoUrl = photoUrl; } public String getId() { return mId; } public String getName() { return mName; } public String getCategory() { return mCategory; } public int getOrderInCategory() { return mOrderInCategory; } public String getAbstract() { return mAbstract; } public int getColor() { return mColor; } public String getPhotoUrl() { return mPhotoUrl; } @Override public int compareTo(Tag another) { return mOrderInCategory - another.mOrderInCategory; } @Override public String toString() { return "TagMetadata.Tag: id = " + mId + " name = " + mName; } } }
package edu.pacificu.chordinate.chordinate.algorithm;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Contains functionality based on scales and scale types.
 */
public class Scale {

    public enum ScaleType {
        MAJOR, NATURAL_MINOR, HARMONIC_MINOR, MELODIC_MINOR
    }

    public static final char NATURAL = 'n';
    public static final char SHARP = '#';
    public static final int NUM_NOTES = 12;
    public static final int NUM_SCALE_STEPS = 7;
    public static final int NUM_STEPS_MINOR_SEVENTH = 10;

    /**
     * The twelve chromatic pitches, starting from A. Built with Arrays.asList and
     * wrapped unmodifiable instead of double-brace initialization, which created a
     * needless anonymous ArrayList subclass. The list is only ever read via get().
     */
    private static final List<Note> CHROMATIC_SCALE = Collections.unmodifiableList(Arrays.asList(
            new Note('A', NATURAL, '0', false),
            new Note('A', SHARP, '0', false),
            new Note('B', NATURAL, '0', false),
            new Note('C', NATURAL, '0', false),
            new Note('C', SHARP, '0', false),
            new Note('D', NATURAL, '0', false),
            new Note('D', SHARP, '0', false),
            new Note('E', NATURAL, '0', false),
            new Note('F', NATURAL, '0', false),
            new Note('F', SHARP, '0', false),
            new Note('G', NATURAL, '0', false),
            new Note('G', SHARP, '0', false)));

    /* Step indexes. Represent the number of half steps between each scale degree
       within each scale type. */
    private static final int[] MAJOR_INDEX = {Step.WHOLE, Step.WHOLE, Step.HALF,
            Step.WHOLE, Step.WHOLE, Step.WHOLE, Step.HALF};
    private static final int[] NAT_MINOR_INDEX = {Step.WHOLE, Step.HALF, Step.WHOLE,
            Step.WHOLE, Step.HALF, Step.WHOLE, Step.WHOLE};
    private static final int[] HAR_MINOR_INDEX = {Step.WHOLE, Step.HALF, Step.WHOLE,
            Step.WHOLE, Step.HALF, Step.MINOR_THIRD, Step.HALF};
    private static final int[] MEL_MINOR_INDEX = {Step.WHOLE, Step.HALF, Step.WHOLE,
            Step.WHOLE, Step.WHOLE, Step.WHOLE, Step.HALF};

    /**
     * Finds the index of a note within the chromatic scale.
     *
     * @param note the note to find
     * @return the index of the note within the chromatic scale; falls back to
     *         ChromaticScaleDegree.C when the note is not found
     */
    public static int findNoteInList(Note note) {
        for (int i = 0; i < NUM_NOTES; ++i) {
            if (note.bAreNotesEqual(CHROMATIC_SCALE.get(i))) {
                return i;
            }
        }
        return ChromaticScaleDegree.C;
    }

    /**
     * Determines the number of half steps between the two notes passed in.
     *
     * @param srcNote  the note to start at
     * @param destNote the note to end at
     * @return the number of half steps (distance) between the two notes
     */
    public static int findDistBtwNotes(Note srcNote, Note destNote) {
        int index1 = -1;
        int index2 = -1;
        int dist;

        for (int i = 0; i < NUM_NOTES; ++i) {
            if (srcNote.bAreNotesEqual(CHROMATIC_SCALE.get(i))) {
                index1 = i;
            }
            if (destNote.bAreNotesEqual(CHROMATIC_SCALE.get(i))) {
                index2 = i;
            }
        }

        if (index1 <= index2) {
            dist = index2 - index1;
        } else {
            // Wrap around the octave when the source sits above the destination.
            dist = (NUM_NOTES - index1) + index2;
        }

        return dist;
    }

    /**
     * Calls the addSteps function with the correct step index based on the scale type.
     *
     * @param scaleType      the scale type
     * @param noteDestIndex  the note index to reach
     * @param chordRootIndex the chord root index
     * @return the number of half steps between the chord root and the destination note
     */
    private static int findStep(ScaleType scaleType, int noteDestIndex, int chordRootIndex) {
        switch (scaleType) {
            case MAJOR:
                return addSteps(MAJOR_INDEX, noteDestIndex, chordRootIndex);
            case NATURAL_MINOR:
                return addSteps(NAT_MINOR_INDEX, noteDestIndex, chordRootIndex);
            case HARMONIC_MINOR:
                return addSteps(HAR_MINOR_INDEX, noteDestIndex, chordRootIndex);
            case MELODIC_MINOR:
                return addSteps(MEL_MINOR_INDEX, noteDestIndex, chordRootIndex);
        }
        return Step.WHOLE;
    }

    /**
     * Adds up the number of half steps between the chord root and the destination note.
     *
     * @param indexScale     the step index (dependent on the scale type)
     * @param noteDestIndex  the note index to reach
     * @param chordRootIndex the chord root index
     * @return the number of half steps between the chord root and the destination note
     */
    private static int addSteps(int[] indexScale, int noteDestIndex, int chordRootIndex) {
        int step = Step.ROOT;

        if (0 == chordRootIndex && 0 == noteDestIndex) {
            step = 0;
        } else {
            if (0 == chordRootIndex) {
                ++chordRootIndex;
            }
            if (noteDestIndex > NUM_SCALE_STEPS) {
                // Walk to the top of the scale, then wrap into the next octave.
                for (int i = chordRootIndex - 1; i < NUM_SCALE_STEPS; ++i) {
                    step += indexScale[i];
                }
                noteDestIndex %= NUM_SCALE_STEPS;
                chordRootIndex = 1;
            }
            for (int i = chordRootIndex - 1; i <= noteDestIndex - 1; ++i) {
                step += indexScale[i];
            }
        }

        return step;
    }

    /**
     * Determines the note within the chromatic scale based on the scale degree of the
     * desired note.
     *
     * @param key         the key
     * @param scaleType   the scale type
     * @param scaleDegree the scale degree of the desired note
     * @return the desired note within the chromatic scale
     */
    public static Note getNoteInScale(Note key, ScaleType scaleType, int scaleDegree) {
        return new Note(CHROMATIC_SCALE.get((findNoteInList(key)
                + findStep(scaleType, scaleDegree, ScaleDegree.TONIC)) % NUM_NOTES));
    }

    /**
     * Determines the note that is a third up from a root note.
     *
     * @param root        the root note
     * @param scaleType   the scale type
     * @param scaleDegree the scale degree of the root note
     * @return the note a third up from the root note
     */
    public static Note getThird(Note root, Scale.ScaleType scaleType, int scaleDegree) {
        int steps = findStep(scaleType, scaleDegree + ScaleDegree.MEDIANT, scaleDegree + 1);

        return new Note(CHROMATIC_SCALE.get((findNoteInList(root) + steps) % NUM_NOTES));
    }

    /**
     * Determines the note that is a fifth up from a root note.
     *
     * @param root        the root note
     * @param scaleType   the scale type
     * @param scaleDegree the scale degree of the root note
     * @return the note a fifth up from the root note
     */
    public static Note getFifth(Note root, Scale.ScaleType scaleType, int scaleDegree) {
        int steps = findStep(scaleType, scaleDegree + ScaleDegree.DOMINANT, scaleDegree + 1);

        return new Note(CHROMATIC_SCALE.get((findNoteInList(root) + steps) % NUM_NOTES));
    }

    /**
     * Determines the note a fifth down from the root note.
     *
     * @param root      the root note
     * @param scaleType the scale type
     * @return the note a fifth down from the root note
     */
    public static Note getRootFromFifth(Note root, Scale.ScaleType scaleType) {
        int steps = 0, num;

        // Sum the first four scale steps of the relevant scale type: the interval of
        // a fifth below the given root.
        for (int i = 3; i >= 0; --i) {
            switch (scaleType) {
                case MAJOR:
                    steps += MAJOR_INDEX[i];
                    break;
                case NATURAL_MINOR:
                    steps += NAT_MINOR_INDEX[i];
                    break;
                case HARMONIC_MINOR:
                    steps += HAR_MINOR_INDEX[i];
                    break;
                case MELODIC_MINOR:
                    steps += MEL_MINOR_INDEX[i];
                    break;
            }
        }

        num = findNoteInList(root) - steps;
        if (num < 0) {
            // Wrap below the bottom of the chromatic list back into range.
            num = NUM_NOTES + num;
        }

        return new Note(CHROMATIC_SCALE.get(num));
    }

    /**
     * Determines the note a minor seventh up from a root note.
     *
     * @param root the note to base the minor seventh off of
     * @return the note a minor seventh up from the root
     */
    public static Note getMinorSeventh(Note root) {
        return new Note(CHROMATIC_SCALE.get(
                (findNoteInList(root) + NUM_STEPS_MINOR_SEVENTH) % NUM_NOTES));
    }

    /**
     * Determines the scale degree based on the key, the scale type, and the note.
     *
     * @param key       the key
     * @param note      the note to get the scale degree of
     * @param scaleType the scale type
     * @return the scale degree
     */
    public static int getScaleDegreeFromNote(Note key, Note note, Scale.ScaleType scaleType) {
        int dist = Scale.findDistBtwNotes(key, note);

        switch (scaleType) {
            case MAJOR:
                return getMajScaleDegreeFromDist(dist);
            case NATURAL_MINOR:
                return getNatMinScaleDegreeFromDist(dist);
            case HARMONIC_MINOR:
                return getHarMinScaleDegreeFromDist(dist);
            case MELODIC_MINOR:
                return getMelMinScaleDegreeFromDist(dist);
            default:
                return ScaleDegree.NON_SCALE_TONE;
        }
    }

    /**
     * Determines the scale degree within a major scale based on the distance between
     * the root note of the scale and another note.
     * // TODO: fix magic consts in these four functs
     *
     * @param dist the distance between the root and another note
     * @return the scale degree
     */
    private static int getMajScaleDegreeFromDist(int dist) {
        switch (dist) {
            case 0:
                return ScaleDegree.TONIC;
            case 2:
                return ScaleDegree.SUPERTONIC;
            case 4:
                return ScaleDegree.MEDIANT;
            case 5:
                return ScaleDegree.SUBDOMINANT;
            case 7:
                return ScaleDegree.DOMINANT;
            case 9:
                return ScaleDegree.SUBMEDIANT;
            case 11:
                return ScaleDegree.SUBTONIC;
            default:
                return ScaleDegree.NON_SCALE_TONE;
        }
    }

    /**
     * Determines the scale degree within a natural minor scale based on the distance
     * between the root note of the scale and another note.
     *
     * @param dist the distance between the root and another note
     * @return the scale degree
     */
    private static int getNatMinScaleDegreeFromDist(int dist) {
        switch (dist) {
            case 0:
                return ScaleDegree.TONIC;
            case 2:
                return ScaleDegree.SUPERTONIC;
            case 3:
                return ScaleDegree.MEDIANT;
            case 5:
                return ScaleDegree.SUBDOMINANT;
            case 7:
                return ScaleDegree.DOMINANT;
            case 8:
                return ScaleDegree.SUBMEDIANT;
            case 10:
                return ScaleDegree.SUBTONIC;
            default:
                return ScaleDegree.NON_SCALE_TONE;
        }
    }

    /**
     * Determines the scale degree within a harmonic minor scale based on the distance
     * between the root note of the scale and another note.
     *
     * @param dist the distance between the root and another note
     * @return the scale degree
     */
    private static int getHarMinScaleDegreeFromDist(int dist) {
        switch (dist) {
            case 0:
                return ScaleDegree.TONIC;
            case 2:
                return ScaleDegree.SUPERTONIC;
            case 3:
                return ScaleDegree.MEDIANT;
            case 5:
                return ScaleDegree.SUBDOMINANT;
            case 7:
                return ScaleDegree.DOMINANT;
            case 8:
                return ScaleDegree.SUBMEDIANT;
            case 11:
                return ScaleDegree.SUBTONIC;
            default:
                return ScaleDegree.NON_SCALE_TONE;
        }
    }

    /**
     * Determines the scale degree within a melodic minor scale based on the distance
     * between the root note of the scale and another note.
     *
     * @param dist the distance between the root and another note
     * @return the scale degree
     */
    private static int getMelMinScaleDegreeFromDist(int dist) {
        switch (dist) {
            case 0:
                return ScaleDegree.TONIC;
            case 2:
                return ScaleDegree.SUPERTONIC;
            case 3:
                return ScaleDegree.MEDIANT;
            case 5:
                return ScaleDegree.SUBDOMINANT;
            case 7:
                return ScaleDegree.DOMINANT;
            case 9:
                return ScaleDegree.SUBMEDIANT;
            case 11:
                return ScaleDegree.SUBTONIC;
            default:
                return ScaleDegree.NON_SCALE_TONE;
        }
    }
}
/* * ----------------------------------------------------------------------- * Copyright 2012 - Alistair Rutherford - www.netthreads.co.uk * ----------------------------------------------------------------------- * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netthreads.libgdx.scene.transition; import aurelienribon.tweenengine.BaseTween; import aurelienribon.tweenengine.TweenCallback; import aurelienribon.tweenengine.TweenEquation; import com.badlogic.gdx.scenes.scene2d.Group; import com.netthreads.libgdx.director.AppInjector; import com.netthreads.libgdx.director.Director; import com.netthreads.libgdx.scene.Scene; /** * Base transition class. * * This class forms basis of scene transition. Draws the incoming and outgoing * scenes. Ensures scene contents positions are noted on entry and reset on * exit. Implements transition "complete" handler to set incoming scene as main * scene. * */ public class TransitionScene extends Scene implements TweenCallback { private boolean complete; private float inX; private float inY; private float outX; private float outY; private Scene inScene; private Scene outScene; private Group inSceneRoot; private Group outSceneRoot; private int durationMillis; private TweenEquation easeEquation; /** * The one and only director. */ private static Director director = AppInjector.getInjector().getInstance(Director.class);; /** * Enter handler makes a note of scene contents position. 
* */ @Override public void enter() { complete = false; // Make note of starting positions. We are going to have to reset these // back when we finish. inX = inSceneRoot.getX(); inY = inSceneRoot.getY(); outX = outSceneRoot.getX(); outY = outSceneRoot.getY(); } /** * Exit handler resets scene contents positions. * */ @Override public void exit() { complete = true; inSceneRoot.setX(inX); inSceneRoot.setY(inY); outSceneRoot.setX(outX); outSceneRoot.setY(outY); } /** * Draw both scenes as we animated contents. * */ @Override public void draw() { // Draw if (!complete) { outScene.draw(); } inScene.draw(); } /** * Keep the incoming and outgoing scene action pipelines running. * */ @Override public void act(float delta) { super.act(delta); // Move inSceneRoot.act(delta); outSceneRoot.act(delta); } /** * Default transition handlers sets inScen to centre-stage when transition * complete. */ @Override public void onEvent(int eventType, BaseTween<?> source) { switch (eventType) { case COMPLETE: director.setScene(inScene); break; default: break; } } /** * Transition complete. * * @return The transition complete handler. 
*/ public boolean isComplete() { return complete; } public float getInX() { return inX; } public void setInX(float inX) { this.inX = inX; } public float getInY() { return inY; } public void setInY(float inY) { this.inY = inY; } public float getOutX() { return outX; } public void setOutX(float outX) { this.outX = outX; } public float getOutY() { return outY; } public void setOutY(float outY) { this.outY = outY; } public Scene getInScene() { return inScene; } public void setInScene(Scene inScene) { this.inScene = inScene; } public Scene getOutScene() { return outScene; } public void setOutScene(Scene outScene) { this.outScene = outScene; } public Group getInSceneRoot() { return inSceneRoot; } public void setInSceneRoot(Group inSceneRoot) { this.inSceneRoot = inSceneRoot; } public Group getOutSceneRoot() { return outSceneRoot; } public void setOutSceneRoot(Group outSceneRoot) { this.outSceneRoot = outSceneRoot; } public void setComplete(boolean complete) { this.complete = complete; } public int getDurationMillis() { return durationMillis; } public void setDurationMillis(int durationMillis) { this.durationMillis = durationMillis; } public TweenEquation getEaseEquation() { return easeEquation; } public void setEaseEquation(TweenEquation easeEquation) { this.easeEquation = easeEquation; } }
/*
 *
 * Copyright 2012-2015 Viant.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 */
package com.sm.store.server;

import com.sm.query.Predicate;
import com.sm.query.Result;
import com.sm.query.utils.QueryException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import voldemort.store.cachestore.Key;
import voldemort.store.cachestore.Value;
import voldemort.utils.Pair;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Stack;
import java.util.concurrent.ConcurrentNavigableMap;

import static com.sm.query.Predicate.Operator.GreaterEQ;

/**
 * Iterates the (key, value) pairs of a RemoteStore that satisfy a stack of
 * predicates. Depending on the predicates and whether the store keeps a sorted
 * view (RemoteScanStore), iteration is either a full table scan, a sorted range
 * scan, or a walk over an explicit list of keys.
 *
 * Parameterized as Iterator&lt;Pair&lt;Key, Value&gt;&gt; (the original used the raw
 * Iterator type; next() already returned Pair&lt;Key, Value&gt;).
 */
public class QueryIterator implements Iterator<Pair<Key, Value>> {
    private static final Log logger = LogFactory.getLog(QueryIterator.class);

    boolean tableScan;
    RemoteStore remoteStore;
    Stack<Predicate> predicateStack = new Stack<Predicate>();
    // Current underlying key iterator; replaced as predicates are popped.
    Iterator<Key> iterator;
    boolean end = false;
    Result.Type keyType;
    // True only when the store keeps a sorted view that supports range scans.
    boolean sorted;

    /**
     * @param predicateStack predicates still to be applied (consumed lazily)
     * @param tableScan      force a full table scan
     * @param remoteStore    the store to iterate; must not be null
     * @throws QueryException when remoteStore is null
     */
    public QueryIterator(Stack<Predicate> predicateStack, boolean tableScan, RemoteStore remoteStore) {
        this.predicateStack = predicateStack;
        this.tableScan = tableScan;
        this.remoteStore = remoteStore;
        if (remoteStore == null)
            throw new QueryException("remoteStore is null");
        init();
    }

    /**
     * Decides how iteration proceeds: full scan, range scan, or key-list walk.
     */
    private void init() {
        if (remoteStore.getStore().getMap().keySet().isEmpty()) {
            logger.info("remoteStore is empty");
            end = true;
        } else {
            keyType = findKeyType();
            sorted = remoteStore instanceof RemoteScanStore;
        }
        if (predicateStack.isEmpty()) {
            if (tableScan) {
                iterator = remoteStore.getStore().getMap().keySet().iterator();
            } else {
                // No predicates and no scan requested: nothing to iterate.
                end = !tableScan;
            }
        } else {
            if (needTableScan()) {
                logger.info("needTableScan true for " + remoteStore.getStoreName());
                tableScan = true;
                iterator = remoteStore.getStore().getMap().keySet().iterator();
            } else {
                // Predicates can be satisfied by targeted iterators.
                tableScan = false;
                iterator = findIterator(predicateStack.pop());
            }
        }
    }

    /**
     * Returns true when the predicates cannot be answered without scanning every
     * key (unsorted store with any operator other than Or/In/Equal).
     */
    private boolean needTableScan() {
        // Explicit request always wins.
        if (tableScan)
            return true;
        else {
            if (!sorted) {
                Stack<Predicate> stack = (Stack<Predicate>) predicateStack.clone();
                while (!stack.empty()) {
                    if (traverse(stack.pop()))
                        return true;
                }
            }
            return false;
        }
    }

    /**
     * Returns true when the predicate tree contains an operator that needs a scan
     * on an unsorted store (anything other than Or, In, Equal).
     */
    private boolean traverse(Predicate predicate) {
        switch (predicate.getOperator()) {
            case Or:
                if (traverse(predicate.left()))
                    return true;
                if (traverse(predicate.right()))
                    return true;
                return false;
            case In:
            case Equal:
                return false;
            default:
                return true;
        }
    }

    /**
     * Derives the Result key type from the first key in the store.
     */
    private Result.Type findKeyType() {
        Key key = remoteStore.getStore().getMap().keySet().iterator().next();
        switch (key.getType()) {
            case STRING:
                return Result.Type.STRING;
            case INT:
                return Result.Type.INT;
            case LONG:
                return Result.Type.LONG;
            case BYTEARY:
            case BARRAY:
                return Result.Type.ARRAY;
            default:
                return Result.Type.OBJECT;
        }
    }

    /**
     * Builds the key iterator for a single predicate; for Or the right branch is
     * deferred onto the stack and the left branch resolved first.
     *
     * @throws QueryException when predicate is null or its operator is unsupported
     */
    private Iterator<Key> findIterator(Predicate predicate) {
        if (predicate != null) {
            switch (predicate.getOperator()) {
                case Or:
                    predicateStack.push(predicate.right());
                    return findIterator(predicate.left());
                case In:
                    return buildInIterator(predicate.right().getValue());
                case Range:
                    return buildRangeIteration(predicate.left(), predicate.right());
                case Greater:
                case Less:
                case GreaterEQ:
                case LessEQ:
                    return buildRest(predicate);
                case NotEqual:
                    // NotEqual can only be answered by scanning everything.
                    tableScan = true;
                    return remoteStore.getStore().getMap().keySet().iterator();
                case Equal:
                    return buildInIterator(predicate.right().getValue());
                default:
                    throw new QueryException("wrong keyType of operator " + predicate.getOperator());
            }
        } else
            throw new QueryException("predicate is null");
    }

    @Override
    public boolean hasNext() {
        if (end)
            return false;
        if (iterator.hasNext())
            return true;
        else {
            // Current iterator exhausted: move on to the next deferred predicate.
            if (predicateStack.empty())
                return false;
            else {
                if (tableScan) {
                    logger.info("tableScan " + tableScan + " predicateStack size " + predicateStack.size());
                    return false;
                } else {
                    iterator = findIterator(predicateStack.pop());
                    return iterator.hasNext();
                }
            }
        }
    }

    @Override
    public Pair<Key, Value> next() {
        Key key = iterator.next();
        Value value = remoteStore.get(key);
        return new Pair<Key, Value>(key, value);
    }

    @Override
    public void remove() {
        // Intentionally a no-op: removal through the query iterator is not supported.
    }

    /**
     * Builds an iterator over the tab-separated key list of an In/Equal predicate.
     *
     * @throws QueryException when the value yields no keys
     */
    Iterator<Key> buildInIterator(String value) {
        String[] list = value.split("\t");
        if (list.length == 0)
            throw new QueryException("invalidate value " + value);
        List<Key> listKey = new ArrayList<Key>();
        for (String each : list) {
            listKey.add(createKey(each, keyType));
        }
        return listKey.iterator();
    }

    /**
     * Converts a string token to a store Key of the given type.
     *
     * @throws QueryException for unsupported key types
     */
    private Key createKey(String each, Result.Type type) {
        switch (type) {
            case STRING:
                return Key.createKey(each);
            case INT:
            case INTS:
                return Key.createKey(Integer.valueOf(each));
            case LONG:
            case LONGS:
                return Key.createKey(Long.valueOf(each));
            case ARRAY:
                return Key.createKey(each.getBytes());
            default:
                throw new QueryException("create Key wrong keyType " + type);
        }
    }

    /**
     * Range scan between the two bound predicates; falls back to a table scan on
     * an unsorted store.
     */
    private Iterator<Key> buildRangeIteration(Predicate left, Predicate right) {
        if (!sorted) {
            tableScan = true;
            return remoteStore.getStore().getMap().keySet().iterator();
        } else {
            // NOTE(review): the flag is derived from the left bound only but applied
            // to both ends of the subMap — confirm both bounds are meant to share it.
            boolean inclusive = left.getOperator() == GreaterEQ
                    || left.getOperator() == Predicate.Operator.LessEQ;
            ConcurrentNavigableMap sortedStore =
                    (ConcurrentNavigableMap) ((RemoteScanStore) remoteStore).getSortedStore().getMap();
            if (left.getOperator() == Predicate.Operator.GreaterEQ
                    || left.getOperator() == Predicate.Operator.Greater) {
                Key from = createKey(left.right().getValue(), keyType);
                Key to = createKey(right.right().getValue(), keyType);
                return sortedStore.subMap(from, inclusive, to, inclusive).keySet().iterator();
            } else {
                Key from = createKey(right.right().getValue(), keyType);
                Key to = createKey(left.right().getValue(), keyType);
                return sortedStore.subMap(from, inclusive, to, inclusive).keySet().iterator();
            }
        }
    }

    /**
     * One-sided comparison (Greater/Less/GreaterEQ/LessEQ) via tailMap/headMap;
     * falls back to a table scan on an unsorted store.
     */
    private Iterator<Key> buildRest(Predicate predicate) {
        if (!sorted) {
            tableScan = true;
            return remoteStore.getStore().getMap().keySet().iterator();
        } else {
            boolean inclusive = predicate.getOperator() == GreaterEQ
                    || predicate.getOperator() == Predicate.Operator.LessEQ;
            ConcurrentNavigableMap sortedStore =
                    (ConcurrentNavigableMap) ((RemoteScanStore) remoteStore).getSortedStore().getMap();
            if (predicate.getOperator() == Predicate.Operator.GreaterEQ
                    || predicate.getOperator() == Predicate.Operator.Greater) {
                Key from = createKey(predicate.right().getValue(), keyType);
                return sortedStore.tailMap(from, inclusive).keySet().iterator();
            } else {
                Key from = createKey(predicate.right().getValue(), keyType);
                return sortedStore.headMap(from, inclusive).keySet().iterator();
            }
        }
    }

    public boolean isTableScan() {
        return tableScan;
    }

    public RemoteStore getRemoteStore() {
        return remoteStore;
    }

    public Stack<Predicate> getPredicateStack() {
        return predicateStack;
    }

    public Iterator<Key> getIterator() {
        return iterator;
    }

    public boolean isEnd() {
        return end;
    }

    public Result.Type getKeyType() {
        return keyType;
    }

    public boolean isSorted() {
        return sorted;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.tests.integration.ssl; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Map; import javax.net.ssl.SSLPeerUnverifiedException; import org.apache.activemq.artemis.api.core.ActiveMQException; import org.apache.activemq.artemis.api.core.ActiveMQNotConnectedException; import org.apache.activemq.artemis.api.core.Interceptor; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.TransportConfiguration; import org.apache.activemq.artemis.api.core.client.ActiveMQClient; import org.apache.activemq.artemis.api.core.client.ClientConsumer; import org.apache.activemq.artemis.api.core.client.ClientMessage; import org.apache.activemq.artemis.api.core.client.ClientProducer; import org.apache.activemq.artemis.api.core.client.ClientSession; import org.apache.activemq.artemis.api.core.client.ClientSessionFactory; import org.apache.activemq.artemis.api.core.client.ServerLocator; import org.apache.activemq.artemis.core.config.impl.ConfigurationImpl; import org.apache.activemq.artemis.core.protocol.core.Packet; import org.apache.activemq.artemis.core.protocol.core.impl.PacketImpl; import 
org.apache.activemq.artemis.core.remoting.impl.netty.NettyAcceptor; import org.apache.activemq.artemis.core.remoting.impl.netty.NettyConnection; import org.apache.activemq.artemis.core.remoting.impl.netty.TransportConstants; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection; import org.apache.activemq.artemis.tests.util.ActiveMQTestBase; import org.apache.activemq.artemis.utils.RandomUtil; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import io.netty.handler.ssl.SslHandler; @RunWith(value = Parameterized.class) public class CoreClientOverTwoWaySSLTest extends ActiveMQTestBase { @Parameterized.Parameters(name = "storeType={0}") public static Collection getParameters() { return Arrays.asList(new Object[][]{{"JCEKS"}, {"JKS"}}); } public CoreClientOverTwoWaySSLTest(String storeType) { this.storeType = storeType; SERVER_SIDE_KEYSTORE = "server-side-keystore." + storeType.toLowerCase(); SERVER_SIDE_TRUSTSTORE = "server-side-truststore." + storeType.toLowerCase(); CLIENT_SIDE_TRUSTSTORE = "client-side-truststore." + storeType.toLowerCase(); CLIENT_SIDE_KEYSTORE = "client-side-keystore." 
+ storeType.toLowerCase(); } public static final SimpleString QUEUE = new SimpleString("QueueOverSSL"); /** * These artifacts are required for testing 2-way SSL in addition to the artifacts for 1-way SSL from {@link CoreClientOverOneWaySSLTest} * * Commands to create the JKS artifacts: * keytool -genkey -keystore client-side-keystore.jks -storepass secureexample -keypass secureexample -dname "CN=ActiveMQ Artemis Client, OU=Artemis, O=ActiveMQ, L=AMQ, S=AMQ, C=AMQ" -keyalg RSA * keytool -export -keystore client-side-keystore.jks -file activemq-jks.cer -storepass secureexample * keytool -import -keystore server-side-truststore.jks -file activemq-jks.cer -storepass secureexample -keypass secureexample -noprompt * * keytool -genkey -keystore verified-client-side-keystore.jks -storepass secureexample -keypass secureexample -dname "CN=localhost, OU=Artemis, O=ActiveMQ, L=AMQ, S=AMQ, C=AMQ" -keyalg RSA * keytool -export -keystore verified-client-side-keystore.jks -file activemq-jks.cer -storepass secureexample * keytool -import -keystore verified-server-side-truststore.jks -file activemq-jks.cer -storepass secureexample -keypass secureexample -noprompt * * Commands to create the JCEKS artifacts: * keytool -genkey -keystore client-side-keystore.jceks -storetype JCEKS -storepass secureexample -keypass secureexample -dname "CN=ActiveMQ Artemis Client, OU=Artemis, O=ActiveMQ, L=AMQ, S=AMQ, C=AMQ" -keyalg RSA * keytool -export -keystore client-side-keystore.jceks -file activemq-jceks.cer -storetype jceks -storepass secureexample * keytool -import -keystore server-side-truststore.jceks -storetype JCEKS -file activemq-jceks.cer -storepass secureexample -keypass secureexample -noprompt * * keytool -genkey -keystore verified-client-side-keystore.jceks -storetype JCEKS -storepass secureexample -keypass secureexample -dname "CN=localhost, OU=Artemis, O=ActiveMQ, L=AMQ, S=AMQ, C=AMQ" -keyalg RSA * keytool -export -keystore verified-client-side-keystore.jceks -file activemq-jceks.cer 
-storetype jceks -storepass secureexample * keytool -import -keystore verified-server-side-truststore.jceks -storetype JCEKS -file activemq-jceks.cer -storepass secureexample -keypass secureexample -noprompt */ private String storeType; private String SERVER_SIDE_KEYSTORE; private String SERVER_SIDE_TRUSTSTORE; private String CLIENT_SIDE_TRUSTSTORE; private String CLIENT_SIDE_KEYSTORE; private final String PASSWORD = "secureexample"; private ActiveMQServer server; private TransportConfiguration tc; private class MyInterceptor implements Interceptor { @Override public boolean intercept(final Packet packet, final RemotingConnection connection) throws ActiveMQException { if (packet.getType() == PacketImpl.SESS_SEND) { try { if (connection.getTransportConnection() instanceof NettyConnection) { System.out.println("Passed through...."); NettyConnection nettyConnection = (NettyConnection) connection.getTransportConnection(); SslHandler sslHandler = (SslHandler) nettyConnection.getChannel().pipeline().get("ssl"); Assert.assertNotNull(sslHandler); Assert.assertNotNull(sslHandler.engine().getSession()); Assert.assertNotNull(sslHandler.engine().getSession().getPeerCertificateChain()); } } catch (SSLPeerUnverifiedException e) { Assert.fail(e.getMessage()); } } return true; } } @Test public void testTwoWaySSL() throws Exception { String text = RandomUtil.randomString(); tc.getParams().put(TransportConstants.SSL_ENABLED_PROP_NAME, true); tc.getParams().put(TransportConstants.TRUSTSTORE_PROVIDER_PROP_NAME, storeType); tc.getParams().put(TransportConstants.KEYSTORE_PROVIDER_PROP_NAME, storeType); tc.getParams().put(TransportConstants.TRUSTSTORE_PATH_PROP_NAME, CLIENT_SIDE_TRUSTSTORE); tc.getParams().put(TransportConstants.TRUSTSTORE_PASSWORD_PROP_NAME, PASSWORD); tc.getParams().put(TransportConstants.KEYSTORE_PATH_PROP_NAME, CLIENT_SIDE_KEYSTORE); tc.getParams().put(TransportConstants.KEYSTORE_PASSWORD_PROP_NAME, PASSWORD); server.getRemotingService().addIncomingInterceptor(new 
MyInterceptor());
      // Tail of the preceding test (its start lies above this chunk): round-trip a
      // random text message over the two-way SSL connection and verify delivery.
      ServerLocator locator = addServerLocator(ActiveMQClient.createServerLocatorWithoutHA(tc));
      ClientSessionFactory sf = createSessionFactory(locator);
      ClientSession session = sf.createSession(false, true, true);
      session.createQueue(CoreClientOverTwoWaySSLTest.QUEUE, CoreClientOverTwoWaySSLTest.QUEUE, false);
      ClientProducer producer = session.createProducer(CoreClientOverTwoWaySSLTest.QUEUE);
      ClientMessage message = createTextMessage(session, text);
      producer.send(message);
      ClientConsumer consumer = session.createConsumer(CoreClientOverTwoWaySSLTest.QUEUE);
      session.start();
      ClientMessage m = consumer.receive(1000);
      Assert.assertNotNull(m);
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }

   /**
    * Host verification enabled on the acceptor: the client presents the
    * "verified-" keystore (per the negative test below, the plain keystore's
    * certificate CN does not match the host), so the connection succeeds and a
    * message round-trips.
    */
   @Test
   public void testTwoWaySSLVerifyClientHost() throws Exception {
      NettyAcceptor acceptor = (NettyAcceptor) server.getRemotingService().getAcceptor("nettySSL");
      acceptor.getConfiguration().put(TransportConstants.VERIFY_HOST_PROP_NAME, true);
      acceptor.getConfiguration().put(TransportConstants.TRUSTSTORE_PATH_PROP_NAME, "verified-" + SERVER_SIDE_TRUSTSTORE);
      // Restart the remoting service so the acceptor picks up the changed configuration
      server.getRemotingService().stop(false);
      server.getRemotingService().start();
      server.getRemotingService().startAcceptors();

      String text = RandomUtil.randomString();

      tc.getParams().put(TransportConstants.SSL_ENABLED_PROP_NAME, true);
      tc.getParams().put(TransportConstants.TRUSTSTORE_PROVIDER_PROP_NAME, storeType);
      tc.getParams().put(TransportConstants.KEYSTORE_PROVIDER_PROP_NAME, storeType);
      tc.getParams().put(TransportConstants.TRUSTSTORE_PATH_PROP_NAME, CLIENT_SIDE_TRUSTSTORE);
      tc.getParams().put(TransportConstants.TRUSTSTORE_PASSWORD_PROP_NAME, PASSWORD);
      tc.getParams().put(TransportConstants.KEYSTORE_PATH_PROP_NAME, "verified-" + CLIENT_SIDE_KEYSTORE);
      tc.getParams().put(TransportConstants.KEYSTORE_PASSWORD_PROP_NAME, PASSWORD);

      server.getRemotingService().addIncomingInterceptor(new MyInterceptor());

      ServerLocator locator = addServerLocator(ActiveMQClient.createServerLocatorWithoutHA(tc));
      ClientSessionFactory sf = createSessionFactory(locator);
      ClientSession session = sf.createSession(false, true, true);
      session.createQueue(CoreClientOverTwoWaySSLTest.QUEUE, CoreClientOverTwoWaySSLTest.QUEUE, false);
      ClientProducer producer = session.createProducer(CoreClientOverTwoWaySSLTest.QUEUE);
      ClientMessage message = createTextMessage(session, text);
      producer.send(message);
      ClientConsumer consumer = session.createConsumer(CoreClientOverTwoWaySSLTest.QUEUE);
      session.start();
      ClientMessage m = consumer.receive(1000);
      Assert.assertNotNull(m);
      Assert.assertEquals(text, m.getBodyBuffer().readString());
   }

   /**
    * Host verification enabled but the client uses the plain (non-"verified-")
    * keystore, whose certificate CN does not match the host name, so session
    * creation must fail.
    */
   @Test
   public void testTwoWaySSLVerifyClientHostNegative() throws Exception {
      NettyAcceptor acceptor = (NettyAcceptor) server.getRemotingService().getAcceptor("nettySSL");
      acceptor.getConfiguration().put(TransportConstants.VERIFY_HOST_PROP_NAME, true);
      server.getRemotingService().stop(false);
      server.getRemotingService().start();
      server.getRemotingService().startAcceptors();

      tc.getParams().put(TransportConstants.SSL_ENABLED_PROP_NAME, true);
      tc.getParams().put(TransportConstants.TRUSTSTORE_PROVIDER_PROP_NAME, storeType);
      tc.getParams().put(TransportConstants.KEYSTORE_PROVIDER_PROP_NAME, storeType);
      tc.getParams().put(TransportConstants.TRUSTSTORE_PATH_PROP_NAME, CLIENT_SIDE_TRUSTSTORE);
      tc.getParams().put(TransportConstants.TRUSTSTORE_PASSWORD_PROP_NAME, PASSWORD);
      tc.getParams().put(TransportConstants.KEYSTORE_PATH_PROP_NAME, CLIENT_SIDE_KEYSTORE);
      tc.getParams().put(TransportConstants.KEYSTORE_PASSWORD_PROP_NAME, PASSWORD);

      server.getRemotingService().addIncomingInterceptor(new MyInterceptor());

      ServerLocator locator = addServerLocator(ActiveMQClient.createServerLocatorWithoutHA(tc));
      try {
         ClientSessionFactory sf = createSessionFactory(locator);
         fail("Creating a session here should fail due to a certificate with a CN that doesn't match the host name.");
      } catch (Exception e) {
         // ignore
      }
   }

   /**
    * Client authentication required by the acceptor; the client sets trust-all,
    * so the connection succeeds even with no client-side trust store configured.
    */
   @Test
   public void testTwoWaySSLVerifyClientTrustAllTrue() throws Exception {
      NettyAcceptor acceptor = (NettyAcceptor) server.getRemotingService().getAcceptor("nettySSL");
      acceptor.getConfiguration().put(TransportConstants.NEED_CLIENT_AUTH_PROP_NAME, true);
      server.getRemotingService().stop(false);
      server.getRemotingService().start();
      server.getRemotingService().startAcceptors();

      //Set trust all so this should work even with no trust store set
      tc.getParams().put(TransportConstants.SSL_ENABLED_PROP_NAME, true);
      tc.getParams().put(TransportConstants.TRUST_ALL_PROP_NAME, true);
      tc.getParams().put(TransportConstants.KEYSTORE_PROVIDER_PROP_NAME, storeType);
      tc.getParams().put(TransportConstants.KEYSTORE_PATH_PROP_NAME, CLIENT_SIDE_KEYSTORE);
      tc.getParams().put(TransportConstants.KEYSTORE_PASSWORD_PROP_NAME, PASSWORD);

      server.getRemotingService().addIncomingInterceptor(new MyInterceptor());

      ServerLocator locator = addServerLocator(ActiveMQClient.createServerLocatorWithoutHA(tc));
      ClientSessionFactory sf = createSessionFactory(locator);
      sf.close();
   }

   /**
    * Same trust-all scenario as above, but the client is configured entirely
    * through a "tcp://host:port?..." connection URI instead of a
    * TransportConfiguration parameter map.
    */
   @Test
   public void testTwoWaySSLVerifyClientTrustAllTrueByURI() throws Exception {
      NettyAcceptor acceptor = (NettyAcceptor) server.getRemotingService().getAcceptor("nettySSL");
      acceptor.getConfiguration().put(TransportConstants.NEED_CLIENT_AUTH_PROP_NAME, true);
      server.getRemotingService().stop(false);
      server.getRemotingService().start();
      server.getRemotingService().startAcceptors();

      //Set trust all so this should work even with no trust store set
      StringBuilder uri = new StringBuilder("tcp://" + tc.getParams().get(TransportConstants.HOST_PROP_NAME).toString()
            + ":" + tc.getParams().get(TransportConstants.PORT_PROP_NAME).toString());
      uri.append("?").append(TransportConstants.SSL_ENABLED_PROP_NAME).append("=true");
      uri.append("&").append(TransportConstants.TRUST_ALL_PROP_NAME).append("=true");
      uri.append("&").append(TransportConstants.KEYSTORE_PROVIDER_PROP_NAME).append("=").append(storeType);
      uri.append("&").append(TransportConstants.KEYSTORE_PATH_PROP_NAME).append("=").append(CLIENT_SIDE_KEYSTORE);
      uri.append("&").append(TransportConstants.KEYSTORE_PASSWORD_PROP_NAME).append("=").append(PASSWORD);

      server.getRemotingService().addIncomingInterceptor(new MyInterceptor());

      ServerLocator locator = addServerLocator(ActiveMQClient.createServerLocator(uri.toString()));
      ClientSessionFactory sf = createSessionFactory(locator);
      sf.close();
   }

   /**
    * Client authentication required and trust-all left at its default (false):
    * with no client trust store configured, session creation must fail.
    */
   @Test
   public void testTwoWaySSLVerifyClientTrustAllFalse() throws Exception {
      NettyAcceptor acceptor = (NettyAcceptor) server.getRemotingService().getAcceptor("nettySSL");
      acceptor.getConfiguration().put(TransportConstants.NEED_CLIENT_AUTH_PROP_NAME, true);
      server.getRemotingService().stop(false);
      server.getRemotingService().start();
      server.getRemotingService().startAcceptors();

      //Trust all defaults to false so this should fail with no trust store set
      tc.getParams().put(TransportConstants.SSL_ENABLED_PROP_NAME, true);
      tc.getParams().put(TransportConstants.KEYSTORE_PROVIDER_PROP_NAME, storeType);
      tc.getParams().put(TransportConstants.KEYSTORE_PATH_PROP_NAME, CLIENT_SIDE_KEYSTORE);
      tc.getParams().put(TransportConstants.KEYSTORE_PASSWORD_PROP_NAME, PASSWORD);

      server.getRemotingService().addIncomingInterceptor(new MyInterceptor());

      ServerLocator locator = addServerLocator(ActiveMQClient.createServerLocatorWithoutHA(tc));
      try {
         ClientSessionFactory sf = createSessionFactory(locator);
         fail("Creating a session here should fail due to no trust store being set");
      } catch (Exception e) {
         // ignore
      }
   }

   /**
    * The server requires client authentication but the client configures no
    * keystore at all: the connection attempt must fail with
    * ActiveMQNotConnectedException (any other ActiveMQException is a test
    * failure).
    */
   @Test
   public void testTwoWaySSLWithoutClientKeyStore() throws Exception {
      tc.getParams().put(TransportConstants.SSL_ENABLED_PROP_NAME, true);
      tc.getParams().put(TransportConstants.TRUSTSTORE_PROVIDER_PROP_NAME, storeType);
      tc.getParams().put(TransportConstants.TRUSTSTORE_PATH_PROP_NAME, CLIENT_SIDE_TRUSTSTORE);
      tc.getParams().put(TransportConstants.TRUSTSTORE_PASSWORD_PROP_NAME, PASSWORD);

      ServerLocator locator = addServerLocator(ActiveMQClient.createServerLocatorWithoutHA(tc));
      try {
         createSessionFactory(locator);
         Assert.fail();
      } catch (ActiveMQNotConnectedException se) {
         //ok
      } catch (ActiveMQException e) {
         Assert.fail("Invalid Exception type:" + e.getType());
      }
   }

   // Package protected ---------------------------------------------

   /**
    * Starts an embedded server with a "nettySSL" acceptor configured for
    * two-way SSL (server key store + trust store, NEED_CLIENT_AUTH=true) and
    * prepares the client-side connector configuration {@code tc} used by the
    * tests above.
    */
   @Override
   @Before
   public void setUp() throws Exception {
      super.setUp();
      Map<String, Object> params = new HashMap<>();
      params.put(TransportConstants.SSL_ENABLED_PROP_NAME, true);
      params.put(TransportConstants.KEYSTORE_PATH_PROP_NAME, SERVER_SIDE_KEYSTORE);
      params.put(TransportConstants.KEYSTORE_PASSWORD_PROP_NAME, PASSWORD);
      params.put(TransportConstants.TRUSTSTORE_PATH_PROP_NAME, SERVER_SIDE_TRUSTSTORE);
      params.put(TransportConstants.TRUSTSTORE_PASSWORD_PROP_NAME, PASSWORD);
      params.put(TransportConstants.TRUSTSTORE_PROVIDER_PROP_NAME, storeType);
      params.put(TransportConstants.KEYSTORE_PROVIDER_PROP_NAME, storeType);
      params.put(TransportConstants.NEED_CLIENT_AUTH_PROP_NAME, true);
      ConfigurationImpl config = createBasicConfig().addAcceptorConfiguration(new TransportConfiguration(NETTY_ACCEPTOR_FACTORY, params, "nettySSL"));
      server = createServer(false, config);
      server.start();
      waitForServerToStart(server);
      tc = new TransportConfiguration(NETTY_CONNECTOR_FACTORY);
   }
}
package water.rapids.ast.prims.assign;

import water.DKV;
import water.H2O;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.parser.BufferedString;
import water.rapids.*;
import water.rapids.ast.AstParameter;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;
import water.rapids.ast.params.AstNum;
import water.rapids.ast.params.AstNumList;
import water.rapids.ast.prims.mungers.AstColSlice;
import water.rapids.vals.ValFrame;

import java.util.Arrays;

/**
 * Rectangular assign into a row and column slice.  The destination must
 * already exist.  The output is conceptually a new copy of the data, with a
 * fresh Frame.  Copy-On-Write optimizations lower the cost to be proportional
 * to the over-written sections.
 */
public class AstRectangleAssign extends AstPrimitive {
  @Override
  public String[] args() {
    return new String[]{"dst", "src", "col_expr", "row_expr"};
  }

  @Override
  public int nargs() {
    return 5;
  } // (:= dst src col_expr row_expr)

  @Override
  public String str() {
    return ":=";
  }

  /**
   * Executes the assignment: resolves the column selection, wraps {@code dst}
   * in a defensive Frame copy, then dispatches to a scalar/frame assignment
   * helper based on the source value type and the row-selection form
   * (explicit number list vs. boolean predicate frame).
   */
  @Override
  public ValFrame apply(Env env, Env.StackHelp stk, AstRoot[] asts) {
    Frame dst = stk.track(asts[1].exec(env)).getFrame();
    Val vsrc = stk.track(asts[2].exec(env));
    AstParameter col_list = (AstParameter) asts[3];
    // Column selection
    AstNumList cols_numlist = new AstNumList(col_list.columns(dst.names()));
    // Special for AstAssign: "empty" really means "all"
    if (cols_numlist.isEmpty()) cols_numlist = new AstNumList(0, dst.numCols());
    // Allow R-like number list expansion: negative column numbers mean exclusion
    int[] cols = AstColSlice.col_select(dst.names(), cols_numlist);

    // Any COW optimized path changes Vecs in dst._vecs, and so needs a
    // defensive copy.  Any update-in-place path updates Chunks instead of
    // dst._vecs, and does not need a defensive copy.  To make life easier,
    // just make the copy now.
    dst = new Frame(dst._names, dst.vecs().clone());

    // Assign over the column slice
    if (asts[4] instanceof AstNum || asts[4] instanceof AstNumList) { // Explicitly named row assignment
      AstNumList rows = (asts[4] instanceof AstNum) ? new AstNumList(((AstNum) asts[4]).getNum()) : ((AstNumList) asts[4]);
      if (rows.isEmpty()) rows = new AstNumList(0, dst.numRows()); // Empty rows is really: all rows
      switch (vsrc.type()) {
        case Val.NUM:
          assign_frame_scalar(dst, cols, rows, vsrc.getNum(), env._ses);
          break;
        case Val.STR:
          assign_frame_scalar(dst, cols, rows, vsrc.getStr(), env._ses);
          break;
        case Val.FRM:
          assign_frame_frame(dst, cols, rows, vsrc.getFrame(), env._ses);
          break;
        default:
          throw new IllegalArgumentException("Source must be a Frame or Number, but found a " + vsrc.getClass());
      }
    } else { // Boolean assignment selection?
      Frame rows = stk.track(asts[4].exec(env)).getFrame();
      switch (vsrc.type()) {
        case Val.NUM:
          assign_frame_scalar(dst, cols, rows, vsrc.getNum(), env._ses);
          break;
        case Val.STR:
          assign_frame_scalar(dst, cols, rows, vsrc.getStr(), env._ses);
          break;
        case Val.FRM:
          throw H2O.unimpl();
        default:
          throw new IllegalArgumentException("Source must be a Frame or Number, but found a " + vsrc.getClass());
      }
    }
    return new ValFrame(dst);
  }

  /**
   * Rectangular array copy from src into dst over an explicit row number-list.
   * Requires matching column counts, matching column types, and (for
   * categoricals) matching domains.
   */
  private void assign_frame_frame(Frame dst, int[] cols, AstNumList rows, Frame src, Session ses) {
    // Sanity check
    if (cols.length != src.numCols())
      throw new IllegalArgumentException("Source and destination frames must have the same count of columns");
    long nrows = rows.cnt();
    if (src.numRows() != nrows)
      throw new IllegalArgumentException("Requires same count of rows in the number-list (" + nrows + ") as in the source (" + src.numRows() + ")");

    // Whole-column assignment?  Directly reuse columns: Copy-On-Write
    // optimization happens here on the apply() exit.
    if (dst.numRows() == nrows && rows.isDense()) {
      for (int i = 0; i < cols.length; i++)
        dst.replace(cols[i], src.vecs()[i]);
      if (dst._key != null) DKV.put(dst);
      return;
    }

    // Partial update; needs to preserve type, and may need to copy to support
    // copy-on-write
    Vec[] dvecs = dst.vecs();
    final Vec[] svecs = src.vecs();
    for (int col = 0; col < cols.length; col++) {
      int dtype = dvecs[cols[col]].get_type();
      if (dtype != svecs[col].get_type())
        throw new IllegalArgumentException("Columns must be the same type; " +
            "column " + col + ", \'" + dst._names[cols[col]] + "\', is of type " + dvecs[cols[col]].get_type_str() +
            " and the source is " + svecs[col].get_type_str());
      if ((dtype == Vec.T_CAT) && (! Arrays.equals(dvecs[cols[col]].domain(), svecs[col].domain())))
        throw new IllegalArgumentException("Cannot assign to a categorical column with a different domain; " +
            "source column " + src._names[col] + ", target column " + dst._names[cols[col]]);
    }

    // Frame fill
    // Handle fast small case: do the updates single-threaded
    if (nrows <= 1 || (cols.length * nrows) <= 1000) { // Go parallel for more than 1000 random updates
      // Copy dst columns as-needed to allow update-in-place
      dvecs = ses.copyOnWrite(dst, cols);
      // Update dst columns
      long[] rownums = rows.expand8(); // Just these rows
      for (int col = 0; col < svecs.length; col++)
        if (svecs[col].get_type() == Vec.T_STR) {
          BufferedString bStr = new BufferedString();
          for (int ridx = 0; ridx < rownums.length; ridx++) {
            BufferedString s = svecs[col].atStr(bStr, ridx);
            dvecs[cols[col]].set(rownums[ridx], s != null ? s.toString() : null);
          }
        } else {
          for (int ridx = 0; ridx < rownums.length; ridx++)
            dvecs[cols[col]].set(rownums[ridx], svecs[col].at(ridx));
        }
      return;
    }
    // Handle large case: distributed MRTask over just the selected columns
    Vec[] vecs = ses.copyOnWrite(dst, cols);
    Vec[] vecs2 = new Vec[cols.length]; // Just the selected columns get updated
    for (int i = 0; i < cols.length; i++)
      vecs2[i] = vecs[cols[i]];
    rows.sort(); // Side-effect internal sort; needed for fast row lookup
    new AssignFrameFrameTask(rows, svecs).doAll(vecs2);
  }

  /** Distributed task performing the frame-into-frame rectangular copy. */
  private static class AssignFrameFrameTask extends RowSliceTask {
    private Vec[] _svecs;

    private AssignFrameFrameTask(AstNumList rows, Vec[] svecs) {
      super(rows);
      _svecs = svecs;
    }

    @Override
    void mapChunkSlice(Chunk[] cs, int chkOffset) {
      long start = cs[0].start();
      Chunk[] scs = null;
      for (int i = chkOffset; i < cs[0]._len; ++i) {
        long idx = _rows.index(start + i);
        if (idx < 0) continue;
        // Lazily (re)fetch the source chunks when idx falls outside the chunks
        // currently cached in scs.  (Fix: the old test `scs[0].start() < idx`
        // refetched on almost every row since rows are visited in ascending
        // order, defeating the cache; elem2ChunkIdx returns the same chunk
        // either way, so this change is behavior-preserving.)
        if ((scs == null) || (idx < scs[0].start()) || (idx >= scs[0].start() + scs[0].len())) {
          int sChkIdx = _svecs[0].elem2ChunkIdx(idx);
          scs = new Chunk[_svecs.length];
          for (int j = 0; j < _svecs.length; j++) {
            scs[j] = _svecs[j].chunkForChunkIdx(sChkIdx);
          }
        }
        BufferedString bStr = new BufferedString();
        int si = (int) (idx - scs[0].start());
        for (int j = 0; j < cs.length; j++) {
          Chunk chk = cs[j];
          Chunk schk = scs[j];
          if (_svecs[j].get_type() == Vec.T_STR) {
            BufferedString s = schk.atStr(bStr, si);
            chk.set(i, s != null ? s.toString() : null);
            // NOTE(review): the read-back below re-writes null when the first
            // set did not take (read-back non-null while s == null); looks like
            // a workaround for a Chunk implementation detail -- confirm before
            // simplifying or removing.
            BufferedString bss = chk.atStr(new BufferedString(), i);
            if (s == null && bss != null) {
              chk.set(i, s != null ? s.toString() : null);
            }
          } else {
            chk.set(i, schk.atd(si));
          }
        }
      }
    }
  }

  /** Assign a NON-STRING SCALAR over some dst rows; optimize for all rows. */
  private void assign_frame_scalar(Frame dst, int[] cols, AstNumList rows, double src, Session ses) {
    // Handle fast small case
    long nrows = rows.cnt();
    if (nrows == 1) {
      Vec[] vecs = ses.copyOnWrite(dst, cols);
      long drow = (long) rows._bases[0];
      for (int col : cols)
        vecs[col].set(drow, src);
      return;
    }

    // Bulk assign constant (probably zero) over a frame.  Directly set
    // columns: Copy-On-Write optimization happens here on the apply() exit.
    if (dst.numRows() == nrows && rows.isDense()) {
      Vec anyVec = dst.anyVec();
      assert anyVec != null; // if anyVec was null, then dst.numRows() would have been 0
      Vec vsrc = anyVec.makeCon(src);
      for (int col : cols)
        dst.replace(col, vsrc);
      if (dst._key != null) DKV.put(dst);
      return;
    }

    // Handle large case
    Vec[] vecs = ses.copyOnWrite(dst, cols);
    Vec[] vecs2 = new Vec[cols.length]; // Just the selected columns get updated
    for (int i = 0; i < cols.length; i++)
      vecs2[i] = vecs[cols[i]];
    rows.sort(); // Side-effect internal sort; needed for fast row lookup
    new AssignFrameScalarTask(rows, src).doAll(vecs2);
  }

  /** Distributed task writing one double constant into the selected rows. */
  private static class AssignFrameScalarTask extends RowSliceTask {
    private double _src;

    private AssignFrameScalarTask(AstNumList rows, double src) {
      super(rows);
      _src = src;
    }

    @Override
    void mapChunkSlice(Chunk[] cs, int chkOffset) {
      long start = cs[0].start();
      for (int i = chkOffset; i < cs[0]._len; ++i)
        if (_rows.has(start + i))
          for (Chunk chk : cs)
            chk.set(i, _src);
    }
  }

  /** Assign a STRING over some dst rows; optimize for all rows. */
  private void assign_frame_scalar(Frame dst, int[] cols, AstNumList rows, String src, Session ses) {
    // Handle fast small case
    long nrows = rows.cnt();
    if (nrows == 1) {
      // FIX: copy-on-write the selected columns and update only those columns.
      // The previous code wrote into EVERY Vec of dst (ignoring `cols`) and
      // skipped the copy-on-write step, inconsistent with the numeric-scalar
      // overload above.
      Vec[] vecs = ses.copyOnWrite(dst, cols);
      long drow = (long) rows.expand()[0];
      for (int col : cols)
        vecs[col].set(drow, src);
      return;
    }

    // Handle large case
    Vec[] vecs = ses.copyOnWrite(dst, cols);
    Vec[] vecs2 = new Vec[cols.length]; // Just the selected columns get updated
    for (int i = 0; i < cols.length; i++)
      vecs2[i] = vecs[cols[i]];
    rows.sort(); // Side-effect internal sort; needed for fast row lookup
    new AssignFrameStringScalarTask(rows, src).doAll(vecs2);
  }

  /** Distributed task writing one String constant into the selected rows. */
  private static class AssignFrameStringScalarTask extends RowSliceTask {
    private String _src;

    private AssignFrameStringScalarTask(AstNumList rows, String src) {
      super(rows);
      _src = src;
    }

    @Override
    void mapChunkSlice(Chunk[] cs, int chkOffset) {
      long start = cs[0].start();
      for (int i = chkOffset; i < cs[0]._len; ++i)
        if (_rows.has(start + i))
          for (Chunk chk : cs)
            chk.set(i, _src);
    }
  }

  /**
   * True if {@code scalar} may be written into Vec {@code v}: Numbers go into
   * numeric/time columns, Strings into string/UUID columns or into categorical
   * columns whose domain contains the string; null is compatible with anything.
   */
  private boolean isScalarCompatible(Object scalar, Vec v) {
    if (scalar == null)
      return true;
    else if (scalar instanceof Number)
      return v.get_type() == Vec.T_NUM || v.get_type() == Vec.T_TIME;
    else if (scalar instanceof String) {
      if (v.get_type() == Vec.T_CAT) {
        for (String f: v.domain())
          if (f.equals(scalar))
            return true;
        return false;
      } else
        return v.get_type() == Vec.T_STR || (v.get_type() == Vec.T_UUID);
    } else
      return false;
  }

  /** Boolean assignment with a scalar: rows selected by a 0/1 predicate Vec. */
  private void assign_frame_scalar(Frame dst, int[] cols, Frame rows, Object src, Session ses) {
    // Bulk assign a numeric constant over a frame.  Directly set columns without checking target type
    // assuming the user just wants to overwrite everything: Copy-On-Write optimization happens here on the apply() exit.
    // Note: this skips "scalar to Vec" compatibility check because the whole Vec is overwritten
    Vec bool = rows.vec(0);
    if (bool.isConst() && ((int) bool.min() == 1) && (src instanceof Number)) {
      Vec anyVec = dst.anyVec();
      assert anyVec != null;
      // NOTE(review): src is only known to be a Number here; `(double) src`
      // unboxes and throws ClassCastException unless src is actually a Double
      // -- confirm callers never pass other Number types.
      Vec vsrc = anyVec.makeCon((double) src);
      for (int col : cols)
        dst.replace(col, vsrc);
      if (dst._key != null) DKV.put(dst);
      return;
    }
    // Make sure the scalar value is compatible with the target vector
    for (int col: cols) {
      if (! isScalarCompatible(src, dst.vec(col))) {
        throw new IllegalArgumentException("Cannot assign value " + src + " into a vector of type " + dst.vec(col).get_type_str() + ".");
      }
    }
    Vec[] vecs = ses.copyOnWrite(dst, cols);
    Vec[] vecs2 = new Vec[cols.length]; // Just the selected columns get updated
    for (int i = 0; i < cols.length; i++)
      vecs2[i] = vecs[cols[i]];
    ConditionalAssignTask.doAssign(vecs2, src, rows.vec(0));
  }

  /**
   * Distributed task writing a constant into every row where the predicate
   * chunk (passed as the last chunk of the task) holds 1.
   */
  private static class ConditionalAssignTask extends MRTask<ConditionalAssignTask> {
    final Chunk.ValueSetter[] _setters;

    ConditionalAssignTask(Vec[] vecs, Object value) {
      _setters = new Chunk.ValueSetter[vecs.length];
      for (int i = 0; i < _setters.length; i++)
        _setters[i] = Chunk.createValueSetter(vecs[i], value);
    }

    @Override
    public void map(Chunk[] cs) {
      Chunk bool = cs[cs.length - 1]; // predicate rides along as the last chunk
      for (int row = 0; row < cs[0]._len; row++) {
        if (bool.at8(row) == 1)
          for (int col = 0; col < cs.length - 1; col++)
            _setters[col].setValue(cs[col], row);
      }
    }

    /**
     * Sets a given value to all cells where given predicateVec is true.
     * @param dst target Vecs
     * @param src source Value
     * @param predicateVec predicate Vec
     */
    static void doAssign(Vec[] dst, Object src, Vec predicateVec) {
      Vec[] vecs = new Vec[dst.length + 1];
      System.arraycopy(dst, 0, vecs, 0, dst.length);
      vecs[vecs.length - 1] = predicateVec;
      new ConditionalAssignTask(dst, src).doAll(vecs);
    }
  }

  /**
   * Base MRTask that clips each chunk to the overlap with the (sorted) row
   * number-list and hands subclasses the first in-range chunk offset.
   */
  private static abstract class RowSliceTask extends MRTask<RowSliceTask> {
    final AstNumList _rows;

    RowSliceTask(AstNumList rows) {
      _rows = rows;
    }

    @Override
    public void map(Chunk[] cs) {
      long start = cs[0].start();
      long end = start + cs[0]._len;
      long min = (long) _rows.min(), max = (long) _rows.max() - 1; // exclusive max to inclusive max when stride == 1
      //       [ start, ...,  end ]     the chunk
      //1 []                            rows out left:  rows.max() < start
      //2                         []    rows out rite:  rows.min() > end
      //3 [ rows ]                      rows run left:  rows.min() < start && rows.max() <= end
      //4          [ rows ]             rows run  in :  start <= rows.min() && rows.max() <= end
      //5                   [ rows ]    rows run rite:  start <= rows.min() && end < rows.max()
      if (!(max < start || min > end)) { // not situation 1 or 2 above
        long startOffset = min > start ? min : start; // situation 4 and 5 => min > start;
        int chkOffset = (int) (startOffset - start);
        mapChunkSlice(cs, chkOffset);
      }
    }

    abstract void mapChunkSlice(Chunk[] cs, int chkOffset);
  }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 */

package com.microsoft.azure.management.network.samples;

import com.microsoft.azure.management.Azure;
import com.microsoft.azure.management.compute.KnownLinuxVirtualMachineImage;
import com.microsoft.azure.management.compute.VirtualMachine;
import com.microsoft.azure.management.compute.VirtualMachineSizeTypes;
import com.microsoft.azure.management.network.Network;
import com.microsoft.azure.management.network.NetworkSecurityGroup;
import com.microsoft.azure.management.network.SecurityRuleProtocol;
import com.microsoft.azure.management.resources.fluentcore.arm.Region;
import com.microsoft.azure.management.resources.fluentcore.utils.SdkContext;
import com.microsoft.azure.management.samples.Utils;
import com.microsoft.rest.LogLevel;

import java.io.File;
import java.util.Date;

/**
 * Azure Network sample for managing virtual networks -
 *  - Create a virtual network with Subnets
 *  - Update a virtual network
 *  - Create virtual machines in the virtual network subnets
 *  - Create another virtual network
 *  - List virtual networks
 *  - Delete a virtual network.
 */
public final class ManageVirtualNetwork {

    /**
     * Main function which runs the actual sample.
     * @param azure instance of the azure client
     * @return true if sample runs successfully
     */
    public static boolean runSample(Azure azure) {
        final String vnetName1 = SdkContext.randomResourceName("vnet1", 20);
        final String vnetName2 = SdkContext.randomResourceName("vnet2", 20);
        final String vnet1FrontEndSubnetName = "frontend";
        final String vnet1BackEndSubnetName = "backend";
        final String vnet1FrontEndSubnetNsgName = "frontendnsg";
        final String vnet1BackEndSubnetNsgName = "backendnsg";
        final String frontEndVMName = SdkContext.randomResourceName("fevm", 24);
        final String backEndVMName = SdkContext.randomResourceName("bevm", 24);
        final String publicIPAddressLeafDnsForFrontEndVM = SdkContext.randomResourceName("pip1", 24);
        final String userName = "tirekicker";
        final String sshKey = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfSPC2K7LZcFKEO+/t3dzmQYtrJFZNxOsbVgOVKietqHyvmYGHEC0J2wPdAqQ/63g/hhAEFRoyehM+rbeDri4txB3YFfnOK58jqdkyXzupWqXzOrlKY4Wz9SKjjN765+dqUITjKRIaAip1Ri137szRg71WnrmdP3SphTRlCx1Bk2nXqWPsclbRDCiZeF8QOTi4JqbmJyK5+0UqhqYRduun8ylAwKKQJ1NJt85sYIHn9f1Rfr6Tq2zS0wZ7DHbZL+zB5rSlAr8QyUdg/GQD+cmSs6LvPJKL78d6hMGk84ARtFo4A79ovwX/Fj01znDQkU6nJildfkaolH2rWFG/qttD azjava@javalib.com";
        final String rgName = SdkContext.randomResourceName("rgNEMV", 24);

        try {
            //============================================================
            // Create a virtual network with specific address-space and two subnet

            // Creates a network security group for backend subnet
            System.out.println("Creating a network security group for virtual network backend subnet...");

            NetworkSecurityGroup backEndSubnetNsg = azure.networkSecurityGroups().define(vnet1BackEndSubnetNsgName)
                    .withRegion(Region.US_EAST)
                    .withNewResourceGroup(rgName)
                    .defineRule("DenyInternetInComing")
                        .denyInbound()
                        .fromAddress("INTERNET")
                        .fromAnyPort()
                        .toAnyAddress()
                        .toAnyPort()
                        .withAnyProtocol()
                        .attach()
                    .defineRule("DenyInternetOutGoing")
                        .denyOutbound()
                        .fromAnyAddress()
                        .fromAnyPort()
                        .toAddress("INTERNET")
                        .toAnyPort()
                        .withAnyProtocol()
                        .attach()
                    .create();

            System.out.println("Created network security group");
            // Print the network security group
            Utils.print(backEndSubnetNsg);

            // Create the virtual network with frontend and backend subnets, with
            // network security group rule applied to backend subnet
            System.out.println("Creating virtual network #1...");

            Network virtualNetwork1 = azure.networks().define(vnetName1)
                    .withRegion(Region.US_EAST)
                    .withExistingResourceGroup(rgName)
                    .withAddressSpace("192.168.0.0/16")
                    .withSubnet(vnet1FrontEndSubnetName, "192.168.1.0/24")
                    .defineSubnet(vnet1BackEndSubnetName)
                        .withAddressPrefix("192.168.2.0/24")
                        .withExistingNetworkSecurityGroup(backEndSubnetNsg)
                        .attach()
                    .create();

            System.out.println("Created a virtual network");
            // Print the virtual network details
            Utils.print(virtualNetwork1);

            //============================================================
            // Update a virtual network

            // Creates a network security group for frontend subnet
            // (fixed copy-paste: this message previously said "backend subnet")
            System.out.println("Creating a network security group for virtual network frontend subnet...");

            NetworkSecurityGroup frontEndSubnetNsg = azure.networkSecurityGroups().define(vnet1FrontEndSubnetNsgName)
                    .withRegion(Region.US_EAST)
                    .withExistingResourceGroup(rgName)
                    .defineRule("AllowHttpInComing")
                        .allowInbound()
                        .fromAddress("INTERNET")
                        .fromAnyPort()
                        .toAnyAddress()
                        .toPort(80)
                        .withProtocol(SecurityRuleProtocol.TCP)
                        .attach()
                    .defineRule("DenyInternetOutGoing")
                        .denyOutbound()
                        .fromAnyAddress()
                        .fromAnyPort()
                        .toAddress("INTERNET")
                        .toAnyPort()
                        .withAnyProtocol()
                        .attach()
                    .create();

            System.out.println("Created network security group");
            // Print the network security group
            Utils.print(frontEndSubnetNsg);

            // Update the virtual network frontend subnet by associating it with network security group
            System.out.println("Associating network security group rule to frontend subnet");

            virtualNetwork1.update()
                    .updateSubnet(vnet1FrontEndSubnetName)
                        .withExistingNetworkSecurityGroup(frontEndSubnetNsg)
                        .parent()
                    .apply();

            System.out.println("Network security group rule associated with the frontend subnet");
            // Print the virtual network details
            Utils.print(virtualNetwork1);

            //============================================================
            // Create a virtual machine in each subnet

            // Creates the first virtual machine in frontend subnet
            System.out.println("Creating a Linux virtual machine in the frontend subnet");
            Date t1 = new Date();

            VirtualMachine frontEndVM = azure.virtualMachines().define(frontEndVMName)
                    .withRegion(Region.US_EAST)
                    .withExistingResourceGroup(rgName)
                    .withExistingPrimaryNetwork(virtualNetwork1)
                    .withSubnet(vnet1FrontEndSubnetName)
                    .withPrimaryPrivateIPAddressDynamic()
                    .withNewPrimaryPublicIPAddress(publicIPAddressLeafDnsForFrontEndVM)
                    .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                    .withRootUsername(userName)
                    .withSsh(sshKey)
                    .withSize(VirtualMachineSizeTypes.STANDARD_D3_V2)
                    .create();

            Date t2 = new Date();
            System.out.println("Created Linux VM: (took " + ((t2.getTime() - t1.getTime()) / 1000) + " seconds) " + frontEndVM.id());
            // Print virtual machine details
            Utils.print(frontEndVM);

            // Creates the second virtual machine in the backend subnet
            System.out.println("Creating a Linux virtual machine in the backend subnet");
            Date t3 = new Date();

            VirtualMachine backEndVM = azure.virtualMachines().define(backEndVMName)
                    .withRegion(Region.US_EAST)
                    .withExistingResourceGroup(rgName)
                    .withExistingPrimaryNetwork(virtualNetwork1)
                    .withSubnet(vnet1BackEndSubnetName)
                    .withPrimaryPrivateIPAddressDynamic()
                    .withoutPrimaryPublicIPAddress()
                    .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                    .withRootUsername(userName)
                    .withSsh(sshKey)
                    .withSize(VirtualMachineSizeTypes.STANDARD_D3_V2)
                    .create();

            Date t4 = new Date();
            System.out.println("Created Linux VM: (took " + ((t4.getTime() - t3.getTime()) / 1000) + " seconds) " + backEndVM.id());
            // Print virtual machine details
            Utils.print(backEndVM);

            //============================================================
            // Create a virtual network with default address-space and one default subnet

            System.out.println("Creating virtual network #2...");

            Network virtualNetwork2 = azure.networks().define(vnetName2)
                    .withRegion(Region.US_EAST)
                    .withNewResourceGroup(rgName)
                    .create();

            System.out.println("Created a virtual network");
            // Print the virtual network details
            Utils.print(virtualNetwork2);

            //============================================================
            // List virtual networks

            for (Network virtualNetwork : azure.networks().listByResourceGroup(rgName)) {
                Utils.print(virtualNetwork);
            }

            //============================================================
            // Delete a virtual network
            System.out.println("Deleting the virtual network");
            azure.networks().deleteById(virtualNetwork2.id());
            System.out.println("Deleted the virtual network");

            return true;
        } catch (Exception e) {
            System.err.println(e.getMessage());
        } finally {
            // Best-effort cleanup of everything the sample created
            try {
                System.out.println("Deleting Resource Group: " + rgName);
                azure.resourceGroups().deleteByName(rgName);
                System.out.println("Deleted Resource Group: " + rgName);
            } catch (NullPointerException npe) {
                System.out.println("Did not create any resources in Azure. No clean up is necessary");
            } catch (Exception g) {
                g.printStackTrace();
            }
        }
        return false;
    }

    /**
     * Main entry point.
     * @param args the parameters
     */
    public static void main(String[] args) {
        try {
            //=============================================================
            // Authenticate

            final File credFile = new File(System.getenv("AZURE_AUTH_LOCATION"));

            Azure azure = Azure.configure()
                    .withLogLevel(LogLevel.BODY)
                    .authenticate(credFile)
                    .withDefaultSubscription();

            // Print selected subscription
            System.out.println("Selected subscription: " + azure.subscriptionId());

            runSample(azure);
        } catch (Exception e) {
            System.out.println(e.getMessage());
            e.printStackTrace();
        }
    }

    private ManageVirtualNetwork() {
    }
}
/** * Copyright 2010-2016 Boxfuse GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flywaydb.maven; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.project.MavenProject; import org.apache.maven.settings.Server; import org.apache.maven.settings.Settings; import org.flywaydb.core.Flyway; import org.flywaydb.core.api.FlywayException; import org.flywaydb.core.internal.util.ExceptionUtils; import org.flywaydb.core.internal.util.Location; import org.flywaydb.core.internal.util.logging.Log; import org.flywaydb.core.internal.util.logging.LogFactory; import org.sonatype.plexus.components.cipher.DefaultPlexusCipher; import org.sonatype.plexus.components.cipher.PlexusCipherException; import org.sonatype.plexus.components.sec.dispatcher.DefaultSecDispatcher; import org.sonatype.plexus.components.sec.dispatcher.SecDispatcher; import org.sonatype.plexus.components.sec.dispatcher.SecDispatcherException; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.util.Map; import java.util.Properties; /** * Common base class for all mojos with all common attributes.<br> * * @requiresDependencyResolution test * @configurator include-project-dependencies * @phase pre-integration-test */ @SuppressWarnings({"JavaDoc", "FieldCanBeLocal", "UnusedDeclaration"}) abstract class 
AbstractFlywayMojo extends AbstractMojo { /** * Property name prefix for placeholders that are configured through properties. */ private static final String PLACEHOLDERS_PROPERTY_PREFIX = "flyway.placeholders."; protected Log log; protected Flyway flyway = new Flyway(); /** * Whether to skip the execution of the Maven Plugin for this module.<br/> * <p>Also configurable with Maven or System Property: ${flyway.skip}</p> * * @parameter property="flyway.skip" */ /* private -> for testing */ boolean skip; /** * The fully qualified classname of the jdbc driver to use to connect to the database.<br> * By default, the driver is autodetected based on the url.<br/> * <p>Also configurable with Maven or System Property: ${flyway.driver}</p> * * @parameter property="flyway.driver" */ /* private -> for testing */ String driver; /** * The jdbc url to use to connect to the database.<br> * <p>Also configurable with Maven or System Property: ${flyway.url}</p> * * @parameter property="flyway.url" */ /* private -> for testing */ String url; /** * The user to use to connect to the database. (default: <i>blank</i>)<br> * The credentials can be specified by user/password or {@code serverId} from settings.xml * <p>Also configurable with Maven or System Property: ${flyway.user}</p> * * @parameter property="flyway.user" */ /* private -> for testing */ String user; /** * The password to use to connect to the database. (default: <i>blank</i>)<br> * <p>Also configurable with Maven or System Property: ${flyway.password}</p> * * @parameter property="flyway.password" */ private String password; /** * List of the schemas managed by Flyway. 
These schema names are case-sensitive.<br/> * (default: The default schema for the datasource connection) * <p>Consequences:</p> * <ul> * <li>The first schema in the list will be automatically set as the default one during the migration.</li> * <li>The first schema in the list will also be the one containing the metadata table.</li> * <li>The schemas will be cleaned in the order of this list.</li> * </ul> * <p>Also configurable with Maven or System Property: ${flyway.schemas} (comma-separated list)</p> * * @parameter property="flyway.schemas" */ private String[] schemas; /** * <p>The name of the metadata table that will be used by Flyway. (default: schema_version)</p> * <p> By default (single-schema mode) the * metadata table is placed in the default schema for the connection provided by the datasource. <br/> When the * {@code flyway.schemas} property is set (multi-schema mode), the metadata table is placed in the first schema of * the list. </p> * <p>Also configurable with Maven or System Property: ${flyway.table}</p> * * @parameter property="flyway.table" */ private String table = flyway.getTable(); /** * The version to tag an existing schema with when executing baseline. (default: 1)<br/> * <p>Also configurable with Maven or System Property: ${flyway.baselineVersion}</p> * * @parameter property="flyway.baselineVersion" */ private String baselineVersion; /** * The description to tag an existing schema with when executing baseline. (default: << Flyway Baseline >>)<br> * <p>Also configurable with Maven or System Property: ${flyway.baselineDescription}</p> * * @parameter property="flyway.baselineDescription" */ private String baselineDescription; /** * Locations on the classpath to scan recursively for migrations. Locations may contain both sql * and java-based migrations. 
(default: filesystem:src/main/resources/db/migration) * <p>Also configurable with Maven or System Property: ${flyway.locations} (Comma-separated list)</p> * * @parameter */ private String[] locations; /** * The fully qualified class names of the custom MigrationResolvers to be used in addition or as replacement * (if skipDefaultResolvers is true) to the built-in ones for resolving Migrations to apply. * <p>(default: none)</p> * <p>Also configurable with Maven or System Property: ${flyway.resolvers} (Comma-separated list)</p> * * @parameter */ private String[] resolvers = new String[0]; /** * When set to true, default resolvers are skipped, i.e. only custom resolvers as defined by 'resolvers' * are used. (default: false)<br> <p>Also configurable with Maven or System Property: * ${flyway.skipDefaultResolvers}</p> * * @parameter property="flyway.skipDefaultResolvers" */ private boolean skipDefaultResolvers; /** * The encoding of Sql migrations. (default: UTF-8)<br> <p>Also configurable with Maven or System Property: * ${flyway.encoding}</p> * * @parameter property="flyway.encoding" */ private String encoding = flyway.getEncoding(); /** * The file name prefix for Sql migrations (default: V) <p>Also configurable with Maven or System Property: * ${flyway.sqlMigrationPrefix}</p> * * <p>Sql migrations have the following file name structure: prefixVERSIONseparatorDESCRIPTIONsuffix , * which using the defaults translates to V1_1__My_description.sql</p> * * @parameter property="flyway.sqlMigrationPrefix" */ private String sqlMigrationPrefix = flyway.getSqlMigrationPrefix(); /** * The file name prefix for repeatable sql migrations (default: R) <p>Also configurable with Maven or System Property: * ${flyway.repeatableSqlMigrationPrefix}</p> * * <p>Repeatable sql migrations have the following file name structure: prefixSeparatorDESCRIPTIONsuffix , * which using the defaults translates to R__My_description.sql</p> * * @parameter property="flyway.repeatableSqlMigrationPrefix" */ 
private String repeatableSqlMigrationPrefix = flyway.getRepeatableSqlMigrationPrefix(); /** * The file name separator for Sql migrations (default: __) <p>Also configurable with Maven or System Property: * ${flyway.sqlMigrationSeparator}</p> * * <p>Sql migrations have the following file name structure: prefixVERSIONseparatorDESCRIPTIONsuffix , * which using the defaults translates to V1_1__My_description.sql</p> * * @parameter property="flyway.sqlMigrationSeparator" */ private String sqlMigrationSeparator = flyway.getSqlMigrationSeparator(); /** * The file name suffix for Sql migrations (default: .sql) <p>Also configurable with Maven or System Property: * ${flyway.sqlMigrationSuffix}</p> * * <p>Sql migrations have the following file name structure: prefixVERSIONseparatorDESCRIPTIONsuffix , * which using the defaults translates to V1_1__My_description.sql</p> * * @parameter property="flyway.sqlMigrationSuffix" */ private String sqlMigrationSuffix = flyway.getSqlMigrationSuffix(); /** * Whether to automatically call clean or not when a validation error occurs. (default: {@code false})<br/> * <p> This is exclusively intended as a convenience for development. Even tough we * strongly recommend not to change migration scripts once they have been checked into SCM and run, this provides a * way of dealing with this case in a smooth manner. The database will be wiped clean automatically, ensuring that * the next migration will bring you back to the state checked into SCM.</p> * <p><b>Warning ! Do not enable in production !</b></p><br/> * <p>Also configurable with Maven or System Property: ${flyway.cleanOnValidationError}</p> * * @parameter property="flyway.cleanOnValidationError" */ private boolean cleanOnValidationError = flyway.isCleanOnValidationError(); /** * Whether to disable clean. 
(default: {@code false}) * <p>This is especially useful for production environments where running clean can be quite a career limiting move.</p> * <p>Also configurable with Maven or System Property: ${flyway.cleanDisabled}</p> * * @parameter property="flyway.cleanDisabled" */ private boolean cleanDisabled; /** * The target version up to which Flyway should consider migrations. * Migrations with a higher version number will be ignored. * The special value {@code current} designates the current version of the schema. (default: the latest version) * <p>Also configurable with Maven or System Property: ${flyway.target}</p> * * @parameter property="flyway.target" */ private String target = flyway.getTarget().getVersion(); /** * Allows migrations to be run "out of order" (default: {@code false}). * <p>If you already have versions 1 and 3 applied, and now a version 2 is found, * it will be applied too instead of being ignored.</p> * <p>Also configurable with Maven or System Property: ${flyway.outOfOrder}</p> * * @parameter property="flyway.outOfOrder" */ private boolean outOfOrder = flyway.isOutOfOrder(); /** * Ignore future migrations when reading the metadata table. These are migrations that were performed by a * newer deployment of the application that are not yet available in this version. For example: we have migrations * available on the classpath up to version 3.0. The metadata table indicates that a migration to version 4.0 * (unknown to us) has already been applied. Instead of bombing out (fail fast) with an exception, a * warning is logged and Flyway continues normally. This is useful for situations where one must be able to redeploy * an older version of the application after the database has been migrated by a newer one. 
(default: {@code true}) * <p>Also configurable with Maven or System Property: ${flyway.ignoreFutureMigrations}</p> * * @parameter property="flyway.ignoreFutureMigrations" */ private boolean ignoreFutureMigrations = true; /** * Ignores failed future migrations when reading the metadata table. These are migrations that we performed by a * newer deployment of the application that are not yet available in this version. For example: we have migrations * available on the classpath up to version 3.0. The metadata table indicates that a migration to version 4.0 * (unknown to us) has already been attempted and failed. Instead of bombing out (fail fast) with an exception, a * warning is logged and Flyway terminates normally. This is useful for situations where a database rollback is not * an option. An older version of the application can then be redeployed, even though a newer one failed due to a * bad migration. (default: false) * <p>Also configurable with Maven or System Property: ${flyway.ignoreFailedFutureMigration}</p> * * @parameter property="flyway.ignoreFailedFutureMigration" * * @deprecated Use the more generic <code>ignoreFutureMigrations</code> instead. Will be removed in Flyway 5.0. */ @Deprecated private boolean ignoreFailedFutureMigration; /** * Whether placeholders should be replaced. (default: true)<br> * <p>Also configurable with Maven or System Property: ${flyway.placeholderReplacement}</p> * * @parameter property="flyway.placeholderReplacement" */ private boolean placeholderReplacement = flyway.isPlaceholderReplacement(); /** * A map of &lt;placeholder, replacementValue&gt; to apply to sql migration scripts. * <p/> * <p>Also configurable with Maven or System Properties like ${flyway.placeholders.myplaceholder} or ${flyway.placeholders.otherone}</p> * * @parameter */ private Map<String, String> placeholders = flyway.getPlaceholders(); /** * The prefix of every placeholder. 
(default: ${ )<br> * <p>Also configurable with Maven or System Property: ${flyway.placeholderPrefix}</p> * * @parameter property="flyway.placeholderPrefix" */ private String placeholderPrefix = flyway.getPlaceholderPrefix(); /** * The suffix of every placeholder. (default: } )<br> * <p>Also configurable with Maven or System Property: ${flyway.placeholderSuffix}</p> * * @parameter property="flyway.placeholderSuffix" */ private String placeholderSuffix = flyway.getPlaceholderSuffix(); /** * An array of FlywayCallback implementations. (default: empty )<br> * <p>Also configurable with Maven or System Property: ${flyway.callbacks}</p> * * @parameter */ private String[] callbacks = new String[0]; /** * When set to true, default callbacks are skipped, i.e. only custom callbacks as defined by 'resolvers' * are used. (default: false)<br> <p>Also configurable with Maven or System Property: * ${flyway.skipDefaultCallbacks}</p> * * @parameter property="flyway.skipDefaultCallbacks" */ private boolean skipDefaultCallbacks; /** * <p> * Whether to automatically call baseline when migrate is executed against a non-empty schema with no metadata table. * This schema will then be baselined with the {@code initialVersion} before executing the migrations. * Only migrations above {@code initialVersion} will then be applied. * </p> * <p> * This is useful for initial Flyway production deployments on projects with an existing DB. * </p> * <p> * Be careful when enabling this as it removes the safety net that ensures * Flyway does not migrate the wrong database in case of a configuration mistake! (default: {@code false}) * </p> * <p>Also configurable with Maven or System Property: ${flyway.baselineOnMigrate}</p> * * @parameter property="flyway.baselineOnMigrate" */ private Boolean baselineOnMigrate; /** * Whether to automatically call validate or not when running migrate. 
(default: {@code true})<br/> * <p>Also configurable with Maven or System Property: ${flyway.validationErrorMode}</p> * * @parameter property="flyway.validateOnMigrate" */ private boolean validateOnMigrate = flyway.isValidateOnMigrate(); /** * Properties file from which to load the Flyway configuration. The names of the individual properties match the ones you would * use as Maven or System properties. The encoding of the file must be the same as the encoding defined with the * flyway.encoding property, which is UTF-8 by default. Relative paths are relative to the POM. (default: flyway.properties) * <p/> * <p>Also configurable with Maven or System Property: ${flyway.configFile}</p> * * @parameter property="flyway.configFile" */ private File configFile; /** * The id of the server tag in settings.xml (default: flyway-db)<br/> * The credentials can be specified by user/password or {@code serverId} from settings.xml<br> * <p>Also configurable with Maven or System Property: ${flyway.serverId}</p> * * @parameter property="flyway.serverId" */ private String serverId = "flyway-db"; /** * The link to the settings.xml * * @parameter property="settings" * @required * @readonly */ /* private -> for testing */ Settings settings; /** * Reference to the current project that includes the Flyway Maven plugin. * * @parameter property="project" required="true" */ /* private -> for testing */ MavenProject mavenProject; /** * Load username password from settings * * @throws FlywayException when the credentials could not be loaded. 
*/ private void loadCredentialsFromSettings() throws FlywayException { final Server server = settings.getServer(serverId); if (user == null) { if (server != null) { user = server.getUsername(); try { SecDispatcher secDispatcher = new DefaultSecDispatcher() {{ _cipher = new DefaultPlexusCipher(); }}; password = secDispatcher.decrypt(server.getPassword()); } catch (SecDispatcherException e) { throw new FlywayException("Unable to decrypt password", e); } catch (PlexusCipherException e) { throw new FlywayException("Unable to initialize password decryption", e); } } } else if (server != null) { throw new FlywayException("You specified credentials both in the Flyway config and settings.xml. Use either one or the other"); } } /** * Retrieves the value of this boolean property, based on the matching System on the Maven property. * * @param systemPropertyName The name of the System property. * @param mavenPropertyValue The value of the Maven property. * @return The value to use. */ /* private -> for testing */ boolean getBooleanProperty(String systemPropertyName, boolean mavenPropertyValue) { String systemPropertyValue = System.getProperty(systemPropertyName); if (systemPropertyValue != null) { return Boolean.getBoolean(systemPropertyName); } return mavenPropertyValue; } public final void execute() throws MojoExecutionException, MojoFailureException { LogFactory.setLogCreator(new MavenLogCreator(this)); log = LogFactory.getLog(getClass()); if (getBooleanProperty("flyway.skip", skip)) { log.info("Skipping Flyway execution"); return; } try { loadCredentialsFromSettings(); flyway.setClassLoader(Thread.currentThread().getContextClassLoader()); flyway.setSchemas(schemas); flyway.setTable(table); if (baselineVersion != null) { flyway.setBaselineVersionAsString(baselineVersion); } if (baselineDescription != null) { flyway.setBaselineDescription(baselineDescription); } if (locations != null) { for (int i = 0; i < locations.length; i++) { if 
(locations[i].startsWith(Location.FILESYSTEM_PREFIX)) { String newLocation = locations[i].substring(Location.FILESYSTEM_PREFIX.length()); File file = new File(newLocation); if (!file.isAbsolute()) { file = new File(mavenProject.getBasedir(), newLocation); } locations[i] = Location.FILESYSTEM_PREFIX + file.getAbsolutePath(); } } } else { locations = new String[] { Location.FILESYSTEM_PREFIX + mavenProject.getBasedir().getAbsolutePath() + "/src/main/resources/db/migration" }; } flyway.setLocations(locations); flyway.setResolversAsClassNames(resolvers); flyway.setSkipDefaultResolvers(skipDefaultResolvers); flyway.setCallbacksAsClassNames(callbacks); flyway.setSkipDefaultCallbacks(skipDefaultCallbacks); flyway.setEncoding(encoding); flyway.setSqlMigrationPrefix(sqlMigrationPrefix); flyway.setRepeatableSqlMigrationPrefix(repeatableSqlMigrationPrefix); flyway.setSqlMigrationSeparator(sqlMigrationSeparator); flyway.setSqlMigrationSuffix(sqlMigrationSuffix); flyway.setCleanOnValidationError(cleanOnValidationError); flyway.setCleanDisabled(cleanDisabled); flyway.setOutOfOrder(outOfOrder); flyway.setTargetAsString(target); flyway.setIgnoreFutureMigrations(ignoreFutureMigrations); if (ignoreFailedFutureMigration) { flyway.setIgnoreFailedFutureMigration(ignoreFailedFutureMigration); } flyway.setPlaceholderReplacement(placeholderReplacement); flyway.setPlaceholderPrefix(placeholderPrefix); flyway.setPlaceholderSuffix(placeholderSuffix); if (baselineOnMigrate != null) { flyway.setBaselineOnMigrate(baselineOnMigrate); } flyway.setValidateOnMigrate(validateOnMigrate); Properties properties = new Properties(); properties.putAll(mavenProject.getProperties()); if (driver != null) { properties.setProperty("flyway.driver", driver); } if (url != null) { properties.setProperty("flyway.url", url); } if (user != null) { properties.setProperty("flyway.user", user); } if (password != null) { properties.setProperty("flyway.password", password); } for (String placeholer : 
placeholders.keySet()) { String value = placeholders.get(placeholer); properties.setProperty("flyway.placeholders." + placeholer, value == null ? "" : value); } properties.putAll(getConfigFileProperties()); properties.putAll(System.getProperties()); filterProperties(properties); flyway.configure(properties); doExecute(flyway); } catch (Exception e) { throw new MojoExecutionException(e.toString(), ExceptionUtils.getRootCause(e)); } } /** * Filters there properties to remove the Flyway Maven Plugin-specific ones. * * @param properties The properties to filter. */ private static void filterProperties(Properties properties) { properties.remove("flyway.configFile"); properties.remove("flyway.current"); } /** * Retrieve the properties from the config file (if specified). */ private Properties getConfigFileProperties() throws IOException { Properties properties = new Properties(); String configFileProp = System.getProperty("flyway.configFile"); if (configFileProp != null) { configFile = new File(configFileProp); if (!configFile.isAbsolute()) { configFile = new File(mavenProject.getBasedir(), configFileProp); } } if (configFile == null) { File file = new File(mavenProject.getBasedir(), "flyway.properties"); if (file.isFile() && file.canRead()) { configFile = file; } else { log.debug("flyway.properties not found. Skipping."); return properties; } } else if (!configFile.canRead() || !configFile.isFile()) { throw new FlywayException("Unable to read config file: " + configFile.getAbsolutePath()); } properties.load(new InputStreamReader(new FileInputStream(configFile), encoding)); return properties; } /** * Retrieves this property from either the system or the maven properties. * * @param name The name of the property to retrieve. * @return The property value. {@code null} if not found. 
*/ protected String getProperty(String name) { String systemProperty = System.getProperty(name); if (systemProperty != null) { return systemProperty; } return mavenProject.getProperties().getProperty(name); } /** * Executes this mojo. * * @param flyway The flyway instance to operate on. * @throws Exception any exception */ protected abstract void doExecute(Flyway flyway) throws Exception; }
/* * Copyright 2015 Pascal TROUVIN <pascal.trouvin at o4s.fr>. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.CloudOps.laas.ws; import java.io.BufferedReader; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Enumeration; import java.util.Iterator; import java.util.Timer; import java.util.TimerTask; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.CloudOps.laas.ForwardUpdates; import org.CloudOps.laas.Logid; import org.CloudOps.laas.Logids; import org.CloudOps.laas.LogidsSaver; import org.CloudOps.laas.MessageJson; import org.CloudOps.laas.Params; import org.CloudOps.laas.Rule; import org.CloudOps.laas.Stats; import org.CloudOps.laas.mq.Kafka; import org.apache.log4j.Level; import org.apache.log4j.Logger; /** * * @author Pascal TROUVIN <pascal.trouvin at o4s.fr> */ public class WsLogid extends HttpServlet { static Logids lids=new Logids(); static String jsonFilename=null; static String propertiesFilename=null; static Params laasProperties=null; static Pattern noAuthenticationFor=null; static Timer timer = new Timer(); static TimerTask lidsSaveTask = null; static TimerTask forwardUpdatesTask = null; static Logger log=Logger.getLogger("WsLogid"); 
    // Background Kafka consumer thread; started once in init().
    static Thread kafka=null;
    // Request/authentication counters for this servlet.
    static Stats stat=new Stats("WsLogid");

    /**
     * Servlet initialization: loads persisted logids (optional), schedules the
     * periodic save and forward-updates tasks, compiles the authentication
     * bypass pattern, and starts the Kafka consumer thread.
     * Order matters: lids must be loaded before the save task is scheduled.
     */
    @Override
    public void init(ServletConfig config) throws ServletException {
        super.init(config);
        laasProperties=new Params(config);
        jsonFilename=makeContextParameterToFilename(config, "loadDataFrom");
        if( jsonFilename!=null ){
            try {
                lids.loadFromFile(jsonFilename);
            } catch (IOException ex) {
                // Logged FATAL but the servlet still starts (empty registry).
                log.log(Level.FATAL, null, ex);
            } catch (Exception ex) {
                log.log(Level.FATAL, null, ex);
            }
            // Persist the registry every minute, starting after 1s.
            lidsSaveTask=new LogidsSaver(jsonFilename);
            timer.schedule(lidsSaveTask, 1000, 60000);
        } else {
            log.log(Level.INFO, "No loadDataFrom parameter defined");
        }
        stat.reset("LaaSid", lids.count());
        //
        // LaaS.properties
        //
        // forward-updates-to: optional peer URL receiving replication updates.
        String forwardURL=laasProperties.getProperty("forward-updates-to");
        if( forwardURL!=null ){
            log.log(Level.INFO, "Forward Updates to "+forwardURL);
            forwardUpdatesTask=new ForwardUpdates(forwardURL);
            timer.schedule(forwardUpdatesTask, 1000, 60000);
        } else {
            log.log(Level.INFO, "Forward Updates DISABLED, missing parameter ForwardURL in "+propertiesFilename);
        }
        // no-authentication-for: regex of client IPs exempt from APIKEY auth.
        String noAuthParam=laasProperties.getProperty("no-authentication-for");
        if( noAuthParam!=null ){
            log.log(Level.INFO, "APIKEY Authentication: Bypass authentication for '"+noAuthParam+"'");
            noAuthenticationFor=Pattern.compile(noAuthParam);
        } else {
            log.log(Level.INFO, "APIKEY Authentication: APIKEY required for everyone");
        }
        // Guard against double-start when the container re-initializes the servlet.
        if( kafka!=null ){
            log.info("A kafka process is already running");
        } else {
            kafka=new Thread(new Kafka());
            kafka.start();
        }
    }

    /**
     * Handles the HTTP <code>GET</code> method.
     * Supported ?action= values: "count" (entry count as JSON) and "getall"
     * (JSON array of every logid); anything else is a 400.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        stat.stat("GET", 1);
        if( ! checkIfLogged(request, response) ) return;
        response.setContentType("application/json;charset=UTF-8");
        try (PrintWriter out = response.getWriter()) {
            String action=request.getParameter("action");
            if( action!=null ){
                switch (action) {
                    case "count":
                        out.print("{\"count\":"+lids.count()+"}");
                        break;
                    case "getall":
                        // Emit a comma-separated JSON array of every entry.
                        out.print("[");
                        Boolean first=true;
                        for(Iterator it=lids.getkeys(); it.hasNext();){
                            if( first ) first=false; else out.print(",");
                            String lid=it.next().toString();
                            Logid l=lids.logid(lid);
                            out.print(l.toJSON());
                        }
                        out.print("]");
                        break;
                    default:
                        response.sendError(400, "Unknown action command");
                        break;
                }
            }
        }
    }

    /**
     * Handles the HTTP <code>POST</code> method.
     * Each request-body line is parsed as a JSON message and echoed back
     * ("&lt;" prefix), followed by every logid whose rules match it (">" prefix).
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        stat.stat("POST", 1);
        if( ! checkIfLogged(request, response) ) return;
        response.setContentType("text/html;charset=UTF-8");
        try (PrintWriter out = response.getWriter()) {
            BufferedReader reader = request.getReader();
            String line;
            while ((line = reader.readLine()) != null) {
                out.println("<"+line);
                MessageJson msg=new MessageJson(line);
                for(Iterator it=lids.getkeys(); it.hasNext();){
                    String lid=(String) it.next();
                    Logid logid=lids.logid(lid);
                    if( logid.match(msg) ){
                        // at least one rule list had matched
                        out.println(">"+logid.toString());
                    }
                }
            }
        }
    }

    /**
     * Handles the HTTP <code>PUT</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPut(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        stat.stat("PUT", 1);
        if( !
checkIfLogged(request, response) ) return; String lid=request.getParameter("logid"); if( lid==null ) lid=request.getParameter("lid"); Boolean isForward=request.getParameter("Forward")!=null; response.setContentType("application/json;charset=UTF-8"); try (PrintWriter out = response.getWriter()) { if( lid==null ){ out.print("{\"status\":\"FAIL\",\"comment\":\"lid field is missing\"}"); } else { ArrayList<String> rulesString=new ArrayList<>(); for(Enumeration e=request.getParameterNames(); e.hasMoreElements(); ){ String pn=(String)e.nextElement(); if( pn.equals("lid") ) continue; String pv=request.getParameter(pn); if( ! Rule.check(pv) ) continue; // the parameter value match the rule syntax rulesString.add(pv); } Logid l=null; try { if( lids.create(lid, rulesString) ){ l=lids.logid(lid); out.print(l.toJSON("\"status\":\"OK\",\"comment\":\"CREATED\"")); stat.reset("LaaSid", lids.count()); StringBuilder str=new StringBuilder(); for(int i=0; i<rulesString.size(); i++){ if( i>0 ) str.append("&"); str.append("rule").append(i).append("=").append(hexaCodeSpecialChars(rulesString.get(i))); } if( ! isForward ) ForwardUpdates.addUpdate("PUT", str.toString()); } } catch (Exception ex) { log.fatal(ex); } if( l==null ) { out.print("{\"lid\":\""+lid+"\"," + "\"status\":\"FAIL\"," + "\"comment\":\"Failed to CREATE\"}"); } else { out.print(l.toJSON()); } } } } private String hexaCodeSpecialChars(String str){ StringBuilder s=new StringBuilder(); for(int i=0; i<str.length(); i++){ char c=str.charAt(i); if( (c>='A' && c<='Z') || (c>='a' && c<='z') || (c>='0' && c<='9') ) s.append(c); else s.append('%').append(Integer.toHexString(c)); } return s.toString(); } /** * Handles the HTTP <code>DELETE</code> method. 
* * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ @Override protected void doDelete(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { stat.stat("DELETE", 1); if( ! checkIfLogged(request, response) ) return; Logid l=null; String lid=request.getParameter("logid"); if( lid==null ) lid=request.getParameter("lid"); if( lid!=null ){ l=lids.logid(lid); } Boolean isForward=request.getParameter("Forward")!=null; response.setContentType("application/json;charset=UTF-8"); try (PrintWriter out = response.getWriter()) { if( l==null ){ out.print("{\"lid\":\""+lid+"\"," + "\"status\":\"FAIL\"," + "\"comment\":\"lid not found\"}"); } else { if( lids.delete(lid) ){ out.print(l.toJSON("\"status\":\"OK\",\"comment\":\"DELETED\"")); if( ! isForward ) ForwardUpdates.addUpdate("DELETE", "lid="+lid); stat.reset("LaaSid", lids.count()); } else { out.print(l.toJSON("\"status\":\"FAIL\",\"comment\":\"Error in deletion\"")); } } } } /** * checkIfLogged * @param request * @param response * @return true if client is logged with an API key, else false * @throws IOException */ public static Boolean checkIfLogged(HttpServletRequest request, HttpServletResponse response) throws IOException{ HttpSession session=request.getSession(); if( session==null || session.getAttribute("apikey")==null ){ if( noAuthenticationFor!=null ){ // an authentication bypass configured String clientIP=request.getHeader("X-Real-IP"); if( clientIP==null ) clientIP=request.getRemoteAddr(); Matcher m=noAuthenticationFor.matcher(clientIP); if( m.find() ){ if( session!=null ){ session.invalidate(); } // create a new session, everytime for security reason session=request.getSession(true); session.setAttribute("apikey", "BYPASS"); stat.stat("AuthenticationByPass", 1); log.info("checkIfLogged from Authentication Bypassed for '"+clientIP+"'"); return true; 
} log.info("checkIfLogged from Authentication required for '"+clientIP+"'"); } stat.stat("AuthenticationRequired", 1); response.sendRedirect("login?referer="+request.getRequestURL().toString()+"?"+request.getQueryString()); return false; } return true; } private String makeContextParameterToFilename(ServletConfig config, String contextParameterName){ String filename=config.getServletContext().getInitParameter(contextParameterName); if( filename!=null ){ if( ! filename.startsWith("/") ){ String hd =System.getProperty("user.dir"); if( ! hd.endsWith("/") ) hd+="/"; filename = hd+filename; } } return filename; } /** * Returns a short description of the servlet. * * @return a String containing servlet description */ @Override public String getServletInfo() { return "Short description"; }// </editor-fold> }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.router.clientrm; import java.io.IOException; import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse; import org.apache.hadoop.yarn.api.protocolrecords.FailApplicationAttemptRequest; import org.apache.hadoop.yarn.api.protocolrecords.FailApplicationAttemptResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetAllResourceProfilesRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetAllResourceProfilesResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetAllResourceTypeInfoRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetAllResourceTypeInfoResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest; import 
org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetNewReservationRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetNewReservationResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest; import 
org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetResourceProfileRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetResourceProfileResponse; import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest; import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse; import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest; import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse; import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse; import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest; import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse; import org.apache.hadoop.yarn.api.protocolrecords.ReservationListRequest; import org.apache.hadoop.yarn.api.protocolrecords.ReservationListResponse; import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest; import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse; import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest; import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse; import org.apache.hadoop.yarn.api.protocolrecords.SignalContainerRequest; import org.apache.hadoop.yarn.api.protocolrecords.SignalContainerResponse; import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest; import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse; import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationPriorityRequest; import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationPriorityResponse; import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsRequest; import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsResponse; import 
org.apache.hadoop.yarn.exceptions.YarnException;

/**
 * Mock interceptor that performs no processing of its own: every
 * {@code ApplicationClientProtocol} call is forwarded verbatim to the next
 * interceptor in the chain (obtained via {@code getNextInterceptor()}), and the
 * downstream response is returned unchanged.
 */
public class PassThroughClientRequestInterceptor
    extends AbstractClientRequestInterceptor {

  @Override
  public GetNewApplicationResponse getNewApplication(
      GetNewApplicationRequest request) throws YarnException, IOException {
    return getNextInterceptor().getNewApplication(request);
  }

  @Override
  public SubmitApplicationResponse submitApplication(
      SubmitApplicationRequest request) throws YarnException, IOException {
    return getNextInterceptor().submitApplication(request);
  }

  @Override
  public KillApplicationResponse forceKillApplication(
      KillApplicationRequest request) throws YarnException, IOException {
    return getNextInterceptor().forceKillApplication(request);
  }

  @Override
  public GetClusterMetricsResponse getClusterMetrics(
      GetClusterMetricsRequest request) throws YarnException, IOException {
    return getNextInterceptor().getClusterMetrics(request);
  }

  @Override
  public GetClusterNodesResponse getClusterNodes(GetClusterNodesRequest request)
      throws YarnException, IOException {
    return getNextInterceptor().getClusterNodes(request);
  }

  @Override
  public GetQueueInfoResponse getQueueInfo(GetQueueInfoRequest request)
      throws YarnException, IOException {
    return getNextInterceptor().getQueueInfo(request);
  }

  @Override
  public GetQueueUserAclsInfoResponse getQueueUserAcls(
      GetQueueUserAclsInfoRequest request) throws YarnException, IOException {
    return getNextInterceptor().getQueueUserAcls(request);
  }

  @Override
  public MoveApplicationAcrossQueuesResponse moveApplicationAcrossQueues(
      MoveApplicationAcrossQueuesRequest request)
      throws YarnException, IOException {
    return getNextInterceptor().moveApplicationAcrossQueues(request);
  }

  @Override
  public GetNewReservationResponse getNewReservation(
      GetNewReservationRequest request) throws YarnException, IOException {
    return getNextInterceptor().getNewReservation(request);
  }

  @Override
  public ReservationSubmissionResponse submitReservation(
      ReservationSubmissionRequest request) throws YarnException, IOException {
    return getNextInterceptor().submitReservation(request);
  }

  @Override
  public ReservationListResponse listReservations(
      ReservationListRequest request) throws YarnException, IOException {
    return getNextInterceptor().listReservations(request);
  }

  @Override
  public ReservationUpdateResponse updateReservation(
      ReservationUpdateRequest request) throws YarnException, IOException {
    return getNextInterceptor().updateReservation(request);
  }

  @Override
  public ReservationDeleteResponse deleteReservation(
      ReservationDeleteRequest request) throws YarnException, IOException {
    return getNextInterceptor().deleteReservation(request);
  }

  @Override
  public GetNodesToLabelsResponse getNodeToLabels(
      GetNodesToLabelsRequest request) throws YarnException, IOException {
    return getNextInterceptor().getNodeToLabels(request);
  }

  @Override
  public GetLabelsToNodesResponse getLabelsToNodes(
      GetLabelsToNodesRequest request) throws YarnException, IOException {
    return getNextInterceptor().getLabelsToNodes(request);
  }

  @Override
  public GetClusterNodeLabelsResponse getClusterNodeLabels(
      GetClusterNodeLabelsRequest request) throws YarnException, IOException {
    return getNextInterceptor().getClusterNodeLabels(request);
  }

  @Override
  public GetApplicationReportResponse getApplicationReport(
      GetApplicationReportRequest request) throws YarnException, IOException {
    return getNextInterceptor().getApplicationReport(request);
  }

  @Override
  public GetApplicationsResponse getApplications(GetApplicationsRequest request)
      throws YarnException, IOException {
    return getNextInterceptor().getApplications(request);
  }

  @Override
  public GetApplicationAttemptReportResponse getApplicationAttemptReport(
      GetApplicationAttemptReportRequest request)
      throws YarnException, IOException {
    return getNextInterceptor().getApplicationAttemptReport(request);
  }

  @Override
  public GetApplicationAttemptsResponse getApplicationAttempts(
      GetApplicationAttemptsRequest request) throws YarnException, IOException {
    return getNextInterceptor().getApplicationAttempts(request);
  }

  @Override
  public GetContainerReportResponse getContainerReport(
      GetContainerReportRequest request) throws YarnException, IOException {
    return getNextInterceptor().getContainerReport(request);
  }

  @Override
  public GetContainersResponse getContainers(GetContainersRequest request)
      throws YarnException, IOException {
    return getNextInterceptor().getContainers(request);
  }

  @Override
  public GetDelegationTokenResponse getDelegationToken(
      GetDelegationTokenRequest request) throws YarnException, IOException {
    return getNextInterceptor().getDelegationToken(request);
  }

  @Override
  public RenewDelegationTokenResponse renewDelegationToken(
      RenewDelegationTokenRequest request) throws YarnException, IOException {
    return getNextInterceptor().renewDelegationToken(request);
  }

  @Override
  public CancelDelegationTokenResponse cancelDelegationToken(
      CancelDelegationTokenRequest request) throws YarnException, IOException {
    return getNextInterceptor().cancelDelegationToken(request);
  }

  @Override
  public FailApplicationAttemptResponse failApplicationAttempt(
      FailApplicationAttemptRequest request) throws YarnException, IOException {
    return getNextInterceptor().failApplicationAttempt(request);
  }

  @Override
  public UpdateApplicationPriorityResponse updateApplicationPriority(
      UpdateApplicationPriorityRequest request)
      throws YarnException, IOException {
    return getNextInterceptor().updateApplicationPriority(request);
  }

  @Override
  public SignalContainerResponse signalToContainer(
      SignalContainerRequest request) throws YarnException, IOException {
    return getNextInterceptor().signalToContainer(request);
  }

  @Override
  public UpdateApplicationTimeoutsResponse updateApplicationTimeouts(
      UpdateApplicationTimeoutsRequest request)
      throws YarnException, IOException {
    return getNextInterceptor().updateApplicationTimeouts(request);
  }

  @Override
  public GetAllResourceProfilesResponse getResourceProfiles(
      GetAllResourceProfilesRequest request) throws YarnException, IOException {
    return getNextInterceptor().getResourceProfiles(request);
  }

  @Override
  public GetResourceProfileResponse getResourceProfile(
      GetResourceProfileRequest request) throws YarnException, IOException {
    return getNextInterceptor().getResourceProfile(request);
  }

  @Override
  public GetAllResourceTypeInfoResponse getResourceTypeInfo(
      GetAllResourceTypeInfoRequest request) throws YarnException, IOException {
    return getNextInterceptor().getResourceTypeInfo(request);
  }
}
/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.object; import com.google.common.collect.ImmutableMap; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.Filter; import org.elasticsearch.ElasticSearchIllegalStateException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.lucene.search.TermFilter; import org.elasticsearch.common.lucene.uid.UidField; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.mapper.multifield.MultiFieldMapper; import java.io.IOException; import java.util.*; import static com.google.common.collect.ImmutableMap.copyOf; import static com.google.common.collect.Lists.newArrayList; 
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType;

/**
 * Mapper for the "object" field type (and, via {@link Nested}, the "nested" type).
 * Holds a map of child {@link Mapper}s keyed by field name, dispatches parsing of
 * incoming document content to them, and dynamically creates new child mappers
 * for unmapped fields according to the effective {@link Dynamic} setting.
 *
 * Thread-safety: {@code mappers} is a volatile immutable map; all mutations go
 * through {@code putMapper} (or blocks synchronized on {@code mutex}) which swap
 * in a new immutable copy.
 */
public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {

    public static final String CONTENT_TYPE = "object";
    public static final String NESTED_CONTENT_TYPE = "nested";

    /** Default settings used when a mapping definition does not specify them. */
    public static class Defaults {
        public static final boolean ENABLED = true;
        public static final Nested NESTED = Nested.NO;
        public static final Dynamic DYNAMIC = null; // not set, inherited from father
        public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
    }

    /** Dynamic-mapping policy: add new fields, ignore them, or reject the document. */
    public static enum Dynamic {
        TRUE,
        FALSE,
        STRICT
    }

    /** Immutable value describing whether this object is "nested" and where its fields are copied. */
    public static class Nested {
        public static final Nested NO = new Nested(false, false, false);

        public static Nested newNested(boolean includeInParent, boolean includeInRoot) {
            return new Nested(true, includeInParent, includeInRoot);
        }

        private final boolean nested;
        private final boolean includeInParent;
        private final boolean includeInRoot;

        private Nested(boolean nested, boolean includeInParent, boolean includeInRoot) {
            this.nested = nested;
            this.includeInParent = includeInParent;
            this.includeInRoot = includeInRoot;
        }

        public boolean isNested() {
            return nested;
        }

        public boolean isIncludeInParent() {
            return includeInParent;
        }

        public boolean isIncludeInRoot() {
            return includeInRoot;
        }
    }

    /** Builder for {@link ObjectMapper}; collects child mapper builders and settings. */
    public static class Builder<T extends Builder, Y extends ObjectMapper> extends Mapper.Builder<T, Y> {

        protected boolean enabled = Defaults.ENABLED;

        protected Nested nested = Defaults.NESTED;

        protected Dynamic dynamic = Defaults.DYNAMIC;

        protected ContentPath.Type pathType = Defaults.PATH_TYPE;

        protected Boolean includeInAll;

        protected final List<Mapper.Builder> mappersBuilders = newArrayList();

        public Builder(String name) {
            super(name);
            this.builder = (T) this;
        }

        public T enabled(boolean enabled) {
            this.enabled = enabled;
            return builder;
        }

        public T dynamic(Dynamic dynamic) {
            this.dynamic = dynamic;
            return builder;
        }

        public T nested(Nested nested) {
            this.nested = nested;
            return builder;
        }

        public T pathType(ContentPath.Type pathType) {
            this.pathType = pathType;
            return builder;
        }

        public T includeInAll(boolean includeInAll) {
            this.includeInAll = includeInAll;
            return builder;
        }

        public T add(Mapper.Builder builder) {
            mappersBuilders.add(builder);
            return this.builder;
        }

        /**
         * Builds the object mapper: pushes this object's name onto the content path so
         * child mappers compute their full names, builds all children, then restores
         * the path state before creating the mapper itself.
         */
        @Override
        public Y build(BuilderContext context) {
            ContentPath.Type origPathType = context.path().pathType();
            context.path().pathType(pathType);
            context.path().add(name);
            Map<String, Mapper> mappers = new HashMap<String, Mapper>();
            for (Mapper.Builder builder : mappersBuilders) {
                Mapper mapper = builder.build(context);
                mappers.put(mapper.name(), mapper);
            }
            context.path().pathType(origPathType);
            context.path().remove();

            ObjectMapper objectMapper = createMapper(name, context.path().fullPathAsText(name), enabled, nested, dynamic, pathType, mappers);
            objectMapper.includeInAllIfNotSet(includeInAll);

            return (Y) objectMapper;
        }

        // Extension point so subclasses (e.g. the root object builder) can create their own mapper type.
        protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers) {
            return new ObjectMapper(name, fullPath, enabled, nested, dynamic, pathType, mappers);
        }
    }

    /** Parses an "object"/"nested" mapping definition (a Map from parsed JSON) into a {@link Builder}. */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            Map<String, Object> objectNode = node;
            ObjectMapper.Builder builder = createBuilder(name);

            // "nested" flags are accumulated first and only applied at the end, since
            // include_in_parent / include_in_root may appear before or after "type".
            boolean nested = false;
            boolean nestedIncludeInParent = false;
            boolean nestedIncludeInRoot = false;
            for (Map.Entry<String, Object> entry : objectNode.entrySet()) {
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();

                if (fieldName.equals("dynamic")) {
                    String value = fieldNode.toString();
                    if (value.equalsIgnoreCase("strict")) {
                        builder.dynamic(Dynamic.STRICT);
                    } else {
                        builder.dynamic(nodeBooleanValue(fieldNode) ? Dynamic.TRUE : Dynamic.FALSE);
                    }
                } else if (fieldName.equals("type")) {
                    String type = fieldNode.toString();
                    if (type.equals(CONTENT_TYPE)) {
                        builder.nested = Nested.NO;
                    } else if (type.equals(NESTED_CONTENT_TYPE)) {
                        nested = true;
                    } else {
                        throw new MapperParsingException("Trying to parse an object but has a different type [" + type + "] for [" + name + "]");
                    }
                } else if (fieldName.equals("include_in_parent")) {
                    nestedIncludeInParent = nodeBooleanValue(fieldNode);
                } else if (fieldName.equals("include_in_root")) {
                    nestedIncludeInRoot = nodeBooleanValue(fieldNode);
                } else if (fieldName.equals("enabled")) {
                    builder.enabled(nodeBooleanValue(fieldNode));
                } else if (fieldName.equals("path")) {
                    builder.pathType(parsePathType(name, fieldNode.toString()));
                } else if (fieldName.equals("properties")) {
                    parseProperties(builder, (Map<String, Object>) fieldNode, parserContext);
                } else if (fieldName.equals("include_in_all")) {
                    builder.includeInAll(nodeBooleanValue(fieldNode));
                } else {
                    // unknown keys are delegated to subclasses (root object handles extras here)
                    processField(builder, fieldName, fieldNode);
                }
            }
            if (nested) {
                builder.nested = Nested.newNested(nestedIncludeInParent, nestedIncludeInRoot);
            }
            return builder;
        }

        // Parses the "properties" sub-object; when no explicit "type" is given the type is
        // derived from the property's shape.
        private void parseProperties(ObjectMapper.Builder objBuilder, Map<String, Object> propsNode, ParserContext parserContext) {
            for (Map.Entry<String, Object> entry : propsNode.entrySet()) {
                String propName = entry.getKey();
                Map<String, Object> propNode = (Map<String, Object>) entry.getValue();

                String type;
                Object typeNode = propNode.get("type");
                if (typeNode != null) {
                    type = typeNode.toString();
                } else {
                    // lets see if we can derive this...
                    if (propNode.get("properties") != null) {
                        type = ObjectMapper.CONTENT_TYPE;
                    } else if (propNode.get("fields") != null) {
                        type = MultiFieldMapper.CONTENT_TYPE;
                    } else if (propNode.size() == 1 && propNode.get("enabled") != null) {
                        // if there is a single property with the enabled flag on it, make it an object
                        // (usually, setting enabled to false to not index any type, including core values, which
                        // non enabled object type supports).
                        type = ObjectMapper.CONTENT_TYPE;
                    } else {
                        throw new MapperParsingException("No type specified for property [" + propName + "]");
                    }
                }

                Mapper.TypeParser typeParser = parserContext.typeParser(type);
                if (typeParser == null) {
                    throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + propName + "]");
                }
                objBuilder.add(typeParser.parse(propName, propNode, parserContext));
            }
        }

        // Extension point: subclasses may return a specialized builder.
        protected Builder createBuilder(String name) {
            return object(name);
        }

        // Extension point: handle mapping keys not recognized above. No-op here.
        protected void processField(Builder builder, String fieldName, Object fieldNode) {
        }
    }

    private final String name;

    private final String fullPath;

    private final boolean enabled;

    private final Nested nested;

    // type value stored in nested docs: "__" + fullPath (see the constructor)
    private final String nestedTypePath;

    private final Filter nestedTypeFilter;

    // null means "inherit from the root object" (see serializeObject/serializeValue)
    private final Dynamic dynamic;

    private final ContentPath.Type pathType;

    private Boolean includeInAll;

    // immutable snapshot, replaced wholesale under `mutex` on every mutation
    private volatile ImmutableMap<String, Mapper> mappers = ImmutableMap.of();

    private final Object mutex = new Object();

    ObjectMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers) {
        this.name = name;
        this.fullPath = fullPath;
        this.enabled = enabled;
        this.nested = nested;
        this.dynamic = dynamic;
        this.pathType = pathType;
        if (mappers != null) {
            this.mappers = copyOf(mappers);
        }
        this.nestedTypePath = "__" + fullPath;
        this.nestedTypeFilter = new TermFilter(new Term(TypeFieldMapper.NAME, nestedTypePath));
    }

    @Override
    public String name() {
        return this.name;
    }

    /** Sets include-in-all and propagates it recursively to all inner mappers. Null is a no-op. */
    @Override
    public void includeInAll(Boolean includeInAll) {
        if (includeInAll == null) {
            return;
        }
        this.includeInAll = includeInAll;
        // when called from outside, apply this on all the inner mappers
        for (Mapper mapper : mappers.values()) {
            if (mapper instanceof AllFieldMapper.IncludeInAll) {
                ((AllFieldMapper.IncludeInAll) mapper).includeInAll(includeInAll);
            }
        }
    }

    /** Like {@link #includeInAll(Boolean)} but only fills in the value when not already set. */
    @Override
    public void includeInAllIfNotSet(Boolean includeInAll) {
        if (this.includeInAll == null) {
            this.includeInAll = includeInAll;
        }
        // when called from outside, apply this on all the inner mappers
        for (Mapper mapper : mappers.values()) {
            if (mapper instanceof AllFieldMapper.IncludeInAll) {
                ((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll);
            }
        }
    }

    public Nested nested() {
        return this.nested;
    }

    public Filter nestedTypeFilter() {
        return this.nestedTypeFilter;
    }

    /** Adds/replaces a child mapper; swaps in a new immutable map under the mutex. */
    public ObjectMapper putMapper(Mapper mapper) {
        if (mapper instanceof AllFieldMapper.IncludeInAll) {
            ((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll);
        }
        synchronized (mutex) {
            mappers = newMapBuilder(mappers).put(mapper.name(), mapper).immutableMap();
        }
        return this;
    }

    @Override
    public void traverse(FieldMapperListener fieldMapperListener) {
        for (Mapper mapper : mappers.values()) {
            mapper.traverse(fieldMapperListener);
        }
    }

    @Override
    public void traverse(ObjectMapperListener objectMapperListener) {
        objectMapperListener.objectMapper(this);
        for (Mapper mapper : mappers.values()) {
            mapper.traverse(objectMapperListener);
        }
    }

    public String fullPath() {
        return this.fullPath;
    }

    public String nestedTypePath() {
        return nestedTypePath;
    }

    public final Dynamic dynamic() {
        return this.dynamic;
    }

    // Whether a bare value (rather than an object) is acceptable here; overridden by root mappers.
    protected boolean allowValue() {
        return true;
    }

    /**
     * Parses this object's content from the document parser. For nested objects,
     * fields are written into a fresh Lucene document which is then optionally
     * copied into the parent and/or root document per the {@link Nested} settings.
     */
    public void parse(ParseContext context) throws IOException {
        if (!enabled) {
            context.parser().skipChildren();
            return;
        }
        XContentParser parser = context.parser();

        String currentFieldName = parser.currentName();
        XContentParser.Token token = parser.currentToken();
        if (token == XContentParser.Token.VALUE_NULL) {
            // the object is null ("obj1" : null), simply bail
            return;
        }

        if (token.isValue() && !allowValue()) {
            // if we are parsing an object but it is just a value, its only allowed on root level parsers with there
            // is a field name with the same name as the type
            throw new MapperParsingException("object mapping for [" + name + "] tried to parse as object, but found a concrete value");
        }

        Document restoreDoc = null;
        if (nested.isNested()) {
            Document nestedDoc = new Document();
            // pre add the uid field if possible (id was already provided)
            Field uidField = (Field) context.doc().getField(UidFieldMapper.NAME);
            if (uidField != null) {
                // we don't need to add it as a full uid field in nested docs, since we don't need versioning
                // we also rely on this for UidField#loadVersion

                // this is a deeply nested field
                if (uidField.stringValue() != null) {
                    nestedDoc.add(new Field(UidFieldMapper.NAME, uidField.stringValue(), Field.Store.NO, Field.Index.NOT_ANALYZED));
                } else {
                    nestedDoc.add(new Field(UidFieldMapper.NAME, ((UidField) uidField).uid(), Field.Store.NO, Field.Index.NOT_ANALYZED));
                }
            }
            // the type of the nested doc starts with __, so we can identify that its a nested one in filters
            // note, we don't prefix it with the type of the doc since it allows us to execute a nested query
            // across types (for example, with similar nested objects)
            nestedDoc.add(new Field(TypeFieldMapper.NAME, nestedTypePath, Field.Store.NO, Field.Index.NOT_ANALYZED));
            restoreDoc = context.switchDoc(nestedDoc);
            context.addDoc(nestedDoc);
        }

        ContentPath.Type origPathType = context.path().pathType();
        context.path().pathType(pathType);

        // if we are at the end of the previous object, advance
        if (token == XContentParser.Token.END_OBJECT) {
            token = parser.nextToken();
        }
        if (token == XContentParser.Token.START_OBJECT) {
            // if we are just starting an OBJECT, advance, this is the object we are parsing, we need the name first
            token = parser.nextToken();
        }

        while (token != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.START_OBJECT) {
                serializeObject(context, currentFieldName);
            } else if (token == XContentParser.Token.START_ARRAY) {
                serializeArray(context, currentFieldName);
            } else if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.VALUE_NULL) {
                serializeNullValue(context, currentFieldName);
            } else if (token == null) {
                throw new MapperParsingException("object mapping for [" + name + "] tried to parse as object, but got EOF, has a concrete value been provided to it?");
            } else if (token.isValue()) {
                serializeValue(context, currentFieldName, token);
            }
            token = parser.nextToken();
        }
        // restore the enable path flag
        context.path().pathType(origPathType);
        if (nested.isNested()) {
            Document nestedDoc = context.switchDoc(restoreDoc);
            if (nested.isIncludeInParent()) {
                // copy nested fields up into the parent doc, skipping identity fields
                for (IndexableField field : nestedDoc.getFields()) {
                    if (field.name().equals(UidFieldMapper.NAME) || field.name().equals(TypeFieldMapper.NAME)) {
                        continue;
                    } else {
                        context.doc().add(field);
                    }
                }
            }
            if (nested.isIncludeInRoot()) {
                // don't add it twice, if its included in parent, and we are handling the master doc...
                if (!(nested.isIncludeInParent() && context.doc() == context.rootDoc())) {
                    for (IndexableField field : nestedDoc.getFields()) {
                        if (field.name().equals(UidFieldMapper.NAME) || field.name().equals(TypeFieldMapper.NAME)) {
                            continue;
                        } else {
                            context.rootDoc().add(field);
                        }
                    }
                }
            }
        }
    }

    private void serializeNullValue(ParseContext context, String lastFieldName) throws IOException {
        // we can only handle null values if we have mappings for them
        Mapper mapper = mappers.get(lastFieldName);
        if (mapper != null) {
            mapper.parse(context);
        }
    }

    /**
     * Handles a sub-object: dispatches to an existing child mapper, or — when the
     * effective dynamic policy is TRUE — creates a new object mapper (template
     * lookup first) under the mutex, using a double-checked get to avoid duplicates.
     */
    private void serializeObject(final ParseContext context, String currentFieldName) throws IOException {
        if (currentFieldName == null) {
            throw new MapperParsingException("object mapping [" + name + "] trying to serialize an object with no field associated with it, current value [" + context.parser().textOrNull() + "]");
        }
        context.path().add(currentFieldName);

        Mapper objectMapper = mappers.get(currentFieldName);
        if (objectMapper != null) {
            objectMapper.parse(context);
        } else {
            Dynamic dynamic = this.dynamic;
            if (dynamic == null) {
                dynamic = context.root().dynamic();
            }
            if (dynamic == Dynamic.STRICT) {
                throw new StrictDynamicMappingException(fullPath, currentFieldName);
            } else if (dynamic == Dynamic.TRUE) {
                // we sync here just so we won't add it twice. Its not the end of the world
                // to sync here since next operations will get it before
                boolean newMapper = false;
                synchronized (mutex) {
                    objectMapper = mappers.get(currentFieldName);
                    if (objectMapper == null) {
                        newMapper = true;
                        // remove the current field name from path, since template search and the object builder add it as well...
                        context.path().remove();
                        Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "object");
                        if (builder == null) {
                            builder = MapperBuilders.object(currentFieldName).enabled(true).dynamic(dynamic).pathType(pathType);
                        }
                        BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
                        objectMapper = builder.build(builderContext);
                        putMapper(objectMapper);
                        // ...now re add it
                        context.path().add(currentFieldName);
                        context.setMappingsModified();
                    }
                }
                // traverse and parse outside of the mutex
                if (newMapper) {
                    // we need to traverse in case we have a dynamic template and need to add field mappers
                    // introduced by it
                    objectMapper.traverse(context.newFieldMappers());
                    objectMapper.traverse(context.newObjectMappers());
                }
                // now, parse it
                objectMapper.parse(context);
            } else {
                // not dynamic, read everything up to end object
                context.parser().skipChildren();
            }
        }
        context.path().remove();
    }

    /**
     * Handles an array value. Array-aware mappers consume the array themselves;
     * otherwise each element is dispatched individually (arrays may nest).
     */
    private void serializeArray(ParseContext context, String lastFieldName) throws IOException {
        String arrayFieldName = lastFieldName;
        Mapper mapper = mappers.get(lastFieldName);
        if (mapper != null && mapper instanceof ArrayValueMapperParser) {
            mapper.parse(context);
        } else {
            XContentParser parser = context.parser();
            XContentParser.Token token;
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                if (token == XContentParser.Token.START_OBJECT) {
                    serializeObject(context, lastFieldName);
                } else if (token == XContentParser.Token.START_ARRAY) {
                    serializeArray(context, lastFieldName);
                } else if (token == XContentParser.Token.FIELD_NAME) {
                    lastFieldName = parser.currentName();
                } else if (token == XContentParser.Token.VALUE_NULL) {
                    serializeNullValue(context, lastFieldName);
                } else if (token == null) {
                    throw new MapperParsingException("object mapping for [" + name + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?");
                } else {
                    serializeValue(context, lastFieldName, token);
                }
            }
        }
    }

    /**
     * Handles a concrete value. Unmapped fields are dynamically mapped (unless
     * dynamic is FALSE/STRICT): dynamic templates are consulted first, then the
     * token type drives detection — date/numeric detection for strings, the
     * parser's number type for numbers, boolean, binary, or failure otherwise.
     */
    private void serializeValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException {
        if (currentFieldName == null) {
            throw new MapperParsingException("object mapping [" + name + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]");
        }
        Mapper mapper = mappers.get(currentFieldName);
        if (mapper != null) {
            mapper.parse(context);
            return;
        }
        Dynamic dynamic = this.dynamic;
        if (dynamic == null) {
            dynamic = context.root().dynamic();
        }
        if (dynamic == Dynamic.STRICT) {
            throw new StrictDynamicMappingException(fullPath, currentFieldName);
        }
        if (dynamic == Dynamic.FALSE) {
            return;
        }
        // we sync here since we don't want to add this field twice to the document mapper
        // its not the end of the world, since we add it to the mappers once we create it
        // so next time we won't even get here for this field
        boolean newMapper = false;
        synchronized (mutex) {
            mapper = mappers.get(currentFieldName);
            if (mapper == null) {
                newMapper = true;
                BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
                if (token == XContentParser.Token.VALUE_STRING) {
                    boolean resolved = false;

                    // do a quick test to see if its fits a dynamic template, if so, use it.
                    // we need to do it here so we can handle things like attachment templates, where calling
                    // text (to see if its a date) causes the binary value to be cleared
                    if (!resolved) {
                        Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string", null);
                        if (builder != null) {
                            mapper = builder.build(builderContext);
                            resolved = true;
                        }
                    }

                    if (!resolved && context.parser().textLength() == 0) {
                        // empty string with no mapping, treat it like null value
                        return;
                    }

                    if (!resolved && context.root().dateDetection()) {
                        String text = context.parser().text();
                        // a safe check since "1" gets parsed as well
                        if (text.contains(":") || text.contains("-") || text.contains("/")) {
                            for (FormatDateTimeFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
                                try {
                                    dateTimeFormatter.parser().parseMillis(text);
                                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
                                    if (builder == null) {
                                        builder = dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter);
                                    }
                                    mapper = builder.build(builderContext);
                                    resolved = true;
                                    break;
                                } catch (Exception e) {
                                    // failure to parse this, continue
                                }
                            }
                        }
                    }
                    if (!resolved && context.root().numericDetection()) {
                        String text = context.parser().text();
                        try {
                            Long.parseLong(text);
                            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
                            if (builder == null) {
                                builder = longField(currentFieldName);
                            }
                            mapper = builder.build(builderContext);
                            resolved = true;
                        } catch (Exception e) {
                            // not a long number
                        }
                        if (!resolved) {
                            try {
                                Double.parseDouble(text);
                                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
                                if (builder == null) {
                                    builder = doubleField(currentFieldName);
                                }
                                mapper = builder.build(builderContext);
                                resolved = true;
                            } catch (Exception e) {
                                // not a long number
                            }
                        }
                    }
                    // DON'T do automatic ip detection logic, since it messes up with docs that have hosts and ips
                    // check if its an ip
//                    if (!resolved && text.indexOf('.') != -1) {
//                        try {
//                            IpFieldMapper.ipToLong(text);
//                            XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "ip");
//                            if (builder == null) {
//                                builder = ipField(currentFieldName);
//                            }
//                            mapper = builder.build(builderContext);
//                            resolved = true;
//                        } catch (Exception e) {
//                            // failure to parse, not ip...
//                        }
//                    }
                    if (!resolved) {
                        Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
                        if (builder == null) {
                            builder = stringField(currentFieldName);
                        }
                        mapper = builder.build(builderContext);
                    }
                } else if (token == XContentParser.Token.VALUE_NUMBER) {
                    XContentParser.NumberType numberType = context.parser().numberType();
                    if (numberType == XContentParser.NumberType.INT) {
                        if (context.parser().estimatedNumberType()) {
                            // estimated type: widen int -> long (see also FLOAT -> double below)
                            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
                            if (builder == null) {
                                builder = longField(currentFieldName);
                            }
                            mapper = builder.build(builderContext);
                        } else {
                            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
                            if (builder == null) {
                                builder = integerField(currentFieldName);
                            }
                            mapper = builder.build(builderContext);
                        }
                    } else if (numberType == XContentParser.NumberType.LONG) {
                        Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
                        if (builder == null) {
                            builder = longField(currentFieldName);
                        }
                        mapper = builder.build(builderContext);
                    } else if (numberType == XContentParser.NumberType.FLOAT) {
                        if (context.parser().estimatedNumberType()) {
                            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
                            if (builder == null) {
                                builder = doubleField(currentFieldName);
                            }
                            mapper = builder.build(builderContext);
                        } else {
                            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
                            if (builder == null) {
                                builder = floatField(currentFieldName);
                            }
                            mapper = builder.build(builderContext);
                        }
                    } else if (numberType == XContentParser.NumberType.DOUBLE) {
                        Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
                        if (builder == null) {
                            builder = doubleField(currentFieldName);
                        }
                        mapper = builder.build(builderContext);
                    }
                } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean");
                    if (builder == null) {
                        builder = booleanField(currentFieldName);
                    }
                    mapper = builder.build(builderContext);
                } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "binary");
                    if (builder == null) {
                        builder = binaryField(currentFieldName);
                    }
                    mapper = builder.build(builderContext);
                } else {
                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, null);
                    if (builder != null) {
                        mapper = builder.build(builderContext);
                    } else {
                        // TODO how do we identify dynamically that its a binary value?
                        throw new ElasticSearchIllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]");
                    }
                }
                putMapper(mapper);
                context.setMappingsModified();
            }
        }
        if (newMapper) {
            mapper.traverse(context.newFieldMappers());
        }
        mapper.parse(context);
    }

    /**
     * Merges another object mapping into this one: new children are added
     * (unless simulating), existing children are merged recursively; conflicts
     * are recorded on the merge context rather than thrown.
     */
    @Override
    public void merge(final Mapper mergeWith, final MergeContext mergeContext) throws MergeMappingException {
        if (!(mergeWith instanceof ObjectMapper)) {
            mergeContext.addConflict("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]");
            return;
        }
        ObjectMapper mergeWithObject = (ObjectMapper) mergeWith;
        doMerge(mergeWithObject, mergeContext);

        List<Mapper> mappersToTraverse = new ArrayList<Mapper>();
        synchronized (mutex) {
            for (Mapper mergeWithMapper : mergeWithObject.mappers.values()) {
                Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
                if (mergeIntoMapper == null) {
                    // no mapping, simply add it if not simulating
                    if (!mergeContext.mergeFlags().simulate()) {
                        putMapper(mergeWithMapper);
                        mappersToTraverse.add(mergeWithMapper);
                    }
                } else {
                    if ((mergeWithMapper instanceof MultiFieldMapper) && !(mergeIntoMapper instanceof MultiFieldMapper)) {
                        // upgrading a plain field to a multi-field: merge the old mapper into it and swap it in
                        MultiFieldMapper mergeWithMultiField = (MultiFieldMapper) mergeWithMapper;
                        mergeWithMultiField.merge(mergeIntoMapper, mergeContext);
                        if (!mergeContext.mergeFlags().simulate()) {
                            putMapper(mergeWithMultiField);
                            // now, record mappers to traverse events for all mappers
                            for (Mapper mapper : mergeWithMultiField.mappers().values()) {
                                mappersToTraverse.add(mapper);
                            }
                        }
                    } else {
                        mergeIntoMapper.merge(mergeWithMapper, mergeContext);
                    }
                }
            }
        }
        // call this outside of the mutex
        for (Mapper mapper : mappersToTraverse) {
            mapper.traverse(mergeContext.newFieldMappers());
            mapper.traverse(mergeContext.newObjectMappers());
        }
    }

    // Extension point for subclass-specific merge logic (e.g. root object settings). No-op here.
    protected void doMerge(ObjectMapper mergeWith, MergeContext mergeContext) {
    }

    @Override
    public void close() {
        for (Mapper mapper : mappers.values()) {
            mapper.close();
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        toXContent(builder, params, null, Mapper.EMPTY_ARRAY);
        return builder;
    }

    /**
     * Serializes this mapping; non-default settings only. Child mappers are
     * sorted by name for a stable output, internal mappers are written before
     * the "properties" section, and {@code additionalMappers} are appended.
     */
    public void toXContent(XContentBuilder builder, Params params, ToXContent custom, Mapper... additionalMappers) throws IOException {
        builder.startObject(name);
        if (nested.isNested()) {
            builder.field("type", NESTED_CONTENT_TYPE);
            if (nested.isIncludeInParent()) {
                builder.field("include_in_parent", true);
            }
            if (nested.isIncludeInRoot()) {
                builder.field("include_in_root", true);
            }
        } else if (mappers.isEmpty()) { // only write the object content type if there are no properties, otherwise, it is automatically detected
            builder.field("type", CONTENT_TYPE);
        }
        // grr, ugly! on root, dynamic defaults to TRUE, on children, it defaults to null to
        // inherit the root behavior
        if (this instanceof RootObjectMapper) {
            if (dynamic != Dynamic.TRUE) {
                builder.field("dynamic", dynamic.name().toLowerCase());
            }
        } else {
            if (dynamic != Defaults.DYNAMIC) {
                builder.field("dynamic", dynamic.name().toLowerCase());
            }
        }
        if (enabled != Defaults.ENABLED) {
            builder.field("enabled", enabled);
        }
        if (pathType != Defaults.PATH_TYPE) {
            builder.field("path", pathType.name().toLowerCase());
        }
        if (includeInAll != null) {
            builder.field("include_in_all", includeInAll);
        }
        if (custom != null) {
            custom.toXContent(builder, params);
        }
        doXContent(builder, params);

        // sort the mappers so we get consistent serialization format
        TreeMap<String, Mapper> sortedMappers = new TreeMap<String, Mapper>(mappers);

        // check internal mappers first (this is only relevant for root object)
        for (Mapper mapper : sortedMappers.values()) {
            if (mapper instanceof InternalMapper) {
                mapper.toXContent(builder, params);
            }
        }
        if (additionalMappers != null && additionalMappers.length > 0) {
            TreeMap<String, Mapper> additionalSortedMappers = new TreeMap<String, Mapper>();
            for (Mapper mapper : additionalMappers) {
                additionalSortedMappers.put(mapper.name(), mapper);
            }
            for (Mapper mapper : additionalSortedMappers.values()) {
                mapper.toXContent(builder, params);
            }
        }

        if (!mappers.isEmpty()) {
            builder.startObject("properties");
            for (Mapper mapper : sortedMappers.values()) {
                if (!(mapper instanceof InternalMapper)) {
                    mapper.toXContent(builder, params);
                }
            }
            builder.endObject();
        }
        builder.endObject();
    }

    // Extension point for subclasses to write extra mapping settings. No-op here.
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
    }
}
/* * #%L * This file is part of eAudit4j, a library for creating pluggable * auditing solutions, providing an audit processor that creates * a random event ID that is appended as a field to audit events. * %% * Copyright (C) 2015 - 2016 Michael Beiter <michael@beiter.org> * %% * All rights reserved. * . * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the copyright holder nor the names of the * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * . * . * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* #L% */ package org.beiter.michael.eaudit4j.processors.eventid.propsbuilder; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.beiter.michael.eaudit4j.processors.eventid.EventIdProperties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; /** * This class builds a set of {@link EventIdProperties} using the settings obtained from a Map. * <p> * Use the keys from the various KEY_* fields to properly populate the Map before calling this class' methods. */ // CHECKSTYLE:OFF // this is flagged in checkstyle with a missing whitespace before '}', which is a bug in checkstyle // suppress warnings about the long variable names @SuppressWarnings({"PMD.LongVariable"}) // CHECKSTYLE:ON public final class MapBasedEventIdPropsBuilder { /** * The logger object for this class */ private static final Logger LOG = LoggerFactory.getLogger(MapBasedEventIdPropsBuilder.class); // ################# // # Default values // ################# /** * @see EventIdProperties#setLength(int) */ public static final int DEFAULT_LENGTH = 16; /** * @see EventIdProperties#setEventFieldName(String) */ public static final String DEFAULT_EVENT_FIELD_NAME = "org.beiter.michael.eaudit4j.processors.eventid"; // ##################### // # Configuration Keys // ##################### /** * @see EventIdProperties#setLength(int) */ public static final String KEY_LENGTH = "audit.processor.eid.length"; /** * @see EventIdProperties#setEventFieldName(String) */ public static final String KEY_EVENT_FIELD_NAME = "audit.processor.eid.eventFieldName"; /** * A private constructor to prevent instantiation of this class */ private MapBasedEventIdPropsBuilder() { } /** * Creates a set of event ID properties that use the defaults as specified in this class. 
* * @return A set of event ID properties with (reasonable) defaults * @see MapBasedEventIdPropsBuilder */ public static EventIdProperties buildDefault() { return build(new ConcurrentHashMap<String, String>()); } /** * Initialize a set of event ID properties based on key / values in a <code>HashMap</code>. * * @param properties A <code>HashMap</code> with configuration properties, using the keys as specified in this class * @return A {@link EventIdProperties} object with default values, plus the provided parameters * @throws NullPointerException When {@code properties} is {@code null} */ // CHECKSTYLE:OFF // this is flagged in checkstyle with a missing whitespace before '}', which is a bug in checkstyle // suppress warnings about this method being too long (not much point in splitting up this one!) // suppress warnings about this method being too complex (can't extract a generic subroutine to reduce exec paths) @SuppressWarnings({"PMD.ExcessiveMethodLength", "PMD.NPathComplexity", "PMD.CyclomaticComplexity", "PMD.StdCyclomaticComplexity", "PMD.ModifiedCyclomaticComplexity"}) // CHECKSTYLE:ON public static EventIdProperties build(final Map<String, String> properties) { Validate.notNull(properties, "The validated object 'value' is null"); final EventIdProperties eventIdProperties = new EventIdProperties(); String tmp = properties.get(KEY_LENGTH); if (StringUtils.isNotEmpty(tmp)) { if (StringUtils.isNumeric(tmp)) { eventIdProperties.setLength(Integer.decode(tmp)); logValue(KEY_LENGTH, tmp); } else { eventIdProperties.setLength(DEFAULT_LENGTH); logDefault(KEY_LENGTH, tmp, "not numeric", String.valueOf(DEFAULT_LENGTH)); } } else { eventIdProperties.setLength(DEFAULT_LENGTH); logDefault(KEY_LENGTH, String.valueOf(DEFAULT_LENGTH)); } tmp = properties.get(KEY_EVENT_FIELD_NAME); if (StringUtils.isNotEmpty(tmp)) { eventIdProperties.setEventFieldName(tmp); logValue(KEY_EVENT_FIELD_NAME, tmp); } else { eventIdProperties.setEventFieldName(DEFAULT_EVENT_FIELD_NAME); 
logDefault(KEY_EVENT_FIELD_NAME, DEFAULT_EVENT_FIELD_NAME); } // set the additional properties, preserving the originally provided properties // create a defensive copy of the map and all its properties // the code looks a little more complicated than a simple "putAll()", but it catches situations // where a Map is provided that supports null values (e.g. a HashMap) vs Map implementations // that do not (e.g. ConcurrentHashMap). final Map<String, String> tempMap = new ConcurrentHashMap<>(); for (final Map.Entry<String, String> entry : properties.entrySet()) { final String key = entry.getKey(); final String value = entry.getValue(); if (value != null) { tempMap.put(key, value); } } eventIdProperties.setAdditionalProperties(tempMap); return eventIdProperties; } /** * Create a log entry when a value has been successfully configured. * * @param key The configuration key * @param value The value that is being used */ private static void logValue(final String key, final String value) { // Fortify will report a violation here because of disclosure of potentially confidential information. // However, the configuration keys are not confidential, which makes this a non-issue / false positive. if (LOG.isInfoEnabled()) { final StringBuilder msg = new StringBuilder("Key found in configuration ('") .append(key) .append("'), using configured value (not disclosed here for security reasons)"); LOG.info(msg.toString()); } // Fortify will report a violation here because of disclosure of potentially confidential information. // The configuration VALUES are confidential. DO NOT activate DEBUG logging in production. 
if (LOG.isDebugEnabled()) { final StringBuilder msg = new StringBuilder("Key found in configuration ('") .append(key) .append("'), using configured value ('"); if (value == null) { msg.append("null')"); } else { msg.append(value).append("')"); } LOG.debug(msg.toString()); } } /** * Create a log entry when a default value is being used in case the propsbuilder key has not been provided in the * configuration. * * @param key The configuration key * @param defaultValue The default value that is being used */ private static void logDefault(final String key, final String defaultValue) { // Fortify will report a violation here because of disclosure of potentially confidential information. // However, neither the configuration keys nor the default propsbuilder values are confidential, which makes // this a non-issue / false positive. if (LOG.isInfoEnabled()) { final StringBuilder msg = new StringBuilder("Key is not configured ('") .append(key) .append("'), using default value ('"); if (defaultValue == null) { msg.append("null')"); } else { msg.append(defaultValue).append("')"); } LOG.info(msg.toString()); } } /** * Create a log entry when a default value is being used in case that an invalid configuration value has been * provided in the configuration for the propsbuilder key. 
* * @param key The configuration key * @param invalidValue The invalid value that cannot be used * @param validationError The validation error that caused the invalid value to be refused * @param defaultValue The default value that is being used */ // suppress warnings about not using an object for the four strings in this PRIVATE method @SuppressWarnings("PMD.UseObjectForClearerAPI") private static void logDefault(final String key, final String invalidValue, final String validationError, final String defaultValue) { if (LOG.isWarnEnabled()) { final StringBuilder msg = new StringBuilder("Invalid value ('") .append(invalidValue) .append("', ") .append(validationError) .append(") for key '") .append(key) .append("', using default instead ('"); if (defaultValue == null) { msg.append("null')"); } else { msg.append(defaultValue).append("')"); } LOG.warn(msg.toString()); } } }
// Copyright (c) 2014 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.xwalk.embedding.test.v5;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;

import android.graphics.Color;

import org.xwalk.embedding.base.XWalkViewTestBase;

import android.annotation.SuppressLint;
import android.test.suitebuilder.annotation.SmallTest;
import android.test.suitebuilder.annotation.MediumTest;
import android.util.Pair;

/**
 * Instrumentation tests for the XWalkView embedding API (v5): user agent,
 * Z order, background color, zooming, accept languages and cache clearing.
 * <p>
 * NOTE(review): mXWalkView, mWebServer, mTestHelperBridge and the *Sync/*OnUiThread
 * helpers are inherited from XWalkViewTestBase - presumably they drive a live
 * XWalkView instance on the UI thread; confirm against the base class.
 * <p>
 * Fix in this revision: in the three zoom tests the catch blocks executed
 * {@code assertTrue(false)} BEFORE {@code e.printStackTrace()}; since the failed
 * assertion throws, the stack trace was never printed and the failure cause was
 * lost. The statements are now ordered like in every other test of this class.
 */
@SuppressLint("NewApi")
public class XWalkViewTest extends XWalkViewTestBase {

    // Verifies that a custom user agent string set on the view is reported by
    // navigator.userAgent in page JavaScript.
    @SmallTest
    public void testSetUserAgentString() {
        try {
            getInstrumentation().runOnMainSync(new Runnable(){
                @Override
                public void run() {
                    mXWalkView.setUserAgentString(USER_AGENT);
                }
            });
            loadDataSync(null, EMPTY_PAGE, "text/html", false);
            String result = executeJavaScriptAndWaitForResult("navigator.userAgent;");
            assertEquals(EXPECTED_USER_AGENT, result);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    // Smoke test: setZOrderOnTop(true) must not throw on the UI thread.
    @SmallTest
    public void testSetZOrderOnTop_True() {
        try {
            getInstrumentation().runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mXWalkView.setZOrderOnTop(true);
                }
            });
            assertTrue(true);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    // Smoke test: setZOrderOnTop(false) must not throw on the UI thread.
    @SmallTest
    public void testSetZOrderOnTop_False() {
        try {
            getInstrumentation().runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mXWalkView.setZOrderOnTop(false);
                }
            });
            assertTrue(true);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    // Smoke test: setting the background to a Color constant must not throw.
    @SmallTest
    public void testSetBackgroundColor_Color() {
        try {
            getInstrumentation().runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mXWalkView.setBackgroundColor(Color.RED);
                }
            });
            assertTrue(true);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    // Smoke test: setting the background from a parsed color string must not throw.
    @SmallTest
    public void testSetBackgroundColor_Value() {
        try {
            getInstrumentation().runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mXWalkView.setBackgroundColor(Color.parseColor("#00FF00"));
                }
            });
            assertTrue(true);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    // Smoke test: a fully transparent background (0) must not throw.
    @SmallTest
    public void testSetBackgroundColor_Transparent() {
        try {
            getInstrumentation().runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mXWalkView.setBackgroundColor(0);
                }
            });
            assertTrue(true);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    // Smoke test: combining Z order on top with a transparent background must not throw.
    @SmallTest
    public void testSetXWalkViewTransparent() {
        try {
            getInstrumentation().runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mXWalkView.setZOrderOnTop(true);
                    mXWalkView.setBackgroundColor(0);
                }
            });
            assertTrue(true);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    // At the page's minimum scale (0.5, set by zoom.html) zooming in must be
    // possible and zooming out must not be.
    @SmallTest
    public void testOnCanZoomInAndOut() {
        try {
            final float mPageMinimumScale = 0.5f;
            String url = "file:///android_asset/zoom.html";
            assertFalse("Should not be able to zoom in", canZoomInOnUiThread());
            loadUrlSync(url);
            // Wait until the page has settled at its minimum scale.
            pollOnUiThread(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    return mPageMinimumScale ==
                            mTestHelperBridge.getOnScaleChangedHelper().getNewScale();
                }
            });
            assertTrue("Should be able to zoom in", canZoomInOnUiThread());
            assertFalse("Should not be able to zoom out", canZoomOutOnUiThread());
        } catch (Exception e) {
            // fixed: print the trace BEFORE failing - assertTrue(false) throws,
            // so anything after it is dead code and the cause would be lost
            e.printStackTrace();
            assertTrue(false);
        } catch (Throwable e) {
            // TODO: handle exception
            e.printStackTrace();
            assertTrue(false);
        }
    }

    // zoomBy must be clamped to the [minimum scale, MAXIMUM_SCALE] range.
    @SmallTest
    public void testOnZoomByLimited() {
        try {
            final float MAXIMUM_SCALE = 2.0f;
            final float mPageMinimumScale = 0.5f;
            String url = "file:///android_asset/zoom.html";
            assertFalse("Should not be able to zoom in", canZoomInOnUiThread());
            loadUrlSync(url);
            pollOnUiThread(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    return mPageMinimumScale ==
                            mTestHelperBridge.getOnScaleChangedHelper().getNewScale();
                }
            });
            // Request 4x: must be clamped to the maximum scale.
            zoomByOnUiThreadAndWait(4.0f);
            pollOnUiThread(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    return MAXIMUM_SCALE ==
                            mTestHelperBridge.getOnScaleChangedHelper().getNewScale();
                }
            });
            // Halving from the maximum is within range and must be applied exactly.
            zoomByOnUiThreadAndWait(0.5f);
            pollOnUiThread(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    return MAXIMUM_SCALE * 0.5f ==
                            mTestHelperBridge.getOnScaleChangedHelper().getNewScale();
                }
            });
            // A tiny factor would undershoot: must be clamped to the minimum scale.
            zoomByOnUiThreadAndWait(0.01f);
            pollOnUiThread(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    return mPageMinimumScale ==
                            mTestHelperBridge.getOnScaleChangedHelper().getNewScale();
                }
            });
        } catch (Exception e) {
            // fixed: print the trace BEFORE failing (see testOnCanZoomInAndOut)
            e.printStackTrace();
            assertTrue(false);
        } catch (Throwable e) {
            // TODO: handle exception
            e.printStackTrace();
            assertTrue(false);
        }
    }

    // Zooming all the way in and then all the way out must round-trip:
    // each direction becomes unavailable at its limit, re-enabling the other.
    @SmallTest
    public void testOnZoomInAndOut() {
        try {
            final float mPageMinimumScale = 0.5f;
            String url = "file:///android_asset/zoom.html";
            assertFalse("Should not be able to zoom in", canZoomInOnUiThread());
            loadUrlSync(url);
            pollOnUiThread(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    return mPageMinimumScale ==
                            mTestHelperBridge.getOnScaleChangedHelper().getNewScale();
                }
            });
            while (canZoomInOnUiThread()) {
                zoomInOnUiThreadAndWait();
            }
            assertTrue("Should be able to zoom out", canZoomOutOnUiThread());
            while (canZoomOutOnUiThread()) {
                zoomOutOnUiThreadAndWait();
            }
            assertTrue("Should be able to zoom in", canZoomInOnUiThread());
        } catch (Exception e) {
            // fixed: print the trace BEFORE failing (see testOnCanZoomInAndOut)
            e.printStackTrace();
            assertTrue(false);
        } catch (Throwable e) {
            // TODO: handle exception
            e.printStackTrace();
            assertTrue(false);
        }
    }

    // Each configured Accept-Language value must be reflected by
    // navigator.languages as a JSON array of the comma-separated entries.
    // (Method name keeps the historical "Lanuages" typo: renaming a test
    // method would change how runners identify it.)
    @SmallTest
    public void testSetAcceptLanuages() throws Throwable {
        String result;
        final String script = "navigator.languages";
        final String[] languages = {"en;q=0.7", "zh-cn", "da,en-gb;q=0.8,en;q=0.7"};
        final String[] expectedLanguages = {"[\"en;q=0.7\"]", "[\"zh-cn\"]",
                "[\"da\",\"en-gb;q=0.8\",\"en;q=0.7\"]"};
        result = executeJavaScriptAndWaitForResult(script);
        assertNotNull(result);
        for (int i = 0; i < languages.length; i++) {
            setAcceptLanguages(languages[i]);
            result = executeJavaScriptAndWaitForResult(script);
            assertEquals(expectedLanguages[i], result);
        }
    }

    // Clearing the cache for a single URL must force a re-fetch of that URL
    // only; other cached URLs must still be served from cache.
    @SmallTest
    public void testClearCacheForSingleFile() throws Throwable {
        final String pagePath = "/clear_cache_test.html";
        final String otherPagePath = "/clear_other_cache_test.html";
        List<Pair<String, String>> headers = new ArrayList<Pair<String, String>>();
        // Set Cache-Control headers to cache this request. One century should be long enough.
        headers.add(Pair.create("Cache-Control", "max-age=3153600000"));
        headers.add(Pair.create("Last-Modified", "Mon, 12 May 2014 00:00:00 GMT"));
        final String pageUrl = mWebServer.setResponse(
                pagePath, "<html><body>foo</body></html>", headers);
        final String otherPageUrl = mWebServer.setResponse(
                otherPagePath, "<html><body>foo</body></html>", headers);

        // First load to populate cache.
        clearSingleCacheOnUiThread(pageUrl);
        loadUrlSync(pageUrl);
        assertEquals(1, mWebServer.getRequestCount(pagePath));
        // Load about:blank so next load is not treated as reload by XWalkView and force
        // revalidate with the server.
        loadUrlSync("about:blank");

        // No clearCache call, so should be loaded from cache.
        loadUrlSync(pageUrl);
        assertEquals(1, mWebServer.getRequestCount(pagePath));
        loadUrlSync(otherPageUrl);
        assertEquals(1, mWebServer.getRequestCount(otherPagePath));
        // Same as above.
        loadUrlSync("about:blank");

        // Clear cache, so should hit server again.
        clearSingleCacheOnUiThread(pageUrl);
        loadUrlSync(pageUrl);
        assertEquals(2, mWebServer.getRequestCount(pagePath));
        // otherPageUrl was not cleared, so should be loaded from cache.
        loadUrlSync(otherPageUrl);
        assertEquals(1, mWebServer.getRequestCount(otherPagePath));
        // Same as above.
        loadUrlSync("about:blank");

        // Do not clear cache, so should be loaded from cache.
        clearCacheOnUiThread(false);
        loadUrlSync(pageUrl);
        assertEquals(2, mWebServer.getRequestCount(pagePath));
    }
}
/*
 * Copyright (C) 2017 Synacts GmbH, Switzerland (info@synacts.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.digitalid.utility.generator.information.method;

import java.util.HashMap;
import java.util.Map;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;

import net.digitalid.utility.annotations.method.Impure;
import net.digitalid.utility.annotations.method.Pure;
import net.digitalid.utility.annotations.method.PureWithSideEffects;
import net.digitalid.utility.collaboration.annotations.TODO;
import net.digitalid.utility.collaboration.enumerations.Author;
import net.digitalid.utility.collaboration.enumerations.Priority;
import net.digitalid.utility.contracts.Require;
import net.digitalid.utility.functional.iterables.FiniteIterable;
import net.digitalid.utility.generator.annotations.generators.GenerateBuilder;
import net.digitalid.utility.generator.annotations.generators.GenerateSubclass;
import net.digitalid.utility.generator.annotations.meta.Interceptor;
import net.digitalid.utility.generator.generators.BuilderGenerator;
import net.digitalid.utility.generator.generators.SubclassGenerator;
import net.digitalid.utility.generator.information.type.TypeInformation;
import net.digitalid.utility.generator.interceptor.MethodInterceptor;
import net.digitalid.utility.processing.logging.ProcessingLog;
import net.digitalid.utility.processing.logging.SourcePosition;
import net.digitalid.utility.processing.utility.ProcessingUtility;
import net.digitalid.utility.processing.utility.StaticProcessingEnvironment;
import net.digitalid.utility.processing.utility.TypeImporter;
import net.digitalid.utility.string.Strings;
import net.digitalid.utility.validation.annotations.generation.Default;
import net.digitalid.utility.validation.annotations.generation.Recover;
import net.digitalid.utility.validation.annotations.type.Immutable;
import net.digitalid.utility.validation.processing.AnnotationHandlerUtility;
import net.digitalid.utility.validation.validator.MethodAnnotationValidator;
import net.digitalid.utility.validation.validator.ValueAnnotationValidator;

import com.sun.tools.javac.code.Type;

/**
 * This type collects the relevant information about a method for generating a {@link SubclassGenerator subclass} and {@link BuilderGenerator builder}.
 */
@Immutable
public class MethodInformation extends ExecutableInformation {

    /* -------------------------------------------------- Type -------------------------------------------------- */

    /**
     * Returns whether this method declares its own generic parameters.
     */
    @Pure
    public boolean isGeneric() {
        return !getType().getTypeVariables().isEmpty();
    }

    /* -------------------------------------------------- Return Type -------------------------------------------------- */

    /**
     * Returns whether this method has the given return type.
     */
    @Pure
    public boolean hasReturnType(@Nonnull String desiredTypeName) {
        // Compares the raw string form of the return type against the desired name.
        final @Nonnull String returnTypeName = getElement().getReturnType().toString();
        ProcessingLog.verbose("Return type: $, desired type: $", returnTypeName, desiredTypeName);
        return returnTypeName.equals(desiredTypeName);
    }

    /**
     * Returns whether this method has the given return type.
     */
    @Pure
    public boolean hasReturnType(@Nonnull Class<?> type) {
        return hasReturnType(type.getCanonicalName());
    }

    /**
     * Returns whether this method has a return type (does not return void).
     */
    @Pure
    public boolean hasReturnType() {
        return getElement().getReturnType().getKind() != TypeKind.VOID;
    }

    /* -------------------------------------------------- Getters and Setters -------------------------------------------------- */

    /**
     * Returns whether this method is a getter.
     */
    @Pure
    public boolean isGetter() {
        // TODO: Remove the static check (see ProcessingUtility#isGetter)?
        // A getter is non-static, non-generic, throws nothing, takes no parameters, returns a value,
        // and is named get* - or is/has/can* when the return type is boolean.
        return !isStatic() && !isGeneric() && !throwsExceptions() && !hasParameters() && hasReturnType() && (getName().startsWith("get") || (getName().startsWith("is") || getName().startsWith("has") || getName().startsWith("can")) && hasReturnType(boolean.class));
    }

    /**
     * Returns whether this method is a setter.
     */
    @Pure
    public boolean isSetter() {
        return !isGeneric() && !throwsExceptions() && hasSingleParameter() && !hasReturnType() && getName().startsWith("set");
    }

    /**
     * Returns the name of the field that corresponds to this getter or setter method.
     *
     * @require isGetter() || isSetter() : "The method is neither a getter nor a setter.";
     */
    @Pure
    public @Nonnull String getFieldName() {
        Require.that(isGetter() || isSetter()).orThrow("The method $ is neither a getter nor a setter.", getName());

        // Strips the "is" prefix (2 characters) or any other recognized prefix (get/set/has/can: 3
        // characters) and lowercases the first remaining character.
        return Strings.lowercaseFirstCharacter(getName().substring(getName().startsWith("is") ? 2 : 3));
    }

    /* -------------------------------------------------- Thrown Types -------------------------------------------------- */

    /**
     * Returns the types thrown by this method, resolved as a member of the containing type
     * (so type variables are substituted according to the containing type's parameterization).
     */
    public @Nonnull FiniteIterable<@Nonnull TypeMirror> getThrownTypes() {
        final @Nonnull ExecutableType executableType = (ExecutableType) StaticProcessingEnvironment.getTypeUtils().asMemberOf(getContainingType(), getElement());
        return FiniteIterable.of(executableType.getThrownTypes());
    }

    /* -------------------------------------------------- Annotations -------------------------------------------------- */

    /**
     * Returns whether the method is annotated with '@Pure'.
     */
    @Pure
    public boolean isPure() {
        // '@PureWithSideEffects' also counts as pure for this check.
        return hasAnnotation(Pure.class) || hasAnnotation(PureWithSideEffects.class);
    }

    /**
     * Returns whether the method is annotated with '@Impure'.
     */
    @Pure
    public boolean isImpure() {
        return hasAnnotation(Impure.class);
    }

    /**
     * Returns whether the method is annotated with '@Recover'.
     */
    @Pure
    public boolean isRecover() {
        return hasAnnotation(Recover.class);
    }

    /**
     * Returns whether the method is annotated with '@Test'.
     */
    // NOTE(review): also returns true for '@BeforeClass' and '@AfterClass', i.e. this really
    // means "is a JUnit lifecycle or test method" - the name undersells it slightly.
    @Pure
    public boolean isTest() {
        return hasAnnotation("org.junit.Test") || hasAnnotation("org.junit.BeforeClass") || hasAnnotation("org.junit.AfterClass");
    }

    /**
     * Returns whether the type containing this method is an enum.
     */
    @Pure
    public boolean isEnum() {
        return getContainingType().asElement().getKind() == ElementKind.ENUM;
    }

    /* -------------------------------------------------- Return Value Validators -------------------------------------------------- */

    // Maps each value-validating annotation on the return value to its validator
    // (empty unless a subclass or builder is generated - see the constructor).
    private final @Nonnull Map<@Nonnull AnnotationMirror, @Nonnull ValueAnnotationValidator> returnValueValidators;

    /**
     * Returns the validators for the annotations on the return value of this method.
     */
    @Pure
    public @Nonnull Map<@Nonnull AnnotationMirror, @Nonnull ValueAnnotationValidator> getReturnValueValidators() {
        return returnValueValidators;
    }

    /* -------------------------------------------------- Method Validators -------------------------------------------------- */

    // Maps each method-validating annotation to its validator
    // (empty unless a subclass or builder is generated - see the constructor).
    private final @Nonnull Map<@Nonnull AnnotationMirror, @Nonnull MethodAnnotationValidator> methodValidators;

    /**
     * Returns the validators for the method annotations of this method.
     */
    @Pure
    public @Nonnull Map<@Nonnull AnnotationMirror, @Nonnull MethodAnnotationValidator> getMethodValidators() {
        return methodValidators;
    }

    /* -------------------------------------------------- Method Interceptors -------------------------------------------------- */

    // Maps each '@Interceptor'-meta-annotated annotation to its interceptor
    // (empty unless a subclass is generated - see the constructor).
    private final @Nonnull Map<@Nonnull AnnotationMirror, @Nonnull MethodInterceptor> methodInterceptors;

    /**
     * Returns the interceptors that intercept the call to this method.
     */
    @Pure
    public @Nonnull Map<@Nonnull AnnotationMirror, @Nonnull MethodInterceptor> getMethodInterceptors() {
        return methodInterceptors;
    }

    /* -------------------------------------------------- Constructors -------------------------------------------------- */

    /**
     * Creates the method information for the given method element, resolved against the given
     * containing type, collecting interceptors and validators and reporting purity violations.
     *
     * @require element.getKind() == ElementKind.METHOD : "The element has to be a method.";
     */
    protected MethodInformation(@Nonnull ExecutableElement element, @Nonnull DeclaredType containingType, @Nonnull TypeInformation typeInformation) {
        super(element, containingType, typeInformation);

        Require.that(element.getKind() == ElementKind.METHOD).orThrow("The element $ has to be a method.", SourcePosition.of(element));

        // Interceptors are only relevant when a subclass is generated, because only the generated
        // subclass can wrap the method call.
        if (typeInformation.hasAnnotation(GenerateSubclass.class)) {
            this.methodInterceptors = AnnotationHandlerUtility.getAnnotationHandlers(element, Interceptor.class, MethodInterceptor.class);
        } else {
            this.methodInterceptors = new HashMap<>();
            ProcessingLog.verbose("Ignoring method interceptors on method $ because no subclass is generated for type $.", getName(), typeInformation.getName());
        }

        // Enforce the library convention that every method is exactly one of '@Pure' or '@Impure'
        // (tests and enum methods are exempt), and that setters are never '@Pure'.
        if (isDeclaredInDigitalIDLibrary()) {
            if (!isEnum() && !isTest() && isPure() == isImpure()) { ProcessingLog.error("A method has to be either '@Pure' or '@Impure':", SourcePosition.of(element)); }
            if (isSetter() && isPure()) { ProcessingLog.error("A setter may not be '@Pure':", SourcePosition.of(element)); }
        }

        // Validators are only collected when code is generated that would actually invoke them.
        if (typeInformation.hasAnnotation(GenerateSubclass.class) || typeInformation.hasAnnotation(GenerateBuilder.class)) {
            this.methodValidators = AnnotationHandlerUtility.getMethodValidators(getElement());
            this.returnValueValidators = AnnotationHandlerUtility.getValueValidators(getElement());
        } else {
            this.methodValidators = new HashMap<>();
            this.returnValueValidators = new HashMap<>();
            ProcessingLog.verbose("Ignoring method and return value validators on method $ because neither a subclass nor a builder is generated for type $.", getName(), typeInformation.getName());
        }

        // TODO: This is just a temporary hack to ensure that the annotations on the parameters are checked in any case.
        for (@Nonnull VariableElement parameter : element.getParameters()) {
            AnnotationHandlerUtility.getValueValidators(parameter);
        }
    }

    /**
     * Returns the method information of the given method element and containing type.
     *
     * @require element.getKind() == ElementKind.METHOD : "The element has to be a method.";
     */
    @Pure
    public static @Nonnull MethodInformation of(@Nonnull ExecutableElement element, @Nonnull DeclaredType containingType, @Nonnull TypeInformation typeInformation) {
        return new MethodInformation(element, containingType, typeInformation);
    }

    /* -------------------------------------------------- Modifiers -------------------------------------------------- */

    /**
     * Returns whether the represented {@link #getElement() element} is synchronized.
     */
    @Pure
    public boolean isSynchronized() {
        return getModifiers().contains(Modifier.SYNCHRONIZED);
    }

    /**
     * Returns the modifiers for the method that overrides this method.
     */
    @Pure
    public @Nonnull String getModifiersForOverridingMethod() {
        // Only visibility and 'synchronized' are carried over; 'final', 'abstract' etc. must not be.
        final @Nonnull StringBuilder result = new StringBuilder();
        if (isPublic()) { result.append("public "); }
        if (isProtected()) { result.append("protected "); }
        if (isSynchronized()) { result.append("synchronized "); }
        return result.toString();
    }

    /* -------------------------------------------------- Default Values -------------------------------------------------- */

    /**
     * Returns the default value for the field behind this getter: the value of a '@Default'
     * annotation on the field if present, otherwise "false"/"0" for primitives and "null"
     * for reference types. Returns null when this method is not a getter or has no
     * corresponding field.
     */
    @Pure
    @TODO(task = "Please document public methods.", date = "2015-05-16", author = Author.KASPAR_ETTER, assignee = Author.STEPHANIE_STROKA, priority = Priority.LOW)
    public @Nullable String getDefaultValue() {
        if (isGetter()) {
            // Find the field whose simple name matches the name derived from this getter.
            final @Nonnull FiniteIterable<@Nonnull VariableElement> fields = ProcessingUtility.getAllFields((TypeElement) getContainingType().asElement());
            final @Nullable VariableElement fieldElement = fields.findFirst(field -> field.getSimpleName().contentEquals(getFieldName()));
            if (fieldElement == null) {
                ProcessingLog.debugging("Found the method $, which looks like a getter, but does not have a corresponding field.", getName());
                return null;
            }
            final @Nullable Default defaultAnnotation = fieldElement.getAnnotation(Default.class);
            if (defaultAnnotation != null) {
                return defaultAnnotation.value();
            } else {
                if (getType().getKind().isPrimitive()) {
                    final @Nonnull String typeName;
                    // Annotated primitive types (javac-internal Type.AnnotatedType) must be unwrapped
                    // so that the name is e.g. "boolean" and not "@Nonnull boolean".
                    if (getType() instanceof Type.AnnotatedType) {
                        Type.AnnotatedType annotatedType = (Type.AnnotatedType) getType();
                        final Type type = annotatedType.unannotatedType();
                        typeName = type.toString();
                    } else {
                        typeName = getType().toString();
                    }
                    if (typeName.equals("boolean")) {
                        return "false";
                    } else {
                        // All other primitives (numeric and char) default to zero.
                        return "0";
                    }
                } else {
                    return "null";
                }
            }
        } else {
            return null;
        }
    }

    /* -------------------------------------------------- Annotations to String -------------------------------------------------- */

    // TODO: Review and simplify the following methods!

    /**
     * Returns all return type annotations of the method as a space-separated string.
     */
    public @Nonnull String getReturnTypeAnnotations(@Nonnull TypeImporter typeImporter) {
        // Resolve the method as a member of the containing type so type variables are substituted.
        final @Nonnull TypeMirror typeMirror = StaticProcessingEnvironment.getTypeUtils().asMemberOf(getContainingType(), getElement());
        final @Nonnull ExecutableType executableType = (ExecutableType) typeMirror;
        final @Nonnull TypeMirror returnType = executableType.getReturnType();
        final @Nonnull String returnTypeAnnotationsAsString;
        // Annotation mirrors are only available on the javac-internal annotated type.
        if (returnType instanceof Type.AnnotatedType) {
            final Type.@Nonnull AnnotatedType annotatedType = (Type.AnnotatedType) returnType;
            returnTypeAnnotationsAsString = ProcessingUtility.getAnnotationsAsString(FiniteIterable.of(annotatedType.getAnnotationMirrors()), typeImporter);
        } else {
            returnTypeAnnotationsAsString = "";
        }
        return returnTypeAnnotationsAsString;
    }

    /**
     * Returns the return type of this method resolved against the containing type,
     * or null if the method returns void.
     */
    public @Nullable TypeMirror getReturnType() {
        if (!hasReturnType()) { return null; }

        final @Nonnull TypeMirror typeMirror = StaticProcessingEnvironment.getTypeUtils().asMemberOf(getContainingType(), getElement());
        Require.that(typeMirror instanceof ExecutableType).orThrow("Expected ExecutableType, but got $", getContainingType());
        final @Nonnull ExecutableType executableType = (ExecutableType) typeMirror;
        return executableType.getReturnType();
    }

    /**
     * Returns the source representation of this method's return type (annotations included,
     * types imported via the given type importer), or null if the method returns void.
     */
    public @Nullable String getReturnType(@Nonnull TypeImporter typeImporter) {
        final @Nullable TypeMirror returnType = getReturnType();
        if (returnType == null) { return null; }

        final @Nonnull StringBuilder returnTypeAsString = new StringBuilder();
        // For annotated types, print the annotations first, then the unannotated type.
        if (returnType instanceof Type.AnnotatedType) {
            final Type.AnnotatedType annotatedType = (Type.AnnotatedType) returnType;
            returnTypeAsString.append(getReturnTypeAnnotations(typeImporter));
            returnTypeAsString.append(typeImporter.importIfPossible(annotatedType.unannotatedType()));
        } else {
            returnTypeAsString.append(typeImporter.importIfPossible(returnType));
        }
        ProcessingLog.debugging("Stringifying return type: $ = $", returnType.getClass(), returnTypeAsString);
        return returnTypeAsString.toString();
    }

    /**
     * Returns whether at least one {@link MethodInterceptor} is declared on this method.
     */
    // NOTE(review): the name suggests this decides whether a generated subclass can implement
    // the method; the code only checks for interceptors - confirm the intended contract.
    @Pure
    public boolean canBeImplemented() {
        return AnnotationHandlerUtility.getAnnotationHandlers(getElement(), Interceptor.class, MethodInterceptor.class).size() > 0;
    }

}
/* * Copyright (c) 2010-2017 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.web.page.admin.server; import com.evolveum.midpoint.gui.api.GuiStyleConstants; import com.evolveum.midpoint.gui.api.model.ReadOnlyModel; import com.evolveum.midpoint.gui.api.page.PageBase; import com.evolveum.midpoint.gui.api.util.WebComponentUtil; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.xml.XmlTypeConverter; import com.evolveum.midpoint.schema.util.WfContextUtil; import com.evolveum.midpoint.web.component.DateLabelComponent; import com.evolveum.midpoint.web.component.ObjectSummaryPanel; import com.evolveum.midpoint.web.component.refresh.AutoRefreshDto; import com.evolveum.midpoint.web.component.refresh.AutoRefreshPanel; import com.evolveum.midpoint.web.component.util.SummaryTagSimple; import com.evolveum.midpoint.web.component.util.VisibleEnableBehaviour; import com.evolveum.midpoint.web.component.wf.WfGuiUtil; import com.evolveum.midpoint.web.model.ContainerableFromPrismObjectModel; import com.evolveum.midpoint.web.page.admin.server.dto.ApprovalOutcomeIcon; import com.evolveum.midpoint.web.page.admin.server.dto.OperationResultStatusPresentationProperties; import com.evolveum.midpoint.web.page.admin.server.dto.TaskDto; import com.evolveum.midpoint.web.page.admin.server.dto.TaskDtoExecutionStatus; import com.evolveum.midpoint.xml.ns._public.common.common_3.*; import org.apache.commons.lang3.time.DurationFormatUtils; 
import org.apache.wicket.datetime.PatternDateConverter;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;

import java.util.Date;

/**
 * Summary panel (header box) of the task edit page. Shows the task display name plus
 * summary tags for execution status, last operation result, workflow approval outcome,
 * and an auto-refresh control. Workflow-related data is read from the parent page's
 * {@link TaskDto}, the rest from the {@link TaskType} prism object model.
 *
 * @author mederly
 */
public class TaskSummaryPanel extends ObjectSummaryPanel<TaskType> {
    private static final long serialVersionUID = -5077637168906420769L;

    // Wicket component ids of the individual summary tags.
    private static final String ID_TAG_EXECUTION_STATUS = "summaryTagExecutionStatus";
    private static final String ID_TAG_RESULT = "summaryTagResult";
    private static final String ID_TAG_WF_OUTCOME = "wfOutcomeTag";
    private static final String ID_TAG_EMPTY = "emptyTag";          // NOTE(review): not referenced in this class — presumably used by markup; confirm
    private static final String ID_TAG_REFRESH = "refreshTag";

    // Owning page; source of the TaskDto used by the workflow-related tags and titles.
    private PageTaskEdit parentPage;

    /**
     * Builds the panel and registers all summary tags.
     *
     * @param id           wicket id
     * @param model        prism object model of the edited task
     * @param refreshModel model backing the auto-refresh tag
     * @param parentPage   owning page (also supplies the TaskDto)
     */
    public TaskSummaryPanel(String id, IModel<PrismObject<TaskType>> model, IModel<AutoRefreshDto> refreshModel, final PageTaskEdit parentPage) {
        super(id, TaskType.class, model, parentPage);
        initLayoutCommon(parentPage);
        this.parentPage = parentPage;

        // Adapter so tags can work with TaskType directly instead of PrismObject<TaskType>.
        IModel<TaskType> containerModel = new ContainerableFromPrismObjectModel<>(model);

        // Tag 1: execution status (running/suspended/closed/...), derived from the task
        // execution status plus whether a node currently observes the task as running.
        SummaryTagSimple<TaskType> tagExecutionStatus = new SummaryTagSimple<TaskType>(ID_TAG_EXECUTION_STATUS, containerModel) {
            @Override
            protected void initialize(TaskType taskType) {
                TaskDtoExecutionStatus status = TaskDtoExecutionStatus.fromTaskExecutionStatus(taskType.getExecutionStatus(), taskType.getNodeAsObserved() != null);
                String icon = getIconForExecutionStatus(status);
                setIconCssClass(icon);
                if (status != null) {
                    setLabel(PageBase.createStringResourceStatic(TaskSummaryPanel.this, status).getString());
                }
                // TODO setColor
            }
        };
        addTag(tagExecutionStatus);

        // Tag 2: last operation result status (success/warning/fatal error/...).
        SummaryTagSimple<TaskType> tagResult = new SummaryTagSimple<TaskType>(ID_TAG_RESULT, containerModel) {
            @Override
            protected void initialize(TaskType taskType) {
                OperationResultStatusType resultStatus = taskType.getResultStatus();
                // parseOperationalResultStatus handles null status, so the icon is always set.
                String icon = OperationResultStatusPresentationProperties.parseOperationalResultStatus(resultStatus).getIcon();
                setIconCssClass(icon);
                if (resultStatus != null) {
                    setLabel(PageBase.createStringResourceStatic(TaskSummaryPanel.this, resultStatus).getString());
                }
                // TODO setColor
            }
        };
        addTag(tagResult);

        // Tag 3: workflow approval outcome (approved/rejected); only visible for workflow
        // tasks that already have an outcome (see the VisibleEnableBehaviour below).
        SummaryTagSimple<TaskType> tagOutcome = new SummaryTagSimple<TaskType>(ID_TAG_WF_OUTCOME, containerModel) {
            @Override
            protected void initialize(TaskType taskType) {
                String icon, name;
                if (parentPage.getTaskDto().getWorkflowOutcome() == null) {
                    // shouldn't occur! (tag is hidden when the outcome is null)
                    return;
                }
                if (parentPage.getTaskDto().getWorkflowOutcome()) {
                    icon = ApprovalOutcomeIcon.APPROVED.getIcon();
                    name = "approved";
                } else {
                    icon = ApprovalOutcomeIcon.REJECTED.getIcon();
                    name = "rejected";
                }
                setIconCssClass(icon);
                setLabel(PageBase.createStringResourceStatic(TaskSummaryPanel.this, "TaskSummaryPanel." + name).getString());
            }
        };
        tagOutcome.add(new VisibleEnableBehaviour() {
            @Override
            public boolean isVisible() {
                return parentPage.getTaskDto().getWorkflowOutcome() != null;
            }
        });
        addTag(tagOutcome);

        // Tag 4: auto-refresh control; shown only while there is no workflow outcome.
        final AutoRefreshPanel refreshTag = new AutoRefreshPanel(ID_TAG_REFRESH, refreshModel, parentPage, true);
        refreshTag.setOutputMarkupId(true);
        refreshTag.add(new VisibleEnableBehaviour() {
            @Override
            public boolean isVisible() {
                return parentPage.getTaskDto().getWorkflowOutcome() == null;            // because otherwise there are too many tags to fit into window
            }
        });
        addTag(refreshTag);
    }

    /**
     * Maps an execution status to a FontAwesome icon class; question mark for unknown/null.
     */
    private String getIconForExecutionStatus(TaskDtoExecutionStatus status) {
        if (status == null) {
            return "fa fa-fw fa-question-circle text-warning";
        }
        switch (status) {
            case RUNNING: return "fa fa-fw fa-spinner";
            case RUNNABLE: return "fa fa-fw fa-hand-o-up";
            case SUSPENDED: return "fa fa-fw fa-bed";
            case SUSPENDING: return "fa fa-fw fa-bed";
            case WAITING: return "fa fa-fw fa-clock-o";
            case CLOSED: return "fa fa-fw fa-power-off";
            default: return "";
        }
    }

    @Override
    protected String getIconCssClass() {
        return GuiStyleConstants.CLASS_OBJECT_TASK_ICON;
    }

    @Override
    protected String getIconBoxAdditionalCssClass() {        // TODO
        return "summary-panel-task";
    }

    @Override
    protected String getBoxAdditionalCssClass() {            // TODO
        return "summary-panel-task";
    }

    @Override
    protected boolean isIdentifierVisible() {
        return false;
    }

    @Override
    protected String getTagBoxCssClass() {
        // wide variant so the refresh tag fits
        return "summary-tag-box-wide";
    }

    /** Human-readable description of the current approval stage, or null when not applicable. */
    private String getStageInfo() {
        return WfContextUtil.getStageInfo(parentPage.getTaskDto().getWorkflowContext());
    }

    /** Localized date on which the workflow request was made. */
    public String getRequestedOn() {
        return WebComponentUtil.getLocalizedDate(parentPage.getTaskDto().getRequestedOn(), DateLabelComponent.MEDIUM_MEDIUM_STYLE);
    }

    @Override
    protected IModel<String> getDisplayNameModel() {
        return new ReadOnlyModel<>(() -> {
            // temporary code
            // Prefer localized workflow process name, then localized task name, then raw task name.
            TaskDto taskDto = parentPage.getTaskDto();
            String name = WfGuiUtil.getLocalizedProcessName(taskDto.getWorkflowContext(), TaskSummaryPanel.this);
            if (name == null) {
                name = WfGuiUtil.getLocalizedTaskName(taskDto.getWorkflowContext(), TaskSummaryPanel.this);
            }
            if (name == null) {
                name = taskDto.getName();
            }
            return name;
        });
    }

    @Override
    protected IModel<String> getTitleModel() {
        return new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                TaskDto taskDto = parentPage.getTaskDto();
                if (taskDto.isWorkflow()) {
                    // Workflow task: show who requested the approval.
                    return getString("TaskSummaryPanel.requestedBy", taskDto.getRequestedBy());
                } else {
                    // Ordinary task: show progress (with/without known total) plus state suffix.
                    TaskType taskType = getModelObject();
                    String rv;
                    if (taskType.getExpectedTotal() != null) {
                        rv = createStringResource("TaskSummaryPanel.progressWithTotalKnown", taskType.getProgress(), taskType.getExpectedTotal())
                                .getString();
                    } else {
                        rv = createStringResource("TaskSummaryPanel.progressWithTotalUnknown", taskType.getProgress()).getString();
                    }
                    if (taskDto.isSuspended()) {
                        rv += " " + getString("TaskSummaryPanel.progressIfSuspended");
                    } else if (taskDto.isClosed()) {
                        rv += " " + getString("TaskSummaryPanel.progressIfClosed");
                    } else if (taskDto.isWaiting()) {
                        rv += " " + getString("TaskSummaryPanel.progressIfWaiting");
                    } else if (taskDto.getStalledSince() != null) {
                        rv += " " + getString("TaskSummaryPanel.progressIfStalled", WebComponentUtil.formatDate(new Date(parentPage.getTaskDto().getStalledSince())));
                    }
                    return rv;
                }
            }
        };
    }

    @Override
    protected IModel<String> getTitle2Model() {
        return new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                if (parentPage.getTaskDto().isWorkflow()) {
                    // Workflow task: show when the request was made.
                    return getString("TaskSummaryPanel.requestedOn", getRequestedOn());
                } else {
                    // Ordinary task: show the name of the last successfully processed object, if any.
                    TaskType taskType = getModelObject();
                    if (taskType.getOperationStats() != null && taskType.getOperationStats().getIterativeTaskInformation() != null &&
                            taskType.getOperationStats().getIterativeTaskInformation().getLastSuccessObjectName() != null) {
                        return createStringResource("TaskSummaryPanel.lastProcessed",
                                taskType.getOperationStats().getIterativeTaskInformation().getLastSuccessObjectName()).getString();
                    } else {
                        return "";
                    }
                }
            }
        };
    }

    @Override
    protected IModel<String> getTitle3Model() {
        return new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                if (parentPage.getTaskDto().isWorkflow()) {
                    // Workflow task: show the current approval stage (if known).
                    String stageInfo = getStageInfo();
                    if (stageInfo != null) {
                        return getString("TaskSummaryPanel.stage", stageInfo);
                    } else {
                        return null;
                    }
                }
                // Ordinary task: show execution-time information of the last run.
                TaskType taskType = getModel().getObject();
                if (taskType == null) {
                    return null;
                }
                long started = XmlTypeConverter.toMillis(taskType.getLastRunStartTimestamp());
                long finished = XmlTypeConverter.toMillis(taskType.getLastRunFinishTimestamp());
                if (started == 0) {
                    return null;
                }
                // "Not finished" branch: task is observably running, or no finish timestamp,
                // or the finish timestamp predates the start (i.e. belongs to a previous run).
                if (taskType.getExecutionStatus() == TaskExecutionStatusType.RUNNABLE && taskType.getNodeAsObserved() != null
                        || finished == 0 || finished < started) {
                    PatternDateConverter pdc = new PatternDateConverter(WebComponentUtil.getLocalizedDatePattern(DateLabelComponent.SHORT_MEDIUM_STYLE), true);
                    String date = pdc.convertToString(new Date(started), WebComponentUtil.getCurrentLocale());
                    return getString("TaskStatePanel.message.executionTime.notFinished", date,
                            DurationFormatUtils.formatDurationHMS(System.currentTimeMillis() - started));
                } else {
                    PatternDateConverter pdc = new PatternDateConverter(WebComponentUtil.getLocalizedDatePattern(DateLabelComponent.SHORT_MEDIUM_STYLE), true);
                    String startedDate = pdc.convertToString(new Date(started), WebComponentUtil.getCurrentLocale());
                    String finishedDate = pdc.convertToString(new Date(finished), WebComponentUtil.getCurrentLocale());
                    return getString("TaskStatePanel.message.executionTime.finished", startedDate, finishedDate,
                            DurationFormatUtils.formatDurationHMS(finished - started));
                }
            }
        };
    }

    /** Accessor for the auto-refresh tag, used by the parent page to control refreshing. */
    public AutoRefreshPanel getRefreshPanel() {
        return (AutoRefreshPanel) getTag(ID_TAG_REFRESH);
    }
}
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.sns.model;

import java.io.Serializable;
import java.util.Objects;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Container for the parameters to the {@link com.amazonaws.services.sns.AmazonSNS#setTopicAttributes(SetTopicAttributesRequest) SetTopicAttributes operation}.
 * <p>
 * Allows a topic owner to set an attribute of the topic to a new value.
 * </p>
 *
 * @see com.amazonaws.services.sns.AmazonSNS#setTopicAttributes(SetTopicAttributesRequest)
 */
public class SetTopicAttributesRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The ARN of the topic to modify.
     */
    private String topicArn;

    /**
     * The name of the attribute you want to set. Only a subset of the
     * topic's attributes are mutable. <p>Valid values: <code>Policy</code> |
     * <code>DisplayName</code> | <code>DeliveryPolicy</code>
     */
    private String attributeName;

    /**
     * The new value for the attribute.
     */
    private String attributeValue;

    /**
     * Default constructor for a new SetTopicAttributesRequest object. Callers should use the
     * setter or fluent setter (with...) methods to initialize this object after creating it.
     */
    public SetTopicAttributesRequest() {}

    /**
     * Constructs a new SetTopicAttributesRequest object.
     * Callers should use the setter or fluent setter (with...) methods to
     * initialize any additional object members.
     *
     * @param topicArn The ARN of the topic to modify.
     * @param attributeName The name of the attribute you want to set. Only a
     * subset of the topic's attributes are mutable. <p>Valid values:
     * <code>Policy</code> | <code>DisplayName</code> |
     * <code>DeliveryPolicy</code>
     * @param attributeValue The new value for the attribute.
     */
    public SetTopicAttributesRequest(String topicArn, String attributeName, String attributeValue) {
        setTopicArn(topicArn);
        setAttributeName(attributeName);
        setAttributeValue(attributeValue);
    }

    /**
     * The ARN of the topic to modify.
     *
     * @return The ARN of the topic to modify.
     */
    public String getTopicArn() {
        return topicArn;
    }

    /**
     * The ARN of the topic to modify.
     *
     * @param topicArn The ARN of the topic to modify.
     */
    public void setTopicArn(String topicArn) {
        this.topicArn = topicArn;
    }

    /**
     * The ARN of the topic to modify.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param topicArn The ARN of the topic to modify.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public SetTopicAttributesRequest withTopicArn(String topicArn) {
        this.topicArn = topicArn;
        return this;
    }

    /**
     * The name of the attribute you want to set. Only a subset of the
     * topic's attributes are mutable. <p>Valid values: <code>Policy</code> |
     * <code>DisplayName</code> | <code>DeliveryPolicy</code>
     *
     * @return The name of the attribute you want to set.
     */
    public String getAttributeName() {
        return attributeName;
    }

    /**
     * The name of the attribute you want to set. Only a subset of the
     * topic's attributes are mutable. <p>Valid values: <code>Policy</code> |
     * <code>DisplayName</code> | <code>DeliveryPolicy</code>
     *
     * @param attributeName The name of the attribute you want to set.
     */
    public void setAttributeName(String attributeName) {
        this.attributeName = attributeName;
    }

    /**
     * The name of the attribute you want to set. Only a subset of the
     * topic's attributes are mutable. <p>Valid values: <code>Policy</code> |
     * <code>DisplayName</code> | <code>DeliveryPolicy</code>
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param attributeName The name of the attribute you want to set.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public SetTopicAttributesRequest withAttributeName(String attributeName) {
        this.attributeName = attributeName;
        return this;
    }

    /**
     * The new value for the attribute.
     *
     * @return The new value for the attribute.
     */
    public String getAttributeValue() {
        return attributeValue;
    }

    /**
     * The new value for the attribute.
     *
     * @param attributeValue The new value for the attribute.
     */
    public void setAttributeValue(String attributeValue) {
        this.attributeValue = attributeValue;
    }

    /**
     * The new value for the attribute.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param attributeValue The new value for the attribute.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public SetTopicAttributesRequest withAttributeValue(String attributeValue) {
        this.attributeValue = attributeValue;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Same output format as before; chained appends instead of concatenating
        // inside a single append call (which defeated the StringBuilder).
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getTopicArn() != null)
            sb.append("TopicArn: ").append(getTopicArn()).append(",");
        if (getAttributeName() != null)
            sb.append("AttributeName: ").append(getAttributeName()).append(",");
        if (getAttributeValue() != null)
            sb.append("AttributeValue: ").append(getAttributeValue());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        // Objects.hash reproduces the original hand-rolled formula exactly:
        // 31-based accumulation starting at 1, with null fields hashing to 0.
        return Objects.hash(getTopicArn(), getAttributeName(), getAttributeValue());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof SetTopicAttributesRequest))
            return false;
        SetTopicAttributesRequest other = (SetTopicAttributesRequest) obj;
        // Objects.equals is equivalent to the original xor/null-check pairs.
        return Objects.equals(getTopicArn(), other.getTopicArn())
                && Objects.equals(getAttributeName(), other.getAttributeName())
                && Objects.equals(getAttributeValue(), other.getAttributeValue());
    }

    @Override
    public SetTopicAttributesRequest clone() {
        return (SetTopicAttributesRequest) super.clone();
    }
}
package org.sagebionetworks.web.client.widget.table.v2.results; import static org.sagebionetworks.repo.model.table.QueryOptions.BUNDLE_MASK_LAST_UPDATED_ON; import static org.sagebionetworks.repo.model.table.QueryOptions.BUNDLE_MASK_QUERY_COLUMN_MODELS; import static org.sagebionetworks.repo.model.table.QueryOptions.BUNDLE_MASK_QUERY_COUNT; import static org.sagebionetworks.repo.model.table.QueryOptions.BUNDLE_MASK_QUERY_RESULTS; import static org.sagebionetworks.repo.model.table.QueryOptions.BUNDLE_MASK_QUERY_SELECT_COLUMNS; import static org.sagebionetworks.web.client.ServiceEntryPointUtils.fixServiceEntryPoint; import static org.sagebionetworks.web.client.widget.table.v2.results.QueryBundleUtils.DEFAULT_LIMIT; import static org.sagebionetworks.web.client.widget.table.v2.results.QueryBundleUtils.DEFAULT_OFFSET; import java.util.ArrayList; import java.util.List; import org.sagebionetworks.repo.model.ErrorResponseCode; import org.sagebionetworks.repo.model.asynch.AsynchronousResponseBody; import org.sagebionetworks.repo.model.table.FacetColumnRequest; import org.sagebionetworks.repo.model.table.Query; import org.sagebionetworks.repo.model.table.QueryBundleRequest; import org.sagebionetworks.repo.model.table.QueryResult; import org.sagebionetworks.repo.model.table.QueryResultBundle; import org.sagebionetworks.repo.model.table.Row; import org.sagebionetworks.repo.model.table.RowSet; import org.sagebionetworks.repo.model.table.SortDirection; import org.sagebionetworks.repo.model.table.SortItem; import org.sagebionetworks.web.client.GWTWrapper; import org.sagebionetworks.web.client.PopupUtilsView; import org.sagebionetworks.web.client.PortalGinInjector; import org.sagebionetworks.web.client.SynapseClientAsync; import org.sagebionetworks.web.client.cache.ClientCache; import org.sagebionetworks.web.client.utils.Callback; import org.sagebionetworks.web.client.utils.CallbackP; import org.sagebionetworks.web.client.widget.asynch.AsynchronousProgressHandler; import 
org.sagebionetworks.web.client.widget.asynch.AsynchronousProgressWidget;
import org.sagebionetworks.web.client.widget.entity.controller.SynapseAlert;
import org.sagebionetworks.web.client.widget.table.modal.fileview.TableType;
import org.sagebionetworks.web.client.widget.table.v2.TotalVisibleResultsWidget;
import org.sagebionetworks.web.client.widget.table.v2.results.facets.FacetsWidget;
import org.sagebionetworks.web.shared.asynch.AsynchType;
import org.sagebionetworks.web.shared.exceptions.BadRequestException;

import com.google.gwt.user.client.ui.IsWidget;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;

/**
 * This widget will execute a table query and show the resulting query results in a paginated view..
 *
 * Runs the query as an asynchronous Synapse job, caches the parts of the result bundle
 * that do not change between pages (columns, select columns, count), and coordinates
 * paging, sorting and facet selection.
 *
 * @author jmhill
 *
 */
public class TableQueryResultWidget implements TableQueryResultView.Presenter, IsWidget, PagingAndSortingListener {
    public static final String SCHEMA_CHANGED_MESSAGE = "The underlying Table/View schema has been changed so this query must be reset.";
    public static final String FACET_COLUMNS_CHANGED_MESSAGE = "requested facet column names must all be in the set";
    // Delay between polls while waiting for a recent view change to propagate (ms).
    public static final int ETAG_CHECK_DELAY_MS = 5000;
    public static final String VERIFYING_ETAG_MESSAGE = "Verifying that the recent changes have propagated through the system...";
    public static final String RUNNING_QUERY_MESSAGE = ""; // while running, just show loading spinner (and cancel)
    public static final String QUERY_CANCELED = "Query canceled";
    // We cache these parts of the QueryResultBundle
    private static final long CACHED_PARTS_MASK = BUNDLE_MASK_QUERY_COLUMN_MODELS | BUNDLE_MASK_QUERY_SELECT_COLUMNS | BUNDLE_MASK_QUERY_COUNT;
    // All bundle parts (used by the etag-verification query).
    private static final Long ALL_PARTS_MASK = new Long(255);

    SynapseClientAsync synapseClient;
    TableQueryResultView view;
    PortalGinInjector ginInjector;
    // Most recently received result bundle.
    QueryResultBundle bundle;
    TablePageWidget pageViewerWidget;
    QueryResultEditorWidget queryResultEditor;
    // The query currently driving this widget; mutated in place by paging/sorting/faceting.
    Query startingQuery;
    boolean isEditable;
    TableType tableType;
    QueryResultsListener queryListener;
    SynapseAlert synapseAlert;
    CallbackP<FacetColumnRequest> facetChangedHandler;
    Callback resetFacetsHandler;
    ClientCache clientCache;
    GWTWrapper gwt;
    // Monotonically increasing job counter; stale async callbacks are ignored by
    // comparing their captured index against this value.
    int currentJobIndex = 0;
    // Holds the cached column/select/count parts; null forces a full (re)fetch.
    QueryResultBundle cachedFullQueryResultBundle = null;
    FacetsWidget facetsWidget;
    TotalVisibleResultsWidget totalVisibleResultsWidget;
    // When false, the next query run skips reconfiguring the facets widget (page change / sort).
    boolean facetsRequireRefresh;
    PopupUtilsView popupUtils;

    /**
     * Wires the view, injected collaborators and the facet change/reset callbacks.
     */
    @Inject
    public TableQueryResultWidget(TableQueryResultView view, SynapseClientAsync synapseClient, PortalGinInjector ginInjector, SynapseAlert synapseAlert,
            ClientCache clientCache, GWTWrapper gwt, FacetsWidget facetsWidget, PopupUtilsView popupUtils, TotalVisibleResultsWidget totalVisibleResultsWidget) {
        this.synapseClient = synapseClient;
        fixServiceEntryPoint(synapseClient);
        this.view = view;
        this.ginInjector = ginInjector;
        this.pageViewerWidget = ginInjector.createNewTablePageWidget();
        this.synapseAlert = synapseAlert;
        this.clientCache = clientCache;
        this.gwt = gwt;
        this.facetsWidget = facetsWidget;
        this.popupUtils = popupUtils;
        this.totalVisibleResultsWidget = totalVisibleResultsWidget;
        view.setFacetsWidget(facetsWidget);
        this.view.setPageWidget(this.pageViewerWidget);
        this.view.setPresenter(this);
        this.view.setSynapseAlertWidget(synapseAlert.asWidget());
        this.view.setTotalVisibleResultsPanel(totalVisibleResultsWidget);
        // Clears all selected facets, drops the cached bundle and reruns from page 1.
        resetFacetsHandler = new Callback() {
            @Override
            public void invoke() {
                startingQuery.setSelectedFacets(null);
                cachedFullQueryResultBundle = null;
                startingQuery.setOffset(0L);
                queryChanging();
            }
        };
        // Replaces any existing selection for the same column, then reruns from page 1.
        facetChangedHandler = new CallbackP<FacetColumnRequest>() {
            @Override
            public void invoke(FacetColumnRequest request) {
                List<FacetColumnRequest> selectedFacets = startingQuery.getSelectedFacets();
                if (selectedFacets == null) {
                    selectedFacets = new ArrayList<FacetColumnRequest>();
                    startingQuery.setSelectedFacets(selectedFacets);
                }
                // Remove any previous request for this column before adding the new one.
                for (FacetColumnRequest facetColumnRequest : selectedFacets) {
                    if (facetColumnRequest.getColumnName().equals(request.getColumnName())) {
                        selectedFacets.remove(facetColumnRequest);
                        break;
                    }
                }
                selectedFacets.add(request);
                cachedFullQueryResultBundle = null;
                startingQuery.setOffset(0L);
                facetsRequireRefresh = true;
                queryChanging();
            }
        };
    }

    /**
     * Configure this widget with a query string.
     *
     * @param query
     * @param isEditable Is the user allowed to edit the query results?
     * @param listener Listener for query start and finish events.
     */
    public void configure(Query query, boolean isEditable, TableType tableType, QueryResultsListener listener) {
        facetsRequireRefresh = true;
        this.isEditable = isEditable;
        this.tableType = tableType;
        this.startingQuery = query;
        this.queryListener = listener;
        cachedFullQueryResultBundle = null;
        runQuery();
    }

    // Starts a new run; bumping the index invalidates callbacks from earlier runs.
    private void runQuery() {
        currentJobIndex++;
        runQuery(currentJobIndex);
    }

    /**
     * Executes the query as an async job, unless a recently-changed-view etag is pending,
     * in which case it first verifies propagation via {@link #verifyOldEtagIsNotInView}.
     * All result callbacks are guarded by {@code jobIndex} to drop stale responses.
     */
    private void runQuery(final int jobIndex) {
        this.view.setErrorVisible(false);
        fireStartEvent();
        pageViewerWidget.setTableVisible(false);
        this.view.setProgressWidgetVisible(true);
        String entityId = QueryBundleUtils.getTableId(this.startingQuery);
        String viewEtag = clientCache.get(entityId + QueryResultEditorWidget.VIEW_RECENTLY_CHANGED_KEY);
        if (viewEtag == null) {
            if (facetsRequireRefresh) {
                // no need to update facets if it's just a page change or sort
                facetsWidget.configure(startingQuery, facetChangedHandler, resetFacetsHandler);
            } else {
                // facet refresh unnecessary for this query execution, but reset to true for next time.
                facetsRequireRefresh = true;
            }
            // run the job
            QueryBundleRequest qbr = new QueryBundleRequest();
            long partMask = BUNDLE_MASK_QUERY_RESULTS | BUNDLE_MASK_LAST_UPDATED_ON;
            if (cachedFullQueryResultBundle == null) {
                // Nothing cached yet: also request columns/select columns/count.
                partMask = partMask | CACHED_PARTS_MASK;
            } else {
                // we can release the old query result
                cachedFullQueryResultBundle.setQueryResult(null);
            }
            qbr.setPartMask(partMask);
            qbr.setQuery(this.startingQuery);
            qbr.setEntityId(entityId);
            AsynchronousProgressWidget progressWidget = ginInjector.creatNewAsynchronousProgressWidget();
            this.view.setProgressWidget(progressWidget);
            progressWidget.startAndTrackJob(RUNNING_QUERY_MESSAGE, false, AsynchType.TableQuery, qbr, new AsynchronousProgressHandler() {
                @Override
                public void onFailure(Throwable failure) {
                    if (currentJobIndex == jobIndex) {
                        showError(failure);
                    }
                }

                @Override
                public void onComplete(AsynchronousResponseBody response) {
                    if (currentJobIndex == jobIndex) {
                        setQueryResults((QueryResultBundle) response);
                    }
                }

                @Override
                public void onCancel() {
                    if (currentJobIndex == jobIndex) {
                        showError(QUERY_CANCELED);
                    }
                }
            });
        } else {
            verifyOldEtagIsNotInView(entityId, viewEtag);
        }
    }

    /**
     * Look for the given etag in the given file view. If it is still there, wait a few seconds and try
     * again. If the etag is not in the view, then remove the clientCache key and run the query (since
     * this indicates that the user change was propagated to the replicated layer)
     *
     * @param fileViewEntityId
     * @param oldEtag
     */
    public void verifyOldEtagIsNotInView(final String fileViewEntityId, String oldEtag) {
        // check to see if etag exists in view
        QueryBundleRequest qbr = new QueryBundleRequest();
        qbr.setPartMask(ALL_PARTS_MASK);
        Query query = new Query();
        query.setSql("select * from " + fileViewEntityId + " where ROW_ETAG='" + oldEtag + "'");
        query.setOffset(DEFAULT_OFFSET);
        query.setLimit(DEFAULT_LIMIT);
        qbr.setQuery(query);
        qbr.setEntityId(fileViewEntityId);
        AsynchronousProgressWidget progressWidget = ginInjector.creatNewAsynchronousProgressWidget();
        this.view.setProgressWidget(progressWidget);
        progressWidget.startAndTrackJob(VERIFYING_ETAG_MESSAGE, false, AsynchType.TableQuery, qbr, new AsynchronousProgressHandler() {
            @Override
            public void onFailure(Throwable failure) {
                showError(failure);
            }

            @Override
            public void onComplete(AsynchronousResponseBody response) {
                QueryResultBundle resultBundle = (QueryResultBundle) response;
                if (resultBundle.getQueryCount() > 0) {
                    // retry after waiting a few seconds
                    gwt.scheduleExecution(new Callback() {
                        @Override
                        public void invoke() {
                            runQuery();
                        }
                    }, ETAG_CHECK_DELAY_MS);
                } else {
                    // clear cache value and run the actual query
                    clientCache.remove(fileViewEntityId + QueryResultEditorWidget.VIEW_RECENTLY_CHANGED_KEY);
                    runQuery();
                }
            }

            @Override
            public void onCancel() {
                showError(QUERY_CANCELED);
            }
        });
    }

    /**
     * Called after a successful query.
     *
     * @param bundle
     */
    private void setQueryResults(final QueryResultBundle bundle) {
        if (cachedFullQueryResultBundle != null) {
            // Add the cached parts that we didn't request to the new result bundle.
            // See CACHED_PARTS_MASK for which parts are cached
            bundle.setQueryCount(cachedFullQueryResultBundle.getQueryCount());
            bundle.setColumnModels(cachedFullQueryResultBundle.getColumnModels());
            bundle.setSelectColumns(cachedFullQueryResultBundle.getSelectColumns());
        } else {
            cachedFullQueryResultBundle = bundle;
        }
        setQueryResultsAndSort(bundle, startingQuery.getSort());
    }

    /** Renders the bundle into the page widget and fires the finish event. */
    private void setQueryResultsAndSort(QueryResultBundle bundle, List<SortItem> sortItems) {
        this.bundle = bundle;
        this.view.setResultCountVisible(true);
        this.view.setResultCount(bundle.getQueryCount());
        this.view.setErrorVisible(false);
        this.view.setProgressWidgetVisible(false);
        // configure the page widget
        this.pageViewerWidget.configure(bundle, this.startingQuery, sortItems, false, tableType, null, this, facetChangedHandler);
        pageViewerWidget.setTableVisible(true);
        QueryResult result = bundle.getQueryResult();
        RowSet rowSet = result.getQueryResults();
        List<Row> rows = rowSet.getRows();
        if (rows.isEmpty()) {
            showError("No rows returned.");
        }
        fireFinishEvent(true, QueryResultEditorWidget.isQueryResultEditable(this.bundle, tableType));
    }

    /**
     * Starting a query.
     */
    private void fireStartEvent() {
        if (this.queryListener != null) {
            this.queryListener.queryExecutionStarted();
        }
    }

    /**
     * Finished a query.
     */
    private void fireFinishEvent(boolean wasSuccessful, boolean resultsEditable) {
        if (this.queryListener != null) {
            this.queryListener.queryExecutionFinished(wasSuccessful, resultsEditable);
        }
    }

    /**
     * Show an error.
     *
     * @param caught
     */
    private void showError(Throwable caught) {
        setupErrorState();
        // due to invalid column set? (see PLFM-5491)
        if (caught instanceof BadRequestException && ErrorResponseCode.INVALID_TABLE_QUERY_FACET_COLUMN_REQUEST.equals(((BadRequestException) caught).getErrorResponseCode())) {
            // Schema drift: inform the user and reset the facet selection.
            popupUtils.showErrorMessage(SCHEMA_CHANGED_MESSAGE);
            resetFacetsHandler.invoke();
        } else {
            synapseAlert.handleException(caught);
        }
    }

    /**
     * Show an error message.
     *
     * @param message
     */
    private void showError(String message) {
        setupErrorState();
        synapseAlert.showError(message);
    }

    // Common view state for any error: hide table/count/progress, show error area.
    private void setupErrorState() {
        pageViewerWidget.setTableVisible(false);
        this.view.setResultCountVisible(false);
        this.view.setProgressWidgetVisible(false);
        fireFinishEvent(false, false);
        this.view.setErrorVisible(true);
    }

    @Override
    public Widget asWidget() {
        return view.asWidget();
    }

    /** Lazily creates the editor widget and opens it on the current bundle. */
    @Override
    public void onEditRows() {
        if (this.queryResultEditor == null) {
            this.queryResultEditor = ginInjector.createNewQueryResultEditorWidget();
            view.setEditorWidget(this.queryResultEditor);
        }
        this.queryResultEditor.showEditor(bundle, tableType);
    }

    /** Page change: keep facets as-is, update the offset and rerun. */
    @Override
    public void onPageChange(Long newOffset) {
        facetsRequireRefresh = false;
        this.startingQuery.setOffset(newOffset);
        queryChanging();
    }

    // Notifies the listener of the new query, scrolls the table into view and reruns.
    private void queryChanging() {
        if (this.queryListener != null) {
            this.queryListener.onStartingNewQuery(this.startingQuery);
        }
        view.scrollTableIntoView();
        runQuery();
    }

    public Query getStartingQuery() {
        return this.startingQuery;
    }

    /** Cycles a column's sort state; see the transition comment below. */
    @Override
    public void onToggleSort(String header) {
        facetsRequireRefresh = false;
        SortItem targetSortItem = null;
        List<SortItem> sortItems = startingQuery.getSort();
        if (sortItems == null) {
            sortItems = new ArrayList<>();
            startingQuery.setSort(sortItems);
        }
        for (SortItem sortItem : sortItems) {
            if (header.equals(sortItem.getColumn())) {
                targetSortItem = sortItem;
                break;
            }
        }
        // transition through UNSORTED (not in sort list) -> DESC -> ASC -> UNSORTED (remove from sort list)
        if (targetSortItem == null) {
            // new sort, set to default
            targetSortItem = new SortItem();
            targetSortItem.setColumn(header);
            targetSortItem.setDirection(SortDirection.DESC);
            sortItems.add(targetSortItem);
        } else if (SortDirection.DESC.equals(targetSortItem.getDirection())) {
            targetSortItem.setDirection(SortDirection.ASC);
        } else {
            sortItems.remove(targetSortItem);
        }
        // reset offset and run the new query
        startingQuery.setOffset(0L);
        queryChanging();
    }

    public void setFacetsVisible(boolean visible) {
        view.setFacetsVisible(visible);
    }

    public TotalVisibleResultsWidget getTotalVisibleResultsWidget() {
        return this.totalVisibleResultsWidget;
    }
}
/** */
package CIM.IEC61970.Informative.MarketOperations.impl;

import CIM.IEC61970.Core.ConnectivityNode;
import CIM.IEC61970.Core.CorePackage;
import CIM.IEC61970.Core.impl.IdentifiedObjectImpl;
import CIM.IEC61970.Informative.MarketOperations.FTR;
import CIM.IEC61970.Informative.MarketOperations.MarketOperationsPackage;
import CIM.IEC61970.Informative.MarketOperations.Pnode;
import CIM.IEC61970.Informative.MarketOperations.PnodeClearing;
import CIM.IEC61970.Informative.MarketOperations.RTO;
import CIM.IEC61970.Informative.MarketOperations.RegisteredResource;
import CIM.IEC61970.Informative.MarketOperations.TransactionBid;
import CIM.IEC61970.Meas.MeasPackage;
import CIM.IEC61970.Meas.Measurement;
import java.util.Collection;
import java.util.Date;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Pnode</b></em>' (CIM pricing node).
 *
 * EMF-generated implementation class. It implements the following {@link Pnode}
 * features: receipt/delivery transaction bids, RTO, registered resources,
 * begin/end period, pnode clearing, type, isPublic, connectivity node, FTRs,
 * measurements and usage. Reference features maintain their inverse ends via
 * the {@code basicSet*} / {@code eInverseAdd} / {@code eInverseRemove}
 * machinery, and every state change fires an {@link ENotificationImpl} when
 * notification is required — do not hand-edit the method bodies.
 * <!-- end-user-doc -->
 * @generated
 */
public class PnodeImpl extends IdentifiedObjectImpl implements Pnode {
    /**
     * Cached inverse-resolving list for '<em>Receipt Transaction Bids</em>'; lazily created.
     * @see #getReceiptTransactionBids()
     * @generated
     * @ordered
     */
    protected EList<TransactionBid> receiptTransactionBids;

    /**
     * Cached '<em>RTO</em>' reference (may be an unresolved proxy until {@link #getRTO()} is called).
     * @see #getRTO()
     * @generated
     * @ordered
     */
    protected RTO rto;

    /**
     * Cached inverse-resolving list for '<em>Registered Resources</em>'; lazily created.
     * @see #getRegisteredResources()
     * @generated
     * @ordered
     */
    protected EList<RegisteredResource> registeredResources;

    /**
     * Default value of the '<em>Begin Period</em>' attribute.
     * @see #getBeginPeriod()
     * @generated
     * @ordered
     */
    protected static final Date BEGIN_PERIOD_EDEFAULT = null;

    /**
     * Cached value of the '<em>Begin Period</em>' attribute.
     * @see #getBeginPeriod()
     * @generated
     * @ordered
     */
    protected Date beginPeriod = BEGIN_PERIOD_EDEFAULT;

    /**
     * Cached '<em>Pnode Clearing</em>' reference (may be a proxy until resolved).
     * @see #getPnodeClearing()
     * @generated
     * @ordered
     */
    protected PnodeClearing pnodeClearing;

    /**
     * Default value of the '<em>Type</em>' attribute.
     * @see #getType()
     * @generated
     * @ordered
     */
    protected static final String TYPE_EDEFAULT = null;

    /**
     * Cached value of the '<em>Type</em>' attribute.
     * @see #getType()
     * @generated
     * @ordered
     */
    protected String type = TYPE_EDEFAULT;

    /**
     * Cached inverse-resolving list for '<em>Delivery Transaction Bids</em>'; lazily created.
     * @see #getDeliveryTransactionBids()
     * @generated
     * @ordered
     */
    protected EList<TransactionBid> deliveryTransactionBids;

    /**
     * Default value of the '<em>Is Public</em>' attribute.
     * @see #isIsPublic()
     * @generated
     * @ordered
     */
    protected static final boolean IS_PUBLIC_EDEFAULT = false;

    /**
     * Cached value of the '<em>Is Public</em>' attribute.
     * @see #isIsPublic()
     * @generated
     * @ordered
     */
    protected boolean isPublic = IS_PUBLIC_EDEFAULT;

    /**
     * Cached '<em>Connectivity Node</em>' reference (may be a proxy until resolved).
     * @see #getConnectivityNode()
     * @generated
     * @ordered
     */
    protected ConnectivityNode connectivityNode;

    /**
     * Cached many-to-many inverse list for '<em>FT Rs</em>'; lazily created.
     * @see #getFTRs()
     * @generated
     * @ordered
     */
    protected EList<FTR> ftRs;

    /**
     * Default value of the '<em>End Period</em>' attribute.
     * @see #getEndPeriod()
     * @generated
     * @ordered
     */
    protected static final Date END_PERIOD_EDEFAULT = null;

    /**
     * Cached value of the '<em>End Period</em>' attribute.
     * @see #getEndPeriod()
     * @generated
     * @ordered
     */
    protected Date endPeriod = END_PERIOD_EDEFAULT;

    /**
     * Cached inverse-resolving list for '<em>Measurements</em>'; lazily created.
     * @see #getMeasurements()
     * @generated
     * @ordered
     */
    protected EList<Measurement> measurements;

    /**
     * Default value of the '<em>Usage</em>' attribute.
     * @see #getUsage()
     * @generated
     * @ordered
     */
    protected static final String USAGE_EDEFAULT = null;

    /**
     * Cached value of the '<em>Usage</em>' attribute.
     * @see #getUsage()
     * @generated
     * @ordered
     */
    protected String usage = USAGE_EDEFAULT;

    /**
     * Instances are created via the generated factory, not directly.
     * @generated
     */
    protected PnodeImpl() {
        super();
    }

    /**
     * Returns the static EClass describing this model object.
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return MarketOperationsPackage.Literals.PNODE;
    }

    /**
     * Lazily creates the list; additions/removals update the inverse
     * {@code TransactionBid.receiptPnode} end automatically.
     * @generated
     */
    public EList<TransactionBid> getReceiptTransactionBids() {
        if (receiptTransactionBids == null) {
            receiptTransactionBids = new EObjectWithInverseResolvingEList<TransactionBid>(TransactionBid.class, this, MarketOperationsPackage.PNODE__RECEIPT_TRANSACTION_BIDS, MarketOperationsPackage.TRANSACTION_BID__RECEIPT_PNODE);
        }
        return receiptTransactionBids;
    }

    /**
     * Resolves a proxy RTO on access and fires a RESOLVE notification if it changed.
     * @generated
     */
    public RTO getRTO() {
        if (rto != null && rto.eIsProxy()) {
            InternalEObject oldRTO = (InternalEObject)rto;
            rto = (RTO)eResolveProxy(oldRTO);
            if (rto != oldRTO) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, MarketOperationsPackage.PNODE__RTO, oldRTO, rto));
            }
        }
        return rto;
    }

    /**
     * Returns the raw cached RTO without proxy resolution.
     * @generated
     */
    public RTO basicGetRTO() {
        return rto;
    }

    /**
     * Sets the RTO without touching the inverse end; chains the SET notification.
     * @generated
     */
    public NotificationChain basicSetRTO(RTO newRTO, NotificationChain msgs) {
        RTO oldRTO = rto;
        rto = newRTO;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.PNODE__RTO, oldRTO, newRTO);
            if (msgs == null) msgs = notification;
            else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * Public setter: detaches from the old RTO's inverse list, attaches to the
     * new one, then dispatches the accumulated notification chain.
     * @generated
     */
    public void setRTO(RTO newRTO) {
        if (newRTO != rto) {
            NotificationChain msgs = null;
            if (rto != null)
                msgs = ((InternalEObject)rto).eInverseRemove(this, MarketOperationsPackage.RTO__PNODES, RTO.class, msgs);
            if (newRTO != null)
                msgs = ((InternalEObject)newRTO).eInverseAdd(this, MarketOperationsPackage.RTO__PNODES, RTO.class, msgs);
            msgs = basicSetRTO(newRTO, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.PNODE__RTO, newRTO, newRTO));
    }

    /**
     * Lazily creates the list; maintains the inverse {@code RegisteredResource.pnode} end.
     * @generated
     */
    public EList<RegisteredResource> getRegisteredResources() {
        if (registeredResources == null) {
            registeredResources = new EObjectWithInverseResolvingEList<RegisteredResource>(RegisteredResource.class, this, MarketOperationsPackage.PNODE__REGISTERED_RESOURCES, MarketOperationsPackage.REGISTERED_RESOURCE__PNODE);
        }
        return registeredResources;
    }

    /**
     * @generated
     */
    public Date getBeginPeriod() {
        return beginPeriod;
    }

    /**
     * Simple attribute setter; fires a SET notification when required.
     * @generated
     */
    public void setBeginPeriod(Date newBeginPeriod) {
        Date oldBeginPeriod = beginPeriod;
        beginPeriod = newBeginPeriod;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.PNODE__BEGIN_PERIOD, oldBeginPeriod, beginPeriod));
    }

    /**
     * Resolves a proxy on access and fires a RESOLVE notification if it changed.
     * @generated
     */
    public PnodeClearing getPnodeClearing() {
        if (pnodeClearing != null && pnodeClearing.eIsProxy()) {
            InternalEObject oldPnodeClearing = (InternalEObject)pnodeClearing;
            pnodeClearing = (PnodeClearing)eResolveProxy(oldPnodeClearing);
            if (pnodeClearing != oldPnodeClearing) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, MarketOperationsPackage.PNODE__PNODE_CLEARING, oldPnodeClearing, pnodeClearing));
            }
        }
        return pnodeClearing;
    }

    /**
     * Returns the raw cached value without proxy resolution.
     * @generated
     */
    public PnodeClearing basicGetPnodeClearing() {
        return pnodeClearing;
    }

    /**
     * Sets the reference without touching the inverse end; chains the SET notification.
     * @generated
     */
    public NotificationChain basicSetPnodeClearing(PnodeClearing newPnodeClearing, NotificationChain msgs) {
        PnodeClearing oldPnodeClearing = pnodeClearing;
        pnodeClearing = newPnodeClearing;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.PNODE__PNODE_CLEARING, oldPnodeClearing, newPnodeClearing);
            if (msgs == null) msgs = notification;
            else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * Public setter maintaining the one-to-one inverse {@code PnodeClearing.pnode}.
     * @generated
     */
    public void setPnodeClearing(PnodeClearing newPnodeClearing) {
        if (newPnodeClearing != pnodeClearing) {
            NotificationChain msgs = null;
            if (pnodeClearing != null)
                msgs = ((InternalEObject)pnodeClearing).eInverseRemove(this, MarketOperationsPackage.PNODE_CLEARING__PNODE, PnodeClearing.class, msgs);
            if (newPnodeClearing != null)
                msgs = ((InternalEObject)newPnodeClearing).eInverseAdd(this, MarketOperationsPackage.PNODE_CLEARING__PNODE, PnodeClearing.class, msgs);
            msgs = basicSetPnodeClearing(newPnodeClearing, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.PNODE__PNODE_CLEARING, newPnodeClearing, newPnodeClearing));
    }

    /**
     * @generated
     */
    public String getType() {
        return type;
    }

    /**
     * Simple attribute setter; fires a SET notification when required.
     * @generated
     */
    public void setType(String newType) {
        String oldType = type;
        type = newType;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.PNODE__TYPE, oldType, type));
    }

    /**
     * Lazily creates the list; maintains the inverse {@code TransactionBid.deliveryPnode} end.
     * @generated
     */
    public EList<TransactionBid> getDeliveryTransactionBids() {
        if (deliveryTransactionBids == null) {
            deliveryTransactionBids = new EObjectWithInverseResolvingEList<TransactionBid>(TransactionBid.class, this, MarketOperationsPackage.PNODE__DELIVERY_TRANSACTION_BIDS, MarketOperationsPackage.TRANSACTION_BID__DELIVERY_PNODE);
        }
        return deliveryTransactionBids;
    }

    /**
     * @generated
     */
    public boolean isIsPublic() {
        return isPublic;
    }

    /**
     * Simple attribute setter; fires a SET notification when required.
     * @generated
     */
    public void setIsPublic(boolean newIsPublic) {
        boolean oldIsPublic = isPublic;
        isPublic = newIsPublic;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.PNODE__IS_PUBLIC, oldIsPublic, isPublic));
    }

    /**
     * Resolves a proxy on access and fires a RESOLVE notification if it changed.
     * @generated
     */
    public ConnectivityNode getConnectivityNode() {
        if (connectivityNode != null && connectivityNode.eIsProxy()) {
            InternalEObject oldConnectivityNode = (InternalEObject)connectivityNode;
            connectivityNode = (ConnectivityNode)eResolveProxy(oldConnectivityNode);
            if (connectivityNode != oldConnectivityNode) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, MarketOperationsPackage.PNODE__CONNECTIVITY_NODE, oldConnectivityNode, connectivityNode));
            }
        }
        return connectivityNode;
    }

    /**
     * Returns the raw cached value without proxy resolution.
     * @generated
     */
    public ConnectivityNode basicGetConnectivityNode() {
        return connectivityNode;
    }

    /**
     * Sets the reference without touching the inverse end; chains the SET notification.
     * @generated
     */
    public NotificationChain basicSetConnectivityNode(ConnectivityNode newConnectivityNode, NotificationChain msgs) {
        ConnectivityNode oldConnectivityNode = connectivityNode;
        connectivityNode = newConnectivityNode;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.PNODE__CONNECTIVITY_NODE, oldConnectivityNode, newConnectivityNode);
            if (msgs == null) msgs = notification;
            else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * Public setter maintaining the inverse end declared in {@link CorePackage}
     * (note: this inverse lives in the Core package, not MarketOperations).
     * @generated
     */
    public void setConnectivityNode(ConnectivityNode newConnectivityNode) {
        if (newConnectivityNode != connectivityNode) {
            NotificationChain msgs = null;
            if (connectivityNode != null)
                msgs = ((InternalEObject)connectivityNode).eInverseRemove(this, CorePackage.CONNECTIVITY_NODE__PNODE, ConnectivityNode.class, msgs);
            if (newConnectivityNode != null)
                msgs = ((InternalEObject)newConnectivityNode).eInverseAdd(this, CorePackage.CONNECTIVITY_NODE__PNODE, ConnectivityNode.class, msgs);
            msgs = basicSetConnectivityNode(newConnectivityNode, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.PNODE__CONNECTIVITY_NODE, newConnectivityNode, newConnectivityNode));
    }

    /**
     * Lazily creates the list; many-to-many, hence the {@code ManyInverse} variant.
     * @generated
     */
    public EList<FTR> getFTRs() {
        if (ftRs == null) {
            ftRs = new EObjectWithInverseResolvingEList.ManyInverse<FTR>(FTR.class, this, MarketOperationsPackage.PNODE__FT_RS, MarketOperationsPackage.FTR__PNODES);
        }
        return ftRs;
    }

    /**
     * @generated
     */
    public Date getEndPeriod() {
        return endPeriod;
    }

    /**
     * Simple attribute setter; fires a SET notification when required.
     * @generated
     */
    public void setEndPeriod(Date newEndPeriod) {
        Date oldEndPeriod = endPeriod;
        endPeriod = newEndPeriod;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.PNODE__END_PERIOD, oldEndPeriod, endPeriod));
    }

    /**
     * Lazily creates the list; maintains the inverse end declared in {@link MeasPackage}.
     * @generated
     */
    public EList<Measurement> getMeasurements() {
        if (measurements == null) {
            measurements = new EObjectWithInverseResolvingEList<Measurement>(Measurement.class, this, MarketOperationsPackage.PNODE__MEASUREMENTS, MeasPackage.MEASUREMENT__PNODE);
        }
        return measurements;
    }

    /**
     * @generated
     */
    public String getUsage() {
        return usage;
    }

    /**
     * Simple attribute setter; fires a SET notification when required.
     * @generated
     */
    public void setUsage(String newUsage) {
        String oldUsage = usage;
        usage = newUsage;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.PNODE__USAGE, oldUsage, usage));
    }

    /**
     * Reflective hookup of the inverse end of a bidirectional reference; for
     * single-valued features the previous target is detached first.
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case MarketOperationsPackage.PNODE__RECEIPT_TRANSACTION_BIDS:
                return ((InternalEList<InternalEObject>)(InternalEList<?>)getReceiptTransactionBids()).basicAdd(otherEnd, msgs);
            case MarketOperationsPackage.PNODE__RTO:
                if (rto != null)
                    msgs = ((InternalEObject)rto).eInverseRemove(this, MarketOperationsPackage.RTO__PNODES, RTO.class, msgs);
                return basicSetRTO((RTO)otherEnd, msgs);
            case MarketOperationsPackage.PNODE__REGISTERED_RESOURCES:
                return ((InternalEList<InternalEObject>)(InternalEList<?>)getRegisteredResources()).basicAdd(otherEnd, msgs);
            case MarketOperationsPackage.PNODE__PNODE_CLEARING:
                if (pnodeClearing != null)
                    msgs = ((InternalEObject)pnodeClearing).eInverseRemove(this, MarketOperationsPackage.PNODE_CLEARING__PNODE, PnodeClearing.class, msgs);
                return basicSetPnodeClearing((PnodeClearing)otherEnd, msgs);
            case MarketOperationsPackage.PNODE__DELIVERY_TRANSACTION_BIDS:
                return ((InternalEList<InternalEObject>)(InternalEList<?>)getDeliveryTransactionBids()).basicAdd(otherEnd, msgs);
            case MarketOperationsPackage.PNODE__CONNECTIVITY_NODE:
                if (connectivityNode != null)
                    msgs = ((InternalEObject)connectivityNode).eInverseRemove(this, CorePackage.CONNECTIVITY_NODE__PNODE, ConnectivityNode.class, msgs);
                return basicSetConnectivityNode((ConnectivityNode)otherEnd, msgs);
            case MarketOperationsPackage.PNODE__FT_RS:
                return ((InternalEList<InternalEObject>)(InternalEList<?>)getFTRs()).basicAdd(otherEnd, msgs);
            case MarketOperationsPackage.PNODE__MEASUREMENTS:
                return ((InternalEList<InternalEObject>)(InternalEList<?>)getMeasurements()).basicAdd(otherEnd, msgs);
        }
        return super.eInverseAdd(otherEnd, featureID, msgs);
    }

    /**
     * Reflective detachment of the inverse end of a bidirectional reference.
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case MarketOperationsPackage.PNODE__RECEIPT_TRANSACTION_BIDS:
                return ((InternalEList<?>)getReceiptTransactionBids()).basicRemove(otherEnd, msgs);
            case MarketOperationsPackage.PNODE__RTO:
                return basicSetRTO(null, msgs);
            case MarketOperationsPackage.PNODE__REGISTERED_RESOURCES:
                return ((InternalEList<?>)getRegisteredResources()).basicRemove(otherEnd, msgs);
            case MarketOperationsPackage.PNODE__PNODE_CLEARING:
                return basicSetPnodeClearing(null, msgs);
            case MarketOperationsPackage.PNODE__DELIVERY_TRANSACTION_BIDS:
                return ((InternalEList<?>)getDeliveryTransactionBids()).basicRemove(otherEnd, msgs);
            case MarketOperationsPackage.PNODE__CONNECTIVITY_NODE:
                return basicSetConnectivityNode(null, msgs);
            case MarketOperationsPackage.PNODE__FT_RS:
                return ((InternalEList<?>)getFTRs()).basicRemove(otherEnd, msgs);
            case MarketOperationsPackage.PNODE__MEASUREMENTS:
                return ((InternalEList<?>)getMeasurements()).basicRemove(otherEnd, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * Reflective feature getter; {@code resolve} controls proxy resolution.
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case MarketOperationsPackage.PNODE__RECEIPT_TRANSACTION_BIDS:
                return getReceiptTransactionBids();
            case MarketOperationsPackage.PNODE__RTO:
                if (resolve) return getRTO();
                return basicGetRTO();
            case MarketOperationsPackage.PNODE__REGISTERED_RESOURCES:
                return getRegisteredResources();
            case MarketOperationsPackage.PNODE__BEGIN_PERIOD:
                return getBeginPeriod();
            case MarketOperationsPackage.PNODE__PNODE_CLEARING:
                if (resolve) return getPnodeClearing();
                return basicGetPnodeClearing();
            case MarketOperationsPackage.PNODE__TYPE:
                return getType();
            case MarketOperationsPackage.PNODE__DELIVERY_TRANSACTION_BIDS:
                return getDeliveryTransactionBids();
            case MarketOperationsPackage.PNODE__IS_PUBLIC:
                return isIsPublic();
            case MarketOperationsPackage.PNODE__CONNECTIVITY_NODE:
                if (resolve) return getConnectivityNode();
                return basicGetConnectivityNode();
            case MarketOperationsPackage.PNODE__FT_RS:
                return getFTRs();
            case MarketOperationsPackage.PNODE__END_PERIOD:
                return getEndPeriod();
            case MarketOperationsPackage.PNODE__MEASUREMENTS:
                return getMeasurements();
            case MarketOperationsPackage.PNODE__USAGE:
                return getUsage();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * Reflective feature setter; list features are cleared then bulk-added.
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case MarketOperationsPackage.PNODE__RECEIPT_TRANSACTION_BIDS:
                getReceiptTransactionBids().clear();
                getReceiptTransactionBids().addAll((Collection<? extends TransactionBid>)newValue);
                return;
            case MarketOperationsPackage.PNODE__RTO:
                setRTO((RTO)newValue);
                return;
            case MarketOperationsPackage.PNODE__REGISTERED_RESOURCES:
                getRegisteredResources().clear();
                getRegisteredResources().addAll((Collection<? extends RegisteredResource>)newValue);
                return;
            case MarketOperationsPackage.PNODE__BEGIN_PERIOD:
                setBeginPeriod((Date)newValue);
                return;
            case MarketOperationsPackage.PNODE__PNODE_CLEARING:
                setPnodeClearing((PnodeClearing)newValue);
                return;
            case MarketOperationsPackage.PNODE__TYPE:
                setType((String)newValue);
                return;
            case MarketOperationsPackage.PNODE__DELIVERY_TRANSACTION_BIDS:
                getDeliveryTransactionBids().clear();
                getDeliveryTransactionBids().addAll((Collection<? extends TransactionBid>)newValue);
                return;
            case MarketOperationsPackage.PNODE__IS_PUBLIC:
                setIsPublic((Boolean)newValue);
                return;
            case MarketOperationsPackage.PNODE__CONNECTIVITY_NODE:
                setConnectivityNode((ConnectivityNode)newValue);
                return;
            case MarketOperationsPackage.PNODE__FT_RS:
                getFTRs().clear();
                getFTRs().addAll((Collection<? extends FTR>)newValue);
                return;
            case MarketOperationsPackage.PNODE__END_PERIOD:
                setEndPeriod((Date)newValue);
                return;
            case MarketOperationsPackage.PNODE__MEASUREMENTS:
                getMeasurements().clear();
                getMeasurements().addAll((Collection<? extends Measurement>)newValue);
                return;
            case MarketOperationsPackage.PNODE__USAGE:
                setUsage((String)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * Reflective unset: lists are cleared, scalars restored to their EDEFAULT.
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case MarketOperationsPackage.PNODE__RECEIPT_TRANSACTION_BIDS:
                getReceiptTransactionBids().clear();
                return;
            case MarketOperationsPackage.PNODE__RTO:
                setRTO((RTO)null);
                return;
            case MarketOperationsPackage.PNODE__REGISTERED_RESOURCES:
                getRegisteredResources().clear();
                return;
            case MarketOperationsPackage.PNODE__BEGIN_PERIOD:
                setBeginPeriod(BEGIN_PERIOD_EDEFAULT);
                return;
            case MarketOperationsPackage.PNODE__PNODE_CLEARING:
                setPnodeClearing((PnodeClearing)null);
                return;
            case MarketOperationsPackage.PNODE__TYPE:
                setType(TYPE_EDEFAULT);
                return;
            case MarketOperationsPackage.PNODE__DELIVERY_TRANSACTION_BIDS:
                getDeliveryTransactionBids().clear();
                return;
            case MarketOperationsPackage.PNODE__IS_PUBLIC:
                setIsPublic(IS_PUBLIC_EDEFAULT);
                return;
            case MarketOperationsPackage.PNODE__CONNECTIVITY_NODE:
                setConnectivityNode((ConnectivityNode)null);
                return;
            case MarketOperationsPackage.PNODE__FT_RS:
                getFTRs().clear();
                return;
            case MarketOperationsPackage.PNODE__END_PERIOD:
                setEndPeriod(END_PERIOD_EDEFAULT);
                return;
            case MarketOperationsPackage.PNODE__MEASUREMENTS:
                getMeasurements().clear();
                return;
            case MarketOperationsPackage.PNODE__USAGE:
                setUsage(USAGE_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * Reflective "is set" check: non-empty for lists, non-default for scalars.
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case MarketOperationsPackage.PNODE__RECEIPT_TRANSACTION_BIDS:
                return receiptTransactionBids != null && !receiptTransactionBids.isEmpty();
            case MarketOperationsPackage.PNODE__RTO:
                return rto != null;
            case MarketOperationsPackage.PNODE__REGISTERED_RESOURCES:
                return registeredResources != null && !registeredResources.isEmpty();
            case MarketOperationsPackage.PNODE__BEGIN_PERIOD:
                return BEGIN_PERIOD_EDEFAULT == null ? beginPeriod != null : !BEGIN_PERIOD_EDEFAULT.equals(beginPeriod);
            case MarketOperationsPackage.PNODE__PNODE_CLEARING:
                return pnodeClearing != null;
            case MarketOperationsPackage.PNODE__TYPE:
                return TYPE_EDEFAULT == null ? type != null : !TYPE_EDEFAULT.equals(type);
            case MarketOperationsPackage.PNODE__DELIVERY_TRANSACTION_BIDS:
                return deliveryTransactionBids != null && !deliveryTransactionBids.isEmpty();
            case MarketOperationsPackage.PNODE__IS_PUBLIC:
                return isPublic != IS_PUBLIC_EDEFAULT;
            case MarketOperationsPackage.PNODE__CONNECTIVITY_NODE:
                return connectivityNode != null;
            case MarketOperationsPackage.PNODE__FT_RS:
                return ftRs != null && !ftRs.isEmpty();
            case MarketOperationsPackage.PNODE__END_PERIOD:
                return END_PERIOD_EDEFAULT == null ? endPeriod != null : !END_PERIOD_EDEFAULT.equals(endPeriod);
            case MarketOperationsPackage.PNODE__MEASUREMENTS:
                return measurements != null && !measurements.isEmpty();
            case MarketOperationsPackage.PNODE__USAGE:
                return USAGE_EDEFAULT == null ? usage != null : !USAGE_EDEFAULT.equals(usage);
        }
        return super.eIsSet(featureID);
    }

    /**
     * Debug string listing only the attribute (non-reference) features.
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();

        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (beginPeriod: ");
        result.append(beginPeriod);
        result.append(", type: ");
        result.append(type);
        result.append(", isPublic: ");
        result.append(isPublic);
        result.append(", endPeriod: ");
        result.append(endPeriod);
        result.append(", usage: ");
        result.append(usage);
        result.append(')');
        return result.toString();
    }

} //PnodeImpl
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.adapter.splunk;

import org.apache.calcite.adapter.splunk.util.StringUtils;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelOptRuleOperand;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.logical.LogicalFilter;
import org.apache.calcite.rel.logical.LogicalProject;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelRecordType;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexSlot;
import org.apache.calcite.sql.SqlBinaryOperator;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.NlsString;
import org.apache.calcite.util.Pair;

import com.google.common.collect.ImmutableSet;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.logging.Logger;

/**
 * Planner rule to push filters and projections down into a Splunk search.
 *
 * <p>Four pre-built instances cover the operand shapes we can handle:
 * project-on-filter-on-project, filter-on-project, bare filter, and bare
 * project, each rooted at a {@link SplunkTableScan}. The matched filter is
 * rendered as a Splunk search-expression string and appended to the scan's
 * search; projections become the scan's field list (with {@code | rename}
 * clauses when the top projection renames fields).
 */
public class SplunkPushDownRule extends RelOptRule {
  private static final Logger LOGGER =
      StringUtils.getClassTracer(SplunkPushDownRule.class);

  /** Operators we know how to render in Splunk search syntax; anything else
   * aborts the push-down and leaves the filter in the plan. */
  private static final Set<SqlKind> SUPPORTED_OPS =
      ImmutableSet.of(
          SqlKind.CAST,
          SqlKind.EQUALS,
          SqlKind.LESS_THAN,
          SqlKind.LESS_THAN_OR_EQUAL,
          SqlKind.GREATER_THAN,
          SqlKind.GREATER_THAN_OR_EQUAL,
          SqlKind.NOT_EQUALS,
          SqlKind.LIKE,
          SqlKind.AND,
          SqlKind.OR,
          SqlKind.NOT);

  public static final SplunkPushDownRule PROJECT_ON_FILTER =
      new SplunkPushDownRule(
          operand(
              LogicalProject.class,
              operand(
                  LogicalFilter.class,
                  operand(
                      LogicalProject.class,
                      operand(SplunkTableScan.class, none())))),
          "proj on filter on proj");

  public static final SplunkPushDownRule FILTER_ON_PROJECT =
      new SplunkPushDownRule(
          operand(
              LogicalFilter.class,
              operand(
                  LogicalProject.class,
                  operand(SplunkTableScan.class, none()))),
          "filter on proj");

  public static final SplunkPushDownRule FILTER =
      new SplunkPushDownRule(
          operand(
              LogicalFilter.class,
              operand(SplunkTableScan.class, none())),
          "filter");

  public static final SplunkPushDownRule PROJECT =
      new SplunkPushDownRule(
          operand(
              LogicalProject.class,
              operand(SplunkTableScan.class, none())),
          "proj");

  /** Creates a SplunkPushDownRule.
   *
   * @param rule operand tree this instance matches
   * @param id   suffix used in the rule description (for tracing)
   */
  protected SplunkPushDownRule(RelOptRuleOperand rule, String id) {
    super(rule, "SplunkPushDownRule: " + id);
  }

  // ~ Methods --------------------------------------------------------------

  // implement RelOptRule
  public void onMatch(RelOptRuleCall call) {
    LOGGER.fine(description);

    int relLength = call.rels.length;
    SplunkTableScan splunkRel =
        (SplunkTableScan) call.rels[relLength - 1];

    LogicalFilter filter;
    LogicalProject topProj = null;
    LogicalProject bottomProj = null;

    RelDataType topRow = splunkRel.getRowType();

    int filterIdx = 2;
    if (call.rels[relLength - 2] instanceof LogicalProject) {
      bottomProj = (LogicalProject) call.rels[relLength - 2];
      filterIdx = 3;

      // bottom projection will change the field count/order
      topRow = bottomProj.getRowType();
    }

    String filterString;

    if (filterIdx <= relLength
        && call.rels[relLength - filterIdx] instanceof LogicalFilter) {
      filter = (LogicalFilter) call.rels[relLength - filterIdx];

      int topProjIdx = filterIdx + 1;
      if (topProjIdx <= relLength
          && call.rels[relLength - topProjIdx] instanceof LogicalProject) {
        topProj = (LogicalProject) call.rels[relLength - topProjIdx];
      }

      RexCall filterCall = (RexCall) filter.getCondition();
      SqlOperator op = filterCall.getOperator();
      List<RexNode> operands = filterCall.getOperands();

      LOGGER.fine("fieldNames: " + getFieldsString(topRow));

      final StringBuilder buf = new StringBuilder();
      if (getFilter(op, operands, buf, topRow.getFieldNames())) {
        filterString = buf.toString();
      } else {
        return; // can't handle
      }
    } else {
      filterString = "";
    }

    // top projection will change the field count/order
    if (topProj != null) {
      topRow = topProj.getRowType();
    }
    LOGGER.fine("pre transformTo fieldNames: " + getFieldsString(topRow));

    call.transformTo(
        appendSearchString(
            filterString, splunkRel, topProj, bottomProj, topRow, null));
  }

  /**
   * Appends a search string.
   *
   * @param toAppend Search string to append
   * @param splunkRel Relational expression
   * @param topProj Top projection
   * @param bottomProj Bottom projection
   */
  protected RelNode appendSearchString(
      String toAppend,
      SplunkTableScan splunkRel,
      LogicalProject topProj,
      LogicalProject bottomProj,
      RelDataType topRow,
      RelDataType bottomRow) {
    StringBuilder updateSearchStr = new StringBuilder(splunkRel.search);

    if (!toAppend.isEmpty()) {
      updateSearchStr.append(" ").append(toAppend);
    }
    List<RelDataTypeField> bottomFields =
        bottomRow == null ? null : bottomRow.getFieldList();
    List<RelDataTypeField> topFields =
        topRow == null ? null : topRow.getFieldList();

    if (bottomFields == null) {
      bottomFields = splunkRel.getRowType().getFieldList();
    }

    // handle bottom projection (ie choose a subset of the table fields)
    if (bottomProj != null) {
      List<RelDataTypeField> tmp = new ArrayList<RelDataTypeField>();
      List<RelDataTypeField> dRow = bottomProj.getRowType().getFieldList();
      for (RexNode rn : bottomProj.getProjects()) {
        RelDataTypeField rdtf;
        if (rn instanceof RexSlot) {
          RexSlot rs = (RexSlot) rn;
          rdtf = bottomFields.get(rs.getIndex());
        } else {
          rdtf = dRow.get(tmp.size());
        }
        tmp.add(rdtf);
      }
      bottomFields = tmp;
    }

    // field renaming: to -> from
    List<Pair<String, String>> renames =
        new LinkedList<Pair<String, String>>();

    // handle top projection (ie reordering and renaming)
    List<RelDataTypeField> newFields = bottomFields;
    if (topProj != null) {
      LOGGER.fine("topProj: " + String.valueOf(topProj.getPermutation()));
      newFields = new ArrayList<RelDataTypeField>();
      int i = 0;
      for (RexNode rn : topProj.getProjects()) {
        RexInputRef rif = (RexInputRef) rn;
        RelDataTypeField field = bottomFields.get(rif.getIndex());
        if (!bottomFields.get(rif.getIndex()).getName()
            .equals(topFields.get(i).getName())) {
          renames.add(
              new Pair<String, String>(
                  bottomFields.get(rif.getIndex()).getName(),
                  topFields.get(i).getName()));
          field = topFields.get(i);
        }
        newFields.add(field);
        // FIX: advance the output position. Previously `i` was never
        // incremented, so every projected field was compared (and renamed)
        // against the FIRST top-row field, producing wrong rename clauses
        // for any projection with more than one output column.
        i++;
      }
    }

    if (!renames.isEmpty()) {
      updateSearchStr.append("| rename ");
      for (Pair<String, String> p : renames) {
        updateSearchStr.append(p.left).append(" AS ")
            .append(p.right).append(" ");
      }
    }

    RelDataType resultType = new RelRecordType(newFields);
    String searchWithFilter = updateSearchStr.toString();

    RelNode rel =
        new SplunkTableScan(
            splunkRel.getCluster(),
            splunkRel.getTable(),
            splunkRel.splunkTable,
            searchWithFilter,
            splunkRel.earliest,
            splunkRel.latest,
            resultType.getFieldNames());

    LOGGER.fine(
        "end of appendSearchString fieldNames: "
        + rel.getRowType().getFieldNames());
    return rel;
  }

  // ~ Private Methods ------------------------------------------------------

  /** Wraps {@code rel} in {@code proj} (or returns it unchanged when
   * {@code proj} is null). Kept for the disabled fallback path in
   * {@link #transformToFarragoUdxRel}. */
  private static RelNode addProjectionRule(LogicalProject proj, RelNode rel) {
    if (proj == null) {
      return rel;
    }
    return LogicalProject.create(rel, proj.getProjects(), proj.getRowType());
  }

  // TODO: use StringBuilder instead of String
  // TODO: refactor this to use more tree like parsing, need to also
  //       make sure we use parens properly - currently precedence
  //       rules are simply left to right
  /** Renders a filter condition into Splunk search syntax, appending to
   * {@code s}. Returns false when any operator/operand is unsupported, in
   * which case the push-down is abandoned. */
  private boolean getFilter(SqlOperator op, List<RexNode> operands,
      StringBuilder s, List<String> fieldNames) {
    if (!valid(op.getKind())) {
      return false;
    }

    boolean like = false;
    switch (op.getKind()) {
    case NOT:
      // NOT op pre-pended
      s = s.append(" NOT ");
      break;
    case CAST:
      return asd(false, operands, s, fieldNames, 0);
    case LIKE:
      like = true;
      break;
    }

    for (int i = 0; i < operands.size(); i++) {
      if (!asd(like, operands, s, fieldNames, i)) {
        return false;
      }
      if (op instanceof SqlBinaryOperator && i == 0) {
        // FIX: render the operator through toString(op) so that LIKE becomes
        // '=' (its pattern having been wildcarded to '*' in the literal) and
        // NOT_EQUALS becomes '!='. Previously the raw SQL operator name was
        // appended, emitting e.g. "field LIKE value", which is not valid
        // Splunk search syntax.
        s.append(" ").append(toString(op)).append(" ");
      }
    }
    return true;
  }

  /** Renders operand {@code i}: a nested call recurses (parenthesized), an
   * input ref becomes its field name (only legal on the left-hand side), and
   * a literal is escaped/wildcarded. Returns false when unsupported. */
  private boolean asd(boolean like, List<RexNode> operands, StringBuilder s,
      List<String> fieldNames, int i) {
    RexNode operand = operands.get(i);
    if (operand instanceof RexCall) {
      s.append("(");
      final RexCall call = (RexCall) operand;
      boolean b =
          getFilter(
              call.getOperator(),
              call.getOperands(),
              s,
              fieldNames);
      if (!b) {
        return false;
      }
      s.append(")");
    } else {
      if (operand instanceof RexInputRef) {
        if (i != 0) {
          return false;
        }
        int fieldIndex = ((RexInputRef) operand).getIndex();
        String name = fieldNames.get(fieldIndex);
        s.append(name);
      } else { // RexLiteral
        String tmp = toString(like, (RexLiteral) operand);
        if (tmp == null) {
          return false;
        }
        s.append(tmp);
      }
    }
    return true;
  }

  private boolean valid(SqlKind kind) {
    return SUPPORTED_OPS.contains(kind);
  }

  /** Translates a SQL operator into its Splunk spelling:
   * LIKE maps to '=' and NOT_EQUALS to '!='. */
  private String toString(SqlOperator op) {
    if (op.equals(SqlStdOperatorTable.LIKE)) {
      return SqlStdOperatorTable.EQUALS.toString();
    } else if (op.equals(SqlStdOperatorTable.NOT_EQUALS)) {
      return "!=";
    }
    return op.toString();
  }

  /** Quotes/escapes a literal for a Splunk search string: backslash-escapes
   * '"' and '\', and double-quotes the whole value when it contains any
   * character other than letters, digits and '_' (or is empty). */
  public static String searchEscape(String str) {
    if (str.isEmpty()) {
      return "\"\"";
    }
    StringBuilder sb = new StringBuilder(str.length());
    boolean quote = false;

    for (int i = 0; i < str.length(); i++) {
      char c = str.charAt(i);
      if (c == '"' || c == '\\') {
        sb.append('\\');
      }
      sb.append(c);

      quote |= !(Character.isLetterOrDigit(c) || c == '_');
    }

    if (quote || sb.length() != str.length()) {
      sb.insert(0, '"');
      sb.append('"');
      return sb.toString();
    }
    return str;
  }

  /** Renders a literal, or null when its type is unsupported. For CHAR
   * literals under LIKE, SQL '%' wildcards become Splunk '*'. */
  private String toString(boolean like, RexLiteral literal) {
    String value = null;
    SqlTypeName litSqlType = literal.getTypeName();
    if (SqlTypeName.NUMERIC_TYPES.contains(litSqlType)) {
      value = literal.getValue().toString();
    } else if (litSqlType.equals(SqlTypeName.CHAR)) {
      value = ((NlsString) literal.getValue()).getValue();
      if (like) {
        value = value.replaceAll("%", "*");
      }
      value = searchEscape(value);
    }
    return value;
  }

  // transform the call from SplunkUdxRel to FarragoJavaUdxRel
  // usually used to stop the optimizer from calling us
  protected void transformToFarragoUdxRel(
      RelOptRuleCall call,
      SplunkTableScan splunkRel,
      LogicalFilter filter,
      LogicalProject topProj,
      LogicalProject bottomProj) {
    assert false;
/*
    RelNode rel =
        new EnumerableRules.EnumerableTableAccessRel(
            udxRel.getCluster(),
            udxRel.getTable(),
            udxRel.getRowType(),
            udxRel.getServerMofId());

    rel = RelOptUtil.createCastRel(rel, udxRel.getRowType(), true);

    rel = addProjectionRule(bottomProj, rel);

    if (filter != null) {
      rel =
          new LogicalFilter(filter.getCluster(), rel, filter.getCondition());
    }

    rel = addProjectionRule(topProj, rel);

    call.transformTo(rel);
*/
  }

  public static String getFieldsString(RelDataType row) {
    return row.getFieldNames().toString();
  }
}

// End SplunkPushDownRule.java
package io.pactfoundation.consumer.dsl;

import au.com.dius.pact.consumer.dsl.PM;
import au.com.dius.pact.consumer.dsl.PactDslJsonArray;
import au.com.dius.pact.core.model.PactSpecVersion;
import org.junit.jupiter.api.Test;

import java.util.Map;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Verifies that {@link LambdaDslJsonArray} produces exactly the same body
 * (and, where asserted, the same matching rules) as the classic
 * {@link PactDslJsonArray} builder.  Each test builds the expectation with
 * the old DSL and the actual with the lambda DSL, then compares.
 */
public class LambdaDslJsonArrayTest {

    @Test
    public void testObjectArray() {
        /*
            [
                {
                    "foo": "Foo"
                },
                {
                    "bar": "Bar"
                }
            ]
         */

        // Old DSL
        // NOTE: the baseline must use stringValue (not stringType) for "bar" —
        // the lambda side uses stringValue and this test asserts below that no
        // matching rules were generated.
        final String pactDslJson = new PactDslJsonArray("", "", null, false)
            .object()
            .stringValue("foo", "Foo")
            .closeObject()
            .object()
            .stringValue("bar", "Bar")
            .closeObject()
            .getBody().toString();

        // Lambda DSL
        final PactDslJsonArray actualPactDsl = new PactDslJsonArray("", "", null, false);
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .object((o) -> o.stringValue("foo", "Foo"))
            .object((o) -> o.stringValue("bar", "Bar"))
            .build();

        String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().isEmpty(), is(true));
    }

    @Test
    public void testStringArray() {
        /*
            [
                "Foo",
                "Bar",
                "x"
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray("", "", null, false)
            .string("Foo")
            .stringType("Bar")
            .stringMatcher("[a-z]", "x")
            .getBody().toString();

        // Lambda DSL
        final PactDslJsonArray actualPactDsl = new PactDslJsonArray("", "", null, false);
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .stringValue("Foo")
            .stringType("Bar")
            .stringMatcher("[a-z]", "x")
            .build();

        String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
    }

    @Test
    public void testNumberArray() {
        /*
            [
                1,
                2,
                3
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray("", "", null, false)
            .numberValue(1)
            .numberValue(2)
            .numberValue(3)
            .getBody().toString();

        // Lambda DSL
        final PactDslJsonArray actualPactDsl = new PactDslJsonArray("", "", null, false);
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .numberValue(1)
            .numberValue(2)
            .numberValue(3)
            .build();

        String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
    }

    @Test
    public void testAndMatchingRules() {
        /*
            [
                "fooBar"
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray("", "", null, false)
            .and("foobar", PM.stringType(), PM.includesStr("foo"), PM.stringMatcher("*Bar"))
            .getBody().toString();

        // Lambda DSL
        final PactDslJsonArray actualPactDsl = new PactDslJsonArray("", "", null, false);
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .and("foobar", PM.stringType(), PM.includesStr("foo"), PM.stringMatcher("*Bar"))
            .build();

        String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));

        // All three combined matchers must be registered, in order.
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(3));
        Map matcher = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(matcher.get("match"), is("type"));
        matcher = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(matcher.get("match"), is("include"));
        matcher = actualPactDsl.getMatchers().allMatchingRules().get(2).toMap(PactSpecVersion.V3);
        assertThat(matcher.get("match"), is("regex"));
    }

    @Test
    public void testOrMatchingRules() {
        /*
            [
                null
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray("", "", null, false)
            .or(null, PM.nullValue(), PM.date(), PM.ipAddress())
            .getBody().toString();

        // Lambda DSL
        final PactDslJsonArray actualPactDsl = new PactDslJsonArray("", "", null, false);
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .or(null, PM.nullValue(), PM.date(), PM.ipAddress())
            .build();

        String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));

        // All three alternative matchers must be registered, in order.
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(3));
        Map matcher = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(matcher.get("match"), is("null"));
        matcher = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(matcher.get("match"), is("date"));
        matcher = actualPactDsl.getMatchers().allMatchingRules().get(2).toMap(PactSpecVersion.V3);
        assertThat(matcher.get("match"), is("regex"));
    }

    @Test
    public void testArrayArray() {
        /*
            [
                ["a1", "a2"],
                [1, 2],
                [{"foo": "Foo"}]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray("", "", null, false)
            .array()
            .stringValue("a1")
            .stringValue("a2")
            .closeArray()
            .array()
            .numberValue(1)
            .numberValue(2)
            .closeArray()
            .array()
            .object()
            .stringValue("foo", "Foo")
            .closeObject()
            .closeArray()
            .getBody().toString();

        // Lambda DSL
        final PactDslJsonArray actualPactDsl = new PactDslJsonArray("", "", null, false);
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .array((a) -> a.stringValue("a1").stringValue("a2"))
            .array((a) -> a.numberValue(1).numberValue(2))
            .array((a) -> a.object((o) -> o.stringValue("foo", "Foo")))
            .build();

        String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
    }

    @Test
    public void testEachArrayLike() {
        /*
            [
                [
                    ["Foo"]
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .eachArrayLike()
            .stringType("Foo")
            .closeArray()
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .eachArrayLike(a -> a.stringType("Foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("match"), is("type"));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testEachArrayLikeWithExample() {
        /*
            [
                [
                    ["Foo", "Foo"]
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .eachArrayLike(2)
            .stringType("Foo")
            .closeArray()
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .eachArrayLike(2, a -> a.stringType("Foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("match"), is("type"));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testEachArrayWithMinLike() {
        /*
            [
                [
                    ["Foo"]
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .eachArrayWithMinLike(2)
            .stringType("Foo")
            .closeArray()
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .eachArrayWithMinLike(2, a -> a.stringType("Foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("min"), is(2));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testEachArrayWithMinLikeWithExample() {
        /*
            [
                [
                    ["Foo", "Foo", "Foo"]
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .eachArrayWithMinLike(3, 2)
            .stringType("Foo")
            .closeArray()
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .eachArrayWithMinLike(3, 2, a -> a.stringType("Foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("min"), is(2));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testEachArrayWithMaxLike() {
        /*
            [
                [
                    ["Foo"]
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .eachArrayWithMaxLike(2)
            .stringType("Foo")
            .closeArray()
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .eachArrayWithMaxLike(2, a -> a.stringType("Foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("max"), is(2));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testEachArrayWithMaxLikeWithExample() {
        /*
            [
                [
                    ["Foo", "Foo"]
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .eachArrayWithMaxLike(2, 3)
            .stringType("Foo")
            .closeArray()
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .eachArrayWithMaxLike(2, 3, a -> a.stringType("Foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("max"), is(3));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testEachArrayWithMinMaxLike() {
        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .eachArrayWithMinMaxLike(2, 10)
            .stringType("Foo")
            .closeArray()
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .eachArrayWithMinMaxLike(2, 10, a -> a.stringType("Foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("min"), is(2));
        assertThat(arrayRule.get("max"), is(10));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testEachArrayWithMinMaxLikeWithExample() {
        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .eachArrayWithMinMaxLike(2, 2, 10)
            .stringType("Foo")
            .closeArray()
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .eachArrayWithMinMaxLike(2, 10, 2, a -> a.stringType("Foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("min"), is(2));
        assertThat(arrayRule.get("max"), is(10));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testEachLike() {
        /*
            [
                [
                    {
                        "foo": "string"
                    }
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .eachLike()
            .stringType("foo")
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .eachLike(o -> o.stringType("foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("match"), is("type"));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testEachLikeWithExample() {
        /*
            [
                [
                    {
                        "foo": "string"
                    },
                    {
                        "foo": "string"
                    }
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .eachLike(2)
            .stringType("foo")
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .eachLike(2, o -> o.stringType("foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("match"), is("type"));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testMinArrayLike() {
        /*
            [
                [
                    {
                        "foo": "string"
                    },
                    {
                        "foo": "string"
                    }
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .minArrayLike(2)
            .stringType("foo")
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .minArrayLike(2, o -> o.stringType("foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("min"), is(2));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testMinArrayLikeWithExample() {
        /*
            [
                [
                    {
                        "foo": "string"
                    },
                    {
                        "foo": "string"
                    },
                    {
                        "foo": "string"
                    }
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .minArrayLike(2, 3)
            .stringType("foo")
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .minArrayLike(2, 3, o -> o.stringType("foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("min"), is(2));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testMaxArrayLike() {
        /*
            [
                [
                    {
                        "foo": "string"
                    }
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .maxArrayLike(2)
            .stringType("foo")
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .maxArrayLike(2, o -> o.stringType("foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("max"), is(2));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testMaxArrayLikeWithExample() {
        /*
            [
                [
                    {
                        "foo": "string"
                    },
                    {
                        "foo": "string"
                    }
                ]
            ]
         */

        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .maxArrayLike(3, 2)
            .stringType("foo")
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .maxArrayLike(3, 2, o -> o.stringType("foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("max"), is(3));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testMinMaxArrayLike() {
        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .minMaxArrayLike(2, 5)
            .stringType("foo")
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .minMaxArrayLike(2, 5, o -> o.stringType("foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("min"), is(2));
        assertThat(arrayRule.get("max"), is(5));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }

    @Test
    public void testMinMaxArrayLikeWithExample() {
        // Old DSL
        final String pactDslJson = new PactDslJsonArray()
            .minMaxArrayLike(3, 8, 4)
            .stringType("foo")
            .closeArray()
            .getBody()
            .toString();

        final PactDslJsonArray actualPactDsl = new PactDslJsonArray();
        final LambdaDslJsonArray array = new LambdaDslJsonArray(actualPactDsl);
        array
            .minMaxArrayLike(3, 8, 4, o -> o.stringType("foo"))
            .build();
        final String actualJson = actualPactDsl.getBody().toString();
        assertThat(actualJson, is(pactDslJson));
        assertThat(actualPactDsl.getMatchers().allMatchingRules().size(), is(2));
        final Map<String, Object> arrayRule = actualPactDsl.getMatchers().allMatchingRules().get(0).toMap(PactSpecVersion.V3);
        assertThat(arrayRule.get("min"), is(3));
        assertThat(arrayRule.get("max"), is(8));
        final Map<String, Object> objectRule = actualPactDsl.getMatchers().allMatchingRules().get(1).toMap(PactSpecVersion.V3);
        assertThat(objectRule.get("match"), is("type"));
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.persistence.impl.journal; import java.io.File; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; import java.util.EnumSet; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import org.apache.activemq.artemis.api.core.ActiveMQBuffer; import org.apache.activemq.artemis.api.core.ActiveMQException; import org.apache.activemq.artemis.api.core.ActiveMQIllegalStateException; import org.apache.activemq.artemis.api.core.ActiveMQInternalErrorException; import org.apache.activemq.artemis.api.core.Message; import org.apache.activemq.artemis.api.core.Pair; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.core.config.Configuration; import org.apache.activemq.artemis.core.io.IOCriticalErrorListener; import org.apache.activemq.artemis.core.io.SequentialFile; import org.apache.activemq.artemis.core.io.SequentialFileFactory; 
import org.apache.activemq.artemis.core.io.aio.AIOSequentialFileFactory;
import org.apache.activemq.artemis.core.io.mapped.MappedSequentialFileFactory;
import org.apache.activemq.artemis.core.io.nio.NIOSequentialFileFactory;
import org.apache.activemq.artemis.core.journal.Journal;
import org.apache.activemq.artemis.core.journal.impl.JournalFile;
import org.apache.activemq.artemis.core.journal.impl.JournalImpl;
import org.apache.activemq.artemis.core.paging.PagedMessage;
import org.apache.activemq.artemis.core.paging.PagingManager;
import org.apache.activemq.artemis.core.paging.PagingStore;
import org.apache.activemq.artemis.core.persistence.OperationContext;
import org.apache.activemq.artemis.core.persistence.impl.journal.codec.LargeMessagePersister;
import org.apache.activemq.artemis.core.persistence.impl.journal.codec.PendingLargeMessageEncoding;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.ReplicationLiveIsStoppingMessage;
import org.apache.activemq.artemis.core.replication.ReplicatedJournal;
import org.apache.activemq.artemis.core.replication.ReplicationManager;
import org.apache.activemq.artemis.core.server.ActiveMQMessageBundle;
import org.apache.activemq.artemis.core.server.ActiveMQServerLogger;
import org.apache.activemq.artemis.core.server.JournalType;
import org.apache.activemq.artemis.core.server.LargeServerMessage;
import org.apache.activemq.artemis.core.server.files.FileStoreMonitor;
import org.apache.activemq.artemis.utils.ExecutorFactory;
import org.jboss.logging.Logger;

/**
 * Journal-backed storage manager: persists bindings and messages in file
 * journals (NIO/AIO/mapped) and large messages as individual files, with
 * optional replication to a backup via a {@link ReplicationManager}.
 */
public class JournalStorageManager extends AbstractJournalStorageManager {

   private static final Logger logger = Logger.getLogger(JournalStorageManager.class);

   private SequentialFileFactory journalFF;

   private SequentialFileFactory bindingsFF;

   // package-visible: also used by replication/large-message code in this package
   SequentialFileFactory largeMessagesFactory;

   // the local (unwrapped) journals, kept so they can be restored if
   // replication wraps messageJournal/bindingsJournal
   private Journal originalMessageJournal;

   private Journal originalBindingsJournal;

   protected String largeMessagesDirectory;

   // non-null while this node is replicating to a backup
   private ReplicationManager replicator;

   public JournalStorageManager(final Configuration config,
                                final ExecutorFactory executorFactory,
                                final ScheduledExecutorService scheduledExecutorService,
                                final ExecutorFactory ioExecutors) {
      this(config, executorFactory, scheduledExecutorService, ioExecutors, null);
   }

   public JournalStorageManager(final Configuration config,
                                final ExecutorFactory executorFactory,
                                final ExecutorFactory ioExecutors) {
      this(config, executorFactory, null, ioExecutors, null);
   }

   public JournalStorageManager(final Configuration config,
                                final ExecutorFactory executorFactory,
                                final ScheduledExecutorService scheduledExecutorService,
                                final ExecutorFactory ioExecutors,
                                final IOCriticalErrorListener criticalErrorListener) {
      super(config, executorFactory, scheduledExecutorService, ioExecutors, criticalErrorListener);
   }

   public JournalStorageManager(final Configuration config,
                                final ExecutorFactory executorFactory,
                                final ExecutorFactory ioExecutors,
                                final IOCriticalErrorListener criticalErrorListener) {
      super(config, executorFactory, null, ioExecutors, criticalErrorListener);
   }

   /**
    * Creates the bindings journal (always NIO), the message journal (NIO,
    * AIO or mapped according to configuration), the large-message file
    * factory, and the optional paging-concurrency semaphore.
    */
   @Override
   protected void init(Configuration config, IOCriticalErrorListener criticalErrorListener) {
      if (!EnumSet.allOf(JournalType.class).contains(config.getJournalType())) {
         throw ActiveMQMessageBundle.BUNDLE.invalidJournal();
      }

      bindingsFF = new NIOSequentialFileFactory(config.getBindingsLocation(), criticalErrorListener, config.getJournalMaxIO_NIO());
      bindingsFF.setDatasync(config.isJournalDatasync());

      Journal localBindings = new JournalImpl(ioExecutors, 1024 * 1024, 2, config.getJournalCompactMinFiles(), config.getJournalPoolFiles(), config.getJournalCompactPercentage(), bindingsFF, "activemq-bindings", "bindings", 1, 0);

      bindingsJournal = localBindings;
      originalBindingsJournal = localBindings;

      switch (config.getJournalType()) {
         case NIO:
            ActiveMQServerLogger.LOGGER.journalUseNIO();
            journalFF = new NIOSequentialFileFactory(config.getJournalLocation(), true, config.getJournalBufferSize_NIO(), config.getJournalBufferTimeout_NIO(), config.getJournalMaxIO_NIO(), config.isLogJournalWriteRate(), criticalErrorListener);
            break;
         case ASYNCIO:
            ActiveMQServerLogger.LOGGER.journalUseAIO();
            journalFF = new AIOSequentialFileFactory(config.getJournalLocation(), config.getJournalBufferSize_AIO(), config.getJournalBufferTimeout_AIO(), config.getJournalMaxIO_AIO(), config.isLogJournalWriteRate(), criticalErrorListener);
            break;
         case MAPPED:
            ActiveMQServerLogger.LOGGER.journalUseMAPPED();
            // the mapped version do not need buffering by default
            journalFF = new MappedSequentialFileFactory(config.getJournalLocation(), criticalErrorListener, true).chunkBytes(config.getJournalFileSize()).overlapBytes(0);
            break;
         default:
            throw ActiveMQMessageBundle.BUNDLE.invalidJournalType2(config.getJournalType());
      }

      journalFF.setDatasync(config.isJournalDatasync());

      Journal localMessage = new JournalImpl(ioExecutors, config.getJournalFileSize(), config.getJournalMinFiles(), config.getJournalPoolFiles(), config.getJournalCompactMinFiles(), config.getJournalCompactPercentage(), journalFF, "activemq-data", "amq", journalFF.getMaxIO(), 0);

      messageJournal = localMessage;
      originalMessageJournal = localMessage;

      largeMessagesDirectory = config.getLargeMessagesDirectory();

      largeMessagesFactory = new NIOSequentialFileFactory(config.getLargeMessagesLocation(), false, criticalErrorListener, 1);

      if (config.getPageMaxConcurrentIO() != 1) {
         pageMaxConcurrentIO = new Semaphore(config.getPageMaxConcurrentIO());
      } else {
         pageMaxConcurrentIO = null;
      }
   }

   // Life Cycle Handlers

   @Override
   protected void beforeStart() throws Exception {
      checkAndCreateDir(config.getBindingsLocation(), config.isCreateBindingsDir());
      checkAndCreateDir(config.getJournalLocation(), config.isCreateJournalDir());
      checkAndCreateDir(config.getLargeMessagesLocation(), config.isCreateJournalDir());
      cleanupIncompleteFiles();
   }

   @Override
   protected void beforeStop() throws Exception {
      if (replicator != null) {
         replicator.stop();
      }
   }

   @Override
   public void stop() throws Exception {
      stop(false, true);
   }

   public boolean isReplicated() {
      return replicator != null;
   }

   // Removes partially-written large-message files ("*.tmp") left over
   // from an incomplete previous run.
   private void cleanupIncompleteFiles() throws Exception {
      if (largeMessagesFactory != null) {
         List<String> tmpFiles = largeMessagesFactory.listFiles("tmp");
         for (String tmpFile : tmpFiles) {
            SequentialFile file = largeMessagesFactory.createSequentialFile(tmpFile);
            file.delete();
         }
      }
   }

   /**
    * Stops the storage manager.
    *
    * @param ioCriticalError when true, skips the cleanup work (pending
    *                        large-message deletes, id persistence) because I/O
    *                        can no longer be trusted
    * @param sendFailover    when true and replicating, tells the backup the
    *                        live is stopping so it can fail over
    */
   @Override
   public synchronized void stop(boolean ioCriticalError, boolean sendFailover) throws Exception {
      if (!started) {
         return;
      }

      if (!ioCriticalError) {
         performCachedLargeMessageDeletes();
         // Must call close to make sure last id is persisted
         if (journalLoaded && idGenerator != null)
            idGenerator.persistCurrentID();
      }

      // drain the executor: the latch task runs only after previously
      // submitted storage tasks have completed
      final CountDownLatch latch = new CountDownLatch(1);
      try {
         executor.execute(new Runnable() {
            @Override
            public void run() {
               latch.countDown();
            }
         });

         latch.await(30, TimeUnit.SECONDS);
      } catch (RejectedExecutionException ignored) {
         // that's ok
      }

      // We cache the variable as the replicator could be changed between here and the time we call stop
      // since sendLiveIsStopping may issue a close back from the channel
      // and we want to ensure a stop here just in case
      ReplicationManager replicatorInUse = replicator;
      if (replicatorInUse != null) {
         if (sendFailover) {
            final OperationContext token = replicator.sendLiveIsStopping(ReplicationLiveIsStoppingMessage.LiveStopping.FAIL_OVER);
            if (token != null) {
               try {
                  token.waitCompletion(5000);
               } catch (Exception e) {
                  // ignore it
               }
            }
         }
         replicatorInUse.stop();
      }
      bindingsJournal.stop();

      messageJournal.stop();

      journalLoaded = false;

      started = false;
   }

   /**
    * Assumption is that this is only called with a writeLock on the StorageManager.
    */
   @Override
   protected void performCachedLargeMessageDeletes() {
      for (Long largeMsgId : largeMessagesToDelete) {
         SequentialFile msg = createFileForLargeMessage(largeMsgId, LargeMessageExtension.DURABLE);
         try {
            msg.delete();
         } catch (Exception e) {
            ActiveMQServerLogger.LOGGER.journalErrorDeletingMessage(e, largeMsgId);
         }
         if (replicator != null) {
            // propagate the delete to the backup as well
            replicator.largeMessageDelete(largeMsgId);
         }
      }
      largeMessagesToDelete.clear();
   }

   // Maps the durable flag onto the corresponding file-name extension.
   protected SequentialFile createFileForLargeMessage(final long messageID, final boolean durable) {
      if (durable) {
         return createFileForLargeMessage(messageID, LargeMessageExtension.DURABLE);
      } else {
         return createFileForLargeMessage(messageID, LargeMessageExtension.TEMPORARY);
      }
   }

   @Override
   /**
    * Decodes a large-message record from the buffer.  For compatibility with
    * older behaviour, if the message carries HDR_ORIG_MESSAGE_ID and its own
    * file does not exist yet, the original message's file is copied so the
    * data is not lost.
    *
    * @param messages map of messages being loaded (unused here; part of the inherited contract)
    * @param buff     buffer containing the encoded large message
    * @return the decoded large message
    * @throws Exception on decode or file errors
    */
   protected LargeServerMessage parseLargeMessage(final Map<Long, Message> messages, final ActiveMQBuffer buff) throws Exception {
      LargeServerMessage largeMessage = createLargeMessage();

      LargeMessagePersister.getInstance().decode(buff, largeMessage);

      if (largeMessage.containsProperty(Message.HDR_ORIG_MESSAGE_ID)) {
         // for compatibility: couple with old behaviour, copying the old file to avoid message loss
         long originalMessageID = largeMessage.getLongProperty(Message.HDR_ORIG_MESSAGE_ID);

         SequentialFile currentFile = createFileForLargeMessage(largeMessage.getMessageID(), true);

         if (!currentFile.exists()) {
            SequentialFile linkedFile = createFileForLargeMessage(originalMessageID, true);
            if (linkedFile.exists()) {
               linkedFile.copyTo(currentFile);
               linkedFile.close();
            }
         }

         currentFile.close();
      }

      return largeMessage;
   }

   @Override
   public void pageClosed(final SimpleString storeName, final int pageNumber) {
      if (isReplicated()) {
         readLock();
         try {
            // re-check under the lock: the replicator may have gone away
            if (isReplicated())
               replicator.pageClosed(storeName, pageNumber);
         } finally {
            readUnLock();
         }
      }
   }

   @Override
   public void pageDeleted(final SimpleString storeName, final int pageNumber) {
      if (isReplicated()) {
         readLock();
         try {
            if (isReplicated())
               replicator.pageDeleted(storeName, pageNumber);
         } finally {
            readUnLock();
         }
      }
   }

   @Override
   public void pageWrite(final PagedMessage message, final int pageNumber) {
      if (isReplicated()) {
         // Note: (https://issues.jboss.org/browse/HORNETQ-1059)
         // We have to replicate durable and non-durable messages on paging
         // since acknowledgments are written using the page-position.
         // Say you are sending durable and non-durable messages to a page
         // The ACKs would be done to wrong positions, and the backup would be a mess
         readLock();
         try {
            if (isReplicated())
               replicator.pageWrite(message, pageNumber);
         } finally {
            readUnLock();
         }
      }
   }

   @Override
   public ByteBuffer allocateDirectBuffer(int size) {
      return journalFF.allocateDirectBuffer(size);
   }

   @Override
   public void freeDirectBuffer(ByteBuffer buffer) {
      journalFF.releaseBuffer(buffer);
   }

   /**
    * Appends an ADD_LARGE_MESSAGE_PENDING record for {@code messageID} to the
    * message journal and returns the new record id.
    */
   public long storePendingLargeMessage(final long messageID) throws Exception {
      readLock();
      try {
         long recordID = generateID();

         messageJournal.appendAddRecord(recordID, JournalRecordIds.ADD_LARGE_MESSAGE_PENDING, new PendingLargeMessageEncoding(messageID), true, getContext(true));

         return recordID;
      } finally {
         readUnLock();
      }
   }

   // This should be accessed from this package only
   void deleteLargeMessageFile(final LargeServerMessage largeServerMessage) throws ActiveMQException {
      if (largeServerMessage.getPendingRecordID() < 0) {
         try {
            // The delete file happens asynchronously
            // And the client won't be waiting for the actual file to be deleted.
            // We set a temporary record (short lived) on the journal
            // to avoid a situation where the server is restarted and pending large message stays on forever
            largeServerMessage.setPendingRecordID(storePendingLargeMessage(largeServerMessage.getMessageID()));
         } catch (Exception e) {
            throw new ActiveMQInternalErrorException(e.getMessage(), e);
         }
      }
      final SequentialFile file = largeServerMessage.getFile();
      if (file == null) {
         return;
      }

      if (largeServerMessage.isDurable() && isReplicated()) {
         readLock();
         try {
            if (isReplicated() && replicator.isSynchronizing()) {
               // while the backup is still synchronizing we cannot delete the
               // file; defer the delete until performCachedLargeMessageDeletes
               synchronized (largeMessagesToDelete) {
                  largeMessagesToDelete.add(Long.valueOf(largeServerMessage.getMessageID()));
                  confirmLargeMessage(largeServerMessage);
               }
               return;
            }
         } finally {
            readUnLock();
         }
      }
      Runnable deleteAction = new Runnable() {
         @Override
         public void run() {
            try {
               readLock();
               try {
                  if (replicator != null) {
                     replicator.largeMessageDelete(largeServerMessage.getMessageID());
                  }
                  file.delete();

                  // The confirm could only be done after the actual delete is done
                  confirmLargeMessage(largeServerMessage);
               } finally {
                  readUnLock();
               }
            } catch (Exception e) {
               ActiveMQServerLogger.LOGGER.journalErrorDeletingMessage(e, largeServerMessage.getMessageID());
            }
         }
      };

      if (executor == null) {
         deleteAction.run();
      } else {
         executor.execute(deleteAction);
      }
   }

   @Override
   public LargeServerMessage createLargeMessage() {
      return new LargeServerMessageImpl(this);
   }

   @Override
   public LargeServerMessage createLargeMessage(final long id, final Message message) throws Exception {
      readLock();
      try {
         if (isReplicated()) {
            replicator.largeMessageBegin(id);
         }

         LargeServerMessageImpl largeMessage = (LargeServerMessageImpl) createLargeMessage();

         largeMessage.copyHeadersAndProperties(message);

         largeMessage.setMessageID(id);

         // We do this here to avoid a case where the replication gets a list without this file
         // to avoid a race
         largeMessage.validateFile();

         if (largeMessage.isDurable()) {
            // We store a marker on the journal that the large file is pending
long pendingRecordID = storePendingLargeMessage(id); largeMessage.setPendingRecordID(pendingRecordID); } return largeMessage; } finally { readUnLock(); } } @Override public SequentialFile createFileForLargeMessage(final long messageID, LargeMessageExtension extension) { return largeMessagesFactory.createSequentialFile(messageID + extension.getExtension()); } /** * Send an entire journal file to a replicating backup server. */ private void sendJournalFile(JournalFile[] journalFiles, JournalContent type) throws Exception { for (JournalFile jf : journalFiles) { if (!started) return; replicator.syncJournalFile(jf, type); } } private JournalFile[] prepareJournalForCopy(Journal journal, JournalContent contentType, String nodeID, boolean autoFailBack) throws Exception { journal.forceMoveNextFile(); JournalFile[] datafiles = journal.getDataFiles(); replicator.sendStartSyncMessage(datafiles, contentType, nodeID, autoFailBack); return datafiles; } @Override public void startReplication(ReplicationManager replicationManager, PagingManager pagingManager, String nodeID, final boolean autoFailBack, long initialReplicationSyncTimeout) throws Exception { if (!started) { throw new IllegalStateException("JournalStorageManager must be started..."); } assert replicationManager != null; if (!(messageJournal instanceof JournalImpl) || !(bindingsJournal instanceof JournalImpl)) { throw ActiveMQMessageBundle.BUNDLE.notJournalImpl(); } // We first do a compact without any locks, to avoid copying unnecessary data over the network. 
// We do this without holding the storageManager lock, so the journal stays open while compact is being done originalMessageJournal.scheduleCompactAndBlock(-1); originalBindingsJournal.scheduleCompactAndBlock(-1); JournalFile[] messageFiles = null; JournalFile[] bindingsFiles = null; // We get a picture of the current sitaution on the large messages // and we send the current messages while more state is coming Map<Long, Pair<String, Long>> pendingLargeMessages = null; try { Map<SimpleString, Collection<Integer>> pageFilesToSync; storageManagerLock.writeLock().lock(); try { if (isReplicated()) throw new ActiveMQIllegalStateException("already replicating"); replicator = replicationManager; // Establishes lock originalMessageJournal.synchronizationLock(); originalBindingsJournal.synchronizationLock(); try { originalBindingsJournal.replicationSyncPreserveOldFiles(); originalMessageJournal.replicationSyncPreserveOldFiles(); pagingManager.lock(); try { pagingManager.disableCleanup(); messageFiles = prepareJournalForCopy(originalMessageJournal, JournalContent.MESSAGES, nodeID, autoFailBack); bindingsFiles = prepareJournalForCopy(originalBindingsJournal, JournalContent.BINDINGS, nodeID, autoFailBack); pageFilesToSync = getPageInformationForSync(pagingManager); pendingLargeMessages = recoverPendingLargeMessages(); } finally { pagingManager.unlock(); } } finally { originalMessageJournal.synchronizationUnlock(); originalBindingsJournal.synchronizationUnlock(); } bindingsJournal = new ReplicatedJournal(((byte) 0), originalBindingsJournal, replicator); messageJournal = new ReplicatedJournal((byte) 1, originalMessageJournal, replicator); // We need to send the list while locking otherwise part of the body might get sent too soon // it will send a list of IDs that we are allocating replicator.sendLargeMessageIdListMessage(pendingLargeMessages); } finally { storageManagerLock.writeLock().unlock(); } sendJournalFile(messageFiles, JournalContent.MESSAGES); 
sendJournalFile(bindingsFiles, JournalContent.BINDINGS); sendLargeMessageFiles(pendingLargeMessages); sendPagesToBackup(pageFilesToSync, pagingManager); storageManagerLock.writeLock().lock(); try { if (replicator != null) { replicator.sendSynchronizationDone(nodeID, initialReplicationSyncTimeout); performCachedLargeMessageDeletes(); } } finally { storageManagerLock.writeLock().unlock(); } } catch (Exception e) { logger.warn(e.getMessage(), e); stopReplication(); throw e; } finally { // Re-enable compact and reclaim of journal files originalBindingsJournal.replicationSyncFinished(); originalMessageJournal.replicationSyncFinished(); pagingManager.resumeCleanup(); } } private void sendLargeMessageFiles(final Map<Long, Pair<String, Long>> pendingLargeMessages) throws Exception { Iterator<Map.Entry<Long, Pair<String, Long>>> iter = pendingLargeMessages.entrySet().iterator(); while (started && iter.hasNext()) { Map.Entry<Long, Pair<String, Long>> entry = iter.next(); String fileName = entry.getValue().getA(); final long id = entry.getKey(); long size = entry.getValue().getB(); SequentialFile seqFile = largeMessagesFactory.createSequentialFile(fileName); if (!seqFile.exists()) continue; if (replicator != null) { replicator.syncLargeMessageFile(seqFile, size, id); } else { throw ActiveMQMessageBundle.BUNDLE.replicatorIsNull(); } } } /** * @param pagingManager * @return * @throws Exception */ private Map<SimpleString, Collection<Integer>> getPageInformationForSync(PagingManager pagingManager) throws Exception { Map<SimpleString, Collection<Integer>> info = new HashMap<>(); for (SimpleString storeName : pagingManager.getStoreNames()) { PagingStore store = pagingManager.getPageStore(storeName); info.put(storeName, store.getCurrentIds()); store.forceAnotherPage(); } return info; } private void checkAndCreateDir(final File dir, final boolean create) { if (!dir.exists()) { if (create) { if (!dir.mkdirs()) { throw new IllegalStateException("Failed to create directory " + dir); } 
} else { throw ActiveMQMessageBundle.BUNDLE.cannotCreateDir(dir.getAbsolutePath()); } } } /** * Sets a list of large message files into the replicationManager for synchronization. * <p> * Collects a list of existing large messages and their current size, passing re. * <p> * So we know how much of a given message to sync with the backup. Further data appends to the * messages will be replicated normally. * * @throws Exception */ private Map<Long, Pair<String, Long>> recoverPendingLargeMessages() throws Exception { Map<Long, Pair<String, Long>> largeMessages = new HashMap<>(); // only send durable messages... // listFiles append a "." to anything... List<String> filenames = largeMessagesFactory.listFiles("msg"); List<Long> idList = new ArrayList<>(); for (String filename : filenames) { Long id = getLargeMessageIdFromFilename(filename); if (!largeMessagesToDelete.contains(id)) { idList.add(id); SequentialFile seqFile = largeMessagesFactory.createSequentialFile(filename); long size = seqFile.size(); largeMessages.put(id, new Pair<>(filename, size)); } } return largeMessages; } /** * @param pageFilesToSync * @throws Exception */ private void sendPagesToBackup(Map<SimpleString, Collection<Integer>> pageFilesToSync, PagingManager manager) throws Exception { for (Map.Entry<SimpleString, Collection<Integer>> entry : pageFilesToSync.entrySet()) { if (!started) return; PagingStore store = manager.getPageStore(entry.getKey()); store.sendPages(replicator, entry.getValue()); } } private long getLargeMessageIdFromFilename(String filename) { return Long.parseLong(filename.split("\\.")[0]); } /** * Stops replication by resetting replication-related fields to their 'unreplicated' state. 
*/ @Override public void stopReplication() { logger.trace("stopReplication()"); storageManagerLock.writeLock().lock(); try { if (replicator == null) return; bindingsJournal = originalBindingsJournal; messageJournal = originalMessageJournal; try { replicator.stop(); } catch (Exception e) { ActiveMQServerLogger.LOGGER.errorStoppingReplicationManager(e); } replicator = null; // delete inside the writeLock. Avoids a lot of state checking and races with // startReplication. // This method should not be called under normal circumstances performCachedLargeMessageDeletes(); } finally { storageManagerLock.writeLock().unlock(); } } @Override public final void addBytesToLargeMessage(final SequentialFile file, final long messageId, final byte[] bytes) throws Exception { readLock(); try { file.position(file.size()); file.writeDirect(ByteBuffer.wrap(bytes), false); if (isReplicated()) { replicator.largeMessageWrite(messageId, bytes); } } finally { readUnLock(); } } @Override public void injectMonitor(FileStoreMonitor monitor) throws Exception { if (journalFF != null) { monitor.addStore(journalFF.getDirectory()); } if (largeMessagesFactory != null) { monitor.addStore(largeMessagesFactory.getDirectory()); } if (bindingsFF != null) { monitor.addStore(bindingsFF.getDirectory()); } } }
// Copyright 2015 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.dfp.jaxws.utils.v201502; import com.google.api.ads.dfp.jaxws.v201502.BooleanValue; import com.google.api.ads.dfp.jaxws.v201502.ColumnType; import com.google.api.ads.dfp.jaxws.v201502.Date; import com.google.api.ads.dfp.jaxws.v201502.DateTime; import com.google.api.ads.dfp.jaxws.v201502.DateTimeValue; import com.google.api.ads.dfp.jaxws.v201502.DateValue; import com.google.api.ads.dfp.jaxws.v201502.NumberValue; import com.google.api.ads.dfp.jaxws.v201502.ResultSet; import com.google.api.ads.dfp.jaxws.v201502.Row; import com.google.api.ads.dfp.jaxws.v201502.SetValue; import com.google.api.ads.dfp.jaxws.v201502.Targeting; import com.google.api.ads.dfp.jaxws.v201502.TargetingValue; import com.google.api.ads.dfp.jaxws.v201502.TextValue; import com.google.api.ads.dfp.jaxws.v201502.Value; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Strings; import com.google.common.collect.Lists; import org.apache.commons.lang.StringEscapeUtils; import java.text.NumberFormat; import java.text.ParseException; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; /** * A utility class for handling PQL objects. 
A typical use case is to combine * result sets from the PublisherQueryLanguageService to then create a CSV * such as: * * <pre> * <code> * ResultSet combinedResultSet = null * do { * ResultSet resultSet = pqlService.select(pagedStatement); * combinedResultSet = (combinedResultSet == null) ? resultSet : * Pql.combineResultSet(combinedResultSet, resultSet); * // ... * } while(!finishedCollectingResultSets) * //... * CsvFiles.writeCsv(Pql.resultSetToStringArrayList(combinedResultSet), filePath); * </code> * </pre> */ public final class Pql { /** * {@code Pql} is meant to be used statically. */ private Pql() {} /** * Creates a {@link Value} from the value i.e. a {@link TextValue} for a * value of type {@code String}, {@link BooleanValue} for type * {@code Boolean}, {@link NumberValue} for type {@code Double}, * {@code Long}, or {@code Integer}, and {@link DateTimeValue} for type * {@link DateTime}. If the value is a {@code Value}, the value is returned. * If the value is {@code null}, an empty {@link TextValue} is returned. 
* * @param value the value to convert * @return the constructed value of the appropriate type * @throws IllegalArgumentException if value cannot be converted */ public static Value createValue(Object value) { if (value instanceof Value) { return (Value) value; } else if (value == null) { return new TextValue(); } else { if (value instanceof Boolean) { BooleanValue booleanValue = new BooleanValue(); booleanValue.setValue((Boolean) value); return booleanValue; } else if (value instanceof Double || value instanceof Long || value instanceof Integer) { NumberValue numberValue = new NumberValue(); numberValue.setValue(value.toString()); return numberValue; } else if (value instanceof String) { TextValue textValue = new TextValue(); textValue.setValue((String) value); return textValue; } else if (value instanceof DateTime) { DateTimeValue dateTimeValue = new DateTimeValue(); dateTimeValue.setValue((DateTime) value); return dateTimeValue; } else if (value instanceof Date) { DateValue dateValue = new DateValue(); dateValue.setValue((Date) value); return dateValue; } else if (value instanceof Targeting) { TargetingValue targetingValue = new TargetingValue(); targetingValue.setValue((Targeting) value); return targetingValue; } else if (value instanceof Set<?>) { SetValue setValue = new SetValue(); Set<Value> values = new LinkedHashSet<Value>(); for (Object entry : (Set<?>) value) { validateSetValueEntryForSet(createValue(entry), values); values.add(createValue(entry)); } setValue.getValues().addAll(values); return setValue; } else { throw new IllegalArgumentException("Unsupported Value type [" + value.getClass() + "]"); } } } /** * Validates that an Object is a valid entry for a SetValue * * @param entry the Object to validate * @throws IllegalArgumentException if the Object is an unsupported type */ private static void validateSetValueEntryForSet(Object entry, Set<?> set) { if (entry instanceof Set<?> || entry instanceof SetValue) { throw new 
IllegalArgumentException("Unsupported Value type [nested sets]"); } if (!set.isEmpty()) { Object existingEntry = set.iterator().next(); if (!existingEntry.getClass().isAssignableFrom(entry.getClass())) { throw new IllegalArgumentException(String.format("Unsupported Value type [SetValue with " + "mixed types %s and %s]", existingEntry.getClass(), entry.getClass())); } } } /** * Creates a String from the Value. Date and DateTime values are converted * using the rules of {@link DateTimes#toString(Date)} and * {@link DateTimes#toStringWithTimeZone(DateTime)} respectively. * * @param value the value to convert * @return the string representation of the value or an empty string for null * @throws IllegalArgumentException if value cannot be converted */ public static String toString(Value value) { Object unwrappedValue = getCsvValue(value); if (unwrappedValue == null) { return ""; } else { return unwrappedValue.toString(); } } /** * Gets the underlying value of the {@code Value} object that's comparable * to what would be returned in any other API object (i.e. DateTimeValue * will return an API DateTime, not a Joda DateTime). 
* * @param value the value to convert * @returns the native value of {@code Value} or {@code null} if the * underlying value is null * @throws IllegalArgumentException if value cannot be converted */ public static Object getApiValue(Value value) { if (value instanceof BooleanValue) { return ((BooleanValue) value).isValue(); } else if (value instanceof NumberValue) { if (((NumberValue) value).getValue() == null) { return null; } else { try { return NumberFormat.getInstance().parse(((NumberValue) value).getValue()); } catch (ParseException e) { throw new IllegalStateException("Recieved invalid number format from API."); } } } else if (value instanceof TextValue) { return ((TextValue) value).getValue(); } else if (value instanceof DateTimeValue) { return ((DateTimeValue) value).getValue(); } else if (value instanceof DateValue) { return ((DateValue) value).getValue(); } else if (value instanceof TargetingValue) { return ((TargetingValue) value).getValue(); } else if (value instanceof SetValue) { List<Value> setValues = ((SetValue) value).getValues(); Set<Object> apiValue = new LinkedHashSet<Object>(); if (setValues != null) { for (Value setValue : setValues) { validateSetValueEntryForSet(getApiValue(setValue), apiValue); apiValue.add(getApiValue(setValue)); } } return apiValue; } else { throw new IllegalArgumentException("Unsupported Value type [" + value.getClass() + "]"); } } /** * Gets the underlying value of the {@code Value} object that's considered * native to Java (i.e. DateTimeValue will return a Joda DateTime, not an API * DateTime and DateValue will return a string in the form of "yyyy-MM-dd"). 
* * @param value the value to convert * @returns the native value of {@code Value} or {@code null} if the * underlying value is null * @throws IllegalArgumentException if value cannot be converted */ public static Object getNativeValue(Value value) { return getNativeValue(getApiValue(value)); } /** * Gets the underlying value of the api value object that's considered * native to Java (i.e. DateTimeValue will return a Joda DateTime, not an API * DateTime and DateValue will return a string in the form of "yyyy-MM-dd"). * * @param apiValue the api value to convert * @returns the native value or {@code null} if the * underlying value is null * @throws IllegalArgumentException if apiValue cannot be converted */ private static Object getNativeValue(Object apiValue) { if (apiValue == null) { return null; } else if (apiValue instanceof DateTime) { return DateTimes.toDateTime((DateTime) apiValue); } else if (apiValue instanceof Date) { return DateTimes.toString((Date) apiValue); } else if (apiValue instanceof Set<?>) { Set<Object> nativeValues = new LinkedHashSet<Object>(); for (Object entry : (Set<?>) apiValue) { validateSetValueEntryForSet(getNativeValue(entry), nativeValues); nativeValues.add(getNativeValue(entry)); } return nativeValues; } else { return apiValue; } } /** * Gets the underlying value of the {@code Value} object that should be used * for CSV conversion (i.e. DateTimeValue will return a String representation, * but NumberValue will return a Long or Double). * * @param value the value to convert * @returns the CSV conversion value of {@code Value} or {@code null} if the * underlying value is null * @throws IllegalArgumentException if value cannot be converted */ public static Object getCsvValue(Value value) { return getCsvValue(getApiValue(value)); } /** * Gets the underlying value of the {@code Value} object that should be used * for CSV conversion (i.e. DateTimeValue will return a String representation, * but NumberValue will return a Long or Double). 
* * @param apiValue the api value to convert * @returns the CSV conversion value of the api value or {@code null} if the * underlying value is null * @throws IllegalArgumentException if value cannot be converted */ private static Object getCsvValue(Object apiValue) { if (apiValue == null) { return null; } else if (apiValue instanceof DateTime) { return DateTimes.toStringWithTimeZone((DateTime) apiValue); } else if (apiValue instanceof Date) { return DateTimes.toString((Date) apiValue); } else if (apiValue instanceof Set<?>) { Set<Object> csvValues = new LinkedHashSet<Object>(); for (Object entry : (Set<?>) apiValue) { Object csvValue = getCsvValue(entry); validateSetValueEntryForSet(csvValue, csvValues); if (csvValue instanceof String) { csvValue = StringEscapeUtils.escapeCsv((String) csvValue); } csvValues.add(csvValue); } return Joiner.on(",").join(csvValues); } else if (apiValue instanceof Targeting) { throw new IllegalArgumentException("Unsupported Value type [" + apiValue.getClass() + "]"); } else { return apiValue; } } /** * Gets the result set as list of string arrays, which can be transformed to * a CSV using {@code CsvFiles} such as * <pre> * <code> * ResultSet combinedResultSet = Pql.combineResultSet(resultSet1, resultSet2); * //... 
* combinedResultSet = Pql.combineResultSet(combinedResultSet, resultSet3); * CsvFiles.writeCsv(Pql.resultSetToStringArrayList(combinedResultSet), filePath); * </code> * </pre> * * @param resultSet the result set to convert to a CSV compatible format * @return a list of string arrays representing the result set */ public static List<String[]> resultSetToStringArrayList(ResultSet resultSet) { List<String[]> stringArrayList = Lists.newArrayList(); stringArrayList.add(getColumnLabels(resultSet).toArray(new String[] {})); if (resultSet.getRows() != null) { for (Row row : resultSet.getRows()) { try { stringArrayList.add(getRowStringValues(row).toArray(new String[] {})); } catch (IllegalArgumentException e) { throw new IllegalStateException("Cannot convert result set to string array list", e); } } } return stringArrayList; } /** * Gets the result set as a table representation in the form of: * * <pre> * +-------+-------+-------+ * |column1|column2|column3| * +-------+-------+-------+ * |value1 |value2 |value3 | * +-------+-------+-------+ * |value1 |value2 |value3 | * +-------+-------+-------+ * </pre> * * @param resultSet the result set to display as a string * @return the string representation of result set as a table * @throws IllegalAccessException if the values of the result set cannot be * accessed */ public static String resultSetToString(ResultSet resultSet) throws IllegalAccessException { StringBuilder resultSetStringBuilder = new StringBuilder(); List<String[]> resultSetStringArrayList = resultSetToStringArrayList(resultSet); List<Integer> maxColumnSizes = getMaxColumnSizes(resultSetStringArrayList); String rowTemplate = createRowTemplate(maxColumnSizes); String rowSeparator = createRowSeperator(maxColumnSizes); resultSetStringBuilder.append(rowSeparator); for (int i = 0; i < resultSetStringArrayList.size(); i++) { resultSetStringBuilder.append( String.format(rowTemplate, (Object[]) resultSetStringArrayList.get(i))).append( rowSeparator); } return 
resultSetStringBuilder.toString(); } /** * Creates the row template given the maximum size for each column * * @param maxColumnSizes the maximum size for each column * @return the row template to format row data into */ private static String createRowTemplate(List<Integer> maxColumnSizes) { List<String> columnFormatSpecifiers = Lists.newArrayList(); for (int maxColumnSize : maxColumnSizes) { columnFormatSpecifiers.add("%-" + maxColumnSize + "s"); } return new StringBuilder("| ").append(Joiner.on(" | ").join(columnFormatSpecifiers)).append( " |\n").toString(); } /** * Creates the row separator given the maximum size for each column * * @param maxColumnSizes the maximum size for each column * @return the row separator */ private static String createRowSeperator(List<Integer> maxColumnSizes) { StringBuilder rowSeparator = new StringBuilder("+"); for (int maxColumnSize : maxColumnSizes) { rowSeparator.append(Strings.repeat("-", maxColumnSize + 2)).append("+"); } return rowSeparator.append("\n").toString(); } /** * Gets a list of the maximum size for each column. * * @param resultSet the result set to process * @return a list of the maximum size for each column */ private static List<Integer> getMaxColumnSizes(List<String[]> resultSet) { List<Integer> maxColumnSizes = Lists.newArrayList(); for (int i = 0; i < resultSet.get(0).length; i++) { int maxColumnSize = -1; for (int j = 0; j < resultSet.size(); j++) { if (resultSet.get(j)[i].length() > maxColumnSize) { maxColumnSize = resultSet.get(j)[i].length(); } } maxColumnSizes.add(maxColumnSize); } return maxColumnSizes; } /** * Gets the column labels for the result set. 
* * @param resultSet the result set to get the column labels for * @return the string list of column labels */ public static List<String> getColumnLabels(ResultSet resultSet) { return Lists.transform( Lists.newArrayList(resultSet.getColumnTypes()), new Function<ColumnType, String>() { @Override public String apply(ColumnType input) { return input.getLabelName(); } }); } /** * Gets the values in a row of the result set in the form of a string * list. * * @param row the row to get the values for * @return the string list of the row values */ public static List<String> getRowStringValues(Row row) { return Lists.transform(Lists.newArrayList(row.getValues()), new Function<Value, String>() { @Override public String apply(Value input) { return Pql.toString(input); } }); } /** * Combines the first and second result sets, if and only if, the columns * of both result sets match. * * @throws IllegalArgumentException if the columns of the first result set * don't match the second */ public static ResultSet combineResultSets(ResultSet first, ResultSet second) { Function<ColumnType, String> columnTypeToString = new Function<ColumnType, String>() { @Override public String apply(ColumnType input) { return input.getLabelName(); } }; List<String> firstColumns = Lists.transform(Lists.newArrayList(first.getColumnTypes()), columnTypeToString); List<String> secondColumns = Lists.transform(Lists.newArrayList(second.getColumnTypes()), columnTypeToString); if (!firstColumns.equals(secondColumns)) { throw new IllegalArgumentException(String.format( "First result set columns [%s] do not match second columns [%s]", Joiner.on(",").join(firstColumns), Joiner.on(",").join(secondColumns))); } List<Row> combinedRows = Lists.newArrayList(first.getRows()); if (second.getRows() != null) { combinedRows.addAll(Lists.newArrayList(second.getRows())); } ResultSet combinedResultSet = new ResultSet(); combinedResultSet.getColumnTypes().addAll(first.getColumnTypes()); 
combinedResultSet.getRows().addAll(combinedRows); return combinedResultSet; } }
package controller;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.Set;

import javax.swing.JFileChooser;

import jgame.JGColor;
import model.DefaultModel;
import model.Model;
import model.expression.Expression;
import viewer.SLogoViewer;
import viewer.Viewer;
import viewer.toggle.BackgroundColorButton;
import viewer.toggle.PenColorButton;
import Exceptions.SlogoException;

/**
 * Controller is responsible for storing instances of Turtle and serves as the interface
 * between View and Model. The View calls into the Controller to pass command strings to
 * the Model, set the active turtle, and read properties of the active turtle and the
 * background color. The Model updates the TurtleTrace of the active turtle.
 *
 * @author FrontEnd - Alex, Adam
 * @author BackEnd - Yuhua, Fabio
 */
public class Controller implements ControllerToViewInterface, ControllerToModelInterface {

    private static final String CLEARSCREEN = "clearscreen";

    Model model;
    protected Viewer viewer;
    List<Turtle> turtles;
    List<Turtle> activeTurtles;
    // Full command history; currentCommand is the index of the last executed entry
    // (-1 when the history is empty), which supports undo/redo.
    private List<String> commandList;
    private int currentCommand;
    Map<String, Workspace> workspaces;
    Workspace currentWorkspace;
    ResourceBundle messages;
    Map<String, String> languageToCountry;
    // Keyed by Double — see addColorIndex/getRGBForIndex below.
    Map<Double, ColorIndex> colorIndexes;

    private static final String BACKGROUND = "background";
    private static final String PEN_COLOR = "penColor";
    private static final String SHAPE = "shape";

    // Manually saved preference snapshots (savePreferences/loadPreferences).
    private List<HashMap<String, Double>> preferencesMap;
    // Auto-saved preferences per workspace id, restored when returning to a workspace.
    public Map<String, HashMap<String, Double>> currentPreferencesOfWorkspaces;

    public Controller () {
        buildLanguageMap();
        model = new DefaultModel(this);
        viewer = new SLogoViewer(this);
        preferencesMap = new ArrayList<HashMap<String, Double>>();
        currentPreferencesOfWorkspaces = new HashMap<String, HashMap<String, Double>>();
        workspaces = new HashMap<String, Workspace>();
        // FIX: colorIndexes was never initialized, so addColorIndex() always threw
        // a NullPointerException.
        colorIndexes = new HashMap<Double, ColorIndex>();
        currentWorkspace = new Workspace();
        workspaces.put("1", currentWorkspace);
        commandList = new ArrayList<String>();
        currentCommand = -1;
        setCurrentWorkspace("1");
    }

    // Test purposes only
    public Set<String> getLanguages () {
        return languageToCountry.keySet();
    }

    /** Builds the supported language -> country map and defaults to English. */
    private void buildLanguageMap () {
        languageToCountry = new HashMap<String, String>();
        languageToCountry.put("en", "US");
        languageToCountry.put("fr", "FR");
        languageToCountry.put("pt", "PT");
        languageToCountry.put("it", "IT");
        languageToCountry.put("de", "DE");
        setLanguage("en");
    }

    /**
     * Takes the commands typed by the user and updates the TurtleTrace accordingly.
     * On a parse failure the error is recorded on the current workspace and the
     * command is abandoned.
     */
    @Override
    public void interpretCommand (String userInput) {
        try {
            model.updateTrace(userInput);
            ((SLogoViewer) viewer).updatePastCommandsBox(commandList.subList(0, currentCommand + 1));
            System.out.println("Update Trace Finished!");
        }
        catch (SlogoException e) {
            SlogoError error = new SlogoError("Parse Error",
                                              "A syntax error occured while parsing your script");
            currentWorkspace.setSlogoError(error);
            System.out.println("Error!");
            return;
        }
    }

    /**
     * Returns the current settings of the GUI (background color id, pen color id,
     * turtle shape id).
     */
    public Map<String, Double> getCurrentPreferences () {
        Map<String, Double> preference = new HashMap<String, Double>();
        preference.put(BACKGROUND, BackgroundColorButton
                .getColorIdFromColor(this.getCurrentWorkspace().getBackgroundColor()));
        preference.put(PEN_COLOR, PenColorButton
                .getColorIdFromColor(this.getCurrentWorkspace().getPenColor()));
        preference.put(SHAPE, (double) this.getCurrentWorkspace().getTurtleImage());
        return preference;
    }

    /**
     * Stores a given preference map for future loading.
     *
     * @param preference the preference snapshot to save
     */
    public void savePreferences (Map<String, Double> preference) {
        this.preferencesMap.add((HashMap<String, Double>) preference);
    }

    /**
     * Loads the preference map into the GUI specified by preference index.
     *
     * @param index position of the snapshot in the saved-preferences list
     */
    public void loadPreferences (int index) {
        Map<String, Double> map = this.preferencesMap.get(index);
        this.setBackgroundColor(BackgroundColorButton.getColorFromColorId(map.get(BACKGROUND)));
        this.setPenColor(PenColorButton.getColorFromColorId(map.get(PEN_COLOR)));
        this.setTurtleImage(Double.toString(map.get(SHAPE)));
    }

    /**
     * Used for automatically storing a workspace's preferences, not for manual loading,
     * but rather for auto-loading upon return to the workspace identified by workspaceId.
     */
    private void storeCurrentWorkspacePreferences (String workspaceId) {
        currentPreferencesOfWorkspaces.put(workspaceId,
                                           (HashMap<String, Double>) this.getCurrentPreferences());
    }

    /**
     * Auto-loads the preferences of a workspace from what was auto-saved upon leaving
     * previously; falls back to the defaults (white background, shape 1, black pen)
     * for a workspace that has never been visited.
     */
    public void loadLastPreferences (String workspaceId) {
        if (this.currentPreferencesOfWorkspaces.containsKey(workspaceId)) {
            Map<String, Double> map = this.currentPreferencesOfWorkspaces.get(workspaceId);
            this.setBackgroundColor(BackgroundColorButton.getColorFromColorId(map.get(BACKGROUND)));
            this.setPenColor(PenColorButton.getColorFromColorId(map.get(PEN_COLOR)));
            this.setTurtleImage(Double.toString(map.get(SHAPE)));
        }
        else {
            this.setBackgroundColor(JGColor.white);
            this.setTurtleImage("1");
            this.setPenColor(JGColor.black);
        }
    }

    public List<HashMap<String, Double>> getAllPreferences () {
        return (ArrayList<HashMap<String, Double>>) this.preferencesMap;
    }

    /**
     * Adds the userInput to the commandList to be parsed by backend. Any redo history
     * beyond the current position is discarded first.
     *
     * @param userInput the raw command string typed by the user
     */
    public void addCommand (String userInput) {
        // NOTE(review): subList returns a view backed by the original list; the code
        // relies on add() writing through the view. Also, the guard compares size()
        // with currentCommand rather than currentCommand + 1 — verify intent.
        if (commandList.size() != currentCommand && !commandList.isEmpty()) {
            commandList = commandList.subList(0, currentCommand + 1);
        }
        commandList.add(userInput);
        currentCommand++;
        interpretCommand(userInput);
    }

    /**
     * Undoes the previously issued command by clearing the screen and replaying the
     * remaining history.
     */
    public void undo () {
        if (currentCommand > -1) {
            currentCommand--;
            interpretCommand(CLEARSCREEN);
            for (String command : commandList.subList(0, currentCommand + 1)) {
                interpretCommand(command);
            }
        }
    }

    /** Redoes a previously undone command. */
    public void redo () {
        if (currentCommand < commandList.size() - 1) {
            currentCommand++;
            interpretCommand(commandList.get(currentCommand));
        }
    }

    public List<String> getCurrentCommands () {
        return commandList.subList(0, currentCommand + 1);
    }

    @Override
    public Workspace getCurrentWorkspace () {
        return currentWorkspace;
    }

    /**
     * Auto-stores preferences for the departing workspace, loads the specified
     * workspaceId or creates a new one if non-existent, and auto-loads the preferences
     * of the incoming workspace.
     */
    public void setCurrentWorkspace (String workspaceId) {
        this.storeCurrentWorkspacePreferences(this.getCurrentWorkspace().getWorkspaceId());
        Workspace tempWorkspace = workspaces.get(workspaceId);
        if (tempWorkspace == null) {
            tempWorkspace = new Workspace();
            workspaces.put(workspaceId, tempWorkspace);
        }
        currentWorkspace = tempWorkspace;
        currentWorkspace.setWorkspaceId(workspaceId);
        ((SLogoViewer) viewer).clearScreen();
        ((SLogoViewer) viewer).clearDataTables();
        this.loadLastPreferences(workspaceId);
    }

    /**
     * Switches the UI language by loading the matching resource bundle.
     *
     * @param language two-letter language code present in languageToCountry
     */
    public void setLanguage (String language) {
        System.out.println("Calling change language to: " + language);
        String country = languageToCountry.get(language);
        Locale currentLocale;
        currentLocale = new Locale(language, country);
        messages = ResourceBundle.getBundle("Languages.MessagesBundle", currentLocale);
    }

    /**
     * Save userInput commands into a file chosen by the user (".txt" is appended).
     */
    public void saveFile (String userInput) {
        JFileChooser chooser = new JFileChooser();
        chooser.setCurrentDirectory(new File("/home/me/Documents"));
        int retrival = chooser.showSaveDialog(null);
        if (retrival == JFileChooser.APPROVE_OPTION) {
            try {
                FileWriter fw = new FileWriter(chooser.getSelectedFile() + ".txt");
                fw.write(userInput);
                fw.close();
            }
            catch (Exception ex) {
                ex.printStackTrace();
            }
        }
    }

    /**
     * Load commands into the current workspace from a previously saved file and
     * interpret them immediately.
     */
    public void loadFile () {
        String result = "";
        JFileChooser fileChooser = new JFileChooser();
        if (fileChooser.showOpenDialog(null) == JFileChooser.APPROVE_OPTION) {
            try {
                File file = fileChooser.getSelectedFile();
                FileInputStream saveFile = new FileInputStream(file);
                int content;
                while ((content = saveFile.read()) != -1) {
                    result += (char) content;
                }
                saveFile.close();
            }
            catch (FileNotFoundException e) {
                e.printStackTrace();
            }
            catch (IOException e) {
                e.printStackTrace();
            }
            interpretCommand(result);
        }
    }

    @Override
    public List<Turtle> getTurtles () {
        return currentWorkspace.getTurtles();
    }

    @Override
    public List<Turtle> getActiveTurtles () {
        return currentWorkspace.getActiveTurtles();
    }

    // Additional getters/setters — each mirrors the change into both the viewer
    // and the current workspace so the setting survives workspace switches.

    public void setBackgroundColor (JGColor backgroundColor) {
        ((SLogoViewer) viewer).setBackgroundColor(backgroundColor);
        currentWorkspace.setBackgroundColor(backgroundColor);
    }

    public JGColor getBackgroundColor () {
        return currentWorkspace.getBackgroundColor();
    }

    public void setPenColor (JGColor penColor) {
        ((SLogoViewer) viewer).setPenColor(penColor);
        currentWorkspace.setPenColor(penColor);
    }

    public void setTrackedTurtle (String turtleNum) {
        ((SLogoViewer) viewer).setTrackedTurtle(turtleNum);
    }

    public JGColor getPenColor () {
        return currentWorkspace.getPenColor();
    }

    public void setTurtleImage (String imageNumber) {
        ((SLogoViewer) viewer).setTurtleImage((int) (Double.parseDouble(imageNumber)));
        this.currentWorkspace.setTurtleImage((int) (Double.parseDouble(imageNumber)));
    }

    public Double getTurtleImage () {
        return (double) ((SLogoViewer) viewer).getTurtleImage();
    }

    public void toggleGrid () {
        ((SLogoViewer) viewer).toggleGrid();
        this.currentWorkspace.toggleGrid();
    }

    public void toggleHighlightTurtles () {
        ((SLogoViewer) viewer).toggleHighlightTurtles();
    }

    /** Refreshes the viewer's user-variable table; failures are deliberately ignored. */
    public void updateUserVariableBox () {
        try {
            Map<String, Expression> variableMap = getGlobalVariables();
            if (variableMap != null) {
                ((SLogoViewer) viewer).updateUserVariableTable(variableMap);
            }
        }
        catch (Exception e) {
            // Best-effort UI refresh; keep going if the viewer is not ready.
        }
    }

    /** Refreshes the viewer's user-defined-commands list; failures are deliberately ignored. */
    public void updateUserDefinedCommandsBox () {
        try {
            Map<String, Expression> functionList = getDefinedFunction();
            if (functionList != null) {
                ((SLogoViewer) viewer).updateUserCommandList(functionList);
            }
        }
        catch (Exception e) {
            // Best-effort UI refresh; keep going if the viewer is not ready.
        }
    }

    @Override
    public Map<String, Expression> getDefinedFunction () {
        return currentWorkspace.getDefinedFunction();
    }

    @Override
    public Map<String, Expression> getRunningFunction () {
        return currentWorkspace.getRunningFunction();
    }

    @Override
    public Map<String, Expression> getGlobalVariables () {
        return currentWorkspace.getGlobalVariables();
    }

    @Override
    public Map<String, Map<String, Expression>> getLocalVariables () {
        return currentWorkspace.getLocalVariables();
    }

    /**
     * Looks up a registered ColorIndex by its numeric index.
     *
     * FIX: the map is keyed by Double (see addColorIndex), so the original raw
     * String lookup could never match. Parse the index before looking it up;
     * non-numeric input behaves as before (nothing found).
     *
     * @param index string form of the numeric color index
     * @return the matching ColorIndex, or null when unknown/unparseable
     */
    public ColorIndex getRGBForIndex (String index) {
        try {
            return colorIndexes.get(Double.valueOf(index));
        }
        catch (NumberFormatException e) {
            return null;
        }
    }

    @Override
    public void addColorIndex (ColorIndex index) {
        colorIndexes.put(index.index, index);
    }

    public ResourceBundle getLanguageMessages () {
        return this.messages;
    }

    @Override
    public void clearScreen () {
        currentWorkspace.clearScreen();
        ((SLogoViewer) viewer).clearScreen();
    }

    public void clearWorkspace () {
        currentWorkspace.clearWorkspace();
    }

    /*
     * The below methods were never needed in the Controller directly.
     */
    @Override
    public void xCor () {
        // view.xCor();
    }

    @Override
    public void penColor () {
        // view.penColor();
    }

    @Override
    public void turtleShape () {
        // view.turtleShape();
    }

    @Override
    public void yCor () {
        // view.yCor();
    }

    @Override
    public void heading () {
        // view.heading();
    }

    @Override
    public void isPenDown () {
        // view.isPenDown();
    }

    @Override
    public void isShowing () {
        // view.isShowing();
    }

    public void id () {
        // view show ids
    }

    public ResourceBundle getMessages () {
        return messages;
    }

    public Viewer getViewer () {
        return this.viewer;
    }

    public List<HashMap<String, Double>> getPreferencesMap () {
        return this.preferencesMap;
    }
}
package cn.aiseminar.aisentry.reader;

import cn.aiseminar.aisentry.*;
import cn.aiseminar.aisentry.aimouth.AIMouth;
import cn.aiseminar.aisentry.aimouth.AIMouth.TTS_State;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.view.GestureDetector;
import android.view.GestureDetector.SimpleOnGestureListener;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.View.OnTouchListener;
import android.widget.ImageButton;
import android.widget.TextView;
import android.widget.ViewFlipper;

/**
 * Full-screen text reader activity: shows the selected file's content in a
 * ViewFlipper page, supports fling-based page turning, and reads the text aloud
 * line by line through AIMouth (play/pause via an overlay button group).
 *
 * @author Administrator
 */
public class FileReader extends Activity {

    private static final String gb2312 = "GB2312";
    private static final String utf8 = "UTF-8";
    private static final String defaultCode = gb2312;

    private ViewFlipper mViewFlipper = null;
    private GestureDetector mGestureDetector = null;

    // for speak control
    private View mBtnGroupView = null;
    private ImageButton mPlayBtn = null;
    private ImageButton mPauseBtn = null;
    private boolean mbSpeaking = false;
    private AIMouth mMouth = null;
    private Handler mMsgHandler = null;

    // for file reading
    private String mFilePath;
    // Offset of the line currently being spoken and its length; together they
    // locate the next line when a TTS "speak completed" message arrives.
    private int mSpeakOffset = 0;
    private int mSpeakingLength = 0;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.file_reader);

        mMsgHandler = new AISHandler();
        mViewFlipper = (ViewFlipper) findViewById(R.id.viewflipper_reader);
        mGestureDetector = new GestureDetector(this, new PageOnGestureListener());
        initButtonGroup();

        try {
            mFilePath = this.getIntent().getStringExtra(SelectFileActivity.EXTRA_FILEPATH);
            refreshGUI(defaultCode);
        } catch (Exception e) {
            // Keep the activity alive even if the file cannot be rendered.
        }
    }

    @Override
    protected void onStart() {
        if (null == mMouth) {
            mMouth = AIMouth.getMouth(this);
        }
        mMouth.setMsgHandler(mMsgHandler);
        super.onStart();
    }

    @Override
    protected void onDestroy() {
        if (null != mMouth) {
            mMouth.stop();
        }
        super.onDestroy();
    }

    /**
     * (Re)builds the reader page from the file content decoded with the given charset.
     *
     * @param code charset name used to decode the file
     */
    private void refreshGUI(String code) {
        String fileContent = getStringFromFile(code);
        int curOffset = 0;
        // NOTE(review): the loop runs exactly once, so all content lands on a
        // single page; looks like a stub for future pagination — confirm.
        for (int i = 0; i < 1; i++) {
            TextView tv = (TextView) createLayoutView(R.layout.reader_page);
            tv.setText(fileContent.substring(curOffset));
            setGestureListenerForView(tv);
            mViewFlipper.addView(tv);
        }
    }

    /** Wires up the play/pause overlay and hides it initially. */
    private void initButtonGroup() {
        mBtnGroupView = findViewById(R.id.readerBtnLayout);
        mPlayBtn = (ImageButton) findViewById(R.id.ibtnPlay);
        mPlayBtn.setVisibility(View.VISIBLE);
        mPauseBtn = (ImageButton) findViewById(R.id.ibtnPause);
        mPauseBtn.setVisibility(View.INVISIBLE);
        mbSpeaking = false;
        mBtnGroupView.setVisibility(View.INVISIBLE);
    }

    /** Toggles overlay visibility and syncs play/pause buttons with speaking state. */
    private void showButtonGroup() {
        int bShow = mBtnGroupView.getVisibility();
        mBtnGroupView.setVisibility((View.VISIBLE == bShow) ? View.INVISIBLE : View.VISIBLE);
        mPlayBtn.setVisibility(mbSpeaking ? View.INVISIBLE : View.VISIBLE);
        mPauseBtn.setVisibility(mbSpeaking ? View.VISIBLE : View.INVISIBLE);
        mPlayBtn.setOnClickListener(new PlayButtonOnClickListener());
        mPauseBtn.setOnClickListener(new PlayButtonOnClickListener());
    }

    /** Routes the view's touch events into the shared gesture detector. */
    private void setGestureListenerForView(View targetView) {
        targetView.setLongClickable(true);
        targetView.setOnTouchListener(new OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                return mGestureDetector.onTouchEvent(event);
            }
        });
    }

    private View createLayoutView(int layoutId) {
        LayoutInflater inflater = getLayoutInflater();
        if (null == inflater)
            return null;
        return inflater.inflate(layoutId, null);
    }

    /**
     * Reads the whole file at mFilePath as text.
     *
     * @param code charset name used to decode the bytes
     * @return the file content with '\n' line separators, or null if the file is
     *         missing or reading fails
     */
    public String getStringFromFile(String code) {
        try {
            if (!new File(mFilePath).exists()) {
                return null;
            }

            StringBuffer sBuffer = new StringBuffer();
            FileInputStream fInputStream = new FileInputStream(mFilePath);
            InputStreamReader inputStreamReader = new InputStreamReader(fInputStream, code);
            BufferedReader in = new BufferedReader(inputStreamReader);
            try {
                // FIX: loop until readLine() returns null instead of polling
                // in.ready() — ready() only reports buffered availability and can
                // stop before the end of the file.
                String line;
                while ((line = in.readLine()) != null) {
                    sBuffer.append(line + "\n");
                }
            } finally {
                // FIX: close in a finally block so the reader is not leaked when
                // an exception interrupts the read loop.
                in.close();
            }
            return sBuffer.toString();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Advances past the line just spoken and feeds the next non-blank line of the
     * current page to the TTS engine. Called on start-of-playback and from the
     * handler on each TTS "speak completed" message.
     */
    public void speakNextString() {
        mSpeakOffset += mSpeakingLength;
        TextView tv = (TextView) mViewFlipper.getCurrentView();
        String content = tv.getText().toString().trim();
        int endPos = content.indexOf('\n', mSpeakOffset);
        while (endPos == mSpeakOffset) // skip blank line
        {
            mSpeakOffset++;
            endPos = content.indexOf('\n', mSpeakOffset);
        }
        if (-1 == endPos && mSpeakOffset < content.length()) // the last line of file
        {
            endPos = content.length();
        }
        if (endPos > mSpeakOffset) {
            content = content.substring(mSpeakOffset, endPos);
            mSpeakingLength = content.length();
            mMouth.speak(content);
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        // case R.id.gb2312:
        //     refreshGUI(defaultCode);
        //     break;
        // case R.id.utf8:
        //     refreshGUI(utf8);
        //     break;
        default:
            break;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Reads a file fully into a byte array.
     *
     * @param fileName path of the file to read
     * @return the file bytes, or null when reading failed
     * @throws Exception propagated from closing the stream
     */
    public byte[] readFile(String fileName) throws Exception {
        byte[] result = null;
        FileInputStream fis = null;
        try {
            File file = new File(fileName);
            fis = new FileInputStream(file);
            result = new byte[fis.available()];
            fis.read(result);
        } catch (Exception e) {
            // Swallowed by design: callers treat a null result as failure.
        } finally {
            // FIX: guard against fis being null — the original called fis.close()
            // unconditionally, throwing an NPE whenever the FileInputStream
            // constructor itself had failed (e.g. file not found).
            if (fis != null) {
                fis.close();
            }
        }
        return result;
    }

    /* help classes */

    /** Turns horizontal flings into page flips and single taps into overlay toggles. */
    class PageOnGestureListener extends SimpleOnGestureListener {
        @Override
        public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
            if (mViewFlipper.getChildCount() > 1) {
                int fling_min_distance = 100;
                int fling_min_velocity = 200;
                if (Math.abs(velocityX) > fling_min_velocity) {
                    if (e1.getX() - e2.getX() > fling_min_distance) // right in
                    {
                        mViewFlipper.setInAnimation(FileReader.this, R.anim.anim_right_in);
                        mViewFlipper.setOutAnimation(FileReader.this, R.anim.anim_left_out);
                        mViewFlipper.showPrevious();
                    } else if (e2.getX() - e1.getX() > fling_min_distance) // left in
                    {
                        mViewFlipper.setInAnimation(FileReader.this, R.anim.anim_left_in);
                        mViewFlipper.setOutAnimation(FileReader.this, R.anim.anim_right_out);
                        mViewFlipper.showNext();
                    }
                }
            }
            return super.onFling(e1, e2, velocityX, velocityY);
        }

        @Override
        public boolean onSingleTapConfirmed(MotionEvent e) {
            showButtonGroup();
            return super.onSingleTapConfirmed(e);
        }

        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
            return super.onScroll(e1, e2, distanceX, distanceY);
        }
    }

    /** Single listener shared by play and pause: toggles speaking state. */
    class PlayButtonOnClickListener implements OnClickListener {
        @Override
        public void onClick(View v) {
            if (mbSpeaking) { // now user paused
                mSpeakingLength = 0; // will reading from last offset
                mMouth.stop();
            } else {
                TextView tv = (TextView) mViewFlipper.getCurrentView();
                String content = tv.getText().toString().trim();
                if (mSpeakOffset >= content.length()) {
                    mSpeakOffset = 0;
                    mSpeakingLength = 0;
                }
                speakNextString();
            }
            mbSpeaking = !mbSpeaking;
            mPlayBtn.setVisibility(mbSpeaking ? View.INVISIBLE : View.VISIBLE);
            mPauseBtn.setVisibility(mbSpeaking ? View.VISIBLE : View.INVISIBLE);
        }
    }

    /** Receives TTS completion messages and chains playback to the next line. */
    @SuppressLint("HandlerLeak")
    class AISHandler extends Handler {
        @Override
        public void handleMessage(Message msg) {
            if (AISMessageCode.MOUTH_MSG_BASE + TTS_State.TTS_SPEAK_COMPLETED.ordinal() == msg.what) {
                if (null != mMouth) {
                    speakNextString();
                }
                return;
            }
            super.handleMessage(msg);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.spi; import java.io.Serializable; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; import javax.management.JMException; import javax.management.MBeanServer; import javax.management.ObjectName; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.events.DiscoveryEvent; import org.apache.ignite.events.Event; import org.apache.ignite.internal.IgniteKernal; import org.apache.ignite.internal.IgniteNodeAttributes; import org.apache.ignite.internal.managers.communication.GridMessageListener; import org.apache.ignite.internal.managers.eventstorage.GridLocalEventListener; import org.apache.ignite.internal.processors.timeout.GridSpiTimeoutObject; import org.apache.ignite.internal.util.IgniteExceptionRegistry; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.CU; import 
org.apache.ignite.internal.util.typedef.internal.SB; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiPredicate; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.plugin.extensions.communication.Message; import org.apache.ignite.plugin.extensions.communication.MessageFactory; import org.apache.ignite.plugin.extensions.communication.MessageFormatter; import org.apache.ignite.plugin.extensions.communication.MessageReader; import org.apache.ignite.plugin.extensions.communication.MessageWriter; import org.apache.ignite.plugin.security.SecuritySubject; import org.apache.ignite.resources.IgniteInstanceResource; import org.apache.ignite.resources.LoggerResource; import org.apache.ignite.spi.discovery.DiscoveryDataBag; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.IgniteSystemProperties.IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK; import static org.apache.ignite.events.EventType.EVT_NODE_JOINED; /** * This class provides convenient adapter for SPI implementations. */ public abstract class IgniteSpiAdapter implements IgniteSpi { /** */ private ObjectName spiMBean; /** SPI start timestamp. */ private long startTstamp; /** */ @LoggerResource private IgniteLogger log; /** Ignite instance. */ protected Ignite ignite; /** Ignite instance name. */ protected String igniteInstanceName; /** SPI name. */ private String name; /** Grid SPI context. */ private volatile IgniteSpiContext spiCtx = new GridDummySpiContext(null, false, null); /** Discovery listener. */ private GridLocalEventListener paramsLsnr; /** Local node. */ private ClusterNode locNode; /** Failure detection timeout usage switch. */ private boolean failureDetectionTimeoutEnabled = true; /** * Failure detection timeout for client nodes. Initialized with the value of * {@link IgniteConfiguration#getClientFailureDetectionTimeout()}. */ private long clientFailureDetectionTimeout; /** * Failure detection timeout. 
Initialized with the value of * {@link IgniteConfiguration#getFailureDetectionTimeout()}. */ private long failureDetectionTimeout; /** Start flag to deny repeating start attempts. */ private final AtomicBoolean startedFlag = new AtomicBoolean(); /** * Creates new adapter and initializes it from the current (this) class. * SPI name will be initialized to the simple name of the class * (see {@link Class#getSimpleName()}). */ protected IgniteSpiAdapter() { name = U.getSimpleName(getClass()); } /** * Starts startup stopwatch. */ protected void startStopwatch() { startTstamp = U.currentTimeMillis(); } /** * This method is called by built-in managers implementation to avoid * repeating SPI start attempts. */ public final void onBeforeStart() { if (!startedFlag.compareAndSet(false, true)) throw new IllegalStateException("SPI has already been started " + "(always create new configuration instance for each starting Ignite instances) " + "[spi=" + this + ']'); } /** * Checks if {@link #onBeforeStart()} has been called on this SPI instance. * * @return {@code True} if {@link #onBeforeStart()} has already been called. */ public final boolean started() { return startedFlag.get(); } /** * @return Local node. */ protected ClusterNode getLocalNode() { if (locNode != null) return locNode; locNode = getSpiContext().localNode(); return locNode; } /** {@inheritDoc} */ @Override public String getName() { return name; } /** * Gets ignite instance. * * @return Ignite instance. */ public Ignite ignite() { return ignite; } /** * Sets SPI name. * * @param name SPI name. * @return {@code this} for chaining. 
*/ @IgniteSpiConfiguration(optional = true) public IgniteSpiAdapter setName(String name) { this.name = name; return this; } /** {@inheritDoc} */ @Override public final void onContextInitialized(final IgniteSpiContext spiCtx) throws IgniteSpiException { assert spiCtx != null; this.spiCtx = spiCtx; if (!Boolean.getBoolean(IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK)) { spiCtx.addLocalEventListener(paramsLsnr = new GridLocalEventListener() { @Override public void onEvent(Event evt) { assert evt instanceof DiscoveryEvent : "Invalid event [expected=" + EVT_NODE_JOINED + ", actual=" + evt.type() + ", evt=" + evt + ']'; ClusterNode node = spiCtx.node(((DiscoveryEvent)evt).eventNode().id()); if (node != null) try { checkConfigurationConsistency(spiCtx, node, false); checkConfigurationConsistency0(spiCtx, node, false); } catch (IgniteSpiException e) { U.error(log, "Spi consistency check failed [node=" + node.id() + ", spi=" + getName() + ']', e); } } }, EVT_NODE_JOINED); final Collection<ClusterNode> remotes = F.concat(false, spiCtx.remoteNodes(), spiCtx.remoteDaemonNodes()); for (ClusterNode node : remotes) { checkConfigurationConsistency(spiCtx, node, true); checkConfigurationConsistency0(spiCtx, node, true); } } onContextInitialized0(spiCtx); } /** * Method to be called in the end of onContextInitialized method. * * @param spiCtx SPI context. * @throws IgniteSpiException In case of errors. */ protected void onContextInitialized0(final IgniteSpiContext spiCtx) throws IgniteSpiException { // No-op. } /** {@inheritDoc} */ @Override public final void onContextDestroyed() { onContextDestroyed0(); if (spiCtx != null && paramsLsnr != null) spiCtx.removeLocalEventListener(paramsLsnr); ClusterNode locNode = spiCtx == null ? null : spiCtx.localNode(); // Set dummy no-op context. spiCtx = new GridDummySpiContext(locNode, true, spiCtx); } /** {@inheritDoc} */ @Override public void onClientDisconnected(IgniteFuture<?> reconnectFut) { // No-op. 
} /** {@inheritDoc} */ @Override public void onClientReconnected(boolean clusterRestarted) { // No-op. } /** * Inject ignite instance. * * @param ignite Ignite instance. */ @IgniteInstanceResource protected void injectResources(Ignite ignite) { this.ignite = ignite; if (ignite != null) igniteInstanceName = ignite.name(); } /** * Method to be called in the beginning of onContextDestroyed() method. */ protected void onContextDestroyed0() { // No-op. } /** * This method returns SPI internal instances that need to be injected as well. * Usually these will be instances provided to SPI externally by user, e.g. during * SPI configuration. * * @return Internal SPI objects that also need to be injected. */ public Collection<Object> injectables() { return Collections.emptyList(); } /** * Gets SPI context. * * @return SPI context. */ public IgniteSpiContext getSpiContext() { return spiCtx; } /** * Gets Exception registry. * * @return Exception registry. */ public IgniteExceptionRegistry getExceptionRegistry() { return IgniteExceptionRegistry.get(); } /** {@inheritDoc} */ @Override public Map<String, Object> getNodeAttributes() throws IgniteSpiException { return Collections.emptyMap(); } /** * Throws exception with uniform error message if given parameter's assertion condition * is {@code false}. * * @param cond Assertion condition to check. * @param condDesc Description of failed condition. Note that this description should include * JavaBean name of the property (<b>not</b> a variable name) as well condition in * Java syntax like, for example: * <pre name="code" class="java"> * ... * assertParameter(dirPath != null, "dirPath != null"); * ... * </pre> * Note that in case when variable name is the same as JavaBean property you * can just copy Java condition expression into description as a string. 
* @throws IgniteSpiException Thrown if given condition is {@code false} */ protected final void assertParameter(boolean cond, String condDesc) throws IgniteSpiException { if (!cond) throw new IgniteSpiException("SPI parameter failed condition check: " + condDesc); } /** * Gets uniformly formatted message for SPI start. * * @return Uniformly formatted message for SPI start. */ protected final String startInfo() { return "SPI started ok [startMs=" + startTstamp + ", spiMBean=" + spiMBean + ']'; } /** * Gets SPI startup time. * @return Time in millis. */ final long getStartTstamp() { return startTstamp; } /** * Gets uniformly format message for SPI stop. * * @return Uniformly format message for SPI stop. */ protected final String stopInfo() { return "SPI stopped ok."; } /** * Gets uniformed string for configuration parameter. * * @param name Parameter name. * @param val Parameter value. * @return Uniformed string for configuration parameter. */ protected final String configInfo(String name, Object val) { assert name != null; return "Using parameter [" + name + '=' + val + ']'; } /** * @param msg Error message. * @param locVal Local node value. * @return Error text. */ private static String format(String msg, Object locVal) { return msg + U.nl() + ">>> => Local node: " + locVal + U.nl(); } /** * @param msg Error message. * @param locVal Local node value. * @param rmtVal Remote node value. * @return Error text. */ private static String format(String msg, Object locVal, Object rmtVal) { return msg + U.nl() + ">>> => Local node: " + locVal + U.nl() + ">>> => Remote node: " + rmtVal + U.nl(); } /** * Registers SPI MBean. Note that SPI can only register one MBean. * * @param igniteInstanceName Ignite instance name. If null, then name will be empty. * @param impl MBean implementation. * @param mbeanItf MBean interface (if {@code null}, then standard JMX * naming conventions are used. * @param <T> Type of the MBean * @throws IgniteSpiException If registration failed. 
*/ protected final <T extends IgniteSpiManagementMBean> void registerMBean(String igniteInstanceName, T impl, Class<T> mbeanItf ) throws IgniteSpiException { if(ignite == null || U.IGNITE_MBEANS_DISABLED) return; MBeanServer jmx = ignite.configuration().getMBeanServer(); assert mbeanItf == null || mbeanItf.isInterface(); assert jmx != null; try { spiMBean = U.registerMBean(jmx, igniteInstanceName, "SPIs", getName(), impl, mbeanItf); if (log.isDebugEnabled()) log.debug("Registered SPI MBean: " + spiMBean); } catch (JMException e) { throw new IgniteSpiException("Failed to register SPI MBean: " + spiMBean, e); } } /** * Unregisters MBean. * * @throws IgniteSpiException If bean could not be unregistered. */ protected final void unregisterMBean() throws IgniteSpiException { // Unregister SPI MBean. if (spiMBean != null && ignite != null) { assert !U.IGNITE_MBEANS_DISABLED; MBeanServer jmx = ignite.configuration().getMBeanServer(); assert jmx != null; try { jmx.unregisterMBean(spiMBean); if (log.isDebugEnabled()) log.debug("Unregistered SPI MBean: " + spiMBean); } catch (JMException e) { throw new IgniteSpiException("Failed to unregister SPI MBean: " + spiMBean, e); } } } /** * @return {@code True} if node is stopping. */ protected final boolean isNodeStopping() { return spiCtx.isStopping(); } /** * @return {@code true} if this check is optional. */ private boolean checkOptional() { IgniteSpiConsistencyChecked ann = U.getAnnotation(getClass(), IgniteSpiConsistencyChecked.class); return ann != null && ann.optional(); } /** * @return {@code true} if this check is enabled. */ private boolean checkEnabled() { return U.getAnnotation(getClass(), IgniteSpiConsistencyChecked.class) != null; } /** * @return {@code true} if client cluster nodes should be checked. 
 */
private boolean checkClient() {
    IgniteSpiConsistencyChecked ann = U.getAnnotation(getClass(), IgniteSpiConsistencyChecked.class);

    return ann != null && ann.checkClient();
}

/**
 * Method which is called in the end of checkConfigurationConsistency() method. May be overridden in SPIs.
 *
 * @param spiCtx SPI context.
 * @param node Remote node.
 * @param starting If this node is starting or not.
 * @throws IgniteSpiException in case of errors.
 */
protected void checkConfigurationConsistency0(IgniteSpiContext spiCtx, ClusterNode node, boolean starting)
    throws IgniteSpiException {
    // No-op.
}

/**
 * Checks remote node SPI configuration and prints warnings if necessary.
 *
 * @param spiCtx SPI context.
 * @param node Remote node.
 * @param starting Flag indicating whether this method is called during SPI start or not.
 * @throws IgniteSpiException If check fatally failed.
 */
@SuppressWarnings("IfMayBeConditional")
private void checkConfigurationConsistency(IgniteSpiContext spiCtx, ClusterNode node, boolean starting)
    throws IgniteSpiException {
    assert spiCtx != null;
    assert node != null;

    /*
     * Optional SPI means that we should not print warning if SPIs are different but
     * still need to compare attributes if SPIs are the same.
     */
    boolean optional = checkOptional();
    boolean enabled = checkEnabled();
    boolean checkClient = checkClient();

    if (!enabled)
        return;

    // Skip the check entirely for client nodes unless the annotation asked for it.
    if (!checkClient && (getLocalNode().isClient() || node.isClient()))
        return;

    String clsAttr = createSpiAttributeName(IgniteNodeAttributes.ATTR_SPI_CLASS);

    String name = getName();

    SB sb = new SB();

    /*
     * If there are any attributes do compare class and version
     * (do not print warning for the optional SPIs).
     */

    /* Check SPI class and version.
     */
    String locCls = spiCtx.localNode().attribute(clsAttr);
    String rmtCls = node.attribute(clsAttr);

    assert locCls != null : "Local SPI class name attribute not found: " + clsAttr;

    boolean isSpiConsistent = false;

    String tipStr = " (fix configuration or set " +
        "-D" + IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK + "=true system property)";

    if (rmtCls == null) {
        // Remote node does not carry the SPI class attribute at all.
        if (!optional && starting)
            throw new IgniteSpiException("Remote SPI with the same name is not configured" + tipStr +
                " [name=" + name + ", loc=" + locCls + ", locNode=" + spiCtx.localNode() +
                ", rmt=" + rmtCls + ", rmtNode=" + node + ']');

        sb.a(format(">>> Remote SPI with the same name is not configured: " + name, locCls));
    }
    else if (!locCls.equals(rmtCls)) {
        if (!optional && starting)
            throw new IgniteSpiException("Remote SPI with the same name is of different type" + tipStr +
                " [name=" + name + ", loc=" + locCls + ", rmt=" + rmtCls + ']');

        sb.a(format(">>> Remote SPI with the same name is of different type: " + name, locCls, rmtCls));
    }
    else
        isSpiConsistent = true;

    if (optional && !isSpiConsistent)
        return;

    // It makes no sense to compare inconsistent SPIs attributes.
    if (isSpiConsistent) {
        List<String> attrs = getConsistentAttributeNames();

        // Process all SPI specific attributes.
        for (String attr : attrs) {
            // Ignore class and version attributes processed above.
            if (!attr.equals(clsAttr)) {
                // This check is considered as optional if no attributes
                Object rmtVal = node.attribute(attr);
                Object locVal = spiCtx.localNode().attribute(attr);

                if (locVal == null && rmtVal == null)
                    continue;

                if (locVal == null || rmtVal == null || !locVal.equals(rmtVal))
                    sb.a(format(">>> Remote node has different " + getName() + " SPI attribute " + attr,
                        locVal, rmtVal));
            }
        }
    }

    if (sb.length() > 0) {
        String msg;

        if (starting)
            msg = U.nl() + U.nl() +
                ">>> +--------------------------------------------------------------------+" + U.nl() +
                ">>> + Courtesy notice that starting node has inconsistent configuration. +" + U.nl() +
                ">>> + Ignore this message if you are sure that this is done on purpose. +" + U.nl() +
                ">>> +--------------------------------------------------------------------+" + U.nl() +
                ">>> Remote Node ID: " + node.id().toString().toUpperCase() + U.nl() + sb;
        else
            msg = U.nl() + U.nl() +
                ">>> +-------------------------------------------------------------------+" + U.nl() +
                ">>> + Courtesy notice that joining node has inconsistent configuration. +" + U.nl() +
                ">>> + Ignore this message if you are sure that this is done on purpose. +" + U.nl() +
                ">>> +-------------------------------------------------------------------+" + U.nl() +
                ">>> Remote Node ID: " + node.id().toString().toUpperCase() + U.nl() + sb;

        U.courtesy(log, msg);
    }
}

/**
 * Returns back a list of attributes that should be consistent
 * for this SPI. Consistency means that remote node has to
 * have the same attribute with the same value.
 *
 * @return List or attribute names.
 */
protected List<String> getConsistentAttributeNames() {
    return Collections.emptyList();
}

/**
 * Creates new name for the given attribute. Name contains
 * SPI name prefix.
 *
 * @param attrName SPI attribute name.
 * @return New name with SPI name prefix.
 */
protected String createSpiAttributeName(String attrName) {
    return U.spiAttribute(this, attrName);
}

/**
 * @param obj Timeout object.
 * @see IgniteSpiContext#addTimeoutObject(IgniteSpiTimeoutObject)
 */
protected void addTimeoutObject(IgniteSpiTimeoutObject obj) {
    spiCtx.addTimeoutObject(obj);
}

/**
 * @param obj Timeout object.
 * @see IgniteSpiContext#removeTimeoutObject(IgniteSpiTimeoutObject)
 */
protected void removeTimeoutObject(IgniteSpiTimeoutObject obj) {
    spiCtx.removeTimeoutObject(obj);
}

/**
 * Initiates and checks failure detection timeout value.
*/ protected void initFailureDetectionTimeout() { if (failureDetectionTimeoutEnabled) { failureDetectionTimeout = ignite.configuration().getFailureDetectionTimeout(); if (failureDetectionTimeout <= 0) throw new IgniteSpiException("Invalid failure detection timeout value: " + failureDetectionTimeout); else if (failureDetectionTimeout <= 10) // Because U.currentTimeInMillis() is updated once in 10 milliseconds. log.warning("Failure detection timeout is too low, it may lead to unpredictable behaviour " + "[failureDetectionTimeout=" + failureDetectionTimeout + ']'); else if (failureDetectionTimeout <= ignite.configuration().getMetricsUpdateFrequency()) log.warning("'IgniteConfiguration.failureDetectionTimeout' should be greater then " + "'IgniteConfiguration.metricsUpdateFrequency' to prevent unnecessary status checking."); } // Intentionally compare references using '!=' below else if (ignite.configuration().getFailureDetectionTimeout() != IgniteConfiguration.DFLT_FAILURE_DETECTION_TIMEOUT) log.warning("Failure detection timeout will be ignored (one of SPI parameters has been set explicitly)"); clientFailureDetectionTimeout = ignite.configuration().getClientFailureDetectionTimeout(); if (clientFailureDetectionTimeout <= 0) throw new IgniteSpiException("Invalid client failure detection timeout value: " + clientFailureDetectionTimeout); else if (clientFailureDetectionTimeout <= 10) // Because U.currentTimeInMillis() is updated once in 10 milliseconds. log.warning("Client failure detection timeout is too low, it may lead to unpredictable behaviour " + "[clientFailureDetectionTimeout=" + clientFailureDetectionTimeout + ']'); if (clientFailureDetectionTimeout < ignite.configuration().getMetricsUpdateFrequency()) throw new IgniteSpiException("Inconsistent configuration " + "('IgniteConfiguration.clientFailureDetectionTimeout' must be greater or equal to " + "'IgniteConfiguration.metricsUpdateFrequency')."); } /** * Enables or disables failure detection timeout. 
 *
 * @param enabled {@code true} if enable, {@code false} otherwise.
 */
public void failureDetectionTimeoutEnabled(boolean enabled) {
    failureDetectionTimeoutEnabled = enabled;
}

/**
 * Checks whether failure detection timeout is enabled for this {@link IgniteSpi}.
 *
 * @return {@code true} if enabled, {@code false} otherwise.
 */
public boolean failureDetectionTimeoutEnabled() {
    return failureDetectionTimeoutEnabled;
}

/**
 * Returns client failure detection timeout set to use for network related operations.
 *
 * @return client failure detection timeout in milliseconds or {@code 0} if the timeout is disabled.
 */
public long clientFailureDetectionTimeout() {
    return clientFailureDetectionTimeout;
}

/**
 * Returns failure detection timeout set to use for network related operations.
 *
 * @return failure detection timeout in milliseconds or {@code 0} if the timeout is disabled.
 */
public long failureDetectionTimeout() {
    return failureDetectionTimeout;
}

/**
 * Temporary SPI context used before the real one is injected. Most operations are
 * no-ops or return empty/neutral values; message factory/formatter fall back to
 * implementations that fail fast when the node is not started.
 * NOTE: intentionally a non-static inner class — timeout-object methods read the
 * enclosing adapter's {@code ignite} field.
 */
private class GridDummySpiContext implements IgniteSpiContext {
    /** Local node (may be {@code null} before discovery starts). */
    private final ClusterNode locNode;

    /** Node stopping flag captured at construction time. */
    private final boolean stopping;

    /** Message factory (real one or fail-fast fallback). */
    private final MessageFactory msgFactory;

    /** Message formatter (real one or fail-fast fallback). */
    private final MessageFormatter msgFormatter;

    /**
     * Create temp SPI context.
     *
     * @param locNode Local node.
     * @param stopping Node stopping flag.
     * @param spiCtx SPI context to borrow message factory/formatter from, if any.
     */
    GridDummySpiContext(ClusterNode locNode, boolean stopping, @Nullable IgniteSpiContext spiCtx) {
        this.locNode = locNode;
        this.stopping = stopping;

        MessageFactory msgFactory0 = spiCtx != null ? spiCtx.messageFactory() : null;
        MessageFormatter msgFormatter0 = spiCtx != null ? spiCtx.messageFormatter() : null;

        if (msgFactory0 == null) {
            // Fallback that refuses to create messages until the node is started.
            msgFactory0 = new MessageFactory() {
                @Nullable @Override public Message create(short type) {
                    throw new IgniteException("Failed to read message, node is not started.");
                }
            };
        }

        if (msgFormatter0 == null) {
            // Fallback that refuses to read/write messages until the node is started.
            msgFormatter0 = new MessageFormatter() {
                @Override public MessageWriter writer(UUID rmtNodeId) {
                    throw new IgniteException("Failed to write message, node is not started.");
                }

                @Override public MessageReader reader(UUID rmtNodeId, MessageFactory msgFactory) {
                    throw new IgniteException("Failed to read message, node is not started.");
                }
            };
        }

        this.msgFactory = msgFactory0;
        this.msgFormatter = msgFormatter0;
    }

    /** {@inheritDoc} */
    @Override public void addLocalEventListener(GridLocalEventListener lsnr, int... types) {
        /* No-op. */
    }

    /** {@inheritDoc} */
    @Override public void addMessageListener(GridMessageListener lsnr, String topic) {
        /* No-op. */
    }

    /** {@inheritDoc} */
    @Override public void addLocalMessageListener(Object topic, IgniteBiPredicate<UUID, ?> p) {
        /* No-op. */
    }

    /** {@inheritDoc} */
    @Override public void recordEvent(Event evt) {
        /* No-op. */
    }

    /** {@inheritDoc} */
    @Override public void registerPort(int port, IgnitePortProtocol proto) {
        /* No-op. */
    }

    /** {@inheritDoc} */
    @Override public void deregisterPort(int port, IgnitePortProtocol proto) {
        /* No-op. */
    }

    /** {@inheritDoc} */
    @Override public void deregisterPorts() {
        /* No-op. */
    }

    /** {@inheritDoc} */
    @Override public <K, V> V get(String cacheName, K key) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public <K, V> V put(String cacheName, K key, V val, long ttl) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public <K, V> V putIfAbsent(String cacheName, K key, V val, long ttl) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public <K, V> V remove(String cacheName, K key) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public <K> boolean containsKey(String cacheName, K key) {
        return false;
    }

    /** {@inheritDoc} */
    @Override public int partition(String cacheName, Object key) {
        return -1;
    }

    /** {@inheritDoc} */
    @Override public Collection<ClusterNode> nodes() {
        return locNode == null ? Collections.<ClusterNode>emptyList() : Collections.singletonList(locNode);
    }

    /** {@inheritDoc} */
    @Override public ClusterNode localNode() {
        return locNode;
    }

    /** {@inheritDoc} */
    @Override public Collection<ClusterNode> remoteDaemonNodes() {
        return Collections.emptyList();
    }

    /** {@inheritDoc} */
    @Nullable @Override public ClusterNode node(UUID nodeId) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Collection<ClusterNode> remoteNodes() {
        return Collections.emptyList();
    }

    /** {@inheritDoc} */
    @Override public boolean pingNode(UUID nodeId) {
        return locNode != null && nodeId.equals(locNode.id());
    }

    /** {@inheritDoc} */
    @Override public boolean removeLocalEventListener(GridLocalEventListener lsnr) {
        return false;
    }

    /** {@inheritDoc} */
    @Override public boolean isEventRecordable(int... types) {
        return true;
    }

    /** {@inheritDoc} */
    @Override public void removeLocalMessageListener(Object topic, IgniteBiPredicate<UUID, ?> p) {
        /* No-op. */
    }

    /** {@inheritDoc} */
    @Override public boolean removeMessageListener(GridMessageListener lsnr, String topic) {
        return false;
    }

    /** {@inheritDoc} */
    @Override public void send(ClusterNode node, Serializable msg, String topic) {
        /* No-op. */
    }

    /** {@inheritDoc} */
    @Nullable @Override public IgniteNodeValidationResult validateNode(ClusterNode node) {
        return null;
    }

    /** {@inheritDoc} */
    @Nullable @Override public IgniteNodeValidationResult validateNode(ClusterNode node, DiscoveryDataBag discoData) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Collection<SecuritySubject> authenticatedSubjects() {
        return Collections.emptyList();
    }

    /** {@inheritDoc} */
    @Override public SecuritySubject authenticatedSubject(UUID subjId) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public MessageFormatter messageFormatter() {
        return msgFormatter;
    }

    /** {@inheritDoc} */
    @Override public MessageFactory messageFactory() {
        return msgFactory;
    }

    /** {@inheritDoc} */
    @Override public boolean isStopping() {
        return stopping;
    }

    /** {@inheritDoc} */
    @Override public boolean tryFailNode(UUID nodeId, @Nullable String warning) {
        return false;
    }

    /** {@inheritDoc} */
    @Override public void failNode(UUID nodeId, @Nullable String warning) {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public void addTimeoutObject(IgniteSpiTimeoutObject obj) {
        // Delegates to the kernal's timeout processor; requires a started IgniteKernal.
        Ignite ignite0 = ignite;

        if (!(ignite0 instanceof IgniteKernal))
            throw new IgniteSpiException("Wrong Ignite instance is set: " + ignite0);

        ((IgniteKernal)ignite0).context().timeout().addTimeoutObject(new GridSpiTimeoutObject(obj));
    }

    /** {@inheritDoc} */
    @Override public void removeTimeoutObject(IgniteSpiTimeoutObject obj) {
        Ignite ignite0 = ignite;

        if (!(ignite0 instanceof IgniteKernal))
            throw new IgniteSpiException("Wrong Ignite instance is set: " + ignite0);

        ((IgniteKernal)ignite0).context().timeout().removeTimeoutObject(new GridSpiTimeoutObject(obj));
    }

    /** {@inheritDoc} */
    @Override public Map<String, Object> nodeAttributes() {
        return Collections.emptyMap();
    }

    /** {@inheritDoc} */
    @Override public boolean communicationFailureResolveSupported() {
        return false;
    }

    /** {@inheritDoc} */
    @Override public void resolveCommunicationFailure(ClusterNode node, Exception err) {
        throw new UnsupportedOperationException();
    }
}
}
/*
 * Copyright 2010 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://code.google.com/p/google-apis-client-generator/
 * (build: 2015-11-16 19:10:01 UTC)
 * on 2015-11-19 at 17:34:48 UTC
 * Modify at your own risk.
 */

package com.google.api.services.discovery.model;

/**
 * Model definition for RestMethod.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the APIs Discovery Service. For a detailed explanation
 * see:
 * <a href="http://code.google.com/p/google-http-java-client/wiki/JSON">http://code.google.com/p/google-http-java-client/wiki/JSON</a>
 * </p>
 *
 * <p>NOTE: generated code — do not hand-edit behavior; regenerate instead.</p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class RestMethod extends com.google.api.client.json.GenericJson {

  /** Description of this method. The value may be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.String description;

  /**
   * Whether this method requires an ETag to be specified. The ETag is sent as an HTTP If-Match or
   * If-None-Match header. The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean etagRequired;

  /** HTTP method used by this method. The value may be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.String httpMethod;

  /**
   * A unique ID for this method. This property can be used to match methods between different
   * versions of Discovery. The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String id;

  /** Media upload parameters. The value may be {@code null}. */
  @com.google.api.client.util.Key
  private MediaUpload mediaUpload;

  /**
   * Ordered list of required parameters, serves as a hint to clients on how to structure their
   * method signatures. The array is ordered such that the "most-significant" parameter appears
   * first. The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> parameterOrder;

  /** Details for all parameters in this method. The value may be {@code null}. */
  @com.google.api.client.util.Key
  private java.util.Map<String, JsonSchema> parameters;

  /**
   * The URI path of this REST method. Should be used in conjunction with the basePath property at
   * the api-level. The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String path;

  /** The schema for the request. The value may be {@code null}. */
  @com.google.api.client.util.Key
  private Request request;

  /** The schema for the response. The value may be {@code null}. */
  @com.google.api.client.util.Key
  private Response response;

  /** OAuth 2.0 scopes applicable to this method. The value may be {@code null}. */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> scopes;

  /** Whether this method supports media downloads. The value may be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean supportsMediaDownload;

  /** Whether this method supports media uploads. The value may be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean supportsMediaUpload;

  /** Whether this method supports subscriptions. The value may be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean supportsSubscription;

  /**
   * Indicates that downloads from this method should use the download service URL (i.e.
   * "/download"). Only applies if the method supports media download. The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean useMediaDownloadService;

  /**
   * Description of this method.
   * @return value or {@code null} for none
   */
  public java.lang.String getDescription() {
    return description;
  }

  /**
   * Description of this method.
   * @param description description or {@code null} for none
   */
  public RestMethod setDescription(java.lang.String description) {
    this.description = description;
    return this;
  }

  /**
   * Whether this method requires an ETag to be specified. The ETag is sent as an HTTP If-Match or
   * If-None-Match header.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEtagRequired() {
    return etagRequired;
  }

  /**
   * Whether this method requires an ETag to be specified. The ETag is sent as an HTTP If-Match or
   * If-None-Match header.
   * @param etagRequired etagRequired or {@code null} for none
   */
  public RestMethod setEtagRequired(java.lang.Boolean etagRequired) {
    this.etagRequired = etagRequired;
    return this;
  }

  /**
   * HTTP method used by this method.
   * @return value or {@code null} for none
   */
  public java.lang.String getHttpMethod() {
    return httpMethod;
  }

  /**
   * HTTP method used by this method.
   * @param httpMethod httpMethod or {@code null} for none
   */
  public RestMethod setHttpMethod(java.lang.String httpMethod) {
    this.httpMethod = httpMethod;
    return this;
  }

  /**
   * A unique ID for this method. This property can be used to match methods between different
   * versions of Discovery.
   * @return value or {@code null} for none
   */
  public java.lang.String getId() {
    return id;
  }

  /**
   * A unique ID for this method. This property can be used to match methods between different
   * versions of Discovery.
   * @param id id or {@code null} for none
   */
  public RestMethod setId(java.lang.String id) {
    this.id = id;
    return this;
  }

  /**
   * Media upload parameters.
   * @return value or {@code null} for none
   */
  public MediaUpload getMediaUpload() {
    return mediaUpload;
  }

  /**
   * Media upload parameters.
   * @param mediaUpload mediaUpload or {@code null} for none
   */
  public RestMethod setMediaUpload(MediaUpload mediaUpload) {
    this.mediaUpload = mediaUpload;
    return this;
  }

  /**
   * Ordered list of required parameters, serves as a hint to clients on how to structure their
   * method signatures. The array is ordered such that the "most-significant" parameter appears
   * first.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getParameterOrder() {
    return parameterOrder;
  }

  /**
   * Ordered list of required parameters, serves as a hint to clients on how to structure their
   * method signatures. The array is ordered such that the "most-significant" parameter appears
   * first.
   * @param parameterOrder parameterOrder or {@code null} for none
   */
  public RestMethod setParameterOrder(java.util.List<java.lang.String> parameterOrder) {
    this.parameterOrder = parameterOrder;
    return this;
  }

  /**
   * Details for all parameters in this method.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, JsonSchema> getParameters() {
    return parameters;
  }

  /**
   * Details for all parameters in this method.
   * @param parameters parameters or {@code null} for none
   */
  public RestMethod setParameters(java.util.Map<String, JsonSchema> parameters) {
    this.parameters = parameters;
    return this;
  }

  /**
   * The URI path of this REST method. Should be used in conjunction with the basePath property at
   * the api-level.
   * @return value or {@code null} for none
   */
  public java.lang.String getPath() {
    return path;
  }

  /**
   * The URI path of this REST method. Should be used in conjunction with the basePath property at
   * the api-level.
   * @param path path or {@code null} for none
   */
  public RestMethod setPath(java.lang.String path) {
    this.path = path;
    return this;
  }

  /**
   * The schema for the request.
   * @return value or {@code null} for none
   */
  public Request getRequest() {
    return request;
  }

  /**
   * The schema for the request.
   * @param request request or {@code null} for none
   */
  public RestMethod setRequest(Request request) {
    this.request = request;
    return this;
  }

  /**
   * The schema for the response.
   * @return value or {@code null} for none
   */
  public Response getResponse() {
    return response;
  }

  /**
   * The schema for the response.
   * @param response response or {@code null} for none
   */
  public RestMethod setResponse(Response response) {
    this.response = response;
    return this;
  }

  /**
   * OAuth 2.0 scopes applicable to this method.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getScopes() {
    return scopes;
  }

  /**
   * OAuth 2.0 scopes applicable to this method.
   * @param scopes scopes or {@code null} for none
   */
  public RestMethod setScopes(java.util.List<java.lang.String> scopes) {
    this.scopes = scopes;
    return this;
  }

  /**
   * Whether this method supports media downloads.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getSupportsMediaDownload() {
    return supportsMediaDownload;
  }

  /**
   * Whether this method supports media downloads.
   * @param supportsMediaDownload supportsMediaDownload or {@code null} for none
   */
  public RestMethod setSupportsMediaDownload(java.lang.Boolean supportsMediaDownload) {
    this.supportsMediaDownload = supportsMediaDownload;
    return this;
  }

  /**
   * Whether this method supports media uploads.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getSupportsMediaUpload() {
    return supportsMediaUpload;
  }

  /**
   * Whether this method supports media uploads.
   * @param supportsMediaUpload supportsMediaUpload or {@code null} for none
   */
  public RestMethod setSupportsMediaUpload(java.lang.Boolean supportsMediaUpload) {
    this.supportsMediaUpload = supportsMediaUpload;
    return this;
  }

  /**
   * Whether this method supports subscriptions.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getSupportsSubscription() {
    return supportsSubscription;
  }

  /**
   * Whether this method supports subscriptions.
   * @param supportsSubscription supportsSubscription or {@code null} for none
   */
  public RestMethod setSupportsSubscription(java.lang.Boolean supportsSubscription) {
    this.supportsSubscription = supportsSubscription;
    return this;
  }

  /**
   * Indicates that downloads from this method should use the download service URL (i.e.
   * "/download"). Only applies if the method supports media download.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getUseMediaDownloadService() {
    return useMediaDownloadService;
  }

  /**
   * Indicates that downloads from this method should use the download service URL (i.e.
   * "/download"). Only applies if the method supports media download.
   * @param useMediaDownloadService useMediaDownloadService or {@code null} for none
   */
  public RestMethod setUseMediaDownloadService(java.lang.Boolean useMediaDownloadService) {
    this.useMediaDownloadService = useMediaDownloadService;
    return this;
  }

  @Override
  public RestMethod set(String fieldName, Object value) {
    return (RestMethod) super.set(fieldName, value);
  }

  @Override
  public RestMethod clone() {
    return (RestMethod) super.clone();
  }

  /**
   * Media upload parameters.
   */
  public static final class MediaUpload extends com.google.api.client.json.GenericJson {

    /** MIME Media Ranges for acceptable media uploads to this method. The value may be {@code null}. */
    @com.google.api.client.util.Key
    private java.util.List<java.lang.String> accept;

    /** Maximum size of a media upload, such as "1MB", "2GB" or "3TB". The value may be {@code null}. */
    @com.google.api.client.util.Key
    private java.lang.String maxSize;

    /** Supported upload protocols. The value may be {@code null}. */
    @com.google.api.client.util.Key
    private Protocols protocols;

    /**
     * MIME Media Ranges for acceptable media uploads to this method.
     * @return value or {@code null} for none
     */
    public java.util.List<java.lang.String> getAccept() {
      return accept;
    }

    /**
     * MIME Media Ranges for acceptable media uploads to this method.
     * @param accept accept or {@code null} for none
     */
    public MediaUpload setAccept(java.util.List<java.lang.String> accept) {
      this.accept = accept;
      return this;
    }

    /**
     * Maximum size of a media upload, such as "1MB", "2GB" or "3TB".
     * @return value or {@code null} for none
     */
    public java.lang.String getMaxSize() {
      return maxSize;
    }

    /**
     * Maximum size of a media upload, such as "1MB", "2GB" or "3TB".
     * @param maxSize maxSize or {@code null} for none
     */
    public MediaUpload setMaxSize(java.lang.String maxSize) {
      this.maxSize = maxSize;
      return this;
    }

    /**
     * Supported upload protocols.
     * @return value or {@code null} for none
     */
    public Protocols getProtocols() {
      return protocols;
    }

    /**
     * Supported upload protocols.
     * @param protocols protocols or {@code null} for none
     */
    public MediaUpload setProtocols(Protocols protocols) {
      this.protocols = protocols;
      return this;
    }

    @Override
    public MediaUpload set(String fieldName, Object value) {
      return (MediaUpload) super.set(fieldName, value);
    }

    @Override
    public MediaUpload clone() {
      return (MediaUpload) super.clone();
    }

    /**
     * Supported upload protocols.
     */
    public static final class Protocols extends com.google.api.client.json.GenericJson {

      /** Supports the Resumable Media Upload protocol. The value may be {@code null}. */
      @com.google.api.client.util.Key
      private Resumable resumable;

      /** Supports uploading as a single HTTP request. The value may be {@code null}. */
      @com.google.api.client.util.Key
      private Simple simple;

      /**
       * Supports the Resumable Media Upload protocol.
       * @return value or {@code null} for none
       */
      public Resumable getResumable() {
        return resumable;
      }

      /**
       * Supports the Resumable Media Upload protocol.
       * @param resumable resumable or {@code null} for none
       */
      public Protocols setResumable(Resumable resumable) {
        this.resumable = resumable;
        return this;
      }

      /**
       * Supports uploading as a single HTTP request.
       * @return value or {@code null} for none
       */
      public Simple getSimple() {
        return simple;
      }

      /**
       * Supports uploading as a single HTTP request.
       * @param simple simple or {@code null} for none
       */
      public Protocols setSimple(Simple simple) {
        this.simple = simple;
        return this;
      }

      @Override
      public Protocols set(String fieldName, Object value) {
        return (Protocols) super.set(fieldName, value);
      }

      @Override
      public Protocols clone() {
        return (Protocols) super.clone();
      }

      /**
       * Supports the Resumable Media Upload protocol.
       */
      public static final class Resumable extends com.google.api.client.json.GenericJson {

        /** True if this endpoint supports uploading multipart media. The value may be {@code null}. */
        @com.google.api.client.util.Key
        private java.lang.Boolean multipart;

        /**
         * The URI path to be used for upload. Should be used in conjunction with the basePath property at
         * the api-level. The value may be {@code null}.
         */
        @com.google.api.client.util.Key
        private java.lang.String path;

        /**
         * True if this endpoint supports uploading multipart media.
         * @return value or {@code null} for none
         */
        public java.lang.Boolean getMultipart() {
          return multipart;
        }

        /**
         * True if this endpoint supports uploading multipart media.
         * @param multipart multipart or {@code null} for none
         */
        public Resumable setMultipart(java.lang.Boolean multipart) {
          this.multipart = multipart;
          return this;
        }

        /**
         * Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
         *
         * <p>
         * Boolean properties can have four possible values:
         * {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
         * or {@link Boolean#FALSE}.
         * </p>
         *
         * <p>
         * This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
         * and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
         * {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
         * it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
         * </p>
         *
         * <p>
         * True if this endpoint supports uploading multipart media.
         * </p>
         */
        public boolean isMultipart() {
          if (multipart == null || multipart == com.google.api.client.util.Data.NULL_BOOLEAN) {
            return true;
          }
          return multipart;
        }

        /**
         * The URI path to be used for upload. Should be used in conjunction with the basePath property at
         * the api-level.
         * @return value or {@code null} for none
         */
        public java.lang.String getPath() {
          return path;
        }

        /**
         * The URI path to be used for upload. Should be used in conjunction with the basePath property at
         * the api-level.
         * @param path path or {@code null} for none
         */
        public Resumable setPath(java.lang.String path) {
          this.path = path;
          return this;
        }

        @Override
        public Resumable set(String fieldName, Object value) {
          return (Resumable) super.set(fieldName, value);
        }

        @Override
        public Resumable clone() {
          return (Resumable) super.clone();
        }
      }

      /**
       * Supports uploading as a single HTTP request.
       */
      public static final class Simple extends com.google.api.client.json.GenericJson {

        /** True if this endpoint supports upload multipart media. The value may be {@code null}. */
        @com.google.api.client.util.Key
        private java.lang.Boolean multipart;

        /**
         * The URI path to be used for upload. Should be used in conjunction with the basePath property at
         * the api-level. The value may be {@code null}.
         */
        @com.google.api.client.util.Key
        private java.lang.String path;

        /**
         * True if this endpoint supports upload multipart media.
         * @return value or {@code null} for none
         */
        public java.lang.Boolean getMultipart() {
          return multipart;
        }

        /**
         * True if this endpoint supports upload multipart media.
         * @param multipart multipart or {@code null} for none
         */
        public Simple setMultipart(java.lang.Boolean multipart) {
          this.multipart = multipart;
          return this;
        }

        /**
         * Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
         *
         * <p>
         * Boolean properties can have four possible values:
         * {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
         * or {@link Boolean#FALSE}.
         * </p>
         *
         * <p>
         * This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
         * and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
         * {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
         * it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
         * </p>
         *
         * <p>
         * True if this endpoint supports upload multipart media.
         * </p>
         */
        public boolean isMultipart() {
          if (multipart == null || multipart == com.google.api.client.util.Data.NULL_BOOLEAN) {
            return true;
          }
          return multipart;
        }

        /**
         * The URI path to be used for upload. Should be used in conjunction with the basePath property at
         * the api-level.
         * @return value or {@code null} for none
         */
        public java.lang.String getPath() {
          return path;
        }

        /**
         * The URI path to be used for upload. Should be used in conjunction with the basePath property at
         * the api-level.
         * @param path path or {@code null} for none
         */
        public Simple setPath(java.lang.String path) {
          this.path = path;
          return this;
        }

        @Override
        public Simple set(String fieldName, Object value) {
          return (Simple) super.set(fieldName, value);
        }

        @Override
        public Simple clone() {
          return (Simple) super.clone();
        }
      }
    }
  }

  /**
   * The schema for the request.
   */
  public static final class Request extends com.google.api.client.json.GenericJson {

    /** Schema ID for the request schema. The value may be {@code null}. */
    @com.google.api.client.util.Key
    private java.lang.String $ref;

    /** parameter name. The value may be {@code null}. */
    @com.google.api.client.util.Key
    private java.lang.String parameterName;

    /**
     * Schema ID for the request schema.
     * @return value or {@code null} for none
     */
    public java.lang.String get$ref() {
      return $ref;
    }

    /**
     * Schema ID for the request schema.
     * @param $ref $ref or {@code null} for none
     */
    public Request set$ref(java.lang.String $ref) {
      this.$ref = $ref;
      return this;
    }

    /**
     * parameter name.
     * @return value or {@code null} for none
     */
    public java.lang.String getParameterName() {
      return parameterName;
    }

    /**
     * parameter name.
     * @param parameterName parameterName or {@code null} for none
     */
    public Request setParameterName(java.lang.String parameterName) {
      this.parameterName = parameterName;
      return this;
    }

    @Override
    public Request set(String fieldName, Object value) {
      return (Request) super.set(fieldName, value);
    }

    @Override
    public Request clone() {
      return (Request) super.clone();
    }
  }

  /**
   * The schema for the response.
   */
  public static final class Response extends com.google.api.client.json.GenericJson {

    /** Schema ID for the response schema. The value may be {@code null}. */
    @com.google.api.client.util.Key
    private java.lang.String $ref;

    /**
     * Schema ID for the response schema.
     * @return value or {@code null} for none
     */
    public java.lang.String get$ref() {
      return $ref;
    }

    /**
     * Schema ID for the response schema.
     * @param $ref $ref or {@code null} for none
     */
    public Response set$ref(java.lang.String $ref) {
      this.$ref = $ref;
      return this;
    }

    @Override
    public Response set(String fieldName, Object value) {
      return (Response) super.set(fieldName, value);
    }

    @Override
    public Response clone() {
      return (Response) super.clone();
    }
  }
}
package testsforlibgdx; import static org.junit.Assert.*; import org.iwt2.nikky.model.actors.NikkiActor; import org.iwt2.nikky.model.stages.CombatStage; import org.junit.Before; import org.junit.Test; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Graphics; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.GL10; import com.badlogic.gdx.graphics.GL11; import com.badlogic.gdx.graphics.GL20; import com.badlogic.gdx.graphics.GLCommon; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.g2d.Batch; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.graphics.glutils.ShaderProgram; import com.badlogic.gdx.math.Matrix4; import com.badlogic.gdx.scenes.scene2d.Stage; public class TestStage { class FakeBatch implements Batch { @Override public void dispose() { // TODO Auto-generated method stub } @Override public void begin() { // TODO Auto-generated method stub } @Override public void disableBlending() { // TODO Auto-generated method stub } @Override public void draw(Texture arg0, float arg1, float arg2) { // TODO Auto-generated method stub } @Override public void draw(TextureRegion arg0, float arg1, float arg2) { // TODO Auto-generated method stub } @Override public void draw(Texture arg0, float[] arg1, int arg2, int arg3) { // TODO Auto-generated method stub } @Override public void draw(Texture arg0, float arg1, float arg2, float arg3, float arg4) { // TODO Auto-generated method stub } @Override public void draw(TextureRegion arg0, float arg1, float arg2, float arg3, float arg4) { // TODO Auto-generated method stub } @Override public void draw(Texture arg0, float arg1, float arg2, int arg3, int arg4, int arg5, int arg6) { // TODO Auto-generated method stub } @Override public void draw(Texture arg0, float arg1, float arg2, float arg3, float arg4, float arg5, float arg6, float arg7, float arg8) { // TODO Auto-generated method stub } @Override public void draw(TextureRegion arg0, float arg1, float 
arg2, float arg3, float arg4, float arg5, float arg6, float arg7, float arg8, float arg9) { // TODO Auto-generated method stub } @Override public void draw(Texture arg0, float arg1, float arg2, float arg3, float arg4, int arg5, int arg6, int arg7, int arg8, boolean arg9, boolean arg10) { // TODO Auto-generated method stub } @Override public void draw(TextureRegion arg0, float arg1, float arg2, float arg3, float arg4, float arg5, float arg6, float arg7, float arg8, float arg9, boolean arg10) { // TODO Auto-generated method stub } @Override public void draw(Texture arg0, float arg1, float arg2, float arg3, float arg4, float arg5, float arg6, float arg7, float arg8, float arg9, int arg10, int arg11, int arg12, int arg13, boolean arg14, boolean arg15) { // TODO Auto-generated method stub } @Override public void enableBlending() { // TODO Auto-generated method stub } @Override public void end() { // TODO Auto-generated method stub } @Override public void flush() { // TODO Auto-generated method stub } @Override public int getBlendDstFunc() { // TODO Auto-generated method stub return 0; } @Override public int getBlendSrcFunc() { // TODO Auto-generated method stub return 0; } @Override public Color getColor() { // TODO Auto-generated method stub return null; } @Override public Matrix4 getProjectionMatrix() { // TODO Auto-generated method stub return null; } @Override public Matrix4 getTransformMatrix() { // TODO Auto-generated method stub return null; } @Override public boolean isBlendingEnabled() { // TODO Auto-generated method stub return false; } @Override public void setBlendFunction(int arg0, int arg1) { // TODO Auto-generated method stub } @Override public void setColor(Color arg0) { // TODO Auto-generated method stub } @Override public void setColor(float arg0) { // TODO Auto-generated method stub } @Override public void setColor(float arg0, float arg1, float arg2, float arg3) { // TODO Auto-generated method stub } @Override public void setProjectionMatrix(Matrix4 
arg0) { // TODO Auto-generated method stub } @Override public void setShader(ShaderProgram arg0) { // TODO Auto-generated method stub } @Override public void setTransformMatrix(Matrix4 arg0) { // TODO Auto-generated method stub } } class FakeGraphics implements Graphics { @Override public boolean isGL11Available() { // TODO Auto-generated method stub return false; } @Override public boolean isGL20Available() { // TODO Auto-generated method stub return false; } @Override public GLCommon getGLCommon() { // TODO Auto-generated method stub return null; } @Override public GL10 getGL10() { // TODO Auto-generated method stub return null; } @Override public GL11 getGL11() { // TODO Auto-generated method stub return null; } @Override public GL20 getGL20() { // TODO Auto-generated method stub return null; } /** * Faked */ @Override public int getWidth() { return 800; } /** * Faked */ @Override public int getHeight() { return 600; } @Override public float getDeltaTime() { // TODO Auto-generated method stub return 0; } @Override public float getRawDeltaTime() { // TODO Auto-generated method stub return 0; } @Override public int getFramesPerSecond() { // TODO Auto-generated method stub return 0; } @Override public GraphicsType getType() { // TODO Auto-generated method stub return null; } @Override public float getPpiX() { // TODO Auto-generated method stub return 0; } @Override public float getPpiY() { // TODO Auto-generated method stub return 0; } @Override public float getPpcX() { // TODO Auto-generated method stub return 0; } @Override public float getPpcY() { // TODO Auto-generated method stub return 0; } @Override public float getDensity() { // TODO Auto-generated method stub return 0; } @Override public boolean supportsDisplayModeChange() { // TODO Auto-generated method stub return false; } @Override public DisplayMode[] getDisplayModes() { // TODO Auto-generated method stub return null; } @Override public DisplayMode getDesktopDisplayMode() { // TODO Auto-generated 
method stub return null; } @Override public boolean setDisplayMode(DisplayMode displayMode) { // TODO Auto-generated method stub return false; } @Override public boolean setDisplayMode(int width, int height, boolean fullscreen) { // TODO Auto-generated method stub return false; } @Override public void setTitle(String title) { // TODO Auto-generated method stub } @Override public void setVSync(boolean vsync) { // TODO Auto-generated method stub } @Override public BufferFormat getBufferFormat() { // TODO Auto-generated method stub return null; } @Override public boolean supportsExtension(String extension) { // TODO Auto-generated method stub return false; } @Override public void setContinuousRendering(boolean isContinuous) { // TODO Auto-generated method stub } @Override public boolean isContinuousRendering() { // TODO Auto-generated method stub return false; } @Override public void requestRendering() { // TODO Auto-generated method stub } @Override public boolean isFullscreen() { // TODO Auto-generated method stub return false; } } @Before public void setUp() throws Exception { } /** * I have to mock * GDX.graphicxs for methods getWidth and getHeight * SpriteBach * Uses the cnstructor that accepts a spritebatch */ @Test public void tesAddActorIntoStage() { Gdx.graphics = new FakeGraphics(); Stage stage = new Stage(800f, 600f, true, new FakeBatch()); } }
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License */ package com.android.systemui.statusbar.phone; import android.app.ActivityManager; import android.app.ActivityManagerNative; import android.app.admin.DevicePolicyManager; import android.content.BroadcastReceiver; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.ServiceConnection; import android.content.pm.ActivityInfo; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.content.res.Configuration; import android.hardware.fingerprint.FingerprintManager; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.ColorMatrix; import android.graphics.ColorMatrixColorFilter; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.hardware.fingerprint.FingerprintManager; import android.os.AsyncTask; import android.os.Bundle; import android.os.IBinder; import android.os.Message; import android.os.Messenger; import android.os.RemoteException; import android.os.UserHandle; import android.provider.MediaStore; import android.service.media.CameraPrewarmService; import android.telecom.TelecomManager; import android.util.AttributeSet; import android.util.Log; import android.util.TypedValue; import android.view.View; import android.view.ViewGroup; import 
android.view.accessibility.AccessibilityNodeInfo; import android.view.animation.AnimationUtils; import android.view.animation.Interpolator; import android.widget.FrameLayout; import android.widget.TextView; import com.android.internal.widget.LockPatternUtils; import com.android.keyguard.KeyguardUpdateMonitor; import com.android.keyguard.KeyguardUpdateMonitorCallback; import com.android.systemui.EventLogConstants; import com.android.systemui.EventLogTags; import com.android.systemui.R; import com.android.systemui.assist.AssistManager; import com.android.systemui.cm.LockscreenShortcutsHelper; import com.android.systemui.cm.LockscreenShortcutsHelper.Shortcuts; import com.android.systemui.statusbar.CommandQueue; import com.android.systemui.statusbar.KeyguardAffordanceView; import com.android.systemui.statusbar.KeyguardIndicationController; import com.android.systemui.statusbar.policy.AccessibilityController; import com.android.systemui.statusbar.policy.FlashlightController; import com.android.systemui.statusbar.policy.PreviewInflater; import static android.view.accessibility.AccessibilityNodeInfo.ACTION_CLICK; import static android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction; /** * Implementation for the bottom area of the Keyguard, including camera/phone affordance and status * text. 
*/ public class KeyguardBottomAreaView extends FrameLayout implements View.OnClickListener, UnlockMethodCache.OnUnlockMethodChangedListener, LockscreenShortcutsHelper.OnChangeListener, AccessibilityController.AccessibilityStateChangedCallback, View.OnLongClickListener { final static String TAG = "PhoneStatusBar/KeyguardBottomAreaView"; public static final String CAMERA_LAUNCH_SOURCE_AFFORDANCE = "lockscreen_affordance"; public static final String CAMERA_LAUNCH_SOURCE_WIGGLE = "wiggle_gesture"; public static final String CAMERA_LAUNCH_SOURCE_POWER_DOUBLE_TAP = "power_double_tap"; public static final String EXTRA_CAMERA_LAUNCH_SOURCE = "com.android.systemui.camera_launch_source"; private static final Intent SECURE_CAMERA_INTENT = new Intent(MediaStore.INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE) .addFlags(Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS); public static final Intent INSECURE_CAMERA_INTENT = new Intent(MediaStore.INTENT_ACTION_STILL_IMAGE_CAMERA); private static final Intent PHONE_INTENT = new Intent(Intent.ACTION_DIAL); private static final int DOZE_ANIMATION_STAGGER_DELAY = 48; private static final int DOZE_ANIMATION_ELEMENT_DURATION = 250; private KeyguardAffordanceView mCameraImageView; private KeyguardAffordanceView mLeftAffordanceView; private LockIcon mLockIcon; private TextView mIndicationText; private ViewGroup mPreviewContainer; private View mLeftPreview; private View mCameraPreview; private ActivityStarter mActivityStarter; private UnlockMethodCache mUnlockMethodCache; private LockPatternUtils mLockPatternUtils; private FlashlightController mFlashlightController; private PreviewInflater mPreviewInflater; private KeyguardIndicationController mIndicationController; private AccessibilityController mAccessibilityController; private PhoneStatusBar mPhoneStatusBar; private LockscreenShortcutsHelper mShortcutHelper; private final ColorMatrixColorFilter mGrayScaleFilter; private final Interpolator mLinearOutSlowInInterpolator; private boolean 
mUserSetupComplete; private boolean mPrewarmBound; private Messenger mPrewarmMessenger; private final ServiceConnection mPrewarmConnection = new ServiceConnection() { @Override public void onServiceConnected(ComponentName name, IBinder service) { mPrewarmMessenger = new Messenger(service); } @Override public void onServiceDisconnected(ComponentName name) { mPrewarmMessenger = null; } }; private AssistManager mAssistManager; public KeyguardBottomAreaView(Context context) { this(context, null); } public KeyguardBottomAreaView(Context context, AttributeSet attrs) { this(context, attrs, 0); } public KeyguardBottomAreaView(Context context, AttributeSet attrs, int defStyleAttr) { this(context, attrs, defStyleAttr, 0); } public KeyguardBottomAreaView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) { super(context, attrs, defStyleAttr, defStyleRes); mLinearOutSlowInInterpolator = AnimationUtils.loadInterpolator(context, android.R.interpolator.linear_out_slow_in); ColorMatrix cm = new ColorMatrix(); cm.setSaturation(0); mGrayScaleFilter = new ColorMatrixColorFilter(cm); } private AccessibilityDelegate mAccessibilityDelegate = new AccessibilityDelegate() { @Override public void onInitializeAccessibilityNodeInfo(View host, AccessibilityNodeInfo info) { super.onInitializeAccessibilityNodeInfo(host, info); String label = null; if (host == mLockIcon) { label = getResources().getString(R.string.unlock_label); } else if (host == mCameraImageView) { if (isTargetCustom(Shortcuts.RIGHT_SHORTCUT)) { label = mShortcutHelper.getFriendlyNameForUri(Shortcuts.RIGHT_SHORTCUT); } else { label = getResources().getString(R.string.camera_label); } } else if (host == mLeftAffordanceView) { if (isTargetCustom(Shortcuts.LEFT_SHORTCUT)) { label = mShortcutHelper.getFriendlyNameForUri(Shortcuts.LEFT_SHORTCUT); } else { if (isLeftVoiceAssist()) { label = getResources().getString(R.string.voice_assist_label); } else { label = getResources().getString(R.string.phone_label); } } 
} info.addAction(new AccessibilityAction(ACTION_CLICK, label)); } @Override public boolean performAccessibilityAction(View host, int action, Bundle args) { if (action == ACTION_CLICK) { if (host == mLockIcon) { mPhoneStatusBar.animateCollapsePanels( CommandQueue.FLAG_EXCLUDE_RECENTS_PANEL, true /* force */); return true; } else if (host == mCameraImageView) { launchCamera(CAMERA_LAUNCH_SOURCE_AFFORDANCE); return true; } else if (host == mLeftAffordanceView) { launchLeftAffordance(); return true; } } return super.performAccessibilityAction(host, action, args); } }; @Override protected void onFinishInflate() { super.onFinishInflate(); mLockPatternUtils = new LockPatternUtils(mContext); mPreviewContainer = (ViewGroup) findViewById(R.id.preview_container); mCameraImageView = (KeyguardAffordanceView) findViewById(R.id.camera_button); mLeftAffordanceView = (KeyguardAffordanceView) findViewById(R.id.left_button); mLockIcon = (LockIcon) findViewById(R.id.lock_icon); mIndicationText = (TextView) findViewById(R.id.keyguard_indication_text); mShortcutHelper = new LockscreenShortcutsHelper(mContext, this); watchForCameraPolicyChanges(); updateCameraVisibility(); mUnlockMethodCache = UnlockMethodCache.getInstance(getContext()); mUnlockMethodCache.addListener(this); mLockIcon.update(); setClipChildren(false); setClipToPadding(false); mPreviewInflater = new PreviewInflater(mContext, new LockPatternUtils(mContext)); mLockIcon.setOnClickListener(this); mLockIcon.setOnLongClickListener(this); mCameraImageView.setOnClickListener(this); mLeftAffordanceView.setOnClickListener(this); initAccessibility(); updateCustomShortcuts(); } private void updateCustomShortcuts() { updateLeftAffordanceIcon(); updateRightAffordanceIcon(); inflateCameraPreview(); } private void updateRightAffordanceIcon() { Drawable drawable; String contentDescription; boolean shouldGrayScale = false; if (isTargetCustom(Shortcuts.RIGHT_SHORTCUT)) { drawable = 
mShortcutHelper.getDrawableForTarget(Shortcuts.RIGHT_SHORTCUT); shouldGrayScale = true; contentDescription = mShortcutHelper.getFriendlyNameForUri(Shortcuts.RIGHT_SHORTCUT); } else { drawable = mContext.getDrawable(R.drawable.ic_camera_alt_24dp); contentDescription = mContext.getString(R.string.accessibility_camera_button); } mCameraImageView.setImageDrawable(drawable); mCameraImageView.setContentDescription(contentDescription); mCameraImageView.setDefaultFilter(shouldGrayScale ? mGrayScaleFilter : null); updateCameraVisibility(); } private void initAccessibility() { mLockIcon.setAccessibilityDelegate(mAccessibilityDelegate); mLeftAffordanceView.setAccessibilityDelegate(mAccessibilityDelegate); mCameraImageView.setAccessibilityDelegate(mAccessibilityDelegate); } @Override protected void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); int indicationBottomMargin = getResources().getDimensionPixelSize( R.dimen.keyguard_indication_margin_bottom); MarginLayoutParams mlp = (MarginLayoutParams) mIndicationText.getLayoutParams(); if (mlp.bottomMargin != indicationBottomMargin) { mlp.bottomMargin = indicationBottomMargin; mIndicationText.setLayoutParams(mlp); } // Respect font size setting. 
mIndicationText.setTextSize(TypedValue.COMPLEX_UNIT_PX, getResources().getDimensionPixelSize( com.android.internal.R.dimen.text_size_small_material)); } public void setActivityStarter(ActivityStarter activityStarter) { mActivityStarter = activityStarter; } public void setFlashlightController(FlashlightController flashlightController) { mFlashlightController = flashlightController; } public void setAccessibilityController(AccessibilityController accessibilityController) { mAccessibilityController = accessibilityController; mLockIcon.setAccessibilityController(accessibilityController); accessibilityController.addStateChangedCallback(this); } public void setPhoneStatusBar(PhoneStatusBar phoneStatusBar) { mPhoneStatusBar = phoneStatusBar; updateCameraVisibility(); // in case onFinishInflate() was called too early } public void setUserSetupComplete(boolean userSetupComplete) { mUserSetupComplete = userSetupComplete; updateCameraVisibility(); updateLeftAffordanceIcon(); } private Intent getCameraIntent() { KeyguardUpdateMonitor updateMonitor = KeyguardUpdateMonitor.getInstance(mContext); boolean canSkipBouncer = updateMonitor.getUserCanSkipBouncer( KeyguardUpdateMonitor.getCurrentUser()); boolean secure = mLockPatternUtils.isSecure(KeyguardUpdateMonitor.getCurrentUser()); return (secure && !canSkipBouncer) ? SECURE_CAMERA_INTENT : INSECURE_CAMERA_INTENT; } /** * Resolves the intent to launch the camera application. 
*/ public ResolveInfo resolveCameraIntent() { return mContext.getPackageManager().resolveActivityAsUser(getCameraIntent(), PackageManager.MATCH_DEFAULT_ONLY, KeyguardUpdateMonitor.getCurrentUser()); } private void updateCameraVisibility() { if (mCameraImageView == null) { // Things are not set up yet; reply hazy, ask again later return; } boolean visible = mUserSetupComplete; if (visible) { if (isTargetCustom(Shortcuts.RIGHT_SHORTCUT)) { visible = !mShortcutHelper.isTargetEmpty(Shortcuts.RIGHT_SHORTCUT); } else { ResolveInfo resolved = resolveCameraIntent(); visible = !isCameraDisabledByDpm() && resolved != null && getResources().getBoolean(R.bool.config_keyguardShowCameraAffordance); } } mCameraImageView.setVisibility(visible ? View.VISIBLE : View.GONE); } private void updateLeftAffordanceIcon() { Drawable drawable; String contentDescription; boolean shouldGrayScale = false; boolean visible = mUserSetupComplete; if (mShortcutHelper.isTargetCustom(Shortcuts.LEFT_SHORTCUT)) { drawable = mShortcutHelper.getDrawableForTarget(Shortcuts.LEFT_SHORTCUT); shouldGrayScale = true; contentDescription = mShortcutHelper.getFriendlyNameForUri(Shortcuts.LEFT_SHORTCUT); visible |= !mShortcutHelper.isTargetEmpty(Shortcuts.LEFT_SHORTCUT); } else if (canLaunchVoiceAssist()) { drawable = mContext.getDrawable(R.drawable.ic_mic_26dp); contentDescription = mContext.getString(R.string.accessibility_voice_assist_button); } else { visible &= isPhoneVisible(); drawable = mContext.getDrawable(R.drawable.ic_phone_24dp); contentDescription = mContext.getString(R.string.accessibility_phone_button); } mLeftAffordanceView.setVisibility(visible ? View.VISIBLE : View.GONE); mLeftAffordanceView.setImageDrawable(drawable); mLeftAffordanceView.setContentDescription(contentDescription); mLeftAffordanceView.setDefaultFilter(shouldGrayScale ? 
mGrayScaleFilter : null); } public boolean isLeftVoiceAssist() { return !isTargetCustom(Shortcuts.LEFT_SHORTCUT) && canLaunchVoiceAssist(); } private boolean isPhoneVisible() { PackageManager pm = mContext.getPackageManager(); return pm.hasSystemFeature(PackageManager.FEATURE_TELEPHONY) && pm.resolveActivity(PHONE_INTENT, 0) != null; } private boolean isCameraDisabledByDpm() { final DevicePolicyManager dpm = (DevicePolicyManager) getContext().getSystemService(Context.DEVICE_POLICY_SERVICE); if (dpm != null && mPhoneStatusBar != null) { try { final int userId = ActivityManagerNative.getDefault().getCurrentUser().id; final int disabledFlags = dpm.getKeyguardDisabledFeatures(null, userId); final boolean disabledBecauseKeyguardSecure = (disabledFlags & DevicePolicyManager.KEYGUARD_DISABLE_SECURE_CAMERA) != 0 && mPhoneStatusBar.isKeyguardSecure(); return dpm.getCameraDisabled(null) || disabledBecauseKeyguardSecure; } catch (RemoteException e) { Log.e(TAG, "Can't get userId", e); } } return false; } private void watchForCameraPolicyChanges() { final IntentFilter filter = new IntentFilter(); filter.addAction(DevicePolicyManager.ACTION_DEVICE_POLICY_MANAGER_STATE_CHANGED); getContext().registerReceiverAsUser(mDevicePolicyReceiver, UserHandle.ALL, filter, null, null); KeyguardUpdateMonitor.getInstance(mContext).registerCallback(mUpdateMonitorCallback); } @Override public void onStateChanged(boolean accessibilityEnabled, boolean touchExplorationEnabled) { mCameraImageView.setClickable(touchExplorationEnabled); mLeftAffordanceView.setClickable(touchExplorationEnabled); mCameraImageView.setFocusable(accessibilityEnabled); mLeftAffordanceView.setFocusable(accessibilityEnabled); mLockIcon.update(); } @Override public void onClick(View v) { if (v == mCameraImageView) { launchCamera(CAMERA_LAUNCH_SOURCE_AFFORDANCE); } else if (v == mLeftAffordanceView) { launchLeftAffordance(); } if (v == mLockIcon) { if (!mAccessibilityController.isAccessibilityEnabled()) { 
handleTrustCircleClick(); } else { mPhoneStatusBar.animateCollapsePanels( CommandQueue.FLAG_EXCLUDE_NONE, true /* force */); } } } @Override public boolean onLongClick(View v) { handleTrustCircleClick(); return true; } private void handleTrustCircleClick() { EventLogTags.writeSysuiLockscreenGesture( EventLogConstants.SYSUI_LOCKSCREEN_GESTURE_TAP_LOCK, 0 /* lengthDp - N/A */, 0 /* velocityDp - N/A */); mIndicationController.showTransientIndication( R.string.keyguard_indication_trust_disabled); mLockPatternUtils.requireCredentialEntry(KeyguardUpdateMonitor.getCurrentUser()); } public void bindCameraPrewarmService() { Intent intent = getCameraIntent(); ActivityInfo targetInfo = PreviewInflater.getTargetActivityInfo(mContext, intent, KeyguardUpdateMonitor.getCurrentUser()); if (targetInfo != null && targetInfo.metaData != null) { String clazz = targetInfo.metaData.getString( MediaStore.META_DATA_STILL_IMAGE_CAMERA_PREWARM_SERVICE); if (clazz != null) { Intent serviceIntent = new Intent(); serviceIntent.setClassName(targetInfo.packageName, clazz); serviceIntent.setAction(CameraPrewarmService.ACTION_PREWARM); try { if (getContext().bindServiceAsUser(serviceIntent, mPrewarmConnection, Context.BIND_AUTO_CREATE | Context.BIND_FOREGROUND_SERVICE, new UserHandle(UserHandle.USER_CURRENT))) { mPrewarmBound = true; } } catch (SecurityException e) { Log.w(TAG, "Unable to bind to prewarm service package=" + targetInfo.packageName + " class=" + clazz, e); } } } } public void unbindCameraPrewarmService(boolean launched) { if (mPrewarmBound) { if (mPrewarmMessenger != null && launched) { try { mPrewarmMessenger.send(Message.obtain(null /* handler */, CameraPrewarmService.MSG_CAMERA_FIRED)); } catch (RemoteException e) { Log.w(TAG, "Error sending camera fired message", e); } } mContext.unbindService(mPrewarmConnection); mPrewarmBound = false; } } public void launchCamera(String source) { final Intent intent; if 
(!mShortcutHelper.isTargetCustom(LockscreenShortcutsHelper.Shortcuts.RIGHT_SHORTCUT)) { intent = getCameraIntent(); } else { intent = mShortcutHelper.getIntent(LockscreenShortcutsHelper.Shortcuts.RIGHT_SHORTCUT); intent.putExtra(EXTRA_CAMERA_LAUNCH_SOURCE, source); } boolean wouldLaunchResolverActivity = PreviewInflater.wouldLaunchResolverActivity( mContext, intent, KeyguardUpdateMonitor.getCurrentUser()); if (intent == SECURE_CAMERA_INTENT && !wouldLaunchResolverActivity) { AsyncTask.execute(new Runnable() { @Override public void run() { int result = ActivityManager.START_CANCELED; try { result = ActivityManagerNative.getDefault().startActivityAsUser( null, getContext().getBasePackageName(), intent, intent.resolveTypeIfNeeded(getContext().getContentResolver()), null, null, 0, Intent.FLAG_ACTIVITY_NEW_TASK, null, null, UserHandle.CURRENT.getIdentifier()); } catch (RemoteException e) { Log.w(TAG, "Unable to start camera activity", e); } mActivityStarter.preventNextAnimation(); final boolean launched = isSuccessfulLaunch(result); post(new Runnable() { @Override public void run() { unbindCameraPrewarmService(launched); } }); } }); } else { // We need to delay starting the activity because ResolverActivity finishes itself if // launched behind lockscreen. 
mActivityStarter.startActivity(intent, false /* dismissShade */, new ActivityStarter.Callback() { @Override public void onActivityStarted(int resultCode) { unbindCameraPrewarmService(isSuccessfulLaunch(resultCode)); } }); } } private static boolean isSuccessfulLaunch(int result) { return result == ActivityManager.START_SUCCESS || result == ActivityManager.START_DELIVERED_TO_TOP || result == ActivityManager.START_TASK_TO_FRONT; } public void launchLeftAffordance() { if (mShortcutHelper.isTargetCustom(Shortcuts.LEFT_SHORTCUT)) { Intent intent = mShortcutHelper.getIntent(Shortcuts.LEFT_SHORTCUT); mActivityStarter.startActivity(intent, false /* dismissShade */); } else if (isLeftVoiceAssist()) { launchVoiceAssist(); } else { launchPhone(); } } private void launchVoiceAssist() { Runnable runnable = new Runnable() { @Override public void run() { mAssistManager.launchVoiceAssistFromKeyguard(); mActivityStarter.preventNextAnimation(); } }; if (mPhoneStatusBar.isKeyguardCurrentlySecure()) { AsyncTask.execute(runnable); } else { mPhoneStatusBar.executeRunnableDismissingKeyguard(runnable, null /* cancelAction */, false /* dismissShade */, false /* afterKeyguardGone */); } } private boolean canLaunchVoiceAssist() { if (mAssistManager == null) { return false; } return mAssistManager.canVoiceAssistBeLaunchedFromKeyguard(); } private void launchPhone() { final TelecomManager tm = TelecomManager.from(mContext); if (tm.isInCall()) { AsyncTask.execute(new Runnable() { @Override public void run() { tm.showInCallScreen(false /* showDialpad */); } }); } else { mActivityStarter.startActivity(PHONE_INTENT, false /* dismissShade */); } } @Override protected void onVisibilityChanged(View changedView, int visibility) { super.onVisibilityChanged(changedView, visibility); if (changedView == this && visibility == VISIBLE) { mLockIcon.update(); updateCameraVisibility(); } } public KeyguardAffordanceView getLeftView() { return mLeftAffordanceView; } public KeyguardAffordanceView getRightView() { 
return mCameraImageView; } public View getLeftPreview() { return mLeftPreview; } public View getRightPreview() { return mCameraPreview; } public LockIcon getLockIcon() { return mLockIcon; } public View getIndicationView() { return mIndicationText; } @Override public boolean hasOverlappingRendering() { return false; } @Override public void onUnlockMethodStateChanged() { mLockIcon.update(); updateCameraVisibility(); } private void inflateCameraPreview() { if (isTargetCustom(Shortcuts.RIGHT_SHORTCUT)) { mPreviewContainer.removeView(mCameraPreview); } else { mCameraPreview = mPreviewInflater.inflatePreview(getCameraIntent()); if (mCameraPreview != null) { mPreviewContainer.addView(mCameraPreview); mCameraPreview.setVisibility(View.INVISIBLE); } } } private void updateLeftPreview() { View previewBefore = mLeftPreview; if (previewBefore != null) { mPreviewContainer.removeView(previewBefore); } if (isTargetCustom(Shortcuts.LEFT_SHORTCUT)) { // Custom shortcuts don't support previews return; } if (isLeftVoiceAssist()) { mLeftPreview = mPreviewInflater.inflatePreviewFromService( mAssistManager.getVoiceInteractorComponentName()); } else { mLeftPreview = mPreviewInflater.inflatePreview(PHONE_INTENT); } if (mLeftPreview != null) { mPreviewContainer.addView(mLeftPreview); mLeftPreview.setVisibility(View.INVISIBLE); } } public void startFinishDozeAnimation() { long delay = 0; if (mLeftAffordanceView.getVisibility() == View.VISIBLE) { startFinishDozeAnimationElement(mLeftAffordanceView, delay); delay += DOZE_ANIMATION_STAGGER_DELAY; } startFinishDozeAnimationElement(mLockIcon, delay); delay += DOZE_ANIMATION_STAGGER_DELAY; if (mCameraImageView.getVisibility() == View.VISIBLE) { startFinishDozeAnimationElement(mCameraImageView, delay); } mIndicationText.setAlpha(0f); mIndicationText.animate() .alpha(1f) .setInterpolator(mLinearOutSlowInInterpolator) .setDuration(NotificationPanelView.DOZE_ANIMATION_DURATION); } private void startFinishDozeAnimationElement(View element, long delay) 
{ element.setAlpha(0f); element.setTranslationY(element.getHeight() / 2); element.animate() .alpha(1f) .translationY(0f) .setInterpolator(mLinearOutSlowInInterpolator) .setStartDelay(delay) .setDuration(DOZE_ANIMATION_ELEMENT_DURATION); } private final BroadcastReceiver mDevicePolicyReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { post(new Runnable() { @Override public void run() { updateCameraVisibility(); } }); } }; private final KeyguardUpdateMonitorCallback mUpdateMonitorCallback = new KeyguardUpdateMonitorCallback() { @Override public void onUserSwitchComplete(int userId) { updateCameraVisibility(); } @Override public void onStartedWakingUp() { mLockIcon.setDeviceInteractive(true); } @Override public void onFinishedGoingToSleep(int why) { mLockIcon.setDeviceInteractive(false); } @Override public void onScreenTurnedOn() { mLockIcon.setScreenOn(true); } @Override public void onScreenTurnedOff() { mLockIcon.setScreenOn(false); } @Override public void onKeyguardVisibilityChanged(boolean showing) { mLockIcon.update(); } @Override public void onFingerprintRunningStateChanged(boolean running) { mLockIcon.update(); } @Override public void onStrongAuthStateChanged(int userId) { mLockIcon.update(); } }; public void setKeyguardIndicationController( KeyguardIndicationController keyguardIndicationController) { mIndicationController = keyguardIndicationController; } public void setAssistManager(AssistManager assistManager) { mAssistManager = assistManager; updateLeftAffordance(); } public void updateLeftAffordance() { updateLeftAffordanceIcon(); updateLeftPreview(); } private String getIndexHint(LockscreenShortcutsHelper.Shortcuts shortcut) { if (mShortcutHelper.isTargetCustom(shortcut)) { boolean isRtl = getLayoutDirection() == LAYOUT_DIRECTION_RTL; String label = mShortcutHelper.getFriendlyNameForUri(shortcut); int resId = 0; switch (shortcut) { case LEFT_SHORTCUT: resId = isRtl ? 
R.string.right_shortcut_hint : R.string.left_shortcut_hint; break; case RIGHT_SHORTCUT: resId = isRtl ? R.string.left_shortcut_hint : R.string.right_shortcut_hint; break; } return mContext.getString(resId, label); } else { return null; } } public String getLeftHint() { String label = getIndexHint(LockscreenShortcutsHelper.Shortcuts.LEFT_SHORTCUT); if (label == null) { if (isLeftVoiceAssist()) { label = mContext.getString(R.string.voice_hint); } else { label = mContext.getString(R.string.phone_hint); } } return label; } public String getRightHint() { String label = getIndexHint(LockscreenShortcutsHelper.Shortcuts.RIGHT_SHORTCUT); if (label == null) { label = mContext.getString(R.string.camera_hint); } return label; } public boolean isTargetCustom(LockscreenShortcutsHelper.Shortcuts shortcut) { return mShortcutHelper.isTargetCustom(shortcut); } @Override public void onChange() { updateCustomShortcuts(); } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); mAccessibilityController.removeStateChangedCallback(this); mContext.unregisterReceiver(mDevicePolicyReceiver); mShortcutHelper.cleanup(); mUnlockMethodCache.removeListener(this); } }
/*
 * Copyright 2000-2016 Vaadin Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.vaadin.client;

import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.google.gwt.i18n.client.LocaleInfo;
import com.google.gwt.i18n.client.TimeZone;
import com.google.gwt.i18n.shared.DateTimeFormat;
import com.vaadin.shared.ui.datefield.DateResolution;

/**
 * This class provides date/time parsing and formatting services to all
 * components on the client side, using the locale configured for this service
 * (which may differ from the browser locale).
 *
 * @author Vaadin Ltd.
 *
 */
@SuppressWarnings("deprecation")
public class DateTimeService {

    private String locale;

    // Days per month in a non-leap year; February (index 1) is special-cased
    // in getNumberOfDaysInMonth().
    private static int[] maxDaysInMonth = { 31, 28, 31, 30, 31, 30, 31, 31, 30,
            31, 30, 31 };

    private static final long MILLISECONDS_PER_DAY = 24 * 3600 * 1000;

    /**
     * Creates a new date time service with the application default locale.
     */
    public DateTimeService() {
        locale = LocaleService.getDefaultLocale();
    }

    /**
     * Creates a new date time service with a given locale.
     *
     * @param locale
     *            e.g. {@code fi}, {@code en}, etc.
     * @throws LocaleNotLoadedException
     *             if the given locale is not among the loaded locales
     */
    public DateTimeService(String locale) throws LocaleNotLoadedException {
        setLocale(locale);
    }

    /**
     * Sets the active locale of this service.
     *
     * @param locale
     *            the locale id, e.g. {@code fi}
     * @throws LocaleNotLoadedException
     *             if the given locale is not among the loaded locales
     */
    public void setLocale(String locale) throws LocaleNotLoadedException {
        if (!LocaleService.getAvailableLocales().contains(locale)) {
            throw new LocaleNotLoadedException(locale);
        }
        this.locale = locale;
    }

    /**
     * Returns the active locale id of this service.
     *
     * @return the locale, e.g. {@code fi}
     */
    public String getLocale() {
        return locale;
    }

    /**
     * Returns the localized full name of the given month, or {@code null} if
     * the locale data could not be loaded.
     *
     * @param month
     *            the month, {@code 0} is January
     * @return the localized month name, or {@code null} on failure
     */
    public String getMonth(int month) {
        try {
            return LocaleService.getMonthNames(locale)[month];
        } catch (final LocaleNotLoadedException e) {
            getLogger().log(Level.SEVERE, "Error in getMonth", e);
            return null;
        }
    }

    /**
     * Returns the localized short name of the given month, or {@code null} if
     * the locale data could not be loaded.
     *
     * @param month
     *            the month, {@code 0} is January
     * @return the localized short month name, or {@code null} on failure
     */
    public String getShortMonth(int month) {
        try {
            return LocaleService.getShortMonthNames(locale)[month];
        } catch (final LocaleNotLoadedException e) {
            getLogger().log(Level.SEVERE, "Error in getShortMonth", e);
            return null;
        }
    }

    /**
     * Returns the localized name of the given day, or {@code null} if the
     * locale data could not be loaded.
     *
     * @param day
     *            the day, {@code 0} is {@code SUNDAY}
     * @return the localized day name, or {@code null} on failure
     */
    public String getDay(int day) {
        try {
            return LocaleService.getDayNames(locale)[day];
        } catch (final LocaleNotLoadedException e) {
            getLogger().log(Level.SEVERE, "Error in getDay", e);
            return null;
        }
    }

    /**
     * Returns the localized short name of the specified day.
     *
     * @param day
     *            the day, {@code 0} is {@code SUNDAY}
     * @return the localized short name, or {@code null} on failure
     */
    public String getShortDay(int day) {
        try {
            return LocaleService.getShortDayNames(locale)[day];
        } catch (final LocaleNotLoadedException e) {
            getLogger().log(Level.SEVERE, "Error in getShortDay", e);
            return null;
        }
    }

    /**
     * Returns the first day of the week, according to the used Locale.
     *
     * @return the localized first day of the week, {@code 0} is {@code SUNDAY}
     */
    public int getFirstDayOfWeek() {
        try {
            return LocaleService.getFirstDayOfWeek(locale);
        } catch (final LocaleNotLoadedException e) {
            getLogger().log(Level.SEVERE, "Error in getFirstDayOfWeek", e);
            // Fall back to SUNDAY on missing locale data
            return 0;
        }
    }

    /**
     * Returns whether the locale has twelve hour, or twenty four hour clock.
     *
     * @return {@code true} if the locale has twelve hour clock, {@code false}
     *         for twenty four clock
     */
    public boolean isTwelveHourClock() {
        try {
            return LocaleService.isTwelveHourClock(locale);
        } catch (final LocaleNotLoadedException e) {
            getLogger().log(Level.SEVERE, "Error in isTwelveHourClock", e);
            return false;
        }
    }

    /**
     * Returns the clock delimiter of the locale, e.g. {@code :}.
     * <p>
     * NOTE(review): the method name misspells "delimiter"; it is public API so
     * it is kept for backwards compatibility.
     *
     * @return the localized clock delimiter, {@code ":"} on failure
     */
    public String getClockDelimeter() {
        try {
            return LocaleService.getClockDelimiter(locale);
        } catch (final LocaleNotLoadedException e) {
            getLogger().log(Level.SEVERE, "Error in getClockDelimiter", e);
            return ":";
        }
    }

    private static final String[] DEFAULT_AMPM_STRINGS = { "AM", "PM" };

    /**
     * Returns the localized AM/PM markers, falling back to {@code AM}/
     * {@code PM} if the locale data could not be loaded.
     *
     * @return a two-element array: AM marker, PM marker
     */
    public String[] getAmPmStrings() {
        try {
            return LocaleService.getAmPmStrings(locale);
        } catch (final LocaleNotLoadedException e) {
            // TODO can this practically even happen? Should die instead?
            getLogger().log(Level.SEVERE,
                    "Locale not loaded, using fallback : AM/PM", e);
            return DEFAULT_AMPM_STRINGS;
        }
    }

    /**
     * Returns the offset (0..6) of the first day of the specified
     * {@code month} from the locale's first day of week, i.e. the column index
     * the 1st of the month lands on in a calendar grid.
     *
     * @param month
     *            any date within the month, not {@code null}
     * @return the zero-based column offset of the month's first day
     */
    public int getStartWeekDay(Date month) {
        final Date dateForFirstOfThisMonth = new Date(month.getYear(),
                month.getMonth(), 1);
        int firstDay;
        try {
            firstDay = LocaleService.getFirstDayOfWeek(locale);
        } catch (final LocaleNotLoadedException e) {
            getLogger().log(Level.SEVERE, "Locale not loaded, using fallback 0",
                    e);
            firstDay = 0;
        }
        int start = dateForFirstOfThisMonth.getDay() - firstDay;
        if (start < 0) {
            start += 7;
        }
        return start;
    }

    /**
     * Replaces the millisecond part of the given date in place.
     * <p>
     * NOTE(review): for dates before the epoch the truncation rounds toward
     * zero, so the result may be off by one second — confirm whether pre-1970
     * dates are expected here.
     *
     * @param date
     *            the date to modify, not {@code null}
     * @param ms
     *            the new millisecond part, 0..999
     */
    public static void setMilliseconds(Date date, int ms) {
        date.setTime(date.getTime() / 1000 * 1000 + ms);
    }

    /**
     * Returns the millisecond part of the given date, or {@code 0} for a
     * {@code null} date. See the pre-epoch caveat on
     * {@link #setMilliseconds(Date, int)}.
     *
     * @param date
     *            the date, may be {@code null}
     * @return the millisecond part, 0..999 for post-epoch dates
     */
    public static int getMilliseconds(Date date) {
        if (date == null) {
            return 0;
        }

        return (int) (date.getTime() - date.getTime() / 1000 * 1000);
    }

    /**
     * Returns the number of days in the month of the given date, taking leap
     * years into account.
     *
     * @param date
     *            the date, not {@code null}
     * @return the number of days (28..31)
     */
    public static int getNumberOfDaysInMonth(Date date) {
        final int month = date.getMonth();
        if (month == 1 && isLeapYear(date)) {
            // February of a leap year
            return 29;
        }
        return maxDaysInMonth[month];
    }

    /**
     * Returns whether the year of the given date is a leap year.
     *
     * @param date
     *            the date, not {@code null}
     * @return {@code true} for a leap year
     */
    public static boolean isLeapYear(Date date) {
        // Instantiate the date for 1st March of that year
        final Date firstMarch = new Date(date.getYear(), 2, 1);

        // Go back 1 day
        final long firstMarchTime = firstMarch.getTime();
        final long lastDayTimeFeb = firstMarchTime - MILLISECONDS_PER_DAY;

        // Instantiate new Date with this time
        final Date febLastDay = new Date(lastDayTimeFeb);

        // Leap year iff the last day of February is the 29th
        return 29 == febLastDay.getDate();
    }

    /**
     * Returns whether the two dates fall on the same calendar day (year,
     * month and day of month all equal).
     */
    public static boolean isSameDay(Date d1, Date d2) {
        return (getDayInt(d1) == getDayInt(d2));
    }

    /**
     * Returns whether {@code date} is inside the inclusive range
     * {@code [rangeStart, rangeEnd]} when compared at the given resolution.
     * The endpoints are swapped first if given in reverse order.
     *
     * @param date
     *            the date to test, not {@code null}
     * @param rangeStart
     *            one end of the range, not {@code null}
     * @param rangeEnd
     *            the other end of the range, not {@code null}
     * @param resolution
     *            the resolution at which to compare; components finer than it
     *            are ignored
     * @return {@code true} if the date is inside the range
     */
    public static boolean isInRange(Date date, Date rangeStart, Date rangeEnd,
            DateResolution resolution) {
        Date s;
        Date e;
        if (rangeStart.after(rangeEnd)) {
            s = rangeEnd;
            e = rangeStart;
        } else {
            e = rangeEnd;
            s = rangeStart;
        }
        // Build comparable long keys component by component, stopping at the
        // requested resolution. Digits (from most significant): year, month,
        // day, hours, minutes, seconds.
        long start = s.getYear() * 10000000000l;
        long end = e.getYear() * 10000000000l;
        long target = date.getYear() * 10000000000l;

        if (resolution == DateResolution.YEAR) {
            return (start <= target && end >= target);
        }
        start += s.getMonth() * 100000000l;
        end += e.getMonth() * 100000000l;
        target += date.getMonth() * 100000000l;
        if (resolution == DateResolution.MONTH) {
            return (start <= target && end >= target);
        }
        start += s.getDate() * 1000000l;
        end += e.getDate() * 1000000l;
        target += date.getDate() * 1000000l;
        if (resolution == DateResolution.DAY) {
            return (start <= target && end >= target);
        }
        start += s.getHours() * 10000l;
        end += e.getHours() * 10000l;
        target += date.getHours() * 10000l;
        start += s.getMinutes() * 100l;
        end += e.getMinutes() * 100l;
        target += date.getMinutes() * 100l;
        start += s.getSeconds();
        end += e.getSeconds();
        target += date.getSeconds();
        return (start <= target && end >= target);
    }

    /**
     * Packs the year, month and day of the given date into a single int so
     * that two dates are on the same calendar day iff their encodings are
     * equal. Used only by {@link #isSameDay(Date, Date)}.
     * <p>
     * Fix: the previous implementation multiplied the packed value by
     * 1&nbsp;000&nbsp;000&nbsp;000, which silently overflowed {@code int};
     * because 10^9 is divisible by 2^9, encodings differing by a multiple of
     * 2^23 collided after the overflow. The multiplier carried no information
     * and has been removed.
     */
    private static int getDayInt(Date date) {
        final int y = date.getYear();
        final int m = date.getMonth();
        final int d = date.getDate();

        return (y + 1900) * 10000 + m * 100 + d;
    }

    /**
     * Returns the ISO-8601 week number of the given date.
     *
     * @param date
     *            The date for which the week number should be resolved
     * @return The ISO-8601 week number for {@literal date}
     */
    public static int getISOWeekNumber(Date date) {
        int dayOfWeek = date.getDay(); // 0 == sunday

        // ISO 8601 use weeks that start on monday so we use
        // mon=1,tue=2,...sun=7;
        if (dayOfWeek == 0) {
            dayOfWeek = 7;
        }

        // Find nearest thursday (defines the week in ISO 8601). The week number
        // for the nearest thursday is the same as for the target date.
        int nearestThursdayDiff = 4 - dayOfWeek; // 4 is thursday
        Date nearestThursday = new Date(
                date.getTime() + nearestThursdayDiff * MILLISECONDS_PER_DAY);

        Date firstOfJanuary = new Date(nearestThursday.getYear(), 0, 1);
        long timeDiff = nearestThursday.getTime() - firstOfJanuary.getTime();

        // Rounding the result, as the division doesn't result in an integer
        // when the given date is inside daylight saving time period.
        int daysSinceFirstOfJanuary = (int) Math
                .round((double) timeDiff / MILLISECONDS_PER_DAY);

        int weekNumber = (daysSinceFirstOfJanuary) / 7 + 1;

        return weekNumber;
    }

    /**
     * Check if format contains the month name. If it does we manually convert
     * it to the month name since DateTimeFormat.format always uses the current
     * locale and will replace the month name wrong if current locale is
     * different from the locale set for the DateField.
     *
     * MMMM is converted into long month name, MMM is converted into short month
     * name. '' are added around the name to avoid that DateTimeFormat parses
     * the month name as a pattern.
     *
     * @param date
     *            The date to convert
     * @param formatStr
     *            The format string that might contain MMM or MMMM
     * @return the formatted date string
     */
    public String formatDate(Date date, String formatStr) {
        return formatDate(date, formatStr, null);
    }

    /**
     * Check if format contains the month name. If it does we manually convert
     * it to the month name since DateTimeFormat.format always uses the current
     * locale and will replace the month name wrong if current locale is
     * different from the locale set for the DateField.
     *
     * MMMM is converted into long month name, MMM is converted into short month
     * name. '' are added around the name to avoid that DateTimeFormat parses
     * the month name as a pattern.
     *
     * z is converted into the time zone name, using the specified
     * {@code timeZoneJSON}
     *
     * @param date
     *            The date to convert
     * @param formatStr
     *            The format string that might contain {@code MMM} or
     *            {@code MMMM}
     * @param timeZone
     *            The {@link TimeZone} used to replace {@code z}, can be
     *            {@code null}
     *
     * @return the formatted date string
     * @since 8.2
     */
    public String formatDate(Date date, String formatStr, TimeZone timeZone) {
        /*
         * Format month and day names separately when locale for the
         * DateTimeService is not the same as the browser locale
         */
        formatStr = formatTimeZone(date, formatStr, timeZone);
        formatStr = formatMonthNames(date, formatStr);
        formatStr = formatDayNames(date, formatStr);

        // Format uses the browser locale
        DateTimeFormat format = DateTimeFormat.getFormat(formatStr);

        String result = format.format(date);

        return result;
    }

    // Replaces EEEE / EEE patterns with the quoted, locale-correct day name so
    // the browser-locale DateTimeFormat cannot substitute the wrong name.
    private String formatDayNames(Date date, String formatStr) {
        if (formatStr.contains("EEEE")) {
            String dayName = getDay(date.getDay());

            if (dayName != null) {
                /*
                 * Replace 4 or more E:s with the quoted day name. Also
                 * concatenate generated string with any other string prepending
                 * or following the EEEE pattern, i.e. 'EEEE'ta ' becomes 'DAYta
                 * ' and not 'DAY''ta ', 'ab'EEEE becomes 'abDAY', 'x'EEEE'y'
                 * becomes 'xDAYy'.
                 */
                formatStr = formatStr.replaceAll("'([E]{4,})'", dayName);
                formatStr = formatStr.replaceAll("([E]{4,})'", "'" + dayName);
                formatStr = formatStr.replaceAll("'([E]{4,})", dayName + "'");
                formatStr = formatStr.replaceAll("[E]{4,}",
                        "'" + dayName + "'");
            }
        }

        if (formatStr.contains("EEE")) {
            String dayName = getShortDay(date.getDay());

            if (dayName != null) {
                /*
                 * Replace 3 or more E:s with the quoted month name. Also
                 * concatenate generated string with any other string prepending
                 * or following the EEE pattern, i.e. 'EEE'ta ' becomes 'DAYta '
                 * and not 'DAY''ta ', 'ab'EEE becomes 'abDAY', 'x'EEE'y'
                 * becomes 'xDAYy'.
                 */
                formatStr = formatStr.replaceAll("'([E]{3,})'", dayName);
                formatStr = formatStr.replaceAll("([E]{3,})'", "'" + dayName);
                formatStr = formatStr.replaceAll("'([E]{3,})", dayName + "'");
                formatStr = formatStr.replaceAll("[E]{3,}",
                        "'" + dayName + "'");
            }
        }

        return formatStr;
    }

    // Replaces MMMM / MMM patterns with the quoted, locale-correct month name;
    // same quoting rules as formatDayNames.
    private String formatMonthNames(Date date, String formatStr) {
        if (formatStr.contains("MMMM")) {
            String monthName = getMonth(date.getMonth());

            if (monthName != null) {
                /*
                 * Replace 4 or more M:s with the quoted month name. Also
                 * concatenate generated string with any other string prepending
                 * or following the MMMM pattern, i.e. 'MMMM'ta ' becomes
                 * 'MONTHta ' and not 'MONTH''ta ', 'ab'MMMM becomes 'abMONTH',
                 * 'x'MMMM'y' becomes 'xMONTHy'.
                 */
                formatStr = formatStr.replaceAll("'([M]{4,})'", monthName);
                formatStr = formatStr.replaceAll("([M]{4,})'", "'" + monthName);
                formatStr = formatStr.replaceAll("'([M]{4,})", monthName + "'");
                formatStr = formatStr.replaceAll("[M]{4,}",
                        "'" + monthName + "'");
            }
        }

        if (formatStr.contains("MMM")) {
            String monthName = getShortMonth(date.getMonth());

            if (monthName != null) {
                /*
                 * Replace 3 or more M:s with the quoted month name. Also
                 * concatenate generated string with any other string prepending
                 * or following the MMM pattern, i.e. 'MMM'ta ' becomes 'MONTHta
                 * ' and not 'MONTH''ta ', 'ab'MMM becomes 'abMONTH', 'x'MMM'y'
                 * becomes 'xMONTHy'.
                 */
                formatStr = formatStr.replaceAll("'([M]{3,})'", monthName);
                formatStr = formatStr.replaceAll("([M]{3,})'", "'" + monthName);
                formatStr = formatStr.replaceAll("'([M]{3,})", monthName + "'");
                formatStr = formatStr.replaceAll("[M]{3,}",
                        "'" + monthName + "'");
            }
        }

        return formatStr;
    }

    // Substitutes the time zone short name for unquoted 'z' pattern letters,
    // when a TimeZone is available.
    private String formatTimeZone(Date date, String formatStr,
            TimeZone timeZone) {
        // if 'z' is found outside quotes and timeZone is used
        if (getIndexOf(formatStr, 'z') != -1 && timeZone != null) {
            return replaceTimeZone(formatStr, timeZone.getShortName(date));
        }
        return formatStr;
    }

    /**
     * Replaces the {@code z} characters of the specified {@code formatStr} with
     * the given {@code timeZoneName}.
     *
     * @param formatStr
     *            The format string, which is the pattern describing the date
     *            and time format
     * @param timeZoneName
     *            the time zone name
     * @return the format string, with {@code z} replaced (if found)
     */
    private static String replaceTimeZone(String formatStr,
            String timeZoneName) {
        // search for 'z' outside the quotes (inside quotes is escaped)
        int start = getIndexOf(formatStr, 'z');
        if (start == -1) {
            return formatStr;
        }

        // if there are multiple consecutive 'z', treat them as one
        int end = start;
        while (end + 1 < formatStr.length()
                && formatStr.charAt(end + 1) == 'z') {
            end++;
        }
        return formatStr.substring(0, start) + "'" + timeZoneName + "'"
                + formatStr.substring(end + 1);
    }

    /**
     * Returns the first index of the specified {@code ch}, which is outside the
     * quotes, or {@code -1} if not found.
     */
    private static int getIndexOf(String str, char ch) {
        boolean inQuote = false;
        for (int i = 0; i < str.length(); i++) {
            char c = str.charAt(i);
            if (c == '\'') {
                // '' inside a pattern is an escaped literal quote; skip it
                if (i + 1 < str.length() && str.charAt(i + 1) == '\'') {
                    i++;
                } else {
                    inQuote ^= true;
                }
            } else if (c == ch && !inQuote) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Replaces month names in the entered date with the name in the current
     * browser locale.
     * <p>
     * NOTE(review): the service-locale month names are used as regex patterns
     * in {@code replaceAll}; locales whose month names contain regex
     * metacharacters would mis-replace — confirm whether that can occur.
     *
     * @param enteredDate
     *            Date string e.g. "5 May 2010"
     * @param formatString
     *            Format string e.g. "d M yyyy"
     * @return The date string where the month names have been replaced by the
     *         browser locale version
     */
    private String parseMonthName(String enteredDate, String formatString) {
        LocaleInfo browserLocale = LocaleInfo.getCurrentLocale();
        if (browserLocale.getLocaleName().equals(getLocale())) {
            // No conversion needs to be done when locales match
            return enteredDate;
        }
        String[] browserMonthNames = browserLocale.getDateTimeConstants()
                .months();
        String[] browserShortMonthNames = browserLocale.getDateTimeConstants()
                .shortMonths();

        if (formatString.contains("MMMM")) {
            // Full month name
            for (int i = 0; i < 12; i++) {
                enteredDate = enteredDate.replaceAll(getMonth(i),
                        browserMonthNames[i]);
            }
        }
        if (formatString.contains("MMM")) {
            // Short month name
            for (int i = 0; i < 12; i++) {
                enteredDate = enteredDate.replaceAll(getShortMonth(i),
                        browserShortMonthNames[i]);
            }
        }
        return enteredDate;
    }

    /**
     * Parses the given date string using the given format string and the locale
     * set in this DateTimeService instance.
     *
     * @param dateString
     *            Date string e.g. "1 February 2010"
     * @param formatString
     *            Format string e.g. "d MMMM yyyy"
     * @param lenient
     *            true to use lenient parsing, false to use strict parsing
     * @return A Date object representing the dateString. Never returns null.
     * @throws IllegalArgumentException
     *             if the parsing fails
     *
     */
    public Date parseDate(String dateString, String formatString,
            boolean lenient) throws IllegalArgumentException {
        /* DateTimeFormat uses the browser's locale */
        DateTimeFormat format = DateTimeFormat.getFormat(formatString);

        /*
         * Parse month names separately when locale for the DateTimeService is
         * not the same as the browser locale
         */
        dateString = parseMonthName(dateString, formatString);

        Date date;

        if (lenient) {
            date = format.parse(dateString);
        } else {
            date = format.parseStrict(dateString);
        }

        // Some version of Firefox sets the timestamp to 0 if parsing fails.
        if (date != null && date.getTime() == 0) {
            throw new IllegalArgumentException(
                    "Parsing of '" + dateString + "' failed");
        }

        return date;
    }

    private static Logger getLogger() {
        return Logger.getLogger(DateTimeService.class.getName());
    }
}
package Controller;

import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.io.File;

import javax.swing.JComboBox;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.Timer;

import Model.Model;

/**
 * Controller for Assignment 2 - HCI
 * 2/15/17
 * @author Emily Black
 * Navigation Ways:
 * 1) Buttons
 * 2) ComboBox
 * 3) Keybindings (Control Left/Right arrow)
 * 4) Click on images (Left/Right mouse button)
 * 5) Slideshow
 */
public class Controller implements ActionListener, ComponentListener, MouseListener {

	/** Slideshow tick interval in milliseconds. */
	private static final int SLIDESHOW_DELAY_MS = 1000;

	private final Model model;
	private Timer timer; /* Timer for slideshow */
	@SuppressWarnings({ "rawtypes", "unused" })
	private JComboBox comboBox;

	public Controller( final Model model )
	{
		this.model = model;
	}

	/**
	 * Dispatches every button/keybinding/combo-box action by the name assigned
	 * to the source component by the view.
	 */
	@Override
	public void actionPerformed(ActionEvent e) {
		String name = ((Component) e.getSource()).getName();
		if (name == null) {
			// Defensive: unnamed components are not wired to this controller.
			return;
		}
		switch (name) {
		case "help":
			showHelp((Component) e.getSource());
			break;
		// NOTE: the view names the left-arrow keybinding "nextKeybind" and the
		// right-arrow one "prevKeybind"; the mapping below preserves that.
		case "left":
		case "nextKeybind":
			showPrevious();
			break;
		case "right":
		case "prevKeybind":
			showNext();
			break;
		case "slideshow":
			toggleSlideshow();
			break;
		case "comboBox": {
			@SuppressWarnings("rawtypes")
			JComboBox box = (JComboBox) e.getSource();
			jumpToImage((String) box.getSelectedItem());
			break;
		}
		case "import":
			importDirectory();
			break;
		default:
			break;
		}
	}

	/* Shows the usage dialog listing the five navigation techniques. */
	private void showHelp(Component source) {
		JOptionPane.showMessageDialog(source.getParent(),
				"Click the import button at the top right to import a directory and get started."
				+ '\n' + "The 5 different navigation techniques include:" + '\n' + '\n'
				+ "1) Buttons" + '\n'
				+ "2) ComboBox" + '\n'
				+ "3) Keybindings (Control Left/Right Arrow)" + '\n'
				+ "4) Click on Image (Left/Right Click)" + '\n'
				+ "5) Slideshow (Click to Toggle ON/OFF)");
	}

	/* Moves the pointer one image to the left, wrapping to the last image. */
	private void showPrevious() {
		if (model.getFilteredImages() == null) {
			return; // Nothing imported yet
		}
		if (model.getPointer() == 0) {
			/* Reached the far left: wrap around */
			model.setPointer(model.getFilteredImages().length - 1);
		} else {
			model.setPointer(model.getPointer() - 1);
		}
		model.notifyListeners();
	}

	/* Moves the pointer one image to the right, wrapping to the first image. */
	private void showNext() {
		if (model.getFilteredImages() == null) {
			return; // Nothing imported yet
		}
		if (model.getPointer() == model.getFilteredImages().length - 1) {
			/* Reached the far right: wrap around */
			model.setPointer(0);
		} else {
			model.setPointer(model.getPointer() + 1);
		}
		model.notifyListeners();
	}

	/*
	 * Starts the slideshow timer if it is off (and images are loaded), stops
	 * it if it is running.
	 */
	private void toggleSlideshow() {
		if (!model.getSlideShowToggle() && model.getFilteredImages() != null) {
			timer = new Timer(SLIDESHOW_DELAY_MS, new ActionListener() {
				@Override
				public void actionPerformed(ActionEvent e) {
					showNext();
				}
			});
			timer.start();
			model.setSlideShowToggle(true);
		} else if (model.getSlideShowToggle()) {
			timer.stop();
			model.setSlideShowToggle(false);
		}
	}

	/* Jumps to the image selected in the combo box, if the model knows it. */
	private void jumpToImage(String imageName) {
		int result = model.containsFile(imageName);
		if (result != -1) {
			model.setPointer(result);
		}
		// Original behaviour: listeners are notified even for an unknown name.
		model.notifyListeners();
	}

	/*
	 * Opens a directory chooser and loads every .jpg/.png/.gif file from the
	 * chosen directory into the model.
	 */
	private void importDirectory() {
		JFileChooser importImage = new JFileChooser();
		importImage.setBounds(100, 100, 600, 475);
		importImage.setCurrentDirectory(new java.io.File(".")); /* Starts user off where they are at */
		importImage.setAcceptAllFileFilterUsed(false); /* Gets rid of all files option */
		importImage.setDialogTitle("Emily's Awesome Image Viewer");
		importImage.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);

		int status = importImage.showOpenDialog(null);
		if (status == JFileChooser.APPROVE_OPTION) {
			File[] images = importImage.getSelectedFile().listFiles();
			// Fix: listFiles() returns null on I/O error or a non-directory;
			// the original dereferenced it unconditionally (NPE).
			if (images == null) {
				return;
			}
			/* Count the images so the model can size its array */
			int counter = 0;
			for (File x : images) {
				if (isImageFile(x)) {
					counter++;
				}
			}
			model.setFilteredImagesSize(counter);

			/* Second pass: store the image paths in order */
			int i = 0; /* Cannot be greater than counter */
			for (File x : images) {
				if (isImageFile(x)) {
					model.setSpecificElementFilteredImages(i, x.getPath());
					i++;
				}
			}
			model.notifyListeners();
		} else if (status == JFileChooser.CANCEL_OPTION) {
			importImage.cancelSelection();
		}
	}

	/* Returns true for non-directory files with a supported image extension. */
	private static boolean isImageFile(File file) {
		if (file.isDirectory()) {
			return false;
		}
		String name = file.getName().toLowerCase();
		return name.endsWith(".jpg") || name.endsWith(".png")
				|| name.endsWith(".gif");
	}

	@Override
	public void componentResized(ComponentEvent e) {
		if (model.getFilteredImages() != null) {
			model.notifyListeners(); // resizes imagelabel
		}
	}

	@Override
	public void componentMoved(ComponentEvent e) {
		// Not used
	}

	@Override
	public void componentShown(ComponentEvent e) {
		// Not used
	}

	@Override
	public void componentHidden(ComponentEvent e) {
		// Not used
	}

	/** Left click navigates backwards, right click navigates forwards. */
	@Override
	public void mouseClicked(MouseEvent e) {
		int buttonClicked = e.getButton();
		if (buttonClicked == MouseEvent.BUTTON1) {
			/* Left mouse button */
			showPrevious();
		} else if (buttonClicked == MouseEvent.BUTTON3) {
			/* Right mouse button */
			showNext();
		}
	}

	@Override
	public void mousePressed(MouseEvent e) {
		// Not used
	}

	@Override
	public void mouseReleased(MouseEvent e) {
		// Not used
	}

	@Override
	public void mouseEntered(MouseEvent e) {
		// Not used
	}

	@Override
	public void mouseExited(MouseEvent e) {
		// Not used
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.ode.bpel.rtrep.v2;

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ode.bpel.common.FaultException;
import org.apache.ode.bpel.evt.VariableModificationEvent;
import org.apache.ode.bpel.rtrep.v2.channels.FaultData;
import org.apache.ode.bpel.rtrep.v2.channels.ParentScopeChannel;
import org.apache.ode.bpel.rtrep.v2.channels.ParentScopeChannelListener;
import org.apache.ode.bpel.rtrep.v2.channels.TerminationChannel;
import org.apache.ode.bpel.rtrep.v2.channels.TerminationChannelListener;
import org.apache.ode.bpel.evar.ExternalVariableModuleException;
import org.apache.ode.jacob.ChannelListener;
import org.apache.ode.jacob.SynchChannel;
import org.apache.ode.utils.DOMUtils;
import org.apache.ode.utils.stl.FilterIterator;
import org.apache.ode.utils.stl.MemberOfFunction;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;

/**
 * Runtime implementation of the BPEL {@code <forEach>} activity. Evaluates the
 * start/final counter expressions, spawns one inner-scope child per counter
 * value (all at once when parallel, one at a time when sequential), and
 * completes when the children are done or the completion condition is met.
 * Scheduling is done through the Jacob channel framework: state transitions
 * re-post {@code instance(new ACTIVE())} continuations rather than looping.
 */
public class FOREACH extends ACTIVITY {
    private static final long serialVersionUID = 1L;
    private static final Log __log = LogFactory.getLog(FOREACH.class);

    private OForEach _oforEach;
    // Every child created so far; completed ones are filtered out by active().
    private Set<ChildInfo> _children = new HashSet<ChildInfo>();
    // Compensation handlers collected from completed children, forwarded to
    // the parent on completion.
    private Set<CompensationHandler> _compHandlers = new HashSet<CompensationHandler>();
    private int _startCounter = -1;
    private int _finalCounter = -1;
    // Counter value assigned to the next child; incremented in newChild().
    private int _currentCounter = -1;
    // Number of branches counted toward the completion condition so far.
    private int _completedCounter = 0;
    // Branch count from the completionCondition, or -1 if none was declared.
    private int _completionCounter = -1;

    public FOREACH(ActivityInfo self, ScopeFrame frame, LinkFrame linkFrame) {
        super(self,frame, linkFrame);
        _oforEach = (OForEach) self.o;
    }

    /**
     * Entry point: evaluates the counter expressions, validates the completion
     * condition, then either completes immediately (empty range) or spawns the
     * children and enters the ACTIVE state.
     */
    public void run() {
        try {
            _startCounter = evaluateCondition(_oforEach.startCounterValue);
            _finalCounter = evaluateCondition(_oforEach.finalCounterValue);
            if (_oforEach.completionCondition != null) {
                _completionCounter = evaluateCondition(_oforEach.completionCondition.branchCount);
            }
            _currentCounter = _startCounter;
        } catch (FaultException fe) {
            // Counter expressions failed to evaluate: fault the whole activity.
            __log.error(fe);
            _self.parent.completed(createFault(fe.getQName(), _self.o), _compHandlers);
            return;
        }

        // Checking for bpws:invalidBranchCondition when the counter limit is
        // superior to the maximum number of children
        if (_completionCounter > 0 && _completionCounter > _finalCounter - _startCounter) {
            _self.parent.completed(
                    createFault(_oforEach.getOwner().constants.qnInvalidBranchCondition, _self.o),
                    _compHandlers);
            return;
        }

        // There's really nothing to do (empty range, or completion condition
        // already satisfied at zero branches).
        if (_finalCounter < _startCounter || _completionCounter == 0) {
            _self.parent.completed(null, _compHandlers);
        } else {
            // If we're parallel, starting all our child copies, otherwise one
            // will suffice (the next is spawned as each child completes).
            if (_oforEach.parallel) {
                for (int m = _startCounter; m <= _finalCounter; m++) {
                    newChild();
                }
            } else newChild();
            instance(new ACTIVE());
        }
    }

    /**
     * Steady state: waits on the termination channel and on each active
     * child's parent-scope channel, re-posting itself after every event until
     * no child remains active.
     */
    private class ACTIVE extends BpelJacobRunnable {
        private static final long serialVersionUID = -5642862698981385732L;

        // First fault reported by any child; propagated to the parent at the end.
        private FaultData _fault;
        private boolean _terminateRequested = false;

        public void run() {
            Iterator<ChildInfo> active = active();
            // Continuing as long as a child is active (fresh iterator for the
            // emptiness check; 'active' is consumed by the listener loop below)
            if (active().hasNext()) {
                Set<ChannelListener> mlSet = new HashSet<ChannelListener>();

                mlSet.add(new TerminationChannelListener(_self.self) {
                    private static final long serialVersionUID = 2554750257484084466L;

                    public void terminate() {
                        // Terminating all children before sepuku
                        for (Iterator<ChildInfo> i = active(); i.hasNext(); )
                            replication(i.next().activity.self).terminate();
                        _terminateRequested = true;
                        instance(ACTIVE.this);
                    }
                });

                for (;active.hasNext();) {
                    // Checking out our children
                    final ChildInfo child = active.next();
                    mlSet.add(new ParentScopeChannelListener(child.activity.parent) {
                        private static final long serialVersionUID = -8027205709961438172L;

                        public void compensate(OScope scope, SynchChannel ret) {
                            // Forward compensation to parent
                            _self.parent.compensate(scope, ret);
                            instance(ACTIVE.this);
                        }

                        public void completed(FaultData faultData, Set<CompensationHandler> compensations) {
                            child.completed = true;
                            // NOTE(review): with successfulBranchesOnly set,
                            // this increments the completion counter only when
                            // the branch FAULTED (faultData != null) — that
                            // looks inverted relative to the flag's name;
                            // confirm against the BPEL spec before changing.
                            if (_completionCounter > 0 && _oforEach.completionCondition.successfulBranchesOnly) {
                                if (faultData != null) _completedCounter++;
                            } else _completedCounter++;

                            _compHandlers.addAll(compensations);

                            // Keeping the fault to let everybody know
                            if (faultData != null && _fault == null) {
                                _fault = faultData;
                            }
                            if (shouldContinue() && _fault == null && !_terminateRequested) {
                                // Everything fine. If parallel, just let our
                                // children be, otherwise making a new child
                                if (!_oforEach.parallel) newChild();
                            } else {
                                // Work is done or something wrong happened,
                                // children shouldn't continue
                                for (Iterator<ChildInfo> i = active(); i.hasNext(); )
                                    replication(i.next().activity.self).terminate();
                            }
                            instance(ACTIVE.this);
                        }

                        public void cancelled() { completed(null, CompensationHandler.emptySet()); }
                        public void failure(String reason, Element data) { completed(null, CompensationHandler.emptySet()); }
                    });
                }
                object(false,mlSet);
            } else {
                // No children left, either because they've all been executed
                // or because we had to make them stop.
                _self.parent.completed(_fault, _compHandlers);
            }
        }
    }

    /**
     * Returns true while more iterations are needed: neither the completion
     * condition nor the final counter has been reached yet.
     */
    private boolean shouldContinue() {
        boolean stop = false;
        if (_completionCounter > 0) {
            stop = (_completedCounter >= _completionCounter) || stop;
        }
        stop = (_startCounter + _completedCounter > _finalCounter) || stop;
        return !stop;
    }

    /**
     * Evaluates a counter expression to an int, rethrowing any evaluation
     * failure as the forEachCounterError BPEL fault.
     */
    private int evaluateCondition(OExpression condition)
            throws FaultException {
        try {
            return getBpelRuntime().getExpLangRuntime().
                    evaluateAsNumber(condition, getEvaluationContext()).intValue();
        } catch (FaultException e) {
            String msg;
            msg = "ForEach counter value couldn't be evaluated as xs:unsignedInt.";
            __log.error(msg, e);
            throw new FaultException(_oforEach.getOwner().constants.qnForEachCounterError,msg, e);
        }
    }

    /**
     * Creates one child iteration: builds a fresh scope frame for the inner
     * scope, injects the current counter value as the counter variable, emits
     * a variable-modification event and schedules the child SCOPE.
     */
    private void newChild() {
        ChildInfo child = new ChildInfo(new ActivityInfo(genMonotonic(),
                _oforEach.innerScope,
                newChannel(TerminationChannel.class), newChannel(ParentScopeChannel.class)));
        _children.add(child);

        // Creating the current counter value node (post-increment: next child
        // gets the next value)
        Document doc = DOMUtils.newDocument();
        Node counterNode = doc.createTextNode(""+_currentCounter++);

        // Instantiating the scope directly to keep control of its scope frame,
        // allows the introduction of the counter variable in there (monkey
        // business that is).
        ScopeFrame newFrame = new ScopeFrame(
                _oforEach.innerScope,
                getBpelRuntime().createScopeInstance(_scopeFrame.scopeInstanceId, _oforEach.innerScope),
                _scopeFrame, null);
        VariableInstance vinst = newFrame.resolve(_oforEach.counterVariable);

        try {
            initializeVariable(vinst, counterNode);
        } catch (ExternalVariableModuleException e) {
            __log.error("Exception while initializing external variable", e);
            _self.parent.failure(e.toString(), null);
            return;
        }

        // Generating event
        VariableModificationEvent se = new VariableModificationEvent(vinst.declaration.name);
        se.setNewValue(counterNode);
        if (_oforEach.debugInfo != null)
            se.setLineNo(_oforEach.debugInfo.startLine);
        sendEvent(se);

        instance(new SCOPE(child.activity, newFrame, _linkFrame));
    }

    // NOTE(review): the "Flow" label looks like a copy/paste leftover from the
    // FLOW activity; left as-is since log output may be parsed downstream.
    public String toString() {
        return "<T:Act:Flow:" + _oforEach.name + ">";
    }

    /** Returns an iterator over the children that have not yet completed. */
    private Iterator<ChildInfo> active() {
        return new FilterIterator<ChildInfo>(_children.iterator(),
                new MemberOfFunction<ChildInfo>() {
                    public boolean isMember(ChildInfo childInfo) {
                        return !childInfo.completed;
                    }
                });
    }
}
/* * Copyright 2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.data.gclouddatastore.repository; import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.Charset; import java.time.Instant; import java.time.LocalDateTime; import java.time.OffsetDateTime; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import com.google.cloud.datastore.BlobValue; import com.google.cloud.datastore.EntityValue; import com.google.cloud.datastore.FullEntity; import com.google.cloud.datastore.IncompleteKey; import com.google.cloud.datastore.LatLng; import com.google.cloud.datastore.ListValue; import com.google.cloud.datastore.TimestampValue; import com.google.cloud.datastore.Value; import com.google.cloud.datastore.ValueType; import org.springframework.beans.BeanWrapper; import org.springframework.beans.PropertyAccessorFactory; public class Unmarshaller { public <K extends IncompleteKey> Object unmarshal( FullEntity<? extends IncompleteKey> entity) { Map<String, Object> newMap = new HashMap<>(); unmarshalToMap(entity, newMap); return newMap; } public <K extends IncompleteKey, T> T unmarshal( FullEntity<? 
extends IncompleteKey> entity, Class<T> clazz) { try { T obj = clazz.newInstance(); unmarshalToObject(entity, obj); return obj; } catch (InstantiationException | IllegalAccessException e) { throw new IllegalStateException(); } } @SuppressWarnings("unchecked") public <T> T unmarshal(Value<?> value, Class<T> clazz) { return (T) unmarshal(value); } public Object unmarshal(Value<?> value) { ValueType valueType = value.getType(); switch (valueType) { case BLOB: return ((BlobValue) value).get().toByteArray(); case BOOLEAN: case DOUBLE: case LAT_LNG: case LONG: case STRING: return value.get(); case ENTITY: FullEntity<? extends IncompleteKey> entity = ((EntityValue) value).get(); return unmarshal(entity); case KEY: throw new UnsupportedOperationException(valueType.toString()); case LIST: List<Object> newList = new ArrayList<>(); List<? extends Value<?>> list = ((ListValue) value).get(); for (Value<?> newValue : list) { newList.add(unmarshal(newValue)); } return newList; case NULL: return null; case RAW_VALUE: throw new UnsupportedOperationException(valueType.toString()); case TIMESTAMP: return ((TimestampValue) value).get().toSqlTimestamp().toInstant(); default: throw new RuntimeException("should never reach here"); } } public <K extends IncompleteKey> void unmarshalToMap(FullEntity<K> entity, Map<String, Object> map) { for (String name : entity.getNames()) { Value<?> value = entity.getValue(name); ValueType valueType = value.getType(); switch (valueType) { case ENTITY: if (map.containsKey(name)) { unmarshalToObject(entity.getEntity(name), map.get(name)); } else { Map<String, Object> newMap = new HashMap<>(); unmarshalToMap(entity.getEntity(name), newMap); map.put(name, newMap); } break; case BLOB: case BOOLEAN: case DOUBLE: case LAT_LNG: case LIST: case LONG: case NULL: case STRING: case TIMESTAMP: map.put(name, unmarshal(value)); break; case KEY: break; case RAW_VALUE: throw new UnsupportedOperationException(valueType.toString()); } } } @SuppressWarnings("unchecked") 
public <K extends IncompleteKey> void unmarshalToObject(FullEntity<K> entity, Object object) { if (object instanceof Map) { unmarshalToMap(entity, (Map<String, Object>) object); return; } BeanWrapper beanWrapper = PropertyAccessorFactory.forBeanPropertyAccess(object); for (String name : entity.getNames()) { Value<?> value = entity.getValue(name); ValueType valueType = value.getType(); Class<?> targetType = beanWrapper.getPropertyType(name); if (targetType == null) continue; switch (valueType) { case BLOB: if (targetType.isAssignableFrom(byte[].class)) { beanWrapper.setPropertyValue(name, unmarshal(value)); } else if (targetType.isAssignableFrom(String.class)) { beanWrapper.setPropertyValue(name, new String( unmarshal(value, byte[].class), Charset.forName("UTF-8"))); } break; case BOOLEAN: if (targetType.isAssignableFrom(Boolean.class) || targetType.isAssignableFrom(boolean.class)) { beanWrapper.setPropertyValue(name, unmarshal(value)); } break; case DOUBLE: if (targetType.isAssignableFrom(Double.class) || targetType.isAssignableFrom(double.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Number.class).doubleValue()); } else if (targetType.isAssignableFrom(Float.class) || targetType.isAssignableFrom(float.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Number.class).floatValue()); } else if (targetType.isAssignableFrom(Long.class) || targetType.isAssignableFrom(long.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Number.class).longValue()); } else if (targetType.isAssignableFrom(Integer.class) || targetType.isAssignableFrom(int.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Number.class).intValue()); } else if (targetType.isAssignableFrom(Short.class) || targetType.isAssignableFrom(short.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Number.class).shortValue()); } else if (targetType.isAssignableFrom(Byte.class) || targetType.isAssignableFrom(byte.class)) { beanWrapper.setPropertyValue(name, 
unmarshal(value, Number.class).byteValue()); } break; case LONG: if (targetType.isAssignableFrom(Long.class) || targetType.isAssignableFrom(long.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Number.class).longValue()); } else if (targetType.isAssignableFrom(Integer.class) || targetType.isAssignableFrom(int.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Number.class).intValue()); } else if (targetType.isAssignableFrom(Short.class) || targetType.isAssignableFrom(short.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Number.class).shortValue()); } else if (targetType.isAssignableFrom(Byte.class) || targetType.isAssignableFrom(byte.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Number.class).byteValue()); } else if (targetType.isAssignableFrom(Double.class) || targetType.isAssignableFrom(double.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Number.class).doubleValue()); } else if (targetType.isAssignableFrom(Float.class) || targetType.isAssignableFrom(float.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Number.class).floatValue()); } break; case STRING: if (targetType.isAssignableFrom(String.class)) { beanWrapper.setPropertyValue(name, unmarshal(value)); } else if (targetType.isAssignableFrom(byte[].class)) { beanWrapper.setPropertyValue(name, unmarshal(value, String.class) .getBytes(Charset.forName("UTF-8"))); } else if (targetType.isAssignableFrom(Long.class) || targetType.isAssignableFrom(long.class)) { beanWrapper.setPropertyValue(name, Long.decode(unmarshal(value, String.class))); } else if (targetType.isAssignableFrom(Integer.class) || targetType.isAssignableFrom(int.class)) { beanWrapper.setPropertyValue(name, Integer.decode(unmarshal(value, String.class))); } else if (targetType.isAssignableFrom(Short.class) || targetType.isAssignableFrom(short.class)) { beanWrapper.setPropertyValue(name, Short.decode(unmarshal(value, String.class))); } else if 
(targetType.isAssignableFrom(Byte.class) || targetType.isAssignableFrom(byte.class)) { beanWrapper.setPropertyValue(name, Byte.decode(unmarshal(value, String.class))); } else if (targetType.isAssignableFrom(Double.class) || targetType.isAssignableFrom(double.class)) { beanWrapper.setPropertyValue(name, Double.valueOf(unmarshal(value, String.class))); } else if (targetType.isAssignableFrom(Float.class) || targetType.isAssignableFrom(float.class)) { beanWrapper.setPropertyValue(name, Float.valueOf(unmarshal(value, String.class))); } else if (targetType.isAssignableFrom(URI.class) || targetType.isAssignableFrom(float.class)) { try { beanWrapper.setPropertyValue(name, new URI(unmarshal(value, String.class))); } catch (URISyntaxException e) { break; } } break; case ENTITY: if (targetType.isAssignableFrom(Map.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Map.class)); } else if (Map.class.isAssignableFrom(targetType)) { Map<String, Object> map = (Map<String, Object>) beanWrapper .getPropertyValue(name); if (map == null) { try { map = (Map<String, Object>) targetType.getConstructor() .newInstance(); } catch (ReflectiveOperationException e) { break; } } // map.clear(); unmarshalToMap((FullEntity<?>) value.get(), map); } else { // Bean Object targetObject = beanWrapper.getPropertyValue(name); if (targetObject == null) { try { targetObject = targetType.getConstructor().newInstance(); } catch (ReflectiveOperationException e) { break; } } unmarshalToObject(((EntityValue) value).get(), targetObject); } break; case KEY: break; case LAT_LNG: if (targetType.isAssignableFrom(LatLng.class)) { beanWrapper.setPropertyValue(name, unmarshal(value)); } else if (targetType.isAssignableFrom(com.google.type.LatLng.class)) { LatLng latLng = unmarshal(value, LatLng.class); beanWrapper.setPropertyValue(name, com.google.type.LatLng.newBuilder() .setLatitude(latLng.getLatitude()) .setLongitude(latLng.getLongitude()).build()); } break; case LIST: if 
(targetType.isAssignableFrom(List.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, List.class)); } else if (List.class.isAssignableFrom(targetType)) { List<Object> newList = (List<Object>) beanWrapper .getPropertyValue(name); if (newList == null) { try { newList = (List<Object>) targetType.getConstructor() .newInstance(); } catch (ReflectiveOperationException e) { break; } } newList.clear(); for (Object newValue : unmarshal(value, List.class)) { newList.add(newValue); } } break; case NULL: if (Object.class.isAssignableFrom(targetType)) { beanWrapper.setPropertyValue(name, null); } break; case RAW_VALUE: break; case TIMESTAMP: if (targetType.isAssignableFrom(Instant.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Instant.class)); } else if (targetType.isAssignableFrom(Date.class)) { beanWrapper.setPropertyValue(name, Date.from(unmarshal(value, Instant.class))); } else if (targetType.isAssignableFrom(Calendar.class)) { beanWrapper.setPropertyValue(name, new Calendar.Builder() .setInstant( Date.from(unmarshal(value, Instant.class))) .build()); } else if (targetType.isAssignableFrom(java.sql.Timestamp.class)) { beanWrapper.setPropertyValue(name, ((TimestampValue) value).get().toSqlTimestamp()); } else if (targetType.isAssignableFrom(LocalDateTime.class)) { beanWrapper.setPropertyValue(name, ((TimestampValue) value).get() .toSqlTimestamp().toLocalDateTime()); } else if (targetType.isAssignableFrom(OffsetDateTime.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Instant.class).atOffset(ZoneOffset.UTC)); } else if (targetType.isAssignableFrom(ZonedDateTime.class)) { beanWrapper.setPropertyValue(name, unmarshal(value, Instant.class).atZone(ZoneOffset.UTC)); } else if (targetType.isAssignableFrom(long.class) || targetType.isAssignableFrom(Long.class)) { beanWrapper.setPropertyValue(name, ((TimestampValue) value).get().getSeconds()); } break; } } } }
/**
 * Copyright (C) 2011-2012 Turn, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.turn.ttorrent.client;

import com.turn.ttorrent.client.announce.Announce;
import com.turn.ttorrent.client.announce.AnnounceException;
import com.turn.ttorrent.client.announce.AnnounceResponseListener;
import com.turn.ttorrent.client.peer.PeerActivityListener;
import com.turn.ttorrent.common.Peer;
import com.turn.ttorrent.common.Torrent;
import com.turn.ttorrent.common.protocol.PeerMessage;
import com.turn.ttorrent.common.protocol.TrackerMessage;
import com.turn.ttorrent.client.peer.SharingPeer;

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import java.util.BitSet;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Observable;
import java.util.Random;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.TreeSet;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A pure-java BitTorrent client.
 *
 * <p>
 * A BitTorrent client in its bare essence shares a given torrent. If the
 * torrent is not complete locally, it will continue to download it. If or after
 * the torrent is complete, the client may eventually continue to seed it for
 * other clients.
 * </p>
 *
 * <p>
 * This BitTorrent client implementation is made to be simple to embed and
 * simple to use. First, initialize a ShareTorrent object from a torrent
 * meta-info source (either a file or a byte array, see
 * com.turn.ttorrent.SharedTorrent for how to create a SharedTorrent object).
 * Then, instantiate your Client object with this SharedTorrent and call one of
 * {@link #download} to simply download the torrent, or {@link #share} to
 * download and continue seeding for the given amount of time after the download
 * completes.
 * </p>
 *
 * @author mpetazzoni
 */
public class Client extends Observable implements Runnable,
		AnnounceResponseListener, IncomingConnectionListener, PeerActivityListener {

	private static final Logger logger = LoggerFactory.getLogger(Client.class);

	/**
	 * Peers unchoking frequency, in seconds. Current BitTorrent specification
	 * recommends 10 seconds to avoid choking fibrilation.
	 */
	private static final int UNCHOKING_FREQUENCY = 3;

	/**
	 * Optimistic unchokes are done every 2 loop iterations, i.e. every
	 * 2*UNCHOKING_FREQUENCY seconds.
	 */
	private static final int OPTIMISTIC_UNCHOKE_ITERATIONS = 3;

	private static final int RATE_COMPUTATION_ITERATIONS = 2;
	private static final int MAX_DOWNLOADERS_UNCHOKE = 4;

	/** Default data output directory. */
	// private static final String DEFAULT_OUTPUT_DIRECTORY = "/tmp";

	// Lifecycle states advertised to observers.
	public enum ClientState {
		WAITING, VALIDATING, SHARING, SEEDING, ERROR, DONE;
	};

	// Azureus-style peer ID prefix identifying this client implementation.
	private static final String BITTORRENT_ID_PREFIX = "-TO0042-";

	// Torrent being exchanged and our current lifecycle state.
	private SharedTorrent torrent;
	private ClientState state;
	// Our own peer identity on the network.
	private Peer self;

	// Main loop thread, its stop flag, and the requested seed time (seconds;
	// 0 = stop after download, negative = seed until interrupted).
	private Thread thread;
	private boolean stop;
	private long seed;

	// Incoming connection handler and tracker announce thread.
	private ConnectionHandler service;
	private Announce announce;
	// All known peers (indexed by host identifier and/or hex peer ID) and the
	// subset we currently hold an open connection with.
	private ConcurrentMap<String, SharingPeer> peers;
	private ConcurrentMap<String, SharingPeer> connected;

	// Used to pick the optimistic-unchoke peer.
	private Random random;

	/**
	 * Initialize the BitTorrent client.
	 *
	 * @param address
	 *            The address to bind to.
	 * @param torrent
	 *            The torrent to download and share.
	 */
	public Client(InetAddress address, SharedTorrent torrent)
			throws UnknownHostException, IOException {
		this.torrent = torrent;
		this.state = ClientState.WAITING;

		// Peer ID: client prefix + last segment of a random UUID.
		String id = Client.BITTORRENT_ID_PREFIX
				+ UUID.randomUUID().toString().split("-")[4];

		// Initialize the incoming connection handler and register ourselves to
		// it.
		this.service = new ConnectionHandler(this.torrent, id, address);
		this.service.register(this);

		this.self = new Peer(
				this.service.getSocketAddress().getAddress().getHostAddress(),
				(short) this.service.getSocketAddress().getPort(),
				ByteBuffer.wrap(id.getBytes(Torrent.BYTE_ENCODING)));

		// Initialize the announce request thread, and register ourselves to it
		// as well.
		this.announce = new Announce(this.torrent, this.self);
		this.announce.register(this);

		logger.info("BitTorrent client [{}] for {} started and "
				+ "listening at {}:{}...",
				new Object[] { this.self.getShortHexPeerId(),
						this.torrent.getName(), this.self.getIp(),
						this.self.getPort() });

		this.peers = new ConcurrentHashMap<String, SharingPeer>();
		this.connected = new ConcurrentHashMap<String, SharingPeer>();
		this.random = new Random(System.currentTimeMillis());
	}

	/**
	 * Get this client's peer specification.
	 */
	public Peer getPeerSpec() {
		return this.self;
	}

	/**
	 * Return the torrent this client is exchanging on.
	 */
	public SharedTorrent getTorrent() {
		return this.torrent;
	}

	/**
	 * Returns the set of known peers.
	 */
	public Set<SharingPeer> getPeers() {
		return new HashSet<SharingPeer>(this.peers.values());
	}

	/**
	 * Change this client's state and notify its observers.
	 *
	 * <p>
	 * If the state has changed, this client's observers will be notified.
	 * </p>
	 *
	 * @param state
	 *            The new client state.
	 */
	private synchronized void setState(ClientState state) {
		if (this.state != state) {
			this.setChanged();
		}
		this.state = state;
		this.notifyObservers(this.state);
	}

	/**
	 * Return the current state of this BitTorrent client.
	 */
	public ClientState getState() {
		return this.state;
	}

	/**
	 * Download the torrent without seeding after completion.
	 */
	public void download() {
		this.share(0);
	}

	/**
	 * Download and share this client's torrent until interrupted.
	 */
	public void share() {
		this.share(-1);
	}

	/**
	 * Download and share this client's torrent.
	 *
	 * @param seed
	 *            Seed time in seconds after the download is complete. Pass
	 *            <code>0</code> to immediately stop after downloading.
	 */
	public synchronized void share(int seed) {
		this.seed = seed;
		this.stop = false;

		// Start the main loop thread only if it is not already running.
		if (this.thread == null || !this.thread.isAlive()) {
			this.thread = new Thread(this);
			this.thread.setName("bt-client(" + this.self.getShortHexPeerId() + ")");
			this.thread.start();
		}
	}

	/**
	 * Immediately but gracefully stop this client.
	 */
	public void stop() {
		this.stop(true);
	}

	/**
	 * Immediately but gracefully stop this client.
	 *
	 * @param wait
	 *            Whether to wait for the client execution thread to complete or
	 *            not. This allows for the client's state to be settled down in
	 *            one of the <tt>DONE</tt> or <tt>ERROR</tt> states when this
	 *            method returns.
	 */
	public void stop(boolean wait) {
		this.stop = true;

		if (this.thread != null && this.thread.isAlive()) {
			// Interrupt the main loop's sleep so it notices the stop flag.
			this.thread.interrupt();
			if (wait) {
				this.waitForCompletion();
			}
		}

		this.thread = null;
	}

	/**
	 * Wait for downloading (and seeding, if requested) to complete.
	 */
	public void waitForCompletion() {
		if (this.thread != null && this.thread.isAlive()) {
			try {
				this.thread.join();
			} catch (InterruptedException ie) {
				logger.error(ie.getMessage(), ie);
			}
		}
	}

	/**
	 * Tells whether we are a seed for the torrent we're sharing.
	 */
	public boolean isSeed() {
		return this.torrent.isComplete();
	}

	/**
	 * Main client loop.
	 *
	 * <p>
	 * The main client download loop is very simple: it starts the announce
	 * request thread, the incoming connection handler service, and loops
	 * unchoking peers every UNCHOKING_FREQUENCY seconds until told to stop.
	 * Every OPTIMISTIC_UNCHOKE_ITERATIONS, an optimistic unchoke will be
	 * attempted to try out other peers.
	 * </p>
	 *
	 * <p>
	 * Once done, it stops the announce and connection services, and returns.
	 * </p>
	 */
	@Override
	public void run() {
		// First, analyze the torrent's local data.
		try {
			this.setState(ClientState.VALIDATING);
			this.torrent.init();
		} catch (IOException ioe) {
			logger.warn("Error while initializing torrent data: {}!",
					ioe.getMessage(), ioe);
		} catch (InterruptedException ie) {
			logger.warn("Client was interrupted during initialization. "
					+ "Aborting right away.");
		} finally {
			// Bail out if validation failed or was interrupted.
			if (!this.torrent.isInitialized()) {
				try {
					this.service.close();
				} catch (IOException ioe) {
					logger.warn("Error while releasing bound channel: {}!",
							ioe.getMessage(), ioe);
				}

				this.setState(ClientState.ERROR);
				this.torrent.close();
				return;
			}
		}

		// Initial completion test
		if (this.torrent.isComplete()) {
			this.seed();
		} else {
			this.setState(ClientState.SHARING);
		}

		// Detect early stop
		if (this.stop) {
			logger.info("Download is complete and no seeding was requested.");
			this.finish();
			return;
		}

		this.announce.start();
		this.service.start();

		// Countdown counters for optimistic unchoke and rate reset cycles.
		int optimisticIterations = 0;
		int rateComputationIterations = 0;

		while (!this.stop) {
			optimisticIterations = (optimisticIterations == 0
					? Client.OPTIMISTIC_UNCHOKE_ITERATIONS
					: optimisticIterations - 1);

			rateComputationIterations = (rateComputationIterations == 0
					? Client.RATE_COMPUTATION_ITERATIONS
					: rateComputationIterations - 1);

			try {
				this.unchokePeers(optimisticIterations == 0);
				this.info();
				if (rateComputationIterations == 0) {
					this.resetPeerRates();
				}
			} catch (Exception e) {
				logger.error("An exception occurred during the BitTorrent "
						+ "client main loop execution!", e);
			}

			try {
				Thread.sleep(Client.UNCHOKING_FREQUENCY * 1000);
			} catch (InterruptedException ie) {
				logger.trace("BitTorrent main loop interrupted.");
			}
		}

		logger.debug("Stopping BitTorrent client connection service "
				+ "and announce threads...");
		this.service.stop();
		try {
			this.service.close();
		} catch (IOException ioe) {
			logger.warn("Error while releasing bound channel: {}!",
					ioe.getMessage(), ioe);
		}

		this.announce.stop();

		// Close all peer connections
		logger.debug("Closing all remaining peer connections...");
		for (SharingPeer peer : this.connected.values()) {
			peer.unbind(true);
		}

		this.finish();
	}

	/**
	 * Close torrent and set final client state before signing off.
	 */
	private void finish() {
		this.torrent.close();

		// Determine final state
		if (this.torrent.isFinished()) {
			this.setState(ClientState.DONE);
		} else {
			this.setState(ClientState.ERROR);
		}

		logger.info("BitTorrent client signing off.");
	}

	/**
	 * Display information about the BitTorrent client state.
	 *
	 * <p>
	 * This emits an information line in the log about this client's state. It
	 * includes the number of choked peers, number of connected peers, number of
	 * known peers, information about the torrent availability and completion
	 * and current transmission rates.
	 * </p>
	 */
	public synchronized void info() {
		// Aggregate download/upload rates across all connected peers.
		float dl = 0;
		float ul = 0;
		for (SharingPeer peer : this.connected.values()) {
			dl += peer.getDLRate().get();
			ul += peer.getULRate().get();
		}

		logger.info(
				"{} {}/{} pieces ({}%) [{}/{}] with {}/{} peers at {}/{} kB/s.",
				new Object[] { this.getState().name(),
						this.torrent.getCompletedPieces().cardinality(),
						this.torrent.getPieceCount(),
						String.format("%.2f", this.torrent.getCompletion()),
						this.torrent.getAvailablePieces().cardinality(),
						this.torrent.getRequestedPieces().cardinality(),
						this.connected.size(), this.peers.size(),
						String.format("%.2f", dl / 1024.0),
						String.format("%.2f", ul / 1024.0), });

		for (SharingPeer peer : this.connected.values()) {
			Piece piece = peer.getRequestedPiece();
			logger.debug(" | {} {}", peer,
					piece != null ? "(downloading " + piece + ")" : "");
		}
	}

	/**
	 * Reset peers download and upload rates.
	 *
	 * <p>
	 * This method is called every RATE_COMPUTATION_ITERATIONS to reset the
	 * download and upload rates of all peers. This contributes to making the
	 * download and upload rate computations rolling averages every
	 * UNCHOKING_FREQUENCY * RATE_COMPUTATION_ITERATIONS seconds (usually 20
	 * seconds).
	 * </p>
	 */
	private synchronized void resetPeerRates() {
		for (SharingPeer peer : this.connected.values()) {
			peer.getDLRate().reset();
			peer.getULRate().reset();
		}
	}

	/**
	 * Retrieve a SharingPeer object from the given peer specification.
	 *
	 * <p>
	 * This function tries to retrieve an existing peer object based on the
	 * provided peer specification or otherwise instantiates a new one and adds
	 * it to our peer repository.
	 * </p>
	 *
	 * @param search
	 *            The {@link Peer} specification.
	 */
	private SharingPeer getOrCreatePeer(Peer search) {
		SharingPeer peer;

		synchronized (this.peers) {
			logger.trace("Searching for {}...", search);

			// First, try to find the peer by its peer ID, if we have one.
			if (search.hasPeerId()) {
				peer = this.peers.get(search.getHexPeerId());
				if (peer != null) {
					logger.trace("Found peer (by peer ID): {}.", peer);
					// Also index this peer under both host identifiers.
					this.peers.put(peer.getHostIdentifier(), peer);
					this.peers.put(search.getHostIdentifier(), peer);
					return peer;
				}
			}

			// Otherwise, fall back to the host:port identifier.
			peer = this.peers.get(search.getHostIdentifier());
			if (peer != null) {
				// Learn the peer ID if the search specification carries one.
				if (search.hasPeerId()) {
					logger.trace("Recording peer ID {} for {}.",
							search.getHexPeerId(), peer);
					peer.setPeerId(search.getPeerId());
					this.peers.put(search.getHexPeerId(), peer);
				}

				logger.debug("Found peer (by host ID): {}.", peer);
				return peer;
			}

			// Unknown peer: create it and add it to the repository.
			peer = new SharingPeer(search.getIp(), search.getPort(),
					search.getPeerId(), this.torrent);
			logger.trace("Created new peer: {}.", peer);

			this.peers.put(peer.getHostIdentifier(), peer);
			if (peer.hasPeerId()) {
				this.peers.put(peer.getHexPeerId(), peer);
			}

			return peer;
		}
	}

	/**
	 * Retrieve a peer comparator.
	 *
	 * <p>
	 * Returns a peer comparator based on either the download rate or the upload
	 * rate of each peer depending on our state. While sharing, we rely on the
	 * download rate we get from each peer. When our download is complete and
	 * we're only seeding, we use the upload rate instead.
	 * </p>
	 *
	 * @return A SharingPeer comparator that can be used to sort peers based on
	 *         the download or upload rate we get from them.
	 */
	private Comparator<SharingPeer> getPeerRateComparator() {
		if (ClientState.SHARING.equals(this.state)) {
			return new SharingPeer.DLRateComparator();
		} else if (ClientState.SEEDING.equals(this.state)) {
			return new SharingPeer.ULRateComparator();
		} else {
			throw new IllegalStateException("Client is neither sharing nor "
					+ "seeding, we shouldn't be comparing peers at this point.");
		}
	}

	/**
	 * Unchoke connected peers.
	 *
	 * <p>
	 * This is one of the "clever" places of the BitTorrent client. Every
	 * OPTIMISTIC_UNCHOKING_FREQUENCY seconds, we decide which peers should be
	 * unchocked and authorized to grab pieces from us.
	 * </p>
	 *
	 * <p>
	 * Reciprocation (tit-for-tat) and upload capping is implemented here by
	 * carefully choosing which peers we unchoke, and which peers we choke.
	 * </p>
	 *
	 * <p>
	 * The four peers with the best download rate and are interested in us get
	 * unchoked. This maximizes our download rate as we'll be able to get data
	 * from these four "best" peers quickly, while allowing these peers to
	 * download from us and thus reciprocate their generosity.
	 * </p>
	 *
	 * <p>
	 * Peers that have a better download rate than these four downloaders but
	 * are not interested get unchoked too, we want to be able to download from
	 * them to get more data more quickly. If one becomes interested, it takes a
	 * downloader's place as one of the four top downloaders (i.e. we choke the
	 * downloader with the worst upload rate).
	 * </p>
	 *
	 * @param optimistic
	 *            Whether to perform an optimistic unchoke as well.
	 */
	private synchronized void unchokePeers(boolean optimistic) {
		// Build a set of all connected peers, we don't care about peers we're
		// not connected to.
		TreeSet<SharingPeer> bound =
				new TreeSet<SharingPeer>(this.getPeerRateComparator());
		bound.addAll(this.connected.values());

		if (bound.size() == 0) {
			logger.trace("No connected peers, skipping unchoking.");
			return;
		} else {
			logger.trace("Running unchokePeers() on {} connected peers.",
					bound.size());
		}

		int downloaders = 0;
		Set<SharingPeer> choked = new HashSet<SharingPeer>();

		// We're interested in the top downloaders first, so use a descending
		// set.
		for (SharingPeer peer : bound.descendingSet()) {
			if (downloaders < Client.MAX_DOWNLOADERS_UNCHOKE) {
				// Unchoke up to MAX_DOWNLOADERS_UNCHOKE interested peers
				if (peer.isChoking()) {
					if (peer.isInterested()) {
						downloaders++;
					}

					peer.unchoke();
				}
			} else {
				// Choke everybody else
				choked.add(peer);
			}
		}

		// Actually choke all chosen peers (if any), except the eventual
		// optimistic unchoke.
		if (choked.size() > 0) {
			SharingPeer randomPeer = choked.toArray(
					new SharingPeer[0])[this.random.nextInt(choked.size())];

			for (SharingPeer peer : choked) {
				if (optimistic && peer == randomPeer) {
					logger.debug("Optimistic unchoke of {}.", peer);
					continue;
				}

				peer.choke();
			}
		}
	}

	/** AnnounceResponseListener handler(s). **********************************/

	/**
	 * Handle an announce response event.
	 *
	 * @param interval
	 *            The announce interval requested by the tracker.
	 * @param complete
	 *            The number of seeders on this torrent.
	 * @param incomplete
	 *            The number of leechers on this torrent.
	 */
	@Override
	public void handleAnnounceResponse(int interval, int complete,
			int incomplete) {
		this.announce.setInterval(interval);
	}

	/**
	 * Handle the discovery of new peers.
	 *
	 * @param peers
	 *            The list of peers discovered (from the announce response or
	 *            any other means like DHT/PEX, etc.).
	 */
	@Override
	public void handleDiscoveredPeers(List<Peer> peers) {
		if (peers == null || peers.isEmpty()) {
			// No peers returned by the tracker. Apparently we're alone on
			// this one for now.
			return;
		}

		logger.info("Got {} peer(s) in tracker response.", peers.size());

		if (!this.service.isAlive()) {
			logger.warn("Connection handler service is not available.");
			return;
		}

		for (Peer peer : peers) {
			// Attempt to connect to the peer if and only if:
			//   - We're not already connected or connecting to it;
			//   - We're not a seeder (we leave the responsibility
			//     of connecting to peers that need to download
			//     something).
			SharingPeer match = this.getOrCreatePeer(peer);

			// As a seed we don't initiate outbound connections; leechers
			// connect to us instead.
			if (this.isSeed()) {
				continue;
			}

			synchronized (match) {
				if (!match.isConnected()) {
					this.service.connect(match);
				}
			}
		}
	}

	/** IncomingConnectionListener handler(s). ********************************/

	/**
	 * Handle a new peer connection.
	 *
	 * <p>
	 * This handler is called once the connection has been successfully
	 * established and the handshake exchange made. This generally simply means
	 * binding the peer to the socket, which will put in place the communication
	 * thread and logic with this peer.
	 * </p>
	 *
	 * @param channel
	 *            The connected socket channel to the remote peer. Note that if
	 *            the peer somehow rejected our handshake reply, this socket
	 *            might very soon get closed, but this is handled down the road.
	 * @param peerId
	 *            The byte-encoded peerId extracted from the peer's handshake,
	 *            after validation.
	 * @see com.turn.ttorrent.client.peer.SharingPeer
	 */
	@Override
	public void handleNewPeerConnection(SocketChannel channel, byte[] peerId) {
		Peer search = new Peer(
				channel.socket().getInetAddress().getHostAddress(),
				channel.socket().getPort(),
				(peerId != null ? ByteBuffer.wrap(peerId) : (ByteBuffer) null));

		logger.info("Handling new peer connection with {}...", search);
		SharingPeer peer = this.getOrCreatePeer(search);

		try {
			synchronized (peer) {
				// Only one connection per peer: drop duplicates immediately.
				if (peer.isConnected()) {
					logger.info("Already connected with {}, closing link.", peer);
					channel.close();
					return;
				}

				peer.register(this);
				peer.bind(channel);
			}

			this.connected.put(peer.getHexPeerId(), peer);
			peer.register(this.torrent);
			logger.debug("New peer connection with {} [{}/{}].",
					new Object[] { peer, this.connected.size(),
							this.peers.size() });
		} catch (Exception e) {
			// Binding failed: roll back the connected-map entry.
			this.connected.remove(peer.getHexPeerId());
			logger.warn("Could not handle new peer connection "
					+ "with {}: {}", peer, e.getMessage());
		}
	}

	/**
	 * Handle a failed peer connection.
	 *
	 * <p>
	 * If an outbound connection failed (could not connect, invalid handshake,
	 * etc.), remove the peer from our known peers.
	 * </p>
	 *
	 * @param peer
	 *            The peer we were trying to connect with.
	 * @param cause
	 *            The exception encountered when connecting with the peer.
	 */
	@Override
	public void handleFailedConnection(SharingPeer peer, Throwable cause) {
		logger.warn("Could not connect to {}: {}.", peer, cause.getMessage());
		this.peers.remove(peer.getHostIdentifier());
		if (peer.hasPeerId()) {
			this.peers.remove(peer.getHexPeerId());
		}
	}

	/** PeerActivityListener handler(s). **************************************/

	@Override
	public void handlePeerChoked(SharingPeer peer) { /* Do nothing */ }

	@Override
	public void handlePeerReady(SharingPeer peer) { /* Do nothing */ }

	@Override
	public void handlePieceAvailability(SharingPeer peer,
			Piece piece) { /* Do nothing */ }

	@Override
	public void handleBitfieldAvailability(SharingPeer peer,
			BitSet availablePieces) { /* Do nothing */ }

	@Override
	public void handlePieceSent(SharingPeer peer,
			Piece piece) { /* Do nothing */ }

	/**
	 * Piece download completion handler.
	 *
	 * <p>
	 * When a piece is completed, and valid, we announce to all connected peers
	 * that we now have this piece.
	 * </p>
	 *
	 * <p>
	 * We use this handler to identify when all of the pieces have been
	 * downloaded. When that's the case, we can start the seeding period, if
	 * any.
	 * </p>
	 *
	 * @param peer
	 *            The peer we got the piece from.
	 * @param piece
	 *            The piece in question.
	 */
	@Override
	public void handlePieceCompleted(SharingPeer peer, Piece piece)
			throws IOException {
		synchronized (this.torrent) {
			if (piece.isValid()) {
				// Make sure the piece is marked as completed in the torrent
				// Note: this is required because the order the
				// PeerActivityListeners are called is not defined, and we
				// might be called before the torrent's piece completion
				// handler is.
				this.torrent.markCompleted(piece);
				logger.debug("Completed download of {} from {}. "
						+ "We now have {}/{} pieces",
						new Object[] { piece, peer,
								this.torrent.getCompletedPieces().cardinality(),
								this.torrent.getPieceCount() });

				// Send a HAVE message to all connected peers
				PeerMessage have = PeerMessage.HaveMessage.craft(piece.getIndex());
				for (SharingPeer remote : this.connected.values()) {
					remote.send(have);
				}

				// Force notify after each piece is completed to propagate
				// download
				// completion information (or new seeding state)
				this.setChanged();
				this.notifyObservers(this.state);
			} else {
				logger.warn("Downloaded piece#{} from {} was not valid ;-(",
						piece.getIndex(), peer);
			}

			if (this.torrent.isComplete()) {
				logger.info("Last piece validated and completed, finishing download...");

				// Cancel all remaining outstanding requests
				for (SharingPeer remote : this.connected.values()) {
					if (remote.isDownloading()) {
						int requests = remote.cancelPendingRequests().size();
						logger.info("Cancelled {} remaining pending requests on {}.",
								requests, remote);
					}
				}

				this.torrent.finish();

				// Tell the tracker we completed the download.
				try {
					this.announce.getCurrentTrackerClient().announce(
							TrackerMessage.AnnounceRequestMessage.RequestEvent.COMPLETED,
							true);
				} catch (AnnounceException ae) {
					logger.warn("Error announcing completion event to "
							+ "tracker: {}", ae.getMessage());
				}

				logger.info("Download is complete and finalized.");
				this.seed();
			}
		}
	}

	@Override
	public void handlePeerDisconnected(SharingPeer peer) {
		if (this.connected.remove(peer.hasPeerId()
				? peer.getHexPeerId()
				: peer.getHostIdentifier()) != null) {
			logger.debug("Peer {} disconnected, [{}/{}].",
					new Object[] { peer, this.connected.size(),
							this.peers.size() });
		}

		peer.reset();
	}

	@Override
	public void handleIOException(SharingPeer peer, IOException ioe) {
		logger.warn("I/O error while exchanging data with {}, "
				+ "closing connection with it!", peer, ioe.getMessage());
		peer.unbind(true);
	}

	/** Post download seeding. ************************************************/

	/**
	 * Start the seeding period, if any.
* * <p> * This method is called when all the pieces of our torrent have been * retrieved. This may happen immediately after the client starts if the * torrent was already fully download or we are the initial seeder client. * </p> * * <p> * When the download is complete, the client switches to seeding mode for as * long as requested in the <code>share()</code> call, if seeding was * requested. If not, the StopSeedingTask will execute immediately to stop * the client's main loop. * </p> * * @see StopSeedingTask */ private synchronized void seed() { // Silently ignore if we're already seeding. if (ClientState.SEEDING.equals(this.getState())) { return; } logger.info("Download of {} pieces completed.", this.torrent.getPieceCount()); this.setState(ClientState.SEEDING); if (this.seed < 0) { logger.info("Seeding indefinetely..."); return; } // In case seeding for 0 seconds we still need to schedule the task in // order to call stop() from different thread to avoid deadlock logger.info("Seeding for {} seconds...", this.seed); Timer timer = new Timer(); timer.schedule(new ClientShutdown(this, timer), this.seed * 1000); } /** * Timer task to stop seeding. * * <p> * This TimerTask will be called by a timer set after the download is * complete to stop seeding from this client after a certain amount of * requested seed time (might be 0 for immediate termination). * </p> * * <p> * This task simply contains a reference to this client instance and calls * its <code>stop()</code> method to interrupt the client's main loop. * </p> * * @author mpetazzoni */ private static class ClientShutdown extends TimerTask { private final Client client; private final Timer timer; ClientShutdown(Client client, Timer timer) { this.client = client; this.timer = timer; } @Override public void run() { this.client.stop(); if (this.timer != null) { this.timer.cancel(); } } }; }
package es.upm.fi.dia.oeg.ogsadai.sparql.lqp.activities; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.QueryFactory; import com.hp.hpl.jena.query.ResultSet; import uk.org.ogsadai.activity.ActivityContractName; import uk.org.ogsadai.activity.ActivityProcessingException; import uk.org.ogsadai.activity.ActivityTerminatedException; import uk.org.ogsadai.activity.ActivityUserException; import uk.org.ogsadai.activity.MatchedIterativeActivity; import uk.org.ogsadai.activity.io.ActivityIOException; import uk.org.ogsadai.activity.io.ActivityInput; import uk.org.ogsadai.activity.io.BlockWriter; import uk.org.ogsadai.activity.io.ControlBlock; import uk.org.ogsadai.activity.io.TupleListActivityInput; import uk.org.ogsadai.activity.io.TupleListIterator; import uk.org.ogsadai.activity.io.TypedActivityInput; import uk.org.ogsadai.common.msgs.DAILogger; import uk.org.ogsadai.metadata.MetadataWrapper; import uk.org.ogsadai.tuple.ColumnMetadata; import uk.org.ogsadai.tuple.SimpleColumnMetadata; import uk.org.ogsadai.tuple.SimpleTupleMetadata; import uk.org.ogsadai.tuple.Tuple; import uk.org.ogsadai.tuple.TupleTypes; import es.upm.fi.dia.oeg.ogsadai.rdfresource.RDFConnectionProvider; /** * An activity that executes SPARQL query on a target RDF data resource and * produces a list of tuples containing the results of the query. * <p> * Activity inputs: * </p> * <ul> * <li> * <code>expression</code>. Type: {@link java.lang.String}. SPARQL query * expression.</li> * </ul> * <p> * Activity outputs: * </p> * <ul> * <li> * <code>data</code>. Type: OGSA-DAI list of {@link uk.org.ogsadai.tuple.Tuple} * with the first item in the list an instance of * {@link uk.org.ogsadai.metadata.MetadataWrapper} containing a * {@link uk.org.ogsadai.tuple.TupleMetadata} object. 
The tuples produced by the * query.</li> * </ul> * <p> * Configuration parameters: none. * </p> * <p> * Activity input/output ordering: none. * </p> * <p> * Activity contracts: * </p> * <ul> * <li> * <code>es.upm.fi.dia.oeg.ogsadai.rdfactivity.contract.EPRQueryActivity</code></li> * </ul> * <p> * Target data resource: * <ul> * <li> * {@link es.upm.fi.dia.oeg.ogsadai.rdfresource.RDFResource}</li> * </ul> * </p> * </p> * <p> * Behaviour: * </p> * <ul> * <li> * This activity accepts a sequence of SPARQL query expressions as input and is * targeted at a RDF data resource. In each iteration one input query is * processed by executing the query across the target data resource. The results * of each iteration is a OGSA-DAI list of tuples with a metadata header block.</li> * <li> * Partial data may be produced if an error occurs at any stage of processing.</li> * </ul> * * @author Carlos Buil Aranda, Oscar Corcho * @email cbuil@fi.upm.es, ocorcho@fi.upm.es * @institution Universidad Politecnica de Madrid */ public class VarSparqlEndpointQueryActivity extends MatchedIterativeActivity { /** Copyright statement. */ private static final String COPYRIGHT_NOTICE = "Copyright (c) Pontificia Universidad Catolica de Chile, 2013."; /** Logger. */ private static final DAILogger LOG = DAILogger .getLogger(VarSparqlEndpointQueryActivity.class); /** Activity input name - SPARQL expression */ public static final String INPUT_SPARQL_EXPRESSION = "expression"; /** Activity input name - SPARQL expression */ public static final String INPUT_SPARQL_ENDPOINT = "endpointURL"; /** Activity output name - produces lists of tuples */ public static final String OUTPUT_SPARQL_RESULTS = "rdfdata"; /** The RDFDataResource connection provider */ private RDFConnectionProvider mResource; /** * SPARQL EPR address */ private String mAddress; /** * Constructor. 
*/ public VarSparqlEndpointQueryActivity() { super(); mContracts .add(new ActivityContractName( "es.upm.fi.dia.oeg.ogsadai.rdfactivity.contract.VarSparqlEndpointQueryActivity")); } @Override protected ActivityInput[] getIterationInputs() { return new ActivityInput[] { new TypedActivityInput(INPUT_SPARQL_EXPRESSION, String.class), new TupleListActivityInput(INPUT_SPARQL_ENDPOINT), }; } @Override protected void postprocess() throws ActivityUserException, ActivityProcessingException, ActivityTerminatedException { // no post processing } @Override protected void preprocess() throws ActivityUserException, ActivityProcessingException, ActivityTerminatedException { validateOutput(OUTPUT_SPARQL_RESULTS); } @Override protected void processIteration(Object[] iterationData) throws ActivityProcessingException, ActivityTerminatedException, ActivityUserException { final String expression = (String) iterationData[0]; TupleListIterator endpointTuples = (TupleListIterator) iterationData[1]; // mAddress = (String) iterationData[1]; try { LOG.debug("About to query SPARQL endpoint..."); Query query = QueryFactory.create(expression); Tuple tuple; BlockWriter output = getOutput(OUTPUT_SPARQL_RESULTS); boolean includeListMarkers = true; if (includeListMarkers) { output.write(ControlBlock.LIST_BEGIN); } if (query != null) { Query query1 = QueryFactory.create(query); List<String> resultvars = query1.getResultVars(); Iterator<String> it1 = resultvars.listIterator(); List<ColumnMetadata> listColumnMetadata = new ArrayList<ColumnMetadata>(); while (it1.hasNext()) { String var = (String) it1.next(); listColumnMetadata.add(new SimpleColumnMetadata(var, TupleTypes._STRING, 100, ColumnMetadata.COLUMN_NULLABLE, 10000)); } LOG.debug("Sparql VAR query metadata: " + listColumnMetadata); output.write(new MetadataWrapper(new SimpleTupleMetadata( listColumnMetadata))); while ((tuple = (Tuple) endpointTuples.nextValue()) != null) { String sparqlEndpoint = tuple.getString(1); LOG.debug("Endpoint to query: 
" + sparqlEndpoint); // Create a single execution of this query, apply to a model // which is wrapped up as a Dataset final QueryExecution qexec = QueryExecutionFactory .sparqlService( sparqlEndpoint.substring(1, sparqlEndpoint.length() - 1), query); ResultSet rs = null; try { LOG.debug("About to execute SPARQL query..."); rs = qexec.execSelect(); LOG.debug("SPARQL query " + query + " executed"); } finally { if (rs != null) { // QueryExecution objects should be closed to free // any // system resources LOG.debug("About to create OGSA-DAI tuples..."); VarRDFUtilities.createTupleList(rs, output); LOG.debug("tuples created..."); } else { LOG.debug("ResultSet is null, did you write correctly the SPARQL EPR address?"); } qexec.close(); } } } if (includeListMarkers) { output.write(ControlBlock.LIST_END); output.closeForWriting(); } } catch (IOException e) { throw new ActivityIOException(e); } catch (Exception e) { e.printStackTrace(); } } protected void cleanUp() throws Exception { super.cleanUp(); } }