gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.stellar.dsl.functions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.metron.stellar.common.utils.StellarProcessorUtils.run;
/**
 * Tests Stellar's higher-order functions: MAP, FILTER, REDUCE, ZIP and
 * ZIP_LONGEST.  Each test evaluates several equivalent Stellar expressions
 * (literal lists vs. variable references, different lambda syntaxes) and
 * asserts that they all produce the same result.
 */
public class FunctionalFunctionsTest {

  @Test
  @SuppressWarnings("unchecked")
  public void testZipLongest_boundary() {
    // ZIP_LONGEST of nothing, nulls or empty lists yields an empty list.
    for (String expr : ImmutableList.of("ZIP_LONGEST()"
        , "ZIP_LONGEST( null, null )"
        , "ZIP_LONGEST( [], null )"
        , "ZIP_LONGEST( [], [] )"
        , "ZIP_LONGEST( null, [] )"
        )
    ) {
      List<List<Object>> o = (List<List<Object>>) run(expr, new HashMap<>());
      Assert.assertEquals(0, o.size());
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testZip_longest() {
    Map<String, Object> variables = ImmutableMap.of(
        "list1", ImmutableList.of(1, 2, 3)
        , "list2", ImmutableList.of(4, 5, 6, 7)
    );
    // Single argument: each output row is a one-element list.
    for (String expr : ImmutableList.of("ZIP_LONGEST(list1)"
        , "ZIP_LONGEST( [1, 2, 3])"
        )
    ) {
      List<List<Object>> o = (List<List<Object>>) run(expr, variables);
      Assert.assertEquals(3, o.size());
      for (int i = 0; i < 3; ++i) {
        List<Object> l = o.get(i);
        Assert.assertEquals(1, l.size());
        Assert.assertEquals(i + 1, l.get(0));
      }
    }
    // Lists of different length: output is padded with nulls to the length of
    // the longer input.
    for (String expr : ImmutableList.of("ZIP_LONGEST(list1, list2)"
        , "ZIP_LONGEST( [1, 2, 3], [4, 5, 6, 7] )"
        )
    ) {
      List<List<Object>> o = (List<List<Object>>) run(expr, variables);
      Assert.assertEquals(4, o.size());
      for (int i = 0; i < 3; ++i) {
        List<Object> l = o.get(i);
        Assert.assertEquals(2, l.size());
        Assert.assertEquals(i + 1, l.get(0));
        Assert.assertEquals(i + 4, l.get(1));
      }
      {
        // The 4th row has no counterpart in the shorter list, so its first
        // entry is null.
        int i = 3;
        List<Object> l = o.get(i);
        Assert.assertEquals(2, l.size());
        Assert.assertNull(l.get(0));
        Assert.assertEquals(i + 4, l.get(1));
      }
    }
    for (String expr : ImmutableList.of(
        "REDUCE(ZIP_LONGEST(list2, list1), (s, x) -> s + GET_FIRST(x) * GET_LAST(x), 0)"
        , "REDUCE(ZIP_LONGEST( [1, 2, 3], [4, 5, 6, 7] ), (s, x) -> s + GET_FIRST(x) * GET_LAST(x), 0)"
        , "REDUCE(ZIP_LONGEST(list1, list2), (s, x) -> s + GET_FIRST(x) * GET_LAST(x), 0)" //this works because stellar treats nulls as 0 in arithmetic operations.
        , "REDUCE(ZIP_LONGEST(list1, list2), (s, x) -> s + (GET_FIRST(x) == null?0:GET_FIRST(x)) * (GET_LAST(x) == null?0:GET_LAST(x)), 0)" //with proper guarding NOT assuming stellar peculiarities
        )
    ) {
      int o = (int) run(expr, variables);
      // Exact integer comparison; the delta-based assertEquals overload is
      // for floating point and is not needed here.
      Assert.assertEquals(1 * 4 + 2 * 5 + 3 * 6, o);
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testZip_boundary() {
    // ZIP of nothing, nulls or empty lists yields an empty list.
    for (String expr : ImmutableList.of("ZIP()"
        , "ZIP( null, null )"
        , "ZIP( [], null )"
        , "ZIP( [], [] )"
        , "ZIP( null, [] )"
        )
    ) {
      List<List<Object>> o = (List<List<Object>>) run(expr, new HashMap<>());
      Assert.assertEquals(0, o.size());
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testZip() {
    Map<String, Object> variables = ImmutableMap.of(
        "list1", ImmutableList.of(1, 2, 3)
        , "list2", ImmutableList.of(4, 5, 6)
    );
    for (String expr : ImmutableList.of("ZIP(list1)"
        , "ZIP( [1, 2, 3])"
        )
    ) {
      List<List<Object>> o = (List<List<Object>>) run(expr, variables);
      Assert.assertEquals(3, o.size());
      for (int i = 0; i < 3; ++i) {
        List<Object> l = o.get(i);
        Assert.assertEquals(1, l.size());
        Assert.assertEquals(i + 1, l.get(0));
      }
    }
    // ZIP truncates to the shorter list, so the 4-element list2 variant still
    // yields 3 rows.
    for (String expr : ImmutableList.of("ZIP(list1, list2)"
        , "ZIP( [1, 2, 3], [4, 5, 6] )"
        , "ZIP( [1, 2, 3], [4, 5, 6, 7] )"
        )
    ) {
      List<List<Object>> o = (List<List<Object>>) run(expr, variables);
      Assert.assertEquals(3, o.size());
      for (int i = 0; i < 3; ++i) {
        List<Object> l = o.get(i);
        Assert.assertEquals(2, l.size());
        Assert.assertEquals(i + 1, l.get(0));
        Assert.assertEquals(i + 4, l.get(1));
      }
    }
    for (String expr : ImmutableList.of(
        "REDUCE(ZIP(list1, list2), (s, x) -> s + GET_FIRST(x) * GET_LAST(x), 0)"
        , "REDUCE(ZIP( [1, 2, 3], [4, 5, 6] ), (s, x) -> s + GET_FIRST(x) * GET_LAST(x), 0)"
        , "REDUCE(ZIP( [1, 2, 3], [4, 5, 6, 7] ), (s, x) -> s + GET_FIRST(x) * GET_LAST(x), 0)"
        )
    ) {
      int o = (int) run(expr, variables);
      // Exact integer comparison; no floating point involved.
      Assert.assertEquals(1 * 4 + 2 * 5 + 3 * 6, o);
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testRecursive() {
    // MAP whose lambda itself invokes REDUCE over each inner list.
    for (String expr : ImmutableList.of("MAP(list, inner_list -> REDUCE(inner_list, (x, y) -> x + y, 0) )"
        , "MAP(list, (inner_list) -> REDUCE(inner_list, (x, y) -> x + y, 0) )"
        )
    ) {
      Object o = run(expr, ImmutableMap.of("list", ImmutableList.of(ImmutableList.of(1, 2, 3), ImmutableList.of(4, 5, 6))));
      Assert.assertTrue(o instanceof List);
      List<Number> result = (List<Number>) o;
      Assert.assertEquals(2, result.size());
      Assert.assertEquals(6, result.get(0));
      Assert.assertEquals(15, result.get(1));
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testMap_null() {
    for (String expr : ImmutableList.of("MAP([ 1, 2, null], x -> if x == null then 0 else 2*x )"
        , "MAP([ 1, 2, null], x -> x == null ? 0 : 2*x )"
        , "MAP([ 1, foo, baz], x -> x == null ? 0 : 2*x )"
        )
    ) {
      // HashMap (not ImmutableMap) because the "baz" binding is null.
      Map<String, Object> variableMap = new HashMap<String, Object>() {{
        put("foo", 2);
        put("bar", 3);
        put("baz", null);
      }};
      Object o = run(expr, variableMap);
      Assert.assertTrue(o instanceof List);
      // The mapped values are integers, not strings.
      List<Number> result = (List<Number>) o;
      Assert.assertEquals(3, result.size());
      Assert.assertEquals(2, result.get(0));
      Assert.assertEquals(4, result.get(1));
      Assert.assertEquals(0, result.get(2));
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testMap() {
    for (String expr : ImmutableList.of("MAP([ 'foo', 'bar'], (x) -> TO_UPPER(x) )"
        , "MAP([ foo, 'bar'], (x) -> TO_UPPER(x) )"
        , "MAP([ foo, bar], (x) -> TO_UPPER(x) )"
        , "MAP([ foo, bar], x -> TO_UPPER(x) )"
        , "MAP([ foo, bar], x -> true?TO_UPPER(x):THROW('error') )"
        , "MAP([ foo, bar], x -> false?THROW('error'):TO_UPPER(x) )"
        )
    ) {
      Object o = run(expr, ImmutableMap.of("foo", "foo", "bar", "bar"));
      Assert.assertTrue(o instanceof List);
      List<String> result = (List<String>) o;
      Assert.assertEquals(2, result.size());
      Assert.assertEquals("FOO", result.get(0));
      Assert.assertEquals("BAR", result.get(1));
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testMap_conditional() {
    for (String expr : ImmutableList.of("MAP([ 'foo', 'bar'], (item) -> item == 'foo' )"
        , "MAP([ foo, bar], (item) -> item == 'foo' )"
        , "MAP([ foo, bar], (item) -> item == foo )"
        , "MAP([ foo, bar], item -> item == foo )"
        )
    ) {
      Object o = run(expr, ImmutableMap.of("foo", "foo", "bar", "bar"));
      Assert.assertTrue(o instanceof List);
      List<Boolean> result = (List<Boolean>) o;
      Assert.assertEquals(2, result.size());
      Assert.assertEquals(true, result.get(0));
      Assert.assertEquals(false, result.get(1));
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testFilter() {
    for (String expr : ImmutableList.of("FILTER([ 'foo', 'bar'], (item) -> item == 'foo' )"
        , "FILTER([ 'foo', bar], (item) -> item == 'foo' )"
        , "FILTER([ foo, bar], (item) -> item == 'foo' )"
        , "FILTER([ foo, bar], (item) -> (item == 'foo' && true) )"
        , "FILTER([ foo, bar], (item) -> if item == 'foo' then true else false )"
        , "FILTER([ foo, bar], item -> if item == 'foo' then true else false )"
        )
    ) {
      Object o = run(expr, ImmutableMap.of("foo", "foo", "bar", "bar"));
      Assert.assertTrue(o instanceof List);
      List<String> result = (List<String>) o;
      Assert.assertEquals(1, result.size());
      Assert.assertEquals("foo", result.get(0));
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testFilter_shortcircuit() {
    // `or` must short-circuit: THROW would abort the test if evaluated.
    for (String expr : ImmutableList.of("FILTER([ 'foo'], item -> item == 'foo' or THROW('exception') )"
        , "FILTER([ 'foo'], (item) -> item == 'foo' or THROW('exception') )"
        )
    ) {
      Object o = run(expr, ImmutableMap.of("foo", "foo", "bar", "bar"));
      Assert.assertTrue(o instanceof List);
      List<String> result = (List<String>) o;
      Assert.assertEquals(1, result.size());
      Assert.assertEquals("foo", result.get(0));
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testFilter_null() {
    for (String expr : ImmutableList.of("FILTER([ 'foo', null], item -> item == null )"
        , "FILTER([ 'foo', baz], (item) -> item == null )"
        )
    ) {
      Map<String, Object> variableMap = new HashMap<String, Object>() {{
        put("foo", "foo");
        put("bar", "bar");
        put("baz", null);
      }};
      Object o = run(expr, variableMap);
      Assert.assertTrue(o instanceof List);
      List<String> result = (List<String>) o;
      Assert.assertEquals(1, result.size());
      Assert.assertNull(result.get(0));
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testFilter_notnull() {
    for (String expr : ImmutableList.of("FILTER([ 'foo', null], item -> item != null )"
        , "FILTER([ 'foo', baz], (item) -> item != null )"
        , "FILTER([ foo, baz], (item) -> item != null )"
        )
    ) {
      Map<String, Object> variableMap = new HashMap<String, Object>() {{
        put("foo", "foo");
        put("bar", "bar");
        put("baz", null);
      }};
      Object o = run(expr, variableMap);
      Assert.assertTrue(o instanceof List);
      List<String> result = (List<String>) o;
      Assert.assertEquals(1, result.size());
      Assert.assertEquals("foo", result.get(0));
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testFilter_none() {
    for (String expr : ImmutableList.of("FILTER([ foo, bar], () -> false )"
        , "FILTER([ 'foo', 'bar'], (item)-> false )"
        , "FILTER([ 'foo', bar], (item ) -> false )"
        , "FILTER([ foo, bar], (item) -> false )"
        , "FILTER([ foo, bar], item -> false )"
        )
    ) {
      Object o = run(expr, ImmutableMap.of("foo", "foo", "bar", "bar"));
      Assert.assertTrue(o instanceof List);
      List<String> result = (List<String>) o;
      Assert.assertEquals(0, result.size());
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testFilter_all() {
    for (String expr : ImmutableList.of("FILTER([ 'foo', 'bar'], (item) -> true )"
        , "FILTER([ 'foo', bar], (item) -> true )"
        , "FILTER([ foo, bar], (item) -> true )"
        , "FILTER([ foo, bar], item -> true )"
        , "FILTER([ foo, bar], ()-> true )"
        )
    ) {
      Object o = run(expr, ImmutableMap.of("foo", "foo", "bar", "bar"));
      Assert.assertTrue(o instanceof List);
      List<String> result = (List<String>) o;
      Assert.assertEquals(2, result.size());
      Assert.assertEquals("foo", result.get(0));
      Assert.assertEquals("bar", result.get(1));
    }
  }

  @Test
  public void testReduce_null() {
    // nulls are skipped explicitly by the lambda; only 1 + 2 + 3 remain.
    for (String expr : ImmutableList.of("REDUCE([ 1, 2, 3, null], (x, y) -> if y != null then x + y else x , 0 )"
        , "REDUCE([ foo, bar, 3, baz], (sum, y) -> if y != null then sum + y else sum, 0 )"
        )
    ) {
      Map<String, Object> variableMap = new HashMap<String, Object>() {{
        put("foo", 1);
        put("bar", 2);
        put("baz", null);
      }};
      Object o = run(expr, variableMap);
      Assert.assertTrue(o instanceof Number);
      Number result = (Number) o;
      Assert.assertEquals(6, result.intValue());
    }
  }

  @Test
  public void testReduce() {
    for (String expr : ImmutableList.of("REDUCE([ 1, 2, 3 ], (x, y) -> x + y , 0 )"
        , "REDUCE([ foo, bar, 3 ], (x, y) -> x + y , 0 )"
        )
    ) {
      Object o = run(expr, ImmutableMap.of("foo", 1, "bar", 2));
      Assert.assertTrue(o instanceof Number);
      Number result = (Number) o;
      Assert.assertEquals(6, result.intValue());
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testReduce_on_various_list_sizes() {
    {
      String expr = "REDUCE([ 1, 2, 3, 4 ], (x, y) -> x + y , 0 )";
      Object o = run(expr, ImmutableMap.of());
      Assert.assertTrue(o instanceof Number);
      Number result = (Number) o;
      Assert.assertEquals(10, result.intValue());
    }
    {
      String expr = "REDUCE([ 1, 2 ], (x, y) -> x + y , 0 )";
      Object o = run(expr, ImmutableMap.of());
      Assert.assertTrue(o instanceof Number);
      Number result = (Number) o;
      Assert.assertEquals(3, result.intValue());
    }
    {
      String expr = "REDUCE([ 1 ], (x, y) -> x + y , 0 )";
      Object o = run(expr, ImmutableMap.of());
      Assert.assertTrue(o instanceof Number);
      Number result = (Number) o;
      Assert.assertEquals(1, result.intValue());
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testReduce_NonNumeric() {
    // REDUCE can accumulate into a list, not just a number.
    for (String expr : ImmutableList.of("REDUCE([ 'foo', 'bar', 'grok'], (x, y) -> LIST_ADD(x, y), [] )"
        )
    ) {
      Object o = run(expr, ImmutableMap.of("foo", 1, "bar", 2, "x", 0, "y", 0));
      Assert.assertTrue(o instanceof List);
      List<String> result = (List<String>) o;
      Assert.assertEquals(3, result.size());
      Assert.assertEquals("foo", result.get(0));
      Assert.assertEquals("bar", result.get(1));
      Assert.assertEquals("grok", result.get(2));
    }
  }

  @Test
  public void testReduce_returns_null_when_less_than_3_args() {
    // REDUCE requires (list, lambda, seed); fewer arguments yields null.
    {
      String expr = "REDUCE([ 1, 2, 3 ], (x, y) -> LIST_ADD(x, y))";
      Assert.assertThat(run(expr, ImmutableMap.of()), CoreMatchers.equalTo(null));
    }
    {
      String expr = "REDUCE([ 1, 2, 3 ])";
      Assert.assertThat(run(expr, ImmutableMap.of()), CoreMatchers.equalTo(null));
    }
  }
}
| |
package acceptance.user;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import acceptance.AbstractAccTest;
import java.util.Arrays;
import java.util.Iterator;
import java.util.stream.Stream;
import org.assertj.core.api.ThrowableAssert.ThrowingCallable;
import org.junit.jupiter.api.DynamicTest;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestFactory;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.function.ThrowingConsumer;
import pro.taskana.common.api.exceptions.InvalidArgumentException;
import pro.taskana.common.api.exceptions.MismatchedRoleException;
import pro.taskana.common.test.security.JaasExtension;
import pro.taskana.common.test.security.WithAccessId;
import pro.taskana.user.api.UserService;
import pro.taskana.user.api.exceptions.UserAlreadyExistException;
import pro.taskana.user.api.exceptions.UserNotFoundException;
import pro.taskana.user.api.models.User;
/** Acceptance test which tests the functionality of the UserService. */
@ExtendWith(JaasExtension.class)
class UserServiceAccTest extends AbstractAccTest {

  private static final UserService USER_SERVICE = taskanaEngine.getUserService();

  @WithAccessId(user = "user-1-2")
  @Test
  void should_ReturnUserWithAllFields_When_IdExisting() throws Exception {
    User user = USER_SERVICE.getUser("teamlead-1");
    assertThat(user.getFirstName()).isEqualTo("Titus");
    assertThat(user.getLastName()).isEqualTo("Toll");
    assertThat(user.getFullName()).isEqualTo("Toll, Titus");
    assertThat(user.getLongName()).isEqualTo("Toll, Titus - (teamlead-1)");
    assertThat(user.getEmail()).isEqualTo("titus.toll@web.de");
    assertThat(user.getPhone()).isEqualTo("040-2951854");
    assertThat(user.getMobilePhone()).isEqualTo("015637683197");
    assertThat(user.getOrgLevel4()).isEqualTo("Novatec");
    assertThat(user.getOrgLevel3()).isEqualTo("BPM");
    assertThat(user.getOrgLevel2()).isEqualTo("Human Workflow");
    assertThat(user.getOrgLevel1()).isEqualTo("TASKANA");
    assertThat(user.getData()).isEqualTo("xy");
  }

  @WithAccessId(user = "user-1-2")
  @Test
  void should_ThrowUserNotFoundException_When_TryingToGetUserWithNonExistingId() {
    ThrowingCallable callable = () -> USER_SERVICE.getUser("NOT_EXISTING");
    assertThatThrownBy(callable)
        .isInstanceOf(UserNotFoundException.class)
        .hasMessage("User with id 'NOT_EXISTING' was not found.");
  }

  @WithAccessId(user = "admin")
  @Test
  void should_InsertUserInDatabase_When_CreatingUser() throws Exception {
    User userToCreate = createExampleUser("user-10-20");
    USER_SERVICE.createUser(userToCreate);
    // Re-read from the database and compare field-by-field.
    User userInDatabase = USER_SERVICE.getUser(userToCreate.getId());
    assertThat(userToCreate)
        .hasNoNullFieldsOrProperties()
        .isNotSameAs(userInDatabase)
        .isEqualTo(userInDatabase);
  }

  @WithAccessId(user = "admin")
  @Test
  void should_SetTheLongAndFullNameAccordingToRules_When_CreatingUserWithThoseFieldsEmpty()
      throws Exception {
    User userToCreate = createExampleUser("user-10-21");
    userToCreate.setLongName(null);
    userToCreate.setFullName(null);
    // Expected derivations: "Last, First" and "Last, First - (id)".
    String fullName = userToCreate.getLastName() + ", " + userToCreate.getFirstName();
    String longName =
        userToCreate.getLastName()
            + ", "
            + userToCreate.getFirstName()
            + " - ("
            + userToCreate.getId()
            + ")";
    User createdUser = USER_SERVICE.createUser(userToCreate);
    assertThat(createdUser.getLongName()).isEqualTo(longName);
    assertThat(createdUser.getFullName()).isEqualTo(fullName);
  }

  @WithAccessId(user = "admin")
  @Test
  void should_ThrowInvalidArgumentException_When_TryingToCreateUserWithFirstOrLastNameNull()
      throws Exception {
    User userToCreate = createExampleUser("user-10-20");
    userToCreate.setFirstName(null);
    ThrowingCallable callable = () -> USER_SERVICE.createUser(userToCreate);
    assertThatThrownBy(callable)
        .isInstanceOf(InvalidArgumentException.class)
        .hasMessage("First and last name of User must be set or empty.");
    userToCreate.setFirstName("xy");
    userToCreate.setLastName(null);
    callable = () -> USER_SERVICE.createUser(userToCreate);
    assertThatThrownBy(callable).isInstanceOf(InvalidArgumentException.class);
  }

  @WithAccessId(user = "admin")
  @TestFactory
  Stream<DynamicTest> should_ThrowInvalidArgumentException_When_TryingToCreateUserWithNotSetId()
      throws Exception {
    // Both the empty string and null count as a "not set" id.
    Iterator<String> iterator = Arrays.asList("", null).iterator();
    ThrowingConsumer<String> test =
        userId -> {
          User userToCreate = createExampleUser("user-10-20");
          userToCreate.setId(userId);
          ThrowingCallable callable = () -> USER_SERVICE.createUser(userToCreate);
          assertThatThrownBy(callable)
              .isInstanceOf(InvalidArgumentException.class)
              .hasMessage("UserId must not be empty when creating User.");
        };
    return DynamicTest.stream(iterator, c -> "for " + c, test);
  }

  @WithAccessId(user = "admin")
  @Test
  void should_ThrowUserAlreadyExistException_When_TryingToCreateUserWithExistingId() {
    User userToCreate = createExampleUser("teamlead-1"); // existing userId
    ThrowingCallable callable = () -> USER_SERVICE.createUser(userToCreate);
    assertThatThrownBy(callable)
        .isInstanceOf(UserAlreadyExistException.class)
        .hasMessage("User with id 'teamlead-1' already exists.");
  }

  @WithAccessId(user = "user-1-2")
  @Test
  void should_ThrowNotAuthorizedException_When_TryingToCreateUserWithoutAdminRole() {
    User userToCreate = createExampleUser("user-10-22");
    ThrowingCallable callable = () -> USER_SERVICE.createUser(userToCreate);
    assertThatThrownBy(callable)
        .isInstanceOf(MismatchedRoleException.class)
        .hasMessage(
            "Not authorized. The current user 'user-1-2' is not member of role(s) "
                + "'[BUSINESS_ADMIN, ADMIN]'.");
  }

  @WithAccessId(user = "admin")
  @Test
  void should_UpdateUserInDatabase_When_IdExisting() throws Exception {
    User userToUpdate = createExampleUser("teamlead-1"); // existing userId
    USER_SERVICE.updateUser(userToUpdate);
    User userInDatabase = USER_SERVICE.getUser("teamlead-1");
    assertThat(userToUpdate)
        .hasNoNullFieldsOrProperties()
        .isNotSameAs(userInDatabase)
        .isEqualTo(userInDatabase);
  }

  @WithAccessId(user = "admin")
  @Test
  void should_ThrowUserNotFoundException_When_TryingToUpdateUserWithNonExistingId() {
    User userToUpdate = createExampleUser("NOT_EXISTING");
    ThrowingCallable callable = () -> USER_SERVICE.updateUser(userToUpdate);
    assertThatThrownBy(callable)
        .isInstanceOf(UserNotFoundException.class)
        .hasMessage("User with id 'NOT_EXISTING' was not found.");
  }

  @WithAccessId(user = "user-1-2")
  @Test
  void should_ThrowNotAuthorizedException_When_TryingToUpdateUserWithNoAdminRole() {
    User userToUpdate = createExampleUser("teamlead-1"); // existing userId
    ThrowingCallable callable = () -> USER_SERVICE.updateUser(userToUpdate);
    assertThatThrownBy(callable)
        .isInstanceOf(MismatchedRoleException.class)
        .hasMessage(
            "Not authorized. The current user 'user-1-2' is not member of role(s) "
                + "'[BUSINESS_ADMIN, ADMIN]'.");
  }

  @WithAccessId(user = "admin")
  @Test
  void should_DeleteUserFromDatabase_When_IdExisting() throws Exception {
    String id = "teamlead-1";
    USER_SERVICE.getUser(id); // User existing
    USER_SERVICE.deleteUser(id);
    ThrowingCallable callable = () -> USER_SERVICE.getUser(id); // User deleted
    assertThatThrownBy(callable).isInstanceOf(UserNotFoundException.class);
  }

  @WithAccessId(user = "admin")
  @Test
  void should_ThrowUserNotFoundException_When_TryingToDeleteUserWithNonExistingId() {
    ThrowingCallable callable = () -> USER_SERVICE.deleteUser("NOT_EXISTING");
    assertThatThrownBy(callable)
        .isInstanceOf(UserNotFoundException.class)
        .hasMessage("User with id 'NOT_EXISTING' was not found.");
  }

  @WithAccessId(user = "user-1-2")
  @Test
  void should_ThrowNotAuthorizedException_When_TryingToDeleteUserWithNoAdminRole() {
    ThrowingCallable callable = () -> USER_SERVICE.deleteUser("teamlead-1");
    assertThatThrownBy(callable)
        .isInstanceOf(MismatchedRoleException.class)
        .hasMessage(
            "Not authorized. The current user 'user-1-2' is not member of role(s) "
                + "'[BUSINESS_ADMIN, ADMIN]'.");
  }
}
| |
package edu.hm.cs.projektstudium.findlunch.webapp.controller;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import com.google.maps.GeoApiContext;
import com.google.maps.GeocodingApi;
import com.google.maps.GeocodingApiRequest;
import com.google.maps.model.GeocodingResult;
import edu.hm.cs.projektstudium.findlunch.webapp.logging.LogUtils;
import edu.hm.cs.projektstudium.findlunch.webapp.model.DayOfWeek;
import edu.hm.cs.projektstudium.findlunch.webapp.model.OpeningTime;
import edu.hm.cs.projektstudium.findlunch.webapp.model.Restaurant;
import edu.hm.cs.projektstudium.findlunch.webapp.model.RestaurantType;
import edu.hm.cs.projektstudium.findlunch.webapp.model.TimeSchedule;
import edu.hm.cs.projektstudium.findlunch.webapp.model.User;
import edu.hm.cs.projektstudium.findlunch.webapp.model.validation.CustomRestaurantValidator;
import edu.hm.cs.projektstudium.findlunch.webapp.repositories.CountryRepository;
import edu.hm.cs.projektstudium.findlunch.webapp.repositories.DayOfWeekRepository;
import edu.hm.cs.projektstudium.findlunch.webapp.repositories.KitchenTypeRepository;
import edu.hm.cs.projektstudium.findlunch.webapp.repositories.RestaurantRepository;
import edu.hm.cs.projektstudium.findlunch.webapp.repositories.RestaurantTypeRepository;
import edu.hm.cs.projektstudium.findlunch.webapp.repositories.UserRepository;
import edu.hm.cs.projektstudium.findlunch.webapp.security.RestaurantUserDetailsService;
/**
* The class is responsible for handling http calls related to the process of adding a restaurant.
*/
@Controller
public class RestaurantController {
/** The restaurant repository. */
@Autowired
private RestaurantRepository restaurantRepository;
/** The country repository. */
@Autowired
private CountryRepository countryRepository;
/** The kitchen type repository. */
@Autowired
private KitchenTypeRepository kitchenTypeRepository;
/** The restaurant type repository. */
@Autowired
private RestaurantTypeRepository restaurantTypeRepository;
/** The day of week repository. */
@Autowired
private DayOfWeekRepository dayOfWeekRepository;
/** The user repository. */
@Autowired
private UserRepository userRepository;
/** The custom user details service for restaurant users. Used to refresh the SecurityContextHolder after a restaurant is added */
@Autowired
private RestaurantUserDetailsService customUserDetailsService;
/** The custom restaurant validator. Handled enhanced checks not handled by the hibernate annotation */
@Autowired
private CustomRestaurantValidator customRestaurantValidator;
/** The message source. */
@Autowired
private MessageSource messageSource;
/** The logger. SLF4J convention: one static final logger per class instead of one per instance. */
private static final Logger LOGGER = LoggerFactory.getLogger(RestaurantController.class);
/**
 * Shows the page for adding a new restaurant.
 *
 * @param model the model in which the objects to be rendered are placed
 * @param principal the currently logged in user
 * @param request the HttpServletRequest
 * @return the name of the corresponding HTML page
 */
@RequestMapping(path = { "/restaurant/add" }, method = RequestMethod.GET)
public String addRestaurant(Model model, Principal principal, HttpServletRequest request) {
LOGGER.info(LogUtils.getInfoStringWithParameterList(request, Thread.currentThread().getStackTrace()[1].getMethodName()));
User authenticatedUser = (User) ((Authentication) principal).getPrincipal();
// A user may administrate at most one restaurant; bail out if one exists.
if (authenticatedUser.getAdministratedRestaurant() != null) {
LOGGER.error(LogUtils.getErrorMessage(request, Thread.currentThread().getStackTrace()[1].getMethodName(), "The user " + authenticatedUser.getUsername() + " already has a restaurant. Another restaurant cannot be added."));
return "redirect:/offer";
}
Restaurant newRestaurant = getNewRestaurant();
newRestaurant.setEmail(authenticatedUser.getUsername());
model.addAttribute("restaurant", newRestaurant);
model.addAttribute("kitchenTypes", kitchenTypeRepository.findAllByOrderByNameAsc());
model.addAttribute("restaurantTypes", getRestaurantTypes());
model.addAttribute("countries", countryRepository.findAll());
return "restaurant";
}
/**
 * Builds the selectable restaurant types: a "none" placeholder entry (id -1)
 * followed by all persisted types in alphabetical order.
 *
 * @return the restaurant types, placeholder first
 */
private List<RestaurantType> getRestaurantTypes() {
RestaurantType placeholder = new RestaurantType();
placeholder.setName("------");
placeholder.setId(-1);
List<RestaurantType> types = new ArrayList<>();
types.add(placeholder);
types.addAll(restaurantTypeRepository.findAllByOrderByNameAsc());
return types;
}
/**
 * Creates a fresh Restaurant pre-populated with one TimeSchedule entry per
 * day of the week.
 *
 * @return the new restaurant
 */
private Restaurant getNewRestaurant() {
Restaurant restaurant = new Restaurant();
ArrayList<TimeSchedule> schedules = new ArrayList<>();
for (DayOfWeek day : dayOfWeekRepository.findAll()) {
TimeSchedule schedule = new TimeSchedule();
schedule.setDayOfWeek(day);
schedule.setRestaurant(restaurant);
schedules.add(schedule);
}
restaurant.setTimeSchedules(schedules);
return restaurant;
}
/**
 * Gets the page for editing the restaurant administrated by the current user.
 * Redirects to the add page when the user has no restaurant yet.
 *
 * @param model Model in which necessary objects are placed to be displayed on the website.
 * @param principal Currently logged in user.
 * @param request the HttpServletRequest
 * @return the string for the corresponding HTML page
 */
@RequestMapping(path = { "/restaurant/edit" }, method = RequestMethod.GET)
public String editRestaurant(Model model, Principal principal, HttpServletRequest request) {
LOGGER.info(LogUtils.getInfoStringWithParameterList(request, Thread.currentThread().getStackTrace()[1].getMethodName()));
User authenticatedUser = (User)((Authentication) principal).getPrincipal();
// Guard 1: user has no administrated restaurant at all.
if(authenticatedUser.getAdministratedRestaurant() == null) {
LOGGER.error(LogUtils.getErrorMessage(request, Thread.currentThread().getStackTrace()[1].getMethodName(), "The user " + authenticatedUser.getUsername() + " has no restaurant. A restaurant has to be added before offers it can be edited."));
return "redirect:/restaurant/add";
}
Restaurant restaurant = restaurantRepository.findById(authenticatedUser.getAdministratedRestaurant().getId());
// Guard 2: the referenced restaurant is no longer present in the database.
if(restaurant == null) {
LOGGER.error(LogUtils.getErrorMessage(request, Thread.currentThread().getStackTrace()[1].getMethodName(), "The user " + authenticatedUser.getUsername() + " has no restaurant. A restaurant has to be added before offers it can be edited."));
return "redirect:/restaurant/add";
}
// Snapshot the persisted schedules before rebuilding the collection below.
List<TimeSchedule> existingTimes = new ArrayList<TimeSchedule>();
existingTimes.addAll(restaurant.getTimeSchedules());
// all elements have to be cleared and copied to a new list
// to avoid "no longer referenced by the owning entity
// instance"-exception
restaurant.getTimeSchedules().clear();
// Rebuild one TimeSchedule per day of week, copying over any persisted
// values so the edit form always shows a full week.
for (DayOfWeek day : dayOfWeekRepository.findAll()) {
TimeSchedule t = new TimeSchedule();
t.setDayOfWeek(day);
for (TimeSchedule ts : existingTimes) {
if (ts.getDayOfWeek().getId() == day.getId()) {
// copy values from the persisted schedule for this day
t.setOfferStartTime(ts.getOfferStartTime());
t.setOfferEndTime(ts.getOfferEndTime());
for (OpeningTime ot : ts.getOpeningTimes()) {
handleOpeningTime(ot, t);
}
break;
}
}
restaurant.addTimeSchedule(t);
}
model.addAttribute("restaurant", restaurant);
model.addAttribute("kitchenTypes", kitchenTypeRepository.findAllByOrderByNameAsc());
model.addAttribute("restaurantTypes", getRestaurantTypes());
model.addAttribute("countries", countryRepository.findAll());
return "restaurant";
}
/**
 * Copies an opening time into a fresh instance and attaches it to the given
 * time schedule, creating the schedule's opening-time list on first use.
 * A copy is required to avoid the "no longer referenced by the owning entity
 * instance"-exception.
 *
 * @param ot the OpeningTime to copy
 * @param t the TimeSchedule to attach the copy to
 */
private void handleOpeningTime(OpeningTime ot, TimeSchedule t) {
OpeningTime copy = new OpeningTime();
copy.setOpeningTime(ot.getOpeningTime());
copy.setClosingTime(ot.getClosingTime());
if (t.getOpeningTimes() == null) {
t.setOpeningTimes(new ArrayList<OpeningTime>());
}
t.addOpeningTime(copy);
}
/**
 * Adds a new (zeroed) opening time to one of the restaurant's time schedules.
 * The index of the target schedule is carried in the "addOpeningTime" request
 * parameter supplied by the form button.
 *
 * @param restaurant Restaurant object to be saved. Populated by the content of the html form field.
 * @param bindingResult Binding result in which errors for the fields are stored. Populated by hibernate validation annotation and custom validator classes.
 * @param model Model in which necessary objects are placed to be displayed on the website.
 * @param request the HttpServletRequest carrying the "addOpeningTime" parameter
 * @return the string for the corresponding HTML page
 */
@RequestMapping(path = { "/restaurant/add", "/restaurant/edit" }, params = { "addOpeningTime" })
public String addOpeningTime(final Restaurant restaurant, final BindingResult bindingResult, final Model model, final HttpServletRequest request) {
LOGGER.info(LogUtils.getInfoStringWithParameterList(request, Thread.currentThread().getStackTrace()[1].getMethodName()));
model.addAttribute("restaurant", restaurant);
model.addAttribute("kitchenTypes", kitchenTypeRepository.findAllByOrderByNameAsc());
model.addAttribute("restaurantTypes", getRestaurantTypes());
model.addAttribute("countries", countryRepository.findAll());
// NOTE(review): throws NumberFormatException if the parameter is missing or
// non-numeric — presumably guaranteed by the form markup; confirm.
final Integer timeScheduleId = Integer.valueOf(request.getParameter("addOpeningTime"));
TimeSchedule t = restaurant.getTimeSchedules().get(timeScheduleId);
if (t != null) {
if (t.getOpeningTimes() == null) {
// first OpeningTime for that day
// initialize list
t.setOpeningTimes(new ArrayList<OpeningTime>());
}
// add new OpeningTime, initialized to 00:00-00:00
OpeningTime o = new OpeningTime();
o.setOpeningTime(timeToDate(0, 0));
o.setClosingTime(timeToDate(0, 0));
o.setTimeSchedule(t);
t.addOpeningTime(o);
}
return "restaurant";
}
/**
 * Removes an opening time from one of the restaurant's time schedules. The
 * "removeOpeningTime" request parameter carries the target as
 * "&lt;timeScheduleIndex&gt;.&lt;openingTimeIndex&gt;".
 *
 * @param restaurant Restaurant object to be saved. Populated by the content of the html form field.
 * @param bindingResult Binding result in which errors for the fields are stored. Populated by hibernate validation annotation and custom validator classes.
 * @param model Model in which necessary objects are placed to be displayed on the website.
 * @param request the HttpServletRequest carrying the "removeOpeningTime" parameter
 * @return the string for the corresponding HTML page
 */
@RequestMapping(path = { "/restaurant/add", "/restaurant/edit" }, params = { "removeOpeningTime" })
public String removeOpeningTime(final Restaurant restaurant, final BindingResult bindingResult, final Model model, final HttpServletRequest request) {
LOGGER.info(LogUtils.getInfoStringWithParameterList(request, Thread.currentThread().getStackTrace()[1].getMethodName()));
model.addAttribute("kitchenTypes", kitchenTypeRepository.findAllByOrderByNameAsc());
model.addAttribute("restaurantTypes", getRestaurantTypes());
model.addAttribute("countries", countryRepository.findAll());
// get id of TimeSchedule and OpeningTime; the parameter is split on a
// literal "." (quoted because split() takes a regex).
final String ids = request.getParameter("removeOpeningTime");
String[] idParts = ids.split(Pattern.quote("."));
int timeScheduleId = Integer.valueOf(idParts[0]);
int openingTimeId = Integer.valueOf(idParts[1]);
TimeSchedule t = restaurant.getTimeSchedules().get(timeScheduleId);
if (t != null) {
// remove OpeningTime
OpeningTime toRemove = t.getOpeningTimes().get(openingTimeId);
t.removeOpeningTime(toRemove);
}
model.addAttribute("restaurant", restaurant);
return "restaurant";
}
/**
* Removes an offer time from the time schedule of the restaurant.
*
* @param request the HttpServletRequest
* @param restaurant
* Restaurant object to be saved. Populated by the content of the html form field.
* @param bindingResult
* Binding result in which errors for the fields are stored. Populated by hibernate validation annotation and custom validator classes.
* @param model
* Model in which necessary object are placed to be displayed on the website.
* @return the string for the corresponding HTML page
*/
@RequestMapping(path = { "/restaurant/add", "/restaurant/edit" }, params = { "removeOfferTime" })
public String removeOfferTime(final Restaurant restaurant, final BindingResult bindingResult, final Model model, final HttpServletRequest request) {
    LOGGER.info(LogUtils.getInfoStringWithParameterList(request, Thread.currentThread().getStackTrace()[1].getMethodName()));
    // Keep the form backing objects available for re-rendering.
    model.addAttribute("restaurant", restaurant);
    model.addAttribute("kitchenTypes", kitchenTypeRepository.findAllByOrderByNameAsc());
    model.addAttribute("restaurantTypes", getRestaurantTypes());
    model.addAttribute("countries", countryRepository.findAll());
    // The request parameter carries the index of the affected TimeSchedule.
    final int scheduleIndex = Integer.parseInt(request.getParameter("removeOfferTime"));
    final TimeSchedule schedule = restaurant.getTimeSchedules().get(scheduleIndex);
    if (schedule != null) {
        // Clearing both timestamps removes the offer time from the schedule.
        schedule.setOfferStartTime(null);
        schedule.setOfferEndTime(null);
    }
    return "restaurant";
}
/**
* Adds an offer time to the time schedule of the restaurant.
*
* @param request the HttpServletRequest
* @param restaurant
* Restaurant object to be saved. Populated by the content of the html form field.
* @param bindingResult
* Binding result in which errors for the fields are stored. Populated by hibernate validation annotation and custom validator classes.
* @param model
* Model in which necessary object are placed to be displayed on the website.
* @return the string for the corresponding HTML page
*/
@RequestMapping(path = { "/restaurant/add", "/restaurant/edit" }, params = { "addOfferTime" })
public String addOfferTime(final Restaurant restaurant, final BindingResult bindingResult, final Model model, final HttpServletRequest request) {
    // Use the parameterized log format for consistency with the other form-action handlers
    // (addOpeningTime, removeOpeningTime, removeOfferTime), which all log the parameter list.
    LOGGER.info(LogUtils.getInfoStringWithParameterList(request, Thread.currentThread().getStackTrace()[1].getMethodName()));
    model.addAttribute("restaurant", restaurant);
    model.addAttribute("kitchenTypes", kitchenTypeRepository.findAllByOrderByNameAsc());
    model.addAttribute("restaurantTypes", getRestaurantTypes());
    model.addAttribute("countries", countryRepository.findAll());
    // The request parameter carries the index of the target TimeSchedule.
    final Integer timeScheduleId = Integer.valueOf(request.getParameter("addOfferTime"));
    TimeSchedule t = restaurant.getTimeSchedules().get(timeScheduleId);
    if (t != null) {
        // Initialize both offer times to 00:00 so the form renders editable fields.
        t.setOfferStartTime(timeToDate(0, 0));
        t.setOfferEndTime(timeToDate(0, 0));
    }
    return "restaurant";
}
/**
* Save the restaurant to the database.
*
* @param request the HttpServletRequest
* @param restaurant
* Restaurant object to be saved. Populated by the content of the html form field.
* @param bindingResult
* Binding result in which errors for the fields are stored. Populated by hibernate validation annotation and custom validator classes.
* @param model
* Model in which necessary object are placed to be displayed on the website.
* @param principal
* Currently logged in user.
* @return the string for the corresponding HTML page
*/
@RequestMapping(method = RequestMethod.POST, path = { "/restaurant/add", "/restaurant/edit" }, params = { "saveRestaurant" })
public String saveRestaurant(@Valid final Restaurant restaurant, BindingResult bindingResult, final Model model, Principal principal, HttpServletRequest request) {
    LOGGER.info(LogUtils.getInfoStringWithParameterList(request, Thread.currentThread().getStackTrace()[1].getMethodName()));
    // Populate the model up front so any validation failure can re-render the form.
    model.addAttribute("restaurant", restaurant);
    model.addAttribute("kitchenTypes", kitchenTypeRepository.findAllByOrderByNameAsc());
    model.addAttribute("restaurantTypes", getRestaurantTypes());
    model.addAttribute("countries", countryRepository.findAll());
    // Resolve lat/lng via geocoding; a non-null result is a localized error message.
    String result = getLocationOfRestaurant(restaurant, request);
    if (result != null) {
        model.addAttribute("geocodingException", result);
        LOGGER.error(LogUtils.getErrorMessage(request, Thread.currentThread().getStackTrace()[1].getMethodName(), "The Location of the restaurant could not be retrieved."));
        return "restaurant";
    }
    // Checks not handled by Hibernate annotations
    customRestaurantValidator.validate(restaurant, bindingResult);
    if (bindingResult.hasErrors()) {
        LOGGER.error(LogUtils.getValidationErrorString(request, bindingResult, Thread.currentThread().getStackTrace()[1].getMethodName()));
        return "restaurant";
    }
    // Iterate backwards because handleTimeSchedule may remove entries from the list.
    for (int i = restaurant.getTimeSchedules().size() - 1; i >= 0; i--) {
        handleTimeSchedule(restaurant, i);
    }
    // An id of -1 appears to be the "no selection" sentinel from the type dropdown — TODO confirm against the form.
    if (restaurant.getRestaurantType() != null && restaurant.getRestaurantType().getId() == -1) {
        restaurant.setRestaurantType(null);
    }
    // Link the restaurant to the currently authenticated user (its admin) and persist.
    User authenticatedUser = (User)((Authentication) principal).getPrincipal();
    User u = userRepository.findOne(authenticatedUser.getId());
    u.setAdministratedRestaurant(restaurant);
    restaurant.addAdmin(u);
    restaurantRepository.save(restaurant);
    // Update UserDetails
    // Refresh the security context so the session reflects the newly administrated restaurant.
    User updatedUserDetails = customUserDetailsService.loadUserByUsername(authenticatedUser.getUsername());
    UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(updatedUserDetails, null, updatedUserDetails.getAuthorities());
    SecurityContextHolder.getContext().setAuthentication(token);
    return "redirect:/home?success";
}
/**
* Handles a time schedule from a restaurant. Removes TimeSchedules with no OfferTimes;
* Sets the restaurant for each valid TimeSchedule;
* Sets the reference (TimeSchedule) to each valid OpeningTime
*
* @param restaurant
* Restaurant for which the time schedules should be handled.
* @param i
* Index of the timeSchedule to be handled.
*/
/**
 * Handles a time schedule from a restaurant. Removes TimeSchedules with neither offer times nor
 * opening times; otherwise sets the restaurant back-reference on the TimeSchedule and the
 * TimeSchedule back-reference on each of its OpeningTimes.
 *
 * <p>Note: the previous implementation contained a dead branch that called {@code clear()} on a
 * list already known to be empty; it has been removed without behavior change.
 *
 * @param restaurant
 *            Restaurant for which the time schedules should be handled.
 * @param i
 *            Index of the timeSchedule to be handled.
 */
private void handleTimeSchedule(Restaurant restaurant, int i)
{
    TimeSchedule t = restaurant.getTimeSchedules().get(i);
    boolean hasOpeningTimes = t.getOpeningTimes() != null && !t.getOpeningTimes().isEmpty();
    if (t.getOfferStartTime() == null && t.getOfferEndTime() == null && !hasOpeningTimes) {
        // Completely empty entry: remove it from the restaurant.
        restaurant.removeTimeSchedule(t);
    } else {
        // Set the owning restaurant on the schedule.
        t.setRestaurant(restaurant);
        if (t.getOpeningTimes() != null) {
            // Set the owning TimeSchedule on every opening time.
            for (OpeningTime openingTime : t.getOpeningTimes()) {
                openingTime.setTimeSchedule(t);
            }
        }
    }
}
/**
* Cancel restaurant.
*
* @param request the HttpServletRequest
* @param model
* Model in which necessary object are placed to be displayed on the website.
* @return the string for the corresponding HTML page
*/
@RequestMapping(path = { "/restaurant/add", "/restaurant/edit" }, method = RequestMethod.POST, params = { "cancel" })
public String cancelRestaurant(Model model, HttpServletRequest request) {
    // Log the cancellation, then leave the form and return to the home page.
    final String methodName = Thread.currentThread().getStackTrace()[1].getMethodName();
    LOGGER.info(LogUtils.getCancelInfoString(request, methodName));
    return "redirect:/home";
}
/**
* Converts a given time to a Date object.
*
* @param hours the hours
* @param min the minutes
* @return the date
*/
/**
 * Converts a given time of day to a Date object anchored on a fixed dummy date.
 *
 * <p>Fix: {@link Calendar#set(int, int, int, int, int, int)} does not reset the MILLISECOND
 * field, so previously two calls with identical arguments could produce unequal Dates (the
 * milliseconds came from the wall clock). {@code cal.clear()} resets all fields first, making
 * the result deterministic.
 *
 * @param hours the hours
 * @param min the minutes
 * @return the date (fixed dummy day, the given hours/minutes, zero seconds and milliseconds)
 */
private Date timeToDate(int hours, int min) {
    Calendar cal = Calendar.getInstance();
    cal.clear();
    cal.set(2016, 1, 0, hours, min, 0);
    return cal.getTime();
}
/**
* Gets the location of restaurant using the Google Geocoding API.
*
* @param restaurant
* Restaurant from which to get the location.
* @param request
*            the HttpServletRequest, used for error logging
* @return {@code null} on success (the coordinates are written into the restaurant), otherwise a localized error message
*/
private String getLocationOfRestaurant(Restaurant restaurant, HttpServletRequest request) {
    // SECURITY NOTE(review): the Geocoding API key is hard-coded in source control. It should be
    // externalized to configuration and the committed key revoked.
    // Replace the API key below with a valid API key.
    GeoApiContext context = new GeoApiContext().setApiKey("AIzaSyAvO9bl1Yi2hn7mkTSniv5lXaPRii1JxjI");
    // Geocode the address as "<number> <street>, <zip> <city>".
    GeocodingApiRequest req = GeocodingApi.newRequest(context).address(String.format("%1$s %2$s, %3$s %4$s", restaurant.getStreetNumber(), restaurant.getStreet(), restaurant.getZip(), restaurant.getCity()));
    try {
        GeocodingResult[] result = req.await();
        if (result != null && result.length > 0) {
            // Handle successful request.
            GeocodingResult firstMatch = result[0];
            if (firstMatch.geometry != null && firstMatch.geometry.location != null) {
                // Store the resolved coordinates on the restaurant (lat/lng narrowed to float).
                restaurant.setLocationLatitude((float) firstMatch.geometry.location.lat);
                restaurant.setLocationLongitude((float) firstMatch.geometry.location.lng);
            } else {
                // A result was returned but carries no usable geometry.
                return messageSource.getMessage("restaurant.addressNotResolveable", null, Locale.getDefault());
            }
        } else {
            // The API returned no match for the address at all.
            return messageSource.getMessage("restaurant.addressNotFound", null, Locale.getDefault());
        }
    } catch (Exception e) {
        // Any API/transport failure is logged and surfaced as a localized message.
        LOGGER.error(LogUtils.getExceptionMessage(request, Thread.currentThread().getStackTrace()[1].getMethodName(), e));
        return messageSource.getMessage("restaurant.googleApiError", new String[] { e.getMessage() }, Locale.getDefault());
    }
    // null signals success to the caller (saveRestaurant).
    return null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache30;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.query.Index;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.internal.QueryObserverAdapter;
import org.apache.geode.cache.query.internal.QueryObserverHolder;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.compression.SnappyCompressor;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.xmlcache.CacheCreation;
import org.apache.geode.internal.cache.xmlcache.CacheXml;
import org.apache.geode.internal.cache.xmlcache.CacheXmlGenerator;
import org.apache.geode.internal.cache.xmlcache.DiskStoreAttributesCreation;
import org.apache.geode.internal.cache.xmlcache.RegionAttributesCreation;
@SuppressWarnings("serial")
public class CacheXml80DUnitTest extends CacheXml70DUnitTest {

  /** The cache.xml schema version exercised by this test class. */
  @Override
  protected String getGemFireVersion() {
    return CacheXml.VERSION_8_0;
  }

  /**
   * Verifies that a region compressor survives the cache.xml round trip: a region configured with
   * the Snappy compressor is regenerated from XML reporting the same compressor.
   */
  @Test
  public void testCompressor() throws Exception {
    final String regionName = "testCompressor";
    final CacheCreation cache = new CacheCreation();
    final RegionAttributesCreation attrs = new RegionAttributesCreation(cache);
    attrs.setCompressor(SnappyCompressor.getDefaultInstance());
    /* Region regionBefore = */ cache.createRegion(regionName, attrs);
    // Generates xml from the CacheCreation and recreates the cache from it.
    testXml(cache);
    final Cache c = getCache();
    assertNotNull(c);
    final Region regionAfter = c.getRegion(regionName);
    assertNotNull(regionAfter);
    assertTrue(
        SnappyCompressor.getDefaultInstance().equals(regionAfter.getAttributes().getCompressor()));
    regionAfter.localDestroyRegion();
  }

  /**
   * Tests xml creation for indexes. First creates 3 indexes (functional, hash, primary key) and
   * makes sure the cache creates all 3. Creates 2 more through the api and writes out the xml.
   * Restarts the cache with the new xml, makes sure the new cache has all 5 indexes, and runs one
   * query per index to verify each is actually consulted.
   */
  @Test
  public void testIndexXmlCreation() throws Exception {
    CacheCreation cache = new CacheCreation();
    RegionAttributesCreation attrs = new RegionAttributesCreation(cache);
    attrs.setScope(Scope.DISTRIBUTED_ACK);
    attrs.setDataPolicy(DataPolicy.REPLICATE);
    cache.createRegion("replicated", attrs);
    // Three indexes declared in the cache.xml itself.
    cache.getQueryService().createIndex("crIndex", "CR_ID", "/replicated");
    cache.getQueryService().createHashIndex("hashIndex", "HASH_ID", "/replicated");
    cache.getQueryService().createKeyIndex("primaryKeyIndex", "ID", "/replicated");
    testXml(cache);
    Cache c = getCache();
    assertNotNull(c);
    QueryService qs = c.getQueryService();
    Collection<Index> indexes = qs.getIndexes();
    assertEquals(3, indexes.size());
    // Two more indexes created through the API after the cache is up.
    c.getQueryService().createIndex("crIndex2", "r.CR_ID_2", "/replicated r");
    c.getQueryService().createIndex("rIndex", "r.R_ID", "/replicated r, r.positions.values rv");
    // Write the current cache configuration (now 5 indexes) out to a new xml file.
    File dir = new File(this.temporaryFolder.getRoot(), "XML_" + this.getGemFireVersion());
    dir.mkdirs();
    File file = new File(dir, "actual-" + getUniqueName() + ".xml");
    PrintWriter pw = new PrintWriter(new FileWriter(file), true);
    CacheXmlGenerator.generate(c, pw, getUseSchema(), getGemFireVersion());
    pw.close();
    // Get index info before closing cache.
    indexes = qs.getIndexes();
    c.close();
    // Restart the cache from the freshly generated xml.
    GemFireCacheImpl.testCacheXml = file;
    assertTrue(c.isClosed());
    c = getCache();
    qs = c.getQueryService();
    Collection<Index> newIndexes = qs.getIndexes();
    assertEquals(5, newIndexes.size());
    Region r = c.getRegion("/replicated");
    for (int i = 0; i < 5; i++) {
      r.put(i, new TestObject(i));
    }
    // Validate to see, newly created indexes match the initial configuration
    for (Index index : indexes) {
      Index newIndex = qs.getIndex(r, index.getName());
      assertEquals("Index from clause is not same for index " + index.getName(),
          newIndex.getFromClause(), index.getFromClause());
      assertEquals("Index expression is not same for index " + index.getName(),
          newIndex.getIndexedExpression(), index.getIndexedExpression());
    }
    // Run one query per index and confirm the expected index was consulted.
    QueryObserverImpl observer = new QueryObserverImpl();
    QueryObserverHolder.setInstance(observer);
    SelectResults results =
        (SelectResults) qs.newQuery("select * from /replicated r where r.ID = 1").execute();
    assertEquals(1, results.size());
    assertTrue(checkIndexUsed(observer, "primaryKeyIndex"));
    observer.reset();
    results =
        (SelectResults) qs.newQuery("select * from /replicated r where r.CR_ID = 1").execute();
    assertEquals(2, results.size());
    assertTrue(checkIndexUsed(observer, "crIndex"));
    observer.reset();
    results =
        (SelectResults) qs.newQuery("select * from /replicated r where r.CR_ID_2 = 1").execute();
    assertEquals(2, results.size());
    assertTrue(checkIndexUsed(observer, "crIndex2"));
    observer.reset();
    results = (SelectResults) qs
        .newQuery("select * from /replicated r, r.positions.values rv where r.R_ID > 1").execute();
    assertEquals(3, results.size());
    assertTrue(checkIndexUsed(observer, "rIndex"));
    observer.reset();
    results =
        (SelectResults) qs.newQuery("select * from /replicated r where r.HASH_ID = 1").execute();
    assertEquals(1, results.size());
    assertTrue(checkIndexUsed(observer, "hashIndex"));
    observer.reset();
  }

  /** Verifies a cache server with tcp-no-delay disabled round-trips through cache.xml. */
  @Test
  public void testCacheServerDisableTcpNoDelay() throws Exception {
    CacheCreation cache = new CacheCreation();
    CacheServer cs = cache.addCacheServer();
    cs.setPort(0);
    cs.setTcpNoDelay(false);
    RegionAttributesCreation attrs = new RegionAttributesCreation(cache);
    attrs.setDataPolicy(DataPolicy.NORMAL);
    cache.createVMRegion("rootNORMAL", attrs);
    testXml(cache);
  }

  /** Verifies a cache server with tcp-no-delay enabled round-trips through cache.xml. */
  @Test
  public void testCacheServerEnableTcpNoDelay() throws Exception {
    CacheCreation cache = new CacheCreation();
    CacheServer cs = cache.addCacheServer();
    cs.setPort(0);
    cs.setTcpNoDelay(true);
    RegionAttributesCreation attrs = new RegionAttributesCreation(cache);
    attrs.setDataPolicy(DataPolicy.NORMAL);
    cache.createVMRegion("rootNORMAL", attrs);
    testXml(cache);
  }

  /** Verifies disk-usage warning/critical percentages on a disk store round-trip. */
  @Test
  public void testDiskUsage() throws Exception {
    CacheCreation cache = new CacheCreation();
    DiskStoreAttributesCreation disk = new DiskStoreAttributesCreation();
    disk.setDiskUsageWarningPercentage(97);
    disk.setDiskUsageCriticalPercentage(98);
    disk.setName("mydisk");
    cache.addDiskStore(disk);
    RegionAttributesCreation attrs = new RegionAttributesCreation(cache);
    attrs.setDataPolicy(DataPolicy.PERSISTENT_REPLICATE);
    attrs.setDiskStoreName("mydisk");
    cache.createVMRegion("whatever", attrs);
    testXml(cache);
  }

  // NOTE(review): only the most recent index lookup is compared ('indexName' is overwritten on
  // each lookup); 'indexesUsed' records the full history but is not consulted here.
  private boolean checkIndexUsed(QueryObserverImpl observer, String indexName) {
    return observer.isIndexesUsed && observer.indexName.equals(indexName);
  }

  /** Query observer that records which indexes a query consulted. */
  private static class QueryObserverImpl extends QueryObserverAdapter {
    // True once any index lookup produced a non-null result collection.
    boolean isIndexesUsed = false;
    // Every index name looked up since the last reset(), in order.
    List<String> indexesUsed = new ArrayList<>();
    // Name of the most recently looked-up index.
    String indexName;

    @Override
    public void beforeIndexLookup(Index index, int oper, Object key) {
      indexName = index.getName();
      indexesUsed.add(index.getName());
    }

    @Override
    public void afterIndexLookup(Collection results) {
      if (results != null) {
        isIndexesUsed = true;
      }
    }

    /** Clears recorded lookups so the observer can be reused for the next query. */
    public void reset() {
      indexName = null;
      isIndexesUsed = false;
      indexesUsed.clear();
    }
  }

  /** Region value with public fields referenced by the OQL queries and indexes above. */
  private static class TestObject implements Serializable {
    public int CR_ID;
    public int CR_ID_2;
    public int R_ID;
    public int HASH_ID;
    public int ID;
    public Map positions;

    public TestObject(int ID) {
      this.ID = ID;
      // Two values share each CR_ID / CR_ID_2 bucket (ID % 2), matching the size-2 query results.
      CR_ID = ID % 2;
      CR_ID_2 = ID % 2;
      R_ID = ID;
      HASH_ID = ID;
      positions = new HashMap();
      positions.put(ID, "TEST_STRING");
    }

    public int ID() {
      return ID;
    }
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mediapackage.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* A HTTP Live Streaming (HLS) manifest configuration.
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/HlsManifestCreateOrUpdateParameters"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class HlsManifestCreateOrUpdateParameters implements Serializable, Cloneable, StructuredPojo {
/**
* This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all SCTE-35 ad
* markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad markers (comments)
* taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED" generates ad markers and
* blackout tags based on SCTE-35 messages in the input source. "DATERANGE" inserts EXT-X-DATERANGE tags to signal
* ad and program transition events in HLS and CMAF manifests. For this option, you must set a
* programDateTimeIntervalSeconds value that is greater than 0.
*/
private String adMarkers;
private java.util.List<String> adTriggers;
private String adsOnDeliveryRestrictions;
/**
* The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it is
* created.
*/
private String id;
/** When enabled, an I-Frame only stream will be included in the output. */
private Boolean includeIframeOnlyStream;
/**
* An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the
* manifestName for the OriginEndpoint.
*/
private String manifestName;
/**
* The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding
* EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
*/
private String playlistType;
/** Time window (in seconds) contained in each parent manifest. */
private Integer playlistWindowSeconds;
/**
* The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally, when an
* interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the ingest time of the
* content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME tags will be inserted
* into manifests and no ID3Timed Metadata messages will be generated. Note that irrespective of this parameter, if
* any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it will be passed through to HLS output.
*/
private Integer programDateTimeIntervalSeconds;
/**
* This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all SCTE-35 ad
* markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad markers (comments)
* taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED" generates ad markers and
* blackout tags based on SCTE-35 messages in the input source. "DATERANGE" inserts EXT-X-DATERANGE tags to signal
* ad and program transition events in HLS and CMAF manifests. For this option, you must set a
* programDateTimeIntervalSeconds value that is greater than 0.
*
* @param adMarkers
* This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all
* SCTE-35 ad markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad
* markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED"
* generates ad markers and blackout tags based on SCTE-35 messages in the input source. "DATERANGE" inserts
* EXT-X-DATERANGE tags to signal ad and program transition events in HLS and CMAF manifests. For this
* option, you must set a programDateTimeIntervalSeconds value that is greater than 0.
* @see AdMarkers
*/
public void setAdMarkers(String adMarkers) {
    // Stored as the raw string form of the AdMarkers enum value.
    this.adMarkers = adMarkers;
}
/**
* This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all SCTE-35 ad
* markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad markers (comments)
* taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED" generates ad markers and
* blackout tags based on SCTE-35 messages in the input source. "DATERANGE" inserts EXT-X-DATERANGE tags to signal
* ad and program transition events in HLS and CMAF manifests. For this option, you must set a
* programDateTimeIntervalSeconds value that is greater than 0.
*
* @return This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all
* SCTE-35 ad markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad
* markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED"
* generates ad markers and blackout tags based on SCTE-35 messages in the input source. "DATERANGE" inserts
* EXT-X-DATERANGE tags to signal ad and program transition events in HLS and CMAF manifests. For this
* option, you must set a programDateTimeIntervalSeconds value that is greater than 0.
* @see AdMarkers
*/
public String getAdMarkers() {
    // Raw string form of the AdMarkers enum value.
    return adMarkers;
}
/**
* This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all SCTE-35 ad
* markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad markers (comments)
* taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED" generates ad markers and
* blackout tags based on SCTE-35 messages in the input source. "DATERANGE" inserts EXT-X-DATERANGE tags to signal
* ad and program transition events in HLS and CMAF manifests. For this option, you must set a
* programDateTimeIntervalSeconds value that is greater than 0.
*
* @param adMarkers
* This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all
* SCTE-35 ad markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad
* markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED"
* generates ad markers and blackout tags based on SCTE-35 messages in the input source. "DATERANGE" inserts
* EXT-X-DATERANGE tags to signal ad and program transition events in HLS and CMAF manifests. For this
* option, you must set a programDateTimeIntervalSeconds value that is greater than 0.
* @return Returns a reference to this object so that method calls can be chained together.
* @see AdMarkers
*/
public HlsManifestCreateOrUpdateParameters withAdMarkers(String adMarkers) {
    // Fluent variant of setAdMarkers(String).
    this.adMarkers = adMarkers;
    return this;
}
/**
* This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all SCTE-35 ad
* markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad markers (comments)
* taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED" generates ad markers and
* blackout tags based on SCTE-35 messages in the input source. "DATERANGE" inserts EXT-X-DATERANGE tags to signal
* ad and program transition events in HLS and CMAF manifests. For this option, you must set a
* programDateTimeIntervalSeconds value that is greater than 0.
*
* @param adMarkers
* This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all
* SCTE-35 ad markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad
* markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED"
* generates ad markers and blackout tags based on SCTE-35 messages in the input source. "DATERANGE" inserts
* EXT-X-DATERANGE tags to signal ad and program transition events in HLS and CMAF manifests. For this
* option, you must set a programDateTimeIntervalSeconds value that is greater than 0.
* @return Returns a reference to this object so that method calls can be chained together.
* @see AdMarkers
*/
public HlsManifestCreateOrUpdateParameters withAdMarkers(AdMarkers adMarkers) {
    // Store the enum as its string form, like the String overload.
    setAdMarkers(adMarkers.toString());
    return this;
}
/**
* @return the list of ad trigger values, or {@code null} if none have been set
* @see AdTriggersElement
*/
public java.util.List<String> getAdTriggers() {
    // Returns the live internal list (no defensive copy), as elsewhere in this class.
    return this.adTriggers;
}
/**
* @param adTriggers the ad trigger values; the collection is copied, and {@code null} clears the field
* @see AdTriggersElement
*/
public void setAdTriggers(java.util.Collection<String> adTriggers) {
    // Defensive copy; null clears the field.
    this.adTriggers = (adTriggers == null) ? null : new java.util.ArrayList<String>(adTriggers);
}
/**
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setAdTriggers(java.util.Collection)} or {@link #withAdTriggers(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param adTriggers
* @return Returns a reference to this object so that method calls can be chained together.
* @see AdTriggersElement
*/
public HlsManifestCreateOrUpdateParameters withAdTriggers(String... adTriggers) {
    // Lazily create the backing list, then append all varargs values.
    if (this.adTriggers == null) {
        this.adTriggers = new java.util.ArrayList<String>(adTriggers.length);
    }
    java.util.Collections.addAll(this.adTriggers, adTriggers);
    return this;
}
/**
* @param adTriggers
* @return Returns a reference to this object so that method calls can be chained together.
* @see AdTriggersElement
*/
public HlsManifestCreateOrUpdateParameters withAdTriggers(java.util.Collection<String> adTriggers) {
    // Fluent variant of setAdTriggers: defensive copy, null clears the field.
    this.adTriggers = (adTriggers == null) ? null : new java.util.ArrayList<String>(adTriggers);
    return this;
}
/**
* @param adTriggers
* @return Returns a reference to this object so that method calls can be chained together.
* @see AdTriggersElement
*/
public HlsManifestCreateOrUpdateParameters withAdTriggers(AdTriggersElement... adTriggers) {
    // Convert each enum value to its string form before storing.
    java.util.ArrayList<String> asStrings = new java.util.ArrayList<String>(adTriggers.length);
    for (AdTriggersElement element : adTriggers) {
        asStrings.add(element.toString());
    }
    if (this.adTriggers == null) {
        this.adTriggers = asStrings;
    } else {
        this.adTriggers.addAll(asStrings);
    }
    return this;
}
/**
* @param adsOnDeliveryRestrictions
* @see AdsOnDeliveryRestrictions
*/
public void setAdsOnDeliveryRestrictions(String adsOnDeliveryRestrictions) {
    // Stored as the raw string form of the AdsOnDeliveryRestrictions enum value.
    this.adsOnDeliveryRestrictions = adsOnDeliveryRestrictions;
}
/**
* @return
* @see AdsOnDeliveryRestrictions
*/
public String getAdsOnDeliveryRestrictions() {
    // Raw string form of the AdsOnDeliveryRestrictions enum value.
    return adsOnDeliveryRestrictions;
}
/**
* @param adsOnDeliveryRestrictions
* @return Returns a reference to this object so that method calls can be chained together.
* @see AdsOnDeliveryRestrictions
*/
public HlsManifestCreateOrUpdateParameters withAdsOnDeliveryRestrictions(String adsOnDeliveryRestrictions) {
    // Fluent variant of setAdsOnDeliveryRestrictions(String).
    this.adsOnDeliveryRestrictions = adsOnDeliveryRestrictions;
    return this;
}
/**
* @param adsOnDeliveryRestrictions
* @return Returns a reference to this object so that method calls can be chained together.
* @see AdsOnDeliveryRestrictions
*/
public HlsManifestCreateOrUpdateParameters withAdsOnDeliveryRestrictions(AdsOnDeliveryRestrictions adsOnDeliveryRestrictions) {
    // Store the enum as its string form, like the String overload.
    setAdsOnDeliveryRestrictions(adsOnDeliveryRestrictions.toString());
    return this;
}
/**
* The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it is
* created.
*
* @param id
* The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it
* is created.
*/
public void setId(String id) {
    // Manifest ID: unique within the OriginEndpoint, immutable after creation (per class javadoc).
    this.id = id;
}
/**
* The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it is
* created.
*
* @return The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it
* is created.
*/
public String getId() {
    // Manifest ID, unique within the OriginEndpoint.
    return id;
}
/**
* The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it is
* created.
*
* @param id
* The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it
* is created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public HlsManifestCreateOrUpdateParameters withId(String id) {
    // Fluent variant of setId(String).
    this.id = id;
    return this;
}
/**
* When enabled, an I-Frame only stream will be included in the output.
*
* @param includeIframeOnlyStream
* When enabled, an I-Frame only stream will be included in the output.
*/
public void setIncludeIframeOnlyStream(Boolean includeIframeOnlyStream) {
    // When true, an I-Frame only stream is included in the output.
    this.includeIframeOnlyStream = includeIframeOnlyStream;
}
/**
* When enabled, an I-Frame only stream will be included in the output.
*
* @return When enabled, an I-Frame only stream will be included in the output.
*/
public Boolean getIncludeIframeOnlyStream() {
    // May be null when the flag has not been set.
    return includeIframeOnlyStream;
}
/**
* When enabled, an I-Frame only stream will be included in the output.
*
* @param includeIframeOnlyStream
* When enabled, an I-Frame only stream will be included in the output.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public HlsManifestCreateOrUpdateParameters withIncludeIframeOnlyStream(Boolean includeIframeOnlyStream) {
setIncludeIframeOnlyStream(includeIframeOnlyStream);
return this;
}
/**
* When enabled, an I-Frame only stream will be included in the output.
*
* @return When enabled, an I-Frame only stream will be included in the output.
*/
public Boolean isIncludeIframeOnlyStream() {
return this.includeIframeOnlyStream;
}
/**
* An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the
* manifestName for the OriginEndpoint.
*
* @param manifestName
* An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the
* manifestName for the OriginEndpoint.
*/
public void setManifestName(String manifestName) {
this.manifestName = manifestName;
}
/**
* An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the
* manifestName for the OriginEndpoint.
*
* @return An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the
* manifestName for the OriginEndpoint.
*/
public String getManifestName() {
return this.manifestName;
}
/**
* An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the
* manifestName for the OriginEndpoint.
*
* @param manifestName
* An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the
* manifestName for the OriginEndpoint.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public HlsManifestCreateOrUpdateParameters withManifestName(String manifestName) {
setManifestName(manifestName);
return this;
}
/**
* The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding
* EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
*
* @param playlistType
* The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding
* EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
* @see PlaylistType
*/
public void setPlaylistType(String playlistType) {
this.playlistType = playlistType;
}
/**
* The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding
* EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
*
* @return The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding
* EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
* @see PlaylistType
*/
public String getPlaylistType() {
return this.playlistType;
}
/**
* The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding
* EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
*
* @param playlistType
* The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding
* EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
* @return Returns a reference to this object so that method calls can be chained together.
* @see PlaylistType
*/
public HlsManifestCreateOrUpdateParameters withPlaylistType(String playlistType) {
setPlaylistType(playlistType);
return this;
}
/**
* The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding
* EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
*
* @param playlistType
* The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding
* EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
* @return Returns a reference to this object so that method calls can be chained together.
* @see PlaylistType
*/
public HlsManifestCreateOrUpdateParameters withPlaylistType(PlaylistType playlistType) {
this.playlistType = playlistType.toString();
return this;
}
/**
* Time window (in seconds) contained in each parent manifest.
*
* @param playlistWindowSeconds
* Time window (in seconds) contained in each parent manifest.
*/
public void setPlaylistWindowSeconds(Integer playlistWindowSeconds) {
this.playlistWindowSeconds = playlistWindowSeconds;
}
/**
* Time window (in seconds) contained in each parent manifest.
*
* @return Time window (in seconds) contained in each parent manifest.
*/
public Integer getPlaylistWindowSeconds() {
return this.playlistWindowSeconds;
}
/**
* Time window (in seconds) contained in each parent manifest.
*
* @param playlistWindowSeconds
* Time window (in seconds) contained in each parent manifest.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public HlsManifestCreateOrUpdateParameters withPlaylistWindowSeconds(Integer playlistWindowSeconds) {
setPlaylistWindowSeconds(playlistWindowSeconds);
return this;
}
/**
* The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally, when an
* interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the ingest time of the
* content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME tags will be inserted
* into manifests and no ID3Timed Metadata messages will be generated. Note that irrespective of this parameter, if
* any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it will be passed through to HLS output.
*
* @param programDateTimeIntervalSeconds
* The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally,
* when an interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the
* ingest time of the content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME
* tags will be inserted into manifests and no ID3Timed Metadata messages will be generated. Note that
* irrespective of this parameter, if any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it
* will be passed through to HLS output.
*/
public void setProgramDateTimeIntervalSeconds(Integer programDateTimeIntervalSeconds) {
this.programDateTimeIntervalSeconds = programDateTimeIntervalSeconds;
}
/**
* The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally, when an
* interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the ingest time of the
* content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME tags will be inserted
* into manifests and no ID3Timed Metadata messages will be generated. Note that irrespective of this parameter, if
* any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it will be passed through to HLS output.
*
* @return The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally,
* when an interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the
* ingest time of the content. If the interval is not specified, or set to 0, then no
* EXT-X-PROGRAM-DATE-TIME tags will be inserted into manifests and no ID3Timed Metadata messages will be
* generated. Note that irrespective of this parameter, if any ID3 Timed Metadata is found in HTTP Live
* Streaming (HLS) input, it will be passed through to HLS output.
*/
public Integer getProgramDateTimeIntervalSeconds() {
return this.programDateTimeIntervalSeconds;
}
/**
* The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally, when an
* interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the ingest time of the
* content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME tags will be inserted
* into manifests and no ID3Timed Metadata messages will be generated. Note that irrespective of this parameter, if
* any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it will be passed through to HLS output.
*
* @param programDateTimeIntervalSeconds
* The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally,
* when an interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the
* ingest time of the content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME
* tags will be inserted into manifests and no ID3Timed Metadata messages will be generated. Note that
* irrespective of this parameter, if any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it
* will be passed through to HLS output.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public HlsManifestCreateOrUpdateParameters withProgramDateTimeIntervalSeconds(Integer programDateTimeIntervalSeconds) {
setProgramDateTimeIntervalSeconds(programDateTimeIntervalSeconds);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getAdMarkers() != null)
sb.append("AdMarkers: ").append(getAdMarkers()).append(",");
if (getAdTriggers() != null)
sb.append("AdTriggers: ").append(getAdTriggers()).append(",");
if (getAdsOnDeliveryRestrictions() != null)
sb.append("AdsOnDeliveryRestrictions: ").append(getAdsOnDeliveryRestrictions()).append(",");
if (getId() != null)
sb.append("Id: ").append(getId()).append(",");
if (getIncludeIframeOnlyStream() != null)
sb.append("IncludeIframeOnlyStream: ").append(getIncludeIframeOnlyStream()).append(",");
if (getManifestName() != null)
sb.append("ManifestName: ").append(getManifestName()).append(",");
if (getPlaylistType() != null)
sb.append("PlaylistType: ").append(getPlaylistType()).append(",");
if (getPlaylistWindowSeconds() != null)
sb.append("PlaylistWindowSeconds: ").append(getPlaylistWindowSeconds()).append(",");
if (getProgramDateTimeIntervalSeconds() != null)
sb.append("ProgramDateTimeIntervalSeconds: ").append(getProgramDateTimeIntervalSeconds());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof HlsManifestCreateOrUpdateParameters == false)
return false;
HlsManifestCreateOrUpdateParameters other = (HlsManifestCreateOrUpdateParameters) obj;
if (other.getAdMarkers() == null ^ this.getAdMarkers() == null)
return false;
if (other.getAdMarkers() != null && other.getAdMarkers().equals(this.getAdMarkers()) == false)
return false;
if (other.getAdTriggers() == null ^ this.getAdTriggers() == null)
return false;
if (other.getAdTriggers() != null && other.getAdTriggers().equals(this.getAdTriggers()) == false)
return false;
if (other.getAdsOnDeliveryRestrictions() == null ^ this.getAdsOnDeliveryRestrictions() == null)
return false;
if (other.getAdsOnDeliveryRestrictions() != null && other.getAdsOnDeliveryRestrictions().equals(this.getAdsOnDeliveryRestrictions()) == false)
return false;
if (other.getId() == null ^ this.getId() == null)
return false;
if (other.getId() != null && other.getId().equals(this.getId()) == false)
return false;
if (other.getIncludeIframeOnlyStream() == null ^ this.getIncludeIframeOnlyStream() == null)
return false;
if (other.getIncludeIframeOnlyStream() != null && other.getIncludeIframeOnlyStream().equals(this.getIncludeIframeOnlyStream()) == false)
return false;
if (other.getManifestName() == null ^ this.getManifestName() == null)
return false;
if (other.getManifestName() != null && other.getManifestName().equals(this.getManifestName()) == false)
return false;
if (other.getPlaylistType() == null ^ this.getPlaylistType() == null)
return false;
if (other.getPlaylistType() != null && other.getPlaylistType().equals(this.getPlaylistType()) == false)
return false;
if (other.getPlaylistWindowSeconds() == null ^ this.getPlaylistWindowSeconds() == null)
return false;
if (other.getPlaylistWindowSeconds() != null && other.getPlaylistWindowSeconds().equals(this.getPlaylistWindowSeconds()) == false)
return false;
if (other.getProgramDateTimeIntervalSeconds() == null ^ this.getProgramDateTimeIntervalSeconds() == null)
return false;
if (other.getProgramDateTimeIntervalSeconds() != null
&& other.getProgramDateTimeIntervalSeconds().equals(this.getProgramDateTimeIntervalSeconds()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAdMarkers() == null) ? 0 : getAdMarkers().hashCode());
hashCode = prime * hashCode + ((getAdTriggers() == null) ? 0 : getAdTriggers().hashCode());
hashCode = prime * hashCode + ((getAdsOnDeliveryRestrictions() == null) ? 0 : getAdsOnDeliveryRestrictions().hashCode());
hashCode = prime * hashCode + ((getId() == null) ? 0 : getId().hashCode());
hashCode = prime * hashCode + ((getIncludeIframeOnlyStream() == null) ? 0 : getIncludeIframeOnlyStream().hashCode());
hashCode = prime * hashCode + ((getManifestName() == null) ? 0 : getManifestName().hashCode());
hashCode = prime * hashCode + ((getPlaylistType() == null) ? 0 : getPlaylistType().hashCode());
hashCode = prime * hashCode + ((getPlaylistWindowSeconds() == null) ? 0 : getPlaylistWindowSeconds().hashCode());
hashCode = prime * hashCode + ((getProgramDateTimeIntervalSeconds() == null) ? 0 : getProgramDateTimeIntervalSeconds().hashCode());
return hashCode;
}
    @Override
    public HlsManifestCreateOrUpdateParameters clone() {
        try {
            // Shallow copy is sufficient: all fields are immutable Strings/Integers/Booleans
            // except adTriggers, which callers should not mutate after construction.
            return (HlsManifestCreateOrUpdateParameters) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable in practice since the class implements Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        // Delegate wire-format serialization to the generated marshaller singleton for this model.
        com.amazonaws.services.mediapackage.model.transform.HlsManifestCreateOrUpdateParametersMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.ipc.ProtocolSignature;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.mapred.SortedRanges.Range;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
import org.apache.hadoop.mapreduce.util.MRJobConfUtil;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.TaskAttemptListener;
import org.apache.hadoop.mapreduce.v2.app.TaskHeartbeatHandler;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptFailEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent;
import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler;
import org.apache.hadoop.mapreduce.v2.app.rm.preemption.AMPreemptionPolicy;
import org.apache.hadoop.mapreduce.v2.app.security.authorize.MRAMPolicyProvider;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.util.StringInterner;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
/**
* This class is responsible for talking to the task umblical.
* It also converts all the old data structures
* to yarn data structures.
*
* This class HAS to be in this package to access package private
* methods/classes.
*/
public class TaskAttemptListenerImpl extends CompositeService 
    implements TaskUmbilicalProtocol, TaskAttemptListener {
  // Sentinel returned to JVMs the AM does not recognize; tells them to exit.
  private static final JvmTask TASK_FOR_INVALID_JVM = new JvmTask(null, true);
  private static final Logger LOG =
      LoggerFactory.getLogger(TaskAttemptListenerImpl.class);
  private AppContext context;
  // RPC server serving the TaskUmbilicalProtocol to task containers.
  private Server server;
  protected TaskHeartbeatHandler taskHeartbeatHandler;
  private RMHeartbeatHandler rmHeartbeatHandler;
  // Max age (ms) of the last RM heartbeat for which commits are still allowed.
  private long commitWindowMs;
  private InetSocketAddress address;
  // Maps a launched JVM to the single task it should run; entries are removed
  // once the task has been handed out via getTask().
  private ConcurrentMap<WrappedJvmID, org.apache.hadoop.mapred.Task>
    jvmIDToActiveAttemptMap
      = new ConcurrentHashMap<WrappedJvmID, org.apache.hadoop.mapred.Task>();
  // Holds the latest coalesced status update per attempt, drained elsewhere.
  private ConcurrentMap<TaskAttemptId,
      AtomicReference<TaskAttemptStatus>> attemptIdToStatus
        = new ConcurrentHashMap<>();
  /**
   * A Map to keep track of the history of logging each task attempt.
   */
  private ConcurrentHashMap<TaskAttemptID, TaskProgressLogPair>
      taskAttemptLogProgressStamps = new ConcurrentHashMap<>();
  // JVMs whose container launch the AM has registered; a JVM asking for a task
  // before appearing here is told to retry (null task).
  private Set<WrappedJvmID> launchedJVMs = Collections
      .newSetFromMap(new ConcurrentHashMap<WrappedJvmID, Boolean>());
  private JobTokenSecretManager jobTokenSecretManager = null;
  private AMPreemptionPolicy preemptionPolicy;
  // Optional key distributed to tasks for encrypting intermediate spill files.
  private byte[] encryptedSpillKey;
public TaskAttemptListenerImpl(AppContext context,
JobTokenSecretManager jobTokenSecretManager,
RMHeartbeatHandler rmHeartbeatHandler,
AMPreemptionPolicy preemptionPolicy) {
this(context, jobTokenSecretManager, rmHeartbeatHandler,
preemptionPolicy, null);
}
public TaskAttemptListenerImpl(AppContext context,
JobTokenSecretManager jobTokenSecretManager,
RMHeartbeatHandler rmHeartbeatHandler,
AMPreemptionPolicy preemptionPolicy, byte[] secretShuffleKey) {
super(TaskAttemptListenerImpl.class.getName());
this.context = context;
this.jobTokenSecretManager = jobTokenSecretManager;
this.rmHeartbeatHandler = rmHeartbeatHandler;
this.preemptionPolicy = preemptionPolicy;
this.encryptedSpillKey = secretShuffleKey;
}
@Override
protected void serviceInit(Configuration conf) throws Exception {
registerHeartbeatHandler(conf);
commitWindowMs = conf.getLong(MRJobConfig.MR_AM_COMMIT_WINDOW_MS,
MRJobConfig.DEFAULT_MR_AM_COMMIT_WINDOW_MS);
// initialize the delta threshold for logging the task progress.
MRJobConfUtil.setTaskLogProgressDeltaThresholds(conf);
super.serviceInit(conf);
}
@Override
protected void serviceStart() throws Exception {
startRpcServer();
super.serviceStart();
}
protected void registerHeartbeatHandler(Configuration conf) {
taskHeartbeatHandler = new TaskHeartbeatHandler(context.getEventHandler(),
context.getClock(), conf.getInt(MRJobConfig.MR_AM_TASK_LISTENER_THREAD_COUNT,
MRJobConfig.DEFAULT_MR_AM_TASK_LISTENER_THREAD_COUNT));
addService(taskHeartbeatHandler);
}
protected void startRpcServer() {
Configuration conf = getConfig();
try {
server = new RPC.Builder(conf).setProtocol(TaskUmbilicalProtocol.class)
.setInstance(this).setBindAddress("0.0.0.0")
.setPortRangeConfig(MRJobConfig.MR_AM_JOB_CLIENT_PORT_RANGE)
.setNumHandlers(
conf.getInt(MRJobConfig.MR_AM_TASK_LISTENER_THREAD_COUNT,
MRJobConfig.DEFAULT_MR_AM_TASK_LISTENER_THREAD_COUNT))
.setVerbose(false).setSecretManager(jobTokenSecretManager).build();
// Enable service authorization?
if (conf.getBoolean(
CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
false)) {
refreshServiceAcls(conf, new MRAMPolicyProvider());
}
server.start();
this.address = NetUtils.createSocketAddrForHost(
context.getNMHostname(),
server.getListenerAddress().getPort());
} catch (IOException e) {
throw new YarnRuntimeException(e);
}
}
  // Reloads the service-level ACLs on the umbilical RPC server; package-private
  // for tests.
  void refreshServiceAcls(Configuration configuration,
      PolicyProvider policyProvider) {
    this.server.refreshServiceAcl(configuration, policyProvider);
  }
@Override
protected void serviceStop() throws Exception {
stopRpcServer();
super.serviceStop();
}
protected void stopRpcServer() {
if (server != null) {
server.stop();
}
}
  @Override
  public InetSocketAddress getAddress() {
    // Host/port the umbilical RPC server is reachable at; set in startRpcServer().
    return address;
  }
/**
* Child checking whether it can commit.
*
* <br>
* Commit is a two-phased protocol. First the attempt informs the
* ApplicationMaster that it is
* {@link #commitPending(TaskAttemptID, TaskStatus)}. Then it repeatedly polls
* the ApplicationMaster whether it {@link #canCommit(TaskAttemptID)} This is
* a legacy from the centralized commit protocol handling by the JobTracker.
*/
@Override
public boolean canCommit(TaskAttemptID taskAttemptID) throws IOException {
LOG.info("Commit go/no-go request from " + taskAttemptID.toString());
// An attempt is asking if it can commit its output. This can be decided
// only by the task which is managing the multiple attempts. So redirect the
// request there.
org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
TypeConverter.toYarn(taskAttemptID);
taskHeartbeatHandler.progressing(attemptID);
// tell task to retry later if AM has not heard from RM within the commit
// window to help avoid double-committing in a split-brain situation
long now = context.getClock().getTime();
if (now - rmHeartbeatHandler.getLastHeartbeatTime() > commitWindowMs) {
return false;
}
Job job = context.getJob(attemptID.getTaskId().getJobId());
Task task = job.getTask(attemptID.getTaskId());
return task.canCommit(attemptID);
}
/**
* TaskAttempt is reporting that it is in commit_pending and it is waiting for
* the commit Response
*
* <br>
* Commit it a two-phased protocol. First the attempt informs the
* ApplicationMaster that it is
* {@link #commitPending(TaskAttemptID, TaskStatus)}. Then it repeatedly polls
* the ApplicationMaster whether it {@link #canCommit(TaskAttemptID)} This is
* a legacy from the centralized commit protocol handling by the JobTracker.
*/
@Override
public void commitPending(TaskAttemptID taskAttemptID, TaskStatus taskStatsu)
throws IOException, InterruptedException {
LOG.info("Commit-pending state update from " + taskAttemptID.toString());
// An attempt is asking if it can commit its output. This can be decided
// only by the task which is managing the multiple attempts. So redirect the
// request there.
org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
TypeConverter.toYarn(taskAttemptID);
taskHeartbeatHandler.progressing(attemptID);
//Ignorable TaskStatus? - since a task will send a LastStatusUpdate
context.getEventHandler().handle(
new TaskAttemptEvent(attemptID,
TaskAttemptEventType.TA_COMMIT_PENDING));
}
@Override
public void preempted(TaskAttemptID taskAttemptID, TaskStatus taskStatus)
throws IOException, InterruptedException {
LOG.info("Preempted state update from " + taskAttemptID.toString());
// An attempt is telling us that it got preempted.
org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
TypeConverter.toYarn(taskAttemptID);
preemptionPolicy.reportSuccessfulPreemption(attemptID);
taskHeartbeatHandler.progressing(attemptID);
context.getEventHandler().handle(
new TaskAttemptEvent(attemptID,
TaskAttemptEventType.TA_PREEMPTED));
}
@Override
public void done(TaskAttemptID taskAttemptID) throws IOException {
LOG.info("Done acknowledgment from " + taskAttemptID.toString());
org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
TypeConverter.toYarn(taskAttemptID);
taskHeartbeatHandler.progressing(attemptID);
context.getEventHandler().handle(
new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_DONE));
}
@Override
public void fatalError(TaskAttemptID taskAttemptID, String msg, boolean fastFail)
throws IOException {
// This happens only in Child and in the Task.
LOG.error("Task: " + taskAttemptID + " - exited : " + msg);
reportDiagnosticInfo(taskAttemptID, "Error: " + msg);
org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
TypeConverter.toYarn(taskAttemptID);
// handling checkpoints
preemptionPolicy.handleFailedContainer(attemptID);
context.getEventHandler().handle(
new TaskAttemptFailEvent(attemptID, fastFail));
}
@Override
public void fsError(TaskAttemptID taskAttemptID, String message)
throws IOException {
// This happens only in Child.
LOG.error("Task: " + taskAttemptID + " - failed due to FSError: "
+ message);
reportDiagnosticInfo(taskAttemptID, "FSError: " + message);
org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
TypeConverter.toYarn(taskAttemptID);
// handling checkpoints
preemptionPolicy.handleFailedContainer(attemptID);
context.getEventHandler().handle(
new TaskAttemptFailEvent(attemptID));
}
  // Intentionally a no-op.
  @Override
  public void shuffleError(TaskAttemptID taskAttemptID, String message) throws IOException {
    // TODO: This isn't really used in any MR code. Ask for removal.
  }
@Override
public MapTaskCompletionEventsUpdate getMapCompletionEvents(
JobID jobIdentifier, int startIndex, int maxEvents,
TaskAttemptID taskAttemptID) throws IOException {
LOG.info("MapCompletionEvents request from " + taskAttemptID.toString()
+ ". startIndex " + startIndex + " maxEvents " + maxEvents);
// TODO: shouldReset is never used. See TT. Ask for Removal.
boolean shouldReset = false;
org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
TypeConverter.toYarn(taskAttemptID);
TaskCompletionEvent[] events =
context.getJob(attemptID.getTaskId().getJobId()).getMapAttemptCompletionEvents(
startIndex, maxEvents);
taskHeartbeatHandler.progressing(attemptID);
return new MapTaskCompletionEventsUpdate(events, shouldReset);
}
@Override
public void reportDiagnosticInfo(TaskAttemptID taskAttemptID, String diagnosticInfo)
throws IOException {
diagnosticInfo = StringInterner.weakIntern(diagnosticInfo);
LOG.info("Diagnostics report from " + taskAttemptID.toString() + ": "
+ diagnosticInfo);
org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID =
TypeConverter.toYarn(taskAttemptID);
taskHeartbeatHandler.progressing(attemptID);
// This is mainly used for cases where we want to propagate exception traces
// of tasks that fail.
// This call exists as a hadoop mapreduce legacy wherein all changes in
// counters/progress/phase/output-size are reported through statusUpdate()
// call but not diagnosticInformation.
context.getEventHandler().handle(
new TaskAttemptDiagnosticsUpdateEvent(attemptID, diagnosticInfo));
}
  /**
   * Handles a periodic status report (or a plain ping, when {@code taskStatus}
   * is null) from a task attempt. Converts the old-API status into the YARN
   * {@link TaskAttemptStatus} form and coalesces it for later dispatch. The
   * returned {@link AMFeedback} tells the attempt whether it is still known to
   * the AM and whether it should preempt itself.
   */
  @Override
  public AMFeedback statusUpdate(TaskAttemptID taskAttemptID,
      TaskStatus taskStatus) throws IOException, InterruptedException {
    org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId yarnAttemptID =
        TypeConverter.toYarn(taskAttemptID);
    AMFeedback feedback = new AMFeedback();
    feedback.setTaskFound(true);
    AtomicReference<TaskAttemptStatus> lastStatusRef =
        attemptIdToStatus.get(yarnAttemptID);
    if (lastStatusRef == null) {
      // The task is not known, but it could be in the process of tearing
      // down gracefully or receiving a thread dump signal. Tolerate unknown
      // tasks as long as they have unregistered recently.
      if (!taskHeartbeatHandler.hasRecentlyUnregistered(yarnAttemptID)) {
        LOG.error("Status update was called with illegal TaskAttemptId: "
            + yarnAttemptID);
        feedback.setTaskFound(false);
      }
      return feedback;
    }
    // Propagating preemption to the task if TASK_PREEMPTION is enabled
    if (getConfig().getBoolean(MRJobConfig.TASK_PREEMPTION, false)
        && preemptionPolicy.isPreempted(yarnAttemptID)) {
      feedback.setPreemption(true);
      LOG.info("Setting preemption bit for task: "+ yarnAttemptID
          + " of type " + yarnAttemptID.getTaskId().getTaskType());
    }
    if (taskStatus == null) {
      //We are using statusUpdate only as a simple ping
      if (LOG.isDebugEnabled()) {
        LOG.debug("Ping from " + taskAttemptID.toString());
      }
      return feedback;
    }
    // if we are here there is an actual status update to be processed
    taskHeartbeatHandler.progressing(yarnAttemptID);
    TaskAttemptStatus taskAttemptStatus =
        new TaskAttemptStatus();
    taskAttemptStatus.id = yarnAttemptID;
    // Task sends the updated progress to the TT.
    taskAttemptStatus.progress = taskStatus.getProgress();
    // log the new progress
    // (rate-limited per attempt by TaskProgressLogPair's delta threshold)
    taskAttemptLogProgressStamps.computeIfAbsent(taskAttemptID,
        k -> new TaskProgressLogPair(taskAttemptID))
        .update(taskStatus.getProgress());
    // Task sends the updated state-string to the TT.
    taskAttemptStatus.stateString = taskStatus.getStateString();
    // Task sends the updated phase to the TT.
    taskAttemptStatus.phase = TypeConverter.toYarn(taskStatus.getPhase());
    // Counters are updated by the task. Convert counters into new format as
    // that is the primary storage format inside the AM to avoid multiple
    // conversions and unnecessary heap usage.
    taskAttemptStatus.counters = new org.apache.hadoop.mapreduce.Counters(
      taskStatus.getCounters());
    // Map Finish time set by the task (map only)
    if (taskStatus.getIsMap() && taskStatus.getMapFinishTime() != 0) {
      taskAttemptStatus.mapFinishTime = taskStatus.getMapFinishTime();
    }
    // Shuffle Finish time set by the task (reduce only).
    if (!taskStatus.getIsMap() && taskStatus.getShuffleFinishTime() != 0) {
      taskAttemptStatus.shuffleFinishTime = taskStatus.getShuffleFinishTime();
    }
    // Sort finish time set by the task (reduce only).
    if (!taskStatus.getIsMap() && taskStatus.getSortFinishTime() != 0) {
      taskAttemptStatus.sortFinishTime = taskStatus.getSortFinishTime();
    }
    // Not Setting the task state. Used by speculation - will be set in TaskAttemptImpl
    //taskAttemptStatus.taskState =  TypeConverter.toYarn(taskStatus.getRunState());
    //set the fetch failures
    if (taskStatus.getFetchFailedMaps() != null 
        && taskStatus.getFetchFailedMaps().size() > 0) {
      taskAttemptStatus.fetchFailedMaps =
        new ArrayList<org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId>();
      for (TaskAttemptID failedMapId : taskStatus.getFetchFailedMaps()) {
        taskAttemptStatus.fetchFailedMaps.add(
            TypeConverter.toYarn(failedMapId));
      }
    }
 // Task sends the information about the nextRecordRange to the TT
    
//    TODO: The following are not needed here, but needed to be set somewhere inside AppMaster.
//    taskStatus.getRunState(); // Set by the TT/JT. Transform into a state TODO
//    taskStatus.getStartTime(); // Used to be set by the TaskTracker. This should be set by getTask().
//    taskStatus.getFinishTime(); // Used to be set by TT/JT. Should be set when task finishes
//    // This was used by TT to do counter updates only once every minute. So this
//    // isn't ever changed by the Task itself.
//    taskStatus.getIncludeCounters();
    coalesceStatusUpdate(yarnAttemptID, taskAttemptStatus, lastStatusRef);
    return feedback;
  }
  @Override
  public long getProtocolVersion(String arg0, long arg1) throws IOException {
    // Single fixed version for the umbilical protocol; arguments are unused.
    return TaskUmbilicalProtocol.versionID;
  }
  /**
   * Unsupported in the YARN AM; always throws {@link IOException}.
   */
  @Override
  public void reportNextRecordRange(TaskAttemptID taskAttemptID, Range range)
      throws IOException {
    // This is used when the feature of skipping records is enabled.
    // This call exists as a hadoop mapreduce legacy wherein all changes in
    // counters/progress/phase/output-size are reported through statusUpdate()
    // call but not the next record range information.
    throw new IOException("Not yet implemented.");
  }
/**
 * Hands a task to an asking child JVM, loosely mirroring old TaskTracker
 * behavior. Returns the kill-marker task for unknown JVMs, {@code null}
 * when the JVM asks before its launch was registered, and otherwise the
 * pending task — removed from the map, since a JVM is given at most one
 * task and a further request should tell it to exit.
 */
@Override
public JvmTask getTask(JvmContext context) throws IOException {
// A rough imitation of code from TaskTracker.
JVMId jvmId = context.jvmId;
LOG.info("JVM with ID : " + jvmId + " asked for a task");
JvmTask jvmTask = null;
// TODO: Is it an authorized container to get a task? Otherwise return null.
// TODO: Child.java's firstTaskID isn't really firstTaskID. Ask for update
// to jobId and task-type.
WrappedJvmID wJvmID = new WrappedJvmID(jvmId.getJobId(), jvmId.isMap,
jvmId.getId());
// Try to look up the task. We remove it directly as we don't give
// multiple tasks to a JVM
// NOTE: the map is checked BEFORE launchedJVMs; unregister() relies on
// this ordering to stay race-free.
if (!jvmIDToActiveAttemptMap.containsKey(wJvmID)) {
LOG.info("JVM with ID: " + jvmId + " is invalid and will be killed.");
jvmTask = TASK_FOR_INVALID_JVM;
} else {
if (!launchedJVMs.contains(wJvmID)) {
jvmTask = null;
LOG.info("JVM with ID: " + jvmId
+ " asking for task before AM launch registered. Given null task");
} else {
// remove the task as it is no more needed and free up the memory.
// Also we have already told the JVM to process a task, so it is no
// longer pending, and further request should ask it to exit.
org.apache.hadoop.mapred.Task task =
jvmIDToActiveAttemptMap.remove(wJvmID);
launchedJVMs.remove(wJvmID);
LOG.info("JVM with ID: " + jvmId + " given task: " + task.getTaskID());
// Propagate the key used to encrypt intermediate spill files.
task.setEncryptedSpillKey(encryptedSpillKey);
jvmTask = new JvmTask(task, false);
}
}
return jvmTask;
}
/**
 * Records a task as pending for the given JVM so that a later
 * {@link #getTask} call from that JVM can be answered. A JVM with no
 * entry in this map is treated as illegal by getTask().
 */
@Override
public void registerPendingTask(
org.apache.hadoop.mapred.Task task, WrappedJvmID jvmID) {
// Create the mapping so that it is easy to look up
// when the jvm comes back to ask for Task.
// A JVM not present in this map is an illegal task/JVM.
jvmIDToActiveAttemptMap.put(jvmID, task);
}
/**
 * Marks the JVM as launched (the NM has been asked to start it), starts
 * heartbeat tracking for the attempt, and creates the per-attempt status
 * slot used by coalesceStatusUpdate().
 */
@Override
public void registerLaunchedTask(
org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID,
WrappedJvmID jvmId) {
// The AM considers the task to be launched (Has asked the NM to launch it)
// The JVM will only be given a task after this registration.
launchedJVMs.add(jvmId);
taskHeartbeatHandler.register(attemptID);
attemptIdToStatus.put(attemptID, new AtomicReference<>());
}
/**
 * Tears down all bookkeeping for an attempt once its TaskAttempt
 * unregisters: launch/pending maps, heartbeat tracking, and the status
 * slot. The removal order (launchedJVMs before jvmIDToActiveAttemptMap)
 * is deliberate — it mirrors the reversed check order in getTask() so the
 * two can race safely.
 */
@Override
public void unregister(
org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID,
WrappedJvmID jvmID) {
// Unregistration also comes from the same TaskAttempt which does the
// registration. Events are ordered at TaskAttempt, so unregistration will
// always come after registration.
// Remove from launchedJVMs before jvmIDToActiveAttemptMap to avoid
// synchronization issue with getTask(). getTask should be checking
// jvmIDToActiveAttemptMap before it checks launchedJVMs.
// remove the mappings if not already removed
launchedJVMs.remove(jvmID);
jvmIDToActiveAttemptMap.remove(jvmID);
//unregister this attempt
taskHeartbeatHandler.unregister(attemptID);
attemptIdToStatus.remove(attemptID);
}
/**
 * Returns the signature of this protocol implementation, delegating to
 * the standard {@link ProtocolSignature} helper.
 */
@Override
public ProtocolSignature getProtocolSignature(String protocol,
long clientVersion, int clientMethodsHash) throws IOException {
return ProtocolSignature.getProtocolSignature(this,
protocol, clientVersion, clientMethodsHash);
}
// Task checkpoint bookkeeping: the preemption policy owns checkpoint
// state, keyed by the YARN task id.
/** Looks up the checkpoint id recorded for the given task. */
@Override
public TaskCheckpointID getCheckpointID(TaskID taskId) {
  return preemptionPolicy.getCheckpointID(TypeConverter.toYarn(taskId));
}
/** Stores the checkpoint id for the given task with the preemption policy. */
@Override
public void setCheckpointID(TaskID taskId, TaskCheckpointID cid) {
  preemptionPolicy.setCheckpointID(TypeConverter.toYarn(taskId), cid);
}
/**
 * Publishes a new status into the attempt's AtomicReference slot,
 * retrying with compare-and-set on contention. Any fetch-failed-map list
 * still sitting in the slot from an unconsumed previous update is merged
 * in so no failure reports are lost. If the slot was empty
 * ({@code lastStatus == null}) the dispatcher is not currently scheduled
 * for this attempt, so an event is fired to make it consume the slot.
 */
private void coalesceStatusUpdate(TaskAttemptId yarnAttemptID,
TaskAttemptStatus taskAttemptStatus,
AtomicReference<TaskAttemptStatus> lastStatusRef) {
// Keep the original list so it can be restored if the CAS loses a race.
List<TaskAttemptId> fetchFailedMaps = taskAttemptStatus.fetchFailedMaps;
TaskAttemptStatus lastStatus = null;
boolean done = false;
while (!done) {
lastStatus = lastStatusRef.get();
if (lastStatus != null && lastStatus.fetchFailedMaps != null) {
// merge fetchFailedMaps from the previous update
if (taskAttemptStatus.fetchFailedMaps == null) {
taskAttemptStatus.fetchFailedMaps = lastStatus.fetchFailedMaps;
} else {
taskAttemptStatus.fetchFailedMaps =
new ArrayList<>(lastStatus.fetchFailedMaps.size() +
fetchFailedMaps.size());
taskAttemptStatus.fetchFailedMaps.addAll(
lastStatus.fetchFailedMaps);
taskAttemptStatus.fetchFailedMaps.addAll(
fetchFailedMaps);
}
}
// lastStatusRef may be changed by either the AsyncDispatcher when
// it processes the update, or by another IPC server handler
done = lastStatusRef.compareAndSet(lastStatus, taskAttemptStatus);
if (!done) {
LOG.info("TaskAttempt " + yarnAttemptID +
": lastStatusRef changed by another thread, retrying...");
// let's revert taskAttemptStatus.fetchFailedMaps
taskAttemptStatus.fetchFailedMaps = fetchFailedMaps;
}
}
boolean asyncUpdatedNeeded = (lastStatus == null);
if (asyncUpdatedNeeded) {
context.getEventHandler().handle(
new TaskAttemptStatusUpdateEvent(taskAttemptStatus.id,
lastStatusRef));
}
}
/** Exposes the per-attempt coalesced-status slots for tests. */
@VisibleForTesting
ConcurrentMap<TaskAttemptId,
AtomicReference<TaskAttemptStatus>> getAttemptIdToStatus() {
return attemptIdToStatus;
}
/**
 * Tracks, per task attempt, when progress was last written to the log and
 * the progress value that was recorded, so progress logging can be
 * throttled both by progress delta and by elapsed time.
 */
class TaskProgressLogPair {
  /** Attempt whose progress is being tracked. */
  private final TaskAttemptID taskAttemptID;
  /** Monotonic timestamp of the most recent INFO progress line. */
  private volatile long logTimeStamp;
  /** Progress factor captured when the last INFO line was written. */
  private volatile double prevProgress;

  TaskProgressLogPair(final TaskAttemptID attemptID) {
    taskAttemptID = attemptID;
    prevProgress = 0.0;
    logTimeStamp = 0;
  }

  private void resetLog(final boolean doLog,
      final float progress, final double processedProgress,
      final long timestamp) {
    if (doLog) {
      // Record the snapshot so future deltas are measured from this point.
      prevProgress = processedProgress;
      logTimeStamp = timestamp;
      LOG.info("Progress of TaskAttempt " + taskAttemptID + " is : "
          + progress);
    } else if (LOG.isDebugEnabled()) {
      LOG.debug("Progress of TaskAttempt " + taskAttemptID + " is : "
          + progress);
    }
  }

  public void update(final float progress) {
    final double processedProgress =
        MRJobConfUtil.convertTaskProgressToFactor(progress);
    final long currentTime = Time.monotonicNow();
    // Log when the progress delta is large enough, or — only if the delta
    // check fails, courtesy of short-circuit evaluation — when enough time
    // has passed since the last logged line.
    boolean shouldLog =
        (Double.compare(processedProgress - prevProgress,
            MRJobConfUtil.getTaskProgressMinDeltaThreshold()) >= 0)
        || ((currentTime - logTimeStamp)
            >= MRJobConfUtil.getTaskProgressWaitDeltaTimeThreshold());
    // Completion (progress == 1.0) is always worth a line, and the attempt
    // is finished, so drop its throttling record too.
    if (Float.compare(progress, 1.0f) == 0) {
      shouldLog = true;
      taskAttemptLogProgressStamps.remove(taskAttemptID);
    }
    resetLog(shouldLog, progress, processedProgress, currentTime);
  }
}
}
| |
package edu.cmu.lti.bic.sbs.evaluator;
import java.util.Calendar;
import edu.cmu.lti.bic.sbs.gson.Drug;
import edu.cmu.lti.bic.sbs.gson.Patient;
import edu.cmu.lti.bic.sbs.gson.Prescription;
import edu.cmu.lti.bic.sbs.gson.Tool;
/**
* The Step Class
*
* @author Victor Zhao, Xing Sun, Ryan Sun
*
*/
/**
 * A single step of a simulated treatment: the prescription administered,
 * the tool used, the patient state, a time stamp, and an optional scoring
 * rule. Steps represent both recorded user actions and gold-standard
 * actions during evaluation.
 *
 * @author Victor Zhao, Xing Sun, Ryan Sun
 */
public class Step {
  // private Medicine medUsed;
  /** Elapsed time in milliseconds; -1 marks "not yet set". */
  private int timeUsed = -1;
  private Tool toolUsed;
  private Prescription prescriptionUsed;
  private Patient patient;
  private StepRule stepRule;

  /**
   * Serializes this step for display.
   * (A leftover debug {@code System.out.println(timeUsed)} was removed.)
   *
   * @param needTime whether to prefix the line with the time in seconds
   * @return the step description as a tab-separated string
   */
  public String getStep(boolean needTime) {
    StringBuilder output = new StringBuilder();
    if (needTime)
      output.append(timeUsed / 1000 + "\t\t");
    output.append(prescriptionUsed.toString() + "\t\t" + toolUsed.toString() + "\n");
    return output.toString();
  }

  /** Default constructor; fields stay unset until the setters are called. */
  public Step() {
  }

  /**
   * Constructor copying another step (shallow copy of references).
   *
   * @param s the step to copy
   */
  public Step(Step s) {
    this.timeUsed = s.getTime();
    this.toolUsed = s.getTool();
    this.prescriptionUsed = s.getPrescription();
    this.patient = s.getPatient();
    this.stepRule = s.getStepRule();
  }

  /**
   * Step initializer; called when building the gold standard.
   *
   * @param apatient the patient instance
   * @param prescription the prescription instance
   * @param tool the tool instance
   * @param time the time stamp in milliseconds
   */
  public Step(Patient apatient, Prescription prescription, Tool tool, int time) {
    prescriptionUsed = prescription;
    timeUsed = time;
    toolUsed = tool;
    patient = apatient;
  }

  /**
   * Same as {@link #Step(Patient, Prescription, Tool, int)} but also loads
   * a scoring rule from the given rule files.
   */
  public Step(Patient apatient, Prescription prescription, Tool tool, int time,
      String ruleFiles) {
    prescriptionUsed = prescription;
    timeUsed = time;
    toolUsed = tool;
    patient = apatient;
    stepRule = new StepRule(ruleFiles, this);
  }

  /** Loads the scoring rule for this step from the given rule files. */
  public void setRule(String ruleFiles) {
    stepRule = new StepRule(ruleFiles, this);
  }

  /**
   * The patient setter.
   *
   * @param p the incoming patient instance
   */
  public void setPatient(Patient p) {
    patient = p;
  }

  /**
   * The patient getter.
   *
   * @return the patient instance
   */
  public Patient getPatient() {
    return patient;
  }

  /**
   * The prescription setter.
   *
   * @param p incoming prescription instance
   */
  public void setPrescription(Prescription p) {
    prescriptionUsed = p;
  }

  /**
   * The prescription getter.
   *
   * @return the prescription within one step
   */
  public Prescription getPrescription() {
    return prescriptionUsed;
  }

  /**
   * The tool setter.
   *
   * @param t incoming tool
   */
  public void setTool(Tool t) {
    toolUsed = t;
  }

  /**
   * The tool getter.
   *
   * @return the tool instance
   */
  public Tool getTool() {
    return toolUsed;
  }

  /**
   * The time setter.
   *
   * @param t the incoming time in milliseconds
   */
  public void setTime(int t) {
    timeUsed = t;
  }

  /**
   * The time getter.
   *
   * @return the time in milliseconds
   */
  public int getTime() {
    return timeUsed;
  }

  /** @return true once patient, prescription, tool and time are all set */
  public boolean isComplete() {
    return (prescriptionUsed != null) && (patient != null) && (toolUsed != null)
        && (timeUsed != -1);
  }

  /**
   * Scores another step against this (gold-standard) step.
   *
   * <p>Without a rule: the tool and drug must match exactly; the score is
   * then discounted by the relative dose error and the time difference.
   * A tool/drug mismatch yields {@code Integer.MIN_VALUE}. With a rule,
   * the rule's maximum score is returned instead.
   *
   * @param a the step to score
   * @return the similarity score
   */
  public double stepScore(Step a) {
    if (stepRule == null) {
      if (this.toolUsed.getId().equals(a.toolUsed.getId())
          && this.prescriptionUsed.getDrug().getId()
              .equals(a.prescriptionUsed.getDrug().getId())) {
        double dosePenalty = 0.0;
        double timePenalty = 0.0;
        if (this.prescriptionUsed.getDose() != 0)
          dosePenalty = Math.abs(this.prescriptionUsed.getDose() - a.prescriptionUsed.getDose())
              / this.prescriptionUsed.getDose();
        timePenalty = timeUsed - a.timeUsed;
        // if(dosePenalty>=1||timePenalty>=10000) return 0;
        return 1.0 * (1 - dosePenalty) * (1.0 - timePenalty / 10000);
      } else {
        return Integer.MIN_VALUE;
      }
    } else {
      double score = stepRule.maxScore();
      return score;
    }
  }

  /**
   * Penalty score for the patient's current vital signs. Zero is ideal;
   * the value decreases as vitals leave their normal ranges (SpO2 >= 80,
   * respiration 12-20, the combined blood-pressure bounds below, heart
   * rate 60-100).
   *
   * @return the (non-positive) patient-state score
   */
  public double stepPatientScore() {
    double res = 0.0;
    double oLpenalty = 1;
    double rRpenalty = 1;
    double bPpenalty = 1;
    double hRpenalty = 1;
    if (stepRule != null) {
      // add code here
    }
    // oL is negative when SpO2 is below 80, so this subtracts the shortfall.
    double oL = patient.getOxygenLevel().getOlNum() - 80;
    if (oL < 0) {
      res += oL * oLpenalty;
    }
    double rR = Math.max(12.0 - patient.getRepirationRate().getRrNum(), patient.getRepirationRate()
        .getRrNum() - 20.0);
    if (rR > 0) {
      res -= rR * rRpenalty;
    }
    double bP = Math.max(patient.getBloodPressure().getDiastolicBloodPressure() - 100
        + patient.getBloodPressure().getSystolicBloodPressure() - 160, 140
        - patient.getBloodPressure().getDiastolicBloodPressure()
        - patient.getBloodPressure().getSystolicBloodPressure());
    if (bP > 0) {
      res -= bP * bPpenalty;
    }
    double hR = Math.max(patient.getHeartRate().getHrNum() - 100, 60 - patient.getHeartRate()
        .getHrNum());
    if (hR > 0) {
      res -= hR * hRpenalty;
    }
    return res;
  }

  /** Small manual smoke test comparing two steps with different doses. */
  public static void main(String[] args) {
    // NOTE(review): casting epoch milliseconds to int truncates; harmless
    // for this demo because both steps are built at (nearly) the same
    // instant, so the time difference still comes out near zero.
    Step s = new Step(new Patient(), new Prescription(new Drug(), 10.0, "ml"), new Tool("0",
        "Call Code", 0), (int) Calendar.getInstance().getTimeInMillis());
    Step a = new Step(new Patient(), new Prescription(new Drug(), 20.0, "ml"), new Tool("0",
        "Call Code", 0), (int) Calendar.getInstance().getTimeInMillis());
    System.out.println(s.stepScore(a));
  }

  /**
   * @return the timeUsed
   */
  public int getTimeUsed() {
    return timeUsed;
  }

  /**
   * @param timeUsed the timeUsed to set
   */
  public void setTimeUsed(int timeUsed) {
    this.timeUsed = timeUsed;
  }

  /**
   * @return the toolUsed
   */
  public Tool getToolUsed() {
    return toolUsed;
  }

  /**
   * @param toolUsed the toolUsed to set
   */
  public void setToolUsed(Tool toolUsed) {
    this.toolUsed = toolUsed;
  }

  /**
   * @return the prescriptionUsed
   */
  public Prescription getPrescriptionUsed() {
    return prescriptionUsed;
  }

  /**
   * @param prescriptionUsed the prescriptionUsed to set
   */
  public void setPrescriptionUsed(Prescription prescriptionUsed) {
    this.prescriptionUsed = prescriptionUsed;
  }

  /**
   * @return the stepRule
   */
  public StepRule getStepRule() {
    return stepRule;
  }

  /**
   * @param stepRule the stepRule to set
   */
  public void setStepRule(StepRule stepRule) {
    this.stepRule = stepRule;
  }
}
| |
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache;
import org.ehcache.events.StateChangeListener;
import org.ehcache.exceptions.StateTransitionException;
import org.ehcache.spi.LifeCycled;
import org.slf4j.Logger;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Thread-safe lifecycle state machine: drives transitions between the
 * UNINITIALIZED, AVAILABLE and MAINTENANCE statuses, runs registered
 * {@link LifeCycled} hooks on init/close, and notifies
 * {@link StateChangeListener}s once a transition succeeds.
 *
 * @author Alex Snaps
 */
final class StatusTransitioner {
// Current transition; it also records whether it is still in flight.
private final AtomicReference<InternalStatus.Transition> currentState;
// Thread holding the MAINTENANCE lease, if any; only it may operate then.
private volatile Thread maintenanceLease;
private final Logger logger;
private final CopyOnWriteArrayList<LifeCycled> hooks = new CopyOnWriteArrayList<LifeCycled>();
private final CopyOnWriteArrayList<StateChangeListener> listeners = new CopyOnWriteArrayList<StateChangeListener>();
StatusTransitioner(Logger logger) {
this.currentState = new AtomicReference<InternalStatus.Transition>(InternalStatus.initial());
this.logger = logger;
}
/** Public status corresponding to the current (possibly in-flight) state. */
Status currentStatus() {
return currentState.get().get().toPublicStatus();
}
/** True while a transition has started but not yet succeeded or failed. */
boolean isTransitioning() {
return !currentState.get().done();
}
// Throws if the caller may not use the entity: wrong thread during
// MAINTENANCE, or not initialized at all.
void checkAvailable() {
final Status status = currentStatus();
if(status == Status.MAINTENANCE && Thread.currentThread() != maintenanceLease) {
throw new IllegalStateException("State is " + status + ", yet you don't own it!");
} else if(status == Status.UNINITIALIZED) {
throw new IllegalStateException("State is " + status);
}
}
// Throws unless the calling thread currently holds the MAINTENANCE lease.
void checkMaintenance() {
final Status status = currentStatus();
if(status == Status.MAINTENANCE && Thread.currentThread() != maintenanceLease) {
throw new IllegalStateException("State is " + status + ", yet you don't own it!");
} else if (status != Status.MAINTENANCE) {
throw new IllegalStateException("State is " + status);
}
}
// Starts the init transition. The empty-bodied for loop is a CAS retry:
// it re-reads the state and attempts the transition until the
// compareAndSet wins; an illegal transition throws from cs.get().init().
Transition init() {
logger.trace("Initializing");
InternalStatus.Transition st;
for (InternalStatus.Transition cs; !currentState.compareAndSet(cs = currentState.get(), st = cs.get().init()););
return new Transition(st, null, "Initialize");
}
// Starts the close transition; rejected outright if another thread holds
// the MAINTENANCE lease.
Transition close() {
logger.trace("Closing");
InternalStatus.Transition st;
if(maintenanceLease != null && Thread.currentThread() != maintenanceLease) {
throw new IllegalStateException("You don't own this MAINTENANCE lease");
}
for (InternalStatus.Transition cs; !currentState.compareAndSet(cs = currentState.get(), st = cs.get().close()););
return new Transition(st, null, "Close");
}
// Starts the transition into MAINTENANCE; the calling thread becomes the
// lease owner once the transition succeeds.
Transition maintenance() {
logger.trace("Entering Maintenance");
InternalStatus.Transition st;
for (InternalStatus.Transition cs; !currentState.compareAndSet(cs = currentState.get(), st = cs.get().maintenance()););
return new Transition(st, Thread.currentThread(), "Enter Maintenance");
}
// Leaves MAINTENANCE (modeled as a close transition); only the lease
// owner may call this.
Transition exitMaintenance() {
checkMaintenance();
logger.trace("Exiting Maintenance");
InternalStatus.Transition st;
for (InternalStatus.Transition cs; !currentState.compareAndSet(cs = currentState.get(), st = cs.get().close()););
return new Transition(st, Thread.currentThread(), "Exit Maintenance");
}
/** Registers a lifecycle hook; only allowed while UNINITIALIZED. */
void addHook(LifeCycled hook) {
validateHookRegistration();
hooks.add(hook);
}
/** Removes a lifecycle hook; only allowed while UNINITIALIZED. */
void removeHook(LifeCycled hook) {
validateHookRegistration();
hooks.remove(hook);
}
private void validateHookRegistration() {
if(currentStatus() != Status.UNINITIALIZED) {
throw new IllegalStateException("Can't modify hooks when not in " + Status.UNINITIALIZED);
}
}
/** Adds a state-change listener (at most once). */
void registerListener(StateChangeListener listener) {
if(!listeners.contains(listener)) {
listeners.add(listener);
}
}
void deregisterListener(StateChangeListener listener) {
listeners.remove(listener);
}
// Runs init() on every hook in registration order. If any hook throws,
// the already-initialized hooks are closed in LIFO order before the
// original exception is rethrown.
private void runInitHooks() throws Exception {
Deque<LifeCycled> initiated = new ArrayDeque<LifeCycled>();
for (LifeCycled hook : hooks) {
try {
hook.init();
initiated.add(hook);
} catch (Exception initException) {
while (!initiated.isEmpty()) {
try {
initiated.pop().close();
} catch (Exception closeException) {
logger.error("Failed to close() while shutting down because of .init() having thrown", closeException);
}
}
throw initException;
}
}
}
// Closes every hook in reverse registration order; all hooks are
// attempted, and the first failure is rethrown at the end.
private void runCloseHooks() throws Exception {
Deque<LifeCycled> initiated = new ArrayDeque<LifeCycled>();
for (LifeCycled hook : hooks) {
initiated.addFirst(hook);
}
Exception firstFailure = null;
while (!initiated.isEmpty()) {
try {
initiated.pop().close();
} catch (Exception closeException) {
if (firstFailure == null) {
firstFailure = closeException;
} else {
logger.error("A LifeCyclable has thrown already while closing down", closeException);
}
}
}
if (firstFailure != null) {
throw firstFailure;
}
}
private void fireTransitionEvent(Status previousStatus, Status newStatus) {
for (StateChangeListener listener : listeners) {
listener.stateTransition(previousStatus, newStatus);
}
}
/**
 * Handle for an in-flight transition: the caller performs the actual work
 * and then reports the outcome via {@link #succeeded()} or
 * {@link #failed(Throwable)}.
 */
final class Transition {
private final InternalStatus.Transition st;
// Thread that will own the maintenance lease once this succeeds (or null).
private final Thread thread;
private final String action;
public Transition(final InternalStatus.Transition st, final Thread thread, final String action) {
this.st = st;
this.thread = thread;
this.action = action;
}
// Runs the hooks matching the target state, commits the transition, then
// notifies listeners and installs the maintenance lease. A hook failure
// marks the transition failed and surfaces as StateTransitionException.
public void succeeded() {
try {
switch(st.to()) {
case AVAILABLE:
runInitHooks();
break;
case UNINITIALIZED:
runCloseHooks();
break;
case MAINTENANCE:
break;
default:
throw new IllegalArgumentException("Didn't expect that enum value: " + st.to());
}
st.succeeded();
} catch (Exception e) {
st.failed();
throw new StateTransitionException(e);
}
try {
fireTransitionEvent(st.from().toPublicStatus(), st.to().toPublicStatus());
} finally {
maintenanceLease = thread;
logger.info("{} successful.", action);
}
}
// Rolls the state back and rethrows the cause (wrapped if necessary).
public void failed(Throwable t) {
st.failed();
logger.error("{} failed.", action);
if (t != null) {
if(t instanceof StateTransitionException) {
throw (StateTransitionException) t;
}
throw new StateTransitionException(t);
}
}
}
}
| |
/*
* Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.xml.internal.ws.handler;
import com.sun.xml.internal.ws.api.BindingID;
import com.sun.xml.internal.ws.api.WSBinding;
import com.sun.xml.internal.ws.streaming.XMLStreamReaderUtil;
import com.sun.xml.internal.ws.transport.http.DeploymentDescriptorParser;
import com.sun.xml.internal.ws.util.HandlerAnnotationInfo;
import com.sun.xml.internal.ws.util.JAXWSUtils;
import com.sun.xml.internal.ws.util.UtilException;
import javax.annotation.PostConstruct;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamReader;
import javax.xml.ws.handler.Handler;
import javax.xml.ws.handler.PortInfo;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.logging.Logger;
public class HandlerChainsModel {
private static final Logger logger = Logger.getLogger(
com.sun.xml.internal.ws.util.Constants.LoggingDomain + ".util");
// Class carrying the @HandlerChain annotation; its class loader is used
// to instantiate handler classes in getHandlersForPortInfo().
private Class annotatedClass;
// Parsed <handler-chain> entries; lazily created by getHandlerChain().
private List<HandlerChainType> handlerChains;
private String id;
/** Creates a new instance of HandlerChains */
private HandlerChainsModel(Class annotatedClass) {
this.annotatedClass = annotatedClass;
}
/** Lazily creates and returns the mutable list of parsed handler chains. */
private List<HandlerChainType> getHandlerChain() {
  List<HandlerChainType> chains = handlerChains;
  if (chains == null) {
    chains = new ArrayList<HandlerChainType>();
    handlerChains = chains;
  }
  return chains;
}
/** Returns the id of this handler-chains model. */
public String getId() {
return id;
}
/** Sets the id of this handler-chains model. */
public void setId(String value) {
this.id = value;
}
/**
 * Parses a full handler configuration file into a HandlerChainsModel,
 * recording each chain's constraints (port pattern, protocol bindings,
 * service pattern) and its handlers. The reader must be positioned on the
 * <handler-chains> element, and is left past its end when this returns.
 *
 * @param annotatedClass the class whose annotation referenced the file
 * @param reader a StAX reader positioned on <handler-chains>
 * @return the populated model
 */
public static HandlerChainsModel parseHandlerConfigFile(Class annotatedClass, XMLStreamReader reader) {
ensureProperName(reader,QNAME_HANDLER_CHAINS);
HandlerChainsModel handlerModel = new HandlerChainsModel(annotatedClass);
List<HandlerChainType> hChains = handlerModel.getHandlerChain();
XMLStreamReaderUtil.nextElementContent(reader);
while (reader.getName().equals(QNAME_HANDLER_CHAIN)) {
HandlerChainType hChain = new HandlerChainType();
XMLStreamReaderUtil.nextElementContent(reader);
// At most one optional constraint element is read per chain.
if (reader.getName().equals(QNAME_CHAIN_PORT_PATTERN)) {
QName portNamePattern = XMLStreamReaderUtil.getElementQName(reader);
hChain.setPortNamePattern(portNamePattern);
XMLStreamReaderUtil.nextElementContent(reader);
} else if (reader.getName().equals(QNAME_CHAIN_PROTOCOL_BINDING)) {
String bindingList = XMLStreamReaderUtil.getElementText(reader);
StringTokenizer stk = new StringTokenizer(bindingList);
while(stk.hasMoreTokens()) {
String token = stk.nextToken();
// This will convert tokens into Binding URI
hChain.addProtocolBinding(token);
}
XMLStreamReaderUtil.nextElementContent(reader);
} else if (reader.getName().equals(QNAME_CHAIN_SERVICE_PATTERN)) {
QName serviceNamepattern = XMLStreamReaderUtil.getElementQName(reader);
hChain.setServiceNamePattern(serviceNamepattern);
XMLStreamReaderUtil.nextElementContent(reader);
}
List<HandlerType> handlers = hChain.getHandlers();
// process all <handler> elements
while (reader.getName().equals(QNAME_HANDLER)) {
HandlerType handler = new HandlerType();
XMLStreamReaderUtil.nextContent(reader);
// optional handler-name
if (reader.getName().equals(QNAME_HANDLER_NAME)) {
String handlerName =
XMLStreamReaderUtil.getElementText(reader).trim();
handler.setHandlerName(handlerName);
XMLStreamReaderUtil.nextContent(reader);
}
// handler class
ensureProperName(reader, QNAME_HANDLER_CLASS);
String handlerClass =
XMLStreamReaderUtil.getElementText(reader).trim();
handler.setHandlerClass(handlerClass);
XMLStreamReaderUtil.nextContent(reader);
// init params (ignored)
while (reader.getName().equals(QNAME_HANDLER_PARAM)) {
skipInitParamElement(reader);
}
// headers (ignored)
while (reader.getName().equals(QNAME_HANDLER_HEADER)) {
skipTextElement(reader);
}
// roles (stored on the handler, unlike parseHandlerFile)
while (reader.getName().equals(QNAME_HANDLER_ROLE)) {
List<String> soapRoles = handler.getSoapRoles();
soapRoles.add(XMLStreamReaderUtil.getElementText(reader));
XMLStreamReaderUtil.nextContent(reader);
}
handlers.add(handler);
// move past </handler>
ensureProperName(reader, QNAME_HANDLER);
XMLStreamReaderUtil.nextContent(reader);
}
// move past </handler-chain>
ensureProperName(reader, QNAME_HANDLER_CHAIN);
hChains.add(hChain);
XMLStreamReaderUtil.nextContent(reader);
}
return handlerModel;
}
/**
 * <p>This method is called internally by HandlerAnnotationProcessor,
 * and by
 * {@link com.sun.xml.internal.ws.transport.http.DeploymentDescriptorParser}
 * directly when it reaches the handler chains element in the
 * descriptor file it is parsing. Chains whose port/service/protocol
 * constraints do not match the supplied names are skipped wholesale.
 *
 * @param reader should be on <handler-chains> element
 * @param classLoader used to instantiate handler classes
 * @param serviceName service QName to match (may be null)
 * @param portName port QName to match (may be null)
 * @param wsbinding binding whose id is matched against protocol bindings
 * @return A HandlerAnnotationInfo object that stores the
 * handlers and roles.
 */
public static HandlerAnnotationInfo parseHandlerFile(XMLStreamReader reader,
    ClassLoader classLoader, QName serviceName, QName portName,
    WSBinding wsbinding) {
  ensureProperName(reader, QNAME_HANDLER_CHAINS);
  String bindingId = wsbinding.getBindingId().toString();
  HandlerAnnotationInfo info = new HandlerAnnotationInfo();
  XMLStreamReaderUtil.nextElementContent(reader);
  List<Handler> handlerChain = new ArrayList<Handler>();
  Set<String> roles = new HashSet<String>();
  while (reader.getName().equals(QNAME_HANDLER_CHAIN)) {
    XMLStreamReaderUtil.nextElementContent(reader);
    if (reader.getName().equals(QNAME_CHAIN_PORT_PATTERN)) {
      if (portName == null) {
        logger.warning("handler chain specified for port " +
            "but port QName passed to parser is null");
      }
      boolean parseChain = JAXWSUtils.matchQNames(portName,
          XMLStreamReaderUtil.getElementQName(reader));
      if (!parseChain) {
        skipChain(reader);
        continue;
      }
      XMLStreamReaderUtil.nextElementContent(reader);
    } else if (reader.getName().equals(QNAME_CHAIN_PROTOCOL_BINDING)) {
      if (bindingId == null) {
        logger.warning("handler chain specified for bindingId " +
            "but bindingId passed to parser is null");
      }
      String bindingConstraint = XMLStreamReaderUtil.getElementText(reader);
      StringTokenizer stk = new StringTokenizer(bindingConstraint);
      List<String> bindingList = new ArrayList<String>();
      while (stk.hasMoreTokens()) {
        // Convert short-form tokens to the API's binding ids; an unknown
        // token passes through unchanged.
        String tokenOrURI = stk.nextToken();
        tokenOrURI = DeploymentDescriptorParser.getBindingIdForToken(tokenOrURI);
        bindingList.add(BindingID.parse(tokenOrURI).toString());
      }
      if (!bindingList.contains(bindingId)) {
        skipChain(reader);
        continue;
      }
      XMLStreamReaderUtil.nextElementContent(reader);
    } else if (reader.getName().equals(QNAME_CHAIN_SERVICE_PATTERN)) {
      if (serviceName == null) {
        logger.warning("handler chain specified for service " +
            "but service QName passed to parser is null");
      }
      boolean parseChain = JAXWSUtils.matchQNames(
          serviceName,
          XMLStreamReaderUtil.getElementQName(reader));
      if (!parseChain) {
        skipChain(reader);
        continue;
      }
      XMLStreamReaderUtil.nextElementContent(reader);
    }
    // process all <handler> elements
    while (reader.getName().equals(QNAME_HANDLER)) {
      Handler handler;
      XMLStreamReaderUtil.nextContent(reader);
      // handler-name is not needed when instantiating directly
      if (reader.getName().equals(QNAME_HANDLER_NAME)) {
        skipTextElement(reader);
      }
      // handler class
      ensureProperName(reader, QNAME_HANDLER_CLASS);
      try {
        handler = (Handler) loadClass(classLoader,
            XMLStreamReaderUtil.getElementText(reader).trim()).newInstance();
      } catch (InstantiationException ie) {
        throw new RuntimeException(ie);
      } catch (IllegalAccessException e) {
        throw new RuntimeException(e);
      }
      XMLStreamReaderUtil.nextContent(reader);
      // init params (ignored)
      while (reader.getName().equals(QNAME_HANDLER_PARAM)) {
        skipInitParamElement(reader);
      }
      // headers (ignored)
      while (reader.getName().equals(QNAME_HANDLER_HEADER)) {
        skipTextElement(reader);
      }
      // roles (collected globally, not stored per handler)
      while (reader.getName().equals(QNAME_HANDLER_ROLE)) {
        roles.add(XMLStreamReaderUtil.getElementText(reader));
        XMLStreamReaderUtil.nextContent(reader);
      }
      // call @PostConstruct method on handler if present; reuse the shared
      // helper instead of duplicating the reflection loop inline.
      callHandlerPostConstruct(handler);
      handlerChain.add(handler);
      // move past </handler>
      ensureProperName(reader, QNAME_HANDLER);
      XMLStreamReaderUtil.nextContent(reader);
    }
    // move past </handler-chain>
    ensureProperName(reader, QNAME_HANDLER_CHAIN);
    XMLStreamReaderUtil.nextContent(reader);
  }
  info.setHandlers(handlerChain);
  info.setRoles(roles);
  return info;
}
/**
 * Returns the handlers and SOAP roles that apply to the given port.
 * A chain applies when it has no constraints at all, or when its service
 * pattern, port pattern, or protocol bindings match the port info.
 */
public HandlerAnnotationInfo getHandlersForPortInfo(PortInfo info) {
  HandlerAnnotationInfo handlerInfo = new HandlerAnnotationInfo();
  List<Handler> handlerClassList = new ArrayList<Handler>();
  Set<String> roles = new HashSet<String>();
  for (HandlerChainType hchain : handlerChains) {
    boolean matched = (!hchain.isConstraintSet())
        || JAXWSUtils.matchQNames(info.getServiceName(), hchain.getServiceNamePattern())
        || JAXWSUtils.matchQNames(info.getPortName(), hchain.getPortNamePattern())
        || hchain.getProtocolBindings().contains(info.getBindingID());
    if (!matched) {
      continue;
    }
    for (HandlerType handler : hchain.getHandlers()) {
      try {
        Handler handlerClass = (Handler) loadClass(annotatedClass.getClassLoader(),
            handler.getHandlerClass()).newInstance();
        callHandlerPostConstruct(handlerClass);
        handlerClassList.add(handlerClass);
      } catch (InstantiationException ie) {
        throw new RuntimeException(ie);
      } catch (IllegalAccessException e) {
        throw new RuntimeException(e);
      }
      roles.addAll(handler.getSoapRoles());
    }
  }
  handlerInfo.setHandlers(handlerClassList);
  handlerInfo.setRoles(roles);
  return handlerInfo;
}
/**
 * Loads and initializes the named class through the given loader.
 * Throws a UtilException with the i18n key below when the class is absent.
 */
private static Class loadClass(ClassLoader loader, String name) {
try {
return Class.forName(name, true, loader);
} catch (ClassNotFoundException e) {
// NOTE(review): the ClassNotFoundException is not chained as a cause —
// presumably UtilException's (key, args...) ctor has no cause slot; verify.
throw new UtilException(
"util.handler.class.not.found",
name);
}
}
/**
 * Invokes the first public method annotated with {@code @PostConstruct}
 * on the handler instance, if any. Invocation failures surface as
 * {@link RuntimeException}.
 */
private static void callHandlerPostConstruct(Object handlerClass) {
  for (Method method : handlerClass.getClass().getMethods()) {
    if (method.getAnnotation(PostConstruct.class) != null) {
      try {
        method.invoke(handlerClass, new Object[0]);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
      break;
    }
  }
}
// Advances the reader past the remainder of the current <handler-chain>
// element. The while body is intentionally empty: nextContent() in the
// condition does the advancing until the chain's END_ELEMENT is reached;
// the final call positions the reader on the next element's content.
private static void skipChain(XMLStreamReader reader) {
while (XMLStreamReaderUtil.nextContent(reader) !=
XMLStreamConstants.END_ELEMENT ||
!reader.getName().equals(QNAME_HANDLER_CHAIN)) {}
XMLStreamReaderUtil.nextElementContent(reader);
}
// Skips a simple text-only element the reader is positioned on: one
// advance into the text, one past the END_ELEMENT, one onto the next
// element's content. The three calls are order-critical.
private static void skipTextElement(XMLStreamReader reader) {
XMLStreamReaderUtil.nextContent(reader);
XMLStreamReaderUtil.nextElementContent(reader);
XMLStreamReaderUtil.nextElementContent(reader);
}
/**
 * Advances the reader past an entire <init-param> element (including any
 * nested content), leaving it on the following element's content.
 */
private static void skipInitParamElement(XMLStreamReader reader) {
  while (true) {
    int state = XMLStreamReaderUtil.nextContent(reader);
    if (state == XMLStreamReader.END_ELEMENT
        && reader.getName().equals(QNAME_HANDLER_PARAM)) {
      break;
    }
  }
  XMLStreamReaderUtil.nextElementContent(reader);
}
/**
 * Fails with a parse error unless the reader is positioned on an element
 * whose qualified name equals {@code expectedName}.
 */
private static void ensureProperName(XMLStreamReader reader,
QName expectedName) {
if (!reader.getName().equals(expectedName)) {
failWithLocalName("util.parser.wrong.element", reader,
expectedName.getLocalPart());
}
}
/**
 * Fails with a parse error unless the reader is positioned on an element
 * whose local name (namespace ignored) equals {@code expectedName}.
 */
static void ensureProperName(XMLStreamReader reader, String expectedName) {
if (!reader.getLocalName().equals(expectedName)) {
failWithLocalName("util.parser.wrong.element", reader,
expectedName);
}
}
// Raises a UtilException carrying the current line number, the element
// actually found, and the caller-supplied argument (e.g. the expected name).
private static void failWithLocalName(String key,
XMLStreamReader reader, String arg) {
throw new UtilException(key,
Integer.toString(reader.getLocation().getLineNumber()),
reader.getLocalName(),
arg );
}
// Short-form protocol tokens accepted inside <protocol-bindings>.
public static final String PROTOCOL_SOAP11_TOKEN = "##SOAP11_HTTP";
public static final String PROTOCOL_SOAP12_TOKEN = "##SOAP12_HTTP";
public static final String PROTOCOL_XML_TOKEN = "##XML_HTTP";
// JSR-109 (Java EE) namespace that all handler-chain elements live in.
public static final String NS_109 =
"http://java.sun.com/xml/ns/javaee";
// Qualified names of the descriptor elements this parser recognizes.
public static final QName QNAME_CHAIN_PORT_PATTERN =
new QName(NS_109, "port-name-pattern");
public static final QName QNAME_CHAIN_PROTOCOL_BINDING =
new QName(NS_109, "protocol-bindings");
public static final QName QNAME_CHAIN_SERVICE_PATTERN =
new QName(NS_109, "service-name-pattern");
public static final QName QNAME_HANDLER_CHAIN =
new QName(NS_109, "handler-chain");
public static final QName QNAME_HANDLER_CHAINS =
new QName(NS_109, "handler-chains");
public static final QName QNAME_HANDLER =
new QName(NS_109, "handler");
public static final QName QNAME_HANDLER_NAME =
new QName(NS_109, "handler-name");
public static final QName QNAME_HANDLER_CLASS =
new QName(NS_109, "handler-class");
public static final QName QNAME_HANDLER_PARAM =
new QName(NS_109, "init-param");
public static final QName QNAME_HANDLER_PARAM_NAME =
new QName(NS_109, "param-name");
public static final QName QNAME_HANDLER_PARAM_VALUE =
new QName(NS_109, "param-value");
public static final QName QNAME_HANDLER_HEADER =
new QName(NS_109, "soap-header");
public static final QName QNAME_HANDLER_ROLE =
new QName(NS_109, "soap-role");
/**
 * In-memory model of one {@code <handler-chain>} descriptor element: its
 * optional service/port/protocol constraints plus the handlers it lists.
 */
static class HandlerChainType {
    // Optional constraints restricting when this chain applies.
    QName serviceNamePattern;
    QName portNamePattern;
    List<String> protocolBindings;
    // Set to true as soon as any of the constraints above is assigned.
    boolean constraintSet = false;
    List<HandlerType> handlers;
    String id;

    /** Creates a new instance of HandlerChain */
    public HandlerChainType() {
        this.protocolBindings = new ArrayList<String>();
    }

    public void setServiceNamePattern(QName value) {
        serviceNamePattern = value;
        constraintSet = true;
    }

    public QName getServiceNamePattern() {
        return this.serviceNamePattern;
    }

    public void setPortNamePattern(QName value) {
        portNamePattern = value;
        constraintSet = true;
    }

    public QName getPortNamePattern() {
        return this.portNamePattern;
    }

    public List<java.lang.String> getProtocolBindings() {
        return protocolBindings;
    }

    public void addProtocolBinding(String tokenOrURI) {
        // Short-form tokens (e.g. ##SOAP11_HTTP) are expanded to the API's
        // binding ids; unknown tokens are passed through unchanged.
        String expanded = DeploymentDescriptorParser.getBindingIdForToken(tokenOrURI);
        String binding = BindingID.parse(expanded).toString();
        protocolBindings.add(binding);
        constraintSet = true;
    }

    public boolean isConstraintSet() {
        if (constraintSet) {
            return true;
        }
        // Defensive: a non-empty binding list also counts as a constraint.
        return !protocolBindings.isEmpty();
    }

    public java.lang.String getId() {
        return this.id;
    }

    public void setId(java.lang.String value) {
        id = value;
    }

    public List<HandlerType> getHandlers() {
        // Lazily created so an empty chain never allocates the list.
        if (handlers == null) {
            handlers = new ArrayList<HandlerType>();
        }
        return handlers;
    }
}
/**
 * In-memory model of one {@code <handler>} descriptor element: the handler's
 * logical name, implementation class, SOAP roles and optional id.
 */
static class HandlerType {
    String handlerName;
    String handlerClass;
    List<String> soapRoles;
    java.lang.String id;

    /** Creates a new instance of HandlerComponent */
    public HandlerType() {
    }

    public String getHandlerName() {
        return this.handlerName;
    }

    public void setHandlerName(String value) {
        handlerName = value;
    }

    public String getHandlerClass() {
        return this.handlerClass;
    }

    public void setHandlerClass(String value) {
        handlerClass = value;
    }

    public java.lang.String getId() {
        return this.id;
    }

    public void setId(java.lang.String value) {
        id = value;
    }

    public List<String> getSoapRoles() {
        // Lazily created so a role-less handler never allocates the list.
        if (soapRoles == null) {
            soapRoles = new ArrayList<String>();
        }
        return soapRoles;
    }
}
}
| |
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package javax.swing;
import java.awt.*;
import java.awt.image.*;
import java.beans.ConstructorProperties;
import java.beans.Transient;
import java.net.URL;
import java.io.Serializable;
import java.io.ObjectOutputStream;
import java.io.ObjectInputStream;
import java.io.IOException;
import java.util.Locale;
import javax.accessibility.*;
import sun.awt.AppContext;
import java.lang.reflect.Field;
import java.security.*;
/**
* An implementation of the Icon interface that paints Icons
* from Images. Images that are created from a URL, filename or byte array
* are preloaded using MediaTracker to monitor the loaded state
* of the image.
*
* <p>
* For further information and examples of using image icons, see
* <a href="https://docs.oracle.com/javase/tutorial/uiswing/components/icon.html">How to Use Icons</a>
* in <em>The Java Tutorial.</em>
*
* <p>
* <strong>Warning:</strong>
* Serialized objects of this class will not be compatible with
* future Swing releases. The current serialization support is
* appropriate for short term storage or RMI between applications running
* the same version of Swing. As of 1.4, support for long term storage
* of all JavaBeans™
* has been added to the <code>java.beans</code> package.
* Please see {@link java.beans.XMLEncoder}.
*
* @author Jeff Dinkins
* @author Lynn Monsanto
*/
public class ImageIcon implements Icon, Serializable, Accessible {
    /* Keep references to the filename and location so that
     * alternate persistence schemes have the option to archive
     * images symbolically rather than including the image data
     * in the archive.
     */
    transient private String filename;
    transient private URL location;

    transient Image image;
    transient int loadStatus = 0;
    ImageObserver imageObserver;
    String description = null;

    /**
     * Do not use this shared component, which is used to track image loading.
     * It is left for backward compatibility only.
     * @deprecated since 1.8
     */
    @Deprecated
    protected final static Component component;

    /**
     * Do not use this shared media tracker, which is used to load images.
     * It is left for backward compatibility only.
     * @deprecated since 1.8
     */
    @Deprecated
    protected final static MediaTracker tracker;

    static {
        component = AccessController.doPrivileged(new PrivilegedAction<Component>() {
            public Component run() {
                try {
                    final Component component = createNoPermsComponent();

                    // 6482575 - clear the appContext field so as not to leak it
                    Field appContextField =
                            Component.class.getDeclaredField("appContext");
                    appContextField.setAccessible(true);
                    appContextField.set(component, null);

                    return component;
                } catch (Throwable e) {
                    // We don't care about component.
                    // So don't prevent class initialisation.
                    e.printStackTrace();
                    return null;
                }
            }
        });
        tracker = new MediaTracker(component);
    }

    private static Component createNoPermsComponent() {
        // 7020198 - set acc field to no permissions and no subject
        // Note, will have appContext set.
        return AccessController.doPrivileged(
                new PrivilegedAction<Component>() {
                    public Component run() {
                        return new Component() {
                        };
                    }
                },
                new AccessControlContext(new ProtectionDomain[]{
                        new ProtectionDomain(null, null)
                })
        );
    }

    /**
     * Id used in loading images from MediaTracker.
     */
    private static int mediaTrackerID;

    // Per-AppContext key under which the private MediaTracker is stored;
    // deliberately not a String so it cannot collide with client keys.
    private final static Object TRACKER_KEY = new StringBuilder("TRACKER_KEY");

    // Cached dimensions, captured when the image finishes (or fails) loading;
    // -1 until loadImage() has run.
    int width = -1;
    int height = -1;

    /**
     * Creates an ImageIcon from the specified file. The image will
     * be preloaded by using MediaTracker to monitor the loading state
     * of the image.
     * @param filename the name of the file containing the image
     * @param description a brief textual description of the image
     * @see #ImageIcon(String)
     */
    public ImageIcon(String filename, String description) {
        image = Toolkit.getDefaultToolkit().getImage(filename);
        if (image == null) {
            return;
        }
        this.filename = filename;
        this.description = description;
        loadImage(image);
    }

    /**
     * Creates an ImageIcon from the specified file. The image will
     * be preloaded by using MediaTracker to monitor the loading state
     * of the image. The specified String can be a file name or a
     * file path. When specifying a path, use the Internet-standard
     * forward-slash ("/") as a separator.
     * (The string is converted to an URL, so the forward-slash works
     * on all systems.)
     * For example, specify:
     * <pre>
     *    new ImageIcon("images/myImage.gif") </pre>
     * The description is initialized to the <code>filename</code> string.
     *
     * @param filename a String specifying a filename or path
     * @see #getDescription
     */
    @ConstructorProperties({"description"})
    public ImageIcon (String filename) {
        this(filename, filename);
    }

    /**
     * Creates an ImageIcon from the specified URL. The image will
     * be preloaded by using MediaTracker to monitor the loaded state
     * of the image.
     * @param location the URL for the image
     * @param description a brief textual description of the image
     * @see #ImageIcon(String)
     */
    public ImageIcon(URL location, String description) {
        image = Toolkit.getDefaultToolkit().getImage(location);
        if (image == null) {
            return;
        }
        this.location = location;
        this.description = description;
        loadImage(image);
    }

    /**
     * Creates an ImageIcon from the specified URL. The image will
     * be preloaded by using MediaTracker to monitor the loaded state
     * of the image.
     * The icon's description is initialized to be
     * a string representation of the URL.
     * @param location the URL for the image
     * @see #getDescription
     */
    public ImageIcon (URL location) {
        this(location, location.toExternalForm());
    }

    /**
     * Creates an ImageIcon from the image.
     * @param image the image
     * @param description a brief textual description of the image
     */
    public ImageIcon(Image image, String description) {
        this(image);
        this.description = description;
    }

    /**
     * Creates an ImageIcon from an image object.
     * If the image has a "comment" property that is a string,
     * then the string is used as the description of this icon.
     * @param image the image
     * @see #getDescription
     * @see java.awt.Image#getProperty
     */
    public ImageIcon (Image image) {
        this.image = image;
        Object o = image.getProperty("comment", imageObserver);
        if (o instanceof String) {
            description = (String) o;
        }
        loadImage(image);
    }

    /**
     * Creates an ImageIcon from an array of bytes which were
     * read from an image file containing a supported image format,
     * such as GIF, JPEG, or (as of 1.3) PNG.
     * Normally this array is created
     * by reading an image using Class.getResourceAsStream(), but
     * the byte array may also be statically stored in a class.
     *
     * @param  imageData an array of pixels in an image format supported
     *         by the AWT Toolkit, such as GIF, JPEG, or (as of 1.3) PNG
     * @param  description a brief textual description of the image
     * @see    java.awt.Toolkit#createImage
     */
    public ImageIcon (byte[] imageData, String description) {
        this.image = Toolkit.getDefaultToolkit().createImage(imageData);
        if (image == null) {
            return;
        }
        this.description = description;
        loadImage(image);
    }

    /**
     * Creates an ImageIcon from an array of bytes which were
     * read from an image file containing a supported image format,
     * such as GIF, JPEG, or (as of 1.3) PNG.
     * Normally this array is created
     * by reading an image using Class.getResourceAsStream(), but
     * the byte array may also be statically stored in a class.
     * If the resulting image has a "comment" property that is a string,
     * then the string is used as the description of this icon.
     *
     * @param  imageData an array of pixels in an image format supported by
     *         the AWT Toolkit, such as GIF, JPEG, or (as of 1.3) PNG
     * @see    java.awt.Toolkit#createImage
     * @see #getDescription
     * @see java.awt.Image#getProperty
     */
    public ImageIcon (byte[] imageData) {
        this.image = Toolkit.getDefaultToolkit().createImage(imageData);
        if (image == null) {
            return;
        }
        Object o = image.getProperty("comment", imageObserver);
        if (o instanceof String) {
            description = (String) o;
        }
        loadImage(image);
    }

    /**
     * Creates an uninitialized image icon.
     */
    public ImageIcon() {
    }

    /**
     * Loads the image, returning only when the image is loaded.
     * @param image the image
     */
    protected void loadImage(Image image) {
        MediaTracker mTracker = getTracker();
        synchronized(mTracker) {
            int id = getNextID();

            mTracker.addImage(image, id);
            try {
                mTracker.waitForID(id, 0);
            } catch (InterruptedException e) {
                System.out.println("INTERRUPTED while loading Image");
                // Restore the interrupt status that waitForID() cleared so
                // callers further up the stack can still observe the
                // interruption instead of it being silently swallowed.
                Thread.currentThread().interrupt();
            }
            loadStatus = mTracker.statusID(id, false);
            mTracker.removeImage(image, id);

            width = image.getWidth(imageObserver);
            height = image.getHeight(imageObserver);
        }
    }

    /**
     * Returns an ID to use with the MediaTracker in loading an image.
     */
    private int getNextID() {
        synchronized(getTracker()) {
            return ++mediaTrackerID;
        }
    }

    /**
     * Returns the MediaTracker for the current AppContext, creating a new
     * MediaTracker if necessary.
     */
    private MediaTracker getTracker() {
        Object trackerObj;
        AppContext ac = AppContext.getAppContext();
        // Opt: Only synchronize if trackerObj comes back null?
        // If null, synchronize, re-check for null, and put new tracker
        synchronized(ac) {
            trackerObj = ac.get(TRACKER_KEY);
            if (trackerObj == null) {
                Component comp = new Component() {};
                trackerObj = new MediaTracker(comp);
                ac.put(TRACKER_KEY, trackerObj);
            }
        }
        return (MediaTracker) trackerObj;
    }

    /**
     * Returns the status of the image loading operation.
     * @return the loading status as defined by java.awt.MediaTracker
     * @see java.awt.MediaTracker#ABORTED
     * @see java.awt.MediaTracker#ERRORED
     * @see java.awt.MediaTracker#COMPLETE
     */
    public int getImageLoadStatus() {
        return loadStatus;
    }

    /**
     * Returns this icon's <code>Image</code>.
     * @return the <code>Image</code> object for this <code>ImageIcon</code>
     */
    @Transient
    public Image getImage() {
        return image;
    }

    /**
     * Sets the image displayed by this icon.
     * @param image the image
     */
    public void setImage(Image image) {
        this.image = image;
        loadImage(image);
    }

    /**
     * Gets the description of the image.  This is meant to be a brief
     * textual description of the object.  For example, it might be
     * presented to a blind user to give an indication of the purpose
     * of the image.
     * The description may be null.
     *
     * @return a brief textual description of the image
     */
    public String getDescription() {
        return description;
    }

    /**
     * Sets the description of the image.  This is meant to be a brief
     * textual description of the object.  For example, it might be
     * presented to a blind user to give an indication of the purpose
     * of the image.
     * @param description a brief textual description of the image
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * Paints the icon.
     * The top-left corner of the icon is drawn at
     * the point (<code>x</code>, <code>y</code>)
     * in the coordinate space of the graphics context <code>g</code>.
     * If this icon has no image observer,
     * this method uses the <code>c</code> component
     * as the observer.
     *
     * @param c the component to be used as the observer
     *          if this icon has no image observer
     * @param g the graphics context
     * @param x the X coordinate of the icon's top-left corner
     * @param y the Y coordinate of the icon's top-left corner
     */
    public synchronized void paintIcon(Component c, Graphics g, int x, int y) {
        if(imageObserver == null) {
           g.drawImage(image, x, y, c);
        } else {
           g.drawImage(image, x, y, imageObserver);
        }
    }

    /**
     * Gets the width of the icon.
     *
     * @return the width in pixels of this icon
     */
    public int getIconWidth() {
        return width;
    }

    /**
     * Gets the height of the icon.
     *
     * @return the height in pixels of this icon
     */
    public int getIconHeight() {
        return height;
    }

    /**
     * Sets the image observer for the image.  Set this
     * property if the ImageIcon contains an animated GIF, so
     * the observer is notified to update its display.
     * For example:
     * <pre>
     *     icon = new ImageIcon(...)
     *     button.setIcon(icon);
     *     icon.setImageObserver(button);
     * </pre>
     *
     * @param observer the image observer
     */
    public void setImageObserver(ImageObserver observer) {
        imageObserver = observer;
    }

    /**
     * Returns the image observer for the image.
     *
     * @return the image observer, which may be null
     */
    @Transient
    public ImageObserver getImageObserver() {
        return imageObserver;
    }

    /**
     * Returns a string representation of this image.
     *
     * @return a string representing this image
     */
    public String toString() {
        if (description != null) {
            return description;
        }
        return super.toString();
    }

    // Custom deserialization: rebuilds the transient image from the raw RGB
    // pixel array written by writeObject(), if one was serialized.
    private void readObject(ObjectInputStream s)
        throws ClassNotFoundException, IOException
    {
        s.defaultReadObject();

        int w = s.readInt();
        int h = s.readInt();
        int[] pixels = (int[])(s.readObject());

        if (pixels != null) {
            Toolkit tk = Toolkit.getDefaultToolkit();
            ColorModel cm = ColorModel.getRGBdefault();
            image = tk.createImage(new MemoryImageSource(w, h, cm, pixels, 0, w));
            loadImage(image);
        }
    }

    // Custom serialization: grabs the image's pixels into an int[] (default
    // RGB color model) so the transient image can be reconstructed on read.
    private void writeObject(ObjectOutputStream s)
        throws IOException
    {
        s.defaultWriteObject();

        int w = getIconWidth();
        int h = getIconHeight();
        int[] pixels = image != null? new int[w * h] : null;

        if (image != null) {
            try {
                PixelGrabber pg = new PixelGrabber(image, 0, 0, w, h, pixels, 0, w);
                pg.grabPixels();
                if ((pg.getStatus() & ImageObserver.ABORT) != 0) {
                    throw new IOException("failed to load image contents");
                }
            }
            catch (InterruptedException e) {
                throw new IOException("image load interrupted");
            }
        }

        s.writeInt(w);
        s.writeInt(h);
        s.writeObject(pixels);
    }

    /**
     * --- Accessibility Support ---
     */

    private AccessibleImageIcon accessibleContext = null;

    /**
     * Gets the AccessibleContext associated with this ImageIcon.
     * For image icons, the AccessibleContext takes the form of an
     * AccessibleImageIcon.
     * A new AccessibleImageIcon instance is created if necessary.
     *
     * @return an AccessibleImageIcon that serves as the
     *         AccessibleContext of this ImageIcon
     * @beaninfo
     *       expert: true
     *  description: The AccessibleContext associated with this ImageIcon.
     * @since 1.3
     */
    public AccessibleContext getAccessibleContext() {
        if (accessibleContext == null) {
            accessibleContext = new AccessibleImageIcon();
        }
        return accessibleContext;
    }

    /**
     * This class implements accessibility support for the
     * <code>ImageIcon</code> class.  It provides an implementation of the
     * Java Accessibility API appropriate to image icon user-interface
     * elements.
     * <p>
     * <strong>Warning:</strong>
     * Serialized objects of this class will not be compatible with
     * future Swing releases. The current serialization support is
     * appropriate for short term storage or RMI between applications running
     * the same version of Swing.  As of 1.4, support for long term storage
     * of all JavaBeans&trade;
     * has been added to the <code>java.beans</code> package.
     * Please see {@link java.beans.XMLEncoder}.
     * @since 1.3
     */
    protected class AccessibleImageIcon extends AccessibleContext
        implements AccessibleIcon, Serializable {

        /*
         * AccessibleContest implementation -----------------
         */

        /**
         * Gets the role of this object.
         *
         * @return an instance of AccessibleRole describing the role of the
         * object
         * @see AccessibleRole
         */
        public AccessibleRole getAccessibleRole() {
            return AccessibleRole.ICON;
        }

        /**
         * Gets the state of this object.
         *
         * @return an instance of AccessibleStateSet containing the current
         * state set of the object
         * @see AccessibleState
         */
        public AccessibleStateSet getAccessibleStateSet() {
            return null;
        }

        /**
         * Gets the Accessible parent of this object.  If the parent of this
         * object implements Accessible, this method should simply return
         * getParent().
         *
         * @return the Accessible parent of this object -- can be null if this
         * object does not have an Accessible parent
         */
        public Accessible getAccessibleParent() {
            return null;
        }

        /**
         * Gets the index of this object in its accessible parent.
         *
         * @return the index of this object in its parent; -1 if this
         * object does not have an accessible parent.
         * @see #getAccessibleParent
         */
        public int getAccessibleIndexInParent() {
            return -1;
        }

        /**
         * Returns the number of accessible children in the object.  If all
         * of the children of this object implement Accessible, than this
         * method should return the number of children of this object.
         *
         * @return the number of accessible children in the object.
         */
        public int getAccessibleChildrenCount() {
            return 0;
        }

        /**
         * Returns the nth Accessible child of the object.
         *
         * @param i zero-based index of child
         * @return the nth Accessible child of the object
         */
        public Accessible getAccessibleChild(int i) {
            return null;
        }

        /**
         * Returns the locale of this object.
         *
         * @return the locale of this object
         */
        public Locale getLocale() throws IllegalComponentStateException {
            return null;
        }

        /*
         * AccessibleIcon implementation -----------------
         */

        /**
         * Gets the description of the icon.  This is meant to be a brief
         * textual description of the object.  For example, it might be
         * presented to a blind user to give an indication of the purpose
         * of the icon.
         *
         * @return the description of the icon
         */
        public String getAccessibleIconDescription() {
            return ImageIcon.this.getDescription();
        }

        /**
         * Sets the description of the icon.  This is meant to be a brief
         * textual description of the object.  For example, it might be
         * presented to a blind user to give an indication of the purpose
         * of the icon.
         *
         * @param description the description of the icon
         */
        public void setAccessibleIconDescription(String description) {
            ImageIcon.this.setDescription(description);
        }

        /**
         * Gets the height of the icon.
         *
         * @return the height of the icon
         */
        public int getAccessibleIconHeight() {
            return ImageIcon.this.height;
        }

        /**
         * Gets the width of the icon.
         *
         * @return the width of the icon
         */
        public int getAccessibleIconWidth() {
            return ImageIcon.this.width;
        }

        private void readObject(ObjectInputStream s)
            throws ClassNotFoundException, IOException
        {
            s.defaultReadObject();
        }

        private void writeObject(ObjectOutputStream s)
            throws IOException
        {
            s.defaultWriteObject();
        }
    }  // AccessibleImageIcon
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.core.request.mapper;
import java.lang.reflect.Modifier;
import java.util.List;
import org.apache.wicket.protocol.http.WebApplication;
import org.apache.wicket.request.Request;
import org.apache.wicket.request.Url;
import org.apache.wicket.request.component.IRequestablePage;
import org.apache.wicket.request.mapper.info.PageComponentInfo;
import org.apache.wicket.request.mapper.parameter.IPageParametersEncoder;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.apache.wicket.request.mapper.parameter.PageParametersEncoder;
import org.apache.wicket.util.lang.Args;
import org.apache.wicket.util.lang.PackageName;
import org.apache.wicket.util.string.Strings;
/**
* A request mapper that mounts all bookmarkable pages in a given package.
* <p>
* To mount this mapper onto a path use the {@link WebApplication#mountPackage(String, Class)}, ex:
*
* <pre>
* MyApp#init() {
*
* super.init();
* mountPackage("/my/path", MyPage.class);
* }
* </pre>
*
* will result in urls like {@code /my/path/MyPage}
* </p>
*
* <pre>
* Page Class - Render (BookmarkablePageRequestHandler)
* /MyPage
* (will redirect to hybrid alternative if page is not stateless)
*
* Page Instance - Render Hybrid (RenderPageRequestHandler for pages that were created using bookmarkable URLs)
* /MyPage?2
*
* Page Instance - Bookmarkable Listener (BookmarkableListenerInterfaceRequestHandler)
* /MyPage?2-click-foo-bar-baz
* /MyPage?2-click.1-foo-bar-baz (1 is behavior index)
* (these will redirect to hybrid if page is not stateless)
* </pre>
*/
public class PackageMapper extends AbstractBookmarkableMapper
{
	/**
	 * the name of the package for which all bookmarkable pages should be mounted
	 */
	private final PackageName packageName;

	/**
	 * Constructor.
	 *
	 * @param packageName
	 */
	public PackageMapper(String mountPath, final PackageName packageName)
	{
		this(mountPath, packageName, new PageParametersEncoder());
	}

	/**
	 * Constructor.
	 *
	 * @param packageName
	 * @param pageParametersEncoder
	 */
	public PackageMapper(String mountPath, final PackageName packageName,
		final IPageParametersEncoder pageParametersEncoder)
	{
		super(mountPath, pageParametersEncoder);

		Args.notNull(packageName, "packageName");

		this.packageName = packageName;
	}

	@Override
	protected Url buildUrl(UrlInfo info)
	{
		Class<? extends IRequestablePage> pageClass = info.getPageClass();
		if (!packageName.equals(PackageName.forClass(pageClass)))
		{
			// page lives outside the mounted package => not ours to encode
			return null;
		}

		Url url = new Url();
		for (int i = 0; i < mountSegments.length; i++)
		{
			url.getSegments().add(mountSegments[i]);
		}

		// strip the package prefix so only the package-relative class name
		// becomes a url segment
		String fullyQualifiedClassName = pageClass.getName();
		String relativeClassName = fullyQualifiedClassName;
		int prefixLength = packageName.getName().length();
		if (prefixLength > 0)
		{
			relativeClassName = fullyQualifiedClassName.substring(prefixLength + 1);
		}
		url.getSegments().add(transformForUrl(relativeClassName));

		encodePageComponentInfo(url, info.getPageComponentInfo());

		PageParameters copy = new PageParameters(info.getPageParameters());
		if (!setPlaceholders(copy, url))
		{
			// mandatory parameter is not provided => cannot build Url
			return null;
		}

		return encodePageParameters(url, copy, pageParametersEncoder);
	}

	@Override
	protected UrlInfo parseRequest(Request request)
	{
		Url url = request.getUrl();
		if (!urlStartsWithMountedSegments(url))
		{
			return null;
		}

		// try to extract page and component information from URL
		PageComponentInfo componentInfo = getPageComponentInfo(url);

		final List<String> segments = url.getSegments();
		if (segments.size() <= mountSegments.length)
		{
			return null;
		}

		// the segment right after the mount path names the page class
		String rawSegment = segments.get(mountSegments.length);
		if (Strings.isEmpty(rawSegment))
		{
			return null;
		}

		String candidate = cleanClassName(rawSegment);
		if (!isValidClassName(candidate))
		{
			return null;
		}

		String fullyQualifiedClassName = packageName.getName() + '.' +
			transformFromUrl(candidate);
		Class<? extends IRequestablePage> pageClass = getPageClass(fullyQualifiedClassName);
		if (pageClass == null || Modifier.isAbstract(pageClass.getModifiers()) ||
			!IRequestablePage.class.isAssignableFrom(pageClass))
		{
			return null;
		}

		// extract the PageParameters from URL if there are any
		Url urlWithoutPageSegment = new Url(url);
		urlWithoutPageSegment.getSegments().remove(mountSegments.length);
		Request requestWithoutPageSegment = request.cloneWithUrl(urlWithoutPageSegment);
		PageParameters pageParameters = extractPageParameters(requestWithoutPageSegment,
			urlWithoutPageSegment);

		return new UrlInfo(componentInfo, pageClass, pageParameters);
	}

	/**
	 * filter out invalid class names for package mapper. getting trash for class names
	 * can e.g. happen when the home page is in the same package that is mounted by package mapper
	 * but the request was previously mapped by e.g. {@link HomePageMapper}. We then get some
	 * strange url like '/example/..' and wicket tries to look up class name '..'.
	 * <p/>
	 * @see <a href="https://issues.apache.org/jira/browse/WICKET-4303">WICKET-4303</a>
	 * <p/>
	 */
	private boolean isValidClassName(String className)
	{
		// darn simple check - feel free to enhance this method to your needs:
		// non-empty and not starting with '.' (java class names never do)
		return !Strings.isEmpty(className) && !className.startsWith(".");
	}

	/**
	 * Gives a chance to specializations of this mapper to transform the alias of the class name to
	 * the real class name
	 *
	 * @param classNameAlias
	 *            the alias for the class name
	 * @return the real class name
	 */
	protected String transformFromUrl(final String classNameAlias)
	{
		return classNameAlias;
	}

	/**
	 * Gives a chance to specializations of this mapper to transform the real class name to an alias
	 * which is prettier to represent in the Url
	 *
	 * @param className
	 *            the real class name
	 * @return the class name alias
	 */
	protected String transformForUrl(final String className)
	{
		return className;
	}

	@Override
	protected boolean pageMustHaveBeenCreatedBookmarkable()
	{
		return false;
	}

	@Override
	protected boolean checkPageClass(Class<? extends IRequestablePage> pageClass)
	{
		return packageName.equals(PackageName.forClass(pageClass));
	}
}
| |
package com.flipzu;
/**
* Copyright 2011 Flipzu
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Initial Release: Dario Rapisardi <dario@rapisardi.org>
*
*/
import static org.jboss.netty.buffer.ChannelBuffers.dynamicBuffer;
import java.io.IOException;
import java.io.InputStream;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBufferInputStream;
import com.xuggle.ferry.IBuffer;
import com.xuggle.xuggler.IAudioSamples;
import com.xuggle.xuggler.ICodec;
import com.xuggle.xuggler.IContainer;
import com.xuggle.xuggler.IContainerFormat;
import com.xuggle.xuggler.IPacket;
import com.xuggle.xuggler.IStream;
import com.xuggle.xuggler.IStreamCoder;
import com.xuggle.xuggler.IAudioSamples.Format;
import com.xuggle.xuggler.ICodec.ID;
/**
* Estela Xuggler Transcoder
*
* @author Dario Rapisardi <dario@rapisardi.org>
*
*/
public class XugglerTranscoder extends FzTranscoder {
// Output-side Xuggler state; created lazily by initEncode() and reused
// across transcode() calls so the encoder keeps its stream context.
private IContainer mOContainer = null;
private IContainerFormat oFmt = null;
// First packet of the input stream; contains stream metadata that the
// decoder must consume before audio packets (see decode()).
private IPacket firstPacket = null; // contains stream metadata
// Trailing partial packet carried over from the previous decode() call.
private IPacket lastPacket = null;
// Output stream and its coder, configured from Config (codec/bitrate/etc.).
private IStream os = null;
private IStreamCoder oc = null;
private final Debug debug = Debug.getInstance();
/**
 * Lazily sets up the output side of the transcoder: the output container
 * (opened on /dev/null since encoded packets are collected manually), the
 * output stream and its audio coder, all configured from Config.
 * Safe to call repeatedly; already-initialized pieces are reused.
 *
 * @return true on success, false if the container or codec could not be opened
 */
private boolean initEncode() {
    debug.logTranscoder("XugglerTranscoder, initEncode() called");
    if (mOContainer == null)
        mOContainer = IContainer.make();
    if (oFmt == null) {
        oFmt = IContainerFormat.make();
        oFmt.setOutputFormat(Config.getInstance().getOutputCodec(), null, null);
    }
    if ( !mOContainer.isOpened() ) {
        int retval = 0;
        // /dev/null target: the container only drives the muxer; encoded
        // bytes are harvested from packets, not from this file.
        retval = mOContainer.open("/dev/null", IContainer.Type.WRITE, oFmt);
        if (retval < 0) {
            debug.logTranscoder("XugglerTranscoder, initEncode, can't open output container");
            return false;
        }
    }
    /* create output audio coder */
    if ( os == null ) {
        os = mOContainer.addNewStream(0);
        oc = os.getStreamCoder();
        // NOTE(review): the mp3 branch calls setCodecID() while the aac and
        // default branches call setCodec() — confirm against the Xuggler
        // IStreamCoder API whether these are equivalent here or whether one
        // branch is a latent bug.
        if ( Config.getInstance().getOutputCodec().equals("mp3"))
            oc.setCodecID(ID.CODEC_ID_MP3);
        else if ( Config.getInstance().getOutputCodec().equals("aac"))
            oc.setCodec(ID.CODEC_ID_AAC);
        else
            oc.setCodec(ID.CODEC_ID_MP3); // default
        oc.setChannels(Config.getInstance().getOutputChannels());
        oc.setBitRate(Config.getInstance().getOutputBitrate());
        oc.setSampleRate(Config.getInstance().getOutputSampleRate());
        if (oc.open() < 0) {
            debug.logTranscoder("XugglerTranscoder, initEncode, can't open output codec");
            return false;
        }
    }
    return true;
}
/**
 * Transcodes one buffer of compressed input audio: decodes it to raw PCM
 * via {@link #decode} and re-encodes the PCM with {@code encode}. Either
 * stage may yield null, which is logged and propagated.
 *
 * @param buf buffer of encoded input audio (may be null)
 * @return buffer of re-encoded audio, or null if either stage produced none
 * @throws IOException if the underlying streams fail
 */
public ChannelBuffer transcode ( ChannelBuffer buf ) throws IOException {
    ChannelBuffer pcm = decode(buf);
    if ( pcm == null ) {
        debug.logTranscoder("XugglerTranscoder, transcode, got NULL buffer from decode()");
    } else {
        debug.logTranscoder("XugglerTranscoder, transcode, got " + pcm.readableBytes() + " decoded bytes");
    }

    ChannelBuffer out = encode( pcm );
    if ( out == null ) {
        debug.logTranscoder("XugglerTranscoder, transcode, got NULL buffer from encode()");
    } else {
        debug.logTranscoder("XugglerTranscoder, transcode, got " + out.readableBytes() + " encoded bytes");
    }
    return out;
}
/**
 * Decodes one chunk of encoded audio (like AAC) into raw PCM data.
 * <p>
 * The first packet ever seen ({@code firstPacket}, stream metadata the codec
 * needs) is re-fed on every call, and the trailing packet of the previous
 * call ({@code lastPacket}) is concatenated with this chunk's first packet so
 * frames split across chunk boundaries still decode.
 *
 * @param buf buffer with encoded audio; may be null or empty
 * @return buffer with decoded PCM data, or null if nothing could be decoded
 */
public synchronized ChannelBuffer decode ( ChannelBuffer buf ) {
    /* return buffer is created lazily, once the first sample size is known */
    ChannelBuffer ret_buf = null;
    if (buf == null) {
        return buf;
    }
    if (buf.readableBytes() == 0)
        return ret_buf;
    IContainer mIContainer = IContainer.make();
    IContainerFormat mICfmt = IContainerFormat.make();
    int retval = 0;
    ChannelBufferInputStream cb = new ChannelBufferInputStream(buf);
    retval = mIContainer.open((InputStream) cb, mICfmt, false, false);
    if (retval < 0) {
        debug.logTranscoder("XugglerTranscoder, decode, can't open stream");
        return ret_buf;
    }
    int numStreams = mIContainer.getNumStreams();
    if (numStreams != 1) {
        debug.logTranscoder("XugglerTranscoder, decode, wrong number of streams");
        mIContainer.close();
        return ret_buf;
    }
    mIContainer.queryStreamMetaData();
    /* look for AUDIO stream */
    IStream is = mIContainer.getStream(0);
    IStreamCoder ic = is.getStreamCoder();
    ICodec.Type cType = ic.getCodecType();
    IAudioSamples inSamples = null;
    if ( cType == ICodec.Type.CODEC_TYPE_AUDIO ) {
        if (ic.open() < 0) {
            debug.logTranscoder("XugglerTranscoder, decode, can't open input codec");
            mIContainer.close();
            return ret_buf;
        }
        debug.logTranscoder("XugglerTranscoder, got channels " + ic.getChannels() + " sample rate " + ic.getSampleRate() + " for codec " + ic.getCodecID());
        inSamples = IAudioSamples.make(1024, ic.getChannels());
        if ( inSamples == null ) {
            debug.logTranscoder("XugglerTranscoder, decode, can't allocate IAudioSamples");
            // FIX: the opened codec and container were leaked on this path
            ic.close();
            mIContainer.close();
            return ret_buf;
        }
    } else {
        debug.logTranscoder("XugglerTranscoder, decode, can't find audio stream");
        // FIX: the opened container was leaked on this path
        mIContainer.close();
        return ret_buf;
    }
    // inSamples is guaranteed non-null from here on (all null paths returned)
    IPacket iPacket = IPacket.make();
    retval = 0;
    /* consume stream metadata. Xuggler needs this */
    if ( firstPacket != null ) {
        retval = ic.decodeAudio(inSamples, firstPacket, 0);
        debug.logTranscoder("XugglerTranscoder, decode, firstPacket retval " + retval + " with size " + firstPacket.getSize());
    }
    /* see if we have to decode an uncomplete packet from previous call */
    if ( lastPacket != null ) {
        debug.logTranscoder("XugglerTranscoder, decode, writing lastPacket");
        /* create new concatenated packet */
        if ( mIContainer.readNextPacket(iPacket) == 0) {
            IBuffer buf1 = lastPacket.getData();
            IBuffer buf2 = iPacket.getData();
            IBuffer new_buf = IBuffer.make(null, buf1.getSize()+buf2.getSize());
            // getByteArray() allocates and returns the copy; the previous
            // `new byte[...]` pre-allocations were dead stores
            byte[] ba1 = buf1.getByteArray(0, buf1.getSize());
            byte[] ba2 = buf2.getByteArray(0, buf2.getSize());
            new_buf.put(ba1, 0, 0, ba1.length);
            new_buf.put(ba2, 0, ba1.length, ba2.length);
            IPacket new_packet = IPacket.make(new_buf);
            retval = ic.decodeAudio(inSamples, new_packet, 0);
            debug.logTranscoder("XugglerTranscoder, decode, lastPacket decode retval " + retval);
            if ( retval > 0 && inSamples.getSize() > 0 ) {
                if ( ret_buf == null )
                    ret_buf = dynamicBuffer(inSamples.getSize());
                ret_buf.writeBytes(inSamples.getByteBuffer());
            }
        }
    }
    /* start of the decoding loop */
    while (mIContainer.readNextPacket(iPacket) == 0) {
        if ( firstPacket == null ) {
            // remember the very first packet (stream metadata) for later calls
            firstPacket = IPacket.make(iPacket, true);
            debug.logTranscoder("XugglerTranscoder, decode, firstPacket size " + firstPacket.getSize());
        }
        retval = ic.decodeAudio(inSamples, iPacket, 0);
        if (retval > 0 && inSamples.getSize() > 0) {
            if ( ret_buf == null )
                ret_buf = dynamicBuffer(inSamples.getSize());
            ret_buf.writeBytes(inSamples.getByteBuffer());
        }
        // keep the last packet; it may be incomplete and is retried next call
        lastPacket = IPacket.make(iPacket, true);
    }
    ic.close();
    is.delete();
    mIContainer.close();
    return ret_buf;
}
/**
 * Encodes one chunk of raw PCM data with the configured output codec
 * (like MP3), lazily initializing the encoder on first use.
 *
 * @param buf buffer with raw PCM data; may be null or empty
 * @return buffer with encoded audio, or null if nothing was produced
 */
public synchronized ChannelBuffer encode ( ChannelBuffer buf ) {
    if (!initEncode())
        return null;
    /* create dynamic return buffer */
    ChannelBuffer ret_buf = null;
    if (buf == null) {
        return buf;
    }
    if (buf.readableBytes() == 0)
        return ret_buf;
    /* copy raw data to IAudioSamples for encoding */
    IPacket oPacket = IPacket.make();
    byte[] raw_buf = new byte[buf.readableBytes()];
    buf.getBytes(0, raw_buf);
    IBuffer raw_data = IBuffer.make(null, raw_buf, 0, raw_buf.length);
    IAudioSamples outSamples = IAudioSamples.make(raw_data, oc.getChannels(), IAudioSamples.Format.FMT_S16);
    // whole audio frames only; assumes 2 bytes per sample (S16) — note the
    // channel count is not factored into this division, TODO confirm intent
    int numSamples = oc.getAudioFrameSize()*(raw_buf.length/(oc.getAudioFrameSize()*2));
    outSamples.setComplete(true, numSamples, oc.getSampleRate(), oc.getChannels(), Format.FMT_S16, IAudioSamples.samplesToDefaultPts(numSamples, oc.getSampleRate()));
    int retval = 0;
    int numSamplesConsumed = 0;
    /* encoding loop */
    while (numSamplesConsumed < outSamples.getNumSamples()) {
        retval = oc.encodeAudio(oPacket, outSamples, numSamplesConsumed);
        if (retval < 0) {
            // FIX: a negative return means an encoder error; previously
            // numSamplesConsumed never advanced and this loop spun forever
            debug.logTranscoder("XugglerTranscoder, encode, encodeAudio failed with retval " + retval);
            break;
        }
        numSamplesConsumed += retval;
        if ( oPacket.isComplete()) {
            int sz = oPacket.getByteBuffer().capacity();
            if ( ret_buf == null )
                ret_buf = dynamicBuffer(sz);
            ret_buf.writeBytes(oPacket.getByteBuffer());
        }
    }
    return ret_buf;
}
/**
 * Releases the encoder-side Xuggler resources (output container, stream
 * coder and stream) when this transcoder is reclaimed by the GC.
 * <p>
 * Cleanup is wrapped in try/finally so that {@code super.finalize()} is
 * always chained, even if one of the close/delete calls throws — previously
 * an exception from {@code close()} would skip the superclass finalizer.
 */
@Override
protected void finalize() throws Throwable {
    try {
        debug.logTranscoder("XugglerTranscoder(), finalize()");
        if ( mOContainer != null ) {
            mOContainer.close();
            mOContainer = null;
        }
        if ( oc != null ) {
            oc.close();
            oc = null;
        }
        if ( os != null ) {
            os.delete();
            os = null;
        }
    } finally {
        super.finalize();
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.vault.util.xml.serialize;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import javax.xml.namespace.NamespaceContext;
import javax.xml.stream.FactoryConfigurationError;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import com.ctc.wstx.api.WstxOutputProperties;
import com.ctc.wstx.stax.WstxOutputFactory;
import com.sun.xml.txw2.output.IndentingXMLStreamWriter;
/** StAX XML Stream Writer filter. Adds the following functionality:
* <ul>
* <li>optional line break before each attribute</li>
* <li>new line at end</li>
* <li>indentation for elements and comments</li>
* </ul>
*/
public class FormattingXmlStreamWriter implements XMLStreamWriter, AutoCloseable {
// Underlying character sink of the wrapped writer; used for out-of-band
// whitespace (indentation, line breaks) that must not change writer state.
private final Writer rawWriter;
// The wrapped StAX writer (WoodstoX); buffered attributes are flushed to it.
private final XMLStreamWriter writer;
// Formatting options: indent string and attribute-per-line flag.
private final OutputFormat output;
// Delegate that indents elements and comments.
private final IndentingXMLStreamWriter elementIndentingXmlWriter;
// Counters for the element currently being started; reset in onStartElement().
int numNamespaceDeclarations = 0;
int numAttributes = 0;
// Current element nesting depth, drives the indentation width.
private int depth = 0;
// The first attribute of an element is buffered: only when a second
// attribute (or the element end) arrives do we know whether to line-break.
private Attribute bufferedAttribute;
/**
 * Creates a formatting writer on top of a WoodstoX stream writer.
 *
 * @param output stream to write the XML to (UTF-8)
 * @param format formatting options (indent, attribute line breaks)
 * @return a new writer instance
 * @throws XMLStreamException if the underlying writer cannot be created
 */
public static FormattingXmlStreamWriter create(OutputStream output, OutputFormat format)
throws XMLStreamException, FactoryConfigurationError {
// always use WoodstoX
XMLOutputFactory factory = new WstxOutputFactory();
factory.setProperty(WstxOutputProperties.P_USE_DOUBLE_QUOTES_IN_XML_DECL, true);
return new FormattingXmlStreamWriter(factory, output, format);
}
private FormattingXmlStreamWriter(XMLOutputFactory factory, OutputStream output, OutputFormat format)
throws XMLStreamException, FactoryConfigurationError {
this(factory.createXMLStreamWriter(output, StandardCharsets.UTF_8.name()), format);
}
// Requires a writer exposing its underlying Writer via the WoodstoX
// property P_OUTPUT_UNDERLYING_WRITER (needed for raw whitespace output).
private FormattingXmlStreamWriter(XMLStreamWriter writer, OutputFormat output) {
this.output = output;
this.writer = writer;
this.rawWriter = (Writer) writer.getProperty(WstxOutputProperties.P_OUTPUT_UNDERLYING_WRITER);
if (this.rawWriter == null) {
throw new IllegalStateException("Could not get underlying writer!");
}
this.elementIndentingXmlWriter = new IndentingXMLStreamWriter(writer);
this.elementIndentingXmlWriter.setIndentStep(output.getIndent());
}
@Override
public void writeEndDocument() throws XMLStreamException {
// nothing can be written after writeEndDocument() has been called, therefore call the additional new line before
elementIndentingXmlWriter.writeEndDocument();
addLineBreak(true);
}
@Override
public void writeStartElement(String localName) throws XMLStreamException {
onStartElement();
elementIndentingXmlWriter.writeStartElement(localName);
}
@Override
public void writeStartElement(String namespaceURI, String localName) throws XMLStreamException {
onStartElement();
elementIndentingXmlWriter.writeStartElement(namespaceURI, localName);
}
@Override
public void writeStartElement(String prefix, String localName, String namespaceURI) throws XMLStreamException {
onStartElement();
elementIndentingXmlWriter.writeStartElement(prefix, localName, namespaceURI);
}
@Override
public void writeEndElement() throws XMLStreamException {
// is it new element or
// a still-buffered single attribute must be emitted before the element ends
flushBufferedAttribute();
depth--;
elementIndentingXmlWriter.writeEndElement();
}
// Common bookkeeping for all writeStartElement overloads: flush any pending
// attribute of the parent element and reset the per-element counters.
private void onStartElement() throws XMLStreamException {
flushBufferedAttribute();
numNamespaceDeclarations = 0;
numAttributes = 0;
depth++;
}
@Override
public void writeNamespace(String prefix, String namespaceURI) throws XMLStreamException {
numNamespaceDeclarations++;
elementIndentingXmlWriter.writeNamespace(prefix, namespaceURI);
}
@Override
public void writeAttribute(String localName, String value) throws XMLStreamException {
if (onAttribute(null, null, localName, value)) {
elementIndentingXmlWriter.writeAttribute(localName, value);
}
}
@Override
public void writeAttribute(String prefix, String namespaceURI, String localName, String value) throws XMLStreamException {
if (onAttribute(prefix, namespaceURI, localName, value)) {
elementIndentingXmlWriter.writeAttribute(prefix, namespaceURI, localName, value);
}
}
@Override
public void writeAttribute(String namespaceURI, String localName, String value) throws XMLStreamException {
if (onAttribute(null, namespaceURI, localName, value)) {
elementIndentingXmlWriter.writeAttribute(namespaceURI, localName, value);
}
}
// Value object holding one deferred attribute until we know whether the
// element has more than one attribute/namespace declaration.
private final class Attribute {
private final String prefix;
private final String namespaceURI;
private final String localName;
private final String value;
public Attribute(String prefix, String namespaceURI, String localName, String value) {
super();
this.prefix = prefix;
this.namespaceURI = namespaceURI;
this.localName = localName;
this.value = value;
}
// Replays the attribute on the given writer with the same overload that
// would originally have been used (based on which parts are null).
public void write(XMLStreamWriter writer) throws XMLStreamException {
if (prefix == null) {
if (namespaceURI == null) {
writer.writeAttribute(localName, value);
} else {
writer.writeAttribute(namespaceURI, localName, value);
}
} else {
writer.writeAttribute(prefix, namespaceURI, localName, value);
}
}
}
// Decides whether the attribute should be written immediately (returns true)
// or buffered (returns false). With attribute splitting enabled, the first
// attribute is buffered; once a second one arrives both go on their own lines.
private boolean onAttribute(String prefix, String namespaceURI, String localName, String value) throws XMLStreamException {
numAttributes++;
if (output.isSplitAttributesByLineBreaks()) {
// if the amount of namespace declarations + attributes is bigger than 1
if (numNamespaceDeclarations + numAttributes > 1) {
if (bufferedAttribute != null) {
addLineBreak(true);
indent(true);
flushBufferedAttribute();
}
addLineBreak(true);
indent(true);
} else {
bufferedAttribute = new Attribute(prefix, namespaceURI, localName, value);
// buffer attributes to wait for the next ones
return false;
}
}
return true;
}
// Writes the deferred attribute, if any. Returns true if one was written.
private boolean flushBufferedAttribute() throws XMLStreamException {
if (bufferedAttribute != null) {
bufferedAttribute.write(writer);
bufferedAttribute = null;
return true;
}
return false;
}
// Emits indentation for the current depth directly on the raw writer.
// When indenting an attribute, one space is trimmed from the last indent
// step because the stream writer itself inserts a separating space.
private void indent(boolean isAttribute) throws XMLStreamException {
// writeCharacters does close the current element and changes the state!
// Stax2.writeSpace cannot be used either due to https://github.com/FasterXML/woodstox/issues/95
// instead write directly to underlying writer
try {
writer.flush();
if (depth > 0) {
for (int i = 0; i < depth; i++) {
final String indent;
if (isAttribute && i == depth - 1) {
// leave out one space as that is automatically added by any XMLStreamWriter between any two attributes
indent = output.getIndent().substring(0, output.getIndent().length() - 1);
} else {
indent = output.getIndent();
}
rawWriter.write(indent);
}
}
rawWriter.flush();
} catch (IOException e) {
throw new XMLStreamException("Could not indent attribute", e);
}
}
// Writes a newline. keepState=true bypasses the stream writer (raw output,
// element state untouched); keepState=false goes through writeCharacters,
// which closes the currently open start tag.
private void addLineBreak(boolean keepState) throws XMLStreamException {
if (keepState) {
try {
writer.flush();
rawWriter.write('\n');
rawWriter.flush();
} catch (IOException e) {
throw new XMLStreamException("Could not add line break", e);
}
} else {
writeCharacters("\n");
}
}
@Override
public void writeComment(String data) throws XMLStreamException {
flushBufferedAttribute();
addLineBreak(false);
indent(false);
elementIndentingXmlWriter.writeComment(data);
}
// The remaining methods simply delegate to the indenting writer.
public void close() throws XMLStreamException {
elementIndentingXmlWriter.close();
}
public void setIndentStep(String s) {
elementIndentingXmlWriter.setIndentStep(s);
}
public void flush() throws XMLStreamException {
elementIndentingXmlWriter.flush();
}
public void writeDefaultNamespace(String namespaceURI) throws XMLStreamException {
elementIndentingXmlWriter.writeDefaultNamespace(namespaceURI);
}
public void writeProcessingInstruction(String target) throws XMLStreamException {
elementIndentingXmlWriter.writeProcessingInstruction(target);
}
public void writeProcessingInstruction(String target, String data) throws XMLStreamException {
elementIndentingXmlWriter.writeProcessingInstruction(target, data);
}
public void writeStartDocument() throws XMLStreamException {
elementIndentingXmlWriter.writeStartDocument();
}
public void writeStartDocument(String version) throws XMLStreamException {
elementIndentingXmlWriter.writeStartDocument(version);
}
public void writeDTD(String dtd) throws XMLStreamException {
elementIndentingXmlWriter.writeDTD(dtd);
}
public void writeStartDocument(String encoding, String version) throws XMLStreamException {
elementIndentingXmlWriter.writeStartDocument(encoding, version);
}
public void writeEntityRef(String name) throws XMLStreamException {
elementIndentingXmlWriter.writeEntityRef(name);
}
public void writeEmptyElement(String namespaceURI, String localName) throws XMLStreamException {
elementIndentingXmlWriter.writeEmptyElement(namespaceURI, localName);
}
public String getPrefix(String uri) throws XMLStreamException {
return elementIndentingXmlWriter.getPrefix(uri);
}
public void setPrefix(String prefix, String uri) throws XMLStreamException {
elementIndentingXmlWriter.setPrefix(prefix, uri);
}
public void writeEmptyElement(String prefix, String localName, String namespaceURI) throws XMLStreamException {
elementIndentingXmlWriter.writeEmptyElement(prefix, localName, namespaceURI);
}
public void setDefaultNamespace(String uri) throws XMLStreamException {
elementIndentingXmlWriter.setDefaultNamespace(uri);
}
public void writeEmptyElement(String localName) throws XMLStreamException {
elementIndentingXmlWriter.writeEmptyElement(localName);
}
public void setNamespaceContext(NamespaceContext context) throws XMLStreamException {
elementIndentingXmlWriter.setNamespaceContext(context);
}
public NamespaceContext getNamespaceContext() {
return elementIndentingXmlWriter.getNamespaceContext();
}
public Object getProperty(String name) throws IllegalArgumentException {
return elementIndentingXmlWriter.getProperty(name);
}
public void writeCharacters(String text) throws XMLStreamException {
elementIndentingXmlWriter.writeCharacters(text);
}
public void writeCharacters(char[] text, int start, int len) throws XMLStreamException {
elementIndentingXmlWriter.writeCharacters(text, start, len);
}
public void writeCData(String data) throws XMLStreamException {
elementIndentingXmlWriter.writeCData(data);
}
public String toString() {
return elementIndentingXmlWriter.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.protocol.http.util.accesslog;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import org.apache.jmeter.protocol.http.control.CookieManager;
import org.apache.jmeter.protocol.http.sampler.HTTPSampler;
import org.apache.jmeter.testelement.TestCloneable;
import org.apache.jmeter.testelement.TestElement;
import org.apache.jmeter.testelement.ThreadListener;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.oro.text.regex.Pattern;
import org.apache.oro.text.regex.Perl5Compiler;
import org.apache.oro.text.regex.Perl5Matcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Provides Session Filtering for the AccessLog Sampler.
 * <p>
 * Each distinct client IP address found in the log lines gets its own
 * {@link CookieManager}, so replayed requests from different sessions keep
 * their cookies apart. The manager collections are shared between the
 * per-thread clones of this filter; access is coordinated via {@code LOCK}.
 */
public class SessionFilter implements Filter, Serializable, TestCloneable,ThreadListener {
    /** Matches a dotted-quad IPv4 address inside a log line. */
    private static final java.util.regex.Pattern IP_PATTERN = java.util.regex.Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}");
    private static final long serialVersionUID = 233L;
    private static final Logger log = LoggerFactory.getLogger(SessionFilter.class);
    /** True unless the "jmeter.regex.engine" property selects the legacy ORO engine. */
    private static final boolean USE_JAVA_REGEX = !JMeterUtils.getPropDefault(
            "jmeter.regex.engine", "oro").equalsIgnoreCase("oro");
    /**
     * Protects access to managersInUse
     */
    private static final Object LOCK = new Object();
    /**
     * These objects are static across multiple threads in a test, via clone()
     * method.
     */
    private final Map<String, CookieManager> cookieManagers;
    private final Set<CookieManager> managersInUse;
    /** Manager most recently handed to this thread; released before the next acquire. */
    private CookieManager lastUsed;
    /**
     * Creates a new SessionFilter and initializes its fields to new collections
     */
    public SessionFilter() {
        this(new ConcurrentHashMap<>(), Collections.synchronizedSet(new HashSet<>()));
    }
    /**
     * Creates a new SessionFilter, but re-uses the given collections
     *
     * @param cookieManagers
     *            {@link CookieManager}s to be used for the different IPs
     * @param managersInUse
     *            CookieManagers currently in use by other threads
     */
    public SessionFilter(Map<String, CookieManager> cookieManagers, Set<CookieManager> managersInUse) {
        this.cookieManagers = cookieManagers;
        this.managersInUse = managersInUse;
    }
    /*
     * (non-Javadoc)
     *
     * @see org.apache.jmeter.protocol.http.util.accesslog.LogFilter#excPattern(java.lang.String)
     */
    protected boolean hasExcPattern(String text) {
        return false;
    }
    /**
     * Extracts the first IPv4 address found in the given log line, using the
     * regex engine selected by the "jmeter.regex.engine" property.
     *
     * @param logLine one line of the access log
     * @return the matched address, or an empty string if the line has none
     */
    protected String getIpAddress(String logLine) {
        if (USE_JAVA_REGEX) {
            return getIpAddressWithJavaRegex(logLine);
        }
        return getIpAddressWithOroRegex(logLine);
    }
    private String getIpAddressWithJavaRegex(String logLine) {
        Matcher matcher = IP_PATTERN.matcher(logLine);
        if (matcher.find()) {
            return matcher.group(0);
        }
        return "";
    }
    private String getIpAddressWithOroRegex(String logLine) {
        Pattern incIp = JMeterUtils.getPatternCache().getPattern("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}",
                Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.SINGLELINE_MASK);
        Perl5Matcher matcher = JMeterUtils.getMatcher();
        // FIX: a line without any IP previously made the unconditional
        // getMatch() call return null and throw a NullPointerException;
        // return "" instead, consistent with the java-regex variant.
        if (!matcher.contains(logLine, incIp)) {
            return "";
        }
        return matcher.getMatch().group(0);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void reset() {
        cookieManagers.clear();
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Object clone() {
        // clones share the manager collections so sessions span all threads
        return new SessionFilter(cookieManagers, managersInUse);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void excludeFiles(String[] filenames) {
        // NOOP
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void excludePattern(String[] regexp) {
        // NOOP
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public String filter(String text) {
        return text;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void includeFiles(String[] filenames) {
        // NOOP
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void includePattern(String[] regexp) {
        // NOOP
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isFiltered(String path,TestElement sampler) {
        // never filters a line out; instead wires the session's CookieManager
        // (keyed by the line's IP address) into the sampler
        String ipAddr = getIpAddress(path);
        CookieManager cm = getCookieManager(ipAddr);
        ((HTTPSampler)sampler).setCookieManager(cm);
        return false;
    }
    /**
     * Returns the CookieManager for the given IP, creating it on first use.
     * Blocks while another thread holds that manager; the manager previously
     * held by this thread is released first so waiters can proceed.
     *
     * @param ipAddr client IP address whose session cookies are wanted
     * @return the (now exclusively held) CookieManager for that IP
     */
    @SuppressWarnings("ThreadPriorityCheck")
    protected CookieManager getCookieManager(String ipAddr)
    {
        CookieManager cm;
        // First have to release the cookie we were using so other
        // threads stuck in wait can move on
        synchronized(LOCK) {
            if(lastUsed != null) {
                managersInUse.remove(lastUsed);
                LOCK.notifyAll();
            }
        }
        // let notified threads move on and get lock on managersInUse
        if(lastUsed != null) {
            Thread.yield();
        }
        // here is the core routine to find appropriate cookie manager and
        // check it's not being used. If used, wait until whoever's using it gives
        // it up
        synchronized(LOCK) {
            cm = cookieManagers.get(ipAddr);
            if(cm == null) {
                cm = new CookieManager();
                cm.testStarted();
                cookieManagers.put(ipAddr,cm);
            }
            while(managersInUse.contains(cm)) {
                try {
                    LOCK.wait();
                } catch (InterruptedException e) {
                    log.info("SessionFilter wait interrupted");
                    Thread.currentThread().interrupt();
                }
            }
            managersInUse.add(cm);
            lastUsed = cm;
        }
        return cm;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void setReplaceExtension(String oldextension, String newextension) {
        // NOOP
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void threadFinished() {
        // release this thread's manager so waiting threads can acquire it
        synchronized(LOCK) {
            managersInUse.remove(lastUsed);
            LOCK.notifyAll();
        }
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void threadStarted() {
        // NOOP
    }
}
| |
package cn.sswukang.library.lib.sticky_header.sticky;
import android.graphics.Rect;
import androidx.annotation.Nullable;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import cn.sswukang.library.lib.sticky_header.caching.HeaderProvider;
import cn.sswukang.library.lib.sticky_header.calculation.DimensionCalculator;
import cn.sswukang.library.lib.sticky_header.util.OrientationProvider;
/**
* Calculates the position and location of header views
*/
class HeaderPositionCalculator<VH extends RecyclerView.ViewHolder> {
// Supplies header ids and item count for the backing list.
private final StickyRecyclerHeadersAdapter<VH> mAdapter;
// Resolves the RecyclerView's orientation and reverse-layout flag.
private final OrientationProvider mOrientationProvider;
// Caches/creates the header views bound to adapter positions.
private final HeaderProvider mHeaderProvider;
// Reads a view's layout margins into a Rect.
private final DimensionCalculator mDimensionCalculator;
/**
 * The following fields are used as buffers for internal calculations. Their sole purpose is to avoid
 * allocating new Rect every time we need one.
 */
private final Rect mTempRect1 = new Rect();
private final Rect mTempRect2 = new Rect();
HeaderPositionCalculator(StickyRecyclerHeadersAdapter<VH> adapter, HeaderProvider headerProvider,
OrientationProvider orientationProvider, DimensionCalculator dimensionCalculator) {
mAdapter = adapter;
mHeaderProvider = headerProvider;
mOrientationProvider = orientationProvider;
mDimensionCalculator = dimensionCalculator;
}
/**
 * Determines if a view should have a sticky header.
 * The view has a sticky header if:
 * 1. It is the first element in the recycler view
 * 2. It has a valid ID associated to its position
 *
 * @param itemView given by the RecyclerView
 * @param orientation of the Recyclerview
 * @param position of the list item in question
 * @return True if the view should have a sticky header
 */
public boolean hasStickyHeader(View itemView, int orientation, int position) {
int offset, margin;
mDimensionCalculator.initMargins(mTempRect1, itemView);
if (orientation == LinearLayout.VERTICAL) {
offset = itemView.getTop();
margin = mTempRect1.top;
} else {
offset = itemView.getLeft();
margin = mTempRect1.left;
}
// sticky once the item has scrolled to (or past) the list edge
return offset <= margin && mAdapter.getHeaderId(position) >= 0;
}
/**
 * Determines if an item in the list should have a header that is different than the item in the
 * list that immediately precedes it. Items with no headers will always return false.
 *
 * @param position of the list item in questions
 * @param isReverseLayout TRUE if layout manager has flag isReverseLayout
 * @return true if this item has a different header than the previous item in the list
 */
public boolean hasNewHeader(int position, boolean isReverseLayout) {
if (indexOutOfBounds(position)) {
return false;
}
long headerId = mAdapter.getHeaderId(position);
if (headerId < 0) {
return false;
}
// compare with the neighbouring item on the "previous" side, which is
// position+1 when the layout is reversed
long nextItemHeaderId = -1;
int nextItemPosition = position + (isReverseLayout ? 1 : -1);
if (!indexOutOfBounds(nextItemPosition)) {
nextItemHeaderId = mAdapter.getHeaderId(nextItemPosition);
}
int firstItemPosition = isReverseLayout ? mAdapter.getItemCount() - 1 : 0;
return position == firstItemPosition || headerId != nextItemHeaderId;
}
// True if position is outside the adapter's valid range.
private boolean indexOutOfBounds(int position) {
return position < 0 || position >= mAdapter.getItemCount();
}
/**
 * Computes the on-screen bounds for the header belonging to firstView.
 * For the first (sticky) header, the bounds are additionally shifted when
 * the next section's header is pushing it off screen.
 *
 * @param bounds out-parameter receiving the header bounds
 * @param recyclerView the list being decorated
 * @param header the header view being positioned
 * @param firstView the item view the header is anchored to
 * @param firstHeader true if this is the currently sticky (top/left) header
 */
public void initHeaderBounds(Rect bounds, RecyclerView recyclerView, View header, View firstView, boolean firstHeader) {
int orientation = mOrientationProvider.getOrientation(recyclerView);
initDefaultHeaderOffset(bounds, recyclerView, header, firstView, orientation);
if (firstHeader && isStickyHeaderBeingPushedOffscreen(recyclerView, header)) {
View viewAfterNextHeader = getFirstViewUnobscuredByHeader(recyclerView, header);
int firstViewUnderHeaderPosition = recyclerView.getChildAdapterPosition(viewAfterNextHeader);
View secondHeader = mHeaderProvider.getHeader(recyclerView, firstViewUnderHeaderPosition);
translateHeaderWithNextHeader(recyclerView, mOrientationProvider.getOrientation(recyclerView), bounds,
header, viewAfterNextHeader, secondHeader);
}
}
// Places the header directly above (vertical) or left of (horizontal) the
// first item, honoring the item's and header's margins, and clamps it to
// the top/left edge of the list so it "sticks".
private void initDefaultHeaderOffset(Rect headerMargins, RecyclerView recyclerView, View header, View firstView, int orientation) {
int translationX, translationY;
mDimensionCalculator.initMargins(mTempRect1, header);
ViewGroup.LayoutParams layoutParams = firstView.getLayoutParams();
int leftMargin = 0;
int topMargin = 0;
if (layoutParams instanceof ViewGroup.MarginLayoutParams) {
ViewGroup.MarginLayoutParams marginLayoutParams = (ViewGroup.MarginLayoutParams) layoutParams;
leftMargin = marginLayoutParams.leftMargin;
topMargin = marginLayoutParams.topMargin;
}
if (orientation == LinearLayoutManager.VERTICAL) {
translationX = firstView.getLeft() - leftMargin + mTempRect1.left;
translationY = Math.max(firstView.getTop() - topMargin - header.getHeight() - mTempRect1.bottom, getListTop(recyclerView) + mTempRect1.top);
} else {
translationY = firstView.getTop() - topMargin + mTempRect1.top;
translationX = Math.max(firstView.getLeft() - leftMargin - header.getWidth() - mTempRect1.right, getListLeft(recyclerView) + mTempRect1.left);
}
headerMargins.set(translationX, translationY, translationX + header.getWidth(), translationY + header.getHeight());
}
// True when the next section's header has scrolled close enough to start
// pushing the currently sticky header out of view.
private boolean isStickyHeaderBeingPushedOffscreen(RecyclerView recyclerView, View stickyHeader) {
View viewAfterHeader = getFirstViewUnobscuredByHeader(recyclerView, stickyHeader);
int firstViewUnderHeaderPosition = recyclerView.getChildAdapterPosition(viewAfterHeader);
if (firstViewUnderHeaderPosition == RecyclerView.NO_POSITION) {
return false;
}
boolean isReverseLayout = mOrientationProvider.isReverseLayout(recyclerView);
if (firstViewUnderHeaderPosition > 0 && hasNewHeader(firstViewUnderHeaderPosition, isReverseLayout)) {
View nextHeader = mHeaderProvider.getHeader(recyclerView, firstViewUnderHeaderPosition);
mDimensionCalculator.initMargins(mTempRect1, nextHeader);
mDimensionCalculator.initMargins(mTempRect2, stickyHeader);
if (viewAfterHeader != null) {
if (mOrientationProvider.getOrientation(recyclerView) == LinearLayoutManager.VERTICAL) {
int topOfNextHeader = viewAfterHeader.getTop() - mTempRect1.bottom - nextHeader.getHeight() - mTempRect1.top;
int bottomOfThisHeader = recyclerView.getPaddingTop() + stickyHeader.getBottom() + mTempRect2.top + mTempRect2.bottom;
return topOfNextHeader < bottomOfThisHeader;
} else {
int leftOfNextHeader = viewAfterHeader.getLeft() - mTempRect1.right - nextHeader.getWidth() - mTempRect1.left;
int rightOfThisHeader = recyclerView.getPaddingLeft() + stickyHeader.getRight() + mTempRect2.left + mTempRect2.right;
return leftOfNextHeader < rightOfThisHeader;
}
}
}
return false;
}
// Shifts the sticky header's bounds up/left by however far the incoming
// next header overlaps it, producing the "push off screen" animation.
private void translateHeaderWithNextHeader(RecyclerView recyclerView, int orientation, Rect translation,
View currentHeader, @Nullable View viewAfterNextHeader, View nextHeader) {
mDimensionCalculator.initMargins(mTempRect1, nextHeader);
mDimensionCalculator.initMargins(mTempRect2, currentHeader);
if (orientation == LinearLayoutManager.VERTICAL) {
int topOfStickyHeader = getListTop(recyclerView) + mTempRect2.top + mTempRect2.bottom;
int shiftFromNextHeader = (viewAfterNextHeader != null ? viewAfterNextHeader.getTop() : 0)
- nextHeader.getHeight() - mTempRect1.bottom - mTempRect1.top - currentHeader.getHeight() - topOfStickyHeader;
if (shiftFromNextHeader < topOfStickyHeader) {
translation.top += shiftFromNextHeader;
}
} else {
int leftOfStickyHeader = getListLeft(recyclerView) + mTempRect2.left + mTempRect2.right;
int shiftFromNextHeader = (viewAfterNextHeader != null ? viewAfterNextHeader.getLeft() : 0)
- nextHeader.getWidth() - mTempRect1.right - mTempRect1.left - currentHeader.getWidth() - leftOfStickyHeader;
if (shiftFromNextHeader < leftOfStickyHeader) {
translation.left += shiftFromNextHeader;
}
}
}
/**
 * Returns the first item currently in the RecyclerView that is not obscured by a header.
 *
 * @param parent Recyclerview containing all the list items
 * @return first item that is fully beneath a header
 */
@Nullable
private View getFirstViewUnobscuredByHeader(RecyclerView parent, View firstHeader) {
boolean isReverseLayout = mOrientationProvider.isReverseLayout(parent);
// iterate children from the "top" of the layout, which is the last child
// when the layout is reversed
int step = isReverseLayout ? -1 : 1;
int from = isReverseLayout ? parent.getChildCount() - 1 : 0;
for (int i = from; i >= 0 && i <= parent.getChildCount() - 1; i += step) {
View child = parent.getChildAt(i);
if (!itemIsObscuredByHeader(parent, child, firstHeader, mOrientationProvider.getOrientation(parent))) {
return child;
}
}
return null;
}
/**
 * Determines if an item is obscured by a header
 *
 * @param parent recycler view
 * @param item to determine if obscured by header
 * @param header that might be obscuring the item
 * @param orientation of the {@link RecyclerView}
 * @return true if the item view is obscured by the header view
 */
private boolean itemIsObscuredByHeader(RecyclerView parent, View item, View header, int orientation) {
RecyclerView.LayoutParams layoutParams = (RecyclerView.LayoutParams) item.getLayoutParams();
mDimensionCalculator.initMargins(mTempRect1, header);
int adapterPosition = parent.getChildAdapterPosition(item);
if (adapterPosition == RecyclerView.NO_POSITION || mHeaderProvider.getHeader(parent, adapterPosition) != header) {
// Resolves https://github.com/timehop/sticky-headers-recyclerview/issues/36
// Handles an edge case where a trailing header is smaller than the current sticky header.
return false;
}
if (orientation == LinearLayoutManager.VERTICAL) {
int itemTop = item.getTop() - layoutParams.topMargin;
int headerBottom = getListTop(parent) + header.getBottom() + mTempRect1.bottom + mTempRect1.top;
return itemTop < headerBottom;
} else {
int itemLeft = item.getLeft() - layoutParams.leftMargin;
int headerRight = getListLeft(parent) + header.getRight() + mTempRect1.right + mTempRect1.left;
return itemLeft < headerRight;
}
}
// Top edge of the list content: the padding is only part of the usable
// area when the layout manager clips to padding.
private int getListTop(RecyclerView view) {
if (view.getLayoutManager().getClipToPadding()) {
return view.getPaddingTop();
} else {
return 0;
}
}
// Left edge of the list content; see getListTop.
private int getListLeft(RecyclerView view) {
if (view.getLayoutManager().getClipToPadding()) {
return view.getPaddingLeft();
} else {
return 0;
}
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.application.options.codeStyle;
import com.intellij.openapi.project.Project;
import com.intellij.psi.codeStyle.CodeStyleScheme;
import com.intellij.psi.codeStyle.CodeStyleSchemes;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.impl.source.codeStyle.CodeStyleSchemeImpl;
import com.intellij.util.EventDispatcher;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
 * UI-side model for the code style schemes settings page. Keeps a working copy
 * of the scheme list, the selected global scheme, the special per-project
 * scheme, and cloned settings instances, and broadcasts changes to registered
 * {@link CodeStyleSettingsListener}s. Changes are only persisted back to
 * {@link CodeStyleSchemes}/{@link CodeStyleSettingsManager} on {@link #apply()}.
 */
public class CodeStyleSchemesModel {
  // Working copy of the global scheme list being edited in the UI.
  private final List<CodeStyleScheme> mySchemes = new ArrayList<CodeStyleScheme>();
  // Currently selected global (IDE-wide) scheme; ignored when per-project settings are on.
  private CodeStyleScheme myGlobalSelected;
  // Synthetic scheme backing the "Project" entry.
  private final CodeStyleSchemeImpl myProjectScheme;
  // Default scheme used as a fallback selection when a scheme is removed.
  private final CodeStyleScheme myDefault;
  // Lazily-created editable clones of each scheme's settings (see getCloneSettings).
  private final Map<CodeStyleScheme, CodeStyleSettings> mySettingsToClone = new HashMap<CodeStyleScheme, CodeStyleSettings>();
  private final EventDispatcher<CodeStyleSettingsListener> myDispatcher = EventDispatcher.create(CodeStyleSettingsListener.class);
  private final Project myProject;
  // Mirrors CodeStyleSettingsManager.USE_PER_PROJECT_SETTINGS until applied.
  private boolean myUsePerProjectSettings;
  public static final String PROJECT_SCHEME_NAME = "Project";
  /**
   * Builds the model for the given project and initializes it from the
   * currently persisted schemes via {@link #reset()}.
   */
  public CodeStyleSchemesModel(Project project) {
    myProject = project;
    myProjectScheme = new CodeStyleSchemeImpl(PROJECT_SCHEME_NAME, false, CodeStyleSchemes.getInstance().getDefaultScheme());
    reset();
    myDefault = CodeStyleSchemes.getInstance().getDefaultScheme();
  }
  /**
   * Selects a global scheme and notifies listeners. Selecting the project
   * scheme (or re-selecting the current one) is a no-op here.
   *
   * @param selected scheme to select
   * @param source   event source forwarded to listeners (may be null)
   */
  public void selectScheme(final CodeStyleScheme selected, @Nullable Object source) {
    if (myGlobalSelected != selected && selected != myProjectScheme) {
      myGlobalSelected = selected;
      myDispatcher.getMulticaster().currentSchemeChanged(source);
    }
  }
  /**
   * Adds a scheme to the working list, optionally selecting it, and notifies listeners.
   */
  public void addScheme(final CodeStyleScheme newScheme, boolean changeSelection) {
    mySchemes.add(newScheme);
    myDispatcher.getMulticaster().schemeListChanged();
    if (changeSelection) {
      selectScheme(newScheme, this);
    }
  }
  /**
   * Removes a scheme from the working list; if it was selected, falls back to
   * the default scheme.
   */
  public void removeScheme(final CodeStyleScheme scheme) {
    mySchemes.remove(scheme);
    myDispatcher.getMulticaster().schemeListChanged();
    if (myGlobalSelected == scheme) {
      selectScheme(myDefault, this);
    }
  }
  /**
   * Returns an editable clone of the scheme's settings, creating and caching
   * it on first access so edits don't touch the real settings until applied.
   */
  public CodeStyleSettings getCloneSettings(final CodeStyleScheme scheme) {
    if (!mySettingsToClone.containsKey(scheme)) {
      mySettingsToClone.put(scheme, scheme.getCodeStyleSettings().clone());
    }
    return mySettingsToClone.get(scheme);
  }
  /** Returns the effective scheme: the project scheme when per-project settings are on. */
  public CodeStyleScheme getSelectedScheme(){
    if (myUsePerProjectSettings) {
      return myProjectScheme;
    }
    return myGlobalSelected;
  }
  public void addListener(CodeStyleSettingsListener listener) {
    myDispatcher.addListener(listener);
  }
  /** Returns a read-only view of the working scheme list (without the project scheme). */
  public List<CodeStyleScheme> getSchemes() {
    return Collections.unmodifiableList(mySchemes);
  }
  /**
   * Discards all pending edits and re-reads schemes, selection, the
   * per-project flag, and per-project settings from persisted state,
   * then notifies listeners.
   */
  public void reset() {
    myUsePerProjectSettings = getProjectSettings().USE_PER_PROJECT_SETTINGS;
    CodeStyleScheme[] allSchemes = CodeStyleSchemes.getInstance().getSchemes();
    mySettingsToClone.clear();
    mySchemes.clear();
    ContainerUtil.addAll(mySchemes, allSchemes);
    myGlobalSelected = CodeStyleSchemes.getInstance().findPreferredScheme(getProjectSettings().PREFERRED_PROJECT_CODE_STYLE);
    CodeStyleSettings perProjectSettings = getProjectSettings().PER_PROJECT_SETTINGS;
    if (perProjectSettings != null) {
      myProjectScheme.setCodeStyleSettings(perProjectSettings);
    }
    myDispatcher.getMulticaster().schemeListChanged();
    myDispatcher.getMulticaster().currentSchemeChanged(this);
  }
  public boolean isUsePerProjectSettings() {
    return myUsePerProjectSettings;
  }
  public void setUsePerProjectSettings(final boolean usePerProjectSettings) {
    setUsePerProjectSettings(usePerProjectSettings, false);
  }
  /**
   * Updates 'use per-project settings' value within the current model and optionally at the project settings.
   *
   * @param usePerProjectSettings defines whether 'use per-project settings' are in use
   * @param commit flag that defines if current project settings should be applied as well
   */
  public void setUsePerProjectSettings(final boolean usePerProjectSettings, final boolean commit) {
    if (commit) {
      final CodeStyleSettingsManager projectSettings = getProjectSettings();
      projectSettings.USE_PER_PROJECT_SETTINGS = usePerProjectSettings;
      projectSettings.PER_PROJECT_SETTINGS = myProjectScheme.getCodeStyleSettings();
    }
    if (myUsePerProjectSettings != usePerProjectSettings) {
      myUsePerProjectSettings = usePerProjectSettings;
      myDispatcher.getMulticaster().usePerProjectSettingsOptionChanged();
      myDispatcher.getMulticaster().currentSchemeChanged(this);
    }
  }
  private CodeStyleSettingsManager getProjectSettings() {
    return CodeStyleSettingsManager.getInstance(myProject);
  }
  /**
   * Returns true when the working state (per-project flag, selection, or the
   * scheme set itself) differs from the persisted state.
   */
  public boolean isSchemeListModified() {
    CodeStyleSchemes schemes = CodeStyleSchemes.getInstance();
    if (getProjectSettings().USE_PER_PROJECT_SETTINGS != myUsePerProjectSettings) return true;
    if (!myUsePerProjectSettings &&
        getSelectedScheme() != schemes.findPreferredScheme(getProjectSettings().PREFERRED_PROJECT_CODE_STYLE)) {
      return true;
    }
    Set<CodeStyleScheme> configuredSchemesSet = new HashSet<CodeStyleScheme>(getSchemes());
    Set<CodeStyleScheme> savedSchemesSet = new HashSet<CodeStyleScheme>(Arrays.asList(schemes.getSchemes()));
    return !configuredSchemesSet.equals(savedSchemesSet);
  }
  /**
   * Persists the model: writes the per-project flag/settings, syncs scheme
   * additions/removals to {@link CodeStyleSchemes}, sets the current scheme,
   * and drops the cached settings clones.
   */
  public void apply() {
    CodeStyleSettingsManager projectSettingsManager = getProjectSettings();
    projectSettingsManager.USE_PER_PROJECT_SETTINGS = myUsePerProjectSettings;
    projectSettingsManager.PREFERRED_PROJECT_CODE_STYLE =
      myUsePerProjectSettings || myGlobalSelected == null ? null : myGlobalSelected.getName();
    projectSettingsManager.PER_PROJECT_SETTINGS = myProjectScheme.getCodeStyleSettings();
    final CodeStyleScheme[] savedSchemes = CodeStyleSchemes.getInstance().getSchemes();
    final Set<CodeStyleScheme> savedSchemesSet = new HashSet<CodeStyleScheme>(Arrays.asList(savedSchemes));
    List<CodeStyleScheme> configuredSchemes = getSchemes();
    for (CodeStyleScheme savedScheme : savedSchemes) {
      if (!configuredSchemes.contains(savedScheme)) {
        CodeStyleSchemes.getInstance().deleteScheme(savedScheme);
      }
    }
    for (CodeStyleScheme configuredScheme : configuredSchemes) {
      if (!savedSchemesSet.contains(configuredScheme)) {
        CodeStyleSchemes.getInstance().addScheme(configuredScheme);
      }
    }
    CodeStyleSchemes.getInstance().setCurrentScheme(myGlobalSelected);
    // We want to avoid the situation when 'real code style' differs from the copy stored here (e.g. when 'real code style' changes
    // are 'committed' by pressing 'Apply' button). So, we reset the copies here assuming that this method is called on 'Apply'
    // button processing
    mySettingsToClone.clear();
  }
  public static boolean cannotBeModified(final CodeStyleScheme currentScheme) {
    return currentScheme.isDefault();
  }
  public static boolean cannotBeDeleted(final CodeStyleScheme currentScheme) {
    return currentScheme.isDefault();
  }
  public void fireCurrentSettingsChanged() {
    myDispatcher.getMulticaster().currentSettingsChanged();
  }
  public void fireSchemeChanged(CodeStyleScheme scheme) {
    myDispatcher.getMulticaster().schemeChanged(scheme);
  }
  public CodeStyleScheme getSelectedGlobalScheme() {
    return myGlobalSelected;
  }
  /** Copies the selected scheme's settings into the project scheme and notifies listeners. */
  public void copyToProject(final CodeStyleScheme selectedScheme) {
    myProjectScheme.getCodeStyleSettings().copyFrom(selectedScheme.getCodeStyleSettings());
    myDispatcher.getMulticaster().schemeChanged(myProjectScheme);
  }
  /**
   * Creates a new named global scheme from the project scheme's (cloned)
   * settings and adds it to the working list without selecting it.
   */
  public CodeStyleScheme exportProjectScheme(@NotNull String name) {
    CodeStyleScheme newScheme = createNewScheme(name, myProjectScheme);
    ((CodeStyleSchemeImpl)newScheme).setCodeStyleSettings(getCloneSettings(myProjectScheme));
    addScheme(newScheme, false);
    return newScheme;
  }
  /**
   * Creates a new scheme with a unique name. When {@code preferredName} is
   * null the name is derived from the parent ("Parent (1)", "Parent (2)", ...);
   * otherwise the preferred name gets a " (n)" suffix only on collision.
   *
   * @param preferredName desired name, or null to derive from the parent
   * @param parentScheme  parent scheme; must not be null when preferredName is null
   */
  public CodeStyleScheme createNewScheme(final String preferredName, final CodeStyleScheme parentScheme) {
    String name;
    if (preferredName == null) {
      if (parentScheme == null) throw new IllegalArgumentException("parentScheme must not be null");
      // Generate using parent name
      name = null;
      for (int i = 1; name == null; i++) {
        String currName = parentScheme.getName() + " (" + i + ")";
        if (findSchemeByName(currName) == null) {
          name = currName;
        }
      }
    }
    else {
      name = null;
      for (int i = 0; name == null; i++) {
        String currName = i == 0 ? preferredName : preferredName + " (" + i + ")";
        if (findSchemeByName(currName) == null) {
          name = currName;
        }
      }
    }
    return new CodeStyleSchemeImpl(name, false, parentScheme);
  }
  @Nullable
  private CodeStyleScheme findSchemeByName(final String name) {
    for (CodeStyleScheme scheme : mySchemes) {
      if (name.equals(scheme.getName())) return scheme;
    }
    return null;
  }
  public CodeStyleScheme getProjectScheme() {
    return myProjectScheme;
  }
  public boolean isProjectScheme(CodeStyleScheme scheme) {
    return myProjectScheme == scheme;
  }
  /**
   * Returns all schemes plus the project scheme, sorted: project scheme first,
   * then the default scheme, then the rest alphabetically (case-insensitive).
   */
  public List<CodeStyleScheme> getAllSortedSchemes() {
    List<CodeStyleScheme> schemes = new ArrayList<CodeStyleScheme>();
    schemes.addAll(getSchemes());
    schemes.add(myProjectScheme);
    Collections.sort(schemes, new Comparator<CodeStyleScheme>() {
      @Override
      public int compare(CodeStyleScheme s1, CodeStyleScheme s2) {
        if (isProjectScheme(s1)) return -1;
        if (isProjectScheme(s2)) return 1;
        if (s1.isDefault()) return -1;
        if (s2.isDefault()) return 1;
        return s1.getName().compareToIgnoreCase(s2.getName());
      }
    });
    return schemes;
  }
  public Project getProject() {
    return myProject;
  }
}
| |
/*
* Copyright 2011-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.lettuce.core.cluster;
import static io.lettuce.test.LettuceExtension.Connection;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.*;
import java.util.stream.Collectors;
import javax.enterprise.inject.New;
import javax.inject.Inject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import io.lettuce.core.*;
import io.lettuce.core.api.StatefulRedisConnection;
import io.lettuce.core.cluster.api.StatefulRedisClusterConnection;
import io.lettuce.core.cluster.api.async.RedisAdvancedClusterAsyncCommands;
import io.lettuce.core.cluster.api.async.RedisClusterAsyncCommands;
import io.lettuce.core.cluster.api.reactive.RedisAdvancedClusterReactiveCommands;
import io.lettuce.core.cluster.api.reactive.RedisClusterReactiveCommands;
import io.lettuce.core.cluster.api.sync.RedisAdvancedClusterCommands;
import io.lettuce.core.cluster.api.sync.RedisClusterCommands;
import io.lettuce.core.cluster.models.partitions.Partitions;
import io.lettuce.core.cluster.models.partitions.RedisClusterNode;
import io.lettuce.core.codec.Base16;
import io.lettuce.test.*;
import io.lettuce.test.condition.EnabledOnCommand;
import io.lettuce.test.settings.TestSettings;
/**
 * Integration tests for {@link StatefulRedisClusterConnection}.
 *
 * @author Mark Paluch
 * @author Jon Chambers
 */
@SuppressWarnings("rawtypes")
@ExtendWith(LettuceExtension.class)
class AdvancedClusterClientIntegrationTests extends TestSupport {
    private static final String KEY_ON_NODE_1 = "a";
    private static final String KEY_ON_NODE_2 = "b";
    private final RedisClusterClient clusterClient;
    private final StatefulRedisClusterConnection<String, String> clusterConnection;
    private final RedisAdvancedClusterAsyncCommands<String, String> async;
    private final RedisAdvancedClusterCommands<String, String> sync;
    @Inject
    AdvancedClusterClientIntegrationTests(RedisClusterClient clusterClient,
            StatefulRedisClusterConnection<String, String> clusterConnection) {
        this.clusterClient = clusterClient;
        this.clusterConnection = clusterConnection;
        this.async = clusterConnection.async();
        this.sync = clusterConnection.sync();
    }
    @BeforeEach
    void setUp() {
        this.sync.flushall();
    }
    @Test
    void nodeConnections() {
        assertThat(clusterClient.getPartitions()).hasSize(4);
        for (RedisClusterNode redisClusterNode : clusterClient.getPartitions()) {
            RedisClusterAsyncCommands<String, String> nodeConnection = async.getConnection(redisClusterNode.getNodeId());
            String myid = TestFutures.getOrTimeout(nodeConnection.clusterMyId());
            assertThat(myid).isEqualTo(redisClusterNode.getNodeId());
        }
    }
    @Test
    void unknownNodeId() {
        assertThatThrownBy(() -> async.getConnection("unknown")).isInstanceOf(RedisException.class);
    }
    @Test
    void invalidHost() {
        assertThatThrownBy(() -> async.getConnection("invalid-host", -1)).isInstanceOf(RedisException.class);
    }
    @Test
    void partitions() {
        Partitions partitions = async.getStatefulConnection().getPartitions();
        assertThat(partitions).hasSize(4);
    }
    @Test
    void differentConnections() {
        // Node-id-bound and host/port-bound connections must be distinct objects
        // for each API flavor (async, stateful, sync, reactive).
        for (RedisClusterNode redisClusterNode : clusterClient.getPartitions()) {
            RedisClusterAsyncCommands<String, String> nodeId = async.getConnection(redisClusterNode.getNodeId());
            RedisClusterAsyncCommands<String, String> hostAndPort = async.getConnection(redisClusterNode.getUri().getHost(),
                    redisClusterNode.getUri().getPort());
            assertThat(nodeId).isNotSameAs(hostAndPort);
        }
        StatefulRedisClusterConnection<String, String> statefulConnection = async.getStatefulConnection();
        for (RedisClusterNode redisClusterNode : clusterClient.getPartitions()) {
            StatefulRedisConnection<String, String> nodeId = statefulConnection.getConnection(redisClusterNode.getNodeId());
            StatefulRedisConnection<String, String> hostAndPort = statefulConnection.getConnection(redisClusterNode.getUri()
                    .getHost(), redisClusterNode.getUri().getPort());
            assertThat(nodeId).isNotSameAs(hostAndPort);
        }
        RedisAdvancedClusterCommands<String, String> sync = statefulConnection.sync();
        for (RedisClusterNode redisClusterNode : clusterClient.getPartitions()) {
            RedisClusterCommands<String, String> nodeId = sync.getConnection(redisClusterNode.getNodeId());
            RedisClusterCommands<String, String> hostAndPort = sync.getConnection(redisClusterNode.getUri().getHost(),
                    redisClusterNode.getUri().getPort());
            assertThat(nodeId).isNotSameAs(hostAndPort);
        }
        RedisAdvancedClusterReactiveCommands<String, String> rx = statefulConnection.reactive();
        for (RedisClusterNode redisClusterNode : clusterClient.getPartitions()) {
            RedisClusterReactiveCommands<String, String> nodeId = rx.getConnection(redisClusterNode.getNodeId());
            RedisClusterReactiveCommands<String, String> hostAndPort = rx.getConnection(redisClusterNode.getUri().getHost(),
                    redisClusterNode.getUri().getPort());
            assertThat(nodeId).isNotSameAs(hostAndPort);
        }
    }
    @Test
    void msetRegular() {
        Map<String, String> mset = Collections.singletonMap(key, value);
        String result = sync.mset(mset);
        assertThat(result).isEqualTo("OK");
        assertThat(sync.get(key)).isEqualTo(value);
    }
    @Test
    void msetCrossSlot() {
        // Keys "aaa".."yyy" hash to different slots; MSET must be fanned out.
        Map<String, String> mset = prepareMset();
        String result = sync.mset(mset);
        assertThat(result).isEqualTo("OK");
        for (String mykey : mset.keySet()) {
            String s1 = sync.get(mykey);
            assertThat(s1).isEqualTo("value-" + mykey);
        }
    }
    @Test
    void msetnxCrossSlot() {
        Map<String, String> mset = prepareMset();
        String key = mset.keySet().iterator().next();
        Map<String, String> submap = Collections.singletonMap(key, mset.get(key));
        assertThat(sync.msetnx(submap)).isTrue();
        assertThat(sync.msetnx(mset)).isFalse();
        for (String mykey : mset.keySet()) {
            String s1 = sync.get(mykey);
            assertThat(s1).isEqualTo("value-" + mykey);
        }
    }
    @Test
    void mgetRegular() {
        msetRegular();
        List<KeyValue<String, String>> result = sync.mget(key);
        assertThat(result).hasSize(1);
    }
    @Test
    void mgetCrossSlot() {
        msetCrossSlot();
        List<String> keys = new ArrayList<>();
        List<KeyValue<String, String>> expectation = new ArrayList<>();
        for (char c = 'a'; c < 'z'; c++) {
            String key = new String(new char[] { c, c, c });
            keys.add(key);
            expectation.add(kv(key, "value-" + key));
        }
        List<KeyValue<String, String>> result = sync.mget(keys.toArray(new String[keys.size()]));
        assertThat(result).hasSize(keys.size());
        assertThat(result).isEqualTo(expectation);
    }
    @Test
    @EnabledOnCommand("UNLINK")
    void delRegular() {
        msetRegular();
        Long result = sync.unlink(key);
        assertThat(result).isEqualTo(1);
        assertThat(TestFutures.getOrTimeout(async.get(key))).isNull();
    }
    @Test
    void delCrossSlot() {
        List<String> keys = prepareKeys();
        Long result = sync.del(keys.toArray(new String[keys.size()]));
        assertThat(result).isEqualTo(25);
        for (String mykey : keys) {
            String s1 = sync.get(mykey);
            assertThat(s1).isNull();
        }
    }
    @Test
    @EnabledOnCommand("UNLINK")
    void unlinkRegular() {
        msetRegular();
        Long result = sync.unlink(key);
        assertThat(result).isEqualTo(1);
        assertThat(sync.get(key)).isNull();
    }
    @Test
    @EnabledOnCommand("UNLINK")
    void unlinkCrossSlot() {
        List<String> keys = prepareKeys();
        Long result = sync.unlink(keys.toArray(new String[keys.size()]));
        assertThat(result).isEqualTo(25);
        for (String mykey : keys) {
            String s1 = sync.get(mykey);
            assertThat(s1).isNull();
        }
    }
    private List<String> prepareKeys() {
        msetCrossSlot();
        List<String> keys = new ArrayList<>();
        for (char c = 'a'; c < 'z'; c++) {
            String key = new String(new char[] { c, c, c });
            keys.add(key);
        }
        return keys;
    }
    @Test
    void clientSetname() {
        String name = "test-cluster-client";
        assertThat(clusterClient.getPartitions().size()).isGreaterThan(0);
        sync.clientSetname(name);
        for (RedisClusterNode redisClusterNode : clusterClient.getPartitions()) {
            RedisClusterCommands<String, String> nodeConnection = async.getStatefulConnection().sync()
                    .getConnection(redisClusterNode.getNodeId());
            assertThat(nodeConnection.clientList()).contains(name);
        }
        assertThat(sync.clientGetname()).isEqualTo(name);
    }
    @Test
    void clientSetnameRunOnError() {
        assertThatThrownBy(() -> sync.clientSetname("not allowed")).isInstanceOf(RedisCommandExecutionException.class);
    }
    @Test
    void dbSize() {
        writeKeysToTwoNodes();
        RedisClusterCommands<String, String> nodeConnection1 = clusterConnection.getConnection(ClusterTestSettings.host,
                ClusterTestSettings.port1).sync();
        // FIX: was port1 (copy-paste), which checked the same node twice and
        // left node 2 untested; the second connection must target port2.
        RedisClusterCommands<String, String> nodeConnection2 = clusterConnection.getConnection(ClusterTestSettings.host,
                ClusterTestSettings.port2).sync();
        assertThat(nodeConnection1.dbsize()).isEqualTo(1);
        assertThat(nodeConnection2.dbsize()).isEqualTo(1);
        Long dbsize = sync.dbsize();
        assertThat(dbsize).isEqualTo(2);
    }
    @Test
    void flushall() {
        writeKeysToTwoNodes();
        assertThat(sync.flushall()).isEqualTo("OK");
        Long dbsize = sync.dbsize();
        assertThat(dbsize).isEqualTo(0);
    }
    @Test
    void flushallAsync() {
        writeKeysToTwoNodes();
        assertThat(sync.flushallAsync()).isEqualTo("OK");
        // FLUSHALL ASYNC is lazy; poll until both keys are gone before asserting.
        Wait.untilTrue(() -> sync.get(KEY_ON_NODE_1) == null).waitOrTimeout();
        Wait.untilTrue(() -> sync.get(KEY_ON_NODE_2) == null).waitOrTimeout();
        assertThat(sync.get(KEY_ON_NODE_1)).isNull();
        assertThat(sync.get(KEY_ON_NODE_2)).isNull();
    }
    @Test
    void flushdb() {
        writeKeysToTwoNodes();
        assertThat(sync.flushdb()).isEqualTo("OK");
        Long dbsize = sync.dbsize();
        assertThat(dbsize).isEqualTo(0);
    }
    @Test
    void keys() {
        writeKeysToTwoNodes();
        assertThat(sync.keys("*")).contains(KEY_ON_NODE_1, KEY_ON_NODE_2);
    }
    @Test
    void keysStreaming() {
        writeKeysToTwoNodes();
        ListStreamingAdapter<String> result = new ListStreamingAdapter<>();
        assertThat(sync.keys(result, "*")).isEqualTo(2);
        assertThat(result.getList()).contains(KEY_ON_NODE_1, KEY_ON_NODE_2);
    }
    @Test
    void randomKey() {
        writeKeysToTwoNodes();
        assertThat(sync.randomkey()).isIn(KEY_ON_NODE_1, KEY_ON_NODE_2);
    }
    @Test
    void scriptFlush() {
        assertThat(sync.scriptFlush()).isEqualTo("OK");
    }
    @Test
    void scriptKill() {
        assertThatThrownBy(sync::scriptKill).hasMessageContaining("NOTBUSY");
    }
    @Test
    void scriptLoad() {
        assertThat(sync.scriptFlush()).isEqualTo("OK");
        String script = "return true";
        String sha = Base16.digest(script.getBytes());
        assertThat(sync.scriptExists(sha)).contains(false);
        String returnedSha = sync.scriptLoad(script);
        assertThat(returnedSha).isEqualTo(sha);
        assertThat(sync.scriptExists(sha)).contains(true);
    }
    @Test
    @Disabled("Run me manually, I will shutdown all your cluster nodes so you need to restart the Redis Cluster after this test")
    void shutdown() {
        sync.shutdown(true);
    }
    @Test
    void testSync() {
        RedisAdvancedClusterCommands<String, String> sync = async.getStatefulConnection().sync();
        sync.set(key, value);
        assertThat(sync.get(key)).isEqualTo(value);
        RedisClusterCommands<String, String> node2Connection = sync.getConnection(ClusterTestSettings.host,
                ClusterTestSettings.port2);
        assertThat(node2Connection.get(key)).isEqualTo(value);
        assertThat(sync.getStatefulConnection()).isSameAs(async.getStatefulConnection());
    }
    @Test
    @Inject
    void routeCommandToNoAddrPartition(@New StatefulRedisClusterConnection<String, String> connectionUnderTest) {
        // Point every partition at an unreachable host; the command must fail
        // with a connect error. Topology is restored in the finally block.
        RedisAdvancedClusterCommands<String, String> sync = connectionUnderTest.sync();
        try {
            Partitions partitions = clusterClient.getPartitions();
            for (RedisClusterNode partition : partitions) {
                partition.setUri(RedisURI.create("redis://non.existent.host:1234"));
            }
            sync.set("A", "value");// 6373
        } catch (Exception e) {
            assertThat(e).isInstanceOf(RedisException.class).hasMessageContaining("Unable to connect to");
        } finally {
            clusterClient.getPartitions().clear();
            clusterClient.reloadPartitions();
        }
    }
    @Test
    @Inject
    void routeCommandToForbiddenHostOnRedirect(
            @Connection(requiresNew = true) StatefulRedisClusterConnection<String, String> connectionUnderTest) {
        RedisAdvancedClusterCommands<String, String> sync = connectionUnderTest.sync();
        try {
            Partitions partitions = clusterClient.getPartitions();
            for (RedisClusterNode partition : partitions) {
                partition.setSlots(Collections.singletonList(0));
                if (partition.getUri().getPort() == 7380) {
                    partition.setSlots(Collections.singletonList(6373));
                } else {
                    partition.setUri(RedisURI.create("redis://non.existent.host:1234"));
                }
            }
            partitions.updateCache();
            sync.set("A", "value");// 6373
        } catch (Exception e) {
            assertThat(e).isInstanceOf(RedisException.class).hasMessageContaining("not allowed");
        } finally {
            clusterClient.getPartitions().clear();
            clusterClient.reloadPartitions();
        }
    }
    @Test
    void getConnectionToNotAClusterMemberForbidden() {
        StatefulRedisClusterConnection<String, String> sync = clusterClient.connect();
        try {
            sync.getConnection(TestSettings.host(), TestSettings.port());
        } catch (RedisException e) {
            assertThat(e).hasRootCauseExactlyInstanceOf(IllegalArgumentException.class);
        }
        sync.close();
    }
    @Test
    void getConnectionToNotAClusterMemberAllowed() {
        clusterClient.setOptions(ClusterClientOptions.builder().validateClusterNodeMembership(false).build());
        StatefulRedisClusterConnection<String, String> connection = clusterClient.connect();
        connection.getConnection(TestSettings.host(), TestSettings.port());
        connection.close();
    }
    @Test
    @Inject
    void pipelining(@New StatefulRedisClusterConnection<String, String> connectionUnderTest) {
        RedisAdvancedClusterAsyncCommands<String, String> async = connectionUnderTest.async();
        // preheat the first connection
        TestFutures.awaitOrTimeout(async.get(key(0)));
        int iterations = 1000;
        async.setAutoFlushCommands(false);
        List<RedisFuture<?>> futures = new ArrayList<>();
        for (int i = 0; i < iterations; i++) {
            futures.add(async.set(key(i), value(i)));
        }
        // With auto-flush off, nothing has been written yet.
        for (int i = 0; i < iterations; i++) {
            assertThat(this.sync.get(key(i))).as("Key " + key(i) + " must be null").isNull();
        }
        async.flushCommands();
        boolean result = TestFutures.awaitOrTimeout(futures);
        assertThat(result).isTrue();
        for (int i = 0; i < iterations; i++) {
            assertThat(this.sync.get(key(i))).as("Key " + key(i) + " must be " + value(i)).isEqualTo(value(i));
        }
    }
    @Test
    void clusterScan() {
        RedisAdvancedClusterCommands<String, String> sync = async.getStatefulConnection().sync();
        sync.mset(KeysAndValues.MAP);
        Set<String> allKeys = new HashSet<>();
        KeyScanCursor<String> scanCursor = null;
        do {
            if (scanCursor == null) {
                scanCursor = sync.scan();
            } else {
                scanCursor = sync.scan(scanCursor);
            }
            allKeys.addAll(scanCursor.getKeys());
        } while (!scanCursor.isFinished());
        assertThat(allKeys).containsAll(KeysAndValues.KEYS);
    }
    @Test
    void clusterScanWithArgs() {
        RedisAdvancedClusterCommands<String, String> sync = async.getStatefulConnection().sync();
        sync.mset(KeysAndValues.MAP);
        Set<String> allKeys = new HashSet<>();
        KeyScanCursor<String> scanCursor = null;
        do {
            if (scanCursor == null) {
                scanCursor = sync.scan(ScanArgs.Builder.matches("a*"));
            } else {
                scanCursor = sync.scan(scanCursor, ScanArgs.Builder.matches("a*"));
            }
            allKeys.addAll(scanCursor.getKeys());
        } while (!scanCursor.isFinished());
        assertThat(allKeys)
                .containsAll(KeysAndValues.KEYS.stream().filter(k -> k.startsWith("a")).collect(Collectors.toList()));
    }
    @Test
    void clusterScanStreaming() {
        RedisAdvancedClusterCommands<String, String> sync = async.getStatefulConnection().sync();
        sync.mset(KeysAndValues.MAP);
        ListStreamingAdapter<String> adapter = new ListStreamingAdapter<>();
        StreamScanCursor scanCursor = null;
        do {
            if (scanCursor == null) {
                scanCursor = sync.scan(adapter);
            } else {
                scanCursor = sync.scan(adapter, scanCursor);
            }
        } while (!scanCursor.isFinished());
        assertThat(adapter.getList()).containsAll(KeysAndValues.KEYS);
    }
    @Test
    void clusterScanStreamingWithArgs() {
        RedisAdvancedClusterCommands<String, String> sync = async.getStatefulConnection().sync();
        sync.mset(KeysAndValues.MAP);
        ListStreamingAdapter<String> adapter = new ListStreamingAdapter<>();
        StreamScanCursor scanCursor = null;
        do {
            if (scanCursor == null) {
                scanCursor = sync.scan(adapter, ScanArgs.Builder.matches("a*"));
            } else {
                scanCursor = sync.scan(adapter, scanCursor, ScanArgs.Builder.matches("a*"));
            }
        } while (!scanCursor.isFinished());
        assertThat(adapter.getList()).containsAll(
                KeysAndValues.KEYS.stream().filter(k -> k.startsWith("a")).collect(Collectors.toList()));
    }
    @Test
    void clusterScanCursorFinished() {
        assertThatThrownBy(() -> sync.scan(ScanCursor.FINISHED)).isInstanceOf(IllegalArgumentException.class);
    }
    @Test
    void clusterScanCursorNotReused() {
        assertThatThrownBy(() -> sync.scan(ScanCursor.of("dummy"))).isInstanceOf(IllegalArgumentException.class);
    }
    String value(int i) {
        return value + "-" + i;
    }
    String key(int i) {
        return key + "-" + i;
    }
    private void writeKeysToTwoNodes() {
        sync.set(KEY_ON_NODE_1, value);
        sync.set(KEY_ON_NODE_2, value);
    }
    Map<String, String> prepareMset() {
        Map<String, String> mset = new HashMap<>();
        for (char c = 'a'; c < 'z'; c++) {
            String key = new String(new char[] { c, c, c });
            mset.put(key, "value-" + key);
        }
        return mset;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// For unit tests @see TestCookieManager
package org.apache.jmeter.protocol.http.control;
import java.io.Serializable;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.Map;
import org.apache.commons.collections.map.LRUMap;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.URIException;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.impl.cookie.DateParseException;
import org.apache.http.impl.cookie.DateUtils;
import org.apache.jmeter.config.ConfigTestElement;
import org.apache.jmeter.engine.event.LoopIterationEvent;
import org.apache.jmeter.protocol.http.sampler.HTTPSampleResult;
import org.apache.jmeter.protocol.http.util.HTTPConstants;
import org.apache.jmeter.testelement.TestIterationListener;
import org.apache.jmeter.testelement.TestStateListener;
import org.apache.jmeter.testelement.property.BooleanProperty;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
/**
* Handles HTTP Caching
*/
public class CacheManager extends ConfigTestElement implements TestStateListener, TestIterationListener, Serializable {
private static final Date EXPIRED_DATE = new Date(0L);
private static final long serialVersionUID = 234L;
private static final Logger log = LoggingManager.getLoggerForClass();
private static final String[] CACHEABLE_METHODS = JMeterUtils.getPropDefault("cacheable_methods", "GET").split("[ ,]");
static {
log.info("Will only cache the following methods: "+Arrays.toString(CACHEABLE_METHODS));
}
//+ JMX attributes, do not change values
public static final String CLEAR = "clearEachIteration"; // $NON-NLS-1$
public static final String USE_EXPIRES = "useExpires"; // $NON-NLS-1$
public static final String MAX_SIZE = "maxSize"; // $NON-NLS-1$
//-
private transient InheritableThreadLocal<Map<String, CacheEntry>> threadCache;
private transient boolean useExpires; // Cached value
private static final int DEFAULT_MAX_SIZE = 5000;
private static final long ONE_YEAR_MS = 365*24*60*60*1000L;
    /**
     * Creates the manager with caching options disabled by default
     * (no per-iteration clearing, Expires/Cache-Control handling off)
     * and an empty cache.
     */
    public CacheManager() {
        setProperty(new BooleanProperty(CLEAR, false));
        setProperty(new BooleanProperty(USE_EXPIRES, false));
        clearCache();
        // cached copy of the USE_EXPIRES property; refreshed elsewhere when the test starts
        useExpires = false;
    }
/*
* Holder for storing cache details.
* Perhaps add original response later?
*/
// package-protected to allow access by unit-test cases
static class CacheEntry{
private final String lastModified;
private final String etag;
private final Date expires;
public CacheEntry(String lastModified, Date expires, String etag){
this.lastModified = lastModified;
this.etag = etag;
this.expires = expires;
}
public String getLastModified() {
return lastModified;
}
public String getEtag() {
return etag;
}
@Override
public String toString(){
return lastModified+" "+etag;
}
public Date getExpires() {
return expires;
}
}
/**
* Save the Last-Modified, Etag, and Expires headers if the result is cacheable.
* Version for Java implementation.
* @param conn connection
* @param res result
*/
public void saveDetails(URLConnection conn, HTTPSampleResult res){
if (isCacheable(res)){
String lastModified = conn.getHeaderField(HTTPConstants.LAST_MODIFIED);
String expires = conn.getHeaderField(HTTPConstants.EXPIRES);
String etag = conn.getHeaderField(HTTPConstants.ETAG);
String url = conn.getURL().toString();
String cacheControl = conn.getHeaderField(HTTPConstants.CACHE_CONTROL);
String date = conn.getHeaderField(HTTPConstants.DATE);
setCache(lastModified, cacheControl, expires, etag, url, date);
}
}
/**
 * Save the Last-Modified, Etag, and Expires headers if the result is
 * cacheable. Version for Commons HttpClient implementation.
 *
 * @param method
 *            {@link HttpMethod} to get header information from
 * @param res
 *            result to decide if result is cacheable
 * @throws URIException
 *             if extraction of the the uri from <code>method</code> fails
 */
public void saveDetails(HttpMethod method, HTTPSampleResult res) throws URIException{
    if (!isCacheable(res)) {
        return;
    }
    final String url = method.getURI().toString();
    setCache(
            getHeader(method, HTTPConstants.LAST_MODIFIED),
            getHeader(method, HTTPConstants.CACHE_CONTROL),
            getHeader(method, HTTPConstants.EXPIRES),
            getHeader(method, HTTPConstants.ETAG),
            url,
            getHeader(method, HTTPConstants.DATE));
}
/**
 * Save the Last-Modified, Etag, and Expires headers if the result is
 * cacheable. Version for Apache HttpClient implementation.
 *
 * @param method
 *            {@link HttpResponse} to extract header information from
 * @param res
 *            result to decide if result is cacheable
 */
public void saveDetails(HttpResponse method, HTTPSampleResult res) {
    if (!isCacheable(res)) {
        return;
    }
    setCache(
            getHeader(method, HTTPConstants.LAST_MODIFIED),
            getHeader(method, HTTPConstants.CACHE_CONTROL),
            getHeader(method, HTTPConstants.EXPIRES),
            getHeader(method, HTTPConstants.ETAG),
            res.getUrlAsString(), // TODO correct URL?
            getHeader(method, HTTPConstants.DATE));
}
// helper method to save the cache entry
// Computes the effective expiry (only when USE_EXPIRES is enabled) from the
// Expires / Cache-Control / Date / Last-Modified headers, then stores a
// CacheEntry for the URL in the per-thread cache. Returns without storing
// anything when Cache-Control contains "no-store".
private void setCache(String lastModified, String cacheControl, String expires, String etag, String url, String date) {
    if (log.isDebugEnabled()){
        log.debug("setCache("
              + lastModified + ","
              + cacheControl + ","
              + expires + ","
              + etag + ","
              + url + ","
              + date
              + ")");
    }
    Date expiresDate = null; // i.e. not using Expires
    if (useExpires) {// Check that we are processing Expires/CacheControl
        final String MAX_AGE = "max-age=";
        if(cacheControl != null && cacheControl.contains("no-store")) {
            // We must not store an CacheEntry, otherwise a
            // conditional request may be made
            return;
        }
        if (expires != null) {
            try {
                expiresDate = DateUtils.parseDate(expires);
            } catch (org.apache.http.impl.cookie.DateParseException e) {
                if (log.isDebugEnabled()){
                    log.debug("Unable to parse Expires: '"+expires+"' "+e);
                }
                expiresDate = CacheManager.EXPIRED_DATE; // invalid dates must be treated as expired
            }
        }
        // if no-cache is present, ensure that expiresDate remains null, which forces revalidation
        // NOTE(review): when no-cache appears together with an Expires header, the
        // expiresDate parsed above is still stored (this branch only guards the
        // recomputation below) — confirm that is the intended behaviour.
        if(cacheControl != null && !cacheControl.contains("no-cache")) {
            // the max-age directive overrides the Expires header,
            if(cacheControl.contains(MAX_AGE)) {
                // NOTE(review): a malformed max-age value would throw
                // NumberFormatException here — TODO confirm callers tolerate this.
                long maxAgeInSecs = Long.parseLong(
                        cacheControl.substring(cacheControl.indexOf(MAX_AGE)+MAX_AGE.length())
                            .split("[, ]")[0] // Bug 51932 - allow for optional trailing attributes
                        );
                expiresDate=new Date(System.currentTimeMillis()+maxAgeInSecs*1000);
            } else if(expires==null) { // No max-age && No expires
                if(!StringUtils.isEmpty(lastModified) && !StringUtils.isEmpty(date)) {
                    try {
                        Date responseDate = DateUtils.parseDate( date );
                        Date lastModifiedAsDate = DateUtils.parseDate( lastModified );
                        // Heuristic freshness lifetime: 10% of (Date - Last-Modified).
                        // see https://developer.mozilla.org/en/HTTP_Caching_FAQ
                        // see http://www.ietf.org/rfc/rfc2616.txt#13.2.4
                        expiresDate=new Date(System.currentTimeMillis()
                                +Math.round((responseDate.getTime()-lastModifiedAsDate.getTime())*0.1));
                    } catch(DateParseException e) {
                        // date or lastModified may be null or in bad format
                        if(log.isWarnEnabled()) {
                            log.warn("Failed computing expiration date with following info:"
                                +lastModified + ","
                                + cacheControl + ","
                                + expires + ","
                                + etag + ","
                                + url + ","
                                + date);
                        }
                        // TODO Can't see anything in SPEC
                        expiresDate = new Date(System.currentTimeMillis()+ONE_YEAR_MS);
                    }
                } else {
                    // TODO Can't see anything in SPEC
                    expiresDate = new Date(System.currentTimeMillis()+ONE_YEAR_MS);
                }
            }
            // else expiresDate computed in (expires!=null) condition is used
        }
    }
    getCache().put(url, new CacheEntry(lastModified, expiresDate, etag));
}
// Helper method to deal with missing headers - Commons HttpClient
private String getHeader(HttpMethod method, String name){
    final org.apache.commons.httpclient.Header header = method.getResponseHeader(name);
    if (header == null) {
        return null;
    }
    return header.getValue();
}
// Apache HttpClient: null-safe lookup of the last header with the given name
private String getHeader(HttpResponse method, String name) {
    final org.apache.http.Header header = method.getLastHeader(name);
    if (header == null) {
        return null;
    }
    return header.getValue();
}
/*
 * Is the sample result OK to cache?
 * i.e is it in the 2xx range, and is it a cacheable method?
 */
private boolean isCacheable(HTTPSampleResult res){
    final String responseCode = res.getResponseCode();
    // Lexicographic String comparison accepts codes between "200" and "299".
    // NOTE(review): longer strings such as "2001" would also pass — confirm
    // response codes are always exactly three characters here.
    return isCacheableMethod(res)
        && "200".compareTo(responseCode) <= 0 // $NON-NLS-1$
        && "299".compareTo(responseCode) >= 0; // $NON-NLS-1$
}
// True if the sample's HTTP method is one of the configured cacheable methods
// (case-insensitive match against CACHEABLE_METHODS).
private boolean isCacheableMethod(HTTPSampleResult res) {
    final String sampleMethod = res.getHTTPMethod();
    boolean cacheable = false;
    for (String candidate : CACHEABLE_METHODS) {
        if (candidate.equalsIgnoreCase(sampleMethod)) {
            cacheable = true;
            break;
        }
    }
    return cacheable;
}
/**
 * Check the cache, and if there is a match, set the headers:
 * <ul>
 * <li>If-Modified-Since</li>
 * <li>If-None-Match</li>
 * </ul>
 * Commons HttpClient version
 * @param url URL to look up in cache
 * @param method where to set the headers
 */
public void setHeaders(URL url, HttpMethod method) {
    final String key = url.toString();
    final CacheEntry entry = getCache().get(key);
    if (log.isDebugEnabled()){
        log.debug(method.getName()+"(OACH) "+key+" "+entry);
    }
    if (entry == null) {
        return;
    }
    final String lastModified = entry.getLastModified();
    if (lastModified != null){
        method.setRequestHeader(HTTPConstants.IF_MODIFIED_SINCE, lastModified);
    }
    final String etag = entry.getEtag();
    if (etag != null){
        method.setRequestHeader(HTTPConstants.IF_NONE_MATCH, etag);
    }
}
/**
 * Check the cache, and if there is a match, set the headers:
 * <ul>
 * <li>If-Modified-Since</li>
 * <li>If-None-Match</li>
 * </ul>
 * Apache HttpClient version.
 * @param url {@link URL} to look up in cache
 * @param request where to set the headers
 */
public void setHeaders(URL url, HttpRequestBase request) {
    final String key = url.toString();
    final CacheEntry entry = getCache().get(key);
    if (log.isDebugEnabled()){
        log.debug(request.getMethod()+"(OAH) "+key+" "+entry);
    }
    if (entry == null) {
        return;
    }
    final String lastModified = entry.getLastModified();
    if (lastModified != null){
        request.setHeader(HTTPConstants.IF_MODIFIED_SINCE, lastModified);
    }
    final String etag = entry.getEtag();
    if (etag != null){
        request.setHeader(HTTPConstants.IF_NONE_MATCH, etag);
    }
}
/**
 * Check the cache, and if there is a match, set the headers:
 * <ul>
 * <li>If-Modified-Since</li>
 * <li>If-None-Match</li>
 * </ul>
 * Java HttpURLConnection version.
 * @param url {@link URL} to look up in cache
 * @param conn where to set the headers
 */
public void setHeaders(HttpURLConnection conn, URL url) {
    final String key = url.toString();
    final CacheEntry entry = getCache().get(key);
    if (log.isDebugEnabled()){
        log.debug(conn.getRequestMethod()+"(Java) "+key+" "+entry);
    }
    if (entry == null) {
        return;
    }
    final String lastModified = entry.getLastModified();
    if (lastModified != null){
        conn.addRequestProperty(HTTPConstants.IF_MODIFIED_SINCE, lastModified);
    }
    final String etag = entry.getEtag();
    if (etag != null){
        conn.addRequestProperty(HTTPConstants.IF_NONE_MATCH, etag);
    }
}
/**
 * Check the cache, if the entry has an expires header and the entry has not expired, return true<br>
 * @param url {@link URL} to look up in cache
 * @return <code>true</code> if entry has an expires header and the entry has not expired, else <code>false</code>
 */
public boolean inCache(URL url) {
    final CacheEntry entry = getCache().get(url.toString());
    if (log.isDebugEnabled()){
        log.debug("inCache "+url.toString()+" "+entry);
    }
    if (entry == null) {
        return false;
    }
    final Date expiresDate = entry.getExpires();
    if (expiresDate == null) {
        // No expiry information stored: a conditional request is needed.
        return false;
    }
    final boolean stillFresh = expiresDate.after(new Date());
    if (log.isDebugEnabled()){
        log.debug("Expires= " + expiresDate + (stillFresh ? " (Valid)" : " (Expired)"));
    }
    return stillFresh;
}
// Returns the current thread's cache map (created lazily by the ThreadLocal).
private Map<String, CacheEntry> getCache(){
    return threadCache.get();
}
// True if the cache should be cleared at the start of each iteration.
public boolean getClearEachIteration() {
    return getPropertyAsBoolean(CLEAR);
}
// Configure whether the cache is cleared at the start of each iteration.
public void setClearEachIteration(boolean clear) {
    setProperty(new BooleanProperty(CLEAR, clear));
}
// True if Expires/Cache-Control header processing is enabled.
public boolean getUseExpires() {
    return getPropertyAsBoolean(USE_EXPIRES);
}
// Enable/disable Expires/Cache-Control header processing.
public void setUseExpires(boolean expires) {
    setProperty(new BooleanProperty(USE_EXPIRES, expires));
}
/**
 * @return int cache max size (falls back to {@code DEFAULT_MAX_SIZE} entries)
 */
public int getMaxSize() {
    return getPropertyAsInt(MAX_SIZE, DEFAULT_MAX_SIZE);
}
/**
 * @param size int cache max size; takes effect when the per-thread map
 *             is next (re)created by clearCache()
 */
public void setMaxSize(int size) {
    setProperty(MAX_SIZE, size, DEFAULT_MAX_SIZE);
}
/** Clears the element's properties and discards the per-thread caches. */
@Override
public void clear(){
    super.clear();
    clearCache();
}
// Replaces the ThreadLocal holder itself, so every thread gets a fresh,
// size-bounded, synchronized LRU map on next access.
private void clearCache() {
    log.debug("Clear cache");
    threadCache = new InheritableThreadLocal<Map<String, CacheEntry>>(){
        @Override
        protected Map<String, CacheEntry> initialValue(){
            // Bug 51942 - this map may be used from multiple threads
            @SuppressWarnings("unchecked") // LRUMap is not generic currently
            Map<String, CacheEntry> map = new LRUMap(getMaxSize());
            return Collections.<String, CacheEntry>synchronizedMap(map);
        }
    };
}
/** No action required at test start. */
@Override
public void testStarted() {
}
/** No action required at test end. */
@Override
public void testEnded() {
}
/** No action required at remote test start. */
@Override
public void testStarted(String host) {
}
/** No action required at remote test end. */
@Override
public void testEnded(String host) {
}
/**
 * At the start of each iteration: clear the cache if configured to do so,
 * and refresh the cached USE_EXPIRES flag.
 */
@Override
public void testIterationStart(LoopIterationEvent event) {
    if (getClearEachIteration()) {
        clearCache();
    }
    useExpires=getUseExpires(); // cache the value
}
}
| |
/*******************************************************************************
* Copyright (C) 2015 Brocade Communications Systems, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* https://github.com/brocade/vTM-eclipse/LICENSE
* This software is distributed "AS IS".
*
* Contributors:
* Brocade Communications Systems - Main Implementation
******************************************************************************/
package com.zeus.eclipsePlugin.codedata;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import com.zeus.eclipsePlugin.ZDebug;
import com.zeus.eclipsePlugin.codedata.CodePossibility.Type;
/**
 * This represents the code information for a particular ZXTM version.
 */
public class VersionCodeData implements Iterable<FunctionGroup>
{
   private int major, minor;

   /** Keywords used by this TrafficScript version, kept sorted. */
   private TreeSet<String> keywords = new TreeSet<String>();

   /** Lower-case group name -> group, for case-insensitive lookup. */
   private HashMap<String, FunctionGroup> groupTable = new HashMap<String, FunctionGroup>();

   /** All groups in their natural (sorted) order. */
   private TreeSet<FunctionGroup> groupTree = new TreeSet<FunctionGroup>();

   /** The longest possible group name */
   private int maxGroupLength = 0;

   /**
    * Creates a empty TrafficScriptVersion class for the specified version.
    * @param major The major version, e.g. for 5.1r3 = 5
    * @param minor The minor version, e.g. for 5.1r3 = 1
    */
   public VersionCodeData( int major, int minor )
   {
      this.major = major;
      this.minor = minor;
   }

   /**
    * The keywords used in TrafficScript for this ZXTM version.
    * @return The (mutable, sorted) set of keywords.
    */
   public Set<String> getKeywords()
   {
      return keywords;
   }

   /**
    * Add a function group. Also updates the longest-group-name bound used to
    * short-circuit prefix searches.
    * @param group The FunctionGroup to add.
    */
   public void addGroup( FunctionGroup group )
   {
      groupTable.put( group.getLowerCaseName(), group );
      groupTree.add( group );

      if( group.getName().length() > maxGroupLength ) {
         maxGroupLength = group.getName().length();
      }
   }

   /**
    * Get a function group with the specified name. Case insensitive.
    * @param name The name of group to return.
    * @return The group with the specified name or null if it does not exist.
    */
   public FunctionGroup getGroup( String name )
   {
      return groupTable.get( name.toLowerCase() );
   }

   /**
    * Get a sorted collection of all the groups in this code data version.
    * @return A sorted collection of function groups.
    */
   public Collection<FunctionGroup> getGroups()
   {
      return groupTree;
   }

   /**
    * Iterate over all the function groups in this TrafficScript version.
    * @return An iterator for all the function groups.
    */
   @Override
   public Iterator<FunctionGroup> iterator()
   {
      return groupTree.iterator();
   }

   /**
    * Get the major version.
    * @return The major version
    */
   public int getMajorVersion()
   {
      return major;
   }

   /**
    * Get the minor version.
    * @return The minor version
    */
   public int getMinorVersion()
   {
      return minor;
   }

   /**
    * Set the version. Can only be called by a sub class, and should not be used
    * after loading data.
    * @param major The major version.
    * @param minor The minor version.
    */
   protected void setVersion( int major, int minor )
   {
      this.major = major;
      this.minor = minor;
   }

   /**
    * Set the keywords that this version of TrafficScript uses, replacing any
    * previously stored keywords.
    * @param keywords An array of keywords
    */
   public void setKeywords( String[] keywords )
   {
      this.keywords.clear();
      for( String keyword : keywords ) {
         this.keywords.add( keyword );
      }
   }

   /**
    * Change a major and a minor version into a string.
    * @param major The major version
    * @param minor The minor version
    * @return A string representation of the passed version.
    */
   public static String createVersionString( int major, int minor )
   {
      return major + "." + minor;
   }

   /**
    * Get the version of this code data as a String.
    * @return The String representing this data's version.
    */
   public String getVersionString()
   {
      return createVersionString( major, minor );
   }

   /**
    * Get the function matching the passed in string. The name includes the
    * group prefix.
    * @param fullName The name of the function data you want, including function
    * group at the front.
    * @return The function data for the named function, or null if there is no
    * such function.
    */
   public Function getFunctionMatching( String fullName )
   {
      int dot = fullName.lastIndexOf( '.' );
      // No dot, or a trailing dot, cannot name a "group.function" pair.
      if( dot == fullName.length() - 1 || dot == -1 ) return null;

      String groupName = fullName.substring( 0, dot );
      FunctionGroup group = this.getGroup( groupName );
      if( group == null ) return null;

      String functionName = fullName.substring( dot + 1 );
      return group.getFunction( functionName );
   }

   /**
    * Return all groups starting with the passed in prefix.
    * @param start The start of a groups you want to match.
    * @return A collection of groups that match the passed string.
    */
   public Collection<FunctionGroup> getGroupsStartingWith( String start )
   {
      if( start.length() == 0 ) {
         return groupTree;
      }
      start = start.toLowerCase();

      // A prefix longer than any group name cannot match anything.
      if( start.length() > maxGroupLength )
         return new TreeSet<FunctionGroup>();

      try {
         // Uses tree subset to find the groups greater than start or less than
         return groupTree.subSet(
            new FunctionGroup( start ),
            new FunctionGroup( getSearchLimit( start ) )
         );
      } catch( IllegalArgumentException e ) {
         ZDebug.printStackTrace( e, "Invalid search limit" );
      }

      return new TreeSet<FunctionGroup>();
   }

   /**
    * Return all keywords starting with the passed in prefix.
    * @param start The start of a keywords you want to match.
    * @return A collection of keywords that match the passed string.
    */
   public Collection<String> getKeywordsStartingWith( String start )
   {
      if( start.length() == 0 ) {
         return keywords;
      }
      start = start.toLowerCase();
      return keywords.subSet( start, getSearchLimit( start ) );
   }

   /**
    * Get a set of possibilities to complete the passed word.
    * (Method name kept as-is, typo included, for API compatibility.)
    * @param word The word you want to complete
    * @param types The types of completions you want.
    * @return A set of possibilities to complete the code.
    */
   public Collection<CodePossibility> getPossiblilities( String word, Type[] types )
   {
      ZDebug.print( 4, "getPossiblilities( ", word, " )" );

      // Optimisation
      if( word.length() > 0 && !Character.isLetter( word.charAt( 0 ) ) ) {
         return new ArrayList<CodePossibility>(0);
      }

      // Get what types of possibility the caller wants
      boolean listGroups = false, listFunctions = false, listKeywords = false;
      for( Type type : types ) {
         switch( type ) {
            case FUNCTION: listFunctions = true; break;
            case GROUP: listGroups = true; break;
            case KEYWORD: listKeywords = true; break;
         }
      }

      // Split up into parts (for functions and groups)
      String[] parts = word.split( "\\.", -1 );
      if( parts.length < 1 ) return new HashSet<CodePossibility>(0);

      // Everything before the final dot is the group name.
      String groupName = "";
      String funcName = null;
      for( int i = 0; i < parts.length - 1; i++ ) {
         if( i != 0 ) groupName += ".";
         groupName += parts[i];
      }
      ZDebug.print( 5, "Word Before: '", word, "'" );

      FunctionGroup currentGroup = this.getGroup( groupName );
      if( parts.length > 0 ) {
         funcName = parts[parts.length - 1];
      }

      TreeSet<CodePossibility> list = new TreeSet<CodePossibility>();

      // Suggest some keywords
      if( listKeywords ) {
         for( String keyword : getKeywordsStartingWith( word ) ) {
            list.add( new CodePossibility( keyword, word ) );
         }
      }

      // Suggest some groups
      if( listGroups ) {
         ZDebug.print( 5, "Getting groups that start with: ", word );
         for( FunctionGroup group : getGroupsStartingWith( word ) ) {
            list.add( new CodePossibility( group, word ) );
         }
      }

      // Suggest some functions
      if( listFunctions ) {
         if( currentGroup != null ) {
            ZDebug.print( 5, "Getting functions in '", currentGroup, "' that start with: ", funcName );
            for( Function function : currentGroup.getFunctionsStartingWith( funcName ) ) {
               list.add( new CodePossibility( function, funcName ) );
            }
         }
      }

      return list;
   }

   /**
    * For searching for strings that start with a character using TreeSets
    * subSet function. Returns the string that should be used as the upper
    * limit for the subset.
    * @param start The string you want the subset that matches the start of.
    * @return The upper limit of the subset, exclusive.
    */
   static final String getSearchLimit( String start )
   {
      // Increment the last character, e.g. "abc" -> "abd".
      String front = start.substring( 0, start.length() - 1 );
      front += (char)(start.charAt( start.length() - 1 ) + 1);
      return front;
   }
}
| |
package graphql.language;
import graphql.PublicApi;
import graphql.schema.idl.TypeInfo;
import graphql.util.TraversalControl;
import graphql.util.TraverserContext;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.function.Function;
import static graphql.util.TreeTransformerUtil.changeNode;
import static java.util.Comparator.naturalOrder;
import static java.util.Comparator.nullsLast;
/**
 * A class that helps you sort AST nodes
 */
@PublicApi
public class AstSorter {

    /**
     * This will sort nodes in specific orders and then alphabetically.
     *
     * The order is :
     * <ul>
     * <li>Query operation definitions</li>
     * <li>Mutation operation definitions</li>
     * <li>Subscriptions operation definitions</li>
     * <li>Fragment definitions</li>
     * <li>Directive definitions</li>
     * <li>Schema definitions</li>
     * <li>Object Type definitions</li>
     * <li>Interface Type definitions</li>
     * <li>Union Type definitions</li>
     * <li>Enum Type definitions</li>
     * <li>Scalar Type definitions</li>
     * <li>Input Object Type definitions</li>
     * </ul>
     *
     * After those groupings they will be sorted alphabetic. All arguments and directives on elements
     * will be sorted alphabetically by name.
     *
     * @param nodeToBeSorted the node to be sorted
     * @param <T>            of type {@link graphql.language.Node}
     *
     * @return a new sorted node (because {@link graphql.language.Node}s are immutable)
     */
    public <T extends Node> T sort(T nodeToBeSorted) {
        // Each visit method rebuilds the node with its sortable children sorted.
        NodeVisitorStub visitor = new NodeVisitorStub() {

            @Override
            public TraversalControl visitDocument(Document node, TraverserContext<Node> context) {
                Document changedNode = node.transform(builder -> {
                    List<Definition> definitions = sort(node.getDefinitions(), comparingDefinitions());
                    builder.definitions(definitions);
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitOperationDefinition(OperationDefinition node, TraverserContext<Node> context) {
                OperationDefinition changedNode = node.transform(builder -> {
                    builder.variableDefinitions(sort(node.getVariableDefinitions(), comparing(VariableDefinition::getName)));
                    builder.directives(sort(node.getDirectives(), comparing(Directive::getName)));
                    builder.selectionSet(sortSelectionSet(node.getSelectionSet()));
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitField(Field node, TraverserContext<Node> context) {
                Field changedNode = node.transform(builder -> {
                    builder.arguments(sort(node.getArguments(), comparing(Argument::getName)));
                    builder.directives(sort(node.getDirectives(), comparing(Directive::getName)));
                    builder.selectionSet(sortSelectionSet(node.getSelectionSet()));
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitFragmentDefinition(FragmentDefinition node, TraverserContext<Node> context) {
                FragmentDefinition changedNode = node.transform(builder -> {
                    builder.directives(sort(node.getDirectives(), comparing(Directive::getName)));
                    builder.selectionSet(sortSelectionSet(node.getSelectionSet()));
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitInlineFragment(InlineFragment node, TraverserContext<Node> context) {
                InlineFragment changedNode = node.transform(builder -> {
                    builder.directives(sort(node.getDirectives(), comparing(Directive::getName)));
                    builder.selectionSet(sortSelectionSet(node.getSelectionSet()));
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitFragmentSpread(FragmentSpread node, TraverserContext<Node> context) {
                FragmentSpread changedNode = node.transform(builder -> {
                    List<Directive> directives = sort(node.getDirectives(), comparing(Directive::getName));
                    builder.directives(directives);
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitDirective(Directive node, TraverserContext<Node> context) {
                Directive changedNode = node.transform(builder -> {
                    List<Argument> arguments = sort(node.getArguments(), comparing(Argument::getName));
                    builder.arguments(arguments);
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitObjectValue(ObjectValue node, TraverserContext<Node> context) {
                ObjectValue changedNode = node.transform(builder -> {
                    List<ObjectField> objectFields = sort(node.getObjectFields(), comparing(ObjectField::getName));
                    builder.objectFields(objectFields);
                });
                return changeNode(context, changedNode);
            }

            // SDL classes here

            @Override
            public TraversalControl visitSchemaDefinition(SchemaDefinition node, TraverserContext<Node> context) {
                SchemaDefinition changedNode = node.transform(builder -> {
                    builder.directives(sort(node.getDirectives(), comparing(Directive::getName)));
                    builder.operationTypeDefinitions(sort(node.getOperationTypeDefinitions(), comparing(OperationTypeDefinition::getName)));
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitEnumTypeDefinition(EnumTypeDefinition node, TraverserContext<Node> context) {
                EnumTypeDefinition changedNode = node.transform(builder -> {
                    builder.directives(sort(node.getDirectives(), comparing(Directive::getName)));
                    builder.enumValueDefinitions(sort(node.getEnumValueDefinitions(), comparing(EnumValueDefinition::getName)));
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitScalarTypeDefinition(ScalarTypeDefinition node, TraverserContext<Node> context) {
                ScalarTypeDefinition changedNode = node.transform(builder -> {
                    List<Directive> directives = sort(node.getDirectives(), comparing(Directive::getName));
                    builder.directives(directives);
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitInputObjectTypeDefinition(InputObjectTypeDefinition node, TraverserContext<Node> context) {
                InputObjectTypeDefinition changedNode = node.transform(builder -> {
                    builder.directives(sort(node.getDirectives(), comparing(Directive::getName)));
                    builder.inputValueDefinitions(sort(node.getInputValueDefinitions(), comparing(InputValueDefinition::getName)));
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitObjectTypeDefinition(ObjectTypeDefinition node, TraverserContext<Node> context) {
                ObjectTypeDefinition changedNode = node.transform(builder -> {
                    builder.directives(sort(node.getDirectives(), comparing(Directive::getName)));
                    builder.implementz(sort(node.getImplements(), comparingTypes()));
                    builder.fieldDefinitions(sort(node.getFieldDefinitions(), comparing(FieldDefinition::getName)));
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitInterfaceTypeDefinition(InterfaceTypeDefinition node, TraverserContext<Node> context) {
                InterfaceTypeDefinition changedNode = node.transform(builder -> {
                    builder.directives(sort(node.getDirectives(), comparing(Directive::getName)));
                    builder.implementz(sort(node.getImplements(), comparingTypes()));
                    builder.definitions(sort(node.getFieldDefinitions(), comparing(FieldDefinition::getName)));
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitUnionTypeDefinition(UnionTypeDefinition node, TraverserContext<Node> context) {
                UnionTypeDefinition changedNode = node.transform(builder -> {
                    builder.directives(sort(node.getDirectives(), comparing(Directive::getName)));
                    builder.memberTypes(sort(node.getMemberTypes(), comparingTypes()));
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitFieldDefinition(FieldDefinition node, TraverserContext<Node> context) {
                FieldDefinition changedNode = node.transform(builder -> {
                    builder.directives(sort(node.getDirectives(), comparing(Directive::getName)));
                    builder.inputValueDefinitions(sort(node.getInputValueDefinitions(), comparing(InputValueDefinition::getName)));
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitInputValueDefinition(InputValueDefinition node, TraverserContext<Node> context) {
                InputValueDefinition changedNode = node.transform(builder -> {
                    List<Directive> directives = sort(node.getDirectives(), comparing(Directive::getName));
                    builder.directives(directives);
                });
                return changeNode(context, changedNode);
            }

            @Override
            public TraversalControl visitDirectiveDefinition(DirectiveDefinition node, TraverserContext<Node> context) {
                DirectiveDefinition changedNode = node.transform(builder -> {
                    builder.inputValueDefinitions(sort(node.getInputValueDefinitions(), comparing(InputValueDefinition::getName)));
                    builder.directiveLocations(sort(node.getDirectiveLocations(), comparing(DirectiveLocation::getName)));
                });
                return changeNode(context, changedNode);
            }
        };
        AstTransformer astTransformer = new AstTransformer();
        Node newDoc = astTransformer.transform(nodeToBeSorted, visitor);
        //noinspection unchecked
        return (T) newDoc;
    }

    /** Orders types alphabetically by their unwrapped type name. */
    private Comparator<Type> comparingTypes() {
        return comparing(type -> TypeInfo.typeInfo(type).getName());
    }

    /**
     * Orders selections by kind (fields, then fragment spreads, then inline
     * fragments) and then alphabetically by name / type condition.
     */
    private Comparator<Selection> comparingSelections() {
        Function<Selection, String> byName = s -> {
            if (s instanceof FragmentSpread) {
                return ((FragmentSpread) s).getName();
            }
            if (s instanceof Field) {
                return ((Field) s).getName();
            }
            if (s instanceof InlineFragment) {
                TypeName typeCondition = ((InlineFragment) s).getTypeCondition();
                return typeCondition == null ? "" : typeCondition.getName();
            }
            return "";
        };
        Function<Selection, Integer> byType = s -> {
            if (s instanceof Field) {
                return 1;
            }
            if (s instanceof FragmentSpread) {
                return 2;
            }
            if (s instanceof InlineFragment) {
                return 3;
            }
            return 4;
        };
        // byName never returns null, so the plain key-extractor overload is
        // equivalent to wrapping it in comparing(); kept consistent with
        // comparingDefinitions() below.
        return comparing(byType).thenComparing(byName);
    }

    /**
     * Orders definitions by the grouping described on {@link #sort(Node)} and
     * then alphabetically by name.
     */
    private Comparator<Definition> comparingDefinitions() {
        Function<Definition, String> byName = d -> {
            if (d instanceof OperationDefinition) {
                String name = ((OperationDefinition) d).getName();
                return name == null ? "" : name;
            }
            if (d instanceof FragmentDefinition) {
                return ((FragmentDefinition) d).getName();
            }
            if (d instanceof DirectiveDefinition) {
                return ((DirectiveDefinition) d).getName();
            }
            if (d instanceof TypeDefinition) {
                return ((TypeDefinition) d).getName();
            }
            return "";
        };
        Function<Definition, Integer> byType = d -> {
            if (d instanceof OperationDefinition) {
                OperationDefinition.Operation operation = ((OperationDefinition) d).getOperation();
                if (OperationDefinition.Operation.QUERY == operation || operation == null) {
                    return 101;
                }
                if (OperationDefinition.Operation.MUTATION == operation) {
                    return 102;
                }
                if (OperationDefinition.Operation.SUBSCRIPTION == operation) {
                    return 104;
                }
                return 100;
            }
            if (d instanceof FragmentDefinition) {
                return 200;
            }
            // SDL
            if (d instanceof DirectiveDefinition) {
                return 300;
            }
            if (d instanceof SchemaDefinition) {
                return 400;
            }
            if (d instanceof TypeDefinition) {
                if (d instanceof ObjectTypeDefinition) {
                    return 501;
                }
                if (d instanceof InterfaceTypeDefinition) {
                    return 502;
                }
                if (d instanceof UnionTypeDefinition) {
                    return 503;
                }
                if (d instanceof EnumTypeDefinition) {
                    return 504;
                }
                if (d instanceof ScalarTypeDefinition) {
                    return 505;
                }
                if (d instanceof InputObjectTypeDefinition) {
                    return 506;
                }
                return 500;
            }
            return -1;
        };
        return comparing(byType).thenComparing(byName);
    }

    /** Returns a copy of the selection set with its selections sorted, or null for null input. */
    private SelectionSet sortSelectionSet(SelectionSet selectionSet) {
        if (selectionSet == null) {
            return null;
        }
        List<Selection> selections = sort(selectionSet.getSelections(), comparingSelections());
        return selectionSet.transform(builder -> builder.selections(selections));
    }

    /** Returns a sorted copy of the list; the input list is never mutated. */
    private <T> List<T> sort(List<T> items, Comparator<T> comparing) {
        items = new ArrayList<>(items);
        items.sort(comparing);
        return items;
    }

    /** Null-safe key comparator: null keys sort last, non-null keys naturally. */
    private <T, U extends Comparable<? super U>> Comparator<T> comparing(
            Function<? super T, ? extends U> keyExtractor) {
        return Comparator.comparing(keyExtractor, nullsLast(naturalOrder()));
    }
}
| |
package enchiridion;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.imageio.ImageIO;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.texture.DynamicTexture;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.IIcon;
import net.minecraft.util.ResourceLocation;
import org.apache.logging.log4j.Level;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import cpw.mods.fml.client.FMLClientHandler;
import cpw.mods.fml.common.FMLLog;
import enchiridion.api.Formatting;
import enchiridion.api.GuideHandler;
import enchiridion.api.StackHelper;
import enchiridion.api.XMLHelper;
import enchiridion.api.pages.PageImage;
import enchiridion.api.pages.PageImage.LinkedTexture;
public class CustomBooks {
public static final String id = "booksid";
// Raw HashMap replaced with a parameterized one — same runtime behaviour,
// but avoids the unchecked-assignment warning.
// Assumed keyed by book identifier; populated outside this class — TODO confirm.
public static final HashMap<String, BookInfo> bookInfo = new HashMap<String, BookInfo>();
/** Metadata for a single installed guide book. */
public static class BookInfo {
    // Presumably: give the book to the player on world start — TODO confirm against usage
    boolean onWorldStart = false;
    // Presumably: stack whose crafting grants this book — verify against crafting handler
    ItemStack onCrafting;
    // Recipe components for the book (semantics not visible here)
    Object[] crafting;
    String author, displayName;
    // Tint applied to the book item/cover; null when unspecified — TODO confirm
    public Integer bookColor;
    // Background texture identifier used when rendering pages — TODO confirm
    public String background;
    // Filesystem path of the book archive — TODO confirm
    public String path;
    public BookInfo(String displayName, String author, Integer bookColor) {
        this.displayName = displayName;
        this.author = author;
        this.bookColor = bookColor;
    }
}
public static void preInit() {
File folder = Enchiridion.root;
if (!folder.exists()) {
folder.mkdir();
}
for (File file : folder.listFiles()) {
String zipName = file.getName();
// Continue if the file i a zip
if (zipName.substring(zipName.lastIndexOf(".") + 1, zipName.length()).equals("zip")) {
FMLLog.getLogger().log(Level.TRACE, "[Enchiridion] Attempting to read data for the installed Guide Book: " + zipName);
try {
ZipFile zipfile = new ZipFile(file);
Enumeration enumeration = zipfile.entries();
while (enumeration.hasMoreElements()) {
ZipEntry zipentry = (ZipEntry) enumeration.nextElement();
String fileName = zipentry.getName();
String extension = fileName.substring(fileName.length() - 3, fileName.length());
if (!zipentry.isDirectory()) {
if (FMLClientHandler.instance() != null && extension.equals("png")) {
try {
String id = fileName.substring(0, fileName.lastIndexOf('.'));
BufferedImage img = ImageIO.read(zipfile.getInputStream(zipentry));
DynamicTexture dmTexture = new DynamicTexture(img);
ResourceLocation texture = Minecraft.getMinecraft().getTextureManager().getDynamicTextureLocation(id, dmTexture);
LinkedTexture linked = new LinkedTexture(img.getHeight(), img.getWidth(), dmTexture, texture);
String identifier = (zipName.substring(0, zipName.length() - 4) + "|" + fileName.substring(0, fileName.length() - 4));
PageImage.addToCache(identifier, linked);
} catch (Exception e) {
e.printStackTrace();
FMLLog.getLogger().log(Level.WARN, "[Enchiridion] Failed to Read Image Data of " + fileName);
}
} else if (extension.equals("xml")) {
try {
String id = fileName.substring(0, fileName.lastIndexOf('.'));
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder build = factory.newDocumentBuilder();
Document doc = build.parse(zipfile.getInputStream(zipentry));
doc.getDocumentElement().normalize();
ClientProxy.bookCache.put(id, doc);
} catch (Exception e) {
e.printStackTrace();
FMLLog.getLogger().log(Level.WARN, "[Enchiridion] Failed to Read XML Data of " + fileName);
}
}
}
}
zipfile.close();
FMLLog.getLogger().log(Level.INFO, "[Enchiridion] Sucessfully finished reading the installed Guide Book: " + zipName);
} catch (Exception e) {
FMLLog.getLogger().log(Level.ERROR, "[Enchiridion] Failed to read the installed Guide Book: " + zipName);
}
}
}
if (GuideHandler.DEBUG_ENABLED) {
File debugFolder = new File(Enchiridion.root + File.separator + "debug");
if (!debugFolder.exists()) {
debugFolder.mkdir();
}
for (File file : debugFolder.listFiles()) {
String xmlName = file.getName();
if (xmlName.substring(xmlName.lastIndexOf(".") + 1, xmlName.length()).equals("xml")) {
try {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder build = factory.newDocumentBuilder();
Document doc = build.parse(file);
doc.getDocumentElement().normalize();
ClientProxy.bookCache.put(xmlName.substring(0, xmlName.lastIndexOf('.')), doc);
FMLLog.getLogger().log(Level.INFO, "[Enchiridion] Sucessfully loaded debug mode custom book xml " + xmlName);
} catch (Exception e) {
FMLLog.getLogger().log(Level.WARN, "[Enchiridion] Failed to load debug mode custom book xml " + xmlName);
}
}
}
}
}
public static void setup(String key, Element xml) {
String displayName = XMLHelper.getElement(xml, "name");
String author = XMLHelper.getElement(xml, "author");
Integer color = XMLHelper.getElementAsHex(xml, "color", 0xFFFFFF);
BookInfo info = new BookInfo(displayName, author, color);
info.displayName = Formatting.getColor(XMLHelper.getAttribute(XMLHelper.getNode(xml, "name"), "color")) + info.displayName;
info.author = Formatting.getColor(XMLHelper.getAttribute(XMLHelper.getNode(xml, "author"), "color")) + info.author;
if (XMLHelper.getAttribAsBoolean(xml, "gen")) info.onWorldStart = true;
info.background = XMLHelper.getElement(xml, "background");
String onCrafting = XMLHelper.getElement(xml, "onCrafting");
if (onCrafting != null && !onCrafting.equals("")) {
info.onCrafting = StackHelper.getStackFromString(onCrafting);
}
String crafting = XMLHelper.getElement(xml, "crafting");
if (crafting != null & !crafting.equals("")) {
String[] items = crafting.split("\\|");
Object[] recipe = new Object[items.length];
for (int i = 0; i < recipe.length; i++) {
if (items[i].startsWith("OD:")) {
recipe[i] = items[i].substring(3);
} else if (!items[i].equals("")) {
recipe[i] = StackHelper.getStackFromString(items[i]);
}
}
info.crafting = recipe;
}
info.path = XMLHelper.getElement(xml, "icon");
bookInfo.put(key, info);
icons.put(key, new CustomIconAtlas(key, info.path));
}
public static Document getDebugMode(String xml) {
File debugFolder = new File(Enchiridion.root + File.separator + "debug");
String file = debugFolder + File.separator + xml + ".xml";
try {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder build = factory.newDocumentBuilder();
Document doc = build.parse(file);
doc.getDocumentElement().normalize();
return doc;
} catch (Exception e) {
return null;
}
}
public static String getID(ItemStack stack) {
return stack.stackTagCompound.getString(id);
}
public static BookInfo getBookInfo(ItemStack stack) {
return bookInfo.get(getID(stack));
}
public static ItemStack create(String key) {
ItemStack guide = new ItemStack(Enchiridion.items, 1, ItemEnchiridion.GUIDE);
guide.setTagCompound(new NBTTagCompound());
guide.stackTagCompound.setString(CustomBooks.id, key);
return guide;
}
private static HashMap<String, CustomIconAtlas> icons = new HashMap();
public static IIcon getIcon(ItemStack stack) {
BookInfo info = bookInfo.get(getID(stack));
if (info != null && !isNull(info.path)) {
return TextureHandler.map.getTextureExtry(info.path);
}
return ((ItemEnchiridion) stack.getItem()).icons[stack.getItemDamage()];
}
private static boolean isNull(String path) {
return path != null && !path.equals("");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.hops;
import java.util.ArrayList;
import org.apache.sysml.api.DMLScript;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.hops.Hop.MultiThreadedHop;
import org.apache.sysml.hops.rewrite.HopRewriteUtils;
import org.apache.sysml.lops.Aggregate;
import org.apache.sysml.lops.Group;
import org.apache.sysml.lops.Lop;
import org.apache.sysml.lops.LopsException;
import org.apache.sysml.lops.SortKeys;
import org.apache.sysml.lops.Transform;
import org.apache.sysml.lops.LopProperties.ExecType;
import org.apache.sysml.lops.Transform.OperationTypes;
import org.apache.sysml.parser.Expression.DataType;
import org.apache.sysml.parser.Expression.ValueType;
import org.apache.sysml.runtime.instructions.gpu.context.GPUContextPool;
import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
/**
* Reorg (cell) operation: aij
* Properties:
* Symbol: ', rdiag, rshape, rsort
* 1 Operand (except sort and reshape take additional arguments)
*
* Semantic: change indices (in mapper or reducer)
*
*
* NOTE MB: reshape integrated here because (1) ParameterizedBuiltinOp requires name-value pairs for params
* and (2) most importantly semantic of reshape is exactly a reorg op.
*/
public class ReorgOp extends Hop implements MultiThreadedHop
{
    /** Forces the distributed (MR) index-sort path even for small vectors. */
    public static boolean FORCE_DIST_SORT_INDEXES = false;
    /** Set during lops construction: whether the Spark sort rewrite applies. */
    public boolean bSortSPRewriteApplicable = false;

    private ReOrgOp op;
    private int _maxNumThreads = -1; //-1 for unlimited

    private ReorgOp() {
        //default constructor for clone
    }

    /** Single-input constructor (transpose, diag, rev). */
    public ReorgOp(String l, DataType dt, ValueType vt, ReOrgOp o, Hop inp)
    {
        super(l, dt, vt);
        op = o;
        getInput().add(0, inp);
        inp.getParent().add(this);
        //compute unknown dims and nnz
        refreshSizeInformation();
    }

    /** Multi-input constructor (reshape, sort take additional arguments). */
    public ReorgOp(String l, DataType dt, ValueType vt, ReOrgOp o, ArrayList<Hop> inp)
    {
        super(l, dt, vt);
        op = o;
        for( int i=0; i<inp.size(); i++ ) {
            Hop in = inp.get(i);
            getInput().add(i, in);
            in.getParent().add(this);
        }
        //compute unknown dims and nnz
        refreshSizeInformation();
    }

    @Override
    public void checkArity() throws HopsException {
        int sz = _input.size();
        switch( op ) {
            case TRANSPOSE:
            case DIAG:
            case REV:
                HopsException.check(sz == 1, this, "should have arity 1 for op %s but has arity %d", op, sz);
                break;
            case RESHAPE:
            case SORT:
                //reshape: data, rows, cols, byrow; sort: data, by, desc, ixret
                HopsException.check(sz == 4, this, "should have arity 4 for op %s but has arity %d", op, sz);
                break;
            default:
                throw new HopsException("Unsupported lops construction for operation type '" + op + "'.");
        }
    }

    @Override
    public void setMaxNumThreads( int k ) {
        _maxNumThreads = k;
    }

    @Override
    public int getMaxNumThreads() {
        return _maxNumThreads;
    }

    public ReOrgOp getOp()
    {
        return op;
    }

    @Override
    public String getOpString() {
        //note: avoids the redundant `new String("")` + concatenation of the original
        return "r(" + HopsTransf2String.get(op) + ")";
    }

    /**
     * Constructs (and caches) the lops for this reorg operation, dispatching
     * on the operation type and the chosen execution type (CP/MR/SPARK/GPU).
     */
    @Override
    public Lop constructLops()
        throws HopsException, LopsException
    {
        //return already created lops
        if( getLops() != null )
            return getLops();

        ExecType et = optFindExecType();

        switch( op )
        {
            case TRANSPOSE:
            {
                Lop lin = getInput().get(0).constructLops();
                if( lin instanceof Transform && ((Transform)lin).getOperationType()==OperationTypes.Transpose )
                    setLops(lin.getInputs().get(0)); //if input is already a transpose, avoid redundant transpose ops
                else if( getDim1()==1 && getDim2()==1 )
                    setLops(lin); //if input of size 1x1, avoid unnecessary transpose
                else { //general case
                    int k = OptimizerUtils.getConstrainedNumThreads(_maxNumThreads);
                    if(DMLScript.USE_ACCELERATOR && (DMLScript.FORCE_ACCELERATOR || getMemEstimate() < GPUContextPool
                            .initialGPUMemBudget())) {
                        et = ExecType.GPU;
                    }
                    Transform transform1 = new Transform( lin,
                            HopsTransf2Lops.get(op), getDataType(), getValueType(), et, k);
                    setOutputDimensions(transform1);
                    setLineNumbers(transform1);
                    setLops(transform1);
                }
                break;
            }
            case DIAG:
            {
                Transform transform1 = new Transform( getInput().get(0).constructLops(),
                        HopsTransf2Lops.get(op), getDataType(), getValueType(), et);
                setOutputDimensions(transform1);
                setLineNumbers(transform1);
                setLops(transform1);
                break;
            }
            case REV:
            {
                Lop rev = null;
                if( et == ExecType.MR ) {
                    //MR: transform, then group+sum to merge partial row shifts
                    Lop tmp = new Transform( getInput().get(0).constructLops(),
                            HopsTransf2Lops.get(op), getDataType(), getValueType(), et);
                    setOutputDimensions(tmp);
                    setLineNumbers(tmp);
                    Group group1 = new Group(tmp, Group.OperationTypes.Sort,
                            DataType.MATRIX, getValueType());
                    setOutputDimensions(group1);
                    setLineNumbers(group1);
                    rev = new Aggregate(group1, Aggregate.OperationTypes.Sum,
                            DataType.MATRIX, getValueType(), et);
                }
                else { //CP/SPARK
                    rev = new Transform( getInput().get(0).constructLops(),
                            HopsTransf2Lops.get(op), getDataType(), getValueType(), et);
                }
                setOutputDimensions(rev);
                setLineNumbers(rev);
                setLops(rev);
                break;
            }
            case RESHAPE:
            {
                if( et==ExecType.MR )
                {
                    Transform transform1 = new Transform( getInput().get(0).constructLops(),
                            HopsTransf2Lops.get(op), getDataType(), getValueType(), et);
                    setOutputDimensions(transform1);
                    setLineNumbers(transform1);
                    for( int i=1; i<=3; i++ ) //rows, cols, byrow
                    {
                        Lop ltmp = getInput().get(i).constructLops();
                        transform1.addInput(ltmp);
                        ltmp.addOutput(transform1);
                    }
                    transform1.setLevel(); //force order of added lops
                    Group group1 = new Group(
                            transform1, Group.OperationTypes.Sort, DataType.MATRIX,
                            getValueType());
                    setOutputDimensions(group1);
                    setLineNumbers(group1);
                    Aggregate agg1 = new Aggregate(
                            group1, Aggregate.OperationTypes.Sum, DataType.MATRIX,
                            getValueType(), et);
                    setOutputDimensions(agg1);
                    setLineNumbers(agg1);
                    setLops(agg1);
                }
                else //CP/SPARK
                {
                    Transform transform1 = new Transform( getInput().get(0).constructLops(),
                            HopsTransf2Lops.get(op), getDataType(), getValueType(), et);
                    setOutputDimensions(transform1);
                    setLineNumbers(transform1);
                    for( int i=1; i<=3; i++ ) //rows, cols, byrow
                    {
                        Lop ltmp = getInput().get(i).constructLops();
                        transform1.addInput(ltmp);
                        ltmp.addOutput(transform1);
                    }
                    transform1.setLevel(); //force order of added lops
                    setLops(transform1);
                }
                break;
            }
            case SORT:
            {
                Hop input = getInput().get(0);
                Hop by = getInput().get(1);
                Hop desc = getInput().get(2);
                Hop ixret = getInput().get(3);

                if( et==ExecType.MR )
                {
                    if( !(desc instanceof LiteralOp && ixret instanceof LiteralOp) ) {
                        LOG.warn("Unsupported non-constant ordering parameters, using defaults and mark for recompilation.");
                        setRequiresRecompile();
                        desc = new LiteralOp(false);
                        ixret = new LiteralOp(false);
                    }

                    //Step 1: extraction (if unknown ncol or multiple columns)
                    Hop vinput = input;
                    if( input.getDim2() != 1 ) {
                        vinput = new IndexingOp("tmp1", getDataType(), getValueType(), input, new LiteralOp(1L),
                                HopRewriteUtils.createValueHop(input, true), by, by, false, true);
                        vinput.refreshSizeInformation();
                        vinput.setOutputBlocksizes(getRowsInBlock(), getColsInBlock());
                        HopRewriteUtils.copyLineNumbers(this, vinput);
                    }

                    //Step 2: Index vector sort
                    Hop voutput = null;
                    if( 2*OptimizerUtils.estimateSize(vinput.getDim1(), vinput.getDim2())
                        > OptimizerUtils.getLocalMemBudget()
                        || FORCE_DIST_SORT_INDEXES )
                    {
                        //large vector, fallback to MR sort
                        //sort indexes according to given values
                        SortKeys sort = new SortKeys(
                                vinput.constructLops(), HopRewriteUtils.getBooleanValueSafe((LiteralOp)desc),
                                SortKeys.OperationTypes.Indexes,
                                vinput.getDataType(), vinput.getValueType(), ExecType.MR);
                        sort.getOutputParameters().setDimensions(vinput.getDim1(), 1,
                                vinput.getRowsInBlock(), vinput.getColsInBlock(), vinput.getNnz());
                        setLineNumbers(sort);
                        //note: this sortindexes includes also the shift by offsets and
                        //final aggregate because sideways passing of offsets would
                        //not nicely fit the current instruction model
                        setLops(sort);
                        voutput = this;
                    }
                    else
                    {
                        //small vector, use in-memory sort
                        ArrayList<Hop> sinputs = new ArrayList<Hop>();
                        sinputs.add(vinput);
                        sinputs.add(new LiteralOp(1)); //by (always vector)
                        sinputs.add(desc);
                        sinputs.add(new LiteralOp(true)); //indexreturn (always indexes)
                        voutput = new ReorgOp("tmp3", getDataType(), getValueType(), ReOrgOp.SORT, sinputs);
                        HopRewriteUtils.copyLineNumbers(this, voutput);
                        //explicitly construct CP lop; otherwise there is danger of infinite recursion if forced runtime platform.
                        voutput.setLops( constructCPOrSparkSortLop(vinput, sinputs.get(1), sinputs.get(2), sinputs.get(3), ExecType.CP, false) );
                        voutput.getLops().getOutputParameters().setDimensions(vinput.getDim1(), vinput.getDim2(), vinput.getRowsInBlock(), vinput.getColsInBlock(), vinput.getNnz());
                        setLops( voutput.constructLops() );
                    }

                    //Step 3: Data permutation (only required for sorting data)
                    // -- done via X' = table(seq(), IX') %*% X;
                    if( !HopRewriteUtils.getBooleanValueSafe((LiteralOp)ixret) )
                    {
                        //generate seq
                        DataGenOp seq = HopRewriteUtils.createSeqDataGenOp(voutput);
                        seq.setName("tmp4");
                        seq.refreshSizeInformation();
                        seq.computeMemEstimate(new MemoTable()); //select exec type
                        HopRewriteUtils.copyLineNumbers(this, seq);
                        //generate table
                        TernaryOp table = new TernaryOp("tmp5", DataType.MATRIX, ValueType.DOUBLE, OpOp3.CTABLE, seq, voutput, new LiteralOp(1L) );
                        table.setOutputBlocksizes(getRowsInBlock(), getColsInBlock());
                        table.refreshSizeInformation();
                        table.setForcedExecType(ExecType.MR); //force MR
                        HopRewriteUtils.copyLineNumbers(this, table);
                        table.setDisjointInputs(true);
                        table.setOutputEmptyBlocks(false);
                        //generate matrix mult
                        AggBinaryOp mmult = HopRewriteUtils.createMatrixMultiply(table, input);
                        mmult.setForcedExecType(ExecType.MR); //force MR
                        setLops( mmult.constructLops() );
                        //cleanups
                        HopRewriteUtils.removeChildReference(table, input);
                    }
                }
                else //CP or Spark
                {
                    if( et==ExecType.SPARK && !FORCE_DIST_SORT_INDEXES)
                        bSortSPRewriteApplicable = isSortSPRewriteApplicable();
                    Lop transform1 = constructCPOrSparkSortLop(input, by, desc, ixret, et, bSortSPRewriteApplicable);
                    setOutputDimensions(transform1);
                    setLineNumbers(transform1);
                    setLops(transform1);
                }
                break;
            }
            default:
                throw new HopsException("Unsupported lops construction for operation type '"+op+"'.");
        }

        //add reblock/checkpoint lops if necessary
        constructAndSetLopsDataFlowProperties();

        return getLops();
    }

    /** Builds the CP/Spark sort transform lop with by/desc/ixret as extra inputs. */
    private static Lop constructCPOrSparkSortLop( Hop input, Hop by, Hop desc, Hop ixret, ExecType et, boolean bSortIndInMem )
        throws HopsException, LopsException
    {
        Transform transform1 = new Transform( input.constructLops(), HopsTransf2Lops.get(ReOrgOp.SORT),
                input.getDataType(), input.getValueType(), et, bSortIndInMem);
        for( Hop c : new Hop[]{by,desc,ixret} ) {
            Lop ltmp = c.constructLops();
            transform1.addInput(ltmp);
            ltmp.addOutput(transform1);
        }
        transform1.setLevel(); //force order of added lops
        return transform1;
    }

    @Override
    protected double computeOutputMemEstimate( long dim1, long dim2, long nnz )
    {
        //no dedicated mem estimation per op type, because always propagated via refreshSizeInformation
        double sparsity = OptimizerUtils.getSparsity(dim1, dim2, nnz);
        return OptimizerUtils.estimateSizeExactSparsity(dim1, dim2, sparsity);
    }

    @Override
    protected double computeIntermediateMemEstimate( long dim1, long dim2, long nnz )
    {
        if( op == ReOrgOp.SORT )
        {
            Hop ixreturn = getInput().get(3);
            if( !(ixreturn instanceof LiteralOp && !HopRewriteUtils.getBooleanValueSafe((LiteralOp)ixreturn)
                  && (dim2==1 || nnz==0) ) ) //NOT early abort case
            {
                //Version 2: memory requirements for temporary index int[] array,
                //(temporary double[] array already covered by output)
                return dim1 * 4;
                //Version 1: memory requirements for temporary index Integer[] array
                //8-16 (12) bytes for object, 4byte int payload, 4-8 (8) byte pointers.
                //return dim1 * 24;
            }
        }
        //default: no intermediate memory requirements
        return 0;
    }

    @Override
    protected long[] inferOutputCharacteristics( MemoTable memo )
    {
        long[] ret = null;
        Hop input = getInput().get(0);
        MatrixCharacteristics mc = memo.getAllInputStats(input);

        switch(op)
        {
            case TRANSPOSE:
            {
                // input is a [k1,k2] matrix and output is a [k2,k1] matrix
                // #nnz in output is exactly the same as in input
                if( mc.dimsKnown() )
                    ret = new long[]{ mc.getCols(), mc.getRows(), mc.getNonZeros() };
                break;
            }
            case REV:
            {
                // dims and nnz are exactly the same as in input
                if( mc.dimsKnown() )
                    ret = new long[]{ mc.getRows(), mc.getCols(), mc.getNonZeros() };
                break;
            }
            case DIAG:
            {
                // NOTE: diag is overloaded according to the number of columns of the input
                long k = mc.getRows();
                // CASE a) DIAG V2M
                // input is a [1,k] or [k,1] matrix, and output is [k,k] matrix
                // #nnz in output is in the worst case k => sparsity = 1/k
                if( k == 1 )
                    ret = new long[]{k, k, ((mc.getNonZeros()>=0) ? mc.getNonZeros() : k)};
                // CASE b) DIAG M2V
                // input is [k,k] matrix and output is [k,1] matrix
                // #nnz in the output is likely to be k (a dense matrix)
                if( k > 1 )
                    ret = new long[]{k, 1, ((mc.getNonZeros()>=0) ? Math.min(k,mc.getNonZeros()) : k) };
                break;
            }
            case RESHAPE:
            {
                // input is a [k1,k2] matrix and output is a [k3,k4] matrix with k1*k2=k3*k4
                // #nnz in output is exactly the same as in input
                if( mc.dimsKnown() ) {
                    if( _dim1 > 0 )
                        ret = new long[]{ _dim1, mc.getRows()*mc.getCols()/_dim1, mc.getNonZeros()};
                    else if( _dim2 > 0 )
                        ret = new long[]{ mc.getRows()*mc.getCols()/_dim2, _dim2, mc.getNonZeros()};
                }
                break;
            }
            case SORT:
            {
                // input is a [k1,k2] matrix and output is a [k1,k3] matrix, where k3=k2 if no index return;
                // otherwise k3=1 (for the index vector)
                Hop input4 = getInput().get(3); //indexreturn
                boolean unknownIxRet = !(input4 instanceof LiteralOp);

                if( !unknownIxRet ) {
                    boolean ixret = HopRewriteUtils.getBooleanValueSafe((LiteralOp)input4);
                    long dim2 = ixret ? 1 : mc.getCols();
                    long nnz = ixret ? mc.getRows() : mc.getNonZeros();
                    ret = new long[]{ mc.getRows(), dim2, nnz};
                }
                else {
                    ret = new long[]{ mc.getRows(), -1, -1};
                }
            }
        }

        return ret;
    }

    @Override
    public boolean allowsAllExecTypes()
    {
        return true;
    }

    @Override
    protected ExecType optFindExecType() throws HopsException {

        checkAndSetForcedPlatform();

        ExecType REMOTE = OptimizerUtils.isSparkExecutionMode() ? ExecType.SPARK : ExecType.MR;

        if( _etypeForced != null )
        {
            _etype = _etypeForced;
        }
        else
        {
            if ( OptimizerUtils.isMemoryBasedOptLevel() ) {
                _etype = findExecTypeByMemEstimate();
            }
            // Choose CP, if the input dimensions are below threshold or if the input is a vector
            else if ( getInput().get(0).areDimsBelowThreshold() || getInput().get(0).isVector() )
            {
                _etype = ExecType.CP;
            }
            else
            {
                _etype = REMOTE;
            }

            //check for valid CP dimensions and matrix size
            checkAndSetInvalidCPDimsAndSize();
        }

        //mark for recompile (forever)
        if( ConfigurationManager.isDynamicRecompilation() && !dimsKnown(true) && _etype==REMOTE )
            setRequiresRecompile();

        return _etype;
    }

    @Override
    public void refreshSizeInformation()
    {
        Hop input1 = getInput().get(0);

        switch(op)
        {
            case TRANSPOSE:
            {
                // input is a [k1,k2] matrix and output is a [k2,k1] matrix
                // #nnz in output is exactly the same as in input
                setDim1(input1.getDim2());
                setDim2(input1.getDim1());
                setNnz(input1.getNnz());
                break;
            }
            case REV:
            {
                // dims and nnz are exactly the same as in input
                setDim1(input1.getDim1());
                setDim2(input1.getDim2());
                setNnz(input1.getNnz());
                break;
            }
            case DIAG:
            {
                // NOTE: diag is overloaded according to the number of columns of the input
                long k = input1.getDim1();
                setDim1(k);
                // CASE a) DIAG_V2M
                // input is a [1,k] or [k,1] matrix, and output is [k,k] matrix
                // #nnz in output is in the worst case k => sparsity = 1/k
                if( input1.getDim2()==1 ) {
                    setDim2(k);
                    setNnz( (input1.getNnz()>=0) ? input1.getNnz() : k );
                }
                // CASE b) DIAG_M2V
                // input is [k,k] matrix and output is [k,1] matrix
                // #nnz in the output is likely to be k (a dense matrix)
                if( input1.getDim2()>1 ){
                    setDim2(1);
                    setNnz( (input1.getNnz()>=0) ? Math.min(k,input1.getNnz()) : k );
                }
                break;
            }
            case RESHAPE:
            {
                // input is a [k1,k2] matrix and output is a [k3,k4] matrix with k1*k2=k3*k4
                // #nnz in output is exactly the same as in input
                Hop input2 = getInput().get(1); //rows
                Hop input3 = getInput().get(2); //cols
                refreshRowsParameterInformation(input2); //refresh rows
                refreshColsParameterInformation(input3); //refresh cols
                setNnz(input1.getNnz());
                if( !dimsKnown() && input1.dimsKnown() ) { //reshape allows to infer dims, if input and 1 dim known
                    if(_dim1 > 0)
                        _dim2 = (input1._dim1*input1._dim2)/_dim1;
                    else if(_dim2 > 0)
                        _dim1 = (input1._dim1*input1._dim2)/_dim2;
                }
                break;
            }
            case SORT:
            {
                // input is a [k1,k2] matrix and output is a [k1,k3] matrix, where k3=k2 if no index return;
                // otherwise k3=1 (for the index vector)
                Hop input4 = getInput().get(3); //indexreturn
                boolean unknownIxRet = !(input4 instanceof LiteralOp);

                _dim1 = input1.getDim1();
                if( !unknownIxRet ) {
                    boolean ixret = HopRewriteUtils.getBooleanValueSafe((LiteralOp)input4);
                    _dim2 = ixret ? 1 : input1.getDim2();
                    _nnz = ixret ? input1.getDim1() : input1.getNnz();
                }
                else {
                    _dim2 = -1;
                    _nnz = -1;
                }
                break;
            }
        }
    }

    @Override
    public Object clone() throws CloneNotSupportedException
    {
        ReorgOp ret = new ReorgOp();

        //copy generic attributes
        ret.clone(this, false);

        //copy specific attributes
        ret.op = op;
        ret._maxNumThreads = _maxNumThreads;

        return ret;
    }

    @Override
    public boolean compare( Hop that )
    {
        if( !(that instanceof ReorgOp) )
            return false;

        ReorgOp that2 = (ReorgOp)that;
        boolean ret = (op == that2.op)
                && (_maxNumThreads == that2._maxNumThreads)
                && (getInput().size()==that.getInput().size());

        //compare all childs (see reshape, sort)
        if( ret ) //sizes matched
            for( int i=0; i<_input.size(); i++ )
                ret &= getInput().get(i) == that2.getInput().get(i);

        return ret;
    }

    /**
     * This will check if there is sufficient memory locally (twice the size of second matrix, for original and sort data), and remotely (size of second matrix (sorted data)).
     * @return true if sufficient memory locally
     */
    private boolean isSortSPRewriteApplicable()
    {
        boolean ret = false;
        Hop input = getInput().get(0);

        //note: both cases (partitioned matrix, and sorted double array), require to
        //fit the broadcast twice into the local memory budget. Also, the memory
        //constraint only needs to take the rhs into account because the output is
        //guaranteed to be an aggregate of <=16KB
        double size = input.dimsKnown() ?
                OptimizerUtils.estimateSize(input.getDim1(), 1) : //dims known and estimate fits
                input.getOutputMemEstimate(); //dims unknown but worst-case estimate fits

        if( OptimizerUtils.checkSparkBroadcastMemoryBudget(size) ) {
            ret = true;
        }

        return ret;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.translog;
import org.apache.lucene.util.Counter;
import org.elasticsearch.Assertions;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.index.seqno.SequenceNumbers;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
public class TranslogDeletionPolicy {

    // Maps each live retention Releasable to the stack trace of its acquisition,
    // for leak diagnosis; null when JVM assertions are disabled (zero overhead).
    private final Map<Object, RuntimeException> openTranslogRef;

    // Throws an AssertionError (with every leaked acquisition attached as a
    // suppressed exception) if any retention lock is still open.
    public void assertNoOpenTranslogRefs() {
        if (openTranslogRef != null && openTranslogRef.isEmpty() == false) {
            AssertionError e = new AssertionError("not all translog generations have been released");
            openTranslogRef.values().forEach(e::addSuppressed);
            throw e;
        }
    }

    /**
     * Records how many retention locks are held against each
     * translog generation
     */
    private final Map<Long, Counter> translogRefCounts = new HashMap<>();

    // Highest local checkpoint of the safe commit; only ever moves forward.
    private long localCheckpointOfSafeCommit = SequenceNumbers.NO_OPS_PERFORMED;

    // Retention thresholds; a negative size/age disables that criterion.
    private long retentionSizeInBytes;

    private long retentionAgeInMillis;

    private int retentionTotalFiles;

    public TranslogDeletionPolicy(long retentionSizeInBytes, long retentionAgeInMillis, int retentionTotalFiles) {
        this.retentionSizeInBytes = retentionSizeInBytes;
        this.retentionAgeInMillis = retentionAgeInMillis;
        this.retentionTotalFiles = retentionTotalFiles;
        if (Assertions.ENABLED) {
            // only pay the tracking cost when -ea is on
            openTranslogRef = new ConcurrentHashMap<>();
        } else {
            openTranslogRef = null;
        }
    }

    // Advances the safe-commit checkpoint; rejects any attempt to move it backwards.
    public synchronized void setLocalCheckpointOfSafeCommit(long newCheckpoint) {
        if (newCheckpoint < this.localCheckpointOfSafeCommit) {
            throw new IllegalArgumentException("local checkpoint of the safe commit can't go backwards: " +
                "current [" + this.localCheckpointOfSafeCommit + "] new [" + newCheckpoint + "]");
        }
        this.localCheckpointOfSafeCommit = newCheckpoint;
    }

    public synchronized void setRetentionSizeInBytes(long bytes) {
        retentionSizeInBytes = bytes;
    }

    public synchronized void setRetentionAgeInMillis(long ageInMillis) {
        retentionAgeInMillis = ageInMillis;
    }

    synchronized void setRetentionTotalFiles(int retentionTotalFiles) {
        this.retentionTotalFiles = retentionTotalFiles;
    }

    /**
     * acquires the basis generation for a new snapshot. Any translog generation above, and including, the returned generation
     * will not be deleted until the returned {@link Releasable} is closed.
     */
    synchronized Releasable acquireTranslogGen(final long translogGen) {
        translogRefCounts.computeIfAbsent(translogGen, l -> Counter.newCounter(false)).addAndGet(1);
        final AtomicBoolean closed = new AtomicBoolean();
        assert assertAddTranslogRef(closed);
        return () -> {
            // compareAndSet makes the returned Releasable idempotent: only the
            // first close() releases the generation.
            if (closed.compareAndSet(false, true)) {
                releaseTranslogGen(translogGen);
                assert assertRemoveTranslogRef(closed);
            }
        };
    }

    // Assertion-only bookkeeping: remembers where each ref was acquired.
    private boolean assertAddTranslogRef(Object reference) {
        final RuntimeException existing = openTranslogRef.put(reference, new RuntimeException());
        if (existing != null) {
            throw new AssertionError("double adding of closing reference", existing);
        }
        return true;
    }

    // Assertion-only bookkeeping: forgets a released ref; true if it was tracked.
    private boolean assertRemoveTranslogRef(Object reference) {
        return openTranslogRef.remove(reference) != null;
    }

    /** returns the number of generations that were acquired for snapshots */
    synchronized int pendingTranslogRefCount() {
        return translogRefCounts.size();
    }

    /**
     * releases a generation that was acquired by {@link #acquireTranslogGen(long)}
     */
    private synchronized void releaseTranslogGen(long translogGen) {
        Counter current = translogRefCounts.get(translogGen);
        if (current == null || current.get() <= 0) {
            throw new IllegalArgumentException("translog gen [" + translogGen + "] wasn't acquired");
        }
        if (current.addAndGet(-1) == 0) {
            // last lock on this generation gone: stop tracking it
            translogRefCounts.remove(translogGen);
        }
    }

    /**
     * returns the minimum translog generation that is still required by the system. Any generation below
     * the returned value may be safely deleted
     *
     * @param readers current translog readers
     * @param writer current translog writer
     */
    synchronized long minTranslogGenRequired(List<TranslogReader> readers, TranslogWriter writer) throws IOException {
        long minByLocks = getMinTranslogGenRequiredByLocks();
        long minByAge = getMinTranslogGenByAge(readers, writer, retentionAgeInMillis, currentTime());
        long minBySize = getMinTranslogGenBySize(readers, writer, retentionSizeInBytes);
        final long minByAgeAndSize;
        if (minBySize == Long.MIN_VALUE && minByAge == Long.MIN_VALUE) {
            // both size and age are disabled;
            minByAgeAndSize = Long.MAX_VALUE;
        } else {
            // keep a generation if EITHER age or size retention still wants it
            minByAgeAndSize = Math.max(minByAge, minBySize);
        }
        long minByNumFiles = getMinTranslogGenByTotalFiles(readers, writer, retentionTotalFiles);
        // file-count cap can force deletion past age/size retention, but open
        // locks always win: never delete a generation someone still holds.
        return Math.min(Math.max(minByAgeAndSize, minByNumFiles), minByLocks);
    }

    // Walks readers newest-to-oldest, accumulating size until the retention
    // budget is exhausted; returns the oldest generation inside the budget,
    // or Long.MIN_VALUE when size-based retention is disabled (negative budget).
    static long getMinTranslogGenBySize(List<TranslogReader> readers, TranslogWriter writer, long retentionSizeInBytes) {
        if (retentionSizeInBytes >= 0) {
            long totalSize = writer.sizeInBytes();
            long minGen = writer.getGeneration();
            for (int i = readers.size() - 1; i >= 0 && totalSize < retentionSizeInBytes; i--) {
                final TranslogReader reader = readers.get(i);
                totalSize += reader.sizeInBytes();
                minGen = reader.getGeneration();
            }
            return minGen;
        } else {
            return Long.MIN_VALUE;
        }
    }

    // Returns the generation of the oldest reader still younger than the max
    // retention age (readers are ordered oldest-first), the writer's generation
    // if all readers are too old, or Long.MIN_VALUE when age retention is disabled.
    static long getMinTranslogGenByAge(List<TranslogReader> readers, TranslogWriter writer, long maxRetentionAgeInMillis, long now)
        throws IOException {
        if (maxRetentionAgeInMillis >= 0) {
            for (TranslogReader reader: readers) {
                if (now - reader.getLastModifiedTime() <= maxRetentionAgeInMillis) {
                    return reader.getGeneration();
                }
            }
            return writer.getGeneration();
        } else {
            return Long.MIN_VALUE;
        }
    }

    // Caps the number of retained translog files: counts newest-to-oldest
    // (writer included) and returns the oldest generation within the cap.
    static long getMinTranslogGenByTotalFiles(List<TranslogReader> readers, TranslogWriter writer, final int maxTotalFiles) {
        long minGen = writer.generation;
        int totalFiles = 1; // for the current writer
        for (int i = readers.size() - 1; i >= 0 && totalFiles < maxTotalFiles; i--) {
            totalFiles++;
            minGen = readers.get(i).generation;
        }
        return minGen;
    }

    // Overridable for deterministic clocks in tests.
    protected long currentTime() {
        return System.currentTimeMillis();
    }

    // Smallest locked generation, or Long.MAX_VALUE when no locks are held
    // (i.e. locks impose no retention constraint).
    private long getMinTranslogGenRequiredByLocks() {
        return translogRefCounts.keySet().stream().reduce(Math::min).orElse(Long.MAX_VALUE);
    }

    /**
     * Returns the local checkpoint of the safe commit. This value is used to calculate the min required generation for recovery.
     */
    public synchronized long getLocalCheckpointOfSafeCommit() {
        return localCheckpointOfSafeCommit;
    }

    // Test/diagnostic hook: current lock count for a single generation.
    synchronized long getTranslogRefCount(long gen) {
        final Counter counter = translogRefCounts.get(gen);
        return counter == null ? 0 : counter.get();
    }
}
| |
package com.mypurecloud.sdk.v2.guest;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.apache.http.Header;
import org.apache.http.HttpEntityEnclosingRequest;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.HttpResponse;
import org.apache.http.HttpResponseInterceptor;
import org.apache.http.RequestLine;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.message.BasicRequestLine;
import org.apache.http.protocol.HttpContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>A filter that logs both requests and responses to SLF4J.
*
 * <h2>Available detail levels</h2>
 * <ul>
 * <li>NONE - don't log anything</li>
 * <li>MINIMAL - only log the verb, url, and response code</li>
 * <li>HEADERS - as above, but also log all the headers for both the request and response</li>
 * <li>FULL - as above, but also log the full body for both the request and response</li>
 * </ul>
*/
public class SLF4JInterceptor implements HttpRequestInterceptor, HttpResponseInterceptor {
    private static final Logger LOGGER = LoggerFactory.getLogger(SLF4JInterceptor.class);

    // Context attribute key used to correlate each response with its originating request
    private static final String SLF4J_REQUEST_DATA = "slf4j-request-data";

    // volatile: the level may be reconfigured from another thread while requests are in flight
    private volatile DetailLevel detailLevel;

    /** Creates an interceptor that logs at the {@link DetailLevel#MINIMAL} level. */
    public SLF4JInterceptor() {
        this(DetailLevel.MINIMAL);
    }

    /**
     * Creates an interceptor that logs at the given level.
     *
     * @param detailLevel - the level of detail to log
     */
    public SLF4JInterceptor(DetailLevel detailLevel) {
        this.detailLevel = detailLevel;
    }

    /**
     * The level of detail to log
     *
     * <ul>
     * <li>NONE - don't log anything</li>
     * <li>MINIMAL - only log the verb, url, and response code</li>
     * <li>HEADERS - as above, but also log all the headers for both the request and response</li>
     * <li>FULL - as above, but also log the full body for both the request and response</li>
     * </ul>
     */
    public enum DetailLevel { // nested enums are implicitly static; the redundant modifier was removed
        NONE, MINIMAL, HEADERS, FULL
    }

    /**
     * @return the current detail level of the filter
     */
    public DetailLevel getDetailLevel() {
        return detailLevel;
    }

    /**
     * Sets the detail level
     *
     * @param detailLevel - the new detail level to use
     */
    public void setDetailLevel(DetailLevel detailLevel) {
        this.detailLevel = detailLevel;
    }

    /** Immutable correlation data captured when a request is sent. */
    private static class RequestData {
        public final RequestLine requestLine;
        public final long startTime; // epoch millis at which the request was processed

        private RequestData(RequestLine requestLine, long startTime) {
            this.requestLine = requestLine;
            this.startTime = startTime;
        }
    }

    /**
     * Request interceptor entry point: stores correlation data on the context and logs the
     * outgoing request. All work is skipped unless DEBUG logging is enabled.
     */
    @Override
    public void process(HttpRequest request, HttpContext context) throws HttpException, IOException {
        if (LOGGER.isDebugEnabled()) {
            RequestData requestData = new RequestData(request.getRequestLine(), System.currentTimeMillis());
            context.setAttribute(SLF4J_REQUEST_DATA, requestData);
            logRequest(request);
        }
    }

    /**
     * Response interceptor entry point: correlates the response with the request recorded on
     * the context and logs it. All work is skipped unless DEBUG logging is enabled.
     */
    @Override
    public void process(HttpResponse response, HttpContext context) throws HttpException, IOException {
        if (LOGGER.isDebugEnabled()) {
            RequestLine requestLine;
            long tookMs;
            Object reqDataAttr = context.getAttribute(SLF4J_REQUEST_DATA);
            // instanceof is false for null, so a separate null check is unnecessary
            if (!(reqDataAttr instanceof RequestData)) {
                LOGGER.error("Could not determine the request associated with this response");
                requestLine = new BasicRequestLine("<UNKNOWN METHOD>", "<UNKNOWN URL>", null);
                tookMs = -1;
            } else {
                RequestData requestData = (RequestData) reqDataAttr;
                requestLine = requestData.requestLine;
                tookMs = System.currentTimeMillis() - requestData.startTime;
            }
            logResponse(response, requestLine, tookMs);
        }
    }

    /**
     * Builds the log message for requests
     *
     * <pre>
     * >>>> GET http://api.example.com/endpoint >>>>
     * ---- HEADERS ----
     * Header-1: Value1
     * Header-2: Value2
     * ---- BODY (24-bytes) ----
     * Body body body body body
     * >>>> END >>>>
     * </pre>
     *
     * @param request - the request to build a message for
     * @throws IOException if the request body cannot be read
     */
    private void logRequest(HttpRequest request) throws IOException {
        if (detailLevel.compareTo(DetailLevel.MINIMAL) >= 0) {
            final StringBuilder messageBuilder = new StringBuilder();

            // Log the verb and url
            String uriString = String.format(">>>> %s %s >>>>", request.getRequestLine().getMethod(), request.getRequestLine().getUri());
            messageBuilder.append(uriString).append(System.lineSeparator());

            // Add the headers
            if (detailLevel.compareTo(DetailLevel.HEADERS) >= 0) {
                messageBuilder.append("---- HEADERS ----").append(System.lineSeparator());
                messageBuilder.append(formatHeaders(request.getAllHeaders()));

                // Add the request body if it exists
                if (detailLevel.compareTo(DetailLevel.FULL) >= 0) {
                    // This is ugly, but it's the only way to access the body
                    if (request instanceof HttpEntityEnclosingRequest &&
                            ((HttpEntityEnclosingRequest) request).getEntity() != null) {
                        byte[] data = extractRequestBody((HttpEntityEnclosingRequest) request);
                        messageBuilder.append(String.format("---- BODY (%d bytes) ----", data.length)).append(System.lineSeparator());
                        // Decode explicitly as UTF-8: the no-charset String constructor uses the
                        // platform default, which makes log output machine-dependent
                        messageBuilder.append(new String(data, StandardCharsets.UTF_8)).append(System.lineSeparator());
                    } else {
                        messageBuilder.append("---- NO BODY ----").append(System.lineSeparator());
                    }
                }
                messageBuilder.append(">>>> END >>>>").append(System.lineSeparator());
            }
            LOGGER.debug(messageBuilder.toString());
        }
    }

    /**
     * Builds the log message for responses
     *
     * <pre>
     * <<<< GET http://api.example.com/endpoint <<<<
     *   404 Not Found (219 ms)
     * ---- HEADERS ----
     * Header-3: Value3
     * Header-4: Value4
     * ---- NO BODY ----
     * <<<< END <<<<
     * </pre>
     *
     * @param response - the response to build a message for
     * @param requestLine - the request line of the initial request for the response
     * @param tookMs - how long the request took, in milliseconds
     * @throws IOException if the response body cannot be read
     */
    private void logResponse(HttpResponse response, RequestLine requestLine, long tookMs) throws IOException {
        if (detailLevel.compareTo(DetailLevel.MINIMAL) >= 0) {
            StringBuilder messageBuilder = new StringBuilder();

            // Log the verb and url, along with the status code
            String uriString = String.format("<<<< %s %s <<<<", requestLine.getMethod(), requestLine.getUri());
            messageBuilder.append(uriString).append(System.lineSeparator());
            messageBuilder.append(String.format("  %d %s (%d ms)",
                    response.getStatusLine().getStatusCode(),
                    response.getStatusLine().getReasonPhrase(),
                    tookMs))
                    .append(System.lineSeparator());

            // Append the headers
            if (detailLevel.compareTo(DetailLevel.HEADERS) >= 0) {
                messageBuilder.append("---- HEADERS ----").append(System.lineSeparator());
                messageBuilder.append(formatHeaders(response.getAllHeaders()));

                // Add the response body if it exists
                if (detailLevel.compareTo(DetailLevel.FULL) >= 0) {
                    // Write the log message
                    if (response.getEntity() != null) {
                        byte[] responseBody = extractResponseBody(response);
                        messageBuilder.append(String.format("---- BODY (%d bytes) ----", responseBody.length)).append(System.lineSeparator());
                        // Decode explicitly as UTF-8 rather than the platform default charset
                        messageBuilder.append(new String(responseBody, StandardCharsets.UTF_8)).append(System.lineSeparator());
                    } else {
                        messageBuilder.append("---- NO BODY ----").append(System.lineSeparator());
                    }
                }
                messageBuilder.append("<<<< END <<<<").append(System.lineSeparator());
            }
            LOGGER.debug(messageBuilder.toString());
        }
    }

    /**
     * Extracts the body of a request, resetting the entity if necessary so
     * that the request behaves as if it were unchanged
     *
     * @param request - the request whose entity should be read
     * @return the body of the request
     * @throws IOException if the entity content cannot be read
     */
    private static byte[] extractRequestBody(HttpEntityEnclosingRequest request) throws IOException {
        byte[] data = toByteArray(request.getEntity().getContent());
        // A non-repeatable entity has just been consumed; replace it with an
        // equivalent in-memory entity so downstream consumers can still read it
        if (!request.getEntity().isRepeatable()) {
            request.setEntity(new ByteArrayEntity(data, ContentType.get(request.getEntity())));
        }
        return data;
    }

    /**
     * Extracts the body of a response, resetting the entity if necessary so
     * that the response behaves as if it were unchanged
     *
     * @param response - the response whose entity should be read
     * @return the body of the response
     * @throws IOException if the entity content cannot be read
     */
    private static byte[] extractResponseBody(HttpResponse response) throws IOException {
        byte[] data = toByteArray(response.getEntity().getContent());
        // A non-repeatable entity has just been consumed; replace it with an
        // equivalent in-memory entity so downstream consumers can still read it
        if (!response.getEntity().isRepeatable()) {
            response.setEntity(new ByteArrayEntity(data, ContentType.get(response.getEntity())));
        }
        return data;
    }

    /**
     * Reads an input stream into a byte array, then closes the stream
     *
     * @param response - the stream to drain; always closed on return
     * @return an array containing all of the data from the stream
     * @throws IOException if reading from the stream fails
     */
    private static byte[] toByteArray(InputStream response) throws IOException {
        final int BUFFER_SIZE = 2048; // How many bytes to copy at once

        // Clone the stream by reading it into a byte array
        byte[] buffer = new byte[BUFFER_SIZE];
        ByteArrayOutputStream byteArrayStream = new ByteArrayOutputStream();
        try (InputStream stream = response) {
            int read;
            while ((read = stream.read(buffer)) != -1) {
                byteArrayStream.write(buffer, 0, read);
            }
            byteArrayStream.flush();
        }
        return byteArrayStream.toByteArray();
    }

    /** Orders headers alphabetically by name for stable, readable log output. */
    private static class HeaderComparator implements Comparator<Header> {
        @Override
        public int compare(Header a, Header b) {
            return a.getName().compareTo(b.getName());
        }
    }

    /**
     * Formats an array of headers into a human-readable format
     *
     * @param headers - the headers
     * @return a string containing all of the headers, one per line, sorted by name
     */
    private static String formatHeaders(Header[] headers) {
        // Sort a copy: Arrays.asList is a live view of the array, and sorting it would
        // reorder the caller's array as a side effect
        List<Header> sortedHeaders = new ArrayList<>(Arrays.asList(headers));
        Collections.sort(sortedHeaders, new HeaderComparator());
        StringBuilder sb = new StringBuilder();
        for (Header header : sortedHeaders) {
            String headerString = String.format("%s: %s", header.getName(), header.getValue());
            sb.append(headerString).append(System.lineSeparator());
        }
        return sb.toString();
    }
}
| |
package cgeo.geocaching.maps.google.v1;
import static android.view.ViewGroup.LayoutParams.WRAP_CONTENT;
import cgeo.geocaching.location.Geopoint;
import cgeo.geocaching.location.Viewport;
import cgeo.geocaching.maps.CachesOverlay;
import cgeo.geocaching.maps.PositionAndScaleOverlay;
import cgeo.geocaching.maps.interfaces.GeneralOverlay;
import cgeo.geocaching.maps.interfaces.GeoPointImpl;
import cgeo.geocaching.maps.interfaces.MapControllerImpl;
import cgeo.geocaching.maps.interfaces.MapProjectionImpl;
import cgeo.geocaching.maps.interfaces.MapViewImpl;
import cgeo.geocaching.maps.interfaces.OnMapDragListener;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.utils.Log;
import com.google.android.maps.GeoPoint;
import com.google.android.maps.MapView;
import org.apache.commons.lang3.reflect.MethodUtils;
import org.eclipse.jdt.annotation.NonNull;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.GestureDetector.SimpleOnGestureListener;
import android.view.Gravity;
import android.view.MotionEvent;
import android.widget.FrameLayout;
import android.widget.ZoomButtonsController;
/**
 * {@link MapView} subclass bridging the Google Maps v1 API to the app's
 * map-abstraction layer ({@link MapViewImpl}).
 */
public class GoogleMapView extends MapView implements MapViewImpl {

    // Detects taps/scrolls so map drags can be reported; left null while in edit mode
    private GestureDetector gestureDetector;
    private OnMapDragListener onDragListener;
    private final GoogleMapController mapController = new GoogleMapController(getController());

    public GoogleMapView(final Context context, final AttributeSet attrs) {
        super(context, attrs);
        initialize(context);
    }

    public GoogleMapView(final Context context, final AttributeSet attrs, final int defStyle) {
        super(context, attrs, defStyle);
        initialize(context);
    }

    public GoogleMapView(final Context context, final String apiKey) {
        super(context, apiKey);
        initialize(context);
    }

    private void initialize(final Context context) {
        if (isInEditMode()) {
            // Layout editors render the view without runtime services; skip gesture wiring
            return;
        }
        gestureDetector = new GestureDetector(context, new GestureListener());
    }

    @Override
    public void draw(final Canvas canvas) {
        try {
            if (getMapZoomLevel() > 22) { // to avoid too close zoom level (mostly on Samsung Galaxy S series)
                getController().setZoom(22);
            }
            super.draw(canvas);
        } catch (final Exception e) {
            Log.e("GoogleMapView.draw", e);
        }
    }

    @Override
    public void displayZoomControls(final boolean takeFocus) {
        try {
            // Push zoom controls to the right
            final FrameLayout.LayoutParams zoomParams = new FrameLayout.LayoutParams(WRAP_CONTENT, WRAP_CONTENT);
            zoomParams.gravity = Gravity.RIGHT;
            // The call to retrieve the zoom buttons controller is undocumented and works so far on all devices
            // supported by Google Play, but fails at least on one Jolla.
            final ZoomButtonsController controller = (ZoomButtonsController) MethodUtils.invokeMethod(this, "getZoomButtonsController");
            controller.getZoomControls().setLayoutParams(zoomParams);
            super.displayZoomControls(takeFocus);
        } catch (final NoSuchMethodException | NoSuchMethodError ignored) {
            // The reflective lookup may fail either at the exception or at the linkage level;
            // both previously had identical handlers, so they are merged into one multi-catch
            Log.w("GoogleMapView.displayZoomControls: unable to explicitly place the zoom buttons");
        } catch (final Exception e) {
            Log.e("GoogleMapView.displayZoomControls", e);
        }
    }

    @Override
    public MapControllerImpl getMapController() {
        return mapController;
    }

    @Override
    @NonNull
    public GeoPointImpl getMapViewCenter() {
        final GeoPoint point = getMapCenter();
        return new GoogleGeoPoint(point.getLatitudeE6(), point.getLongitudeE6());
    }

    /** @return the visible viewport; spans are converted from micro-degrees to degrees */
    @Override
    public Viewport getViewport() {
        return new Viewport(getMapViewCenter(), getLatitudeSpan() / 1e6, getLongitudeSpan() / 1e6);
    }

    @Override
    public void clearOverlays() {
        getOverlays().clear();
    }

    @Override
    public MapProjectionImpl getMapProjection() {
        return new GoogleMapProjection(getProjection());
    }

    /** Creates a caches overlay, registers it on this map, and returns its platform-neutral base. */
    @Override
    public CachesOverlay createAddMapOverlay(final Context context, final Drawable drawable) {
        final GoogleCacheOverlay ovl = new GoogleCacheOverlay(context, drawable);
        getOverlays().add(ovl);
        return ovl.getBase();
    }

    /** Creates a position/scale overlay, registers it on this map, and returns its platform-neutral base. */
    @Override
    public PositionAndScaleOverlay createAddPositionAndScaleOverlay(final Geopoint coords, final String geocode) {
        final GoogleOverlay ovl = new GoogleOverlay(this, coords, geocode);
        getOverlays().add(ovl);
        return (PositionAndScaleOverlay) ovl.getBase();
    }

    @Override
    public int getMapZoomLevel() {
        return getZoomLevel();
    }

    @Override
    public void setMapSource() {
        // Google v1 only distinguishes normal vs. satellite rendering
        setSatellite(GoogleMapProvider.isSatelliteSource(Settings.getMapSource()));
    }

    @Override
    public void repaintRequired(final GeneralOverlay overlay) {
        invalidate();
    }

    @Override
    public void setOnDragListener(final OnMapDragListener onDragListener) {
        this.onDragListener = onDragListener;
    }

    /**
     * Forwards touch events to the gesture detector (for drag notification) before letting the
     * map handle them. Exceptions are logged rather than propagated to keep the UI alive.
     */
    @Override
    public boolean onTouchEvent(final MotionEvent ev) {
        try {
            gestureDetector.onTouchEvent(ev);
            return super.onTouchEvent(ev);
        } catch (final Exception e) {
            Log.e("GoogleMapView.onTouchEvent", e);
        }
        return false;
    }

    /** Translates double taps and scrolls into zoom actions and drag notifications. */
    private class GestureListener extends SimpleOnGestureListener {
        @Override
        public boolean onDoubleTap(final MotionEvent e) {
            // Zoom in centered on the tapped position
            getController().zoomInFixing((int) e.getX(), (int) e.getY());
            if (onDragListener != null) {
                onDragListener.onDrag();
            }
            return true;
        }

        @Override
        public boolean onScroll(final MotionEvent e1, final MotionEvent e2,
                final float distanceX, final float distanceY) {
            if (onDragListener != null) {
                onDragListener.onDrag();
            }
            return super.onScroll(e1, e2, distanceX, distanceY);
        }
    }

    @Override
    public boolean needsInvertedColors() {
        return false;
    }

    @Override
    public boolean hasMapThemes() {
        // Not supported
        return false;
    }

    @Override
    public void setMapTheme() {
        // Not supported
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregationBuilder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.maxBucket;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.core.IsNull.notNullValue;
@ESIntegTestCase.SuiteScopeTestCase
public class MaxBucketIT extends ESIntegTestCase {
private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
static int numDocs;
static int interval;
static int minRandomValue;
static int maxRandomValue;
static int numValueBuckets;
static long[] valueCounts;
    @Override
    public void setupSuiteScopeCluster() throws Exception {
        // "idx" is the main test index; "tag" is mapped as keyword so the terms aggregation can bucket on it.
        assertAcked(client().admin().indices().prepareCreate("idx")
            .addMapping("type", "tag", "type=keyword").get());
        // Index with no mapping for the aggregated field.
        createIndex("idx_unmapped");
        // Randomized corpus shape shared by every test in this suite. NOTE: the order of these
        // random calls is part of the reproducible test seed — do not reorder them.
        numDocs = randomIntBetween(6, 20);
        interval = randomIntBetween(2, 5);
        minRandomValue = 0;
        maxRandomValue = 20;
        // Number of histogram buckets covering [minRandomValue, maxRandomValue] at "interval" width.
        numValueBuckets = ((maxRandomValue - minRandomValue) / interval) + 1;
        // Expected doc count per histogram bucket, accumulated while indexing below.
        valueCounts = new long[numValueBuckets];
        List<IndexRequestBuilder> builders = new ArrayList<>();
        for (int i = 0; i < numDocs; i++) {
            int fieldValue = randomIntBetween(minRandomValue, maxRandomValue);
            builders.add(client().prepareIndex("idx", "type").setSource(
                    jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval))
                            .endObject()));
            final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? -1 : 0) - (minRandomValue / interval - 1);
            valueCounts[bucket]++;
        }
        // Secondary index with sparse values; consumers are presumably tests outside this excerpt — confirm.
        assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer"));
        for (int i = 0; i < 2; i++) {
            builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource(
                    jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()));
        }
        indexRandom(true, builders);
        ensureSearchable();
    }
public void testDocCountTopLevel() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
.addAggregation(maxBucket("max_bucket", "histo>_count")).execute().actionGet();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numValueBuckets));
List<String> maxKeys = new ArrayList<>();
double maxValue = Double.NEGATIVE_INFINITY;
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
if (bucket.getDocCount() > maxValue) {
maxValue = bucket.getDocCount();
maxKeys = new ArrayList<>();
maxKeys.add(bucket.getKeyAsString());
} else if (bucket.getDocCount() == maxValue) {
maxKeys.add(bucket.getKeyAsString());
}
}
InternalBucketMetricValue maxBucketValue = response.getAggregations().get("max_bucket");
assertThat(maxBucketValue, notNullValue());
assertThat(maxBucketValue.getName(), equalTo("max_bucket"));
assertThat(maxBucketValue.value(), equalTo(maxValue));
assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()])));
}
    /**
     * max_bucket over {@code histo>_count} where the histogram/pipeline pair is nested under a
     * terms aggregation: the pipeline maximum is re-derived and checked per terms bucket.
     */
    public void testDocCountAsSubAgg() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        terms("terms")
                                .field("tag")
                                .order(BucketOrder.key(true))
                                .subAggregation(
                                        histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                                                .extendedBounds(minRandomValue, maxRandomValue))
                                .subAggregation(maxBucket("max_bucket", "histo>_count"))).execute().actionGet();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        List<? extends Terms.Bucket> termsBuckets = terms.getBuckets();
        assertThat(termsBuckets.size(), equalTo(interval));
        for (int i = 0; i < interval; ++i) {
            Terms.Bucket termsBucket = termsBuckets.get(i);
            assertThat(termsBucket, notNullValue());
            assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
            Histogram histo = termsBucket.getAggregations().get("histo");
            assertThat(histo, notNullValue());
            assertThat(histo.getName(), equalTo("histo"));
            List<? extends Bucket> buckets = histo.getBuckets();
            // Track the maximum doc count and all keys tying for it within this terms bucket.
            List<String> maxKeys = new ArrayList<>();
            double maxValue = Double.NEGATIVE_INFINITY;
            for (int j = 0; j < numValueBuckets; ++j) {
                Histogram.Bucket bucket = buckets.get(j);
                assertThat(bucket, notNullValue());
                assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
                if (bucket.getDocCount() > maxValue) {
                    maxValue = bucket.getDocCount();
                    maxKeys = new ArrayList<>();
                    maxKeys.add(bucket.getKeyAsString());
                } else if (bucket.getDocCount() == maxValue) {
                    maxKeys.add(bucket.getKeyAsString());
                }
            }
            // The nested pipeline result must match the hand-computed maximum for this terms bucket.
            InternalBucketMetricValue maxBucketValue = termsBucket.getAggregations().get("max_bucket");
            assertThat(maxBucketValue, notNullValue());
            assertThat(maxBucketValue.getName(), equalTo("max_bucket"));
            assertThat(maxBucketValue.value(), equalTo(maxValue));
            assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()])));
        }
    }
public void testMetricTopLevel() throws Exception {
SearchResponse response = client()
.prepareSearch("idx")
.addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.addAggregation(maxBucket("max_bucket", "terms>sum")).execute().actionGet();
assertSearchResponse(response);
Terms terms = response.getAggregations().get("terms");
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo("terms"));
List<? extends Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(interval));
List<String> maxKeys = new ArrayList<>();
double maxValue = Double.NEGATIVE_INFINITY;
for (int i = 0; i < interval; ++i) {
Terms.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
assertThat(bucket.getDocCount(), greaterThan(0L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
if (sum.value() > maxValue) {
maxValue = sum.value();
maxKeys = new ArrayList<>();
maxKeys.add(bucket.getKeyAsString());
} else if (sum.value() == maxValue) {
maxKeys.add(bucket.getKeyAsString());
}
}
InternalBucketMetricValue maxBucketValue = response.getAggregations().get("max_bucket");
assertThat(maxBucketValue, notNullValue());
assertThat(maxBucketValue.getName(), equalTo("max_bucket"));
assertThat(maxBucketValue.value(), equalTo(maxValue));
assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()])));
}
    /**
     * max_bucket over {@code histo>sum} nested under a terms aggregation. With the default gap
     * policy, empty histogram buckets carry no metric, so they are skipped when recomputing the
     * expected maximum (contrast with {@link #testMetricAsSubAggWithInsertZeros()}).
     */
    public void testMetricAsSubAgg() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        terms("terms")
                                .field("tag")
                                .order(BucketOrder.key(true))
                                .subAggregation(
                                        histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                                                .extendedBounds(minRandomValue, maxRandomValue)
                                                .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                                .subAggregation(maxBucket("max_bucket", "histo>sum"))).execute().actionGet();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        List<? extends Terms.Bucket> termsBuckets = terms.getBuckets();
        assertThat(termsBuckets.size(), equalTo(interval));
        for (int i = 0; i < interval; ++i) {
            Terms.Bucket termsBucket = termsBuckets.get(i);
            assertThat(termsBucket, notNullValue());
            assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
            Histogram histo = termsBucket.getAggregations().get("histo");
            assertThat(histo, notNullValue());
            assertThat(histo.getName(), equalTo("histo"));
            List<? extends Bucket> buckets = histo.getBuckets();
            // Track the maximum sum and all keys tying for it within this terms bucket.
            List<String> maxKeys = new ArrayList<>();
            double maxValue = Double.NEGATIVE_INFINITY;
            for (int j = 0; j < numValueBuckets; ++j) {
                Histogram.Bucket bucket = buckets.get(j);
                assertThat(bucket, notNullValue());
                assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
                // Empty buckets have no meaningful sum under the default gap policy; skip them.
                if (bucket.getDocCount() != 0) {
                    Sum sum = bucket.getAggregations().get("sum");
                    assertThat(sum, notNullValue());
                    if (sum.value() > maxValue) {
                        maxValue = sum.value();
                        maxKeys = new ArrayList<>();
                        maxKeys.add(bucket.getKeyAsString());
                    } else if (sum.value() == maxValue) {
                        maxKeys.add(bucket.getKeyAsString());
                    }
                }
            }
            InternalBucketMetricValue maxBucketValue = termsBucket.getAggregations().get("max_bucket");
            assertThat(maxBucketValue, notNullValue());
            assertThat(maxBucketValue.getName(), equalTo("max_bucket"));
            assertThat(maxBucketValue.value(), equalTo(maxValue));
            assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()])));
        }
    }
    /**
     * max_bucket over {@code histo>sum} nested under a single-bucket (filter) aggregation:
     * only documents tagged "tag0" participate, and the pipeline maximum is recomputed by hand.
     */
    public void testMetricAsSubAggOfSingleBucketAgg() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        filter("filter", termQuery("tag", "tag0"))
                                .subAggregation(
                                        histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                                                .extendedBounds(minRandomValue, maxRandomValue)
                                                .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                                .subAggregation(maxBucket("max_bucket", "histo>sum"))).execute().actionGet();
        assertSearchResponse(response);
        Filter filter = response.getAggregations().get("filter");
        assertThat(filter, notNullValue());
        assertThat(filter.getName(), equalTo("filter"));
        Histogram histo = filter.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();
        // Track the maximum sum and all keys tying for it; empty buckets are skipped
        // because they carry no metric under the default gap policy.
        List<String> maxKeys = new ArrayList<>();
        double maxValue = Double.NEGATIVE_INFINITY;
        for (int j = 0; j < numValueBuckets; ++j) {
            Histogram.Bucket bucket = buckets.get(j);
            assertThat(bucket, notNullValue());
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
            if (bucket.getDocCount() != 0) {
                Sum sum = bucket.getAggregations().get("sum");
                assertThat(sum, notNullValue());
                if (sum.value() > maxValue) {
                    maxValue = sum.value();
                    maxKeys = new ArrayList<>();
                    maxKeys.add(bucket.getKeyAsString());
                } else if (sum.value() == maxValue) {
                    maxKeys.add(bucket.getKeyAsString());
                }
            }
        }
        InternalBucketMetricValue maxBucketValue = filter.getAggregations().get("max_bucket");
        assertThat(maxBucketValue, notNullValue());
        assertThat(maxBucketValue.getName(), equalTo("max_bucket"));
        assertThat(maxBucketValue.value(), equalTo(maxValue));
        assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()])));
    }
    /**
     * Like {@link #testMetricAsSubAgg()}, but with {@link GapPolicy#INSERT_ZEROS}: empty
     * histogram buckets contribute a sum of 0 to the pipeline, so the expected maximum is
     * recomputed over every bucket — including empty ones — rather than skipping them.
     */
    public void testMetricAsSubAggWithInsertZeros() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        terms("terms")
                                .field("tag")
                                .order(BucketOrder.key(true))
                                .subAggregation(
                                        histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                                                .extendedBounds(minRandomValue, maxRandomValue)
                                                .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                                .subAggregation(maxBucket("max_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS)))
                .execute().actionGet();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        List<? extends Terms.Bucket> termsBuckets = terms.getBuckets();
        assertThat(termsBuckets.size(), equalTo(interval));
        for (int i = 0; i < interval; ++i) {
            Terms.Bucket termsBucket = termsBuckets.get(i);
            assertThat(termsBucket, notNullValue());
            assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
            Histogram histo = termsBucket.getAggregations().get("histo");
            assertThat(histo, notNullValue());
            assertThat(histo.getName(), equalTo("histo"));
            List<? extends Bucket> buckets = histo.getBuckets();
            // Every bucket is considered here — no doc-count guard — because INSERT_ZEROS
            // makes empty buckets contribute a zero sum to the pipeline.
            List<String> maxKeys = new ArrayList<>();
            double maxValue = Double.NEGATIVE_INFINITY;
            for (int j = 0; j < numValueBuckets; ++j) {
                Histogram.Bucket bucket = buckets.get(j);
                assertThat(bucket, notNullValue());
                assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
                Sum sum = bucket.getAggregations().get("sum");
                assertThat(sum, notNullValue());
                if (sum.value() > maxValue) {
                    maxValue = sum.value();
                    maxKeys = new ArrayList<>();
                    maxKeys.add(bucket.getKeyAsString());
                } else if (sum.value() == maxValue) {
                    maxKeys.add(bucket.getKeyAsString());
                }
            }
            InternalBucketMetricValue maxBucketValue = termsBucket.getAggregations().get("max_bucket");
            assertThat(maxBucketValue, notNullValue());
            assertThat(maxBucketValue.getName(), equalTo("max_bucket"));
            assertThat(maxBucketValue.value(), equalTo(maxValue));
            assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()])));
        }
    }
public void testNoBuckets() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*"))
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.addAggregation(maxBucket("max_bucket", "terms>sum")).execute().actionGet();
assertSearchResponse(response);
Terms terms = response.getAggregations().get("terms");
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo("terms"));
List<? extends Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(0));
InternalBucketMetricValue maxBucketValue = response.getAggregations().get("max_bucket");
assertThat(maxBucketValue, notNullValue());
assertThat(maxBucketValue.getName(), equalTo("max_bucket"));
assertThat(maxBucketValue.value(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(maxBucketValue.keys(), equalTo(new String[0]));
}
    /**
     * Two-level pipeline: an inner max_bucket over {@code histo>_count} inside each terms
     * bucket, and an outer max_bucket over the inner pipeline results
     * ({@code terms>max_histo_bucket}). Both levels are verified against hand-computed maxima.
     */
    public void testNested() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        terms("terms")
                                .field("tag")
                                .order(BucketOrder.key(true))
                                .subAggregation(
                                        histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                                                .extendedBounds(minRandomValue, maxRandomValue))
                                .subAggregation(maxBucket("max_histo_bucket", "histo>_count")))
                .addAggregation(maxBucket("max_terms_bucket", "terms>max_histo_bucket")).execute().actionGet();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        List<? extends Terms.Bucket> termsBuckets = terms.getBuckets();
        assertThat(termsBuckets.size(), equalTo(interval));
        // Outer expectation: the maximum of the per-terms-bucket inner maxima.
        List<String> maxTermsKeys = new ArrayList<>();
        double maxTermsValue = Double.NEGATIVE_INFINITY;
        for (int i = 0; i < interval; ++i) {
            Terms.Bucket termsBucket = termsBuckets.get(i);
            assertThat(termsBucket, notNullValue());
            assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
            Histogram histo = termsBucket.getAggregations().get("histo");
            assertThat(histo, notNullValue());
            assertThat(histo.getName(), equalTo("histo"));
            List<? extends Bucket> buckets = histo.getBuckets();
            // Inner expectation: the maximum histogram doc count within this terms bucket.
            List<String> maxHistoKeys = new ArrayList<>();
            double maxHistoValue = Double.NEGATIVE_INFINITY;
            for (int j = 0; j < numValueBuckets; ++j) {
                Histogram.Bucket bucket = buckets.get(j);
                assertThat(bucket, notNullValue());
                assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
                if (bucket.getDocCount() > maxHistoValue) {
                    maxHistoValue = bucket.getDocCount();
                    maxHistoKeys = new ArrayList<>();
                    maxHistoKeys.add(bucket.getKeyAsString());
                } else if (bucket.getDocCount() == maxHistoValue) {
                    maxHistoKeys.add(bucket.getKeyAsString());
                }
            }
            InternalBucketMetricValue maxBucketValue = termsBucket.getAggregations().get("max_histo_bucket");
            assertThat(maxBucketValue, notNullValue());
            assertThat(maxBucketValue.getName(), equalTo("max_histo_bucket"));
            assertThat(maxBucketValue.value(), equalTo(maxHistoValue));
            assertThat(maxBucketValue.keys(), equalTo(maxHistoKeys.toArray(new String[maxHistoKeys.size()])));
            // Fold this terms bucket's inner maximum into the outer expectation.
            if (maxHistoValue > maxTermsValue) {
                maxTermsValue = maxHistoValue;
                maxTermsKeys = new ArrayList<>();
                maxTermsKeys.add(termsBucket.getKeyAsString());
            } else if (maxHistoValue == maxTermsValue) {
                maxTermsKeys.add(termsBucket.getKeyAsString());
            }
        }
        InternalBucketMetricValue maxBucketValue = response.getAggregations().get("max_terms_bucket");
        assertThat(maxBucketValue, notNullValue());
        assertThat(maxBucketValue.getName(), equalTo("max_terms_bucket"));
        assertThat(maxBucketValue.value(), equalTo(maxTermsValue));
        assertThat(maxBucketValue.keys(), equalTo(maxTermsKeys.toArray(new String[maxTermsKeys.size()])));
    }
    /**
     * Regression test for https://github.com/elastic/elasticsearch/issues/33514
     *
     * This bug manifests as the max_bucket agg ("peak") being added to the response twice, because
     * the pipeline agg is run twice. This makes invalid JSON and breaks conversion to maps.
     * The bug was caused by an UnmappedTerms being chosen as the first reduction target. UnmappedTerms
     * delegated reduction to the first non-unmapped agg, which would reduce and run pipeline aggs. But then
     * execution returns to the UnmappedTerms and _it_ runs pipelines as well, doubling up on the values.
     *
     * Applies to any pipeline agg, not just max.
     */
    public void testFieldIsntWrittenOutTwice() throws Exception {
        // you need to add an additional index with no fields in order to trigger this (or potentially a shard)
        // so that there is an UnmappedTerms in the list to reduce.
        createIndex("foo_1");
        // Mapping for the second index: @timestamp (date), license.count (long),
        // license.partnumber (text with a keyword sub-field used for the terms agg below).
        XContentBuilder builder = jsonBuilder().startObject().startObject("properties")
            .startObject("@timestamp").field("type", "date").endObject()
            .startObject("license").startObject("properties")
            .startObject("count").field("type", "long").endObject()
            .startObject("partnumber").field("type", "text").startObject("fields").startObject("keyword")
            .field("type", "keyword").field("ignore_above", 256)
            .endObject().endObject().endObject()
            .endObject().endObject().endObject().endObject();
        assertAcked(client().admin().indices().prepareCreate("foo_2")
            .addMapping("doc", builder).get());
        // One document so foo_2 has a mapped, non-empty terms aggregation to reduce against.
        XContentBuilder docBuilder = jsonBuilder().startObject()
            .startObject("license").field("partnumber", "foobar").field("count", 2).endObject()
            .field("@timestamp", "2018-07-08T08:07:00.599Z")
            .endObject();
        client().prepareIndex("foo_2", "doc").setSource(docBuilder).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();
        // NOTE(review): this refresh request is built but never executed (no .get()/.execute());
        // the IMMEDIATE refresh policy above already makes the document searchable.
        client().admin().indices().prepareRefresh();
        // terms -> date_histogram -> sum, plus a max_bucket pipeline agg ("peak") at the terms level.
        TermsAggregationBuilder groupByLicenseAgg = AggregationBuilders.terms("group_by_license_partnumber")
            .field("license.partnumber.keyword");
        MaxBucketPipelineAggregationBuilder peakPipelineAggBuilder =
            PipelineAggregatorBuilders.maxBucket("peak", "licenses_per_day>total_licenses");
        SumAggregationBuilder sumAggBuilder = AggregationBuilders.sum("total_licenses").field("license.count");
        DateHistogramAggregationBuilder licensePerDayBuilder =
            AggregationBuilders.dateHistogram("licenses_per_day").field("@timestamp").dateHistogramInterval(DateHistogramInterval.DAY);
        licensePerDayBuilder.subAggregation(sumAggBuilder);
        groupByLicenseAgg.subAggregation(licensePerDayBuilder);
        groupByLicenseAgg.subAggregation(peakPipelineAggBuilder);
        SearchResponse response = client().prepareSearch("foo_*").setSize(0).addAggregation(groupByLicenseAgg).get();
        // convertToMap would fail on duplicate "peak" keys, which is exactly what the bug produced.
        BytesReference bytes = XContentHelper.toXContent(response, XContentType.JSON, false);
        XContentHelper.convertToMap(bytes, false, XContentType.JSON);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.portfolio.loanaccount.api;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriInfo;
import org.apache.commons.lang.StringUtils;
import org.apache.fineract.accounting.journalentry.api.DateParam;
import org.apache.fineract.commands.domain.CommandWrapper;
import org.apache.fineract.commands.service.CommandWrapperBuilder;
import org.apache.fineract.commands.service.PortfolioCommandSourceWritePlatformService;
import org.apache.fineract.infrastructure.core.api.ApiRequestParameterHelper;
import org.apache.fineract.infrastructure.core.data.CommandProcessingResult;
import org.apache.fineract.infrastructure.core.exception.UnrecognizedQueryParamException;
import org.apache.fineract.infrastructure.core.serialization.ApiRequestJsonSerializationSettings;
import org.apache.fineract.infrastructure.core.serialization.DefaultToApiJsonSerializer;
import org.apache.fineract.infrastructure.core.service.DateUtils;
import org.apache.fineract.infrastructure.security.service.PlatformSecurityContext;
import org.apache.fineract.portfolio.loanaccount.data.LoanTransactionData;
import org.apache.fineract.portfolio.loanaccount.service.LoanReadPlatformService;
import org.apache.fineract.portfolio.paymenttype.data.PaymentTypeData;
import org.apache.fineract.portfolio.paymenttype.service.PaymentTypeReadPlatformService;
import org.joda.time.LocalDate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
/**
 * REST resource for loan transactions: exposes templates for creating new transactions,
 * retrieval of individual transactions, and POST endpoints that execute or adjust
 * transaction commands via the command-source write service.
 */
@Path("/loans/{loanId}/transactions")
@Component
@Scope("singleton")
public class LoanTransactionsApiResource {

    // Whitelist of fields included when serializing LoanTransactionData responses.
    // (static final: this set is identical for every instance and never mutated)
    private static final Set<String> RESPONSE_DATA_PARAMETERS = new HashSet<>(Arrays.asList("id", "type", "date", "currency",
            "amount", "externalId"));

    // Permission resource name checked before any read operation.
    private static final String RESOURCE_NAME_FOR_PERMISSIONS = "LOAN";

    private final PlatformSecurityContext context;
    private final LoanReadPlatformService loanReadPlatformService;
    private final ApiRequestParameterHelper apiRequestParameterHelper;
    private final DefaultToApiJsonSerializer<LoanTransactionData> toApiJsonSerializer;
    private final PortfolioCommandSourceWritePlatformService commandsSourceWritePlatformService;
    private final PaymentTypeReadPlatformService paymentTypeReadPlatformService;

    @Autowired
    public LoanTransactionsApiResource(final PlatformSecurityContext context, final LoanReadPlatformService loanReadPlatformService,
            final ApiRequestParameterHelper apiRequestParameterHelper,
            final DefaultToApiJsonSerializer<LoanTransactionData> toApiJsonSerializer,
            final PortfolioCommandSourceWritePlatformService commandsSourceWritePlatformService,
            final PaymentTypeReadPlatformService paymentTypeReadPlatformService) {
        this.context = context;
        this.loanReadPlatformService = loanReadPlatformService;
        this.apiRequestParameterHelper = apiRequestParameterHelper;
        this.toApiJsonSerializer = toApiJsonSerializer;
        this.commandsSourceWritePlatformService = commandsSourceWritePlatformService;
        this.paymentTypeReadPlatformService = paymentTypeReadPlatformService;
    }

    /** Returns true when {@code commandParam} case-insensitively matches {@code commandValue}, ignoring surrounding blanks. */
    private boolean is(final String commandParam, final String commandValue) {
        return StringUtils.isNotBlank(commandParam) && commandParam.trim().equalsIgnoreCase(commandValue);
    }

    /**
     * Resolves the effective transaction date from the request parameters, falling back to the
     * tenant's current date when no explicit date was supplied.
     */
    private LocalDate resolveTransactionDate(final DateParam transactionDateParam, final String dateFormat, final String locale) {
        if (transactionDateParam == null) {
            return DateUtils.getLocalDateOfTenant();
        }
        return LocalDate.fromDateFields(transactionDateParam.getDate("transactionDate", dateFormat, locale));
    }

    /**
     * Returns a template describing the data needed to create a transaction of the type selected
     * by {@code command} (repayment, waiveinterest, writeoff, close, disburse, prepayLoan, ...).
     *
     * @throws UnrecognizedQueryParamException when {@code command} is missing or unsupported
     */
    @GET
    @Path("template")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    public String retrieveTransactionTemplate(@PathParam("loanId") final Long loanId, @QueryParam("command") final String commandParam,
            @Context final UriInfo uriInfo, @QueryParam("dateFormat") final String dateFormat,
            @QueryParam("transactionDate") final DateParam transactionDateParam, @QueryParam("locale") final String locale) {

        this.context.authenticatedUser().validateHasReadPermission(RESOURCE_NAME_FOR_PERMISSIONS);

        LoanTransactionData transactionData = null;
        if (is(commandParam, "repayment")) {
            transactionData = this.loanReadPlatformService.retrieveLoanTransactionTemplate(loanId);
        } else if (is(commandParam, "waiveinterest")) {
            transactionData = this.loanReadPlatformService.retrieveWaiveInterestDetails(loanId);
        } else if (is(commandParam, "writeoff")) {
            transactionData = this.loanReadPlatformService.retrieveLoanWriteoffTemplate(loanId);
        } else if (is(commandParam, "close-rescheduled") || is(commandParam, "close")) {
            // Both closure variants share the same template data.
            transactionData = this.loanReadPlatformService.retrieveNewClosureDetails();
        } else if (is(commandParam, "disburse")) {
            transactionData = this.loanReadPlatformService.retrieveDisbursalTemplate(loanId, true);
        } else if (is(commandParam, "disburseToSavings")) {
            transactionData = this.loanReadPlatformService.retrieveDisbursalTemplate(loanId, false);
        } else if (is(commandParam, "recoverypayment")) {
            transactionData = this.loanReadPlatformService.retrieveRecoveryPaymentTemplate(loanId);
        } else if (is(commandParam, "prepayLoan")) {
            final LocalDate transactionDate = resolveTransactionDate(transactionDateParam, dateFormat, locale);
            transactionData = this.loanReadPlatformService.retrieveLoanPrePaymentTemplate(loanId, transactionDate);
        } else if (is(commandParam, "refundbycash")) {
            transactionData = this.loanReadPlatformService.retrieveRefundByCashTemplate(loanId);
        } else if (is(commandParam, "refundbytransfer")) {
            transactionData = this.loanReadPlatformService.retrieveDisbursalTemplate(loanId, true);
        } else if (is(commandParam, "foreclosure")) {
            final LocalDate transactionDate = resolveTransactionDate(transactionDateParam, dateFormat, locale);
            transactionData = this.loanReadPlatformService.retrieveLoanForeclosureTemplate(loanId, transactionDate);
        } else {
            throw new UnrecognizedQueryParamException("command", commandParam);
        }

        final ApiRequestJsonSerializationSettings settings = this.apiRequestParameterHelper.process(uriInfo.getQueryParameters());
        return this.toApiJsonSerializer.serialize(settings, transactionData, RESPONSE_DATA_PARAMETERS);
    }

    /**
     * Retrieves a single loan transaction. When the {@code template} query parameter is set, the
     * response additionally includes the available payment-type options.
     */
    @GET
    @Path("{transactionId}")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    public String retrieveTransaction(@PathParam("loanId") final Long loanId, @PathParam("transactionId") final Long transactionId,
            @Context final UriInfo uriInfo) {

        this.context.authenticatedUser().validateHasReadPermission(RESOURCE_NAME_FOR_PERMISSIONS);

        LoanTransactionData transactionData = this.loanReadPlatformService.retrieveLoanTransaction(loanId, transactionId);

        final ApiRequestJsonSerializationSettings settings = this.apiRequestParameterHelper.process(uriInfo.getQueryParameters());
        if (settings.isTemplate()) {
            final Collection<PaymentTypeData> paymentTypeOptions = this.paymentTypeReadPlatformService.retrieveAllPaymentTypes();
            transactionData = LoanTransactionData.templateOnTop(transactionData, paymentTypeOptions);
        }

        return this.toApiJsonSerializer.serialize(settings, transactionData, RESPONSE_DATA_PARAMETERS);
    }

    /**
     * Executes the loan-transaction command named by {@code command} and returns the serialized
     * command-processing result.
     *
     * @throws UnrecognizedQueryParamException when {@code command} is missing or unsupported
     */
    @POST
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    public String executeLoanTransaction(@PathParam("loanId") final Long loanId, @QueryParam("command") final String commandParam,
            final String apiRequestBodyAsJson) {

        final CommandWrapperBuilder builder = new CommandWrapperBuilder().withJson(apiRequestBodyAsJson);

        CommandProcessingResult result = null;
        if (is(commandParam, "repayment")) {
            final CommandWrapper commandRequest = builder.loanRepaymentTransaction(loanId).build();
            result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
        } else if (is(commandParam, "waiveinterest")) {
            final CommandWrapper commandRequest = builder.waiveInterestPortionTransaction(loanId).build();
            result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
        } else if (is(commandParam, "writeoff")) {
            final CommandWrapper commandRequest = builder.writeOffLoanTransaction(loanId).build();
            result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
        } else if (is(commandParam, "close-rescheduled")) {
            final CommandWrapper commandRequest = builder.closeLoanAsRescheduledTransaction(loanId).build();
            result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
        } else if (is(commandParam, "close")) {
            final CommandWrapper commandRequest = builder.closeLoanTransaction(loanId).build();
            result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
        } else if (is(commandParam, "undowriteoff")) {
            final CommandWrapper commandRequest = builder.undoWriteOffLoanTransaction(loanId).build();
            result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
        } else if (is(commandParam, "recoverypayment")) {
            final CommandWrapper commandRequest = builder.loanRecoveryPaymentTransaction(loanId).build();
            result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
        } else if (is(commandParam, "refundByCash")) {
            final CommandWrapper commandRequest = builder.refundLoanTransactionByCash(loanId).build();
            result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
        } else if (is(commandParam, "foreclosure")) {
            final CommandWrapper commandRequest = builder.loanForeclosure(loanId).build();
            result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
        }

        if (result == null) { throw new UnrecognizedQueryParamException("command", commandParam); }

        return this.toApiJsonSerializer.serialize(result);
    }

    /** Adjusts (e.g. reverses/modifies) an existing loan transaction via the command pipeline. */
    @POST
    @Path("{transactionId}")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    public String adjustLoanTransaction(@PathParam("loanId") final Long loanId, @PathParam("transactionId") final Long transactionId,
            final String apiRequestBodyAsJson) {

        final CommandWrapperBuilder builder = new CommandWrapperBuilder().withJson(apiRequestBodyAsJson);
        final CommandWrapper commandRequest = builder.adjustTransaction(loanId, transactionId).build();
        final CommandProcessingResult result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);

        return this.toApiJsonSerializer.serialize(result);
    }
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2007-2015 Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.broad.igv.sam;
import org.broad.igv.AbstractHeadlessTest;
import org.junit.Ignore;
import java.util.*;
import static org.junit.Assert.assertTrue;
/**
* @author jrobinso
*/
@Ignore("The active tests in this class have been moved to AlignmentDataManagerTest")
public class AlignmentIntervalLoaderTest extends AbstractHeadlessTest {
public static void main(String[] args) {
//Represents total number of alignments
long totalLength = (long) 1e6;
//Memory used per alignment
int longseach = 100;
int maxKeep = 1000;
float fmaxKeep = (float) maxKeep;
int maxBucketDepth = (int) 1e5;
long seed = 5310431327l;
long t1 = System.currentTimeMillis();
liveSample(totalLength, longseach, seed, maxKeep, maxBucketDepth * 10);
long t2 = System.currentTimeMillis();
System.out.println("Time for live sampling: " + (t2 - t1) + " mSec");
long t3 = System.currentTimeMillis();
downSample(totalLength, longseach, seed, maxKeep, maxBucketDepth);
long t4 = System.currentTimeMillis();
System.out.println("Time for down sampling: " + (t4 - t3) + " mSec");
}
/**
* Test that our live sample gives a uniform distribution
*/
@Ignore
public void testLiveSample() throws Exception {
int totalLength = (int) 1e4;
//Store the number of times each index is sampled
int[] counts = new int[totalLength];
List<long[]> samples;
int longseach = 1;
long seed = 212338399;
Random rand = new Random(seed);
int maxKeep = 1000;
int maxBucketDepth = Integer.MAX_VALUE;
int trials = 10000;
for (int _ = 0; _ < trials; _++) {
seed = rand.nextLong();
samples = liveSample(totalLength, longseach, seed, maxKeep, maxBucketDepth);
for (long[] dat : samples) {
counts[(int) dat[0]] += 1;
}
}
float avgFreq = ((float) maxKeep) / totalLength;
int avgCount = (int) (avgFreq * trials);
double stdDev = Math.sqrt(trials / 12);
int numStds = 4;
int ind = 0;
//System.out.println("Expected number of times sampled: " + avgCount + ". Stdev " + stdDev);
for (int cnt : counts) {
//System.out.println("ind: " + ind + " cnt: " + cnt);
assertTrue("Index " + ind + " outside of expected sampling range at " + cnt, Math.abs(cnt - avgCount) < numStds * stdDev);
ind++;
}
}
private static List<long[]> liveSample(long totalLength, int longseach, long seed, int maxKeep, int maxBucketDepth) {
List<long[]> liveSampled = new ArrayList<long[]>(maxKeep);
float fmaxKeep = (float) maxKeep;
RandDataIterator iter1 = new RandDataIterator(totalLength, longseach);
float prob = 0;
Random rand = new Random(seed);
int numAfterMax = 1;
for (long[] data : iter1) {
if (liveSampled.size() < maxKeep) {
liveSampled.add(data);
} else if (liveSampled.size() > maxBucketDepth) {
break;
} else {
//Calculate whether to accept this element
prob = fmaxKeep / (maxKeep + numAfterMax);
numAfterMax += 1;
boolean keep = rand.nextFloat() < prob;
if (keep) {
//Choose one to replace
int torep = rand.nextInt(maxKeep);
liveSampled.remove(torep);
liveSampled.add(data);
}
}
}
return liveSampled;
}
private static List<long[]> downSample(long totalLength, int longseach, long seed, int maxKeep, int maxBucketDepth) {
List<long[]> downSampled = new ArrayList<long[]>(maxKeep);
RandDataIterator iter2 = new RandDataIterator(totalLength, longseach);
Random rand = new Random(seed);
for (long[] data : iter2) {
if (downSampled.size() < maxBucketDepth) {
downSampled.add(data);
} else {
break;
}
}
//Actual downsampling
while (downSampled.size() > maxKeep) {
downSampled.remove(rand.nextInt(downSampled.size()));
}
return downSampled;
}
/**
* Iterator over garbage data.
*/
private static class RandDataIterator implements Iterable<long[]>, Iterator<long[]> {
private long counter;
private long length;
private int longseach;
/**
* @param length Number of elements that this iterator will have
* @param longseach how large each element will be (byte array)
*/
public RandDataIterator(long length, int longseach) {
this.length = length;
this.longseach = longseach;
}
public boolean hasNext() {
return counter < length;
}
public long[] next() {
if (!hasNext()) {
return null;
}
long[] arr = new long[longseach];
Arrays.fill(arr, counter);
counter++;
return arr;
}
public void remove() {
throw new UnsupportedOperationException("Can't remove");
}
public Iterator<long[]> iterator() {
return this;
}
}
}
| |
package org.batfish.representation.cisco_nxos;
import static com.google.common.base.MoreObjects.firstNonNull;
import static java.util.Collections.singletonList;
import static org.batfish.datamodel.Configuration.DEFAULT_VRF_NAME;
import static org.batfish.datamodel.Names.generatedBgpCommonExportPolicyName;
import static org.batfish.datamodel.Names.generatedBgpDefaultRouteExportPolicyName;
import static org.batfish.datamodel.Names.generatedBgpPeerEvpnExportPolicyName;
import static org.batfish.datamodel.Names.generatedBgpPeerEvpnImportPolicyName;
import static org.batfish.datamodel.Names.generatedBgpPeerExportPolicyName;
import static org.batfish.datamodel.Names.generatedBgpPeerImportPolicyName;
import static org.batfish.datamodel.Names.generatedEvpnToBgpv4VrfLeakPolicyName;
import static org.batfish.datamodel.routing_policy.Common.generateSuppressionPolicy;
import static org.batfish.datamodel.routing_policy.statement.Statements.ExitAccept;
import static org.batfish.datamodel.routing_policy.statement.Statements.RemovePrivateAs;
import static org.batfish.representation.cisco_nxos.CiscoNxosConfiguration.BGP_LOCAL_WEIGHT;
import static org.batfish.representation.cisco_nxos.Vrf.MAC_VRF_OFFSET;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.SortedSet;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.ParametersAreNonnullByDefault;
import org.batfish.common.Warnings;
import org.batfish.datamodel.BgpActivePeerConfig;
import org.batfish.datamodel.BgpPassivePeerConfig;
import org.batfish.datamodel.BgpPeerConfig;
import org.batfish.datamodel.BgpProcess;
import org.batfish.datamodel.Bgpv4ToEvpnVrfLeakConfig;
import org.batfish.datamodel.ConcreteInterfaceAddress;
import org.batfish.datamodel.Configuration;
import org.batfish.datamodel.EvpnToBgpv4VrfLeakConfig;
import org.batfish.datamodel.GeneratedRoute;
import org.batfish.datamodel.Interface;
import org.batfish.datamodel.InterfaceType;
import org.batfish.datamodel.Ip;
import org.batfish.datamodel.LineAction;
import org.batfish.datamodel.LongSpace;
import org.batfish.datamodel.OriginType;
import org.batfish.datamodel.Prefix;
import org.batfish.datamodel.RoutingProtocol;
import org.batfish.datamodel.Vrf;
import org.batfish.datamodel.VrfLeakConfig;
import org.batfish.datamodel.bgp.AddressFamilyCapabilities;
import org.batfish.datamodel.bgp.AllowRemoteAsOutMode;
import org.batfish.datamodel.bgp.BgpAggregate;
import org.batfish.datamodel.bgp.EvpnAddressFamily;
import org.batfish.datamodel.bgp.Ipv4UnicastAddressFamily;
import org.batfish.datamodel.bgp.Layer2VniConfig;
import org.batfish.datamodel.bgp.Layer3VniConfig;
import org.batfish.datamodel.bgp.RouteDistinguisher;
import org.batfish.datamodel.bgp.community.ExtendedCommunity;
import org.batfish.datamodel.routing_policy.Common;
import org.batfish.datamodel.routing_policy.RoutingPolicy;
import org.batfish.datamodel.routing_policy.communities.CommunityIs;
import org.batfish.datamodel.routing_policy.communities.HasCommunity;
import org.batfish.datamodel.routing_policy.communities.InputCommunities;
import org.batfish.datamodel.routing_policy.communities.MatchCommunities;
import org.batfish.datamodel.routing_policy.expr.AsnValue;
import org.batfish.datamodel.routing_policy.expr.AutoAs;
import org.batfish.datamodel.routing_policy.expr.BooleanExpr;
import org.batfish.datamodel.routing_policy.expr.BooleanExprs;
import org.batfish.datamodel.routing_policy.expr.CallExpr;
import org.batfish.datamodel.routing_policy.expr.Conjunction;
import org.batfish.datamodel.routing_policy.expr.DestinationNetwork;
import org.batfish.datamodel.routing_policy.expr.LiteralInt;
import org.batfish.datamodel.routing_policy.expr.LiteralLong;
import org.batfish.datamodel.routing_policy.expr.LiteralOrigin;
import org.batfish.datamodel.routing_policy.expr.MatchBgpSessionType;
import org.batfish.datamodel.routing_policy.expr.MatchBgpSessionType.Type;
import org.batfish.datamodel.routing_policy.expr.MatchPrefixSet;
import org.batfish.datamodel.routing_policy.expr.MatchProtocol;
import org.batfish.datamodel.routing_policy.expr.NamedPrefixSet;
import org.batfish.datamodel.routing_policy.expr.Not;
import org.batfish.datamodel.routing_policy.expr.SelfNextHop;
import org.batfish.datamodel.routing_policy.statement.If;
import org.batfish.datamodel.routing_policy.statement.SetDefaultTag;
import org.batfish.datamodel.routing_policy.statement.SetNextHop;
import org.batfish.datamodel.routing_policy.statement.SetOrigin;
import org.batfish.datamodel.routing_policy.statement.SetTag;
import org.batfish.datamodel.routing_policy.statement.SetWeight;
import org.batfish.datamodel.routing_policy.statement.Statement;
import org.batfish.datamodel.routing_policy.statement.Statements;
import org.batfish.datamodel.vxlan.Layer2Vni;
import org.batfish.datamodel.vxlan.Layer3Vni;
import org.batfish.representation.cisco_nxos.BgpVrfL2VpnEvpnAddressFamilyConfiguration.RetainRouteType;
/**
* A utility class for converting between Cisco NX-OS configurations and the Batfish
* vendor-independent {@link org.batfish.datamodel}.
*/
@ParametersAreNonnullByDefault
public final class Conversions {
  /** Matches anything but the IPv4 default route. */
  static final Not NOT_DEFAULT_ROUTE = new Not(Common.matchDefaultRoute());

  // Upper bound on administrative cost/distance values.
  // NOTE(review): usage is outside this chunk — confirm it bounds user-configured admin distance.
  private static final int MAX_ADMINISTRATIVE_COST = 32767;

  // Shared "deny" behavior for route-maps: when evaluated as a callee (CALL_EXPR_CONTEXT), return
  // false to the calling policy instead of terminating; otherwise exit rejecting the route.
  private static final Statement ROUTE_MAP_DENY_STATEMENT =
      new If(
          BooleanExprs.CALL_EXPR_CONTEXT,
          ImmutableList.of(Statements.ReturnFalse.toStaticStatement()),
          ImmutableList.of(Statements.ExitReject.toStaticStatement()));
/**
* Computes the router ID on Cisco NX-OS.
*
* <p>See
* https://www.cisco.com/c/en/us/td/docs/switches/datacenter/sw/nx-os/tech_note/cisco_nxos_ios_ospf_comparison.html
* for a description of the algorithm, which is in practice applied per-VRF.
*/
// See CiscoNxosTest#testRouterId for a test that is verifiable using GNS3.
@Nonnull
static Ip getBgpRouterId(BgpVrfConfiguration vrfConfig, Configuration c, Vrf vrf, Warnings w) {
// If Router ID is configured in the VRF-Specific BGP config, it always wins.
if (vrfConfig.getRouterId() != null) {
return vrfConfig.getRouterId();
}
return inferRouterId(vrf.getName(), c.getAllInterfaces(vrf.getName()), w, "BGP process");
}
  /** Infers router ID on Cisco NX-OS when not configured in a routing process. */
  @Nonnull
  static Ip inferRouterId(
      String vrfName, Map<String, Interface> vrfIfaces, Warnings w, String processDesc) {
    String messageBase =
        String.format(
            "Router-id is not manually configured for %s in VRF %s", processDesc, vrfName);
    // Otherwise, Router ID is defined based on the interfaces in the VRF that have IP addresses.
    // NX-OS does use shutdown interfaces to configure router-id.
    Map<String, org.batfish.datamodel.Interface> interfaceMap =
        vrfIfaces.entrySet().stream()
            .filter(e -> e.getValue().getConcreteAddress() != null)
            .collect(Collectors.toMap(Entry::getKey, Entry::getValue));
    if (interfaceMap.isEmpty()) {
      w.redFlag(
          String.format(
              "%s. Unable to infer default router-id as no interfaces have IP addresses",
              messageBase));
      // With no interfaces in the VRF that have IP addresses, show ip bgp vrf all reports 0.0.0.0
      // as the router ID. Of course, this is not really relevant as no routes will be exchanged.
      return Ip.ZERO;
    }
    // Preference 1: the IP of loopback0, if it exists and has an address.
    org.batfish.datamodel.Interface loopback0 = interfaceMap.get("loopback0");
    if (loopback0 != null) {
      // No need to warn.
      return loopback0.getConcreteAddress().getIp();
    }
    // Preference 2: the "first" loopback interface. Older versions of NX-OS appear to be
    // non-deterministic, newer ones always choose the smallest loopback IP.
    Collection<org.batfish.datamodel.Interface> interfaces = interfaceMap.values();
    Optional<Ip> lowestLoopback =
        interfaces.stream()
            .filter(i -> i.getInterfaceType() == InterfaceType.LOOPBACK)
            .map(org.batfish.datamodel.Interface::getConcreteAddress)
            .map(ConcreteInterfaceAddress::getIp)
            .min(Comparator.naturalOrder());
    if (lowestLoopback.isPresent()) {
      return lowestLoopback.get();
    }
    // Preference 3: the first non-loopback interface defined in the vrf, assuming no loopback
    // addresses with IP address are present in the vrf. Older versions of NX-OS are
    // non-deterministic, newer ones choose the smallest IP.
    Optional<Ip> lowestIp =
        interfaces.stream()
            .map(org.batfish.datamodel.Interface::getConcreteAddress)
            .filter(Objects::nonNull)
            .map(ConcreteInterfaceAddress::getIp)
            .min(Comparator.naturalOrder());
    assert lowestIp.isPresent(); // This cannot happen if interfaces is non-empty.
    return lowestIp.get();
  }
private static boolean isActive(String name, BgpVrfNeighborConfiguration neighbor, Warnings w) {
// Shutdown
if (firstNonNull(neighbor.getShutdown(), Boolean.FALSE)) {
return false;
}
// No active address family that we support.
if (neighbor.getIpv4UnicastAddressFamily() == null
&& neighbor.getIpv6UnicastAddressFamily() == null
&& neighbor.getL2VpnEvpnAddressFamily() == null) {
w.redFlag("No supported address-family configured for " + name);
return false;
}
return true;
}
@VisibleForTesting
public static @Nonnull String generatedAttributeMapName(
long localAs, @Nullable String attributeMap) {
if (attributeMap == null) {
return String.format("~BGP_AGGREGATE_ATTRIBUTE_MAP:%s:%s", localAs, attributeMap);
}
return String.format("~BGP_AGGREGATE_ATTRIBUTE_MAP:%s", localAs);
}
/**
* NX-OS-specific defaults for aggregate routes:
*
* <ul>
* <li>Origin type {@link OriginType#IGP}.
* <li>Tagged with the process ASN (for the generating VRF).
* <li>Weight {@link CiscoNxosConfiguration#BGP_LOCAL_WEIGHT}.
* </ul>
*/
static @Nonnull String generateAttributeMap(
BgpGlobalConfiguration bgpGlobal,
BgpVrfConfiguration bgpVrf,
@Nullable String attributeMap,
Configuration c) {
long localAs = firstNonNull(bgpVrf.getLocalAs(), bgpGlobal.getLocalAs());
String name = generatedAttributeMapName(localAs, attributeMap);
if (c.getRoutingPolicies().containsKey(name)) {
// Already done.
return name;
}
RoutingPolicy.Builder p = RoutingPolicy.builder().setName(name).setOwner(c);
p.addStatement(new SetOrigin(new LiteralOrigin(OriginType.IGP, null)));
p.addStatement(new SetTag(new LiteralLong(localAs)));
p.addStatement(new SetWeight(new LiteralInt(BGP_LOCAL_WEIGHT)));
if (attributeMap != null) {
// Trivial If+CallExpr means ignore any permit/deny action taken by attributeMap.
p.addStatement(new If(new CallExpr(attributeMap), ImmutableList.of()));
}
p.addStatement(ExitAccept.toStaticStatement());
RoutingPolicy rp = p.build();
c.getRoutingPolicies().put(rp.getName(), rp);
return name;
}
static @Nonnull BgpAggregate toBgpAggregate(
Prefix prefix,
BgpGlobalConfiguration bgpGlobal,
BgpVrfConfiguration bgpVrf,
BgpVrfAddressFamilyAggregateNetworkConfiguration vsAggregate,
Configuration c,
Warnings w) {
// TODO: handle as-set
// TODO: handle suppress-map
// TODO: verify undefined route-map can be treated as omitted
String attributeMap = vsAggregate.getAttributeMap();
if (attributeMap != null && !c.getRoutingPolicies().containsKey(attributeMap)) {
w.redFlag(
String.format("Ignoring undefined aggregate-address attribute-map %s", attributeMap));
attributeMap = null;
}
attributeMap = generateAttributeMap(bgpGlobal, bgpVrf, attributeMap, c);
return BgpAggregate.of(
prefix,
generateSuppressionPolicy(vsAggregate.getSummaryOnly(), c),
// TODO: put advertise-map here
null,
attributeMap);
}
@Nonnull
static Map<Ip, BgpActivePeerConfig> getNeighbors(
Configuration c,
CiscoNxosConfiguration vsConfig,
Vrf vrf,
BgpProcess proc,
BgpGlobalConfiguration bgpConfig,
BgpVrfConfiguration bgpVrf,
@Nullable Ip nveIp,
Warnings warnings) {
return bgpVrf.getNeighbors().entrySet().stream()
.peek(e -> e.getValue().doInherit(bgpConfig, warnings))
.filter(e -> isActive(getTextDesc(e.getKey(), vrf), e.getValue(), warnings))
.collect(
ImmutableMap.toImmutableMap(
Entry::getKey,
e ->
(BgpActivePeerConfig)
Conversions.toBgpNeighbor(
c,
vsConfig,
vrf,
proc,
e.getKey().toPrefix(),
bgpConfig,
bgpVrf,
e.getValue(),
false,
nveIp,
warnings)));
}
@Nonnull
static Map<Prefix, BgpPassivePeerConfig> getPassiveNeighbors(
Configuration c,
CiscoNxosConfiguration vsConfig,
Vrf vrf,
BgpProcess proc,
BgpGlobalConfiguration bgpConfig,
BgpVrfConfiguration bgpVrf,
@Nullable Ip nveIp,
Warnings warnings) {
return bgpVrf.getPassiveNeighbors().entrySet().stream()
.peek(e -> e.getValue().doInherit(bgpConfig, warnings))
.filter(e -> isActive(getTextDesc(e.getKey(), vrf), e.getValue(), warnings))
.collect(
ImmutableMap.toImmutableMap(
Entry::getKey,
e ->
(BgpPassivePeerConfig)
Conversions.toBgpNeighbor(
c,
vsConfig,
vrf,
proc,
e.getKey(),
bgpConfig,
bgpVrf,
e.getValue(),
true,
nveIp,
warnings)));
}
@Nullable
private static Ip computeUpdateSource(
String vrfName,
Map<String, org.batfish.datamodel.Interface> vrfInterfaces,
Prefix prefix,
BgpVrfNeighborConfiguration neighbor,
boolean dynamic,
Warnings warnings) {
String updateSourceInterface = neighbor.getUpdateSource();
if (updateSourceInterface != null) {
org.batfish.datamodel.Interface iface = vrfInterfaces.get(updateSourceInterface);
if (iface == null) {
warnings.redFlag(
String.format(
"BGP neighbor %s in vrf %s: configured update-source %s does not exist or "
+ "is not associated with this vrf",
dynamic ? prefix : prefix.getStartIp(), vrfName, updateSourceInterface));
return null;
}
ConcreteInterfaceAddress address = iface.getConcreteAddress();
if (address == null) {
warnings.redFlag(
String.format(
"BGP neighbor %s in vrf %s: configured update-source %s has no IP address",
dynamic ? prefix : prefix.getStartIp(), vrfName, updateSourceInterface));
return null;
}
return address.getIp();
} else if (dynamic) {
return Ip.AUTO;
}
Optional<Ip> firstMatchingInterfaceAddress =
vrfInterfaces.values().stream()
.flatMap(i -> i.getAllConcreteAddresses().stream())
.filter(ia -> ia != null && ia.getPrefix().containsIp(prefix.getStartIp()))
.map(ConcreteInterfaceAddress::getIp)
.findFirst();
if (firstMatchingInterfaceAddress.isPresent()) {
/* TODO: Warn here? Seems like this may be standard practice, e.g., for a /31. */
return firstMatchingInterfaceAddress.get();
}
warnings.redFlag(
String.format(
"BGP neighbor %s in vrf %s: could not determine update source",
prefix.getStartIp(), vrfName));
return null;
}
/**
* Extracts the AS numbers from "match as-number" statements. See
* https://www.cisco.com/c/m/en_us/techdoc/dc/reference/cli/nxos/commands/bgp/match-as-number.html
*/
private static @Nonnull LongSpace extractRouteMapAsns(RouteMap map) {
LongSpace asns = LongSpace.EMPTY;
// Iterate backwards to support permit/deny semantics properly.
for (RouteMapEntry entry : map.getEntries().descendingMap().values()) {
RouteMapMatchAsNumber matchAsn = entry.getMatchAsNumber();
if (matchAsn == null) {
continue;
}
// Note: other match clauses are documented ignored.
if (entry.getAction() == LineAction.PERMIT) {
asns = asns.union(entry.getMatchAsNumber().getAsns());
} else {
asns = asns.difference(entry.getMatchAsNumber().getAsns());
}
}
return asns;
}
  /**
   * Converts the vendor-specific {@code neighbor} into a vendor-independent {@link BgpPeerConfig}:
   * a {@link BgpPassivePeerConfig} when {@code dynamic} (listen-range peer, {@code prefix} is the
   * listen range), else a {@link BgpActivePeerConfig} ({@code prefix} is the peer's /32).
   *
   * <p>Resolves remote/local AS, update source, and — when configured on the neighbor — the IPv4
   * unicast and L2VPN EVPN address families with their import/export policies.
   */
  @Nonnull
  private static BgpPeerConfig toBgpNeighbor(
      Configuration c,
      CiscoNxosConfiguration vsConfig,
      Vrf vrf,
      BgpProcess proc,
      Prefix prefix,
      BgpGlobalConfiguration bgpConfig,
      BgpVrfConfiguration vrfConfig,
      BgpVrfNeighborConfiguration neighbor,
      boolean dynamic,
      @Nullable Ip nveIp,
      Warnings warnings) {
    BgpPeerConfig.Builder<?, ?> newNeighborBuilder;
    if (dynamic) {
      // Passive peer: remote ASNs come from an explicit remote-as, or else from a route-map of
      // permitted/denied AS numbers (see extractRouteMapAsns).
      LongSpace remoteAsns;
      if (neighbor.getRemoteAs() != null) {
        remoteAsns = LongSpace.of(neighbor.getRemoteAs());
      } else {
        remoteAsns =
            Optional.ofNullable(neighbor.getRemoteAsRouteMap())
                .map(vsConfig.getRouteMaps()::get)
                .map(Conversions::extractRouteMapAsns)
                .orElse(LongSpace.EMPTY);
      }
      if (remoteAsns.isEmpty()) {
        warnings.redFlag("No remote-as configured for " + getTextDesc(prefix, vrf));
      }
      newNeighborBuilder =
          BgpPassivePeerConfig.builder().setRemoteAsns(remoteAsns).setPeerPrefix(prefix);
    } else {
      newNeighborBuilder =
          BgpActivePeerConfig.builder()
              .setRemoteAsns(
                  Optional.ofNullable(neighbor.getRemoteAs())
                      .map(LongSpace::of)
                      .orElse(LongSpace.EMPTY))
              .setPeerAddress(prefix.getStartIp());
      // No remote AS set.
      if (neighbor.getRemoteAs() == null) {
        warnings.redFlag("No remote-as configured for " + getTextDesc(prefix.getStartIp(), vrf));
      }
    }
    newNeighborBuilder.setClusterId(
        firstNonNull(vrfConfig.getClusterId(), proc.getRouterId()).asLong());
    newNeighborBuilder.setDescription(neighbor.getDescription());
    // ebgp-multihop is modeled as a boolean: TTL > 1 means multihop is in effect.
    newNeighborBuilder.setEbgpMultihop(firstNonNull(neighbor.getEbgpMultihopTtl(), 0) > 1);
    newNeighborBuilder.setEnforceFirstAs(bgpConfig.getEnforceFirstAs());
    if (neighbor.getInheritPeer() != null) {
      newNeighborBuilder.setGroup(neighbor.getInheritPeer());
    }
    // Local AS precedence: neighbor-level, then VRF-level, then process-level.
    long localAs;
    if (neighbor.getLocalAs() != null) {
      localAs = neighbor.getLocalAs();
    } else if (vrfConfig.getLocalAs() != null) {
      localAs = vrfConfig.getLocalAs();
    } else {
      localAs = bgpConfig.getLocalAs();
    }
    newNeighborBuilder.setLocalAs(localAs);
    newNeighborBuilder.setLocalIp(
        computeUpdateSource(
            vrf.getName(), c.getAllInterfaces(vrf.getName()), prefix, neighbor, dynamic, warnings));
    @Nullable
    BgpVrfNeighborAddressFamilyConfiguration naf4 = neighbor.getIpv4UnicastAddressFamily();
    @Nullable BgpVrfIpv4AddressFamilyConfiguration af4 = vrfConfig.getIpv4UnicastAddressFamily();
    if (naf4 != null) {
      // import policy
      RoutingPolicy importPolicy =
          createNeighborImportPolicy(
              c,
              generatedBgpPeerImportPolicyName(
                  vrf.getName(), dynamic ? prefix.toString() : prefix.getStartIp().toString()),
              naf4,
              warnings);
      // export policy
      RoutingPolicy exportPolicy =
          createExportPolicyFromStatements(
              generatedBgpPeerExportPolicyName(
                  vrf.getName(), dynamic ? prefix.toString() : prefix.getStartIp().toString()),
              getExportStatementsForIpv4(
                  c, naf4, neighbor, newNeighborBuilder, vrf.getName(), warnings),
              c);
      Ipv4UnicastAddressFamily.Builder ipv4FamilyBuilder =
          Ipv4UnicastAddressFamily.builder()
              .setAddressFamilyCapabilities(
                  getAddressFamilyCapabilities(naf4, af4 != null && af4.getSuppressInactive()))
              .setExportPolicy(exportPolicy.getName())
              .setImportPolicy(importPolicy.getName())
              .setRouteReflectorClient(firstNonNull(naf4.getRouteReflectorClient(), Boolean.FALSE));
      newNeighborBuilder.setIpv4UnicastAddressFamily(ipv4FamilyBuilder.build());
    }
    // If neighbor has EVPN configured, set it up.
    @Nullable
    BgpVrfNeighborAddressFamilyConfiguration neighborL2VpnAf = neighbor.getL2VpnEvpnAddressFamily();
    if (neighborL2VpnAf != null) {
      @Nullable
      BgpVrfL2VpnEvpnAddressFamilyConfiguration vrfL2VpnAf = vrfConfig.getL2VpnEvpnAddressFamily();
      EvpnAddressFamily.Builder evpnFamilyBuilder =
          EvpnAddressFamily.builder().setPropagateUnmatched(false).setNveIp(nveIp);
      RoutingPolicy importPolicy =
          createNeighborImportPolicy(
              c,
              generatedBgpPeerEvpnImportPolicyName(
                  vrf.getName(), dynamic ? prefix.toString() : prefix.getStartIp().toString()),
              neighborL2VpnAf,
              warnings);
      evpnFamilyBuilder
          .setAddressFamilyCapabilities(getAddressFamilyCapabilities(neighborL2VpnAf, false))
          .setImportPolicy(importPolicy.getName())
          .setRouteReflectorClient(
              firstNonNull(neighborL2VpnAf.getRouteReflectorClient(), Boolean.FALSE));
      if (vrfL2VpnAf != null) {
        if (vrfL2VpnAf.getRetainMode() == RetainRouteType.ROUTE_MAP) {
          warnings.redFlag("retain route-target is not supported for route-maps");
        } else {
          // Propagate unmatched routes only under "retain route-target all".
          evpnFamilyBuilder.setPropagateUnmatched(
              vrfL2VpnAf.getRetainMode() == RetainRouteType.ALL);
        }
      }
      evpnFamilyBuilder.setL2Vnis(getL2VniConfigs(c, vrf, proc, localAs, vsConfig, warnings));
      evpnFamilyBuilder.setL3Vnis(getL3VniConfigs(c, vrf, proc, localAs, vsConfig, warnings));
      List<Statement> evpnStatements =
          getExportStatementsForEvpn(c, neighborL2VpnAf, neighbor, warnings);
      RoutingPolicy exportPolicy =
          createExportPolicyFromStatements(
              generatedBgpPeerEvpnExportPolicyName(
                  vrf.getName(), dynamic ? prefix.toString() : prefix.getStartIp().toString()),
              evpnStatements,
              c);
      newNeighborBuilder.setEvpnAddressFamily(
          evpnFamilyBuilder.setExportPolicy(exportPolicy.getName()).build());
    }
    return newNeighborBuilder.build();
  }
  /**
   * Builds {@link Layer2VniConfig}s for an EVPN address family: one per L2 VNI of the default VRF
   * that has a MAC-VRF mapping and both import and export route-targets (explicit or auto).
   * Returns an empty set for BGP neighbors in non-default VRFs.
   */
  private static SortedSet<Layer2VniConfig> getL2VniConfigs(
      Configuration c,
      Vrf vrfContainingBgpNeighbor,
      BgpProcess viBgpProcess,
      long localAs,
      CiscoNxosConfiguration vsConfig,
      Warnings warnings) {
    if (!vrfContainingBgpNeighbor.getName().equals(DEFAULT_VRF_NAME)) {
      // TODO: figure out what to do with BGP neighbors in non default tenant VRFs
      return ImmutableSortedSet.of();
    }
    ImmutableSortedSet.Builder<Layer2VniConfig> layer2Vnis = ImmutableSortedSet.naturalOrder();
    // looping over all VRFs in VI configuration so we can get all VNI settings which were valid and
    // mapped to some VRF (including the default VRF)
    for (Layer2Vni l2Vni : c.getDefaultVrf().getLayer2Vnis().values()) {
      // Skip VNIs with no VLAN mapping (no derivable MAC-VRF ID).
      Integer macVrfId = getMacVrfIdForL2Vni(vsConfig, l2Vni.getVni());
      if (macVrfId == null) {
        continue;
      }
      // Skip VNIs not declared under the "evpn" stanza.
      EvpnVni evpnVni =
          Optional.ofNullable(vsConfig.getEvpn())
              .map(evpn -> evpn.getVni(l2Vni.getVni()))
              .orElse(null);
      if (evpnVni == null) {
        continue;
      }
      ExtendedCommunityOrAuto exportRtOrAuto = evpnVni.getExportRt();
      if (exportRtOrAuto == null) {
        // export route target is not present as auto and neither is user-defined, no L2 routes
        // (MAC-routes)
        // will be exported for hosts in this VNI. Assuming this to be an invalid EVPN
        // configuration
        // for lack of explicit doc from Cisco
        warnings.redFlag(
            String.format(
                "No export route-target defined for L2 VNI '%s', no L2 routes will be exported",
                l2Vni.getVni()));
        continue;
      }
      ExtendedCommunityOrAuto importRtOrAuto = evpnVni.getImportRt();
      if (importRtOrAuto == null) {
        // import route target is not present as auto and neither is user-defined, no L2 routes
        // (MAC-routes)
        // will be imported for this VNI. Assuming this to be an invalid EVPN configuration for
        // lack
        // of explicit doc from Cisco
        warnings.redFlag(
            String.format(
                "No import route-target defined for L2 VNI '%s', no L2 routes will be imported",
                l2Vni.getVni()));
        continue;
      }
      // Explicit route distinguisher, or null to fall back to router-id:mac-vrf-id below.
      RouteDistinguisher rd =
          Optional.ofNullable(evpnVni.getRd())
              .map(RouteDistinguisherOrAuto::getRouteDistinguisher)
              .orElse(null);
      layer2Vnis.add(
          Layer2VniConfig.builder()
              .setVni(l2Vni.getVni())
              .setVrf(DEFAULT_VRF_NAME)
              .setRouteDistinguisher(
                  firstNonNull(rd, RouteDistinguisher.from(viBgpProcess.getRouterId(), macVrfId)))
              .setImportRouteTarget(
                  importRtOrAuto.isAuto()
                      ? toRouteTarget(localAs, l2Vni.getVni()).matchString()
                      : importRtOrAuto.getExtendedCommunity().matchString())
              .setRouteTarget(
                  exportRtOrAuto.isAuto()
                      ? toRouteTarget(localAs, l2Vni.getVni())
                      : exportRtOrAuto.getExtendedCommunity())
              .build());
    }
    return layer2Vnis.build();
  }
  /**
   * Builds {@link Layer3VniConfig}s for an EVPN address family: one per L3 VNI of any tenant VRF
   * whose vendor-specific VRF exists, has an IPv4 unicast address family, and declares both import
   * and export EVPN route-targets (explicit or auto). Returns an empty set for BGP neighbors in
   * non-default VRFs.
   */
  private static SortedSet<Layer3VniConfig> getL3VniConfigs(
      Configuration c,
      Vrf vrfContainingBgpNeighbor,
      BgpProcess viBgpProcess,
      long localAs,
      CiscoNxosConfiguration vsConfig,
      Warnings warnings) {
    if (!vrfContainingBgpNeighbor.getName().equals(DEFAULT_VRF_NAME)) {
      // TODO: figure out what to do with tenant VRFs
      return ImmutableSortedSet.of();
    }
    ImmutableSortedSet.Builder<Layer3VniConfig> layer3Vnis = ImmutableSortedSet.naturalOrder();
    // looping over all VRFs in VI configuration so we can get all VNI settings which were valid and
    // mapped to some VRF (including the default VRF)
    for (Vrf tenantVrf : c.getVrfs().values()) {
      for (Layer3Vni l3Vni : tenantVrf.getLayer3Vnis().values()) {
        org.batfish.representation.cisco_nxos.Vrf vsTenantVrfForL3Vni =
            getVrfForL3Vni(vsConfig.getVrfs(), l3Vni.getVni());
        // there should be a tenant VRF for this VNI and that VRF should have an IPv4 AF
        // (other being IPv6 which we do not support); if not true then skip this VNI
        if (vsTenantVrfForL3Vni == null
            || !vsTenantVrfForL3Vni.getAddressFamilies().containsKey(AddressFamily.IPV4_UNICAST)) {
          continue;
        }
        // Explicit route distinguisher, or null to fall back to router-id:vrf-id below.
        RouteDistinguisher rd =
            Optional.ofNullable(vsTenantVrfForL3Vni.getRd())
                .map(RouteDistinguisherOrAuto::getRouteDistinguisher)
                .orElse(null);
        ExtendedCommunityOrAuto exportRtOrAuto =
            vsTenantVrfForL3Vni
                .getAddressFamilies()
                .get(AddressFamily.IPV4_UNICAST)
                .getExportRtEvpn();
        if (exportRtOrAuto == null) {
          // export route target is not present as auto and neither is user-defined, no L3 routes
          // (IP-routes)
          // will be exported from this VRF. Assuming this to be an invalid L3 VNI configuration
          // for lack of explicit doc from Cisco. (Cisco auto-generates it in common cases)
          warnings.redFlag(
              String.format(
                  "No export route-target defined for L3 VNI '%s', no L3 routes will be exported",
                  l3Vni.getVni()));
          continue;
        }
        ExtendedCommunityOrAuto importRtOrAuto =
            vsTenantVrfForL3Vni
                .getAddressFamilies()
                .get(AddressFamily.IPV4_UNICAST)
                .getImportRtEvpn();
        if (importRtOrAuto == null) {
          // import route target is not present as auto and neither is user-defined, no L3 routes
          // (IP-routes)
          // will be imported into this VRF. Assuming this to be an invalid L3 VNI configuration
          // for lack of explicit doc from Cisco. (Cisco auto-generates it in common cases)
          warnings.redFlag(
              String.format(
                  "No import route-target defined for L3 VNI '%s', no L3 routes will be imported",
                  l3Vni.getVni()));
          continue;
        }
        layer3Vnis.add(
            Layer3VniConfig.builder()
                .setVni(l3Vni.getVni())
                .setVrf(tenantVrf.getName())
                .setImportRouteTarget(
                    importRtOrAuto.isAuto()
                        ? toRouteTarget(localAs, l3Vni.getVni()).matchString()
                        : importRtOrAuto.getExtendedCommunity().matchString())
                .setRouteDistinguisher(
                    firstNonNull(
                        rd,
                        RouteDistinguisher.from(
                            viBgpProcess.getRouterId(), vsTenantVrfForL3Vni.getId())))
                .setRouteTarget(
                    exportRtOrAuto.isAuto()
                        ? toRouteTarget(localAs, l3Vni.getVni())
                        : exportRtOrAuto.getExtendedCommunity())
                // NXOS advertises EVPN type-5 always
                .setAdvertiseV4Unicast(true)
                .build());
      }
    }
    return layer3Vnis.build();
  }
/**
* Gets the MAC-VRF ID for the supplied L2 VNI as per
* https://www.cisco.com/c/en/us/td/docs/switches/datacenter/nexus9000/sw/7-x/vxlan/configuration/guide/b_Cisco_Nexus_9000_Series_NX-OS_VXLAN_Configuration_Guide_7x/b_Cisco_Nexus_9000_Series_NX-OS_VXLAN_Configuration_Guide_7x_chapter_0100.html
*/
@Nullable
private static Integer getMacVrfIdForL2Vni(CiscoNxosConfiguration vsConfig, Integer l2Vni) {
Integer vlanNumber =
vsConfig.getVlans().values().stream()
.filter(vlan -> l2Vni.equals(vlan.getVni()))
.findFirst()
.map(Vlan::getId)
.orElse(null);
if (vlanNumber == null) {
return null;
}
return MAC_VRF_OFFSET + vlanNumber;
}
/** Get the tenant VRF associated with a L3 VNI */
@Nullable
static org.batfish.representation.cisco_nxos.Vrf getVrfForL3Vni(
Map<String, org.batfish.representation.cisco_nxos.Vrf> vrfs, int vni) {
return vrfs.values().stream()
.filter(vrf -> vrf.getVni() != null && vrf.getVni() == vni)
.findFirst()
.orElse(null);
}
/**
* Convert AS number and VNI to an extended route target community as per type 0 route
* distinguisher standard (2byte : 4 byte). So, converts AS number to 2 byte and uses VNI as it is
* since it is already 3 bytes.
*
* <p>See <a
* href="https://www.cisco.com/c/en/us/td/docs/switches/datacenter/nexus9000/sw/7-x/vxlan/configuration/guide/b_Cisco_Nexus_9000_Series_NX-OS_VXLAN_Configuration_Guide_7x/b_Cisco_Nexus_9000_Series_NX-OS_VXLAN_Configuration_Guide_7x_chapter_0100.html">
* Cisco NX-OS documentation</a> for detailed explanation.
*/
@VisibleForTesting
static @Nonnull ExtendedCommunity toRouteTarget(long asn, long vni) {
return ExtendedCommunity.target(asn & 0xFFFFL, vni);
}
/**
* Create and return an export policy from a list of statements. The policy is auto-added to the
* given {@link Configuration}.
*/
private static RoutingPolicy createExportPolicyFromStatements(
String policyName, List<Statement> statements, Configuration configuration) {
return RoutingPolicy.builder()
.setOwner(configuration)
.setName(policyName)
.setStatements(statements)
.build();
}
/**
* Create and return an import policy for the given address family. The policy is auto-added to
* the given {@link Configuration}.
*/
private static RoutingPolicy createNeighborImportPolicy(
Configuration c, String policyName, BgpVrfNeighborAddressFamilyConfiguration af, Warnings w) {
RoutingPolicy.Builder ret = RoutingPolicy.builder().setOwner(c).setName(policyName);
String routeMap = af.getInboundRouteMap();
String prefixList = af.getInboundPrefixList();
// Use inbound route-map or prefix-list if set, preferring route-map for now
if (routeMap != null) {
ret.addStatement(Statements.SetWriteIntermediateBgpAttributes.toStaticStatement());
ret.addStatement(
new If(
new CallExpr(routeMapOrRejectAll(af.getInboundRouteMap(), c)),
ImmutableList.of(
Statements.SetReadIntermediateBgpAttributes.toStaticStatement(),
new SetDefaultTag(AsnValue.of(AutoAs.instance())),
Statements.ExitAccept.toStaticStatement())));
ret.addStatement(Statements.ExitReject.toStaticStatement());
// TODO Support using multiple filters in import policies
if (prefixList != null) {
w.redFlag(
"Batfish does not support configuring more than one filter"
+ " (route-map/prefix-list) for incoming BGP routes. When this occurs,"
+ " only the route-map will be used, or the prefix-list if no route-map is"
+ " configured.");
}
} else if (prefixList != null) {
ret.addStatement(new SetTag(AsnValue.of(AutoAs.instance())));
ret.addStatement(getPrefixListStatement(c, prefixList));
} else {
// Accept everything if neither is set
ret.addStatement(new SetTag(AsnValue.of(AutoAs.instance())));
ret.addStatement(Statements.ExitAccept.toStaticStatement());
}
return ret.build();
}
/**
* Get the statement for the specified prefix-list used as a destination-network filter for BGP
* routes. If the prefix-list is undefined, the statement will simply accept all destination
* networks.
*/
private static Statement getPrefixListStatement(Configuration c, String prefixList) {
// An undefined prefix-list is treated as matching everything in this context
if (!c.getRouteFilterLists().containsKey(prefixList)) {
return Statements.ExitAccept.toStaticStatement();
}
return new If(
new MatchPrefixSet(DestinationNetwork.instance(), new NamedPrefixSet(prefixList)),
ImmutableList.of(Statements.ExitAccept.toStaticStatement()),
ImmutableList.of(Statements.ExitReject.toStaticStatement()));
}
/** Get address family capabilities for IPv4 and L2VPN address families */
private static AddressFamilyCapabilities getAddressFamilyCapabilities(
BgpVrfNeighborAddressFamilyConfiguration naf, boolean inheritedSupressInactive) {
return AddressFamilyCapabilities.builder()
.setAdvertiseInactive(!firstNonNull(naf.getSuppressInactive(), inheritedSupressInactive))
.setAllowLocalAsIn(firstNonNull(naf.getAllowAsIn(), 0) > 0)
.setAllowRemoteAsOut(
firstNonNull(naf.getDisablePeerAsCheck(), Boolean.FALSE)
? AllowRemoteAsOutMode.ALWAYS
: AllowRemoteAsOutMode.EXCEPT_FIRST)
.setSendCommunity(firstNonNull(naf.getSendCommunityStandard(), Boolean.FALSE))
.setSendExtendedCommunity(firstNonNull(naf.getSendCommunityExtended(), Boolean.FALSE))
.build();
}
/**
* Implements the NX-OS behavior for undefined route-maps when used in BGP import/export policies.
*
* <p>Always returns {@code null} when given a null {@code mapName}, and non-null otherwise.
*/
private static @Nullable String routeMapOrRejectAll(@Nullable String mapName, Configuration c) {
if (mapName == null || c.getRoutingPolicies().containsKey(mapName)) {
return mapName;
}
String undefinedName = mapName + "~undefined";
if (!c.getRoutingPolicies().containsKey(undefinedName)) {
// For undefined route-map, generate a route-map that denies everything.
RoutingPolicy.builder()
.setName(undefinedName)
.addStatement(ROUTE_MAP_DENY_STATEMENT)
.setOwner(c)
.build();
}
return undefinedName;
}
/** Get export statements for EVPN address family */
private static List<Statement> getExportStatementsForEvpn(
Configuration configuration,
BgpVrfNeighborAddressFamilyConfiguration naf,
BgpVrfNeighborConfiguration neighbor,
Warnings w) {
ImmutableList.Builder<Statement> statementsBuilder = ImmutableList.builder();
if (neighbor.getRemovePrivateAs() != null) {
statementsBuilder.add(RemovePrivateAs.toStaticStatement());
}
// Peer-specific export policy
Conjunction peerExportGuard = new Conjunction();
// Always export BGP or IBGP routes
List<BooleanExpr> peerExportConditions = peerExportGuard.getConjuncts();
peerExportConditions.add(new MatchProtocol(RoutingProtocol.BGP, RoutingProtocol.IBGP));
String outboundMap = naf.getOutboundRouteMap();
String outboundPrefixList = naf.getOutboundPrefixList();
// Export policy generated for outbound route-map (if any)
if (outboundMap != null) {
peerExportConditions.add(new CallExpr(routeMapOrRejectAll(outboundMap, configuration)));
// TODO Support using multiple filters in import policies
if (outboundPrefixList != null) {
w.redFlag(
"Batfish does not support configuring more than one filter"
+ " (route-map/prefix-list) for outgoing BGP routes. When this occurs,"
+ " only the route-map will be used.");
}
} else if (outboundPrefixList != null) {
statementsBuilder.add(getPrefixListStatement(configuration, outboundPrefixList));
}
return statementsBuilder
.add(
new If(
"peer-export policy main conditional: exitAccept if true / exitReject if false",
peerExportGuard,
ImmutableList.of(Statements.ExitAccept.toStaticStatement()),
ImmutableList.of(Statements.ExitReject.toStaticStatement())))
.build();
}
  /**
   * Get export statements for IPv4 address family.
   *
   * <p>Side effect: when default-originate is configured, attaches a generated default route to
   * {@code newNeighborBuilder} and ensures the shared default-route export policy exists on
   * {@code configuration}.
   */
  private static List<Statement> getExportStatementsForIpv4(
      Configuration configuration,
      BgpVrfNeighborAddressFamilyConfiguration naf,
      BgpVrfNeighborConfiguration neighbor,
      BgpPeerConfig.Builder<?, ?> newNeighborBuilder,
      String vrfName,
      Warnings w) {
    ImmutableList.Builder<Statement> statementsBuilder = ImmutableList.builder();
    // Next Hop Self
    if (firstNonNull(naf.getNextHopSelf(), Boolean.FALSE)) {
      Statement nextHopSelf = new SetNextHop(SelfNextHop.getInstance());
      if (firstNonNull(naf.getRouteReflectorClient(), Boolean.FALSE)) {
        // When route-reflector-client is set, this statement does not apply to reflected IBGP
        // routes
        nextHopSelf =
            new If(
                new Conjunction(
                    ImmutableList.of(
                        new MatchBgpSessionType(Type.IBGP),
                        new MatchProtocol(RoutingProtocol.IBGP))),
                ImmutableList.of(),
                ImmutableList.of(nextHopSelf));
      }
      statementsBuilder.add(nextHopSelf);
    }
    if (neighbor.getRemovePrivateAs() != null) {
      // TODO(handle different types of RemovePrivateAs)
      statementsBuilder.add(RemovePrivateAs.toStaticStatement());
    }
    // If defaultOriginate is set, generate route and default route export policy. Default route
    // will match this policy and get exported without going through the rest of the export policy.
    // TODO Verify that nextHopSelf and removePrivateAs settings apply to default-originate route.
    if (firstNonNull(naf.getDefaultOriginate(), Boolean.FALSE)) {
      initBgpDefaultRouteExportPolicy(configuration);
      statementsBuilder.add(
          new If(
              "Export default route from peer with default-originate configured",
              new CallExpr(generatedBgpDefaultRouteExportPolicyName(true)),
              singletonList(Statements.ReturnTrue.toStaticStatement()),
              ImmutableList.of()));
      GeneratedRoute defaultRoute =
          GeneratedRoute.builder()
              .setNetwork(Prefix.ZERO)
              .setAdmin(MAX_ADMINISTRATIVE_COST)
              .setGenerationPolicy(naf.getDefaultOriginateMap())
              .build();
      newNeighborBuilder.setGeneratedRoutes(ImmutableSet.of(defaultRoute));
    }
    // Peer-specific export policy, after matching default-originate route.
    Conjunction peerExportGuard = new Conjunction();
    // Common BGP export policy
    List<BooleanExpr> peerExportConditions = peerExportGuard.getConjuncts();
    peerExportConditions.add(new CallExpr(generatedBgpCommonExportPolicyName(vrfName)));
    String outboundMap = naf.getOutboundRouteMap();
    String outboundPrefixList = naf.getOutboundPrefixList();
    // Export policy generated for route-map (if any)
    if (outboundMap != null) {
      peerExportConditions.add(new CallExpr(routeMapOrRejectAll(outboundMap, configuration)));
      // TODO Support using multiple filters in import policies
      if (outboundPrefixList != null) {
        w.redFlag(
            "Batfish does not support configuring more than one filter"
                + " (route-map/prefix-list) for outgoing BGP routes. When this occurs,"
                + " only the route-map will be used.");
      }
    } else if (outboundPrefixList != null) {
      statementsBuilder.add(getPrefixListStatement(configuration, outboundPrefixList));
    }
    return statementsBuilder
        .add(
            new If(
                "peer-export policy main conditional: exitAccept if true / exitReject if false",
                peerExportGuard,
                ImmutableList.of(Statements.ExitAccept.toStaticStatement()),
                ImmutableList.of(Statements.ExitReject.toStaticStatement())))
        .build();
  }
/**
* Initializes export policy for default routes if it doesn't already exist. This policy is the
* same across BGP processes, so only one is created for each configuration.
*/
static void initBgpDefaultRouteExportPolicy(Configuration c) {
String defaultRouteExportPolicyName = generatedBgpDefaultRouteExportPolicyName(true);
if (!c.getRoutingPolicies().containsKey(defaultRouteExportPolicyName)) {
RoutingPolicy.builder()
.setOwner(c)
.setName(defaultRouteExportPolicyName)
.addStatement(
new If(
new Conjunction(
ImmutableList.of(
Common.matchDefaultRoute(),
new MatchProtocol(RoutingProtocol.AGGREGATE))),
ImmutableList.of(
new SetOrigin(new LiteralOrigin(OriginType.IGP, null)),
Statements.ReturnTrue.toStaticStatement())))
.addStatement(Statements.ReturnFalse.toStaticStatement())
.build();
}
}
  /**
   * Sets up BGPv4&lt;=&gt;EVPN route leaking between the given tenant VRF and the default VRF,
   * based on the VRF's EVPN import/export route-targets. No-op when BGP is absent, or when the
   * VRF lacks a route distinguisher or a VNI.
   */
  static void convertBgpLeakConfigs(
      org.batfish.representation.cisco_nxos.Vrf vrf,
      Vrf newVrf,
      BgpGlobalConfiguration bgpGlobalConfig,
      @Nullable BgpProcess newDefaultVrfBgpProcess,
      Configuration c) {
    // TODO: handle v4<=>v4 leaking
    long localAs = bgpGlobalConfig.getLocalAs();
    if (localAs == 0 || newDefaultVrfBgpProcess == null) {
      // no BGP process at all or for this VRF
      return;
    }
    // NOTE(review): assumes getAddressFamily never returns null for IPV4_UNICAST here; a null
    // return would NPE on the next two lines — confirm against the Vrf representation.
    VrfAddressFamily ipv4af = vrf.getAddressFamily(AddressFamily.IPV4_UNICAST);
    ExtendedCommunityOrAuto exportRtOrAuto = ipv4af.getExportRtEvpn();
    ExtendedCommunityOrAuto importRtOrAuto = ipv4af.getImportRtEvpn();
    RouteDistinguisherOrAuto rdOrAuto = vrf.getRd();
    if (rdOrAuto == null) {
      // cannot export nor import without a route distinguisher
      return;
    }
    Integer vni = vrf.getVni();
    if (vni == null) {
      // cannot leak to/from evpn rib without a vni
      return;
    }
    RouteDistinguisher rd =
        RouteDistinguisher.from(newDefaultVrfBgpProcess.getRouterId(), vrf.getId());
    if (exportRtOrAuto != null) {
      // This VRF exports BGPv4 into default VRF's EVPN. Create VRF leak config for default VRF
      ExtendedCommunity exportRt =
          exportRtOrAuto.isAuto()
              ? toRouteTarget(localAs, vni)
              : exportRtOrAuto.getExtendedCommunity();
      Bgpv4ToEvpnVrfLeakConfig leakConfig =
          Bgpv4ToEvpnVrfLeakConfig.builder()
              .setImportFromVrf(vrf.getName())
              .setSrcVrfRouteDistinguisher(rd)
              .setAttachRouteTargets(exportRt)
              .build();
      org.batfish.datamodel.Vrf defaultVrf = c.getVrfs().get(DEFAULT_VRF_NAME);
      getOrInitVrfLeakConfig(defaultVrf).addBgpv4ToEvpnVrfLeakConfig(leakConfig);
    }
    if (importRtOrAuto != null) {
      // This VRF imports default VRF's EVPN into its BGPv4. Create VRF leak config for it
      ExtendedCommunity importRt =
          importRtOrAuto.isAuto()
              ? toRouteTarget(localAs, vni)
              : importRtOrAuto.getExtendedCommunity();
      assert importRt != null;
      RoutingPolicy importPolicy =
          RoutingPolicy.builder()
              .setOwner(c)
              .setName(generatedEvpnToBgpv4VrfLeakPolicyName(vrf.getName()))
              .addStatement(
                  // Only import EVPN routes that match this VRF's import route target
                  new If(
                      new MatchCommunities(
                          InputCommunities.instance(), new HasCommunity(new CommunityIs(importRt))),
                      ImmutableList.of(Statements.ReturnTrue.toStaticStatement())))
              .addStatement(Statements.ReturnFalse.toStaticStatement())
              .build();
      getOrInitVrfLeakConfig(newVrf)
          .addEvpnToBgpv4VrfLeakConfig(
              EvpnToBgpv4VrfLeakConfig.builder()
                  .setImportFromVrf(DEFAULT_VRF_NAME)
                  .setImportPolicy(importPolicy.getName())
                  .build());
    }
  }
private static VrfLeakConfig getOrInitVrfLeakConfig(Vrf vrf) {
if (vrf.getVrfLeakConfig() == null) {
vrf.setVrfLeakConfig(new VrfLeakConfig(true));
}
return vrf.getVrfLeakConfig();
}
  /** Human-readable description of an active BGP neighbor, used in warning messages. */
  private static String getTextDesc(Ip ip, Vrf v) {
    return String.format("BGP neighbor %s in vrf %s", ip.toString(), v.getName());
  }
  /** Human-readable description of a passive (listen-range) BGP neighbor, used in warnings. */
  private static String getTextDesc(Prefix prefix, Vrf v) {
    return String.format("BGP neighbor %s in vrf %s", prefix.toString(), v.getName());
  }
  private Conversions() {} // Prevent instantiation of this utility class.
}
| |
package org.hisp.dhis.validation;
/*
* Copyright (c) 2004-2018, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.google.common.collect.Sets;
import org.hisp.dhis.category.CategoryCombo;
import org.hisp.dhis.category.CategoryOptionCombo;
import org.hisp.dhis.common.IdentifiableObjectStore;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.DataSetElement;
import org.hisp.dhis.expression.Expression;
import org.hisp.dhis.expression.ExpressionService;
import org.springframework.transaction.annotation.Transactional;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @author Margrethe Store
* @author Lars Helge Overland
* @author Jim Grace
*/
@Transactional
public class DefaultValidationRuleService
    implements ValidationRuleService
{
    // -------------------------------------------------------------------------
    // Dependencies (setter-injected)
    // -------------------------------------------------------------------------

    private ValidationRuleStore validationRuleStore;

    public void setValidationRuleStore( ValidationRuleStore validationRuleStore )
    {
        this.validationRuleStore = validationRuleStore;
    }

    private IdentifiableObjectStore<ValidationRuleGroup> validationRuleGroupStore;

    public void setValidationRuleGroupStore( IdentifiableObjectStore<ValidationRuleGroup> validationRuleGroupStore )
    {
        this.validationRuleGroupStore = validationRuleGroupStore;
    }

    private ExpressionService expressionService;

    public void setExpressionService( ExpressionService expressionService )
    {
        this.expressionService = expressionService;
    }

    // -------------------------------------------------------------------------
    // ValidationRule CRUD operations (thin delegation to the store)
    // -------------------------------------------------------------------------

    /**
     * Persists the given validation rule.
     *
     * @param validationRule the rule to save.
     * @return the database identifier of the saved rule.
     */
    @Override
    public int saveValidationRule( ValidationRule validationRule )
    {
        validationRuleStore.save( validationRule );
        return validationRule.getId();
    }

    /** Updates the given, already persisted validation rule. */
    @Override
    public void updateValidationRule( ValidationRule validationRule )
    {
        validationRuleStore.update( validationRule );
    }

    /** Deletes the given validation rule. */
    @Override
    public void deleteValidationRule( ValidationRule validationRule )
    {
        validationRuleStore.delete( validationRule );
    }

    /** Returns the validation rule with the given database identifier. */
    @Override
    public ValidationRule getValidationRule( int id )
    {
        return validationRuleStore.get( id );
    }

    /** Returns the validation rule with the given UID. */
    @Override
    public ValidationRule getValidationRule( String uid )
    {
        return validationRuleStore.getByUid( uid );
    }

    /** Returns the validation rule with the given name. */
    @Override
    public ValidationRule getValidationRuleByName( String name )
    {
        return validationRuleStore.getByName( name );
    }

    /** Returns all validation rules. */
    @Override
    public List<ValidationRule> getAllValidationRules()
    {
        return validationRuleStore.getAll();
    }

    /** Returns all form validation rules (delegates to the store's query). */
    @Override
    public List<ValidationRule> getAllFormValidationRules()
    {
        return validationRuleStore.getAllFormValidationRules();
    }

    /** Returns the total number of validation rules. */
    @Override
    public int getValidationRuleCount()
    {
        return validationRuleStore.getCount();
    }

    /** Returns the number of validation rules whose name matches the given filter. */
    @Override
    public int getValidationRuleCountByName( String name )
    {
        return validationRuleStore.getCountLikeName( name );
    }

    /** Returns a page of validation rules ordered by name. */
    @Override
    public List<ValidationRule> getValidationRulesBetween( int first, int max )
    {
        return validationRuleStore.getAllOrderedName( first, max );
    }

    /** Returns a page of validation rules whose name matches the given filter. */
    @Override
    public List<ValidationRule> getValidationRulesBetweenByName( String name, int first, int max )
    {
        return validationRuleStore.getAllLikeName( name, first, max );
    }

    /**
     * Returns the form validation rules whose left or right side expression
     * references at least one data element (or data element + option combo
     * operand) belonging to the given data set.
     *
     * @param dataSet the data set.
     * @return the matching validation rules.
     */
    @Override
    public Collection<ValidationRule> getValidationRulesForDataSet( DataSet dataSet )
    {
        // Collect every "elementUid" and "elementUid.optionComboUid" string
        // that an expression could use to reference this data set's content.
        Set<String> elementsAndOptionCombos = new HashSet<>();

        for ( DataSetElement dataSetElement : dataSet.getDataSetElements() )
        {
            DataElement dataElement = dataSetElement.getDataElement();

            elementsAndOptionCombos.add( dataElement.getUid() );

            // The data set element's category combo (if set) overrides the
            // data element's own category combo.
            CategoryCombo catCombo = dataSetElement.hasCategoryCombo()
                ? dataSetElement.getCategoryCombo()
                : dataElement.getCategoryCombo();

            for ( CategoryOptionCombo optionCombo : catCombo.getOptionCombos() )
            {
                elementsAndOptionCombos.add( dataElement.getUid() + Expression.SEPARATOR + optionCombo.getUid() );
            }
        }

        // Keep every rule whose either side intersects the collected operands.
        Set<ValidationRule> rulesForDataSet = new HashSet<>();

        for ( ValidationRule rule : getAllFormValidationRules() )
        {
            if ( !Sets.intersection( expressionService.getElementsAndOptionCombosInExpression( rule.getLeftSide().getExpression() ), elementsAndOptionCombos ).isEmpty() ||
                !Sets.intersection( expressionService.getElementsAndOptionCombosInExpression( rule.getRightSide().getExpression() ), elementsAndOptionCombos ).isEmpty() )
            {
                rulesForDataSet.add( rule );
            }
        }

        return rulesForDataSet;
    }

    /**
     * Returns the union of data elements referenced by the left and right
     * side expressions of the given validation rule.
     */
    @Override
    public Set<DataElement> getDataElements( ValidationRule validationRule )
    {
        Set<DataElement> elements = new HashSet<>();
        elements.addAll( expressionService.getDataElementsInExpression( validationRule.getLeftSide().getExpression() ) );
        elements.addAll( expressionService.getDataElementsInExpression( validationRule.getRightSide().getExpression() ) );
        return elements;
    }

    /** Returns the validation rules which have notification templates. */
    @Override
    public List<ValidationRule> getValidationRulesWithNotificationTemplates()
    {
        return validationRuleStore.getValidationRulesWithNotificationTemplates();
    }

    // -------------------------------------------------------------------------
    // ValidationRuleGroup CRUD operations (thin delegation to the store)
    // -------------------------------------------------------------------------

    /**
     * Persists the given validation rule group.
     *
     * @param validationRuleGroup the group to save.
     * @return the database identifier of the saved group.
     */
    @Override
    public int addValidationRuleGroup( ValidationRuleGroup validationRuleGroup )
    {
        validationRuleGroupStore.save( validationRuleGroup );
        return validationRuleGroup.getId();
    }

    /** Deletes the given validation rule group. */
    @Override
    public void deleteValidationRuleGroup( ValidationRuleGroup validationRuleGroup )
    {
        validationRuleGroupStore.delete( validationRuleGroup );
    }

    /** Updates the given, already persisted validation rule group. */
    @Override
    public void updateValidationRuleGroup( ValidationRuleGroup validationRuleGroup )
    {
        validationRuleGroupStore.update( validationRuleGroup );
    }

    /** Returns the validation rule group with the given database identifier. */
    @Override
    public ValidationRuleGroup getValidationRuleGroup( int id )
    {
        return validationRuleGroupStore.get( id );
    }

    /** Returns the validation rule group with the given UID. */
    @Override
    public ValidationRuleGroup getValidationRuleGroup( String uid )
    {
        return validationRuleGroupStore.getByUid( uid );
    }

    /** Returns all validation rule groups. */
    @Override
    public List<ValidationRuleGroup> getAllValidationRuleGroups()
    {
        return validationRuleGroupStore.getAll();
    }

    /** Returns the validation rule group with the given name. */
    @Override
    public ValidationRuleGroup getValidationRuleGroupByName( String name )
    {
        return validationRuleGroupStore.getByName( name );
    }

    /** Returns the total number of validation rule groups. */
    @Override
    public int getValidationRuleGroupCount()
    {
        return validationRuleGroupStore.getCount();
    }

    /** Returns the number of groups whose name matches the given filter. */
    @Override
    public int getValidationRuleGroupCountByName( String name )
    {
        return validationRuleGroupStore.getCountLikeName( name );
    }

    /** Returns a page of validation rule groups ordered by name. */
    @Override
    public List<ValidationRuleGroup> getValidationRuleGroupsBetween( int first, int max )
    {
        return validationRuleGroupStore.getAllOrderedName( first, max );
    }

    /** Returns a page of groups whose name matches the given filter. */
    @Override
    public List<ValidationRuleGroup> getValidationRuleGroupsBetweenByName( String name, int first, int max )
    {
        return validationRuleGroupStore.getAllLikeName( name, first, max );
    }
}
| |
package org.mtransit.android.commons.provider;
import android.annotation.SuppressLint;
import android.content.ContentValues;
import android.content.Context;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.net.Uri;
import android.text.TextUtils;
import android.util.SparseArray;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import org.json.JSONArray;
import org.json.JSONObject;
import org.mtransit.android.commons.ArrayUtils;
import org.mtransit.android.commons.FileUtils;
import org.mtransit.android.commons.MTLog;
import org.mtransit.android.commons.NetworkUtils;
import org.mtransit.android.commons.R;
import org.mtransit.android.commons.SqlUtils;
import org.mtransit.android.commons.StringUtils;
import org.mtransit.android.commons.TimeUtils;
import org.mtransit.android.commons.UriUtils;
import org.mtransit.android.commons.data.POI;
import org.mtransit.android.commons.data.POIStatus;
import org.mtransit.android.commons.data.RouteTripStop;
import org.mtransit.android.commons.data.Schedule;
import org.mtransit.android.commons.data.Trip;
import org.mtransit.commons.CharUtils;
import org.mtransit.commons.CleanUtils;
import java.io.BufferedWriter;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.SocketException;
import java.net.URL;
import java.net.URLConnection;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Locale;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
@SuppressLint("Registered")
public class CaEdmontonProvider extends MTContentProvider implements StatusProviderContract {
private static final String LOG_TAG = CaEdmontonProvider.class.getSimpleName();

/** @return the tag used by {@link MTLog} for this provider. */
@NonNull
@Override
public String getLogTag() {
	return LOG_TAG;
}
/**
 * Creates a fresh {@link UriMatcher} for the given authority and lets the
 * status provider register the URIs it handles.
 */
@NonNull
private static UriMatcher getNewUriMatcher(@NonNull String authority) {
	final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH);
	StatusProvider.append(matcher, authority);
	return matcher;
}
@Nullable
private static UriMatcher uriMatcher = null; // lazily initialized below

/**
 * Lazily creates (once) and returns the URI matcher for this provider's authority.
 * Override if multiple {@link CaEdmontonProvider} implementations in same app.
 */
@NonNull
private static UriMatcher getURIMATCHER(@NonNull Context context) {
	if (uriMatcher == null) {
		uriMatcher = getNewUriMatcher(getAUTHORITY(context));
	}
	return uriMatcher;
}
@Nullable
private static String authority = null; // lazily read from resources below

/**
 * Lazily reads (once) and returns this provider's authority from resources.
 * Override if multiple {@link CaEdmontonProvider} implementations in same app.
 */
@NonNull
private static String getAUTHORITY(@NonNull Context context) {
	if (authority == null) {
		authority = context.getResources().getString(R.string.ca_edmonton_authority);
	}
	return authority;
}
@Nullable
private static Uri authorityUri = null; // lazily built below

/**
 * Lazily builds (once) and returns the content:// base URI for this provider.
 * Override if multiple {@link CaEdmontonProvider} implementations in same app.
 */
@NonNull
private static Uri getAUTHORITY_URI(@NonNull Context context) {
	if (authorityUri == null) {
		authorityUri = UriUtils.newContentUri(getAUTHORITY(context));
	}
	return authorityUri;
}
// Cache tuning for ETS LIVE statuses (consumed via the accessors below):
private static final long ETSLIVE_STATUS_MAX_VALIDITY_IN_MS = TimeUnit.HOURS.toMillis(1L); // hard expiry of a cached status
private static final long ETSLIVE_STATUS_VALIDITY_IN_MS = TimeUnit.MINUTES.toMillis(10L); // validity when NOT in focus
private static final long ETSLIVE_STATUS_VALIDITY_IN_FOCUS_IN_MS = TimeUnit.MINUTES.toMillis(1L); // shorter validity when in focus
private static final long ETSLIVE_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_MS = TimeUnit.MINUTES.toMillis(1L);
private static final long ETSLIVE_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_FOCUS_IN_MS = TimeUnit.MINUTES.toMillis(1L);

/** @return the maximum time a cached status may be used. */
@Override
public long getStatusMaxValidityInMs() {
	return ETSLIVE_STATUS_MAX_VALIDITY_IN_MS;
}

/** @return how long a cached status stays valid (shorter when the POI is in focus). */
@Override
public long getStatusValidityInMs(boolean inFocus) {
	if (inFocus) {
		return ETSLIVE_STATUS_VALIDITY_IN_FOCUS_IN_MS;
	}
	return ETSLIVE_STATUS_VALIDITY_IN_MS;
}

/** @return the minimum delay between two refreshes (same value either way here). */
@Override
public long getMinDurationBetweenRefreshInMs(boolean inFocus) {
	if (inFocus) {
		return ETSLIVE_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_FOCUS_IN_MS;
	}
	return ETSLIVE_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_MS;
}
/** Persists the given status into this provider's status cache (delegates to {@link StatusProvider}). */
@Override
public void cacheStatus(@NonNull POIStatus newStatusToCache) {
	StatusProvider.cacheStatusS(this, newStatusToCache);
}
/**
 * Returns the cached schedule status for the filter's route/trip/stop, or null
 * when the filter is not a schedule filter, the stop code / route short name is
 * missing, or nothing is cached. Statuses are cached under a custom
 * agency/route/stop UUID, so the target UUID is re-mapped to the RTS UUID
 * before returning.
 */
@Nullable
@Override
public POIStatus getCachedStatus(@NonNull StatusProviderContract.Filter statusFilter) {
	if (!(statusFilter instanceof Schedule.ScheduleStatusFilter)) {
		// FIX: this message previously said "getNewStatus()" (copy/paste from getNewStatus()).
		MTLog.w(this, "getCachedStatus() > Can't find cached schedule without schedule filter!");
		return null;
	}
	Schedule.ScheduleStatusFilter scheduleStatusFilter = (Schedule.ScheduleStatusFilter) statusFilter;
	RouteTripStop rts = scheduleStatusFilter.getRouteTripStop();
	if (TextUtils.isEmpty(rts.getStop().getCode()) || TextUtils.isEmpty(rts.getRoute().getShortName())) {
		return null; // both are required to build the cache key
	}
	String uuid = getAgencyRouteStopTargetUUID(rts);
	POIStatus status = StatusProvider.getCachedStatusS(this, uuid);
	if (status != null) {
		status.setTargetUUID(rts.getUUID()); // target RTS UUID instead of custom Clever Devices tags
		if (status instanceof Schedule) {
			((Schedule) status).setDescentOnly(rts.isDescentOnly());
		}
	}
	return status;
}
/** @return the cache target UUID for the given route/trip/stop. */
@NonNull
private static String getAgencyRouteStopTargetUUID(@NonNull RouteTripStop rts) {
	return getAgencyRouteStopTargetUUID(rts.getAuthority(), rts.getRoute().getShortName(), rts.getStop().getCode());
}

/** Builds the cache target UUID from authority + route short name + stop code. */
@NonNull
private static String getAgencyRouteStopTargetUUID(String agencyAuthority, String routeShortName, String stopCode) {
	return POI.POIUtils.getUUID(agencyAuthority, routeShortName, stopCode);
}
/** Purges stale cached statuses (delegates to {@link StatusProvider}). */
@Override
public boolean purgeUselessCachedStatuses() {
	return StatusProvider.purgeUselessCachedStatuses(this);
}

/** Deletes the cached status with the given internal id. */
@Override
public boolean deleteCachedStatus(int cachedStatusId) {
	return StatusProvider.deleteCachedStatus(this, cachedStatusId);
}

/** @return the SQLite table used to store cached statuses. */
@NonNull
@Override
public String getStatusDbTableName() {
	return CaEdmontonDbHelper.T_ETSLIVE_STATUS;
}

/** @return the status type served by this provider (schedule). */
@Override
public int getStatusType() {
	return POI.ITEM_STATUS_TYPE_SCHEDULE;
}
/**
 * Fetches a fresh status from the ETS LIVE web API, then serves the result
 * from the cache. Returns null when the filter is not a schedule filter or
 * the stop code / route short name is missing.
 */
@Nullable
@Override
public POIStatus getNewStatus(@NonNull StatusProviderContract.Filter statusFilter) {
	if (!(statusFilter instanceof Schedule.ScheduleStatusFilter)) {
		MTLog.w(this, "getNewStatus() > Can't find new schedule without schedule filter!");
		return null;
	}
	Schedule.ScheduleStatusFilter scheduleStatusFilter = (Schedule.ScheduleStatusFilter) statusFilter;
	RouteTripStop rts = scheduleStatusFilter.getRouteTripStop();
	if (TextUtils.isEmpty(rts.getStop().getCode()) || TextUtils.isEmpty(rts.getRoute().getShortName())) {
		return null; // both are required to query the API
	}
	loadRealTimeStatusFromWWW(rts); // caches parsed statuses as a side effect
	return getCachedStatus(statusFilter);
}
private static final String ETSLIVE_URL = "https://etslive.edmonton.ca/InfoWeb"; // ETS LIVE endpoint (JSON over POST)

// Request JSON field names:
private static final String JSON_VERSION = "version";
private static final String JSON_METHOD = "method";
private static final String JSON_PARAMS = "params";
private static final String JSON_STOP_ABBR = "StopAbbr";
private static final String JSON_LINE_ABBR = "LineAbbr";
private static final String JSON_NUM_TIMES_PER_LINE = "NumTimesPerLine";
private static final String JSON_NUM_STOP_TIMES = "NumStopTimes";

// Request JSON values:
private static final String JSON_VERSION_1_1 = "1.1";
private static final String JSON_METHOD_GET_BUS_TIMES = "GetBusTimes";
private static final int JSON_NUM_TIMES_PER_LINE_COUNT = 15;
private static final int JSON_NUM_STOP_TIMES_COUNT = 40;
/**
 * Builds the JSON POST body for a GetBusTimes call, or returns null when the
 * stop code / route short name cannot be turned into the numeric values the
 * API expects.
 */
@Nullable
private static String getJSONPostParameters(@NonNull RouteTripStop rts) {
	String stopCode = rts.getStop().getCode();
	String rsn = rts.getRoute().getShortName();
	if (!CharUtils.isDigitsOnly(rsn)) {
		rsn = String.valueOf(rts.getRoute().getId()); // fall back to the numeric route id
	}
	if (TextUtils.isEmpty(stopCode) || !CharUtils.isDigitsOnly(stopCode)) {
		MTLog.w(LOG_TAG, "Can't create real-time status JSON (invalid stop code) for %s", rts);
		return null;
	}
	if (TextUtils.isEmpty(rsn) || !CharUtils.isDigitsOnly(rsn)) {
		MTLog.w(LOG_TAG, "Can't create real-time status JSON (invalid route short name) for %s", rts);
		return null;
	}
	try {
		JSONObject json = new JSONObject();
		json.put(JSON_VERSION, JSON_VERSION_1_1);
		json.put(JSON_METHOD, JSON_METHOD_GET_BUS_TIMES);
		JSONObject jParams = new JSONObject();
		jParams.put(JSON_STOP_ABBR, Integer.parseInt(stopCode));
		jParams.put(JSON_LINE_ABBR, Integer.parseInt(rsn));
		jParams.put(JSON_NUM_TIMES_PER_LINE, JSON_NUM_TIMES_PER_LINE_COUNT);
		jParams.put(JSON_NUM_STOP_TIMES, JSON_NUM_STOP_TIMES_COUNT);
		json.put(JSON_PARAMS, jParams);
		return json.toString();
	} catch (Exception e) {
		MTLog.w(LOG_TAG, e, "Error while creating JSON POST parameters for '%s'!", rts);
		return null;
	}
}
/**
 * POSTs a GetBusTimes request to the ETS LIVE endpoint, parses the response
 * and replaces the cached statuses for this stop. All failures are logged
 * and swallowed (best-effort refresh).
 */
private void loadRealTimeStatusFromWWW(@NonNull RouteTripStop rts) {
	try {
		//noinspection UnnecessaryLocalVariable
		String urlString = ETSLIVE_URL;
		String jsonPostParams = getJSONPostParameters(rts);
		MTLog.i(this, "Loading from '%s' for stop '%s'...", ETSLIVE_URL, rts.getStop().getCode());
		if (TextUtils.isEmpty(jsonPostParams)) {
			MTLog.w(this, "loadPredictionsFromWWW() > skip (invalid JSON post parameters!)");
			return;
		}
		URL url = new URL(urlString);
		URLConnection urlc = url.openConnection();
		NetworkUtils.setupUrlConnection(urlc);
		HttpURLConnection httpUrlConnection = (HttpURLConnection) urlc;
		try {
			// Write the JSON request body.
			httpUrlConnection.setDoOutput(true);
			httpUrlConnection.setRequestMethod("POST");
			httpUrlConnection.addRequestProperty("Content-Type", "application/json");
			OutputStream os = httpUrlConnection.getOutputStream();
			BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os, FileUtils.getUTF8()));
			writer.write(jsonPostParams);
			writer.flush();
			writer.close();
			os.close();
			// Read & parse the response, then swap the cached statuses for this stop.
			long newLastUpdateInMs = TimeUtils.currentTimeMillis();
			String jsonString = FileUtils.getString(httpUrlConnection.getInputStream());
			Collection<POIStatus> statuses = parseAgencyJSON(jsonString, rts, newLastUpdateInMs);
			StatusProvider.deleteCachedStatus(this, ArrayUtils.asArrayList(getAgencyRouteStopTargetUUID(rts)));
			if (statuses != null) {
				for (POIStatus status : statuses) {
					StatusProvider.cacheStatusS(this, status);
				}
			}
		} catch (Exception e) {
			MTLog.w(this, e, "Error while posting query!");
		} finally {
			httpUrlConnection.disconnect();
		}
	} catch (UnknownHostException uhe) {
		// Expected while offline: log quietly unless debugging.
		if (MTLog.isLoggable(android.util.Log.DEBUG)) {
			MTLog.w(this, uhe, "No Internet Connection!");
		} else {
			MTLog.w(this, "No Internet Connection!");
		}
	} catch (SocketException se) {
		MTLog.w(LOG_TAG, se, "No Internet Connection!");
	} catch (Exception e) { // Unknown error
		MTLog.e(LOG_TAG, e, "INTERNAL ERROR: Unknown Exception");
	}
}
private static final TimeZone EDMONTON_TZ = TimeZone.getTimeZone("America/Edmonton");

/**
 * @return a new {@link Calendar} set to midnight today in the agency's
 * local time zone (America/Edmonton).
 */
@NonNull
private Calendar getNewBeginningOfTodayCal() {
	final Calendar todayAtMidnight = Calendar.getInstance(EDMONTON_TZ);
	for (int field : new int[]{Calendar.HOUR_OF_DAY, Calendar.MINUTE, Calendar.SECOND, Calendar.MILLISECOND}) {
		todayAtMidnight.set(field, 0);
	}
	return todayAtMidnight;
}
// Timestamps are rounded to ~10 s precision (see TimeUtils.timeToTheTensSecondsMillis below).
private static final long PROVIDER_PRECISION_IN_MS = TimeUnit.SECONDS.toMillis(10L);

// Response JSON field names:
private static final String JSON_RESULT = "result";
private static final String JSON_STOP_TIME_RESULT = "StopTimeResult";
private static final String JSON_STOP_TIMES = "StopTimes";
private static final String JSON_TRIP_ID = "TripId";
private static final String JSON_DESTINATION_SIGN = "DestinationSign";
private static final String JSON_REAL_TIME_RESULTS = "RealTimeResults";
private static final String JSON_REAL_TIME = "RealTime";
private static final String JSON_IGNORE_ADHERENCE = "IgnoreAdherence";
/**
 * Parses the GetBusTimes JSON response into (at most one) {@link Schedule}
 * status. "RealTime" values are added to midnight today (local time), i.e.
 * treated as seconds since the local start of today.
 *
 * @return the parsed statuses (possibly empty), or null on parsing error.
 */
@Nullable
private Collection<POIStatus> parseAgencyJSON(@Nullable String jsonString, @NonNull RouteTripStop rts, long newLastUpdateInMs) {
	try {
		ArrayList<POIStatus> result = new ArrayList<>();
		JSONObject json = jsonString == null ? null : new JSONObject(jsonString);
		if (json != null && json.has(JSON_RESULT)) {
			JSONArray jResults = json.getJSONArray(JSON_RESULT);
			if (jResults.length() > 0) {
				final long beginningOfTodayInMs = getNewBeginningOfTodayCal().getTimeInMillis();
				final Schedule newSchedule = new Schedule(
						getAgencyRouteStopTargetUUID(rts),
						newLastUpdateInMs,
						getStatusMaxValidityInMs(),
						newLastUpdateInMs,
						PROVIDER_PRECISION_IN_MS,
						false
				);
				for (int r = 0; r < jResults.length(); r++) {
					JSONObject jResult = jResults.getJSONObject(r);
					// TripId -> DestinationSign, used for time-stamp head-signs below.
					SparseArray<String> tripIdDestinationSigns = extractTripIdDestinations(jResult);
					if (jResult != null && jResult.has(JSON_REAL_TIME_RESULTS)) {
						JSONArray jRealTimeResults = jResult.getJSONArray(JSON_REAL_TIME_RESULTS);
						if (jRealTimeResults.length() > 0) {
							for (int rtr = 0; rtr < jRealTimeResults.length(); rtr++) {
								JSONObject jRealTimeResult = jRealTimeResults.getJSONObject(rtr);
								if (jRealTimeResult != null && jRealTimeResult.has(JSON_REAL_TIME)) {
									int nbSecondsSinceMorning = jRealTimeResult.getInt(JSON_REAL_TIME);
									long t = beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(nbSecondsSinceMorning);
									Schedule.Timestamp timestamp = new Schedule.Timestamp(TimeUtils.timeToTheTensSecondsMillis(t));
									try {
										// Attach the destination sign as the head-sign, if known for this trip.
										if (jRealTimeResult.has(JSON_TRIP_ID)) {
											int tripId = jRealTimeResult.getInt(JSON_TRIP_ID);
											String destinationSign = tripIdDestinationSigns.get(tripId);
											if (!TextUtils.isEmpty(destinationSign)) {
												timestamp.setHeadsign(Trip.HEADSIGN_TYPE_STRING, cleanTripHeadsign(destinationSign));
											}
										}
									} catch (Exception e) {
										MTLog.w(this, e, "Error while adding destination sign %s!", jRealTimeResult);
									}
									if (jRealTimeResult.has(JSON_IGNORE_ADHERENCE)) {
										// IgnoreAdherence == true => the time is NOT real-time.
										timestamp.setRealTime(!jRealTimeResult.optBoolean(JSON_IGNORE_ADHERENCE, true));
									}
									newSchedule.addTimestampWithoutSort(timestamp);
								}
							}
						}
					}
				}
				newSchedule.sortTimestamps(); // time-stamps were added unsorted above
				result.add(newSchedule);
			}
		}
		return result;
	} catch (Exception e) {
		MTLog.w(this, e, "Error while parsing JSON '%s'!", jsonString);
		return null;
	}
}
/**
 * Extracts a TripId -> DestinationSign mapping from the "StopTimeResult"
 * section of one result object. Parsing errors are logged and skipped;
 * the mapping collected so far is always returned.
 */
@NonNull
private SparseArray<String> extractTripIdDestinations(@Nullable JSONObject jResult) {
	final SparseArray<String> destinationSignByTripId = new SparseArray<>();
	try {
		if (jResult == null || !jResult.has(JSON_STOP_TIME_RESULT)) {
			return destinationSignByTripId; // nothing to extract
		}
		final JSONArray jStopTimeResults = jResult.getJSONArray(JSON_STOP_TIME_RESULT);
		for (int str = 0; str < jStopTimeResults.length(); str++) {
			final JSONObject jStopTimeResult = jStopTimeResults.getJSONObject(str);
			if (jStopTimeResult == null || !jStopTimeResult.has(JSON_STOP_TIMES)) {
				continue;
			}
			final JSONArray jStopTimes = jStopTimeResult.getJSONArray(JSON_STOP_TIMES);
			for (int st = 0; st < jStopTimes.length(); st++) {
				final JSONObject jStopTime = jStopTimes.getJSONObject(st);
				try {
					if (jStopTime != null && jStopTime.has(JSON_TRIP_ID) && jStopTime.has(JSON_DESTINATION_SIGN)) {
						destinationSignByTripId.put(jStopTime.getInt(JSON_TRIP_ID), jStopTime.getString(JSON_DESTINATION_SIGN));
					}
				} catch (Exception e) {
					MTLog.w(this, e, "Error while parsing trip destination %s!", jStopTime);
				}
			}
		}
	} catch (Exception e) {
		MTLog.w(this, e, "Error while parsing trip destinations!");
	}
	return destinationSignByTripId;
}
// Head-sign clean-up patterns (applied in cleanTripHeadsign() below):
private static final Pattern STARTS_WITH_RSN = Pattern.compile("(^[\\d]+\\s)", Pattern.CASE_INSENSITIVE); // leading route number + space
private static final Pattern WEST_EDMONTON_MALL = Pattern.compile("((^|\\W)(west edmonton mall)(\\W|$))", Pattern.CASE_INSENSITIVE);
private static final String WEST_EDMONTON_MALL_REPLACEMENT = "$2" + "WEM" + "$4";
private static final Pattern EDMONTON = Pattern.compile("((^|\\W)(edmonton)(\\W|$))", Pattern.CASE_INSENSITIVE);
private static final String EDMONTON_REPLACEMENT = "$2" + "Edm" + "$4";
private static final Pattern TRANSIT_CENTER = Pattern.compile("((^|\\W)(transit center|transit centre)(\\W|$))", Pattern.CASE_INSENSITIVE);
private static final String TRANSIT_CENTER_REPLACEMENT = "$2" + "TC" + "$4";
private static final Pattern TOWN_CENTER = Pattern.compile("((^|\\W)(town center|town centre)(\\W|$))", Pattern.CASE_INSENSITIVE);
private static final String TOWN_CENTER_REPLACEMENT = "$2" + "TC" + "$4";
private static final String VIA = " via "; // separator before the routing details we drop
/**
 * Shortens & normalizes a destination sign for display: drops any " via ..."
 * suffix and the leading route number, abbreviates common Edmonton landmark
 * names, then applies the shared clean-up helpers. On any error the original
 * head-sign is returned unchanged.
 */
@NonNull
private String cleanTripHeadsign(@NonNull String tripHeadsign) {
	try {
		int indexOfVIA = tripHeadsign.toLowerCase(Locale.ENGLISH).indexOf(VIA);
		if (indexOfVIA >= 0) {
			tripHeadsign = tripHeadsign.substring(0, indexOfVIA); // keep the destination only
		}
		tripHeadsign = STARTS_WITH_RSN.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
		tripHeadsign = WEST_EDMONTON_MALL.matcher(tripHeadsign).replaceAll(WEST_EDMONTON_MALL_REPLACEMENT);
		tripHeadsign = EDMONTON.matcher(tripHeadsign).replaceAll(EDMONTON_REPLACEMENT);
		tripHeadsign = TRANSIT_CENTER.matcher(tripHeadsign).replaceAll(TRANSIT_CENTER_REPLACEMENT);
		tripHeadsign = TOWN_CENTER.matcher(tripHeadsign).replaceAll(TOWN_CENTER_REPLACEMENT);
		tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign);
		tripHeadsign = CleanUtils.cleanNumbers(tripHeadsign);
		return CleanUtils.cleanLabel(tripHeadsign);
	} catch (Exception e) {
		MTLog.w(this, e, "Error while cleaning trip head sign '%s'!", tripHeadsign);
		return tripHeadsign;
	}
}
/** Provider creation hook; nothing to initialize beyond {@link #ping()}. */
@Override
public boolean onCreateMT() {
	ping();
	return true;
}

/** Intentionally a no-op: this provider needs no warm-up. */
@Override
public void ping() {
	// DO NOTHING
}
@Nullable
private CaEdmontonDbHelper dbHelper; // cached helper, re-created on version change

private static int currentDbVersion = -1; // DB version the cached helper was created with

/**
 * Returns the DB helper, re-creating it when the expected DB version changed
 * since the helper was created.
 */
@NonNull
private CaEdmontonDbHelper getDBHelper(@NonNull Context context) {
	if (dbHelper == null) { // initialize
		dbHelper = getNewDbHelper(context);
		currentDbVersion = getCurrentDbVersion();
	} else { // reset
		try {
			if (currentDbVersion != getCurrentDbVersion()) {
				dbHelper.close();
				dbHelper = null;
				return getDBHelper(context); // recurse once to re-initialize
			}
		} catch (Exception e) { // fail if locked, will try again later
			MTLog.w(this, e, "Can't check DB version!");
		}
	}
	return dbHelper;
}
/**
 * Override if multiple {@link CaEdmontonProvider} implementations in same app.
 *
 * @return the DB version expected by this provider (read from resources).
 */
public int getCurrentDbVersion() {
	//noinspection ConstantConditions // TODO requireContext()
	return CaEdmontonDbHelper.getDbVersion(getContext());
}

/**
 * Override if multiple {@link CaEdmontonProvider} implementations in same app.
 *
 * @return a new DB helper bound to the application context.
 */
@NonNull
public CaEdmontonDbHelper getNewDbHelper(@NonNull Context context) {
	return new CaEdmontonDbHelper(context.getApplicationContext());
}
/** @return the URI matcher for this provider's authority. */
@NonNull
@Override
public UriMatcher getURI_MATCHER() {
	//noinspection ConstantConditions // TODO requireContext()
	return getURIMATCHER(getContext());
}

/** @return the content:// base URI for this provider. */
@NonNull
@Override
public Uri getAuthorityUri() {
	//noinspection ConstantConditions // TODO requireContext()
	return getAUTHORITY_URI(getContext());
}

/** @return the (possibly re-created) DB helper for this provider. */
@NonNull
private SQLiteOpenHelper getDBHelper() {
	//noinspection ConstantConditions // TODO requireContext()
	return getDBHelper(getContext());
}

/** @return a readable handle on the status cache database. */
@NonNull
@Override
public SQLiteDatabase getReadDB() {
	return getDBHelper().getReadableDatabase();
}

/** @return a writable handle on the status cache database. */
@NonNull
@Override
public SQLiteDatabase getWriteDB() {
	return getDBHelper().getWritableDatabase();
}
/**
 * Only status queries are supported; any other URI is rejected.
 *
 * @throws IllegalArgumentException for URIs the status provider does not handle.
 */
@Nullable
@Override
public Cursor queryMT(@NonNull Uri uri, @Nullable String[] projection, @Nullable String selection, @Nullable String[] selectionArgs, @Nullable String sortOrder) {
	Cursor cursor = StatusProvider.queryS(this, uri, selection);
	if (cursor != null) {
		return cursor;
	}
	throw new IllegalArgumentException(String.format("Unknown URI (query): '%s'", uri));
}

/**
 * @return the MIME type for status URIs.
 * @throws IllegalArgumentException for any other URI.
 */
@Nullable
@Override
public String getTypeMT(@NonNull Uri uri) {
	String type = StatusProvider.getTypeS(this, uri);
	if (type != null) {
		return type;
	}
	throw new IllegalArgumentException(String.format("Unknown URI (type): '%s'", uri));
}

/** External deletes are not supported; always returns 0. */
@Override
public int deleteMT(@NonNull Uri uri, @Nullable String selection, @Nullable String[] selectionArgs) {
	MTLog.w(this, "The delete method is not available.");
	return 0;
}

/** External updates are not supported; always returns 0. */
@Override
public int updateMT(@NonNull Uri uri, @Nullable ContentValues values, @Nullable String selection, @Nullable String[] selectionArgs) {
	MTLog.w(this, "The update method is not available.");
	return 0;
}

/** External inserts are not supported; always returns null. */
@Nullable
@Override
public Uri insertMT(@NonNull Uri uri, @Nullable ContentValues values) {
	MTLog.w(this, "The insert method is not available.");
	return null;
}
/**
 * SQLite open helper for the ETS LIVE status cache database.
 */
public static class CaEdmontonDbHelper extends MTSQLiteOpenHelper {

	private static final String LOG_TAG = CaEdmontonDbHelper.class.getSimpleName();

	/** @return the tag used by {@link MTLog} for this helper. */
	@NonNull
	@Override
	public String getLogTag() {
		return LOG_TAG;
	}

	/**
	 * Override if multiple {@link CaEdmontonDbHelper} implementations in same app.
	 */
	protected static final String DB_NAME = "ca_edmonton.db";

	// Status table uses the shared schema from StatusProvider:
	static final String T_ETSLIVE_STATUS = StatusProvider.StatusDbHelper.T_STATUS;
	private static final String T_ETSLIVE_STATUS_SQL_CREATE = StatusProvider.StatusDbHelper.getSqlCreateBuilder(T_ETSLIVE_STATUS).build();
	private static final String T_ETSLIVE_STATUS_SQL_DROP = SqlUtils.getSQLDropIfExistsQuery(T_ETSLIVE_STATUS);

	private static int dbVersion = -1; // lazily read from resources below

	/**
	 * Override if multiple {@link CaEdmontonDbHelper} in same app.
	 */
	public static int getDbVersion(@NonNull Context context) {
		if (dbVersion < 0) {
			dbVersion = context.getResources().getInteger(R.integer.ca_edmonton_db_version);
		}
		return dbVersion;
	}

	CaEdmontonDbHelper(@NonNull Context context) {
		super(context, DB_NAME, null, getDbVersion(context));
	}

	@Override
	public void onCreateMT(@NonNull SQLiteDatabase db) {
		initAllDbTables(db);
	}

	/** Statuses are only a cache, so upgrading just drops & re-creates the table. */
	@Override
	public void onUpgradeMT(@NonNull SQLiteDatabase db, int oldVersion, int newVersion) {
		db.execSQL(T_ETSLIVE_STATUS_SQL_DROP);
		initAllDbTables(db);
	}

	public boolean isDbExist(@NonNull Context context) {
		return SqlUtils.isDbExist(context, DB_NAME);
	}

	private void initAllDbTables(@NonNull SQLiteDatabase db) {
		db.execSQL(T_ETSLIVE_STATUS_SQL_CREATE);
	}
}
}
| |
package hex.grid;
import hex.*;
import hex.grid.HyperSpaceWalker.BaseWalker;
import water.*;
import water.exceptions.H2OConcurrentModificationException;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Frame;
import water.util.Log;
import water.util.PojoUtils;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.Map;
/**
* Grid search job.
*
* This job represents a generic interface to launch "any" hyper space
* search. It triggers sub-jobs for each point in hyper space. It produces
* <code>Grid</code> object which contains a list of build models. A triggered
* model builder job can fail!
*
* Grid search is parametrized by hyper space walk strategy ({@link
* hex.grid.HyperSpaceWalker} which defines how the space of hyper parameters
* is traversed.
*
* The job is started by the <code>startGridSearch</code> method which create a new grid search, put
* representation of Grid into distributed KV store, and for each parameter in hyper space of
* possible parameters, it launches a separated model building job. The launch of jobs is sequential
* and blocking. So after finish the last model, whole grid search job is done as well.
*
* By default, the grid search invokes cartezian grid search, but it can be
* modified by passing explicit hyper space walk strategy via the
* {@link #startGridSearch(Key, HyperSpaceWalker)} method.
*
* If any of forked jobs fails then the failure is ignored, and grid search
* normally continue in traversing the hyper space.
*
* Typical usage from Java is:
* <pre>{@code
* // Create initial parameters and fill them by references to data
* GBMModel.GBMParameters params = new GBMModel.GBMParameters();
* params._train = fr._key;
* params._response_column = "cylinders";
*
* // Define hyper-space to search
* HashMap<String,Object[]> hyperParms = new HashMap<>();
* hyperParms.put("_ntrees", new Integer[]{1, 2});
* hyperParms.put("_distribution",new DistributionFamily[] {DistributionFamily.multinomial});
* hyperParms.put("_max_depth",new Integer[]{1,2,5});
* hyperParms.put("_learn_rate",new Float[]{0.01f,0.1f,0.3f});
*
* // Launch grid search job creating GBM models
* GridSearch gridSearchJob = GridSearch.startGridSearch(params, hyperParms, GBM_MODEL_FACTORY);
*
* // Block till the end of the job and get result
* Grid grid = gridSearchJob.get()
*
* // Get built models
* Model[] models = grid.getModels()
* }</pre>
*
* @see hex.grid.HyperSpaceWalker
* @see #startGridSearch(Key, HyperSpaceWalker)
*/
public final class GridSearch<MP extends Model.Parameters> extends Keyed<GridSearch> {
/** Key of the resulting {@link Grid} object in DKV. */
public final Key<Grid> _result;

/** The H2O job tracking progress and cancellation of this grid search. */
public final Job<Grid> _job;

/** Walks hyper space and for each point produces model parameters. It is
 * used only locally to fire new model builders. */
private final transient HyperSpaceWalker<MP, ?> _hyperSpaceWalker;
/**
 * Creates a grid search over the given hyper space.
 *
 * @param gkey key of the resulting {@link Grid} object
 * @param hyperSpaceWalker strategy used to traverse the hyper-parameter space (must not be null)
 */
private GridSearch(Key<Grid> gkey, HyperSpaceWalker<MP, ?> hyperSpaceWalker) {
  assert hyperSpaceWalker != null : "Grid search needs to know how to walk around hyper space!";
  _hyperSpaceWalker = hyperSpaceWalker;
  _result = gkey;
  String algoName = hyperSpaceWalker.getParams().algoName();
  _job = new Job<>(gkey, Grid.class.getName(), algoName + " Grid Search");
  // Note: do not validate parameters of created model builders here!
  // Leave it to launch time, and just mark the corresponding model builder job as failed.
}
/**
 * Starts the grid search job: resolves or creates the target {@link Grid} in
 * DKV, estimates the total amount of work, and forks the actual search.
 *
 * @return the started job producing the {@link Grid}
 */
Job<Grid> start() {
  final long gridSize = _hyperSpaceWalker.getMaxHyperSpaceSize();
  Log.info("Starting gridsearch: estimated size of search space = " + gridSize);
  // Create grid object and lock it
  // Creation is done here, since we would like make sure that after leaving
  // this function the grid object is in DKV and accessible.
  final Grid<MP> grid;
  Keyed keyed = DKV.getGet(_result);
  if (keyed != null) {
    // Appending to an existing grid: it must be a Grid trained on the same frame.
    if (! (keyed instanceof Grid))
      throw new H2OIllegalArgumentException("Name conflict: tried to create a Grid using the ID of a non-Grid object that's already in H2O: " + _job._result + "; it is a: " + keyed.getClass());
    grid = (Grid) keyed;
    Frame specTrainFrame = _hyperSpaceWalker.getParams().train();
    Frame oldTrainFrame = grid.getTrainingFrame();
    if (oldTrainFrame != null && !specTrainFrame._key.equals(oldTrainFrame._key) ||
        oldTrainFrame != null && specTrainFrame.checksum() != oldTrainFrame.checksum())
      throw new H2OIllegalArgumentException("training_frame", "grid", "Cannot append new models to a grid with different training input");
    grid.write_lock(_job);
  } else {
    grid =
        new Grid<>(_result,
                   _hyperSpaceWalker.getParams(),
                   _hyperSpaceWalker.getHyperParamNames(),
                   _hyperSpaceWalker.getParametersBuilderFactory().getFieldNamingStrategy());
    grid.delete_and_lock(_job);
  }
  Model model = null;
  HyperSpaceWalker.HyperSpaceIterator<MP> it = _hyperSpaceWalker.iterator();
  long gridWork = 0;
  if (gridSize > 0) { //if total grid space is known, walk it all and count up models to be built (not subject to time-based or converge-based early stopping)
    int count = 0;
    // NOTE(review): when it.max_models() == 0 the condition below is false on the
    // first check and gridWork stays 0 — confirm an unlimited model count cannot
    // reach this branch.
    while (it.hasNext(model) && (it.max_models() > 0 && count++ < it.max_models())) { //only walk the first max_models models, if specified
      try {
        Model.Parameters parms = it.nextModelParameters(model);
        gridWork += (parms._nfolds > 0 ? (parms._nfolds + 1/*main model*/) : 1) * parms.progressUnits();
      } catch (Throwable ex) {
        //swallow invalid combinations
      }
    }
  } else {
    //TODO: Future totally unbounded search: need a time-based progress bar
    gridWork = Long.MAX_VALUE;
  }
  it.reset(); // the counting pass consumed the iterator; start over for the real search
  // Install this as job functions
  return _job.start(new H2O.H2OCountedCompleter() {
    @Override public void compute2() {
      gridSearch(grid);
      tryComplete();
    }
  }, gridWork, it.max_runtime_secs());
}
/**
 * Returns expected number of models in resulting Grid object.
 *
 * The number can differ from final number of models due to visiting duplicate points in hyper
 * space. The search may also finish with fewer models when a time budget or
 * early-stopping criterion cuts it short (see {@code gridSearch}).
 *
 * @return expected number of models produced by this grid search
 */
public long getModelCount() {
  return _hyperSpaceWalker.getMaxHyperSpaceSize();
}
/**
 * Invokes grid search based on specified hyper space walk strategy.
 *
 * It updates passed grid object in distributed store.
 *
 * Per iteration: checks for user cancel, enforces the grid-level time budget,
 * builds one model, records scoring info, and updates job progress plus the
 * grid in DKV. Failed parameter combinations are recorded on the grid rather
 * than aborting the whole search.
 *
 * @param grid grid object to save results; grid already locked
 */
private void gridSearch(Grid<MP> grid) {
  Model model = null;
  // Prepare nice model key and override default key by appending model counter
  //String protoModelKey = _hyperSpaceWalker.getParams()._model_id == null
  //                       ? grid._key + "_model_"
  //                       : _hyperSpaceWalker.getParams()._model_id.toString() + H2O.calcNextUniqueModelId("") + "_";
  String protoModelKey = grid._key + "_model_";
  try {
    // Get iterator to traverse hyper space
    HyperSpaceWalker.HyperSpaceIterator<MP> it = _hyperSpaceWalker.iterator();
    // Number of traversed model parameters
    int counter = grid.getModelCount();
    while (it.hasNext(model)) {
      if(_job.stop_requested() ) return; // Handle end-user cancel request
      // Enforce the grid-level wall-clock budget, if one was configured.
      double max_runtime_secs = it.max_runtime_secs();
      double time_remaining_secs = Double.MAX_VALUE;
      if (max_runtime_secs > 0) {
        time_remaining_secs = it.time_remaining_secs();
        if (time_remaining_secs < 0) {
          Log.info("Grid max_runtime_secs of " + max_runtime_secs + " secs has expired; stopping early.");
          return;
        }
      }
      MP params;
      try {
        // Get parameters for next model
        params = it.nextModelParameters(model);
        // Sequential model building, should never propagate
        // exception up, just mark combination of model parameters as wrong
        // Do we need to limit the model build time?
        if (max_runtime_secs > 0) {
          Log.info("Grid time is limited to: " + max_runtime_secs + " for grid: " + grid._key + ". Remaining time is: " + time_remaining_secs);
          // Divide the remaining budget across the CV models plus the main model.
          double scale = params._nfolds > 0 ? params._nfolds+1 : 1; //remaining time per cv model is less
          if (params._max_runtime_secs == 0) { // unlimited
            params._max_runtime_secs = time_remaining_secs/scale;
            Log.info("Due to the grid time limit, changing model max runtime to: " + params._max_runtime_secs + " secs.");
          } else {
            double was = params._max_runtime_secs;
            params._max_runtime_secs = Math.min(params._max_runtime_secs, time_remaining_secs/scale);
            Log.info("Due to the grid time limit, changing model max runtime from: " + was + " secs to: " + params._max_runtime_secs + " secs.");
          }
        }
        try {
          ScoringInfo scoringInfo = new ScoringInfo();
          scoringInfo.time_stamp_ms = System.currentTimeMillis();
          //// build the model!
          model = buildModel(params, grid, counter++, protoModelKey);
          if (model!=null) {
            // Record this model's scoring info so early stopping can inspect
            // the history; keep the list sorted by the stopping metric.
            model.fillScoringInfo(scoringInfo);
            grid.setScoringInfos(ScoringInfo.prependScoringInfo(scoringInfo, grid.getScoringInfos()));
            ScoringInfo.sort(grid.getScoringInfos(), _hyperSpaceWalker.search_criteria().stopping_metric()); // Currently AUTO for Cartesian and user-specified for RandomDiscrete
          }
        } catch (RuntimeException e) { // Catch everything
          // A cancelled job is expected during user aborts — don't log it as a failure.
          if (!Job.isCancelledException(e)) {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            e.printStackTrace(pw);
            Log.warn("Grid search: model builder for parameters " + params + " failed! Exception: ", e, sw.toString());
          }
          grid.appendFailedModelParameters(params, e);
        }
      } catch (IllegalArgumentException e) {
        Log.warn("Grid search: construction of model parameters failed! Exception: ", e);
        // Model parameters cannot be constructed for some reason
        it.modelFailed(model);
        Object[] rawParams = it.getCurrentRawParameters();
        grid.appendFailedModelParameters(rawParams, e);
      } finally {
        // Update progress by 1 increment
        _job.update(1);
        // Always update grid in DKV after model building attempt
        grid.update(_job);
      } // finally
      if (model != null && grid.getScoringInfos() != null && // did model build and scoringInfo creation succeed?
          _hyperSpaceWalker.stopEarly(model, grid.getScoringInfos())) {
        Log.info("Convergence detected based on simple moving average of the loss function. Grid building completed.");
        break;
      }
    } // while (it.hasNext(model))
    Log.info("For grid: " + grid._key + " built: " + grid.getModelCount() + " models.");
  } finally {
    // Always release the grid lock, even on early return or exception.
    grid.unlock(_job);
  }
}
/**
 * Build a model based on specified parameters and save it to resulting Grid object.
 *
 * Returns a model run with these parameters, typically built on demand and cached - expected to
 * be an expensive operation. If the model in question is "in progress", a 2nd build will NOT be
 * kicked off. This is a blocking call.
 *
 * If a new model is created, then the Grid object is updated in distributed store. If a model for
 * given parameters already exists, it is directly returned without updating the Grid object. If
 * model building fails then the Grid object is not updated and the method returns
 * <code>null</code>.
 *
 * @param params parameters for a new model
 * @param grid grid object holding created models
 * @param paramsIdx index of generated model parameter
 * @param protoModelKey prototype of model key
 * @return return a new model if it does not exist
 */
private Model buildModel(final MP params, Grid<MP> grid, int paramsIdx, String protoModelKey) {
  // Make sure that the model is not yet built (can be case of duplicated hyper parameters).
  // We first look in the grid _models cache, then we look in the DKV.
  // FIXME: get checksum here since model builder will modify instance of params!!!
  final long checksum = params.checksum();
  Key<Model> key = grid.getModelKey(checksum);
  if (key != null) {
    if (DKV.get(key) == null) {
      // We know about a model that's been removed; rebuild.
      Log.info("GridSearch.buildModel(): model with these parameters was built but removed, rebuilding; checksum: " + checksum);
    } else {
      Log.info("GridSearch.buildModel(): model with these parameters already exists, skipping; checksum: " + checksum);
      return key.get();
    }
  }
  // Is there a model with the same params in the DKV?
  final Key<Model>[] modelKeys = KeySnapshot.globalSnapshot().filter(new KeySnapshot.KVFilter() {
    @Override
    public boolean filter(KeySnapshot.KeyInfo k) {
      // Only Model values can match; skip everything else cheaply.
      if (! Value.isSubclassOf(k._type, Model.class))
        return false;
      Model m = ((Model)k._key.get());
      if ((m == null) || (m._parms == null))
        return false;
      try {
        return m._parms.checksum() == checksum;
      } catch (H2OConcurrentModificationException e) {
        // We are inspecting model parameters that doesn't belong to us - they might be modified (or deleted) while
        // checksum is being calculated: we skip them (see PUBDEV-5286)
        Log.warn("GridSearch encountered concurrent modification while searching DKV", e);
        return false;
      } catch (final RuntimeException e) {
        // The concurrent-modification exception may arrive wrapped; walk the
        // cause chain before deciding whether to swallow or rethrow.
        Throwable ex = e;
        boolean concurrentModification = false;
        while (ex.getCause() != null) {
          ex = ex.getCause();
          if (ex instanceof H2OConcurrentModificationException) {
            concurrentModification = true;
            break;
          }
        }
        if (! concurrentModification)
          throw e;
        Log.warn("GridSearch encountered concurrent modification while searching DKV", e);
        return false;
      }
    }
  }).keys();
  if (modelKeys.length > 0) {
    // An equivalent model exists elsewhere in the DKV: adopt it into this grid.
    grid.putModel(checksum, modelKeys[0]);
    return modelKeys[0].get();
  }
  // Modify model key to have nice version with counter
  // Note: Cannot create it before checking the cache since checksum would differ for each model
  Key<Model> result = Key.make(protoModelKey + paramsIdx);
  // Build a new model
  // THIS IS BLOCKING call since we do not have enough information about free resources
  // FIXME: we should allow here any launching strategy (not only sequential)
  // NOTE(review): startBuildModel() returns null when grid.getModel(params) is
  // non-null, which would NPE here — confirm the grid cache can never hit at
  // this point (the checksum lookup above should have returned already).
  Model m = (Model)startBuildModel(result,params, grid).dest().get();
  grid.putModel(checksum, result);
  return m;
}
/**
 * Triggers model building process but do not block on it.
 *
 * NOTE(review): the "non-blocking" claim above looks stale — this calls
 * {@code trainModelNested(null)} on the calling thread before returning,
 * and the caller immediately blocks on the result anyway; confirm.
 *
 * @param result key under which the trained model will be stored
 * @param params parameters for a new model
 * @param grid resulting grid object
 * @return A Future of a model run with these parameters, typically built on demand and not cached
 * - expected to be an expensive operation. If the model in question is "in progress", a 2nd
 * build will NOT be kicked off. This is a non-blocking call. Returns
 * <code>null</code> when the grid already contains a model for these parameters.
 */
private ModelBuilder startBuildModel(Key result, MP params, Grid<MP> grid) {
  // Skip duplicate work if the grid already holds a model for these params.
  if (grid.getModel(params) != null) return null;
  ModelBuilder mb = ModelBuilder.make(params.algoName(), _job, result);
  mb._parms = params;
  mb.trainModelNested(null);
  return mb;
}
/**
 * Derives the DKV key for a new {@link Grid} from the model algorithm name
 * and the training frame's key, suffixed with a cluster-unique counter.
 *
 * @param modelName algorithm name used as part of the grid key
 * @param fr        training frame; must already be stored under a key
 * @return a fresh grid key for this modeling class and frame
 * @throws java.lang.IllegalArgumentException if frame is not saved to distributed store.
 */
protected static Key<Grid> gridKeyName(String modelName, Frame fr) {
  if (fr == null || fr._key == null)
    throw new IllegalArgumentException("The frame being grid-searched over must have a Key");
  final String keyName =
      "Grid_" + modelName + "_" + fr._key.toString() + H2O.calcNextUniqueModelId("");
  return Key.make(keyName);
}
/**
 * Starts a new grid-search job; this is the entry point used by
 * GridSearchHandler.do_train().
 *
 * <p>Depending on the supplied <i>search_criteria</i>, this launches either a
 * classical cartesian walk over the hyper-parameter grid or a random
 * hyper-parameter search.
 *
 * @param destKey key under which the resulting Grid is stored
 * @param params default model-builder parameters, combined with each
 *               hyper-parameter point to produce concrete model parameters
 * @param hyperParams map of hyper-parameter names to candidate value arrays
 * @param paramsBuilderFactory strategy for deriving model parameters from the
 *                             common parameters plus one hyper-parameter point
 * @param search_criteria strategy (cartesian/random/...) for walking the space
 * @return the grid-search Job; models are built as needed. In-progress models
 *         are not rebuilt. This call does not block.
 */
public static <MP extends Model.Parameters> Job<Grid> startGridSearch(
    final Key<Grid> destKey,
    final MP params,
    final Map<String, Object[]> hyperParams,
    final ModelParametersBuilderFactory<MP> paramsBuilderFactory,
    final HyperSpaceSearchCriteria search_criteria) {
  // Delegate to the walker-based overload with a walker assembled from the pieces.
  return startGridSearch(
      destKey,
      BaseWalker.WalkerFactory.create(params, hyperParams, paramsBuilderFactory, search_criteria));
}
/**
 * Starts a new grid-search job using the default strategy: a classical
 * cartesian walk over the hyper-parameter grid with reflection-based
 * parameter building.
 *
 * @param destKey key under which the resulting Grid is stored
 * @param params default model-builder parameters, combined with each
 *               hyper-parameter point to produce concrete model parameters
 * @param hyperParams map of hyper-parameter names to candidate value arrays
 * @return the grid-search Job; models are built as needed. In-progress models
 *         are not rebuilt. This call does not block.
 *
 * @see #startGridSearch(Key, Model.Parameters, Map, ModelParametersBuilderFactory, HyperSpaceSearchCriteria)
 */
public static <MP extends Model.Parameters> Job<Grid> startGridSearch(final Key<Grid> destKey,
                                                                      final MP params,
                                                                      final Map<String, Object[]> hyperParams) {
  // Fixed defaults: reflective parameter filling + exhaustive cartesian walk.
  final SimpleParametersBuilderFactory<MP> builderFactory = new SimpleParametersBuilderFactory<>();
  final HyperSpaceSearchCriteria.CartesianSearchCriteria criteria =
      new HyperSpaceSearchCriteria.CartesianSearchCriteria();
  return startGridSearch(destKey, params, hyperParams, builderFactory, criteria);
}
/**
 * Starts a new grid-search job traversing the hyper-parameter space with the
 * strategy embodied by the given walker (which also holds the space itself).
 *
 * @param destKey key under which the resulting Grid is stored, or
 *                {@code null} to derive one from the algorithm and frame
 * @param hyperSpaceWalker traversal strategy plus hyper-space definition
 * @return the grid-search Job; models are built as needed. In-progress models
 *         are not rebuilt. This call does not block.
 */
public static <MP extends Model.Parameters> Job<Grid> startGridSearch(
    final Key<Grid> destKey,
    final HyperSpaceWalker<MP, ?> hyperSpaceWalker) {
  // Compute key for destination object representing grid
  MP params = hyperSpaceWalker.getParams();
  final Key<Grid> gridKey;
  if (destKey != null) {
    gridKey = destKey;
  } else {
    gridKey = gridKeyName(params.algoName(), params.train());
  }
  // Start the search
  return new GridSearch(gridKey, hyperSpaceWalker).start();
}
/**
 * Factory producing parameter builders that fill model-parameter fields via
 * reflection.
 *
 * @param <MP> type of model parameters object
 */
public static class SimpleParametersBuilderFactory<MP extends Model.Parameters>
    implements ModelParametersBuilderFactory<MP> {

  @Override
  public ModelParametersBuilder<MP> get(MP initialParams) {
    return new SimpleParamsBuilder<>(initialParams);
  }

  @Override
  public PojoUtils.FieldNaming getFieldNamingStrategy() {
    return PojoUtils.FieldNaming.CONSISTENT;
  }

  /**
   * Builder that configures the supplied parameters object in place through
   * reflective field access — note that {@link #build()} returns the very
   * instance passed to the constructor, not a copy.
   *
   * Usage:
   * <pre>{@code
   *   GBMModel.GBMParameters params =
   *     new SimpleParamsBuilder(initialParams)
   *       .set("_ntrees", 30).set("_learn_rate", 0.01).build()
   * }</pre>
   *
   * @param <MP> type of model parameters object
   */
  public static class SimpleParamsBuilder<MP extends Model.Parameters>
      implements ModelParametersBuilder<MP> {

    /** The parameters instance being mutated in place. */
    private final MP params;

    public SimpleParamsBuilder(MP initialParams) {
      this.params = initialParams;
    }

    @Override
    public ModelParametersBuilder<MP> set(String name, Object value) {
      PojoUtils.setField(params, name, value, PojoUtils.FieldNaming.CONSISTENT);
      return this;
    }

    @Override
    public MP build() {
      return params;
    }
  }
}
}
| |
package ru.stqa.pft.addressbook.model;
import com.google.gson.annotations.Expose;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import com.thoughtworks.xstream.annotations.XStreamOmitField;
import org.hibernate.annotations.Type;
import javax.persistence.*;
import java.io.File;
import java.util.HashSet;
import java.util.Set;
@Entity
@Table(name = "addressbook")
@XStreamAlias("contact")
/**
 * Address-book contact entity mapped to the {@code addressbook} table.
 *
 * Populated fluently via the {@code withXxx} builder-style setters.
 * Fields marked {@code @Transient} are UI-derived aggregates (all phones,
 * all emails, all addresses) and are not persisted.
 * Equality is based on {@code id}, {@code firstName} and {@code lastName} only.
 */
public class AddressData {

  // Integer.MAX_VALUE marks a record that has not been assigned a DB id yet.
  @Id
  @Column(name = "id")
  @XStreamOmitField
  private int id = Integer.MAX_VALUE;

  @Expose
  @Column(name = "firstname")
  private String firstName;

  @Expose
  @Column(name = "lastname")
  private String lastName;

  @Expose
  private String address;

  // Second address line; UI-only, not persisted.
  @Transient
  private String address2;

  // Concatenation of all emails as shown in the contact list; not persisted.
  @Transient
  private String allEmail;

  @Expose
  @Column(name = "home")
  @Type(type = "text")
  private String homeTelephoneNumber;

  @Column(name = "mobile")
  @Type(type = "text")
  private String mobileTelephoneNumber;

  @Column(name = "work")
  @Type(type = "text")
  private String workTelephoneNumber;

  @Expose
  private String email1;

  @Transient
  private String email2;

  @Transient
  private String email3;

  // Concatenation of all address lines as shown in the UI; not persisted.
  @Transient
  private String allAddress;

  // Concatenation of all phone numbers as shown in the UI; not persisted.
  @Transient
  private String allPhones;

  // Path to the contact photo file, stored as text in the "photo" column.
  @Column(name = "photo")
  @Type(type = "text")
  private String foto;

  @ManyToMany(fetch = FetchType.EAGER)
  @JoinTable(name = "address_in_groups", joinColumns = @JoinColumn(name = "id"), inverseJoinColumns = @JoinColumn(name = "group_id"))
  private Set<GroupData> groups = new HashSet<GroupData>();

  @Override
  public String toString() {
    return "AddressData{" +
            "id=" + id +
            ", firstName='" + firstName + '\'' +
            ", lastName='" + lastName + '\'' +
            '}';
  }

  /**
   * Returns the contact photo as a {@link File}, or {@code null} when no
   * photo path is set (previously this threw a NullPointerException from
   * {@code new File(null)}).
   */
  public File getFoto() {
    return foto == null ? null : new File(foto);
  }

  public AddressData withFoto(File foto) {
    this.foto = foto.getPath();
    return this;
  }

  public String getAllAddress() {
    return allAddress;
  }

  public AddressData withAllAddress(String allAddress) {
    this.allAddress = allAddress;
    return this;
  }

  public String getEmail1() {
    return email1;
  }

  public AddressData withEmail1(String email1) {
    this.email1 = email1;
    return this;
  }

  public String getEmail2() {
    return email2;
  }

  public AddressData withEmail2(String email2) {
    this.email2 = email2;
    return this;
  }

  public String getEmail3() {
    return email3;
  }

  public AddressData withEmail3(String email3) {
    this.email3 = email3;
    return this;
  }

  public String getAllEmail() {
    return allEmail;
  }

  public AddressData withAllEmail(String email) {
    this.allEmail = email;
    return this;
  }

  public String getAddress2() {
    return address2;
  }

  public AddressData withAddress2(String address2) {
    this.address2 = address2;
    return this;
  }

  public String getAllPhones() {
    return allPhones;
  }

  public AddressData withAllPhones(String allPhones) {
    this.allPhones = allPhones;
    return this;
  }

  public AddressData withId(int id) {
    this.id = id;
    return this;
  }

  public int getId() {
    return id;
  }

  public AddressData withFirstName(String firstName) {
    this.firstName = firstName;
    return this;
  }

  public AddressData withLastName(String lastName) {
    this.lastName = lastName;
    return this;
  }

  /** Returns a fresh {@code Groups} wrapper over this contact's group set. */
  public Groups getGroups() {
    return new Groups(groups);
  }

  public AddressData withAddress(String address) {
    this.address = address;
    return this;
  }

  public AddressData withHomeTelephoneNumber(String telephoneNumber) {
    this.homeTelephoneNumber = telephoneNumber;
    return this;
  }

  public AddressData withMobileTelephoneNumber(String mobileTelephoneNumber) {
    this.mobileTelephoneNumber = mobileTelephoneNumber;
    return this;
  }

  public AddressData withWorkTelephoneNumber(String workTelephoneNumber) {
    this.workTelephoneNumber = workTelephoneNumber;
    return this;
  }

  public String getFirstName() {
    return firstName;
  }

  public String getLastName() {
    return lastName;
  }

  public String getAddress() {
    return address;
  }

  public String getHomeTelephoneNumber() {
    return homeTelephoneNumber;
  }

  public String getMobileTelephoneNumber() {
    return mobileTelephoneNumber;
  }

  public String getWorkTelephoneNumber() {
    return workTelephoneNumber;
  }

  // Equality deliberately considers only id, firstName and lastName —
  // consistent with hashCode() below.
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    AddressData that = (AddressData) o;
    if (id != that.id) return false;
    if (firstName != null ? !firstName.equals(that.firstName) : that.firstName != null) return false;
    return lastName != null ? lastName.equals(that.lastName) : that.lastName == null;
  }

  @Override
  public int hashCode() {
    int result = id;
    result = 31 * result + (firstName != null ? firstName.hashCode() : 0);
    result = 31 * result + (lastName != null ? lastName.hashCode() : 0);
    return result;
  }

  /** Adds this contact to the given group (fluent). */
  public AddressData inGroup(GroupData group) {
    groups.add(group);
    return this;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.scr.impl;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import org.apache.felix.scr.impl.config.ScrConfiguration;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleEvent;
import org.osgi.framework.Constants;
import org.osgi.framework.SynchronousBundleListener;
import org.osgi.service.component.ComponentConstants;
import org.osgi.service.log.LogService;
import org.osgi.util.tracker.ServiceTracker;
/**
 * This activator covers the requirement described in section 112.8.1 of the
 * OSGi Declarative Services specification: loading and disposing of the
 * components declared in active bundles.
 */
public class Activator implements BundleActivator, SynchronousBundleListener
{
// name of the LogService class (this is a string to not create a reference to the class)
static final String LOGSERVICE_CLASS = "org.osgi.service.log.LogService";
// name of the PackageAdmin class (this is a string to not create a reference to the class)
static final String PACKAGEADMIN_CLASS = "org.osgi.service.packageadmin.PackageAdmin";
// Our configuration from bundle context properties and Config Admin
private ScrConfiguration m_configuration;
// Flag that sets error messages
private static int m_logLevel = LogService.LOG_DEBUG;
// this bundle's context
private static BundleContext m_context;
// the log service to log messages to
private static ServiceTracker m_logService;
// the package admin service (see BindMethod.getParameterClass)
private static ServiceTracker m_packageAdmin;
// map of BundleComponentActivator instances per Bundle indexed by Bundle id
private Map m_componentBundles;
// registry of managed component
private ComponentRegistry m_componentRegistry;
// thread acting upon configurations
private ComponentActorThread m_componentActor;
// whether to support two workarounds to run the OSGi R 4.2 CT
// See hasCtWorkaround()
private static boolean m_ctWorkaround;
/**
* Registers this instance as a (synchronous) bundle listener and loads the
* components of already registered bundles.
*
* @param context The <code>BundleContext</code> of the SCR implementation
* bundle.
*/
public void start( BundleContext context ) throws Exception
{
m_context = context;
// require the log service
m_logService = new ServiceTracker( context, LOGSERVICE_CLASS, null );
m_logService.open();
// prepare component registry
m_componentBundles = new HashMap();
m_componentRegistry = new ComponentRegistry( context );
// get the configuration
m_configuration = new ScrConfiguration( context );
// configure logging from context properties
m_logLevel = m_configuration.getLogLevel();
// log SCR startup
log( LogService.LOG_INFO, context.getBundle(), " Version = "
+ context.getBundle().getHeaders().get( Constants.BUNDLE_VERSION ), null );
// check whether we workaround OSGi CT issues
m_ctWorkaround = ScrConfiguration.hasCtWorkaround( context );
// create and start the component actor
m_componentActor = new ComponentActorThread();
Thread t = new Thread(m_componentActor, "SCR Component Actor");
t.setDaemon( true );
t.start();
// register for bundle updates
context.addBundleListener( this );
// 112.8.2 load all components of active bundles
loadAllComponents( context );
// register the Gogo and old Shell commands
ScrCommand.register(context, m_componentRegistry, m_configuration);
}
/**
* Unregisters this instance as a bundle listener and unloads all components
* which have been registered during the active life time of the SCR
* implementation bundle.
*
* @param context The <code>BundleContext</code> of the SCR implementation
* bundle.
*/
public void stop( BundleContext context ) throws Exception
{
// unregister as bundle listener
context.removeBundleListener( this );
// 112.8.2 dispose off all active components
disposeAllComponents();
// dispose component registry
m_componentRegistry.dispose();
// terminate the actor thread
if ( m_componentActor != null )
{
m_componentActor.terminate();
m_componentActor = null;
}
// close the LogService tracker now
if ( m_logService != null )
{
m_logService.close();
m_logService = null;
}
// close the PackageAdmin tracker now
if ( m_packageAdmin != null )
{
m_packageAdmin.close();
m_packageAdmin = null;
}
// remove the reference to the component context
m_context = null;
}
// ---------- BundleListener Interface -------------------------------------
/**
* Loads and unloads any components provided by the bundle whose state
* changed. If the bundle has been started, the components are loaded. If
* the bundle is about to stop, the components are unloaded.
*
* @param event The <code>BundleEvent</code> representing the bundle state
* change.
*/
public void bundleChanged( BundleEvent event )
{
if ( event.getType() == BundleEvent.LAZY_ACTIVATION || event.getType() == BundleEvent.STARTED )
{
// FELIX-1666 LAZY_ACTIVATION event is sent if the bundle has lazy
// activation policy and is waiting for class loader access to
// actually load it; STARTED event is sent if bundle has regular
// activation policy or if the lazily activated bundle finally is
// really started. In both cases just try to load the components
loadComponents( event.getBundle() );
}
else if ( event.getType() == BundleEvent.STOPPING )
{
disposeComponents( event.getBundle() );
}
}
//---------- Component Management -----------------------------------------
// Loads the components of all bundles currently active.
private void loadAllComponents( BundleContext context )
{
Bundle[] bundles = context.getBundles();
for ( int i = 0; i < bundles.length; i++ )
{
Bundle bundle = bundles[i];
if ( ComponentRegistry.isBundleActive( bundle ) )
{
loadComponents( bundle );
}
}
}
/**
* Loads the components of the given bundle. If the bundle has no
* <i>Service-Component</i> header, this method has no effect. The
* fragments of a bundle are not checked for the header (112.4.1).
* <p>
* This method calls the {@link #getBundleContext(Bundle)} method to find
* the <code>BundleContext</code> of the bundle. If the context cannot be
* found, this method does not load components for the bundle.
*/
private void loadComponents( Bundle bundle )
{
if ( bundle.getHeaders().get( "Service-Component" ) == null )
{
// no components in the bundle, abandon
return;
}
// there should be components, load them with a bundle context
BundleContext context = bundle.getBundleContext();
if ( context == null )
{
log( LogService.LOG_ERROR, m_context.getBundle(), "Cannot get BundleContext of bundle "
+ bundle.getSymbolicName() + "/" + bundle.getBundleId(), null );
return;
}
// FELIX-1666 method is called for the LAZY_ACTIVATION event and
// the started event. Both events cause this method to be called;
// so we have to make sure to not load components twice
// FELIX-2231 Mark bundle loaded early to prevent concurrent loading
// if LAZY_ACTIVATION and STARTED event are fired at the same time
final boolean loaded;
final Long bundleId = new Long( bundle.getBundleId() );
synchronized ( m_componentBundles )
{
if ( m_componentBundles.containsKey( bundleId ) )
{
loaded = true;
}
else
{
m_componentBundles.put( bundleId, bundleId );
loaded = false;
}
}
// terminate if already loaded (or currently being loaded)
if ( loaded )
{
log( LogService.LOG_DEBUG, m_context.getBundle(), "Components for bundle " + bundle.getSymbolicName()
+ "/" + bundle.getBundleId() + " already loaded. Nothing to do.", null );
return;
}
try
{
BundleComponentActivator ga = new BundleComponentActivator( m_componentRegistry, m_componentActor, context,
m_configuration );
// replace bundle activator in the map
synchronized ( m_componentBundles )
{
m_componentBundles.put( bundleId, ga );
}
}
catch ( Exception e )
{
// remove the bundle id from the bundles map to ensure it is
// not marked as being loaded
synchronized ( m_componentBundles )
{
m_componentBundles.remove( bundleId );
}
if ( e instanceof IllegalStateException && bundle.getState() != Bundle.ACTIVE )
{
log(
LogService.LOG_DEBUG,
m_context.getBundle(),
"Bundle "
+ bundle.getSymbolicName()
+ "/"
+ bundle.getBundleId()
+ " has been stopped while trying to activate its components. Trying again when the bundles gets startet again.",
e );
}
else
{
log( LogService.LOG_ERROR, m_context.getBundle(), "Error while loading components of bundle "
+ bundle.getSymbolicName() + "/" + bundle.getBundleId(), e );
}
}
}
/**
* Unloads components of the given bundle. If no components have been loaded
* for the bundle, this method has no effect.
*/
private void disposeComponents( Bundle bundle )
{
final Object ga;
synchronized ( m_componentBundles )
{
ga = m_componentBundles.remove( new Long( bundle.getBundleId() ) );
}
if ( ga instanceof BundleComponentActivator )
{
try
{
( ( BundleComponentActivator ) ga ).dispose( ComponentConstants.DEACTIVATION_REASON_BUNDLE_STOPPED );
}
catch ( Exception e )
{
log( LogService.LOG_ERROR, m_context.getBundle(), "Error while disposing components of bundle "
+ bundle.getSymbolicName() + "/" + bundle.getBundleId(), e );
}
}
}
// Unloads all components registered with the SCR
private void disposeAllComponents()
{
final Object[] activators;
synchronized ( m_componentBundles )
{
activators = m_componentBundles.values().toArray();
m_componentBundles.clear();
}
for ( int i = 0; i < activators.length; i++ )
{
if ( activators[i] instanceof BundleComponentActivator )
{
final BundleComponentActivator ga = ( BundleComponentActivator ) activators[i];
try
{
final Bundle bundle = ga.getBundleContext().getBundle();
try
{
ga.dispose( ComponentConstants.DEACTIVATION_REASON_DISPOSED );
}
catch ( Exception e )
{
log( LogService.LOG_ERROR, m_context.getBundle(), "Error while disposing components of bundle "
+ bundle.getSymbolicName() + "/" + bundle.getBundleId(), e );
}
}
catch ( IllegalStateException e )
{
//bundle context was already shut down in another thread, bundle is not available.
}
}
}
}
/**
* Method to actually emit the log message. If the LogService is available,
* the message will be logged through the LogService. Otherwise the message
* is logged to stdout (or stderr in case of LOG_ERROR level messages),
*
* @param level The log level to log the message at
* @param message The message to log
* @param ex An optional <code>Throwable</code> whose stack trace is written,
* or <code>null</code> to not log a stack trace.
*/
public static void log( int level, Bundle bundle, String message, Throwable ex )
{
if ( m_logLevel >= level )
{
Object logger = ( m_logService != null ) ? m_logService.getService() : null;
if ( logger == null )
{
// output depending on level
PrintStream out = ( level == LogService.LOG_ERROR ) ? System.err : System.out;
// level as a string
StringBuffer buf = new StringBuffer();
switch ( level )
{
case ( LogService.LOG_DEBUG ):
buf.append( "DEBUG: " );
break;
case ( LogService.LOG_INFO ):
buf.append( "INFO : " );
break;
case ( LogService.LOG_WARNING ):
buf.append( "WARN : " );
break;
case ( LogService.LOG_ERROR ):
buf.append( "ERROR: " );
break;
default:
buf.append( "UNK : " );
break;
}
// bundle information
if ( bundle != null )
{
buf.append( bundle.getSymbolicName() );
buf.append( " (" );
buf.append( bundle.getBundleId() );
buf.append( "): " );
}
// the message
buf.append( message );
// keep the message and the stacktrace together
synchronized ( out)
{
out.println( buf );
if ( ex != null )
{
ex.printStackTrace( out );
}
}
}
else
{
( ( LogService ) logger ).log( level, message, ex );
}
}
}
public static Object getPackageAdmin()
{
if ( m_packageAdmin == null )
{
synchronized ( Activator.class )
{
if ( m_packageAdmin == null )
{
m_packageAdmin = new ServiceTracker( m_context, PACKAGEADMIN_CLASS, null );
m_packageAdmin.open();
}
}
}
return m_packageAdmin.getService();
}
/**
* Returns <code>true</code> if the <code>ds.ctworkaround</code> framework
* property has been set to <code>true</code>. Otherwise <code>false</code>
* is returned.
* <p>
* If this method returns <code>true</code>, two workarounds for the OSGi
* Compendium R 4.2 CT for Declarative Services are active:
* <ul>
* <li>The <code>ComponentContext.getProperties()</code> implementation
* always returns the same writeable <code>Dictionary</code> instead of
* a read-only dictionary</li>
* <li>Location binding of <code>Configuration</code> objects supplied to
* components is ignored.</li>
* </ul>
* <p>
* Setting the <code>ds.ctworkaround</code> framework property is required
* to pass the CT but setting the property in a productive environment
* is strongly discouraged.
*
* @return <code>true</code> if the <code>ds.ctworkaround</code> framework
* property is set to <code>true</code>.
* @see <a href="https://issues.apache.org/jira/browse/FELIX-2526">FELIX-2526</a>
*/
public static boolean hasCtWorkaround()
{
    // Cached flag presumably read from the ds.ctworkaround framework property at
    // startup (see the javadoc above); the initialization site is not visible here.
    return m_ctWorkaround;
}
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticbeanstalk.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Request to create or update a group of environments.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticbeanstalk-2010-12-01/ComposeEnvironments"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ComposeEnvironmentsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name of the application to which the specified source bundles belong. */
    private String applicationName;

    /**
     * The name of the group to which the target environments belong. A group name is only needed when the
     * environment name defined in each target environment's manifest ends with a + (plus) character. See <a
     * href="http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/environment-cfg-manifest.html">Environment
     * Manifest (env.yaml)</a> for details.
     */
    private String groupName;

    /**
     * Version labels identifying the application source bundles to compose. Each bundle must include an
     * environment manifest naming the environment and the solution stack, and may declare environment links.
     */
    private com.amazonaws.internal.SdkInternalList<String> versionLabels;

    /**
     * Sets the name of the application to which the specified source bundles belong.
     *
     * @param applicationName
     *        the application name
     */
    public void setApplicationName(String applicationName) {
        this.applicationName = applicationName;
    }

    /**
     * Returns the name of the application to which the specified source bundles belong.
     *
     * @return the application name
     */
    public String getApplicationName() {
        return applicationName;
    }

    /**
     * Fluent variant of {@link #setApplicationName(String)}.
     *
     * @param applicationName
     *        the application name
     * @return this request, for method chaining
     */
    public ComposeEnvironmentsRequest withApplicationName(String applicationName) {
        setApplicationName(applicationName);
        return this;
    }

    /**
     * Sets the name of the group to which the target environments belong. Specify a group name only if the
     * environment name defined in each target environment's manifest ends with a + (plus) character.
     *
     * @param groupName
     *        the group name
     */
    public void setGroupName(String groupName) {
        this.groupName = groupName;
    }

    /**
     * Returns the name of the group to which the target environments belong.
     *
     * @return the group name
     */
    public String getGroupName() {
        return groupName;
    }

    /**
     * Fluent variant of {@link #setGroupName(String)}.
     *
     * @param groupName
     *        the group name
     * @return this request, for method chaining
     */
    public ComposeEnvironmentsRequest withGroupName(String groupName) {
        setGroupName(groupName);
        return this;
    }

    /**
     * Returns the version labels, lazily initializing the backing list so this accessor never returns
     * {@code null}.
     *
     * @return the version labels of the application source bundles to compose
     */
    public java.util.List<String> getVersionLabels() {
        if (versionLabels == null) {
            versionLabels = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return versionLabels;
    }

    /**
     * Replaces the version labels with a copy of the given collection; {@code null} clears them.
     *
     * @param versionLabels
     *        the version labels, or {@code null}
     */
    public void setVersionLabels(java.util.Collection<String> versionLabels) {
        this.versionLabels = versionLabels == null ? null : new com.amazonaws.internal.SdkInternalList<String>(versionLabels);
    }

    /**
     * Appends the given version labels to the existing list (if any).
     *
     * <p>
     * <b>NOTE:</b> use {@link #setVersionLabels(java.util.Collection)} or
     * {@link #withVersionLabels(java.util.Collection)} if you want to replace the existing values instead.
     *
     * @param versionLabels
     *        the version labels to append
     * @return this request, for method chaining
     */
    public ComposeEnvironmentsRequest withVersionLabels(String... versionLabels) {
        if (this.versionLabels == null) {
            this.versionLabels = new com.amazonaws.internal.SdkInternalList<String>(versionLabels.length);
        }
        for (String label : versionLabels) {
            this.versionLabels.add(label);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setVersionLabels(java.util.Collection)}.
     *
     * @param versionLabels
     *        the version labels, or {@code null}
     * @return this request, for method chaining
     */
    public ComposeEnvironmentsRequest withVersionLabels(java.util.Collection<String> versionLabels) {
        setVersionLabels(versionLabels);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getApplicationName() != null) {
            sb.append("ApplicationName: ").append(getApplicationName()).append(",");
        }
        if (getGroupName() != null) {
            sb.append("GroupName: ").append(getGroupName()).append(",");
        }
        if (getVersionLabels() != null) {
            sb.append("VersionLabels: ").append(getVersionLabels());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ComposeEnvironmentsRequest)) {
            return false;
        }
        ComposeEnvironmentsRequest other = (ComposeEnvironmentsRequest) obj;
        return java.util.Objects.equals(getApplicationName(), other.getApplicationName())
                && java.util.Objects.equals(getGroupName(), other.getGroupName())
                && java.util.Objects.equals(getVersionLabels(), other.getVersionLabels());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (null -> 0) as the generated code.
        return java.util.Objects.hash(getApplicationName(), getGroupName(), getVersionLabels());
    }

    @Override
    public ComposeEnvironmentsRequest clone() {
        return (ComposeEnvironmentsRequest) super.clone();
    }
}
| |
/*
* Copyright 2016-2017 The Sponge authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openksavi.sponge.jruby.core;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.jruby.RubyClass;
import org.jruby.RubyProc;
import org.jruby.RubySymbol;
import org.jruby.embed.EmbedEvalUnit;
import org.jruby.embed.LocalContextScope;
import org.jruby.embed.LocalVariableBehavior;
import org.jruby.embed.ScriptingContainer;
import org.jruby.exceptions.RaiseException;
import org.jruby.javasupport.JavaEmbedUtils;
import org.jruby.runtime.builtin.IRubyObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.openksavi.sponge.SpongeException;
import org.openksavi.sponge.action.ActionBuilder;
import org.openksavi.sponge.core.engine.BaseSpongeEngine;
import org.openksavi.sponge.core.kb.BaseScriptKnowledgeBaseInterpreter;
import org.openksavi.sponge.core.kb.CachedScriptClassInstancePovider;
import org.openksavi.sponge.core.kb.ScriptClassInstanceProvider;
import org.openksavi.sponge.core.plugin.BasePlugin;
import org.openksavi.sponge.core.util.SpongeUtils;
import org.openksavi.sponge.core.util.exception.SpongeExceptionUtils;
import org.openksavi.sponge.engine.SpongeEngine;
import org.openksavi.sponge.jruby.JRubyActionBuilder;
import org.openksavi.sponge.jruby.JRubyRule;
import org.openksavi.sponge.jruby.RubyConstants;
import org.openksavi.sponge.jruby.RubyUtils;
import org.openksavi.sponge.kb.KnowledgeBase;
import org.openksavi.sponge.kb.KnowledgeBaseConstants;
import org.openksavi.sponge.kb.ScriptKnowledgeBaseInterpreter;
import org.openksavi.sponge.plugin.Plugin;
import org.openksavi.sponge.rule.Rule;
/**
* Knowledge base interpreter supporting knowledge base to be defined in the JRuby (Ruby) language.
*/
public class JRubyKnowledgeBaseInterpreter extends BaseScriptKnowledgeBaseInterpreter {

    private static final Logger logger = LoggerFactory.getLogger(JRubyKnowledgeBaseInterpreter.class);

    public static final String PROP_RUBY_PATH = "ruby.path";

    /** The JRuby scripting container; every interaction with the Ruby runtime goes through it. */
    private ScriptingContainer container;

    public JRubyKnowledgeBaseInterpreter(SpongeEngine engine, KnowledgeBase knowledgeBase) {
        super(new JRubyKnowledgeBaseEngineOperations((BaseSpongeEngine) engine, knowledgeBase), RubyConstants.TYPE);
    }

    @Override
    protected void prepareInterpreter() {
        overwriteProcessorClass(Rule.class, JRubyRule.class);
        overwriteProcessorBuilderClass(ActionBuilder.class, JRubyActionBuilder.class);

        // A single-threaded container with persistent local variables keeps script state between scriptlets.
        container = new ScriptingContainer(LocalContextScope.SINGLETHREAD, LocalVariableBehavior.PERSISTENT);
        setLoadPaths(getEngineOperations() != null ? getEngineOperations().getEngine() : null);

        addSpecific();

        // Make the framework classes visible to Ruby code under their (aliased) simple names.
        getSimplifiedImportClasses().forEach((interfaceClass, scriptClass) -> addImport(scriptClass, interfaceClass.getSimpleName()));
        addImport(BasePlugin.class, Plugin.class.getSimpleName());
        getStandardImportClasses().forEach(this::addImport);
        addImport(RubyUtils.class);

        container.put(createVariableName(KnowledgeBaseConstants.VAR_ENGINE_OPERATIONS), getEngineOperations());
        container.setErrorWriter(new JRubyLogErrorWriter());
    }

    /**
     * Clears the interpreter state, terminating and dropping the scripting container.
     */
    @Override
    public void onClear() {
        synchronized (interpteterSynchro) {
            invalidateCache();

            if (container != null) {
                container.terminate();
            }
            container = null;
        }
    }

    /** Ruby-specific setup: enables the Java integration layer in the container. */
    protected void addSpecific() {
        eval("include Java");
    }

    private void setLoadPaths(SpongeEngine engine) {
        if (engine == null) {
            return;
        }

        // The optional ruby.path property lists additional Ruby load paths.
        String rubyPath = getEngineOperations().getProperty(PROP_RUBY_PATH, null);
        if (rubyPath != null) {
            container.setLoadPaths(Arrays.asList(StringUtils.split(rubyPath, PROP_PATH_SEPARATOR)));
        }
    }

    /** Maps a variable name to its Ruby global-variable form (a {@code $} prefix). */
    protected String createVariableName(String name) {
        return "$" + name;
    }

    @Override
    public void setVariable(String name, Object value) {
        try {
            container.put(createVariableName(name), value);
        } catch (Throwable e) {
            throw SpongeUtils.wrapException(this, e);
        }
    }

    @Override
    public boolean hasVariable(String name) {
        try {
            return container.getProvider().getRuntime().getGlobalVariables().isDefined(createVariableName(name));
        } catch (Throwable e) {
            throw SpongeUtils.wrapException(this, e);
        }
    }

    @Override
    public Object getVariable(String name) {
        if (!hasVariable(name)) {
            throw new SpongeException("Variable '" + name + "' not found");
        }

        try {
            return container.get(createVariableName(name));
        } catch (Throwable e) {
            throw SpongeUtils.wrapException("getVariable", this, e);
        }
    }

    /**
     * Adds import from the package.
     *
     * @param clazz class to be imported.
     */
    protected void addImport(Class<?> clazz) {
        eval("java_import " + clazz.getName());
    }

    /** Imports the given class and additionally binds it to a Ruby constant with the given alias. */
    protected void addImport(Class<?> clazz, String alias) {
        eval("java_import " + clazz.getName());
        eval(alias + " = " + clazz.getName());
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T> T invokeOptionalFunction(String name, T defaultValue) {
        // A Ruby defined?() probe returning nil means the function doesn't exist.
        boolean exists = eval("defined?(" + name + ")") != null;
        if (!exists) {
            return defaultValue;
        }

        try {
            return (T) container.callMethod(null, name, Object.class);
        } catch (Throwable e) {
            throw SpongeUtils.wrapException(name, this, e);
        }
    }

    @Override
    public <T> T invokeFunction(String name, Class<T> cls, Object... args) {
        try {
            return container.callMethod(null, name, args, cls);
        } catch (Throwable e) {
            throw SpongeUtils.wrapException(name, this, e);
        }
    }

    @Override
    public boolean isKnowledgeBaseException(Throwable exception) {
        return SpongeExceptionUtils.containsException(exception, RaiseException.class);
    }

    @Override
    public Throwable getJavaException(Throwable knowledgeBaseException) {
        return SpongeExceptionUtils.getException(knowledgeBaseException, RaiseException.class);
    }

    /**
     * Returns {@code null} if not script-based processor.
     */
    @Override
    public String getScriptKnowledgeBaseProcessorClassName(Object processorClass) {
        return processorClass instanceof RubyClass ? ((RubyClass) processorClass).getName() : null;
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T> T eval(String expression) {
        try {
            return (T) container.runScriptlet(expression);
        } catch (Throwable e) {
            throw SpongeUtils.wrapException("eval", this, e);
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T> T eval(Reader reader, String filename) {
        try {
            return (T) container.runScriptlet(reader, filename);
        } catch (Throwable e) {
            throw SpongeUtils.wrapException(filename, this, e);
        }
    }

    @Override
    public Object invokeMethod(Object target, String name, Object... args) {
        try {
            // The target must be converted to its Ruby representation; the cast to Object[] is
            // important so the args are passed as separate arguments rather than a single array.
            IRubyObject rubyTarget = JavaEmbedUtils.javaToRuby(container.getProvider().getRuntime(), target);
            return container.callMethod(rubyTarget, name, (Object[]) args);
        } catch (Throwable e) {
            throw SpongeUtils.wrapException(target + "." + name, this, e);
        }
    }

    @Override
    protected ScriptKnowledgeBaseInterpreter createInterpreterInstance(SpongeEngine engine, KnowledgeBase knowledgeBase) {
        return new JRubyKnowledgeBaseInterpreter(engine, knowledgeBase);
    }

    public ScriptingContainer getScriptContainer() {
        return container;
    }

    /** Invokes the given Ruby proc with the provided arguments. */
    public Object callRubyProc(RubyProc proc, Object... args) {
        return container.callMethod(proc, "call", args);
    }

    @SuppressWarnings({ "unchecked", "rawtypes" })
    @Override
    public void scanToAutoEnable() {
        Object evalResult = eval("Module.constants");
        if (!(evalResult instanceof Collection)) {
            return;
        }

        // Resolve the Ruby counterparts of the known processor base classes.
        List processorRubyTypes = getProcessorClasses().values().stream().map(processorClass -> eval(processorClass.getSimpleName()))
                .collect(Collectors.toList());

        List<Object> autoEnabled = new ArrayList<>();

        for (Object element : (Collection<?>) evalResult) {
            if (element == null) {
                continue;
            }

            String symbolString = ((RubySymbol) element).asJavaString();

            Object symbol = null;
            try {
                symbol = eval(symbolString);
            } catch (Throwable e) {
                logger.debug("JRuby eval(" + symbolString + ") exception", e);
            }

            if (!(symbol instanceof RubyClass)) {
                continue;
            }

            RubyClass rubyClass = (RubyClass) symbol;
            // Java-based processor classes (that have getJavaProxy) are not auto-enabled.
            if (!processorRubyTypes.contains(rubyClass) && CollectionUtils.containsAny(rubyClass.getAncestorList(), processorRubyTypes)
                    && !rubyClass.getJavaProxy() && !isProcessorAbstract(rubyClass.getName())) {
                autoEnabled.add(rubyClass);
                ((JRubyKnowledgeBaseEngineOperations) getEngineOperations()).enable(rubyClass);
            }
        }

        if (logger.isDebugEnabled() && !autoEnabled.isEmpty()) {
            logger.debug("Auto-enabling: {}", autoEnabled);
        }
    }

    /** Creates a cached provider that instantiates Ruby classes via the {@code <ClassName>.new} expression. */
    @SuppressWarnings("unchecked")
    @Override
    protected <T> ScriptClassInstanceProvider<T> createScriptClassInstancePovider() {
        return new CachedScriptClassInstancePovider<EmbedEvalUnit, T>(getEngineOperations().getEngine(),
                (expression) -> container.parse(expression), "%s.new", (script, javaClass) -> (T) script.run().toJava(javaClass));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.scheduler.adaptive;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.core.execution.SavepointFormatType;
import org.apache.flink.queryablestate.KvStateID;
import org.apache.flink.runtime.accumulators.AccumulatorSnapshot;
import org.apache.flink.runtime.checkpoint.CheckpointCoordinator;
import org.apache.flink.runtime.checkpoint.CheckpointMetrics;
import org.apache.flink.runtime.checkpoint.CompletedCheckpoint;
import org.apache.flink.runtime.checkpoint.TaskStateSnapshot;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.AccessExecution;
import org.apache.flink.runtime.executiongraph.ArchivedExecutionGraph;
import org.apache.flink.runtime.executiongraph.ErrorInfo;
import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
import org.apache.flink.runtime.executiongraph.ExecutionGraph;
import org.apache.flink.runtime.executiongraph.TaskExecutionStateTransition;
import org.apache.flink.runtime.io.network.partition.ResultPartitionID;
import org.apache.flink.runtime.jobgraph.IntermediateDataSetID;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.jobmanager.PartitionProducerDisposedException;
import org.apache.flink.runtime.jobmaster.SerializedInputSplit;
import org.apache.flink.runtime.messages.FlinkJobNotFoundException;
import org.apache.flink.runtime.messages.checkpoint.DeclineCheckpoint;
import org.apache.flink.runtime.operators.coordination.CoordinationRequest;
import org.apache.flink.runtime.operators.coordination.CoordinationResponse;
import org.apache.flink.runtime.operators.coordination.OperatorEvent;
import org.apache.flink.runtime.query.KvStateLocation;
import org.apache.flink.runtime.query.UnknownKvStateLocation;
import org.apache.flink.runtime.scheduler.ExecutionGraphHandler;
import org.apache.flink.runtime.scheduler.KvStateHandler;
import org.apache.flink.runtime.scheduler.OperatorCoordinatorHandler;
import org.apache.flink.runtime.scheduler.exceptionhistory.ExceptionHistoryEntry;
import org.apache.flink.runtime.scheduler.exceptionhistory.RootExceptionHistoryEntry;
import org.apache.flink.runtime.scheduler.stopwithsavepoint.StopWithSavepointTerminationManager;
import org.apache.flink.runtime.state.KeyGroupRange;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.concurrent.FutureUtils;
import org.slf4j.Logger;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.Executor;
/**
* Abstract state class which contains an {@link ExecutionGraph} and the required handlers to
* execute common operations.
*/
abstract class StateWithExecutionGraph implements State {

    // Callbacks into the owning scheduler: state checks/transitions, failure archiving,
    // and the main thread executor (see the constructor and triggerSavepoint).
    private final Context context;

    private final ExecutionGraph executionGraph;

    // Serves execution-graph requests: input splits, partition state, checkpoint ack/decline.
    private final ExecutionGraphHandler executionGraphHandler;

    private final OperatorCoordinatorHandler operatorCoordinatorHandler;

    // Serves queryable-state (KvState) lookups and (un)registrations for this graph.
    private final KvStateHandler kvStateHandler;

    private final Logger logger;

    // Used to resolve task failure causes in updateTaskExecutionState.
    private final ClassLoader userCodeClassLoader;

    // Failures collected while in this state; converted/archived once the job reaches a
    // globally terminal state (see the constructor and convertFailures).
    private final List<ExceptionHistoryEntry> failureCollection;
StateWithExecutionGraph(
        Context context,
        ExecutionGraph executionGraph,
        ExecutionGraphHandler executionGraphHandler,
        OperatorCoordinatorHandler operatorCoordinatorHandler,
        Logger logger,
        ClassLoader userClassCodeLoader,
        List<ExceptionHistoryEntry> failureCollection) {
    this.context = context;
    this.executionGraph = executionGraph;
    this.executionGraphHandler = executionGraphHandler;
    this.operatorCoordinatorHandler = operatorCoordinatorHandler;
    this.kvStateHandler = new KvStateHandler(executionGraph);
    this.logger = logger;
    this.userCodeClassLoader = userClassCodeLoader;
    // defensive copy: more entries may be added via handleGlobalFailure/updateTaskExecutionState
    this.failureCollection = new ArrayList<>(failureCollection);

    // When the execution graph terminates globally, archive the collected failures and let the
    // concrete state react. The callback runs on the main thread executor and is guarded by
    // context.runIfState(this, ...) — presumably a no-op once this state is no longer active.
    FutureUtils.assertNoException(
            executionGraph
                    .getTerminationFuture()
                    .thenAcceptAsync(
                            jobStatus -> {
                                if (jobStatus.isGloballyTerminalState()) {
                                    context.runIfState(
                                            this,
                                            () -> {
                                                convertFailures(this.failureCollection)
                                                        .ifPresent(context::archiveFailure);
                                                onGloballyTerminalState(jobStatus);
                                            });
                                }
                            },
                            context.getMainThreadExecutor()));
}
/** Returns the execution graph this state operates on. */
ExecutionGraph getExecutionGraph() {
    return executionGraph;
}

/** Returns the ID of the job represented by the execution graph. */
JobID getJobId() {
    return executionGraph.getJobID();
}

protected OperatorCoordinatorHandler getOperatorCoordinatorHandler() {
    return operatorCoordinatorHandler;
}

protected ExecutionGraphHandler getExecutionGraphHandler() {
    return executionGraphHandler;
}

@Override
public void onLeave(Class<? extends State> newState) {
    // Only clean up when transitioning to a state that is NOT itself a
    // StateWithExecutionGraph; a sibling state keeps using the coordinators.
    if (!StateWithExecutionGraph.class.isAssignableFrom(newState)) {
        // we are leaving the StateWithExecutionGraph --> we need to dispose temporary services
        operatorCoordinatorHandler.disposeAllOperatorCoordinators();
    }
}
@Override
public ArchivedExecutionGraph getJob() {
    return ArchivedExecutionGraph.createFrom(executionGraph, getJobStatus());
}

@Override
public void suspend(Throwable cause) {
    executionGraph.suspend(cause);
    // ExecutionGraph.suspend is expected to leave the graph in a terminal state
    // synchronously (asserted here) before we transition to Finished.
    Preconditions.checkState(executionGraph.getState().isTerminalState());
    context.goToFinished(ArchivedExecutionGraph.createFrom(executionGraph));
}

@Override
public Logger getLogger() {
    return logger;
}

/** Forwards partition-data availability notifications to the execution graph. */
void notifyPartitionDataAvailable(ResultPartitionID partitionID) {
    executionGraph.notifyPartitionDataAvailable(partitionID);
}
/** Delegates input-split requests to the {@link ExecutionGraphHandler}. */
SerializedInputSplit requestNextInputSplit(
        JobVertexID vertexID, ExecutionAttemptID executionAttempt) throws IOException {
    return executionGraphHandler.requestNextInputSplit(vertexID, executionAttempt);
}

/** Delegates partition-state requests to the {@link ExecutionGraphHandler}. */
ExecutionState requestPartitionState(
        IntermediateDataSetID intermediateResultId, ResultPartitionID resultPartitionId)
        throws PartitionProducerDisposedException {
    return executionGraphHandler.requestPartitionState(intermediateResultId, resultPartitionId);
}

/** Delegates checkpoint acknowledgements to the {@link ExecutionGraphHandler}. */
void acknowledgeCheckpoint(
        JobID jobID,
        ExecutionAttemptID executionAttemptID,
        long checkpointId,
        CheckpointMetrics checkpointMetrics,
        TaskStateSnapshot checkpointState) {
    executionGraphHandler.acknowledgeCheckpoint(
            jobID, executionAttemptID, checkpointId, checkpointMetrics, checkpointState);
}

/** Delegates checkpoint declines to the {@link ExecutionGraphHandler}. */
void declineCheckpoint(DeclineCheckpoint decline) {
    executionGraphHandler.declineCheckpoint(decline);
}

/** Delegates checkpoint metric reports to the {@link ExecutionGraphHandler}. */
void reportCheckpointMetrics(
        ExecutionAttemptID executionAttemptID,
        long checkpointId,
        CheckpointMetrics checkpointMetrics) {
    executionGraphHandler.reportCheckpointMetrics(
            executionAttemptID, checkpointId, checkpointMetrics);
}

/** Applies an accumulator snapshot to the execution graph. */
void updateAccumulators(AccumulatorSnapshot accumulatorSnapshot) {
    executionGraph.updateAccumulators(accumulatorSnapshot);
}
/** Delegates queryable-state location lookups to the {@link KvStateHandler}. */
KvStateLocation requestKvStateLocation(JobID jobId, String registrationName)
        throws FlinkJobNotFoundException, UnknownKvStateLocation {
    return kvStateHandler.requestKvStateLocation(jobId, registrationName);
}

/** Delegates KvState registration notifications to the {@link KvStateHandler}. */
void notifyKvStateRegistered(
        JobID jobId,
        JobVertexID jobVertexId,
        KeyGroupRange keyGroupRange,
        String registrationName,
        KvStateID kvStateId,
        InetSocketAddress kvStateServerAddress)
        throws FlinkJobNotFoundException {
    kvStateHandler.notifyKvStateRegistered(
            jobId,
            jobVertexId,
            keyGroupRange,
            registrationName,
            kvStateId,
            kvStateServerAddress);
}

/** Delegates KvState unregistration notifications to the {@link KvStateHandler}. */
void notifyKvStateUnregistered(
        JobID jobId,
        JobVertexID jobVertexId,
        KeyGroupRange keyGroupRange,
        String registrationName)
        throws FlinkJobNotFoundException {
    kvStateHandler.notifyKvStateUnregistered(
            jobId, jobVertexId, keyGroupRange, registrationName);
}
/**
 * Triggers a savepoint, optionally cancelling the job once the savepoint has completed.
 *
 * <p>When {@code cancelJob} is set, periodic checkpointing is stopped before the savepoint is
 * taken; if the savepoint then fails while this state is still active, the scheduler is
 * restarted so regular checkpointing resumes.
 */
CompletableFuture<String> triggerSavepoint(
        String targetDirectory, boolean cancelJob, SavepointFormatType formatType) {
    final CheckpointCoordinator checkpointCoordinator =
            executionGraph.getCheckpointCoordinator();
    // validates coordinator/targetDirectory preconditions — presumably throws when unmet
    StopWithSavepointTerminationManager.checkSavepointActionPreconditions(
            checkpointCoordinator, targetDirectory, getJobId(), logger);

    logger.info(
            "Triggering {}savepoint for job {}.",
            cancelJob ? "cancel-with-" : "",
            executionGraph.getJobID());

    if (cancelJob) {
        // avoid regular checkpoints racing with the savepoint/cancellation
        checkpointCoordinator.stopCheckpointScheduler();
    }

    return checkpointCoordinator
            .triggerSavepoint(targetDirectory, formatType)
            .thenApply(CompletedCheckpoint::getExternalPointer)
            .handleAsync(
                    (path, throwable) -> {
                        if (throwable != null) {
                            // savepoint failed: re-enable periodic checkpointing, but only
                            // if this state is still the active one
                            if (cancelJob && context.isState(this)) {
                                startCheckpointScheduler(checkpointCoordinator);
                            }
                            throw new CompletionException(throwable);
                        } else if (cancelJob && context.isState(this)) {
                            logger.info(
                                    "Savepoint stored in {}. Now cancelling {}.",
                                    path,
                                    executionGraph.getJobID());
                            cancel();
                        }
                        return path;
                    },
                    context.getMainThreadExecutor());
}
/**
 * Manually triggers a checkpoint for the job.
 *
 * @return future completing with the checkpoint's external pointer
 * @throws IllegalStateException if the job has no checkpoint coordinator (not a streaming job)
 */
CompletableFuture<String> triggerCheckpoint() {
    final JobID jobID = executionGraph.getJobID();
    final CheckpointCoordinator coordinator = executionGraph.getCheckpointCoordinator();
    if (coordinator == null) {
        throw new IllegalStateException(String.format("Job %s is not a streaming job.", jobID));
    }

    logger.info("Triggering a checkpoint for job {}.", jobID);

    return coordinator
            .triggerCheckpoint(false)
            .thenApply(CompletedCheckpoint::getExternalPointer)
            .handleAsync(
                    (path, failure) -> {
                        if (failure != null) {
                            throw new CompletionException(failure);
                        }
                        return path;
                    },
                    context.getMainThreadExecutor());
}
/** Restarts the periodic checkpoint scheduler, if periodic checkpointing is configured at all. */
private void startCheckpointScheduler(final CheckpointCoordinator checkpointCoordinator) {
    if (!checkpointCoordinator.isPeriodicCheckpointingConfigured()) {
        return;
    }
    try {
        checkpointCoordinator.startCheckpointScheduler();
    } catch (IllegalStateException ignored) {
        // the coordinator is being shut down concurrently — nothing to start
    }
}
/** Forwards an operator event from a task to its operator coordinator. */
void deliverOperatorEventToCoordinator(
        ExecutionAttemptID taskExecutionId, OperatorID operatorId, OperatorEvent evt)
        throws FlinkException {
    operatorCoordinatorHandler.deliverOperatorEventToCoordinator(
            taskExecutionId, operatorId, evt);
}

/** Forwards a coordination request to the addressed operator coordinator. */
CompletableFuture<CoordinationResponse> deliverCoordinationRequestToCoordinator(
        OperatorID operatorId, CoordinationRequest request) throws FlinkException {
    return operatorCoordinatorHandler.deliverCoordinationRequestToCoordinator(
            operatorId, request);
}
/** Transition to different state when failure occurs. Stays in the same state by default. */
abstract void onFailure(Throwable cause);
/**
* Transition to different state when the execution graph reaches a globally terminal state.
*
* @param globallyTerminalState globally terminal state which the execution graph reached
*/
abstract void onGloballyTerminalState(JobStatus globallyTerminalState);
/**
 * Records the global failure in the local exception history and lets the concrete state
 * react to it via {@link #onFailure(Throwable)}.
 */
@Override
public void handleGlobalFailure(Throwable cause) {
    failureCollection.add(ExceptionHistoryEntry.createGlobal(cause));
    onFailure(cause);
}
/**
 * Updates the execution graph with the given task execution state transition.
 *
 * @param taskExecutionStateTransition taskExecutionStateTransition to update the ExecutionGraph
 *     with
 * @return {@code true} if the update was successful; otherwise {@code false}
 */
boolean updateTaskExecutionState(TaskExecutionStateTransition taskExecutionStateTransition) {
    // collect before updateState, as updateState may deregister the execution
    final Optional<AccessExecution> maybeExecution =
            executionGraph.findExecution(taskExecutionStateTransition.getID());
    final Optional<String> maybeTaskName =
            executionGraph.findVertexWithAttempt(taskExecutionStateTransition.getID());
    final ExecutionState desiredState = taskExecutionStateTransition.getExecutionState();
    boolean successfulUpdate = getExecutionGraph().updateState(taskExecutionStateTransition);
    if (successfulUpdate && desiredState == ExecutionState.FAILED) {
        // The transition targeted FAILED and was applied; both lookups must have succeeded
        // before the update, otherwise the transition id was unknown.
        final AccessExecution execution =
                maybeExecution.orElseThrow(NoSuchElementException::new);
        final String taskName = maybeTaskName.orElseThrow(NoSuchElementException::new);
        final ExecutionState currentState = execution.getState();
        // Only record the failure if the execution actually ended up in the desired state.
        if (currentState == desiredState) {
            failureCollection.add(ExceptionHistoryEntry.create(execution, taskName));
            onFailure(
                    ErrorInfo.handleMissingThrowable(
                            taskExecutionStateTransition.getError(userCodeClassLoader)));
        }
    }
    return successfulUpdate;
}
/**
 * Returns the failures collected while in this state. Note: this exposes the internal
 * mutable list, not a copy.
 */
List<ExceptionHistoryEntry> getFailures() {
    return failureCollection;
}
/**
 * Converts the collected failures into a single root exception history entry.
 *
 * <p>The first collected entry becomes the root; the rest of the (mutated) list is attached
 * to it. Returns {@link Optional#empty()} when nothing was collected.
 *
 * @param failureCollection collected failures; the first element is removed from this list
 * @return the root entry, or empty if no failures were collected
 */
private static Optional<RootExceptionHistoryEntry> convertFailures(
        List<ExceptionHistoryEntry> failureCollection) {
    if (!failureCollection.isEmpty()) {
        final ExceptionHistoryEntry root = failureCollection.remove(0);
        return Optional.of(
                RootExceptionHistoryEntry.fromExceptionHistoryEntry(root, failureCollection));
    }
    return Optional.empty();
}
/** Context of the {@link StateWithExecutionGraph} state. */
interface Context extends StateTransitions.ToFinished {
    /**
     * Run the given action if the current state equals the expected state.
     *
     * @param expectedState the expected state
     * @param action action to run if the current state equals the expected state
     */
    void runIfState(State expectedState, Runnable action);
    /**
     * Checks whether the current state is the expected state.
     *
     * @param expectedState the expected state
     * @return {@code true} if the current state equals the expected state; otherwise {@code
     *     false}
     */
    boolean isState(State expectedState);
    /**
     * Gets the main thread executor.
     *
     * @return the main thread executor
     */
    Executor getMainThreadExecutor();
    /** Archives the given failure as the root entry of the job's exception history. */
    void archiveFailure(RootExceptionHistoryEntry failure);
}
}
| |
/**
* Copyright 2016 Pinterest, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pinterest.deployservice.db;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import com.pinterest.deployservice.bean.AcceptanceStatus;
import com.pinterest.deployservice.bean.AcceptanceType;
import com.pinterest.deployservice.bean.AgentBean;
import com.pinterest.deployservice.bean.AgentErrorBean;
import com.pinterest.deployservice.bean.AgentState;
import com.pinterest.deployservice.bean.AgentStatus;
import com.pinterest.deployservice.bean.BuildBean;
import com.pinterest.deployservice.bean.ConfigHistoryBean;
import com.pinterest.deployservice.bean.DataBean;
import com.pinterest.deployservice.bean.DeployBean;
import com.pinterest.deployservice.bean.DeployFilterBean;
import com.pinterest.deployservice.bean.DeployPriority;
import com.pinterest.deployservice.bean.DeployQueryResultBean;
import com.pinterest.deployservice.bean.DeployStage;
import com.pinterest.deployservice.bean.DeployState;
import com.pinterest.deployservice.bean.DeployType;
import com.pinterest.deployservice.bean.EnvState;
import com.pinterest.deployservice.bean.EnvironBean;
import com.pinterest.deployservice.bean.EnvironState;
import com.pinterest.deployservice.bean.GroupRolesBean;
import com.pinterest.deployservice.bean.HostBean;
import com.pinterest.deployservice.bean.HostState;
import com.pinterest.deployservice.bean.OverridePolicy;
import com.pinterest.deployservice.bean.PromoteBean;
import com.pinterest.deployservice.bean.PromoteType;
import com.pinterest.deployservice.bean.RatingBean;
import com.pinterest.deployservice.bean.Resource;
import com.pinterest.deployservice.bean.Role;
import com.pinterest.deployservice.bean.ScheduleBean;
import com.pinterest.deployservice.bean.ScheduleState;
import com.pinterest.deployservice.bean.TagBean;
import com.pinterest.deployservice.bean.TagTargetType;
import com.pinterest.deployservice.bean.TagValue;
import com.pinterest.deployservice.bean.TokenRolesBean;
import com.pinterest.deployservice.bean.UserRolesBean;
import com.pinterest.deployservice.common.CommonUtils;
import com.pinterest.deployservice.common.Constants;
import com.pinterest.deployservice.dao.AgentDAO;
import com.pinterest.deployservice.dao.AgentErrorDAO;
import com.pinterest.deployservice.dao.BuildDAO;
import com.pinterest.deployservice.dao.ConfigHistoryDAO;
import com.pinterest.deployservice.dao.DataDAO;
import com.pinterest.deployservice.dao.DeployDAO;
import com.pinterest.deployservice.dao.EnvironDAO;
import com.pinterest.deployservice.dao.GroupDAO;
import com.pinterest.deployservice.dao.GroupRolesDAO;
import com.pinterest.deployservice.dao.HostDAO;
import com.pinterest.deployservice.dao.PromoteDAO;
import com.pinterest.deployservice.dao.RatingDAO;
import com.pinterest.deployservice.dao.ScheduleDAO;
import com.pinterest.deployservice.dao.TagDAO;
import com.pinterest.deployservice.dao.TokenRolesDAO;
import com.pinterest.deployservice.dao.UserRolesDAO;
import com.ibatis.common.jdbc.ScriptRunner;
import com.mysql.management.driverlaunched.ServerLauncherSocketFactory;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.joda.time.Interval;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class DBDAOTest {
// Location, schema name and port of the embedded MySQL instance used by these tests.
private final static String DEFAULT_BASE_DIR = "/tmp/deploy-unit-test";
private final static String DEFAULT_DB_NAME = "deploy";
private final static int DEFAULT_PORT = 3303;
// DAOs under test, all backed by the same embedded database (initialized in setUpClass).
private static BuildDAO buildDAO;
private static AgentDAO agentDAO;
private static AgentErrorDAO agentErrorDAO;
private static DataDAO dataDAO;
private static DeployDAO deployDAO;
private static EnvironDAO environDAO;
private static PromoteDAO promoteDAO;
private static HostDAO hostDAO;
private static GroupDAO groupDAO;
private static RatingDAO ratingDAO;
private static UserRolesDAO userRolesDAO;
private static TokenRolesDAO tokenRolesDAO;
private static GroupRolesDAO groupRolesDAO;
private static ConfigHistoryDAO configHistoryDAO;
private static TagDAO tagDAO;
private static ScheduleDAO scheduleDAO;
/**
 * Boots an embedded MySQL instance, creates the schema from the bundled SQL scripts, and
 * wires every DAO implementation against the shared data source.
 *
 * <p>Fix: the bootstrap {@link Connection} was previously never closed (a resource leak);
 * it is now managed with try-with-resources. The local data source variable was also
 * renamed to follow lowerCamelCase naming for locals.
 *
 * @throws Exception if the embedded database cannot be started or the schema scripts fail
 */
@BeforeClass
public static void setUpClass() throws Exception {
    try {
        // making sure we do not have anything running
        ServerLauncherSocketFactory.shutdown(new File(DEFAULT_BASE_DIR), null);
    } catch (Exception e) {
        // ignore: no previous instance was running
    }
    BasicDataSource dataSource = DatabaseUtil.createMXJDataSource(DEFAULT_DB_NAME,
        DEFAULT_BASE_DIR, DEFAULT_PORT);
    // Close the bootstrap connection once the schema has been created.
    try (Connection conn = dataSource.getConnection()) {
        ScriptRunner runner = new ScriptRunner(conn, false, true);
        runner.runScript(new BufferedReader(new InputStreamReader(
            DBDAOTest.class.getResourceAsStream("/sql/cleanup.sql"))));
        runner.runScript(new BufferedReader(new InputStreamReader(
            DBDAOTest.class.getResourceAsStream("/sql/deploy.sql"))));
    }
    buildDAO = new DBBuildDAOImpl(dataSource);
    agentDAO = new DBAgentDAOImpl(dataSource);
    agentErrorDAO = new DBAgentErrorDAOImpl(dataSource);
    dataDAO = new DBDataDAOImpl(dataSource);
    deployDAO = new DBDeployDAOImpl(dataSource);
    environDAO = new DBEnvironDAOImpl(dataSource);
    promoteDAO = new DBPromoteDAOImpl(dataSource);
    hostDAO = new DBHostDAOImpl(dataSource);
    groupDAO = new DBGroupDAOImpl(dataSource);
    ratingDAO = new DBRatingsDAOImpl(dataSource);
    userRolesDAO = new DBUserRolesDAOImpl(dataSource);
    groupRolesDAO = new DBGroupRolesDAOImpl(dataSource);
    tokenRolesDAO = new DBTokenRolesDAOImpl(dataSource);
    configHistoryDAO = new DBConfigHistoryDAOImpl(dataSource);
    tagDAO = new DBTagDAOImpl(dataSource);
    scheduleDAO = new DBScheduleDAOImpl(dataSource);
}
/** Shuts down the embedded MySQL instance started in {@link #setUpClass()}. */
@AfterClass
public static void tearDownClass() throws Exception {
    ServerLauncherSocketFactory.shutdown(new File(DEFAULT_BASE_DIR), null);
}
/**
 * End-to-end exercise of DeployDAO query paths: insert builds/deploys, update a deploy,
 * then query by build filter, env filter (with paging/truncation), ongoing deploy states,
 * and accepted deploys, cleaning up at the end.
 */
@Test
public void testDeploymentQueries() throws Exception {
    long now = System.currentTimeMillis();
    // Builds: three for commit ccc-1 on service s-1, one for ccc-2 on s-2.
    BuildBean buildBean1 =
        genDefaultBuildInfoBean("bbb-1", "s-1", "ccc-1", "r-1", now);
    BuildBean buildBean2 =
        genDefaultBuildInfoBean("bbb-2", "s-1", "ccc-1", "r-1", now + 1000);
    BuildBean buildBean3 =
        genDefaultBuildInfoBean("bbb-3", "s-1", "ccc-1", "r-1", now + 2000);
    BuildBean buildBean4 =
        genDefaultBuildInfoBean("bbb-4", "s-2", "ccc-2", "r-1", now + 3000);
    buildDAO.insert(buildBean1);
    buildDAO.insert(buildBean2);
    buildDAO.insert(buildBean3);
    buildDAO.insert(buildBean4);
    DeployBean deployBean1 =
        genDefaultDeployBean("d-1", "env-1", "bbb-1", now, DeployState.SUCCEEDED);
    DeployBean deployBean2 =
        genDefaultDeployBean("d-2", "env-1", "bbb-1", now + 1000, DeployState.SUCCEEDED);
    DeployBean deployBean3 =
        genDefaultDeployBean("d-3", "env-1", "bbb-1", now + 2000, DeployState.RUNNING);
    DeployBean deployBean4 =
        genDefaultDeployBean("d-4", "env-2", "bbb-2", now, DeployState.FAILING);
    // just so we have the build
    BuildBean buildBeanx =
        genDefaultBuildInfoBean("d-x", "s-1", "ccc-x", "r-1", now);
    buildDAO.insert(buildBeanx);
    // NOTE(review): d-5 references build "bcc-x" but the build inserted above has id
    // "d-x" -- looks like a mismatch; verify whether this is intentional.
    DeployBean deployBean5 =
        genDefaultDeployBean("d-5", "env-3", "bcc-x", now, DeployState.SUCCEEDING);
    DeployBean deployBean6 =
        genDefaultDeployBean("d-6", "env-2", "bbb-4", now, DeployState.SUCCEEDED);
    deployDAO.insert(deployBean1);
    deployDAO.insert(deployBean2);
    deployDAO.insert(deployBean3);
    deployDAO.insert(deployBean4);
    deployDAO.insert(deployBean5);
    deployDAO.insert(deployBean6);
    // Partial update of d-1: only the operator column changes.
    deployBean1.setOperator("bar");
    DeployBean updateBean = new DeployBean();
    updateBean.setOperator("bar");
    deployDAO.update("d-1", updateBean);
    assertTrue(EqualsBuilder.reflectionEquals(deployBean1, deployDAO.getById("d-1")));
    DeployQueryResultBean queryResult;
    // Filter by commit ccc-1: all 5 deploys of builds on that commit match.
    DeployFilterBean buildFilter = new DeployFilterBean();
    buildFilter.setCommit("ccc-1");
    buildFilter.setRepo("repo-1");
    buildFilter.setBranch("branch-1");
    buildFilter.setCommitDate(now);
    buildFilter.setOldestFirst(true);
    buildFilter.setPageIndex(1);
    buildFilter.setPageSize(10);
    DeployQueryFilter buildFilterBean = new DeployQueryFilter(buildFilter);
    queryResult = deployDAO.getAllDeploys(buildFilterBean);
    assertEquals(queryResult.getTotal().longValue(), 5L);
    assertFalse(queryResult.isTruncated());
    assertEquals(queryResult.getDeploys().size(), 5);
    // Filter by commit ccc-2: only d-6 (build bbb-4) matches.
    buildFilter.setCommit("ccc-2");
    buildFilter.setCommitDate(now + 3000);
    buildFilterBean = new DeployQueryFilter(buildFilter);
    queryResult = deployDAO.getAllDeploys(buildFilterBean);
    assertEquals(queryResult.getTotal().longValue(), 1);
    assertFalse(queryResult.isTruncated());
    assertEquals(queryResult.getDeploys().size(), 1);
    DeployBean deployBean = queryResult.getDeploys().get(0);
    assertEquals("d-6", deployBean.getDeploy_id());
    assertEquals("bbb-4", deployBean.getBuild_id());
    // Env filter on env-1 within [now, now + 2000]: 3 matches, page size 2 => truncated.
    DeployFilterBean envFilter = new DeployFilterBean();
    envFilter.setEnvIds(Arrays.asList("env-1"));
    buildFilter.setCommitDate(now);
    envFilter.setBefore(now + 2000);
    envFilter.setAfter(now);
    envFilter.setPageIndex(1);
    envFilter.setPageSize(2);
    DeployQueryFilter envFilterBean1 = new DeployQueryFilter(envFilter);
    queryResult = deployDAO.getAllDeploys(envFilterBean1);
    assertEquals(queryResult.getTotal().longValue(), 3);
    assertTrue(queryResult.isTruncated());
    assertEquals(queryResult.getDeploys().size(), 2);
    // Page 2 holds the single remaining deploy (d-1).
    envFilter.setPageIndex(2);
    envFilter.setPageSize(2);
    DeployQueryFilter envFilterBean2 = new DeployQueryFilter(envFilter);
    queryResult = deployDAO.getAllDeploys(envFilterBean2);
    assertEquals(queryResult.getTotal().longValue(), 3);
    assertFalse(queryResult.isTruncated());
    assertEquals(queryResult.getDeploys().size(), 1);
    assertTrue(EqualsBuilder.reflectionEquals(queryResult.getDeploys().get(0), deployBean1));
    // Page 3 is past the end: empty result.
    envFilter.setPageIndex(3);
    envFilter.setPageSize(2);
    DeployQueryFilter envFilterBean3 = new DeployQueryFilter(envFilter);
    queryResult = deployDAO.getAllDeploys(envFilterBean3);
    assertEquals(queryResult.getTotal().longValue(), 3);
    assertFalse(queryResult.isTruncated());
    assertEquals(queryResult.getDeploys().size(), 0);
    // Narrower time window [now, now + 1000]: only d-1 and d-2 remain.
    DeployFilterBean envFilter2 = new DeployFilterBean();
    envFilter2.setEnvIds(Arrays.asList("env-1"));
    envFilter2.setBefore(now + 1000);
    envFilter2.setAfter(now);
    envFilter2.setPageIndex(1);
    envFilter2.setPageSize(10);
    DeployQueryFilter envFilterBean4 = new DeployQueryFilter(envFilter2);
    queryResult = deployDAO.getAllDeploys(envFilterBean4);
    assertEquals(queryResult.getTotal().longValue(), 2);
    assertFalse(queryResult.isTruncated());
    assertEquals(queryResult.getDeploys().size(), 2);
    // No env filter, just a time lower bound: all 6 deploys, paged.
    DeployFilterBean filter = new DeployFilterBean();
    filter.setAfter(now - 1000);
    filter.setPageIndex(2);
    filter.setPageSize(2);
    DeployQueryFilter filterBean = new DeployQueryFilter(filter);
    DeployQueryResultBean resultBean = deployDAO.getAllDeploys(filterBean);
    assertEquals(resultBean.getDeploys().size(), 2);
    assertEquals(resultBean.getTotal().longValue(), 6);
    EnvironBean envBean1 = genDefaultEnvBean("env-1", "s-1", "prod", "d-3");
    EnvironBean envBean2 = genDefaultEnvBean("env-2", "s-2", "prod", "d-4");
    EnvironBean envBean3 = genDefaultEnvBean("env-3", "s-3", "prod", "d-5");
    environDAO.insert(envBean1);
    environDAO.insert(envBean2);
    environDAO.insert(envBean3);
    // Ongoing = RUNNING (d-3) or FAILING (d-4).
    DeployFilterBean ongoingFilter = new DeployFilterBean();
    ongoingFilter.setDeployStates(Arrays.asList(DeployState.RUNNING, DeployState.FAILING));
    DeployQueryFilter ongoingFilterBean = new DeployQueryFilter(ongoingFilter);
    DeployQueryResultBean ongoingResultBean = deployDAO.getAllDeploys(ongoingFilterBean);
    assertEquals(ongoingResultBean.getDeploys().size(), 2);
    // Mark d-5 accepted and verify it is returned for env-3.
    deployBean5.setAcc_status(AcceptanceStatus.ACCEPTED);
    deployDAO.update("d-5", deployBean5);
    List<DeployBean>
        beans =
        deployDAO.getAcceptedDeploys("env-3", new Interval(0, Long.MAX_VALUE), 100);
    assertEquals(beans.size(), 1);
    assertEquals(beans.get(0).getDeploy_id(), "d-5");
    // Cleanup. NOTE(review): bbb-4, d-x, and d-6 are not deleted here; later tests may
    // observe them -- confirm whether that is intentional.
    buildDAO.delete("bbb-1");
    buildDAO.delete("bbb-2");
    buildDAO.delete("bbb-3");
    environDAO.delete("env-1");
    environDAO.delete("env-2");
    environDAO.delete("env-3");
    deployDAO.delete("d-1");
    deployDAO.delete("d-2");
    deployDAO.delete("d-3");
    deployDAO.delete("d-4");
    deployDAO.delete("d-5");
    assertEquals(deployDAO.getById("d-1"), null);
}
/**
 * Exercises BuildDAO: lookups by id/commit/name, latest-by-branch, date- and page-bounded
 * listings, accepted builds, and batched getBuildsFromIds (including missing/empty ids).
 */
@Test
public void testBuildDAO() throws Exception {
    long now = System.currentTimeMillis();
    // Four builds for commit c-1: three on service sss-1, one on sss-2.
    BuildBean buildBean1 =
        genDefaultBuildInfoBean("b-1", "sss-1", "c-1", "r-1", now);
    BuildBean buildBean2 =
        genDefaultBuildInfoBean("b-2", "sss-1", "c-1", "r-1", now + 1000);
    BuildBean buildBean22 =
        genDefaultBuildInfoBean("b-22", "sss-2", "c-1", "r-1", now + 1000);
    BuildBean buildBean3 =
        genDefaultBuildInfoBean("b-3", "sss-1", "c-1", "r-1", now + 2000);
    buildDAO.insert(buildBean1);
    buildDAO.insert(buildBean2);
    buildDAO.insert(buildBean22);
    buildDAO.insert(buildBean3);
    assertTrue(EqualsBuilder.reflectionEquals(buildBean1, buildDAO.getById("b-1")));
    // By 7-char commit prefix, optionally restricted to a build name.
    assertEquals(buildDAO.getByCommit7("c-1", "", 1, 10).size(), 4);
    assertEquals(buildDAO.getByCommit7("c-1", "sss-1", 1, 10).size(), 3);
    assertEquals(buildDAO.getByCommit7("c-1", "sss-2", 1, 10).size(), 1);
    // "sss-" prefix matches both build names.
    assertEquals(buildDAO.getBuildNames("sss-", 1, 100).size(), 2);
    List<BuildBean>
        buildBeans =
        buildDAO.getAcceptedBuilds("sss-1", null, new Interval(now, Long.MAX_VALUE), 100);
    assertEquals(buildBeans.size(), 2);
    BuildBean bean1 = buildBeans.get(0);
    assertEquals(bean1.getBuild_id(), "b-3");
    BuildBean latestBuildBean = buildDAO.getLatest("sss-1", "branch-1");
    assertTrue(EqualsBuilder.reflectionEquals(buildBean3, latestBuildBean));
    List<BuildBean> buildBeans1 = buildDAO.getByNameDate("sss-1", null, now + 2000, now);
    assertEquals(buildBeans1.size(), 2);
    List<BuildBean> buildBeans2 = buildDAO.getByNameDate("sss-1", "branch-1", now + 2000, now);
    assertEquals(buildBeans2.size(), 2);
    assertTrue(EqualsBuilder.reflectionEquals(buildBeans2.get(0), buildBean3));
    // Page 3 with size 2 is past the end for 3 builds.
    List<BuildBean> buildBeans3 = buildDAO.getByName("sss-1", null, 3, 2);
    assertEquals(buildBeans3.size(), 0);
    List<BuildBean> buildBeans4 = buildDAO.getByName("sss-1", "branch-1", 1, 2);
    assertEquals(buildBeans4.size(), 2);
    // Batched id lookup: missing ids are silently dropped, empty input yields empty output.
    List<BuildBean>
        allBuildBeans =
        buildDAO.getBuildsFromIds(Arrays.asList("b-1", "b-2", "b-22"));
    assertEquals(3, allBuildBeans.size());
    allBuildBeans = buildDAO.getBuildsFromIds(Arrays.asList("b-1", "b-2", "Not There"));
    assertEquals(2, allBuildBeans.size());
    allBuildBeans = buildDAO.getBuildsFromIds(Arrays.asList("Not There"));
    assertEquals(0, allBuildBeans.size());
    allBuildBeans = buildDAO.getBuildsFromIds(new ArrayList<>());
    assertEquals(0, allBuildBeans.size());
    buildDAO.delete("b-1");
    buildDAO.delete("b-2");
    buildDAO.delete("b-22");
    buildDAO.delete("b-3");
    assertEquals(buildDAO.getById("b-1"), null);
}
/**
 * Verifies that insertOrUpdate on an existing agent row updates the record in place (still
 * one row per host) and that first_deploy_time, once set, is not overwritten by a later
 * update.
 *
 * <p>Fix: the deprecated {@code new Long(...)} boxing constructor is replaced with
 * {@link Long#valueOf(long)}.
 */
@Test
public void testAgentUpdate() throws Exception {
    AgentBean
        agentBean1 =
        genDefaultAgentBean("h1", "id-1", "e-1", "d-1", DeployStage.PRE_DOWNLOAD);
    agentDAO.insertOrUpdate(agentBean1);
    // Second insertOrUpdate for the same host/env updates rather than duplicates.
    AgentBean
        updateBean1 =
        genDefaultAgentBean("h1", "id-1", "e-1", "d-1", DeployStage.POST_DOWNLOAD);
    updateBean1.setFirst_deploy_time(10L);
    agentDAO.insertOrUpdate(updateBean1);
    List<AgentBean> agentBeans = agentDAO.getByHost("h1");
    assertEquals(agentBeans.size(), 1);
    assertEquals(agentBeans.get(0).getFirst_deploy_time(), Long.valueOf(10L));
    // A later update must NOT change the already-recorded first_deploy_time.
    updateBean1.setFirst_deploy_time(1000L);
    agentDAO.insertOrUpdate(updateBean1);
    agentBeans = agentDAO.getByHost("h1");
    assertEquals(agentBeans.size(), 1);
    assertEquals(agentBeans.get(0).getFirst_deploy_time(), Long.valueOf(10L));
}
/**
 * Verifies that updateMultiple applies the same partial update (state + deploy id) to every
 * listed host id within an environment.
 */
@Test
public void testAgentUpdateMultiple() throws Exception {
    AgentBean
        agentBean1 =
        genDefaultAgentBean("h5", "id-5", "e-2", "d-1", DeployStage.PRE_DOWNLOAD);
    AgentBean
        agentBean2 =
        genDefaultAgentBean("h6", "id-6", "e-2", "d-1", DeployStage.PRE_DOWNLOAD);
    AgentBean
        agentBean3 =
        genDefaultAgentBean("h7", "id-7", "e-2", "d-1", DeployStage.PRE_DOWNLOAD);
    agentDAO.insertOrUpdate(agentBean1);
    agentDAO.insertOrUpdate(agentBean2);
    agentDAO.insertOrUpdate(agentBean3);
    List<String> hostIds = Arrays.asList("id-5", "id-6", "id-7");
    // Partial update: only state and deploy_id are set on the template bean.
    AgentBean updateBean = new AgentBean();
    updateBean.setState(AgentState.RESET);
    updateBean.setDeploy_id("d-2");
    agentDAO.updateMultiple(hostIds, "e-2", updateBean);
    // Every agent in e-2 must reflect the bulk update.
    List<AgentBean> beans = agentDAO.getAllByEnv("e-2");
    for (AgentBean bean : beans) {
        assertEquals(bean.getState(), AgentState.RESET);
        assertEquals(bean.getDeploy_id(), "d-2");
    }
}
/**
 * Verifies the first-deploy counters: both agents are first deploys, and exactly one of
 * them (ABORTED_BY_SERVICE) counts as failed.
 */
@Test
public void testFirstDeployCount() throws Exception {
    AgentBean
        agentBean1 =
        genDefaultAgentBean("h12", "id-123", "e-12", "d-12", DeployStage.POST_RESTART);
    agentBean1.setFirst_deploy(true);
    // This one counts toward the failed first-deploy total.
    agentBean1.setStatus(AgentStatus.ABORTED_BY_SERVICE);
    AgentBean
        agentBean2 =
        genDefaultAgentBean("h22", "id-124", "e-12", "d-12", DeployStage.POST_RESTART);
    agentBean2.setFirst_deploy(true);
    agentDAO.insertOrUpdate(agentBean1);
    agentDAO.insertOrUpdate(agentBean2);
    long total = agentDAO.countFirstDeployingAgent("e-12");
    assertEquals(total, 2);
    long total_failed = agentDAO.countFailedFirstDeployingAgent("e-12");
    assertEquals(total_failed, 1);
}
/**
 * Exercises AgentDAO queries: per-host and per-env lookups, partial update, deploy-stage
 * counting, and the various count/delete helpers.
 */
@Test
public void testAgentQueries() throws Exception {
    // h-1 participates in two envs (e-1 and e-2); h-2 and h-3 only in e-1.
    AgentBean agentBean1 = genDefaultAgentBean(
        "h-1", "id-1", "e-1", "d-1", DeployStage.POST_RESTART);
    AgentBean agentBean11 = genDefaultAgentBean(
        "h-1", "id-1", "e-2", "d-1", DeployStage.SERVING_BUILD);
    AgentBean agentBean2 = genDefaultAgentBean(
        "h-2", "id-3", "e-1", "d-2", DeployStage.RESTARTING);
    AgentBean agentBean3 = genDefaultAgentBean(
        "h-3", "id-4", "e-1", "d-1", DeployStage.SERVING_BUILD);
    agentBean3.setFirst_deploy_time(System.currentTimeMillis());
    agentDAO.insertOrUpdate(agentBean1);
    agentDAO.insertOrUpdate(agentBean11);
    agentDAO.insertOrUpdate(agentBean2);
    agentDAO.insertOrUpdate(agentBean3);
    // Partial update of id-3 in e-1; mirror the change on the local bean for comparison.
    AgentBean agentBean22 = new AgentBean();
    agentBean22.setLast_err_no(22);
    agentBean2.setLast_err_no(22);
    agentDAO.update("id-3", "e-1", agentBean22);
    List<AgentBean> hostStatusList = agentDAO.getByHost("h-1");
    assertEquals(hostStatusList.size(), 2);
    List<AgentBean> agentBeans = agentDAO.getAllByEnv("e-1");
    assertEquals(agentBeans.size(), 3);
    // Tally deploy stages across e-1.
    int nServing = 0;
    int nRestarting = 0;
    int nPostRestart = 0;
    int nPrestaging = 0;
    for (AgentBean agentBean : agentBeans) {
        if (agentBean.getDeploy_stage() == DeployStage.SERVING_BUILD) {
            nServing++;
        }
        if (agentBean.getDeploy_stage() == DeployStage.POST_RESTART) {
            nPostRestart++;
        }
        if (agentBean.getDeploy_stage() == DeployStage.RESTARTING) {
            nRestarting++;
        }
        if (agentBean.getDeploy_stage() == DeployStage.PRE_DOWNLOAD) {
            nPrestaging++;
        }
    }
    assertEquals(nServing, 1);
    assertEquals(nRestarting, 1);
    assertEquals(nPostRestart, 1);
    assertEquals(nPrestaging, 0);
    assertEquals(agentDAO.countDeployingAgent("e-1"), 2);
    assertEquals(agentDAO.countServingTotal("e-1"), 1);
    assertEquals(agentDAO.countSucceededAgent("e-1", "d-1"), 1);
    assertEquals(agentDAO.countStuckAgent("e-1", "d-1"), 0);
    assertEquals(agentDAO.getByEnvAndFirstDeployTime("e-1", 0).size(), 1);
    assertEquals(agentDAO.getByEnvAndFirstDeployTime("e-2", 0).size(), 0);
    // Deleting by host id removes the agent from every env it belongs to.
    agentDAO.deleteAllById("id-1");
    assertEquals(agentDAO.countAgentByEnv("e-1"), 2);
    // NOTE(review): "id-2" was never inserted, and "id-3" was inserted with env e-1 (not
    // e-2) -- these deletes look like no-ops; confirm the intended cleanup targets.
    agentDAO.delete("id-2", "e-1");
    agentDAO.delete("id-3", "e-2");
    agentDAO.delete("id-4", "e-1");
    assertEquals(agentDAO.countAgentByEnv("e-2"), 0);
}
/**
 * Exercises EnvironDAO and GroupDAO capacity logic: env CRUD, host/group capacity
 * membership, override hosts between environments, total-capacity computation, and
 * missing-host detection.
 */
@Test
public void testEnvDAO() throws Exception {
    // Test insert
    EnvironBean envBean = genDefaultEnvBean("env-1", "s-1", "prod", "deploy-1");
    environDAO.insert(envBean);
    // Test getById
    EnvironBean envBean2 = environDAO.getById(envBean.getEnv_id());
    assertTrue(EqualsBuilder.reflectionEquals(envBean, envBean2));
    // Test getByStage
    EnvironBean envBean22 = environDAO.getByStage("s-1", "prod");
    assertTrue(EqualsBuilder.reflectionEquals(envBean, envBean22));
    // Test Watcher Column
    assertTrue(envBean2.getWatch_recipients().equals("watcher"));
    // Test update (partial: only adv_config_id changes)
    EnvironBean envBean3 = new EnvironBean();
    envBean3.setAdv_config_id("config_id_2");
    envBean2.setAdv_config_id("config_id_2");
    environDAO.update("env-1", envBean3);
    EnvironBean envBean4 = environDAO.getById("env-1");
    assertTrue(EqualsBuilder.reflectionEquals(envBean2, envBean4));
    // Test getByName return 2 envs after add another env
    envBean = genDefaultEnvBean("env-2", "s-1", "whatever", "deploy-1");
    environDAO.insert(envBean);
    List<EnvironBean> envBeans = environDAO.getByName("s-1");
    assertEquals(envBeans.size(), 2);
    // Fresh env has no capacity at all.
    assertEquals(groupDAO.getCapacityHosts("env-1").size(), 0);
    assertEquals(groupDAO.getCapacityGroups("env-1").size(), 0);
    assertEquals(environDAO.getOverrideHosts("env-1", "s-1", "prod").size(), 0);
    assertEquals(environDAO.countTotalCapacity("env-1", "s-1", "prod"), 0);
    // Add 2 hosts capacity to env-1, host1 & host2
    groupDAO.addHostCapacity("env-1", "host1");
    groupDAO.addHostCapacity("env-1", "host2");
    // Add 2 groups capacity to env-1, group1 & group2
    groupDAO.addGroupCapacity("env-1", "group1");
    groupDAO.addGroupCapacity("env-1", "group2");
    // env-1 : host1, host2, group1, group2, empty groups though
    assertEquals(groupDAO.getCapacityHosts("env-1").size(), 2);
    assertEquals(groupDAO.getCapacityGroups("env-1").size(), 2);
    assertEquals(environDAO.getOverrideHosts("env-1", "s-1", "prod").size(), 0);
    assertEquals(environDAO.countTotalCapacity("env-1", "s-1", "prod"), 0);
    assertEquals(environDAO.getMissingHosts("env-1").size(), 2);
    // Envs resolvable via their capacity groups; unknown groups contribute nothing.
    ArrayList<String> groupNames = new ArrayList<>();
    groupNames.add("group1");
    assertEquals(environDAO.getEnvsByGroups(groupNames).size(), 1);
    groupNames.add("group-lo");
    assertEquals(environDAO.getEnvsByGroups(groupNames).size(), 1);
    // Test remove Host capacity and remove host capacity
    groupDAO.removeHostCapacity("env-1", "host2");
    groupDAO.removeGroupCapacity("env-1", "group2");
    // now env-1 associate with only : host1, group1
    assertEquals(groupDAO.getCapacityHosts("env-1").size(), 1);
    assertEquals(groupDAO.getCapacityGroups("env-1").size(), 1);
    assertEquals(environDAO.getOverrideHosts("env-1", "s-1", "prod").size(), 0);
    assertEquals(environDAO.getMissingHosts("env-1").size(), 1);
    // Added 2 hosts to group1 and group2
    Set<String> groups = new HashSet<>(Arrays.asList("group1", "group2"));
    hostDAO
        .insertOrUpdate("host-1", "1.1.1.1", "id-123434", HostState.ACTIVE.toString(), groups);
    hostDAO
        .insertOrUpdate("host-2", "1.1.1.2", "id-123435", HostState.TERMINATING.toString(),
            groups);
    // Second insertOrUpdate with ACTIVE does not downgrade the TERMINATING state below.
    hostDAO
        .insertOrUpdate("host-2", "1.1.1.2", "id-123435", HostState.ACTIVE.toString(), groups);
    List<HostBean> hostBeans = hostDAO.getHostsByHostId("id-123435");
    assertEquals(hostBeans.get(0).getState(), HostState.TERMINATING);
    // Total capacity for env-1 should be 2, host-1(group1), host-2(group2) and one missing
    // host1
    assertEquals(environDAO.getOverrideHosts("env-1", "s-1", "prod").size(), 0);
    assertEquals(environDAO.countTotalCapacity("env-1", "s-1", "prod"), 2);
    assertEquals(environDAO.getMissingHosts("env-1").size(), 1);
    List<String> totalHosts = environDAO.getTotalCapacityHosts("env-1", "s-1", "prod");
    assertEquals(totalHosts.size(), 2);
    assertTrue(totalHosts.containsAll(Arrays.asList("host-1", "host-2")));
    // Now, override host-1 with env2
    groupDAO.addHostCapacity("env-2", "host-1");
    // override hosts should be 1, host-1
    // Total capacity for env1 should be 1, host-2
    assertEquals(environDAO.getOverrideHosts("env-1", "s-1", "prod").size(), 1);
    assertEquals(environDAO.countTotalCapacity("env-1", "s-1", "prod"), 1);
    List<String> totalHosts2 = environDAO.getTotalCapacityHosts("env-1", "s-1", "prod");
    assertEquals(totalHosts2.size(), 1);
    assertFalse(totalHosts2.contains("hosts-1"));
    // ineffective override (noise), add host-2 override on env-1
    // override hosts should be 1, host-1
    // Total capacity for env1 still is 1, host-2
    groupDAO.addHostCapacity("env-1", "host-2"); // noise
    assertEquals(environDAO.getOverrideHosts("env-1", "s-1", "prod").size(), 1);
    assertEquals(environDAO.countTotalCapacity("env-1", "s-1", "prod"), 1);
    List<String> totalHosts3 = environDAO.getTotalCapacityHosts("env-1", "s-1", "prod");
    assertEquals(totalHosts3.size(), 1);
    assertTrue(totalHosts3.containsAll(Arrays.asList("host-2")));
    // env-1 : group1
    // env-2 : host-1,
    List<EnvironBean> envs = environDAO.getEnvsByHost("host-1");
    assertEquals(envs.size(), 1);
    assertEquals(envs.get(0).getEnv_name(), "s-1");
    assertEquals(envs.get(0).getStage_name(), "whatever");
    envs = environDAO.getEnvsByGroups(groups);
    assertEquals(envs.size(), 1);
    environDAO.delete("env-1");
    environDAO.delete("env-2");
    envBean = environDAO.getById("env-1");
    assertEquals(envBean, null);
}
/**
 * Exercises HostDAO: group membership, transactional delete (host + agent + agent error),
 * insertOrUpdate/insert duplicate handling, terminating-host listing, and the
 * to-be-retired / failed host id queries.
 */
@Test
public void testHostDAO() throws Exception {
    Set<String> groups = new HashSet<>(Arrays.asList("group1", "group2"));
    hostDAO.insertOrUpdate("host-1", "1.1.1.1", "id-1", HostState.ACTIVE.toString(), groups);
    groups = new HashSet<>(Arrays.asList("group1"));
    hostDAO.insertOrUpdate("host-2", "1.1.1.2", "id-2", HostState.ACTIVE.toString(), groups);
    hostDAO.insertOrUpdate("host-3", "1.1.1.3", "id-3", HostState.ACTIVE.toString(), groups);
    /*
    host-1 : group1, group2
    host-2 : group1
    host-3 : group1
    */
    assertEquals(hostDAO.getHostNamesByGroup("group1").size(), 3);
    hostDAO.removeHostFromGroup("id-3", "group1");
    /*
    host-1 : group1, group2
    host-2 : group1
    */
    assertEquals(hostDAO.getHostNamesByGroup("group1").size(), 2);
    assertEquals(hostDAO.getGroupSize("group1").intValue(), 2);
    assertEquals(hostDAO.getGroupNamesByHost("host-1").size(), 2);
    // test on non-existing group size
    assertEquals(hostDAO.getGroupSize("group10").intValue(), 0);
    hostDAO.deleteById("id-1");
    /*
    host-2 : group1
    */
    assertEquals(hostDAO.getHostNamesByGroup("group1").size(), 1);
    assertEquals(hostDAO.getHostNamesByGroup("group2").size(), 0);
    hostDAO.deleteById("id-2");
    // test host transactional delete: deleteAllById must also remove the agent row and
    // the agent error row for the same host.
    hostDAO.insertOrUpdate("host-1", "1.1.1.1", "id-1", HostState.ACTIVE.toString(), groups);
    AgentBean agentBean = genDefaultAgentBean(
        "host-1", "id-1", "e-1", "d-1", DeployStage.SERVING_BUILD);
    agentDAO.insertOrUpdate(agentBean);
    AgentErrorBean agentErrorBean = new AgentErrorBean();
    agentErrorBean.setHost_name("host-1");
    agentErrorBean.setHost_id("id-1");
    agentErrorBean.setEnv_id("env-test-host");
    agentErrorBean.setError_msg("Yeah, it is wrong!");
    agentErrorDAO.insert(agentErrorBean);
    hostDAO.deleteAllById("id-1");
    List<HostBean> hostBeans1 = hostDAO.getHosts("host-1");
    assertTrue(hostBeans1.isEmpty());
    List<AgentBean> agentBeans = agentDAO.getByHost("host-1");
    assertTrue(agentBeans.isEmpty());
    AgentErrorBean agentErrorBeans = agentErrorDAO.get("host-1", "env-test-host");
    assertNull(agentErrorBeans);
    // test hosts_and_envs
    groupDAO.addHostCapacity("e-3", "host-3");
    assertEquals(environDAO.getMissingHosts("e-3").size(), 1);
    Set<String> groups2 = new HashSet<>(Arrays.asList("new_group"));
    hostDAO
        .insertOrUpdate("host-3", "3.3.3.3", "id-3", HostState.TERMINATING.toString(), groups2);
    assertEquals(environDAO.getMissingHosts("e-3").size(), 0);
    Collection<HostBean> hostBean3 = hostDAO.getByEnvIdAndHostName("e-3", "host-3");
    assertEquals(hostBean3.iterator().next().getHost_name(), "host-3");
    groupDAO.addGroupCapacity("e-3", "new_group");
    hostBean3 = hostDAO.getByEnvIdAndHostName("e-3", "host-3");
    assertEquals(hostBean3.iterator().next().getHost_name(), "host-3");
    groupDAO.removeGroupCapacity("e-3", "new_group");
    // test host insertOrUpdate: duplicate insert of the same bean keeps a single row.
    HostBean hostBean1 = new HostBean();
    hostBean1.setHost_name("i-9");
    hostBean1.setHost_id("i-9");
    hostBean1.setGroup_name("test_dup");
    Long currentTime = System.currentTimeMillis();
    hostBean1.setCreate_date(currentTime);
    hostBean1.setLast_update(currentTime);
    hostBean1.setState(HostState.PROVISIONED);
    hostDAO.insert(hostBean1);
    hostDAO.insert(hostBean1);
    List<HostBean> hostBeans3 = hostDAO.getHosts("i-9");
    assertEquals(hostBeans3.size(), 1);
    assertEquals(hostBeans3.get(0).getHost_name(), "i-9");
    // insertOrUpdate on the same host id updates the name in place.
    HashSet<String> groups9 = new HashSet<>(Arrays.asList("test_dup"));
    hostDAO
        .insertOrUpdate("h-9", "9.9.9.9", "i-9", HostState.PENDING_TERMINATE.toString(),
            groups9);
    List<HostBean> hostBeans4 = hostDAO.getHosts("h-9");
    assertEquals(hostBeans4.size(), 1);
    assertEquals(hostBeans4.get(0).getHost_name(), "h-9");
    assertEquals(hostBeans4.get(0).getHost_id(), "i-9");
    List<HostBean> hostBeans5 = hostDAO.getTerminatingHosts();
    assertEquals(hostBeans5.size(), 2);
    // Test can retire hosts
    HostBean hostBean6 = new HostBean();
    hostBean6.setHost_name("i-11");
    hostBean6.setHost_id("i-11");
    hostBean6.setGroup_name("retire-group");
    hostBean6.setCreate_date(currentTime);
    hostBean6.setLast_update(currentTime);
    hostBean6.setState(HostState.ACTIVE);
    hostBean6.setCan_retire(1);
    hostDAO.insert(hostBean6);
    // Same bean instance is mutated and reused for a second insert (i-12).
    hostBean6.setHost_name("i-12");
    hostBean6.setHost_id("i-12");
    hostDAO.insert(hostBean6);
    // i-13 is retire-eligible but TERMINATING, so it does not count as to-be-retired.
    HostBean hostBean7 = new HostBean();
    hostBean7.setHost_name("i-13");
    hostBean7.setHost_id("i-13");
    hostBean7.setGroup_name("retire-group");
    hostBean7.setCreate_date(currentTime);
    hostBean7.setLast_update(currentTime);
    hostBean7.setState(HostState.TERMINATING);
    hostBean7.setCan_retire(1);
    hostDAO.insert(hostBean7);
    Collection<String>
        retiredHostBeanIds =
        hostDAO.getToBeRetiredHostIdsByGroup("retire-group");
    assertEquals(retiredHostBeanIds.size(), 2);
    // Mark i-11's agent as failed; it is then both to-be-retired and failed.
    AgentBean
        agentBean1 =
        genDefaultAgentBean("i-11", "i-11", "e-1", "d-1", DeployStage.RESTARTING);
    agentBean1.setStatus(AgentStatus.AGENT_FAILED);
    agentDAO.insertOrUpdate(agentBean1);
    Collection<String>
        retiredAndFailedHostIds =
        hostDAO.getToBeRetiredAndFailedHostIdsByGroup("retire-group");
    assertEquals(retiredAndFailedHostIds.size(), 1);
    Collection<String> failedHostIds = hostDAO.getFailedHostIdsByGroup("retire-group");
    assertEquals(failedHostIds.size(), 1);
}
@Test
public void testDataDAO() throws Exception {
  // Round-trip a DataBean: insert, fetch, compare, delete, verify it is gone.
  DataBean dataBean = genDefaultDataBean("foo1", "name1=value1,name2=value2");
  dataDAO.insert(dataBean);
  DataBean dataBean2 = dataDAO.getById("foo1");
  assertTrue(EqualsBuilder.reflectionEquals(dataBean, dataBean2));
  dataDAO.delete("foo1");
  // assertNull gives a clearer failure message than assertEquals(x, null),
  // and matches the assertNull usage elsewhere in this class.
  assertNull(dataDAO.getById("foo1"));
}
@Test
public void testRatingsDAO() throws Exception {
  // Insert one rating, read it back by author, then delete and verify removal.
  RatingBean ratingBean = genDefaultRatingsBean("1", "foo", System.currentTimeMillis());
  ratingDAO.insert(ratingBean);
  List<RatingBean> ratingBeans = ratingDAO.getRatingsByAuthor("foo");
  // assertEquals/isEmpty report the actual size on failure, unlike
  // assertTrue(size() == n) which only says "expected true".
  assertEquals(ratingBeans.size(), 1);
  assertTrue(EqualsBuilder.reflectionEquals(ratingBean, ratingBeans.get(0)));
  ratingDAO.delete("1");
  assertTrue(ratingDAO.getRatingsByAuthor("foo").isEmpty());
}
@Test
public void testAgentErrorDAO() throws Exception {
  // Two error records for the same host but different environments.
  AgentErrorBean firstError = new AgentErrorBean();
  firstError.setHost_name("host-1");
  firstError.setHost_id("id-1");
  firstError.setEnv_id("env-1");
  firstError.setError_msg("Yeah, it is wrong!");
  agentErrorDAO.insert(firstError);

  AgentErrorBean secondError = new AgentErrorBean();
  secondError.setHost_name("host-1");
  secondError.setHost_id("id-1");
  secondError.setEnv_id("env-2");
  secondError.setError_msg("never mind!");
  agentErrorDAO.insert(secondError);

  // Updating the env-1 record should leave the env-2 record untouched and
  // persist the new message.
  firstError.setError_msg("what, again?");
  agentErrorDAO.update("host-1", "env-1", firstError);
  AgentErrorBean fetched = agentErrorDAO.get("host-1", "env-1");
  assertTrue(EqualsBuilder.reflectionEquals(firstError, fetched));
}
@Test
public void testPromoteDAO() throws Exception {
  long now = System.currentTimeMillis();
  // Insert a MANUAL promote config and read it back.
  PromoteBean bean1 = new PromoteBean();
  bean1.setEnv_id("env1");
  bean1.setType(PromoteType.MANUAL);
  bean1.setQueue_size(Constants.DEFAULT_PROMOTE_QUEUE_SIZE);
  bean1.setDisable_policy(Constants.DEFAULT_PROMOTE_DISABLE_POLICY);
  bean1.setFail_policy(Constants.DEFAULT_PROMOTE_FAIL_POLICY);
  bean1.setLast_operator("system");
  bean1.setLast_update(now);
  bean1.setDelay(0);
  promoteDAO.insert(bean1);
  PromoteBean bean11 = promoteDAO.getById("env1");
  assertTrue(EqualsBuilder.reflectionEquals(bean1, bean11));

  // Switch the same env to AUTO promotion and verify the update took effect.
  PromoteBean bean2 = new PromoteBean();
  bean2.setEnv_id("env1");
  bean2.setType(PromoteType.AUTO);
  bean2.setPred_stage("xxx");
  bean2.setQueue_size(Constants.DEFAULT_PROMOTE_QUEUE_SIZE);
  bean2.setDisable_policy(Constants.DEFAULT_PROMOTE_DISABLE_POLICY);
  bean2.setFail_policy(Constants.DEFAULT_PROMOTE_FAIL_POLICY);
  bean2.setLast_operator("system");
  bean2.setLast_update(now);
  bean2.setDelay(0);
  promoteDAO.update("env1", bean2);
  PromoteBean bean22 = promoteDAO.getById("env1");
  assertTrue(EqualsBuilder.reflectionEquals(bean2, bean22));

  // The env now counts as auto-promotable; after deletion it must be gone.
  List<String> ids = promoteDAO.getAutoPromoteEnvIds();
  assertEquals(ids.size(), 1);
  promoteDAO.delete("env1");
  // assertNull gives a clearer failure message than assertEquals(x, null).
  assertNull(promoteDAO.getById("env1"));
}
@Test
public void testGroupDAO() throws Exception {
  // Map two different envs onto "group3" and one onto "group4".
  groupDAO.addGroupCapacity("env-id3", "group3");
  groupDAO.addGroupCapacity("env-id4", "group4");
  groupDAO.addGroupCapacity("env-id5", "group3");

  // Both envs attached to group3 should come back, in any order.
  List<String> envids = groupDAO.getEnvsByGroupName("group3");
  assertEquals(envids.size(), 2);
  HashSet<String> targetIds = new HashSet<>(envids);
  assertTrue(targetIds.contains("env-id3"));
  assertTrue(targetIds.contains("env-id5"));

  // Two distinct group names exist overall.
  assertEquals(groupDAO.getAllEnvGroups().size(), 2);
}
@Test
public void testUserRolesDAO() throws Exception {
  // Grant ADMIN to a user on an ENV resource, then read the grant back.
  UserRolesBean inserted = new UserRolesBean();
  inserted.setUser_name("test");
  inserted.setResource_id("envTest");
  inserted.setResource_type(Resource.Type.ENV);
  inserted.setRole(Role.ADMIN);
  userRolesDAO.insert(inserted);

  UserRolesBean fetched =
      userRolesDAO.getByNameAndResource("test", "envTest", Resource.Type.ENV);
  assertEquals(fetched.getRole(), Role.ADMIN);
}
@Test
public void testGroupRolesDAO() throws Exception {
  // Grant ADMIN to a group on an ENV resource, then read the grant back.
  GroupRolesBean inserted = new GroupRolesBean();
  inserted.setGroup_name("group");
  inserted.setResource_id("123");
  inserted.setResource_type(Resource.Type.ENV);
  inserted.setRole(Role.ADMIN);
  groupRolesDAO.insert(inserted);

  GroupRolesBean fetched =
      groupRolesDAO.getByNameAndResource("group", "123", Resource.Type.ENV);
  assertEquals(fetched.getRole(), Role.ADMIN);
}
@Test
public void testTokenRolesDAO() throws Exception {
  // Grant ADMIN to a script token on an ENV resource, then read the grant back.
  TokenRolesBean inserted = new TokenRolesBean();
  inserted.setScript_name("test");
  inserted.setToken("token");
  inserted.setResource_id("envTest");
  inserted.setResource_type(Resource.Type.ENV);
  inserted.setRole(Role.ADMIN);
  inserted.setExpire_date(System.currentTimeMillis());
  tokenRolesDAO.insert(inserted);

  TokenRolesBean fetched =
      tokenRolesDAO.getByNameAndResource("test", "envTest", Resource.Type.ENV);
  assertEquals(fetched.getRole(), Role.ADMIN);
}
@Test
public void testConfigHistoryDAO() throws Exception {
  // First change record for config "group-1".
  ConfigHistoryBean change = new ConfigHistoryBean();
  change.setChange_id("id-1");
  change.setConfig_id("group-1");
  change.setConfig_change("Test config change");
  change.setCreation_time(System.currentTimeMillis());
  change.setOperator("lo");
  change.setType("Host Terminate");
  configHistoryDAO.insert(change);
  assertEquals(configHistoryDAO.getByChangeId("id-1").getType(), "Host Terminate");

  // Reuse the same bean for a second change under the same config id.
  change.setChange_id("id-2");
  change.setConfig_id("group-1");
  change.setConfig_change("Test config change 2");
  change.setCreation_time(System.currentTimeMillis());
  change.setOperator("lo");
  change.setType("Host Launch");
  configHistoryDAO.insert(change);

  // Both changes show up when paging the config's history.
  List<ConfigHistoryBean> history = configHistoryDAO.getByConfigId("group-1", 1, 10);
  assertEquals(history.size(), 2);
}
@Test
public void testTagDAO() throws Exception {
  TagBean tag = genTagBean(TagValue.BAD_BUILD, "TestEnv", "BUILD",
      genDefaultBuildInfoBean("b-3", "sss-1", "c-1", "r-1", System.currentTimeMillis()));
  tagDAO.insert(tag);

  // Fetch by id and verify the serialized build metadata survives the round trip.
  TagBean fetched = tagDAO.getById(tag.getId());
  assertNotNull(fetched);
  assertEquals(tag.getTarget_id(), fetched.getTarget_id());
  BuildBean metaBuild = fetched.deserializeTagMetaInfo(BuildBean.class);
  assertEquals("b-3", metaBuild.getBuild_id());

  // Lookups by target id, with and without a type filter.
  assertEquals(1, tagDAO.getByTargetId(tag.getTarget_id()).size());
  assertEquals(1, tagDAO.getByTargetIdAndType(tag.getTarget_id(), TagTargetType.BUILD).size());

  // After deletion every lookup comes back empty.
  tagDAO.delete(tag.getId());
  assertNull(tagDAO.getById(tag.getId()));
  assertEquals(0, tagDAO.getByTargetId(tag.getTarget_id()).size());
  assertEquals(0, tagDAO.getByTargetIdAndType(tag.getTarget_id(), TagTargetType.BUILD).size());

  // Four tags sharing the same value can all be retrieved by value.
  for (int i = 0; i < 4; i++) {
    tagDAO.insert(genTagBean(TagValue.BAD_BUILD, "env1", "BUILD", new HashMap<String, String>()));
  }
  assertEquals(4, tagDAO.getByValue(TagValue.BAD_BUILD).size());
  assertEquals(0, tagDAO.getByValue(TagValue.GOOD_BUILD).size());
}
@Test
public void testScheduleDAO() throws Exception {
  Long time = System.currentTimeMillis();
  String id = CommonUtils.getBase64UUID();

  // Persist a schedule that is currently cooling down between sessions.
  ScheduleBean schedule = new ScheduleBean();
  schedule.setId(id);
  schedule.setTotal_sessions(3);
  schedule.setCooldown_times("40,50,60");
  schedule.setCurrent_session(2);
  schedule.setHost_numbers("50,60,500");
  schedule.setState(ScheduleState.COOLING_DOWN);
  schedule.setState_start_time(time);
  scheduleDAO.insert(schedule);

  // Every persisted field should survive the round trip.
  ScheduleBean loaded = scheduleDAO.getById(id);
  assertEquals(loaded.getTotal_sessions(), (Integer) 3);
  assertEquals(loaded.getCooldown_times(), "40,50,60");
  assertEquals(loaded.getCurrent_session(), (Integer) 2);
  assertEquals(loaded.getHost_numbers(), "50,60,500");
  assertEquals(loaded.getState(), ScheduleState.COOLING_DOWN);
  assertEquals(loaded.getState_start_time(), time);

  // A partial update must change only the supplied fields.
  ScheduleBean partial = new ScheduleBean();
  partial.setTotal_sessions(3);
  partial.setCurrent_session(1);
  partial.setState(ScheduleState.RUNNING);
  scheduleDAO.update(partial, id);
  ScheduleBean reloaded = scheduleDAO.getById(id);
  assertEquals(reloaded.getCurrent_session(), (Integer) 1);
  assertEquals(reloaded.getState(), ScheduleState.RUNNING);
  assertEquals(reloaded.getHost_numbers(), "50,60,500");
}
/**
 * Builds an EnvironBean populated with default values for DAO tests.
 *
 * @param envId unique environment id
 * @param envName environment name
 * @param envStage stage name within the environment
 * @param deployId id of the deploy currently attached to the env
 */
private EnvironBean genDefaultEnvBean(String envId, String envName, String envStage,
    String deployId) {
  EnvironBean bean = new EnvironBean();
  // Identity.
  bean.setEnv_id(envId);
  bean.setEnv_name(envName);
  bean.setStage_name(envStage);
  // Deploy-policy defaults.
  bean.setEnv_state(EnvState.NORMAL);
  bean.setMax_parallel(1);
  bean.setPriority(DeployPriority.NORMAL);
  bean.setStuck_th(100);
  // To keep the precision, the default success_th value should be 10000 in DB.
  bean.setSuccess_th(10000);
  bean.setDescription("foo");
  bean.setDeploy_id(deployId);
  bean.setAdv_config_id("config_id_1");
  bean.setSc_config_id("envvar_id_1");
  bean.setLast_operator("bar");
  bean.setLast_update(System.currentTimeMillis());
  bean.setAccept_type(AcceptanceType.AUTO);
  bean.setNotify_authors(false);
  bean.setWatch_recipients("watcher");
  bean.setMax_deploy_num(5100);
  bean.setMax_deploy_day(366);
  bean.setIs_docker(false);
  bean.setMax_parallel_pct(0);
  bean.setState(EnvironState.NORMAL);
  bean.setMax_parallel_rp(1);
  bean.setOverride_policy(OverridePolicy.OVERRIDE);
  bean.setAllow_private_build(false);
  bean.setEnsure_trusted_build(false);
  return bean;
}
/**
 * Builds a REGULAR DeployBean in the given state with zeroed progress counters.
 */
private DeployBean genDefaultDeployBean(String id, String envId, String buildId,
    long startDate, DeployState state) {
  DeployBean bean = new DeployBean();
  bean.setDeploy_id(id);
  bean.setEnv_id(envId);
  bean.setBuild_id(buildId);
  bean.setDeploy_type(DeployType.REGULAR);
  bean.setState(state);
  bean.setStart_date(startDate);
  bean.setOperator("foo");
  bean.setLast_update(startDate);
  bean.setDescription("foo");
  // No host has reported yet: all progress counters start at zero.
  bean.setSuc_total(0);
  bean.setFail_total(0);
  bean.setTotal(0);
  bean.setAcc_status(Constants.DEFAULT_ACCEPTANCE_STATUS);
  return bean;
}
/** Builds a five-star RatingBean with canned feedback for the given id and author. */
private RatingBean genDefaultRatingsBean(String id, String author, long timestamp) {
  RatingBean bean = new RatingBean();
  bean.setRating_id(id);
  bean.setRating("5");
  bean.setTimestamp(timestamp);
  bean.setFeedback("good feedback");
  bean.setAuthor(author);
  return bean;
}
/**
 * Builds a BuildBean with fixed repo/branch values and the supplied ids and dates.
 * NOTE(review): scm_commit_7 is set to the full commit id rather than a 7-char
 * prefix — harmless for these tests, which pass short ids, but worth confirming.
 */
private BuildBean genDefaultBuildInfoBean(String id, String buildName,
    String commitId, String repoUrl, long buildDate) {
  BuildBean bean = new BuildBean();
  bean.setBuild_id(id);
  bean.setBuild_name(buildName);
  bean.setScm_repo("repo-1");
  bean.setScm_branch("branch-1");
  bean.setScm_commit(commitId);
  bean.setScm_commit_7(commitId);
  bean.setCommit_date(buildDate);
  bean.setArtifact_url(repoUrl);
  bean.setPublish_info("jenkins12345");
  bean.setPublish_date(buildDate);
  return bean;
}
/**
 * Builds an AgentBean in the SUCCEEDED/NORMAL state for the given host, env,
 * deploy and stage; timestamps are "now" and the fail count starts at zero.
 */
private AgentBean genDefaultAgentBean(String hostName, String hostId, String envId,
    String deployId, DeployStage deployStage) {
  AgentBean bean = new AgentBean();
  bean.setHost_name(hostName);
  bean.setHost_id(hostId);
  bean.setEnv_id(envId);
  bean.setDeploy_id(deployId);
  bean.setDeploy_stage(deployStage);
  bean.setStart_date(System.currentTimeMillis());
  bean.setStatus(AgentStatus.SUCCEEDED);
  bean.setState(AgentState.NORMAL);
  bean.setLast_update(System.currentTimeMillis());
  bean.setLast_operator("me");
  bean.setFail_count(0);
  bean.setFirst_deploy(false);
  bean.setStage_start_date(System.currentTimeMillis());
  return bean;
}
/** Builds a DataBean of kind "script" holding the given payload. */
private DataBean genDefaultDataBean(String id, String data) {
  DataBean bean = new DataBean();
  bean.setData_id(id);
  bean.setData_kind("script");
  bean.setOperator("foo");
  bean.setTimestamp(System.currentTimeMillis());
  bean.setData(data);
  return bean;
}
/**
 * Builds a TagBean carrying the given value, target and serialized metadata.
 *
 * Fix: the {@code val} and {@code target_type} arguments were previously ignored —
 * the value and target type were hard-coded to BAD_BUILD/BUILD. They are now
 * honored; every existing caller passes BAD_BUILD and "BUILD", so behavior for
 * current tests is unchanged.
 *
 * @param meta_info arbitrary object serialized into the tag's metadata blob
 */
private TagBean genTagBean(TagValue val, String target_id, String target_type, Object meta_info)
    throws Exception {
  TagBean bean = new TagBean();
  bean.setId(CommonUtils.getBase64UUID());
  bean.setCreated_date(System.currentTimeMillis());
  bean.setOperator("johndoe");
  bean.setValue(val);
  bean.setTarget_id(target_id);
  bean.setTarget_type(TagTargetType.valueOf(target_type));
  bean.serializeTagMetaInfo(meta_info);
  return bean;
}
}
| |
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2014 MIT, All rights reserved
// Released under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
package com.google.appinventor.components.runtime;
import com.google.appinventor.components.annotations.DesignerComponent;
import com.google.appinventor.components.annotations.DesignerProperty;
import com.google.appinventor.components.annotations.PropertyCategory;
import com.google.appinventor.components.annotations.SimpleEvent;
import com.google.appinventor.components.annotations.SimpleFunction;
import com.google.appinventor.components.annotations.SimpleObject;
import com.google.appinventor.components.annotations.SimpleProperty;
import com.google.appinventor.components.annotations.UsesPermissions;
import com.google.appinventor.components.common.ComponentCategory;
import com.google.appinventor.components.common.PropertyTypeConstants;
import com.google.appinventor.components.common.YaVersion;
import com.google.appinventor.components.runtime.util.AsynchUtil;
import com.google.appinventor.components.runtime.util.ErrorMessages;
import com.google.appinventor.components.runtime.util.FileUtil;
import com.google.appinventor.components.runtime.Form;
import com.google.appinventor.components.runtime.ReplForm;
import android.app.Activity;
import android.content.Context;
import android.os.Environment;
import android.util.Log;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.StringWriter;
/**
 * A Component for working with files and directories on the device.
 *
 * <p>Path conventions (mirrored in the designer descriptions below):
 * "//name" refers to a packaged application asset (read-only), "/name" is
 * relative to the external storage root (/sdcard), and a bare "name" lives in
 * the app's private data directory. When running inside the AI Companion
 * (REPL), private files are redirected to /sdcard/AppInventor/data so they can
 * be inspected while debugging.
 */
@DesignerComponent(version = YaVersion.FILE_COMPONENT_VERSION,
    description = "Non-visible component for storing and retrieving files. Use this component to " +
    "write or read files on your device. The default behaviour is to write files to the " +
    "private data directory associated with your App. The Companion is special cased to write " +
    "files to /sdcard/AppInventor/data to facilitate debugging. " +
    "If the file path starts with a slash (/), then the file is created relative to /sdcard. " +
    "For example writing a file to /myFile.txt will write the file in /sdcard/myFile.txt.",
    category = ComponentCategory.STORAGE,
    nonVisible = true,
    iconName = "images/file.png")
@SimpleObject
@UsesPermissions(permissionNames = "android.permission.WRITE_EXTERNAL_STORAGE, android.permission.READ_EXTERNAL_STORAGE")
public class File extends AndroidNonvisibleComponent implements Component {
  public static final String NO_ASSETS = "No_Assets";
  private final Activity activity;
  private boolean isRepl = false;          // true when running inside the AI Companion (REPL)
  private final int BUFFER_LENGTH = 4096;  // chunk size, in chars, for asynchronous reads
  private static final String LOG_TAG = "FileComponent";

  /**
   * Creates a new File component.
   *
   * @param container the Form that this component is contained in.
   */
  public File(ComponentContainer container) {
    super(container.$form());
    if (form instanceof ReplForm) { // Note: form is defined in our superclass
      isRepl = true;
    }
    activity = (Activity) container.$context();
  }

  /**
   * Stores the text to a specified file on the phone.
   * Calls the Write function to write to the file asynchronously to prevent
   * the UI from hanging when there is a large write.
   *
   * @param text the text to be stored
   * @param fileName the file to which the text will be stored
   */
  @SimpleFunction(description = "Saves text to a file. If the filename " +
      "begins with a slash (/) the file is written to the sdcard. For example writing to " +
      "/myFile.txt will write the file to /sdcard/myFile.txt. If the filename does not start " +
      "with a slash, it will be written in the programs private data directory where it will " +
      "not be accessible to other programs on the phone. There is a special exception for the " +
      "AI Companion where these files are written to /sdcard/AppInventor/data to facilitate " +
      "debugging. Note that this block will overwrite a file if it already exists." +
      "\n\nIf you want to add content to a file use the append block.")
  public void SaveFile(String text, String fileName) {
    if (fileName.startsWith("/")) {
      FileUtil.checkExternalStorageWriteable(); // Only check if writing to sdcard
    }
    Write(fileName, text, false);
  }

  /**
   * Appends text to a specified file on the phone.
   * Calls the Write function to write to the file asynchronously to prevent
   * the UI from hanging when there is a large write.
   *
   * @param text the text to be stored
   * @param fileName the file to which the text will be stored
   */
  @SimpleFunction(description = "Appends text to the end of a file storage, creating the file if it does not exist. " +
      "See the help text under SaveFile for information about where files are written.")
  public void AppendToFile(String text, String fileName) {
    if (fileName.startsWith("/")) {
      FileUtil.checkExternalStorageWriteable(); // Only check if writing to sdcard
    }
    Write(fileName, text, true);
  }

  /**
   * Retrieve the text stored in a specified file. The read itself happens on a
   * background thread (see AsyncRead); the GotText event fires on the UI thread
   * when it completes.
   *
   * @param fileName the file from which the text is read
   */
  @SimpleFunction(description = "Reads text from a file in storage. " +
      "Prefix the filename with / to read from a specific file on the SD card. " +
      "for instance /myFile.txt will read the file /sdcard/myFile.txt. To read " +
      "assets packaged with an application (also works for the Companion) start " +
      "the filename with // (two slashes). If a filename does not start with a " +
      "slash, it will be read from the applications private storage (for packaged " +
      "apps) and from /sdcard/AppInventor/data for the Companion.")
  public void ReadFrom(final String fileName) {
    try {
      InputStream inputStream;
      if (fileName.startsWith("//")) {
        if (isRepl) {
          // NOTE(review): fileName still carries its leading slashes here, so the
          // resulting path contains "assets///name"; extra slashes are collapsed
          // by the filesystem, but confirm this matches the asset layout.
          inputStream = new FileInputStream(Environment.getExternalStorageDirectory().getPath() +
              "/AppInventor/assets/" + fileName);
        } else {
          inputStream = form.getAssets().open(fileName.substring(2));
        }
      } else {
        String filepath = AbsoluteFileName(fileName);
        Log.d(LOG_TAG, "filepath = " + filepath);
        inputStream = new FileInputStream(filepath);
      }
      // Capture the stream in a final local so the anonymous Runnable can use it.
      final InputStream asyncInputStream = inputStream;
      AsynchUtil.runAsynchronously(new Runnable() {
        @Override
        public void run() {
          AsyncRead(asyncInputStream, fileName);
        }
      });
    } catch (FileNotFoundException e) {
      Log.e(LOG_TAG, "FileNotFoundException", e);
      form.dispatchErrorOccurredEvent(File.this, "ReadFrom",
          ErrorMessages.ERROR_CANNOT_FIND_FILE, fileName);
    } catch (IOException e) {
      Log.e(LOG_TAG, "IOException", e);
      form.dispatchErrorOccurredEvent(File.this, "ReadFrom",
          ErrorMessages.ERROR_CANNOT_FIND_FILE, fileName);
    }
  }

  /**
   * Delete the specified file. Asset files (// prefix) cannot be deleted and
   * raise an error event instead.
   *
   * @param fileName the file to be deleted
   */
  @SimpleFunction(description = "Deletes a file from storage. " +
      "Prefix the filename with / to delete a specific file in the SD card, for instance /myFile.txt. " +
      "will delete the file /sdcard/myFile.txt. If the file does not begin with a /, then the file " +
      "located in the programs private storage will be deleted. Starting the file with // is an error " +
      "because assets files cannot be deleted.")
  public void Delete(String fileName) {
    if (fileName.startsWith("//")) {
      form.dispatchErrorOccurredEvent(File.this, "DeleteFile",
          ErrorMessages.ERROR_CANNOT_DELETE_ASSET, fileName);
      return;
    }
    String filepath = AbsoluteFileName(fileName);
    java.io.File file = new java.io.File(filepath);
    // NOTE(review): delete()'s boolean result is ignored, so a failed delete is
    // silent — confirm whether an error event should be raised here.
    file.delete();
  }

  /**
   * Writes to the specified file on a background thread, creating it first if
   * necessary. Assets (// prefix) are rejected with an error event.
   *
   * @param filename the file to write
   * @param text to write to the file
   * @param append determines whether text should be appended to the file,
   *     or overwrite the file
   */
  private void Write(final String filename, final String text, final boolean append) {
    if (filename.startsWith("//")) {
      if (append) {
        form.dispatchErrorOccurredEvent(File.this, "AppendTo",
            ErrorMessages.ERROR_CANNOT_WRITE_ASSET, filename);
      } else {
        form.dispatchErrorOccurredEvent(File.this, "SaveFile",
            ErrorMessages.ERROR_CANNOT_WRITE_ASSET, filename);
      }
      return;
    }
    AsynchUtil.runAsynchronously(new Runnable() {
      @Override
      public void run() {
        final String filepath = AbsoluteFileName(filename);
        final java.io.File file = new java.io.File(filepath);
        if (!file.exists()) {
          try {
            file.createNewFile();
          } catch (IOException e) {
            if (append) {
              form.dispatchErrorOccurredEvent(File.this, "AppendTo",
                  ErrorMessages.ERROR_CANNOT_CREATE_FILE, filepath);
            } else {
              form.dispatchErrorOccurredEvent(File.this, "SaveFile",
                  ErrorMessages.ERROR_CANNOT_CREATE_FILE, filepath);
            }
            return;
          }
        }
        OutputStreamWriter out = null;
        try {
          out = new OutputStreamWriter(new FileOutputStream(file, append));
          out.write(text);
          out.flush();
        } catch (IOException e) {
          if (append) {
            form.dispatchErrorOccurredEvent(File.this, "AppendTo",
                ErrorMessages.ERROR_CANNOT_WRITE_TO_FILE, filepath);
          } else {
            form.dispatchErrorOccurredEvent(File.this, "SaveFile",
                ErrorMessages.ERROR_CANNOT_WRITE_TO_FILE, filepath);
          }
        } finally {
          // Always release the file descriptor, even when the write fails; the
          // previous code only closed the streams on the success path, leaking
          // the descriptor whenever write()/flush() threw. Closing the writer
          // also closes the underlying FileOutputStream.
          if (out != null) {
            try {
              out.close();
            } catch (IOException e) {
              // Nothing sensible to do if close itself fails.
            }
          }
        }
      }
    });
  }

  /**
   * Replace Windows-style CRLF with Unix LF as String. This allows
   * end-user to treat Windows text files same as Unix or Mac. In
   * future, allowing user to choose to normalize new lines might also
   * be nice - in case someone really wants to detect Windows-style
   * line separators, or save a file which was read (and expect no
   * changes in size or checksum).
   *
   * @param s string to convert
   */
  private String normalizeNewLines(String s) {
    // replace() does a literal substitution; the previous replaceAll() compiled
    // the pattern as a regex on every call for no benefit.
    return s.replace("\r\n", "\n");
  }

  /**
   * Asynchronously reads from the given stream. Posts GotText on the UI thread
   * when the read completes; dispatches an error event on failure. Always
   * closes the stream.
   *
   * @param fileInput the stream to read (closed by this method)
   * @param fileName original file name, used only for error reporting
   */
  private void AsyncRead(InputStream fileInput, final String fileName) {
    InputStreamReader input = null;
    try {
      input = new InputStreamReader(fileInput);
      StringWriter output = new StringWriter();
      char[] buffer = new char[BUFFER_LENGTH];
      int length;
      // read(buffer) is equivalent to the old read(buffer, 0, BUFFER_LENGTH);
      // the unused "offset" variable was misleading and has been dropped.
      while ((length = input.read(buffer)) > 0) {
        output.write(buffer, 0, length);
      }
      // Now that we have the file as a String,
      // normalize any line separators to avoid compatibility between Windows and Mac
      // text files. Users can expect \n to mean a line separator regardless of how
      // file was created. Currently only doing this for files opened locally - not files we pull
      // from other places like URLs.
      final String text = normalizeNewLines(output.toString());
      activity.runOnUiThread(new Runnable() {
        @Override
        public void run() {
          GotText(text);
        }
      });
    } catch (FileNotFoundException e) {
      Log.e(LOG_TAG, "FileNotFoundException", e);
      form.dispatchErrorOccurredEvent(File.this, "ReadFrom",
          ErrorMessages.ERROR_CANNOT_FIND_FILE, fileName);
    } catch (IOException e) {
      Log.e(LOG_TAG, "IOException", e);
      form.dispatchErrorOccurredEvent(File.this, "ReadFrom",
          ErrorMessages.ERROR_CANNOT_READ_FILE, fileName);
    } finally {
      if (input != null) {
        try {
          input.close();
        } catch (IOException e) {
          // do nothing...
        }
      }
    }
  }

  /**
   * Event indicating that a request has finished.
   *
   * @param text read from the file
   */
  @SimpleEvent (description = "Event indicating that the contents from the file have been read.")
  public void GotText(String text) {
    // invoke the application's "GotText" event handler.
    EventDispatcher.dispatchEvent(this, "GotText", text);
  }

  /**
   * Returns absolute file path.
   *
   * @param filename the file used to construct the file path
   */
  private String AbsoluteFileName(String filename) {
    if (filename.startsWith("/")) {
      // Leading slash: relative to the external storage root.
      return Environment.getExternalStorageDirectory().getPath() + filename;
    } else {
      java.io.File dirPath = activity.getFilesDir();
      if (isRepl) {
        // Companion: keep files under /sdcard/AppInventor/data for debuggability.
        String path = Environment.getExternalStorageDirectory().getPath() + "/AppInventor/data/";
        dirPath = new java.io.File(path);
        if (!dirPath.exists()) {
          dirPath.mkdirs(); // Make sure it exists
        }
      }
      return dirPath.getPath() + "/" + filename;
    }
  }
}
| |
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.react.modules.dialog;
import javax.annotation.Nullable;
import java.util.Map;
import android.app.Activity;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.content.DialogInterface.OnDismissListener;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import com.facebook.common.logging.FLog;
import com.facebook.react.bridge.Callback;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.module.annotations.ReactModule;
@ReactModule(name = DialogModule.NAME)
public class DialogModule extends ReactContextBaseJavaModule implements LifecycleEventListener {
/* package */ static final String FRAGMENT_TAG =
"com.facebook.catalyst.react.dialog.DialogModule";
/* package */ static final String NAME = "DialogManagerAndroid";
/* package */ static final String ACTION_BUTTON_CLICKED = "buttonClicked";
/* package */ static final String ACTION_DISMISSED = "dismissed";
/* package */ static final String KEY_TITLE = "title";
/* package */ static final String KEY_MESSAGE = "message";
/* package */ static final String KEY_BUTTON_POSITIVE = "buttonPositive";
/* package */ static final String KEY_BUTTON_NEGATIVE = "buttonNegative";
/* package */ static final String KEY_BUTTON_NEUTRAL = "buttonNeutral";
/* package */ static final String KEY_ITEMS = "items";
/* package */ static final String KEY_CANCELABLE = "cancelable";
/* package */ static final Map<String, Object> CONSTANTS = MapBuilder.<String, Object>of(
ACTION_BUTTON_CLICKED, ACTION_BUTTON_CLICKED,
ACTION_DISMISSED, ACTION_DISMISSED,
KEY_BUTTON_POSITIVE, DialogInterface.BUTTON_POSITIVE,
KEY_BUTTON_NEGATIVE, DialogInterface.BUTTON_NEGATIVE,
KEY_BUTTON_NEUTRAL, DialogInterface.BUTTON_NEUTRAL);
private boolean mIsInForeground;
public DialogModule(ReactApplicationContext reactContext) {
super(reactContext);
}
@Override
public String getName() {
return NAME;
}
/**
* Helper to allow this module to work with both the standard FragmentManager
* and the Support FragmentManager (for apps that need to use it for legacy reasons).
* Since the two APIs don't share a common interface there's unfortunately some
* code duplication.
*/
/**
 * Helper to allow this module to work with both the standard FragmentManager
 * and the Support FragmentManager (for apps that need to use it for legacy reasons).
 * Since the two APIs don't share a common interface there's unfortunately some
 * code duplication.
 */
private class FragmentManagerHelper {
  // Exactly one of the two managers below is non-null; that decides which API we use.
  private final @Nullable android.app.FragmentManager mFragmentManager;
  private final @Nullable android.support.v4.app.FragmentManager mSupportFragmentManager;

  // Alert created while the host was not in the foreground; shown on the next resume.
  private @Nullable Object mFragmentToShow;

  public FragmentManagerHelper(android.support.v4.app.FragmentManager supportFragmentManager) {
    mSupportFragmentManager = supportFragmentManager;
    mFragmentManager = null;
  }

  public FragmentManagerHelper(android.app.FragmentManager fragmentManager) {
    mSupportFragmentManager = null;
    mFragmentManager = fragmentManager;
  }

  private boolean isUsingSupportLibrary() {
    return mSupportFragmentManager != null;
  }

  public void showPendingAlert() {
    if (mFragmentToShow == null) {
      return;
    }
    if (isUsingSupportLibrary()) {
      ((SupportAlertFragment) mFragmentToShow).show(mSupportFragmentManager, FRAGMENT_TAG);
    } else {
      ((AlertFragment) mFragmentToShow).show(mFragmentManager, FRAGMENT_TAG);
    }
    mFragmentToShow = null;
  }

  private void dismissExisting() {
    if (isUsingSupportLibrary()) {
      SupportAlertFragment previous =
          (SupportAlertFragment) mSupportFragmentManager.findFragmentByTag(FRAGMENT_TAG);
      if (previous != null) {
        previous.dismiss();
      }
    } else {
      AlertFragment previous =
          (AlertFragment) mFragmentManager.findFragmentByTag(FRAGMENT_TAG);
      if (previous != null) {
        previous.dismiss();
      }
    }
  }

  public void showNewAlert(boolean isInForeground, Bundle arguments, Callback actionCallback) {
    // Only one alert fragment is kept around at a time.
    dismissExisting();
    AlertFragmentListener listener =
        actionCallback != null ? new AlertFragmentListener(actionCallback) : null;
    if (isUsingSupportLibrary()) {
      SupportAlertFragment fragment = new SupportAlertFragment(listener, arguments);
      if (isInForeground) {
        if (arguments.containsKey(KEY_CANCELABLE)) {
          fragment.setCancelable(arguments.getBoolean(KEY_CANCELABLE));
        }
        fragment.show(mSupportFragmentManager, FRAGMENT_TAG);
      } else {
        // Host is paused: defer the show until onHostResume.
        mFragmentToShow = fragment;
      }
    } else {
      AlertFragment fragment = new AlertFragment(listener, arguments);
      if (isInForeground) {
        if (arguments.containsKey(KEY_CANCELABLE)) {
          fragment.setCancelable(arguments.getBoolean(KEY_CANCELABLE));
        }
        fragment.show(mFragmentManager, FRAGMENT_TAG);
      } else {
        mFragmentToShow = fragment;
      }
    }
  }
}
/* package */ class AlertFragmentListener implements OnClickListener, OnDismissListener {
  private final Callback mCallback;
  // A JS Callback may only be invoked once; clicks and dismissals can both fire
  // for the same dialog, so this flag makes sure only the first one wins.
  private boolean mCallbackConsumed = false;

  public AlertFragmentListener(Callback callback) {
    mCallback = callback;
  }

  @Override
  public void onClick(DialogInterface dialog, int which) {
    if (!mCallbackConsumed && getReactApplicationContext().hasActiveCatalystInstance()) {
      mCallback.invoke(ACTION_BUTTON_CLICKED, which);
      mCallbackConsumed = true;
    }
  }

  @Override
  public void onDismiss(DialogInterface dialog) {
    if (!mCallbackConsumed && getReactApplicationContext().hasActiveCatalystInstance()) {
      mCallback.invoke(ACTION_DISMISSED);
      mCallbackConsumed = true;
    }
  }
}
@Override
public Map<String, Object> getConstants() {
  // Exported to JS: the action names and Android button identifiers declared
  // in the CONSTANTS map above.
  return CONSTANTS;
}
@Override
public void initialize() {
  // Subscribe to host lifecycle callbacks so mIsInForeground stays accurate
  // (see onHostResume/onHostPause below).
  getReactApplicationContext().addLifecycleEventListener(this);
}
@Override
public void onHostPause() {
  // Don't show the dialog if the host is paused; showNewAlert will queue it
  // instead (via mFragmentToShow) until the host resumes.
  mIsInForeground = false;
}
@Override
public void onHostDestroy() {
  // Intentionally empty: no state to release on host destruction.
}
@Override
public void onHostResume() {
  mIsInForeground = true;
  // A dialog may have been requested while the host was paused; surface it now.
  FragmentManagerHelper helper = getFragmentManagerHelper();
  if (helper == null) {
    FLog.w(DialogModule.class, "onHostResume called but no FragmentManager found");
    return;
  }
  helper.showPendingAlert();
}
/**
 * Shows an alert dialog configured from JS.
 *
 * @param options map of alert options (title, message, buttons, items, cancelable)
 * @param errorCallback invoked with an error message when not attached to an Activity
 * @param actionCallback invoked with the user's action (button click or dismiss)
 */
@ReactMethod
public void showAlert(
    ReadableMap options,
    Callback errorCallback,
    Callback actionCallback) {
  FragmentManagerHelper fragmentManagerHelper = getFragmentManagerHelper();
  if (fragmentManagerHelper == null) {
    errorCallback.invoke("Tried to show an alert while not attached to an Activity");
    return;
  }
  final Bundle args = new Bundle();
  // Copy each optional string argument into the fragment arguments.
  copyStringOption(options, KEY_TITLE, args, AlertFragment.ARG_TITLE);
  copyStringOption(options, KEY_MESSAGE, args, AlertFragment.ARG_MESSAGE);
  copyStringOption(options, KEY_BUTTON_POSITIVE, args, AlertFragment.ARG_BUTTON_POSITIVE);
  copyStringOption(options, KEY_BUTTON_NEGATIVE, args, AlertFragment.ARG_BUTTON_NEGATIVE);
  copyStringOption(options, KEY_BUTTON_NEUTRAL, args, AlertFragment.ARG_BUTTON_NEUTRAL);
  if (options.hasKey(KEY_ITEMS)) {
    ReadableArray items = options.getArray(KEY_ITEMS);
    CharSequence[] itemsArray = new CharSequence[items.size()];
    for (int i = 0; i < items.size(); i++) {
      itemsArray[i] = items.getString(i);
    }
    args.putCharSequenceArray(AlertFragment.ARG_ITEMS, itemsArray);
  }
  if (options.hasKey(KEY_CANCELABLE)) {
    args.putBoolean(KEY_CANCELABLE, options.getBoolean(KEY_CANCELABLE));
  }
  fragmentManagerHelper.showNewAlert(mIsInForeground, args, actionCallback);
}

/** Copies a string option into the fragment arguments when the option key is present. */
private static void copyStringOption(
    ReadableMap options, String optionKey, Bundle args, String argKey) {
  if (options.hasKey(optionKey)) {
    args.putString(argKey, options.getString(optionKey));
  }
}
/**
 * Builds a short-lived helper that works transparently with either the modern
 * FragmentManager or the legacy support FragmentManager.
 *
 * <p>Returns null when the module is not attached to an Activity.
 *
 * <p>DO NOT HOLD LONG-LIVED REFERENCES TO THE OBJECT RETURNED BY THIS METHOD, AS THIS WILL CAUSE
 * MEMORY LEAKS.
 */
private @Nullable FragmentManagerHelper getFragmentManagerHelper() {
  Activity activity = getCurrentActivity();
  if (activity == null) {
    return null;
  }
  return activity instanceof FragmentActivity
      ? new FragmentManagerHelper(((FragmentActivity) activity).getSupportFragmentManager())
      : new FragmentManagerHelper(activity.getFragmentManager());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.support.processor.validation;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Collections;
import javax.xml.XMLConstants;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXResult;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stax.StAXSource;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import org.w3c.dom.Node;
import org.w3c.dom.ls.LSResourceResolver;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.apache.camel.AsyncCallback;
import org.apache.camel.Exchange;
import org.apache.camel.ExpectedBodyTypeException;
import org.apache.camel.RuntimeTransformException;
import org.apache.camel.TypeConverter;
import org.apache.camel.support.AsyncProcessorSupport;
import org.apache.camel.support.builder.xml.XMLConverterHelper;
import org.apache.camel.util.IOHelper;
import static org.apache.camel.support.processor.validation.SchemaReader.ACCESS_EXTERNAL_DTD;
/**
* A processor which validates the XML version of the inbound message body
* against some schema either in XSD or RelaxNG
*/
public class ValidatingProcessor extends AsyncProcessorSupport {
private final SchemaReader schemaReader;
private ValidatorErrorHandler errorHandler = new DefaultValidationErrorHandler();
private boolean useSharedSchema = true;
private boolean failOnNullBody = true;
private boolean failOnNullHeader = true;
private String headerName;
private XMLConverterHelper converter = new XMLConverterHelper();
public ValidatingProcessor() {
schemaReader = new SchemaReader();
}
public ValidatingProcessor(SchemaReader schemaReader) {
// schema reader can be a singelton per schema, therefore make reuse,
// see
// ValidatorEndpoint and ValidatorProducer
this.schemaReader = schemaReader;
}
public boolean process(Exchange exchange, AsyncCallback callback) {
try {
doProcess(exchange);
} catch (Exception e) {
exchange.setException(e);
}
callback.done(true);
return true;
}
protected void doProcess(Exchange exchange) throws Exception {
Schema schema;
if (isUseSharedSchema()) {
schema = getSchema();
} else {
schema = createSchema();
}
Validator validator = schema.newValidator();
// turn off access to external schema by default
if (!Boolean.parseBoolean(exchange.getContext().getGlobalOptions().get(ACCESS_EXTERNAL_DTD))) {
try {
log.debug("Configuring Validator to not allow access to external DTD/Schema");
validator.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, "");
validator.setProperty(XMLConstants.ACCESS_EXTERNAL_SCHEMA, "");
} catch (SAXException e) {
log.warn(e.getMessage(), e);
}
}
// the underlying input stream, which we need to close to avoid locking
// files or
// other resources
Source source = null;
InputStream is = null;
try {
Result result = null;
// only convert to input stream if really needed
if (isInputStreamNeeded(exchange)) {
is = getContentToValidate(exchange, InputStream.class);
if (is != null) {
source = getSource(exchange, is);
}
} else {
Object content = getContentToValidate(exchange);
if (content != null) {
source = getSource(exchange, content);
}
}
if (shouldUseHeader()) {
if (source == null && isFailOnNullHeader()) {
throw new NoXmlHeaderValidationException(exchange, headerName);
}
} else {
if (source == null && isFailOnNullBody()) {
throw new NoXmlBodyValidationException(exchange);
}
}
// CAMEL-7036 We don't need to set the result if the source is an
// instance of
// StreamSource
if (source instanceof DOMSource) {
result = new DOMResult();
} else if (source instanceof SAXSource) {
result = new SAXResult();
} else if (source instanceof StAXSource || source instanceof StreamSource) {
result = null;
}
if (source != null) {
// create a new errorHandler and set it on the validator
// must be a local instance to avoid problems with concurrency
// (to be
// thread safe)
ValidatorErrorHandler handler = errorHandler.getClass().getDeclaredConstructor().newInstance();
validator.setErrorHandler(handler);
try {
log.trace("Validating {}", source);
validator.validate(source, result);
handler.handleErrors(exchange, schema, result);
} catch (SAXParseException e) {
// can be thrown for non well formed XML
throw new SchemaValidationException(exchange, schema, Collections.singletonList(e), Collections.<SAXParseException> emptyList(),
Collections.<SAXParseException> emptyList());
}
}
} finally {
IOHelper.close(is);
}
}
private Object getContentToValidate(Exchange exchange) {
if (shouldUseHeader()) {
return exchange.getIn().getHeader(headerName);
} else {
return exchange.getIn().getBody();
}
}
private <T> T getContentToValidate(Exchange exchange, Class<T> clazz) {
if (shouldUseHeader()) {
return exchange.getIn().getHeader(headerName, clazz);
} else {
return exchange.getIn().getBody(clazz);
}
}
private boolean shouldUseHeader() {
return headerName != null;
}
public void loadSchema() throws Exception {
schemaReader.loadSchema();
}
// Properties
// -----------------------------------------------------------------------
public Schema getSchema() throws IOException, SAXException {
return schemaReader.getSchema();
}
public void setSchema(Schema schema) {
schemaReader.setSchema(schema);
}
public String getSchemaLanguage() {
return schemaReader.getSchemaLanguage();
}
public void setSchemaLanguage(String schemaLanguage) {
schemaReader.setSchemaLanguage(schemaLanguage);
}
public Source getSchemaSource() throws IOException {
return schemaReader.getSchemaSource();
}
public void setSchemaSource(Source schemaSource) {
schemaReader.setSchemaSource(schemaSource);
}
public URL getSchemaUrl() {
return schemaReader.getSchemaUrl();
}
public void setSchemaUrl(URL schemaUrl) {
schemaReader.setSchemaUrl(schemaUrl);
}
public File getSchemaFile() {
return schemaReader.getSchemaFile();
}
public void setSchemaFile(File schemaFile) {
schemaReader.setSchemaFile(schemaFile);
}
public byte[] getSchemaAsByteArray() {
return schemaReader.getSchemaAsByteArray();
}
public void setSchemaAsByteArray(byte[] schemaAsByteArray) {
schemaReader.setSchemaAsByteArray(schemaAsByteArray);
}
public SchemaFactory getSchemaFactory() {
return schemaReader.getSchemaFactory();
}
public void setSchemaFactory(SchemaFactory schemaFactory) {
schemaReader.setSchemaFactory(schemaFactory);
}
public ValidatorErrorHandler getErrorHandler() {
return errorHandler;
}
public void setErrorHandler(ValidatorErrorHandler errorHandler) {
this.errorHandler = errorHandler;
}
public boolean isUseSharedSchema() {
return useSharedSchema;
}
public void setUseSharedSchema(boolean useSharedSchema) {
this.useSharedSchema = useSharedSchema;
}
public LSResourceResolver getResourceResolver() {
return schemaReader.getResourceResolver();
}
public void setResourceResolver(LSResourceResolver resourceResolver) {
schemaReader.setResourceResolver(resourceResolver);
}
public boolean isFailOnNullBody() {
return failOnNullBody;
}
public void setFailOnNullBody(boolean failOnNullBody) {
this.failOnNullBody = failOnNullBody;
}
public boolean isFailOnNullHeader() {
return failOnNullHeader;
}
public void setFailOnNullHeader(boolean failOnNullHeader) {
this.failOnNullHeader = failOnNullHeader;
}
public String getHeaderName() {
return headerName;
}
public void setHeaderName(String headerName) {
this.headerName = headerName;
}
// Implementation methods
// -----------------------------------------------------------------------
protected SchemaFactory createSchemaFactory() {
return schemaReader.createSchemaFactory();
}
protected Source createSchemaSource() throws IOException {
return schemaReader.createSchemaSource();
}
protected Schema createSchema() throws SAXException, IOException {
return schemaReader.createSchema();
}
/**
* Checks whether we need an {@link InputStream} to access the message body
* or header.
* <p/>
* Depending on the content in the message body or header, we may not need
* to convert to {@link InputStream}.
*
* @param exchange the current exchange
* @return <tt>true</tt> to convert to {@link InputStream} beforehand
* converting to {@link Source} afterwards.
*/
protected boolean isInputStreamNeeded(Exchange exchange) {
Object content = getContentToValidate(exchange);
if (content == null) {
return false;
}
if (content instanceof InputStream) {
return true;
} else if (content instanceof Source) {
return false;
} else if (content instanceof String) {
return false;
} else if (content instanceof byte[]) {
return false;
} else if (content instanceof Node) {
return false;
} else if (exchange.getContext().getTypeConverterRegistry().lookup(Source.class, content.getClass()) != null) {
// there is a direct and hopefully optimized converter to Source
return false;
}
// yes an input stream is needed
return true;
}
/**
* Converts the inbound body or header to a {@link Source}, if it is
* <b>not</b> already a {@link Source}.
* <p/>
* This implementation will prefer to source in the following order:
* <ul>
* <li>DOM - DOM if explicit configured to use DOM</li>
* <li>SAX - SAX as 2nd choice</li>
* <li>Stream - Stream as 3rd choice</li>
* <li>DOM - DOM as 4th choice</li>
* </ul>
*/
protected Source getSource(Exchange exchange, Object content) {
// body or header may already be a source
if (content instanceof Source) {
return (Source)content;
}
Source source = null;
if (content instanceof InputStream) {
return new StreamSource((InputStream)content);
}
if (content != null) {
TypeConverter tc = exchange.getContext().getTypeConverterRegistry().lookup(Source.class, content.getClass());
if (tc != null) {
source = tc.convertTo(Source.class, exchange, content);
}
}
if (source == null) {
// then try SAX
source = exchange.getContext().getTypeConverter().tryConvertTo(SAXSource.class, exchange, content);
}
if (source == null) {
// then try stream
source = exchange.getContext().getTypeConverter().tryConvertTo(StreamSource.class, exchange, content);
}
if (source == null) {
// and fallback to DOM
source = exchange.getContext().getTypeConverter().tryConvertTo(DOMSource.class, exchange, content);
}
if (source == null) {
if (isFailOnNullBody()) {
throw new ExpectedBodyTypeException(exchange, Source.class);
} else {
try {
source = converter.toDOMSource(converter.createDocument());
} catch (ParserConfigurationException | TransformerException e) {
throw new RuntimeTransformException(e);
}
}
}
return source;
}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.base;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleSecurityException;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.RepositoryPluginType;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.metastore.MetaStoreConst;
import org.pentaho.di.repository.RepositoriesMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.RepositoryMeta;
import org.pentaho.di.repository.RepositoryOperation;
import org.pentaho.metastore.api.exceptions.MetaStoreException;
import org.pentaho.metastore.stores.delegate.DelegatingMetaStore;
import org.pentaho.di.version.BuildVersion;
/**
 * Shared infrastructure for Kettle command-line executors: logging helpers,
 * repository connection handling, elapsed-time reporting and result bookkeeping.
 */
public abstract class AbstractBaseCommandExecutor {

  // NOTE(review): SimpleDateFormat is not thread-safe; assumed to be used from a
  // single command-executor thread — confirm before sharing instances.
  private SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyy/MM/dd HH:mm:ss.SSS" );

  /** Canonical "enabled" flag value accepted by command-line options. */
  public static final String YES = "Y";

  private LogChannelInterface log;
  private Class<?> pkgClazz;

  DelegatingMetaStore metaStore;

  private Result result = new Result();

  /** Stores the exit status on the shared result and returns that result. */
  protected Result exitWithStatus( final int exitStatus ) {
    getResult().setExitStatus( exitStatus );
    return getResult();
  }

  /** Creates a metastore delegating to the local Pentaho metastore, which is made the active one. */
  public DelegatingMetaStore createDefaultMetastore() throws MetaStoreException {
    DelegatingMetaStore metaStore = new DelegatingMetaStore();
    metaStore.addMetaStore( MetaStoreConst.openLocalPentahoMetaStore() );
    metaStore.setActiveMetaStoreName( metaStore.getName() );
    return metaStore;
  }

  /** Logs the localized message for the given key, only when debug logging is enabled. */
  protected void logDebug( final String messageKey ) {
    if ( getLog().isDebug() ) {
      getLog().logDebug( BaseMessages.getString( getPkgClazz(), messageKey ) );
    }
  }

  /** Logs the localized, parameterized message for the given key, only when debug logging is enabled. */
  protected void logDebug( final String messageKey, String... messageTokens ) {
    if ( getLog().isDebug() ) {
      getLog().logDebug( BaseMessages.getString( getPkgClazz(), messageKey, messageTokens ) );
    }
  }

  /**
   * Logs the start/stop timestamps plus a human-readable elapsed time, choosing the
   * message token that matches the elapsed magnitude (seconds/minutes/hours/days).
   *
   * @return the elapsed time in whole seconds
   */
  protected int calculateAndPrintElapsedTime( Date start, Date stop, String startStopMsgTkn, String processingEndAfterMsgTkn,
                                              String processingEndAfterLongMsgTkn, String processingEndAfterLongerMsgTkn,
                                              String processingEndAfterLongestMsgTkn ) {
    // SimpleDateFormat.format() already returns a String; the redundant toString() calls were removed
    String begin = getDateFormat().format( start );
    String end = getDateFormat().format( stop );
    getLog().logMinimal( BaseMessages.getString( getPkgClazz(), startStopMsgTkn, begin, end ) );
    long millis = stop.getTime() - start.getTime();
    int seconds = (int) ( millis / 1000 );
    if ( seconds <= 60 ) {
      getLog().logMinimal( BaseMessages.getString( getPkgClazz(), processingEndAfterMsgTkn, String.valueOf( seconds ) ) );
    } else if ( seconds <= 60 * 60 ) {
      int min = ( seconds / 60 );
      int rem = ( seconds % 60 );
      getLog().logMinimal( BaseMessages.getString( getPkgClazz(), processingEndAfterLongMsgTkn, String.valueOf( min ),
        String.valueOf( rem ), String.valueOf( seconds ) ) );
    } else if ( seconds <= 60 * 60 * 24 ) {
      int rem;
      int hour = ( seconds / ( 60 * 60 ) );
      rem = ( seconds % ( 60 * 60 ) );
      int min = rem / 60;
      rem = rem % 60;
      getLog().logMinimal( BaseMessages.getString( getPkgClazz(), processingEndAfterLongerMsgTkn, String.valueOf( hour ),
        String.valueOf( min ), String.valueOf( rem ), String.valueOf( seconds ) ) );
    } else {
      int rem;
      int days = ( seconds / ( 60 * 60 * 24 ) );
      rem = ( seconds % ( 60 * 60 * 24 ) );
      int hour = rem / ( 60 * 60 );
      rem = rem % ( 60 * 60 );
      int min = rem / 60;
      rem = rem % 60;
      getLog().logMinimal( BaseMessages.getString( getPkgClazz(), processingEndAfterLongestMsgTkn, String.valueOf( days ),
        String.valueOf( hour ), String.valueOf( min ), String.valueOf( rem ), String.valueOf( seconds ) ) );
    }
    return seconds;
  }

  /** Logs the Kettle build version, revision and build date. */
  protected void printVersion( String kettleVersionMsgTkn ) {
    BuildVersion buildVersion = BuildVersion.getInstance();
    getLog().logBasic( BaseMessages.getString( getPkgClazz(), kettleVersionMsgTkn, buildVersion.getVersion(),
      buildVersion.getRevision(), buildVersion.getBuildDate() ) );
  }

  /**
   * Looks up the repository metadata for the given repository name.
   *
   * @return the matching repository metadata, or null when no name was given or no repositories are defined
   */
  public RepositoryMeta loadRepositoryConnection( final String repoName, String loadingAvailableRepMsgTkn,
                                                  String noRepsDefinedMsgTkn, String findingRepMsgTkn ) throws KettleException {
    RepositoriesMeta repsinfo;
    if ( Utils.isEmpty( repoName ) || ( repsinfo = loadRepositoryInfo( loadingAvailableRepMsgTkn, noRepsDefinedMsgTkn ) ) == null ) {
      return null;
    }
    logDebug( findingRepMsgTkn, repoName );
    return repsinfo.findRepository( repoName );
  }

  /** Reads the defined repositories, wrapping read failures in a localized KettleException. */
  public RepositoriesMeta loadRepositoryInfo( String loadingAvailableRepMsgTkn, String noRepsDefinedMsgTkn ) throws KettleException {
    RepositoriesMeta repsinfo = new RepositoriesMeta();
    repsinfo.getLog().setLogLevel( getLog().getLogLevel() );
    logDebug( loadingAvailableRepMsgTkn );
    try {
      repsinfo.readData();
    } catch ( Exception e ) {
      throw new KettleException( BaseMessages.getString( getPkgClazz(), noRepsDefinedMsgTkn ), e );
    }
    return repsinfo;
  }

  /**
   * Connects to the repository described by the metadata and optionally validates
   * that the user may perform the given operations.
   */
  public Repository establishRepositoryConnection( RepositoryMeta repositoryMeta, final String username, final String password,
                                                   final RepositoryOperation... operations ) throws KettleException, KettleSecurityException {
    Repository rep = PluginRegistry.getInstance().loadClass( RepositoryPluginType.class, repositoryMeta, Repository.class );
    rep.init( repositoryMeta );
    rep.getLog().setLogLevel( getLog().getLogLevel() );
    // the previous null-guarding ternaries (x != null ? x : null) were no-ops and have been removed
    rep.connect( username, password );
    if ( operations != null ) {
      // throws KettleSecurityException if username does not have permission for given operations
      rep.getSecurityProvider().validateAction( operations );
    }
    return rep;
  }

  /** Prints the names of the sub-directories of the given repository directory to stdout. */
  public void printRepositoryDirectories( Repository repository, RepositoryDirectoryInterface directory ) throws KettleException {
    String[] directories = repository.getDirectoryNames( directory.getObjectId() );
    if ( directories != null ) {
      for ( String dir : directories ) {
        System.out.println( dir );
      }
    }
  }

  /** Prints a single named parameter with its value, optional default and description. */
  protected void printParameter( String name, String value, String defaultValue, String description ) {
    if ( Utils.isEmpty( defaultValue ) ) {
      System.out.println( "Parameter: " + name + "=" + Const.NVL( value, "" ) + " : " + Const.NVL( description, "" ) );
    } else {
      System.out.println( "Parameter: " + name + "=" + Const.NVL( value, "" ) + ", default=" + defaultValue + " : " + Const.NVL( description, "" ) );
    }
  }

  /** Returns true when the value is "Y" or "true" (any case); null-safe. */
  public boolean isEnabled( final String value ) {
    return YES.equalsIgnoreCase( value ) || Boolean.parseBoolean( value ); // both are NPE safe, both are case-insensitive
  }

  public LogChannelInterface getLog() {
    return log;
  }

  public void setLog( LogChannelInterface log ) {
    this.log = log;
  }

  public Class<?> getPkgClazz() {
    return pkgClazz;
  }

  public void setPkgClazz( Class<?> pkgClazz ) {
    this.pkgClazz = pkgClazz;
  }

  public DelegatingMetaStore getMetaStore() {
    return metaStore;
  }

  public void setMetaStore( DelegatingMetaStore metaStore ) {
    this.metaStore = metaStore;
  }

  public SimpleDateFormat getDateFormat() {
    return dateFormat;
  }

  public void setDateFormat( SimpleDateFormat dateFormat ) {
    this.dateFormat = dateFormat;
  }

  public Result getResult() {
    return result;
  }

  public void setResult( Result result ) {
    this.result = result;
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.execution;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.concurrent.SetThreadName;
import io.airlift.log.Logger;
import io.airlift.stats.CounterStat;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import io.prestosql.OutputBuffers;
import io.prestosql.OutputBuffers.OutputBufferId;
import io.prestosql.Session;
import io.prestosql.TaskSource;
import io.prestosql.execution.StateMachine.StateChangeListener;
import io.prestosql.execution.buffer.BufferResult;
import io.prestosql.execution.buffer.LazyOutputBuffer;
import io.prestosql.execution.buffer.OutputBuffer;
import io.prestosql.memory.QueryContext;
import io.prestosql.operator.PipelineContext;
import io.prestosql.operator.PipelineStatus;
import io.prestosql.operator.TaskContext;
import io.prestosql.operator.TaskStats;
import io.prestosql.sql.planner.PlanFragment;
import io.prestosql.sql.planner.plan.PlanNodeId;
import org.joda.time.DateTime;
import javax.annotation.Nullable;
import java.net.URI;
import java.util.List;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.airlift.units.DataSize.Unit.BYTE;
import static io.airlift.units.DataSize.succinctBytes;
import static io.prestosql.execution.TaskState.ABORTED;
import static io.prestosql.execution.TaskState.FAILED;
import static io.prestosql.util.Failures.toFailures;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
public class SqlTask
{
private static final Logger log = Logger.get(SqlTask.class);
private final TaskId taskId;
private final String taskInstanceId;
private final URI location;
private final String nodeId;
private final TaskStateMachine taskStateMachine;
private final OutputBuffer outputBuffer;
private final QueryContext queryContext;
private final SqlTaskExecutionFactory sqlTaskExecutionFactory;
private final AtomicReference<DateTime> lastHeartbeat = new AtomicReference<>(DateTime.now());
private final AtomicLong nextTaskInfoVersion = new AtomicLong(TaskStatus.STARTING_VERSION);
private final AtomicReference<TaskHolder> taskHolderReference = new AtomicReference<>(new TaskHolder());
private final AtomicBoolean needsPlan = new AtomicBoolean(true);
    /**
     * Factory method: constructs the task and then registers its completion
     * listener in a second step, so the {@code this} reference does not escape
     * during construction.
     */
    public static SqlTask createSqlTask(
            TaskId taskId,
            URI location,
            String nodeId,
            QueryContext queryContext,
            SqlTaskExecutionFactory sqlTaskExecutionFactory,
            ExecutorService taskNotificationExecutor,
            Function<SqlTask, ?> onDone,
            DataSize maxBufferSize,
            CounterStat failedTasks)
    {
        SqlTask sqlTask = new SqlTask(taskId, location, nodeId, queryContext, sqlTaskExecutionFactory, taskNotificationExecutor, maxBufferSize);
        sqlTask.initialize(onDone, failedTasks);
        return sqlTask;
    }
    private SqlTask(
            TaskId taskId,
            URI location,
            String nodeId,
            QueryContext queryContext,
            SqlTaskExecutionFactory sqlTaskExecutionFactory,
            ExecutorService taskNotificationExecutor,
            DataSize maxBufferSize)
    {
        this.taskId = requireNonNull(taskId, "taskId is null");
        // random id distinguishing this in-memory instance of the task
        this.taskInstanceId = UUID.randomUUID().toString();
        this.location = requireNonNull(location, "location is null");
        this.nodeId = requireNonNull(nodeId, "nodeId is null");
        this.queryContext = requireNonNull(queryContext, "queryContext is null");
        this.sqlTaskExecutionFactory = requireNonNull(sqlTaskExecutionFactory, "sqlTaskExecutionFactory is null");
        requireNonNull(taskNotificationExecutor, "taskNotificationExecutor is null");
        requireNonNull(maxBufferSize, "maxBufferSize is null");
        outputBuffer = new LazyOutputBuffer(
                taskId,
                taskInstanceId,
                taskNotificationExecutor,
                maxBufferSize,
                // Pass a memory context supplier instead of a memory context to the output buffer,
                // because we haven't created the task context that holds the memory context yet.
                () -> queryContext.getTaskContextByTaskId(taskId).localSystemMemoryContext());
        taskStateMachine = new TaskStateMachine(taskId, taskNotificationExecutor);
    }
// this is a separate method to ensure that the `this` reference is not leaked during construction
private void initialize(Function<SqlTask, ?> onDone, CounterStat failedTasks)
{
requireNonNull(onDone, "onDone is null");
requireNonNull(failedTasks, "failedTasks is null");
taskStateMachine.addStateChangeListener(new StateChangeListener<TaskState>()
{
@Override
public void stateChanged(TaskState newState)
{
if (!newState.isDone()) {
return;
}
// Update failed tasks counter
if (newState == FAILED) {
failedTasks.update(1);
}
// store final task info
while (true) {
TaskHolder taskHolder = taskHolderReference.get();
if (taskHolder.isFinished()) {
// another concurrent worker already set the final state
return;
}
if (taskHolderReference.compareAndSet(taskHolder, new TaskHolder(createTaskInfo(taskHolder), taskHolder.getIoStats()))) {
break;
}
}
// make sure buffers are cleaned up
if (newState == FAILED || newState == ABORTED) {
// don't close buffers for a failed query
// closed buffers signal to upstream tasks that everything finished cleanly
outputBuffer.fail();
}
else {
outputBuffer.destroy();
}
try {
onDone.apply(SqlTask.this);
}
catch (Exception e) {
log.warn(e, "Error running task cleanup callback %s", SqlTask.this.taskId);
}
}
});
}
    /** Returns whether this task's output buffer is currently over-utilized. */
    public boolean isOutputBufferOverutilized()
    {
        return outputBuffer.isOverutilized();
    }
    /** Returns the I/O stats from the current task holder snapshot. */
    public SqlTaskIoStats getIoStats()
    {
        return taskHolderReference.get().getIoStats();
    }
    /** Returns the task id (delegated to the state machine). */
    public TaskId getTaskId()
    {
        return taskStateMachine.getTaskId();
    }
    /** Returns the random id assigned to this in-memory task instance. */
    public String getTaskInstanceId()
    {
        return taskInstanceId;
    }
    /** Records the current time as the last heartbeat from the coordinator. */
    public void recordHeartbeat()
    {
        lastHeartbeat.set(DateTime.now());
    }
    /** Builds a fresh TaskInfo snapshot from the current task holder. */
    public TaskInfo getTaskInfo()
    {
        try (SetThreadName ignored = new SetThreadName("Task-%s", taskId)) {
            return createTaskInfo(taskHolderReference.get());
        }
    }
    /** Builds a fresh TaskStatus snapshot from the current task holder. */
    public TaskStatus getTaskStatus()
    {
        try (SetThreadName ignored = new SetThreadName("Task-%s", taskId)) {
            return createTaskStatus(taskHolderReference.get());
        }
    }
    /**
     * Assembles a TaskStatus, taking the stats from the frozen final task info when the
     * task has finished, or live from the running execution's pipeline contexts otherwise.
     */
    private TaskStatus createTaskStatus(TaskHolder taskHolder)
    {
        // Always return a new TaskInfo with a larger version number;
        // otherwise a client will not accept the update
        long versionNumber = nextTaskInfoVersion.getAndIncrement();

        TaskState state = taskStateMachine.getState();
        List<ExecutionFailureInfo> failures = ImmutableList.of();
        if (state == FAILED) {
            failures = toFailures(taskStateMachine.getFailureCauses());
        }

        int queuedPartitionedDrivers = 0;
        int runningPartitionedDrivers = 0;
        DataSize physicalWrittenDataSize = new DataSize(0, BYTE);
        DataSize userMemoryReservation = new DataSize(0, BYTE);
        DataSize systemMemoryReservation = new DataSize(0, BYTE);
        // TODO: add a mechanism to avoid sending the whole completedDriverGroups set over the wire for every task status reply
        Set<Lifespan> completedDriverGroups = ImmutableSet.of();
        long fullGcCount = 0;
        Duration fullGcTime = new Duration(0, MILLISECONDS);
        if (taskHolder.getFinalTaskInfo() != null) {
            // task finished: read everything from the frozen final stats
            TaskStats taskStats = taskHolder.getFinalTaskInfo().getStats();
            queuedPartitionedDrivers = taskStats.getQueuedPartitionedDrivers();
            runningPartitionedDrivers = taskStats.getRunningPartitionedDrivers();
            physicalWrittenDataSize = taskStats.getPhysicalWrittenDataSize();
            userMemoryReservation = taskStats.getUserMemoryReservation();
            systemMemoryReservation = taskStats.getSystemMemoryReservation();
            fullGcCount = taskStats.getFullGcCount();
            fullGcTime = taskStats.getFullGcTime();
        }
        else if (taskHolder.getTaskExecution() != null) {
            // task running: aggregate live counts across pipelines
            long physicalWrittenBytes = 0;
            TaskContext taskContext = taskHolder.getTaskExecution().getTaskContext();
            for (PipelineContext pipelineContext : taskContext.getPipelineContexts()) {
                PipelineStatus pipelineStatus = pipelineContext.getPipelineStatus();
                queuedPartitionedDrivers += pipelineStatus.getQueuedPartitionedDrivers();
                runningPartitionedDrivers += pipelineStatus.getRunningPartitionedDrivers();
                physicalWrittenBytes += pipelineContext.getPhysicalWrittenDataSize();
            }
            physicalWrittenDataSize = succinctBytes(physicalWrittenBytes);
            userMemoryReservation = taskContext.getMemoryReservation();
            systemMemoryReservation = taskContext.getSystemMemoryReservation();
            completedDriverGroups = taskContext.getCompletedDriverGroups();
            fullGcCount = taskContext.getFullGcCount();
            fullGcTime = taskContext.getFullGcTime();
        }

        return new TaskStatus(taskStateMachine.getTaskId(),
                taskInstanceId,
                versionNumber,
                state,
                location,
                nodeId,
                completedDriverGroups,
                failures,
                queuedPartitionedDrivers,
                runningPartitionedDrivers,
                isOutputBufferOverutilized(),
                physicalWrittenDataSize,
                userMemoryReservation,
                systemMemoryReservation,
                fullGcCount,
                fullGcTime);
    }
/**
 * Returns the most authoritative stats available for this task: final stats
 * when the task has finished, live stats when an execution exists, and a
 * placeholder otherwise.
 */
private TaskStats getTaskStats(TaskHolder taskHolder)
{
    TaskInfo completedInfo = taskHolder.getFinalTaskInfo();
    if (completedInfo != null) {
        // task is done: stats are frozen in the final TaskInfo
        return completedInfo.getStats();
    }
    SqlTaskExecution execution = taskHolder.getTaskExecution();
    if (execution != null) {
        // task is live: read current stats from its context
        return execution.getTaskContext().getTaskStats();
    }
    // the task never started an execution; if it already reached a done
    // state, close out the placeholder stats with an end time of "now"
    DateTime endTime = taskStateMachine.getState().isDone() ? DateTime.now() : null;
    return new TaskStats(taskStateMachine.getCreatedTime(), endTime);
}
/**
 * Returns the plan nodes that will receive no further splits: taken from the
 * final TaskInfo when the task is finished, from the live execution when it
 * is running, and the empty set when execution was never created.
 */
private static Set<PlanNodeId> getNoMoreSplits(TaskHolder taskHolder)
{
    TaskInfo completedInfo = taskHolder.getFinalTaskInfo();
    if (completedInfo != null) {
        return completedInfo.getNoMoreSplits();
    }
    SqlTaskExecution execution = taskHolder.getTaskExecution();
    if (execution != null) {
        return execution.getNoMoreSplits();
    }
    return ImmutableSet.of();
}
/**
 * Assembles a point-in-time {@link TaskInfo} snapshot from the given holder,
 * combining status, stats, split state, buffer info and the plan flag.
 */
private TaskInfo createTaskInfo(TaskHolder taskHolder)
{
    // snapshot the holder-derived pieces first (same order as before)
    TaskStats snapshotStats = getTaskStats(taskHolder);
    Set<PlanNodeId> finishedNodes = getNoMoreSplits(taskHolder);
    TaskStatus snapshotStatus = createTaskStatus(taskHolder);
    return new TaskInfo(
            snapshotStatus,
            lastHeartbeat.get(),
            outputBuffer.getInfo(),
            finishedNodes,
            snapshotStats,
            needsPlan.get());
}
/**
 * Returns a future that completes with a fresh {@link TaskStatus} once the
 * task state moves past {@code callersCurrentState}. Completes immediately
 * when the caller has already observed a terminal state, since no further
 * transitions are possible.
 */
public ListenableFuture<TaskStatus> getTaskStatus(TaskState callersCurrentState)
{
    requireNonNull(callersCurrentState, "callersCurrentState is null");
    if (callersCurrentState.isDone()) {
        return immediateFuture(getTaskStatus());
    }
    // fire once the state machine leaves the caller's known state
    return Futures.transform(
            taskStateMachine.getStateChange(callersCurrentState),
            ignored -> getTaskStatus(),
            directExecutor());
}
/**
 * Returns a future that completes with a fresh {@link TaskInfo} once the
 * task state moves past {@code callersCurrentState}.
 */
public ListenableFuture<TaskInfo> getTaskInfo(TaskState callersCurrentState)
{
    requireNonNull(callersCurrentState, "callersCurrentState is null");
    // If the caller's current state is already done, just return the current
    // state of this task as it will either be done or possibly still running
    // (due to a bug in the caller), since we can not transition from a done
    // state.
    if (callersCurrentState.isDone()) {
        return immediateFuture(getTaskInfo());
    }
    // otherwise wait for the next state transition before answering
    return Futures.transform(
            taskStateMachine.getStateChange(callersCurrentState),
            ignored -> getTaskInfo(),
            directExecutor());
}
/**
 * Applies a task update: establishes the output buffers, lazily creates the
 * task execution on the first call that carries a plan fragment, and adds
 * any new sources to the execution.
 *
 * Errors are recorded as a task failure and rethrown; RuntimeExceptions are
 * recorded as a failure but swallowed, so the caller always receives the
 * current TaskInfo (which then reflects the failed state).
 */
public TaskInfo updateTask(Session session, Optional<PlanFragment> fragment, List<TaskSource> sources, OutputBuffers outputBuffers, OptionalInt totalPartitions)
{
try {
// The LazyOutput buffer does not support write methods, so the actual
// output buffer must be established before drivers are created (e.g.
// a VALUES query).
outputBuffer.setOutputBuffers(outputBuffers);
// assure the task execution is only created once
SqlTaskExecution taskExecution;
synchronized (this) {
// is task already complete?
TaskHolder taskHolder = taskHolderReference.get();
if (taskHolder.isFinished()) {
return taskHolder.getFinalTaskInfo();
}
taskExecution = taskHolder.getTaskExecution();
if (taskExecution == null) {
checkState(fragment.isPresent(), "fragment must be present");
taskExecution = sqlTaskExecutionFactory.create(session, queryContext, taskStateMachine, outputBuffer, fragment.get(), sources, totalPartitions);
// CAS rather than set: a concurrent finish may have swapped in a
// final holder already, which must not be overwritten
taskHolderReference.compareAndSet(taskHolder, new TaskHolder(taskExecution));
needsPlan.set(false);
}
}
// sources are added outside the lock
if (taskExecution != null) {
taskExecution.addSources(sources);
}
}
catch (Error e) {
failed(e);
throw e;
}
catch (RuntimeException e) {
// record the failure; the TaskInfo returned below will reflect it
failed(e);
}
return getTaskInfo();
}
/**
 * Fetches buffered results for the given output buffer starting at the given
 * sequence id, returning up to {@code maxSize} bytes when data is available.
 */
public ListenableFuture<BufferResult> getTaskResults(OutputBufferId bufferId, long startingSequenceId, DataSize maxSize)
{
requireNonNull(bufferId, "bufferId is null");
checkArgument(maxSize.toBytes() > 0, "maxSize must be at least 1 byte");
return outputBuffer.get(bufferId, startingSequenceId, maxSize);
}
/**
 * Acknowledges receipt of results up to {@code sequenceId} for the given
 * output buffer, allowing the buffer to release the acknowledged pages.
 */
public void acknowledgeTaskResults(OutputBufferId bufferId, long sequenceId)
{
requireNonNull(bufferId, "bufferId is null");
outputBuffer.acknowledge(bufferId, sequenceId);
}
/**
 * Aborts the given output buffer and returns the current task info.
 */
public TaskInfo abortTaskResults(OutputBufferId bufferId)
{
requireNonNull(bufferId, "bufferId is null");
log.debug("Aborting task %s output %s", taskId, bufferId);
outputBuffer.abort(bufferId);
return getTaskInfo();
}
/**
 * Transitions the task to the failed state, recording {@code cause}.
 */
public void failed(Throwable cause)
{
requireNonNull(cause, "cause is null");
taskStateMachine.failed(cause);
}
/**
 * Requests cancellation via the state machine and returns the current task info.
 */
public TaskInfo cancel()
{
taskStateMachine.cancel();
return getTaskInfo();
}
/**
 * Requests an abort via the state machine and returns the current task info.
 */
public TaskInfo abort()
{
taskStateMachine.abort();
return getTaskInfo();
}
@Override
public String toString()
{
// the task id alone identifies the task in logs
return taskId.toString();
}
/**
 * Immutable holder describing the task's lifecycle phase. Exactly one of the
 * three shapes exists at a time: empty (not yet started), holding a live
 * {@code taskExecution} (running), or holding {@code finalTaskInfo} plus
 * {@code finalIoStats} (finished). Stored in an atomic reference and swapped
 * via CAS as the task progresses.
 */
private static final class TaskHolder
{
private final SqlTaskExecution taskExecution;
private final TaskInfo finalTaskInfo;
private final SqlTaskIoStats finalIoStats;
// not-yet-started holder: all fields null
private TaskHolder()
{
this.taskExecution = null;
this.finalTaskInfo = null;
this.finalIoStats = null;
}
// running holder: wraps the live execution
private TaskHolder(SqlTaskExecution taskExecution)
{
this.taskExecution = requireNonNull(taskExecution, "taskExecution is null");
this.finalTaskInfo = null;
this.finalIoStats = null;
}
// finished holder: frozen info and I/O stats
private TaskHolder(TaskInfo finalTaskInfo, SqlTaskIoStats finalIoStats)
{
this.taskExecution = null;
this.finalTaskInfo = requireNonNull(finalTaskInfo, "finalTaskInfo is null");
this.finalIoStats = requireNonNull(finalIoStats, "finalIoStats is null");
}
public boolean isFinished()
{
return finalTaskInfo != null;
}
@Nullable
public SqlTaskExecution getTaskExecution()
{
return taskExecution;
}
@Nullable
public TaskInfo getFinalTaskInfo()
{
return finalTaskInfo;
}
/**
 * Returns I/O stats appropriate to the current phase: frozen stats when
 * finished, empty stats when not started, live stats otherwise.
 */
public SqlTaskIoStats getIoStats()
{
// if we are finished, return the final IoStats
if (finalIoStats != null) {
return finalIoStats;
}
// if we haven't started yet, return an empty IoStats
if (taskExecution == null) {
return new SqlTaskIoStats();
}
// get IoStats from the current task execution
TaskContext taskContext = taskExecution.getTaskContext();
return new SqlTaskIoStats(taskContext.getInputDataSize(), taskContext.getInputPositions(), taskContext.getOutputDataSize(), taskContext.getOutputPositions());
}
}
/**
 * Listener is always notified asynchronously using a dedicated notification thread pool so, care should
 * be taken to avoid leaking {@code this} when adding a listener in a constructor. Additionally, it is
 * possible notifications are observed out of order due to the asynchronous execution.
 */
public void addStateChangeListener(StateChangeListener<TaskState> stateChangeListener)
{
// delegate directly to the task state machine
taskStateMachine.addStateChangeListener(stateChangeListener);
}
/**
 * Returns the query context this task belongs to.
 */
public QueryContext getQueryContext()
{
return queryContext;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.query;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.InnerHitContextBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
/**
 * Tests for {@link HasParentQueryBuilder} in the legacy (multi-type) mapping
 * mode, where the parent/child relation is expressed via the {@code _parent}
 * meta field rather than a join field.
 */
public class LegacyHasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQueryBuilder> {
protected static final String PARENT_TYPE = "parent";
protected static final String CHILD_TYPE = "child";
// set when the randomly generated query wraps its inner query in a
// WrapperQueryBuilder, which forces a rewrite before query conversion
boolean requiresRewrite = false;
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singletonList(ParentJoinPlugin.class);
}
// multiple mapping types per index are required for the legacy _parent field
@Override
protected Settings indexSettings() {
return Settings.builder()
.put(super.indexSettings())
.put("index.mapping.single_type", false)
.build();
}
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
// TODO: use a single type when inner hits have been changed to work with join field,
// this test randomly generates queries with inner hits
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
// the child type declares PARENT_TYPE as its _parent
mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
"_parent", "type=" + PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
// a type with no parent/child relation at all, used by testIllegalValues
mapperService.merge("just_a_type", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("just_a_type"
).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
}
/**
 * @return a {@link HasParentQueryBuilder} with random values all over the place
 */
@Override
protected HasParentQueryBuilder doCreateTestQueryBuilder() {
QueryBuilder innerQueryBuilder = new MatchAllQueryBuilder();
if (randomBoolean()) {
requiresRewrite = true;
innerQueryBuilder = new WrapperQueryBuilder(innerQueryBuilder.toString());
}
HasParentQueryBuilder hqb = new HasParentQueryBuilder(PARENT_TYPE, innerQueryBuilder, randomBoolean());
hqb.ignoreUnmapped(randomBoolean());
if (randomBoolean()) {
hqb.innerHit(new InnerHitBuilder()
.setName(randomAlphaOfLengthBetween(1, 10))
.setSize(randomIntBetween(0, 100))
.addSort(new FieldSortBuilder(STRING_FIELD_NAME_2).order(SortOrder.ASC))
.setIgnoreUnmapped(hqb.ignoreUnmapped()));
}
return hqb;
}
@Override
protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
// score() == true maps to ScoreMode.Max, otherwise parents are unscored
assertEquals(queryBuilder.score() ? ScoreMode.Max : ScoreMode.None, lpq.getScoreMode());
if (queryBuilder.innerHit() != null) {
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
// doCreateTestQueryBuilder)
queryBuilder = (HasParentQueryBuilder) queryBuilder.rewrite(searchContext.getQueryShardContext());
assertNotNull(searchContext);
Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
for (InnerHitContextBuilder builder : innerHitBuilders.values()) {
builder.build(searchContext, searchContext.innerHits());
}
// exactly one inner hit must be registered under the generated name
assertNotNull(searchContext.innerHits());
assertEquals(1, searchContext.innerHits().getInnerHits().size());
assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName()));
InnerHitsContext.InnerHitSubContext innerHits = searchContext.innerHits()
.getInnerHits().get(queryBuilder.innerHit().getName());
assertEquals(innerHits.size(), queryBuilder.innerHit().getSize());
assertEquals(innerHits.sort().sort.getSort().length, 1);
assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2);
}
}
/**
 * Test (de)serialization on all previous released versions
 */
public void testSerializationBWC() throws IOException {
for (Version version : VersionUtils.allReleasedVersions()) {
HasParentQueryBuilder testQuery = createTestQueryBuilder();
if (version.before(Version.V_5_2_0) && testQuery.innerHit() != null) {
// ignore unmapped for inner_hits has been added on 5.2
testQuery.innerHit().setIgnoreUnmapped(false);
}
assertSerialization(testQuery, version);
}
}
// a missing 'type' or 'query' must be rejected at construction time, and
// querying a type with no children must fail at the shard level
public void testIllegalValues() throws IOException {
QueryBuilder query = new MatchAllQueryBuilder();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> hasParentQuery(null, query, false));
assertThat(e.getMessage(), equalTo("[has_parent] requires 'type' field"));
e = expectThrows(IllegalArgumentException.class,
() -> hasParentQuery("foo", null, false));
assertThat(e.getMessage(), equalTo("[has_parent] requires 'query' field"));
QueryShardContext context = createShardContext();
HasParentQueryBuilder qb = hasParentQuery("just_a_type", new MatchAllQueryBuilder(), false);
QueryShardException qse = expectThrows(QueryShardException.class, () -> qb.doToQuery(context));
assertThat(qse.getMessage(), equalTo("[has_parent] no child types found for type [just_a_type]"));
}
// the legacy 'type' field still parses but must emit a deprecation warning
public void testDeprecatedXContent() throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
builder.startObject();
builder.startObject("has_parent");
builder.field("query");
new TermQueryBuilder("a", "a").toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.field("type", "foo"); // deprecated
builder.endObject();
builder.endObject();
HasParentQueryBuilder queryBuilder = (HasParentQueryBuilder) parseQuery(builder.string());
assertEquals("foo", queryBuilder.type());
assertWarnings("Deprecated field [type] used, expected [parent_type] instead");
}
// converting to a Lucene query must not clobber the types on the context
public void testToQueryInnerQueryType() throws IOException {
String[] searchTypes = new String[]{CHILD_TYPE};
QueryShardContext shardContext = createShardContext();
shardContext.setTypes(searchTypes);
HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(PARENT_TYPE, new IdsQueryBuilder().addIds("id"),
false);
Query query = hasParentQueryBuilder.toQuery(shardContext);
//verify that the context types are still the same as the ones we previously set
assertThat(shardContext.getTypes(), equalTo(searchTypes));
LegacyHasChildQueryBuilderTests.assertLateParsingQuery(query, PARENT_TYPE, "id");
}
// the UnsupportedOperationException is expected (and swallowed) only when the
// random query was built with a WrapperQueryBuilder and thus needs a rewrite
@Override
public void testMustRewrite() throws IOException {
try {
super.testMustRewrite();
} catch (UnsupportedOperationException e) {
if (requiresRewrite == false) {
throw e;
}
}
}
// round-trip: parse fixed JSON, re-generate it, and check parsed values
public void testFromJson() throws IOException {
String json =
"{\n" +
" \"has_parent\" : {\n" +
" \"query\" : {\n" +
" \"term\" : {\n" +
" \"tag\" : {\n" +
" \"value\" : \"something\",\n" +
" \"boost\" : 1.0\n" +
" }\n" +
" }\n" +
" },\n" +
" \"parent_type\" : \"blog\",\n" +
" \"score\" : true,\n" +
" \"ignore_unmapped\" : false,\n" +
" \"boost\" : 1.0\n" +
" }\n" +
"}";
HasParentQueryBuilder parsed = (HasParentQueryBuilder) parseQuery(json);
checkGeneratedJson(json, parsed);
assertEquals(json, "blog", parsed.type());
assertEquals(json, "something", ((TermQueryBuilder) parsed.query()).value());
}
// ignore_unmapped=true on an unmapped parent type yields a match-no-docs
// query; ignore_unmapped=false must fail with a QueryShardException
public void testIgnoreUnmapped() throws IOException {
final HasParentQueryBuilder queryBuilder = new HasParentQueryBuilder("unmapped", new MatchAllQueryBuilder(), false);
queryBuilder.ignoreUnmapped(true);
Query query = queryBuilder.toQuery(createShardContext());
assertThat(query, notNullValue());
assertThat(query, instanceOf(MatchNoDocsQuery.class));
final HasParentQueryBuilder failingQueryBuilder = new HasParentQueryBuilder("unmapped", new MatchAllQueryBuilder(), false);
failingQueryBuilder.ignoreUnmapped(false);
QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(createShardContext()));
assertThat(e.getMessage(),
containsString("[" + HasParentQueryBuilder.NAME + "] query configured 'parent_type' [unmapped] is not a valid type"));
}
// same as above, but the ignore_unmapped decision must survive a rewrite
public void testIgnoreUnmappedWithRewrite() throws IOException {
// WrapperQueryBuilder makes sure we always rewrite
final HasParentQueryBuilder queryBuilder =
new HasParentQueryBuilder("unmapped", new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), false);
queryBuilder.ignoreUnmapped(true);
QueryShardContext queryShardContext = createShardContext();
Query query = queryBuilder.rewrite(queryShardContext).toQuery(queryShardContext);
assertThat(query, notNullValue());
assertThat(query, instanceOf(MatchNoDocsQuery.class));
}
}
| |
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.modelmapper.internal;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import org.modelmapper.Provider;
import org.modelmapper.Provider.ProvisionRequest;
import org.modelmapper.TypeMap;
import org.modelmapper.TypeToken;
import org.modelmapper.internal.util.Assert;
import org.modelmapper.internal.util.Callable;
import org.modelmapper.internal.util.Objects;
import org.modelmapper.internal.util.Primitives;
import org.modelmapper.internal.util.Types;
import org.modelmapper.spi.Mapping;
import org.modelmapper.spi.MappingContext;
import org.modelmapper.spi.MappingEngine;
/**
* MappingContext implementation that caches destination values for an object graph by their
* corresponding Mutator.
*
* @author Jonathan Halterman
*/
public class MappingContextImpl<S, D> implements MappingContext<S, D>, ProvisionRequest<D> {
/** Caches previously mapped destination objects by path. */
final Map<String, Object> destinationCache;
/** Tracks destination objects for each source. Used for circular mapping. */
final Map<Object, Object> sourceToDestination;
/** Tracks intermediate destination objects on the path to the destination */
final Map<String, Object> intermediateDestinations;
// shared error accumulator for the whole mapping run
final Errors errors;
// null for the root context; set for derived (child) contexts
private final MappingContextImpl<?, ?> parent;
private D destination;
/** Absolute path to destination. */
final String destinationPath;
private final Class<D> destinationType;
private final Type genericDestinationType;
private final String typeMapName;
/** Whether requested mapping is to a provided destination object */
private boolean providedDestination;
private MappingImpl mapping;
private final MappingEngineImpl mappingEngine;
private final S source;
private final Class<S> sourceType;
// chain of parent source objects keyed by destination path (shared with parent)
private final SourceChain parentSource;
private TypeMap<S, D> typeMap;
/** Tracks destination hierarchy paths that were shaded by a condition */
private final List<String> shadedPaths;
/**
 * Create initial MappingContext.
 */
public MappingContextImpl(S source, Class<S> sourceType, D destination, Class<D> destinationType,
Type genericDestinationType, String typeMapName, MappingEngineImpl mappingEngine) {
parent = null;
this.source = source;
this.sourceType = sourceType;
this.parentSource = new SourceChain();
this.destination = destination;
this.destinationPath = "";
this.destinationType = destinationType;
// fall back to the raw destination type when no generic type was requested
this.genericDestinationType = genericDestinationType == null ? destinationType
: genericDestinationType;
this.typeMapName = typeMapName;
providedDestination = destination != null;
this.mappingEngine = mappingEngine;
errors = new Errors();
destinationCache = new HashMap<String, Object>();
shadedPaths = new ArrayList<String>();
// identity semantics: distinct-but-equal sources are tracked separately
sourceToDestination = new IdentityHashMap<Object, Object>();
intermediateDestinations = new HashMap<String, Object>();
}
/**
 * Create derived MappingContext.
 *
 * @param inheritValues whether values from the source {@code context} should be inherited
 */
MappingContextImpl(MappingContextImpl<?, ?> context, S source, Class<S> sourceType,
D destination, Class<D> destinationType, Type genericDestinationType, MappingImpl mapping,
boolean inheritValues) {
this.parent = context;
this.source = source;
this.sourceType = sourceType;
this.destination = destination;
// child path = parent path + this mapping's path (unchanged when no mapping)
this.destinationPath = mapping == null ? context.destinationPath : context.destinationPath
+ mapping.getPath();
this.destinationType = destinationType;
this.genericDestinationType = genericDestinationType == null ? destinationType
: genericDestinationType;
this.providedDestination = context.providedDestination;
this.typeMap = null;
this.typeMapName = null;
this.mapping = mapping;
// shared state: source chain, engine, errors and circular-mapping tracking
// always come from the parent context
parentSource = context.parentSource;
mappingEngine = context.mappingEngine;
errors = context.errors;
destinationCache = inheritValues ? context.destinationCache : new HashMap<String, Object>();
shadedPaths = inheritValues ? context.shadedPaths : new ArrayList<String>();
sourceToDestination = context.sourceToDestination;
intermediateDestinations = new HashMap<String, Object>();
}
@Override
public <CS, CD> MappingContext<CS, CD> create(CS source, CD destination) {
Assert.notNull(source, "source");
Assert.notNull(destination, "destination");
return new MappingContextImpl<CS, CD>(this, source, Types.<CS>deProxy(source.getClass()),
destination, Types.<CD>deProxy(destination.getClass()), null, mapping, false);
}
/** Creates a child MappingContext for an element of a destination collection. */
@Override
public <CS, CD> MappingContext<CS, CD> create(CS source, Class<CD> destinationType) {
Assert.notNull(source, "source");
Assert.notNull(destinationType, "destinationType");
return new MappingContextImpl<CS, CD>(this, source, Types.<CS>deProxy(source.getClass()), null,
destinationType, null, null, false);
}
/** Creates a child MappingContext for an element of a destination collection. */
@SuppressWarnings("unchecked")
@Override
public <CS, CD> MappingContext<CS, CD> create(CS source, Type destinationType) {
// plain classes take the simpler Class-based path
if (destinationType instanceof Class) {
return create(source, (Class<CD>) destinationType);
}
Assert.notNull(source, "source");
Assert.notNull(destinationType, "destinationType");
TypeToken<CD> destinationTypeToken = TypeToken.of(destinationType);
return new MappingContextImpl<CS, CD>(this, source, Types.<CS>deProxy(source.getClass()), null,
destinationTypeToken.getRawType(), destinationTypeToken.getType(), mapping, false);
}
// equality is defined by (source, sourceType, destinationType) only;
// hashCode below uses the same three fields
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null || getClass() != obj.getClass())
return false;
MappingContextImpl<?, ?> other = (MappingContextImpl<?, ?>) obj;
if (!source.equals(other.source))
return false;
if (!sourceType.equals(other.sourceType))
return false;
if (!destinationType.equals(other.destinationType))
return false;
return true;
}
@Override
public D getDestination() {
return destination;
}
@Override
public Class<D> getDestinationType() {
return destinationType;
}
@Override
public Type getGenericDestinationType() {
return genericDestinationType;
}
@Override
public Mapping getMapping() {
return mapping;
}
@Override
public MappingEngine getMappingEngine() {
return mappingEngine;
}
@Override
public MappingContext<?, ?> getParent() {
return parent;
}
@Override
public Class<D> getRequestedType() {
return destinationType;
}
@Override
public S getSource() {
return source;
}
@Override
public Class<S> getSourceType() {
return sourceType;
}
@Override
public TypeMap<S, D> getTypeMap() {
return typeMap;
}
@Override
public String getTypeMapName() {
return typeMapName;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + source.hashCode();
result = prime * result + sourceType.hashCode();
result = prime * result + destinationType.hashCode();
return result;
}
@Override
public String toString() {
return String.format("MappingContext[%s -> %s]", sourceType.getSimpleName(),
destinationType.getSimpleName());
}
// returns the destination previously recorded for this source (circular
// mapping support), or null when none was tracked
@SuppressWarnings("unchecked")
D destinationForSource() {
return (D) sourceToDestination.get(source);
}
/**
 * Determines whether the {@code subPath} is shaded.
 */
boolean isShaded(String subPath) {
for (String shadedPath : shadedPaths)
if (subPath.startsWith(shadedPath))
return true;
return false;
}
TypeMap<?, ?> parentTypeMap() {
return parent == null ? null : parent.typeMap;
}
// records the destination; optionally tracks it for circular mapping
// (primitive wrappers are never tracked)
void setDestination(D destination, boolean trackForSource) {
this.destination = destination;
if (trackForSource && !Primitives.isPrimitiveWrapper(sourceType))
sourceToDestination.put(source, destination);
}
void addParentSource(String path, Object parentSource) {
this.parentSource.addSource(path, parentSource);
}
void setTypeMap(TypeMap<S, D> typeMap) {
this.typeMap = typeMap;
}
/**
 * Shades the {@code path} such that subsequent subpaths can be skipped during the mapping
 * process.
 */
void shadePath(String path) {
shadedPaths.add(path);
}
// returns the generic destination type when the given property type's first
// type argument is the destination class's own first type parameter;
// otherwise null
Type genericDestinationPropertyType(Type type) {
if (type == null
|| !(type instanceof ParameterizedType)
|| genericDestinationType == null
|| destinationType.getTypeParameters().length == 0)
return null;
ParameterizedType parameterizedType = (ParameterizedType) type;
if (parameterizedType.getActualTypeArguments().length == 0)
return null;
if (destinationType.getTypeParameters()[0] == parameterizedType.getActualTypeArguments()[0])
return genericDestinationType;
return null;
}
// walks the parent's mutator chain (all but the last mutator), resolving or
// creating each intermediate destination object, and returns the object the
// final mutator should be applied to (may be null if the chain breaks)
@SuppressWarnings("all")
<S, D> Object getParentDestination() {
List<Mutator> mutatorChain = (List<Mutator>) mapping.getDestinationProperties();
StringBuilder destPathBuilder = new StringBuilder().append(parent.destinationPath);
Object current = parent.destination;
for (int i = 0; i < mutatorChain.size() - 1; i++) {
if (current == null)
break;
Mutator mutator = mutatorChain.get(i);
String destPath = destPathBuilder.append(mutator.getName()).append('.').toString();
Object source = parent.parentSource.getSource(destPath);
// resolution order: cached value, cyclic reference, existing member value
Object next = Objects.firstNonNull(
Objects.callable(parent.destinationCache.get(destPath)),
parent.getCyclicReferenceByPath(destPath),
parent.getDestinationValueByMemberName(current, mutator.getName()));
if (next == null && source != null)
next = mappingEngine.createDestinationViaGlobalProvider(
parent.parentSource.getSource(destPath), mutator.getType(), parent.errors);
if (next != null) {
mutator.setValue(current, next);
parent.destinationCache.put(destPath, next);
}
current = next;
}
return current;
}
// deferred lookup of an existing member value on a provided destination;
// yields null when the destination was not provided or has no such accessor
private Callable<Object> getDestinationValueByMemberName(final Object current, final String memberName) {
return new Callable<Object>() {
@Override
public Object call() {
if (providedDestination) {
Accessor accessor = TypeInfoRegistry
.typeInfoFor(current.getClass(), mappingEngine.getConfiguration())
.getAccessors()
.get(memberName);
if (accessor != null)
return accessor.getValue(current);
}
return null;
}
};
}
// deferred lookup of an intermediate destination recorded for the path
Callable<Object> getCyclicReferenceByPath(final String destinationPath) {
return new Callable<Object>() {
@Override
public Object call() {
return intermediateDestinations.get(destinationPath);
}
};
}
/**
 * Returns a new destination object created via a provider. Provider resolution order: the
 * Mapping's provider first, else the parent TypeMap's property provider, else the TypeMap's
 * provider, else the configuration's provider. Returns {@code null} if no provider is
 * configured.
 */
@SuppressWarnings("unchecked")
D createDestinationViaProvider() {
Provider<D> provider = null;
if (getMapping() != null) {
provider = (Provider<D>) getMapping().getProvider();
if (provider == null && parentTypeMap() != null)
provider = (Provider<D>) parentTypeMap().getPropertyProvider();
}
if (provider == null && getTypeMap() != null)
provider = getTypeMap().getProvider();
if (provider == null && mappingEngine.getConfiguration().getProvider() != null)
provider = (Provider<D>) mappingEngine.getConfiguration().getProvider();
if (provider == null)
return null;
D destination = provider.get(this);
mappingEngine.validateDestination(destinationType, destination, errors);
// provider-created destinations are not tracked for circular mapping here
setDestination(destination, false);
return destination;
}
public boolean isProvidedDestination() {
return providedDestination;
}
// maps destination paths to the source objects seen on the way there, and
// remembers the last source added as a fallback for unknown paths
private static class SourceChain {
private final Map<String, Object> sources = new HashMap<String, Object>();
private Object lastSource;
public void addSource(String path, Object source) {
sources.put(path, source);
lastSource = source;
}
public Object getSource(String path) {
Object source = sources.get(path);
if (source == null)
source = lastSource;
return source;
}
}
}
| |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.location.suplclient.asn1.supl2.supl_report;
// Copyright 2008 Google Inc. All Rights Reserved.
/*
* This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
*/
//
//
import com.google.location.suplclient.asn1.base.Asn1Object;
import com.google.location.suplclient.asn1.base.Asn1Sequence;
import com.google.location.suplclient.asn1.base.Asn1Tag;
import com.google.location.suplclient.asn1.base.BitStream;
import com.google.location.suplclient.asn1.base.BitStreamReader;
import com.google.location.suplclient.asn1.base.SequenceComponent;
import com.google.location.suplclient.asn1.supl2.ulp_components.SessionID;
import com.google.common.collect.ImmutableList;
import java.util.Collection;
import javax.annotation.Nullable;
/**
*
*/
public class SessionInformation extends Asn1Sequence {
//
// ASN.1 tag for this sequence; class/number of -1/-1 denotes "no explicit tag"
private static final Asn1Tag TAG_SessionInformation
= Asn1Tag.fromClassAndNumber(-1, -1);
public SessionInformation() {
super();
}
// the tag used when this sequence is encoded/decoded
@Override
@Nullable
protected Asn1Tag getTag() {
return TAG_SessionInformation;
}
// tagging mode for this generated sequence is implicit
@Override
protected boolean isTagImplicit() {
return true;
}
/**
 * Possible first tags when decoding this type: the sequence's own tag when
 * one is assigned, otherwise whatever a generic ASN.1 sequence may start with.
 */
public static Collection<Asn1Tag> getPossibleFirstTags() {
    return TAG_SessionInformation != null
        ? ImmutableList.of(TAG_SessionInformation)
        : Asn1Sequence.getPossibleFirstTags();
}
/**
 * Decodes a new SessionInformation from unaligned-PER encoded bytes.
 */
public static SessionInformation fromPerUnaligned(byte[] encodedBytes) {
    BitStreamReader reader = new BitStreamReader(encodedBytes);
    SessionInformation decoded = new SessionInformation();
    decoded.decodePerUnaligned(reader);
    return decoded;
}
/**
 * Decodes a new SessionInformation from aligned-PER encoded bytes.
 */
public static SessionInformation fromPerAligned(byte[] encodedBytes) {
    BitStreamReader reader = new BitStreamReader(encodedBytes);
    SessionInformation decoded = new SessionInformation();
    decoded.decodePerAligned(reader);
    return decoded;
}
@Override protected boolean isExtensible() {
return true;
}
@Override public boolean containsExtensionValues() {
for (SequenceComponent extensionComponent : getExtensionComponents()) {
if (extensionComponent.isExplicitlySet()) return true;
}
return false;
}
private SessionID sessionID_;
public SessionID getSessionID() {
return sessionID_;
}
/**
* @throws ClassCastException if value is not a SessionID
*/
public void setSessionID(Asn1Object value) {
this.sessionID_ = (SessionID) value;
}
public SessionID setSessionIDToNewInstance() {
sessionID_ = new SessionID();
return sessionID_;
}
@Override public Iterable<? extends SequenceComponent> getComponents() {
ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();
builder.add(new SequenceComponent() {
Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 0);
@Override public boolean isExplicitlySet() {
return getSessionID() != null;
}
@Override public boolean hasDefaultValue() {
return false;
}
@Override public boolean isOptional() {
return false;
}
@Override public Asn1Object getComponentValue() {
return getSessionID();
}
@Override public void setToNewInstance() {
setSessionIDToNewInstance();
}
@Override public Collection<Asn1Tag> getPossibleFirstTags() {
return tag == null ? SessionID.getPossibleFirstTags() : ImmutableList.of(tag);
}
@Override
public Asn1Tag getTag() {
return tag;
}
@Override
public boolean isImplicitTagging() {
return true;
}
@Override public String toIndentedString(String indent) {
return "sessionID : "
+ getSessionID().toIndentedString(indent);
}
});
return builder.build();
}
@Override public Iterable<? extends SequenceComponent>
getExtensionComponents() {
ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();
return builder.build();
}
@Override public Iterable<BitStream> encodePerUnaligned() {
return super.encodePerUnaligned();
}
@Override public Iterable<BitStream> encodePerAligned() {
return super.encodePerAligned();
}
@Override public void decodePerUnaligned(BitStreamReader reader) {
super.decodePerUnaligned(reader);
}
@Override public void decodePerAligned(BitStreamReader reader) {
super.decodePerAligned(reader);
}
@Override public String toString() {
return toIndentedString("");
}
public String toIndentedString(String indent) {
StringBuilder builder = new StringBuilder();
builder.append("SessionInformation = {\n");
final String internalIndent = indent + " ";
for (SequenceComponent component : getComponents()) {
if (component.isExplicitlySet()) {
builder.append(internalIndent)
.append(component.toIndentedString(internalIndent));
}
}
if (isExtensible()) {
builder.append(internalIndent).append("...\n");
for (SequenceComponent component : getExtensionComponents()) {
if (component.isExplicitlySet()) {
builder.append(internalIndent)
.append(component.toIndentedString(internalIndent));
}
}
}
builder.append(indent).append("};\n");
return builder.toString();
}
}
| |
/**
* Copyright (C) 2009 eXo Platform SAS.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.exoplatform.portal.webui.navigation;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.exoplatform.portal.mop.SiteType;
import org.exoplatform.portal.mop.Visibility;
import org.exoplatform.portal.mop.navigation.NodeChange;
import org.exoplatform.portal.mop.navigation.NodeChangeQueue;
import org.exoplatform.portal.mop.navigation.Scope;
import org.exoplatform.portal.mop.user.UserNavigation;
import org.exoplatform.portal.mop.user.UserNode;
import org.exoplatform.portal.mop.user.UserNodeFilterConfig;
import org.exoplatform.portal.mop.user.UserPortal;
import org.exoplatform.portal.webui.portal.UIPortal;
import org.exoplatform.portal.webui.util.Util;
import org.exoplatform.services.log.ExoLogger;
import org.exoplatform.services.log.Log;
import org.exoplatform.webui.application.WebuiRequestContext;
import org.exoplatform.webui.core.UIComponent;
import org.exoplatform.webui.event.Event;
import org.exoplatform.webui.event.EventListener;
/**
* Created by The eXo Platform SARL Author : Dang Van Minh minhdv81@yahoo.com Jul 12, 2006
*/
public class UIPortalNavigation extends UIComponent {
    /** Whether navigation actions should be rendered as AJAX requests. */
    private boolean useAJAX = true;
    /** Whether navigations of user (dashboard) sites are included. */
    private boolean showUserNavigation = true;
    /** Root of the navigation tree; rebuilt by {@link #loadTreeNodes()}. */
    private TreeNode treeNode_;
    private String cssClassName = "";
    /** Optional template override; when null the component default is used. */
    private String template;
    /** Filter applied to every node lookup: read/write check, DISPLAYED or TEMPORAL visibility. */
    private final UserNodeFilterConfig NAVIGATION_FILTER_CONFIG;
    private Scope navigationScope;
    private Log log = ExoLogger.getExoLogger(UIPortalNavigation.class);
    public UIPortalNavigation() {
        UserNodeFilterConfig.Builder filterConfigBuilder = UserNodeFilterConfig.builder();
        filterConfigBuilder.withReadWriteCheck().withVisibility(Visibility.DISPLAYED, Visibility.TEMPORAL);
        filterConfigBuilder.withTemporalCheck();
        NAVIGATION_FILTER_CONFIG = filterConfigBuilder.build();
    }
    @Override
    public String getTemplate() {
        return template != null ? template : super.getTemplate();
    }
    public void setTemplate(String template) {
        this.template = template;
    }
    public UIComponent getViewModeUIComponent() {
        return null;
    }
    public void setUseAjax(boolean bl) {
        useAJAX = bl;
    }
    public boolean isUseAjax() {
        return useAJAX;
    }
    public boolean isShowUserNavigation() {
        return showUserNavigation;
    }
    public void setShowUserNavigation(boolean showUserNavigation) {
        this.showUserNavigation = showUserNavigation;
    }
    public void setCssClassName(String cssClassName) {
        this.cssClassName = cssClassName;
    }
    public String getCssClassName() {
        return cssClassName;
    }
    /**
     * Returns the root nodes to display: for a logged-in user only the current
     * navigation's root, otherwise the roots of every visible navigation
     * (optionally excluding user-site navigations).
     */
    public List<UserNode> getNavigations() throws Exception {
        WebuiRequestContext context = WebuiRequestContext.getCurrentInstance();
        List<UserNode> nodes = new ArrayList<UserNode>();
        if (context.getRemoteUser() != null) {
            UserNode currRootNode = getCurrentNavigation();
            if (currRootNode != null) {
                nodes.add(currRootNode);
            }
        } else {
            UserPortal userPortal = Util.getPortalRequestContext().getUserPortalConfig().getUserPortal();
            List<UserNavigation> navigations = userPortal.getNavigations();
            for (UserNavigation userNav : navigations) {
                if (!showUserNavigation && userNav.getKey().getType().equals(SiteType.USER)) {
                    continue;
                }
                UserNode rootNode = userPortal.getNode(userNav, navigationScope, NAVIGATION_FILTER_CONFIG, null);
                if (rootNode != null) {
                    nodes.add(rootNode);
                }
            }
        }
        return nodes;
    }
    /**
     * Rebuilds {@link #treeNode_} from all visible navigations, flattening the
     * children of every navigation root into a single tree. Failures on a
     * single navigation are logged and skipped so the rest still loads.
     */
    public void loadTreeNodes() throws Exception {
        treeNode_ = new TreeNode();
        UserPortal userPortal = Util.getPortalRequestContext().getUserPortalConfig().getUserPortal();
        List<UserNavigation> listNavigations = userPortal.getNavigations();
        List<UserNode> childNodes = new LinkedList<UserNode>();
        for (UserNavigation nav : rearrangeNavigations(listNavigations)) {
            if (!showUserNavigation && nav.getKey().getType().equals(SiteType.USER)) {
                continue;
            }
            try {
                UserNode rootNode = userPortal.getNode(nav, navigationScope, NAVIGATION_FILTER_CONFIG, null);
                if (rootNode != null) {
                    childNodes.addAll(rootNode.getChildren());
                }
            } catch (Exception ex) {
                log.error(ex.getMessage(), ex);
            }
        }
        treeNode_.setChildren(childNodes);
    }
    /**
     * Resolves a navigation path to a node, or null when the node no longer
     * exists (the resolved URI differing from the requested path indicates the
     * node was deleted).
     */
    public UserNode resolvePath(String path) throws Exception {
        WebuiRequestContext context = WebuiRequestContext.getCurrentInstance();
        UserPortal userPortal = Util.getPortalRequestContext().getUserPortalConfig().getUserPortal();
        UserNode node;
        if (context.getRemoteUser() != null) {
            node = userPortal.resolvePath(Util.getUIPortal().getUserNavigation(), NAVIGATION_FILTER_CONFIG, path);
        } else {
            node = userPortal.resolvePath(NAVIGATION_FILTER_CONFIG, path);
        }
        if (node != null && !node.getURI().equals(path)) {
            // Node has been deleted
            return null;
        }
        return updateNode(node);
    }
    /**
     * Refreshes the given node against the store, returning null when the
     * update reveals that the node (or an ancestor) was removed.
     */
    public UserNode updateNode(UserNode node) {
        if (node == null) {
            return null;
        }
        UserPortal userPortal = Util.getPortalRequestContext().getUserPortalConfig().getUserPortal();
        NodeChangeQueue<UserNode> queue = new NodeChangeQueue<UserNode>();
        userPortal.updateNode(node, navigationScope, queue);
        for (NodeChange<UserNode> change : queue) {
            if (change instanceof NodeChange.Removed) {
                UserNode deletedNode = ((NodeChange.Removed<UserNode>) change).getTarget();
                if (hasRelationship(deletedNode, node)) {
                    // Node has been deleted
                    return null;
                }
            }
        }
        return node;
    }
    /** Depth-first check whether {@code userNode} equals or descends from {@code parent}. */
    private boolean hasRelationship(UserNode parent, UserNode userNode) {
        if (parent.getId().equals(userNode.getId())) {
            return true;
        }
        for (UserNode child : parent.getChildren()) {
            if (hasRelationship(child, userNode)) {
                return true;
            }
        }
        return false;
    }
    /**
     * Orders navigations by site type: portal first, then group, then user.
     *
     * @param listNavigation the navigations in store order
     * @return a new list ordered portal / group / user
     */
    private List<UserNavigation> rearrangeNavigations(List<UserNavigation> listNavigation) {
        List<UserNavigation> returnNavs = new ArrayList<UserNavigation>();
        List<UserNavigation> portalNavs = new ArrayList<UserNavigation>();
        List<UserNavigation> groupNavs = new ArrayList<UserNavigation>();
        List<UserNavigation> userNavs = new ArrayList<UserNavigation>();
        for (UserNavigation nav : listNavigation) {
            SiteType siteType = nav.getKey().getType();
            switch (siteType) {
                case PORTAL:
                    portalNavs.add(nav);
                    break;
                case GROUP:
                    groupNavs.add(nav);
                    break;
                case USER:
                    userNavs.add(nav);
                    break;
            }
        }
        returnNavs.addAll(portalNavs);
        returnNavs.addAll(groupNavs);
        returnNavs.addAll(userNavs);
        return returnNavs;
    }
    public TreeNode getTreeNodes() {
        return treeNode_;
    }
    public UserNode getSelectedNode() throws Exception {
        UIPortal uiPortal = Util.getUIPortal();
        if (uiPortal != null) {
            return uiPortal.getSelectedUserNode();
        }
        return null;
    }
    /**
     * Root node of the navigation currently selected in the portal, or null
     * when it can no longer be loaded (e.g. the navigation was deleted).
     */
    private UserNode getCurrentNavigation() throws Exception {
        UserPortal userPortal = Util.getPortalRequestContext().getUserPortalConfig().getUserPortal();
        UserNavigation userNavigation = Util.getUIPortal().getUserNavigation();
        try {
            UserNode rootNode = userPortal.getNode(userNavigation, navigationScope, NAVIGATION_FILTER_CONFIG, null);
            return rootNode;
        } catch (Exception ex) {
            // BUGFIX: keep the stack trace instead of swallowing the cause
            // (consistent with the error handling in loadTreeNodes()).
            log.error("Navigation has been deleted", ex);
        }
        return null;
    }
    public void setScope(Scope scope) {
        this.navigationScope = scope;
    }
    /** Collapses the tree node identified by the OBJECTID request parameter. */
    public static class CollapseNodeActionListener extends EventListener<UIPortalNavigation> {
        public void execute(Event<UIPortalNavigation> event) throws Exception {
            // get URI
            String treePath = event.getRequestContext().getRequestParameter(OBJECTID);
            UIPortalNavigation uiNavigation = event.getSource();
            TreeNode rootNode = uiNavigation.getTreeNodes();
            TreeNode collapseTree = rootNode.findNodes(treePath);
            if (collapseTree != null) {
                collapseTree.setExpanded(false);
            }
            Util.getPortalRequestContext().setResponseComplete(true);
        }
    }
    /** Reloads the whole tree (collapsing everything) and re-renders via AJAX. */
    public static class CollapseAllNodeActionListener extends EventListener<UIPortalNavigation> {
        public void execute(Event<UIPortalNavigation> event) throws Exception {
            UIPortalNavigation uiNavigation = event.getSource();
            uiNavigation.loadTreeNodes();
            event.getRequestContext().addUIComponentToUpdateByAjax(uiNavigation);
        }
    }
}
| |
package org.hl7.fhir.dstu3.model.codesystems;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Tue, Dec 6, 2016 09:42-0500 for FHIR v1.8.0
import org.hl7.fhir.exceptions.FHIRException;
public enum ObjectLifecycle {
    /** Origination, Creation */
    _1,
    /** Import / Copy */
    _2,
    /** Amendment */
    _3,
    /** Verification */
    _4,
    /** Translation */
    _5,
    /** Access / Use */
    _6,
    /** De-identification */
    _7,
    /** Aggregation, summarization, derivation */
    _8,
    /** Report */
    _9,
    /** Export */
    _10,
    /** Disclosure */
    _11,
    /** Receipt of disclosure */
    _12,
    /** Archiving */
    _13,
    /** Logical deletion */
    _14,
    /** Permanent erasure / Physical destruction */
    _15,
    /** Sentinel added to help the parsers; never produced by fromCode. */
    NULL;

    /**
     * Parses a code string ("1" through "15") into its constant.
     * A null or empty input yields null.
     *
     * @throws FHIRException when the code is not recognised
     */
    public static ObjectLifecycle fromCode(String codeString) throws FHIRException {
        if (codeString == null || "".equals(codeString))
            return null;
        for (ObjectLifecycle candidate : values()) {
            if (candidate != NULL && candidate.toCode().equals(codeString))
                return candidate;
        }
        throw new FHIRException("Unknown ObjectLifecycle code '" + codeString + "'");
    }

    /** The wire code: the constant name without its leading underscore ("?" for NULL). */
    public String toCode() {
        return this == NULL ? "?" : name().substring(1);
    }

    /** The code system URI these codes belong to. */
    public String getSystem() {
        return "http://hl7.org/fhir/object-lifecycle";
    }

    /** Human-readable definition of the code ("?" for NULL). */
    public String getDefinition() {
        switch (this) {
            case _1: return "Origination, Creation";
            case _2: return "Import / Copy";
            case _3: return "Amendment";
            case _4: return "Verification";
            case _5: return "Translation";
            case _6: return "Access / Use";
            case _7: return "De-identification";
            case _8: return "Aggregation, summarization, derivation";
            case _9: return "Report";
            case _10: return "Export";
            case _11: return "Disclosure";
            case _12: return "Receipt of disclosure";
            case _13: return "Archiving";
            case _14: return "Logical deletion";
            case _15: return "Permanent erasure / Physical destruction";
            default: return "?";
        }
    }

    /** Display text is identical to the definition text in this code system. */
    public String getDisplay() {
        return getDefinition();
    }
}
| |
/*
* Copyright (c) 2016, GoMint, BlackyPaw and geNAZt
*
* This code is licensed under the BSD license found in the
* LICENSE file in the root directory of this source tree.
*/
package io.gomint.proxy.network.packet;
import io.gomint.jraknet.PacketBuffer;
import io.gomint.proxy.Gamerule;
import io.gomint.proxy.inventory.ItemStack;
import io.gomint.proxy.math.BlockPosition;
import io.gomint.proxy.math.Vector;
import io.gomint.taglib.AllocationLimitReachedException;
import io.gomint.taglib.NBTReader;
import io.gomint.taglib.NBTTagCompound;
import io.gomint.taglib.NBTWriter;
import java.io.IOException;
import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;
/**
* @author geNAZt
* @version 1.0
*/
public abstract class Packet {

    /**
     * Internal MC:PE id of this packet
     */
    protected final int id;

    /**
     * Constructor for implemented Packets
     *
     * @param id The id which the Packet should use
     */
    protected Packet(int id) {
        this.id = id;
    }

    /**
     * Gets the packet's ID.
     *
     * @return The packet's ID
     */
    public int getId() {
        return this.id;
    }

    /**
     * Serializes this packet into the given buffer.
     *
     * @param buffer The buffer to serialize this packet into
     */
    public abstract void serialize(PacketBuffer buffer);

    /**
     * Deserializes this packet from the given buffer.
     *
     * @param buffer The buffer to deserialize this packet from
     */
    public abstract void deserialize(PacketBuffer buffer);

    /**
     * Returns an estimate length of the packet (used for pre-allocation).
     *
     * @return The estimate length of the packet or -1 if unknown
     */
    public int estimateLength() {
        return -1;
    }

    /**
     * Returns the ordering channel to send the packet on.
     *
     * @return The ordering channel of the packet
     */
    public int orderingChannel() {
        return 0;
    }

    /**
     * Writes a gamerule map: a length prefix followed by, per rule, the
     * lower-cased NBT name, a type byte (1 = boolean, 2 = int, 3 = float)
     * and the value. A null map encodes as an empty list.
     *
     * @param gamerules the rules to write, may be null
     * @param buffer    the buffer to write into
     */
    public void writeGamerules(Map<Gamerule, Object> gamerules, PacketBuffer buffer) {
        if (gamerules == null) {
            buffer.writeUnsignedVarInt(0);
            return;
        }
        buffer.writeUnsignedVarInt(gamerules.size());
        gamerules.forEach((gamerule, value) -> {
            buffer.writeString(gamerule.getNbtName().toLowerCase());
            if (gamerule.getValueType() == Boolean.class) {
                buffer.writeByte((byte) 1);
                buffer.writeBoolean((Boolean) value);
            } else if (gamerule.getValueType() == Integer.class) {
                buffer.writeByte((byte) 2);
                buffer.writeUnsignedVarInt((Integer) value);
            } else if (gamerule.getValueType() == Float.class) {
                buffer.writeByte((byte) 3);
                buffer.writeLFloat((Float) value);
            }
        });
    }

    /**
     * Reads a gamerule map as written by {@link #writeGamerules(Map, PacketBuffer)}.
     *
     * @param buffer the buffer to read from
     * @return the decoded rules, or null when the encoded list was empty
     */
    public Map<Gamerule, Object> readGamerules(PacketBuffer buffer) {
        int amount = buffer.readUnsignedVarInt();
        if (amount == 0) {
            return null;
        }
        Map<Gamerule, Object> gamerules = new HashMap<>();
        for (int i = 0; i < amount; i++) {
            String name = buffer.readString();
            byte type = buffer.readByte();
            Object val = null;
            switch (type) {
                case 1:
                    val = buffer.readBoolean();
                    break;
                case 2:
                    val = buffer.readUnsignedVarInt();
                    break;
                case 3:
                    val = buffer.readLFloat();
                    break;
            }
            // BUGFIX: the decoded name/value pair was previously discarded, so this
            // method always returned an empty map. Names are written lower-cased
            // (see writeGamerules), hence the case-insensitive match.
            // NOTE(review): assumes Gamerule is an enum exposing values() — confirm.
            if (val != null) {
                for (Gamerule candidate : Gamerule.values()) {
                    if (candidate.getNbtName().equalsIgnoreCase(name)) {
                        gamerules.put(candidate, val);
                        break;
                    }
                }
            }
        }
        return gamerules;
    }

    /**
     * Writes a single item stack. Empty stacks (null or material 0) encode as
     * a single 0 varint.
     *
     * @param itemStack the stack to write, may be null
     * @param buffer    the buffer to write into
     */
    public static void writeItemStack(ItemStack itemStack, PacketBuffer buffer) {
        if (itemStack == null || itemStack.getMaterial() == 0) {
            buffer.writeSignedVarInt(0);
            return;
        }
        buffer.writeSignedVarInt(itemStack.getMaterial());
        buffer.writeSignedVarInt((itemStack.getData() << 8) + (itemStack.getAmount() & 0xff));
        NBTTagCompound compound = itemStack.getNbtData();
        if (compound == null) {
            buffer.writeLShort((short) 0);
        } else {
            try {
                // Vanilla currently only writes one nbt tag (this is hardcoded)
                buffer.writeLShort((short) 0xFFFF);
                buffer.writeByte((byte) 1);
                // NBT Tag
                NBTWriter nbtWriter = new NBTWriter(buffer.getBuffer(), ByteOrder.LITTLE_ENDIAN);
                nbtWriter.setUseVarint(true);
                nbtWriter.write(compound);
            } catch (IOException e) {
                // NOTE(review): the 0xFFFF marker and count byte are already on the
                // stream at this point, so this fallback leaves it corrupt — confirm
                // whether a failed NBT write can actually occur here.
                buffer.writeLShort((short) 0);
            }
        }
        // canPlace and canBreak
        buffer.writeSignedVarInt(0);
        buffer.writeSignedVarInt(0);
    }

    /**
     * Writes a length-prefixed array of item stacks (null/empty writes 0).
     *
     * @param itemStacks the stacks to write, may be null
     * @param buffer     the buffer to write into
     */
    public static void writeItemStacks(ItemStack[] itemStacks, PacketBuffer buffer) {
        if (itemStacks == null || itemStacks.length == 0) {
            buffer.writeUnsignedVarInt(0);
            return;
        }
        buffer.writeUnsignedVarInt(itemStacks.length);
        for (ItemStack itemStack : itemStacks) {
            writeItemStack(itemStack, buffer);
        }
    }

    /**
     * Read in a variable amount of itemstacks
     *
     * @param buffer The buffer to read from
     * @return a list of itemstacks
     */
    public static ItemStack[] readItemStacks(PacketBuffer buffer) {
        int count = buffer.readUnsignedVarInt();
        ItemStack[] itemStacks = new ItemStack[count];
        for (int i = 0; i < count; i++) {
            itemStacks[i] = readItemStack(buffer);
        }
        return itemStacks;
    }

    /**
     * Reads a recipe ingredient (id, meta, count); meta 0x7fff is the
     * "any data value" wildcard and is normalised to -1.
     *
     * @param buffer the buffer to read from
     * @return the decoded ingredient stack (never null)
     */
    public static ItemStack readRecipeIngredient(PacketBuffer buffer) {
        int id = buffer.readSignedVarInt();
        if (id == 0) {
            return new ItemStack(0, (short) 0, 0, null);
        }
        int meta = buffer.readSignedVarInt();
        if (meta == 0x7fff) {
            meta = -1;
        }
        int count = buffer.readSignedVarInt();
        return new ItemStack(id, (short) meta, count, null);
    }

    /**
     * Reads a full item stack (id, packed data/amount, optional NBT, canPlace /
     * canBreak string lists).
     *
     * @param buffer the buffer to read from
     * @return the decoded stack, or null when the NBT payload could not be parsed
     */
    public static ItemStack readItemStack(PacketBuffer buffer) {
        int id = buffer.readSignedVarInt();
        if (id == 0) {
            return new ItemStack(0, (short) 0, 0, null);
        }
        // Amount lives in the low byte, data value in the remaining bits.
        int temp = buffer.readSignedVarInt();
        byte amount = (byte) (temp & 0xFF);
        short data = (short) (temp >> 8);
        NBTTagCompound nbt = null;
        short extraLen = buffer.readLShort();
        if (extraLen == -1) {
            buffer.readByte(); // version byte, currently unused
            try {
                NBTReader nbtReader = new NBTReader(buffer.getBuffer(), ByteOrder.LITTLE_ENDIAN);
                nbtReader.setUseVarint(true);
                // There is no alloc limit needed here, you can't write so much shit in 32kb, so thats ok
                nbt = nbtReader.parse();
            } catch (IOException | AllocationLimitReachedException e) {
                return null;
            }
        }
        // They implemented additional data for item stacks aside from nbt
        int countPlacedOn = buffer.readSignedVarInt();
        for (int i = 0; i < countPlacedOn; i++) {
            buffer.readString(); // TODO: Implement proper support once we know the string values
        }
        int countCanBreak = buffer.readSignedVarInt();
        for (int i = 0; i < countCanBreak; i++) {
            buffer.readString();
        }
        // Special case shield?
        if (id == 355) {
            buffer.readSignedVarInt();
        }
        return new ItemStack(id, data, amount, nbt);
    }

    // Block positions use a signed varint for x/z but an unsigned one for y
    // (mirrored in writeBlockPosition below).
    public BlockPosition readBlockPosition(PacketBuffer buffer) {
        return new BlockPosition(buffer.readSignedVarInt(), buffer.readUnsignedVarInt(), buffer.readSignedVarInt());
    }

    public void writeBlockPosition(BlockPosition position, PacketBuffer buffer) {
        buffer.writeSignedVarInt(position.getX());
        buffer.writeUnsignedVarInt(position.getY());
        buffer.writeSignedVarInt(position.getZ());
    }

    // Vectors are three little-endian floats in x, y, z order.
    void writeVector(Vector vector, PacketBuffer buffer) {
        buffer.writeLFloat(vector.getX());
        buffer.writeLFloat(vector.getY());
        buffer.writeLFloat(vector.getZ());
    }

    Vector readVector(PacketBuffer buffer) {
        return new Vector(buffer.readLFloat(), buffer.readLFloat(), buffer.readLFloat());
    }

    /** Writes the packet id header. */
    public void serializeHeader(PacketBuffer buffer) {
        buffer.writeUnsignedVarInt(this.id);
    }

    /**
     * Write a array of item stacks to the buffer
     *
     * @param itemStacks which should be written to the buffer
     * @param buffer which should be written to
     */
    void writeItemStacksWithIDs(ItemStack[] itemStacks, PacketBuffer buffer) {
        if (itemStacks == null || itemStacks.length == 0) {
            buffer.writeUnsignedVarInt(0);
            return;
        }
        buffer.writeUnsignedVarInt(itemStacks.length);
        for (ItemStack itemStack : itemStacks) {
            writeItemStackWithID(itemStack, buffer);
        }
    }

    /**
     * Reads an id-prefixed item stack; the id is applied to the decoded stack
     * when parsing succeeded.
     *
     * @param buffer the buffer to read from
     * @return the decoded stack, or null when the stack payload was unreadable
     */
    public static ItemStack readItemStackWithID(PacketBuffer buffer) {
        int id = buffer.readSignedVarInt();
        ItemStack itemStack = readItemStack(buffer);
        if (itemStack != null) {
            itemStack.setId(id);
        }
        return itemStack;
    }

    /** Writes an item stack prefixed with its network id. */
    public static void writeItemStackWithID(ItemStack itemStack, PacketBuffer buffer) {
        buffer.writeSignedVarInt(itemStack.getId());
        writeItemStack(itemStack, buffer);
    }

    /**
     * Read in a variable amount of itemstacks
     *
     * @param buffer The buffer to read from
     * @return a list of item stacks
     */
    ItemStack[] readItemStacksWithIDs(PacketBuffer buffer) {
        int count = buffer.readUnsignedVarInt();
        ItemStack[] itemStacks = new ItemStack[count];
        for (int i = 0; i < count; i++) {
            itemStacks[i] = readItemStackWithID(buffer);
        }
        return itemStacks;
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2011, Sun Microsystems, Inc., Kohsuke Kawaguchi,
* Erik Ramfelt, Martin Eigenbrodt, Stephen Connolly, Tom Huybrechts
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import com.google.common.collect.ImmutableSet;
import hudson.DescriptorExtensionList;
import hudson.EnvVars;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Launcher.RemoteLauncher;
import hudson.Util;
import hudson.cli.CLI;
import hudson.model.Descriptor.FormException;
import hudson.remoting.Callable;
import hudson.remoting.Channel;
import hudson.remoting.Which;
import hudson.slaves.ComputerLauncher;
import hudson.slaves.DumbSlave;
import hudson.slaves.JNLPLauncher;
import hudson.slaves.NodeDescriptor;
import hudson.slaves.NodeProperty;
import hudson.slaves.NodePropertyDescriptor;
import hudson.slaves.RetentionStrategy;
import hudson.slaves.SlaveComputer;
import hudson.util.ClockDifference;
import hudson.util.DescribableList;
import hudson.util.FormValidation;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.servlet.ServletException;
import jenkins.model.Jenkins;
import jenkins.security.MasterToSlaveCallable;
import jenkins.slaves.WorkspaceLocator;
import jenkins.util.SystemProperties;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.DataBoundSetter;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
/**
* Information about a Hudson agent node.
*
* <p>
* Ideally this would have been in the <tt>hudson.slaves</tt> package,
* but for compatibility reasons, it can't.
*
* <p>
* TODO: move out more stuff to {@link DumbSlave}.
*
* On February, 2016 a general renaming was done internally: the "slave" term was replaced by
* "Agent". This change was applied in: UI labels/HTML pages, javadocs and log messages.
* Java classes, fields, methods, etc were not renamed to avoid compatibility issues.
* See <a href="https://jenkins-ci.org/issue/27268">JENKINS-27268</a>.
*
* @author Kohsuke Kawaguchi
*/
public abstract class Slave extends Node implements Serializable {
private static final Logger LOGGER = Logger.getLogger(Slave.class.getName());
/**
* Name of this agent node.
*/
protected String name;
/**
* Description of this node.
*/
private String description;
/**
* Path to the root of the workspace from the view point of this node, such as "/hudson", this need not
* be absolute provided that the launcher establishes a consistent working directory, such as "./.jenkins-slave"
* when used with an SSH launcher.
*
* NOTE: if the administrator is using a relative path they are responsible for ensuring that the launcher used
* provides a consistent working directory
*/
protected final String remoteFS;
/**
* Number of executors of this node.
*/
private int numExecutors = 2;
/**
* Job allocation strategy.
*/
private Mode mode = Mode.NORMAL;
/**
* Agent availability strategy.
*/
private transient RetentionStrategy retentionStrategy;
/**
* The starter that will startup this agent.
*/
private transient ComputerLauncher launcher;
/**
* Whitespace-separated labels.
*/
private String label="";
private transient /*almost final*/ DescribableList<NodeProperty<?>,NodePropertyDescriptor> nodeProperties =
new DescribableList<NodeProperty<?>,NodePropertyDescriptor>(Jenkins.get().getNodesObject());
/**
* Lazily computed set of labels from {@link #label}.
*/
private transient volatile Set<Label> labels;
/**
* Id of user which creates this agent {@link User}.
*/
private String userId;
    /**
     * Creates an agent, parsing {@code numExecutors} from its string form
     * (falls back to 1 when the string is not a valid number).
     */
    public Slave(String name, String nodeDescription, String remoteFS, String numExecutors,
                 Mode mode, String labelString, ComputerLauncher launcher, RetentionStrategy retentionStrategy, List<? extends NodeProperty<?>> nodeProperties) throws FormException, IOException {
        this(name,nodeDescription,remoteFS,Util.tryParseNumber(numExecutors, 1).intValue(),mode,labelString,launcher,retentionStrategy, nodeProperties);
    }
/**
* @deprecated since 2009-02-20.
*/
@Deprecated
public Slave(String name, String nodeDescription, String remoteFS, int numExecutors,
Mode mode, String labelString, ComputerLauncher launcher, RetentionStrategy retentionStrategy) throws FormException, IOException {
this(name, nodeDescription, remoteFS, numExecutors, mode, labelString, launcher, retentionStrategy, new ArrayList());
}
    /**
     * Minimal constructor; the remaining settings (executors, mode, labels,
     * properties, …) are expected to be applied afterwards via the
     * {@code @DataBoundSetter} methods.
     */
    public Slave(@Nonnull String name, String remoteFS, ComputerLauncher launcher) throws FormException, IOException {
        this.name = name;
        this.remoteFS = remoteFS;
        this.launcher = launcher;
    }
/**
* @deprecated as of 1.XXX
* Use {@link #Slave(String, String, ComputerLauncher)} and set the rest through setters.
*/
    public Slave(@Nonnull String name, String nodeDescription, String remoteFS, int numExecutors,
            Mode mode, String labelString, ComputerLauncher launcher, RetentionStrategy retentionStrategy, List<? extends NodeProperty<?>> nodeProperties) throws FormException, IOException {
        this.name = name;
        this.description = nodeDescription;
        this.numExecutors = numExecutors;
        this.mode = mode;
        this.remoteFS = Util.fixNull(remoteFS).trim();
        this.label = Util.fixNull(labelString).trim();
        this.launcher = launcher;
        this.retentionStrategy = retentionStrategy;
        getAssignedLabels(); // compute labels now
        this.nodeProperties.replaceBy(nodeProperties);
        // Keep the original creator id when a node with this name already exists,
        // i.e. this is a reconfiguration rather than a fresh creation.
        Slave node = (Slave) Jenkins.get().getNode(name);
        if(node!=null){
            this.userId= node.getUserId(); //agent has already existed
        }
        else{
            User user = User.current();
            userId = user!=null ? user.getId() : "anonymous";
        }
        // NOTE(review): validation runs after the fields above were assigned; a
        // FormException here leaves a partially initialised instance behind.
        if (name.equals(""))
            throw new FormException(Messages.Slave_InvalidConfig_NoName(), null);
        // if (remoteFS.equals(""))
        //     throw new FormException(Messages.Slave_InvalidConfig_NoRemoteDir(name), null);
        if (this.numExecutors<=0)
            throw new FormException(Messages.Slave_InvalidConfig_Executors(name), null);
    }
    /**
     * Returns the id of the user who created this agent, or "anonymous" when
     * it was created without an authenticated user.
     *
     * @return id of the creating user
     */
    public String getUserId() {
        return userId;
    }
    /** Sets the id of the user recorded as this agent's creator. */
    public void setUserId(String userId){
        this.userId = userId;
    }
/**
 * Returns the launcher for this agent, lazily migrating the pre-1.216
 * {@code agentCommand} setting on first access.
 */
public ComputerLauncher getLauncher() {
    if (launcher == null && !StringUtils.isEmpty(agentCommand)) {
        try {
            // CommandLauncher now lives in a plugin, hence the reflective construction
            // through the uber class loader. On success the legacy field is cleared
            // and the converted configuration is persisted.
            launcher = (ComputerLauncher) Jenkins.get().getPluginManager().uberClassLoader.loadClass("hudson.slaves.CommandLauncher").getConstructor(String.class, EnvVars.class).newInstance(agentCommand, null);
            agentCommand = null;
            save();
        } catch (IOException | ClassNotFoundException | IllegalAccessException | IllegalArgumentException | IllegalStateException | InstantiationException | NoSuchMethodException | SecurityException | InvocationTargetException x) {
            // Best-effort migration: keep the node usable even if the plugin is absent.
            LOGGER.log(Level.WARNING, "could not update historical agentCommand setting to CommandLauncher", x);
        }
    }
    // Default launcher does not use Work Directory
    return launcher == null ? new JNLPLauncher(false) : launcher;
}

public void setLauncher(ComputerLauncher launcher) {
    this.launcher = launcher;
}
/** @return the configured remote file system root of this agent. */
public String getRemoteFS() {
    return remoteFS;
}

@Override
public String getNodeName() {
    return name;
}

@Override
public String toString() {
    return getClass().getName() + "[" + name + "]";
}

@Override
public void setNodeName(String name) {
    // NOTE(review): direct field rename with no re-registration — confirm callers
    // handle updating the Nodes registry themselves.
    this.name = name;
}

@DataBoundSetter
public void setNodeDescription(String value) {
    this.description = value;
}

@Override
public String getNodeDescription() {
    return description;
}

@Override
public int getNumExecutors() {
    return numExecutors;
}

@DataBoundSetter
public void setNumExecutors(int n) {
    // No validation here; form-level validation is done in SlaveDescriptor.doCheckNumExecutors.
    this.numExecutors = n;
}

@Override
public Mode getMode() {
    return mode;
}

@DataBoundSetter
public void setMode(Mode mode) {
    this.mode = mode;
}
@Override
public DescribableList<NodeProperty<?>, NodePropertyDescriptor> getNodeProperties() {
    // readResolve() guarantees a non-null list even for old serialized forms.
    assert nodeProperties != null;
    return nodeProperties;
}

@DataBoundSetter
public void setNodeProperties(List<? extends NodeProperty<?>> properties) throws IOException {
    nodeProperties.replaceBy(properties);
}

public RetentionStrategy getRetentionStrategy() {
    // Default to keeping the agent always on when no strategy was configured.
    return retentionStrategy == null ? RetentionStrategy.Always.INSTANCE : retentionStrategy;
}

@DataBoundSetter
public void setRetentionStrategy(RetentionStrategy availabilityStrategy) {
    this.retentionStrategy = availabilityStrategy;
}

@Override
public String getLabelString() {
    // Never returns null, even if no labels were configured.
    return Util.fixNull(label).trim();
}

@Override
@DataBoundSetter
public void setLabelString(String labelString) throws IOException {
    this.label = Util.fixNull(labelString).trim();
    // Compute labels now.
    getAssignedLabels();
}
@Override
public Callable<ClockDifference,IOException> getClockDifferenceCallable() {
    // See the GetClockDifference1/2/3 classes below for how the round trip is measured.
    return new GetClockDifference1();
}

@Override
public Computer createComputer() {
    return new SlaveComputer(this);
}
@Override
public FilePath getWorkspaceFor(TopLevelItem item) {
    // Give WorkspaceLocator extension points the first chance to choose a location.
    for (WorkspaceLocator locator : WorkspaceLocator.all()) {
        FilePath located = locator.locate(item, this);
        if (located != null) {
            return located;
        }
    }
    // Fall back to the default layout under the workspace root; null while offline.
    FilePath root = getWorkspaceRoot();
    return root == null ? null : root.child(item.getFullName());
}
@CheckForNull
@Override
public FilePath getRootPath() {
    final SlaveComputer computer = getComputer();
    if (computer == null) {
        // No computer means no channel, so we would have returned null anyway.
        return null;
    }
    // Prefer the absolute remote FS reported by the live computer,
    // falling back to the statically configured value.
    return createPath(StringUtils.defaultString(computer.getAbsoluteRemoteFs(), remoteFS));
}
/**
 * Root directory on this agent where all the job workspaces are laid out.
 * @return
 *      null if not connected.
 */
public @CheckForNull FilePath getWorkspaceRoot() {
    FilePath root = getRootPath();
    return root == null ? null : root.child(WORKSPACE_ROOT);
}
/**
 * Web-bound object used to serve jar files for JNLP.
 */
public static final class JnlpJar implements HttpResponse {
    // Requested jar name; validated against ALLOWED_JNLPJARS_FILES in getURL().
    private final String fileName;

    public JnlpJar(String fileName) {
        this.fileName = fileName;
    }

    /** Streams the jar to the client as a download. */
    public void doIndex( StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
        URLConnection con = connect();
        // since we end up redirecting users to jnlpJars/foo.jar/, set the content disposition
        // so that browsers can download them in the right file name.
        // see http://support.microsoft.com/kb/260519 and http://www.boutell.com/newfaq/creating/forcedownload.html
        rsp.setHeader("Content-Disposition", "attachment; filename=" + fileName);
        try (InputStream in = con.getInputStream()) {
            rsp.serveFile(req, in, con.getLastModified(), con.getContentLength(), "*.jar" );
        }
    }

    @Override
    public void generateResponse(StaplerRequest req, StaplerResponse rsp, Object node) throws IOException, ServletException {
        doIndex(req,rsp);
    }

    private URLConnection connect() throws IOException {
        URL res = getURL();
        return res.openConnection();
    }

    /**
     * Resolves the requested jar name to a concrete URL, serving only
     * whitelisted names and mapping legacy aliases to their current files.
     *
     * @throws MalformedURLException if the name is not in the whitelist
     * @throws FileNotFoundException if no matching resource exists in the war
     */
    public URL getURL() throws IOException {
        String name = fileName;

        // Prevent the access to war contents & prevent the folder escaping (SECURITY-195)
        if (!ALLOWED_JNLPJARS_FILES.contains(name)) {
            throw new MalformedURLException("The specified file path " + fileName + " is not allowed due to security reasons");
        }

        if (name.equals("hudson-cli.jar") || name.equals("jenkins-cli.jar")) {
            File cliJar = Which.jarFile(CLI.class);
            if (cliJar.isFile()) {
                name = "jenkins-cli.jar";
            } else {
                // Development mode: the class is in a "classes" dir, not a jar.
                URL res = findExecutableJar(cliJar, CLI.class);
                if (res != null) {
                    return res;
                }
            }
        } else if (name.equals("agent.jar") || name.equals("slave.jar") || name.equals("remoting.jar")) {
            File remotingJar = Which.jarFile(hudson.remoting.Launcher.class);
            if (remotingJar.isFile()) {
                name = "lib/" + remotingJar.getName();
            } else {
                URL res = findExecutableJar(remotingJar, hudson.remoting.Launcher.class);
                if (res != null) {
                    return res;
                }
            }
        }

        URL res = Jenkins.get().servletContext.getResource("/WEB-INF/" + name);
        if(res==null) {
            throw new FileNotFoundException(name); // giving up
        } else {
            LOGGER.log(Level.FINE, "found {0}", res);
        }
        return res;
    }

    /** Useful for {@code JenkinsRule.createSlave}, {@code hudson-dev:run}, etc. */
    private @CheckForNull URL findExecutableJar(File notActuallyJAR, Class<?> mainClass) throws IOException {
        // Look for a sibling jar whose manifest Main-Class matches the requested class.
        if (notActuallyJAR.getName().equals("classes")) {
            File[] siblings = notActuallyJAR.getParentFile().listFiles();
            if (siblings != null) {
                for (File actualJar : siblings) {
                    if (actualJar.getName().endsWith(".jar")) {
                        try (JarFile jf = new JarFile(actualJar, false)) {
                            Manifest mf = jf.getManifest();
                            if (mf != null && mainClass.getName().equals(mf.getMainAttributes().getValue("Main-Class"))) {
                                LOGGER.log(Level.FINE, "found {0}", actualJar);
                                return actualJar.toURI().toURL();
                            }
                        }
                    }
                }
            }
        }
        return null;
    }

    /** Reads the entire jar into memory. */
    public byte[] readFully() throws IOException {
        try (InputStream in = connect().getInputStream()) {
            return IOUtils.toByteArray(in);
        }
    }
}
/**
 * Creates a launcher for the agent.
 *
 * @return
 *      If there is no computer it will return a {@link hudson.Launcher.DummyLauncher}, otherwise it
 *      will return a {@link hudson.Launcher.RemoteLauncher} instead.
 */
@Nonnull
@Override
public Launcher createLauncher(TaskListener listener) {
    SlaveComputer c = getComputer();
    if (c == null) {
        listener.error("Issue with creating launcher for agent " + name + ". Computer has been disconnected");
        return new Launcher.DummyLauncher(listener);
    }

    // TODO: ideally all the logic below should be inside the SlaveComputer class with proper locking to prevent race conditions,
    // but so far there is no locks for setNode() hence it requires serious refactoring

    // Ensure that the Computer instance still points to this node.
    // Otherwise we may end up running the command on a wrong (reconnected) Node instance.
    Slave node = c.getNode();
    if (node != this) {
        String message = "Issue with creating launcher for agent " + name + ". Computer has been reconnected";
        if (LOGGER.isLoggable(Level.WARNING)) {
            LOGGER.log(Level.WARNING, message, new IllegalStateException("Computer has been reconnected, this Node instance cannot be used anymore"));
        }
        return new Launcher.DummyLauncher(listener);
    }

    // RemoteLauncher requires an active Channel instance to operate correctly
    final Channel channel = c.getChannel();
    if (channel == null) {
        reportLauncerCreateError("The agent has not been fully initialized yet",
                "No remoting channel to the agent OR it has not been fully initialized yet", listener);
        return new Launcher.DummyLauncher(listener);
    }
    if (channel.isClosingOrClosed()) {
        reportLauncerCreateError("The agent is being disconnected",
                "Remoting channel is either in the process of closing down or has closed down", listener);
        return new Launcher.DummyLauncher(listener);
    }

    final Boolean isUnix = c.isUnix();
    if (isUnix == null) {
        // isUnix is always set when the channel is not null, so it should never happen
        // (typo "determing" fixed to "determine" in the diagnostic message below)
        reportLauncerCreateError("The agent has not been fully initialized yet",
                "Cannot determine if the agent is a Unix one, the System status request has not completed yet. " +
                "It is an invalid channel state, please report a bug to Jenkins if you see it.",
                listener);
        return new Launcher.DummyLauncher(listener);
    }

    return new RemoteLauncher(listener, channel, isUnix).decorateFor(this);
}
/**
 * Reports an issue preventing launcher creation, both to the build listener and the system log.
 *
 * @param humanReadableMsg short user-facing explanation appended to the listener error
 * @param exceptionDetails optional technical details; falls back to {@code humanReadableMsg} when {@code null}
 * @param listener build listener to notify
 */
private void reportLauncerCreateError(@Nonnull String humanReadableMsg, @CheckForNull String exceptionDetails, @Nonnull TaskListener listener) {
    String message = "Issue with creating launcher for agent " + name + ". " + humanReadableMsg;
    listener.error(message);
    if (LOGGER.isLoggable(Level.WARNING)) {
        // Send stacktrace to the log as well in order to diagnose the root cause of issues like JENKINS-38527.
        // (". " separator added so the two sentences no longer run together in the log output.)
        LOGGER.log(Level.WARNING, message
                + ". Probably there is a race condition with Agent reconnection or disconnection, check other log entries",
                new IllegalStateException(exceptionDetails != null ? exceptionDetails : humanReadableMsg));
    }
}
/**
 * Gets the corresponding computer object.
 * The cast is safe because {@link #createComputer()} always produces a
 * {@link SlaveComputer} for this node; may be null if no computer exists.
 */
public SlaveComputer getComputer() {
    return (SlaveComputer)toComputer();
}
@Override
public boolean equals(Object o) {
    // Agents are identified purely by name; subclasses do not compare equal
    // to each other (exact runtime class must match).
    if (o == this) {
        return true;
    }
    if (o == null || o.getClass() != getClass()) {
        return false;
    }
    return name.equals(((Slave) o).name);
}

@Override
public int hashCode() {
    // Consistent with equals(): name is the sole identity field.
    return name.hashCode();
}
/**
 * Invoked by XStream when this object is read into memory.
 */
protected Object readResolve() {
    // Older serialized forms may lack nodeProperties; recreate it so
    // getNodeProperties() never observes null.
    if(nodeProperties==null)
        nodeProperties = new DescribableList<>(Jenkins.get().getNodesObject());
    return this;
}
@Override
public SlaveDescriptor getDescriptor() {
    // Every agent type must register a descriptor extending SlaveDescriptor;
    // anything else is a programming error in the subclass.
    Descriptor d = Jenkins.get().getDescriptorOrDie(getClass());
    if (!(d instanceof SlaveDescriptor)) {
        throw new IllegalStateException(d.getClass()+" needs to extend from SlaveDescriptor");
    }
    return (SlaveDescriptor) d;
}
public static abstract class SlaveDescriptor extends NodeDescriptor {
    /** Form validation: the executor count must be a positive integer. */
    public FormValidation doCheckNumExecutors(@QueryParameter String value) {
        return FormValidation.validatePositiveInteger(value);
    }

    /**
     * Performs syntactical check on the remote FS for agents.
     */
    public FormValidation doCheckRemoteFS(@QueryParameter String value) throws IOException, ServletException {
        if(Util.fixEmptyAndTrim(value)==null)
            return FormValidation.error(Messages.Slave_Remote_Director_Mandatory());

        // Network-mounted paths are allowed but tend to be slow/fragile, so only warn.
        if(value.startsWith("\\\\") || value.startsWith("/net/"))
            return FormValidation.warning(Messages.Slave_Network_Mounted_File_System_Warning());

        if (Util.isRelativePath(value)) {
            return FormValidation.warning(Messages.Slave_Remote_Relative_Path_Warning());
        }

        return FormValidation.ok();
    }

    /**
     * Returns the list of {@link ComputerLauncher} descriptors appropriate to the supplied {@link Slave}.
     *
     * @param it the {@link Slave} or {@code null} to assume the slave is of type {@link #clazz}.
     * @return the filtered list
     * @since 2.12
     */
    @Nonnull
    @Restricted(NoExternalUse.class) // intended for use by Jelly EL only (plus hack in DelegatingComputerLauncher)
    public final List<Descriptor<ComputerLauncher>> computerLauncherDescriptors(@CheckForNull Slave it) {
        DescriptorExtensionList<ComputerLauncher, Descriptor<ComputerLauncher>> all =
                Jenkins.get().<ComputerLauncher, Descriptor<ComputerLauncher>>getDescriptorList(
                        ComputerLauncher.class);
        // With a concrete instance, filter by that instance; otherwise by the agent type.
        return it == null ? DescriptorVisibilityFilter.applyType(clazz, all)
                : DescriptorVisibilityFilter.apply(it, all);
    }

    /**
     * Returns the list of {@link RetentionStrategy} descriptors appropriate to the supplied {@link Slave}.
     *
     * @param it the {@link Slave} or {@code null} to assume the slave is of type {@link #clazz}.
     * @return the filtered list
     * @since 2.12
     */
    @Nonnull
    @SuppressWarnings("unchecked") // used by Jelly EL only
    @Restricted(NoExternalUse.class) // used by Jelly EL only
    public final List<Descriptor<RetentionStrategy<?>>> retentionStrategyDescriptors(@CheckForNull Slave it) {
        return it == null ? DescriptorVisibilityFilter.applyType(clazz, RetentionStrategy.all())
                : DescriptorVisibilityFilter.apply(it, RetentionStrategy.all());
    }

    /**
     * Returns the list of {@link NodePropertyDescriptor} appropriate to the supplied {@link Slave}.
     *
     * @param it the {@link Slave} or {@code null} to assume the slave is of type {@link #clazz}.
     * @return the filtered list
     * @since 2.12
     */
    @Nonnull
    @SuppressWarnings("unchecked") // used by Jelly EL only
    @Restricted(NoExternalUse.class) // used by Jelly EL only
    public final List<NodePropertyDescriptor> nodePropertyDescriptors(@CheckForNull Slave it) {
        List<NodePropertyDescriptor> result = new ArrayList<>();
        Collection<NodePropertyDescriptor> list =
                (Collection) Jenkins.get().getDescriptorList(NodeProperty.class);
        // Keep only descriptors both visible for this node/type and applicable to it.
        for (NodePropertyDescriptor npd : it == null
                ? DescriptorVisibilityFilter.applyType(clazz, list)
                : DescriptorVisibilityFilter.apply(it, list)) {
            if (npd.isApplicable(clazz)) {
                result.add(npd);
            }
        }
        return result;
    }
}
//
// backward compatibility
//
/**
 * Command line to launch the agent, like
 * "ssh myslave java -jar /path/to/hudson-remoting.jar"
 *
 * <p>Transient: only read back from old on-disk configurations, then migrated
 * to a CommandLauncher by {@link #getLauncher()}.
 * @deprecated in 1.216
 */
@Deprecated
private transient String agentCommand;
/**
 * Obtains the clock difference between this side and that side of the channel.
 *
 * <p>
 * This is a hack to wrap the whole thing into a simple {@link Callable}.
 *
 * <ol>
 * <li>When the callable is sent to remote, we capture the time (on this side) in {@link GetClockDifference2#startTime}
 * <li>When the other side receives the callable it is {@link GetClockDifference2}.
 * <li>We capture the time on the other side and {@link GetClockDifference3} gets sent from the other side
 * <li>When it's read on this side as a return value, it morphs itself into {@link ClockDifference}.
 * </ol>
 */
private static final class GetClockDifference1 extends MasterToSlaveCallable<ClockDifference,IOException> {
    @Override
    public ClockDifference call() {
        // this method must be being invoked locally, which means the clock is in sync
        return new ClockDifference(0);
    }

    private Object writeReplace() {
        // Swapped in at serialization time so that startTime is captured exactly
        // when the callable leaves this side of the channel.
        return new GetClockDifference2();
    }

    private static final long serialVersionUID = 1L;
}

private static final class GetClockDifference2 extends MasterToSlaveCallable<GetClockDifference3,IOException> {
    /**
     * Capture the time on the master when this object is sent to remote, which is when
     * {@link GetClockDifference1#writeReplace()} is run.
     */
    private final long startTime = System.currentTimeMillis();

    @Override
    public GetClockDifference3 call() {
        return new GetClockDifference3(startTime);
    }

    private static final long serialVersionUID = 1L;
}

private static final class GetClockDifference3 implements Serializable {
    // Remote-side clock, sampled when this object is constructed over there.
    private final long remoteTime = System.currentTimeMillis();
    private final long startTime;

    public GetClockDifference3(long startTime) {
        this.startTime = startTime;
    }

    private Object readResolve() {
        // Estimate the instant of the remote sample as the midpoint of the round
        // trip and report the offset of the remote clock from it.
        long endTime = System.currentTimeMillis();
        return new ClockDifference((startTime + endTime)/2-remoteTime);
    }
}
/**
 * Determines the workspace root file name for those who really really need the shortest possible path name.
 */
private static final String WORKSPACE_ROOT = SystemProperties.getString(Slave.class.getName()+".workspaceRoot","workspace");

/**
 * Provides a collection of file names, which are accessible via /jnlpJars link.
 * Whitelist used by {@link JnlpJar#getURL()} to block war-content access and
 * folder escaping (SECURITY-195).
 */
private static final Set<String> ALLOWED_JNLPJARS_FILES = ImmutableSet.of("agent.jar", "slave.jar", "remoting.jar", "jenkins-cli.jar", "hudson-cli.jar");
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.util.internal;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.io.Closeable;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.net.URL;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Locale;
/**
 * Helper class to load JNI resources.
 *
 * <p>Native libraries are shipped on the classpath under {@code META-INF/native/};
 * they are unpacked into a work directory and loaded from there, optionally into
 * a caller-supplied {@link ClassLoader} so the native binding belongs to it.
 */
public final class NativeLibraryLoader {

    private static final InternalLogger logger = InternalLoggerFactory.getInstance(NativeLibraryLoader.class);

    /** Classpath directory under which the packaged native libraries live. */
    private static final String NATIVE_RESOURCE_HOME = "META-INF/native/";
    /** Normalized "os.name": lower-cased with non-alphanumerics stripped, e.g. "windows7", "macosx". */
    private static final String OSNAME;
    /** Directory native libraries are unpacked into before being loaded. */
    private static final File WORKDIR;
    /** Whether the unpacked copy is deleted right after loading (otherwise on JVM exit). */
    private static final boolean DELETE_NATIVE_LIB_AFTER_LOADING;

    static {
        OSNAME = SystemPropertyUtil.get("os.name", "").toLowerCase(Locale.US).replaceAll("[^a-z0-9]+", "");

        String workdir = SystemPropertyUtil.get("io.netty.native.workdir");
        if (workdir != null) {
            File f = new File(workdir);
            f.mkdirs();

            try {
                f = f.getAbsoluteFile();
            } catch (Exception ignored) {
                // Good to have an absolute path, but it's OK.
            }

            WORKDIR = f;
            logger.debug("-Dio.netty.native.workdir: " + WORKDIR);
        } else {
            WORKDIR = tmpdir();
            logger.debug("-Dio.netty.native.workdir: " + WORKDIR + " (io.netty.tmpdir)");
        }

        DELETE_NATIVE_LIB_AFTER_LOADING = SystemPropertyUtil.getBoolean(
                "io.netty.native.deleteLibAfterLoading", true);
    }

    /**
     * Resolves the temporary directory: Netty's own property first, then the JDK's,
     * then platform environment variables, finally a hard-coded default.
     */
    private static File tmpdir() {
        File f;
        try {
            f = toDirectory(SystemPropertyUtil.get("io.netty.tmpdir"));
            if (f != null) {
                logger.debug("-Dio.netty.tmpdir: " + f);
                return f;
            }

            f = toDirectory(SystemPropertyUtil.get("java.io.tmpdir"));
            if (f != null) {
                logger.debug("-Dio.netty.tmpdir: " + f + " (java.io.tmpdir)");
                return f;
            }

            // This shouldn't happen, but just in case ..
            if (isWindows()) {
                f = toDirectory(System.getenv("TEMP"));
                if (f != null) {
                    logger.debug("-Dio.netty.tmpdir: " + f + " (%TEMP%)");
                    return f;
                }

                String userprofile = System.getenv("USERPROFILE");
                if (userprofile != null) {
                    f = toDirectory(userprofile + "\\AppData\\Local\\Temp");
                    if (f != null) {
                        logger.debug("-Dio.netty.tmpdir: " + f + " (%USERPROFILE%\\AppData\\Local\\Temp)");
                        return f;
                    }

                    f = toDirectory(userprofile + "\\Local Settings\\Temp");
                    if (f != null) {
                        logger.debug("-Dio.netty.tmpdir: " + f + " (%USERPROFILE%\\Local Settings\\Temp)");
                        return f;
                    }
                }
            } else {
                f = toDirectory(System.getenv("TMPDIR"));
                if (f != null) {
                    logger.debug("-Dio.netty.tmpdir: " + f + " ($TMPDIR)");
                    return f;
                }
            }
        } catch (Exception ignored) {
            // Environment variable inaccessible
        }

        // Last resort.
        if (isWindows()) {
            f = new File("C:\\Windows\\Temp");
        } else {
            f = new File("/tmp");
        }

        logger.warn("Failed to get the temporary directory; falling back to: " + f);
        return f;
    }

    /**
     * Creates the directory if needed and returns its absolute {@link File},
     * or {@code null} if the path is null or not usable as a directory.
     */
    @SuppressWarnings("ResultOfMethodCallIgnored")
    private static File toDirectory(String path) {
        if (path == null) {
            return null;
        }

        File f = new File(path);
        f.mkdirs();

        if (!f.isDirectory()) {
            return null;
        }

        try {
            return f.getAbsoluteFile();
        } catch (Exception ignored) {
            return f;
        }
    }

    private static boolean isWindows() {
        return OSNAME.startsWith("windows");
    }

    private static boolean isOSX() {
        return OSNAME.startsWith("macosx") || OSNAME.startsWith("osx");
    }

    /**
     * Loads the first available library in the collection with the specified
     * {@link ClassLoader}.
     *
     * @throws IllegalArgumentException
     *         if none of the given libraries load successfully.
     */
    public static void loadFirstAvailable(ClassLoader loader, String... names) {
        for (String name : names) {
            try {
                load(name, loader);
                logger.debug("Successfully loaded the library: {}", name);
                return;
            } catch (Throwable t) {
                logger.debug("Unable to load the library '{}', trying next name...", name, t);
            }
        }
        throw new IllegalArgumentException("Failed to load any of the given libraries: "
                + Arrays.toString(names));
    }

    /**
     * Load the given library with the specified {@link ClassLoader}
     */
    public static void load(String name, ClassLoader loader) {
        String libname = System.mapLibraryName(name);
        String path = NATIVE_RESOURCE_HOME + libname;

        URL url = loader.getResource(path);
        if (url == null && isOSX()) {
            // NOTE(review): ".dynlib" matches the historical Netty resource naming —
            // confirm intended (vs the usual ".dylib" extension).
            if (path.endsWith(".jnilib")) {
                url = loader.getResource(NATIVE_RESOURCE_HOME + "lib" + name + ".dynlib");
            } else {
                url = loader.getResource(NATIVE_RESOURCE_HOME + "lib" + name + ".jnilib");
            }
        }

        if (url == null) {
            // Fall back to normal loading of JNI stuff
            loadLibrary(loader, name, false);
            return;
        }

        int index = libname.lastIndexOf('.');
        String prefix = libname.substring(0, index);
        String suffix = libname.substring(index); // single-arg overload; identical to substring(index, length())

        InputStream in = null;
        OutputStream out = null;
        File tmpFile = null;
        try {
            // Unpack the classpath resource into the work directory so the OS loader can see it.
            tmpFile = File.createTempFile(prefix, suffix, WORKDIR);
            in = url.openStream();
            out = new FileOutputStream(tmpFile);

            byte[] buffer = new byte[8192];
            int length;
            while ((length = in.read(buffer)) > 0) {
                out.write(buffer, 0, length);
            }
            out.flush();

            // Close the output stream before loading the unpacked library,
            // because otherwise Windows will refuse to load it when it's in use by other process.
            closeQuietly(out);
            out = null;

            loadLibrary(loader, tmpFile.getPath(), true);
        } catch (Exception e) {
            throw (UnsatisfiedLinkError) new UnsatisfiedLinkError(
                    "could not load a native library: " + name).initCause(e);
        } finally {
            closeQuietly(in);
            closeQuietly(out);
            // After we load the library it is safe to delete the file.
            // We delete the file immediately to free up resources as soon as possible,
            // and if this fails fallback to deleting on JVM exit.
            if (tmpFile != null && (!DELETE_NATIVE_LIB_AFTER_LOADING || !tmpFile.delete())) {
                tmpFile.deleteOnExit();
            }
        }
    }

    /**
     * Loading the native library into the specified {@link ClassLoader}.
     * @param loader - The {@link ClassLoader} where the native library will be loaded into
     * @param name - The native library path or name
     * @param absolute - Whether the native library will be loaded by path or by name
     */
    private static void loadLibrary(final ClassLoader loader, final String name, final boolean absolute) {
        try {
            // Make sure the helper is belong to the target ClassLoader.
            final Class<?> newHelper = tryToLoadClass(loader, NativeLibraryUtil.class);
            loadLibraryByHelper(newHelper, name, absolute);
            return;
        } catch (UnsatisfiedLinkError e) { // Should by pass the UnsatisfiedLinkError here!
            logger.debug("Unable to load the library '{}', trying other loading mechanism.", name, e);
        } catch (Exception e) {
            logger.debug("Unable to load the library '{}', trying other loading mechanism.", name, e);
        }
        NativeLibraryUtil.loadLibrary(name, absolute);  // Fallback to local helper class.
    }

    /**
     * Invokes {@code helper.loadLibrary(name, absolute)} reflectively inside a
     * privileged block, unwrapping any failure into an {@link UnsatisfiedLinkError}.
     */
    private static void loadLibraryByHelper(final Class<?> helper, final String name, final boolean absolute)
            throws UnsatisfiedLinkError {
        Object ret = AccessController.doPrivileged(new PrivilegedAction<Object>() {
            @Override
            public Object run() {
                try {
                    // Invoke the helper to load the native library, if succeed, then the native
                    // library belong to the specified ClassLoader.
                    Method method = helper.getMethod("loadLibrary", String.class, boolean.class);
                    method.setAccessible(true);
                    return method.invoke(null, name, absolute);
                } catch (Exception e) {
                    return e;
                }
            }
        });
        if (ret instanceof Throwable) {
            Throwable error = (Throwable) ret;
            // Prefer the reflective-invocation cause (the real loading failure) over the wrapper.
            Throwable cause = error.getCause();
            if (cause != null) {
                if (cause instanceof UnsatisfiedLinkError) {
                    throw (UnsatisfiedLinkError) cause;
                } else {
                    throw new UnsatisfiedLinkError(cause.getMessage());
                }
            }
            throw new UnsatisfiedLinkError(error.getMessage());
        }
    }

    /**
     * Try to load the helper {@link Class} into specified {@link ClassLoader}.
     * @param loader - The {@link ClassLoader} where to load the helper {@link Class}
     * @param helper - The helper {@link Class}
     * @return A new helper Class defined in the specified ClassLoader.
     * @throws ClassNotFoundException Helper class not found or loading failed
     */
    private static Class<?> tryToLoadClass(final ClassLoader loader, final Class<?> helper)
            throws ClassNotFoundException {
        try {
            return loader.loadClass(helper.getName());
        } catch (ClassNotFoundException e) {
            // The helper class is NOT found in target ClassLoader, we have to define the helper class.
            final byte[] classBinary = classToByteArray(helper);
            return AccessController.doPrivileged(new PrivilegedAction<Class<?>>() {
                @Override
                public Class<?> run() {
                    try {
                        // Define the helper class in the target ClassLoader,
                        //  then we can call the helper to load the native library.
                        Method defineClass = ClassLoader.class.getDeclaredMethod("defineClass", String.class,
                                byte[].class, int.class, int.class);
                        defineClass.setAccessible(true);
                        return (Class<?>) defineClass.invoke(loader, helper.getName(), classBinary, 0,
                                classBinary.length);
                    } catch (Exception e) {
                        throw new IllegalStateException("Define class failed!", e);
                    }
                }
            });
        }
    }

    /**
     * Load the helper {@link Class} as a byte array, to be redefined in specified {@link ClassLoader}.
     * @param clazz - The helper {@link Class} provided by this bundle
     * @return The binary content of helper {@link Class}.
     * @throws ClassNotFoundException Helper class not found or loading failed
     */
    private static byte[] classToByteArray(Class<?> clazz) throws ClassNotFoundException {
        String fileName = clazz.getName();
        int lastDot = fileName.lastIndexOf('.');
        if (lastDot > 0) {
            fileName = fileName.substring(lastDot + 1);
        }
        URL classUrl = clazz.getResource(fileName + ".class");
        if (classUrl == null) {
            throw new ClassNotFoundException(clazz.getName());
        }
        byte[] buf = new byte[1024];
        ByteArrayOutputStream out = new ByteArrayOutputStream(4096);
        InputStream in = null;
        try {
            in = classUrl.openStream();
            for (int r; (r = in.read(buf)) != -1;) {
                out.write(buf, 0, r);
            }
            return out.toByteArray();
        } catch (IOException ex) {
            throw new ClassNotFoundException(clazz.getName(), ex);
        } finally {
            closeQuietly(in);
            closeQuietly(out);
        }
    }

    /** Closes the resource, swallowing any {@link IOException}. */
    private static void closeQuietly(Closeable c) {
        if (c != null) {
            try {
                c.close();
            } catch (IOException ignore) {
                // ignore
            }
        }
    }

    private NativeLibraryLoader() {
        // Utility
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.resourcemanager.network.samples;
import com.azure.core.credential.TokenCredential;
import com.azure.core.http.policy.HttpLogDetailLevel;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.management.AzureEnvironment;
import com.azure.identity.DefaultAzureCredentialBuilder;
import com.azure.resourcemanager.AzureResourceManager;
import com.azure.resourcemanager.compute.models.KnownLinuxVirtualMachineImage;
import com.azure.resourcemanager.compute.models.VirtualMachine;
import com.azure.resourcemanager.compute.models.VirtualMachineSizeTypes;
import com.azure.resourcemanager.network.models.Network;
import com.azure.resourcemanager.network.models.NetworkInterface;
import com.azure.resourcemanager.network.models.NetworkSecurityGroup;
import com.azure.resourcemanager.network.models.SecurityRuleProtocol;
import com.azure.core.management.Region;
import com.azure.core.management.profile.AzureProfile;
import com.azure.resourcemanager.samples.SSHShell;
import com.azure.resourcemanager.samples.Utils;
import com.jcraft.jsch.JSchException;
import java.io.UnsupportedEncodingException;
import java.util.Date;
/**
* Azure Network sample for managing network security groups -
* - Create a network security group for the front end of a subnet
* - Create a network security group for the back end of a subnet
* - Create Linux virtual machines for the front end and back end
* -- Apply network security groups
* - List network security groups
* - Update a network security group.
*/
public final class ManageNetworkSecurityGroup {
/**
* Main function which runs the actual sample.
*
* @param azureResourceManager instance of the azure client
* @return true if sample runs successfully
*/
public static boolean runSample(AzureResourceManager azureResourceManager) throws UnsupportedEncodingException, JSchException {
final Region region = Region.US_WEST;
final String frontEndNSGName = Utils.randomResourceName(azureResourceManager, "fensg", 24);
final String backEndNSGName = Utils.randomResourceName(azureResourceManager, "bensg", 24);
final String rgName = Utils.randomResourceName(azureResourceManager, "rgNEMS", 24);
final String vnetName = Utils.randomResourceName(azureResourceManager, "vnet", 24);
final String networkInterfaceName1 = Utils.randomResourceName(azureResourceManager, "nic1", 24);
final String networkInterfaceName2 = Utils.randomResourceName(azureResourceManager, "nic2", 24);
final String publicIPAddressLeafDNS1 = Utils.randomResourceName(azureResourceManager, "pip1", 24);
final String frontEndVMName = Utils.randomResourceName(azureResourceManager, "fevm", 24);
final String backEndVMName = Utils.randomResourceName(azureResourceManager, "bevm", 24);
final String userName = "tirekicker";
try {
final String sshKey = SSHShell.generateSSHKeys(null, null).getSshPublicKey();
// Define a virtual network for VMs in this availability set
System.out.println("Creating a virtual network ...");
Network network = azureResourceManager.networks().define(vnetName)
.withRegion(region)
.withNewResourceGroup(rgName)
.withAddressSpace("172.16.0.0/16")
.defineSubnet("Front-end")
.withAddressPrefix("172.16.1.0/24")
.attach()
.defineSubnet("Back-end")
.withAddressPrefix("172.16.2.0/24")
.attach()
.create();
System.out.println("Created a virtual network: " + network.id());
Utils.print(network);
//============================================================
// Create a network security group for the front end of a subnet
// front end subnet contains two rules
// - ALLOW-SSH - allows SSH traffic into the front end subnet
// - ALLOW-WEB- allows HTTP traffic into the front end subnet
System.out.println("Creating a security group for the front end - allows SSH and HTTP");
NetworkSecurityGroup frontEndNSG = azureResourceManager.networkSecurityGroups().define(frontEndNSGName)
.withRegion(region)
.withNewResourceGroup(rgName)
.defineRule("ALLOW-SSH")
.allowInbound()
.fromAnyAddress()
.fromAnyPort()
.toAnyAddress()
.toPort(22)
.withProtocol(SecurityRuleProtocol.TCP)
.withPriority(100)
.withDescription("Allow SSH")
.attach()
.defineRule("ALLOW-HTTP")
.allowInbound()
.fromAnyAddress()
.fromAnyPort()
.toAnyAddress()
.toPort(80)
.withProtocol(SecurityRuleProtocol.TCP)
.withPriority(101)
.withDescription("Allow HTTP")
.attach()
.create();
System.out.println("Created a security group for the front end: " + frontEndNSG.id());
Utils.print(frontEndNSG);
//============================================================
// Create a network security group for the back end of a subnet
// back end subnet contains two rules
// - ALLOW-SQL - allows SQL traffic only from the front end subnet
// - DENY-WEB - denies all outbound internet traffic from the back end subnet
System.out.println("Creating a security group for the front end - allows SSH and "
+ "denies all outbound internet traffic ");
NetworkSecurityGroup backEndNSG = azureResourceManager.networkSecurityGroups().define(backEndNSGName)
.withRegion(region)
.withExistingResourceGroup(rgName)
.defineRule("ALLOW-SQL")
.allowInbound()
.fromAddress("172.16.1.0/24")
.fromAnyPort()
.toAnyAddress()
.toPort(1433)
.withProtocol(SecurityRuleProtocol.TCP)
.withPriority(100)
.withDescription("Allow SQL")
.attach()
.defineRule("DENY-WEB")
.denyOutbound()
.fromAnyAddress()
.fromAnyPort()
.toAnyAddress()
.toAnyPort()
.withAnyProtocol()
.withDescription("Deny Web")
.withPriority(200)
.attach()
.create();
System.out.println("Created a security group for the back end: " + backEndNSG.id());
Utils.print(backEndNSG);
System.out.println("Creating multiple network interfaces");
System.out.println("Creating network interface 1");
//========================================================
// Create a network interface and apply the
// front end network security group
System.out.println("Creating a network interface for the front end");
NetworkInterface networkInterface1 = azureResourceManager.networkInterfaces().define(networkInterfaceName1)
.withRegion(region)
.withExistingResourceGroup(rgName)
.withExistingPrimaryNetwork(network)
.withSubnet("Front-end")
.withPrimaryPrivateIPAddressDynamic()
.withNewPrimaryPublicIPAddress(publicIPAddressLeafDNS1)
.withIPForwarding()
.withExistingNetworkSecurityGroup(frontEndNSG)
.create();
System.out.println("Created network interface for the front end");
Utils.print(networkInterface1);
//========================================================
// Create a network interface and apply the
// back end network security group
System.out.println("Creating a network interface for the back end");
NetworkInterface networkInterface2 = azureResourceManager.networkInterfaces().define(networkInterfaceName2)
.withRegion(region)
.withExistingResourceGroup(rgName)
.withExistingPrimaryNetwork(network)
.withSubnet("Back-end")
.withPrimaryPrivateIPAddressDynamic()
.withExistingNetworkSecurityGroup(backEndNSG)
.create();
Utils.print(networkInterface2);
//=============================================================
// Create a virtual machine (for the front end)
// with the network interface that has the network security group for the front end
System.out.println("Creating a Linux virtual machine (for the front end) - "
+ "with the network interface that has the network security group for the front end");
Date t1 = new Date();
VirtualMachine frontEndVM = azureResourceManager.virtualMachines().define(frontEndVMName)
.withRegion(region)
.withExistingResourceGroup(rgName)
.withExistingPrimaryNetworkInterface(networkInterface1)
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername(userName)
.withSsh(sshKey)
.withSize(VirtualMachineSizeTypes.fromString("Standard_D2a_v4"))
.create();
Date t2 = new Date();
System.out.println("Created Linux VM: (took "
+ ((t2.getTime() - t1.getTime()) / 1000) + " seconds) " + frontEndVM.id());
// Print virtual machine details
Utils.print(frontEndVM);
//=============================================================
// Create a virtual machine (for the back end)
// with the network interface that has the network security group for the back end
System.out.println("Creating a Linux virtual machine (for the back end) - "
+ "with the network interface that has the network security group for the back end");
t1 = new Date();
VirtualMachine backEndVM = azureResourceManager.virtualMachines().define(backEndVMName)
.withRegion(region)
.withExistingResourceGroup(rgName)
.withExistingPrimaryNetworkInterface(networkInterface2)
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername(userName)
.withSsh(sshKey)
.withSize(VirtualMachineSizeTypes.fromString("Standard_D2a_v4"))
.create();
t2 = new Date();
System.out.println("Created a Linux VM: (took "
+ ((t2.getTime() - t1.getTime()) / 1000) + " seconds) " + backEndVM.id());
Utils.print(backEndVM);
//========================================================
// List network security groups
System.out.println("Walking through network security groups");
PagedIterable<NetworkSecurityGroup> networkSecurityGroups = azureResourceManager.networkSecurityGroups().listByResourceGroup(rgName);
for (NetworkSecurityGroup networkSecurityGroup : networkSecurityGroups) {
Utils.print(networkSecurityGroup);
}
//========================================================
// Update a network security group
System.out.println("Updating the front end network security group to allow FTP");
frontEndNSG.update()
.defineRule("ALLOW-FTP")
.allowInbound()
.fromAnyAddress()
.fromAnyPort()
.toAnyAddress()
.toPortRange(20, 21)
.withProtocol(SecurityRuleProtocol.TCP)
.withDescription("Allow FTP")
.withPriority(200)
.attach()
.apply();
System.out.println("Updated the front end network security group");
Utils.print(frontEndNSG);
return true;
} finally {
try {
System.out.println("Deleting Resource Group: " + rgName);
azureResourceManager.resourceGroups().beginDeleteByName(rgName);
System.out.println("Deleted Resource Group: " + rgName);
} catch (NullPointerException npe) {
System.out.println("Did not create any resources in Azure. No clean up is necessary");
} catch (Exception g) {
g.printStackTrace();
}
}
}
/**
 * Main entry point.
 *
 * <p>Authenticates against Azure using the default credential chain and then
 * delegates to {@code runSample}. Any failure is reported to stdout/stderr
 * rather than propagated, since this is a standalone sample.</p>
 *
 * @param args the parameters
 */
public static void main(String[] args) {
    try {
        //=============================================================
        // Authenticate
        final AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE);
        // Point the credential at the profile's Active Directory endpoint.
        final String authorityHost = profile.getEnvironment().getActiveDirectoryEndpoint();
        final TokenCredential credential = new DefaultAzureCredentialBuilder()
            .authorityHost(authorityHost)
            .build();
        // Build an ARM client with basic HTTP logging, bound to the
        // default subscription of the authenticated identity.
        final AzureResourceManager azureResourceManager = AzureResourceManager
            .configure()
            .withLogLevel(HttpLogDetailLevel.BASIC)
            .authenticate(credential, profile)
            .withDefaultSubscription();
        // Print selected subscription
        System.out.println("Selected subscription: " + azureResourceManager.subscriptionId());
        runSample(azureResourceManager);
    } catch (Exception e) {
        System.out.println(e.getMessage());
        e.printStackTrace();
    }
}
// Private constructor: this sample exposes only static entry points and
// must never be instantiated.
private ManageNetworkSecurityGroup() {
}
}
| |
/*
*
* Derby - Class StatementTest
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.derbyTesting.functionTests.tests.jdbc4;
import org.apache.derbyTesting.functionTests.util.SQLStateConstants;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.TestConfiguration;
import junit.framework.*;
import java.sql.*;
/**
 * Tests for new methods added for Statement in JDBC4: {@code isClosed},
 * {@code isPoolable}/{@code setPoolable} and the {@code Wrapper} interface
 * ({@code isWrapperFor}/{@code unwrap}).
 *
 * <p>The tests run against the table {@code stmtTable}, which is created by
 * {@code StatementTestSetup} (see {@link #suite()}).</p>
 */
public class StatementTest
    extends BaseJDBCTestCase {

    /** Default statement used by the tests. */
    private Statement stmt = null;

    /**
     * Create a new test with the given name.
     *
     * @param name name of the test.
     */
    public StatementTest(String name) {
        super(name);
    }

    /**
     * Create default connection and statement.
     *
     * @throws SQLException if setAutoCommit, createStatement or
     *         BaseJDBCTestCase.getConnection fails.
     */
    protected void setUp()
        throws SQLException {
        // Auto-commit is disabled so tests can control transaction state.
        getConnection().setAutoCommit(false);
        // Create a default statement.
        stmt = createStatement();
        assertFalse("First statement must be open initially",
                stmt.isClosed());
    }

    /**
     * Close default connection and statement if necessary.
     *
     * @throws SQLException if a database access exception occurs.
     */
    protected void tearDown()
        throws Exception {
        // Close default statement
        if (stmt != null) {
            stmt.close();
            stmt = null;
        }
        super.tearDown();
    }

    /**
     * Check that <code>isClosed</code> returns <code>true</code> after
     * the statement has been explicitly closed, and that closing a
     * ResultSet alone does not close its statement.
     */
    public void testIsClosedBasic()
        throws SQLException {
        ResultSet rs = stmt.executeQuery("select count(*) from stmtTable");
        assertFalse("Statement should still be open", stmt.isClosed());
        rs.close();
        assertFalse("Statement should be open after ResultSet has been " +
                "closed", stmt.isClosed());
        stmt.close();
        assertTrue("Statement should be closed, close() has been called",
                stmt.isClosed());
    }

    /**
     * Test that creating two statements on the same connection does not
     * cause side effects on the statements.
     */
    public void testIsClosedWithTwoStatementsOnSameConnection()
        throws SQLException {
        // Create a second statement on the default connection.
        Statement stmt2 = createStatement();
        assertFalse("Second statement must be open initially",
                stmt2.isClosed());
        assertFalse("First statement should not be closed when " +
                "creating a second statement", stmt.isClosed());
        ResultSet rs = stmt2.executeQuery("select count(*) from stmtTable");
        assertFalse("Second statement should be open after call to " +
                "execute()", stmt2.isClosed());
        assertFalse("First statement should be open after call to " +
                "second statment's execute()", stmt.isClosed());
        stmt2.close();
        assertTrue("Second statement should be closed, close() has " +
                "been called!", stmt2.isClosed());
        assertFalse("First statement should be open after call to " +
                "second statment's close()", stmt.isClosed());
    }

    /**
     * Test that the two statements created on the connection are closed
     * when the connection itself is closed.
     */
    public void testIsClosedWhenClosingConnection()
        throws SQLException {
        // Create an extra statement for good measure.
        Statement stmt2 = createStatement();
        assertFalse("Second statement must be open initially",
                stmt2.isClosed());
        // Execute something on it, as opposed to the default statement.
        stmt2.execute("select count(*) from stmtTable");
        assertFalse("Second statement should be open after call to " +
                "execute()", stmt2.isClosed());
        // Close the connection. We must commit/rollback first, or else a
        // "Invalid transaction state" exception is raised.
        rollback();
        Connection con = getConnection();
        con.close();
        assertTrue("Connection should be closed after close()",
                con.isClosed());
        assertTrue("First statement should be closed, as parent " +
                "connection has been closed", stmt.isClosed());
        assertTrue("Second statement should be closed, as parent " +
                "connection has been closed", stmt2.isClosed());
    }

    /**
     * Check the state of the statement when the connection is first attempted
     * closed when in an invalid transaction state, then closed after a
     * commit. According to the JDBC 4 API documentation: </i>"It is strongly
     * recommended that an application explicitly commits or rolls back an
     * active transaction prior to calling the close method. If the close
     * method is called and there is an active transaction,
     * the results are implementation-defined."</i>
     * Derby throws an exception and keeps the connection open.
     */
    public void testIsClosedWhenClosingConnectionInInvalidState()
        throws SQLException {
        stmt.executeQuery("select count(*) from stmtTable");
        // Connection should now be in an invalid transaction state.
        Connection con = stmt.getConnection();
        try {
            con.close();
            fail("Invalid transaction state exception was not thrown");
        } catch (SQLException sqle) {
            // The SQL State is incorrect in the embedded client, see
            // JIRA id DERBY-1168
            String expectedState =
                SQLStateConstants.INVALID_TRANSACTION_STATE_ACTIVE_SQL_TRANSACTION;
            if ( ! expectedState.equals(sqle.getSQLState()) )
            {
                System.err.println("ERROR: Unexpected SQL State encountered; "
                    + "got " + sqle.getSQLState() + ", expected "
                    + expectedState +
                    ". Unexpected exception message is " + sqle.getMessage());
                throw sqle;
            }
        }
        assertFalse("Statement should still be open, because " +
                "Connection.close() failed", stmt.isClosed());
        assertFalse("Connection should still be open", con.isClosed());
        // Do a commit here, since we do a rollback in another test.
        con.commit();
        con.close();
        assertTrue("Connection should be closed after close()",
                con.isClosed());
        assertTrue("Statement should be closed, because " +
                "the connection has been closed", stmt.isClosed());
        stmt.close();
        assertTrue("Statement should still be closed", stmt.isClosed());
    }

    /**
     * Execute a query on a statement after the parent connection has been
     * closed. Execution must fail, and the statement must report itself
     * as closed afterwards.
     */
    public void testStatementExecuteAfterConnectionClose()
        throws SQLException {
        Connection con = stmt.getConnection();
        con.close();
        assertTrue("Connection should be closed after close()",
                con.isClosed());
        try {
            stmt.executeQuery("select count(*) from stmtTable");
            // BUGFIX: the original test silently passed when executeQuery did
            // NOT throw; executing on a statement whose connection is closed
            // must raise an exception, so make that explicit.
            fail("Execution on a statement with a closed connection " +
                    "should have thrown an exception");
        } catch (SQLException sqle) {
            assertEquals("Unexpected SQL state for performing " +
                    "operations on a closed statement.",
                    SQLStateConstants.CONNECTION_EXCEPTION_CONNECTION_DOES_NOT_EXIST,
                    sqle.getSQLState());
        }
        assertTrue("Statement should be closed, because " +
                "the connection has been closed", stmt.isClosed());
    }

    /** A Statement must report being a wrapper for Statement. */
    public void testIsWrapperForStatement() throws SQLException {
        assertTrue(stmt.isWrapperFor(Statement.class));
    }

    /** A plain Statement is not a wrapper for PreparedStatement. */
    public void testIsNotWrapperForPreparedStatement() throws SQLException {
        assertFalse(stmt.isWrapperFor(PreparedStatement.class));
    }

    /** A plain Statement is not a wrapper for CallableStatement. */
    public void testIsNotWrapperForCallableStatement() throws SQLException {
        assertFalse(stmt.isWrapperFor(CallableStatement.class));
    }

    /** A Statement is not a wrapper for ResultSet. */
    public void testIsNotWrapperForResultSet() throws SQLException {
        assertFalse(stmt.isWrapperFor(ResultSet.class));
    }

    /** Unwrapping to Statement must return the statement itself. */
    public void testUnwrapStatement() throws SQLException {
        Statement stmt2 = stmt.unwrap(Statement.class);
        assertSame("Unwrap returned wrong object.", stmt, stmt2);
    }

    /** Unwrapping to PreparedStatement must fail with SQLState XJ128. */
    public void testUnwrapPreparedStatement() {
        try {
            PreparedStatement ps = stmt.unwrap(PreparedStatement.class);
            fail("Unwrap didn't fail.");
        } catch (SQLException e) {
            assertSQLState("XJ128", e);
        }
    }

    /** Unwrapping to CallableStatement must fail with SQLState XJ128. */
    public void testUnwrapCallableStatement() {
        try {
            CallableStatement cs = stmt.unwrap(CallableStatement.class);
            fail("Unwrap didn't fail.");
        } catch (SQLException e) {
            assertSQLState("XJ128", e);
        }
    }

    /** Unwrapping to ResultSet must fail with SQLState XJ128. */
    public void testUnwrapResultSet() throws SQLException {
        try {
            ResultSet rs = stmt.unwrap(ResultSet.class);
            fail("Unwrap didn't fail.");
        } catch (SQLException e) {
            assertSQLState("XJ128", e);
        }
    }

    /**
     * Tests isPoolable, setPoolable, and the default poolability.
     */
    public void testPoolable() throws SQLException {
        assertFalse("Statement cannot be poolable by default",
                stmt.isPoolable());
        stmt.setPoolable(true);
        assertTrue("Statement must be poolable", stmt.isPoolable());
        stmt.setPoolable(false);
        assertFalse("Statement cannot be poolable", stmt.isPoolable());
    }

    /**
     * Create test suite for StatementTest: the tests are run both embedded
     * and, via the client/server decorator, over the network client.
     */
    public static Test suite() {
        TestSuite suite = new TestSuite("StatementTest suite");
        // Decorate test suite with a TestSetup class.
        suite.addTest(new StatementTestSetup(
            new TestSuite(StatementTest.class)));
        suite.addTest(TestConfiguration.clientServerDecorator(
            new StatementTestSetup(new TestSuite(StatementTest.class))));
        return suite;
    }

} // End class StatementTest
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.jsmpp.examples;
import java.io.IOException;
import java.util.Date;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.log4j.PropertyConfigurator;
import org.jsmpp.PDUStringException;
import org.jsmpp.SMPPConstant;
import org.jsmpp.bean.CancelSm;
import org.jsmpp.bean.DataCodings;
import org.jsmpp.bean.DataSm;
import org.jsmpp.bean.DeliveryReceipt;
import org.jsmpp.bean.ESMClass;
import org.jsmpp.bean.GSMSpecificFeature;
import org.jsmpp.bean.MessageMode;
import org.jsmpp.bean.MessageType;
import org.jsmpp.bean.NumberingPlanIndicator;
import org.jsmpp.bean.QuerySm;
import org.jsmpp.bean.RegisteredDelivery;
import org.jsmpp.bean.ReplaceSm;
import org.jsmpp.bean.SubmitMulti;
import org.jsmpp.bean.SubmitMultiResult;
import org.jsmpp.bean.SubmitSm;
import org.jsmpp.bean.TypeOfNumber;
import org.jsmpp.extra.ProcessRequestException;
import org.jsmpp.extra.SessionState;
import org.jsmpp.session.BindRequest;
import org.jsmpp.session.DataSmResult;
import org.jsmpp.session.QuerySmResult;
import org.jsmpp.session.SMPPServerSession;
import org.jsmpp.session.SMPPServerSessionListener;
import org.jsmpp.session.ServerMessageReceiverListener;
import org.jsmpp.session.Session;
import org.jsmpp.session.SessionStateListener;
import org.jsmpp.util.DeliveryReceiptState;
import org.jsmpp.util.MessageIDGenerator;
import org.jsmpp.util.MessageId;
import org.jsmpp.util.RandomMessageIDGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Stress-test SMPP server: accepts incoming sessions, acknowledges binds
 * with system id {@code "sys"}, generates a random message id for every
 * {@code submit_sm}, and logs the observed request throughput once per
 * second. Unsupported operations (query_sm, submit_multi, data_sm, cancel_sm,
 * replace_sm) are no-ops.
 *
 * @author uudashr
 */
public class StressServer implements Runnable, ServerMessageReceiverListener {
    /** Maximum number of sessions concurrently waiting for their bind PDU. */
    private static final int DEFAULT_MAX_WAIT_BIND = 10;
    private static final String DEFAULT_LOG4J_PATH = "stress/server-log4j.properties";
    private static final Integer DEFAULT_PORT = 8056;
    private static final Integer DEFAULT_PROCESSOR_DEGREE = 3;
    private static final Logger logger = LoggerFactory.getLogger(StressServer.class);

    /** Executor that runs one {@link WaitBindTask} per accepted session. */
    private final ExecutorService waitBindExecService = Executors.newFixedThreadPool(DEFAULT_MAX_WAIT_BIND);
    private final MessageIDGenerator messageIDGenerator = new RandomMessageIDGenerator();
    /** Counts submit_sm requests; reset every second by the traffic watcher. */
    private final AtomicInteger requestCounter = new AtomicInteger();
    // Both fields are assigned exactly once in the constructor, so they are
    // declared final (they were mutable in the original for no reason).
    private final int processorDegree;
    private final int port;

    /**
     * @param port TCP port to listen on for SMPP sessions
     * @param processorDegree number of PDU processor threads per session
     */
    public StressServer(int port, int processorDegree) {
        this.port = port;
        this.processorDegree = processorDegree;
    }

    /**
     * Accept-loop: blocks forever accepting sessions and dispatching each one
     * to a bind-waiting task. Only returns on an I/O error on the listener.
     */
    public void run() {
        try {
            SMPPServerSessionListener sessionListener = new SMPPServerSessionListener(port);
            sessionListener.setSessionStateListener(new SessionStateListenerImpl());
            sessionListener.setPduProcessorDegree(processorDegree);
            new TrafficWatcherThread().start();
            logger.info("Listening on port {}", port);
            while (true) {
                SMPPServerSession serverSession = sessionListener.accept();
                logger.info("Accepting connection for session {}", serverSession.getSessionId());
                serverSession.setMessageReceiverListener(this);
                waitBindExecService.execute(new WaitBindTask(serverSession));
            }
        } catch (IOException e) {
            logger.error("IO error occured", e);
        }
    }

    /** query_sm is not supported by this stress server. */
    public QuerySmResult onAcceptQuerySm(QuerySm querySm,
            SMPPServerSession source) throws ProcessRequestException {
        return null;
    }

    /**
     * Acknowledge a submit_sm with a freshly generated message id and bump
     * the per-second request counter.
     */
    public MessageId onAcceptSubmitSm(SubmitSm submitSm,
            SMPPServerSession source) throws ProcessRequestException {
        MessageId messageId = messageIDGenerator.newMessageId();
        logger.debug("Receiving submit_sm {}, and return message id {}", new String(submitSm.getShortMessage()), messageId.getValue());
        requestCounter.incrementAndGet();
        return messageId;
    }

    /** submit_multi is not supported by this stress server. */
    public SubmitMultiResult onAcceptSubmitMulti(SubmitMulti submitMulti,
            SMPPServerSession source) throws ProcessRequestException {
        return null;
    }

    /** data_sm is not supported by this stress server. */
    public DataSmResult onAcceptDataSm(DataSm dataSm, Session source)
        throws ProcessRequestException {
        return null;
    }

    /** cancel_sm is accepted but ignored. */
    public void onAcceptCancelSm(CancelSm cancelSm, SMPPServerSession source)
        throws ProcessRequestException {
    }

    /** replace_sm is accepted but ignored. */
    public void onAcceptReplaceSm(ReplaceSm replaceSm, SMPPServerSession source)
        throws ProcessRequestException {
    }

    /** Logs every session state transition. */
    private class SessionStateListenerImpl implements SessionStateListener {
        public void onStateChange(SessionState newState, SessionState oldState,
                Object source) {
            SMPPServerSession session = (SMPPServerSession)source;
            logger.info("New state of " + session.getSessionId() + " is " + newState);
        }
    }

    /**
     * Waits (up to 1 second) for the bind PDU of a freshly accepted session
     * and accepts it with system id "sys".
     */
    private class WaitBindTask implements Runnable {
        private final SMPPServerSession serverSession;

        public WaitBindTask(SMPPServerSession serverSession) {
            this.serverSession = serverSession;
        }

        public void run() {
            try {
                BindRequest bindRequest = serverSession.waitForBind(1000);
                logger.debug("Accepting bind for session {}", serverSession.getSessionId());
                try {
                    bindRequest.accept("sys");
                } catch (PDUStringException e) {
                    logger.error("Invalid system id", e);
                    bindRequest.reject(SMPPConstant.STAT_ESME_RSYSERR);
                }
            } catch (IllegalStateException e) {
                logger.error("System error", e);
            } catch (TimeoutException e) {
                logger.warn("Wait for bind has reach timeout", e);
            } catch (IOException e) {
                logger.error("Failed accepting bind request for session {}", serverSession.getSessionId());
            }
        }
    }

    /**
     * Sends a DELIVRD delivery receipt for a previously submitted message
     * after a one-second delay.
     *
     * <p>NOTE(review): this task is never instantiated anywhere in this class
     * — presumably kept for manual experiments; confirm before removing.</p>
     */
    private class DeliveryReceiptTask implements Runnable {
        private final SMPPServerSession session;
        private final SubmitSm submitSm;
        private MessageId messageId;

        public DeliveryReceiptTask(SMPPServerSession session,
                SubmitSm submitSm, MessageId messageId) {
            this.session = session;
            this.submitSm = submitSm;
            this.messageId = messageId;
        }

        public void run() {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e1) {
                // BUGFIX: the original only printed the stack trace and went on.
                // Re-assert the interrupt flag and abort the delayed delivery.
                Thread.currentThread().interrupt();
                return;
            }
            // Convert the hexadecimal message id to its decimal form.
            // BUGFIX: parse as long — RandomMessageIDGenerator can produce hex
            // ids wider than 31 bits, which would overflow Integer.valueOf and
            // throw NumberFormatException. Values that fit an int render the
            // same decimal string as before.
            String stringValue = Long.toString(Long.parseLong(messageId.getValue(), 16));
            try {
                DeliveryReceipt delRec = new DeliveryReceipt(stringValue, 1, 1, new Date(), new Date(), DeliveryReceiptState.DELIVRD, null, new String(submitSm.getShortMessage()));
                // Deliver the receipt back with source/destination swapped.
                session.deliverShortMessage(
                    "mc",
                    TypeOfNumber.valueOf(submitSm.getDestAddrTon()),
                    NumberingPlanIndicator.valueOf(submitSm.getDestAddrNpi()),
                    submitSm.getDestAddress(),
                    TypeOfNumber.valueOf(submitSm.getSourceAddrTon()),
                    NumberingPlanIndicator.valueOf(submitSm.getSourceAddrNpi()),
                    submitSm.getSourceAddr(),
                    new ESMClass(MessageMode.DEFAULT, MessageType.SMSC_DEL_RECEIPT, GSMSpecificFeature.DEFAULT),
                    (byte)0,
                    (byte)0,
                    new RegisteredDelivery(0),
                    DataCodings.ZERO,
                    delRec.toString().getBytes());
                logger.debug("Sending delivery reciept for message id " + messageId + ":" + stringValue);
            } catch (Exception e) {
                logger.error("Failed sending delivery_receipt for message id " + messageId + ":" + stringValue, e);
            }
        }
    }

    /**
     * Once per second, logs how many submit_sm requests arrived during the
     * last second (and resets the counter).
     */
    private class TrafficWatcherThread extends Thread {
        @Override
        public void run() {
            logger.info("Starting traffic watcher...");
            while (true) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    // BUGFIX: the interrupt was silently swallowed, making the
                    // watcher unstoppable. Restore the flag and terminate.
                    Thread.currentThread().interrupt();
                    return;
                }
                int trafficPerSecond = requestCounter.getAndSet(0);
                logger.info("Traffic per second : " + trafficPerSecond);
            }
        }
    }

    /**
     * Entry point. Port, processor degree and the log4j config path are read
     * from the system properties {@code jsmpp.server.port},
     * {@code jsmpp.server.procDegree} and {@code jsmpp.server.log4jPath},
     * falling back to built-in defaults on absence or parse failure.
     */
    public static void main(String[] args) {
        int port;
        try {
            port = Integer.parseInt(System.getProperty("jsmpp.server.port", DEFAULT_PORT.toString()));
        } catch (NumberFormatException e) {
            port = DEFAULT_PORT;
        }
        int processorDegree;
        try {
            processorDegree = Integer.parseInt(System.getProperty("jsmpp.server.procDegree", DEFAULT_PROCESSOR_DEGREE.toString()));
        } catch (NumberFormatException e) {
            processorDegree = DEFAULT_PROCESSOR_DEGREE;
        }
        String log4jPath = System.getProperty("jsmpp.server.log4jPath", DEFAULT_LOG4J_PATH);
        PropertyConfigurator.configure(log4jPath);
        logger.info("Processor degree: " + processorDegree);
        StressServer stressServer = new StressServer(port, processorDegree);
        stressServer.run();
    }
}
| |
package fr.univnantes.termsuite.framework.service;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.stream.Stream;
import com.google.common.base.Preconditions;
import fr.univnantes.termsuite.engines.gatherer.VariationType;
import fr.univnantes.termsuite.model.ContextVector;
import fr.univnantes.termsuite.model.OccurrenceStore;
import fr.univnantes.termsuite.model.RelationType;
import fr.univnantes.termsuite.model.Term;
import fr.univnantes.termsuite.model.TermOccurrence;
import fr.univnantes.termsuite.model.TermProperty;
import fr.univnantes.termsuite.model.TermWord;
import fr.univnantes.termsuite.utils.TermSuiteConstants;
/**
 * Read/write facade over a single {@link Term}: exposes the term's typed
 * properties, its occurrences (via the {@link OccurrenceStore}) and its
 * relations (via the owning {@link TerminologyService}).
 *
 * <p>Thread-safety: only {@link #incrementFrequency(int)} is guarded (by a
 * semaphore); all other mutators delegate directly to the underlying term.</p>
 */
public class TermService {
    // Store holding the concrete occurrences of this term in the corpus.
    private OccurrenceStore occStore;
    // Owning terminology; used to resolve relations involving this term.
    private TerminologyService terminology;
    // The wrapped term. Never null (checked in the constructor).
    private Term term;

    /** Returns the term's distributional context vector (may be null if not computed). */
    public ContextVector getContext() {
        return term.getContext();
    }

    public Double getOrthographicScore() {
        return term.getOrthographicScore();
    }

    public Integer getIndependantFrequency() {
        return term.getIndependantFrequency();
    }

    public Double getIndependance() {
        return term.getIndependance();
    }

    public Integer getSwtSize() {
        return term.getSwtSize();
    }

    /** Generic numeric accessor for any numeric term property. */
    public Number getNumber(TermProperty property) {
        return term.getNumber(property);
    }

    /**
     * Wraps {@code term} within the given terminology.
     *
     * @param terminology owning terminology service (non-null)
     * @param occStore store resolving this term's occurrences
     * @param term the wrapped term (non-null)
     */
    public TermService(TerminologyService terminology, OccurrenceStore occStore, Term term) {
        super();
        Preconditions.checkNotNull(term);
        Preconditions.checkNotNull(terminology);
        this.terminology = terminology;
        this.occStore = occStore;
        this.term = term;
    }

    @Override
    public int hashCode() {
        // Delegates to the wrapped term so a TermService hashes like its Term.
        return term.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        // NOTE(review): this is asymmetric — a TermService compares equal to a
        // raw Term via term.equals(obj), but Term.equals(TermService) presumably
        // does not reciprocate, violating the equals contract. Confirm before
        // relying on mixed Term/TermService collections.
        if (obj instanceof TermService) {
            return ((TermService) obj).term.equals(term);
        } else
            return term.equals(obj);
    }

    @Override
    public String toString() {
        return term.toString();
    }

    /** Convenience negation of {@link #isFiltered()}. */
    public boolean notFiltered() {
        return !isFiltered();
    }

    /** True only when the FILTERED property is both set and true. */
    public boolean isFiltered() {
        return isPropertySet(TermProperty.FILTERED) && term.getBoolean(TermProperty.FILTERED);
    }

    /** True for single-word terms whose unique word is morphologically compound. */
    public boolean isCompound() {
        return isSingleWord() && this.term.getWords().get(0).getWord().isCompound();
    }

    /**
     * Returns the concatenation of inner words' lemmas.
     */
    public String getLemma() {
        StringBuilder builder = new StringBuilder();
        int i = 0;
        for(TermWord tw:this.term.getWords()) {
            // Separate lemmas with a single whitespace (none before the first).
            if(i>0)
                builder.append(TermSuiteConstants.WHITESPACE);
            builder.append(tw.getWord().getLemma());
            i++;
        }
        return builder.toString();
    }

    /*
     * *******************************************************************************
     * PROPERTY GETTERS/SETTERS
     * *******************************************************************************
     */

    /*
     * GROUPING_KEY
     */
    public String getGroupingKey() {
        return term.getString(TermProperty.GROUPING_KEY);
    }

    /*
     * DOCUMENT_FREQUENCY
     */
    public Integer getDocumentFrequency() {
        return term.getInteger(TermProperty.DOCUMENT_FREQUENCY);
    }

    public void setDocumentFrequency(int documentFrequency) {
        term.setProperty(TermProperty.DOCUMENT_FREQUENCY, documentFrequency);
    }

    /*
     * FREQUENCY
     */
    public Integer getFrequency() {
        return term.getInteger(TermProperty.FREQUENCY);
    }

    public void setFrequency(int frequency) {
        term.setProperty(TermProperty.FREQUENCY, frequency);
    }

    /*
     * PATTERN
     */
    public String getPattern() {
        return term.getString(TermProperty.PATTERN);
    }

    public void setPattern(String pattern) {
        term.setProperty(TermProperty.PATTERN, pattern);
    }

    /*
     * PILOT
     */
    public String getPilot() {
        return term.getString(TermProperty.PILOT);
    }

    public void setPilot(String pilot) {
        term.setProperty(TermProperty.PILOT, pilot);
    }

    /*
     * SPOTTING_RULE
     */
    public String getSpottingRule() {
        return term.getString(TermProperty.SPOTTING_RULE);
    }

    public void setSpottingRule(String spottingRule) {
        term.setProperty(TermProperty.SPOTTING_RULE, spottingRule);
    }

    /*
     * GENERAL_FREQUENCY_NORM
     */
    public Double getGeneralFrequencyNorm() {
        return term.getDouble(TermProperty.GENERAL_FREQUENCY_NORM);
    }

    public void setGeneralFrequencyNorm(double normalizedGeneralTermFrequency) {
        term.setProperty(TermProperty.GENERAL_FREQUENCY_NORM, normalizedGeneralTermFrequency);
    }

    /*
     * FREQUENCY_NORM
     */
    public Double getFrequencyNorm() {
        return term.getDouble(TermProperty.FREQUENCY_NORM);
    }

    public void setFrequencyNorm(double normalizedTermFrequency) {
        term.setProperty(TermProperty.FREQUENCY_NORM, normalizedTermFrequency);
    }

    /*
     * RANK
     */
    public Integer getRank() {
        return term.getInteger(TermProperty.RANK);
    }

    public void setRank(int rank) {
        term.setProperty(TermProperty.RANK, rank);
    }

    /*
     * SPECIFICITY
     */
    public Double getSpecificity() {
        return term.getDouble(TermProperty.SPECIFICITY);
    }

    public void setSpecificity(double specificity) {
        term.setProperty(TermProperty.SPECIFICITY, specificity);
    }

    /*
     * IS_FIXED_EXPRESSION
     */
    public Boolean isFixedExpression() {
        return term.getBoolean(TermProperty.IS_FIXED_EXPRESSION);
    }

    public void setFixedExpression(boolean fixedExpression) {
        term.setProperty(TermProperty.IS_FIXED_EXPRESSION, fixedExpression);
    }

    /*
     * TF_IDF
     */
    public Double getTfIdf() {
        return term.getDouble(TermProperty.TF_IDF);
    }

    public void setTfIdf(double tfIdf) {
        term.setProperty(TermProperty.TF_IDF, tfIdf);
    }

    /** Raw property value cast to Number (property must hold a numeric value). */
    public Number getPropertyNumberValue(TermProperty p) {
        return (Number)term.get(p);
    }

    public void setDepth(int depth) {
        term.setProperty(TermProperty.DEPTH, depth);
    }

    public Integer getDepth() {
        return term.getInteger(TermProperty.DEPTH);
    }

    public boolean isSingleWord() {
        return term.getWords().size() == 1;
    }

    public boolean isMultiWord() {
        return term.getWords().size() > 1;
    }

    // Guards the read-modify-write of FREQUENCY in incrementFrequency() so
    // concurrent increments are not lost.
    private Semaphore frequencyMutex = new Semaphore(1);

    /**
     * Atomically adds {@code increment} to the FREQUENCY property,
     * initializing it to {@code increment} when not yet set.
     */
    public void incrementFrequency(int increment) {
        frequencyMutex.acquireUninterruptibly();
        if(term.isPropertySet(TermProperty.FREQUENCY))
            term.setProperty(
                TermProperty.FREQUENCY,
                term.getInteger(TermProperty.FREQUENCY) + increment);
        else
            term.setProperty(
                TermProperty.FREQUENCY,
                increment);
        frequencyMutex.release();
    }

    /** Exposes the wrapped term itself. */
    public Term getTerm() {
        return this.term;
    }

    public void setProperty(TermProperty property, Comparable<?> value) {
        this.term.setProperty(property, value);
    }

    /** Releases the term's context vector (e.g. to free memory). */
    public void dropContext() {
        this.term.setContext(null);
    }

    /** Relations where this term is the base of an extension. */
    public Stream<RelationService> extensions() {
        return terminology.extensions(this.getTerm());
    }

    public List<TermWord> getWords() {
        return term.getWords();
    }

    /** All relations pointing to this term. */
    public Stream<RelationService> inboundRelations() {
        return terminology.inboundRelations(term);
    }

    /** Relations of the given type(s) pointing to this term. */
    public Stream<RelationService> inboundRelations(RelationType relType, RelationType... relTypes) {
        return terminology.inboundRelations(term, relType, relTypes);
    }

    /** Relations of the given type(s) starting from this term. */
    public Stream<RelationService> outboundRelations(RelationType relType, RelationType... relTypes) {
        return terminology.outboundRelations(term, relType, relTypes);
    }

    /** All relations starting from this term. */
    public Stream<RelationService> outboundRelations() {
        return terminology.outboundRelations(term);
    }

    public boolean isContextSet() {
        return this.term.getContext()!= null;
    }

    /** Recomputes TF_IDF as frequency divided by document frequency. */
    public void updateTfIdf() {
        term.setProperty(
            TermProperty.TF_IDF,
            (double)term.getFrequency()/term.getDocumentFrequency());
    }

    /** Recomputes SPECIFICITY as log10(1 + freqNorm/generalFreqNorm). */
    public void updateSpecificity() {
        term.setProperty(
            TermProperty.SPECIFICITY,
            Math.log10(1 + term.getFrequencyNorm()/term.getGeneralFrequencyNorm()));
    }

    public boolean isPropertySet(TermProperty property) {
        return term.isPropertySet(property);
    }

    /** Relations where this term extends another term. */
    public Stream<RelationService> extensionBases() {
        return terminology.extensionBases(term);
    }

    /** Variation relations starting from this term. */
    public Stream<RelationService> variations() {
        return terminology.variationsFrom(term);
    }

    /** Variation relations pointing to this term. */
    public Stream<RelationService> variationBases() {
        return terminology.variationsTo(term);
    }

    /** Variations of this term restricted to the given variation type. */
    public Stream<RelationService> variations(VariationType variationType) {
        return variations().filter(rel -> rel.getBooleanIfSet(variationType.getRelationProperty()));
    }

    /**
     * Streams the single-word terms (SWTs) among this term's words that are
     * themselves registered in the terminology.
     */
    public Stream<TermService> getSwts() {
        return this.term.getWords().stream()
            .filter(TermWord::isSwt)
            .map(TermWord::toGroupingKey)
            .filter(this.terminology::containsTerm)
            .map(this.terminology::getTerm);
    }

    /** Concrete occurrences of this term resolved from the occurrence store. */
    public Collection<TermOccurrence> getOccurrences() {
        return occStore.getOccurrences(term);
    }
}
| |
package org.codehaus.mojo.natives.plugin;
/*
* The MIT License
*
* Copyright (c) 2004, The Codehaus
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
* associated documentation files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge, publish, distribute,
* sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
* NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.apache.bcel.classfile.ClassParser;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.classfile.Method;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectHelper;
import org.codehaus.mojo.natives.NativeBuildException;
import org.codehaus.mojo.natives.javah.Javah;
import org.codehaus.mojo.natives.javah.JavahConfiguration;
import org.codehaus.mojo.natives.manager.JavahManager;
import org.codehaus.mojo.natives.manager.NoSuchNativeProviderException;
import org.codehaus.plexus.archiver.util.DefaultFileSet;
import org.codehaus.plexus.archiver.zip.ZipArchiver;
import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.StringUtils;
/**
 * Generates JNI include (header) files for a configured set of class names and,
 * optionally, for dependency classes that declare native methods.
 */
@Mojo(name = "javah", defaultPhase = LifecyclePhase.GENERATE_SOURCES, requiresDependencyResolution = ResolutionScope.COMPILE)
public class NativeJavahMojo
    extends AbstractNativeMojo
{
    /**
     * Javah Provider.
     * @since 1.0-alpha-2
     */
    @Parameter(defaultValue = "default", required = true)
    private String javahProvider;

    /**
     * List of class names to generate native files. Additional JNI interfaces are automatically discovered from
     * the project's dependencies of <i>jar</i> type when <i>javahSearchJNIFromDependencies</i> is true.
     * @since 1.0-alpha-4
     */
    @Parameter
    private List<String> javahClassNames = new ArrayList<String>( 0 );

    /**
     * Enable the search from project dependencies for JNI interfaces, in addition to <i>javahClassNames</i>.
     * @since 1.0-alpha-4
     */
    @Parameter(defaultValue = "false")
    private boolean javahSearchJNIFromDependencies;

    /**
     * Path to javah executable, if present, it will override the default one which bases on architecture type. See
     * 'javahProvider' argument.
     * @since 1.0-alpha-2
     */
    @Parameter
    private File javahPath;

    /**
     * Where to place javah generated files.
     * @since 1.0-alpha-2
     */
    @Parameter(defaultValue = "${project.build.directory}/native/javah", required = true)
    protected File javahOutputDirectory;

    /**
     * If configured, this value will be combined with outputDirectory to pass into javah's -o option.
     * @since 1.0-alpha-4
     */
    @Parameter
    private String javahOutputFileName;

    /**
     * Additional javah classname and its corresponding header name. Use this option to create one class per header.
     * <p/>
     *
     * <pre>
     * &lt;javahIncludes&gt;
     *   &lt;javahInclude&gt;
     *     &lt;className&gt;com.some.Class&lt;/className&gt;
     *     &lt;headerName&gt;Class.h&lt;/headerName&gt;
     *   &lt;javahInclude&gt;
     * &lt;/javahIncludes&gt;
     * </pre>
     * @since 1.0-alpha-8
     */
    @Parameter
    private List<JavahInclude> javahIncludes = new ArrayList<JavahInclude>();

    /**
     * Enable javah verbose mode.
     * @since 1.0-alpha-2
     */
    @Parameter(defaultValue = "false")
    private boolean javahVerbose;

    /**
     * Archive all generated include files and deploy as an inczip.
     */
    @Parameter(defaultValue = "false")
    private boolean attach;

    /**
     * Classifier name when install/deploy generated includes file. See ${attach} for details.
     */
    @Parameter(defaultValue = "javah")
    private String classifier;

    /**
     * Archive file to bundle all generated include files if enabled by ${attach}.
     * @since 1.0-alpha-8
     */
    @Parameter(defaultValue = "${project.build.directory}/${project.build.finalName}.inczip", required = true)
    private File incZipFile;

    /**
     * Set CLASSPATH env variable instead of using -classpath command-line argument. Use this option to allow large
     * number of jars in classpath due to command line size limit under Windows.
     * @since 1.0-alpha-9
     */
    @Parameter(defaultValue = "false")
    private boolean useEnvClasspath;

    /**
     * Internal: To look up javah implementation.
     * @since 1.0-alpha-2
     */
    @Component
    private JavahManager manager;

    /**
     * Maven ProjectHelper.
     * @since 1.0-alpha-8
     */
    @Component
    private MavenProjectHelper projectHelper;

    /**
     * For unit test only.
     */
    private JavahConfiguration config;

    /**
     * Discovers additional JNI classes (when enabled), runs javah over the configured
     * class names and per-class includes, optionally attaches the generated headers as
     * an inczip artifact, and registers the output directory as a compile source root.
     *
     * @throws MojoExecutionException when javah fails, no provider is found, or archiving fails
     */
    public void execute()
        throws MojoExecutionException
    {
        this.discoverAdditionalJNIClassName();
        if ( this.javahClassNames.isEmpty() && this.javahIncludes.isEmpty() )
        {
            // Nothing to generate.
            return;
        }
        try
        {
            if ( !this.javahClassNames.isEmpty() )
            {
                this.config =
                    this.createProviderConfiguration( this.javahClassNames.toArray( new String[this.javahClassNames.size()] ),
                                                      this.javahOutputFileName );
                this.getJavah().compile( this.config );
            }
            // One extra javah run per configured include: one class per header file.
            for ( JavahInclude javahInclude : this.javahIncludes )
            {
                this.config =
                    this.createProviderConfiguration( new String[] { javahInclude.getClassName() },
                                                      javahInclude.getHeaderName() );
                this.getJavah().compile( this.config );
            }
            if ( this.attach )
            {
                this.attachGeneratedIncludeFilesAsIncZip();
            }
        }
        catch ( NativeBuildException e )
        {
            throw new MojoExecutionException( "Error running javah command", e );
        }
        this.project.addCompileSourceRoot( this.javahOutputDirectory.getAbsolutePath() );
    }

    /**
     * Zips everything under {@link #javahOutputDirectory} into {@link #incZipFile} and
     * attaches it to the project, with {@link #classifier} when one is configured.
     *
     * @throws MojoExecutionException when archiving or attaching fails
     */
    private void attachGeneratedIncludeFilesAsIncZip()
        throws MojoExecutionException
    {
        try
        {
            ZipArchiver archiver = new ZipArchiver();
            DefaultFileSet fileSet = new DefaultFileSet();
            fileSet.setUsingDefaultExcludes( true );
            fileSet.setDirectory( javahOutputDirectory );
            archiver.addFileSet( fileSet );
            archiver.setDestFile( this.incZipFile );
            archiver.createArchive();
            // A blank classifier means "attach without classifier".
            if ( StringUtils.isBlank( this.classifier ) )
            {
                projectHelper.attachArtifact( this.project, INCZIP_TYPE, null, this.incZipFile );
            }
            else
            {
                projectHelper.attachArtifact( this.project, INCZIP_TYPE, this.classifier, this.incZipFile );
            }
        }
        catch ( Exception e )
        {
            throw new MojoExecutionException( "Unable to archive/deploy generated include files", e );
        }
    }

    /**
     * Looks up the configured javah implementation.
     *
     * @return the javah provider named by {@link #javahProvider}
     * @throws MojoExecutionException when no such provider exists
     */
    private Javah getJavah()
        throws MojoExecutionException
    {
        try
        {
            return this.manager.getJavah( this.javahProvider );
        }
        catch ( NoSuchNativeProviderException pe )
        {
            // Preserve the cause so the stack trace points at the provider lookup.
            throw new MojoExecutionException( pe.getMessage(), pe );
        }
    }

    /**
     * Get all jars in the pom excluding transitive, test, and provided scope dependencies.
     *
     * @return compile-scope jar artifacts, never null
     */
    private List<Artifact> getJavahArtifacts()
    {
        List<Artifact> list = new ArrayList<Artifact>();
        List<?> artifacts = this.project.getCompileArtifacts();
        if ( artifacts != null )
        {
            for ( Object element : artifacts )
            {
                Artifact artifact = (Artifact) element;
                // pick up only jar files
                if ( !"jar".equals( artifact.getType() ) )
                {
                    continue;
                }
                // exclude some other scopes
                if ( Artifact.SCOPE_PROVIDED.equals( artifact.getScope() ) )
                {
                    continue;
                }
                list.add( artifact );
            }
        }
        return list;
    }

    /**
     * Build classpaths from dependent jars including project output directory (i.e. classes directory).
     *
     * @return classpath entries; element 0 is always the project's classes directory
     */
    private String[] getJavahClassPath()
    {
        List<Artifact> artifacts = this.getJavahArtifacts();
        String[] classPaths = new String[artifacts.size() + 1];
        classPaths[0] = this.project.getBuild().getOutputDirectory();
        for ( int i = 1; i < classPaths.length; ++i )
        {
            classPaths[i] = artifacts.get( i - 1 ).getFile().getPath();
        }
        return classPaths;
    }

    /**
     * Scans the immediate jar dependencies for classes declaring native methods and
     * adds their names to {@link #javahClassNames}. No-op unless
     * {@link #javahSearchJNIFromDependencies} is set.
     *
     * @throws MojoExecutionException when a jar cannot be read
     */
    private void discoverAdditionalJNIClassName()
        throws MojoExecutionException
    {
        if ( !this.javahSearchJNIFromDependencies )
        {
            return;
        }
        // scan the immediate dependency list for jni classes
        for ( Artifact artifact : this.getJavahArtifacts() )
        {
            this.getLog().info( "Parsing " + artifact.getFile() + " for native classes." );
            ZipFile zipFile = null;
            try
            {
                zipFile = new ZipFile( artifact.getFile() );
                Enumeration<? extends ZipEntry> zipEntries = zipFile.entries();
                while ( zipEntries.hasMoreElements() )
                {
                    ZipEntry zipEntry = zipEntries.nextElement();
                    if ( !"class".equals( FileUtils.extension( zipEntry.getName() ) ) )
                    {
                        continue;
                    }
                    ClassParser parser = new ClassParser( artifact.getFile().getPath(), zipEntry.getName() );
                    JavaClass clazz = parser.parse();
                    for ( Method method : clazz.getMethods() )
                    {
                        if ( method.isNative() )
                        {
                            this.javahClassNames.add( clazz.getClassName() );
                            this.getLog().info( "Found native class: " + clazz.getClassName() );
                            // one native method is enough to javah the class
                            break;
                        }
                    }
                }
            }
            catch ( IOException ioe )
            {
                throw new MojoExecutionException( "Error searching for native class in " + artifact.getFile(), ioe );
            }
            finally
            {
                // Always release the archive, even when parsing fails part-way through.
                if ( zipFile != null )
                {
                    try
                    {
                        zipFile.close();
                    }
                    catch ( IOException ignored )
                    {
                        // best-effort close of a read-only archive
                    }
                }
            }
        }
    }

    /**
     * Builds a {@link JavahConfiguration} from the mojo's parameters.
     *
     * @param classNames classes to generate headers for
     * @param javahOutputFileName optional single output file name (javah -o)
     * @return a fully populated configuration
     * @throws MojoExecutionException never thrown here; kept for signature compatibility
     */
    private JavahConfiguration createProviderConfiguration( String[] classNames, String javahOutputFileName )
        throws MojoExecutionException
    {
        JavahConfiguration config = new JavahConfiguration();
        config.setWorkingDirectory( this.workingDirectory );
        config.setVerbose( this.javahVerbose );
        config.setOutputDirectory( this.javahOutputDirectory );
        config.setFileName( javahOutputFileName );
        config.setClassPaths( this.getJavahClassPath() );
        config.setUseEnvClasspath( useEnvClasspath );
        config.setClassNames( classNames );
        config.setJavahPath( this.javahPath );
        return config;
    }

    /**
     * Internal only for test harness purpose.
     *
     * @return the configuration built by the last javah run
     */
    protected JavahConfiguration getJavahConfiguration()
    {
        return this.config;
    }

    /**
     * Internal for unit test only.
     */
    protected MavenProject getProject()
    {
        return this.project;
    }
}
| |
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.javascript.jscomp.CompilerOptions.TracerMode;
import com.google.javascript.rhino.Node;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.zip.GZIPOutputStream;
/**
 * A PerformanceTracker collects statistics about the runtime of each pass, and
 * how much a pass impacts the size of the compiled output, before and after
 * gzip.
 *
 * TODO(moz): Make this GWT compatible.
 *
 * @author dimvar@google.com (Dimitris Vardoulakis)
 */
@GwtIncompatible("java.io.ByteArrayOutputStream")
public final class PerformanceTracker {
// Sentinel value kept in every size field while that size is not tracked.
private static final int DEFAULT_WHEN_SIZE_UNTRACKED = -1;
private final PrintStream printStream;
private final OutputStreamWriter output;
private final Node jsRoot;
private final boolean trackSize;
private final boolean trackGzSize;
// Keeps track of AST changes and computes code size estimation
// if there is any.
private final RecentChange codeChange = new RecentChange();
private int initCodeSize = DEFAULT_WHEN_SIZE_UNTRACKED;
private int initGzCodeSize = DEFAULT_WHEN_SIZE_UNTRACKED;
// Totals across all passes; computed lazily by calcTotalStats().
private int runtime = 0;
private int maxMem = 0;
private int runs = 0;
private int changes = 0;
private int loopRuns = 0;
private int loopChanges = 0;
// The following fields for tracking size changes are just estimates.
// They do not take into account preserved license blocks, newline padding,
// or pretty printing (if enabled), since they don't use CodePrinter.
// To get exact sizes, call compiler.toSource() for the final generated code.
private int codeSize = DEFAULT_WHEN_SIZE_UNTRACKED;
private int gzCodeSize = DEFAULT_WHEN_SIZE_UNTRACKED;
private int diff = 0;
private int gzDiff = 0;
// Stack of passes currently running (passes can nest).
private final Deque<Stats> currentPass = new ArrayDeque<>();
/** Summary stats by pass name. */
private final Map<String, Stats> summary = new HashMap<>();
// To share with the rest of the program
private ImmutableMap<String, Stats> summaryCopy;
/** Stats for each run of a compiler pass. */
private final List<Stats> log = new ArrayList<>();
// A null printStream falls back to System.out; the writer wraps it for the report.
PerformanceTracker(Node jsRoot, TracerMode mode, PrintStream printStream) {
this.jsRoot = jsRoot;
this.printStream = printStream == null ? System.out : printStream;
this.output = new OutputStreamWriter(this.printStream, UTF_8);
switch (mode) {
case TIMING_ONLY:
this.trackSize = false;
this.trackGzSize = false;
break;
case RAW_SIZE:
this.trackSize = true;
this.trackGzSize = false;
break;
case ALL:
this.trackSize = true;
this.trackGzSize = true;
break;
case OFF:
default:
throw new IllegalArgumentException(
"PerformanceTracker can't work without tracer data.");
}
}
CodeChangeHandler getCodeChangeHandler() {
return codeChange;
}
// Pushes a Stats record for the pass that is about to run.
void recordPassStart(String passName, boolean isOneTime) {
currentPass.push(new Stats(passName, isOneTime));
// In Compiler, toSource may be called after every pass X. We don't want it
// to reset the handler, because recordPassStop for pass X has not been
// called, so we are falsely logging that pass X didn't make changes.
if (!passName.equals("toSource")) {
codeChange.reset();
}
}
/**
 * Collects information about a pass P after P finishes running, eg, how much
 * time P took and what was its impact on code size.
 *
 * @param passName short name of the pass
 * @param runtime execution time in milliseconds
 */
void recordPassStop(String passName, long runtime) {
// Memory is sampled after the pass has finished running.
int allocMem = getAllocatedMegabytes();
Stats logStats = currentPass.pop();
Preconditions.checkState(passName.equals(logStats.pass));
// Populate log and summary
log.add(logStats);
Stats summaryStats = summary.get(passName);
if (summaryStats == null) {
summaryStats = new Stats(passName, logStats.isOneTime);
summary.put(passName, summaryStats);
}
// After parsing, initialize codeSize and gzCodeSize
if (passName.equals(Compiler.PARSING_PASS_NAME) && trackSize) {
CodeSizeEstimatePrinter estimatePrinter = new CodeSizeEstimatePrinter();
CodeGenerator.forCostEstimation(estimatePrinter).add(jsRoot);
initCodeSize = codeSize = estimatePrinter.calcSize();
logStats.size = summaryStats.size = initCodeSize;
if (this.trackGzSize) {
initGzCodeSize = gzCodeSize = estimatePrinter.calcZippedSize();
logStats.gzSize = summaryStats.gzSize = initGzCodeSize;
}
}
// Update fields that aren't related to code size
logStats.runtime = runtime;
logStats.allocMem = allocMem;
logStats.runs = 1;
summaryStats.runtime += runtime;
summaryStats.allocMem = Math.max(allocMem, summaryStats.allocMem);
summaryStats.runs += 1;
if (codeChange.hasCodeChanged()) {
logStats.changes = 1;
summaryStats.changes += 1;
}
// Update fields related to code size
// (only re-estimated when the pass actually changed the AST)
if (codeChange.hasCodeChanged() && trackSize) {
int newSize = 0;
CodeSizeEstimatePrinter estimatePrinter = new CodeSizeEstimatePrinter();
CodeGenerator.forCostEstimation(estimatePrinter).add(jsRoot);
if (trackSize) {
newSize = estimatePrinter.calcSize();
logStats.diff = codeSize - newSize;
summaryStats.diff += logStats.diff;
codeSize = summaryStats.size = logStats.size = newSize;
}
if (trackGzSize) {
newSize = estimatePrinter.calcZippedSize();
logStats.gzDiff = gzCodeSize - newSize;
summaryStats.gzDiff += logStats.gzDiff;
gzCodeSize = summaryStats.gzSize = logStats.gzSize = newSize;
}
}
}
/** Converts a byte count to whole megabytes (truncating). */
private int bytesToMB(long bytes) {
return (int) (bytes / (1024 * 1024));
}
/** Returns the currently used heap (total minus free) in MB. */
private int getAllocatedMegabytes() {
Runtime javaRuntime = Runtime.getRuntime();
return bytesToMB(javaRuntime.totalMemory() - javaRuntime.freeMemory());
}
public boolean tracksSize() {
return trackSize;
}
public boolean tracksGzSize() {
return trackGzSize;
}
public int getRuntime() {
calcTotalStats();
return runtime;
}
public int getSize() {
calcTotalStats();
return codeSize;
}
public int getGzSize() {
calcTotalStats();
return gzCodeSize;
}
@VisibleForTesting
int getChanges() {
calcTotalStats();
return changes;
}
@VisibleForTesting
int getLoopChanges() {
calcTotalStats();
return loopChanges;
}
@VisibleForTesting
int getRuns() {
calcTotalStats();
return runs;
}
@VisibleForTesting
int getLoopRuns() {
calcTotalStats();
return loopRuns;
}
public ImmutableMap<String, Stats> getStats() {
calcTotalStats();
return summaryCopy;
}
// Folds the per-pass summary into the class-level totals (runtime, runs,
// changes, diffs). Idempotent: summaryCopy doubles as the "already done" flag,
// so no further recordPassStop data is incorporated after the first call.
private void calcTotalStats() {
// This method only does work the first time it's called
if (summaryCopy != null) {
return;
}
summaryCopy = ImmutableMap.copyOf(summary);
for (Entry<String, Stats> entry : summary.entrySet()) {
Stats stats = entry.getValue();
runtime += stats.runtime;
maxMem = Math.max(maxMem, stats.allocMem);
runs += stats.runs;
changes += stats.changes;
if (!stats.isOneTime) {
loopRuns += stats.runs;
loopChanges += stats.changes;
}
diff += stats.diff;
gzDiff += stats.gzDiff;
}
// Invariant: initial size == total estimated reduction + final size.
Preconditions.checkState(!trackSize || initCodeSize == diff + codeSize);
Preconditions.checkState(!trackGzSize
|| initGzCodeSize == gzDiff + gzCodeSize);
}
/**
 * Prints a summary, which contains aggregate stats for all runs of each pass
 * and a log, which contains stats for each individual run.
 */
public void outputTracerReport() {
JvmMetrics.maybeWriteJvmMetrics(this.printStream, "verbose:pretty:all");
try {
calcTotalStats();
ArrayList<Entry<String, Stats>> statEntries = new ArrayList<>();
statEntries.addAll(summary.entrySet());
// Sort passes by ascending total runtime.
Collections.sort(
statEntries,
new Comparator<Entry<String, Stats>>() {
@Override
public int compare(Entry<String, Stats> e1, Entry<String, Stats> e2) {
return Long.compare(e1.getValue().runtime, e2.getValue().runtime);
}
});
this.output.write("Summary:\n"
+ "pass,runtime,allocMem,runs,changingRuns,reduction,gzReduction\n");
for (Entry<String, Stats> entry : statEntries) {
String key = entry.getKey();
Stats stats = entry.getValue();
this.output.write(String.format("%s,%d,%d,%d,%d,%d,%d\n", key, stats.runtime,
stats.allocMem, stats.runs, stats.changes, stats.diff, stats.gzDiff));
}
this.output.write("\nTOTAL:"
+ "\nRuntime(ms): " + runtime
+ "\nMax mem usage (measured after each pass)(MB): " + maxMem
+ "\n#Runs: " + runs
+ "\n#Changing runs: " + changes + "\n#Loopable runs: " + loopRuns
+ "\n#Changing loopable runs: " + loopChanges + "\nEstimated Reduction(bytes): " + diff
+ "\nEstimated GzReduction(bytes): " + gzDiff + "\nEstimated Size(bytes): " + codeSize
+ "\nEstimated GzSize(bytes): " + gzCodeSize + "\n\n");
this.output.write("Log:\n"
+ "pass,runtime,allocMem,codeChanged,reduction,gzReduction,size,gzSize\n");
for (Stats stats : log) {
this.output.write(String.format("%s,%d,%d,%b,%d,%d,%d,%d\n",
stats.pass, stats.runtime, stats.allocMem, stats.changes == 1,
stats.diff, stats.gzDiff, stats.size, stats.gzSize));
}
this.output.write("\n");
// this.output can be System.out, so don't close it to not lose subsequent
// error messages. Flush to ensure that you will see the tracer report.
this.output.flush();
} catch (IOException e) {
throw new RuntimeException("Failed to write statistics to output.", e);
}
}
/**
 * A Stats object contains statistics about a pass run, such as running time,
 * size changes, etc
 */
public static class Stats {
Stats(String pass, boolean iot) {
this.pass = pass;
this.isOneTime = iot;
}
public final String pass;
public final boolean isOneTime;
public long runtime = 0;
public int allocMem = 0;
public int runs = 0;
public int changes = 0;
public int diff = 0;
public int gzDiff = 0;
public int size;
public int gzSize;
}
/** An object to get a gzsize estimate; it doesn't generate code. */
private final class CodeSizeEstimatePrinter extends CodeConsumer {
private int size = 0;
private char lastChar = '\0';
private final ByteArrayOutputStream output = new ByteArrayOutputStream();
private final GZIPOutputStream stream;
private CodeSizeEstimatePrinter() {
try {
stream = new GZIPOutputStream(output);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// Size is counted in chars; the gzip stream sees the UTF-8 byte encoding.
@Override
void append(String str) {
int len = str.length();
if (len > 0) {
size += len;
lastChar = str.charAt(len - 1);
if (trackGzSize) {
try {
stream.write(str.getBytes(UTF_8));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
}
@Override
char getLastChar() {
return lastChar;
}
private int calcSize() {
return size;
}
// Called iff trackGzSize is true
// (finishes the stream, so it can only be called once per printer).
private int calcZippedSize() {
try {
stream.finish();
stream.close();
return output.size();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
}
| |
package com.fincatto.nfe200.classes.nota;
import java.math.BigDecimal;
import org.junit.Assert;
import org.junit.Test;
import com.fincatto.nfe200.classes.NFNotaInfoItemImpostoICMSModalidadeBaseCalulo;
import com.fincatto.nfe200.classes.NFNotaInfoItemModalidadeBCICMSST;
import com.fincatto.nfe200.classes.NFNotaSituacaoOperacionalSimplesNacional;
import com.fincatto.nfe200.classes.NFOrigem;
import com.fincatto.nfe200.classes.nota.NFNotaInfoItemImpostoICMSSN900;
public class NFNotaInfoItemImpostoICMSSN900Test {
// A 3-integer-digit percentage exceeds the field's format; the setter must throw.
@Test(expected = IllegalStateException.class)
public void naoDevePermitirAliquotaAplicavelCalculoCreditoSNComTamanhoInvalido() {
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("100"));
}
// A 3-integer-digit percentage exceeds the field's format; the setter must throw.
@Test(expected = IllegalStateException.class)
public void naoDevePermitirAliquotaImpostoICMSSTComTamanhoInvalido() {
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setPercentualAliquotaImpostoICMSST(new BigDecimal("100"));
}
// A 3-integer-digit percentage exceeds the field's format; the setter must throw.
@Test(expected = IllegalStateException.class)
public void naoDevePermitirPercentualAliquotaImpostoComTamanhoInvalido() {
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setPercentualAliquotaImposto(new BigDecimal("100"));
}
// A 3-integer-digit percentage exceeds the field's format; the setter must throw.
@Test(expected = IllegalStateException.class)
public void naoDevePermitirPercentualMargemValorAdicionadoICMSSTComTamanhoInvalido() {
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("100"));
}
// A 3-integer-digit percentage exceeds the field's format; the setter must throw.
@Test(expected = IllegalStateException.class)
public void naoDevePermitirPercentualReducaoBCComTamanhoInvalido() {
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setPercentualReducaoBC(new BigDecimal("100"));
}
// A 3-integer-digit percentage exceeds the field's format; the setter must throw.
@Test(expected = IllegalStateException.class)
public void naoDevePermitirPercentualReducaoBCICMSSTComTamanhoInvalido() {
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setPercentualReducaoBCICMSST(new BigDecimal("100"));
}
// A 13-integer-digit amount exceeds the field's format (valid tests use 999999999999.99).
@Test(expected = IllegalStateException.class)
public void naoDevePermitirValorBCICMSComTamanhoInvalido() {
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setValorBCICMS(new BigDecimal("1000000000000"));
}
// A 13-integer-digit amount exceeds the field's format (valid tests use 999999999999.99).
@Test(expected = IllegalStateException.class)
public void naoDevePermitirValorBCICMSSTComTamanhoInvalido() {
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setValorBCICMSST(new BigDecimal("1000000000000"));
}
// A 13-integer-digit amount exceeds the field's format (valid tests use 999999999999.99).
@Test(expected = IllegalStateException.class)
public void naoDevePermitirValorCreditoICMSSNComTamanhoInvalido() {
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setValorCreditoICMSSN(new BigDecimal("1000000000000"));
}
// A 13-integer-digit amount exceeds the field's format (valid tests use 999999999999.99).
@Test(expected = IllegalStateException.class)
public void naoDevePermitirValorICMSComTamanhoInvalido() {
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setValorICMS(new BigDecimal("1000000000000"));
}
// A 13-integer-digit amount exceeds the field's format (valid tests use 999999999999.99).
@Test(expected = IllegalStateException.class)
public void naoDevePermitirValorICMSSTComTamanhoInvalido() {
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setValorICMSST(new BigDecimal("1000000000000"));
}
// Populates every field EXCEPT AliquotaAplicavelCalculoCreditoSN; toString() is then
// expected to throw IllegalStateException — presumably the missing mandatory field is
// validated during serialization (confirm in NFNotaInfoItemImpostoICMSSN900).
@Test(expected = IllegalStateException.class)
public void naoDevePermitirAliquotaAplicavelCalculoCreditoSNNulo() {
final NFNotaInfoItemImpostoICMSSN900 icms900 = new NFNotaInfoItemImpostoICMSSN900();
icms900.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
icms900.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
icms900.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
icms900.setOrigem(NFOrigem.NACIONAL);
icms900.setPercentualAliquotaImposto(new BigDecimal("99.99"));
icms900.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
icms900.setPercentualReducaoBC(new BigDecimal("99.99"));
icms900.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
icms900.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
icms900.setValorBCICMS(new BigDecimal("999999999999.99"));
icms900.setValorBCICMSST(new BigDecimal("999999999999.99"));
icms900.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
icms900.setValorICMS(new BigDecimal("999999999999.99"));
icms900.setValorICMSST(new BigDecimal("999999999999.99"));
icms900.toString();
}
// Populates every field EXCEPT PercentualAliquotaImpostoICMSST; toString() is then
// expected to throw IllegalStateException — presumably the missing mandatory field is
// validated during serialization (confirm in NFNotaInfoItemImpostoICMSSN900).
@Test(expected = IllegalStateException.class)
public void naoDevePermitirAliquotaImpostoICMSSTNulo() {
final NFNotaInfoItemImpostoICMSSN900 icms900 = new NFNotaInfoItemImpostoICMSSN900();
icms900.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
icms900.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
icms900.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
icms900.setOrigem(NFOrigem.NACIONAL);
icms900.setPercentualAliquotaImposto(new BigDecimal("99.99"));
icms900.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
icms900.setPercentualReducaoBC(new BigDecimal("99.99"));
icms900.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
icms900.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
icms900.setValorBCICMS(new BigDecimal("999999999999.99"));
icms900.setValorBCICMSST(new BigDecimal("999999999999.99"));
icms900.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
icms900.setValorICMS(new BigDecimal("999999999999.99"));
icms900.setValorICMSST(new BigDecimal("999999999999.99"));
icms900.toString();
}
// Populates every field EXCEPT ModalidadeBCICMSST; toString() is then expected to
// throw IllegalStateException — presumably the missing mandatory field is validated
// during serialization (confirm in NFNotaInfoItemImpostoICMSSN900).
@Test(expected = IllegalStateException.class)
public void naoDeveModalidadeBCICMSSTNulo() {
final NFNotaInfoItemImpostoICMSSN900 icms900 = new NFNotaInfoItemImpostoICMSSN900();
icms900.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
icms900.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
icms900.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
icms900.setOrigem(NFOrigem.NACIONAL);
icms900.setPercentualAliquotaImposto(new BigDecimal("99.99"));
icms900.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
icms900.setPercentualReducaoBC(new BigDecimal("99.99"));
icms900.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
icms900.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
icms900.setValorBCICMS(new BigDecimal("999999999999.99"));
icms900.setValorBCICMSST(new BigDecimal("999999999999.99"));
icms900.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
icms900.setValorICMS(new BigDecimal("999999999999.99"));
icms900.setValorICMSST(new BigDecimal("999999999999.99"));
icms900.toString();
}
// Populates every field EXCEPT ModalidadeDeterminacaoBCICMS; toString() is then
// expected to throw IllegalStateException — presumably the missing mandatory field is
// validated during serialization (confirm in NFNotaInfoItemImpostoICMSSN900).
@Test(expected = IllegalStateException.class)
public void naoDevePermitirModalidadeDeterminacaoBCICMSNulo() {
final NFNotaInfoItemImpostoICMSSN900 icms900 = new NFNotaInfoItemImpostoICMSSN900();
icms900.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
icms900.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
icms900.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
icms900.setOrigem(NFOrigem.NACIONAL);
icms900.setPercentualAliquotaImposto(new BigDecimal("99.99"));
icms900.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
icms900.setPercentualReducaoBC(new BigDecimal("99.99"));
icms900.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
icms900.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
icms900.setValorBCICMS(new BigDecimal("999999999999.99"));
icms900.setValorBCICMSST(new BigDecimal("999999999999.99"));
icms900.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
icms900.setValorICMS(new BigDecimal("999999999999.99"));
icms900.setValorICMSST(new BigDecimal("999999999999.99"));
icms900.toString();
}
// Populates every field EXCEPT Origem; toString() is then expected to throw
// IllegalStateException — presumably the missing mandatory field is validated
// during serialization (confirm in NFNotaInfoItemImpostoICMSSN900).
@Test(expected = IllegalStateException.class)
public void naoDevePermitirOrigemNulo() {
final NFNotaInfoItemImpostoICMSSN900 icms900 = new NFNotaInfoItemImpostoICMSSN900();
icms900.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
icms900.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
icms900.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
icms900.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
icms900.setPercentualAliquotaImposto(new BigDecimal("99.99"));
icms900.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
icms900.setPercentualReducaoBC(new BigDecimal("99.99"));
icms900.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
icms900.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
icms900.setValorBCICMS(new BigDecimal("999999999999.99"));
icms900.setValorBCICMSST(new BigDecimal("999999999999.99"));
icms900.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
icms900.setValorICMS(new BigDecimal("999999999999.99"));
icms900.setValorICMSST(new BigDecimal("999999999999.99"));
icms900.toString();
}
// Populates every field EXCEPT PercentualAliquotaImposto; toString() is then
// expected to throw IllegalStateException — presumably the missing mandatory field is
// validated during serialization (confirm in NFNotaInfoItemImpostoICMSSN900).
@Test(expected = IllegalStateException.class)
public void naoDevePermitirPercentualAliquotaImpostoNulo() {
final NFNotaInfoItemImpostoICMSSN900 icms900 = new NFNotaInfoItemImpostoICMSSN900();
icms900.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
icms900.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
icms900.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
icms900.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
icms900.setOrigem(NFOrigem.NACIONAL);
icms900.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
icms900.setPercentualReducaoBC(new BigDecimal("99.99"));
icms900.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
icms900.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
icms900.setValorBCICMS(new BigDecimal("999999999999.99"));
icms900.setValorBCICMSST(new BigDecimal("999999999999.99"));
icms900.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
icms900.setValorICMS(new BigDecimal("999999999999.99"));
icms900.setValorICMSST(new BigDecimal("999999999999.99"));
icms900.toString();
}
// Leaves PercentualMargemValorAdicionadoICMSST unset; no exception is expected,
// i.e. this field is optional when serializing via toString().
@Test
public void devePermitirPercentualMargemValorAdicionadoICMSSTNulo() {
final NFNotaInfoItemImpostoICMSSN900 icms900 = new NFNotaInfoItemImpostoICMSSN900();
icms900.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
icms900.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
icms900.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
icms900.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
icms900.setOrigem(NFOrigem.NACIONAL);
icms900.setPercentualAliquotaImposto(new BigDecimal("99.99"));
icms900.setPercentualReducaoBC(new BigDecimal("99.99"));
icms900.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
icms900.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
icms900.setValorBCICMS(new BigDecimal("999999999999.99"));
icms900.setValorBCICMSST(new BigDecimal("999999999999.99"));
icms900.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
icms900.setValorICMS(new BigDecimal("999999999999.99"));
icms900.setValorICMSST(new BigDecimal("999999999999.99"));
icms900.toString();
}
// Leaves PercentualReducaoBC unset; no exception is expected,
// i.e. this field is optional when serializing via toString().
@Test
public void devePermitirPercentualReducaoBCNulo() {
final NFNotaInfoItemImpostoICMSSN900 icms900 = new NFNotaInfoItemImpostoICMSSN900();
icms900.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
icms900.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
icms900.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
icms900.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
icms900.setOrigem(NFOrigem.NACIONAL);
icms900.setPercentualAliquotaImposto(new BigDecimal("99.99"));
icms900.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
icms900.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
icms900.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
icms900.setValorBCICMS(new BigDecimal("999999999999.99"));
icms900.setValorBCICMSST(new BigDecimal("999999999999.99"));
icms900.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
icms900.setValorICMS(new BigDecimal("999999999999.99"));
icms900.setValorICMSST(new BigDecimal("999999999999.99"));
icms900.toString();
}
@Test
public void devePermitirPercentualReducaoBCICMSSTNulo() {
    // Serialization must succeed even when percentualReducaoBCICMSST was never set.
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setOrigem(NFOrigem.NACIONAL);
    imposto.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
    imposto.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
    imposto.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
    imposto.setPercentualAliquotaImposto(new BigDecimal("99.99"));
    imposto.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBC(new BigDecimal("99.99"));
    imposto.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
    imposto.setValorBCICMS(new BigDecimal("999999999999.99"));
    imposto.setValorBCICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorICMS(new BigDecimal("999999999999.99"));
    imposto.setValorICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
    imposto.toString();
}
@Test(expected = IllegalStateException.class)
public void naoDevePermitirSituacaoOperacaoSNNulo() {
    // Serializing without situacaoOperacaoSN must fail with IllegalStateException.
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setOrigem(NFOrigem.NACIONAL);
    imposto.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
    imposto.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
    imposto.setPercentualAliquotaImposto(new BigDecimal("99.99"));
    imposto.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBC(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
    imposto.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
    imposto.setValorBCICMS(new BigDecimal("999999999999.99"));
    imposto.setValorBCICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorICMS(new BigDecimal("999999999999.99"));
    imposto.setValorICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
    imposto.toString();
}
@Test(expected = IllegalStateException.class)
public void naoDevePermitirValorBCICMSNulo() {
    // Serializing without valorBCICMS must fail with IllegalStateException.
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setOrigem(NFOrigem.NACIONAL);
    imposto.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
    imposto.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
    imposto.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
    imposto.setPercentualAliquotaImposto(new BigDecimal("99.99"));
    imposto.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBC(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
    imposto.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
    imposto.setValorBCICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorICMS(new BigDecimal("999999999999.99"));
    imposto.setValorICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
    imposto.toString();
}
@Test(expected = IllegalStateException.class)
public void naoDevePermitirValorBCICMSTNulo() {
    // Serializing without valorBCICMSST must fail with IllegalStateException.
    // NOTE(review): this test omits setValorBCICMSST(...) and is therefore an exact
    // duplicate of naoDevePermitirValorBCICMSSTNulo below. The method name
    // ("ValorBCICMST") suggests a different field may have been intended -- confirm
    // the original intent before removing or repurposing this test.
    final NFNotaInfoItemImpostoICMSSN900 icms900 = new NFNotaInfoItemImpostoICMSSN900();
    icms900.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
    icms900.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
    icms900.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
    icms900.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
    icms900.setOrigem(NFOrigem.NACIONAL);
    icms900.setPercentualAliquotaImposto(new BigDecimal("99.99"));
    icms900.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
    icms900.setPercentualReducaoBC(new BigDecimal("99.99"));
    icms900.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
    icms900.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
    icms900.setValorBCICMS(new BigDecimal("999999999999.99"));
    icms900.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
    icms900.setValorICMS(new BigDecimal("999999999999.99"));
    icms900.setValorICMSST(new BigDecimal("999999999999.99"));
    icms900.toString();
}
@Test(expected = IllegalStateException.class)
public void naoDevePermitirValorBCICMSSTNulo() {
    // Serializing without valorBCICMSST must fail with IllegalStateException.
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setOrigem(NFOrigem.NACIONAL);
    imposto.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
    imposto.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
    imposto.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
    imposto.setPercentualAliquotaImposto(new BigDecimal("99.99"));
    imposto.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBC(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
    imposto.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
    imposto.setValorBCICMS(new BigDecimal("999999999999.99"));
    imposto.setValorICMS(new BigDecimal("999999999999.99"));
    imposto.setValorICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
    imposto.toString();
}
@Test(expected = IllegalStateException.class)
public void naoDevePermitirValorCreditoICMSSNNulo() {
    // Serializing without valorCreditoICMSSN must fail with IllegalStateException.
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setOrigem(NFOrigem.NACIONAL);
    imposto.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
    imposto.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
    imposto.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
    imposto.setPercentualAliquotaImposto(new BigDecimal("99.99"));
    imposto.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBC(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
    imposto.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
    imposto.setValorBCICMS(new BigDecimal("999999999999.99"));
    imposto.setValorBCICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorICMS(new BigDecimal("999999999999.99"));
    imposto.setValorICMSST(new BigDecimal("999999999999.99"));
    imposto.toString();
}
@Test(expected = IllegalStateException.class)
public void naoDevePermitirValorICMSNulo() {
    // Serializing without valorICMS must fail with IllegalStateException.
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setOrigem(NFOrigem.NACIONAL);
    imposto.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
    imposto.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
    imposto.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
    imposto.setPercentualAliquotaImposto(new BigDecimal("99.99"));
    imposto.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBC(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
    imposto.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
    imposto.setValorBCICMS(new BigDecimal("999999999999.99"));
    imposto.setValorBCICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
    imposto.toString();
}
@Test(expected = IllegalStateException.class)
public void naoDevePermitirValorICMSSTNulo() {
    // Serializing without valorICMSST must fail with IllegalStateException.
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setOrigem(NFOrigem.NACIONAL);
    imposto.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
    imposto.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
    imposto.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
    imposto.setPercentualAliquotaImposto(new BigDecimal("99.99"));
    imposto.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBC(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
    imposto.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
    imposto.setValorBCICMS(new BigDecimal("999999999999.99"));
    imposto.setValorBCICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorICMS(new BigDecimal("999999999999.99"));
    imposto.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
    imposto.toString();
}
@Test
public void deveGerarXMLDeAcordoComOPadraoEstabelecido() {
    // A fully populated ICMSSN900 group must serialize exactly to the reference XML.
    final NFNotaInfoItemImpostoICMSSN900 imposto = new NFNotaInfoItemImpostoICMSSN900();
    imposto.setOrigem(NFOrigem.NACIONAL);
    imposto.setSituacaoOperacaoSN(NFNotaSituacaoOperacionalSimplesNacional.IMUNE);
    imposto.setModalidadeDeterminacaoBCICMS(NFNotaInfoItemImpostoICMSModalidadeBaseCalulo.MVA);
    imposto.setModalidadeBCICMSST(NFNotaInfoItemModalidadeBCICMSST.LISTA_NEGATIVA);
    imposto.setPercentualAliquotaImposto(new BigDecimal("99.99"));
    imposto.setPercentualAliquotaImpostoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualMargemValorAdicionadoICMSST(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBC(new BigDecimal("99.99"));
    imposto.setPercentualReducaoBCICMSST(new BigDecimal("99.99"));
    imposto.setAliquotaAplicavelCalculoCreditoSN(new BigDecimal("99.99"));
    imposto.setValorBCICMS(new BigDecimal("999999999999.99"));
    imposto.setValorBCICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorICMS(new BigDecimal("999999999999.99"));
    imposto.setValorICMSST(new BigDecimal("999999999999.99"));
    imposto.setValorCreditoICMSSN(new BigDecimal("999999999999.99"));
    final String xmlEsperado = "<NFNotaInfoItemImpostoICMSSN900><orig>0</orig><CSOSN>300</CSOSN><modBC>0</modBC><vBC>999999999999.99</vBC><pRedBC>99.99</pRedBC><pICMS>99.99</pICMS><vICMS>999999999999.99</vICMS><modBCST>1</modBCST><pMVAST>99.99</pMVAST><pRedBCST>99.99</pRedBCST><vBCST>999999999999.99</vBCST><pICMSST>99.99</pICMSST><vICMSST>999999999999.99</vICMSST><pCredSN>99.99</pCredSN><vCredICMSSN>999999999999.99</vCredICMSSN></NFNotaInfoItemImpostoICMSSN900>";
    Assert.assertEquals(xmlEsperado, imposto.toString());
}
}
| |
/**
* Copyright 2012 Tobias Gierke <tobias.gierke@code-sourcery.de>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.codesourcery.jasm16.lexer;
import java.util.*;
import org.apache.commons.lang.StringUtils;
import de.codesourcery.jasm16.OpCode;
import de.codesourcery.jasm16.exceptions.EOFException;
import de.codesourcery.jasm16.exceptions.ParseException;
import de.codesourcery.jasm16.parser.Operator;
import de.codesourcery.jasm16.scanner.IScanner;
import de.codesourcery.jasm16.utils.NumberLiteralHelper;
/**
* Default {@link ILexer} implementation.
*
* @author tobias.gierke@code-sourcery.de
*/
public final class Lexer implements ILexer {

    // Character source this lexer tokenizes.
    private final IScanner scanner;
    // Scratch buffer reused while accumulating the characters of the token being parsed.
    private final StringBuilder buffer = new StringBuilder();
    // Currently enabled lexer options (see setLexerOption()).
    private final Set<LexerOption> options = new HashSet<LexerOption>();
    // Kept in sync (inverted) with LexerOption.CASE_INSENSITIVE_OPCODES by setLexerOption().
    private boolean caseSensitiveOpCodes = true;

    // internal state
    // Tokens already parsed but not yet consumed; acts as a look-ahead queue for peek()/read().
    private final List<IToken> currentTokens=new ArrayList<IToken>();
    // Saved lexer states pushed by mark() and restored by reset().
    private final Stack<State> marks = new Stack<State>();
    private final ParseOffset parseOffset;

    /**
     * Tracks the lexer's logical position: a base offset that is added to the
     * scanner's index plus the current line number and the offset where that
     * line starts.
     */
    public static final class ParseOffset
    {
        // offset relative to actual scanner offset, used
        // when expanding macro invocations
        private int baseOffset;
        private int currentLineNumber;
        private int currentLineStartOffset;

        // Starts at base offset 0, line number 1, line start offset 0.
        public ParseOffset()
        {
            this(0,1,0);
        }

        public ParseOffset(int baseOffset, int currentLineNumber,int currentLineStartOffset)
        {
            this.baseOffset = baseOffset;
            this.currentLineNumber = currentLineNumber;
            this.currentLineStartOffset = currentLineStartOffset;
        }

        // Copy constructor.
        public ParseOffset(ParseOffset offset)
        {
            this.baseOffset = offset.baseOffset;
            this.currentLineNumber = offset.currentLineNumber;
            this.currentLineStartOffset = offset.currentLineStartOffset;
        }

        @Override
        public String toString() {
            return "ParseOffset[ base_offset="+baseOffset+", line_nr="+currentLineNumber+",lineStartingOffset="+currentLineStartOffset+"]";
        }

        public int baseOffset() { return baseOffset; }

        public int currentLineNumber() { return currentLineNumber;}

        public int currentLineStartOffset() { return currentLineStartOffset; }

        // Overwrites all fields of this instance with those of the given offset.
        public void apply(ParseOffset offset) {
            this.baseOffset = offset.baseOffset;
            this.currentLineNumber = offset.currentLineNumber;
            this.currentLineStartOffset = offset.currentLineStartOffset;
        }

        // Advances to the next line, remembering where the new line starts.
        public void newLine(int newLineStartOffset) {
            this.currentLineNumber++;
            this.currentLineStartOffset = newLineStartOffset;
        }
    }

    /**
     * Immutable snapshot of the complete lexer state (pending tokens, scanner
     * position, parse offset and options), used to implement mark()/reset().
     */
    protected final class State
    {
        private final List<IToken> markedTokens = new ArrayList<IToken>();
        private final int scannerOffset;
        private final ParseOffset offset;
        private final Set<LexerOption> options;

        // Captures the enclosing lexer's current state.
        protected State()
        {
            this.markedTokens.addAll( Lexer.this.currentTokens );
            this.scannerOffset = Lexer.this.scanner.currentParseIndex();
            this.offset = new ParseOffset( Lexer.this.parseOffset );
            this.options = new HashSet<>( Lexer.this.options );
        }

        // Restores the enclosing lexer to this snapshot.
        public void apply()
        {
            Lexer.this.scanner.setCurrentParseIndex( this.scannerOffset );
            Lexer.this.currentTokens.clear();
            Lexer.this.currentTokens.addAll( this.markedTokens );
            Lexer.this.parseOffset.apply( this.offset );
            Lexer.this.options.clear();
            Lexer.this.options.addAll( this.options );
        }
    }

    public Lexer(IScanner scanner) {
        this(scanner,new ParseOffset());
    }

    public Lexer(IScanner scanner,ParseOffset offset) {
        this.scanner = scanner;
        this.parseOffset = offset;
    }

    @Override
    public void mark()
    {
        marks.push( new State() );
    }

    @Override
    public void clearMark() {
        if ( marks.isEmpty() ) {
            throw new IllegalStateException("Must call mark() first");
        }
        marks.pop();
    }

    @Override
    public void reset() throws IllegalStateException
    {
        if ( marks.isEmpty() ) {
            throw new IllegalStateException("Must call mark() first");
        }
        // TODO: Maybe should be pop() here ???
        marks.peek().apply();
    }

    /**
     * Reads characters from the scanner until at least one complete token has
     * been appended to {@link #currentTokens}, or EOF is reached.
     *
     * Leading whitespace is emitted as a single WHITESPACE token; afterwards
     * characters are accumulated in {@link #buffer} until a separator/special
     * character terminates the run. The buffered text is always flushed via
     * handleString() before the separator token itself is emitted.
     */
    private void parseNextToken()
    {
        if ( scanner.eof() ) {
            return;
        }

        // clear buffer
        buffer.setLength(0);

        // skip whitespace
        int startIndex = relativeParseIndex();
        while ( ! scanner.eof() && isWhitespace( scanner.peek() ) )
        {
            buffer.append( scanner.read() );
        }
        if ( buffer.length() > 0 ) {
            currentTokens.add( new Token( TokenType.WHITESPACE , buffer.toString(), startIndex ) );
        }

        if ( scanner.eof() ) {
            return;
        }

        startIndex = relativeParseIndex();
        char currentChar = scanner.peek();
        buffer.setLength( 0 );

        while ( ! scanner.eof() )
        {
            currentChar = scanner.peek();
            switch( currentChar )
            {
                case ' ': // whitespace
                case '\t': // whitespace
                    handleString( buffer.toString() , startIndex );
                    return;
                case ';': // single-line comment
                    handleString( buffer.toString() , startIndex );
                    startIndex = relativeParseIndex();
                    scanner.read();
                    currentTokens.add( new Token(TokenType.SINGLE_LINE_COMMENT, ";" , relativeParseIndex()-1 ) );
                    return;
                case '\\':
                    handleString( buffer.toString() , startIndex );
                    startIndex = relativeParseIndex();
                    scanner.read();
                    currentTokens.add( new Token(TokenType.STRING_ESCAPE, "\\", relativeParseIndex()-1 ) );
                    return;
                case '\'':
                case '"': // string delimiter
                    handleString( buffer.toString() , startIndex );
                    startIndex = relativeParseIndex();
                    scanner.read();
                    currentTokens.add( new Token(TokenType.STRING_DELIMITER, Character.toString( currentChar ) , relativeParseIndex()-1 ) );
                    return;
                case '\n': // parse unix-style newline
                    handleString( buffer.toString() , startIndex );
                    startIndex = relativeParseIndex();
                    scanner.read();
                    currentTokens.add( new Token(TokenType.EOL, "\n" , relativeParseIndex()-1 ) );
                    return;
                case '\r': // parse DOS-style newline
                    buffer.append( scanner.read() );
                    if ( ! scanner.eof() && scanner.peek() == '\n' )
                    {
                        // flush the buffered text WITHOUT the trailing '\r' that was just appended
                        handleString( buffer.toString() , buffer.length()-1 , startIndex );
                        scanner.read();
                        currentTokens.add( new Token(TokenType.EOL, "\r\n" , relativeParseIndex()-2 ) );
                        return;
                    }
                    // lone '\r' not followed by '\n': keep it in the buffer and continue scanning
                    continue;
                case ':':
                    handleString( buffer.toString() , startIndex );
                    scanner.read();
                    currentTokens.add( new Token(TokenType.COLON , ":" , relativeParseIndex()-1 ) );
                    return;
                case '(':
                    handleString( buffer.toString() , startIndex );
                    scanner.read();
                    currentTokens.add( new Token(TokenType.PARENS_OPEN , "(" , relativeParseIndex()-1) );
                    return;
                case ')':
                    handleString( buffer.toString() , startIndex );
                    scanner.read();
                    currentTokens.add( new Token(TokenType.PARENS_CLOSE, ")" , relativeParseIndex()-1 ) );
                    return;
                case '[':
                    handleString( buffer.toString() , startIndex );
                    scanner.read();
                    currentTokens.add( new Token(TokenType.ANGLE_BRACKET_OPEN , "[" , relativeParseIndex()-1) );
                    return;
                case ']':
                    handleString( buffer.toString() , startIndex );
                    scanner.read();
                    currentTokens.add( new Token(TokenType.ANGLE_BRACKET_CLOSE, "]" , relativeParseIndex()-1 ) );
                    return;
                case ',':
                    handleString( buffer.toString() , startIndex );
                    scanner.read();
                    currentTokens.add( new Token(TokenType.COMMA , "," , relativeParseIndex()-1 ) );
                    return;
            }

            if ( Operator.isOperatorPrefix( currentChar ) )
            {
                parseOperator( startIndex );
                return;
            }

            // ...keep the rest...some unrecognized character sequence
            buffer.append( scanner.read() );
        }
        // EOF reached: flush whatever is still buffered.
        handleString( buffer.toString() , startIndex );
    }

    /**
     * Returns the scanner's current parse offset plus the parsing base offset.
     * @return
     */
    private int relativeParseIndex() {
        return this.parseOffset.baseOffset+scanner.currentParseIndex();
    }

    /**
     * Consumes an operator from the scanner, greedily extending the match as
     * long as the accumulated text is still a prefix of some known operator,
     * and appends the resulting OPERATOR token.
     *
     * @param lastStartIndex start index of any text buffered before the operator
     *        (flushed via handleString() first)
     */
    private void parseOperator(int lastStartIndex)
    {
        handleString( buffer.toString() , lastStartIndex );
        buffer.setLength( 0 );

        // consume first character
        final int startIndex = relativeParseIndex();
        buffer.append( scanner.read() );

        List<Operator> possibleOperators = Operator.getPossibleOperatorsByPrefix( buffer.toString() );
        // Keep reading while the match is still ambiguous, or unique but not yet a valid operator.
        while ( ! scanner.eof() && ( possibleOperators.size() > 1 || ( possibleOperators.size() == 1 && ! Operator.isValidOperator( buffer.toString() ) ) ) )
        {
            char peek = scanner.peek();
            if ( Operator.isOperatorPrefix( buffer.toString()+peek ) )
            {
                buffer.append( scanner.read() );
                possibleOperators = Operator.getPossibleOperatorsByPrefix( buffer.toString() );
            } else {
                break;
            }
        }

        final String operator;
        if ( possibleOperators.size() > 1 ) {
            // still ambiguous: pick the operator with the longest literal match
            operator = Operator.pickOperatorWithLongestMatch( buffer.toString() ).getLiteral();
        } else {
            operator = buffer.toString();
        }
        currentTokens.add( new Token( TokenType.OPERATOR , operator , startIndex ) );
    }

    // Convenience overload: classify the whole string (see three-arg overload below).
    private void handleString(String buffer, int startIndex)
    {
        handleString(buffer,buffer.length() , startIndex );
    }

    /**
     * Classifies the first {@code length} characters of {@code s} and appends
     * the corresponding token(s): instruction, number literal, one of the
     * keyword/directive tokens, DOT-separated CHARACTERS runs, or a plain
     * CHARACTERS token. Does nothing for an empty input.
     */
    private void handleString(String s, int length , int startIndex)
    {
        /* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
         * MAKE SURE TO ADJUST isKeyword(String) when changing keywords here
         * !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
         */

        /*
         * Note that all comparisons here are ordered by
         * their probabilities (more likely checks come first).
         */
        if ( s.length() == 0 || length <= 0 ) {
            return;
        }

        // NOTE: this local intentionally shadows the field of the same name.
        final String buffer = s.substring(0,length);

        OpCode opCode = caseSensitiveOpCodes ? OpCode.fromIdentifier( buffer ) : OpCode.fromIdentifier( buffer.toUpperCase() );
        if ( opCode != null ) {
            currentTokens.add( new Token( TokenType.INSTRUCTION , buffer , startIndex ) );
            return;
        }

        if ( NumberLiteralHelper.isNumberLiteral( buffer ) ) {
            currentTokens.add( new Token(TokenType.NUMBER_LITERAL , buffer , startIndex ) );
            return;
        }

        if ( "push".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.PUSH , buffer , startIndex ) );
            return ;
        }

        if ( "pop".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.POP , buffer , startIndex ) );
            return ;
        }

        if ( ".word".equalsIgnoreCase( buffer ) || "dat".equalsIgnoreCase( buffer ) || ".dat".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.INITIALIZED_MEMORY_WORD , buffer , startIndex ) );
            return ;
        }

        if ( ".equ".equalsIgnoreCase( buffer ) || "#define".equalsIgnoreCase(buffer) ) {
            currentTokens.add( new Token(TokenType.EQUATION , buffer , startIndex ) );
            return ;
        }

        if ( "pick".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.PICK , buffer , startIndex ) );
            return ;
        }

        if ( "peek".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.PEEK , buffer , startIndex ) );
            return ;
        }

        if ( ".byte".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.INITIALIZED_MEMORY_BYTE , buffer , startIndex ) );
            return ;
        }

        if ( "pack".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.INITIALIZED_MEMORY_PACK , buffer , startIndex ) );
            return ;
        }

        if ( "reserve".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.UNINITIALIZED_MEMORY_WORDS , buffer , startIndex ) );
            return ;
        }

        if ( ".bss".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.UNINITIALIZED_MEMORY_BYTES , buffer , startIndex ) );
            return ;
        }

        if ( "#include".equals( buffer ) || ".include".equals( buffer ) || "include".equalsIgnoreCase( buffer) || ".incsource".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.INCLUDE_SOURCE, buffer , startIndex ) );
            return ;
        }

        if ( ".incbin".equalsIgnoreCase( buffer ) || "incbin".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.INCLUDE_BINARY , buffer , startIndex ) );
            return ;
        }

        if ( "org".equalsIgnoreCase( buffer ) || ".org".equalsIgnoreCase( buffer ) || ".origin".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.ORIGIN , buffer , startIndex ) );
            return ;
        }

        if ( ".macro".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.START_MACRO , buffer , startIndex ) );
            return ;
        }

        if ( ".endmacro".equalsIgnoreCase( buffer ) ) {
            currentTokens.add( new Token(TokenType.END_MACRO , buffer , startIndex ) );
            return ;
        }

        if ( buffer.contains("." ) ) {
            // Split on '.' into alternating CHARACTERS and DOT tokens,
            // preserving each segment's absolute start offset.
            int idx = startIndex;
            int lastIndex = startIndex;
            final StringBuilder tmp = new StringBuilder();
            final int len = buffer.length();
            for ( int i = 0 ; i <len ; i++ , idx++)
            {
                final char c = buffer.charAt( i );
                if ( c == '.' ) {
                    if ( tmp.length() > 0 ) {
                        currentTokens.add( new Token(TokenType.CHARACTERS, tmp.toString() , lastIndex ) );
                        tmp.setLength(0);
                    }
                    currentTokens.add( new Token(TokenType.DOT, "." , idx ) );
                    lastIndex = idx+1;
                    continue;
                }
                tmp.append( c );
            }
            // flush trailing segment after the last '.'
            if ( tmp.length() > 0 ) {
                currentTokens.add( new Token(TokenType.CHARACTERS, tmp.toString() , lastIndex ) );
            }
            return;
        }

        // Fallback: plain character sequence.
        currentTokens.add( new Token( TokenType.CHARACTERS , buffer , startIndex ) );
    }

    /**
     * Returns whether a given string matches a keyword (case-insensitive).
     *
     * @param buffer string to test; blank/NULL yields {@code false}
     * @return {@code true} if the string is an opcode or one of the keywords
     *         recognized by handleString()
     */
    public boolean isKeyword(String buffer)
    {
        if ( StringUtils.isBlank(buffer) ) {
            return false;
        }
        if ( OpCode.fromIdentifier( buffer ) != null ) {
            return true;
        }
        if ( "push".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( "pop".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( ".word".equalsIgnoreCase( buffer ) || "dat".equalsIgnoreCase( buffer ) || ".dat".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( ".equ".equalsIgnoreCase( buffer ) || "#define".equalsIgnoreCase(buffer) ) {
            return true;
        }
        if ( "pick".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( "peek".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( ".byte".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( "pack".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( "reserve".equalsIgnoreCase( buffer ) ) {
            return true ;
        }
        if ( ".bss".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( "#include".equals( buffer ) || ".include".equals( buffer ) || "include".equalsIgnoreCase( buffer) || ".incsource".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( ".incbin".equalsIgnoreCase( buffer ) || "incbin".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( "org".equalsIgnoreCase( buffer ) || ".org".equalsIgnoreCase( buffer ) || ".origin".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( ".macro".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        if ( ".endmacro".equalsIgnoreCase( buffer ) ) {
            return true;
        }
        return false;
    }

    // Only space and TAB count as intra-line whitespace; newlines are handled separately.
    private static boolean isWhitespace(char c ) {
        return c == ' ' || c == '\t';
    }

    /**
     * Returns the next unconsumed token, lazily invoking parseNextToken() when
     * the queue is empty; NULL signals EOF.
     */
    private IToken currentToken()
    {
        if ( currentTokens.isEmpty() )
        {
            parseNextToken();
            if ( currentTokens.isEmpty() ) {
                return null;
            }
            return currentTokens.get(0);
        }
        return currentTokens.get(0);
    }

    @Override
    public boolean eof()
    {
        return currentToken() == null;
    }

    @Override
    public IToken peek() throws EOFException
    {
        if ( eof() ) {
            throw new EOFException("Premature end of file",currentParseIndex() );
        }
        return currentToken();
    }

    @Override
    public boolean peek(TokenType t) throws EOFException
    {
        if ( eof() ) {
            throw new EOFException("Premature end of file",currentParseIndex() );
        }
        return currentToken().hasType(t);
    }

    @Override
    public IToken read() throws EOFException
    {
        if ( eof() ) {
            throw new EOFException("Premature end of file",currentParseIndex() );
        }
        final IToken result = currentToken();
        currentTokens.remove( 0 );
        // consuming an EOL token advances the line-number bookkeeping
        if ( result.isEOL() ) {
            this.parseOffset.newLine( result.getStartingOffset()+1);
        }
        return result;
    }

    @Override
    public int currentParseIndex()
    {
        // prefer the pending token's start offset; fall back to the scanner position at EOF
        final IToken tok = currentToken();
        return tok != null ? tok.getStartingOffset() : relativeParseIndex();
    }

    @Override
    public IToken read(TokenType expectedType) throws ParseException,EOFException
    {
        return read((String) null,expectedType);
    }

    @Override
    public IToken read(String errorMessage, TokenType expectedType) throws ParseException,EOFException
    {
        final IToken tok = peek();
        if ( tok.getType() != expectedType )
        {
            if ( StringUtils.isBlank( errorMessage ) )
            {
                // EOL/whitespace contents are unprintable, so report the token type instead
                if ( expectedType != TokenType.EOL && expectedType != TokenType.WHITESPACE ) {
                    throw new ParseException( "Expected token of type "+expectedType+" but got '"+tok.getContents()+"'", tok );
                }
                throw new ParseException( "Expected token of type "+expectedType+" but got "+tok.getType(), tok );
            }
            throw new ParseException( errorMessage, tok );
        }
        return read();
    }

    /**
     * Consumes tokens until one of the expected types (or an EOL, or EOF) is
     * encountered, returning everything read along the way. The matched token
     * is consumed and included only when {@code advancePastMatchedToken} is set.
     */
    @Override
    public List<IToken> advanceTo(TokenType[] expectedTypes,boolean advancePastMatchedToken)
    {
        if ( expectedTypes == null ) {
            throw new IllegalArgumentException("expectedTokenTypes must not be NULL.");
        }

        // EOL always stops the scan; remember whether the caller also asked for it
        boolean expectingEOL = false;
        for ( TokenType t : expectedTypes )
        {
            if ( TokenType.EOL == t ) {
                expectingEOL = true;
                break;
            }
        }

        final List<IToken> result = new ArrayList<IToken>();
        while( ! eof() )
        {
            if ( peek().isEOL() )
            {
                if ( expectingEOL ) {
                    if ( advancePastMatchedToken ) {
                        result.add( read() );
                    }
                }
                return result; // RETURN
            }

            for ( TokenType expectedType : expectedTypes )
            {
                if ( peek().hasType( expectedType ) )
                {
                    if ( advancePastMatchedToken ) {
                        result.add( read() );
                    }
                    return result; // RETURN !
                }
            }
            result.add( read() );
        }
        return result;
    }

    @Override
    public int getCurrentLineNumber() {
        return parseOffset.currentLineNumber();
    }

    @Override
    public int getCurrentLineStartOffset() {
        return parseOffset.currentLineStartOffset();
    }

    @Override
    public String toString()
    {
        return eof() ? "Lexer is at EOF" : peek().toString();
    }

    @Override
    public boolean hasLexerOption(LexerOption option) {
        if (option == null) {
            throw new IllegalArgumentException("option must not be NULL");
        }
        return this.options.contains( option );
    }

    @Override
    public void setLexerOption(LexerOption option, boolean enabled)
    {
        if ( option == null ) {
            throw new IllegalArgumentException("option must not be NULL");
        }
        if ( enabled ) {
            options.add( option );
        } else {
            options.remove( option );
        }
        // keep the cached flag consistent with the option set
        if ( option == LexerOption.CASE_INSENSITIVE_OPCODES ) {
            caseSensitiveOpCodes = ! enabled;
        }
    }

    /**
     * Consumes and returns all leading whitespace tokens, optionally including
     * EOL tokens when {@code skipEOL} is set.
     */
    @Override
    public List<IToken> skipWhitespace(boolean skipEOL)
    {
        List<IToken> result = new ArrayList<>();
        while ( ! eof() && ( peek().isWhitespace() || (skipEOL && peek().isEOL() ) ) )
        {
            result.add( read() );
        }
        return result;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator.scalar.annotations;
import com.facebook.presto.metadata.BoundVariables;
import com.facebook.presto.metadata.FunctionRegistry;
import com.facebook.presto.metadata.LongVariableConstraint;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.metadata.SignatureBinder;
import com.facebook.presto.metadata.TypeVariableConstraint;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.function.FunctionDependency;
import com.facebook.presto.spi.function.LiteralParameters;
import com.facebook.presto.spi.function.OperatorDependency;
import com.facebook.presto.spi.function.OperatorType;
import com.facebook.presto.spi.function.SqlType;
import com.facebook.presto.spi.function.TypeParameter;
import com.facebook.presto.spi.function.TypeParameterSpecialization;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.facebook.presto.spi.type.TypeSignature;
import com.facebook.presto.type.Constraint;
import com.facebook.presto.type.LiteralParameter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.primitives.Primitives;
import javax.annotation.Nullable;
import java.lang.annotation.Annotation;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodType;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Stream;
import static com.facebook.presto.metadata.FunctionKind.SCALAR;
import static com.facebook.presto.metadata.Signature.comparableTypeParameter;
import static com.facebook.presto.metadata.Signature.internalOperator;
import static com.facebook.presto.metadata.Signature.internalScalarFunction;
import static com.facebook.presto.metadata.Signature.orderableTypeParameter;
import static com.facebook.presto.metadata.Signature.typeVariable;
import static com.facebook.presto.spi.StandardErrorCode.FUNCTION_IMPLEMENTATION_ERROR;
import static com.facebook.presto.spi.function.OperatorType.BETWEEN;
import static com.facebook.presto.spi.function.OperatorType.CAST;
import static com.facebook.presto.spi.function.OperatorType.EQUAL;
import static com.facebook.presto.spi.function.OperatorType.GREATER_THAN;
import static com.facebook.presto.spi.function.OperatorType.GREATER_THAN_OR_EQUAL;
import static com.facebook.presto.spi.function.OperatorType.HASH_CODE;
import static com.facebook.presto.spi.function.OperatorType.LESS_THAN;
import static com.facebook.presto.spi.function.OperatorType.LESS_THAN_OR_EQUAL;
import static com.facebook.presto.spi.function.OperatorType.NOT_EQUAL;
import static com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.util.ImmutableCollectors.toImmutableList;
import static com.facebook.presto.util.ImmutableCollectors.toImmutableSet;
import static com.google.common.base.Preconditions.checkArgument;
import static java.lang.String.format;
import static java.lang.invoke.MethodHandles.lookup;
import static java.lang.invoke.MethodHandles.permuteArguments;
import static java.lang.reflect.Modifier.isStatic;
import static java.util.Arrays.asList;
import static java.util.Objects.requireNonNull;
/**
 * Reflection-derived description of one Java method that implements a scalar SQL function.
 * <p>
 * An instance captures the declared (possibly generic) {@code Signature}, nullability of the
 * return value and of each argument, the raw {@link MethodHandle} for the implementation method,
 * any injected dependencies (types, literals, functions, operators), and — for instance methods —
 * the constructor used to create the receiver. {@link #specialize} binds all of this to a
 * concrete signature at query-planning time.
 * <p>
 * Instances are built via {@link Parser#parseImplementation}.
 */
public class ScalarImplementation
{
    // Declared signature of the SQL function this method implements (may contain type variables).
    private final Signature signature;
    // Whether the SQL return value may be NULL (method returns a wrapper type).
    private final boolean nullable;
    // Per-argument nullability, parallel to the signature's argument list.
    private final List<Boolean> nullableArguments;
    // Handle on the implementation method; meta (dependency) parameters come first.
    private final MethodHandle methodHandle;
    // Dependencies injected as leading method parameters (@TypeParameter, @LiteralParameter, ...).
    private final List<ImplementationDependency> dependencies;
    // Constructor for the receiver object when the implementation is an instance method.
    private final Optional<MethodHandle> constructor;
    // Dependencies injected into the constructor's parameters.
    private final List<ImplementationDependency> constructorDependencies;
    // Declared Java container type of each SQL argument (e.g. long vs Long vs Slice).
    private final List<Class<?>> argumentNativeContainerTypes;
    // Type variable name -> Java type this implementation is specialized to (from
    // @TypeParameterSpecialization or inferred from parameter types).
    private final Map<String, Class<?>> specializedTypeParameters;

    public ScalarImplementation(
            Signature signature,
            boolean nullable,
            List<Boolean> nullableArguments,
            MethodHandle methodHandle,
            List<ImplementationDependency> dependencies,
            Optional<MethodHandle> constructor,
            List<ImplementationDependency> constructorDependencies,
            List<Class<?>> argumentNativeContainerTypes,
            Map<String, Class<?>> specializedTypeParameters)
    {
        this.signature = requireNonNull(signature, "signature is null");
        this.nullable = nullable;
        this.nullableArguments = ImmutableList.copyOf(requireNonNull(nullableArguments, "nullableArguments is null"));
        this.methodHandle = requireNonNull(methodHandle, "methodHandle is null");
        this.dependencies = ImmutableList.copyOf(requireNonNull(dependencies, "dependencies is null"));
        this.constructor = requireNonNull(constructor, "constructor is null");
        this.constructorDependencies = ImmutableList.copyOf(requireNonNull(constructorDependencies, "constructorDependencies is null"));
        this.argumentNativeContainerTypes = ImmutableList.copyOf(requireNonNull(argumentNativeContainerTypes, "argumentNativeContainerTypes is null"));
        this.specializedTypeParameters = ImmutableMap.copyOf(requireNonNull(specializedTypeParameters, "specializedTypeParameters is null"));
    }

    /**
     * Attempts to bind this implementation to a fully concrete signature.
     *
     * @return the dependency-bound method handle (and constructor, for instance methods), or
     *         {@code Optional.empty()} when this implementation is not applicable to the bound
     *         types (specialization mismatch, or return/argument container type mismatch)
     */
    public Optional<MethodHandleAndConstructor> specialize(Signature boundSignature, BoundVariables boundVariables, TypeManager typeManager, FunctionRegistry functionRegistry)
    {
        // Reject if any specialized type variable was bound to an incompatible Java type.
        for (Map.Entry<String, Class<?>> entry : specializedTypeParameters.entrySet()) {
            if (!entry.getValue().isAssignableFrom(boundVariables.getTypeVariable(entry.getKey()).getJavaType())) {
                return Optional.empty();
            }
        }
        // Return type must match exactly (wrapper vs primitive encodes nullability).
        Class<?> returnContainerType = getNullAwareContainerType(typeManager.getType(boundSignature.getReturnType()).getJavaType(), nullable);
        if (!returnContainerType.equals(methodHandle.type().returnType())) {
            return Optional.empty();
        }
        // Each declared parameter type must be able to accept the bound argument's container type.
        for (int i = 0; i < boundSignature.getArgumentTypes().size(); i++) {
            Class<?> argumentContainerType = getNullAwareContainerType(typeManager.getType(boundSignature.getArgumentTypes().get(i)).getJavaType(), nullableArguments.get(i));
            if (!argumentNativeContainerTypes.get(i).isAssignableFrom(argumentContainerType)) {
                return Optional.empty();
            }
        }
        // Bind the leading dependency parameters of the method handle, in declaration order.
        MethodHandle methodHandle = this.methodHandle;
        for (ImplementationDependency dependency : dependencies) {
            methodHandle = methodHandle.bindTo(dependency.resolve(boundVariables, typeManager, functionRegistry));
        }
        // Likewise for the constructor of instance-method implementations.
        MethodHandle constructor = null;
        if (this.constructor.isPresent()) {
            constructor = this.constructor.get();
            for (ImplementationDependency dependency : constructorDependencies) {
                constructor = constructor.bindTo(dependency.resolve(boundVariables, typeManager, functionRegistry));
            }
        }
        return Optional.of(new MethodHandleAndConstructor(methodHandle, Optional.ofNullable(constructor)));
    }

    // Nullable values use the boxed (wrapper) representation of the Java type.
    private static Class<?> getNullAwareContainerType(Class<?> clazz, boolean nullable)
    {
        if (nullable) {
            return Primitives.wrap(clazz);
        }
        return clazz;
    }

    /** True if this implementation is restricted to particular Java types for some type variables. */
    public boolean hasSpecializedTypeParameters()
    {
        return !specializedTypeParameters.isEmpty();
    }

    public Signature getSignature()
    {
        return signature;
    }

    public boolean isNullable()
    {
        return nullable;
    }

    public List<Boolean> getNullableArguments()
    {
        return nullableArguments;
    }

    public MethodHandle getMethodHandle()
    {
        return methodHandle;
    }

    public List<ImplementationDependency> getDependencies()
    {
        return dependencies;
    }

    /** Result of {@link #specialize}: the bound method handle plus an optional receiver constructor. */
    public static final class MethodHandleAndConstructor
    {
        private final MethodHandle methodHandle;
        private final Optional<MethodHandle> constructor;

        public MethodHandleAndConstructor(MethodHandle methodHandle, Optional<MethodHandle> constructor)
        {
            this.methodHandle = requireNonNull(methodHandle, "methodHandle is null");
            this.constructor = requireNonNull(constructor, "constructor is null");
        }

        public MethodHandle getMethodHandle()
        {
            return methodHandle;
        }

        public Optional<MethodHandle> getConstructor()
        {
            return constructor;
        }
    }

    /** A value injected into the implementation, resolved once the signature is bound. */
    private interface ImplementationDependency
    {
        Object resolve(BoundVariables boundVariables, TypeManager typeManager, FunctionRegistry functionRegistry);
    }

    /** Dependency on another scalar function's method handle, declared via @FunctionDependency. */
    private static final class FunctionImplementationDependency
            extends ScalarImplementationDependency
    {
        private FunctionImplementationDependency(String name, TypeSignature returnType, List<TypeSignature> argumentTypes)
        {
            super(internalScalarFunction(name, returnType, argumentTypes));
        }
    }

    /** Dependency on an operator implementation (e.g. EQUAL, HASH_CODE), declared via @OperatorDependency. */
    private static final class OperatorImplementationDependency
            extends ScalarImplementationDependency
    {
        private final OperatorType operator;

        private OperatorImplementationDependency(OperatorType operator, TypeSignature returnType, List<TypeSignature> argumentTypes)
        {
            super(internalOperator(operator, returnType, argumentTypes));
            this.operator = requireNonNull(operator, "operator is null");
        }

        public OperatorType getOperator()
        {
            return operator;
        }
    }

    /** Base for dependencies that resolve to a method handle looked up in the function registry. */
    private abstract static class ScalarImplementationDependency
            implements ImplementationDependency
    {
        private final Signature signature;

        private ScalarImplementationDependency(Signature signature)
        {
            this.signature = requireNonNull(signature, "signature is null");
        }

        public Signature getSignature()
        {
            return signature;
        }

        @Override
        public MethodHandle resolve(BoundVariables boundVariables, TypeManager typeManager, FunctionRegistry functionRegistry)
        {
            // Substitute bound type variables into the declared signature, then look it up.
            Signature signature = SignatureBinder.bindVariables(this.signature, boundVariables, this.signature.getArgumentTypes().size());
            return functionRegistry.getScalarFunctionImplementation(signature).getMethodHandle();
        }
    }

    /** Dependency on a Type instance, declared via @TypeParameter on a parameter. */
    private static final class TypeImplementationDependency
            implements ImplementationDependency
    {
        private final TypeSignature signature;

        private TypeImplementationDependency(String signature)
        {
            this.signature = parseTypeSignature(requireNonNull(signature, "signature is null"));
        }

        @Override
        public Type resolve(BoundVariables boundVariables, TypeManager typeManager, FunctionRegistry functionRegistry)
        {
            return typeManager.getType(SignatureBinder.bindVariables(signature, boundVariables));
        }
    }

    /** Dependency on a bound literal (long) variable, declared via @LiteralParameter. */
    private static final class LiteralImplementationDependency
            implements ImplementationDependency
    {
        private final String literalName;

        private LiteralImplementationDependency(String literalName)
        {
            this.literalName = requireNonNull(literalName, "literalName is null");
        }

        @Override
        public Long resolve(BoundVariables boundVariables, TypeManager typeManager, FunctionRegistry functionRegistry)
        {
            return boundVariables.getLongVariable(literalName);
        }
    }

    /**
     * Parses a single annotated implementation method (and, for instance methods, its matching
     * constructor) into a {@link ScalarImplementation}. Validates the annotation contract:
     * nullability vs wrapper/primitive types, meta-parameter placement, declared type parameters,
     * and literal parameters.
     */
    public static final class Parser
    {
        // Operators whose presence as a dependency forces the argument's type variable to be
        // comparable / orderable respectively (see createTypeVariableConstraints).
        private static final Set<OperatorType> COMPARABLE_TYPE_OPERATORS = ImmutableSet.of(EQUAL, NOT_EQUAL, HASH_CODE);
        private static final Set<OperatorType> ORDERABLE_TYPE_OPERATORS = ImmutableSet.of(LESS_THAN, LESS_THAN_OR_EQUAL, GREATER_THAN, GREATER_THAN_OR_EQUAL, BETWEEN);

        private final String functionName;
        private final boolean nullable;
        private final List<Boolean> nullableArguments = new ArrayList<>();
        private final TypeSignature returnType;
        private final List<TypeSignature> argumentTypes = new ArrayList<>();
        private final List<Class<?>> argumentNativeContainerTypes = new ArrayList<>();
        private final MethodHandle methodHandle;
        private final List<ImplementationDependency> dependencies = new ArrayList<>();
        // Insertion order preserved: used as a key to find the matching constructor.
        private final LinkedHashSet<TypeParameter> typeParameters = new LinkedHashSet<>();
        private final Set<String> literalParameters = new HashSet<>();
        private final Map<String, Class<?>> specializedTypeParameters;
        private final Optional<MethodHandle> constructorMethodHandle;
        private final List<ImplementationDependency> constructorDependencies = new ArrayList<>();
        private final List<LongVariableConstraint> longVariableConstraints = new ArrayList<>();

        private Parser(String functionName, Method method, Map<Set<TypeParameter>, Constructor<?>> constructors)
        {
            this.functionName = requireNonNull(functionName, "functionName is null");
            // @Nullable on the method means the SQL function may return NULL.
            this.nullable = method.getAnnotation(Nullable.class) != null;
            Stream.of(method.getAnnotationsByType(TypeParameter.class))
                    .forEach(typeParameters::add);
            LiteralParameters literalParametersAnnotation = method.getAnnotation(LiteralParameters.class);
            if (literalParametersAnnotation != null) {
                literalParameters.addAll(asList(literalParametersAnnotation.value()));
            }
            SqlType returnType = method.getAnnotation(SqlType.class);
            checkArgument(returnType != null, format("Method [%s] is missing @SqlType annotation", method));
            this.returnType = parseTypeSignature(returnType.value(), literalParameters);
            // Nullability must agree with the Java return type: wrapper <=> @Nullable.
            Class<?> actualReturnType = method.getReturnType();
            if (Primitives.isWrapperType(actualReturnType)) {
                checkArgument(nullable, "Method [%s] has wrapper return type %s but is missing @Nullable", method, actualReturnType.getSimpleName());
            }
            else if (actualReturnType.isPrimitive()) {
                checkArgument(!nullable, "Method [%s] annotated with @Nullable has primitive return type %s", method, actualReturnType.getSimpleName());
            }
            Stream.of(method.getAnnotationsByType(Constraint.class))
                    .map(annotation -> new LongVariableConstraint(annotation.variable(), annotation.expression()))
                    .forEach(longVariableConstraints::add);
            this.specializedTypeParameters = getDeclaredSpecializedTypeParameters(method);
            parseArguments(method);
            this.constructorMethodHandle = getConstructor(method, constructors);
            this.methodHandle = getMethodHandle(method);
        }

        /**
         * Classifies every method parameter as an injected dependency (meta parameter), an
         * injected ConnectorSession, or a SQL argument, and records argument metadata.
         */
        private void parseArguments(Method method)
        {
            ImmutableSet<String> typeParameterNames = typeParameters.stream()
                    .map(TypeParameter::value)
                    .collect(toImmutableSet());
            for (int i = 0; i < method.getParameterCount(); i++) {
                Annotation[] annotations = method.getParameterAnnotations()[i];
                Class<?> parameterType = method.getParameterTypes()[i];
                // Skip injected parameters
                if (parameterType == ConnectorSession.class) {
                    continue;
                }
                if (containsMetaParameter(annotations)) {
                    checkArgument(annotations.length == 1, "Meta parameters may only have a single annotation [%s]", method);
                    checkArgument(argumentTypes.isEmpty(), "Meta parameter must come before parameters [%s]", method);
                    Annotation annotation = annotations[0];
                    if (annotation instanceof TypeParameter) {
                        checkArgument(typeParameters.contains(annotation), "Injected type parameters must be declared with @TypeParameter annotation on the method [%s]", method);
                    }
                    if (annotation instanceof LiteralParameter) {
                        checkArgument(literalParameters.contains(((LiteralParameter) annotation).value()), "Parameter injected by @LiteralParameter must be declared with @LiteralParameters on the method [%s]", method);
                    }
                    dependencies.add(parseDependency(annotation));
                }
                else {
                    // A real SQL argument: must carry @SqlType, nullability must match wrapper-ness.
                    SqlType type = Stream.of(annotations)
                            .filter(SqlType.class::isInstance)
                            .map(SqlType.class::cast)
                            .findFirst()
                            .orElseThrow(() -> new IllegalArgumentException(format("Method [%s] is missing @SqlType annotation for parameter", method)));
                    boolean nullableArgument = Stream.of(annotations).anyMatch(Nullable.class::isInstance);
                    if (Primitives.isWrapperType(parameterType)) {
                        checkArgument(nullableArgument, "Method [%s] has parameter with wrapper type %s that is missing @Nullable", method, parameterType.getSimpleName());
                    }
                    else if (parameterType.isPrimitive()) {
                        checkArgument(!nullableArgument, "Method [%s] has parameter with primitive type %s annotated with @Nullable", method, parameterType.getSimpleName());
                    }
                    if (typeParameterNames.contains(type.value()) && !(parameterType == Object.class && nullableArgument)) {
                        // Infer specialization on this type parameter. We don't do this for @Nullable Object because it could match a type like BIGINT
                        Class<?> specialization = specializedTypeParameters.get(type.value());
                        Class<?> nativeParameterType = Primitives.unwrap(parameterType);
                        checkArgument(specialization == null || specialization.equals(nativeParameterType), "Method [%s] type %s has conflicting specializations %s and %s", method, type.value(), specialization, nativeParameterType);
                        specializedTypeParameters.put(type.value(), nativeParameterType);
                    }
                    argumentNativeContainerTypes.add(parameterType);
                    argumentTypes.add(parseTypeSignature(type.value(), literalParameters));
                    nullableArguments.add(nullableArgument);
                }
            }
        }

        // Find matching constructor, if this is an instance method, and populate constructorDependencies
        private Optional<MethodHandle> getConstructor(Method method, Map<Set<TypeParameter>, Constructor<?>> constructors)
        {
            if (isStatic(method.getModifiers())) {
                return Optional.empty();
            }
            // Constructors are keyed by the exact set of @TypeParameter annotations they declare.
            Constructor<?> constructor = constructors.get(typeParameters);
            checkArgument(constructor != null, "Method [%s] is an instance method and requires a public constructor to be declared with %s type parameters", method, typeParameters);
            for (int i = 0; i < constructor.getParameterCount(); i++) {
                Annotation[] annotations = constructor.getParameterAnnotations()[i];
                checkArgument(containsMetaParameter(annotations), "Constructors may only have meta parameters [%s]", constructor);
                checkArgument(annotations.length == 1, "Meta parameters may only have a single annotation [%s]", constructor);
                Annotation annotation = annotations[0];
                if (annotation instanceof TypeParameter) {
                    checkArgument(typeParameters.contains(annotation), "Injected type parameters must be declared with @TypeParameter annotation on the constructor [%s]", constructor);
                }
                constructorDependencies.add(parseDependency(annotation));
            }
            try {
                return Optional.of(lookup().unreflectConstructor(constructor));
            }
            catch (IllegalAccessException e) {
                throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, e);
            }
        }

        /** Collects explicit @TypeParameterSpecialization declarations, rejecting conflicts. */
        private Map<String, Class<?>> getDeclaredSpecializedTypeParameters(Method method)
        {
            Map<String, Class<?>> specializedTypeParameters = new HashMap<>();
            TypeParameterSpecialization[] typeParameterSpecializations = method.getAnnotationsByType(TypeParameterSpecialization.class);
            ImmutableSet<String> typeParameterNames = typeParameters.stream()
                    .map(TypeParameter::value)
                    .collect(toImmutableSet());
            for (TypeParameterSpecialization specialization : typeParameterSpecializations) {
                checkArgument(typeParameterNames.contains(specialization.name()), "%s does not match any declared type parameters (%s) [%s]", specialization.name(), typeParameters, method);
                Class<?> existingSpecialization = specializedTypeParameters.get(specialization.name());
                checkArgument(existingSpecialization == null || existingSpecialization.equals(specialization.nativeContainerType()),
                        "%s has conflicting specializations %s and %s [%s]", specialization.name(), existingSpecialization, specialization.nativeContainerType(), method);
                specializedTypeParameters.put(specialization.name(), specialization.nativeContainerType());
            }
            return specializedTypeParameters;
        }

        /**
         * Unreflects the method. For instance methods the raw handle has the receiver ("this")
         * first; the arguments are permuted so the receiver comes after the meta (dependency)
         * parameters, matching the binding order used by specialize().
         */
        private MethodHandle getMethodHandle(Method method)
        {
            MethodHandle methodHandle;
            try {
                methodHandle = lookup().unreflect(method);
            }
            catch (IllegalAccessException e) {
                throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, e);
            }
            if (!isStatic(method.getModifiers())) {
                // Re-arrange the parameters, so that the "this" parameter is after the meta parameters
                int[] permutedIndices = new int[methodHandle.type().parameterCount()];
                permutedIndices[0] = dependencies.size();
                MethodType newType = methodHandle.type().changeParameterType(dependencies.size(), methodHandle.type().parameterType(0));
                for (int i = 0; i < dependencies.size(); i++) {
                    permutedIndices[i + 1] = i;
                    newType = newType.changeParameterType(i, methodHandle.type().parameterType(i + 1));
                }
                for (int i = dependencies.size() + 1; i < permutedIndices.length; i++) {
                    permutedIndices[i] = i;
                }
                methodHandle = permuteArguments(methodHandle, newType, permutedIndices);
            }
            return methodHandle;
        }

        /**
         * Derives comparable/orderable constraints for each type variable from the operator
         * dependencies that mention it (CAST is exempt); unconstrained variables stay plain.
         */
        private static List<TypeVariableConstraint> createTypeVariableConstraints(Iterable<TypeParameter> typeParameters, List<ImplementationDependency> dependencies)
        {
            Set<String> orderableRequired = new HashSet<>();
            Set<String> comparableRequired = new HashSet<>();
            for (ImplementationDependency dependency : dependencies) {
                if (dependency instanceof OperatorImplementationDependency) {
                    OperatorType operator = ((OperatorImplementationDependency) dependency).getOperator();
                    if (operator == CAST) {
                        continue;
                    }
                    Set<String> argumentTypes = ((OperatorImplementationDependency) dependency).getSignature().getArgumentTypes().stream()
                            .map(TypeSignature::getBase)
                            .collect(toImmutableSet());
                    checkArgument(argumentTypes.size() == 1, "Operator dependency must only have arguments of a single type");
                    String argumentType = Iterables.getOnlyElement(argumentTypes);
                    if (COMPARABLE_TYPE_OPERATORS.contains(operator)) {
                        comparableRequired.add(argumentType);
                    }
                    if (ORDERABLE_TYPE_OPERATORS.contains(operator)) {
                        orderableRequired.add(argumentType);
                    }
                }
            }
            ImmutableList.Builder<TypeVariableConstraint> typeVariableConstraints = ImmutableList.builder();
            for (TypeParameter typeParameter : typeParameters) {
                String name = typeParameter.value();
                // orderable implies comparable, so orderable wins when both are required
                if (orderableRequired.contains(name)) {
                    typeVariableConstraints.add(orderableTypeParameter(name));
                }
                else if (comparableRequired.contains(name)) {
                    typeVariableConstraints.add(comparableTypeParameter(name));
                }
                else {
                    typeVariableConstraints.add(typeVariable(name));
                }
            }
            return typeVariableConstraints.build();
        }

        /** Translates a meta-parameter annotation into the matching dependency object. */
        private ImplementationDependency parseDependency(Annotation annotation)
        {
            if (annotation instanceof TypeParameter) {
                return new TypeImplementationDependency(((TypeParameter) annotation).value());
            }
            if (annotation instanceof LiteralParameter) {
                return new LiteralImplementationDependency(((LiteralParameter) annotation).value());
            }
            if (annotation instanceof FunctionDependency) {
                FunctionDependency function = (FunctionDependency) annotation;
                return new FunctionImplementationDependency(
                        function.name(),
                        parseTypeSignature(function.returnType(), literalParameters),
                        Arrays.stream(function.argumentTypes())
                                .map(signature -> parseTypeSignature(signature, literalParameters))
                                .collect(toImmutableList()));
            }
            if (annotation instanceof OperatorDependency) {
                OperatorDependency operator = (OperatorDependency) annotation;
                return new OperatorImplementationDependency(
                        operator.operator(),
                        parseTypeSignature(operator.returnType(), literalParameters),
                        Arrays.stream(operator.argumentTypes())
                                .map(signature -> parseTypeSignature(signature, literalParameters))
                                .collect(toImmutableList()));
            }
            throw new IllegalArgumentException("Unsupported annotation " + annotation.getClass().getSimpleName());
        }

        /** True if any of the annotations marks the parameter as injected (a meta parameter). */
        private static boolean containsMetaParameter(Annotation[] annotations)
        {
            for (Annotation annotation : annotations) {
                if (annotation instanceof TypeParameter ||
                        annotation instanceof LiteralParameter ||
                        annotation instanceof FunctionDependency ||
                        annotation instanceof OperatorDependency) {
                    return true;
                }
            }
            return false;
        }

        /** Assembles the parsed pieces into the final ScalarImplementation. */
        public ScalarImplementation get()
        {
            Signature signature = new Signature(
                    functionName,
                    SCALAR,
                    createTypeVariableConstraints(typeParameters, dependencies),
                    longVariableConstraints,
                    returnType,
                    argumentTypes,
                    false);
            return new ScalarImplementation(
                    signature,
                    nullable,
                    nullableArguments,
                    methodHandle,
                    dependencies,
                    constructorMethodHandle,
                    constructorDependencies,
                    argumentNativeContainerTypes,
                    specializedTypeParameters);
        }

        public static ScalarImplementation parseImplementation(String functionName, Method method, Map<Set<TypeParameter>, Constructor<?>> constructors)
        {
            return new Parser(functionName, method, constructors).get();
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ssmcontacts.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-contacts-2021-05-03/ListEngagements" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
/**
 * Request object for the SSM Contacts {@code ListEngagements} operation.
 * <p>
 * Supports standard pagination ({@code nextToken} / {@code maxResults}) and optional
 * filtering by incident ARN and time range.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-contacts-2021-05-03/ListEngagements" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListEngagementsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** Pagination token identifying the next page of results. */
    private String nextToken;

    /** Maximum number of engagements returned per page of results. */
    private Integer maxResults;

    /** Amazon Resource Name (ARN) of the incident whose engagements are listed. */
    private String incidentId;

    /** Time range restricting which engagements are listed for the incident. */
    private TimeRange timeRangeValue;

    /**
     * @param nextToken
     *        The pagination token to continue to the next page of results.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return The pagination token to continue to the next page of results.
     */
    public String getNextToken() {
        return nextToken;
    }

    /**
     * Fluent setter for the pagination token.
     *
     * @param nextToken
     *        The pagination token to continue to the next page of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListEngagementsRequest withNextToken(String nextToken) {
        this.nextToken = nextToken;
        return this;
    }

    /**
     * @param maxResults
     *        The maximum number of engagements per page of results.
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * @return The maximum number of engagements per page of results.
     */
    public Integer getMaxResults() {
        return maxResults;
    }

    /**
     * Fluent setter for the page size.
     *
     * @param maxResults
     *        The maximum number of engagements per page of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListEngagementsRequest withMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
        return this;
    }

    /**
     * @param incidentId
     *        The Amazon Resource Name (ARN) of the incident you're listing engagements for.
     */
    public void setIncidentId(String incidentId) {
        this.incidentId = incidentId;
    }

    /**
     * @return The Amazon Resource Name (ARN) of the incident you're listing engagements for.
     */
    public String getIncidentId() {
        return incidentId;
    }

    /**
     * Fluent setter for the incident ARN.
     *
     * @param incidentId
     *        The Amazon Resource Name (ARN) of the incident you're listing engagements for.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListEngagementsRequest withIncidentId(String incidentId) {
        this.incidentId = incidentId;
        return this;
    }

    /**
     * @param timeRangeValue
     *        The time range to lists engagements for an incident.
     */
    public void setTimeRangeValue(TimeRange timeRangeValue) {
        this.timeRangeValue = timeRangeValue;
    }

    /**
     * @return The time range to lists engagements for an incident.
     */
    public TimeRange getTimeRangeValue() {
        return timeRangeValue;
    }

    /**
     * Fluent setter for the engagement time range.
     *
     * @param timeRangeValue
     *        The time range to lists engagements for an incident.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListEngagementsRequest withTimeRangeValue(TimeRange timeRangeValue) {
        this.timeRangeValue = timeRangeValue;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder("{");
        if (getNextToken() != null) {
            builder.append("NextToken: ").append(getNextToken()).append(",");
        }
        if (getMaxResults() != null) {
            builder.append("MaxResults: ").append(getMaxResults()).append(",");
        }
        if (getIncidentId() != null) {
            builder.append("IncidentId: ").append(getIncidentId()).append(",");
        }
        if (getTimeRangeValue() != null) {
            builder.append("TimeRangeValue: ").append(getTimeRangeValue());
        }
        return builder.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListEngagementsRequest)) {
            return false;
        }
        ListEngagementsRequest that = (ListEngagementsRequest) obj;
        return fieldEquals(getNextToken(), that.getNextToken())
                && fieldEquals(getMaxResults(), that.getMaxResults())
                && fieldEquals(getIncidentId(), that.getIncidentId())
                && fieldEquals(getTimeRangeValue(), that.getTimeRangeValue());
    }

    /** Null-safe equality check used by {@link #equals(Object)}. */
    private static boolean fieldEquals(Object left, Object right) {
        return left == null ? right == null : left.equals(right);
    }

    @Override
    public int hashCode() {
        // Standard 31-based fold over the fields, null hashing to 0 (same order as toString).
        int hashCode = 1;
        for (Object field : new Object[] { getNextToken(), getMaxResults(), getIncidentId(), getTimeRangeValue() }) {
            hashCode = 31 * hashCode + (field == null ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public ListEngagementsRequest clone() {
        return (ListEngagementsRequest) super.clone();
    }
}
| |
package crazypants.enderio.conduit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import mekanism.api.gas.Gas;
import mekanism.api.gas.GasStack;
import net.minecraft.block.Block;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.FluidTankInfo;
import appeng.api.networking.IGridNode;
import appeng.api.util.AECableType;
import cpw.mods.fml.common.Optional.Method;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import crazypants.enderio.EnderIO;
import crazypants.enderio.TileEntityEio;
import crazypants.enderio.conduit.facade.ItemConduitFacade.FacadeType;
import crazypants.enderio.conduit.gas.IGasConduit;
import crazypants.enderio.conduit.geom.CollidableCache;
import crazypants.enderio.conduit.geom.CollidableComponent;
import crazypants.enderio.conduit.geom.ConduitConnectorType;
import crazypants.enderio.conduit.geom.ConduitGeometryUtil;
import crazypants.enderio.conduit.geom.Offset;
import crazypants.enderio.conduit.geom.Offsets;
import crazypants.enderio.conduit.item.IItemConduit;
import crazypants.enderio.conduit.liquid.ILiquidConduit;
import crazypants.enderio.conduit.me.IMEConduit;
import crazypants.enderio.conduit.power.IPowerConduit;
import crazypants.enderio.conduit.redstone.InsulatedRedstoneConduit;
import crazypants.enderio.config.Config;
import crazypants.render.BoundingBox;
public class TileConduitBundle extends TileEntityEio implements IConduitBundle {
public static final short NBT_VERSION = 1;
private final List<IConduit> conduits = new ArrayList<IConduit>();
private Block facadeId = null;
private int facadeMeta = 0;
private FacadeType facadeType = FacadeType.BASIC;
private boolean facadeChanged;
private final List<CollidableComponent> cachedCollidables = new ArrayList<CollidableComponent>();
private final List<CollidableComponent> cachedConnectors = new ArrayList<CollidableComponent>();
private boolean conduitsDirty = true;
private boolean collidablesDirty = true;
private boolean connectorsDirty = true;
private boolean clientUpdated = false;
private int lightOpacity = -1;
@SideOnly(Side.CLIENT)
private FacadeRenderState facadeRenderAs;
private ConduitDisplayMode lastMode = ConduitDisplayMode.ALL;
/** Creates an empty bundle; the block type is always the EnderIO conduit-bundle block. */
public TileConduitBundle() {
  this.blockType = EnderIO.blockConduitBundle;
}
/** Flags both the conduit list and the collision geometry as stale so they get rebuilt. */
@Override
public void dirty() {
  conduitsDirty = collidablesDirty = true;
}
/**
 * Skips rendering entirely when an opaque, non-hidden facade fully covers the bundle;
 * otherwise defers to the superclass.
 */
@Override
public boolean shouldRenderInPass(int pass) {
  boolean coveredByOpaqueFacade = facadeId != null
      && facadeId.isOpaqueCube()
      && !ConduitUtil.isFacadeHidden(this, EnderIO.proxy.getClientPlayer());
  return !coveredByOpaqueFacade && super.shouldRenderInPass(pass);
}
/**
 * Serializes the bundle: each conduit as a compound in the "conduits" list, then the facade
 * block/type/meta, then the NBT format version. Key names and the "null" sentinel must match
 * readCustomNBT exactly.
 */
@Override
public void writeCustomNBT(NBTTagCompound nbtRoot) {
  NBTTagList conduitTags = new NBTTagList();
  for (IConduit conduit : conduits) {
    NBTTagCompound conduitRoot = new NBTTagCompound();
    ConduitUtil.writeToNBT(conduit, conduitRoot);
    conduitTags.appendTag(conduitRoot);
  }
  nbtRoot.setTag("conduits", conduitTags);
  if(facadeId != null) {
    nbtRoot.setString("facadeId", Block.blockRegistry.getNameForObject(facadeId));
    nbtRoot.setString("facadeType", facadeType.name());
  } else {
    // "null" string sentinel: NBT has no null values, readCustomNBT checks for this.
    nbtRoot.setString("facadeId", "null");
  }
  nbtRoot.setInteger("facadeMeta", facadeMeta);
  // Version stamp lets readConduitFromNBT migrate older saved data.
  nbtRoot.setShort("nbtVersion", NBT_VERSION);
}
/**
 * Restores the bundle from NBT written by writeCustomNBT: rebuilds the conduit list
 * (unreadable conduits are silently dropped), then the facade block/type/meta.
 * On the client, marks the bundle so the renderer refreshes.
 */
@Override
public void readCustomNBT(NBTTagCompound nbtRoot) {
  // Version written by writeCustomNBT; lets conduit deserialization migrate old formats.
  short nbtVersion = nbtRoot.getShort("nbtVersion");
  conduits.clear();
  NBTTagList conduitTags = (NBTTagList) nbtRoot.getTag("conduits");
  if(conduitTags != null) {
    for (int i = 0; i < conduitTags.tagCount(); i++) {
      NBTTagCompound conduitTag = conduitTags.getCompoundTagAt(i);
      IConduit conduit = ConduitUtil.readConduitFromNBT(conduitTag, nbtVersion);
      // null means the conduit could not be reconstructed (e.g. missing mod); skip it.
      if(conduit != null) {
        conduit.setBundle(this);
        conduits.add(conduit);
      }
    }
  }
  String fs = nbtRoot.getString("facadeId");
  if(fs == null || "null".equals(fs)) {
    // "null" is the sentinel writeCustomNBT stores when there is no facade.
    facadeId = null;
    facadeType = FacadeType.BASIC;
  } else {
    facadeId = Block.getBlockFromName(fs);
    if(nbtRoot.hasKey("facadeType")) { // backwards compat, never true in freshly placed bundles
      facadeType = FacadeType.valueOf(nbtRoot.getString("facadeType"));
    }
  }
  facadeMeta = nbtRoot.getInteger("facadeMeta");
  if(worldObj != null && worldObj.isRemote) {
    // Tell the client-side tick that render state may have changed.
    clientUpdated = true;
  }
}
/** @return true when a facade block is installed on this bundle */
@Override
public boolean hasFacade() {
  return this.facadeId != null;
}
/**
 * Installs (or clears, when null) the facade block.
 *
 * @param blockID new facade block, may be null
 * @param triggerUpdate when true, schedules the lighting/render refresh done in doUpdate
 */
@Override
public void setFacadeId(Block blockID, boolean triggerUpdate) {
  facadeId = blockID;
  // Only ever raises the flag; doUpdate clears it after processing.
  facadeChanged |= triggerUpdate;
}
/** Convenience overload: installs the facade and always triggers the update. */
@Override
public void setFacadeId(Block blockID) {
  this.setFacadeId(blockID, true);
}
@Override
public Block getFacadeId() {
  // May be null when no facade is installed.
  return facadeId;
}
@Override
public void setFacadeMetadata(int meta) {
  // Metadata of the facade block (e.g. its variant/colour).
  this.facadeMeta = meta;
}
@Override
public void setFacadeType(FacadeType type) {
  this.facadeType = type;
}
@Override
public int getFacadeMetadata() {
  return facadeMeta;
}
@Override
public FacadeType getFacadeType() {
  return facadeType;
}
@Override
@SideOnly(Side.CLIENT)
public FacadeRenderState getFacadeRenderedAs() {
  // Lazily default to NONE so callers never observe a null render state.
  if (facadeRenderAs != null) {
    return facadeRenderAs;
  }
  facadeRenderAs = FacadeRenderState.NONE;
  return facadeRenderAs;
}
@Override
@SideOnly(Side.CLIENT)
public void setFacadeRenderAs(FacadeRenderState state) {
  facadeRenderAs = state;
}
@Override
public int getLightOpacity() {
  // Server side (or before the client cache is primed, lightOpacity == -1)
  // derives opacity from the facade; otherwise the cached client value wins.
  boolean useFacadeOpacity = (worldObj != null && !worldObj.isRemote) || lightOpacity == -1;
  if (!useFacadeOpacity) {
    return lightOpacity;
  }
  return hasFacade() ? facadeId.getLightOpacity() : 0;
}
@Override
public void setLightOpacity(int opacity) {
  // Client-side override used by updateEntityClient().
  this.lightOpacity = opacity;
}
@Override
public void onChunkUnload() {
  // Give every conduit a chance to release per-world state.
  for (IConduit c : conduits) {
    c.onChunkUnload(worldObj);
  }
}
@Override
public void doUpdate() {
  // Tick every conduit first, then flush any pending dirty/facade work.
  for (IConduit c : conduits) {
    c.updateEntity(worldObj);
  }
  if (conduitsDirty) {
    doConduitsDirty();
  }
  if (facadeChanged) {
    doFacadeChanged();
  }
  // Client side only: check for changes in rendering of the bundle.
  if (worldObj.isRemote) {
    updateEntityClient();
  }
}
private void doConduitsDirty() {
  // Server side: push a render/block update to clients and persist the TE.
  if(!worldObj.isRemote) {
    worldObj.markBlockForUpdate(xCoord, yCoord, zCoord);
    markDirty();
  }
  // Clear the flag on both sides.
  conduitsDirty = false;
}
private void doFacadeChanged() {
  //force re-calc of lighting for both client and server
  ConduitUtil.forceSkylightRecalculation(worldObj, xCoord, yCoord, zCoord);
  //worldObj.updateAllLightTypes(xCoord, yCoord, zCoord);
  // func_147451_t is the obfuscated (SRG) name for the light-update call above.
  worldObj.func_147451_t(xCoord, yCoord, zCoord);
  worldObj.markBlockForUpdate(xCoord, yCoord, zCoord);
  // Neighbors may react to the facade change (e.g. redstone, attachment checks).
  worldObj.notifyBlocksOfNeighborChange(xCoord, yCoord, zCoord, EnderIO.blockConduitBundle);
  facadeChanged = false;
}
// Client tick: reconcile facade render state, light opacity and the active
// conduit display mode, issuing at most one render update per tick.
private void updateEntityClient() {
  boolean markForUpdate = false;
  if(clientUpdated) {
    //TODO: This is not the correct solution here but just marking the block for a render update server side
    //seems to get out of sync with the client sometimes so connections are not rendered correctly
    markForUpdate = true;
    clientUpdated = false;
  }
  // How the facade is rendered now vs. how it should be for this player.
  FacadeRenderState curRS = getFacadeRenderedAs();
  FacadeRenderState rs = ConduitUtil.getRequiredFacadeRenderState(this, EnderIO.proxy.getClientPlayer());
  if(Config.updateLightingWhenHidingFacades) {
    int curLO = getLightOpacity();
    int shouldBeLO = rs == FacadeRenderState.FULL ? 255 : 0;
    if(curLO != shouldBeLO) {
      setLightOpacity(shouldBeLO);
      //worldObj.updateAllLightTypes(xCoord, yCoord, zCoord);
      worldObj.func_147451_t(xCoord, yCoord, zCoord);
    }
  }
  if(curRS != rs) {
    setFacadeRenderAs(rs);
    // Skylight recalc already triggers a redraw; avoid a duplicate update.
    if(!ConduitUtil.forceSkylightRecalculation(worldObj, xCoord, yCoord, zCoord)) {
      markForUpdate = true;
    }
  } else { //can do the else as only need to update once
    ConduitDisplayMode curMode = ConduitDisplayMode.getDisplayMode(EnderIO.proxy.getClientPlayer().getCurrentEquippedItem());
    if(curMode != lastMode) {
      markForUpdate = true;
      lastMode = curMode;
    }
  }
  if(markForUpdate) {
    worldObj.markBlockForUpdate(xCoord, yCoord, zCoord);
  }
}
@Override
public void onNeighborBlockChange(Block blockId) {
  // Every conduit gets notified; dirty the bundle if any of them changed.
  boolean changed = false;
  for (IConduit con : conduits) {
    if (con.onNeighborBlockChange(blockId)) {
      changed = true;
    }
  }
  if (changed) {
    dirty();
  }
}
@Override
public void onNeighborChange(IBlockAccess world, int x, int y, int z, int tileX, int tileY, int tileZ) {
  // Every conduit gets notified; dirty the bundle if any of them changed.
  boolean changed = false;
  for (IConduit con : conduits) {
    if (con.onNeighborChange(world, x, y, z, tileX, tileY, tileZ)) {
      changed = true;
    }
  }
  if (changed) {
    dirty();
  }
}
@Override
public TileConduitBundle getEntity() {
  // The bundle is its own tile entity.
  return this;
}
@Override
public boolean hasType(Class<? extends IConduit> type) {
  // Present iff a conduit of that type can be looked up.
  return null != getConduit(type);
}
@SuppressWarnings("unchecked")
@Override
public <T extends IConduit> T getConduit(Class<T> type) {
  // Linear scan: bundles hold only a handful of conduits.
  if (type != null) {
    for (IConduit candidate : conduits) {
      if (type.isInstance(candidate)) {
        return (T) candidate;
      }
    }
  }
  return null;
}
@Override
public void addConduit(IConduit conduit) {
  if (worldObj.isRemote) {
    return; // the conduit list is authoritative on the server only
  }
  conduits.add(conduit);
  conduit.setBundle(this);
  conduit.onAddedToBundle();
  dirty();
}
@Override
public void removeConduit(IConduit conduit) {
  // Null-tolerant convenience overload; always notifies.
  if (conduit == null) {
    return;
  }
  removeConduit(conduit, true);
}
public void removeConduit(IConduit conduit, boolean notify) {
  if(worldObj.isRemote) {
    return;
  }
  // Let the conduit clean up before it is detached from this bundle.
  conduit.onRemovedFromBundle();
  conduits.remove(conduit);
  conduit.setBundle(null);
  if(notify) {
    dirty();
  }
}
@Override
public void onBlockRemoved() {
  if (worldObj.isRemote) {
    return;
  }
  // Iterate over a copy: removeConduit mutates 'conduits'.
  for (IConduit con : new ArrayList<IConduit>(conduits)) {
    removeConduit(con, false);
  }
  dirty();
}
@Override
public Collection<IConduit> getConduits() {
  // NOTE(review): returns the live internal collection, not a copy — callers
  // should not mutate it directly; confirm interface contract.
  return conduits;
}
@Override
public Set<ForgeDirection> getConnections(Class<? extends IConduit> type) {
  // Null when no conduit of this type exists (preserved for callers).
  IConduit con = getConduit(type);
  return con == null ? null : con.getConduitConnections();
}
@Override
public boolean containsConnection(Class<? extends IConduit> type, ForgeDirection dir) {
  IConduit con = getConduit(type);
  return con != null && con.containsConduitConnection(dir);
}
@Override
public boolean containsConnection(ForgeDirection dir) {
  // True if any conduit in the bundle connects in this direction.
  for (IConduit con : conduits) {
    if(con.containsConduitConnection(dir)) {
      return true;
    }
  }
  return false;
}
@Override
public Set<ForgeDirection> getAllConnections() {
  // Union of every conduit's connection directions.
  EnumSet<ForgeDirection> union = EnumSet.noneOf(ForgeDirection.class);
  for (IConduit con : conduits) {
    union.addAll(con.getConduitConnections());
  }
  return union;
}
// Geometry
@Override
public Offset getOffset(Class<? extends IConduit> type, ForgeDirection dir) {
  // With fewer than two conduits on a face there is nothing to stagger.
  return getConnectionCount(dir) < 2 ? Offset.NONE : Offsets.get(type, dir);
}
// Returns (and lazily rebuilds) the cached collision geometry for the bundle.
@Override
public List<CollidableComponent> getCollidableComponents() {
  // Poll every conduit: any geometry change invalidates the cache.
  for (IConduit con : conduits) {
    collidablesDirty = collidablesDirty || con.haveCollidablesChangedSinceLastCall();
  }
  if(collidablesDirty) {
    connectorsDirty = true;
  }
  if(!collidablesDirty && !cachedCollidables.isEmpty()) {
    return cachedCollidables; // cache still valid
  }
  // Rebuild: per-conduit components plus the connector geometry.
  cachedCollidables.clear();
  for (IConduit conduit : conduits) {
    cachedCollidables.addAll(conduit.getCollidableComponents());
  }
  addConnectors(cachedCollidables);
  collidablesDirty = false;
  return cachedCollidables;
}
@Override
public List<CollidableComponent> getConnectors() {
  // Fresh list each call; addConnectors() fills it from the cache or rebuilds.
  List<CollidableComponent> connectors = new ArrayList<CollidableComponent>();
  addConnectors(connectors);
  return connectors;
}
// Computes (or serves from cache) the connector geometry: conduit cores,
// merged internal connector boxes where conduits overlap, and external
// connection stubs. Mutates 'result' and the cachedConnectors cache.
private void addConnectors(List<CollidableComponent> result) {
  if(conduits.isEmpty()) {
    return;
  }
  // Refresh both dirty flags from each conduit before trusting the cache.
  for (IConduit con : conduits) {
    boolean b = con.haveCollidablesChangedSinceLastCall();
    collidablesDirty = collidablesDirty || b;
    connectorsDirty = connectorsDirty || b;
  }
  if(!connectorsDirty && !cachedConnectors.isEmpty()) {
    result.addAll(cachedConnectors);
    return;
  }
  cachedConnectors.clear();
  //TODO: What an unholly mess!
  // Always include the per-conduit core (node) geometry.
  List<CollidableComponent> coreBounds = new ArrayList<CollidableComponent>();
  for (IConduit con : conduits) {
    addConduitCores(coreBounds, con);
  }
  cachedConnectors.addAll(coreBounds);
  result.addAll(coreBounds);
  // 1st algorithm
  // Find conduit types whose geometry overlaps and wrap their cores in one
  // slightly-scaled internal connector box.
  List<CollidableComponent> conduitsBounds = new ArrayList<CollidableComponent>();
  for (IConduit con : conduits) {
    conduitsBounds.addAll(con.getCollidableComponents());
    addConduitCores(conduitsBounds, con);
  }
  Set<Class<IConduit>> collidingTypes = new HashSet<Class<IConduit>>();
  for (CollidableComponent conCC : conduitsBounds) {
    for (CollidableComponent innerCC : conduitsBounds) {
      // Color-controller knobs are decorative and excluded from collision tests.
      if(!InsulatedRedstoneConduit.COLOR_CONTROLLER_ID.equals(innerCC.data) && !InsulatedRedstoneConduit.COLOR_CONTROLLER_ID.equals(conCC.data)
          && conCC != innerCC && conCC.bound.intersects(innerCC.bound)) {
        collidingTypes.add((Class<IConduit>) conCC.conduitType);
      }
    }
  }
  //TODO: Remove the core geometries covered up by this as no point in rendering these
  if(!collidingTypes.isEmpty()) {
    List<CollidableComponent> colCores = new ArrayList<CollidableComponent>();
    for (Class<IConduit> c : collidingTypes) {
      IConduit con = getConduit(c);
      if(con != null) {
        addConduitCores(colCores, con);
      }
    }
    // Expand a single box over all colliding cores.
    BoundingBox bb = null;
    for (CollidableComponent cBB : colCores) {
      if(bb == null) {
        bb = cBB.bound;
      } else {
        bb = bb.expandBy(cBB.bound);
      }
    }
    if(bb != null) {
      bb = bb.scale(1.05, 1.05, 1.05);
      CollidableComponent cc = new CollidableComponent(null, bb, ForgeDirection.UNKNOWN,
          ConduitConnectorType.INTERNAL);
      result.add(cc);
      cachedConnectors.add(cc);
    }
  }
  //2nd algorithm
  // A single conduit whose own cores are spread apart also gets a wrapper box.
  for (IConduit con : conduits) {
    if(con.hasConnections()) {
      List<CollidableComponent> cores = new ArrayList<CollidableComponent>();
      addConduitCores(cores, con);
      if(cores.size() > 1) {
        BoundingBox bb = cores.get(0).bound;
        float area = bb.getArea();
        for (CollidableComponent cc : cores) {
          bb = bb.expandBy(cc.bound);
        }
        // Heuristic: only wrap when the union is noticeably larger than one core.
        if(bb.getArea() > area * 1.5f) {
          bb = bb.scale(1.05, 1.05, 1.05);
          CollidableComponent cc = new CollidableComponent(null, bb, ForgeDirection.UNKNOWN,
              ConduitConnectorType.INTERNAL);
          result.add(cc);
          cachedConnectors.add(cc);
        }
      }
    }
  }
  // Merge all internal conduit connectors into one box
  BoundingBox conBB = null;
  for (int i = 0; i < result.size(); i++) {
    CollidableComponent cc = result.get(i);
    if (cc.conduitType == null && cc.data == ConduitConnectorType.INTERNAL) {
      conBB = conBB == null ? cc.bound : conBB.expandBy(cc.bound);
      // Index-based removal with manual decrement: we mutate while scanning.
      result.remove(i);
      i--;
      cachedConnectors.remove(cc);
    }
  }
  if(conBB != null) {
    CollidableComponent cc = new CollidableComponent(null, conBB, ForgeDirection.UNKNOWN, ConduitConnectorType.INTERNAL);
    result.add(cc);
    cachedConnectors.add(cc);
  }
  // External Connectors
  // One stub per direction that has at least one enabled external connection.
  EnumSet<ForgeDirection> externalDirs = EnumSet.noneOf(ForgeDirection.class);
  for (IConduit con : conduits) {
    Set<ForgeDirection> extCons = con.getExternalConnections();
    if(extCons != null) {
      for (ForgeDirection dir : extCons) {
        if(con.getConnectionMode(dir) != ConnectionMode.DISABLED) {
          externalDirs.add(dir);
        }
      }
    }
  }
  for (ForgeDirection dir : externalDirs) {
    BoundingBox bb = ConduitGeometryUtil.instance.getExternalConnectorBoundingBox(dir);
    CollidableComponent cc = new CollidableComponent(null, bb, dir, ConduitConnectorType.EXTERNAL);
    result.add(cc);
    cachedConnectors.add(cc);
  }
  connectorsDirty = false;
}
// Collects the cached "core" geometry for one conduit: one entry per
// connected direction, or a single centered core when unconnected.
private void addConduitCores(List<CollidableComponent> result, IConduit con) {
  CollidableCache cc = CollidableCache.instance;
  Class<? extends IConduit> type = con.getCollidableType();
  if(con.hasConnections()) {
    for (ForgeDirection dir : con.getExternalConnections()) {
      result.addAll(cc.getCollidables(cc.createKey(type, getOffset(con.getBaseConduitType(), dir), ForgeDirection.UNKNOWN, false), con));
    }
    for (ForgeDirection dir : con.getConduitConnections()) {
      result.addAll(cc.getCollidables(cc.createKey(type, getOffset(con.getBaseConduitType(), dir), ForgeDirection.UNKNOWN, false), con));
    }
  } else {
    result.addAll(cc.getCollidables(cc.createKey(type, getOffset(con.getBaseConduitType(), ForgeDirection.UNKNOWN), ForgeDirection.UNKNOWN, false), con));
  }
}
// Number of conduits that connect (internally or externally) in 'dir';
// UNKNOWN counts every conduit in the bundle.
private int getConnectionCount(ForgeDirection dir) {
  if (dir == ForgeDirection.UNKNOWN) {
    return conduits.size();
  }
  int count = 0;
  for (IConduit con : conduits) {
    if (con.containsConduitConnection(dir) || con.containsExternalConnection(dir)) {
      count++;
    }
  }
  return count;
}
// ------------ Power -----------------------------
@Override
public int receiveEnergy(ForgeDirection from, int maxReceive, boolean simulate) {
  // Delegate to the bundle's power conduit, if any.
  IPowerConduit pc = getConduit(IPowerConduit.class);
  return pc == null ? 0 : pc.receiveEnergy(from, maxReceive, simulate);
}
@Override
public int extractEnergy(ForgeDirection from, int maxExtract, boolean simulate) {
  IPowerConduit pc = getConduit(IPowerConduit.class);
  return pc == null ? 0 : pc.extractEnergy(from, maxExtract, simulate);
}
@Override
public boolean canConnectEnergy(ForgeDirection from) {
  IPowerConduit pc = getConduit(IPowerConduit.class);
  return pc != null && pc.canConnectEnergy(from);
}
@Override
public int getEnergyStored(ForgeDirection from) {
  IPowerConduit pc = getConduit(IPowerConduit.class);
  return pc == null ? 0 : pc.getEnergyStored(from);
}
@Override
public int getMaxEnergyStored(ForgeDirection from) {
  IPowerConduit pc = getConduit(IPowerConduit.class);
  return pc == null ? 0 : pc.getMaxEnergyStored(from);
}
@Override
public int getMaxEnergyRecieved(ForgeDirection dir) {
  // (Interface method name misspells "Received"; cannot rename here.)
  IPowerConduit pc = getConduit(IPowerConduit.class);
  return pc == null ? 0 : pc.getMaxEnergyRecieved(dir);
}
@Override
public int getEnergyStored() {
  IPowerConduit pc = getConduit(IPowerConduit.class);
  return pc == null ? 0 : pc.getEnergyStored();
}
@Override
public int getMaxEnergyStored() {
  IPowerConduit pc = getConduit(IPowerConduit.class);
  return pc == null ? 0 : pc.getMaxEnergyStored();
}
@Override
public void setEnergyStored(int stored) {
  // No-op when the bundle has no power conduit.
  IPowerConduit pc = getConduit(IPowerConduit.class);
  if (pc == null) {
    return;
  }
  pc.setEnergyStored(stored);
}
//------- Liquids -----------------------------
@Override
public int fill(ForgeDirection from, FluidStack resource, boolean doFill) {
  // Delegate to the bundle's liquid conduit, if any.
  ILiquidConduit lc = getConduit(ILiquidConduit.class);
  return lc == null ? 0 : lc.fill(from, resource, doFill);
}
@Override
public FluidStack drain(ForgeDirection from, FluidStack resource, boolean doDrain) {
  ILiquidConduit lc = getConduit(ILiquidConduit.class);
  return lc == null ? null : lc.drain(from, resource, doDrain);
}
@Override
public FluidStack drain(ForgeDirection from, int maxDrain, boolean doDrain) {
  ILiquidConduit lc = getConduit(ILiquidConduit.class);
  return lc == null ? null : lc.drain(from, maxDrain, doDrain);
}
@Override
public boolean canFill(ForgeDirection from, Fluid fluid) {
  ILiquidConduit lc = getConduit(ILiquidConduit.class);
  return lc != null && lc.canFill(from, fluid);
}
@Override
public boolean canDrain(ForgeDirection from, Fluid fluid) {
  ILiquidConduit lc = getConduit(ILiquidConduit.class);
  return lc != null && lc.canDrain(from, fluid);
}
@Override
public FluidTankInfo[] getTankInfo(ForgeDirection from) {
  // Null when there is no liquid conduit (preserved for callers).
  ILiquidConduit lc = getConduit(ILiquidConduit.class);
  return lc == null ? null : lc.getTankInfo(from);
}
// ---- TE Item Conduits
@Override
public ItemStack insertItem(ForgeDirection from, ItemStack item) {
  // No item conduit: nothing is accepted, hand the whole stack back.
  IItemConduit ic = getConduit(IItemConduit.class);
  return ic == null ? item : ic.insertItem(from, item);
}
// ---- Mekanism Gas Tubes
@Override
@Method(modid = "MekanismAPI|gas")
public int receiveGas(ForgeDirection side, GasStack stack) {
  // Convenience overload: the transfer is always actually performed.
  return receiveGas(side, stack, true);
}
@Override
@Method(modid = "MekanismAPI|gas")
public int receiveGas(ForgeDirection side, GasStack stack, boolean doTransfer) {
  // Delegate to the bundle's gas conduit, if any.
  IGasConduit gc = getConduit(IGasConduit.class);
  return gc == null ? 0 : gc.receiveGas(side, stack, doTransfer);
}
@Override
@Method(modid = "MekanismAPI|gas")
public GasStack drawGas(ForgeDirection side, int amount) {
  // Convenience overload: the transfer is always actually performed.
  return drawGas(side, amount, true);
}
@Override
@Method(modid = "MekanismAPI|gas")
public GasStack drawGas(ForgeDirection side, int amount, boolean doTransfer) {
  IGasConduit gc = getConduit(IGasConduit.class);
  return gc == null ? null : gc.drawGas(side, amount, doTransfer);
}
@Override
@Method(modid = "MekanismAPI|gas")
public boolean canReceiveGas(ForgeDirection side, Gas type) {
  IGasConduit gc = getConduit(IGasConduit.class);
  return gc != null && gc.canReceiveGas(side, type);
}
@Override
@Method(modid = "MekanismAPI|gas")
public boolean canDrawGas(ForgeDirection side, Gas type) {
  IGasConduit gc = getConduit(IGasConduit.class);
  return gc != null && gc.canDrawGas(side, type);
}
@Override
public World getWorld() {
  return getWorldObj();
}
// AE2 grid node; kept untyped (Object) so the class loads without AE2 installed.
private Object node; // IGridNode object, untyped to avoid crash w/o AE2
@Override
@Method(modid = "appliedenergistics2")
public IGridNode getGridNode(ForgeDirection dir) {
  // A concrete side only exposes the node when the ME conduit on the
  // opposite face is set to IN_OUT; null/UNKNOWN always returns the node.
  if (dir != null && dir != ForgeDirection.UNKNOWN) {
    IMEConduit cond = getConduit(IMEConduit.class);
    if (cond != null) {
      return cond.getConnectionMode(dir.getOpposite()) == ConnectionMode.IN_OUT
          ? (IGridNode) node
          : null;
    }
  }
  return (IGridNode) node;
}
@Override
@Method(modid = "appliedenergistics2")
public void setGridNode(Object node) {
  // Cast eagerly so a wrong type fails here rather than at first use.
  this.node = (IGridNode) node;
}
@Override
@Method(modid = "appliedenergistics2")
public AECableType getCableConnectionType(ForgeDirection dir) {
  // Connected ME conduit renders as a smart cable; everything else as none.
  IMEConduit cond = getConduit(IMEConduit.class);
  if (cond != null && cond.isConnectedTo(dir)) {
    return AECableType.SMART;
  }
  return AECableType.NONE;
}
@Override
@Method(modid = "appliedenergistics2")
public void securityBreak() {
  // Intentionally a no-op: conduit bundles do not react to AE2 security breaks.
}
@Override
public boolean displayPower() {
  // Always show stored power in UIs/probes.
  return true;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Alexey A. Ivanov
*/
package javax.swing.text;
import java.awt.Component;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Rectangle;
import java.awt.Shape;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentEvent.ElementChange;
import javax.swing.event.DocumentEvent.EventType;
import org.apache.harmony.awt.text.TextKit;
import org.apache.harmony.awt.text.TextUtils;
import org.apache.harmony.x.swing.internal.nls.Messages;
public class PlainView extends View implements TabExpander {
    // Font metrics of the (single) font used to render the document.
    protected FontMetrics metrics;
    // Shared paint state (colors, tab size, metrics, reusable Segment buffer).
    final TextPaintParams paintParams = new TextPaintParams(this);
    // Cached widest line element and its pixel width, used for X-axis span.
    private Element widestLine;
    private int widestLineWidth;
    /**
     * It is assumed that <code>element</code> is default root of document
     * this view represents (PlainDocument).
     */
    public PlainView(final Element element) {
        super(element);
    }
    /**
     * X span is the widest line's width; Y span is line height times line count.
     */
    public float getPreferredSpan(final int axis) {
        conditionalUpdateMetrics();
        switch (axis) {
        case X_AXIS:
            return getLineWidth(widestLine);
        case Y_AXIS:
            return metrics.getHeight() * getElement().getElementCount();
        default:
            throw new IllegalArgumentException(Messages.getString("swing.00", axis)); //$NON-NLS-1$
        }
    }
    public void insertUpdate(final DocumentEvent event, final Shape shape,
                             final ViewFactory factory) {
        updateDamage(event, shape, factory);
    }
    public void removeUpdate(final DocumentEvent event, final Shape shape,
                             final ViewFactory factory) {
        updateDamage(event, shape, factory);
    }
    public void changedUpdate(final DocumentEvent event, final Shape shape,
                              final ViewFactory factory) {
        updateDamage(event, shape, factory);
    }
    /**
     * Returns the rectangle where the caret will be painted. This rectangle
     * is positioned between characters (at position <code>pos</code>).
     * Its width is 1, and height is the height of the line (of the font used).
     * <p>For more information about position see
     * http://java.sun.com/products/jfc/tsc/articles/text/element_buffer/
     * in the heading <strong>The Position Interface</strong>.)
     */
    public Shape modelToView(final int pos, final Shape shape,
                             final Position.Bias bias)
        throws BadLocationException {
        if (pos < 0 || pos > getDocument().getLength() + 1) {
            throw new BadLocationException(Messages.getString("swing.98"), pos); //$NON-NLS-1$
        }
        final int lineNo = getElement().getElementIndex(pos);
        final Element line = getElement().getElement(lineNo);
        // Measure text from line start up to pos (tab-aware) for the caret X.
        getDocument().getText(line.getStartOffset(),
                              pos - line.getStartOffset(), getLineBuffer());
        final Rectangle bounds = shape.getBounds();
        return new Rectangle(
            TextUtils.getTabbedTextWidth(getLineBuffer(), metrics,
                                         bounds.x, this, pos) + bounds.x,
            bounds.y + metrics.getHeight() * lineNo,
            1, metrics.getHeight());
    }
    /**
     * Maps a view-space point back to a document offset, clamping points
     * above/below the text to offset 0 / document end respectively.
     */
    public int viewToModel(final float x, final float y,
                           final Shape shape,
                           final Position.Bias[] biasReturn) {
        biasReturn[0] = Position.Bias.Forward;
        final Rectangle bounds = shape.getBounds();
        if (y >= getPreferredSpan(Y_AXIS) + bounds.y) {
            return getDocument().getLength();
        }
        if (y < bounds.y) {
            return 0;
        }
        final int lineNo = (int)(y - bounds.y) / metrics.getHeight();
        final Element line = getElement().getElement(lineNo);
        final int start = line.getStartOffset();
        // end - 1 excludes the trailing newline character of the line element.
        final int end = line.getEndOffset() - 1;
        try {
            getDocument().getText(start, end - start, getLineBuffer());
        } catch (final BadLocationException e) { }
        return start + TextUtils.getTabbedTextOffset(getLineBuffer(), metrics,
                                                     bounds.x,
                                                     (int)Math.max(x, bounds.x),
                                                     this, start);
    }
    public float nextTabStop(final float x, final int tabOffset) {
        conditionalUpdateMetrics();
        return paintParams.nextTabStop(x);
    }
    /**
     * Paints only the lines intersecting the clip, drawing layered
     * highlights beneath each line's text.
     */
    public void paint(final Graphics g, final Shape shape) {
        final Rectangle bounds = shape.getBounds();
        int y = bounds.y + metrics.getAscent();
        paintParams.updateFields();
        final Rectangle clipBounds = g.getClipBounds();
        final int height = metrics.getHeight();
        final TextKit textKit = getTextKit();
        for (int i = 0; i < getElement().getElementCount(); i++, y += height) {
            if (!lineToRect(shape, i).intersects(clipBounds)) {
                continue;
            }
            if (textKit != null) {
                Element line = getElement().getElement(i);
                textKit.paintLayeredHighlights(g, line.getStartOffset(),
                                               line.getEndOffset() - 1,
                                               shape, this);
            }
            drawLine(i, g, bounds.x, y);
        }
    }
    public void setSize(final float width, final float height) {
        conditionalUpdateMetrics();
    }
    protected void drawLine(final int lineNo, final Graphics g,
                            final int x, final int y) {
        final Element line = getElement().getElement(lineNo);
        // end - 1 excludes the trailing newline of the line element.
        drawLine(paintParams,
                 line.getStartOffset(), line.getEndOffset() - 1,
                 g, x, y);
    }
    protected int drawSelectedText(final Graphics g,
                                   final int x, final int y,
                                   final int start, final int end)
        throws BadLocationException {
        return drawText(g, paintParams.selColor, paintParams,
                        x, y, start, end);
    }
    protected int drawUnselectedText(final Graphics g,
                                     final int x, final int y,
                                     final int start, final int end)
        throws BadLocationException {
        return drawText(g, paintParams.color, paintParams,
                        x, y, start, end);
    }
    // Shared, reusable text buffer — not thread-safe, Swing EDT only.
    protected final Segment getLineBuffer() {
        return paintParams.buffer;
    }
    protected int getTabSize() {
        return paintParams.getTabSize();
    }
    /**
     * Full-width rectangle covering line <code>lineNo</code> within the
     * allocated shape.
     */
    protected Rectangle lineToRect(final Shape shape, final int lineNo) {
        conditionalUpdateMetrics();
        int height = metrics.getHeight();
        Rectangle bounds = shape.getBounds();
        return new Rectangle(bounds.x, bounds.y + height * lineNo,
                             bounds.width, height);
    }
    protected void damageLineRange(final int startLine,
                                   final int endLine,
                                   final Shape shape,
                                   final Component component) {
        Rectangle lineRect;
        for (int i = startLine; i <= endLine; i++) {
            lineRect = lineToRect(shape, i);
            component.repaint(lineRect.x, lineRect.y,
                              lineRect.width, lineRect.height);
        }
    }
    /**
     * Dispatches a document mutation to the insert/remove damage handlers;
     * a null allocation means nothing is displayed yet, so nothing to damage.
     */
    protected void updateDamage(final DocumentEvent event, final Shape shape,
                                final ViewFactory factory) {
        if (shape == null) {
            return;
        }
        if (metrics == null) {
            // First event before metrics exist: initialize and invalidate both axes.
            updateMetrics();
            preferenceChanged(null, true, true);
            return;
        }
        final ElementChange change = event.getChange(getElement());
        if (event.getType() == EventType.INSERT) {
            updateDamageOnInsert(event, change, shape);
        } else {
            updateDamageOnRemove(event, change, shape);
        }
    }
    protected void updateMetrics() {
        paintParams.updateMetrics();
        metrics = paintParams.metrics;
        updateWidestLine();
    }
    // NOTE(review): condition reads as "update when metrics are valid", which
    // looks inverted; TextPaintParams.areMetricsValid() semantics are not
    // visible here — confirm against TextPaintParams before changing.
    final void conditionalUpdateMetrics() {
        if (paintParams.areMetricsValid()) {
            updateMetrics();
        }
    }
    // Recompute the widest-line cache over the whole document.
    private void updateWidestLine() {
        widestLine = getElement().getElement(0);
        widestLineWidth = getLineWidth(widestLine);
        updateWidestLine(1, getElement().getElementCount() - 1);
    }
    // Widen the cache by scanning lines [start, end] only.
    private void updateWidestLine(final int start, final int end) {
        for (int i = start; i <= end; i++) {
            int w = getLineWidth(i);
            if (w > widestLineWidth) {
                widestLineWidth = w;
                widestLine = getElement().getElement(i);
            }
        }
    }
    // Tab-aware pixel width of one line (excluding its trailing newline).
    private int getLineWidth(final Element line) {
        try {
            getDocument().getText(line.getStartOffset(),
                                  line.getEndOffset()
                                  - line.getStartOffset() - 1,
                                  getLineBuffer());
        } catch (final BadLocationException e) { }
        return TextUtils.getTabbedTextWidth(getLineBuffer(), metrics, 0,
                                            this, line.getStartOffset());
    }
    private int getLineWidth(final int lineNo) {
        return getLineWidth(getElement().getElement(lineNo));
    }
    /**
     * Repaints the lines affected by an insertion; when whole lines were
     * added (change != null) everything from the first changed line down
     * is repainted.
     */
    private void updateDamageOnInsert(final DocumentEvent event,
                                      final ElementChange change,
                                      final Shape shape) {
        boolean linesAdded = change != null;
        int start = linesAdded
                    ? change.getIndex()
                    : getElement().getElementIndex(event.getOffset());
        int length = linesAdded ? change.getChildrenAdded().length - 1 : 0;
        int width = widestLineWidth;
        if (widestLine.getEndOffset() < event.getOffset()
            || widestLine.getStartOffset() > event.getOffset()
                                             + event.getLength()) {
            // The previous longest line was not affected
            updateWidestLine(start, start + length);
        } else {
            updateWidestLine();
        }
        preferenceChanged(null, widestLineWidth != width, linesAdded);
        damageLineRange(start,
                        linesAdded ? getElement().getElementCount() - 1 : start,
                        shape, getComponent());
    }
    /**
     * Repaints after a removal; removed lines (change != null) force a full
     * component repaint, otherwise only the single affected line is damaged.
     */
    private void updateDamageOnRemove(final DocumentEvent event,
                                      final ElementChange change,
                                      final Shape shape) {
        int width = widestLineWidth;
        if (change != null) {
            updateWidestLine();
            preferenceChanged(null, widestLineWidth != width, true);
            getComponent().repaint();
        } else {
            int lineNo = getElement().getElementIndex(event.getOffset());
            Element line = getElement().getElement(lineNo);
            if (widestLine == line) {
                updateWidestLine();
                preferenceChanged(null, widestLineWidth != width, false);
            }
            damageLineRange(lineNo, lineNo, shape, getComponent());
        }
    }
}
| |
package org.apereo.cas.util.scripting;
import org.apereo.cas.util.RegexUtils;
import org.apereo.cas.util.ResourceUtils;
import groovy.lang.Binding;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyObject;
import groovy.lang.GroovyShell;
import groovy.lang.Script;
import lombok.SneakyThrows;
import lombok.experimental.UtilityClass;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.codehaus.groovy.control.CompilerConfiguration;
import org.codehaus.groovy.runtime.InvokerInvocationException;
import org.springframework.core.io.Resource;
import javax.script.Invocable;
import javax.script.ScriptEngineManager;
import javax.script.SimpleBindings;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.PrivilegedActionException;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* This is {@link ScriptingUtils}.
*
* @author Misagh Moayyed
* @since 5.1.0
*/
@Slf4j
@UtilityClass
public class ScriptingUtils {
/**
 * Pattern indicating groovy script is inlined,
 * e.g. {@code groovy { return attributes }}.
 */
private static final Pattern INLINE_GROOVY_PATTERN = RegexUtils.createPattern("groovy\\s*\\{\\s*(.+)\\s*\\}",
    Pattern.DOTALL | Pattern.MULTILINE);
/**
 * Pattern indicating groovy script is a file/resource,
 * e.g. {@code file:/etc/cas/x.groovy} or {@code classpath:x.groovy}.
 */
private static final Pattern FILE_GROOVY_PATTERN = RegexUtils.createPattern("(file|classpath):(.+\\.groovy)");
/**
 * Determines whether the given text is an inline groovy snippet,
 * i.e. wrapped as {@code groovy { ... }}.
 *
 * @param script the candidate script text
 * @return true when the inline pattern matches
 */
public static boolean isInlineGroovyScript(final String script) {
    final Matcher matcher = getMatcherForInlineGroovyScript(script);
    return matcher.find();
}
/**
 * Determines whether the given text points at an external groovy resource,
 * i.e. a {@code file:} or {@code classpath:} location ending in .groovy.
 *
 * @param script the candidate script text
 * @return true when the file/resource pattern matches
 */
public static boolean isExternalGroovyScript(final String script) {
    final Matcher matcher = getMatcherForExternalGroovyScript(script);
    return matcher.find();
}
/**
 * Gets a fresh matcher of {@link #INLINE_GROOVY_PATTERN} over the script;
 * group(1) of a match is the inline script body.
 *
 * @param script the script
 * @return the inline groovy script matcher
 */
public static Matcher getMatcherForInlineGroovyScript(final String script) {
    return INLINE_GROOVY_PATTERN.matcher(script);
}
/**
 * Gets a fresh matcher of {@link #FILE_GROOVY_PATTERN} over the script;
 * group(2) of a match is the resource path.
 *
 * @param script the script
 * @return the groovy file script matcher
 */
public static Matcher getMatcherForExternalGroovyScript(final String script) {
    return FILE_GROOVY_PATTERN.matcher(script);
}
/**
 * Runs a compiled groovy shell script with no extra binding variables.
 *
 * @param <T>    the expected result type
 * @param script the compiled script
 * @param clazz  the expected result class
 * @return the script result, or null on failure
 */
public static <T> T executeGroovyShellScript(final Script script,
                                             final Class<T> clazz) {
    final Map<String, Object> noVariables = new HashMap<>(0);
    return executeGroovyShellScript(script, noVariables, clazz);
}
/**
 * Execute groovy shell script t.
 *
 * @param <T>       the type parameter
 * @param script    the script
 * @param variables the variables bound into the script before running
 * @param clazz     the expected result class
 * @return the t, or null when execution fails (the error is logged)
 */
public static <T> T executeGroovyShellScript(final Script script,
                                             final Map<String, Object> variables,
                                             final Class<T> clazz) {
    try {
        val binding = script.getBinding();
        // Always expose a logger to the script unless the caller bound one.
        if (!binding.hasVariable("logger")) {
            binding.setVariable("logger", LOGGER);
        }
        if (variables != null && !variables.isEmpty()) {
            variables.forEach(binding::setVariable);
        }
        script.setBinding(binding);
        LOGGER.debug("Executing groovy script [{}] with variables [{}]", script, binding.getVariables());
        val result = script.run();
        // Throws when the result is not assignable to clazz.
        return getGroovyScriptExecutionResultOrThrow(clazz, result);
    } catch (final Exception e) {
        LOGGER.error(e.getMessage(), e);
    }
    return null;
}
/**
 * Executes the groovy script resource via its implicit {@code run} method.
 *
 * @param <T>          the type parameter
 * @param groovyScript the groovy script resource
 * @param args         the args
 * @param clazz        the expected return type
 * @param failOnError  rethrow failures instead of logging them
 * @return the object
 */
public static <T> T executeGroovyScript(final Resource groovyScript,
                                        final Object[] args, final Class<T> clazz,
                                        final boolean failOnError) {
    // "run" is the entry point every groovy script implicitly defines.
    return executeGroovyScript(groovyScript, "run", args, clazz, failOnError);
}
/**
 * Executes the already-parsed groovy object via its {@code run} method.
 *
 * @param <T>          the type parameter
 * @param groovyObject the groovy object
 * @param args         the args
 * @param clazz        the expected return type
 * @param failOnError  rethrow failures instead of logging them
 * @return the result
 */
public static <T> T executeGroovyScript(final GroovyObject groovyObject,
                                        final Object[] args, final Class<T> clazz,
                                        final boolean failOnError) {
    return executeGroovyScript(groovyObject, "run", args, clazz, failOnError);
}
/**
 * Executes the named method of the groovy script resource, never rethrowing
 * failures (they are logged instead).
 *
 * @param <T>          the type parameter
 * @param groovyScript the groovy script resource
 * @param methodName   the method name
 * @param clazz        the expected return type
 * @param args         the args
 * @return the type to return
 */
public static <T> T executeGroovyScript(final Resource groovyScript,
                                        final String methodName,
                                        final Class<T> clazz,
                                        final Object... args) {
    // failOnError=false: callers of this overload want best-effort execution.
    return executeGroovyScript(groovyScript, methodName, args, clazz, false);
}
/**
 * Executes the named zero-argument method of the groovy script resource,
 * logging (not rethrowing) any failure.
 *
 * @param <T>          the type parameter
 * @param groovyScript the groovy script resource
 * @param methodName   the method name
 * @param clazz        the expected return type
 * @return the t
 */
public static <T> T executeGroovyScript(final Resource groovyScript,
                                        final String methodName,
                                        final Class<T> clazz) {
    final Object[] noArgs = ArrayUtils.EMPTY_OBJECT_ARRAY;
    return executeGroovyScript(groovyScript, methodName, noArgs, clazz, false);
}
/**
 * Loads the groovy script resource and invokes the named method on it,
 * inside a privileged action.
 *
 * @param <T>          the type parameter
 * @param groovyScript the groovy script resource
 * @param methodName   the method name
 * @param args         the args
 * @param clazz        the expected return type
 * @param failOnError  rethrow failures instead of logging them
 * @return the t, or null when inputs are missing or execution fails
 */
@SneakyThrows
public static <T> T executeGroovyScript(final Resource groovyScript,
                                        final String methodName,
                                        final Object[] args,
                                        final Class<T> clazz,
                                        final boolean failOnError) {
    if (groovyScript == null || StringUtils.isBlank(methodName)) {
        return null;
    }
    try {
        return AccessController.doPrivileged((PrivilegedAction<T>) () -> getGroovyResult(groovyScript, methodName, args, clazz, failOnError));
    } catch (final Exception e) {
        // NOTE(review): doPrivileged(PrivilegedAction) never throws
        // PrivilegedActionException (only the PrivilegedExceptionAction
        // variant does), so this unwrap branch looks unreachable — confirm.
        var cause = e instanceof PrivilegedActionException ? PrivilegedActionException.class.cast(e).getException() : e;
        if (failOnError) {
            throw cause;
        }
        LOGGER.error(cause.getMessage(), cause);
    }
    return null;
}
    /**
     * Invokes the named method on an already-parsed Groovy object and verifies the
     * result type.
     *
     * @param <T>          the expected result type
     * @param groovyObject the parsed groovy object to invoke
     * @param methodName   the method to invoke
     * @param args         the arguments passed to the invoked method
     * @param clazz        the class the result is expected to be assignable to;
     *                     pass {@code Void.class} to ignore the result entirely
     * @param failOnError  if {@code true}, rethrow the underlying cause instead of logging it
     * @return the script result, or {@code null} when the result is ignored or execution fails
     */
    @SneakyThrows
    public static <T> T executeGroovyScript(final GroovyObject groovyObject,
                                            final String methodName,
                                            final Object[] args,
                                            final Class<T> clazz,
                                            final boolean failOnError) {
        try {
            LOGGER.trace("Executing groovy script's [{}] method, with parameters [{}]", methodName, args);
            val result = groovyObject.invokeMethod(methodName, args);
            LOGGER.trace("Results returned by the groovy script are [{}]", result);
            // Void.class signals a fire-and-forget call; skip the type check and return null.
            if (!clazz.equals(Void.class)) {
                return getGroovyScriptExecutionResultOrThrow(clazz, result);
            }
        } catch (final Exception e) {
            // Groovy wraps exceptions thrown inside the script; unwrap to report the real cause.
            var cause = e instanceof InvokerInvocationException ? e.getCause() : e;
            if (failOnError) {
                throw cause;
            }
            LOGGER.error(cause.getMessage(), cause);
        }
        return null;
    }
    /**
     * Runs a compiled Groovy {@link Script} with the given variable bindings and
     * verifies the result type. A {@code logger} binding is injected automatically
     * unless the caller already supplied one.
     *
     * @param <T>          the expected result type
     * @param groovyObject the compiled groovy script to run
     * @param args         the variable bindings made available to the script; may be {@code null}
     * @param clazz        the class the result is expected to be assignable to;
     *                     pass {@code Void.class} to ignore the result entirely
     * @param failOnError  if {@code true}, rethrow the underlying cause instead of logging it
     * @return the script result, or {@code null} when the result is ignored or execution fails
     */
    @SneakyThrows
    public static <T> T executeGroovyScript(final Script groovyObject,
                                            final Map<String, Object> args,
                                            final Class<T> clazz,
                                            final boolean failOnError) {
        try {
            LOGGER.trace("Executing groovy script with bindings [{}]", args);
            val binding = new Binding();
            if (args != null && !args.isEmpty()) {
                args.forEach(binding::setVariable);
            }
            // Give scripts a default logger unless one was explicitly bound.
            if (!binding.hasVariable("logger")) {
                binding.setVariable("logger", LOGGER);
            }
            groovyObject.setBinding(binding);
            val result = groovyObject.run();
            LOGGER.trace("Results returned by the groovy script are [{}]", result);
            if (!clazz.equals(Void.class)) {
                return getGroovyScriptExecutionResultOrThrow(clazz, result);
            }
        } catch (final Exception e) {
            // Groovy wraps exceptions thrown inside the script; unwrap to report the real cause.
            var cause = e instanceof InvokerInvocationException ? e.getCause() : e;
            if (failOnError) {
                throw cause;
            }
            LOGGER.error(cause.getMessage(), cause);
        }
        return null;
    }
/**
* Parse groovy shell script script.
*
* @param script the script
* @return the script
*/
public static Script parseGroovyShellScript(final String script) {
try {
val shell = new GroovyShell();
LOGGER.debug("Parsing groovy script [{}]", script);
return shell.parse(script);
} catch (final Exception e) {
LOGGER.error(e.getMessage(), e);
}
return null;
}
    /**
     * Parses the Groovy script resource into a {@link GroovyObject} instance inside
     * a privileged action, using a dedicated {@link GroovyClassLoader}.
     *
     * @param groovyScript the groovy script resource to load and instantiate
     * @param failOnError  if {@code true}, wrap and rethrow failures as {@link RuntimeException}
     * @return the instantiated groovy object, or {@code null} when the script is
     *         missing or instantiation fails and {@code failOnError} is false
     */
    public static GroovyObject parseGroovyScript(final Resource groovyScript,
                                                 final boolean failOnError) {
        return AccessController.doPrivileged((PrivilegedAction<GroovyObject>) () -> {
            val parent = ScriptingUtils.class.getClassLoader();
            // The loader is closed by try-with-resources after instantiation; the class
            // is already defined at that point, so the created instance remains usable.
            try (val loader = new GroovyClassLoader(parent)) {
                val groovyClass = loadGroovyClass(groovyScript, loader);
                if (groovyClass != null) {
                    LOGGER.trace("Creating groovy object instance from class [{}]", groovyScript.getURI().getPath());
                    // Requires a public no-arg constructor on the script class.
                    return (GroovyObject) groovyClass.getDeclaredConstructor().newInstance();
                }
                LOGGER.warn("Groovy script at [{}] does not exist", groovyScript.getURI().getPath());
            } catch (final Exception e) {
                if (failOnError) {
                    throw new RuntimeException(e);
                }
                LOGGER.error(e.getMessage(), e);
            }
            return null;
        });
    }
private Class loadGroovyClass(final Resource groovyScript,
final GroovyClassLoader loader) throws IOException {
if (ResourceUtils.isJarResource(groovyScript)) {
try (val groovyReader = new BufferedReader(new InputStreamReader(groovyScript.getInputStream(), StandardCharsets.UTF_8))) {
return loader.parseClass(groovyReader, groovyScript.getFilename());
}
}
val groovyFile = groovyScript.getFile();
if (groovyFile.exists()) {
return loader.parseClass(groovyFile);
}
return null;
}
    @SneakyThrows
    // Parses the script resource and then invokes the requested method on the result.
    // Exceptions are rethrown (via @SneakyThrows) when failOnError is set, otherwise logged.
    private static <T> T getGroovyResult(final Resource groovyScript,
                                         final String methodName,
                                         final Object[] args,
                                         final Class<T> clazz,
                                         final boolean failOnError) {
        try {
            val groovyObject = parseGroovyScript(groovyScript, failOnError);
            if (groovyObject == null) {
                // Parsing already logged the underlying cause; record which resource failed.
                LOGGER.error("Could not parse the Groovy script at [{}]", groovyScript);
                return null;
            }
            return executeGroovyScript(groovyObject, methodName, args, clazz, failOnError);
        } catch (final Exception e) {
            if (failOnError) {
                throw e;
            }
            LOGGER.error(e.getMessage(), e);
        }
        return null;
    }
private static <T> T getGroovyScriptExecutionResultOrThrow(final Class<T> clazz, final Object result) {
if (result != null && !clazz.isAssignableFrom(result.getClass())) {
throw new ClassCastException("Result [" + result + " is of type " + result.getClass() + " when we were expecting " + clazz);
}
return (T) result;
}
/**
* Execute groovy script engine t.
*
* @param <T> the type parameter
* @param scriptFile the script file
* @param args the args
* @param clazz the clazz
* @return the t
*/
public static <T> T executeScriptEngine(final String scriptFile, final Object[] args, final Class<T> clazz) {
try {
val engineName = getScriptEngineName(scriptFile);
if (StringUtils.isBlank(engineName)) {
LOGGER.warn("Script engine name can not be determined for [{}]", engineName);
return null;
}
val engine = new ScriptEngineManager().getEngineByName(engineName);
if (engine == null) {
LOGGER.warn("Script engine is not available for [{}]", engineName);
return null;
}
val resourceFrom = ResourceUtils.getResourceFrom(scriptFile);
val theScriptFile = resourceFrom.getFile();
if (theScriptFile.exists()) {
LOGGER.debug("Created object instance from class [{}]", theScriptFile.getCanonicalPath());
try (val reader = Files.newBufferedReader(theScriptFile.toPath(), StandardCharsets.UTF_8)) {
engine.eval(reader);
}
val invocable = (Invocable) engine;
LOGGER.debug("Executing script's run method, with parameters [{}]", args);
val result = invocable.invokeFunction("run", args);
LOGGER.debug("Groovy script result is [{}]", result);
return getGroovyScriptExecutionResultOrThrow(clazz, result);
}
LOGGER.warn("[{}] script [{}] does not exist, or cannot be loaded", StringUtils.capitalize(engineName), scriptFile);
} catch (final Exception e) {
LOGGER.error(e.getMessage(), e);
}
return null;
}
/**
* Execute inline groovy script engine.
*
* @param <T> the type parameter
* @param script the script
* @param variables the variables
* @param clazz the clazz
* @return the t
*/
public static <T> T executeGroovyScriptEngine(final String script,
final Map<String, Object> variables,
final Class<T> clazz) {
try {
val engine = new ScriptEngineManager().getEngineByName("groovy");
if (engine == null) {
LOGGER.warn("Script engine is not available for Groovy");
return null;
}
val binding = new SimpleBindings();
if (variables != null && !variables.isEmpty()) {
binding.putAll(variables);
}
if (!binding.containsKey("logger")) {
binding.put("logger", LOGGER);
}
val result = engine.eval(script, binding);
return getGroovyScriptExecutionResultOrThrow(clazz, result);
} catch (final Exception e) {
LOGGER.error(e.getMessage(), e);
}
return null;
}
    /**
     * Creates an object instance from the given Groovy resource using its no-arg
     * constructor. Delegates to
     * {@link #getObjectInstanceFromGroovyResource(Resource, Class[], Object[], Class)}.
     *
     * @param <T>          the expected type of the created instance
     * @param resource     the groovy source resource; {@code null} yields {@code null}
     * @param expectedType the class the created instance must be assignable to
     * @return the created instance, or {@code null} on failure
     */
    public static <T> T getObjectInstanceFromGroovyResource(final Resource resource,
                                                            final Class<T> expectedType) {
        return getObjectInstanceFromGroovyResource(resource, ArrayUtils.EMPTY_CLASS_ARRAY, ArrayUtils.EMPTY_OBJECT_ARRAY, expectedType);
    }
/**
* Gets object instance from groovy resource.
*
* @param <T> the type parameter
* @param resource the resource
* @param constructorArgs the constructor args
* @param args the args
* @param expectedType the expected type
* @return the object instance from groovy resource
*/
public static <T> T getObjectInstanceFromGroovyResource(final Resource resource,
final Class[] constructorArgs,
final Object[] args,
final Class<T> expectedType) {
try {
if (resource == null) {
LOGGER.debug("No groovy script is defined");
return null;
}
val script = IOUtils.toString(resource.getInputStream(), StandardCharsets.UTF_8);
val clazz = AccessController.doPrivileged((PrivilegedAction<Class<T>>) () -> {
val classLoader = new GroovyClassLoader(ScriptingUtils.class.getClassLoader(),
new CompilerConfiguration(), true);
return classLoader.parseClass(script);
});
LOGGER.trace("Preparing constructor arguments [{}] for resource [{}]", args, resource);
val ctor = clazz.getDeclaredConstructor(constructorArgs);
val result = ctor.newInstance(args);
if (!expectedType.isAssignableFrom(result.getClass())) {
throw new ClassCastException("Result [" + result
+ " is of type " + result.getClass()
+ " when we were expecting " + expectedType);
}
return result;
} catch (final Exception e) {
LOGGER.error(e.getMessage(), e);
}
return null;
}
/**
* Gets script engine name.
*
* @param scriptFile the script file
* @return the script engine name
*/
public static String getScriptEngineName(final String scriptFile) {
if (scriptFile.endsWith(".py")) {
return "python";
}
if (scriptFile.endsWith(".js")) {
return "js";
}
if (scriptFile.endsWith(".groovy")) {
return "groovy";
}
return null;
}
}
| |
/*
* A translator to translate the parse result into C++.
* Author : Lixue Zhang
*/
package org.morphling.Cpp;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * Entry point: reads the parse-result file named by args[0] and writes the
 * translated C++ source next to it with a ".cpp" extension.
 */
public class Cpp_gen {
    public static void main(String[] args) {
        if (args.length < 1) {
            System.err.println("Usage: Cpp_gen <source-file>");
            return;
        }
        ReadAndPreprocessing r = new ReadAndPreprocessing(args[0]);
        // Replace the last extension (if any) with ".cpp". The previous
        // split("\\.")[0] approach broke on names containing extra dots
        // (e.g. "./testcase" produced just ".cpp").
        int dot = args[0].lastIndexOf('.');
        String filename = (dot > 0 ? args[0].substring(0, dot) : args[0]) + ".cpp";
        new Parser(r.commands, filename);
    }
}
/**
 * Reads an entire source file into the {@link #commands} string, normalizing
 * every line to end with a single {@code \n}.
 */
class ReadAndPreprocessing
{
    // The file contents; one trailing newline per input line. Empty on read failure.
    public String commands;

    public ReadAndPreprocessing(String fileName) {
        // StringBuilder avoids the O(n^2) cost of repeated String concatenation,
        // and try-with-resources closes the reader exactly once (the original
        // closed it twice and swallowed close() failures silently).
        StringBuilder sb = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new FileReader(new File(fileName)))) {
            String line;
            while ((line = reader.readLine()) != null) {
                sb.append(line).append('\n');
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Assign after the loop; on mid-read failure this keeps whatever was read so far,
        // matching the original's partial-content behavior.
        commands = sb.toString();
    }
}
/**
 * Translates the JSON parse result (an array of statement objects) into a C++
 * source file wrapped in a main() function.
 */
class Parser
{
    String cmg;        // the raw JSON command group being translated
    File file;         // the output .cpp file
    FileWriter fw;     // writer for the generated C++ source

    public Parser(String command_group, String filename)
    {
        cmg = command_group;
        try {
            file = new File(filename);
            fw = new FileWriter(file);
            // Fixed prologue, then the translated statements, then close main().
            fw.write("#include <iostream>\nusing namespace::std;\n\n");
            fw.write("int main(int argc, char *argv[]) {\n");
            translate(cmg, 1);
            fw.write("\treturn 0;\n}\n");
            fw.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Dispatches every statement object in the JSON array to its emitter. */
    private void translate(String obj, int tab) throws JSONException, IOException
    {
        JSONArray job = new JSONArray(obj);
        for(int i=0;i<job.length();++i)
        {
            JSONObject temp = job.getJSONObject(i);
            if(temp.get("sentence").equals("declare"))
                declare(temp, tab);
            else if(temp.get("sentence").equals("declare_init"))
                declare_init(temp, tab);
            else if(temp.get("sentence").equals("read"))
                read(temp, tab);
            else if(temp.get("sentence").equals("write"))
                write(temp, tab);
            else if(temp.get("sentence").equals("if"))
                ifelse(temp, tab);
            else if(temp.get("sentence").equals("while"))
                WHILE(temp, tab);
            else if(temp.get("sentence").equals("expr"))
                expr(temp);
            else if(temp.get("sentence").equals("assign"))
                assign(temp, tab);
            else if(temp.get("sentence").equals("break"))
                Break(temp, tab);
            else if(temp.get("sentence").equals("continue"))
                Continue(temp, tab);
        }
    }

    /** Emits an uninitialized variable declaration: "type name;". */
    private void declare(JSONObject obj, int tab) throws JSONException, IOException
    {
        indention(tab);
        fw.write(obj.get("type") + " " + obj.get("name") + ";\n");
    }

    /** Emits an initialized declaration: "type name = expr;". */
    private void declare_init(JSONObject obj, int tab) throws JSONException, IOException
    {
        indention(tab);
        fw.write(obj.get("type") + " " + obj.get("name") + " = ");
        expr(obj.getJSONObject("expr"));
        fw.write(";\n");
    }

    /** Emits a console read. */
    private void read(JSONObject obj, int tab) throws JSONException, IOException
    {
        indention(tab);
        // Bug fix: input uses the extraction operator ">>"; "cin<<" does not compile.
        fw.write("cin>>"+obj.get("name")+";\n");
    }

    /** Emits a console write followed by endl. */
    private void write(JSONObject obj, int tab) throws IOException
    {
        indention(tab);
        fw.write("cout<<");
        expr(obj.getJSONObject("expr"));
        fw.write("<<endl;\n");
    }

    /** Emits a C++ expression for the given JSON expression tree. */
    private void expr(JSONObject obj) throws JSONException, IOException
    {
        if(!obj.has("operator"))
        {
            // Leaf node: a literal or identifier.
            fw.write(obj.getString("value"));
            return;
        }
        String key = obj.get("operator").toString();
        if(key.equals("cast"))
        {
            // C-style cast: e1 is the target type, e2 is the casted expression.
            fw.write("("+obj.get("e1").toString()+")");
            expr(obj.getJSONObject("e2"));
            return;
        }
        JSONObject e1 = null;
        JSONObject e2 = null;
        if(obj.has("e1"))
            e1 = obj.getJSONObject("e1");
        if(obj.has("e2"))
            e2 = obj.getJSONObject("e2");
        fw.write("(");
        if(key.equals("-") && !obj.has("e2"))
        {
            // Unary minus: "-" with no right operand.
            fw.write("-");
            expr(e1);
            fw.write(")");
            return;
        }
        expr(e1);
        if(key.equals("**"))
            // NOTE(review): '^' is bitwise XOR in C++, not exponentiation —
            // presumably std::pow was intended here; confirm before relying on it.
            fw.write("^");
        else
            fw.write(" "+key+" ");
        expr(e2);
        fw.write(")");
    }

    /** Emits an if / else-if / else chain from the "cases" array. */
    private void ifelse(JSONObject obj, int tab) throws IOException
    {
        indention(tab);
        fw.write("if");
        JSONArray branches = new JSONArray(obj.getJSONArray("cases").toString());
        for(int i=0;i<branches.length();++i)
        {
            // Bug fix: wrap the condition in parentheses. expr() only emits its own
            // parens when the condition contains an operator, so a bare variable
            // previously produced invalid C++ such as "if x{".
            fw.write("(");
            expr(branches.getJSONObject(i).getJSONObject("condition"));
            fw.write(")");
            fw.write("{\n");
            translate(branches.getJSONObject(i).getJSONArray("block").toString(), tab+1);
            indention(tab);
            fw.write("}\n");
            if(i<(branches.length()-1))
            {
                JSONObject temp = branches.getJSONObject(i+1).getJSONObject("condition");
                if(!temp.has("e1"))
                {
                    // A condition without "e1" marks the trailing "else" branch.
                    indention(tab);
                    fw.write("else {\n");
                    translate(branches.getJSONObject(i+1).getJSONArray("block").toString(), tab+1);
                    indention(tab);
                    fw.write("}\n");
                    break;
                }
                else
                {
                    indention(tab);
                    fw.write("else if");
                }
            }
        }
    }

    /** Emits a while loop. */
    private void WHILE(JSONObject obj, int tab) throws IOException
    {
        indention(tab);
        fw.write("while(");
        expr(obj.getJSONObject("expr"));
        fw.write(")");
        fw.write("{\n");
        translate(obj.getJSONArray("block").toString(), tab+1);
        indention(tab);
        fw.write("}\n");
    }

    /** Emits an assignment statement: "name = expr;". */
    private void assign(JSONObject obj, int tab) throws IOException
    {
        indention(tab);
        fw.write(obj.get("name")+" = ");
        expr(obj.getJSONObject("expr"));
        fw.write(";\n");
    }

    /** Emits a break statement. */
    private void Break(JSONObject obj, int tab) throws IOException
    {
        indention(tab);
        fw.write("break;\n");
    }

    /** Emits a continue statement. */
    private void Continue(JSONObject obj, int tab) throws IOException
    {
        indention(tab);
        fw.write("continue;\n");
    }

    /** Writes {@code tab} tab characters of indentation. */
    void indention(int tab) throws IOException
    {
        for(int i=0;i<tab;++i)
            fw.write("\t");
    }
}
| |
package edu.psu.compbio.seqcode.gse.tools.motifs;
/**
* Reads a list of n motif names on STDIN (each line contains a set of one or more names for the same gene).
* Outputs on STDOUT an n x n matrix of motif presence. The value in each entry (row i column j) is the highest
* score of motif i in the promoter of gene j. An entry of zero indicates that the motif or gene couldn't
* be found or that no motif was found.
*
* Input lines should contain tab-separated aliases for the same gene. Output will be tab separated
*
* cat aliases.txt | java MotifOccurrenceMatrix --species "$MM;mm8" --genes refGene --genes ensGene --upstream 10000 --downstream 2000
*/
import java.io.*;
import java.util.*;
import java.sql.SQLException;
import edu.psu.compbio.seqcode.gse.datasets.general.Region;
import edu.psu.compbio.seqcode.gse.datasets.general.StrandedRegion;
import edu.psu.compbio.seqcode.gse.datasets.motifs.*;
import edu.psu.compbio.seqcode.gse.datasets.species.Gene;
import edu.psu.compbio.seqcode.gse.datasets.species.Genome;
import edu.psu.compbio.seqcode.gse.datasets.species.Organism;
import edu.psu.compbio.seqcode.gse.ewok.verbs.RefGeneGenerator;
import edu.psu.compbio.seqcode.gse.tools.motifs.WeightMatrixScanner;
import edu.psu.compbio.seqcode.gse.tools.utils.Args;
import edu.psu.compbio.seqcode.gse.utils.NotFoundException;
/**
 * Builds an n x n matrix of best motif-hit scores: entry (i, j) is the highest
 * score of motif i scanned over the promoter region of gene j, or 0 when the
 * motif, gene or promoter could not be resolved.
 */
public class MotifOccurrenceMatrix {
    // One entry per input line; each entry holds the aliases for a single gene.
    private List<List<String>> genes;
    // matrices.get(i) is the WeightMatrix resolved for genes.get(i), or null if none found.
    private List<WeightMatrix> matrices;
    // bestScores[i][j] = best score of motif i in the promoter of gene j (0 when absent).
    private double[][] bestScores;
    // Promoter window, in bp, around each gene's 5' end.
    private int upstream, downstream;
    private List<RefGeneGenerator> geneGenerators;
    private Genome genome;
    private WeightMatrixLoader loader;

    /** Pipeline: parse args, read aliases from STDIN, resolve matrices, score, print. */
    public static void main(String args[]) throws Exception {
        MotifOccurrenceMatrix mom = new MotifOccurrenceMatrix();
        mom.parseArgs(args);
        mom.readGenes();
        mom.getMatrices();
        mom.computeMatrix();
        mom.printMatrix();
    }
    public MotifOccurrenceMatrix() {
        // NOTE(review): the loader is never closed; confirm whether WeightMatrixLoader
        // holds database resources that should be released when finished.
        loader = new WeightMatrixLoader();
    }
    /** Reads the promoter window and gene sources from the command line. */
    public void parseArgs(String args[]) throws IOException, NotFoundException {
        geneGenerators = Args.parseGenes(args);
        // Defaults: 10kb upstream, 2kb downstream of the transcription start.
        upstream = Args.parseInteger(args,"upstream",10000);
        downstream = Args.parseInteger(args,"downstream",2000);
        genome = Args.parseGenome(args).getLast();
    }
    /**
     * Reads the list of genes from STDIN. Each line is one element of genes
     * and contains the tab-separated gene names.
     */
    public void readGenes() throws IOException {
        genes = new ArrayList<List<String>>();
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        String line = null;
        while ((line = reader.readLine()) != null) {
            ArrayList<String> aliases = new ArrayList<String>();
            String[] split = line.split("\\t");
            for (int i = 0; i < split.length; i++) {
                aliases.add(split[i]);
            }
            genes.add(aliases);
        }
        reader.close();
        // Square matrix: each input line contributes both a motif (row) and a gene (column).
        bestScores = new double[genes.size()][genes.size()];
    }
    /**
     * Finds a WeightMatrix for each gene by searching through the aliases
     * in order.
     *
     * It might eventually make sense to store a set of matrices and
     * then score them all and take the highest score for each target gene.
     */
    public void getMatrices() {
        // Preferred matrix sources, tried in order; null matches any type.
        List<String> types = new ArrayList<String>();
        types.add("TRANSFAC");
        types.add("MEME");
        types.add("TAMO");
        types.add(null);
        matrices = new ArrayList<WeightMatrix>();
        for (int i = 0; i < genes.size(); i++) {
            List<String> aliases = genes.get(i);
            WeightMatrix matrix = null;
            // first try the aliases listed in the input
            for (String a : aliases) {
                for (String t : types) {
                    Collection<WeightMatrix> collection = loader.query(a, null, t);
                    for (WeightMatrix m : collection) {
                        if (m != null) {
                            matrix = m;
                            break;
                        }
                    }
                    if (matrix != null) {break;}
                }
                if (matrix != null) {break;}
            }
            if (matrix == null) {
                // now see what aliases we have in our database for the gene and try all of those
                HashSet<String> morealiases = new HashSet<String>();
                for (String a : aliases) {
                    for (RefGeneGenerator g : geneGenerators) {
                        Iterator<Gene> iter = g.byName(a);
                        while (iter.hasNext()) {
                            Gene gene = iter.next();
                            morealiases.addAll(gene.getAliases());
                        }
                    }
                }
                // NOTE(review): this lookup loop duplicates the one above; a shared
                // helper taking the alias collection would remove the duplication.
                for (String a : morealiases) {
                    for (String t : types) {
                        Collection<WeightMatrix> collection = loader.query(a, null, t);
                        for (WeightMatrix m : collection) {
                            if (m != null) {
                                matrix = m;
                                break;
                            }
                        }
                        if (matrix != null) {break;}
                    }
                    if (matrix != null) {break;}
                }
            }
            if (matrix == null) {
                System.err.println("No Matrix for " + aliases);
            }
            // matrices stays index-aligned with genes; missing entries are null.
            matrices.add(matrix);
        }
    }
    /**
     * Returns the genomic region to search for a given set of aliases.
     * Looks through the aliases in order, trying all elements of geneGenerators
     * on the first before going on to the second.
     *
     * Returns null if no promoter region can be found.
     */
    public Region getPromoterForGene(List<String> aliases) {
        for (String a : aliases) {
            for (RefGeneGenerator g : geneGenerators) {
                Iterator<Gene> iter = g.byName(a);
                if (iter.hasNext()) {
                    Gene gene = iter.next();
                    // Zero-length region anchored at the 5' end, then expanded
                    // by the configured upstream/downstream window.
                    StrandedRegion sr = new StrandedRegion(gene.getGenome(),
                                                           gene.getChrom(),
                                                           gene.getFivePrime(),
                                                           gene.getFivePrime(),
                                                           gene.getStrand());
                    sr = sr.expand(upstream, downstream);
                    System.err.println(a + " -> " + sr);
                    return sr;
                }
            }
        }
        System.err.println("No promoter for " + aliases);
        return null;
    }
    /** Scans every resolved motif over every resolved promoter and records the best hit. */
    public void computeMatrix() throws SQLException {
        WMHitScoreComparator comparator = new WMHitScoreComparator();
        // Resolve all promoters once up front so the inner loop only scans.
        List<Region> promoters = new ArrayList<Region>();
        for (int j = 0; j < genes.size(); j++) {
            Region promoter = getPromoterForGene(genes.get(j));
            promoters.add(promoter);
        }
        for (int i = 0; i < genes.size(); i++) {
            WeightMatrix m = matrices.get(i);
            if (m == null) {
                continue;
            }
            for (int j = 0; j < genes.size(); j++) {
                Region promoter = promoters.get(j);
                if (promoter == null) {
                    continue;
                }
                Genome.ChromosomeInfo chrinfo = genome.getChrom(promoter.getChrom());
                String seq = genome.getChromosomeSequence(chrinfo, promoter.getStart(), promoter.getEnd());
                // Only keep hits scoring at least 75% of the matrix's maximum score.
                List<WMHit> hits = WeightMatrixScanner.scanSequence(m,
                                                                    (float)(m.getMaxScore() * .75),
                                                                    seq.toCharArray());
                // Sort descending by score; the first element is the best hit.
                Collections.sort(hits,comparator);
                if (hits.size() > 0) {
                    bestScores[i][j] = hits.get(0).score;
                } else {
                    bestScores[i][j] = 0;
                }
            }
        }
    }
    /** Writes the score matrix to STDOUT, tab-separated, one row per motif. */
    public void printMatrix () {
        for (int i = 0; i < bestScores.length; i++) {
            for (int j = 0; j < bestScores[i].length - 1; j++) {
                System.out.print(bestScores[i][j] + "\t");
            }
            System.out.print(bestScores[i][bestScores[i].length - 1] + "\n");
        }
    }
}
/** Orders weight-matrix hits by descending score. */
class WMHitScoreComparator implements Comparator<WMHit> {
    public int compare(WMHit a, WMHit b) {
        // Double.compare avoids the defects of the previous
        // (int)(100 * (b.score - a.score)) formulation: score differences
        // smaller than 0.01 truncated to "equal", and very large differences
        // could overflow int, violating the Comparator contract.
        return Double.compare(b.score, a.score);
    }
}
| |
/**
* Copyright 2005 Sakai Foundation Licensed under the
* Educational Community License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.sakaiproject.evaluation.utils;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashSet;
import java.util.List;
import junit.framework.TestCase;
import org.sakaiproject.evaluation.constant.EvalConstants;
import org.sakaiproject.evaluation.logic.model.EvalGroup;
import org.sakaiproject.evaluation.model.EvalAnswer;
import org.sakaiproject.evaluation.model.EvalAssignGroup;
import org.sakaiproject.evaluation.model.EvalEvaluation;
import org.sakaiproject.evaluation.test.EvalTestDataLoad;
/**
* Testing out the {@link EvalUtils} utilities
*
* @author Aaron Zeckoski (aaron@caret.cam.ac.uk)
*/
public class EvalUtilsTest extends TestCase {
/**
* Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#getEvaluationState(org.sakaiproject.evaluation.model.EvalEvaluation, boolean)}.
*/
public void testGetEvaluationState() {
String state;
EvalTestDataLoad etdl = new EvalTestDataLoad(null);
// positive
etdl.evaluationNew.setId( new Long(1) );
state = EvalUtils.getEvaluationState(etdl.evaluationNew, false);
assertEquals(EvalConstants.EVALUATION_STATE_INQUEUE, state);
// test special has no effect on a date determined state
state = EvalUtils.getEvaluationState(etdl.evaluationNew, true);
assertEquals(EvalConstants.EVALUATION_STATE_INQUEUE, state);
etdl.evaluationActive.setId( new Long(2) );
state = EvalUtils.getEvaluationState(etdl.evaluationActive, false);
assertEquals(EvalConstants.EVALUATION_STATE_ACTIVE, state);
etdl.evaluationActiveUntaken.setId( new Long(3) );
state = EvalUtils.getEvaluationState(etdl.evaluationActiveUntaken, false);
assertEquals(EvalConstants.EVALUATION_STATE_ACTIVE, state);
etdl.evaluationClosed.setId( new Long(4) );
state = EvalUtils.getEvaluationState(etdl.evaluationClosed, false);
assertEquals(EvalConstants.EVALUATION_STATE_CLOSED, state);
etdl.evaluationViewable.setId( new Long(5) );
state = EvalUtils.getEvaluationState(etdl.evaluationViewable, false);
assertEquals(EvalConstants.EVALUATION_STATE_VIEWABLE, state);
// negative (null start date) and saved (should not even be possible)
EvalEvaluation invalidEval = new EvalEvaluation(EvalConstants.EVALUATION_TYPE_EVALUATION,
"aaronz", "testing null dates", null, "XXXXXXXX", EvalConstants.SHARING_PRIVATE, 0, null);
invalidEval.setId( new Long(6) );
state = EvalUtils.getEvaluationState( invalidEval, false );
assertEquals(EvalConstants.EVALUATION_STATE_UNKNOWN, state);
// test the cases where a lot of the dates are unset (testing various nulls)
EvalEvaluation datesEval = new EvalEvaluation(EvalConstants.EVALUATION_TYPE_EVALUATION,
"aaronz", "testing null dates", etdl.tomorrow, null, EvalConstants.SHARING_PRIVATE, 0, null);
// new evals are always partial state
state = EvalUtils.getEvaluationState(datesEval, false);
assertEquals(EvalConstants.EVALUATION_STATE_PARTIAL, state);
// test ignoring the special states
state = EvalUtils.getEvaluationState(datesEval, true);
assertEquals(EvalConstants.EVALUATION_STATE_INQUEUE, state);
// set the id so this eval does not look new
datesEval.setId( new Long(99999) );
// only the start date is set and in the future
state = EvalUtils.getEvaluationState(datesEval, false);
assertEquals(EvalConstants.EVALUATION_STATE_INQUEUE, state);
// only the start date is set and way in the past
datesEval.setStartDate(etdl.fifteenDaysAgo);
state = EvalUtils.getEvaluationState(datesEval, false);
assertEquals(EvalConstants.EVALUATION_STATE_ACTIVE, state);
// only the start date (past) and due date (future) are set
datesEval.setDueDate(etdl.tomorrow);
state = EvalUtils.getEvaluationState(datesEval, false);
assertEquals(EvalConstants.EVALUATION_STATE_ACTIVE, state);
// only the start date (past) and due date (past) are set
datesEval.setDueDate(etdl.yesterday);
state = EvalUtils.getEvaluationState(datesEval, false);
assertEquals(EvalConstants.EVALUATION_STATE_VIEWABLE, state);
// only the start date (past) and due date (past) and stop date (future) are set
datesEval.setDueDate(etdl.fourDaysAgo);
datesEval.setStopDate(etdl.tomorrow);
state = EvalUtils.getEvaluationState(datesEval, false);
assertEquals(EvalConstants.EVALUATION_STATE_GRACEPERIOD, state);
// only the start date (past) and due date (past) and stop date (past) are set
datesEval.setStopDate(etdl.threeDaysAgo);
state = EvalUtils.getEvaluationState(datesEval, false);
assertEquals(EvalConstants.EVALUATION_STATE_VIEWABLE, state);
// all dates set (view date in future)
datesEval.setViewDate(etdl.tomorrow);
state = EvalUtils.getEvaluationState(datesEval, false);
assertEquals(EvalConstants.EVALUATION_STATE_CLOSED, state);
// all dates set (view date in past)
datesEval.setViewDate(etdl.yesterday);
state = EvalUtils.getEvaluationState(datesEval, false);
assertEquals(EvalConstants.EVALUATION_STATE_VIEWABLE, state);
// all dates EXCEPT stop date set (view date in future)
datesEval.setStopDate(null);
datesEval.setViewDate(etdl.tomorrow);
state = EvalUtils.getEvaluationState(datesEval, false);
assertEquals(EvalConstants.EVALUATION_STATE_CLOSED, state);
// all dates EXCEPT stop date set (view date in past)
datesEval.setStopDate(null);
datesEval.setViewDate(etdl.yesterday);
state = EvalUtils.getEvaluationState(datesEval, false);
assertEquals(EvalConstants.EVALUATION_STATE_VIEWABLE, state);
// no exceptions thrown
}
/**
* Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#checkStateAfter(java.lang.String, java.lang.String, boolean)}.
*/
public void testCheckStateAfter() {
// check that same works
assertTrue( EvalUtils.checkStateAfter(EvalConstants.EVALUATION_STATE_INQUEUE, EvalConstants.EVALUATION_STATE_INQUEUE, true) );
assertTrue( EvalUtils.checkStateAfter(EvalConstants.EVALUATION_STATE_INQUEUE, EvalConstants.EVALUATION_STATE_PARTIAL, true) );
assertTrue( EvalUtils.checkStateAfter(EvalConstants.EVALUATION_STATE_INQUEUE, EvalConstants.EVALUATION_STATE_PARTIAL, false) );
assertTrue( EvalUtils.checkStateAfter(EvalConstants.EVALUATION_STATE_CLOSED, EvalConstants.EVALUATION_STATE_ACTIVE, false) );
assertTrue( EvalUtils.checkStateAfter(EvalConstants.EVALUATION_STATE_VIEWABLE, EvalConstants.EVALUATION_STATE_CLOSED, false) );
// now check the false cases
assertFalse( EvalUtils.checkStateAfter(EvalConstants.EVALUATION_STATE_ACTIVE, EvalConstants.EVALUATION_STATE_ACTIVE, false) );
assertFalse( EvalUtils.checkStateAfter(EvalConstants.EVALUATION_STATE_INQUEUE, EvalConstants.EVALUATION_STATE_ACTIVE, false) );
assertFalse( EvalUtils.checkStateAfter(EvalConstants.EVALUATION_STATE_GRACEPERIOD, EvalConstants.EVALUATION_STATE_CLOSED, false) );
assertFalse( EvalUtils.checkStateAfter(EvalConstants.EVALUATION_STATE_ACTIVE, EvalConstants.EVALUATION_STATE_VIEWABLE, false) );
}
/**
* Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#checkStateBefore(java.lang.String, java.lang.String, boolean)}.
*/
public void testCheckStateBefore() {
// check that same works
assertTrue( EvalUtils.checkStateBefore(EvalConstants.EVALUATION_STATE_ACTIVE, EvalConstants.EVALUATION_STATE_ACTIVE, true) );
assertTrue( EvalUtils.checkStateBefore(EvalConstants.EVALUATION_STATE_INQUEUE, EvalConstants.EVALUATION_STATE_ACTIVE, true) );
assertTrue( EvalUtils.checkStateBefore(EvalConstants.EVALUATION_STATE_INQUEUE, EvalConstants.EVALUATION_STATE_ACTIVE, false) );
assertTrue( EvalUtils.checkStateBefore(EvalConstants.EVALUATION_STATE_GRACEPERIOD, EvalConstants.EVALUATION_STATE_CLOSED, false) );
assertTrue( EvalUtils.checkStateBefore(EvalConstants.EVALUATION_STATE_ACTIVE, EvalConstants.EVALUATION_STATE_VIEWABLE, false) );
// now check the false cases
assertFalse( EvalUtils.checkStateBefore(EvalConstants.EVALUATION_STATE_INQUEUE, EvalConstants.EVALUATION_STATE_INQUEUE, false) );
assertFalse( EvalUtils.checkStateBefore(EvalConstants.EVALUATION_STATE_INQUEUE, EvalConstants.EVALUATION_STATE_PARTIAL, false) );
assertFalse( EvalUtils.checkStateBefore(EvalConstants.EVALUATION_STATE_CLOSED, EvalConstants.EVALUATION_STATE_ACTIVE, false) );
assertFalse( EvalUtils.checkStateBefore(EvalConstants.EVALUATION_STATE_VIEWABLE, EvalConstants.EVALUATION_STATE_CLOSED, false) );
}
/**
* Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#validateSharingConstant(java.lang.String)}.
*/
public void testCheckSharingConstant() {
// positive
assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_OWNER) );
assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_PRIVATE) );
assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_PUBLIC) );
assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_SHARED) );
assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_VISIBLE) );
// negative
// exception
try {
EvalUtils.validateSharingConstant("INVALID");
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e);
}
try {
EvalUtils.validateSharingConstant("");
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e);
}
try {
EvalUtils.validateSharingConstant(null);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e);
}
}
public void testCheckIncludeConstant() {
// positive
assertTrue( EvalUtils.validateEmailIncludeConstant(EvalConstants.EVAL_INCLUDE_ALL) );
assertTrue( EvalUtils.validateEmailIncludeConstant(EvalConstants.EVAL_INCLUDE_ALL) );
assertTrue( EvalUtils.validateEmailIncludeConstant(EvalConstants.EVAL_INCLUDE_ALL) );
// exception
try {
EvalUtils.validateEmailIncludeConstant("INVALID");
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e);
}
try {
EvalUtils.validateEmailIncludeConstant("");
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e);
}
try {
EvalUtils.validateEmailIncludeConstant(null);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e);
}
}
    /**
     * Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#updateDueStopDates(org.sakaiproject.evaluation.model.EvalEvaluation, int)}.
     */
    public void testUpdateDueStopDates() {
        Date dueDate;
        Date now = new Date();
        // NOTE(review): nowTime comes from a second "new Date()" call and can differ
        // from "now" by a few milliseconds; presumably harmless here — confirm.
        long nowTime = new Date().getTime();
        // one hour in milliseconds
        long hour = 1000 * 60 * 60;
        Date nowPlus2 = new Date(nowTime + hour * 2);
        Date nowPlus3 = new Date(nowTime + hour * 3);
        // eval starts now, due/stop in 2 hours, viewable in 3 hours
        EvalEvaluation eval = new EvalEvaluation(EvalConstants.EVALUATION_TYPE_EVALUATION, "aaronz", "title",
                now, nowPlus2, nowPlus2, nowPlus3,
                EvalConstants.EVALUATION_STATE_ACTIVE, EvalConstants.SHARING_VISIBLE, 0, null);

        // test that no change happens if the times are within the range
        assertEquals(eval.getDueDate(), nowPlus2);
        assertEquals(eval.getStopDate(), nowPlus2);
        assertEquals(eval.getViewDate(), nowPlus3);
        dueDate = EvalUtils.updateDueStopDates(eval, 1);
        assertEquals(dueDate, nowPlus2);
        assertEquals(eval.getDueDate(), nowPlus2);
        assertEquals(eval.getStopDate(), nowPlus2);
        assertEquals(eval.getViewDate(), nowPlus3);

        // test that no change happens if the times are at the range limit
        assertEquals(eval.getDueDate(), nowPlus2);
        assertEquals(eval.getStopDate(), nowPlus2);
        assertEquals(eval.getViewDate(), nowPlus3);
        dueDate = EvalUtils.updateDueStopDates(eval, 2);
        assertEquals(dueDate, nowPlus2);
        assertEquals(eval.getDueDate(), nowPlus2);
        assertEquals(eval.getStopDate(), nowPlus2);
        assertEquals(eval.getViewDate(), nowPlus3);

        // test that change happens if the times are beyond the limit
        assertEquals(eval.getDueDate(), nowPlus2);
        assertEquals(eval.getStopDate(), nowPlus2);
        assertEquals(eval.getViewDate(), nowPlus3);
        dueDate = EvalUtils.updateDueStopDates(eval, 3);
        assertEquals(dueDate, nowPlus3);
        assertEquals(eval.getDueDate(), nowPlus3);
        assertEquals(eval.getStopDate(), nowPlus3);
        // the view date must be pushed past the new stop date
        assertFalse(eval.getViewDate().equals(nowPlus3));
        assertTrue(eval.getViewDate().after(eval.getStopDate()));

        // test that change happens if the times are way beyond
        assertEquals(eval.getDueDate(), nowPlus3);
        assertEquals(eval.getStopDate(), nowPlus3);
        dueDate = EvalUtils.updateDueStopDates(eval, 24);
        assertEquals(dueDate, new Date(nowTime + hour * 24));
        assertEquals(eval.getDueDate(), new Date(nowTime + hour * 24));
        assertEquals(eval.getStopDate(), new Date(nowTime + hour * 24));
        assertTrue(eval.getViewDate().after(eval.getStopDate()));
    }
/**
* Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#getEndOfDayDate(java.util.Date)}.
*/
public void testGetEndOfDayDate() {
Date endOfDay;
Date testDay;
Calendar cal = new GregorianCalendar();
// test that time moves to the end of the day
cal.set(2000, 10, 29, 10, 01, 10);
testDay = cal.getTime();
endOfDay = EvalUtils.getEndOfDayDate(testDay);
assertNotNull(endOfDay);
assertTrue(testDay.before(endOfDay));
cal.setTime(endOfDay);
assertEquals(23, cal.get(Calendar.HOUR_OF_DAY));
assertEquals(59, cal.get(Calendar.MINUTE));
assertEquals(59, cal.get(Calendar.SECOND));
cal.clear();
// test that if it is already the end of the day it is not changed
cal.set(2000, 10, 29, 23, 59, 59);
testDay = cal.getTime();
endOfDay = EvalUtils.getEndOfDayDate(testDay);
assertNotNull(endOfDay);
assertEquals(endOfDay, testDay);
}
/**
* Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#getHoursDifference(java.util.Date, java.util.Date)}.
*/
public void testGetHoursDifference() {
Date startTime = new Date();
Date endTime = new Date();
int difference;
// test same dates
difference = EvalUtils.getHoursDifference(startTime, endTime);
assertEquals(0, difference);
endTime = new Date( startTime.getTime() + (1000 * 60 * 60 * 5) );
difference = EvalUtils.getHoursDifference(startTime, endTime);
assertEquals(5, difference);
difference = EvalUtils.getHoursDifference(endTime, startTime);
assertEquals(-5, difference);
endTime = new Date( startTime.getTime() + (1000l * 60l * 60l * 50l) );
difference = EvalUtils.getHoursDifference(startTime, endTime);
assertEquals(50, difference);
endTime = new Date( startTime.getTime() + (1000l * 60l * 60l * 500l) );
difference = EvalUtils.getHoursDifference(startTime, endTime);
assertEquals(500, difference);
endTime = new Date( startTime.getTime() + (1000l * 60l * 60l * 5000l) );
difference = EvalUtils.getHoursDifference(startTime, endTime);
assertEquals(5000, difference);
endTime = new Date( startTime.getTime() + (1000l * 60l * 60l * 50000l) );
difference = EvalUtils.getHoursDifference(startTime, endTime);
assertEquals(50000, difference);
// check that it rounds correctly
endTime = new Date( startTime.getTime() + (1000 * 60 * 60 * 5) + (1000 * 60 * 30) );
difference = EvalUtils.getHoursDifference(startTime, endTime);
assertEquals(5, difference);
}
    /**
     * Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#getGroupsInCommon(java.util.List, java.util.List)}.
     */
    public void testGetGroupsInCommon() {
        EvalGroup[] groups;
        List<EvalGroup> evalGroups;
        List<EvalAssignGroup> assignGroups;
        // preloaded fixtures; assumes assign1/assign2 reference SITE1 and assign4 references SITE2 - TODO confirm
        EvalTestDataLoad etdl = new EvalTestDataLoad(null);
        // test all empty stuff
        evalGroups = new ArrayList<>();
        assignGroups = new ArrayList<>();
        groups = EvalUtils.getGroupsInCommon(evalGroups, assignGroups);
        assertNotNull(groups);
        assertEquals(0, groups.length);
        // test all unique (no overlap between the two lists yields an empty result)
        evalGroups = new ArrayList<>();
        evalGroups.add( new EvalGroup("az", "AZ group", EvalConstants.GROUP_TYPE_PROVIDED) );
        assignGroups = new ArrayList<>();
        assignGroups.add(etdl.assign1);
        assignGroups.add(etdl.assign4);
        groups = EvalUtils.getGroupsInCommon(evalGroups, assignGroups);
        assertNotNull(groups);
        assertEquals(0, groups.length);
        // test all the same (one shared group reference yields one result)
        evalGroups = new ArrayList<>();
        evalGroups.add( new EvalGroup(EvalTestDataLoad.SITE1_REF, "AZ group", EvalConstants.GROUP_TYPE_PROVIDED) );
        assignGroups = new ArrayList<>();
        assignGroups.add(etdl.assign1);
        assignGroups.add(etdl.assign2);
        groups = EvalUtils.getGroupsInCommon(evalGroups, assignGroups);
        assertNotNull(groups);
        assertEquals(1, groups.length);
        // test 2 groups of 2 the same
        evalGroups = new ArrayList<>();
        evalGroups.add( new EvalGroup(EvalTestDataLoad.SITE1_REF, "AZ group", EvalConstants.GROUP_TYPE_PROVIDED) );
        evalGroups.add( new EvalGroup(EvalTestDataLoad.SITE2_REF, "AZ group", EvalConstants.GROUP_TYPE_PROVIDED) );
        assignGroups = new ArrayList<>();
        assignGroups.add(etdl.assign1);
        assignGroups.add(etdl.assign4);
        groups = EvalUtils.getGroupsInCommon(evalGroups, assignGroups);
        assertNotNull(groups);
        assertEquals(2, groups.length);
    }
/**
* Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#makeUniqueIdentifier(int)}.
*/
public void testMakeUniqueIdentifier() {
String id;
HashSet<String> uniqueIds = new HashSet<>();
id = EvalUtils.makeUniqueIdentifier(5);
assertNotNull(id);
assertTrue(5 >= id.length());
id = EvalUtils.makeUniqueIdentifier(10);
assertNotNull(id);
assertTrue(10 >= id.length());
id = EvalUtils.makeUniqueIdentifier(18);
assertNotNull(id);
assertTrue(18 >= id.length());
for (int i = 0; i < 10000; i++) {
uniqueIds.add(EvalUtils.makeUniqueIdentifier(10));
}
assertEquals(10000, uniqueIds.size());
}
    /**
     * Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#getAnswersMapByTempItemAndAssociated(org.sakaiproject.evaluation.model.EvalResponse)}.
     */
    public void testGetAnswersMapByTempItemAndAssociated() {
        // Intentionally empty: exercising this method requires answers loaded on the response,
        // which depends on hibernate semantics that are not available in this unit test.
        //Map<String, EvalAnswer> answersMap = null;
        //EvalTestDataLoad etdl = new EvalTestDataLoad();
        //answersMap = EvalUtils.getAnswersMapByTempItemAndAssociated(etdl.response1);
        // TODO - cannot test this right now as it depends on hibernate semantics
        //fail("Not yet implemented");
    }
/**
* Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#encodeMultipleAnswers(Integer[])}.
*/
public void testEncodeMultipleAnswers() {
String encoded;
String S = EvalUtils.SEPARATOR;
// positive
encoded = EvalUtils.encodeMultipleAnswers( new Integer[] {0, 5, 2} );
assertNotNull(encoded);
assertEquals(S+"0"+S+"2"+S+"5"+S, encoded);
encoded = EvalUtils.encodeMultipleAnswers( new Integer[] {5, 4, 3, 2, 1} );
assertNotNull(encoded);
assertEquals(S+"1"+S+"2"+S+"3"+S+"4"+S+"5"+S, encoded);
// negative
encoded = EvalUtils.encodeMultipleAnswers( new Integer[] {} );
assertNull(encoded);
encoded = EvalUtils.encodeMultipleAnswers( null );
assertNull(encoded);
// does not throw any exceptions
}
/**
* Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#decodeMultipleAnswers(java.lang.String)}.
*/
public void testDecodeMultipleAnswers() {
Integer[] decoded;
String S = EvalUtils.SEPARATOR;
// positive
decoded = EvalUtils.decodeMultipleAnswers(S+"1"+S+"4"+S+"7"+S);
assertNotNull(decoded);
assertEquals(1, decoded[0].intValue());
assertEquals(4, decoded[1].intValue());
assertEquals(7, decoded[2].intValue());
decoded = EvalUtils.decodeMultipleAnswers(S+"3"+S+"5"+S+"1"+S+"9"+S+"7"+S);
assertNotNull(decoded);
assertEquals(1, decoded[0].intValue());
assertEquals(3, decoded[1].intValue());
assertEquals(5, decoded[2].intValue());
assertEquals(7, decoded[3].intValue());
assertEquals(9, decoded[4].intValue());
// do a really simple one
decoded = EvalUtils.decodeMultipleAnswers(S+"9"+S);
assertNotNull(decoded);
assertEquals(9, decoded[0].intValue());
// negative
decoded = EvalUtils.decodeMultipleAnswers("");
assertNotNull(decoded);
assertEquals(0, decoded.length);
decoded = EvalUtils.decodeMultipleAnswers(null);
assertNotNull(decoded);
assertEquals(0, decoded.length);
decoded = EvalUtils.decodeMultipleAnswers(S+S);
assertNotNull(decoded);
assertEquals(0, decoded.length);
}
    /**
     * Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#encodeAnswerNA(org.sakaiproject.evaluation.model.EvalAnswer)}.
     */
    public void testEncodeAnswerNA() {
        // one normal applicable answer and one flagged not-applicable
        EvalAnswer applicableAnswer = new EvalAnswer(null, null, null, null, null, "text", 3, null, null);
        EvalAnswer naAnswer = new EvalAnswer(null, null, null, null, null, "text", EvalConstants.NA_VALUE, null, null);
        naAnswer.setMultiAnswerCode("multiCode");
        applicableAnswer.NA = false;
        naAnswer.NA = true;
        // encoding a non-NA answer is a no-op and reports false
        assertFalse( EvalUtils.encodeAnswerNA(applicableAnswer) );
        assertEquals(false, applicableAnswer.NA);
        assertEquals(new Integer(3), applicableAnswer.getNumeric());
        assertEquals("text", applicableAnswer.getText());
        // encoding an NA answer reports true and clears the text and multi-answer fields
        assertTrue( EvalUtils.encodeAnswerNA(naAnswer) );
        assertEquals(true, naAnswer.NA);
        assertEquals(EvalConstants.NA_VALUE, naAnswer.getNumeric());
        assertNull(naAnswer.getText());
        assertNull(naAnswer.getMultiAnswerCode());
        // null input is rejected
        try {
            EvalUtils.encodeAnswerNA(null);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
        }
    }
    /**
     * Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#decodeAnswerNA(org.sakaiproject.evaluation.model.EvalAnswer)}.
     */
    public void testDecodeAnswerNA() {
        // one normal applicable answer and one whose numeric carries the NA marker value
        EvalAnswer applicableAnswer = new EvalAnswer(null, null, null, null, null, "text", 3, null, null);
        EvalAnswer naAnswer = new EvalAnswer(null, null, null, null, null, "text", EvalConstants.NA_VALUE, null, null);
        naAnswer.setMultiAnswerCode("multiCode");
        // decoding a non-NA answer reports false and leaves everything intact
        assertFalse( EvalUtils.decodeAnswerNA(applicableAnswer) );
        assertEquals(false, applicableAnswer.NA);
        assertEquals(new Integer(3), applicableAnswer.getNumeric());
        assertEquals("text", applicableAnswer.getText());
        // decoding an NA-marked answer reports true and sets the NA flag, preserving text/multi fields
        assertTrue( EvalUtils.decodeAnswerNA(naAnswer) );
        assertEquals(true, naAnswer.NA);
        assertEquals(EvalConstants.NA_VALUE, naAnswer.getNumeric());
        assertEquals("text", naAnswer.getText());
        assertEquals("multiCode", naAnswer.getMultiAnswerCode());
        // null input is rejected
        try {
            EvalUtils.decodeAnswerNA(null);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
        }
    }
/**
* Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#makeResponseRateStringFromCounts(int, int)}.
*/
public void testMakeResponseRateStringFromCounts() {
assertNotNull( EvalUtils.makeResponseRateStringFromCounts(0, 10) );
assertNotNull( EvalUtils.makeResponseRateStringFromCounts(10, 0) );
assertNotNull( EvalUtils.makeResponseRateStringFromCounts(0, 0) );
assertNotNull( EvalUtils.makeResponseRateStringFromCounts(20, 20) );
}
public void testMakeMaxLengthString() {
String result;
String test = "this is a string";
result = EvalUtils.makeMaxLengthString(test, 100);
assertNotNull(result);
assertEquals(test, result);
result = EvalUtils.makeMaxLengthString(test, 10);
assertNotNull(result);
assertEquals("this is ...", result);
// test this leaves the string alone
result = EvalUtils.makeMaxLengthString(test, 0);
assertNotNull(result);
assertEquals(test, result);
// check null is ok
result = EvalUtils.makeMaxLengthString(null, 100);
assertNull(result);
}
public void testIsValidEmail() {
assertTrue( EvalUtils.isValidEmail("aaronz@vt.edu") );
assertTrue( EvalUtils.isValidEmail("aaron@caret.cam.ac.uk") );
assertTrue( EvalUtils.isValidEmail("Aaron.Zeckoski@vt.edu") );
assertTrue( EvalUtils.isValidEmail("aaron@long.and.really.log.domain.info") );
assertFalse( EvalUtils.isValidEmail(null) );
assertFalse( EvalUtils.isValidEmail("") );
assertFalse( EvalUtils.isValidEmail("not an email") );
assertFalse( EvalUtils.isValidEmail("not@email") );
}
    /**
     * Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#cleanupHtmlPtags(java.lang.String)}.
     * NOTE(review): the literals below contain whitespace inside empty p-tags which may originally
     * be non-breaking spaces - confirm against the source repository before editing them.
     */
    public void testCleanupHtmlPtags() {
        String original;
        String cleanup;
        // check the trim ends cases (trailing empty p-tags are stripped)
        original = "test with one return\n <p> </p> ";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals("test with one return", cleanup);
        original = "test with two returns\n <p> </p>\n <p> </p>";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals("test with two returns", cleanup);
        original = "test with multiple returns\n <p> </p> <p> </p>\n <p> </p><p> </p>\n";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals("test with multiple returns", cleanup);
        // test the trim surrounding cases (a single wrapping p-tag pair is removed)
        original = "<p>test trimming extra surrounding</p>";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals("test trimming extra surrounding", cleanup);
        original = "<p> test trimming extra surrounding</p> ";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals("test trimming extra surrounding", cleanup);
        // multiple non-empty p-tag pairs must be left alone
        original = "<p>line one</p>\n<p>line two</p>";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals(original, cleanup);
        original = "<p> test not trimming</p> <p>extra surrounding </p>";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals(original, cleanup);
        // test both at once
        original = "<p>test with two returns</p> <p> </p> <p> </p>";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals("test with two returns", cleanup);
        // check that strings that should not change do not
        original = "no p tags to cleanup";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals(original, cleanup);
        original = "no p tags to cleanup, <p>they are all in the middle</p> so ok";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals(original, cleanup);
        original = "<p> at the beginning</p> but not at the end";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals(original, cleanup);
        original = "nothing at the beginning, <p>at the end</p>";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals(original, cleanup);
        // check null and empty are ok
        original = null;
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNull(cleanup);
        original = "";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals(original, cleanup);
        original = " ";
        cleanup = EvalUtils.cleanupHtmlPtags(original);
        assertNotNull(cleanup);
        assertEquals(original, cleanup);
    }
}
| |
/*
* Copyright 2015 Groupon.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arpnetworking.metrics.jvm.collectors;
import com.arpnetworking.metrics.Metrics;
import com.arpnetworking.metrics.jvm.ManagementFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.lang.management.GarbageCollectorMXBean;
import java.util.Arrays;
import java.util.Collections;
/**
 * Tests the {@link GarbageCollectionMetricsCollector} class.
 *
 * @author Deepika Misra (deepika at groupon dot com)
 */
public final class GarbageCollectionMetricsCollectorTest {
    @Before
    public void setUp() {
        _metrics = Mockito.mock(Metrics.class);
        _managementFactory = Mockito.mock(ManagementFactory.class);
        _gcBean1 = Mockito.mock(GarbageCollectorMXBean.class);
        _gcBean2 = Mockito.mock(GarbageCollectorMXBean.class);
    }
    @After
    public void tearDown() {
        // Drop the mock references so no state can leak between tests.
        _metrics = null;
        _managementFactory = null;
        _gcBean1 = null;
        _gcBean2 = null;
    }
    @Test
    public void testCollectWithNoGcBeans() {
        // With no beans available, nothing should be recorded at all.
        Mockito.doReturn(Collections.emptyList()).when(_managementFactory).getGarbageCollectorMXBeans();
        GarbageCollectionMetricsCollector.newInstance().collect(_metrics, _managementFactory);
        Mockito.verifyNoMoreInteractions(_metrics);
    }
    @Test
    public void testCollectWithSingleGcBean() {
        // A single bean produces count and time gauges under its snake_cased name.
        createMockBean(_gcBean1, "My Bean", 5L, 100L);
        Mockito.doReturn(Collections.singletonList(_gcBean1)).when(_managementFactory).getGarbageCollectorMXBeans();
        GarbageCollectionMetricsCollector.newInstance().collect(_metrics, _managementFactory);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_count", 5L);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_time", 100L);
        // No delta on the very first sample (no previous value to diff against).
        Mockito.verify(_metrics, Mockito.never())
                .incrementCounter(Mockito.eq("jvm/garbage_collector/my_bean/collection_count_delta"), Mockito.anyLong());
    }
    @Test
    public void testCollectWithMultipleGcBeans() {
        // Each bean is reported independently under its own name.
        createMockBean(_gcBean1, "My Bean 1", 5L, 100L);
        createMockBean(_gcBean2, "My Bean 2", 10L, 400L);
        Mockito.doReturn(Arrays.asList(_gcBean1, _gcBean2)).when(_managementFactory).getGarbageCollectorMXBeans();
        GarbageCollectionMetricsCollector.newInstance().collect(_metrics, _managementFactory);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean_1/collection_count", 5L);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean_1/collection_time", 100L);
        Mockito.verify(_metrics, Mockito.never())
                .incrementCounter(Mockito.eq("jvm/garbage_collector/my_bean_1/collection_count_delta"), Mockito.anyLong());
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean_2/collection_count", 10L);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean_2/collection_time", 400L);
        Mockito.verify(_metrics, Mockito.never())
                .incrementCounter(Mockito.eq("jvm/garbage_collector/my_bean_2/collection_count_delta"), Mockito.anyLong());
    }
    @Test(expected = Exception.class)
    public void testCollectWithExceptionOnCollectionCount() {
        // A failing bean must propagate out of collect().
        createMockBean(_gcBean1, "My Bean 1", 5L, 100L);
        createMockBean(_gcBean2, "My Bean 2", 0L, 400L);
        Mockito.doThrow(Exception.class).when(_gcBean2).getCollectionCount();
        Mockito.doReturn(Arrays.asList(_gcBean1, _gcBean2)).when(_managementFactory).getGarbageCollectorMXBeans();
        GarbageCollectionMetricsCollector.newInstance().collect(_metrics, _managementFactory);
    }
    @Test(expected = Exception.class)
    public void testCollectWithExceptionOnCollectionTime() {
        createMockBean(_gcBean1, "My Bean 1", 5L, 100L);
        createMockBean(_gcBean2, "My Bean 2", 10L, 0L);
        Mockito.doThrow(Exception.class).when(_gcBean2).getCollectionTime();
        Mockito.doReturn(Arrays.asList(_gcBean1, _gcBean2)).when(_managementFactory).getGarbageCollectorMXBeans();
        GarbageCollectionMetricsCollector.newInstance().collect(_metrics, _managementFactory);
    }
    @Test
    public void testCollectWithUndefinedValuesForCollectionCount() {
        // -1 means "undefined" for GarbageCollectorMXBean values; no count gauge should be set.
        createMockBean(_gcBean1, "My Bean", -1, 100L);
        Mockito.doReturn(Collections.singletonList(_gcBean1)).when(_managementFactory).getGarbageCollectorMXBeans();
        GarbageCollectionMetricsCollector.newInstance().collect(_metrics, _managementFactory);
        Mockito.verify(_metrics, Mockito.never())
                .setGauge(Mockito.eq("jvm/garbage_collector/my_bean/collection_count"), Mockito.anyLong());
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_time", 100L);
        Mockito.verify(_metrics, Mockito.never())
                .incrementCounter(Mockito.eq("jvm/garbage_collector/my_bean/collection_count_delta"), Mockito.anyLong());
    }
    @Test
    public void testCollectWithUndefinedValuesForCollectionTime() {
        // Undefined collection time: count is still reported, time gauge is skipped.
        createMockBean(_gcBean1, "My Bean", 5L, -1);
        Mockito.doReturn(Collections.singletonList(_gcBean1)).when(_managementFactory).getGarbageCollectorMXBeans();
        GarbageCollectionMetricsCollector.newInstance().collect(_metrics, _managementFactory);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_count", 5L);
        Mockito.verify(_metrics, Mockito.never())
                .setGauge(
                        Mockito.eq("jvm/garbage_collector/my_bean/collection_time"),
                        Mockito.anyLong());
        Mockito.verify(_metrics, Mockito.never())
                .incrementCounter(Mockito.eq("jvm/garbage_collector/my_bean/collection_count_delta"), Mockito.anyLong());
    }
    @Test
    public void testCollectCollectionCountDeltaMultipleCalls() {
        createMockBean(_gcBean1, "My Bean", 5L, 10L);
        Mockito.doReturn(Collections.singletonList(_gcBean1)).when(_managementFactory).getGarbageCollectorMXBeans();
        final GarbageCollectionMetricsCollector collector =
                (GarbageCollectionMetricsCollector) GarbageCollectionMetricsCollector.newInstance();
        collector.collect(_metrics, _managementFactory);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_count", 5L);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_time", 10L);
        // FIX: the delta is reported via incrementCounter (as the other tests verify), not via
        // setGauge; the original verified never-setGauge here, which was vacuously true.
        Mockito.verify(_metrics, Mockito.never())
                .incrementCounter(Mockito.eq("jvm/garbage_collector/my_bean/collection_count_delta"), Mockito.anyLong());
        // Second collection: gauges reflect new values and the delta counter fires with 7 - 5 = 2.
        createMockBean(_gcBean1, "My Bean", 7L, 12L);
        collector.collect(_metrics, _managementFactory);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_count", 7L);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_time", 12L);
        Mockito.verify(_metrics).incrementCounter("jvm/garbage_collector/my_bean/collection_count_delta", 2L);
    }
    @Test
    public void testCollectWithCollectionCountDelta() {
        // Two samples 3 -> 5 produce a delta of 2.
        createMockBean(_gcBean1, "My Bean", 3L, 10L);
        Mockito.doReturn(Collections.singletonList(_gcBean1)).when(_managementFactory).getGarbageCollectorMXBeans();
        final GarbageCollectionMetricsCollector collector =
                (GarbageCollectionMetricsCollector) GarbageCollectionMetricsCollector.newInstance();
        collector.collect(_metrics, _managementFactory);
        createMockBean(_gcBean1, "My Bean", 5L, 10L);
        collector.collect(_metrics, _managementFactory);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_count", 3L);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_count", 5L);
        Mockito.verify(_metrics, Mockito.times(2))
                .setGauge("jvm/garbage_collector/my_bean/collection_time", 10L);
        Mockito.verify(_metrics).incrementCounter("jvm/garbage_collector/my_bean/collection_count_delta", 2L);
    }
    @Test
    public void testCollectCollectionCountDeltaWithLastCountUndefined() {
        // If the previous sample was undefined (-1), no delta is emitted on the next sample.
        createMockBean(_gcBean1, "My Bean", -1L, 10L);
        Mockito.doReturn(Collections.singletonList(_gcBean1)).when(_managementFactory).getGarbageCollectorMXBeans();
        final GarbageCollectionMetricsCollector collector =
                (GarbageCollectionMetricsCollector) GarbageCollectionMetricsCollector.newInstance();
        collector.collect(_metrics, _managementFactory);
        createMockBean(_gcBean1, "My Bean", 5L, 10L);
        collector.collect(_metrics, _managementFactory);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_count", 5L);
        Mockito.verify(_metrics, Mockito.times(2))
                .setGauge("jvm/garbage_collector/my_bean/collection_time", 10L);
        Mockito.verify(_metrics, Mockito.never())
                .incrementCounter(Mockito.eq("jvm/garbage_collector/my_bean/collection_count_delta"), Mockito.anyLong());
    }
    @Test
    public void testCollectCollectionCountDeltaWithCurrentCountUndefined() {
        // If the current sample is undefined (-1), no delta is emitted either.
        createMockBean(_gcBean1, "My Bean", 3L, 10L);
        Mockito.doReturn(Collections.singletonList(_gcBean1)).when(_managementFactory).getGarbageCollectorMXBeans();
        final GarbageCollectionMetricsCollector collector =
                (GarbageCollectionMetricsCollector) GarbageCollectionMetricsCollector.newInstance();
        collector.collect(_metrics, _managementFactory);
        createMockBean(_gcBean1, "My Bean", -1L, 10L);
        collector.collect(_metrics, _managementFactory);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_count", 3L);
        Mockito.verify(_metrics, Mockito.times(2))
                .setGauge("jvm/garbage_collector/my_bean/collection_time", 10L);
        Mockito.verify(_metrics, Mockito.never())
                .incrementCounter(Mockito.eq("jvm/garbage_collector/my_bean/collection_count_delta"), Mockito.anyLong());
    }
    @Test
    public void testCollectCollectionCountDeltaWithNoLastValue() {
        // A single collection has no prior value, so no delta is emitted.
        createMockBean(_gcBean1, "My Bean", 5L, 10L);
        Mockito.doReturn(Collections.singletonList(_gcBean1)).when(_managementFactory).getGarbageCollectorMXBeans();
        final GarbageCollectionMetricsCollector collector =
                (GarbageCollectionMetricsCollector) GarbageCollectionMetricsCollector.newInstance();
        collector.collect(_metrics, _managementFactory);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_count", 5L);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_time", 10L);
        Mockito.verify(_metrics, Mockito.never())
                .incrementCounter(Mockito.eq("jvm/garbage_collector/my_bean/collection_count_delta"), Mockito.anyLong());
    }
    @Test
    public void testCollectCollectionCountDeltaWithNegativeValue() {
        // A decreasing count (e.g. after a collector reset) yields a negative delta of 5 - 7 = -2.
        createMockBean(_gcBean1, "My Bean", 7L, 10L);
        Mockito.doReturn(Collections.singletonList(_gcBean1)).when(_managementFactory).getGarbageCollectorMXBeans();
        final GarbageCollectionMetricsCollector collector =
                (GarbageCollectionMetricsCollector) GarbageCollectionMetricsCollector.newInstance();
        collector.collect(_metrics, _managementFactory);
        createMockBean(_gcBean1, "My Bean", 5L, 10L);
        collector.collect(_metrics, _managementFactory);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_count", 7L);
        Mockito.verify(_metrics).setGauge("jvm/garbage_collector/my_bean/collection_count", 5L);
        Mockito.verify(_metrics, Mockito.times(2))
                .setGauge("jvm/garbage_collector/my_bean/collection_time", 10L);
        Mockito.verify(_metrics).incrementCounter("jvm/garbage_collector/my_bean/collection_count_delta", -2L);
    }
    @Test(expected = Exception.class)
    public void testCollectWithExceptionWithGettingBeans() {
        // A failure to enumerate beans must propagate out of collect().
        Mockito.doThrow(Exception.class).when(_managementFactory).getGarbageCollectorMXBeans();
        GarbageCollectionMetricsCollector.newInstance().collect(_metrics, _managementFactory);
    }
    /**
     * Stubs the name, collection count, and collection time on a mocked GC bean.
     */
    private void createMockBean(
            final GarbageCollectorMXBean gcBean,
            final String name,
            final long collectionCount,
            final long collectionTime) {
        Mockito.doReturn(name).when(gcBean).getName();
        Mockito.doReturn(collectionCount).when(gcBean).getCollectionCount();
        Mockito.doReturn(collectionTime).when(gcBean).getCollectionTime();
    }
    private Metrics _metrics = null;
    private ManagementFactory _managementFactory = null;
    private GarbageCollectorMXBean _gcBean1 = null;
    private GarbageCollectorMXBean _gcBean2 = null;
}
| |
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bushstar.kobocoinj.core;
import com.bushstar.kobocoinj.net.AbstractTimeoutHandler;
import com.bushstar.kobocoinj.net.MessageWriteTarget;
import com.bushstar.kobocoinj.net.StreamParser;
import com.bushstar.kobocoinj.utils.Threading;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.channels.NotYetConnectedException;
import java.util.concurrent.locks.Lock;
import static com.google.common.base.Preconditions.*;
/**
* Handles high-level message (de)serialization for peers, acting as the bridge between the
* {@link com.bushstar.kobocoinj.net} classes and {@link Peer}.
*/
public abstract class PeerSocketHandler extends AbstractTimeoutHandler implements StreamParser {
    private static final Logger log = LoggerFactory.getLogger(PeerSocketHandler.class);
    // Converts between wire bytes and Message objects for this peer's network.
    private final KobocoinSerializer serializer;
    protected PeerGroup peerGroup = null;
    protected PeerAddress peerAddress;
    // If we close() before we know our writeTarget, set this to true to call writeTarget.closeConnection() right away.
    private boolean closePending = false;
    // writeTarget will be thread-safe, and may call into PeerGroup, which calls us, so we should call it unlocked
    @VisibleForTesting MessageWriteTarget writeTarget = null;
    // The ByteBuffers passed to us from the writeTarget are static in size, and usually smaller than some messages we
    // will receive. For SPV clients, this should be rare (ie we're mostly dealing with small transactions), but for
    // messages which are larger than the read buffer, we have to keep a temporary buffer with its bytes.
    private byte[] largeReadBuffer;
    // Number of bytes of largeReadBuffer filled so far.
    private int largeReadBufferPos;
    // Header of the in-progress oversized message, kept until its payload is complete.
    private KobocoinSerializer.KobocoinPacketHeader header;
    // Guards reads/writes of writeTarget; writeTarget itself is invoked outside the lock (see above).
    private Lock lock = Threading.lock("PeerSocketHandler");
    /**
     * Creates a handler for a peer identified by a raw socket address.
     *
     * @param params network parameters used to configure the wire serializer; must not be null
     * @param remoteIp the remote peer's socket address, wrapped into a {@link PeerAddress}
     */
    public PeerSocketHandler(NetworkParameters params, InetSocketAddress remoteIp) {
        serializer = new KobocoinSerializer(checkNotNull(params));
        this.peerAddress = new PeerAddress(remoteIp);
    }
    /**
     * Creates a handler for a peer identified by an existing {@link PeerAddress}.
     *
     * @param params network parameters used to configure the wire serializer; must not be null
     * @param peerAddress the remote peer's address; must not be null
     */
    public PeerSocketHandler(NetworkParameters params, PeerAddress peerAddress) {
        serializer = new KobocoinSerializer(checkNotNull(params));
        this.peerAddress = checkNotNull(peerAddress);
    }
/**
* Sends the given message to the peer. Due to the asynchronousness of network programming, there is no guarantee
* the peer will have received it. Throws NotYetConnectedException if we are not yet connected to the remote peer.
* TODO: Maybe use something other than the unchecked NotYetConnectedException here
*/
public void sendMessage(Message message) throws NotYetConnectedException {
lock.lock();
try {
if (writeTarget == null)
throw new NotYetConnectedException();
} finally {
lock.unlock();
}
// TODO: Some round-tripping could be avoided here
ByteArrayOutputStream out = new ByteArrayOutputStream();
try {
serializer.serialize(message, out);
writeTarget.writeBytes(out.toByteArray());
} catch (IOException e) {
exceptionCaught(e);
}
}
/**
* Closes the connection to the peer if one exists, or immediately closes the connection as soon as it opens
*/
public void close() {
lock.lock();
try {
if (writeTarget == null) {
closePending = true;
if (peerGroup != null) {
// Handle peer death
peerGroup.handlePeerDeath((Peer) this);
}
return;
}
} finally {
lock.unlock();
}
writeTarget.closeConnection();
}
    /**
     * Invoked by {@link AbstractTimeoutHandler} when the socket times out: logs the event and
     * closes the connection to the peer.
     */
    @Override
    protected void timeoutOccurred() {
        log.info("{}: Timed out", getAddress());
        close();
    }
    /**
     * Called every time a message is received from the network
     *
     * @param m the freshly deserialized message from the remote peer
     * @throws Exception subclasses may propagate any failure; receiveBytes routes it to exceptionCaught()
     */
    protected abstract void processMessage(Message m) throws Exception;
    /**
     * Consumes bytes from the network read buffer, deserializing and dispatching as many complete
     * messages as possible via {@link #processMessage(Message)}. Messages larger than the read
     * buffer are accumulated across calls in {@code largeReadBuffer} until complete.
     *
     * @param buff the freshly filled read buffer; its position must be 0 and its capacity at least
     *             the packet header length plus 4 (checked below)
     * @return the number of bytes consumed from {@code buff}, or -1 on any error (which also kills
     *         the connection upstream)
     */
    @Override
    public int receiveBytes(ByteBuffer buff) {
        checkArgument(buff.position() == 0 &&
                buff.capacity() >= KobocoinSerializer.KobocoinPacketHeader.HEADER_LENGTH + 4);
        try {
            // Repeatedly try to deserialize messages until we hit a BufferUnderflowException
            for (int i = 0; true; i++) {
                // If we are in the middle of reading a message, try to fill that one first, before we expect another
                if (largeReadBuffer != null) {
                    // This can only happen in the first iteration
                    checkState(i == 0);
                    // Read new bytes into the largeReadBuffer
                    int bytesToGet = Math.min(buff.remaining(), largeReadBuffer.length - largeReadBufferPos);
                    buff.get(largeReadBuffer, largeReadBufferPos, bytesToGet);
                    largeReadBufferPos += bytesToGet;
                    // Check the largeReadBuffer's status
                    if (largeReadBufferPos == largeReadBuffer.length) {
                        // ...processing a message if one is available
                        processMessage(serializer.deserializePayload(header, ByteBuffer.wrap(largeReadBuffer)));
                        largeReadBuffer = null;
                        header = null;
                    } else // ...or just returning if we don't have enough bytes yet
                        return buff.position();
                }
                // Now try to deserialize any messages left in buff
                Message message;
                int preSerializePosition = buff.position();
                try {
                    message = serializer.deserialize(buff);
                } catch (BufferUnderflowException e) {
                    // If we went through the whole buffer without a full message, we need to use the largeReadBuffer
                    if (i == 0 && buff.limit() == buff.capacity()) {
                        // ...so reposition the buffer to 0 and read the next message header
                        buff.position(0);
                        try {
                            serializer.seekPastMagicBytes(buff);
                            header = serializer.deserializeHeader(buff);
                            // Initialize the largeReadBuffer with the next message's size and fill it with any bytes
                            // left in buff
                            largeReadBuffer = new byte[header.size];
                            largeReadBufferPos = buff.remaining();
                            buff.get(largeReadBuffer, 0, largeReadBufferPos);
                        } catch (BufferUnderflowException e1) {
                            // If we went through a whole buffer's worth of bytes without getting a header, give up
                            // In cases where the buff is just really small, we could create a second largeReadBuffer
                            // that we use to deserialize the magic+header, but that is rather complicated when the buff
                            // should probably be at least that big anyway (for efficiency)
                            throw new ProtocolException("No magic bytes+header after reading " + buff.capacity() + " bytes");
                        }
                    } else {
                        // Reposition the buffer to its original position, which saves us from skipping messages by
                        // seeking past part of the magic bytes before all of them are in the buffer
                        buff.position(preSerializePosition);
                    }
                    return buff.position();
                }
                // Process our freshly deserialized message
                processMessage(message);
            }
        } catch (Exception e) {
            exceptionCaught(e);
            return -1; // Returning -1 also throws an IllegalStateException upstream and kills the connection
        }
    }
/**
* Sets the {@link MessageWriteTarget} used to write messages to the peer. This should almost never be called, it is
* called automatically by {@link com.bushstar.kobocoinj.net.NioClient} or
* {@link com.bushstar.kobocoinj.net.NioClientManager} once the socket finishes initialization.
*/
@Override
public void setWriteTarget(MessageWriteTarget writeTarget) {
checkArgument(writeTarget != null);
lock.lock();
boolean closeNow = false;
try {
checkArgument(this.writeTarget == null);
closeNow = closePending;
this.writeTarget = writeTarget;
} finally {
lock.unlock();
}
if (closeNow)
writeTarget.closeConnection();
}
    /** @return the largest message size this connection will accept, from the protocol limit. */
    @Override
    public int getMaxMessageSize() {
        return Message.MAX_SIZE;
    }
    /**
     * @return the IP address and port of peer.
     */
    public PeerAddress getAddress() {
        return peerAddress;
    }
/** Catch any exceptions, logging them and then closing the channel. */
private void exceptionCaught(Exception e) {
PeerAddress addr = getAddress();
String s = addr == null ? "?" : addr.toString();
if (e instanceof ConnectException || e instanceof IOException) {
// Short message for network errors
log.info(s + " - " + e.getMessage());
} else {
log.warn(s + " - ", e);
Thread.UncaughtExceptionHandler handler = Threading.uncaughtExceptionHandler;
if (handler != null)
handler.uncaughtException(Thread.currentThread(), e);
}
close();
}
}
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2012 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.websocket.ui.httppanel.component;
import java.awt.BorderLayout;
import java.util.List;
import java.util.regex.Pattern;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JToggleButton;
import org.apache.commons.configuration.FileConfiguration;
import org.parosproxy.paros.Constant;
import org.zaproxy.zap.extension.httppanel.Message;
import org.zaproxy.zap.extension.httppanel.component.HttpPanelComponentInterface;
import org.zaproxy.zap.extension.httppanel.component.HttpPanelComponentViewsManager;
import org.zaproxy.zap.extension.httppanel.view.HttpPanelDefaultViewSelector;
import org.zaproxy.zap.extension.httppanel.view.HttpPanelView;
import org.zaproxy.zap.extension.search.SearchMatch;
import org.zaproxy.zap.extension.search.SearchableHttpPanelComponent;
import org.zaproxy.zap.extension.websocket.WebSocketMessageDTO;
import org.zaproxy.zap.extension.websocket.messagelocations.WebSocketMessageLocation;
import org.zaproxy.zap.extension.websocket.ui.WebSocketPanel;
import org.zaproxy.zap.extension.websocket.ui.httppanel.models.StringWebSocketPanelViewModel;
import org.zaproxy.zap.extension.websocket.ui.httppanel.views.WebSocketPanelTextView;
import org.zaproxy.zap.model.MessageLocation;
import org.zaproxy.zap.view.messagelocation.MessageLocationHighlight;
import org.zaproxy.zap.view.messagelocation.MessageLocationHighlighter;
public class WebSocketComponent
implements HttpPanelComponentInterface,
SearchableHttpPanelComponent,
MessageLocationHighlighter {
public static final String NAME = "WebSocketComponent";
private static final String BUTTON_TOOL_TIP =
Constant.messages.getString("websocket.panel.component.all.tooltip");
protected JToggleButton buttonShowView;
protected JPanel panelOptions;
protected JPanel panelMoreOptions;
protected JPanel panelMain;
private JLabel informationLabel;
protected WebSocketMessageDTO message;
protected HttpPanelComponentViewsManager views;
public WebSocketComponent() {
this.message = null;
views = new HttpPanelComponentViewsManager("websocket");
initUi();
}
protected void initUi() {
// Common
buttonShowView = new JToggleButton(WebSocketPanel.connectIcon);
buttonShowView.setToolTipText(BUTTON_TOOL_TIP);
panelOptions = new JPanel();
panelOptions.add(views.getSelectableViewsComponent());
informationLabel = new JLabel();
panelMoreOptions = new JPanel();
panelMoreOptions.add(informationLabel);
initViews();
// All
panelMain = new JPanel(new BorderLayout());
panelMain.add(views.getViewsPanel());
setSelected(false);
}
@Override
public void setParentConfigurationKey(String configurationKey) {
views.setConfigurationKey(configurationKey);
}
@Override
public JToggleButton getButton() {
return buttonShowView;
}
@Override
public JPanel getOptionsPanel() {
return panelOptions;
}
@Override
public JPanel getMoreOptionsPanel() {
return panelMoreOptions;
}
@Override
public JPanel getMainPanel() {
return panelMain;
}
@Override
public void setSelected(boolean selected) {
buttonShowView.setSelected(selected);
views.setSelected(selected);
}
@Override
public boolean isEnabled(Message aMessage) {
return (aMessage instanceof WebSocketMessageDTO);
}
protected void initViews() {
views.addView(new WebSocketPanelTextView(new StringWebSocketPanelViewModel()));
}
@Override
public String getName() {
return NAME;
}
@Override
public int getPosition() {
return 2;
}
@Override
public void setMessage(Message aMessage) {
this.message = (WebSocketMessageDTO) aMessage;
StringBuilder sb = new StringBuilder();
sb.append(message.toString()).append(" - ");
if (message.getDateTime() != null) {
sb.append(message.getDateTime()).append(" - ");
}
if (message.getReadableOpcode() != null) {
sb.append(message.getReadableOpcode());
}
informationLabel.setText(sb.toString());
views.setMessage(message);
if (message.getTempUserObj() instanceof Boolean) {
Boolean isConnected = (Boolean) message.getTempUserObj();
ImageIcon icon;
if (isConnected) {
if (aMessage.isInScope()) {
icon = WebSocketPanel.connectTargetIcon;
} else {
icon = WebSocketPanel.connectIcon;
}
} else {
if (aMessage.isInScope()) {
icon = WebSocketPanel.disconnectTargetIcon;
} else {
icon = WebSocketPanel.disconnectIcon;
}
}
buttonShowView.setIcon(icon);
}
}
@Override
public void save() {
if (message == null) {
return;
}
views.save();
}
@Override
public void addView(HttpPanelView view, Object options, FileConfiguration fileConfiguration) {
views.addView(view, fileConfiguration);
}
@Override
public void removeView(String viewName, Object options) {
views.removeView(viewName);
}
@Override
public void clearView() {
views.clearView();
informationLabel.setText("");
}
@Override
public void clearView(boolean enableViewSelect) {
clearView();
setEnableViewSelect(enableViewSelect);
}
@Override
public void setEnableViewSelect(boolean enableViewSelect) {
views.setEnableViewSelect(enableViewSelect);
}
@Override
public void addDefaultViewSelector(
HttpPanelDefaultViewSelector defaultViewSelector, Object options) {
views.addDefaultViewSelector(defaultViewSelector);
}
@Override
public void removeDefaultViewSelector(String defaultViewSelectorName, Object options) {
views.removeDefaultViewSelector(defaultViewSelectorName);
}
@Override
public void loadConfig(FileConfiguration fileConfiguration) {
views.loadConfig(fileConfiguration);
}
@Override
public void saveConfig(FileConfiguration fileConfiguration) {
views.saveConfig(fileConfiguration);
}
@Override
public void setEditable(boolean editable) {
views.setEditable(editable);
}
@Override
public void highlightHeader(SearchMatch sm) {
views.highlight(sm);
}
@Override
public void highlightBody(SearchMatch sm) {
views.highlight(sm);
}
@Override
public void searchHeader(Pattern p, List<SearchMatch> matches) {
views.search(p, matches);
}
@Override
public void searchBody(Pattern p, List<SearchMatch> matches) {
views.search(p, matches);
}
@Override
public HttpPanelView setSelectedView(String viewName) {
return views.setSelectedView(viewName);
}
@Override
public boolean supports(MessageLocation location) {
if (!(location instanceof WebSocketMessageLocation)) {
return false;
}
return views.supports(location);
}
@Override
public boolean supports(Class<? extends MessageLocation> classLocation) {
if (!(WebSocketMessageLocation.class.isAssignableFrom(classLocation))) {
return true;
}
return false;
}
@Override
public MessageLocationHighlight highlight(MessageLocation location) {
if (!(location instanceof WebSocketMessageLocation)) {
return null;
}
return views.highlight(location);
}
@Override
public MessageLocationHighlight highlight(
MessageLocation location, MessageLocationHighlight highlight) {
if (!(location instanceof WebSocketMessageLocation)) {
return null;
}
return views.highlight(location, highlight);
}
@Override
public void removeHighlight(
MessageLocation location, MessageLocationHighlight highlightReference) {
if (!(location instanceof WebSocketMessageLocation)) {
return;
}
views.removeHighlight(location, highlightReference);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.jini.test.spec.javaspace.conformance.snapshot;
import java.util.logging.Level;
// net.jini
import net.jini.core.entry.Entry;
import net.jini.core.lease.Lease;
import net.jini.core.transaction.Transaction;
// com.sun.jini
import com.sun.jini.qa.harness.TestException;
import com.sun.jini.qa.harness.QAConfig;
// com.sun.jini.qa
import com.sun.jini.test.spec.javaspace.conformance.SimpleEntry;
/**
* SnapshotTransactionCommitWriteTest asserts that an entry that is written
* under the non null transaction is not visible outside its transaction until
* the transaction successfully commits.
*
* It tests this statement for snapshots.
*
* @author Mikhail A. Markov
*/
public class SnapshotTransactionCommitWriteTest
        extends SnapshotAbstractTestBase {
    /**
     * Sets up the testing environment.
     *
     * @param config QAConfig from the runner for setup.
     * @throws Exception if the parent setup or transaction manager lookup fails.
     */
    public void setup(QAConfig config) throws Exception {
        // mandatory call to parent
        super.setup(config);
        // get an instance of Transaction Manager
        mgr = getTxnManager();
    }
    /**
     * This method asserts that an entry that is written under the
     * non null transaction is not visible outside its transaction until the
     * transaction successfully commits.
     *
     * It tests this statement for snapshots.
     *
     * <P>Notes:<BR>For more information see the JavaSpaces specification
     * sections 2.6, 3.1</P>
     *
     * @throws Exception (typically TestException) if any visibility check fails.
     */
    public void run() throws Exception {
        SimpleEntry sampleEntry1 = new SimpleEntry("TestEntry #1", 1);
        SimpleEntry sampleEntry2 = new SimpleEntry("TestEntry #2", 2);
        Entry snapshot1;
        Entry snapshot2;
        SimpleEntry result;
        Transaction txn;
        // finite lease duration used for some of the writes below
        long leaseTime = timeout2;
        // first check that space is empty
        if (!checkSpace(space)) {
            throw new TestException(
                    "Space is not empty in the beginning.");
        }
        // create snapshots of sample entries
        snapshot1 = space.snapshot(sampleEntry1);
        snapshot2 = space.snapshot(sampleEntry2);
        // create the non null transaction
        txn = getTransaction();
        /*
         * write 1-st entry using it's snapshot with Lease.FOREVER lease
         * time to the space within the transaction
         */
        space.write(snapshot1, txn, Lease.FOREVER);
        /*
         * check that written entry is available
         * in the space within the transaction
         */
        result = (SimpleEntry) space.read(sampleEntry1, txn, checkTime);
        if (result == null) {
            throw new TestException(
                    "written within the non null transaction "
                    + sampleEntry1 + " using it's snapshot with"
                    + " Lease.FOREVER lease time is not available"
                    + " in the space within the transaction.");
        }
        /*
         * check that written entry is not available
         * outside the transaction
         */
        result = (SimpleEntry) space.read(sampleEntry1, null, checkTime);
        if (result != null) {
            throw new TestException(
                    "written within the non null transaction "
                    + sampleEntry1 + " using it's snapshot with"
                    + " Lease.FOREVER lease time"
                    + " is visible in the space outside the transaction.");
        }
        logDebugText("Written within the non null transaction "
                + sampleEntry1 + " using it's snapshot with Lease.FOREVER"
                + " lease time is actually available inside the"
                + " transaction and not visible outside it.");
        /*
         * write 2-nd entry using it's snapshot with Lease.ANY value for
         * lease time to the space within the transaction
         */
        space.write(snapshot2, txn, Lease.ANY);
        /*
         * check that written entry is available
         * in the space within the transaction
         */
        result = (SimpleEntry) space.read(sampleEntry2, txn, checkTime);
        if (result == null) {
            throw new TestException(
                    "written within the non null transaction "
                    + sampleEntry2 + " using it's snapshot with Lease.ANY"
                    + " value for lease time is not available in the space"
                    + " within the transaction.");
        }
        /*
         * check that written entry is not available
         * outside the transaction
         */
        result = (SimpleEntry) space.read(sampleEntry2, null, checkTime);
        if (result != null) {
            throw new TestException(
                    "written within the non null transaction "
                    + sampleEntry2 + " using it's snapshot with Lease.ANY"
                    + " value for lease time"
                    + " is visible in the space outside the transaction.");
        }
        logDebugText("Written within the non null transaction "
                + sampleEntry2 + " using it's snapshot with Lease.ANY"
                + " value for lease time is actually available inside the"
                + " transaction and not visible outside it.");
        /*
         * write 1-st entry to the space using it's snapshot within
         * the transaction again with finite lease time
         */
        space.write(snapshot1, txn, leaseTime);
        /*
         * check that written entry is not available
         * outside the transaction
         */
        result = (SimpleEntry) space.read(sampleEntry1, null, checkTime);
        if (result != null) {
            throw new TestException(
                    "written within the non null transaction "
                    + sampleEntry1 + " using it's snapshot with "
                    + leaseTime + " lease time"
                    + " is visible in the space outside the transaction.");
        }
        /*
         * check that written entry is available
         * in the space within the transaction
         * (take twice: both copies written under the txn must be present)
         */
        result = (SimpleEntry) space.take(sampleEntry1, txn, checkTime);
        if (result == null) {
            throw new TestException(
                    "performed 2 writes within the non null transaction of "
                    + sampleEntry1 + " using it's snapshot, there are no"
                    + " entries are available in the space within the"
                    + " transaction while 2 are expected");
        }
        result = (SimpleEntry) space.take(sampleEntry1, txn, checkTime);
        if (result == null) {
            throw new TestException(
                    "performed 2 writes within the non null transaction of "
                    + sampleEntry1 + " using it's snapshot, there is only"
                    + " 1 entry available in the space within the"
                    + " transaction while 2 are expected");
        }
        logDebugText("Written within the non null transaction "
                + sampleEntry1 + " using it's snapshot with " + leaseTime
                + " lease time is actually available inside the"
                + " transaction and not visible outside it.");
        /*
         * write 1-st entry to the space using it's snapshot
         * within the transaction twice
         */
        space.write(snapshot1, txn, Lease.FOREVER);
        space.write(snapshot1, txn, leaseTime);
        // commit the transaction
        txnCommit(txn);
        // check that 2-nd sample entry is available in the space
        result = (SimpleEntry) space.read(sampleEntry2, null, checkTime);
        if (result == null) {
            throw new TestException(
                    "written within the non null transaction "
                    + sampleEntry2 + " using it's snapshot is not"
                    + " available in the space"
                    + " after transaction's committing.");
        }
        // check that both 1-st entries are available in the space
        result = (SimpleEntry) space.take(sampleEntry1, null, checkTime);
        if (result == null) {
            throw new TestException(
                    "performed 2 writes within the non null transaction of "
                    + sampleEntry1 + " using it's snapshot, there are no"
                    + " entries are available in the space after"
                    + " transaction's committing while 2 are expected");
        }
        result = (SimpleEntry) space.take(sampleEntry1, null, checkTime);
        if (result == null) {
            throw new TestException(
                    "performed 2 writes within the non null transaction of "
                    + sampleEntry1 + " using it's snapshot, there is only"
                    + " 1 entry available in the space after transaction's"
                    + " committing while 2 are expected");
        }
        logDebugText("All written entries are available in the space"
                + " after transaction's committing.");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.twill.internal;
import com.google.common.base.Function;
import com.google.common.util.concurrent.AbstractIdleService;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.Service;
import com.google.common.util.concurrent.SettableFuture;
import com.google.common.util.concurrent.Uninterruptibles;
import org.apache.twill.api.RunId;
import org.apache.twill.api.ServiceController;
import org.apache.twill.common.Threads;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
/**
* An abstract base class for implementing {@link ServiceController} that deal with Service state transition and
* listener callback.
*/
public abstract class AbstractExecutionServiceController implements ServiceController, Service {
    // Identifier of the run this controller manages; immutable for the controller's lifetime.
    private final RunId runId;
    // Fans lifecycle callbacks out to externally registered listeners (with state replay).
    private final ListenerExecutors listenerExecutors;
    // Underlying Guava service that drives the actual start/stop state machine.
    private final Service serviceDelegate;
    // Completed (normally or exceptionally) exactly once when the service reaches a terminal state.
    private final SettableFuture<State> terminationFuture;
    // volatile: written by subclasses via setTerminationStatus(), read from arbitrary threads.
    private volatile TerminationStatus terminationStatus;
    protected AbstractExecutionServiceController(RunId runId) {
        this.runId = runId;
        this.listenerExecutors = new ListenerExecutors();
        this.serviceDelegate = new ServiceDelegate();
        this.terminationFuture = SettableFuture.create();
        // Complete terminationFuture from the service's own terminal callbacks so that
        // terminate()/awaitTerminated() observe both normal and failed shutdown.
        addListener(new ServiceListenerAdapter() {
            @Override
            public void failed(State from, Throwable failure) {
                terminationFuture.setException(failure);
            }
            @Override
            public void terminated(State from) {
                terminationFuture.set(State.TERMINATED);
            }
        }, Threads.SAME_THREAD_EXECUTOR);
    }
    // Subclass hooks invoked by ServiceDelegate on start/stop.
    protected abstract void startUp();
    protected abstract void shutDown();
    @Override
    public final RunId getRunId() {
        return runId;
    }
    /** Initiates stop and returns a future that completes with this controller on termination. */
    @Override
    public Future<? extends ServiceController> terminate() {
        stop();
        return Futures.transform(terminationFuture, new Function<State, ServiceController>() {
            @Override
            public ServiceController apply(State input) {
                return AbstractExecutionServiceController.this;
            }
        });
    }
    @Nullable
    @Override
    public TerminationStatus getTerminationStatus() {
        return terminationStatus;
    }
    /** Runs {@code runnable} on {@code executor} once the service reaches RUNNING. */
    @Override
    public void onRunning(final Runnable runnable, Executor executor) {
        addListener(new ServiceListenerAdapter() {
            @Override
            public void running() {
                runnable.run();
            }
        }, executor);
    }
    /** Runs {@code runnable} on {@code executor} when the service terminates or fails. */
    @Override
    public void onTerminated(final Runnable runnable, Executor executor) {
        addListener(new ServiceListenerAdapter() {
            @Override
            public void failed(State from, Throwable failure) {
                runnable.run();
            }
            @Override
            public void terminated(State from) {
                runnable.run();
            }
        }, executor);
    }
    /** Blocks uninterruptibly until termination; rethrows a failure as ExecutionException. */
    @Override
    public void awaitTerminated() throws ExecutionException {
        Uninterruptibles.getUninterruptibly(terminationFuture);
    }
    @Override
    public void awaitTerminated(long timeout, TimeUnit timeoutUnit) throws TimeoutException, ExecutionException {
        Uninterruptibles.getUninterruptibly(terminationFuture, timeout, timeoutUnit);
    }
    public final void addListener(Listener listener, Executor executor) {
        listenerExecutors.addListener(new ListenerExecutor(listener, executor));
    }
    @Override
    public final ListenableFuture<State> start() {
        // NOTE(review): each call re-registers listenerExecutors with the delegate; calling
        // start() more than once would deliver duplicate callbacks — confirm callers only
        // start once.
        serviceDelegate.addListener(listenerExecutors, Threads.SAME_THREAD_EXECUTOR);
        return serviceDelegate.start();
    }
    @Override
    public final State startAndWait() {
        return Futures.getUnchecked(start());
    }
    @Override
    public final boolean isRunning() {
        return serviceDelegate.isRunning();
    }
    @Override
    public final State state() {
        return serviceDelegate.state();
    }
    @Override
    public final State stopAndWait() {
        return Futures.getUnchecked(stop());
    }
    @Override
    public final ListenableFuture<State> stop() {
        return serviceDelegate.stop();
    }
    /** Executor used for state transitions: one short-lived daemon thread per transition. */
    protected Executor executor(final State state) {
        return new Executor() {
            @Override
            public void execute(Runnable command) {
                Thread t = new Thread(command, getClass().getSimpleName() + " " + state);
                t.setDaemon(true);
                t.start();
            }
        };
    }
    protected final void setTerminationStatus(TerminationStatus status) {
        this.terminationStatus = status;
    }
    /** Bridges Guava's AbstractIdleService lifecycle to this class's startUp/shutDown hooks. */
    private final class ServiceDelegate extends AbstractIdleService {
        @Override
        protected void startUp() throws Exception {
            AbstractExecutionServiceController.this.startUp();
        }
        @Override
        protected void shutDown() throws Exception {
            AbstractExecutionServiceController.this.shutDown();
        }
        @Override
        protected Executor executor(State state) {
            return AbstractExecutionServiceController.this.executor(state);
        }
    }
    /**
     * Inner class for dispatching listener call back to a list of listeners.
     * Remembers the most recent state callback so listeners added later are replayed the
     * current state. All methods are synchronized so add/replay and dispatch never interleave.
     */
    private static final class ListenerExecutors implements Listener {
        private interface Callback {
            void call(Listener listener);
        }
        private final Queue<ListenerExecutor> listeners = new ConcurrentLinkedQueue<ListenerExecutor>();
        // Last lifecycle callback observed; replayed to listeners registered afterwards.
        private final AtomicReference<Callback> lastState = new AtomicReference<Callback>();
        private synchronized void addListener(final ListenerExecutor listener) {
            listeners.add(listener);
            Callback callback = lastState.get();
            if (callback != null) {
                callback.call(listener);
            }
        }
        @Override
        public synchronized void starting() {
            lastState.set(new Callback() {
                @Override
                public void call(Listener listener) {
                    listener.starting();
                }
            });
            for (ListenerExecutor listener : listeners) {
                listener.starting();
            }
        }
        @Override
        public synchronized void running() {
            lastState.set(new Callback() {
                @Override
                public void call(Listener listener) {
                    listener.running();
                }
            });
            for (ListenerExecutor listener : listeners) {
                listener.running();
            }
        }
        @Override
        public synchronized void stopping(final State from) {
            lastState.set(new Callback() {
                @Override
                public void call(Listener listener) {
                    listener.stopping(from);
                }
            });
            for (ListenerExecutor listener : listeners) {
                listener.stopping(from);
            }
        }
        @Override
        public synchronized void terminated(final State from) {
            lastState.set(new Callback() {
                @Override
                public void call(Listener listener) {
                    listener.terminated(from);
                }
            });
            for (ListenerExecutor listener : listeners) {
                listener.terminated(from);
            }
        }
        @Override
        public synchronized void failed(final State from, final Throwable failure) {
            lastState.set(new Callback() {
                @Override
                public void call(Listener listener) {
                    listener.failed(from, failure);
                }
            });
            for (ListenerExecutor listener : listeners) {
                listener.failed(from, failure);
            }
        }
    }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.common.collect.ImmutableList;
import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;
/** Interface of <a href="http://schema.org/Airline">http://schema.org/Airline</a>. */
public interface Airline extends Organization {
  /** Builder interface of <a href="http://schema.org/Airline">http://schema.org/Airline</a>. */
public interface Builder extends Organization.Builder {
@Override
Builder addJsonLdContext(@Nullable JsonLdContext context);
@Override
Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);
@Override
Builder setJsonLdId(@Nullable String value);
@Override
Builder setJsonLdReverse(String property, Thing obj);
@Override
Builder setJsonLdReverse(String property, Thing.Builder builder);
/** Add a value to property additionalType. */
Builder addAdditionalType(URL value);
/** Add a value to property additionalType. */
Builder addAdditionalType(String value);
/** Add a value to property address. */
Builder addAddress(PostalAddress value);
/** Add a value to property address. */
Builder addAddress(PostalAddress.Builder value);
/** Add a value to property address. */
Builder addAddress(Text value);
/** Add a value to property address. */
Builder addAddress(String value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(AggregateRating value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(AggregateRating.Builder value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(String value);
/** Add a value to property alternateName. */
Builder addAlternateName(Text value);
/** Add a value to property alternateName. */
Builder addAlternateName(String value);
/** Add a value to property alumni. */
Builder addAlumni(Person value);
/** Add a value to property alumni. */
Builder addAlumni(Person.Builder value);
/** Add a value to property alumni. */
Builder addAlumni(String value);
/** Add a value to property areaServed. */
Builder addAreaServed(AdministrativeArea value);
/** Add a value to property areaServed. */
Builder addAreaServed(AdministrativeArea.Builder value);
/** Add a value to property areaServed. */
Builder addAreaServed(GeoShape value);
/** Add a value to property areaServed. */
Builder addAreaServed(GeoShape.Builder value);
/** Add a value to property areaServed. */
Builder addAreaServed(Place value);
/** Add a value to property areaServed. */
Builder addAreaServed(Place.Builder value);
/** Add a value to property areaServed. */
Builder addAreaServed(Text value);
/** Add a value to property areaServed. */
Builder addAreaServed(String value);
/** Add a value to property award. */
Builder addAward(Text value);
/** Add a value to property award. */
Builder addAward(String value);
/** Add a value to property awards. */
Builder addAwards(Text value);
/** Add a value to property awards. */
Builder addAwards(String value);
/** Add a value to property boardingPolicy. */
Builder addBoardingPolicy(BoardingPolicyType value);
/** Add a value to property boardingPolicy. */
Builder addBoardingPolicy(String value);
/** Add a value to property brand. */
Builder addBrand(Brand value);
/** Add a value to property brand. */
Builder addBrand(Brand.Builder value);
/** Add a value to property brand. */
Builder addBrand(Organization value);
/** Add a value to property brand. */
Builder addBrand(Organization.Builder value);
/** Add a value to property brand. */
Builder addBrand(String value);
/** Add a value to property contactPoint. */
Builder addContactPoint(ContactPoint value);
/** Add a value to property contactPoint. */
Builder addContactPoint(ContactPoint.Builder value);
/** Add a value to property contactPoint. */
Builder addContactPoint(String value);
/** Add a value to property contactPoints. */
Builder addContactPoints(ContactPoint value);
/** Add a value to property contactPoints. */
Builder addContactPoints(ContactPoint.Builder value);
/** Add a value to property contactPoints. */
Builder addContactPoints(String value);
/** Add a value to property department. */
Builder addDepartment(Organization value);
/** Add a value to property department. */
Builder addDepartment(Organization.Builder value);
/** Add a value to property department. */
Builder addDepartment(String value);
/** Add a value to property description. */
Builder addDescription(Text value);
/** Add a value to property description. */
Builder addDescription(String value);
/** Add a value to property dissolutionDate. */
Builder addDissolutionDate(Date value);
/** Add a value to property dissolutionDate. */
Builder addDissolutionDate(String value);
/** Add a value to property duns. */
Builder addDuns(Text value);
/** Add a value to property duns. */
Builder addDuns(String value);
/** Add a value to property email. */
Builder addEmail(Text value);
/** Add a value to property email. */
Builder addEmail(String value);
/** Add a value to property employee. */
Builder addEmployee(Person value);
/** Add a value to property employee. */
Builder addEmployee(Person.Builder value);
/** Add a value to property employee. */
Builder addEmployee(String value);
/** Add a value to property employees. */
Builder addEmployees(Person value);
/** Add a value to property employees. */
Builder addEmployees(Person.Builder value);
/** Add a value to property employees. */
Builder addEmployees(String value);
/** Add a value to property event. */
Builder addEvent(Event value);
/** Add a value to property event. */
Builder addEvent(Event.Builder value);
/** Add a value to property event. */
Builder addEvent(String value);
/** Add a value to property events. */
Builder addEvents(Event value);
/** Add a value to property events. */
Builder addEvents(Event.Builder value);
/** Add a value to property events. */
Builder addEvents(String value);
/** Add a value to property faxNumber. */
Builder addFaxNumber(Text value);
/** Add a value to property faxNumber. */
Builder addFaxNumber(String value);
/** Add a value to property founder. */
Builder addFounder(Person value);
/** Add a value to property founder. */
Builder addFounder(Person.Builder value);
/** Add a value to property founder. */
Builder addFounder(String value);
/** Add a value to property founders. */
Builder addFounders(Person value);
/** Add a value to property founders. */
Builder addFounders(Person.Builder value);
/** Add a value to property founders. */
Builder addFounders(String value);
/** Add a value to property foundingDate. */
Builder addFoundingDate(Date value);
/** Add a value to property foundingDate. */
Builder addFoundingDate(String value);
/** Add a value to property foundingLocation. */
Builder addFoundingLocation(Place value);
/** Add a value to property foundingLocation. */
Builder addFoundingLocation(Place.Builder value);
/** Add a value to property foundingLocation. */
Builder addFoundingLocation(String value);
/** Add a value to property globalLocationNumber. */
Builder addGlobalLocationNumber(Text value);
/** Add a value to property globalLocationNumber. */
Builder addGlobalLocationNumber(String value);
/** Add a value to property hasOfferCatalog. */
Builder addHasOfferCatalog(OfferCatalog value);
/** Add a value to property hasOfferCatalog. */
Builder addHasOfferCatalog(OfferCatalog.Builder value);
/** Add a value to property hasOfferCatalog. */
Builder addHasOfferCatalog(String value);
/** Add a value to property hasPOS. */
Builder addHasPOS(Place value);
/** Add a value to property hasPOS. */
Builder addHasPOS(Place.Builder value);
/** Add a value to property hasPOS. */
Builder addHasPOS(String value);
/** Add a value to property iataCode. */
Builder addIataCode(Text value);
/** Add a value to property iataCode. */
Builder addIataCode(String value);
/** Add a value to property image. */
Builder addImage(ImageObject value);
/** Add a value to property image. */
Builder addImage(ImageObject.Builder value);
/** Add a value to property image. */
Builder addImage(URL value);
/** Add a value to property image. */
Builder addImage(String value);
/** Add a value to property isicV4. */
Builder addIsicV4(Text value);
/** Add a value to property isicV4. */
Builder addIsicV4(String value);
/** Add a value to property legalName. */
Builder addLegalName(Text value);
/** Add a value to property legalName. */
Builder addLegalName(String value);
/** Add a value to property location. */
Builder addLocation(Place value);
/** Add a value to property location. */
Builder addLocation(Place.Builder value);
/** Add a value to property location. */
Builder addLocation(PostalAddress value);
/** Add a value to property location. */
Builder addLocation(PostalAddress.Builder value);
/** Add a value to property location. */
Builder addLocation(Text value);
/** Add a value to property location. */
Builder addLocation(String value);
/** Add a value to property logo. */
Builder addLogo(ImageObject value);
/** Add a value to property logo. */
Builder addLogo(ImageObject.Builder value);
/** Add a value to property logo. */
Builder addLogo(URL value);
/** Add a value to property logo. */
Builder addLogo(String value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(CreativeWork value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(CreativeWork.Builder value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(URL value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(String value);
/** Add a value to property makesOffer. */
Builder addMakesOffer(Offer value);
/** Add a value to property makesOffer. */
Builder addMakesOffer(Offer.Builder value);
/** Add a value to property makesOffer. */
Builder addMakesOffer(String value);
/** Add a value to property member. */
Builder addMember(Organization value);
/** Add a value to property member. */
Builder addMember(Organization.Builder value);
/** Add a value to property member. */
Builder addMember(Person value);
/** Add a value to property member. */
Builder addMember(Person.Builder value);
/** Add a value to property member. */
Builder addMember(String value);
/** Add a value to property memberOf. */
Builder addMemberOf(Organization value);
/** Add a value to property memberOf. */
Builder addMemberOf(Organization.Builder value);
/** Add a value to property memberOf. */
Builder addMemberOf(ProgramMembership value);
/** Add a value to property memberOf. */
Builder addMemberOf(ProgramMembership.Builder value);
/** Add a value to property memberOf. */
Builder addMemberOf(String value);
/** Add a value to property members. */
Builder addMembers(Organization value);
/** Add a value to property members. */
Builder addMembers(Organization.Builder value);
/** Add a value to property members. */
Builder addMembers(Person value);
/** Add a value to property members. */
Builder addMembers(Person.Builder value);
/** Add a value to property members. */
Builder addMembers(String value);
/** Add a value to property naics. */
Builder addNaics(Text value);
/** Add a value to property naics. */
Builder addNaics(String value);
/** Add a value to property name. */
Builder addName(Text value);
/** Add a value to property name. */
Builder addName(String value);
/** Add a value to property numberOfEmployees. */
Builder addNumberOfEmployees(QuantitativeValue value);
/** Add a value to property numberOfEmployees. */
Builder addNumberOfEmployees(QuantitativeValue.Builder value);
/** Add a value to property numberOfEmployees. */
Builder addNumberOfEmployees(String value);
/** Add a value to property owns. */
Builder addOwns(OwnershipInfo value);
/** Add a value to property owns. */
Builder addOwns(OwnershipInfo.Builder value);
/** Add a value to property owns. */
Builder addOwns(Product value);
/** Add a value to property owns. */
Builder addOwns(Product.Builder value);
/** Add a value to property owns. */
Builder addOwns(String value);
/** Add a value to property parentOrganization. */
Builder addParentOrganization(Organization value);
/** Add a value to property parentOrganization. */
Builder addParentOrganization(Organization.Builder value);
/** Add a value to property parentOrganization. */
Builder addParentOrganization(String value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(Action value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(Action.Builder value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(String value);
/** Add a value to property review. */
Builder addReview(Review value);
/** Add a value to property review. */
Builder addReview(Review.Builder value);
/** Add a value to property review. */
Builder addReview(String value);
/** Add a value to property reviews. */
Builder addReviews(Review value);
/** Add a value to property reviews. */
Builder addReviews(Review.Builder value);
/** Add a value to property reviews. */
Builder addReviews(String value);
/** Add a value to property sameAs. */
Builder addSameAs(URL value);
/** Add a value to property sameAs. */
Builder addSameAs(String value);
/** Add a value to property seeks. */
Builder addSeeks(Demand value);
/** Add a value to property seeks. */
Builder addSeeks(Demand.Builder value);
/** Add a value to property seeks. */
Builder addSeeks(String value);
/** Add a value to property serviceArea. */
Builder addServiceArea(AdministrativeArea value);
/** Add a value to property serviceArea. */
Builder addServiceArea(AdministrativeArea.Builder value);
/** Add a value to property serviceArea. */
Builder addServiceArea(GeoShape value);
/** Add a value to property serviceArea. */
Builder addServiceArea(GeoShape.Builder value);
/** Add a value to property serviceArea. */
Builder addServiceArea(Place value);
/** Add a value to property serviceArea. */
Builder addServiceArea(Place.Builder value);
/** Add a value to property serviceArea. */
Builder addServiceArea(String value);
/** Add a value to property subOrganization. */
Builder addSubOrganization(Organization value);
/** Add a value to property subOrganization. */
Builder addSubOrganization(Organization.Builder value);
/** Add a value to property subOrganization. */
Builder addSubOrganization(String value);
/** Add a value to property taxID. */
Builder addTaxID(Text value);
/** Add a value to property taxID. */
Builder addTaxID(String value);
/** Add a value to property telephone. */
Builder addTelephone(Text value);
/** Add a value to property telephone. */
Builder addTelephone(String value);
/** Add a value to property url. */
Builder addUrl(URL value);
/** Add a value to property url. */
Builder addUrl(String value);
/** Add a value to property vatID. */
Builder addVatID(Text value);
/** Add a value to property vatID. */
Builder addVatID(String value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(Article value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(Article.Builder value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(String value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(PopularityScoreSpecification value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(PopularityScoreSpecification.Builder value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(String value);
/**
* Add a value to property.
*
* @param name The property name.
* @param value The value of the property.
*/
Builder addProperty(String name, SchemaOrgType value);
/**
* Add a value to property.
*
* @param name The property name.
* @param builder The schema.org object builder for the property value.
*/
Builder addProperty(String name, Thing.Builder builder);
/**
* Add a value to property.
*
* @param name The property name.
* @param value The string value of the property.
*/
Builder addProperty(String name, String value);
/** Build a {@link Airline} object. */
Airline build();
}
/**
* Returns the value list of property boardingPolicy. Empty list is returned if the property not
* set in current object.
*/
ImmutableList<SchemaOrgType> getBoardingPolicyList();
/**
* Returns the value list of property iataCode. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getIataCodeList();
}
| |
/*
* Copyright 2015-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.pcep.pcepio.protocol.ver1;
import java.util.LinkedList;
import java.util.ListIterator;
import org.jboss.netty.buffer.ChannelBuffer;
import org.onosproject.pcep.pcepio.exceptions.PcepParseException;
import org.onosproject.pcep.pcepio.protocol.PcepRPObject;
import org.onosproject.pcep.pcepio.types.PcepObjectHeader;
import org.onosproject.pcep.pcepio.types.PcepValueType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.MoreObjects;
/**
 * Provides PCEP RP object (RFC 5440, section 7.4).
 */
public class PcepRPObjectVer1 implements PcepRPObject {

    /*
     * RP Object.
     0                   1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                          Flags                    |O|B|R| Pri |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                        Request-ID-number                      |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |                                                               |
    //                      Optional TLVs                          //
    |                                                               |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */

    protected static final Logger log = LoggerFactory.getLogger(PcepRPObjectVer1.class);

    public static final byte RP_OBJ_TYPE = 1;
    public static final byte RP_OBJ_CLASS = 2;
    public static final byte RP_OBJECT_VERSION = 1;
    public static final short RP_OBJ_MINIMUM_LENGTH = 12;
    public static final int DEFAULT_REQUEST_ID_NUM = 0;
    //Signalled, all default values to be checked.
    public static final boolean DEFAULT_OFLAG = false;
    public static final boolean DEFAULT_BFLAG = false;
    public static final boolean DEFAULT_RFLAG = false;
    public static final byte DEFAULT_PRIFLAG = 0;
    public static final int OBJECT_HEADER_LENGTH = 4;
    // Bit positions of the O/B/R flags within the first word.
    public static final int OFLAG_SHIFT_VALUE = 5;
    public static final int BFLAG_SHIFT_VALUE = 4;
    public static final int RFLAG_SHIFT_VALUE = 3;
    // Corresponding single-bit masks (1 << shift) and the 3-bit Pri mask.
    public static final int OFLAG_TEMP_SHIFT_VALUE = 0x20;
    public static final int BFLAG_TEMP_SHIFT_VALUE = 0x10;
    public static final int RFLAG_TEMP_SHIFT_VALUE = 0x08;
    public static final int PRIFLAG_TEMP_SHIFT_VALUE = 0x07;
    public static final int BIT_SET = 1;
    public static final int BIT_RESET = 0;
    public static final int MINIMUM_COMMON_HEADER_LENGTH = 4;

    public static final PcepObjectHeader DEFAULT_RP_OBJECT_HEADER = new PcepObjectHeader(RP_OBJ_CLASS, RP_OBJ_TYPE,
            PcepObjectHeader.REQ_OBJ_OPTIONAL_PROCESS, PcepObjectHeader.RSP_OBJ_PROCESSED, RP_OBJ_MINIMUM_LENGTH);

    private PcepObjectHeader rpObjHeader;
    private int iRequestIdNum;
    private boolean bOFlag;
    private boolean bBFlag;
    private boolean bRFlag;
    private byte yPriFlag; // lowest 3 bits of the flags word
    private LinkedList<PcepValueType> llOptionalTlv;

    /**
     * Constructor to initialize variables.
     *
     * @param rpObjHeader RP-OBJECT header
     * @param iRequestIdNum Request-ID-number
     * @param bOFlag O-flag
     * @param bBFlag B-flag
     * @param bRFlag R-flag
     * @param yPriFlag Pri-flag
     * @param llOptionalTlv linked list of Optional TLV
     */
    public PcepRPObjectVer1(PcepObjectHeader rpObjHeader, int iRequestIdNum, boolean bOFlag, boolean bBFlag,
            boolean bRFlag, byte yPriFlag, LinkedList<PcepValueType> llOptionalTlv) {
        this.rpObjHeader = rpObjHeader;
        this.iRequestIdNum = iRequestIdNum;
        this.bOFlag = bOFlag;
        this.bBFlag = bBFlag;
        this.bRFlag = bRFlag;
        this.yPriFlag = yPriFlag;
        this.llOptionalTlv = llOptionalTlv;
    }

    /**
     * Sets RP Object header.
     *
     * @param obj RP Object header
     */
    public void setRPObjHeader(PcepObjectHeader obj) {
        this.rpObjHeader = obj;
    }

    @Override
    public void setRequestIdNum(int iRequestIdNum) {
        this.iRequestIdNum = iRequestIdNum;
    }

    @Override
    public void setOFlag(boolean bOFlag) {
        this.bOFlag = bOFlag;
    }

    @Override
    public void setBFlag(boolean bBFlag) {
        this.bBFlag = bBFlag;
    }

    @Override
    public void setRFlag(boolean bRFlag) {
        this.bRFlag = bRFlag;
    }

    @Override
    public void setPriFlag(byte yPriFlag) {
        this.yPriFlag = yPriFlag;
    }

    /**
     * Returns RP Object header.
     *
     * @return rpObjHeader
     */
    public PcepObjectHeader getRPObjHeader() {
        return this.rpObjHeader;
    }

    @Override
    public int getRequestIdNum() {
        return this.iRequestIdNum;
    }

    @Override
    public boolean getOFlag() {
        return this.bOFlag;
    }

    @Override
    public boolean getBFlag() {
        return this.bBFlag;
    }

    @Override
    public boolean getRFlag() {
        return this.bRFlag;
    }

    @Override
    public byte getPriFlag() {
        return this.yPriFlag;
    }

    /**
     * Reads the channel buffer and returns the object of PcepRPObject.
     *
     * @param cb of type channel buffer
     * @return the object of PcepRPObject
     * @throws PcepParseException if mandatory fields are missing
     */
    public static PcepRPObject read(ChannelBuffer cb) throws PcepParseException {
        log.debug("read");
        PcepObjectHeader rpObjHeader;
        int iRequestIdNum;
        boolean bOFlag;
        boolean bBFlag;
        boolean bRFlag;
        byte yPriFlag;

        rpObjHeader = PcepObjectHeader.read(cb);

        // Take only the RP object body (header already consumed).
        ChannelBuffer tempCb = cb.readBytes(rpObjHeader.getObjLen() - OBJECT_HEADER_LENGTH);
        int iTemp = tempCb.readInt();
        // Low 3 bits carry Pri; single bits 0x20/0x10/0x08 carry O/B/R.
        yPriFlag = (byte) (iTemp & PRIFLAG_TEMP_SHIFT_VALUE);
        bOFlag = (iTemp & OFLAG_TEMP_SHIFT_VALUE) == OFLAG_TEMP_SHIFT_VALUE;
        bBFlag = (iTemp & BFLAG_TEMP_SHIFT_VALUE) == BFLAG_TEMP_SHIFT_VALUE;
        bRFlag = (iTemp & RFLAG_TEMP_SHIFT_VALUE) == RFLAG_TEMP_SHIFT_VALUE;

        iRequestIdNum = tempCb.readInt();

        // parse optional TLV
        LinkedList<PcepValueType> llOptionalTlv = parseOptionalTlv(tempCb);

        return new PcepRPObjectVer1(rpObjHeader, iRequestIdNum, bOFlag, bBFlag, bRFlag, yPriFlag, llOptionalTlv);
    }

    @Override
    public int write(ChannelBuffer cb) throws PcepParseException {

        //write Object header
        int objStartIndex = cb.writerIndex();
        int objLenIndex = rpObjHeader.write(cb);

        if (objLenIndex <= 0) {
            throw new PcepParseException("ObjectLength Index is " + objLenIndex);
        }

        // Encode flags symmetrically to read(): Pri in the low 3 bits and the
        // O/B/R flags at their single-bit masks (0x20/0x10/0x08). The previous
        // code OR-ed the bit *positions* (5/4/3) instead of the masks, which
        // corrupted the Pri field and produced flags read() could never see.
        int iTemp = yPriFlag & PRIFLAG_TEMP_SHIFT_VALUE;
        iTemp = (bOFlag) ? (iTemp | OFLAG_TEMP_SHIFT_VALUE) : iTemp;
        iTemp = (bBFlag) ? (iTemp | BFLAG_TEMP_SHIFT_VALUE) : iTemp;
        iTemp = (bRFlag) ? (iTemp | RFLAG_TEMP_SHIFT_VALUE) : iTemp;
        cb.writeInt(iTemp);
        cb.writeInt(iRequestIdNum);

        // Add optional TLV
        packOptionalTlv(cb);

        //Update object length now
        int length = cb.writerIndex() - objStartIndex;
        //will be helpful during print().
        rpObjHeader.setObjLen((short) length);
        cb.setShort(objLenIndex, (short) length);

        return cb.writerIndex();
    }

    /**
     * Returns list of optional tlvs.
     *
     * @param cb of type channel buffer.
     * @return llOutOptionalTlv linked list of Optional TLV
     * @throws PcepParseException if mandatory fields are missing
     */
    protected static LinkedList<PcepValueType> parseOptionalTlv(ChannelBuffer cb) throws PcepParseException {
        LinkedList<PcepValueType> llOutOptionalTlv = new LinkedList<>();
        //Currently no optional TLVs, will be added based on requirements.
        return llOutOptionalTlv;
    }

    /**
     * Writes the optional tlvs to the channel buffer.
     *
     * @param cb of type channel buffer
     * @return writer index after packing
     */
    protected int packOptionalTlv(ChannelBuffer cb) {
        ListIterator<PcepValueType> listIterator = llOptionalTlv.listIterator();
        while (listIterator.hasNext()) {
            listIterator.next().write(cb);
        }
        return cb.writerIndex();
    }

    /**
     * Builder class for PCEP rp object.
     */
    public static class Builder implements PcepRPObject.Builder {

        private boolean bIsHeaderSet = false;
        private boolean bIsRequestIdNumSet = false;
        private boolean bIsOFlagSet = false;
        private boolean bIsRFlagset = false;
        private boolean bIsBFlagSet = false;
        private boolean bIsPriFlagSet = false;

        private PcepObjectHeader rpObjHeader;
        private int requestIdNum;
        private boolean bOFlag;
        private boolean bBFlag;
        private boolean bRFlag;
        private byte yPriFlag;
        private LinkedList<PcepValueType> llOptionalTlv = new LinkedList<>();

        private boolean bIsPFlagSet = false;
        private boolean bPFlag;

        private boolean bIsIFlagSet = false;
        private boolean bIFlag;

        @Override
        public PcepRPObject build() {
            // Fall back to documented defaults for any field not explicitly set.
            PcepObjectHeader objHeader = this.bIsHeaderSet ? this.rpObjHeader : DEFAULT_RP_OBJECT_HEADER;
            int requestIdNum = this.bIsRequestIdNumSet ? this.requestIdNum : DEFAULT_REQUEST_ID_NUM;
            boolean bOFlag = this.bIsOFlagSet ? this.bOFlag : DEFAULT_OFLAG;
            boolean bBFlag = this.bIsBFlagSet ? this.bBFlag : DEFAULT_BFLAG;
            boolean bRFlag = this.bIsRFlagset ? this.bRFlag : DEFAULT_RFLAG;
            byte yPriFlag = this.bIsPriFlagSet ? this.yPriFlag : DEFAULT_PRIFLAG;

            if (bIsPFlagSet) {
                objHeader.setPFlag(bPFlag);
            }

            if (bIsIFlagSet) {
                objHeader.setIFlag(bIFlag);
            }

            return new PcepRPObjectVer1(objHeader, requestIdNum, bOFlag, bBFlag, bRFlag, yPriFlag, llOptionalTlv);
        }

        @Override
        public PcepObjectHeader getRPObjHeader() {
            return this.rpObjHeader;
        }

        @Override
        public Builder setRPObjHeader(PcepObjectHeader obj) {
            this.rpObjHeader = obj;
            this.bIsHeaderSet = true;
            return this;
        }

        @Override
        public int getRequestIdNum() {
            return this.requestIdNum;
        }

        @Override
        public Builder setRequestIdNum(int value) {
            this.requestIdNum = value;
            this.bIsRequestIdNumSet = true;
            return this;
        }

        @Override
        public Builder setOFlag(boolean value) {
            this.bOFlag = value;
            this.bIsOFlagSet = true;
            return this;
        }

        @Override
        public boolean getBFlag() {
            return this.bBFlag;
        }

        @Override
        public Builder setBFlag(boolean value) {
            this.bBFlag = value;
            this.bIsBFlagSet = true;
            return this;
        }

        @Override
        public boolean getRFlag() {
            return this.bRFlag;
        }

        @Override
        public Builder setRFlag(boolean value) {
            this.bRFlag = value;
            this.bIsRFlagset = true;
            return this;
        }

        @Override
        public byte getPriFlag() {
            return this.yPriFlag;
        }

        @Override
        public Builder setPriFlag(byte value) {
            this.yPriFlag = value;
            this.bIsPriFlagSet = true;
            return this;
        }

        @Override
        public Builder setOptionalTlv(LinkedList<PcepValueType> llOptionalTlv) {
            this.llOptionalTlv = llOptionalTlv;
            return this;
        }

        @Override
        public LinkedList<PcepValueType> getOptionalTlv() {
            return this.llOptionalTlv;
        }

        @Override
        public Builder setPFlag(boolean value) {
            this.bPFlag = value;
            this.bIsPFlagSet = true;
            return this;
        }

        @Override
        public Builder setIFlag(boolean value) {
            this.bIFlag = value;
            this.bIsIFlagSet = true;
            return this;
        }

        @Override
        public boolean getOFlag() {
            return this.bOFlag;
        }
    }

    @Override
    public LinkedList<PcepValueType> getOptionalTlv() {
        return this.llOptionalTlv;
    }

    @Override
    public void setOptionalTlv(LinkedList<PcepValueType> llOptionalTlv) {
        this.llOptionalTlv = llOptionalTlv;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .add("ObjectHeader", rpObjHeader)
                .add("OFlag", (bOFlag) ? 1 : 0)
                .add("BFlag", (bBFlag) ? 1 : 0)
                .add("RFlag", (bRFlag) ? 1 : 0)
                .add("PriFlag", yPriFlag)
                .add("RequestIdNumber", iRequestIdNum)
                .add("OptionalTlv", llOptionalTlv)
                .toString();
    }
}
| |
package net.ssehub.easy.dslCore.validation;
import java.io.ByteArrayOutputStream;
import java.io.PrintWriter;
import java.io.Writer;
import java.net.URISyntaxException;
import org.eclipse.emf.common.util.BasicDiagnostic;
import org.eclipse.emf.common.util.Diagnostic;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.xtext.diagnostics.Severity;
import org.eclipse.xtext.validation.CheckType;
import org.eclipse.xtext.validation.FeatureBasedDiagnostic;
import net.ssehub.easy.basics.logger.EASyLoggerFactory;
import net.ssehub.easy.basics.logger.EASyLoggerFactory.EASyLogger;
import net.ssehub.easy.dslCore.BundleId;
import net.ssehub.easy.dslCore.ModelUtility;
import net.ssehub.easy.dslCore.TranslationResult;
import net.ssehub.easy.dslCore.translation.Message;
/**
* Some validation helper methods.
*
* @author Holger Eichelberger
*/
public class ValidationUtils {
/**
 * Perform Xtext validation or emulate problem markers on editor save (partial parsing problem).
 * When {@code true}, {@link #checkModel} performs the model translation and reports its
 * messages through the supplied callback.
 */
public static final boolean PERFORM_XTEXT_VALIDATION = true;
/**
 * Processes all messages of a translation result, turning each one into an
 * Eclipse diagnostic appended to {@code diagnostic}.
 *
 * @param result the translation result to take the messages from
 * @param diagnostic the diagnostic instance to add messages to
 */
public static void processMessages(TranslationResult<?> result, BasicDiagnostic diagnostic) {
    final int count = result.getMessageCount();
    for (int i = 0; i < count; i++) {
        diagnostic.add(processMessage(result.getMessage(i)));
    }
}
/**
 * Processes a single <code>message</code> by mapping its status to an Xtext
 * severity and wrapping it into an Eclipse diagnostic.
 *
 * @param message the message to be processed
 * @return the related Eclipse diagnosic instance
 */
public static Diagnostic processMessage(Message message) {
    // Anything that is neither an error nor a warning is reported as info.
    Severity severity = Severity.INFO;
    switch (message.getStatus()) {
    case ERROR:
        severity = Severity.ERROR;
        break;
    case WARNING:
        severity = Severity.WARNING;
        break;
    default:
        break;
    }
    return createDiagnostic(severity, message.getDescription(), message.getCause(),
        message.getCausingFeature(), message.getCode(), /* offset, length, */null);
}
// checkstyle: stop parameter number check

/**
 * Creates an EMF Eclipse diagnostic instance.
 *
 * @param severity the severity of the diagnostic
 * @param message the message
 * @param object the causing EMF element in the parse tree
 * @param feature the causing grammar feature
 * @param index an optional index value (aka code)
 * @param code a textual description of the code (may be <b>null</b>)
 * @param issueData optional user specific data
 * @return the created diagnostic element
 */
private static Diagnostic createDiagnostic(Severity severity, String message,
    EObject object, EStructuralFeature feature, int index, String code,
    String... issueData) {
    // Feature-based diagnostics allow the editor to attach the marker to the
    // exact grammar feature instead of the whole element.
    return new FeatureBasedDiagnostic(toDiagnosticSeverity(severity), message, object,
        feature, index, CheckType.NORMAL, code, issueData);
}
// checkstyle: resume parameter number check
/**
 * Converts an xText severity into a diagnostic severity.
 *
 * @param severity the xText severity
 * @return the diagnostic severity
 * @throws IllegalArgumentException if <code>severity</code> cannot be mapped
 */
private static int toDiagnosticSeverity(Severity severity) {
    switch (severity) {
    case ERROR:
        return Diagnostic.ERROR;
    case WARNING:
        return Diagnostic.WARNING;
    case INFO:
        return Diagnostic.INFO;
    default:
        // fixed typo in the original message ("Unknow severity")
        throw new IllegalArgumentException("Unknown severity " + severity);
    }
}
/**
 * Defines common xText error types used when reporting validation messages
 * through {@link IModelValidationCallback#message}.
 *
 * @author Holger Eichelberger
 */
public enum MessageType {
    // a validation error
    ERROR,
    // a validation warning
    WARNING,
    // an informational message
    INFO;
}
/**
 * Defines a validation callback to provide the information required for validation.
 *
 * @author Holger Eichelberger
 *
 * @param <R> the model root type
 * @param <T> the transformation result type
 */
public interface IModelValidationCallback<R, T> {

    /**
     * Returns whether (interactive) validation is enabled. Typically, all model files in a project are considered.
     * However, some build processes such as Maven may copy a model for packaging it. Then the copy of the model may
     * not be valid, as it is not in a model location.
     *
     * @param uri the URI
     * @return <code>true</code> if validation is enabled, <code>false</code> else
     */
    public boolean isValidationEnabled(java.net.URI uri);

    /**
     * Creates a model for validation.
     *
     * @param root the model root
     * @param uri the model URI
     * @return the model translation result
     */
    public TranslationResult<T> createModel(R root, java.net.URI uri);

    /**
     * Handles a message.
     *
     * @param type the message type
     * @param message the message text
     * @param source the message source
     * @param feature the feature within <code>source</code>
     * @param identifier a numerical identifier for the message / for testing
     */
    public void message(MessageType type, String message, EObject source, EStructuralFeature feature,
        int identifier);

    /**
     * Prints out the translation result.
     *
     * @param result the translation result obtained from {@link #createModel(Object, java.net.URI)}
     * @param out the output writer
     */
    public void print(TranslationResult<T> result, Writer out);
}
/**
 * Returns the responsible logger.
 *
 * @return the responsible logger for this class, obtained from the EASy logger factory
 */
private static final EASyLogger getLogger() {
    return EASyLoggerFactory.INSTANCE.getLogger(ValidationUtils.class, BundleId.ID);
}
// checkstyle: stop exception type check
/**
 * Checks the model on top-level element layer. Translates the model via
 * <code>callback</code> and forwards all resulting messages back to the callback.
 *
 * @param <R> the model root type
 * @param <T> the transformation result type
 * @param unit the variability unit to start tests with
 * @param callback the callback providing relevant model information
 * @param debug shall debug information be emitted
 */
public static <R extends EObject, T> void checkModel(R unit, IModelValidationCallback<R, T> callback,
    boolean debug) {
    if (PERFORM_XTEXT_VALIDATION) {
        java.net.URI uri = toValidatableUri(unit);
        if (null != uri && callback.isValidationEnabled(uri)) {
            try {
                TranslationResult<T> result = callback.createModel(unit, uri);
                dispatchMessages(result, callback);
                if (debug && 0 == result.getMessageCount()) {
                    logTranslatedModel(result, callback);
                }
            } catch (Exception e) {
                String uriText = "";
                if (null != unit.eResource() && null != unit.eResource().getURI()) {
                    uriText = " " + unit.eResource().getURI().toString();
                }
                // fix: separator after "validating" was missing in the original message
                getLogger().error("while validating: " + e.getMessage() + uriText);
                e.printStackTrace(); // TODO(review): route through the logger if EASyLogger supports throwables
            }
        }
    }
}

/**
 * Determines the validatable file URI of the resource containing <code>unit</code>.
 *
 * @param unit the model unit
 * @return the file URI, or <code>null</code> if the unit has no resource/URI, the URI
 *     cannot be translated, or its scheme is not supported by xText
 */
private static java.net.URI toValidatableUri(EObject unit) {
    java.net.URI uri = null;
    if (null != unit.eResource() && null != unit.eResource().getURI()) {
        try {
            uri = ModelUtility.toNetUri(unit.eResource().getURI());
            if (!"file".equals(uri.getScheme())) {
                uri = null; // initializer may yet not be present, xText does not work with other URI schemes
            }
        } catch (URISyntaxException e) {
            // fix: separator before the exception message was missing in the original
            getLogger().error("error translating '" + unit.eResource().getURI()
                + "' during validation: " + e.getMessage());
        }
    }
    return uri;
}

/**
 * Forwards all messages in <code>result</code> to <code>callback</code>, mapping the
 * translation status to a {@link MessageType} (UNSUPPORTED is reported as ERROR).
 *
 * @param <T> the transformation result type
 * @param result the translation result carrying the messages
 * @param callback the callback receiving the messages
 */
private static <T> void dispatchMessages(TranslationResult<T> result,
    IModelValidationCallback<?, T> callback) {
    for (int m = 0; m < result.getMessageCount(); m++) {
        Message message = result.getMessage(m);
        final MessageType type;
        switch (message.getStatus()) {
        case ERROR:
        case UNSUPPORTED:
            type = MessageType.ERROR;
            break;
        case WARNING:
            type = MessageType.WARNING;
            break;
        default:
            type = MessageType.INFO;
            break;
        }
        callback.message(type, message.getDescription(), message.getCause(),
            message.getCausingFeature(), message.getCode());
    }
}

/**
 * Emits the translated model to the logger (debug support).
 *
 * @param <T> the transformation result type
 * @param result the translation result to print
 * @param callback the callback performing the printing
 */
private static <T> void logTranslatedModel(TranslationResult<T> result,
    IModelValidationCallback<?, T> callback) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    PrintWriter pOut = new PrintWriter(out);
    pOut.println(">TRANSLATED MODEL");
    callback.print(result, pOut);
    pOut.println("<TRANSLATED MODEL");
    // fix: PrintWriter buffers internally; without flushing, out.toString() may miss content
    pOut.flush();
    getLogger().info(out.toString());
}
// checkstyle: resume exception type check
}
| |
package apple.carplay;
import apple.NSObject;
import apple.carplay.protocol.CPBarButtonProviding;
import apple.carplay.protocol.CPListTemplateDelegate;
import apple.carplay.protocol.CPListTemplateItem;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSIndexPath;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
 * Generated NatJ/MOE binding for CarPlay's {@code CPListTemplate}: a CarPlay template
 * presenting sections of list items with an optional title, navigation-bar buttons and
 * (newer OS versions) an empty-view and an Assistant Cell configuration. All
 * {@code native} members map 1:1 to the Objective-C selectors named in their
 * {@code @Selector} annotations; do not edit generated members by hand.
 */
@Generated
@Library("CarPlay")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class CPListTemplate extends CPTemplate implements CPBarButtonProviding {
static {
// Registers this binding class with the NatJ runtime before any native call is made.
NatJ.register();
}
@Generated
protected CPListTemplate(Pointer peer) {
super(peer);
}
@Generated
@Selector("accessInstanceVariablesDirectly")
public static native boolean accessInstanceVariablesDirectly();
@Generated
@Owned
@Selector("alloc")
public static native CPListTemplate alloc();
@Owned
@Generated
@Selector("allocWithZone:")
public static native CPListTemplate allocWithZone(VoidPtr zone);
@Generated
@Selector("automaticallyNotifiesObserversForKey:")
public static native boolean automaticallyNotifiesObserversForKey(String key);
@Generated
@Selector("backButton")
public native CPBarButton backButton();
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:")
public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
@Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
@Mapped(ObjCObjectMapper.class) Object anArgument);
@Generated
@Selector("classFallbacksForKeyedArchiver")
public static native NSArray<String> classFallbacksForKeyedArchiver();
@Generated
@Selector("classForKeyedUnarchiver")
public static native Class classForKeyedUnarchiver();
@Generated
@Selector("debugDescription")
public static native String debugDescription_static();
/**
 * The list template's delegate is informed of list selection events.
 */
@Generated
@Selector("delegate")
@MappedReturn(ObjCObjectMapper.class)
public native CPListTemplateDelegate delegate();
@Generated
@Selector("description")
public static native String description_static();
@Generated
@Selector("hash")
@NUInt
public static native long hash_static();
@Generated
@Selector("init")
public native CPListTemplate init();
@Generated
@Selector("initWithCoder:")
public native CPListTemplate initWithCoder(NSCoder coder);
/**
 * Initialize a list template with one or more sections of items and an optional title.
 */
@Generated
@Selector("initWithTitle:sections:")
public native CPListTemplate initWithTitleSections(String title, NSArray<? extends CPListSection> sections);
@Generated
@Selector("instanceMethodForSelector:")
@FunctionPtr(name = "call_instanceMethodForSelector_ret")
public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
@Generated
@Selector("instanceMethodSignatureForSelector:")
public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
@Generated
@Selector("instancesRespondToSelector:")
public static native boolean instancesRespondToSelector(SEL aSelector);
@Generated
@Selector("isSubclassOfClass:")
public static native boolean isSubclassOfClass(Class aClass);
@Generated
@Selector("keyPathsForValuesAffectingValueForKey:")
public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
@Generated
@Selector("leadingNavigationBarButtons")
public native NSArray<? extends CPBarButton> leadingNavigationBarButtons();
@Generated
@Owned
@Selector("new")
public static native CPListTemplate new_objc();
@Generated
@Selector("resolveClassMethod:")
public static native boolean resolveClassMethod(SEL sel);
@Generated
@Selector("resolveInstanceMethod:")
public static native boolean resolveInstanceMethod(SEL sel);
/**
 * The sections displayed in this list.
 */
@Generated
@Selector("sections")
public native NSArray<? extends CPListSection> sections();
@Generated
@Selector("setBackButton:")
public native void setBackButton(CPBarButton value);
/**
 * The list template's delegate is informed of list selection events.
 */
@Generated
@Selector("setDelegate:")
public native void setDelegate_unsafe(@Mapped(ObjCObjectMapper.class) CPListTemplateDelegate value);
/**
 * The list template's delegate is informed of list selection events.
 *
 * Safe wrapper around {@link #setDelegate_unsafe}: manages the Java-side association
 * between this template and its delegate around the raw native property assignment.
 */
@Generated
public void setDelegate(@Mapped(ObjCObjectMapper.class) CPListTemplateDelegate value) {
Object __old = delegate();
if (value != null) {
// NOTE(review): the association appears to tie the Java delegate's lifetime to this
// object while the native side holds it (hence the "_unsafe" raw setter) -- confirm
// against the NatJ ObjCRuntime documentation.
org.moe.natj.objc.ObjCRuntime.associateObjCObject(this, value);
}
setDelegate_unsafe(value);
if (__old != null) {
// Dissociate the previous delegate only after the native property has been switched.
org.moe.natj.objc.ObjCRuntime.dissociateObjCObject(this, __old);
}
}
@Generated
@Selector("setLeadingNavigationBarButtons:")
public native void setLeadingNavigationBarButtons(NSArray<? extends CPBarButton> value);
@Generated
@Selector("setTrailingNavigationBarButtons:")
public native void setTrailingNavigationBarButtons(NSArray<? extends CPBarButton> value);
@Generated
@Selector("setVersion:")
public static native void setVersion_static(@NInt long aVersion);
@Generated
@Selector("superclass")
public static native Class superclass_static();
@Generated
@Selector("supportsSecureCoding")
public static native boolean supportsSecureCoding();
@Generated
@ProtocolClassMethod("supportsSecureCoding")
public boolean _supportsSecureCoding() {
return supportsSecureCoding();
}
/**
 * Title shown in the navigation bar while this template is visible.
 */
@Generated
@Selector("title")
public native String title();
@Generated
@Selector("trailingNavigationBarButtons")
public native NSArray<? extends CPBarButton> trailingNavigationBarButtons();
/**
 * Update the list of sections displayed in this list template, reloading
 * the table view displaying this list.
 */
@Generated
@Selector("updateSections:")
public native void updateSections(NSArray<? extends CPListSection> sections);
@Generated
@Selector("version")
@NInt
public static native long version_static();
/**
 * An optional array of strings, ordered from most to least preferred.
 * The variant strings should be provided as localized, displayable content.
 * The system will select the first variant that fits the available space.
 * <p>
 * If the list template does not contain any items (itemCount == 0), then
 * the template will display an empty view with a title and subtitle to indicate
 * that the template has no list items.
 * <p>
 * If the list template is updated to contain items, the empty view will be automatically
 * removed.
 */
@Generated
@Selector("emptyViewSubtitleVariants")
public native NSArray<String> emptyViewSubtitleVariants();
/**
 * An optional array of strings, ordered from most to least preferred.
 * The variant strings should be provided as localized, displayable content.
 * The system will select the first variant that fits the available space.
 * <p>
 * If the list template does not contain any items (itemCount == 0), then
 * the template will display an empty view with a title and subtitle to indicate
 * that the template has no list items.
 * <p>
 * If the list template is updated to contain items, the empty view will be automatically
 * removed.
 */
@Generated
@Selector("emptyViewTitleVariants")
public native NSArray<String> emptyViewTitleVariants();
/**
 * Return an @c NSIndexPath for the specified item, if it exists in any section
 * in this list template, or nil if not found.
 */
@Generated
@Selector("indexPathForItem:")
public native NSIndexPath indexPathForItem(@Mapped(ObjCObjectMapper.class) CPListTemplateItem item);
/**
 * The number of items currently displayed in this list template, across all sections.
 */
@Generated
@Selector("itemCount")
@NUInt
public native long itemCount();
/**
 * The maximum number of items, across all sections, that may appear in a @c CPListTemplate.
 * <p>
 * [@note] Your list template will display the first @c maximumItemCount items, across all sections.
 * Any items or sections beyond that limit will be trimmed.
 */
@Generated
@Selector("maximumItemCount")
@NUInt
public static native long maximumItemCount();
/**
 * The maximum number of sections that may appear in a @c CPListTemplate.
 * <p>
 * [@note] Your list template will display the first @c maximumSectionCount sections.
 * Any sections beyond that limit will be trimmed.
 */
@Generated
@Selector("maximumSectionCount")
@NUInt
public static native long maximumSectionCount();
/**
 * The number of sections currently displayed in this list template.
 */
@Generated
@Selector("sectionCount")
@NUInt
public native long sectionCount();
/**
 * An optional array of strings, ordered from most to least preferred.
 * The variant strings should be provided as localized, displayable content.
 * The system will select the first variant that fits the available space.
 * <p>
 * If the list template does not contain any items (itemCount == 0), then
 * the template will display an empty view with a title and subtitle to indicate
 * that the template has no list items.
 * <p>
 * If the list template is updated to contain items, the empty view will be automatically
 * removed.
 */
@Generated
@Selector("setEmptyViewSubtitleVariants:")
public native void setEmptyViewSubtitleVariants(NSArray<String> value);
/**
 * An optional array of strings, ordered from most to least preferred.
 * The variant strings should be provided as localized, displayable content.
 * The system will select the first variant that fits the available space.
 * <p>
 * If the list template does not contain any items (itemCount == 0), then
 * the template will display an empty view with a title and subtitle to indicate
 * that the template has no list items.
 * <p>
 * If the list template is updated to contain items, the empty view will be automatically
 * removed.
 */
@Generated
@Selector("setEmptyViewTitleVariants:")
public native void setEmptyViewTitleVariants(NSArray<String> value);
/**
 * The configuration of the Assistant Cell.
 * <p>
 * Assigning to this property will dynamically update the List Template to reflect the visibility, position, and intent identifier of the Assistant Cell.
 * <p>
 * [@note] The Assistant Cell is only supported by CarPlay Audio and Communication Apps.
 * <p>
 * Unlike @c CPListItem, your application will not receive a callback when the user selects the cell.
 * Instead, configure an Intents app extention to receive user requests from SiriKit, in order to turn the requests into an
 * app-specific actions.
 */
@Generated
@Selector("assistantCellConfiguration")
public native CPAssistantCellConfiguration assistantCellConfiguration();
/**
 * Initialize a list template with one or more sections of items, an optional title, and configuration for the assistant cell via a @c CPAssistantCellConfiguration object.
 * <p>
 * [@note] The Assistant Cell is only supported by CarPlay Audio and Communication Apps.
 * <p>
 * Unlike @c CPListItem, your application will not receive a callback when the user selects the cell.
 * Instead, configure an Intents app extention to receive user requests from SiriKit, in order to turn the requests into an
 * app-specific actions.
 */
@Generated
@Selector("initWithTitle:sections:assistantCellConfiguration:")
public native CPListTemplate initWithTitleSectionsAssistantCellConfiguration(String title,
NSArray<? extends CPListSection> sections, CPAssistantCellConfiguration assistantCellConfiguration);
/**
 * The configuration of the Assistant Cell.
 * <p>
 * Assigning to this property will dynamically update the List Template to reflect the visibility, position, and intent identifier of the Assistant Cell.
 * <p>
 * [@note] The Assistant Cell is only supported by CarPlay Audio and Communication Apps.
 * <p>
 * Unlike @c CPListItem, your application will not receive a callback when the user selects the cell.
 * Instead, configure an Intents app extention to receive user requests from SiriKit, in order to turn the requests into an
 * app-specific actions.
 */
@Generated
@Selector("setAssistantCellConfiguration:")
public native void setAssistantCellConfiguration(CPAssistantCellConfiguration value);
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.apex.benchmark.spillable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.apex.malhar.lib.state.spillable.SpillableArrayListImpl;
import org.apache.apex.malhar.lib.state.spillable.SpillableArrayListMultimapImpl;
import org.apache.apex.malhar.lib.state.spillable.SpillableMapImpl;
import org.apache.apex.malhar.lib.state.spillable.SpillableStateStore;
import org.apache.apex.malhar.lib.state.spillable.managed.ManagedStateSpillableStateStore;
import org.apache.apex.malhar.lib.utils.serde.LongSerde;
import org.apache.apex.malhar.lib.utils.serde.StringSerde;
import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.DefaultInputPort;
import com.datatorrent.api.Operator;
import com.datatorrent.api.Operator.ShutdownException;
import com.datatorrent.common.util.BaseOperator;
/**
 * Test operator that spills incoming tuples into spillable data structures and, per
 * window, cross-checks that the spilled data matches the recorded per-window counts.
 * Can be configured (via {@link #shutdownCount}) to fail deliberately so that recovery
 * can be exercised.
 *
 * @since 3.6.0
 */
public class SpillableTestOperator extends BaseOperator implements Operator.CheckpointNotificationListener
{
  private static final Logger logger = LoggerFactory.getLogger(SpillableTestOperator.class);

  // Identifiers of the spillable structures within the shared state store.
  public static final byte[] ID1 = new byte[] {(byte)1};
  public static final byte[] ID2 = new byte[] {(byte)2};
  public static final byte[] ID3 = new byte[] {(byte)3};

  // windowId (as string) -> tuples received in that window.
  public SpillableArrayListMultimapImpl<String, String> multiMap;
  public ManagedStateSpillableStateStore store;
  // Total tuples processed; checkpointed (non-transient).
  public long totalCount = 0;
  // Tuples processed in the current window; rebuilt each window (transient).
  public transient long countInWindow;
  // First window id seen by this instance; -1 until the first window begins.
  public long minWinId = -1;
  // Latest committed window id; -1 until the first commit.
  public long committedWinId = -1;
  public long windowId;
  // windowId -> tuple count; used by checkData() to verify the multimap contents.
  public SpillableMapImpl<Long, Long> windowToCount;
  // When totalCount reaches this value, fail on purpose to test recovery; -1 disables.
  public long shutdownCount = -1;
  // Captures the first detected inconsistency for inspection by the test harness.
  public static Throwable errorTrace;
  private long lastLogTime;
  private long beginTime;
  private long lastTotalCount = 0;

  public final transient DefaultInputPort<String> input = new DefaultInputPort<String>()
  {
    @Override
    public void process(String tuple)
    {
      processTuple(tuple);
    }
  };

  /**
   * Spills the tuple under the current window id; throws deliberately when the
   * configured shutdown count is reached (recovery test).
   *
   * @param tuple the incoming tuple
   */
  public void processTuple(String tuple)
  {
    if (++totalCount == shutdownCount) {
      throw new RuntimeException("Test recovery. count = " + totalCount);
    }
    countInWindow++;
    multiMap.put("" + windowId, tuple);
  }

  @Override
  public void setup(OperatorContext context)
  {
    super.setup(context);
    // Lazily create the spillable structures on first activation; after recovery the
    // checkpointed instances are reused.
    if (windowToCount == null) {
      windowToCount = createWindowToCountMap(store);
    }
    if (multiMap == null) {
      multiMap = createMultimap(store);
    }
    store.setup(context);
    windowToCount.setup(context);
    multiMap.setup(context);
    lastLogTime = System.currentTimeMillis();
    beginTime = lastLogTime;
    checkData();
  }

  /**
   * Verifies, for every window between the last committed and the current one, that the
   * number of tuples spilled into {@link #multiMap} matches the count recorded in
   * {@link #windowToCount}. On a mismatch the error is stored in {@link #errorTrace}
   * and the operator is shut down.
   */
  public void checkData()
  {
    long startTime = System.currentTimeMillis();
    logger.debug("check data: totalCount: {}; minWinId: {}; committedWinId: {}; curWinId: {}", totalCount,
        this.minWinId, committedWinId, this.windowId);
    for (long winId = Math.max(committedWinId + 1, minWinId); winId < this.windowId; ++winId) {
      Long count = this.windowToCount.get(winId);
      SpillableArrayListImpl<String> datas = (SpillableArrayListImpl<String>)multiMap.get("" + winId);
      String msg;
      // Both structures must agree for this window. The original condition enumerated
      // all three null combinations; "either is null" is provably equivalent.
      if (datas == null || count == null) {
        msg = "Invalid data/count. datas: " + datas + "; count: " + count;
        logger.error(msg);
        errorTrace = new RuntimeException(msg);
        throw new ShutdownException();
      } else {
        int dataSize = datas.size();
        if (count.longValue() != dataSize) {
          msg = String.format("data size not equal: window Id: %d; datas size: %d; count: %d", winId, dataSize, count);
          logger.error(msg);
          errorTrace = new RuntimeException(msg);
          throw new ShutdownException();
        }
      }
    }
    logger.info("check data took {} millis.", System.currentTimeMillis() - startTime);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void beginWindow(long windowId)
  {
    store.beginWindow(windowId);
    multiMap.beginWindow(windowId);
    if (minWinId < 0) {
      minWinId = windowId;
    }
    this.windowId = windowId;
    countInWindow = 0;
  }

  @Override
  public void endWindow()
  {
    multiMap.endWindow();
    windowToCount.put(windowId, countInWindow);
    windowToCount.endWindow();
    store.endWindow();
    // Verify and report every 10th window to keep the overhead bounded.
    if (windowId % 10 == 0) {
      checkData();
      logStatistics();
    }
  }

  /**
   * Logs total and per-period throughput.
   * Fixes over the original: the period counters are now reset after each call (they
   * were never updated, so the "period" values always covered the whole run), and the
   * elapsed times are clamped to at least 1 ms to avoid division by zero.
   */
  public void logStatistics()
  {
    long now = System.currentTimeMillis();
    long countInPeriod = totalCount - lastTotalCount;
    long timeInPeriod = Math.max(1, now - lastLogTime);
    long totalTime = Math.max(1, now - beginTime);
    logger.info(
        "Statistics: total count: {}; period count: {}; total rate (per second): {}; period rate (per second): {}",
        totalCount, countInPeriod, totalCount * 1000 / totalTime, countInPeriod * 1000 / timeInPeriod);
    lastTotalCount = totalCount;
    lastLogTime = now;
  }

  @Override
  public void beforeCheckpoint(long windowId)
  {
    store.beforeCheckpoint(windowId);
  }

  @Override
  public void checkpointed(long windowId)
  {
  }

  @Override
  public void committed(long windowId)
  {
    this.committedWinId = windowId;
    store.committed(windowId);
  }

  /** Creates the windowId -&gt; tuples multimap backed by the given store (id {@link #ID1}). */
  public static SpillableArrayListMultimapImpl<String, String> createMultimap(SpillableStateStore store)
  {
    return new SpillableArrayListMultimapImpl<String, String>(store, ID1, 0L, new StringSerde(),
        new StringSerde());
  }

  /** Creates a string-to-string spillable map backed by the given store (id {@link #ID2}). */
  public static SpillableMapImpl<String, String> createMap(SpillableStateStore store)
  {
    return new SpillableMapImpl<String, String>(store, ID2, 0L, new StringSerde(),
        new StringSerde());
  }

  /** Creates the windowId -&gt; count map backed by the given store (id {@link #ID3}). */
  public static SpillableMapImpl<Long, Long> createWindowToCountMap(SpillableStateStore store)
  {
    return new SpillableMapImpl<Long, Long>(store, ID3, 0L, new LongSerde(),
        new LongSerde());
  }
}
| |
/**
* Created: Dec 12, 2014 2:53:44 PM
*/
package org.kfm.camel.processor;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.PropertyInject;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import org.kfm.camel.converter.StrokeConverter;
import org.kfm.camel.dao.AfdImageDao;
import org.kfm.camel.dao.NoteDao;
import org.kfm.camel.dao.PageDao;
import org.kfm.camel.dao.TemplateDao;
import org.kfm.camel.entity.DocumentFactory;
import org.kfm.camel.entity.NoteResources;
import org.kfm.camel.entity.PageFactory;
import org.kfm.camel.entity.UploadTransaction;
import org.kfm.camel.entity.evernote.XMLStrokes;
import org.kfm.camel.evernote.EvernoteNoteContentClass;
import org.kfm.camel.exception.ENOAuthTokenExpiredException;
import org.kfm.camel.exception.ENStorageQuotaReachedException;
import org.kfm.camel.image.ImageFactory;
import org.kfm.camel.message.MessageHeader;
import org.kfm.camel.util.Utils;
import org.kfm.jpa.AfdImage;
import org.kfm.jpa.Document;
import org.kfm.jpa.Page;
import org.kfm.jpa.Template;
import org.kfm.jpa.TimeMap;
import org.springframework.beans.factory.annotation.Autowired;
import com.evernote.edam.error.EDAMNotFoundException;
import com.evernote.edam.error.EDAMUserException;
import com.evernote.edam.type.Data;
import com.evernote.edam.type.Note;
import com.evernote.edam.type.NoteAttributes;
import com.evernote.edam.type.Resource;
import com.evernote.edam.type.ResourceAttributes;
import com.livescribe.afp.Afd;
import com.livescribe.afp.PageStroke;
import com.livescribe.afp.stf.STFStroke;
/**
* <p></p>
*
* @author <a href="mailto:kmurdoff@livescribe.com">Kevin F. Murdoff</a>
* @version 1.0
*/
public class StrokesUpdateProcessor implements Processor {
private Logger logger = Logger.getLogger(this.getClass().getName());
@Autowired
private PageDao pageDao;
@Autowired
private NoteDao noteDao;
@Autowired
private TemplateDao templateDao;
@Autowired
private AfdImageDao afdImageDao;
@PropertyInject("evernotebookkeeper.evernote.ui.setting.image")
private String uiSettingImagePath;
@PropertyInject("evernotebookkeeper.evernote.ui.setting.image.mimetype")
private String uiSettingImageMimeType;
@PropertyInject("evernotebookkeeper.livescribe.logo")
private String livescribeLogoPath;
@PropertyInject("evernotebookkeeper.livescribe.logo.mimetype")
private String livescribeLogoMimeType;
/**
 * Default constructor. Collaborators (DAOs and configuration values) are injected
 * through the {@code @Autowired} and {@code @PropertyInject} field annotations, so no
 * initialization is required here.
 */
public StrokesUpdateProcessor() {
}
/* (non-Javadoc)
* @see org.apache.camel.Processor#process(org.apache.camel.Exchange)
*/
@Override
public void process(Exchange exchange) throws Exception {
String method = "process()";
Boolean isNewDocument = exchange.getIn().getHeader(MessageHeader.IS_NEW_DOCUMENT.getHeader(), Boolean.class);
String penDisplayId = exchange.getIn().getHeader(MessageHeader.PEN_DISPLAY_ID.getHeader(), String.class);
String oAuthAccessToken = exchange.getIn().getHeader(MessageHeader.ACCESS_TOKEN.getHeader(), String.class);
String uid = exchange.getIn().getHeader(MessageHeader.UID.getHeader(), String.class);
Long enUserId = exchange.getIn().getHeader(MessageHeader.EN_USER_ID.getHeader(), Long.class);
UploadTransaction upTx = exchange.getIn().getBody(UploadTransaction.class);
Afd afd = upTx.getAfd();
String afdGuid = afd.getGuid();
String afdName = afd.getTitle();
int afdCopy = afd.getCopy();
String afdVersion = afd.getVersion();
// Iterate over the uploaded strokes-per-page objects.
List<PageStroke> pageStrokes = afd.getPageStrokes();
for (PageStroke pageStroke : pageStrokes) {
int pageIndex = pageStroke.getPageIndex();
String docGuid = pageStroke.getDocumentGuid();
// Convert the new strokes to XMLStrokes for storage in Evernote.
TreeSet<STFStroke> stfStrokes = (TreeSet)pageStroke.getStrokes();
XMLStrokes newXmlStrokes = StrokeConverter.fromSTFStrokes(stfStrokes);
Document document = null;
Page page = null;
// Find a Document (from the database) matching the uploaded AFD.
Document matchingDocument = findMatchingDocument(upTx, docGuid);
// The Evernote Resource object storing the strokes as XML.
Resource strokeResource = null;
// The Evernote Resource object storing the image of strokes (thumbnail?).
Resource strokeImageResource = null;
NoteResources noteResources = null;
//--------------------------------------------------
// After this block, either an existing Document and Page are
// found, or new ones are created.
if (matchingDocument == null) {
document = DocumentFactory.create(afd, docGuid, penDisplayId, enUserId);
page = PageFactory.create(document, pageIndex);
} else {
document = matchingDocument;
Page matchingPage = findMatchingPage(matchingDocument, pageIndex);
if (matchingPage == null) {
page = PageFactory.create(document, pageIndex);
} else {
page = matchingPage;
}
}
Note note = noteDao.find(page, oAuthAccessToken);
String noteTitle = generatePageNoteTitle(page.getLabel(), page.getDocument().getDocName());
if (note == null) {
note = new Note();
note.setTitle(noteTitle);
NoteAttributes noteAttributes = new NoteAttributes();
noteAttributes.setContentClass(EvernoteNoteContentClass.PAGE_VERSION_1_0_2.getLabel());
note.setAttributes(noteAttributes);
note.setContent("<!DOCTYPE en-note SYSTEM \"http://xml.evernote.com/pub/enml2.dtd\"><en-note><h1>Processing Strokes from your Smartpen...</h1></en-note>");
try {
note = noteDao.save(note, oAuthAccessToken, docGuid);
} catch (EDAMNotFoundException e) {
String msg = "Exception thrown";
if (e.getIdentifier().equals("Note.notebookGuid")) {
logger.error(method + " - NotebookGuid '" + note.getNotebookGuid() + "' not found in Evernote.");
// TODO: Finish here.
}
}
addImageResources(page, note);
} else {
String noteNotebookGuid = note.getNotebookGuid();
String docNotebookGuid = document.getEnNotebookGuid();
// ISSUE:
// If the Note's NotebookGuid is different from the Document's,
// this places the Note back in the Notebook stored in the database.
//
// If the user had moved the Note into a different Notebook,
// this part would undo that and place it back in the "original"
// Notebook.
if ((docNotebookGuid != null) && (!docNotebookGuid.equals(noteNotebookGuid))) {
note.setNotebookGuid(docNotebookGuid);
}
// ISSUE:
// If the Note was "deleted" but not "expunged", this
// section pulls the Note out of the Trash.
if ((!note.isActive()) && (note.isSetDeleted())) {
note.setActive(true);
note.setDeletedIsSet(false);
}
if (note.getResourcesSize() > 0) {
// Find the Image and Stroke Resources.
noteResources = findNoteResources(note, page);
strokeResource = noteResources.getStrokeResource();
if ((strokeResource != null) && (strokeResource.getData() != null)) {
// Add existing XMLStrokes to the new XMLStrokes.
XMLStrokes xmlStrokes = StrokeConverter.fromResource(strokeResource);
newXmlStrokes.getList().addAll(xmlStrokes.getList());
} else {
logger.warn(method + " - No stroke Resource found for '" + noteTitle + "'.");
}
} else {
logger.warn(method + " - No Resources found for Note '" + noteTitle + "'.");
}
// Test what "version" of Note ContentClass to bring up-to-date
String noteAttributesContentClass = note.getAttributes().getContentClass();
if (noteAttributesContentClass != null) {
EvernoteNoteContentClass eNCC = EvernoteNoteContentClass.getEvernoteNoteContentClass(noteAttributesContentClass);
switch (eNCC) {
case PAGE_VERSION_1_0_0:
case PAGE_VERSION_1_0_1: {
//Need to inject our UI Setting PNG and Logo Inactive frost GIF (as resources) into Evernote Note
addImageResources(page, note);
note.getAttributes().setContentClass(EvernoteNoteContentClass.PAGE_VERSION_1_0_2.getLabel());
break;
}
case PAGE_VERSION_1_0_2:
break;
default:
break;
}
}
}
// TODO: "Add our strokes" EBK 2279
if (noteResources.getStrokeResource() == null) {
strokeResource = new Resource();
strokeResource.setMime("application/xml");
noteResources.setStrokeResource(strokeResource);
note.addToResources(strokeResource);
}
if (noteResources.getImageResource() == null) {
strokeImageResource = new Resource();
strokeImageResource.setMime("image/png");
noteResources.setImageResource(strokeImageResource);
note.addToResources(strokeImageResource);
}
Data strokeResourceData = createStrokeData(newXmlStrokes, page);
strokeResource.setData(strokeResourceData);
//--------------------------------------------------
// Generate Page Image
//--------------------------------------------------
BufferedImage img = ImageFactory.createImageForPage(page, strokeResource, 1);
Resource newStrokeImageResource = ImageFactory.createImageResource(page, strokeResource, strokeImageResource, scale);
byte[] newStrokeImageHash = newStrokeImageResource.getData().getBodyHash();
String strokeImageHashString = Utils.asHexString(newStrokeImageHash);
page.setEnImageResourceHash(strokeImageHashString);
//--------------------------------------------------
// Matching Document
//--------------------------------------------------
if (matchingDocument != null) {
Page matchingPage = findMatchingPage(matchingDocument, pageIndex);
//--------------------------------------------------
// Matching Page
//--------------------------------------------------
if (matchingPage != null) {
String enNoteGuid = matchingPage.getEnNoteGuid();
// Note note = noteDao.find(matchingPage, oAuthAccessToken);
if (enNoteGuid != null) {
if (note != null) {
String enNotebookGuid = matchingDocument.getEnNotebookGuid();
// If the Notebook GUID has changed, update it with
// the database version.
if (!note.getNotebookGuid().equals(enNotebookGuid)) {
note.setNotebookGuid(enNotebookGuid);
}
// If Note was not 'expunged', move it out of the trash.
if ((!note.isActive()) && (note.isSetDeleted())) {
logger.warn(method + " - Note has been deleted, but not expunged. Moving it out of trash.");
note.setActive(true);
note.setDeletedIsSet(false);
}
// If the Note has Resources ...
if (note.getResourcesSize() > 0) {
// If the stroke Resource was found ...
if ((strokeResource != null) && (strokeResource.getData() != null)) {
// TreeSet<STFStroke> stfStrokes = (TreeSet)pageStroke.getStrokes();
XMLStrokes psXmlStrokes = StrokeConverter.fromSTFStrokes(stfStrokes);
// XMLStrokes previousStrokes = null;
byte[] strokeResourceData = strokeResource.getData().getBody();
XMLStrokes previousStrokes = StrokeConverter.fromBytesToXMLStrokes(strokeResourceData);
previousStrokes.list.addAll(psXmlStrokes.list);
psXmlStrokes = previousStrokes;
}
}
} else {
logger.debug(method + " - Note not found for Page with document GUID '" + docGuid + "' and page index '" + pageIndex + "'.");
}
} else {
logger.debug(method + " - Note GUID not found in Page. Page must never have been sent to Evernote.");
// String noteTitle = generatePageNoteTitle(matchingPage.getLabel(), matchingPage.getDocument().getDocName());
if (note == null) {
note = new Note();
note.setNotebookGuid(matchingDocument.getEnNotebookGuid());
note.setTitle(noteTitle);
// Setting Note attribute contentClass to make note read-only to all other's
NoteAttributes tNoteAttrib = new NoteAttributes();
tNoteAttrib.setContentClass(EvernoteNoteContentClass.PAGE_VERSION_1_0_2.getLabel());
note.setAttributes(tNoteAttrib);
// TODO: Refactor to use Velocity.
// Because we actually commit the "note" to EN (to get
// resources saved) we need empty content to keep from
// getting an exception.
note.setContent("<!DOCTYPE en-note SYSTEM \"http://xml.evernote.com/pub/enml2.dtd\"><en-note><h1>Processing Strokes from your Smartpen...</h1></en-note>");
//--------------------------------------------------
// Create the new Page Note in Evernote.
//--------------------------------------------------
try {
note = noteDao.save(note, oAuthAccessToken, uid);
} catch (EDAMNotFoundException e) {
e.printStackTrace();
// TODO: Retry?
} catch (EDAMUserException e) {
e.printStackTrace();
} catch (ENOAuthTokenExpiredException e) {
e.printStackTrace();
} catch (ENStorageQuotaReachedException e) {
e.printStackTrace();
}
if (note != null) {
addImageResources(matchingPage, note);
}
} else {
note.setTitle(noteTitle);
String noteAttributesContentClass = note.getAttributes().getContentClass();
if (noteAttributesContentClass != null) {
EvernoteNoteContentClass eNCC = EvernoteNoteContentClass.getEvernoteNoteContentClass(noteAttributesContentClass);
switch (eNCC) {
case PAGE_VERSION_1_0_0:
case PAGE_VERSION_1_0_1: {
// Add the logo and settings images as Evernote Resources of the Note.
addImageResources(matchingPage, note);
note.getAttributes().setContentClass(EvernoteNoteContentClass.PAGE_VERSION_1_0_2.getLabel());
break;
}
case PAGE_VERSION_1_0_2:
break;
default:
break;
}
}
}
String tNewStrokeHexHash = null;
byte[] tNewStrokeBytes = null;
byte[] tNewStrokeHash = null;
// TODO: "Add our strokes" EBK 2279
String xml = StrokeConverter.toXmlString(xmlStrokes);
byte[] strokeHash = Utils.toMD5Hash(xml);
String strokeHashHexString = Utils.asHexString(strokeHash);
matchingPage.setEnStrokeResourceHash(strokeHashHexString);
Data strokeResourceData = new Data();
byte[] xmlBytes = xml.getBytes("UTF-8");
// Set the body of the Resource Data to the raw
// bytes of the XML String.
strokeResourceData.setBody(xmlBytes);
// Set the body hash to the MD5 digest of those
// XML String bytes.
strokeResourceData.setBodyHash(strokeHash);
// Set the size of the Resource Data to be the size
// (length) of the XML String bytes.
strokeResourceData.setSize(xmlBytes.length);
}
//--------------------------------------------------
// New Page
//--------------------------------------------------
} else {
// TODO: Create new Page
Page newPage = new Page();
// newPage.set
}
//--------------------------------------------------
// New Document
//--------------------------------------------------
} else {
// TODO: Create new Document
}
}
}
/**
 * <p>Serialises the given strokes to XML and wraps the XML bytes in an
 * Evernote {@code Data} payload with body, MD5 body hash, and size set.</p>
 *
 * <p>Side effect: the hex form of the hash is stored on {@code page} via
 * {@code setEnStrokeResourceHash} so the resource can be matched on a later
 * sync.</p>
 *
 * @param newXmlStrokes strokes to serialise into the resource body
 * @param page page record updated with the new stroke-resource hash
 * @return the populated {@code Data} instance
 * @throws NoSuchAlgorithmException if the MD5 digest is unavailable
 * @throws UnsupportedEncodingException if UTF-8 is unsupported (cannot happen
 *         on a compliant JVM)
 */
private Data createStrokeData(XMLStrokes newXmlStrokes, Page page) throws NoSuchAlgorithmException, UnsupportedEncodingException {
String xml = StrokeConverter.toXmlString(newXmlStrokes);
byte[] strokeHash = Utils.toMD5Hash(xml);
String strokeHashHexString = Utils.asHexString(strokeHash);
// Remember the hash on the page so findNoteResources() can match by hash.
page.setEnStrokeResourceHash(strokeHashHexString);
Data strokeResourceData = new Data();
byte[] xmlBytes = xml.getBytes("UTF-8");
// Set the body of the Resource Data to the raw
// bytes of the XML String.
strokeResourceData.setBody(xmlBytes);
// Set the body hash to the MD5 digest of those
// XML String bytes.
strokeResourceData.setBodyHash(strokeHash);
// Set the size of the Resource Data to be the size
// (length) of the XML String bytes.
strokeResourceData.setSize(xmlBytes.length);
return strokeResourceData;
}
/**
 * <p>Attaches the UI-settings and Livescribe-logo images to the given note as
 * Evernote resources, recording each resource's hex-encoded MD5 body hash on
 * the page so the resources can be identified later.</p>
 *
 * @param page page record that receives the two resource hashes
 * @param note note the image resources are appended to
 * @throws IOException if either image cannot be read from the classpath
 */
private void addImageResources(Page page, Note note) throws IOException {
Resource settingsResource = loadResource(uiSettingImagePath, uiSettingImageMimeType);
note.addToResources(settingsResource);
Resource logoResource = loadResource(livescribeLogoPath, livescribeLogoMimeType);
note.addToResources(logoResource);
// Persist the hex-encoded body hashes on the page record.
byte[] settingsHash = settingsResource.getData().getBodyHash();
page.setEnLsUiSetResourceHash(new String(Hex.encodeHex(settingsHash)));
byte[] logoHash = logoResource.getData().getBodyHash();
page.setEnLsLogoResourceHash(new String(Hex.encodeHex(logoHash)));
}
/**
 * <p>Finds the page of the given document whose index equals
 * {@code pageIndex}.</p>
 *
 * @param document the document whose pages are searched
 * @param pageIndex the page index to match
 * @return the matching {@code Page}, or {@code null} when no page matches
 */
private Page findMatchingPage(Document document, int pageIndex) {
// Fixed: javadoc previously documented a nonexistent parameter name and an
// unused local 'method' variable was removed.
for (Page page : document.getPages()) {
if (page.getPageIndex() == pageIndex) {
return page;
}
}
return null;
}
/**
 * <p>Returns an object containing both the image and stroke
 * <code>Resource</code> of the given <code>Note</code> matching the
 * given <code>Page</code> instance.</p>
 *
 * <p>A resource matches either by GUID (when the page already stores the
 * resource GUID) or, failing that, by comparing the resource body's MD5 hash
 * against the hash stored on the page record.</p>
 *
 * <p>Returns an <u>empty</u> instance if neither <code>Resource</code> is found.</p>
 *
 * @param note The <code>Note</code> to search; may be {@code null}.
 * @param page The <code>Page</code> to match.
 *
 * @return an object containing both the image and stroke
 * <code>Resource</code>; never {@code null}.
 */
private NoteResources findNoteResources(Note note, Page page) {
String method = "findNoteResources()";
NoteResources noteResources = new NoteResources();
if (note == null) {
logger.error(method + " - Provided Note instance was 'null'.");
return noteResources;
}
for (Resource noteResource : note.getResources()) {
// Fast path: match the stroke Resource by its stored GUID.
if (noteResource.getGuid().equals(page.getEnStrokeResourceGuid())) {
logger.debug(method + " - Stroke Resource of newly created/updated Note found.");
noteResources.setStrokeResource(noteResource);
continue;
}
// Fast path: match the image Resource by its stored GUID.
if (noteResource.getGuid().equals(page.getEnImageResourceGuid())) {
logger.debug(method + " - Image Resource of newly created/updated Note found.");
noteResources.setImageResource(noteResource);
continue;
}
Data noteData = noteResource.getData();
if (noteData == null) {
logger.warn(method + " - The Note's Data was 'null'. Continuing ...");
continue;
}
byte[] dataBytes = noteData.getBodyHash();
// Fall back to matching on the MD5 body hash stored in the 'page' record.
// Null-guarded: the page may never have had a hash recorded (previously NPE'd).
String pageStrokeHashHex = page.getEnStrokeResourceHash();
if (pageStrokeHashHex != null) {
try {
byte[] pageStrokeHash = Hex.decodeHex(pageStrokeHashHex.toCharArray());
if (Arrays.equals(dataBytes, pageStrokeHash)) {
logger.debug(method + " - Found Stroke Resource of newly created/updated Note.");
noteResources.setStrokeResource(noteResource);
continue;
}
} catch (DecoderException e) {
logger.error(method + " - DecoderException thrown when decoding stroke Resource Hex hash.", e);
}
}
String pageImageHashHex = page.getEnImageResourceHash();
if (pageImageHashHex != null) {
try {
byte[] pageImageHash = Hex.decodeHex(pageImageHashHex.toCharArray());
if (Arrays.equals(dataBytes, pageImageHash)) {
logger.debug(method + " - Found Image Resource of newly created/updated Note.");
noteResources.setImageResource(noteResource);
continue;
}
} catch (DecoderException e) {
logger.error(method + " - DecoderException thrown when decoding image Resource Hex hash.", e);
}
}
}
return noteResources;
}
/**
 * <p>Looks up the document of the given upload transaction whose GUID equals
 * {@code afdGuid}.</p>
 *
 * @param upTx upload transaction whose documents are searched
 * @param afdGuid the AFD GUID to match
 * @return the matching {@code Document}, or {@code null} when absent
 */
private Document findMatchingDocument(UploadTransaction upTx, String afdGuid) {
Document found = null;
for (Document candidate : upTx.getDocuments()) {
if (candidate.getGuid().equals(afdGuid)) {
found = candidate;
break;
}
}
return found;
}
/**
 * <p>Builds a note title of the form {@code "Page <n> - <documentName>"},
 * left-padding the page number with spaces so titles align for up to three
 * digits. Falls back to {@code "Blank Page - <documentName>"} when the page
 * number is non-numeric or not positive.</p>
 *
 * @param pageNumberString page number as text (may be non-numeric)
 * @param documentName document name appended to the title
 * @return the generated note title
 */
private String generatePageNoteTitle(String pageNumberString, String documentName) {
String pageTitle = "Blank Page" + " - " + documentName;
try {
int pageNumber = Integer.parseInt(pageNumberString);
if (pageNumber <= 0) {
return pageTitle;
}
String padding;
if (pageNumber > 99) {
padding = "";
} else if (isBetween(pageNumber, 10, 99)) {
padding = " ";
} else {
padding = "  ";
}
// BUG FIX: '%i' is not a valid Java format conversion and threw
// UnknownFormatConversionException at runtime (which the catch below
// does not handle). '%d' is the correct decimal conversion.
pageTitle = String.format("Page %s%d - %s", padding, pageNumber, documentName);
} catch (NumberFormatException e) {
pageTitle = "Blank Page" + " - " + documentName;
}
return pageTitle;
}
/**
 * Inclusive range check: {@code true} when {@code lower <= pgNum <= upper}.
 */
private boolean isBetween(int pgNum, int lower, int upper) {
return !(pgNum < lower || pgNum > upper);
}
/**
 * <p>Loads an image from the classpath and wraps it in an Evernote
 * {@code Resource} with its MD5 body hash, size, MIME type, and file-name
 * attribute populated.</p>
 *
 * @param imagePath classpath location of the image; also used as the
 *        resource's file-name attribute
 * @param imageMimeType MIME type to set on the resource
 * @return the populated {@code Resource}, or {@code null} if the MD5
 *         algorithm is unavailable
 * @throws IOException if the image is missing from the classpath or cannot
 *         be read
 */
private Resource loadResource(String imagePath, String imageMimeType) throws IOException {
// Fixed: the classpath stream was previously leaked, and a missing image
// surfaced as an opaque NullPointerException instead of an IOException.
java.io.InputStream imageStream = getClass().getClassLoader().getResourceAsStream(imagePath);
if (imageStream == null) {
throw new IOException("Classpath resource not found: " + imagePath);
}
byte[] imageBytes;
try {
imageBytes = IOUtils.toByteArray(imageStream);
} finally {
imageStream.close();
}
MessageDigest digest = null;
try {
digest = MessageDigest.getInstance("MD5");
} catch (NoSuchAlgorithmException nsae) {
// Fixed stale method name in the log message (was a copy/paste of
// "getUISettingPNGAsEvernoteResource()").
logger.error("loadResource() encountered the following exception(returning <NULL>):", nsae);
return null;
}
byte[] imageHash = digest.digest(imageBytes);
Data resourceData = new Data();
resourceData.setBody(imageBytes);
resourceData.setBodyHash(imageHash);
resourceData.setSize(imageBytes.length);
ResourceAttributes resourceAttributes = new ResourceAttributes();
resourceAttributes.setFileName(imagePath);
Resource resource = new Resource();
resource.setMime(imageMimeType);
resource.setAttributes(resourceAttributes);
resource.setData(resourceData);
return resource;
}
/**
 * Logs the runtime type of the exchange's inbound message body, for
 * debugging.
 *
 * @param exchange exchange whose inbound body is inspected
 */
private void printExchange(Exchange exchange) {
Object body = exchange.getIn().getBody();
if (body instanceof UploadTransaction) {
logger.debug("Found UploadTransaction in body of Exchange.");
UploadTransaction tx = (UploadTransaction) body;
logger.debug(tx);
} else if (body instanceof List<?>) {
logger.debug("Found List<?> in body of Exchange.");
} else {
logger.debug("Found '" + body.getClass().getCanonicalName() + "' in body of Exchange.");
}
}
}
| |
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import java.io.IOException;
import java.text.DateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.EnumSet;
import java.util.Map;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.DecodeHintType;
import com.google.zxing.Result;
import com.google.zxing.ResultMetadataType;
import com.google.zxing.ResultPoint;
import com.google.zxing.client.android.camera.CameraManager;
import com.google.zxing.client.android.clipboard.ClipboardInterface;
import com.google.zxing.client.android.history.HistoryActivity;
import com.google.zxing.client.android.history.HistoryItem;
import com.google.zxing.client.android.history.HistoryManager;
import com.google.zxing.client.android.result.ResultButtonListener;
import com.google.zxing.client.android.result.ResultHandler;
import com.google.zxing.client.android.result.ResultHandlerFactory;
import com.google.zxing.client.android.result.supplement.SupplementalInfoRetriever;
import com.google.zxing.client.android.share.ShareActivity;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.util.Log;
import android.util.TypedValue;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
/**
* This activity opens the camera and does the actual scanning on a background
* thread. It draws a viewfinder to help the user place the barcode correctly,
* shows feedback as the image processing is happening, and then overlays the
* results when a scan is successful.
*
* @author dswitkin@google.com (Daniel Switkin)
* @author Sean Owen
*/
public final class CaptureActivity extends Activity implements SurfaceHolder.Callback {
private static final String TAG = CaptureActivity.class.getSimpleName();
// How long a result from an external intent stays on screen by default.
private static final long DEFAULT_INTENT_RESULT_DURATION_MS = 1500L;
// Pause between bulk-mode scans so the same barcode isn't re-read immediately.
private static final long BULK_MODE_SCAN_DELAY_MS = 1000L;
// URL prefixes recognised as ZXing scan links (see isZXingURL()).
private static final String[] ZXING_URLS = { "http://zxing.appspot.com/scan", "zxing://scan/" };
public static final int HISTORY_REQUEST_CODE = 0x0000bacc;
// The subset of result metadata worth displaying on the result screen.
private static final Collection<ResultMetadataType> DISPLAYABLE_METADATA_TYPES = EnumSet.of(
ResultMetadataType.ISSUE_NUMBER, ResultMetadataType.SUGGESTED_PRICE,
ResultMetadataType.ERROR_CORRECTION_LEVEL, ResultMetadataType.POSSIBLE_COUNTRY);
private CameraManager cameraManager;          // recreated in onResume(); closed in onPause()
private CaptureActivityHandler handler;       // decode/UI message loop; null while paused
private Result savedResultToShow;             // result parked until 'handler' exists
private ViewfinderView viewfinderView;
private TextView statusView;
private View resultView;
private Result lastResult;                    // last decode; drives BACK-key handling
private boolean hasSurface;                   // preview surface currently exists
private boolean copyToClipboard;
private IntentSource source;                  // how this activity was launched
private String sourceUrl;
private ScanFromWebPageManager scanFromWebPageManager;
private Collection<BarcodeFormat> decodeFormats;
private Map<DecodeHintType, ?> decodeHints;
private String characterSet;
private HistoryManager historyManager;
private InactivityTimer inactivityTimer;
private BeepManager beepManager;
private AmbientLightManager ambientLightManager;
// Accessors for collaborating classes (package-private where possible).
ViewfinderView getViewfinderView() {
return viewfinderView;
}
public Handler getHandler() {
return handler;
}
CameraManager getCameraManager() {
return cameraManager;
}
/**
 * One-time setup: keeps the screen on, inflates the capture layout, and
 * creates the helper managers. Camera setup is deferred to onResume().
 */
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
Window window = getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.capture);
hasSurface = false;
inactivityTimer = new InactivityTimer(this);
beepManager = new BeepManager(this);
ambientLightManager = new AmbientLightManager(this);
PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
// Modification by MichaelX (xiong_it, https://github.com/xiong-it):
// lock the activity to the orientation it started in so portrait
// scanning is supported.
if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
} else {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
}
/**
 * Rebuilds everything torn down in onPause(): history, camera manager, view
 * references, preference-driven flags, and intent-derived scan configuration,
 * then either opens the camera immediately (surface already present) or waits
 * for surfaceCreated() to do so.
 */
@Override
protected void onResume() {
super.onResume();
// historyManager must be initialized here to update the history
// preference
historyManager = new HistoryManager(this);
historyManager.trimHistory();
// CameraManager must be initialized here, not in onCreate(). This is
// necessary because we don't
// want to open the camera driver and measure the screen size if we're
// going to show the help on
// first launch. That led to bugs where the scanning rectangle was the
// wrong size and partially
// off screen.
cameraManager = new CameraManager(getApplication());
viewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);
viewfinderView.setCameraManager(cameraManager);
resultView = findViewById(R.id.result_view);
statusView = (TextView) findViewById(R.id.status_view);
handler = null;
lastResult = null;
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
// Modification by MichaelX (xiong_it, https://github.com/xiong-it): the
// auto-orientation handling below was removed to support portrait scanning.
// if (prefs.getBoolean(PreferencesActivity.KEY_DISABLE_AUTO_ORIENTATION, true)) {
// setRequestedOrientation(getCurrentOrientation());
// } else {
// setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_LANDSCAPE);
// }
resetStatusView();
beepManager.updatePrefs();
ambientLightManager.start(cameraManager);
inactivityTimer.onResume();
Intent intent = getIntent();
copyToClipboard = prefs.getBoolean(PreferencesActivity.KEY_COPY_TO_CLIPBOARD, true)
&& (intent == null || intent.getBooleanExtra(Intents.Scan.SAVE_HISTORY, true));
source = IntentSource.NONE;
sourceUrl = null;
scanFromWebPageManager = null;
decodeFormats = null;
characterSet = null;
if (intent != null) {
String action = intent.getAction();
String dataString = intent.getDataString();
if (Intents.Scan.ACTION.equals(action)) {
// Scan the formats the intent requested, and return the result
// to the calling activity.
source = IntentSource.NATIVE_APP_INTENT;
decodeFormats = DecodeFormatManager.parseDecodeFormats(intent);
decodeHints = DecodeHintManager.parseDecodeHints(intent);
// The caller may constrain the framing rectangle.
if (intent.hasExtra(Intents.Scan.WIDTH) && intent.hasExtra(Intents.Scan.HEIGHT)) {
int width = intent.getIntExtra(Intents.Scan.WIDTH, 0);
int height = intent.getIntExtra(Intents.Scan.HEIGHT, 0);
if (width > 0 && height > 0) {
cameraManager.setManualFramingRect(width, height);
}
}
// The caller may select a specific camera.
if (intent.hasExtra(Intents.Scan.CAMERA_ID)) {
int cameraId = intent.getIntExtra(Intents.Scan.CAMERA_ID, -1);
if (cameraId >= 0) {
cameraManager.setManualCameraId(cameraId);
}
}
String customPromptMessage = intent.getStringExtra(Intents.Scan.PROMPT_MESSAGE);
if (customPromptMessage != null) {
statusView.setText(customPromptMessage);
}
} else if (dataString != null && dataString.contains("http://www.google")
&& dataString.contains("/m/products/scan")) {
// Scan only products and send the result to mobile Product
// Search.
source = IntentSource.PRODUCT_SEARCH_LINK;
sourceUrl = dataString;
decodeFormats = DecodeFormatManager.PRODUCT_FORMATS;
} else if (isZXingURL(dataString)) {
// Scan formats requested in query string (all formats if none
// specified).
// If a return URL is specified, send the results there.
// Otherwise, handle it ourselves.
source = IntentSource.ZXING_LINK;
sourceUrl = dataString;
Uri inputUri = Uri.parse(dataString);
scanFromWebPageManager = new ScanFromWebPageManager(inputUri);
decodeFormats = DecodeFormatManager.parseDecodeFormats(inputUri);
// Allow a sub-set of the hints to be specified by the caller.
decodeHints = DecodeHintManager.parseDecodeHints(inputUri);
}
characterSet = intent.getStringExtra(Intents.Scan.CHARACTER_SET);
}
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) {
// Deprecated setting, still required on pre-Honeycomb devices.
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
if (hasSurface) {
// The activity was paused but not stopped, so the surface still
// exists. Therefore
// surfaceCreated() won't be called, so init the camera here.
initCamera(surfaceHolder);
} else {
// Install the callback and wait for surfaceCreated() to init the
// camera.
surfaceHolder.addCallback(this);
}
}
/**
 * Maps the current display rotation to the fixed screen-orientation constant
 * matching it, so the activity can lock itself to its present orientation.
 *
 * @return an {@code ActivityInfo.SCREEN_ORIENTATION_*} constant
 */
private int getCurrentOrientation() {
int rotation = getWindowManager().getDefaultDisplay().getRotation();
boolean landscape =
getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE;
if (landscape) {
return (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90)
? ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE
: ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
}
return (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_270)
? ActivityInfo.SCREEN_ORIENTATION_PORTRAIT
: ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT;
}
/**
 * Returns true when the given intent data string starts with one of the
 * known ZXing scan-URL prefixes in {@link #ZXING_URLS}.
 *
 * @param dataString intent data string; may be null
 */
private static boolean isZXingURL(String dataString) {
if (dataString != null) {
for (String prefix : ZXING_URLS) {
if (dataString.startsWith(prefix)) {
return true;
}
}
}
return false;
}
/**
 * Tears scanning down in reverse order of onResume(): stops the decode
 * handler, timers, beeper, and camera driver, and detaches the surface
 * callback if the surface is already gone.
 */
@Override
protected void onPause() {
if (handler != null) {
handler.quitSynchronously();
handler = null;
}
inactivityTimer.onPause();
ambientLightManager.stop();
beepManager.close();
cameraManager.closeDriver();
// historyManager = null; // Keep for onActivityResult
if (!hasSurface) {
// Surface is already destroyed; stop listening for its callbacks.
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
surfaceHolder.removeCallback(this);
}
super.onPause();
}
/** Final cleanup: shuts down the inactivity timer. */
@Override
protected void onDestroy() {
inactivityTimer.shutdown();
super.onDestroy();
}
/**
 * Hardware-key handling: BACK cancels an external scan or returns from a
 * displayed result to scanning; FOCUS/CAMERA are swallowed so the stock
 * camera app doesn't launch; volume keys toggle the torch.
 */
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (source == IntentSource.NATIVE_APP_INTENT) {
setResult(RESULT_CANCELED);
finish();
return true;
}
if ((source == IntentSource.NONE || source == IntentSource.ZXING_LINK) && lastResult != null) {
// A result is showing; BACK resumes scanning instead of leaving.
restartPreviewAfterDelay(0L);
return true;
}
} else if (keyCode == KeyEvent.KEYCODE_FOCUS || keyCode == KeyEvent.KEYCODE_CAMERA) {
// Handle these events so they don't launch the Camera app.
return true;
} else if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
// Use volume up/down to turn the light off/on.
cameraManager.setTorch(false);
return true;
} else if (keyCode == KeyEvent.KEYCODE_VOLUME_UP) {
cameraManager.setTorch(true);
return true;
}
return super.onKeyDown(keyCode, event);
}
/** Inflates the capture options menu (share/history/settings/help). */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.capture, menu);
return super.onCreateOptionsMenu(menu);
}
/**
 * Launches the activity matching the selected menu item; unknown items are
 * delegated to the superclass.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
int itemId = item.getItemId();
if (itemId == R.id.menu_share) {
intent.setClassName(this, ShareActivity.class.getName());
startActivity(intent);
} else if (itemId == R.id.menu_history) {
intent.setClassName(this, HistoryActivity.class.getName());
startActivityForResult(intent, HISTORY_REQUEST_CODE);
} else if (itemId == R.id.menu_settings) {
intent.setClassName(this, PreferencesActivity.class.getName());
startActivity(intent);
} else if (itemId == R.id.menu_help) {
intent.setClassName(this, HelpActivity.class.getName());
startActivity(intent);
} else {
return super.onOptionsItemSelected(item);
}
return true;
}
/**
 * Handles the return from the history screen: re-displays the historical
 * scan result the user picked.
 */
@Override
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
if (resultCode != RESULT_OK || requestCode != HISTORY_REQUEST_CODE || historyManager == null) {
return;
}
int itemNumber = intent.getIntExtra(Intents.History.ITEM_NUMBER, -1);
if (itemNumber >= 0) {
HistoryItem historyItem = historyManager.buildHistoryItem(itemNumber);
decodeOrStoreSavedBitmap(null, historyItem.getResult());
}
}
/**
 * Shows the given result as soon as possible: if the decode handler is not
 * up yet, the result is parked in {@code savedResultToShow} and sent once a
 * later call finds the handler alive.
 *
 * @param bitmap currently unused -- reserved for future use
 * @param result result to display; null flushes a previously saved result
 */
private void decodeOrStoreSavedBitmap(Bitmap bitmap, Result result) {
// Bitmap isn't used yet -- will be used soon
if (handler == null) {
savedResultToShow = result;
} else {
if (result != null) {
savedResultToShow = result;
}
if (savedResultToShow != null) {
Message message = Message.obtain(handler, R.id.decode_succeeded, savedResultToShow);
handler.sendMessage(message);
}
savedResultToShow = null;
}
}
/**
 * SurfaceHolder callback: marks the surface available and starts the camera
 * the first time the surface appears.
 */
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (holder == null) {
Log.e(TAG, "*** WARNING *** surfaceCreated() gave us a null surface!");
} else {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) {
// Deprecated setting, still required on pre-Honeycomb devices.
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
}
if (!hasSurface) {
hasSurface = true;
// NOTE(review): initCamera is still reached even on the null-holder
// branch above; this relies on the platform never actually passing
// null -- confirm before changing.
initCamera(holder);
}
}
/** SurfaceHolder callback: records that the preview surface is gone. */
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
hasSurface = false;
}
/** SurfaceHolder callback: size/format changes are intentionally ignored. */
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
/**
 * A valid barcode has been found, so give an indication of success and show
 * the results.
 *
 * @param rawResult
 *            The contents of the barcode.
 * @param scaleFactor
 *            amount by which thumbnail was scaled
 * @param barcode
 *            A greyscale bitmap of the camera data which was decoded, or
 *            null when the result was replayed from history.
 */
public void handleDecode(Result rawResult, Bitmap barcode, float scaleFactor) {
inactivityTimer.onActivity();
lastResult = rawResult;
ResultHandler resultHandler = ResultHandlerFactory.makeResultHandler(this, rawResult);
// A null bitmap means the result came from history, not a live scan.
boolean fromLiveScan = barcode != null;
if (fromLiveScan) {
historyManager.addHistoryItem(rawResult, resultHandler);
// Then not from history, so beep/vibrate and we have an image to
// draw on
beepManager.playBeepSoundAndVibrate();
drawResultPoints(barcode, scaleFactor, rawResult);
}
// Route the result according to how this activity was launched.
switch (source) {
case NATIVE_APP_INTENT:
case PRODUCT_SEARCH_LINK:
handleDecodeExternally(rawResult, resultHandler, barcode);
break;
case ZXING_LINK:
if (scanFromWebPageManager == null || !scanFromWebPageManager.isScanFromWebPage()) {
handleDecodeInternally(rawResult, resultHandler, barcode);
} else {
handleDecodeExternally(rawResult, resultHandler, barcode);
}
break;
case NONE:
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
if (fromLiveScan && prefs.getBoolean(PreferencesActivity.KEY_BULK_MODE, false)) {
Toast.makeText(getApplicationContext(),
getResources().getString(R.string.msg_bulk_mode_scanned) + " (" + rawResult.getText() + ')',
Toast.LENGTH_SHORT).show();
// Wait a moment or else it will scan the same barcode
// continuously about 3 times
restartPreviewAfterDelay(BULK_MODE_SCAN_DELAY_MS);
} else {
handleDecodeInternally(rawResult, resultHandler, barcode);
}
break;
}
}
/**
 * Superimpose a line for 1D or dots for 2D to highlight the key features of
 * the barcode.
 *
 * @param barcode
 *            A bitmap of the captured image.
 * @param scaleFactor
 *            amount by which thumbnail was scaled
 * @param rawResult
 *            The decoded results which contains the points to draw.
 */
private void drawResultPoints(Bitmap barcode, float scaleFactor, Result rawResult) {
ResultPoint[] points = rawResult.getResultPoints();
if (points == null || points.length == 0) {
return;
}
Canvas canvas = new Canvas(barcode);
Paint paint = new Paint();
paint.setColor(getResources().getColor(R.color.result_points));
boolean upcOrEan13 = rawResult.getBarcodeFormat() == BarcodeFormat.UPC_A
|| rawResult.getBarcodeFormat() == BarcodeFormat.EAN_13;
if (points.length == 2) {
paint.setStrokeWidth(4.0f);
drawLine(canvas, paint, points[0], points[1], scaleFactor);
} else if (points.length == 4 && upcOrEan13) {
// Hacky special case -- draw two lines, for the barcode and metadata.
drawLine(canvas, paint, points[0], points[1], scaleFactor);
drawLine(canvas, paint, points[2], points[3], scaleFactor);
} else {
paint.setStrokeWidth(10.0f);
for (ResultPoint point : points) {
if (point != null) {
canvas.drawPoint(scaleFactor * point.getX(), scaleFactor * point.getY(), paint);
}
}
}
}
/** Draws a scaled line between two result points; no-op when either is null. */
private static void drawLine(Canvas canvas, Paint paint, ResultPoint a, ResultPoint b, float scaleFactor) {
if (a == null || b == null) {
return;
}
canvas.drawLine(scaleFactor * a.getX(), scaleFactor * a.getY(),
scaleFactor * b.getX(), scaleFactor * b.getY(), paint);
}
// Put up our own UI for how to handle the decoded contents.
/**
 * Shows the decoded result inside this activity: swaps the viewfinder for
 * the result view; fills in the barcode image, format, type, timestamp,
 * displayable metadata, and contents; and wires up the result-handler
 * buttons. Depending on preferences, may copy the contents to the clipboard
 * or auto-fire the handler's default action instead of showing the screen.
 */
private void handleDecodeInternally(Result rawResult, ResultHandler resultHandler, Bitmap barcode) {
CharSequence displayContents = resultHandler.getDisplayContents();
if (copyToClipboard && !resultHandler.areContentsSecure()) {
ClipboardInterface.setText(displayContents, this);
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
// Optionally skip the result screen and fire the default action directly.
if (resultHandler.getDefaultButtonID() != null
&& prefs.getBoolean(PreferencesActivity.KEY_AUTO_OPEN_WEB, false)) {
resultHandler.handleButtonPress(resultHandler.getDefaultButtonID());
return;
}
// Swap the scanning UI for the result UI.
statusView.setVisibility(View.GONE);
viewfinderView.setVisibility(View.GONE);
resultView.setVisibility(View.VISIBLE);
ImageView barcodeImageView = (ImageView) findViewById(R.id.barcode_image_view);
if (barcode == null) {
// No captured frame (history replay); show the launcher icon instead.
barcodeImageView.setImageBitmap(BitmapFactory.decodeResource(getResources(), R.drawable.launcher_icon));
} else {
barcodeImageView.setImageBitmap(barcode);
}
TextView formatTextView = (TextView) findViewById(R.id.format_text_view);
formatTextView.setText(rawResult.getBarcodeFormat().toString());
TextView typeTextView = (TextView) findViewById(R.id.type_text_view);
typeTextView.setText(resultHandler.getType().toString());
DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
TextView timeTextView = (TextView) findViewById(R.id.time_text_view);
timeTextView.setText(formatter.format(new Date(rawResult.getTimestamp())));
TextView metaTextView = (TextView) findViewById(R.id.meta_text_view);
View metaTextViewLabel = findViewById(R.id.meta_text_view_label);
metaTextView.setVisibility(View.GONE);
metaTextViewLabel.setVisibility(View.GONE);
// Display only the metadata types that make sense to a user.
Map<ResultMetadataType, Object> metadata = rawResult.getResultMetadata();
if (metadata != null) {
StringBuilder metadataText = new StringBuilder(20);
for (Map.Entry<ResultMetadataType, Object> entry : metadata.entrySet()) {
if (DISPLAYABLE_METADATA_TYPES.contains(entry.getKey())) {
metadataText.append(entry.getValue()).append('\n');
}
}
if (metadataText.length() > 0) {
// Trim the trailing newline before displaying.
metadataText.setLength(metadataText.length() - 1);
metaTextView.setText(metadataText);
metaTextView.setVisibility(View.VISIBLE);
metaTextViewLabel.setVisibility(View.VISIBLE);
}
}
TextView contentsTextView = (TextView) findViewById(R.id.contents_text_view);
contentsTextView.setText(displayContents);
// Shrink the font for long contents, clamped to a 22sp minimum.
int scaledSize = Math.max(22, 32 - displayContents.length() / 4);
contentsTextView.setTextSize(TypedValue.COMPLEX_UNIT_SP, scaledSize);
TextView supplementTextView = (TextView) findViewById(R.id.contents_supplement_text_view);
supplementTextView.setText("");
supplementTextView.setOnClickListener(null);
if (PreferenceManager.getDefaultSharedPreferences(this).getBoolean(PreferencesActivity.KEY_SUPPLEMENTAL,
true)) {
// Asynchronously fetch supplemental info for the result.
SupplementalInfoRetriever.maybeInvokeRetrieval(supplementTextView, resultHandler.getResult(),
historyManager, this);
}
int buttonCount = resultHandler.getButtonCount();
ViewGroup buttonView = (ViewGroup) findViewById(R.id.result_button_view);
buttonView.requestFocus();
// Show one button per handler action; hide the unused slots.
for (int x = 0; x < ResultHandler.MAX_BUTTON_COUNT; x++) {
TextView button = (TextView) buttonView.getChildAt(x);
if (x < buttonCount) {
button.setVisibility(View.VISIBLE);
button.setText(resultHandler.getButtonText(x));
button.setOnClickListener(new ResultButtonListener(resultHandler, x));
} else {
button.setVisibility(View.GONE);
}
}
}
// Briefly show the contents of the barcode, then handle the result outside
// Barcode Scanner.
private void handleDecodeExternally(Result rawResult, ResultHandler resultHandler, Bitmap barcode) {
  if (barcode != null) {
    // Freeze the captured frame behind the viewfinder while the result is handed off.
    viewfinderView.drawResultBitmap(barcode);
  }
  // How long to display the result before replying; callers may override via an intent extra.
  long resultDurationMS;
  if (getIntent() == null) {
    resultDurationMS = DEFAULT_INTENT_RESULT_DURATION_MS;
  } else {
    resultDurationMS = getIntent().getLongExtra(Intents.Scan.RESULT_DISPLAY_DURATION_MS,
        DEFAULT_INTENT_RESULT_DURATION_MS);
  }
  if (resultDurationMS > 0) {
    // Show at most 32 characters of the raw result in the status line.
    String rawResultString = String.valueOf(rawResult);
    if (rawResultString.length() > 32) {
      rawResultString = rawResultString.substring(0, 32) + " ...";
    }
    statusView.setText(getString(resultHandler.getDisplayTitle()) + " : " + rawResultString);
  }
  // Skip the clipboard for results the handler marks as sensitive.
  if (copyToClipboard && !resultHandler.areContentsSecure()) {
    CharSequence text = resultHandler.getDisplayContents();
    ClipboardInterface.setText(text, this);
  }
  if (source == IntentSource.NATIVE_APP_INTENT) {
    // Hand back whatever action they requested - this can be changed to
    // Intents.Scan.ACTION when
    // the deprecated intent is retired.
    Intent intent = new Intent(getIntent().getAction());
    intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
    intent.putExtra(Intents.Scan.RESULT, rawResult.toString());
    intent.putExtra(Intents.Scan.RESULT_FORMAT, rawResult.getBarcodeFormat().toString());
    byte[] rawBytes = rawResult.getRawBytes();
    if (rawBytes != null && rawBytes.length > 0) {
      intent.putExtra(Intents.Scan.RESULT_BYTES, rawBytes);
    }
    // Copy optional decode metadata (UPC/EAN extension, orientation, EC level,
    // byte segments) into reply extras when present.
    Map<ResultMetadataType, ?> metadata = rawResult.getResultMetadata();
    if (metadata != null) {
      if (metadata.containsKey(ResultMetadataType.UPC_EAN_EXTENSION)) {
        intent.putExtra(Intents.Scan.RESULT_UPC_EAN_EXTENSION,
            metadata.get(ResultMetadataType.UPC_EAN_EXTENSION).toString());
      }
      Number orientation = (Number) metadata.get(ResultMetadataType.ORIENTATION);
      if (orientation != null) {
        intent.putExtra(Intents.Scan.RESULT_ORIENTATION, orientation.intValue());
      }
      String ecLevel = (String) metadata.get(ResultMetadataType.ERROR_CORRECTION_LEVEL);
      if (ecLevel != null) {
        intent.putExtra(Intents.Scan.RESULT_ERROR_CORRECTION_LEVEL, ecLevel);
      }
      @SuppressWarnings("unchecked")
      Iterable<byte[]> byteSegments = (Iterable<byte[]>) metadata.get(ResultMetadataType.BYTE_SEGMENTS);
      if (byteSegments != null) {
        // Each byte segment goes into its own indexed extra.
        int i = 0;
        for (byte[] byteSegment : byteSegments) {
          intent.putExtra(Intents.Scan.RESULT_BYTE_SEGMENTS_PREFIX + i, byteSegment);
          i++;
        }
      }
    }
    sendReplyMessage(R.id.return_scan_result, intent, resultDurationMS);
  } else if (source == IntentSource.PRODUCT_SEARCH_LINK) {
    // Reformulate the URL which triggered us into a query, so that the
    // request goes to the same
    // TLD as the scan URL.
    // NOTE(review): assumes sourceUrl contains "/scan"; if lastIndexOf() returned -1
    // the substring() below would throw -- confirm callers guarantee this.
    int end = sourceUrl.lastIndexOf("/scan");
    String replyURL = sourceUrl.substring(0, end) + "?q=" + resultHandler.getDisplayContents()
        + "&source=zxing";
    sendReplyMessage(R.id.launch_product_query, replyURL, resultDurationMS);
  } else if (source == IntentSource.ZXING_LINK) {
    if (scanFromWebPageManager != null && scanFromWebPageManager.isScanFromWebPage()) {
      String replyURL = scanFromWebPageManager.buildReplyURL(rawResult, resultHandler);
      // Drop the manager reference once the reply URL is built so it is used only once.
      scanFromWebPageManager = null;
      sendReplyMessage(R.id.launch_product_query, replyURL, resultDurationMS);
    }
  }
}
/**
 * Posts a message carrying {@code arg} to the capture handler, either
 * immediately or after {@code delayMS} milliseconds. Silently does nothing
 * when the handler has not been created yet.
 */
private void sendReplyMessage(int id, Object arg, long delayMS) {
  if (handler == null) {
    return;
  }
  Message message = Message.obtain(handler, id, arg);
  if (delayMS > 0L) {
    handler.sendMessageDelayed(message, delayMS);
  } else {
    handler.sendMessage(message);
  }
}
/**
 * Opens the camera driver on the given surface and kicks off decoding.
 * On any failure a fatal "framework bug" dialog is shown (which finishes the
 * activity via its FinishListener).
 *
 * @param surfaceHolder the surface the camera preview draws onto; must not be null
 */
private void initCamera(SurfaceHolder surfaceHolder) {
  if (surfaceHolder == null) {
    throw new IllegalStateException("No SurfaceHolder provided");
  }
  if (cameraManager.isOpen()) {
    // Can happen when the SurfaceView callback arrives after the camera was
    // already opened elsewhere; opening twice would be an error.
    Log.w(TAG, "initCamera() while already open -- late SurfaceView callback?");
    return;
  }
  try {
    // Order matters: the driver must be open before the handler is created.
    cameraManager.openDriver(surfaceHolder);
    // Creating the handler starts the preview, which can also throw a
    // RuntimeException.
    if (handler == null) {
      handler = new CaptureActivityHandler(this, decodeFormats, decodeHints, characterSet, cameraManager);
    }
    decodeOrStoreSavedBitmap(null, null);
  } catch (IOException ioe) {
    Log.w(TAG, ioe);
    displayFrameworkBugMessageAndExit();
  } catch (RuntimeException e) {
    // Barcode Scanner has seen crashes in the wild of this variety:
    // java.?lang.?RuntimeException: Fail to connect to camera service
    Log.w(TAG, "Unexpected error initializing camera", e);
    displayFrameworkBugMessageAndExit();
  }
}
/**
 * Shows a modal dialog explaining that the camera could not be used;
 * both OK and cancel finish the activity via {@link FinishListener}.
 */
private void displayFrameworkBugMessageAndExit() {
  new AlertDialog.Builder(this)
      .setTitle(getString(R.string.app_name))
      .setMessage(getString(R.string.msg_camera_framework_bug))
      .setPositiveButton(R.string.button_ok, new FinishListener(this))
      .setOnCancelListener(new FinishListener(this))
      .show();
}
/**
 * Schedules a preview restart after {@code delayMS} milliseconds and resets
 * the status UI immediately. A null handler simply skips the scheduling.
 */
public void restartPreviewAfterDelay(long delayMS) {
  CaptureActivityHandler activityHandler = handler;
  if (activityHandler != null) {
    activityHandler.sendEmptyMessageDelayed(R.id.restart_preview, delayMS);
  }
  resetStatusView();
}
/**
 * Returns the UI to its scanning state: clears the last result, restores the
 * default status text, hides the result pane and shows the viewfinder again.
 */
private void resetStatusView() {
  lastResult = null;
  statusView.setText(R.string.msg_default_status);
  resultView.setVisibility(View.GONE);
  statusView.setVisibility(View.VISIBLE);
  viewfinderView.setVisibility(View.VISIBLE);
}
/** Requests a repaint of the viewfinder overlay by delegating to the view itself. */
public void drawViewfinder() {
  viewfinderView.drawViewfinder();
}
}
| |
package org.apache.velocity.runtime.log;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.log.Hierarchy;
import org.apache.log.LogTarget;
import org.apache.log.Logger;
import org.apache.log.Priority;
import org.apache.log.output.io.FileTarget;
import org.apache.velocity.runtime.RuntimeConstants;
import org.apache.velocity.runtime.RuntimeServices;
/**
* Implementation of a Avalon logger.
*
* @author <a href="mailto:jon@latchkey.com">Jon S. Stevens</a>
* @author <a href="mailto:geirm@optonline.net">Geir Magnusson Jr.</a>
* @author <a href="mailto:nbubna@apache.org">Nathan Bubna</a>
* @version $Id$
* @since 1.5
*/
public class AvalonLogChute implements LogChute
{
public static final String AVALON_LOGGER = "runtime.log.logsystem.avalon.logger";
public static final String AVALON_LOGGER_FORMAT = "runtime.log.logsystem.avalon.format";
public static final String AVALON_LOGGER_LEVEL = "runtime.log.logsystem.avalon.level";
private Logger logger = null;
private RuntimeServices rsvc = null;
private static final Map logLevels = new HashMap();
static
{
logLevels.put("trace", Priority.DEBUG);
logLevels.put("debug", Priority.DEBUG);
logLevels.put("info", Priority.INFO);
logLevels.put("warn", Priority.WARN);
logLevels.put("error", Priority.ERROR);
}
/**
* @see org.apache.velocity.runtime.log.LogChute#init(org.apache.velocity.runtime.RuntimeServices)
*/
public void init(RuntimeServices rs) throws Exception
{
this.rsvc = rs;
// if a logger is specified, we will use this instead of the default
String name = (String)rsvc.getProperty(AVALON_LOGGER);
if (name != null)
{
this.logger = Hierarchy.getDefaultHierarchy().getLoggerFor(name);
}
else
{
// use the toString() of RuntimeServices to make a unique logger
logger = Hierarchy.getDefaultHierarchy().getLoggerFor(rsvc.toString());
// if we have a file property, use it to create a FileTarget
String file = (String)rsvc.getProperty(RuntimeConstants.RUNTIME_LOG);
if (StringUtils.isNotEmpty(file))
{
initTarget(file, rsvc);
}
}
}
// creates a file target using the specified file name
private void initTarget(final String file, final RuntimeServices rsvc) throws Exception
{
try
{
String format = null;
Priority level = null;
if (rsvc != null)
{
format = rsvc.getString(AVALON_LOGGER_FORMAT, "%{time} %{message}\\n%{throwable}");
level = (Priority) logLevels.get(rsvc.getString(AVALON_LOGGER_LEVEL, "warn"));
}
VelocityFormatter vf = new VelocityFormatter(format);
// make the target and keep the default behavior of not appending
FileTarget target = new FileTarget(new File(file), false, vf);
logger.setPriority(level);
logger.setLogTargets(new LogTarget[] { target });
log(DEBUG_ID, "AvalonLogChute initialized using file '"+file+'\'');
}
catch (IOException ioe)
{
rsvc.getLog().error("Unable to create log file for AvalonLogChute", ioe);
throw new Exception("Error configuring AvalonLogChute : " + ioe);
}
}
/**
* @param file
* @throws Exception
* @deprecated This method should not be used. It is here only to provide
* backwards compatibility for the deprecated AvalonLogSystem
* class, in case anyone used it and this method directly.
*/
public void init(String file) throws Exception
{
logger = Hierarchy.getDefaultHierarchy().getLoggerFor(rsvc.toString());
initTarget(file, null);
// nag the theoretical user
log(DEBUG_ID, "You shouldn't be using the init(String file) method!");
}
/**
* logs messages
*
* @param level severity level
* @param message complete error message
*/
public void log(int level, String message)
{
/*
* based on level, call the right logger method
* and prefix with the appropos prefix
*/
switch (level)
{
case WARN_ID:
logger.warn(WARN_PREFIX + message );
break;
case INFO_ID:
logger.info(INFO_PREFIX + message);
break;
case DEBUG_ID:
logger.debug(DEBUG_PREFIX + message);
break;
case TRACE_ID:
logger.debug(TRACE_PREFIX + message);
break;
case ERROR_ID:
logger.error(ERROR_PREFIX + message);
break;
default:
logger.info(message);
break;
}
}
/**
* logs messages and error
*
* @param level severity level
* @param message complete error message
* @param t
*/
public void log(int level, String message, Throwable t)
{
switch (level)
{
case WARN_ID:
logger.warn(WARN_PREFIX + message, t);
break;
case INFO_ID:
logger.info(INFO_PREFIX + message, t);
break;
case DEBUG_ID:
logger.debug(DEBUG_PREFIX + message, t);
break;
case TRACE_ID:
logger.debug(TRACE_PREFIX + message, t);
break;
case ERROR_ID:
logger.error(ERROR_PREFIX + message, t);
break;
default:
logger.info(message, t);
break;
}
}
/**
* Checks to see whether the specified level is enabled.
* @param level
* @return True if the specified level is enabled.
*/
public boolean isLevelEnabled(int level)
{
switch (level)
{
// For Avalon, no Trace exists. Log at debug level.
case TRACE_ID:
case DEBUG_ID:
return logger.isDebugEnabled();
case INFO_ID:
return logger.isInfoEnabled();
case WARN_ID:
return logger.isWarnEnabled();
case ERROR_ID:
return logger.isErrorEnabled();
default:
return true;
}
}
/**
* Also do a shutdown if the object is destroy()'d.
* @throws Throwable
*/
protected void finalize() throws Throwable
{
shutdown();
}
/** Close all destinations*/
public void shutdown()
{
logger.unsetLogTargets();
}
}
| |
/*
* (c) Copyright 2017 Palantir Technologies Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.baseline.errorprone;
import com.google.errorprone.CompilationTestHelper;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for the {@code ValidateConstantMessage} error-prone check, which requires
 * that messages passed to commons-lang / commons-lang3 {@code Validate} methods be
 * compile-time constants. The "Pass" helpers expect no diagnostic; the "Fail"
 * helpers expect a "non-constant message" diagnostic. "Both" helpers exercise the
 * call against both commons-lang and commons-lang3; the "Lang2Only"/"Lang3Only"
 * helpers cover methods that exist in only one of the two libraries.
 */
public final class ValidateConstantMessageTests {

    private CompilationTestHelper compilationHelper;

    @BeforeEach
    public void before() {
        compilationHelper = CompilationTestHelper.newInstance(ValidateConstantMessage.class, getClass());
    }

    @Test
    public void testValidateIsTrueNoMessage() {
        testPassBoth("Validate.isTrue(param != \"string\");");
    }

    @Test
    public void testValidateIsTrueConstantMessageNoArgs() {
        testPassBoth("Validate.isTrue(param != \"string\", \"constant\");");
    }

    @Test
    public void testValidateIsTrueConstantMessageArgs() {
        testPassLang3Only("Validate.isTrue(param != \"string\", \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateIsTrueNonConstantMessageNoArgs() {
        testFailBoth("Validate.isTrue(param != \"string\", \"constant\" + param);");
    }

    @Test
    public void testValidateIsTrueNonConstantMessageArgs() {
        testFailLang3Only("Validate.isTrue(param != \"string\", \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateIsTrueNonConstantMessageDouble() {
        testFailBoth("Validate.isTrue(param != \"string\", \"constant\" + param, 0.0);");
    }

    @Test
    public void testValidateIsTrueNonConstantMessageLong() {
        testFailBoth("Validate.isTrue(param != \"string\", \"constant\" + param, 123L);");
    }

    @Test
    public void testValidateNotNullNoMessage() {
        // CHECKSTYLE:OFF
        testPassBoth("Validate.notNull(param);");
        // CHECKSTYLE:ON
    }

    @Test
    public void testValidateNotNullConstantMessageNoArgs() {
        testPassBoth("Validate.notNull(param, \"constant\");");
    }

    @Test
    public void testValidateNotNullConstantMessageArgs() {
        testPassLang3Only("Validate.notNull(param, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateNotNullNonConstantMessageNoArgs() {
        testFailBoth("Validate.notNull(param, \"constant\" + param);");
    }

    @Test
    public void testValidateNotNullNonConstantMessageArgs() {
        testFailLang3Only("Validate.notNull(param, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateNotEmptyNoMessageArray() {
        testPassBoth("Validate.notEmpty(arrayArg);");
    }

    @Test
    public void testValidateNotEmptyConstantMessageArrayNoArgs() {
        testPassBoth("Validate.notEmpty(arrayArg, \"constant\");");
    }

    @Test
    public void testValidateNotEmptyConstantMessageArrayArgs() {
        testPassLang3Only("Validate.notEmpty(arrayArg, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateNotEmptyNonConstantMessageArrayNoArgs() {
        testFailBoth("Validate.notEmpty(arrayArg, \"constant\" + param);");
    }

    @Test
    public void testValidateNotEmptyNonConstantMessageArrayArgs() {
        testFailLang3Only("Validate.notEmpty(arrayArg, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateNotEmptyNoMessageCollection() {
        testPassBoth("Validate.notEmpty(collectionArg);");
    }

    @Test
    public void testValidateNotEmptyConstantMessageCollectionNoArgs() {
        testPassBoth("Validate.notEmpty(collectionArg, \"constant\");");
    }

    @Test
    public void testValidateNotEmptyConstantMessageCollectionArgs() {
        testPassLang3Only("Validate.notEmpty(collectionArg, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateNotEmptyNonConstantMessageCollectionNoArgs() {
        testFailBoth("Validate.notEmpty(collectionArg, \"constant\" + param);");
    }

    @Test
    public void testValidateNotEmptyNonConstantMessageCollectionArgs() {
        testFailLang3Only("Validate.notEmpty(collectionArg, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateNotEmptyNoMessageMap() {
        testPassBoth("Validate.notEmpty(mapArg);");
    }

    @Test
    public void testValidateNotEmptyConstantMessageMapNoArgs() {
        testPassBoth("Validate.notEmpty(mapArg, \"constant\");");
    }

    @Test
    public void testValidateNotEmptyConstantMessageMapArgs() {
        testPassLang3Only("Validate.notEmpty(mapArg, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateNotEmptyNonConstantMessageMapNoArgs() {
        testFailBoth("Validate.notEmpty(mapArg, \"constant\" + param);");
    }

    @Test
    public void testValidateNotEmptyNonConstantMessageMapArgs() {
        testFailLang3Only("Validate.notEmpty(mapArg, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateNotEmptyNoMessageChars() {
        testPassBoth("Validate.notEmpty(stringArg);");
    }

    @Test
    public void testValidateNotEmptyConstantMessageCharsNoArgs() {
        testPassBoth("Validate.notEmpty(stringArg, \"constant\");");
    }

    @Test
    public void testValidateNotEmptyConstantMessageCharsArgs() {
        testPassLang3Only("Validate.notEmpty(stringArg, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateNotEmptyNonConstantMessageCharsNoArgs() {
        testFailBoth("Validate.notEmpty(stringArg, \"constant\" + param);");
    }

    @Test
    public void testValidateNotEmptyNonConstantMessageCharsArgs() {
        testFailLang3Only("Validate.notEmpty(stringArg, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateNotBlankNoMessage() {
        testPassLang3Only("Validate.notBlank(stringArg);");
    }

    @Test
    public void testValidateNotBlankConstantMessageNoArgs() {
        testPassLang3Only("Validate.notBlank(stringArg, \"constant\");");
    }

    @Test
    public void testValidateNotBlankConstantMessageArgs() {
        testPassLang3Only("Validate.notBlank(stringArg, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateNotBlankNonConstantMessageNoArgs() {
        testFailLang3Only("Validate.notBlank(stringArg, \"constant\" + param);");
    }

    @Test
    public void testValidateNotBlankNonConstantMessageArgs() {
        testFailLang3Only("Validate.notBlank(stringArg, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateNoNullElementsNoMessageIterable() {
        testPassLang3Only("Validate.noNullElements(iterableArg);");
    }

    @Test
    public void testValidateNoNullElementsConstantMessageIterableNoArgs() {
        testPassLang3Only("Validate.noNullElements(iterableArg, \"constant\");");
    }

    @Test
    public void testValidateNoNullElementsConstantMessageIterableArgs() {
        testPassLang3Only("Validate.noNullElements(iterableArg, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateNoNullElementsNonConstantMessageIterableNoArgs() {
        testFailLang3Only("Validate.noNullElements(iterableArg, \"constant\" + param);");
    }

    @Test
    public void testValidateNoNullElementsNonConstantMessageIterableArgs() {
        testFailLang3Only("Validate.noNullElements(iterableArg, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateNoNullElementsNoMessageArray() {
        testPassBoth("Validate.noNullElements(arrayArg);");
    }

    @Test
    public void testValidateNoNullElementsConstantMessageArrayNoArgs() {
        testPassBoth("Validate.noNullElements(arrayArg, \"constant\");");
    }

    @Test
    public void testValidateNoNullElementsConstantMessageArrayArgs() {
        testPassLang3Only("Validate.noNullElements(arrayArg, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateNoNullElementsNonConstantMessageArrayNoArgs() {
        testFailBoth("Validate.noNullElements(arrayArg, \"constant\" + param);");
    }

    @Test
    public void testValidateNoNullElementsNonConstantMessageArrayArgs() {
        testFailLang3Only("Validate.noNullElements(arrayArg, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateValidIndexNoMessageArray() {
        testPassLang3Only("Validate.validIndex(arrayArg, 1);");
    }

    @Test
    public void testValidateValidIndexConstantMessageArrayNoArgs() {
        testPassLang3Only("Validate.validIndex(arrayArg, 1, \"constant\");");
    }

    @Test
    public void testValidateValidIndexConstantMessageArrayArgs() {
        testPassLang3Only("Validate.validIndex(arrayArg, 1, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateValidIndexNonConstantMessageArrayNoArgs() {
        testFailLang3Only("Validate.validIndex(arrayArg, 1, \"constant\" + param);");
    }

    @Test
    public void testValidateValidIndexNonConstantMessageArrayArgs() {
        testFailLang3Only("Validate.validIndex(arrayArg, 1, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateValidIndexNoMessageCollection() {
        testPassLang3Only("Validate.validIndex(collectionArg, 1);");
    }

    @Test
    public void testValidateValidIndexConstantMessageCollectionNoArgs() {
        testPassLang3Only("Validate.validIndex(collectionArg, 1, \"constant\");");
    }

    @Test
    public void testValidateValidIndexConstantMessageCollectionArgs() {
        testPassLang3Only("Validate.validIndex(collectionArg, 1, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateValidIndexNonConstantMessageCollectionNoArgs() {
        testFailLang3Only("Validate.validIndex(collectionArg, 1, \"constant\" + param);");
    }

    @Test
    public void testValidateValidIndexNonConstantMessageCollectionArgs() {
        testFailLang3Only("Validate.validIndex(collectionArg, 1, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateValidIndexNoMessageChars() {
        testPassLang3Only("Validate.validIndex(stringArg, 1);");
    }

    @Test
    public void testValidateValidIndexConstantMessageCharsNoArgs() {
        testPassLang3Only("Validate.validIndex(stringArg, 1, \"constant\");");
    }

    @Test
    public void testValidateValidIndexConstantMessageCharsArgs() {
        testPassLang3Only("Validate.validIndex(stringArg, 1, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateValidIndexNonConstantMessageCharsNoArgs() {
        testFailLang3Only("Validate.validIndex(stringArg, 1, \"constant\" + param);");
    }

    @Test
    public void testValidateValidIndexNonConstantMessageCharsArgs() {
        testFailLang3Only("Validate.validIndex(stringArg, 1, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateValidStateNoMessage() {
        testPassLang3Only("Validate.validState(bArg);");
    }

    @Test
    public void testValidateValidStateConstantMessageNoArgs() {
        testPassLang3Only("Validate.validState(bArg, \"constant\");");
    }

    @Test
    public void testValidateValidStateConstantMessageArgs() {
        testPassLang3Only("Validate.validState(bArg, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateValidStateNonConstantMessageNoArgs() {
        testFailLang3Only("Validate.validState(bArg, \"constant\" + param);");
    }

    @Test
    public void testValidateValidStateNonConstantMessageArgs() {
        testFailLang3Only("Validate.validState(bArg, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateMatchesPatternNoMessage() {
        testPassLang3Only("Validate.matchesPattern(stringArg, \"[A-Z]+\");");
    }

    @Test
    public void testValidateMatchesPatternConstantMessageNoArgs() {
        testPassLang3Only("Validate.matchesPattern(stringArg, \"[A-Z]+\", \"constant\");");
    }

    @Test
    public void testValidateMatchesPatternConstantMessageArgs() {
        testPassLang3Only("Validate.matchesPattern(stringArg, \"[A-Z]+\", \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateMatchesPatternNonConstantMessageNoArgs() {
        testFailLang3Only("Validate.matchesPattern(stringArg, \"[A-Z]+\", \"constant\" + param);");
    }

    @Test
    public void testValidateMatchesPatternNonConstantMessageArgs() {
        testFailLang3Only("Validate.matchesPattern(stringArg, \"[A-Z]+\", \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateNotNanNoMessage() {
        testPassLang3Only("Validate.notNaN(dArg);");
    }

    @Test
    public void testValidateNotNanConstantMessageNoArgs() {
        testPassLang3Only("Validate.notNaN(dArg, \"constant\");");
    }

    @Test
    public void testValidateNotNanConstantMessageArgs() {
        testPassLang3Only("Validate.notNaN(dArg, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateNotNanNonConstantMessageNoArgs() {
        testFailLang3Only("Validate.notNaN(dArg, \"constant\" + param);");
    }

    @Test
    public void testValidateNotNanNonConstantMessageArgs() {
        testFailLang3Only("Validate.notNaN(dArg, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateFiniteNoMessage() {
        testPassLang3Only("Validate.finite(dArg);");
    }

    @Test
    public void testValidateFiniteConstantMessageNoArgs() {
        testPassLang3Only("Validate.finite(dArg, \"constant\");");
    }

    @Test
    public void testValidateFiniteConstantMessageArgs() {
        testPassLang3Only("Validate.finite(dArg, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateFiniteNonConstantMessageNoArgs() {
        testFailLang3Only("Validate.finite(dArg, \"constant\" + param);");
    }

    @Test
    public void testValidateFiniteNonConstantMessageArgs() {
        testFailLang3Only("Validate.finite(dArg, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateInclusiveBetweenNoMessageLong() {
        testPassLang3Only("Validate.inclusiveBetween(0L, 100L, 50L);");
    }

    @Test
    public void testValidateInclusiveBetweenConstantMessageLongNoArgs() {
        testPassLang3Only("Validate.inclusiveBetween(0L, 100L, 50L, \"constant\");");
    }

    @Test
    public void testValidateInclusiveBetweenConstantMessageLongArgs() {
        testPassLang3Only("Validate.inclusiveBetween(0L, 100L, 50L, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateInclusiveBetweenNonConstantMessageLongNoArgs() {
        testFailLang3Only("Validate.inclusiveBetween(0L, 100L, 50L, \"constant\" + param);");
    }

    @Test
    public void testValidateInclusiveBetweenNonConstantMessageLongArgs() {
        testFailLang3Only("Validate.inclusiveBetween(0L, 100L, 50L, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateInclusiveBetweenNoMessageDouble() {
        testPassLang3Only("Validate.inclusiveBetween(0.0, 1.0, 0.5);");
    }

    @Test
    public void testValidateInclusiveBetweenConstantMessageDoubleNoArgs() {
        testPassLang3Only("Validate.inclusiveBetween(0.0, 1.0, 0.5, \"constant\");");
    }

    @Test
    public void testValidateInclusiveBetweenConstantMessageDoubleArgs() {
        // Uses "constant %s" for consistency with the Long/Comparable variants of
        // this test (previously "constant" without a placeholder).
        testPassLang3Only("Validate.inclusiveBetween(0.0, 1.0, 0.5, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateInclusiveBetweenNonConstantMessageDoubleNoArgs() {
        testFailLang3Only("Validate.inclusiveBetween(0.0, 1.0, 0.5, \"constant\" + param);");
    }

    @Test
    public void testValidateInclusiveBetweenNonConstantMessageDoubleArgs() {
        testFailLang3Only("Validate.inclusiveBetween(0.0, 1.0, 0.5, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateInclusiveBetweenNoMessageComparable() {
        testPassLang3Only("Validate.inclusiveBetween(BigDecimal.ZERO, BigDecimal.TEN, BigDecimal.ONE);");
    }

    @Test
    public void testValidateInclusiveBetweenConstantMessageComparableNoArgs() {
        testPassLang3Only("Validate.inclusiveBetween(BigDecimal.ZERO, BigDecimal.TEN, BigDecimal.ONE, \"constant\");");
    }

    @Test
    public void testValidateInclusiveBetweenConstantMessageComparableArgs() {
        testPassLang3Only("Validate.inclusiveBetween(BigDecimal.ZERO, BigDecimal.TEN, BigDecimal.ONE, \"constant %s\", "
                + "\"arg\");");
    }

    @Test
    public void testValidateInclusiveBetweenNonConstantMessageComparableNoArgs() {
        testFailLang3Only("Validate.inclusiveBetween(BigDecimal.ZERO, BigDecimal.TEN, BigDecimal.ONE, "
                + "\"constant\" + param);");
    }

    @Test
    public void testValidateInclusiveBetweenNonConstantMessageComparableArgs() {
        testFailLang3Only("Validate.inclusiveBetween(BigDecimal.ZERO, BigDecimal.TEN, BigDecimal.ONE, "
                + "\"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateExclusiveBetweenNoMessageLong() {
        testPassLang3Only("Validate.exclusiveBetween(0L, 100L, 50L);");
    }

    @Test
    public void testValidateExclusiveBetweenConstantMessageLongNoArgs() {
        testPassLang3Only("Validate.exclusiveBetween(0L, 100L, 50L, \"constant\");");
    }

    @Test
    public void testValidateExclusiveBetweenConstantMessageLongArgs() {
        testPassLang3Only("Validate.exclusiveBetween(0L, 100L, 50L, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateExclusiveBetweenNonConstantMessageLongNoArgs() {
        testFailLang3Only("Validate.exclusiveBetween(0L, 100L, 50L, \"constant\" + param);");
    }

    @Test
    public void testValidateExclusiveBetweenNonConstantMessageLongArgs() {
        testFailLang3Only("Validate.exclusiveBetween(0L, 100L, 50L, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateExclusiveBetweenNoMessageDouble() {
        testPassLang3Only("Validate.exclusiveBetween(0.0, 1.0, 0.5);");
    }

    @Test
    public void testValidateExclusiveBetweenConstantMessageDoubleNoArgs() {
        testPassLang3Only("Validate.exclusiveBetween(0.0, 1.0, 0.5, \"constant\");");
    }

    @Test
    public void testValidateExclusiveBetweenConstantMessageDoubleArgs() {
        testPassLang3Only("Validate.exclusiveBetween(0.0, 1.0, 0.5, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateExclusiveBetweenNonConstantMessageDoubleNoArgs() {
        testFailLang3Only("Validate.exclusiveBetween(0.0, 1.0, 0.5, \"constant\" + param);");
    }

    @Test
    public void testValidateExclusiveBetweenNonConstantMessageDoubleArgs() {
        testFailLang3Only("Validate.exclusiveBetween(0.0, 1.0, 0.5, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateExclusiveBetweenNoMessageComparable() {
        testPassLang3Only("Validate.exclusiveBetween(BigDecimal.ZERO, BigDecimal.TEN, BigDecimal.ONE);");
    }

    @Test
    public void testValidateExclusiveBetweenConstantMessageComparableNoArgs() {
        testPassLang3Only("Validate.exclusiveBetween(BigDecimal.ZERO, BigDecimal.TEN, BigDecimal.ONE, \"constant\");");
    }

    @Test
    public void testValidateExclusiveBetweenConstantMessageComparableArgs() {
        testPassLang3Only("Validate.exclusiveBetween(BigDecimal.ZERO, BigDecimal.TEN, BigDecimal.ONE, \"constant %s\", "
                + "\"arg\");");
    }

    @Test
    public void testValidateExclusiveBetweenNonConstantMessageComparableNoArgs() {
        testFailLang3Only("Validate.exclusiveBetween(BigDecimal.ZERO, BigDecimal.TEN, BigDecimal.ONE, "
                + "\"constant\" + param);");
    }

    @Test
    public void testValidateExclusiveBetweenNonConstantMessageComparableArgs() {
        testFailLang3Only("Validate.exclusiveBetween(BigDecimal.ZERO, BigDecimal.TEN, BigDecimal.ONE, "
                + "\"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateIsInstanceOfNoMessage() {
        testPassLang3Only("Validate.isInstanceOf(BigDecimal.class, BigDecimal.ONE);");
    }

    @Test
    public void testValidateIsInstanceOfConstantMessageNoArgs() {
        testPassLang3Only("Validate.isInstanceOf(BigDecimal.class, BigDecimal.ONE, \"constant\");");
    }

    @Test
    public void testValidateIsInstanceOfConstantMessageArgs() {
        testPassLang3Only("Validate.isInstanceOf(BigDecimal.class, BigDecimal.ONE, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateIsInstanceOfNonConstantMessageNoArgs() {
        testFailLang3Only("Validate.isInstanceOf(BigDecimal.class, BigDecimal.ONE, \"constant\" + param);");
    }

    @Test
    public void testValidateIsInstanceOfNonConstantMessageArgs() {
        testFailLang3Only("Validate.isInstanceOf(BigDecimal.class, BigDecimal.ONE, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateIsAssignableFromNoMessage() {
        testPassLang3Only("Validate.isAssignableFrom(Object.class, BigDecimal.class);");
    }

    @Test
    public void testValidateIsAssignableFromConstantMessageNoArgs() {
        testPassLang3Only("Validate.isAssignableFrom(Object.class, BigDecimal.class, \"constant\");");
    }

    @Test
    public void testValidateIsAssignableFromConstantMessageArgs() {
        testPassLang3Only("Validate.isAssignableFrom(Object.class, BigDecimal.class, \"constant %s\", \"arg\");");
    }

    @Test
    public void testValidateIsAssignableFromNonConstantMessageNoArgs() {
        testFailLang3Only("Validate.isAssignableFrom(Object.class, BigDecimal.class, \"constant\" + param);");
    }

    @Test
    public void testValidateIsAssignableFromNonConstantMessageArgs() {
        testFailLang3Only("Validate.isAssignableFrom(Object.class, BigDecimal.class, \"constant\" + param, \"arg\");");
    }

    @Test
    public void testValidateAllElementsOfTypeNoMessage() {
        testPassLang2Only("Validate.allElementsOfType(collectionArg, BigDecimal.class);");
    }

    @Test
    public void testValidateAllElementsOfTypeConstantMessageNoArgs() {
        testPassLang2Only("Validate.allElementsOfType(collectionArg, BigDecimal.class, \"constant\");");
    }

    @Test
    public void testValidateAllElementsOfTypeNonConstantMessageNoArgs() {
        testFailLang2Only("Validate.allElementsOfType(collectionArg, BigDecimal.class, \"constant\" + param);");
    }

    /** Compiles the commons-lang3 call and expects a "non-constant message" diagnostic. */
    private void testFailLang3Only(String call) {
        compilationHelper
                .addSourceLines(
                        "Test.java",
                        "import org.apache.commons.lang3.Validate;",
                        "import java.math.BigDecimal;",
                        "import java.util.Collection;",
                        "import java.util.Map;",
                        "class Test {",
                        "  void f(String param, boolean bArg, int iArg, Object oArg, Integer[] arrayArg, "
                                + "Collection<String> collectionArg, Map<String, String> mapArg, String stringArg, "
                                + "Iterable<String> iterableArg, double dArg) {",
                        "    // BUG: Diagnostic contains: non-constant message",
                        "    " + call,
                        "  }",
                        "}")
                .doTest();
    }

    /** Compiles the commons-lang3 call and expects no diagnostic. */
    private void testPassLang3Only(String call) {
        compilationHelper
                .addSourceLines(
                        "Test.java",
                        "import org.apache.commons.lang3.Validate;",
                        "import java.math.BigDecimal;",
                        "import java.util.Collection;",
                        "import java.util.Map;",
                        "class Test {",
                        "  void f(String param, boolean bArg, int iArg, Object oArg, Integer[] arrayArg, "
                                + "Collection<String> collectionArg, Map<String, String> mapArg, String stringArg, "
                                + "Iterable<String> iterableArg, double dArg) {",
                        "    " + call,
                        "  }",
                        "}")
                .doTest();
    }

    /** Compiles the commons-lang (v2) call and expects a "non-constant message" diagnostic. */
    private void testFailLang2Only(String call) {
        compilationHelper
                .addSourceLines(
                        "Test.java",
                        "import org.apache.commons.lang.Validate;",
                        "import java.math.BigDecimal;",
                        "import java.util.Collection;",
                        "import java.util.Map;",
                        "class Test {",
                        "  void f(String param, boolean bArg, int iArg, Object oArg, Integer[] arrayArg, "
                                + "Collection<String> collectionArg, Map<String, String> mapArg, String stringArg, "
                                + "Iterable<String> iterableArg, double dArg) {",
                        "    // BUG: Diagnostic contains: non-constant message",
                        "    " + call,
                        "  }",
                        "}")
                .doTest();
    }

    /** Compiles the commons-lang (v2) call and expects no diagnostic. */
    private void testPassLang2Only(String call) {
        compilationHelper
                .addSourceLines(
                        "Test.java",
                        "import org.apache.commons.lang.Validate;",
                        "import java.math.BigDecimal;",
                        "import java.util.Collection;",
                        "import java.util.Map;",
                        "class Test {",
                        "  void f(String param, boolean bArg, int iArg, Object oArg, Integer[] arrayArg, "
                                + "Collection<String> collectionArg, Map<String, String> mapArg, String stringArg, "
                                + "Iterable<String> iterableArg, double dArg) {",
                        "    " + call,
                        "  }",
                        "}")
                .doTest();
    }

    /** Runs the call (fully qualified) against both lang and lang3, expecting diagnostics for each. */
    private void testFailBoth(String call) {
        compilationHelper
                .addSourceLines(
                        "Test.java",
                        "import java.math.BigDecimal;",
                        "import java.util.Collection;",
                        "import java.util.Map;",
                        "class Test {",
                        "  void f(String param, boolean bArg, int iArg, Object oArg, Integer[] arrayArg, "
                                + "Collection<String> collectionArg, Map<String, String> mapArg, String stringArg, "
                                + "Iterable<String> iterableArg, double dArg) {",
                        "    // BUG: Diagnostic contains: non-constant message",
                        "    org.apache.commons.lang." + call,
                        "    // BUG: Diagnostic contains: non-constant message",
                        "    org.apache.commons.lang3." + call,
                        "  }",
                        "}")
                .doTest();
    }

    /** Runs the call (fully qualified) against both lang and lang3, expecting no diagnostics. */
    private void testPassBoth(String call) {
        compilationHelper
                .addSourceLines(
                        "Test.java",
                        "import java.math.BigDecimal;",
                        "import java.util.Collection;",
                        "import java.util.Map;",
                        "class Test {",
                        "  void f(String param, boolean bArg, int iArg, Object oArg, Integer[] arrayArg, "
                                + "Collection<String> collectionArg, Map<String, String> mapArg, String stringArg, "
                                + "Iterable<String> iterableArg, double dArg) {",
                        "    org.apache.commons.lang." + call,
                        "    org.apache.commons.lang3." + call,
                        "  }",
                        "}")
                .doTest();
    }
}
| |
/*
* This file is released under terms of BSD license
* See LICENSE file for more information
* @author: not specified
*/
package claw.wani.transformation.sca;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import claw.shenron.transformation.Transformation;
import claw.shenron.translator.Translator;
import claw.tatsu.common.CompilerDirective;
import claw.tatsu.common.Context;
import claw.tatsu.directive.common.Directive;
import claw.tatsu.directive.configuration.AcceleratorConfiguration;
import claw.tatsu.directive.configuration.AcceleratorLocalStrategy;
import claw.tatsu.directive.generator.DirectiveGenerator;
import claw.tatsu.primitive.Body;
import claw.tatsu.primitive.Field;
import claw.tatsu.xcodeml.abstraction.NestedDoStatement;
import claw.tatsu.xcodeml.abstraction.PromotionInfo;
import claw.tatsu.xcodeml.abstraction.Xblock;
import claw.tatsu.xcodeml.exception.IllegalTransformationException;
import claw.tatsu.xcodeml.xnode.XnodeUtil;
import claw.tatsu.xcodeml.xnode.common.Xattr;
import claw.tatsu.xcodeml.xnode.common.Xcode;
import claw.tatsu.xcodeml.xnode.common.XcodeProgram;
import claw.tatsu.xcodeml.xnode.common.Xnode;
import claw.tatsu.xcodeml.xnode.fortran.FmoduleDefinition;
import claw.wani.language.ClawPragma;
import claw.wani.x2t.configuration.Configuration;
import claw.wani.x2t.translator.ClawTranslator;
/**
* Specialized version of SCA transformation for GPU target.
*
* Transformation for the GPU target:
* <ul>
* <li>Automatic promotion is applied to all arrays with intent in, out or
* inout.
* <li>Do statements over the additional dimensions is added as an outer loop
* and wrap the entire body of the subroutine.
* </ul>
*
* Generation of OpenACC directives:
* <ul>
* <li>acc routine seq is generated for subroutine called from the SCA
* subroutine if they are located in the same translation unit.
* <li>acc data region with corresponding present clause for all promoted
* variables with the intent in, out or inout.
* <li>acc parallel region is generated to wrap all the body of the subroutine.
* <li>acc private clause is added to the parallel directive for all local
* variables.
* <li>acc loop is generated for the generated do statement.
* <li>acc loop seq is generated for already existing do statements.
* </ul>
*
* Generation of OpenMP directives on CPU:
* <ul>
* <li>omp parallel do is generated for each generated do statements.
* </ul>
*
* Generation of OpenMP directives on GPU:
* <ul>
* <li>MISSING FEATURE : omp declare target is generated for subroutine called
* from the SCA subroutine if they are located in the same translation unit.
* <li>omp data region with corresponding present clause for all promoted
* variables with the intent to, from or tofrom.
* <li>omp target teams distribute region is generated to wrap all the body of
* the subroutine.
* <li>omp private clause is added to the target directive for all local
* variables.
* <li>omp collapse is generated for the generated do statement (if more that
* 1).
* </ul>
*
* @author clementval
*/
public class ScaGPU extends Sca
{

    /**
     * Constructs a new SCA transformation triggered from a specific pragma for a
     * GPU target.
     *
     * @param directive The directive that triggered the define transformation.
     */
    public ScaGPU(ClawPragma directive)
    {
        super(directive);
    }

    /**
     * Locates the parent function of the pragma, then dispatches the analysis to
     * the ELEMENTAL or the standard code path depending on the function type.
     *
     * @param xcodeml    Current translation unit.
     * @param translator Current translator.
     * @return True if the analysis succeeded. False otherwise.
     */
    @Override
    public boolean analyze(XcodeProgram xcodeml, Translator translator)
    {
        if (!detectParentFunction(xcodeml))
        {
            return false;
        }
        ClawTranslator trans = (ClawTranslator) translator;
        if (_fctType.isElemental())
        {
            return analyzeElemental(xcodeml, trans);
        } else
        {
            return analyzeStandard(xcodeml, trans);
        }
    }

    /**
     * Perform analysis steps for SCA transformation on standard function/subroutine
     * for GPU target.
     *
     * @param xcodeml    Current translation unit.
     * @param translator Current translator.
     * @return True if the analysis succeed. False otherwise.
     */
    private boolean analyzeStandard(XcodeProgram xcodeml, ClawTranslator translator)
    {
        final Context context = xcodeml.context();
        DirectiveGenerator dirGen = context.getGenerator();

        /*
         * Check if unsupported statements are located in the future parallel region.
         * Only relevant when a directive language (OpenACC/OpenMP) is selected.
         */
        if (dirGen.getDirectiveLanguage() != CompilerDirective.NONE)
        {
            // The future parallel region ends before the CONTAINS statement, if any.
            Xnode contains = _fctDef.body().matchSeq(Xcode.F_CONTAINS_STATEMENT);
            Xnode parallelRegionStart = Directive.findParallelRegionStart(context, _fctDef, null);
            Xnode parallelRegionEnd = Directive.findParallelRegionEnd(context, _fctDef, contains);
            List<Xnode> unsupportedStatements = XnodeUtil.getNodes(parallelRegionStart, parallelRegionEnd,
                    dirGen.getUnsupportedStatements());

            if (!unsupportedStatements.isEmpty())
            {
                // RETURN statements that transformReturnStatement() can rewrite are
                // false positives, not blockers.
                List<Xnode> falsePositive = new ArrayList<>();
                for (Xnode statement : unsupportedStatements)
                {
                    if (canTransformReturn(statement))
                    {
                        falsePositive.add(statement);
                    } else
                    {
                        if (statement != null)
                        {
                            xcodeml.addError(
                                    "Unsupported statement in parallel region: " + statement.opcode().fortran(),
                                    statement.lineNo());
                        } else
                        {
                            throw new NullPointerException("statement is null");
                        }
                    }
                }
                // Only one return statement can be transformed at the moment.
                if (falsePositive.size() > 1)
                {
                    return false;
                }
                unsupportedStatements.removeAll(falsePositive);
                if (!unsupportedStatements.isEmpty())
                {
                    return false;
                }
            }
        }
        detectInductionVariables();
        return analyzeDimension(translator.cfg(), xcodeml) && analyzeData(xcodeml, translator);
    }

    /**
     * Check whether a return statement can be transformed or will trigger an
     * unsupported statement error. Currently, only if statement located at the
     * first level of the function definition are transformable.
     *
     * @param returnStmt Node pointing to the return statement.
     * @return True if the return statement is directly nested in a if-then body.
     *         False otherwise.
     */
    private boolean canTransformReturn(Xnode returnStmt)
    {
        // Expected ancestry: RETURN -> BODY -> THEN -> IF -> BODY -> FUNCTION-DEF,
        // and the function definition must be the one being transformed.
        return returnStmt != null && returnStmt.is(Xcode.F_RETURN_STATEMENT) && returnStmt.ancestorIs(Xcode.BODY)
                && returnStmt.ancestor().ancestorIs(Xcode.THEN)
                && returnStmt.ancestor().ancestor().ancestorIs(Xcode.F_IF_STATEMENT)
                && returnStmt.ancestor().ancestor().ancestor().ancestorIs(Xcode.BODY)
                && returnStmt.ancestor().ancestor().ancestor().ancestor().ancestorIs(Xcode.F_FUNCTION_DEFINITION)
                && returnStmt.ancestor().ancestor().ancestor().ancestor().ancestor().equals(_fctDef);
    }

    /**
     * Perform analysis steps for SCA transformation on ELEMENTAL
     * function/subroutine for GPU target.
     *
     * @param xcodeml    Current translation unit.
     * @param translator Current translator.
     * @return True if the analysis succeed. False otherwise.
     */
    private boolean analyzeElemental(XcodeProgram xcodeml, ClawTranslator translator)
    {
        // Elemental needs model-data directive
        if (!_claw.isScaModelConfig() || !translator.cfg().getModelConfig().isLoaded())
        {
            xcodeml.addError("SCA applied in ELEMENTAL function/subroutine " + "requires model configuration!",
                    _claw.getPragma());
            return false;
        }
        return analyzeData(xcodeml, translator);
    }

    /**
     * Dispatches the transformation to the ELEMENTAL or the standard code path
     * depending on the function type.
     *
     * @param xcodeml    Current translation unit.
     * @param translator Current translator.
     * @param other      Unused for this transformation.
     * @throws Exception If the transformation fails.
     */
    @Override
    public void transform(XcodeProgram xcodeml, Translator translator, Transformation other) throws Exception
    {
        ClawTranslator trans = (ClawTranslator) translator;
        if (_fctType.isElemental())
        {
            transformElemental(xcodeml, trans);
        } else
        {
            transformStandard(xcodeml, trans);
        }
    }

    /**
     * Rewrites the function's single conditional RETURN, if any, so that no early
     * exit remains in the future parallel region: the RETURN is deleted and the
     * rest of the function body is moved into a newly created ELSE branch of the
     * enclosing IF.
     *
     * @param xcodeml Current translation unit.
     * @throws IllegalTransformationException If more than one RETURN is present
     *                                        or the RETURN cannot be transformed.
     */
    private void transformReturnStatement(XcodeProgram xcodeml) throws IllegalTransformationException
    {
        List<Xnode> returns = _fctDef.matchAll(Xcode.F_RETURN_STATEMENT);

        if (returns.isEmpty())
        {
            return; // No return statement to be transformed
        }

        if (returns.size() > 1)
        {
            throw new IllegalTransformationException(
                    "RETURN transformation is " + "currently limited to one per subroutine/function");
        }

        Xnode returnStmt = returns.get(0);

        if (!canTransformReturn(returnStmt))
        {
            throw new IllegalTransformationException("RETURN statement cannot be " + "transformed.");
        }

        Xnode thenBody = returnStmt.ancestor();
        Xnode thenNode = thenBody.ancestor();
        Xnode ifNode = thenNode.ancestor();

        Xnode elseNode = xcodeml.createElse();
        ifNode.append(elseNode);
        returnStmt.delete();
        // Leave a marker comment where the RETURN used to be.
        thenBody.append(xcodeml.createComment("CLAW: RETURN statement transformed for parallel region"));
        // Move everything after the IF (up to the end of the function) into ELSE.
        Body.shiftIn(ifNode.nextSibling(), _fctDef.lastChild(), elseNode.body(), true);
    }

    /**
     * Apply transformation on standard function/subroutine.
     *
     * @param xcodeml    Current translation unit.
     * @param translator Current translator.
     * @throws Exception when transformation cannot by applied.
     */
    private void transformStandard(XcodeProgram xcodeml, ClawTranslator translator) throws Exception
    {
        transformReturnStatement(xcodeml);

        // Apply the common transformation
        super.transform(xcodeml, translator, null);

        // Apply specific steps for GPU target
        applySpecificTransformation(translator.cfg(), xcodeml);

        // Finalize the common steps
        super.finalizeTransformation(xcodeml);
    }

    /**
     * Apply transformation on ELEMENTAL function/subroutine.
     *
     * @param xcodeml    Current translation unit.
     * @param translator Current translator.
     * @throws Exception If transformation fails.
     */
    private void transformElemental(XcodeProgram xcodeml, ClawTranslator translator) throws Exception
    {
        /*
         * SCA in ELEMENTAL function. Only flag the function and leave the actual
         * transformation until having information on the calling site from another
         * translation unit.
         */
        if (_fctType.isElemental())
        {
            // Remember that this routine was ELEMENTAL before the attribute is removed.
            _fctType.setBooleanAttribute(Xattr.WAS_ELEMENTAL, true);

            // Functions without an explicit RESULT name return through their own name.
            if (_fctType.isFunction() && !_fctType.hasAttribute(Xattr.RESULT_NAME))
            {
                _arrayFieldsInOut.add(_fctDef.getName());
            }

            if (translator.cfg().getBooleanParameter(Configuration.SCA_ELEMENTAL_PROMOTION_ASSUMED))
            {
                // Force assumed-shape promotion unless the function result itself is
                // among the promoted fields.
                forceAssumedShapedArrayPromotion = _fctType.isSubroutine()
                        || !(_arrayFieldsInOut.contains(_fctType.getResultName())
                                || _arrayFieldsInOut.contains(_fctDef.getName()));
            }

            // SCA ELEMENTAL
            FmoduleDefinition modDef = _fctDef.findParentModule();
            if (modDef == null)
            {
                throw new IllegalTransformationException(
                        "SCA in ELEMENTAL function " + "transformation requires module encapsulation.");
            }

            transformReturnStatement(xcodeml);

            // Apply the common transformation
            super.transform(xcodeml, translator, null);

            // Remove ELEMENTAL and PURE attributes if present.
            // NOTE(review): helper name has a typo ("Waring") — declared in the
            // superclass, so it cannot be renamed from this file alone.
            removeAttributesWithWaring(xcodeml, _fctType, Xattr.IS_ELEMENTAL);
            removeAttributesWithWaring(xcodeml, _fctType, Xattr.IS_PURE);

            // Apply specific steps for GPU
            applySpecificTransformation(translator.cfg(), xcodeml);

            // Finalize the common steps
            super.finalizeTransformation(xcodeml);
        }
    }

    /**
     * Apply specific transformation steps for GPU target.
     *
     * @param cfg     Current configuration.
     * @param xcodeml Current translation unit.
     * @throws IllegalTransformationException If any transformation fails.
     */
    private void applySpecificTransformation(Configuration cfg, XcodeProgram xcodeml)
            throws IllegalTransformationException
    {
        final Context context = xcodeml.context();
        AcceleratorConfiguration config = cfg.accelerator();

        // TODO nodep passing!
        int collapse = Directive.generateLoopSeq(xcodeml, _fctDef, CompilerDirective.CLAW.getPrefix() + " nodep");

        if (_fctDef.hasEmptyBody())
        {
            return; // Nothing to do in this function
        }

        /*
         * Create a nested loop with the new defined dimensions and wrap it around the
         * whole subroutine's body. This is for the moment a really naive transformation
         * idea but it is our start point. Use the first over clause to create it.
         */
        NestedDoStatement loops;
        if (forceAssumedShapedArrayPromotion)
        {
            if (_promotions.isEmpty())
            {
                throw new IllegalTransformationException(
                        "Cannot assume shape of " + "array in elemental function/subroutine.",
                        _claw.getPragma().lineNo());
            }
            // Derive the loop bounds from the first promoted field.
            PromotionInfo pi = _promotions.entrySet().iterator().next().getValue();
            loops = new NestedDoStatement(_claw.getDefaultLayoutReversed(cfg), pi, xcodeml);
        } else
        {
            loops = new NestedDoStatement(_claw.getDefaultLayoutReversed(cfg), xcodeml);
        }

        /*
         * Subroutine/function can have a contains section with inner subroutines or
         * functions. The newly created (nested) do statements should stop before this
         * contains section if it exists.
         */
        Xnode contains = _fctDef.body().matchSeq(Xcode.F_CONTAINS_STATEMENT);
        if (contains != null)
        {
            Xnode parallelRegionStart = Directive.findParallelRegionStart(context, _fctDef, null);
            Xnode parallelRegionEnd = Directive.findParallelRegionEnd(context, _fctDef, contains);
            Body.shiftIn(parallelRegionStart, parallelRegionEnd, loops.getInnerStatement().body(), true);
            contains.insertBefore(loops.getOuterStatement());
        } else
        {
            // No contains section, all the body is copied to the do statements.
            Xnode parallelRegionStart = Directive.findParallelRegionStart(context, _fctDef, null);
            Xnode parallelRegionEnd = Directive.findParallelRegionEnd(context, _fctDef, null);

            // Define a hook from where we can insert the new do statement
            Xnode hook = parallelRegionEnd != null ? parallelRegionEnd.nextSibling() : null;
            Body.shiftIn(parallelRegionStart, parallelRegionEnd, loops.getInnerStatement().body(), true);

            // Hook is null then we append the do statement to the current fct body
            if (hook == null)
            {
                _fctDef.body().append(loops.getOuterStatement());
            } else
            {
                // Insert new do statement before the hook element
                hook.insertBefore(loops.getOuterStatement());
            }
        }

        // Prepare variables list for present/pcreate clauses and handle
        // promotion/privatize local strategy
        List<String> presentList = _fctDef.getPresentVariables(xcodeml);
        List<String> privateList = Collections.emptyList();
        List<String> createList = Collections.emptyList();
        if (config.getLocalStrategy() == AcceleratorLocalStrategy.PRIVATE)
        {
            privateList = applyPrivateStrategy(xcodeml);
        } else if (config.getLocalStrategy() == AcceleratorLocalStrategy.PROMOTE)
        {
            createList = applyPromoteStrategy(cfg, xcodeml);
        }

        // Generate the data region
        Xblock doStmtBlock = new Xblock(loops.getOuterStatement());
        Directive.generateDataRegionClause(xcodeml, presentList, createList, doStmtBlock);

        // Generate the parallel region
        Directive.generateParallelLoopClause(xcodeml, privateList, loops.getOuterStatement(), loops.getOuterStatement(),
                null, loops.size() + collapse);
        Directive.generateRoutineDirectives(xcodeml, _fctDef);
    }

    /**
     * Apply the private local array strategy. Gather all information about local
     * array requiring a privatization.
     *
     * @param xcodeml Current translation unit.
     * @return List of private variables.
     */
    private List<String> applyPrivateStrategy(XcodeProgram xcodeml)
    {
        List<String> privateList = _fctDef.getLocalVariables(xcodeml, true);
        // Iterate over a copy to be able to remove items
        for (String identifier : new ArrayList<>(privateList))
        {
            // Promoted variables must not also be privatized.
            if (_promotions.containsKey(identifier))
            {
                privateList.remove(identifier);
            }
        }
        return privateList;
    }

    /**
     * Apply the promotion local array strategy. Gather all information about local
     * variable requiring a promotion and apply it.
     *
     * @param cfg     Current configuration.
     * @param xcodeml Current translation unit.
     * @return List of promoted variable requiring an allocation.
     * @throws IllegalTransformationException If promotion of variable fails.
     */
    private List<String> applyPromoteStrategy(Configuration cfg, XcodeProgram xcodeml)
            throws IllegalTransformationException
    {
        List<String> createList = _fctDef.getLocalVariables(xcodeml, true);
        for (String arrayIdentifier : createList)
        {
            _arrayFieldsInOut.add(arrayIdentifier);
            PromotionInfo promotionInfo = new PromotionInfo(arrayIdentifier,
                    _claw.getLayoutForData(cfg, arrayIdentifier));

            // Promote the field, then rewrite its references and allocations.
            Field.promote(promotionInfo, _fctDef, xcodeml);
            _promotions.put(arrayIdentifier, promotionInfo);

            Field.adaptArrayRef(promotionInfo, _fctDef.body(), false, xcodeml);
            Field.adaptAllocate(promotionInfo, _fctDef.body(), xcodeml);
        }
        return createList;
    }
}
| |
package koopa.grammars.cobol.test;
import junit.framework.TestCase;
import koopa.parsers.Parser;
import koopa.tokenizers.cobol.SourceFormat;
import koopa.tokenizers.cobol.TestTokenizer;
import org.junit.Test;
/** This code was generated from Identifier.stage. */
public class IdentifierTest extends TestCase {

  // NOTE(review): this file is generated; this deduplication should be folded
  // back into the Identifier.stage generator or it will be lost on regeneration.

  private static koopa.grammars.cobol.CobolGrammar grammar = new koopa.grammars.cobol.CobolGrammar();

  /**
   * Asserts that {@code input} is accepted by the identifier rule and that the
   * tokenizer stopped at the expected position (whole input consumed).
   */
  private void assertAccepts(String input) {
    Parser parser = grammar.identifier();
    assertNotNull(parser);
    TestTokenizer tokenizer = new TestTokenizer(SourceFormat.FREE, input);
    assertTrue(parser.accepts(tokenizer));
    assertTrue(tokenizer.isWhereExpected());
  }

  /**
   * Asserts that {@code input} is not fully accepted by the identifier rule:
   * either the parse fails or the tokenizer stops in an unexpected position.
   */
  private void assertRejects(String input) {
    Parser parser = grammar.identifier();
    assertNotNull(parser);
    TestTokenizer tokenizer = new TestTokenizer(SourceFormat.FREE, input);
    assertFalse(parser.accepts(tokenizer) && tokenizer.isWhereExpected());
  }

  @Test
  public void testIdentifier_1() {
    assertAccepts(" MY-FIELD ");
  }

  @Test
  public void testIdentifier_2() {
    assertRejects(" 42 ");
  }

  @Test
  public void testIdentifier_3() {
    assertRejects(" 42.42 ");
  }

  @Test
  public void testIdentifier_4() {
    assertRejects(" \"TEXT\" ");
  }

  @Test
  public void testIdentifier_5() {
    assertAccepts(" MY-FIELD OF MY-RECORD ");
  }

  @Test
  public void testIdentifier_6() {
    assertAccepts(" MY-FIELD IN MY-RECORD ");
  }

  @Test
  public void testIdentifier_7() {
    assertAccepts(" MY-FIELD OF MY-RECORD-1 IN MY-RECORD-2 ");
  }

  @Test
  public void testIdentifier_8() {
    assertAccepts(" MY-FIELD IN MY-RECORD-1 OF MY-RECORD-2 ");
  }

  @Test
  public void testIdentifier_9() {
    assertAccepts(" MY-FIELD ( 42 ) ");
  }

  @Test
  public void testIdentifier_10() {
    assertRejects(" MY-FIELD ( 42.0 ) ");
  }

  @Test
  public void testIdentifier_11() {
    assertAccepts(" MY-FIELD ( MY-INDEX ) ");
  }

  @Test
  public void testIdentifier_12() {
    assertAccepts(" MY-FIELD ( MY-INDEX + 42 ) ");
  }

  @Test
  public void testIdentifier_13() {
    assertAccepts(" MY-FIELD ( MY-INDEX - 42 ) ");
  }

  @Test
  public void testIdentifier_14() {
    assertAccepts(" MY-FIELD ( MY-INDEX : 42 ) ");
  }

  @Test
  public void testIdentifier_15() {
    assertAccepts(" MY-FIELD ( MY-INDEX : MY-LENGTH ) ");
  }

  @Test
  public void testIdentifier_16() {
    assertAccepts(" MY-FIELD ( 42 : ) ");
  }

  @Test
  public void testIdentifier_17() {
    assertAccepts(" MY-FIELD ( MY-INDEX : ) ");
  }

  @Test
  public void testIdentifier_18() {
    assertAccepts(" MY-FIELD ( 1 2 3 ) ( 42 : 11 ) ");
  }

  @Test
  public void testIdentifier_19() {
    assertAccepts(" MY-FIELD ( A B C ) ( MY-POSITION : MY-LENGTH ) ");
  }

  @Test
  public void testIdentifier_20() {
    assertAccepts(" MY-FIELD ( A + 1 B C - 2 ) ( 42 : ) ");
  }

  @Test
  public void testIdentifier_21() {
    assertAccepts(" FUNCTION CAT ");
  }

  @Test
  public void testIdentifier_22() {
    assertAccepts(" FUNCTION CAT ( 1 : 7 ) ");
  }

  @Test
  public void testIdentifier_23() {
    assertAccepts(" FUNCTION CAT ( 3 ) ");
  }

  @Test
  public void testIdentifier_24() {
    assertAccepts(" FUNCTION CAT ( 3 ) ( 1 : 7 ) ");
  }

  @Test
  public void testIdentifier_25() {
    assertAccepts(" FUNCTION CAT ( 3 ) ");
  }

  @Test
  public void testIdentifier_26() {
    assertAccepts(" FUNCTION CAT ( 3 ) ( 1 : 7 ) ");
  }

  @Test
  public void testIdentifier_27() {
    assertAccepts(" FUNCTION CAT ( 3 \" bottles of beer\" ) ");
  }

  @Test
  public void testIdentifier_28() {
    assertAccepts(" FUNCTION CAT ( 3 \" bottles of beer\" ) ( 1 : 7 ) ");
  }

  @Test
  public void testIdentifier_29() {
    assertAccepts(" FUNCTION FOO ( 1 FUNCTION INC ( 1 ) ) ");
  }

  @Test
  public void testIdentifier_30() {
    assertAccepts(" FUNCTION CHAR ( IND ( 5 ) ) ");
  }

  @Test
  public void testIdentifier_31() {
    assertAccepts(" FUNCTION CHAR ( IND ( C ) ) ");
  }

  @Test
  public void testIdentifier_32() {
    assertAccepts(" FUNCTION NUMVAL ( \"4738\" ) ");
  }

  @Test
  public void testIdentifier_33() {
    assertAccepts(" FUNCTION MAX ( WS-TABLE ( ALL ALL ALL ) ) ");
  }

  @Test
  public void testIdentifier_34() {
    assertAccepts(" AX-2 IN AX ( CX-SUB OF CX ) ");
  }

  @Test
  public void testIdentifier_35() {
    assertAccepts(" UNQUAL-ITEM ( SUB1 OF SUBSCRIPTS OF SUBSCRIPTS-PART1 ) ");
  }

  @Test
  public void testIdentifier_36() {
    assertAccepts(" ADDRESS OF CAT ");
  }

  @Test
  public void testIdentifier_37() {
    assertAccepts(" EXCEPTION-OBJECT ");
  }

  @Test
  public void testIdentifier_38() {
    assertAccepts(" NULL ");
  }

  @Test
  public void testIdentifier_39() {
    assertAccepts(" SELF ");
  }

  @Test
  public void testIdentifier_40() {
    assertAccepts(" SUPER ");
  }

  @Test
  public void testIdentifier_41() {
    assertAccepts(" FOO OF SUPER ");
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.UUID;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import com.google.common.collect.Lists;
import com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteArrayDataOutput;
import com.google.common.io.ByteStreams;
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable,
HeapSize {
  // Fixed per-instance heap overhead of a Mutation, aligned; presumably added
  // to the per-cell sizes by HeapSize implementations — TODO confirm in subclasses.
  public static final long MUTATION_OVERHEAD = ClassSize.align(
      // This
      ClassSize.OBJECT +
      // row + OperationWithAttributes.attributes
      2 * ClassSize.REFERENCE +
      // Timestamp
      1 * Bytes.SIZEOF_LONG +
      // durability
      ClassSize.REFERENCE +
      // familyMap reference
      ClassSize.REFERENCE +
      // familyMap TreeMap instance
      ClassSize.TREEMAP);

  /**
   * The attribute for storing the list of clusters that have consumed the change.
   */
  private static final String CONSUMED_CLUSTER_IDS = "_cs.id";

  // Row key of this mutation; null until assigned by a subclass.
  protected byte [] row = null;
  // Timestamp for this mutation; defaults to LATEST_TIMESTAMP until set.
  protected long ts = HConstants.LATEST_TIMESTAMP;
  // Requested durability; USE_DEFAULT presumably defers to table settings —
  // TODO confirm against Durability semantics.
  protected Durability durability = Durability.USE_DEFAULT;

  // A Map sorted by column family (byte[] keys ordered by BYTES_COMPARATOR).
  protected NavigableMap<byte [], List<Cell>> familyMap =
    new TreeMap<byte [], List<Cell>>(Bytes.BYTES_COMPARATOR);
  // CellScannable implementation: scans every cell of this mutation, backed by
  // the family map returned by getFamilyCellMap().
  @Override
  public CellScanner cellScanner() {
    return CellUtil.createCellScanner(getFamilyCellMap());
  }
  /**
   * Creates an empty list if one doesn't exist for the given column family
   * or else it returns the associated list of Cell objects.
   *
   * <p>Note: a newly created list is NOT inserted into {@code familyMap};
   * callers that add cells to it are responsible for putting it back.
   *
   * @param family column family
   * @return a list of Cell objects, returns an empty list if one doesn't exist.
   */
  List<Cell> getCellList(byte[] family) {
    List<Cell> list = this.familyMap.get(family);
    if (list == null) {
      list = new ArrayList<Cell>();
    }
    return list;
  }
  /*
   * Create a KeyValue with this objects row key and the Put identifier.
   *
   * @return a KeyValue with this objects row key and the Put identifier.
   */
  KeyValue createPutKeyValue(byte[] family, byte[] qualifier, long ts, byte[] value) {
    return new KeyValue(this.row, family, qualifier, ts, KeyValue.Type.Put, value);
  }
  /*
   * Create a KeyValue with this objects row key and the Put identifier.
   * ByteBuffer variant: qualifier and value are passed as buffers; row and
   * family offsets/lengths guard against null arrays.
   *
   * @return a KeyValue with this objects row key and the Put identifier.
   */
  KeyValue createPutKeyValue(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value) {
    return new KeyValue(this.row, 0, this.row == null ? 0 : this.row.length,
        family, 0, family == null ? 0 : family.length,
        qualifier, ts, KeyValue.Type.Put, value);
  }
  /**
   * Compile the column family (i.e. schema) information
   * into a Map. Useful for parsing and aggregation by debugging,
   * logging, and administration tools.
   *
   * <p>Only family names are reported; per-cell detail is left to
   * {@code toMap(int)}. Table information is not available on an Operation
   * instance, so it cannot be included.
   *
   * @return Map
   */
  @Override
  public Map<String, Object> getFingerprint() {
    Map<String, Object> map = new HashMap<String, Object>();
    List<String> families = new ArrayList<String>();
    // ideally, we would also include table information, but that information
    // is not stored in each Operation instance.
    map.put("families", families);
    for (Map.Entry<byte [], List<Cell>> entry : this.familyMap.entrySet()) {
      families.add(Bytes.toStringBinary(entry.getKey()));
    }
    return map;
  }
/**
 * Compile the details beyond the scope of getFingerprint (row, columns,
 * timestamps, etc.) into a Map along with the fingerprinted information.
 * Useful for debugging, logging, and administration tools.
 * @param maxCols a limit on the number of columns output prior to truncation
 * @return Map
 */
@Override
public Map<String, Object> toMap(int maxCols) {
  // we start with the fingerprint map and build on top of it.
  Map<String, Object> map = getFingerprint();
  // replace the fingerprint's simple list of families with a
  // map from column families to lists of qualifiers and kv details
  Map<String, List<Map<String, Object>>> columns =
      new HashMap<String, List<Map<String, Object>>>();
  map.put("families", columns);
  map.put("row", Bytes.toStringBinary(this.row));
  int colCount = 0;
  // iterate through all column families affected
  for (Map.Entry<byte [], List<Cell>> entry : this.familyMap.entrySet()) {
    // map from this family to details for each cell affected within the family
    List<Map<String, Object>> qualifierDetails = new ArrayList<Map<String, Object>>();
    columns.put(Bytes.toStringBinary(entry.getKey()), qualifierDetails);
    colCount += entry.getValue().size();
    // colCount keeps accumulating even after maxCols is exhausted, so
    // "totalColumns" always reflects the true total; only the per-cell
    // detail output below is truncated.
    if (maxCols <= 0) {
      continue;
    }
    // add details for each cell
    for (Cell cell: entry.getValue()) {
      // NOTE: 'continue' (not 'break') — once the budget runs out the loop
      // still iterates the remaining cells but emits no further details.
      if (--maxCols <= 0 ) {
        continue;
      }
      // KeyValue v1 expectation. Cast for now until we go all Cell all the time.
      KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
      Map<String, Object> kvMap = kv.toStringMap();
      // row and family information are already available in the bigger map
      kvMap.remove("row");
      kvMap.remove("family");
      qualifierDetails.add(kvMap);
    }
  }
  map.put("totalColumns", colCount);
  // add the id if set
  if (getId() != null) {
    map.put("id", getId());
  }
  return map;
}
/**
 * Set the durability for this mutation.
 * @param d the desired WAL durability level
 */
public void setDurability(Durability d) {
  this.durability = d;
}
/** Get the current durability */
public Durability getDurability() {
  return this.durability;
}
/**
 * Set the durability for this mutation. If this is set to true,
 * the default durability of the table is set.
 * @param writeToWal
 */
@Deprecated
public void setWriteToWAL(boolean writeToWal) {
  // USE_DEFAULT (rather than a no-op) handles the case where this method is
  // called twice, first with writeToWal = false and then with true.
  setDurability(writeToWal ? Durability.USE_DEFAULT : Durability.SKIP_WAL);
}
/**
 * Get the durability for this mutation.
 * @return - true if this mutation is set to write to the WAL either
 * synchronously, asynchronously or fsync to disk on the file system.
 * - to get the exact durability, use the {#getDurability} method.
 */
@Deprecated
public boolean getWriteToWAL() {
  // Everything except SKIP_WAL counts as "writes to the WAL" here.
  return Durability.SKIP_WAL != getDurability();
}
/*
 * Method for retrieving the put's familyMap (family -> KeyValues)
 * Application should use the getFamilyCellMap and the Cell interface instead of KeyValue.
 *
 * @return a fresh TreeMap copy of familyMap with cells converted to KeyValues
 */
@Deprecated
public Map<byte[], List<KeyValue>> getFamilyMap() {
  Map<byte[], List<KeyValue>> converted =
      new TreeMap<byte[], List<KeyValue>>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], List<Cell>> entry : this.familyMap.entrySet()) {
    List<Cell> cells = entry.getValue();
    List<KeyValue> kvs = new ArrayList<KeyValue>(cells.size());
    for (Cell cell : cells) {
      kvs.add(KeyValueUtil.ensureKeyValue(cell));
    }
    converted.put(entry.getKey(), kvs);
  }
  return converted;
}
/**
 * Method for retrieving the put's familyMap.
 * Returns the live internal map, not a copy — mutations to it affect this object.
 * @return familyMap
 */
public NavigableMap<byte [], List<Cell>> getFamilyCellMap() {
  return this.familyMap;
}
/**
 * Method for setting the put's familyMap.
 * The given map replaces (and is retained as) the internal map without copying.
 */
public void setFamilyCellMap(NavigableMap<byte [], List<Cell>> map) {
  // TODO: Shut this down or move it up to be a Constructor. Get new object rather than change
  // this internal data member.
  this.familyMap = map;
}
/**
 * Method for setting the put's familyMap that is deprecated and inefficient.
 * @deprecated use {@link #setFamilyCellMap(NavigableMap)} instead.
 */
@Deprecated
public void setFamilyMap(NavigableMap<byte [], List<KeyValue>> map) {
  TreeMap<byte[], List<Cell>> converted = new TreeMap<byte[], List<Cell>>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], List<KeyValue>> entry : map.entrySet()) {
    // Widen each KeyValue list to a list of Cells (KeyValue implements Cell).
    List<Cell> cells = Lists.<Cell>newArrayList(entry.getValue());
    converted.put(entry.getKey(), cells);
  }
  this.familyMap = converted;
}
/**
 * Method to check if the familyMap is empty
 * @return true if empty, false otherwise
 */
public boolean isEmpty() {
  return familyMap.isEmpty();
}
/**
 * Method for retrieving the delete's row
 * @return row
 */
@Override
public byte [] getRow() {
  // Returned array is the internal reference, not a copy.
  return this.row;
}
/**
 * Orders mutations lexicographically by row key.
 * @param d the other Row to compare against
 * @return negative/zero/positive per the byte-wise row comparison
 */
public int compareTo(final Row d) {
  return Bytes.compareTo(this.getRow(), d.getRow());
}
/**
 * Method for retrieving the timestamp
 * @return timestamp (HConstants.LATEST_TIMESTAMP unless explicitly set)
 */
public long getTimeStamp() {
  return this.ts;
}
/**
 * Marks that the clusters with the given clusterIds have consumed the mutation
 * @param clusterIds of the clusters that have consumed the mutation
 */
public void setClusterIds(List<UUID> clusterIds) {
  // Serialize as: count, then (msb, lsb) long pair per UUID.
  ByteArrayDataOutput output = ByteStreams.newDataOutput();
  output.writeInt(clusterIds.size());
  for (UUID id : clusterIds) {
    output.writeLong(id.getMostSignificantBits());
    output.writeLong(id.getLeastSignificantBits());
  }
  setAttribute(CONSUMED_CLUSTER_IDS, output.toByteArray());
}
/**
 * @return the set of clusterIds that have consumed the mutation; empty list
 *         when the attribute has never been set
 */
public List<UUID> getClusterIds() {
  List<UUID> ids = new ArrayList<UUID>();
  byte[] serialized = getAttribute(CONSUMED_CLUSTER_IDS);
  if (serialized == null) {
    return ids;
  }
  ByteArrayDataInput input = ByteStreams.newDataInput(serialized);
  // Format mirrors setClusterIds: count, then (msb, lsb) per UUID.
  for (int remaining = input.readInt(); remaining > 0; remaining--) {
    long msb = input.readLong();
    long lsb = input.readLong();
    ids.add(new UUID(msb, lsb));
  }
  return ids;
}
/**
 * Number of KeyValues carried by this Mutation.
 * @return the total number of KeyValues
 */
public int size() {
  int total = 0;
  for (Map.Entry<byte[], List<Cell>> entry : this.familyMap.entrySet()) {
    total += entry.getValue().size();
  }
  return total;
}
/**
 * @return the number of different families
 */
public int numFamilies() {
  return familyMap.size();
}
/**
 * @return Calculate what Mutation adds to class heap size.
 */
@Override
public long heapSize() {
  long heapsize = MUTATION_OVERHEAD;
  // Adding row
  // NOTE(review): assumes this.row is non-null by the time heapSize() is
  // called — verify subclass constructors always set it.
  heapsize += ClassSize.align(ClassSize.ARRAY + this.row.length);
  // Adding map overhead
  heapsize +=
      ClassSize.align(this.familyMap.size() * ClassSize.MAP_ENTRY);
  for(Map.Entry<byte [], List<Cell>> entry : this.familyMap.entrySet()) {
    //Adding key overhead
    heapsize +=
        ClassSize.align(ClassSize.ARRAY + entry.getKey().length);
    //This part is kinds tricky since the JVM can reuse references if you
    //store the same value, but have a good match with SizeOf at the moment
    //Adding value overhead
    heapsize += ClassSize.align(ClassSize.ARRAYLIST);
    int size = entry.getValue().size();
    // Backing array of the ArrayList: one reference slot per cell.
    heapsize += ClassSize.align(ClassSize.ARRAY +
        size * ClassSize.REFERENCE);
    for(Cell cell : entry.getValue()) {
      KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
      heapsize += kv.heapSize();
    }
  }
  // Attribute map plus any subclass-specific fields.
  heapsize += getAttributeSize();
  heapsize += extraHeapSize();
  return ClassSize.align(heapsize);
}
/**
 * Subclasses should override this method to add the heap size of their own fields.
 * @return the heap size to add (will be aligned).
 */
protected long extraHeapSize(){
  return 0L;
}
/**
 * @param row Row to check
 * @throws IllegalArgumentException Thrown if <code>row</code> is empty or null or
 * &gt; {@link HConstants#MAX_ROW_LENGTH}
 * @return <code>row</code>
 */
static byte [] checkRow(final byte [] row) {
  // A null row is passed through as length 0 so the overload reports
  // "Row length is 0"... actually the overload checks null first, so a null
  // row still raises "Row buffer is null".
  return checkRow(row, 0, row == null? 0: row.length);
}
/**
 * @param row Row to check
 * @param offset unused by the validation itself; documents the caller's slice start
 * @param length number of row bytes to validate
 * @throws IllegalArgumentException Thrown if <code>row</code> is empty or null or
 * &gt; {@link HConstants#MAX_ROW_LENGTH}
 * @return <code>row</code>
 */
static byte [] checkRow(final byte [] row, final int offset, final int length) {
  // Check order matters: null wins over zero-length when both apply.
  if (row == null) {
    throw new IllegalArgumentException("Row buffer is null");
  }
  if (length == 0) {
    throw new IllegalArgumentException("Row length is 0");
  }
  if (length > HConstants.MAX_ROW_LENGTH) {
    throw new IllegalArgumentException("Row length " + length + " is > " +
      HConstants.MAX_ROW_LENGTH);
  }
  return row;
}
/**
 * ByteBuffer variant of row validation; checks remaining() instead of length.
 * @param row Row buffer to check
 * @throws IllegalArgumentException if the buffer is null, empty, or longer
 *         than {@link HConstants#MAX_ROW_LENGTH}
 */
static void checkRow(ByteBuffer row) {
  if (row == null) {
    throw new IllegalArgumentException("Row buffer is null");
  }
  if (row.remaining() == 0) {
    throw new IllegalArgumentException("Row length is 0");
  }
  if (row.remaining() > HConstants.MAX_ROW_LENGTH) {
    throw new IllegalArgumentException("Row length " + row.remaining() + " is > " +
      HConstants.MAX_ROW_LENGTH);
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.service;
import java.net.InetAddress;
import java.util.*;
import java.util.concurrent.*;
import com.google.common.collect.Sets;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.compaction.PrecompactedRow;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.gms.Gossiper;
import org.apache.cassandra.locator.AbstractReplicationStrategy;
import org.apache.cassandra.locator.TokenMetadata;
import static org.apache.cassandra.service.ActiveRepairService.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.MerkleTree;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Base class for anti-entropy (repair) service tests. Subclasses supply the
 * table/CF under test via {@link #init()} and sample data via
 * {@link #getWriteData()}; this class exercises Validator tree construction,
 * neighbor selection, and tree differencing against a spoofed remote endpoint.
 */
public abstract class AntiEntropyServiceTestAbstract extends SchemaLoader
{
    // table and column family to test against
    public ActiveRepairService aes;
    public String tablename;
    public String cfname;
    public TreeRequest request;
    public ColumnFamilyStore store;
    // LOCAL is this node's broadcast address; REMOTE is a spoofed peer.
    public InetAddress LOCAL, REMOTE;
    public Range<Token> local_range;
    // One-time init guard shared across all tests in the fixture.
    private boolean initialized;

    /** Subclass hook: set tablename/cfname before the first test runs. */
    public abstract void init();

    /** Subclass hook: sample mutations to populate the CF under test. */
    public abstract List<IMutation> getWriteData();

    /**
     * Per-test setup: performs one-time CF lookup, then resets token metadata,
     * registers the spoofed REMOTE endpoint, and creates a fresh TreeRequest
     * plus its artificial repair session.
     */
    @Before
    public void prepare() throws Exception
    {
        if (!initialized)
        {
            initialized = true;
            init();
            LOCAL = FBUtilities.getBroadcastAddress();
            // generate a fake endpoint for which we can spoof receiving/sending trees
            REMOTE = InetAddress.getByName("127.0.0.2");
            store = null;
            for (ColumnFamilyStore cfs : Table.open(tablename).getColumnFamilyStores())
            {
                if (cfs.name.equals(cfname))
                {
                    store = cfs;
                    break;
                }
            }
            assert store != null : "CF not found: " + cfname;
        }
        aes = ActiveRepairService.instance;
        TokenMetadata tmd = StorageService.instance.getTokenMetadata();
        tmd.clearUnsafe();
        StorageService.instance.setTokens(Collections.singleton(StorageService.getPartitioner().getRandomToken()));
        tmd.updateNormalToken(StorageService.getPartitioner().getMinimumToken(), REMOTE);
        assert tmd.isMember(REMOTE);
        Gossiper.instance.initializeNodeUnsafe(REMOTE, UUID.randomUUID(), 1);
        local_range = StorageService.instance.getPrimaryRangesForEndpoint(tablename, LOCAL).iterator().next();
        // (we use REMOTE instead of LOCAL so that the reponses for the validator.complete() get lost)
        int gcBefore = (int)(System.currentTimeMillis()/1000) - store.metadata.getGcGraceSeconds();
        request = new TreeRequest(UUID.randomUUID().toString(), REMOTE, local_range, gcBefore, new CFPair(tablename, cfname));
        // Set a fake session corresponding to this fake request
        ActiveRepairService.instance.submitArtificialRepairSession(request, tablename, cfname);
    }

    /** Drains the anti-entropy stage so no stray tasks leak across tests. */
    @After
    public void teardown() throws Exception
    {
        flushAES();
    }

    /** Writing data then preparing a Validator should split its Merkle tree. */
    @Test
    public void testValidatorPrepare() throws Throwable
    {
        Validator validator;
        // write
        Util.writeColumnFamily(getWriteData());
        // sample
        validator = new Validator(request);
        validator.prepare(store);
        // and confirm that the tree was split
        assertTrue(validator.tree.size() > 1);
    }

    /** Completing a Validator should leave the full-ring range hashed. */
    @Test
    public void testValidatorComplete() throws Throwable
    {
        Validator validator = new Validator(request);
        validator.prepare(store);
        validator.completeTree();
        // confirm that the tree was validated
        Token min = validator.tree.partitioner().getMinimumToken();
        assert validator.tree.hash(new Range<Token>(min, min)) != null;
    }

    /** Adding a row inside the local range should still yield a valid tree. */
    @Test
    public void testValidatorAdd() throws Throwable
    {
        Validator validator = new Validator(request);
        IPartitioner part = validator.tree.partitioner();
        Token mid = part.midpoint(local_range.left, local_range.right);
        validator.prepare(store);
        // add a row
        validator.add(new PrecompactedRow(new DecoratedKey(mid, ByteBufferUtil.bytes("inconceivable!")),
                                          TreeMapBackedSortedColumns.factory.create(Schema.instance.getCFMetaData(tablename, cfname))));
        validator.completeTree();
        // confirm that the tree was validated
        assert validator.tree.hash(local_range) != null;
    }

    /** With rf+1 nodes, every other node should be returned as a neighbor. */
    @Test
    public void testGetNeighborsPlusOne() throws Throwable
    {
        // generate rf+1 nodes, and ensure that all nodes are returned
        Set<InetAddress> expected = addTokens(1 + Table.open(tablename).getReplicationStrategy().getReplicationFactor());
        expected.remove(FBUtilities.getBroadcastAddress());
        Collection<Range<Token>> ranges = StorageService.instance.getLocalRanges(tablename);
        Set<InetAddress> neighbors = new HashSet<InetAddress>();
        for (Range<Token> range : ranges)
        {
            neighbors.addAll(ActiveRepairService.getNeighbors(tablename, range, false));
        }
        assertEquals(expected, neighbors);
    }

    /** With rf*2 nodes, only replicas chosen by the replication strategy are neighbors. */
    @Test
    public void testGetNeighborsTimesTwo() throws Throwable
    {
        TokenMetadata tmd = StorageService.instance.getTokenMetadata();
        // generate rf*2 nodes, and ensure that only neighbors specified by the ARS are returned
        addTokens(2 * Table.open(tablename).getReplicationStrategy().getReplicationFactor());
        AbstractReplicationStrategy ars = Table.open(tablename).getReplicationStrategy();
        Set<InetAddress> expected = new HashSet<InetAddress>();
        for (Range<Token> replicaRange : ars.getAddressRanges().get(FBUtilities.getBroadcastAddress()))
        {
            expected.addAll(ars.getRangeAddresses(tmd.cloneOnlyTokenMap()).get(replicaRange));
        }
        expected.remove(FBUtilities.getBroadcastAddress());
        Collection<Range<Token>> ranges = StorageService.instance.getLocalRanges(tablename);
        Set<InetAddress> neighbors = new HashSet<InetAddress>();
        for (Range<Token> range : ranges)
        {
            neighbors.addAll(ActiveRepairService.getNeighbors(tablename, range, false));
        }
        assertEquals(expected, neighbors);
    }

    /** Like testGetNeighborsPlusOne, but restricted to the local datacenter. */
    @Test
    public void testGetNeighborsPlusOneInLocalDC() throws Throwable
    {
        TokenMetadata tmd = StorageService.instance.getTokenMetadata();
        // generate rf+1 nodes, and ensure that all nodes are returned
        Set<InetAddress> expected = addTokens(1 + Table.open(tablename).getReplicationStrategy().getReplicationFactor());
        expected.remove(FBUtilities.getBroadcastAddress());
        // remove remote endpoints
        TokenMetadata.Topology topology = tmd.cloneOnlyTokenMap().getTopology();
        HashSet<InetAddress> localEndpoints = Sets.newHashSet(topology.getDatacenterEndpoints().get(DatabaseDescriptor.getLocalDataCenter()));
        expected = Sets.intersection(expected, localEndpoints);
        Collection<Range<Token>> ranges = StorageService.instance.getLocalRanges(tablename);
        Set<InetAddress> neighbors = new HashSet<InetAddress>();
        for (Range<Token> range : ranges)
        {
            neighbors.addAll(ActiveRepairService.getNeighbors(tablename, range, true));
        }
        assertEquals(expected, neighbors);
    }

    /** Like testGetNeighborsTimesTwo, but restricted to the local datacenter. */
    @Test
    public void testGetNeighborsTimesTwoInLocalDC() throws Throwable
    {
        TokenMetadata tmd = StorageService.instance.getTokenMetadata();
        // generate rf*2 nodes, and ensure that only neighbors specified by the ARS are returned
        addTokens(2 * Table.open(tablename).getReplicationStrategy().getReplicationFactor());
        AbstractReplicationStrategy ars = Table.open(tablename).getReplicationStrategy();
        Set<InetAddress> expected = new HashSet<InetAddress>();
        for (Range<Token> replicaRange : ars.getAddressRanges().get(FBUtilities.getBroadcastAddress()))
        {
            expected.addAll(ars.getRangeAddresses(tmd.cloneOnlyTokenMap()).get(replicaRange));
        }
        expected.remove(FBUtilities.getBroadcastAddress());
        // remove remote endpoints
        TokenMetadata.Topology topology = tmd.cloneOnlyTokenMap().getTopology();
        HashSet<InetAddress> localEndpoints = Sets.newHashSet(topology.getDatacenterEndpoints().get(DatabaseDescriptor.getLocalDataCenter()));
        expected = Sets.intersection(expected, localEndpoints);
        Collection<Range<Token>> ranges = StorageService.instance.getLocalRanges(tablename);
        Set<InetAddress> neighbors = new HashSet<InetAddress>();
        for (Range<Token> range : ranges)
        {
            neighbors.addAll(ActiveRepairService.getNeighbors(tablename, range, true));
        }
        assertEquals(expected, neighbors);
    }

    /**
     * Builds two identical trees, invalidates one range on the left tree, and
     * verifies the Differencer reports exactly that range.
     */
    @Test
    public void testDifferencer() throws Throwable
    {
        // this next part does some housekeeping so that cleanup in the differencer doesn't error out.
        ActiveRepairService.RepairFuture sess = ActiveRepairService.instance.submitArtificialRepairSession(request, tablename, cfname);
        // generate a tree
        Validator validator = new Validator(request);
        validator.prepare(store);
        validator.completeTree();
        MerkleTree ltree = validator.tree;
        // and a clone
        validator = new Validator(request);
        validator.prepare(store);
        validator.completeTree();
        MerkleTree rtree = validator.tree;
        // change a range in one of the trees
        Token ltoken = StorageService.getPartitioner().midpoint(local_range.left, local_range.right);
        ltree.invalidate(ltoken);
        MerkleTree.TreeRange changed = ltree.get(ltoken);
        changed.hash("non-empty hash!".getBytes());
        Set<Range> interesting = new HashSet<Range>();
        interesting.add(changed);
        // difference the trees
        // note: we reuse the same endpoint which is bogus in theory but fine here
        ActiveRepairService.TreeResponse r1 = new ActiveRepairService.TreeResponse(REMOTE, ltree);
        ActiveRepairService.TreeResponse r2 = new ActiveRepairService.TreeResponse(REMOTE, rtree);
        ActiveRepairService.RepairSession.Differencer diff = sess.session.new Differencer(cfname, r1, r2);
        diff.run();
        // ensure that the changed range was recorded
        assertEquals("Wrong differing ranges", interesting, new HashSet<Range>(diff.differences));
    }

    /**
     * Registers {@code max} fake endpoints 127.0.0.1..127.0.0.max with random
     * tokens and returns the set of addresses added.
     */
    Set<InetAddress> addTokens(int max) throws Throwable
    {
        TokenMetadata tmd = StorageService.instance.getTokenMetadata();
        Set<InetAddress> endpoints = new HashSet<InetAddress>();
        for (int i = 1; i <= max; i++)
        {
            InetAddress endpoint = InetAddress.getByName("127.0.0." + i);
            tmd.updateNormalToken(StorageService.getPartitioner().getRandomToken(), endpoint);
            endpoints.add(endpoint);
        }
        return endpoints;
    }

    /** Blocks until the anti-entropy stage has drained its pending tasks. */
    void flushAES() throws Exception
    {
        final ThreadPoolExecutor stage = StageManager.getStage(Stage.ANTI_ENTROPY);
        final Callable noop = new Callable<Object>()
        {
            public Boolean call()
            {
                return true;
            }
        };
        // send two tasks through the stage: one to follow existing tasks and a second to follow tasks created by
        // those existing tasks: tasks won't recursively create more tasks
        stage.submit(noop).get(5000, TimeUnit.MILLISECONDS);
        stage.submit(noop).get(5000, TimeUnit.MILLISECONDS);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.service.modules.core;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.Path;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.diff.DiffEntry;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Files;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.api.SpecExecutor;
import org.apache.gobblin.runtime.api.TopologySpec;
import org.apache.gobblin.service.modules.flowgraph.DataNode;
import org.apache.gobblin.service.modules.flowgraph.FlowEdge;
import org.apache.gobblin.service.modules.flowgraph.FlowEdgeFactory;
import org.apache.gobblin.service.modules.flowgraph.FlowGraph;
import org.apache.gobblin.service.modules.flowgraph.FlowGraphConfigurationKeys;
import org.apache.gobblin.service.modules.template_catalog.FSFlowCatalog;
import org.apache.gobblin.util.ConfigUtils;
import org.apache.gobblin.util.reflection.GobblinConstructorUtils;
/**
* Service that monitors for changes to {@link org.apache.gobblin.service.modules.flowgraph.FlowGraph} from a git repository.
* The git repository must have an inital commit that has no files since that is used as a base for getting
* the change list.
* The {@link DataNode}s and {@link FlowEdge}s in FlowGraph need to be organized with the following directory structure on git:
* <root_flowGraph_dir>/<nodeName>/<nodeName>.properties
* <root_flowGraph_dir>/<nodeName1>/<nodeName2>/<edgeName>.properties
*/
@Slf4j
public class GitFlowGraphMonitor extends GitMonitoringService {
public static final String GIT_FLOWGRAPH_MONITOR_PREFIX = "gobblin.service.gitFlowGraphMonitor";
// File extension for node/edge files treated as Java properties.
private static final String PROPERTIES_EXTENSIONS = "properties";
// No HOCON extensions are registered (empty string).
private static final String CONF_EXTENSIONS = StringUtils.EMPTY;
private static final String FLOW_EDGE_LABEL_JOINER_CHAR = ":";
private static final String DEFAULT_GIT_FLOWGRAPH_MONITOR_REPO_DIR = "git-flowgraph";
private static final String DEFAULT_GIT_FLOWGRAPH_MONITOR_FLOWGRAPH_DIR = "gobblin-flowgraph";
private static final String DEFAULT_GIT_FLOWGRAPH_MONITOR_BRANCH_NAME = "master";
// Repo-relative path depths: node files sit at depth 3, edge files at depth 4.
private static final int NODE_FILE_DEPTH = 3;
private static final int EDGE_FILE_DEPTH = 4;
// Polling interval in seconds — presumably; TODO confirm against GitMonitoringService.
private static final int DEFAULT_GIT_FLOWGRAPH_MONITOR_POLLING_INTERVAL = 60;
// Fallback config applied beneath the user-supplied monitor config.
private static final Config DEFAULT_FALLBACK =
    ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
        .put(ConfigurationKeys.GIT_MONITOR_REPO_DIR, DEFAULT_GIT_FLOWGRAPH_MONITOR_REPO_DIR)
        .put(ConfigurationKeys.GIT_MONITOR_CONFIG_BASE_DIR, DEFAULT_GIT_FLOWGRAPH_MONITOR_FLOWGRAPH_DIR)
        .put(ConfigurationKeys.GIT_MONITOR_BRANCH_NAME, DEFAULT_GIT_FLOWGRAPH_MONITOR_BRANCH_NAME)
        .put(ConfigurationKeys.GIT_MONITOR_POLLING_INTERVAL, DEFAULT_GIT_FLOWGRAPH_MONITOR_POLLING_INTERVAL)
        .put(JAVA_PROPS_EXTENSIONS, PROPERTIES_EXTENSIONS)
        .put(HOCON_FILE_EXTENSIONS, CONF_EXTENSIONS)
        .put(SHOULD_CHECKPOINT_HASHES, false)
        .build());
// Catalog used to resolve flow templates when building edges; may be absent.
private Optional<FSFlowCatalog> flowCatalog;
// The live FlowGraph this monitor mutates as git changes arrive.
private FlowGraph flowGraph;
private final Map<URI, TopologySpec> topologySpecMap;
private final Config emptyConfig = ConfigFactory.empty();
// Counted down once after the first successful processing of git changes.
private final CountDownLatch initComplete;
/**
 * Creates a monitor scoped to the {@code gobblin.service.gitFlowGraphMonitor}
 * sub-config, layered over {@code DEFAULT_FALLBACK}.
 *
 * @param config          service configuration containing the monitor prefix
 * @param flowCatalog     catalog for flow templates (may be absent)
 * @param graph           the FlowGraph to keep in sync with the git repo
 * @param topologySpecMap topology specs keyed by URI
 * @param initComplete    latch counted down after the first git poll completes
 */
public GitFlowGraphMonitor(Config config, Optional<FSFlowCatalog> flowCatalog, FlowGraph graph, Map<URI, TopologySpec> topologySpecMap, CountDownLatch initComplete) {
  super(config.getConfig(GIT_FLOWGRAPH_MONITOR_PREFIX).withFallback(DEFAULT_FALLBACK));
  this.flowCatalog = flowCatalog;
  this.flowGraph = graph;
  this.topologySpecMap = topologySpecMap;
  this.initComplete = initComplete;
}
/**
 * Determine if the service should poll Git. Current behavior is both master and slave(s) will poll Git for
 * changes to {@link FlowGraph}.
 */
@Override
public boolean shouldPollGit() {
  // isActive is inherited from GitMonitoringService.
  return this.isActive;
}
/**
 * Sort the changes in a commit so that changes to node files appear before changes to edge files. This is done so that
 * node related changes are applied to the FlowGraph before edge related changes. An example where the order matters
 * is the case when a commit adds a new node n2 as well as adds an edge from an existing node n1 to n2. To ensure that the
 * addition of edge n1->n2 is successful, node n2 must exist in the graph and so needs to be added first. For deletions,
 * the order does not matter and ordering the changes in the commit will result in the same FlowGraph state as if the changes
 * were unordered. In other words, deletion of a node deletes all its incident edges from the FlowGraph. So processing an
 * edge deletion later results in a no-op. Note that node and edge files do not change depth in case of modifications.
 *
 * If there are multiple commits between successive polls to Git, the re-ordering of changes across commits should not
 * affect the final state of the FlowGraph. This is because, the order of changes for a given file type (i.e. node or edge)
 * is preserved (Collections.sort is stable).
 */
@Override
void processGitConfigChanges() throws GitAPIException, IOException {
  List<DiffEntry> changes = this.gitRepo.getChanges();
  // Node files have smaller depth than edge files, so an ascending depth sort
  // processes nodes first. Use primitive ints + Integer.compare instead of
  // boxed Integer.compareTo to avoid needless autoboxing.
  Collections.sort(changes, (o1, o2) -> {
    int o1Depth = (o1.getNewPath() != null) ? (new Path(o1.getNewPath())).depth() : (new Path(o1.getOldPath())).depth();
    int o2Depth = (o2.getNewPath() != null) ? (new Path(o2.getNewPath())).depth() : (new Path(o2.getOldPath())).depth();
    return Integer.compare(o1Depth, o2Depth);
  });
  processGitConfigChangesHelper(changes);
  //Decrements the latch count. The countdown latch is initialized to 1. So after the first time the latch is decremented,
  // the following operation should be a no-op.
  this.initComplete.countDown();
}
/**
 * Add an element (i.e., a {@link DataNode}, or a {@link FlowEdge}) to
 * the {@link FlowGraph} for an added, updated or modified node or edge file.
 * Files at any other depth are ignored.
 * @param change the git diff entry describing the added/modified file
 */
@Override
public void addChange(DiffEntry change) {
  int depth = new Path(change.getNewPath()).depth();
  if (depth == NODE_FILE_DEPTH) {
    addDataNode(change);
  } else if (depth == EDGE_FILE_DEPTH) {
    addFlowEdge(change);
  }
}
/**
 * Remove an element (i.e. either a {@link DataNode} or a {@link FlowEdge}) from the {@link FlowGraph} for
 * a renamed or deleted {@link DataNode} or {@link FlowEdge} file.
 * Files at any other depth are ignored.
 * @param change the git diff entry describing the removed/renamed file
 */
@Override
public void removeChange(DiffEntry change) {
  int depth = new Path(change.getOldPath()).depth();
  if (depth == NODE_FILE_DEPTH) {
    removeDataNode(change);
  } else if (depth == EDGE_FILE_DEPTH) {
    removeFlowEdge(change);
  }
}
/**
 * Add a {@link DataNode} to the {@link FlowGraph}. The method uses the {@link FlowGraphConfigurationKeys#DATA_NODE_CLASS} config
 * to instantiate a {@link DataNode} from the node config file. Failures are
 * logged (with full stack trace) and skipped rather than propagated.
 * @param change the git diff entry for the added/modified node file
 */
private void addDataNode(DiffEntry change) {
  if (checkFilePath(change.getNewPath(), NODE_FILE_DEPTH)) {
    Path nodeFilePath = new Path(this.repositoryDir, change.getNewPath());
    try {
      Config config = loadNodeFileWithOverrides(nodeFilePath);
      Class dataNodeClass = Class.forName(ConfigUtils.getString(config, FlowGraphConfigurationKeys.DATA_NODE_CLASS,
          FlowGraphConfigurationKeys.DEFAULT_DATA_NODE_CLASS));
      DataNode dataNode = (DataNode) GobblinConstructorUtils.invokeLongestConstructor(dataNodeClass, config);
      if (!this.flowGraph.addDataNode(dataNode)) {
        log.warn("Could not add DataNode {} to FlowGraph; skipping", dataNode.getId());
      } else {
        log.info("Added Datanode {} to FlowGraph", dataNode.getId());
      }
    } catch (Exception e) {
      // Pass the exception as the last SLF4J argument so the full stack trace
      // is logged; e.getMessage() alone loses the cause chain.
      log.warn("Could not add DataNode defined in {}", change.getNewPath(), e);
    }
  }
}
/**
 * Remove a {@link DataNode} from the {@link FlowGraph}. The method extracts the nodeId of the
 * {@link DataNode} from the node config file and uses it to delete the associated {@link DataNode}.
 * @param change the git diff entry for the removed node file
 */
private void removeDataNode(DiffEntry change) {
  if (!checkFilePath(change.getOldPath(), NODE_FILE_DEPTH)) {
    return;
  }
  Path nodeFilePath = new Path(this.repositoryDir, change.getOldPath());
  Config config = getNodeConfigWithOverrides(ConfigFactory.empty(), nodeFilePath);
  String nodeId = config.getString(FlowGraphConfigurationKeys.DATA_NODE_ID_KEY);
  if (this.flowGraph.deleteDataNode(nodeId)) {
    log.info("Removed DataNode {} from FlowGraph", nodeId);
  } else {
    log.warn("Could not remove DataNode {} from FlowGraph; skipping", nodeId);
  }
}
/**
 * Add a {@link FlowEdge} to the {@link FlowGraph}. The method uses the {@link FlowEdgeFactory} instance
 * provided by the {@link FlowGraph} to build a {@link FlowEdge} from the edge config file.
 * Failures are logged (with full stack trace) and skipped rather than propagated.
 * @param change the git diff entry for the added/modified edge file
 */
private void addFlowEdge(DiffEntry change) {
  if (checkFilePath(change.getNewPath(), EDGE_FILE_DEPTH)) {
    Path edgeFilePath = new Path(this.repositoryDir, change.getNewPath());
    try {
      Config edgeConfig = loadEdgeFileWithOverrides(edgeFilePath);
      List<SpecExecutor> specExecutors = getSpecExecutors(edgeConfig);
      Class flowEdgeFactoryClass = Class.forName(ConfigUtils.getString(edgeConfig, FlowGraphConfigurationKeys.FLOW_EDGE_FACTORY_CLASS,
          FlowGraphConfigurationKeys.DEFAULT_FLOW_EDGE_FACTORY_CLASS));
      FlowEdgeFactory flowEdgeFactory = (FlowEdgeFactory) GobblinConstructorUtils.invokeLongestConstructor(flowEdgeFactoryClass, edgeConfig);
      if (flowCatalog.isPresent()) {
        FlowEdge edge = flowEdgeFactory.createFlowEdge(edgeConfig, flowCatalog.get(), specExecutors);
        if (!this.flowGraph.addFlowEdge(edge)) {
          log.warn("Could not add edge {} to FlowGraph; skipping", edge.getId());
        } else {
          log.info("Added edge {} to FlowGraph", edge.getId());
        }
      } else {
        log.warn("Could not add edge defined in {} to FlowGraph as FlowCatalog is absent", change.getNewPath());
      }
    } catch (Exception e) {
      // Pass the exception as the last SLF4J argument so the full stack trace
      // is logged; e.getMessage() alone loses the cause chain.
      log.warn("Could not add edge defined in {}", change.getNewPath(), e);
    }
  }
}
/**
 * Remove a {@link FlowEdge} from the {@link FlowGraph}. The method uses {@link FlowEdgeFactory}
 * to construct the edgeId of the {@link FlowEdge} from the config file and uses it to delete the associated
 * {@link FlowEdge}. Failures are logged (with full stack trace) and skipped.
 * @param change the git diff entry for the removed edge file
 */
private void removeFlowEdge(DiffEntry change) {
  if (checkFilePath(change.getOldPath(), EDGE_FILE_DEPTH)) {
    Path edgeFilePath = new Path(this.repositoryDir, change.getOldPath());
    try {
      Config config = getEdgeConfigWithOverrides(ConfigFactory.empty(), edgeFilePath);
      String edgeId = config.getString(FlowGraphConfigurationKeys.FLOW_EDGE_ID_KEY);
      if (!this.flowGraph.deleteFlowEdge(edgeId)) {
        log.warn("Could not remove edge {} from FlowGraph; skipping", edgeId);
      } else {
        log.info("Removed edge {} from FlowGraph", edgeId);
      }
    } catch (Exception e) {
      // Pass the exception as the last SLF4J argument so the full stack trace
      // is logged; e.getMessage() alone loses the cause chain.
      log.warn("Could not remove edge defined in {}", edgeFilePath, e);
    }
  }
}
/**
 * check whether the file has the proper naming and hierarchy
 * @param file the relative path from the repo root
 * @param depth expected path depth of the file
 * @return false if the file does not conform
 */
private boolean checkFilePath(String file, int depth) {
  // The file is either a node file or an edge file and needs to be stored at either:
  // flowGraphDir/nodeName/nodeName.properties (if it is a node file), or
  // flowGraphDir/nodeName/nodeName/edgeName.properties (if it is an edge file)
  Path filePath = new Path(file);
  String fileExtension = Files.getFileExtension(filePath.getName());
  boolean conforms = filePath.depth() == depth
      && checkFileLevelRelativeToRoot(filePath, depth)
      && this.javaPropsExtensions.contains(fileExtension);
  if (!conforms) {
    log.warn("Changed file does not conform to directory structure and file name format, skipping: "
        + filePath);
  }
  return conforms;
}
/**
 * Helper to check if a file has proper hierarchy.
 * @param filePath path of the node/edge file
 * @param depth expected depth of the file
 * @return true if the file conforms to the expected hierarchy
 */
private boolean checkFileLevelRelativeToRoot(Path filePath, int depth) {
  if (filePath == null) {
    return false;
  }
  Path path = filePath;
  for (int i = 0; i < depth - 1; i++) {
    path = path.getParent();
    // Guard against paths shallower than the expected depth: getParent() returns
    // null once the root is passed, which would otherwise NPE on getName() below.
    if (path == null) {
      return false;
    }
  }
  return path.getName().equals(folderName);
}
/**
 * Helper that overrides the data.node.id property with a name derived from the node file path.
 * @param nodeConfig node config
 * @param nodeFilePath path of the node file
 * @return config with overridden data.node.id
 */
private Config getNodeConfigWithOverrides(Config nodeConfig, Path nodeFilePath) {
  // By convention the node id is the name of the directory containing the node file.
  return nodeConfig.withValue(FlowGraphConfigurationKeys.DATA_NODE_ID_KEY,
      ConfigValueFactory.fromAnyRef(nodeFilePath.getParent().getName()));
}
/**
 * Helper that overrides the flow edge properties with names derived from the edge file path.
 * @param edgeConfig edge config
 * @param edgeFilePath path of the edge file
 * @return config with overridden edge properties
 */
private Config getEdgeConfigWithOverrides(Config edgeConfig, Path edgeFilePath) {
  // Directory layout is flowGraphDir/<source>/<destination>/<edgeName>.properties.
  String edgeName = Files.getNameWithoutExtension(edgeFilePath.getName());
  String destinationNode = edgeFilePath.getParent().getName();
  String sourceNode = edgeFilePath.getParent().getParent().getName();
  return edgeConfig
      .withValue(FlowGraphConfigurationKeys.FLOW_EDGE_SOURCE_KEY,
          ConfigValueFactory.fromAnyRef(sourceNode))
      .withValue(FlowGraphConfigurationKeys.FLOW_EDGE_DESTINATION_KEY,
          ConfigValueFactory.fromAnyRef(destinationNode))
      .withValue(FlowGraphConfigurationKeys.FLOW_EDGE_ID_KEY,
          ConfigValueFactory.fromAnyRef(getEdgeId(sourceNode, destinationNode, edgeName)));
}
/**
 * This method first retrieves the logical names of all the {@link org.apache.gobblin.runtime.api.SpecExecutor}s
 * for this edge and returns the SpecExecutors from the {@link TopologySpec} map.
 * @param edgeConfig containing the logical names of SpecExecutors for this edge.
 * @return a {@link List<SpecExecutor>}s for this edge.
 * @throws URISyntaxException if a configured SpecExecutor name is not a valid URI.
 */
private List<SpecExecutor> getSpecExecutors(Config edgeConfig)
    throws URISyntaxException {
  //Get the logical names of SpecExecutors where the FlowEdge can be executed.
  List<String> specExecutorNames = ConfigUtils.getStringList(edgeConfig, FlowGraphConfigurationKeys.FLOW_EDGE_SPEC_EXECUTORS_KEY);
  //Load all the SpecExecutor configurations for this FlowEdge from the SpecExecutor Catalog.
  List<SpecExecutor> specExecutors = new ArrayList<>(specExecutorNames.size());
  for (String specExecutorName : specExecutorNames) {
    URI specExecutorUri = new URI(specExecutorName);
    TopologySpec topologySpec = this.topologySpecMap.get(specExecutorUri);
    if (topologySpec == null) {
      // Fail with a descriptive message instead of an NPE when the configured
      // executor is missing from the catalog.
      throw new RuntimeException("No TopologySpec found for SpecExecutor " + specExecutorUri);
    }
    specExecutors.add(topologySpec.getSpecExecutor());
  }
  return specExecutors;
}
/**
 * Load the node file.
 * @param filePath path of the node file relative to the repository root
 * @return the configuration object
 * @throws IOException if the pull file cannot be loaded
 */
private Config loadNodeFileWithOverrides(Path filePath) throws IOException {
  // Load the raw pull file, then stamp the node id derived from the file location.
  return getNodeConfigWithOverrides(
      this.pullFileLoader.loadPullFile(filePath, emptyConfig, false), filePath);
}
/**
 * Load the edge file.
 * @param filePath path of the edge file relative to the repository root
 * @return the configuration object
 * @throws IOException if the pull file cannot be loaded
 */
private Config loadEdgeFileWithOverrides(Path filePath) throws IOException {
  // Load the raw pull file, then stamp source/destination/edge-id derived from the file location.
  return getEdgeConfigWithOverrides(
      this.pullFileLoader.loadPullFile(filePath, emptyConfig, false), filePath);
}
/**
 * Get an edge label from the edge properties.
 * @param source source data node id
 * @param destination destination data node id
 * @param edgeName simple name of the edge (e.g. file name without extension of the edge file)
 * @return a string label identifying the edge
 */
private String getEdgeId(String source, String destination, String edgeName) {
  // Label format: <source><sep><destination><sep><edgeName>.
  Joiner labelJoiner = Joiner.on(FLOW_EDGE_LABEL_JOINER_CHAR);
  return labelJoiner.join(source, destination, edgeName);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.apache.phoenix.util.TestUtil.analyzeTable;
import static org.apache.phoenix.util.TestUtil.getAllSplits;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Properties;
import org.apache.phoenix.query.KeyRange;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.QueryUtil;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.TestUtil;
import org.junit.Before;
import org.junit.Test;
/**
 * Integration tests for queries over tables with multiple column families:
 * column-family disambiguation in projections and filters, default-CF resolution,
 * and per-family guidepost (statistics) behavior.
 */
public class MultiCfQueryExecIT extends ParallelStatsEnabledIT {
    private String fullTableName;

    /** Generates a fresh, unique table name before each test so tests do not interfere. */
    @Before
    public void generateTableNames() throws SQLException {
        String schemaName = TestUtil.DEFAULT_SCHEMA_NAME;
        String tableName = "T_" + generateUniqueName();
        fullTableName = SchemaUtil.getTableName(schemaName, tableName);
    }

    /** Creates the test table with seven column families (a-g). */
    private void createTable(Connection conn) throws SQLException {
        conn.createStatement().execute(
            "create table " + fullTableName + " (id char(15) not null primary key,\n"
                + " a.unique_user_count integer,\n" + " b.unique_org_count integer,\n"
                + " c.db_cpu_utilization decimal(31,10),\n" + " d.transaction_count bigint,\n"
                + " e.cpu_utilization decimal(31,10),\n" + " f.response_time bigint,\n"
                + " g.response_time bigint)");
    }

    /** Inserts the two rows that most tests query against. */
    private void initTableValues(Connection conn) throws Exception {
        // Insert all rows at ts
        PreparedStatement stmt = conn.prepareStatement(
            "upsert into " + fullTableName + "(" + " ID, "
                + " TRANSACTION_COUNT, " + " CPU_UTILIZATION, " + " DB_CPU_UTILIZATION,"
                + " UNIQUE_USER_COUNT," + " F.RESPONSE_TIME," + " G.RESPONSE_TIME)"
                +
                "VALUES (?, ?, ?, ?, ?, ?, ?)");
        stmt.setString(1, "000000000000001");
        stmt.setInt(2, 100);
        stmt.setBigDecimal(3, BigDecimal.valueOf(0.5));
        stmt.setBigDecimal(4, BigDecimal.valueOf(0.2));
        stmt.setInt(5, 1000);
        stmt.setLong(6, 11111);
        stmt.setLong(7, 11112);
        stmt.execute();
        stmt.setString(1, "000000000000002");
        stmt.setInt(2, 200);
        stmt.setBigDecimal(3, BigDecimal.valueOf(2.5));
        stmt.setBigDecimal(4, BigDecimal.valueOf(2.2));
        stmt.setInt(5, 2000);
        stmt.setLong(6, 2222);
        stmt.setLong(7, 22222);
        stmt.execute();
        conn.commit();
    }

    @Test
    public void testConstantCount() throws Exception {
        String query = "SELECT count(1) from " + fullTableName;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(2, rs.getLong(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    @Test
    public void testCFToDisambiguateInSelectOnly1() throws Exception {
        String query = "SELECT F.RESPONSE_TIME,G.RESPONSE_TIME from " + fullTableName + " where ID = '000000000000002'";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(2222, rs.getLong(1));
            assertEquals(22222, rs.getLong(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    @Test
    public void testCFToDisambiguateInSelectOnly2() throws Exception {
        String query = "SELECT F.RESPONSE_TIME,G.RESPONSE_TIME from " + fullTableName + " where TRANSACTION_COUNT = 200";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(2222, rs.getLong(1));
            assertEquals(22222, rs.getLong(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    @Test
    public void testGuidePostsForMultiCFs() throws Exception {
        String query = "SELECT F.RESPONSE_TIME,G.RESPONSE_TIME from " + fullTableName + " where F.RESPONSE_TIME = 2222";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(2222, rs.getLong(1));
            assertEquals(22222, rs.getLong(2));
            assertFalse(rs.next());
            // Use E column family. Since the column family with the empty key value (the first one, A)
            // is always added to the scan, we never really use other guideposts (but this may change).
            List<KeyRange> splits = getAllSplits(conn, fullTableName, "e.cpu_utilization IS NOT NULL", "COUNT(*)");
            // Since the E column family is not populated, it won't have as many splits
            assertEquals(3, splits.size());
            // Same as above for G column family.
            splits = getAllSplits(conn, fullTableName, "g.response_time IS NOT NULL", "COUNT(*)");
            assertEquals(3, splits.size());
        } finally {
            conn.close();
        }
    }

    @Test
    public void testGuidePostsForMultiCFsOverUnevenDistrib() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        // Enter the try block immediately so the connection is closed even if
        // table creation or data loading throws.
        try {
            conn.createStatement().execute(
                "CREATE TABLE " + fullTableName + " (K1 CHAR(1) NOT NULL, "
                    + "K2 VARCHAR NOT NULL, " + "CF1.A INTEGER, "
                    + "CF2.B INTEGER, " + "CF3.C INTEGER, " + "CF4.D INTEGER, " + "CF5.E INTEGER, "
                    + "CF6.F INTEGER " + "CONSTRAINT PK PRIMARY KEY (K1,K2)) SPLIT ON ('B','C','D')");
            // Populate CF1.A densely (100 rows) and CF6.F sparsely (every 10th row).
            for (int i = 0; i < 100; i++) {
                String upsert = "UPSERT INTO " + fullTableName + "(K1,K2,A) VALUES('" + Character.toString((char)('A' + i % 10))
                    + "','" + (i * 10) + "'," + i + ")";
                conn.createStatement().execute(upsert);
                if (i % 10 == 0) {
                    conn.createStatement().execute(
                        "UPSERT INTO " + fullTableName + "(K1,K2,F) VALUES('" + Character.toString((char)('A' + i % 10))
                            + "','" + (i * 10) + "'," + (i * 10) + ")");
                }
            }
            conn.commit();
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement("select count(*) from " + fullTableName + " where f < 400");
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(4, rs.getLong(1));
            assertFalse(rs.next());
            List<KeyRange> splits = getAllSplits(conn, fullTableName, "f < 400", "COUNT(*)");
            // Uses less populated column f
            assertEquals(14, splits.size());
            // Uses more populated column a
            splits = getAllSplits(conn, fullTableName, "a < 80", "COUNT(*)");
            assertEquals(104, splits.size());
        } finally {
            conn.close();
        }
    }

    @Test
    public void testGuidePostsRetrievedForMultiCF() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        // try/finally added so the connection is released even when an assertion fails.
        try {
            PreparedStatement stmt;
            ResultSet rs;
            conn.createStatement().execute(
                "CREATE TABLE " + fullTableName + " ( k INTEGER PRIMARY KEY, A.V1 VARCHAR, B.V2 VARCHAR, C.V3 VARCHAR)");
            stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + " VALUES(?,?,?,?)");
            stmt.setInt(1, 1);
            stmt.setString(2, "A");
            stmt.setString(3, "B");
            stmt.setString(4, "C");
            stmt.execute();
            conn.commit();
            stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + " VALUES(?,?,?,?)");
            stmt.setInt(1, 2);
            stmt.setString(2, "D");
            stmt.setString(3, "E");
            stmt.setString(4, "F");
            stmt.execute();
            conn.commit();
            stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + "(k, A.V1, C.V3) VALUES(?,?,?)");
            stmt.setInt(1, 3);
            stmt.setString(2, "E");
            stmt.setString(3, "X");
            stmt.execute();
            conn.commit();
            stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + "(k, A.V1, C.V3) VALUES(?,?,?)");
            stmt.setInt(1, 4);
            stmt.setString(2, "F");
            stmt.setString(3, "F");
            stmt.execute();
            conn.commit();
            analyzeTable(conn, fullTableName);
            rs = conn.createStatement().executeQuery("SELECT B.V2 FROM " + fullTableName + " WHERE B.V2 = 'B'");
            assertTrue(rs.next());
            assertEquals("B", rs.getString(1));
            List<KeyRange> splits = getAllSplits(conn, fullTableName, "C.V3 = 'X'", "A.V1");
            assertEquals(5, splits.size());
            splits = getAllSplits(conn, fullTableName, "B.V2 = 'B'", "B.V2");
            assertEquals(3, splits.size());
        } finally {
            conn.close();
        }
    }

    @Test
    public void testCFToDisambiguate2() throws Exception {
        String query = "SELECT F.RESPONSE_TIME,G.RESPONSE_TIME from " + fullTableName
            + " where G.RESPONSE_TIME-1 = F.RESPONSE_TIME";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(11111, rs.getLong(1));
            assertEquals(11112, rs.getLong(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    @Test
    public void testDefaultCFToDisambiguate() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        // Enter the try block immediately so the connection is closed even if
        // setup (create/upsert/analyze) throws.
        try {
            createTable(conn);
            initTableValues(conn);
            // Column added without a family goes to the default column family.
            String ddl = "ALTER TABLE " + fullTableName + " ADD response_time BIGINT";
            conn.createStatement().execute(ddl);
            String dml = "upsert into " + fullTableName + "(" + " ID, " + " RESPONSE_TIME)"
                + "VALUES ('000000000000003', 333)";
            conn.createStatement().execute(dml);
            conn.commit();
            analyzeTable(conn, fullTableName);
            String query = "SELECT ID,RESPONSE_TIME from " + fullTableName + " where RESPONSE_TIME = 333";
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals("000000000000003", rs.getString(1));
            assertEquals(333, rs.getLong(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    @Test
    public void testEssentialColumnFamilyForRowKeyFilter() throws Exception {
        String query = "SELECT F.RESPONSE_TIME,G.RESPONSE_TIME from " + fullTableName + " where SUBSTR(ID, 15) = '2'";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            createTable(conn);
            initTableValues(conn);
            analyzeTable(conn, fullTableName);
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(2222, rs.getLong(1));
            assertEquals(22222, rs.getLong(2));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    @Test
    public void testBug3890() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = generateUniqueName();
            String ddl =
                "CREATE TABLE IF NOT EXISTS " + tableName + " (HOST CHAR(2) NOT NULL,"
                    + " DOMAIN VARCHAR NOT NULL," + " FEATURE VARCHAR NOT NULL,"
                    + " DATE DATE NOT NULL," + " USAGE.CORE BIGINT," + " USAGE.DB BIGINT,"
                    + " STATS.ACTIVE_VISITOR INTEGER"
                    + " CONSTRAINT PK PRIMARY KEY (HOST, DOMAIN, FEATURE, DATE))";
            conn.createStatement().execute(ddl);
            String upsert = "UPSERT INTO " + tableName + " VALUES (?, ?, ?, ?, ?, ?, ?)";
            try (PreparedStatement stmt = conn.prepareStatement(upsert)) {
                stmt.setString(1, "H1");
                stmt.setString(2, "Salesforce");
                stmt.setString(3, "F1");
                stmt.setDate(4, new Date(100));
                stmt.setLong(5, 100L);
                stmt.setLong(6, 2000L);
                // ACTIVE_VISITOR is an INTEGER column, so bind with setInt.
                stmt.setInt(7, 10);
                stmt.executeUpdate();
                stmt.setString(1, "H2");
                stmt.setString(2, "Heroku");
                stmt.setString(3, "F1");
                stmt.setDate(4, new Date(100));
                stmt.setLong(5, 100L);
                stmt.setLong(6, 1000L);
                stmt.setInt(7, 10);
                stmt.executeUpdate();
                conn.commit();
            }
            String query =
                "SELECT DOMAIN, AVG(CORE) Average_CPU_Usage, AVG(DB) Average_DB_Usage FROM "
                    + tableName + " GROUP BY DOMAIN ORDER BY DOMAIN DESC";
            ResultSet rs = conn.createStatement().executeQuery(query);
            // Assert the cursor advances rather than silently reading an empty result.
            assertTrue(rs.next());
            assertEquals("Salesforce", rs.getString(1));
            assertEquals(0, Double.compare(100, rs.getDouble(2)));
            assertEquals(0, Double.compare(2000, rs.getDouble(3)));
            assertTrue(rs.next());
            assertEquals("Heroku", rs.getString(1));
            assertEquals(0, Double.compare(100, rs.getDouble(2)));
            assertEquals(0, Double.compare(1000, rs.getDouble(3)));
            assertFalse(rs.next());
            query =
                "SELECT TRUNC(DATE,'DAY') DAY, SUM(CORE) TOTAL_CPU_Usage, MIN(CORE) MIN_CPU_Usage, MAX(CORE) MAX_CPU_Usage"
                    + " FROM " + tableName + " WHERE DOMAIN LIKE 'Salesforce%'"
                    + " GROUP BY TRUNC(DATE,'DAY')";
            rs = conn.createStatement().executeQuery(query);
            assertTrue(rs.next());
            assertEquals(0, rs.getLong(1));
            assertEquals(100L, rs.getLong(2));
            assertEquals(100L, rs.getLong(3));
            assertEquals(100L, rs.getLong(4));
            assertFalse(rs.next());
            query =
                "SELECT HOST, SUM(ACTIVE_VISITOR) TOTAL_ACTIVE_VISITORS FROM " + tableName
                    + " WHERE DB > (CORE * 10)" + " GROUP BY HOST";
            rs = conn.createStatement().executeQuery(query);
            assertTrue(rs.next());
            assertEquals("H1", rs.getString(1));
            assertEquals(10, rs.getInt(2));
            assertFalse(rs.next());
        }
    }

    @Test
    public void testBug4658() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl());
             Statement stmt = conn.createStatement()) {
            String tableName = generateUniqueName();
            stmt.execute("CREATE TABLE " + tableName + " ("
                + "COL1 VARCHAR NOT NULL,"
                + "COL2 VARCHAR NOT NULL,"
                + "COL3 VARCHAR,"
                + "FAM.COL4 VARCHAR,"
                + "CONSTRAINT TRADE_EVENT_PK PRIMARY KEY (COL1, COL2))");
            stmt.execute("UPSERT INTO " + tableName + " (COL1, COL2) values ('111', 'AAA')");
            stmt.execute("UPSERT INTO " + tableName + " (COL1, COL2) values ('222', 'AAA')");
            conn.commit();
            try (ResultSet rs = stmt.executeQuery(
                "SELECT * FROM " + tableName + " WHERE COL2 = 'AAA' ORDER BY COL1 DESC")) {
                assertTrue(rs.next());
                assertEquals(rs.getString("COL1"), "222");
                assertEquals(rs.getString("COL2"), "AAA");
                assertTrue(rs.next());
                assertEquals(rs.getString("COL1"), "111");
                assertEquals(rs.getString("COL2"), "AAA");
                assertFalse(rs.next());
            }
            // Tests for FORWARD_SCAN hint
            String query = "SELECT /*+ FORWARD_SCAN */ * FROM " + tableName + " WHERE COL2 = 'AAA' ORDER BY COL1 DESC";
            try (ResultSet rs = stmt.executeQuery("EXPLAIN " + query)) {
                String explainPlan = QueryUtil.getExplainPlan(rs);
                assertFalse(explainPlan.contains("REVERSE"));
            }
            try (ResultSet rs = stmt.executeQuery(query)) {
                assertTrue(rs.next());
                assertEquals(rs.getString("COL1"), "222");
                assertEquals(rs.getString("COL2"), "AAA");
                assertTrue(rs.next());
                assertEquals(rs.getString("COL1"), "111");
                assertEquals(rs.getString("COL2"), "AAA");
                assertFalse(rs.next());
            }
        }
    }
}
| |
/*
Copyright 2011-2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.Database.PostgreSQL.Savers;
import com.google.common.base.Preconditions;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Iterables;
import com.google.security.zynamics.binnavi.CUtilityFunctions;
import com.google.security.zynamics.binnavi.Database.AbstractSQLProvider;
import com.google.security.zynamics.binnavi.Database.CConnection;
import com.google.security.zynamics.binnavi.Database.CTableNames;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException;
import com.google.security.zynamics.binnavi.Database.Interfaces.SQLProvider;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Functions.PostgreSQLInstructionFunctions;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Functions.PostgreSQLNodeFunctions;
import com.google.security.zynamics.binnavi.Exceptions.MaybeNullException;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.CommentDialogs.CComment;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.CommentDialogs.Interfaces.IComment;
import com.google.security.zynamics.binnavi.Tagging.CTag;
import com.google.security.zynamics.binnavi.disassembly.CCodeNode;
import com.google.security.zynamics.binnavi.disassembly.CFunctionNode;
import com.google.security.zynamics.binnavi.disassembly.CTextNode;
import com.google.security.zynamics.binnavi.disassembly.INaviCodeNode;
import com.google.security.zynamics.binnavi.disassembly.INaviFunction;
import com.google.security.zynamics.binnavi.disassembly.INaviFunctionNode;
import com.google.security.zynamics.binnavi.disassembly.INaviGroupNode;
import com.google.security.zynamics.binnavi.disassembly.INaviInstruction;
import com.google.security.zynamics.binnavi.disassembly.INaviTextNode;
import com.google.security.zynamics.binnavi.disassembly.INaviViewNode;
import com.google.security.zynamics.zylib.general.Pair;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
public final class PostgreSQLNodeSaver {
private static String CODE = "code";
private static String FUNCTION = "function";
private static String GROUP = "group";
private static String TEXT = "text";
  /**
   * Private constructor: this class only provides stateless static helper methods
   * and must not be instantiated.
   */
  private PostgreSQLNodeSaver() {
  }
/**
 * Saves the nodes to the nodes table. As a side effect, this function also fills index lists that
 * store the indices into the nodes list for all node types. TODO: This method should probably be
 * split into two methods.
 *
 * @param provider Provides the connection to the database.
 * @param newViewId ID of the new view that is being saved.
 * @param nodes The nodes to save.
 * @param functionNodeIndices Index into the nodes list that identifies the function nodes.
 * @param codeNodeIndices Index into the nodes list that identifies the code nodes.
 * @param textNodeIndices Index into the nodes list that identifies the text nodes.
 * @param groupNodeIndices Index into the nodes list that identifies the group nodes.
 * @param groupNodeMap Maps between node IDs and group node objects.
 * @return The ID of the first node saved to the database.
 * @throws SQLException Thrown if saving the nodes failed.
 */
private static int saveNodes(final AbstractSQLProvider provider, final int newViewId,
    final List<INaviViewNode> nodes, final List<Integer> functionNodeIndices,
    final List<Integer> codeNodeIndices, final List<Integer> textNodeIndices,
    final List<Integer> groupNodeIndices, final BiMap<Integer, INaviGroupNode> groupNodeMap)
    throws SQLException {
  // Batch-insert every node; RETURN_GENERATED_KEYS gives access to the database IDs.
  final String query =
      "INSERT INTO " + CTableNames.NODES_TABLE
          + "( view_id, parent_id, type, x, y, width, height, color, bordercolor, "
          + " selected, visible) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
  final PreparedStatement preparedStatement =
      provider.getConnection().getConnection()
          .prepareStatement(query, java.sql.Statement.RETURN_GENERATED_KEYS);
  int counter = 0;
  for (final INaviViewNode node : nodes) {
    // Record the position of each node in the input list, bucketed by concrete type,
    // so callers can later persist the type-specific data for each node kind.
    String nodeType = null;
    if (node instanceof CCodeNode) {
      nodeType = CODE;
      codeNodeIndices.add(counter);
    } else if (node instanceof CFunctionNode) {
      nodeType = FUNCTION;
      functionNodeIndices.add(counter);
    } else if (node instanceof INaviGroupNode) {
      nodeType = GROUP;
      groupNodeIndices.add(counter);
      groupNodeMap.put(counter, (INaviGroupNode) node);
    } else if (node instanceof CTextNode) {
      nodeType = TEXT;
      textNodeIndices.add(counter);
    }
    counter++;
    preparedStatement.setInt(1, newViewId);
    // parent_id is filled in later; group membership is saved separately.
    preparedStatement.setNull(2, Types.INTEGER);
    // The type column is a PostgreSQL enum, hence Types.OTHER.
    preparedStatement.setObject(3, nodeType, Types.OTHER);
    preparedStatement.setDouble(4, node.getX());
    preparedStatement.setDouble(5, node.getY());
    preparedStatement.setDouble(6, node.getWidth());
    preparedStatement.setDouble(7, node.getHeight());
    preparedStatement.setInt(8, node.getColor().getRGB());
    preparedStatement.setInt(9, node.getBorderColor().getRGB());
    preparedStatement.setBoolean(10, node.isSelected());
    preparedStatement.setBoolean(11, node.isVisible());
    preparedStatement.addBatch();
  }
  preparedStatement.executeBatch();
  final ResultSet resultSet = preparedStatement.getGeneratedKeys();
  int lastId = 0;
  try {
    // Only the first generated key is read; callers derive the ID of node i as
    // firstNode + i (assumes the batch receives consecutive keys — TODO confirm).
    while (resultSet.next()) {
      if (resultSet.isFirst()) {
        lastId = resultSet.getInt(1);
        break;
      }
    }
  } finally {
    // NOTE(review): the statement is closed before its result set; closing an
    // already-closed ResultSet is a no-op per JDBC, but the reverse order would be cleaner.
    preparedStatement.close();
    resultSet.close();
  }
  return lastId;
}
/**
 * Validates the common arguments of the node-saving methods.
 *
 * @param provider Provides the connection to the database; must not be null.
 * @param newViewId ID of the new view that is being saved; must be positive.
 * @param nodes The nodes to save; must not be null.
 */
protected static void checkArguments(final AbstractSQLProvider provider, final int newViewId,
    final List<INaviViewNode> nodes) {
  Preconditions.checkNotNull(provider, "IE01992: Provider argument can not be null");
  // Message typo fixed: "then" -> "than".
  Preconditions.checkArgument(newViewId > 0,
      "IE01993: New View ID argument must be greater than zero");
  Preconditions.checkNotNull(nodes, "IE01994: Nodes argument can not be null");
}
/**
 * Saves the mapping between code nodes and their instructions to the database.
 *
 * @param provider The provider used to access the database.
 * @param nodes The nodes to save.
 * @param firstNode The database index of the first node.
 * @param codeNodeIndices Index into the nodes list that identifies the code nodes.
 * @return The (code node, instruction) pairs whose local comments have not yet been
 *         saved; empty if there are no nodes to process.
 * @throws SQLException Thrown if saving the code node instructions failed.
 */
protected static ArrayList<Pair<INaviCodeNode, INaviInstruction>> saveCodeNodeInstructions(
    final SQLProvider provider, final List<INaviViewNode> nodes, final int firstNode,
    final List<Integer> codeNodeIndices) throws SQLException {
  final ArrayList<Pair<INaviCodeNode, INaviInstruction>> instructionsWithUnsavedLocalComments =
      new ArrayList<Pair<INaviCodeNode, INaviInstruction>>();
  if (nodes.isEmpty()) {
    // Previously returned null here; an empty list is safer for callers that iterate the result.
    return instructionsWithUnsavedLocalComments;
  }
  // First persist any instructions that are not yet stored in the database.
  final Set<INaviInstruction> unsavedInstructions = new HashSet<INaviInstruction>();
  for (final int index : codeNodeIndices) {
    final CCodeNode node = (CCodeNode) nodes.get(index);
    final Iterable<INaviInstruction> instructions = node.getInstructions();
    for (final INaviInstruction instruction : instructions) {
      if (!(instruction.isStored())) {
        unsavedInstructions.add(instruction);
      }
    }
  }
  PostgreSQLInstructionFunctions.createInstructions(provider, unsavedInstructions);
  final String query =
      "INSERT INTO " + CTableNames.CODENODE_INSTRUCTIONS_TABLE
          + " (module_id, node_id, position, address, comment_id) VALUES (?, ?, ?, ?, ?)";
  final PreparedStatement preparedStatement =
      provider.getConnection().getConnection().prepareStatement(query);
  try {
    for (final Integer index : codeNodeIndices) {
      final INaviCodeNode codeNode = (INaviCodeNode) nodes.get(index);
      int position = 0;
      for (final INaviInstruction instruction : codeNode.getInstructions()) {
        final List<IComment> comments =
            codeNode.getComments().getLocalInstructionComment(instruction);
        // The newest comment determines the stored comment id; a non-empty comment
        // list with a null id means the comment still needs to be written.
        final Integer commentId =
            comments == null ? null : comments.size() == 0 ? null : Iterables.getLast(comments)
                .getId();
        if ((comments != null) && (comments.size() != 0) && (commentId == null)) {
          instructionsWithUnsavedLocalComments.add(new Pair<INaviCodeNode, INaviInstruction>(
              codeNode, instruction));
        }
        final int moduleId = instruction.getModule().getConfiguration().getId();
        preparedStatement.setInt(1, moduleId);
        // Database node IDs are assigned consecutively starting at firstNode.
        preparedStatement.setInt(2, firstNode + index);
        preparedStatement.setInt(3, position);
        preparedStatement.setObject(4, instruction.getAddress().toBigInteger(), Types.BIGINT);
        if (commentId == null) {
          preparedStatement.setNull(5, Types.INTEGER);
        } else {
          preparedStatement.setInt(5, commentId);
        }
        position++;
        preparedStatement.addBatch();
      }
    }
    preparedStatement.executeBatch();
  } finally {
    preparedStatement.close();
  }
  return instructionsWithUnsavedLocalComments;
}
/**
 * Saves the code nodes to the database.
 *
 * @param provider The connection to the database.
 * @param nodes The nodes to save.
 * @param firstNode The database index of the first node.
 * @param codeNodeIndices Index into the nodes list that identifies the code nodes.
 *
 * @throws SQLException Thrown if saving the code node instructions failed.
 */
protected static void saveCodeNodes(final SQLProvider provider, final List<INaviViewNode> nodes,
    final int firstNode, final List<Integer> codeNodeIndices) throws SQLException {
  if (!codeNodeIndices.isEmpty()) {
    // Persist the node->instruction mapping first; it also reports which
    // instruction comments still need to be written.
    final List<Pair<INaviCodeNode, INaviInstruction>> instructionsWithUnsavedLocalComments =
        PostgreSQLNodeSaver.saveCodeNodeInstructions(provider, nodes, firstNode, codeNodeIndices);
    final String query =
        "INSERT INTO " + CTableNames.CODE_NODES_TABLE
            + "(module_id, node_id, parent_function, comment_id) VALUES (?, ?, ?, ?)";
    final ArrayList<INaviCodeNode> codeNodesWithUnsavedComments = new ArrayList<INaviCodeNode>();
    final PreparedStatement preparedStatement =
        provider.getConnection().getConnection().prepareStatement(query);
    try {
      for (final int index : codeNodeIndices) {
        final INaviCodeNode codeNode = (INaviCodeNode) nodes.get(index);
        // Database node IDs are assigned consecutively starting at firstNode.
        codeNode.setId(firstNode + index);
        INaviFunction function = null;
        try {
          function = codeNode.getParentFunction();
        } catch (final MaybeNullException e) {
          // Code node has no parent function; parent_function is stored as NULL below.
        }
        final int moduleId =
            Iterables.getLast(codeNode.getInstructions()).getModule().getConfiguration().getId();
        final List<IComment> comment = codeNode.getComments().getLocalCodeNodeComment();
        // The newest comment determines the stored comment id; a non-empty comment
        // list with a null id means the comment still needs to be written.
        final Integer commentId =
            comment == null ? null : comment.size() == 0 ? null : Iterables.getLast(comment)
                .getId();
        if ((comment != null) && (comment.size() != 0) && (commentId == null)) {
          codeNodesWithUnsavedComments.add(codeNode);
        }
        preparedStatement.setInt(1, moduleId);
        preparedStatement.setInt(2, firstNode + index);
        if (function == null) {
          preparedStatement.setNull(3, Types.BIGINT);
        } else {
          preparedStatement.setObject(3, function.getAddress().toBigInteger(), Types.BIGINT);
        }
        if (commentId == null) {
          preparedStatement.setNull(4, Types.INTEGER);
        } else {
          preparedStatement.setInt(4, commentId);
        }
        preparedStatement.addBatch();
      }
      preparedStatement.executeBatch();
    } finally {
      preparedStatement.close();
    }
    // TODO (timkornau): this is not the best solution and is more a test then a full fledged
    // implementation.
    // Write the pending local code node comments and re-initialize the nodes with the
    // freshly assigned comment ids.
    for (final INaviCodeNode codeNode : codeNodesWithUnsavedComments) {
      final ArrayList<IComment> codeNodecomments = new ArrayList<IComment>();
      for (final IComment comment : codeNode.getComments().getLocalCodeNodeComment()) {
        try {
          final Integer commentId =
              PostgreSQLNodeFunctions.appendLocalCodeNodeComment(provider, codeNode,
                  comment.getComment(), comment.getUser().getUserId());
          final IComment newComment =
              new CComment(commentId, comment.getUser(), comment.getParent(),
                  comment.getComment());
          codeNodecomments.add(newComment);
        } catch (final CouldntSaveDataException exception) {
          CUtilityFunctions.logException(exception);
        }
      }
      codeNode.getComments().initializeLocalCodeNodeComment(codeNodecomments);
    }
    // TODO (timkornau): this is not the best solution and is more a test then a full fledged
    // implementation.
    // Same as above, but for the pending local instruction comments.
    for (final Pair<INaviCodeNode, INaviInstruction> pair : instructionsWithUnsavedLocalComments) {
      final ArrayList<IComment> localInstructionComments = new ArrayList<IComment>();
      for (final IComment comment : pair.first().getComments()
          .getLocalInstructionComment(pair.second())) {
        try {
          final int commentId =
              PostgreSQLInstructionFunctions.appendLocalInstructionComment(provider,
                  pair.first(), pair.second(), comment.getComment(), comment.getUser()
                      .getUserId());
          final IComment newComment =
              new CComment(commentId, comment.getUser(), comment.getParent(),
                  comment.getComment());
          localInstructionComments.add(newComment);
        } catch (final CouldntSaveDataException exception) {
          CUtilityFunctions.logException(exception);
        }
      }
      pair.first().getComments()
          .initializeLocalInstructionComment(pair.second(), localInstructionComments);
    }
  }
}
/**
* Saves the function nodes to the database.
*
* @param provider The connection to the database.
* @param nodes The nodes to save.
* @param firstNode The database index of the first node.
* @param functionNodeIndices Index into the nodes list that identifies the function nodes.
*
* @throws SQLException Thrown if saving the function nodes failed.
*/
protected static void saveFunctionNodes(final SQLProvider provider,
    final List<INaviViewNode> nodes, final int firstNode, final List<Integer> functionNodeIndices)
    throws SQLException {
  if (functionNodeIndices.isEmpty()) {
    return;
  }
  final String query =
      "INSERT INTO " + CTableNames.FUNCTION_NODES_TABLE
          + "(module_id, node_id, function, comment_id) VALUES (?, ?, ?, ?)";
  // Function nodes whose local comments have no database IDs yet; handled after the insert.
  final ArrayList<INaviFunctionNode> functionNodesWithUnsavedComments =
      new ArrayList<INaviFunctionNode>();
  final PreparedStatement preparedStatement =
      provider.getConnection().getConnection().prepareStatement(query);
  try {
    for (final int index : functionNodeIndices) {
      final CFunctionNode node = (CFunctionNode) nodes.get(index);
      final INaviFunction function = node.getFunction();
      // ID of the last local comment, or null when there are no comments or the last
      // comment has not been written to the database yet.
      final List<IComment> comments = node.getLocalFunctionComment();
      final Integer commentId =
          comments == null ? null : comments.size() == 0 ? null : Iterables.getLast(comments)
              .getId();
      if ((comments != null) && (comments.size() != 0) && (commentId == null)) {
        functionNodesWithUnsavedComments.add(node);
      }
      preparedStatement.setInt(1, function.getModule().getConfiguration().getId());
      // Node IDs are consecutive database IDs assigned in list order starting at firstNode.
      preparedStatement.setInt(2, firstNode + index);
      preparedStatement.setObject(3, function.getAddress().toBigInteger(), Types.BIGINT);
      if (commentId == null) {
        preparedStatement.setNull(4, Types.INTEGER);
      } else {
        preparedStatement.setInt(4, commentId);
      }
      preparedStatement.addBatch();
    }
    preparedStatement.executeBatch();
  } finally {
    preparedStatement.close();
  }
  // Store the previously unsaved comments and re-initialize the nodes with the persisted
  // comment objects (which now carry database IDs).
  for (final INaviFunctionNode functionNode : functionNodesWithUnsavedComments) {
    final ArrayList<IComment> functionNodeComments = new ArrayList<IComment>();
    for (final IComment comment : functionNode.getLocalFunctionComment()) {
      try {
        final Integer commentId =
            provider.appendFunctionNodeComment(functionNode, comment.getComment(), comment
                .getUser().getUserId());
        final IComment newComment =
            new CComment(commentId, comment.getUser(), comment.getParent(), comment.getComment());
        functionNodeComments.add(newComment);
      } catch (final CouldntSaveDataException exception) {
        // Best effort: a failed comment save is logged but does not abort the node save.
        CUtilityFunctions.logException(exception);
      }
    }
    functionNode.initializeLocalFunctionComment(functionNodeComments);
  }
}
/**
* Saves the group nodes to the database.
*
* @param provider The connection to the database.
* @param nodes The nodes to save.
* @param firstNode The database index of the first node.
* @param groupNodeIndices Index into the nodes list that identifies the group nodes.
*
* @throws SQLException Thrown if saving the group nodes failed.
*/
protected static void saveGroupNodes(final SQLProvider provider, final List<INaviViewNode> nodes,
    final int firstNode, final List<Integer> groupNodeIndices) throws SQLException {
  Preconditions.checkNotNull(provider, "IE02525: connection argument can not be null");
  Preconditions.checkNotNull(nodes, "IE02526: nodes argument can not be null");
  Preconditions
      .checkNotNull(groupNodeIndices, "Error: groupNodeIndices argument can not be null");
  if (!groupNodeIndices.isEmpty()) {
    final String query =
        "INSERT INTO " + CTableNames.GROUP_NODES_TABLE
            + "(node_id, collapsed, comment_id) VALUES (?, ?, ?)";
    final PreparedStatement preparedStatement =
        provider.getConnection().getConnection().prepareStatement(query);
    // Group nodes whose comments have no database IDs yet; handled after the insert.
    final List<INaviGroupNode> groupNodesWithUnsavedComments = new ArrayList<INaviGroupNode>();
    try {
      for (final Integer index : groupNodeIndices) {
        final INaviGroupNode node = (INaviGroupNode) nodes.get(index);
        // Node IDs are consecutive database IDs assigned in list order starting at firstNode.
        preparedStatement.setInt(1, firstNode + index);
        preparedStatement.setBoolean(2, node.isCollapsed());
        // ID of the last comment, or null when there are no comments or the last comment
        // has not been written to the database yet.
        final List<IComment> comment = node.getComments();
        final Integer commentId =
            comment == null ? null : comment.size() == 0 ? null : Iterables.getLast(comment)
                .getId();
        if ((comment != null) && (comment.size() != 0) && (commentId == null)) {
          groupNodesWithUnsavedComments.add(node);
        }
        if (commentId == null) {
          preparedStatement.setNull(3, Types.INTEGER);
        } else {
          preparedStatement.setInt(3, commentId);
        }
        preparedStatement.addBatch();
      }
      preparedStatement.executeBatch();
    } finally {
      preparedStatement.close();
    }
    // Store the previously unsaved comments and re-initialize each group node with the
    // persisted comment objects (which now carry database IDs).
    // TODO (timkornau): this can work better.
    for (final INaviGroupNode groupNode : groupNodesWithUnsavedComments) {
      final ArrayList<IComment> groupNodeComments = new ArrayList<IComment>();
      for (final IComment comment : groupNode.getComments()) {
        try {
          final Integer commentId =
              provider.appendGroupNodeComment(groupNode, comment.getComment(), comment.getUser()
                  .getUserId());
          final IComment newComment =
              new CComment(commentId, comment.getUser(), comment.getParent(),
                  comment.getComment());
          groupNodeComments.add(newComment);
        } catch (final CouldntSaveDataException exception) {
          // Best effort: a failed comment save is logged but does not abort the node save.
          CUtilityFunctions.logException(exception);
        }
      }
      groupNode.initializeComment(groupNodeComments);
    }
  }
}
/**
* Stores parent groups for all nodes that have an assigned parent group.
*
* @param connection Provides the connection to the database.
* @param nodes All nodes that were saved.
* @param firstNode The database index of the first node.
* @param groupNodeMap Maps between node IDs and parent group node objects.
*
* @throws SQLException Thrown if the parent groups could not be assigned.
*/
/**
 * Stores parent groups for all nodes that have an assigned parent group.
 *
 * @param connection Provides the connection to the database.
 * @param nodes All nodes that were saved.
 * @param firstNode The database index of the first node.
 * @param groupNodeMap Maps between node IDs and parent group node objects.
 *
 * @throws SQLException Thrown if the parent groups could not be assigned.
 */
protected static void saveParentGroups(final CConnection connection,
    final List<INaviViewNode> nodes, final int firstNode,
    final BiMap<Integer, INaviGroupNode> groupNodeMap) throws SQLException {
  for (int index = 0; index < nodes.size(); index++) {
    final INaviViewNode node = nodes.get(index);
    if (node.getParentGroup() == null) {
      continue;
    }
    // Map the parent group object back to its list position to derive its database ID;
    // node IDs are consecutive starting at firstNode.
    final int parentId = firstNode + groupNodeMap.inverse().get(node.getParentGroup());
    final int childId = firstNode + index;
    connection.executeUpdate(String.format("UPDATE " + CTableNames.NODES_TABLE
        + " set parent_id = %d WHERE id = %d", parentId, childId), true);
  }
}
/**
*
* TODO (timkornau): this code here has serious issues and is in no way anything that we want to
* keep.
*
* Saves the node tags to the database.
*
* @param connection The connection to the database.
* @param nodes The nodes to save.
* @param firstNode Database index of the first node.
*
* @throws SQLException Thrown if saving the tags failed.
*/
protected static void saveTags(final CConnection connection, final List<INaviViewNode> nodes,
    final int firstNode) throws SQLException {
  final String deleteStatement =
      "DELETE FROM " + CTableNames.TAGGED_NODES_TABLE + " WHERE node_id IN (%s)";
  final String insertStatement = "INSERT INTO " + CTableNames.TAGGED_NODES_TABLE + " VALUES %s ";
  // Build the comma-separated list of all saved node IDs whose old tag mappings must be
  // removed. Node IDs are consecutive database IDs starting at firstNode.
  //
  // BUG FIX: the previous implementation 'continue'd past the counter increment on the first
  // iteration, so the first node ID appeared twice in the list while the last node's ID was
  // missing entirely - stale tag rows of the last node were never deleted.
  final StringBuilder range = new StringBuilder();
  for (int i = 0; i < nodes.size(); i++) {
    if (i > 0) {
      range.append(", ");
    }
    range.append(firstNode + i);
  }
  if (range.length() != 0) {
    connection.executeUpdate(String.format(deleteStatement, range.toString()), true);
  }
  // Build "(nodeId, tagId)" tuples for every tag of every node and insert them in a single
  // statement. Nodes without tags contribute nothing but still consume one node ID.
  int counter = firstNode;
  final StringBuilder insert = new StringBuilder();
  boolean isFirst = true;
  for (final INaviViewNode node : nodes) {
    final Iterator<CTag> it = node.getTagsIterator();
    while (it.hasNext()) {
      final CTag tag = it.next();
      insert.append(isFirst ? "" : ",");
      insert.append('(');
      insert.append(counter);
      insert.append(", ");
      insert.append(tag.getId());
      insert.append(')');
      isFirst = false;
    }
    ++counter;
  }
  if (insert.length() != 0) {
    connection.executeUpdate(String.format(insertStatement, insert.toString()), true);
  }
}
/**
* Saves the text nodes to the database.
*
* @param provider The connection to the database.
* @param nodes The nodes to save.
* @param firstNode The database index of the first node.
* @param textNodeIndices Index into the nodes list that identifies the text nodes.
*
* @throws SQLException Thrown if saving the text nodes failed.
*/
protected static void saveTextNodes(final SQLProvider provider, final List<INaviViewNode> nodes,
    final int firstNode, final List<Integer> textNodeIndices) throws SQLException {
  Preconditions.checkNotNull(provider, "IE02527: provider argument can not be null");
  Preconditions.checkNotNull(nodes, "IE02528: nodes argument can not be null");
  Preconditions
      .checkNotNull(textNodeIndices, "IE02529: textNodeIndices argument can not be null");
  if (!textNodeIndices.isEmpty()) {
    final String query =
        "INSERT INTO " + CTableNames.TEXT_NODES_TABLE + "(node_id, comment_id) VALUES (?, ?)";
    final PreparedStatement preparedStatement =
        provider.getConnection().getConnection().prepareStatement(query);
    // Text nodes whose comments have no database IDs yet; handled after the insert.
    final List<INaviTextNode> textNodesWithUnsavedComments = new ArrayList<INaviTextNode>();
    try {
      for (final Integer index : textNodeIndices) {
        final INaviTextNode node = (INaviTextNode) nodes.get(index);
        // ID of the last comment, or null when there are no comments or the last comment
        // has not been written to the database yet.
        final List<IComment> comment = node.getComments();
        final Integer commentId =
            comment == null ? null : comment.size() == 0 ? null : Iterables.getLast(comment)
                .getId();
        if ((comment != null) && (comment.size() != 0) && (commentId == null)) {
          textNodesWithUnsavedComments.add(node);
        }
        // Node IDs are consecutive database IDs assigned in list order starting at firstNode.
        preparedStatement.setInt(1, firstNode + index);
        if (commentId == null) {
          preparedStatement.setNull(2, Types.INTEGER);
        } else {
          preparedStatement.setInt(2, commentId);
        }
        preparedStatement.addBatch();
      }
      preparedStatement.executeBatch();
    } finally {
      preparedStatement.close();
    }
    // Store the previously unsaved comments and re-initialize each text node with the
    // persisted comment objects (which now carry database IDs).
    // TODO (timkornau): this needs to be reworked once I have thought of a better idea for the
    // unsaved comments to be stored. Possibly one can handle all of those in one query.
    for (final INaviTextNode textNode : textNodesWithUnsavedComments) {
      final ArrayList<IComment> textNodeComments = new ArrayList<IComment>();
      for (final IComment comment : textNode.getComments()) {
        try {
          final Integer commentId =
              provider.appendTextNodeComment(textNode, comment.getComment(), comment.getUser()
                  .getUserId());
          final IComment newComment =
              new CComment(commentId, comment.getUser(), comment.getParent(),
                  comment.getComment());
          textNodeComments.add(newComment);
        } catch (final CouldntSaveDataException exception) {
          // Best effort: a failed comment save is logged but does not abort the node save.
          CUtilityFunctions.logException(exception);
        }
      }
      textNode.initializeComment(textNodeComments);
    }
  }
}
/**
* Sorts the group nodes of a view in a way that makes sure that group nodes inside other group
* nodes come later in the list.
*
* @param groupNodeIndices Database indices of the group nodes to sort.
* @param groupNodeMap Maps between group node database indices and objects.
*
* @return The sorted list of group node indices.
*/
/**
 * Sorts the group nodes of a view in a way that makes sure that group nodes inside other group
 * nodes come later in the list.
 *
 * @param groupNodeIndices Database indices of the group nodes to sort.
 * @param groupNodeMap Maps between group node database indices and objects.
 *
 * @return The sorted list of group node indices.
 */
protected static List<Integer> sortGroupNodes(final List<Integer> groupNodeIndices,
    final BiMap<Integer, INaviGroupNode> groupNodeMap) {
  final List<Integer> sortedList = new ArrayList<Integer>();
  final List<Integer> remaining = new ArrayList<Integer>(groupNodeIndices);
  final Set<INaviGroupNode> alreadySorted = new HashSet<INaviGroupNode>();
  // Repeatedly scan for the first node whose parent group (if any) has already been emitted
  // and move it to the result. This guarantees parents precede their nested group nodes.
  while (!remaining.isEmpty()) {
    final Iterator<Integer> it = remaining.iterator();
    while (it.hasNext()) {
      final Integer id = it.next();
      final INaviGroupNode node = groupNodeMap.get(id);
      final INaviGroupNode parent = node.getParentGroup();
      if ((parent == null) || alreadySorted.contains(parent)) {
        alreadySorted.add(node);
        sortedList.add(id);
        it.remove();
        break;
      }
    }
  }
  return sortedList;
}
/**
* Updates the node IDs of the nodes that were saved to the database.
*
* @param nodes The nodes whose IDs are updated.
* @param firstNode The new ID of the first node.
*/
/**
 * Updates the node IDs of the nodes that were saved to the database.
 *
 * @param nodes The nodes whose IDs are updated.
 * @param firstNode The new ID of the first node.
 */
protected static void updateNodeIds(final List<INaviViewNode> nodes, final int firstNode) {
  // Nodes receive consecutive database IDs in list order, starting at firstNode.
  for (int index = 0; index < nodes.size(); index++) {
    nodes.get(index).setId(firstNode + index);
  }
}
/**
* Writes the nodes of a view to the database.
*
* @param provider The connection to the database.
* @param newViewId The ID of the view the nodes belong to.
* @param nodes The nodes to save.
* @throws SQLException Thrown if saving the nodes failed.
*/
public static void writeNodes(final AbstractSQLProvider provider, final int newViewId,
    final List<INaviViewNode> nodes) throws SQLException {
  Preconditions.checkNotNull(provider, "IE01992: Provider argument can not be null");
  Preconditions.checkArgument(newViewId > 0,
      "IE01993: New View ID argument must be greater then zero");
  Preconditions.checkNotNull(nodes, "IE01994: Nodes argument can not be null");
  if (nodes.isEmpty()) {
    return;
  }
  // saveNodes fills these with the list positions of each node type and returns the database
  // ID assigned to the first node; all later steps derive node IDs as firstNode + index.
  final ArrayList<Integer> functionNodeIndices = new ArrayList<Integer>();
  final ArrayList<Integer> codeNodeIndices = new ArrayList<Integer>();
  final ArrayList<Integer> textNodeIndices = new ArrayList<Integer>();
  final ArrayList<Integer> groupNodeIndices = new ArrayList<Integer>();
  final BiMap<Integer, INaviGroupNode> groupNodeMap = HashBiMap.create();
  final int firstNode =
      saveNodes(provider, newViewId, nodes, functionNodeIndices, codeNodeIndices,
          textNodeIndices, groupNodeIndices, groupNodeMap);
  // After this point, the nodes table has been filled
  // After each saving, the node IDs have to be updated
  PostgreSQLNodeSaver.updateNodeIds(nodes, firstNode);
  // Now, the individual node type tables can be saved. Group nodes are sorted first so that
  // parent groups are stored before the groups nested inside them.
  PostgreSQLNodeSaver.saveGroupNodes(provider, nodes, firstNode,
      PostgreSQLNodeSaver.sortGroupNodes(groupNodeIndices, groupNodeMap));
  PostgreSQLNodeSaver.saveFunctionNodes(provider, nodes, firstNode, functionNodeIndices);
  PostgreSQLNodeSaver.saveCodeNodes(provider, nodes, firstNode, codeNodeIndices);
  PostgreSQLNodeSaver.saveTextNodes(provider, nodes, firstNode, textNodeIndices);
  // Once all nodes are saved, the parent nodes can be saved too
  final CConnection connection = provider.getConnection();
  PostgreSQLNodeSaver.saveParentGroups(connection, nodes, firstNode, groupNodeMap);
  // And finally, we can save the tags associated with the nodes
  PostgreSQLNodeSaver.saveTags(connection, nodes, firstNode);
}
}
| |
package javafixes.math;
import org.junit.Test;
import java.math.BigInteger;
import static java.math.BigInteger.ONE;
import static java.math.BigInteger.ZERO;
import static javafixes.test.Condition.negative;
import static javafixes.test.Condition.positive;
import static javafixes.test.Random.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
/**
 * Unit tests for {@code PowerUtil}: scaling {@code long} and {@link BigInteger} values by
 * powers of ten, counting decimal digits, and the cached big powers of ten.
 */
public class PowerUtilTest {

    // Exhaustively probes the overflow boundary for every power 10^1..10^18: values with
    // magnitude <= MAX/10^n can be upscaled, anything beyond can not.
    @Test
    public void shouldFindIfLongValueCanBeUpscaledByPowerOf10() {
        assertThat(PowerUtil.canUpscaleLongByPowerOf10(Long.MAX_VALUE, 0), is(true));
        assertThat(PowerUtil.canUpscaleLongByPowerOf10(Long.MIN_VALUE, 0), is(true));
        assertThat(PowerUtil.canUpscaleLongByPowerOf10(randomLong(Long.MIN_VALUE, Long.MAX_VALUE), 0), is(true));
        for (int n = 1; n <= 18; n++) {
            long nPow10 = (long) Math.pow(10, n);
            // Largest/smallest values that still fit after multiplying by 10^n.
            assertThat(PowerUtil.canUpscaleLongByPowerOf10(Long.MAX_VALUE / nPow10, n), is(true));
            assertThat(PowerUtil.canUpscaleLongByPowerOf10(Long.MIN_VALUE / nPow10, n), is(true));
            assertThat(PowerUtil.canUpscaleLongByPowerOf10(randomLong(Long.MIN_VALUE / nPow10, Long.MAX_VALUE / nPow10), n), is(true));
            // One step past the boundary must be rejected.
            assertThat(PowerUtil.canUpscaleLongByPowerOf10(Long.MAX_VALUE / nPow10 + 1, n), is(false));
            assertThat(PowerUtil.canUpscaleLongByPowerOf10(Long.MIN_VALUE / nPow10 - 1, n), is(false));
            assertThat(PowerUtil.canUpscaleLongByPowerOf10(randomLong(Long.MIN_VALUE, Long.MIN_VALUE / nPow10 - 1), n), is(false));
            assertThat(PowerUtil.canUpscaleLongByPowerOf10(randomLong(Long.MAX_VALUE / nPow10 + 1, Long.MAX_VALUE), n), is(false));
        }
        // Zero can be upscaled by any power.
        for (int n = 0; n <= 100; n++) {
            assertThat(PowerUtil.canUpscaleLongByPowerOf10(0L, n), is(true));
        }
        // 10^19 already exceeds Long.MAX_VALUE, so no non-zero value can be upscaled.
        assertThat(PowerUtil.canUpscaleLongByPowerOf10(randomLong(positive()), 19), is(false));
        assertThat(PowerUtil.canUpscaleLongByPowerOf10(randomLong(negative()), 19), is(false));
        assertThat(PowerUtil.canUpscaleLongByPowerOf10(randomLong(positive()), randomInt(20, Integer.MAX_VALUE)), is(false));
        assertThat(PowerUtil.canUpscaleLongByPowerOf10(randomLong(negative()), randomInt(20, Integer.MAX_VALUE)), is(false));
    }

    // Upscaling within range must equal plain multiplication by 10^n.
    @Test
    public void shouldUpscaleLongByPowerOf10() {
        long value;

        assertThat(PowerUtil.upscaleByPowerOf10(0L, 0L), equalTo(0L));
        value = randomLong(positive());
        assertThat(PowerUtil.upscaleByPowerOf10(value, 0L), equalTo(value));
        value = randomLong(negative());
        assertThat(PowerUtil.upscaleByPowerOf10(value, 0L), equalTo(value));

        // NOTE(review): loop bound is n < 18 here but n <= 18 in the canUpscale test above -
        // 10^18 is not covered by this method; presumably an oversight, confirm before relying.
        for (long n = 1L; n < 18L; n++) {
            assertThat(PowerUtil.upscaleByPowerOf10(0L, n), equalTo(0L));
            long powerOf10 = (long) Math.pow(10, n);
            value = Long.MAX_VALUE / powerOf10;
            assertThat(PowerUtil.upscaleByPowerOf10(value, n), equalTo(value * powerOf10));
            value = 1;
            assertThat(PowerUtil.upscaleByPowerOf10(value, n), equalTo(value * powerOf10));
            value = randomLong(2, Long.MAX_VALUE / powerOf10 - 1);
            assertThat(PowerUtil.upscaleByPowerOf10(value, n), equalTo(value * powerOf10));
            value = Long.MIN_VALUE / powerOf10;
            assertThat(PowerUtil.upscaleByPowerOf10(value, n), equalTo(value * powerOf10));
            value = -1;
            assertThat(PowerUtil.upscaleByPowerOf10(value, n), equalTo(value * powerOf10));
            value = randomLong(Long.MIN_VALUE / powerOf10 + 1, -2);
            assertThat(PowerUtil.upscaleByPowerOf10(value, n), equalTo(value * powerOf10));
        }

        // Zero is safe even for powers that would overflow any other value.
        assertThat(PowerUtil.upscaleByPowerOf10(0L, 19), equalTo(0L));
        assertThat(PowerUtil.upscaleByPowerOf10(0L, randomLong(20, Long.MAX_VALUE)), equalTo(0L));
    }

    // One step past the overflow boundary must raise ArithmeticException.
    @Test
    public void shouldNotUpscaleLongByPowerOf10IfResultWouldOverflow() {
        long value;

        for (long n = 1L; n < 18L; n++) {
            long powerOf10 = (long) Math.pow(10, n);

            value = 1 + Long.MAX_VALUE / powerOf10;
            try {
                PowerUtil.upscaleByPowerOf10(value, n);
                fail("expected ArithmeticException");
            } catch (ArithmeticException expected) {
                // expected
            }
            value = randomLong(2 + Long.MAX_VALUE / powerOf10, Long.MAX_VALUE);
            try {
                PowerUtil.upscaleByPowerOf10(value, n);
                fail("expected ArithmeticException");
            } catch (ArithmeticException expected) {
                // expected
            }
            value = -1 + Long.MIN_VALUE / powerOf10;
            try {
                PowerUtil.upscaleByPowerOf10(value, n);
                fail("expected ArithmeticException");
            } catch (ArithmeticException expected) {
                // expected
            }
            value = randomLong(Long.MIN_VALUE, -1 + (Long.MIN_VALUE / powerOf10));
            try {
                PowerUtil.upscaleByPowerOf10(value, n);
                fail("expected ArithmeticException");
            } catch (ArithmeticException expected) {
                // expected
            }
        }
    }

    // Downscaling must equal integer division by 10^n (truncation towards zero); for n >= 19
    // every long collapses to 0.
    @Test
    public void shouldDownscaleLong() {
        long value;

        assertThat(PowerUtil.downscaleByPowerOf10(0L, 0L), equalTo(0L));
        value = randomLong(positive());
        assertThat(PowerUtil.downscaleByPowerOf10(value, 0L), equalTo(value));
        value = randomLong(negative());
        assertThat(PowerUtil.downscaleByPowerOf10(value, 0L), equalTo(value));

        for (long n = 1L; n < 18L; n++) {
            assertThat(PowerUtil.downscaleByPowerOf10(0L, n), equalTo(0L));
            long powerOf10 = (long) Math.pow(10, n);
            value = powerOf10;
            assertThat(PowerUtil.downscaleByPowerOf10(value, n), equalTo(1L));
            value = powerOf10 - 1;
            assertThat(PowerUtil.downscaleByPowerOf10(value, n), equalTo(0L));
            value = randomLong(powerOf10 + 1, Long.MAX_VALUE);
            assertThat(PowerUtil.downscaleByPowerOf10(value, n), equalTo(value / powerOf10));
            value = -powerOf10;
            assertThat(PowerUtil.downscaleByPowerOf10(value, n), equalTo(-1L));
            value = -powerOf10 + 1;
            assertThat(PowerUtil.downscaleByPowerOf10(value, n), equalTo(0L));
            value = randomLong(Long.MIN_VALUE, -powerOf10 - 1);
            assertThat(PowerUtil.downscaleByPowerOf10(value, n), equalTo(value / powerOf10));
        }

        assertThat(PowerUtil.downscaleByPowerOf10(randomLong(positive()), 19), equalTo(0L));
        assertThat(PowerUtil.downscaleByPowerOf10(Long.MAX_VALUE, 19), equalTo(0L));
        assertThat(PowerUtil.downscaleByPowerOf10(randomLong(negative()), 19), equalTo(0L));
        assertThat(PowerUtil.downscaleByPowerOf10(Long.MIN_VALUE, 19), equalTo(0L));
        long n = randomLong(20, Long.MAX_VALUE);
        assertThat(PowerUtil.downscaleByPowerOf10(randomLong(positive()), n), equalTo(0L));
        assertThat(PowerUtil.downscaleByPowerOf10(Long.MAX_VALUE, n), equalTo(0L));
        assertThat(PowerUtil.downscaleByPowerOf10(randomLong(negative()), n), equalTo(0L));
        assertThat(PowerUtil.downscaleByPowerOf10(Long.MIN_VALUE, n), equalTo(0L));
    }

    // BigInteger upscaling must equal multiplication by 10^n (no overflow concerns).
    @Test
    public void shouldUpscaleBigInteger() {
        BigInteger value;

        assertThat(PowerUtil.upscaleByPowerOf10(BigInteger.ZERO, 0L), equalTo(BigInteger.ZERO));
        value = randomBigInteger(positive());
        assertThat(PowerUtil.upscaleByPowerOf10(value, 0L), equalTo(value));
        value = randomBigInteger(negative());
        assertThat(PowerUtil.upscaleByPowerOf10(value, 0L), equalTo(value));

        for (int n = 1; n < 100; n++) {
            assertThat(PowerUtil.upscaleByPowerOf10(BigInteger.ZERO, n), equalTo(BigInteger.ZERO));
            // Build the reference power of ten by repeated multiplication.
            BigInteger powerOf10 = BigInteger.TEN;
            for (int i = 1; i < n; i++) {
                powerOf10 = powerOf10.multiply(BigInteger.TEN);
            }
            value = randomBigInteger(positive());
            assertThat(PowerUtil.upscaleByPowerOf10(value, n), equalTo(value.multiply(powerOf10)));
            value = randomBigInteger(negative());
            assertThat(PowerUtil.upscaleByPowerOf10(value, n), equalTo(value.multiply(powerOf10)));
        }
    }

    // Digit counts at and around every power-of-ten boundary representable as a long.
    @Test
    public void shouldFindNumberOfLongDigits() {
        assertThat("wrong number of digits for " + 0L, PowerUtil.numberOfDigits(0L), equalTo(1));
        assertThat("wrong number of digits for " + 1L, PowerUtil.numberOfDigits(1L), equalTo(1));
        assertThat("wrong number of digits for " + -1L, PowerUtil.numberOfDigits(-1L), equalTo(1));
        assertThat("wrong number of digits for " + Long.MAX_VALUE, PowerUtil.numberOfDigits(Long.MAX_VALUE), equalTo(19));
        assertThat("wrong number of digits for " + Long.MIN_VALUE, PowerUtil.numberOfDigits(Long.MIN_VALUE), equalTo(19));
        for (int n = 1; n < 19; n++) {
            long powerOf10 = (long) Math.pow(10, n);
            assertThat("wrong number of digits for " + powerOf10, PowerUtil.numberOfDigits(powerOf10), equalTo(n + 1));
            assertThat("wrong number of digits for " + -powerOf10, PowerUtil.numberOfDigits(-powerOf10), equalTo(n + 1));
            assertThat("wrong number of digits for " + (powerOf10 - 1), PowerUtil.numberOfDigits(powerOf10 - 1), equalTo(n));
            assertThat("wrong number of digits for " + (-powerOf10 + 1), PowerUtil.numberOfDigits(-powerOf10 + 1), equalTo(n));
            assertThat("wrong number of digits for " + (powerOf10 + 1), PowerUtil.numberOfDigits(powerOf10 + 1), equalTo(n + 1));
            assertThat("wrong number of digits for " + (-powerOf10 - 1), PowerUtil.numberOfDigits(-powerOf10 - 1), equalTo(n + 1));
        }
    }

    // Digit counts around every power-of-ten boundary for BigInteger values.
    @Test
    public void shouldFindNumberOfBigIntegerDigits() {
        assertThat("wrong number of digits for " + ZERO, PowerUtil.numberOfDigits(ZERO), equalTo(1));
        assertThat("wrong number of digits for " + ONE, PowerUtil.numberOfDigits(ONE), equalTo(1));
        assertThat("wrong number of digits for " + ONE.negate(), PowerUtil.numberOfDigits(ONE.negate()), equalTo(1));
        BigInteger powerOf10 = BigInteger.ONE;
        // tested for (n) up to 125_000 digits
        for (int n = 1; n < 1_000; n++) {
            powerOf10 = powerOf10.multiply(BigInteger.TEN);
            assertThat("wrong number of digits for " + powerOf10, PowerUtil.numberOfDigits(powerOf10), equalTo(n + 1));
            BigInteger negativePowerOf10 = powerOf10.negate();
            assertThat("wrong number of digits for " + negativePowerOf10, PowerUtil.numberOfDigits(negativePowerOf10), equalTo(n + 1));
            assertThat("wrong number of digits for " + powerOf10.subtract(ONE), PowerUtil.numberOfDigits(powerOf10.subtract(ONE)), equalTo(n));
            assertThat("wrong number of digits for " + negativePowerOf10.add(ONE), PowerUtil.numberOfDigits(negativePowerOf10.add(ONE)), equalTo(n));
            assertThat("wrong number of digits for " + powerOf10.add(ONE), PowerUtil.numberOfDigits(powerOf10.add(ONE)), equalTo(n + 1));
            assertThat("wrong number of digits for " + negativePowerOf10.subtract(ONE), PowerUtil.numberOfDigits(negativePowerOf10.subtract(ONE)), equalTo(n + 1));
        }
    }

    // Sanity check: the power-of-ten cache is non-empty.
    @Test
    public void shouldProvidePositiveMaxCachedBigPowerOf10() {
        assertThat(PowerUtil.maxCachedBigPowerOf10(), greaterThan(0));
    }

    // powerOf10Big must match repeated multiplication and reject negative exponents.
    @Test
    public void shouldFindBigIntegerPowerOf10() {
        BigInteger expectedPowerOf10 = BigInteger.ONE;
        assertThat(PowerUtil.powerOf10Big(0), equalTo(expectedPowerOf10));
        for (int i = 1; i < 200; i++) {
            expectedPowerOf10 = expectedPowerOf10.multiply(BigInteger.TEN);
            assertThat(PowerUtil.powerOf10Big(i), equalTo(expectedPowerOf10));
        }
        try {
            PowerUtil.powerOf10Big(-1);
            fail("should fail with IllegalArgumentException");
        } catch (IllegalArgumentException expected) {
            // expected
        }
        try {
            PowerUtil.powerOf10Big(Integer.MIN_VALUE);
            fail("should fail with IllegalArgumentException");
        } catch (IllegalArgumentException expected) {
            // expected
        }
        try {
            PowerUtil.powerOf10Big(randomInt(Integer.MIN_VALUE + 1, -2));
            fail("should fail with IllegalArgumentException");
        } catch (IllegalArgumentException expected) {
            // expected
        }
    }
}
| |
package com.vaguehope.morrigan.dlna.players;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.fourthline.cling.controlpoint.ControlPoint;
import org.fourthline.cling.model.meta.RemoteService;
import org.fourthline.cling.support.model.TransportInfo;
import org.fourthline.cling.support.model.TransportStatus;
import org.seamless.util.MimeType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.vaguehope.morrigan.config.Config;
import com.vaguehope.morrigan.dlna.DlnaException;
import com.vaguehope.morrigan.dlna.MediaFormat;
import com.vaguehope.morrigan.dlna.UpnpHelper;
import com.vaguehope.morrigan.dlna.content.MediaFileLocator;
import com.vaguehope.morrigan.dlna.httpserver.MediaServer;
import com.vaguehope.morrigan.dlna.util.StringHelper;
import com.vaguehope.morrigan.engines.playback.IPlaybackEngine.PlayState;
import com.vaguehope.morrigan.model.media.IMediaTrack;
import com.vaguehope.morrigan.model.media.IMediaTrackList;
import com.vaguehope.morrigan.player.AbstractPlayer;
import com.vaguehope.morrigan.player.PlayItem;
import com.vaguehope.morrigan.player.PlayerRegister;
import com.vaguehope.morrigan.player.PlayerStateStorage;
import com.vaguehope.morrigan.transcode.FfprobeCache;
import com.vaguehope.morrigan.util.Objs;
public abstract class AbstractDlnaPlayer extends AbstractPlayer {
private static final Logger LOG = LoggerFactory.getLogger(AbstractDlnaPlayer.class);

protected final ControlPoint controlPoint;
protected final RemoteService avTransportSvc;
protected final AvTransportActions avTransport;
// May be null when the device exposes no RenderingControl service (see constructor).
protected final RenderingControlActions renderingControl;
protected final ScheduledExecutorService scheduledExecutor;

private final MediaServer mediaServer;
private final MediaFileLocator mediaFileLocator;
// Stable ID derived from the remote AVTransport service (see constructor).
private final String uid;

// Caches the last known playback events (position, duration) reported by the renderer.
protected final PlayerEventCache playerEventCache = new PlayerEventCache();

private final AtomicReference<PlayItem> currentItem = new AtomicReference<>();
// Measured duration of the current item in seconds; -1 until a duration has been measured.
private final AtomicInteger currentItemDurationSeconds = new AtomicInteger(-1);
// State captured for position restoration; set by restoreBackedUpState().
private volatile PlayerState restorePositionState;
/**
 * Creates a player bound to a remote UPnP/DLNA renderer's AVTransport service.
 *
 * @param avTransportActions may be null; a default {@link AvTransportActions} wrapping the
 *        given service is created if so (injection point for tests).
 * @param renderingControlActions may be null; the device is searched for a RenderingControl
 *        service instead, and {@code renderingControl} stays null when none is found.
 */
public AbstractDlnaPlayer (
    final PlayerRegister register,
    final ControlPoint controlPoint,
    final RemoteService avTransportSvc,
    final MediaServer mediaServer,
    final MediaFileLocator mediaFileLocator,
    final ScheduledExecutorService scheduledExecutor,
    final PlayerStateStorage playerStateStorage,
    final Config config,
    final AvTransportActions avTransportActions,
    final RenderingControlActions renderingControlActions) {
  // Player ID and display name are both derived from the remote service / device details.
  super(UpnpHelper.idFromRemoteService(avTransportSvc), avTransportSvc.getDevice().getDetails().getFriendlyName(), register, playerStateStorage, config);
  this.controlPoint = controlPoint;
  this.avTransportSvc = avTransportSvc;

  if (avTransportActions != null) {
    this.avTransport = avTransportActions;
  }
  else {
    this.avTransport = new AvTransportActions(controlPoint, avTransportSvc);
  }

  if (renderingControlActions != null) {
    this.renderingControl = renderingControlActions;
  }
  else {
    // Discover the rendering control service on the same device, if it has one.
    final RemoteService renderingControlSvc = UpnpHelper.findFirstServiceOfType(avTransportSvc.getDevice(), UpnpHelper.SERVICE_RENDERINGCONTROL);
    if (renderingControlSvc != null) {
      this.renderingControl = new RenderingControlActions(controlPoint, renderingControlSvc);
    }
    else {
      this.renderingControl = null;
    }
  }

  this.mediaServer = mediaServer;
  this.mediaFileLocator = mediaFileLocator;
  this.scheduledExecutor = scheduledExecutor;
  this.uid = UpnpHelper.remoteServiceUid(avTransportSvc);

  // Keep the event cache up to date with events this player emits.
  addEventListener(this.playerEventCache);
}
/**
 * @return Stable unique ID derived from the remote AVTransport service.
 */
public String getUid () {
  return this.uid;
}
// Logs disposal; no resources are released here beyond what the superclass handles.
@Override
protected void onDispose () {
  LOG.info("Disposed {}: {}.", this.uid, toString());
}
// The remote renderer is considered ready as long as this player has not been disposed.
@Override
public boolean isPlaybackEngineReady () {
  return !isDisposed();
}
// No playback history is kept for this player type; always returns an empty list.
@Override
public List<PlayItem> getHistory () {
  return Collections.emptyList();
}
/**
 * @return The list the current item belongs to, or null when nothing is current.
 */
@Override
public IMediaTrackList<? extends IMediaTrack> getCurrentList () {
  final PlayItem item = getCurrentItem();
  if (item == null) {
    return null;
  }
  return item.getList();
}
/**
 * Sets the current item; when the item actually changes, the measured duration of the
 * previous item is invalidated (reset to -1).
 */
@Override
public void setCurrentItem (final PlayItem item) {
  final PlayItem previous = this.currentItem.getAndSet(item);
  final boolean changed = !Objs.equals(previous, item);
  if (changed) {
    // The cached duration belonged to the previous item.
    this.currentItemDurationSeconds.set(-1);
  }
}
/**
 * @return The current item, or null when none is set.
 */
@Override
public PlayItem getCurrentItem () {
  return this.currentItem.get();
}
// Position as last reported via player events (cached, not queried live from the renderer).
@Override
public long getCurrentPosition () {
  return this.playerEventCache.getPosition();
}
// Duration measured when the track was loaded (see loadAndPlay); -1 when not yet measured.
@Override
public int getCurrentTrackDurationAsMeasured () {
  return this.currentItemDurationSeconds.get();
}
// Duration as last reported via player events from the renderer (cached).
@Override
public int getCurrentTrackDurationFromRenderer () {
  return this.playerEventCache.getDuration();
}
/**
 * Advances to the next item to play, or stops playback when there is nothing left to play.
 */
@Override
public void nextTrack () {
  checkAlive();
  final PlayItem next = findNextItemToPlay();
  if (next == null) {
    stopPlaying();
    return;
  }
  loadAndStartPlaying(next);
}
/**
 * Resolves the media ID, URI, MIME type, file size, duration and cover art for the given item
 * and hands them to {@link #dlnaPlay} to start playback on the renderer.
 *
 * @param altFile Optional alternative local file (e.g. a transcode) to play instead of the
 *        track's own location; may be null.
 * @throws DlnaException If no valid duration can be determined (a known duration is required).
 */
@Override
protected void loadAndPlay (final PlayItem item, final File altFile) throws DlnaException, IOException {
  // Work out the content ID: the alt file, the track's remote ID, or the local file path.
  final String id;
  if (altFile != null) {
    id = this.mediaFileLocator.fileId(altFile);
  }
  else if (StringHelper.notBlank(item.getTrack().getRemoteId())) {
    id = item.getTrack().getRemoteId();
  }
  else {
    id = this.mediaFileLocator.fileId(new File(item.getTrack().getFilepath()));
  }

  // Resolve URI and media metadata from the same three sources, in the same precedence.
  final String uri;
  final MimeType mimeType;
  final long fileSize;
  final int durationSeconds;
  if (altFile != null) {
    // Local alt file, served via the embedded media server; duration read with ffprobe.
    uri = this.mediaServer.uriForId(id);
    mimeType = MediaFormat.identify(altFile).toMimeType();
    fileSize = altFile.length();
    durationSeconds = readFileDurationSeconds(altFile);
  }
  else if (StringHelper.notBlank(item.getTrack().getRemoteLocation())) {
    // Remote track: use its own location and stored metadata.
    uri = item.getTrack().getRemoteLocation();
    mimeType = MimeType.valueOf(item.getTrack().getMimeType());
    fileSize = item.getTrack().getFileSize();
    durationSeconds = item.getTrack().getDuration(); // TODO what if this is not available?
  }
  else {
    // Local track file, served via the embedded media server; fall back to ffprobe when the
    // stored duration is missing or invalid.
    uri = this.mediaServer.uriForId(id);
    final File file = new File(item.getTrack().getFilepath());
    mimeType = MediaFormat.identify(file).toMimeType();
    fileSize = file.length();
    int d = item.getTrack().getDuration();
    if (d < 1) d = readFileDurationSeconds(file);
    durationSeconds = d;
  }
  if (durationSeconds < 1) throw new DlnaException("Can not play track without a known duration.");

  // Cover art: prefer the track's remote cover art, otherwise serve a local cover file.
  final String coverArtUri;
  if (StringHelper.notBlank(item.getTrack().getCoverArtRemoteLocation())) {
    coverArtUri = item.getTrack().getCoverArtRemoteLocation();
  }
  else {
    final File coverArt = item.getTrack().findCoverArt();
    coverArtUri = coverArt != null ? this.mediaServer.uriForId(this.mediaFileLocator.fileId(coverArt)) : null;
  }

  dlnaPlay(item, id, uri, mimeType, fileSize, durationSeconds, coverArtUri);

  // After dlnaPlay() because it will (likely) call setCurrentItem().
  this.currentItemDurationSeconds.set(durationSeconds);
}
/**
 * Probes the given file with ffprobe and returns its duration in whole seconds.
 * Always returns a valid (>= 1) duration or throws.
 *
 * @throws IOException if the duration can not be read or is not positive.
 */
private static int readFileDurationSeconds (final File altFile) throws IOException {
    final Long durationMillis = FfprobeCache.inspect(altFile).getDurationMillis();
    if (durationMillis == null || durationMillis < 1) {
        throw new IOException("Failed to read file duration: " + altFile.getAbsolutePath());
    }
    LOG.info("Duration {}ms: {}", durationMillis, altFile.getAbsolutePath());
    // 0ms < d < 1s gets rounded up to 1s so callers never see a zero duration.
    final int seconds = (int) TimeUnit.MILLISECONDS.toSeconds(durationMillis);
    return Math.max(1, seconds);
}
// Implemented by subclasses to issue the actual play request for the resolved id/URI/metadata.
protected abstract void dlnaPlay (PlayItem item, String id, String uri, MimeType mimeType, long fileSize, int durationSeconds, String coverArtUri) throws DlnaException;
// Implemented by subclasses to report whether playback is expected to be in progress.
protected abstract boolean shouldBePlaying ();
/**
 * Captures the player's current state (playback order, transcode, current item,
 * position, playing flag, queue) for a later restoreBackedUpState().
 */
public PlayerState backupState () {
    final PlayerState snapshot = new PlayerState(
            getPlaybackOrder(),
            getTranscode(),
            getCurrentItem(),
            getCurrentPosition(),
            shouldBePlaying(),
            getQueue());
    return snapshot;
}
// Restores a state previously captured by backupState(); a null state is a no-op.
void restoreBackedUpState (final PlayerState state) {
    if (state == null) return;
    setPlaybackOrder(state.getPlaybackOrder());
    setTranscode(state.getTranscode());
    setCurrentItem(state.getCurrentItem());
    // Kept around so the saved position can be re-applied later
    // (see getRestorePositionState() / clearRestorePositionState()).
    this.restorePositionState = state;
    state.addItemsToQueue(getQueue());
    // Resume playback only if something was actually playing when the backup was taken.
    if (state.isPlaying() && state.getCurrentItem() != null) {
        loadAndStartPlaying(state.getCurrentItem());
    }
    markStateRestoreAttempted();
    LOG.info("Restored {}: {}.", getUid(), state);
}
// State saved by restoreBackedUpState(), or null once consumed/cleared.
protected PlayerState getRestorePositionState () {
    return this.restorePositionState;
}
// Discards the pending restore state set by restoreBackedUpState().
protected void clearRestorePositionState () {
    this.restorePositionState = null;
}
// Records the track-started event asynchronously so playback is not blocked.
protected void recordTrackStarted (final PlayItem item) {
    this.scheduledExecutor.execute(new RecordTrackStarted(item));
}
// Records the track-completed event asynchronously so playback is not blocked.
protected void recordTrackCompleted (final PlayItem item) {
    this.scheduledExecutor.execute(new RecordTrackCompleted(item));
}
/**
 * Maps a UPnP TransportInfo onto the local PlayState enum.
 * Null info, a non-OK transport status, or any unmapped state all yield STOPPED.
 */
public static PlayState transportIntoToPlayState (final TransportInfo ti) {
    if (ti == null || ti.getCurrentTransportStatus() != TransportStatus.OK) {
        return PlayState.STOPPED;
    }
    switch (ti.getCurrentTransportState()) {
        case PLAYING:
        case RECORDING:
            return PlayState.PLAYING;
        case PAUSED_PLAYBACK:
        case PAUSED_RECORDING:
            return PlayState.PAUSED;
        case TRANSITIONING:
        case CUSTOM:
            return PlayState.LOADING;
        case STOPPED:
        case NO_MEDIA_PRESENT:
        default:
            return PlayState.STOPPED;
    }
}
}
| |
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.drivers.lumentum;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.commons.configuration.HierarchicalConfiguration;
import org.apache.commons.configuration.XMLConfiguration;
import org.apache.commons.lang3.StringUtils;
import org.onlab.packet.ChassisId;
import org.onlab.util.Frequency;
import org.onosproject.drivers.utilities.XmlConfigParser;
import org.onosproject.net.ChannelSpacing;
import org.onosproject.net.SparseAnnotations;
import org.onosproject.net.DefaultAnnotations;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Device;
import org.onosproject.net.Port;
import org.onosproject.net.PortNumber;
import org.onosproject.net.AnnotationKeys;
import org.onosproject.net.device.DefaultDeviceDescription;
import org.onosproject.net.device.DefaultPortDescription;
import org.onosproject.net.device.DeviceDescription;
import org.onosproject.net.device.DeviceDescriptionDiscovery;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.device.PortDescription;
import org.onosproject.net.driver.AbstractHandlerBehaviour;
import org.onosproject.net.intent.OpticalPathIntent;
import org.onosproject.netconf.NetconfController;
import org.onosproject.netconf.NetconfException;
import org.onosproject.netconf.NetconfSession;
import org.slf4j.Logger;
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.onosproject.net.optical.device.OmsPortHelper.omsPortDescription;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Device description behaviour for Lumentum ROADM-A Whitebox devices using NETCONF.
 */
public class LumentumNetconfRoadmDiscovery
        extends AbstractHandlerBehaviour implements DeviceDescriptionDiscovery {

    private static final String PHYSICAL_PORT = "data.physical-ports.physical-port";
    private static final String DN = "dn";
    private static final String DN_PORT = "port=";
    private static final String PORT_EXTENSION = "port-extension";
    protected static final String OPTICAL_INPUT = "port-optical-input";
    protected static final String OPTICAL_OUTPUT = "port-optical-output";
    private static final String PORT_PLUGGABLE = "port-pluggable";
    private static final String PORT_ETHERNET = "port-ethernet";
    private static final String MAINTENANCE_STATE = "config.maintenance-state";
    private static final String PORT_SPEED = "config.loteeth:port-speed";
    private static final String IN_SERVICE = "in-service";
    private static final String PORT_NAME = "entity-description";

    //Prefix inside the port-speed leaf value, e.g. "speed_1000Mb" -> 1000 Mb/s.
    private static final String SPEED_PREFIX = "speed_";

    public static final ChannelSpacing CHANNEL_SPACING_50 = ChannelSpacing.CHL_50GHZ;
    public static final Frequency START_CENTER_FREQ_50 = Frequency.ofGHz(191_350);
    public static final Frequency END_CENTER_FREQ_50 = Frequency.ofGHz(196_100);

    private static final int MIN_MUX_PORT = 4101;
    private static final int MAX_MUX_PORT = 4120;
    private static final int MIN_DEM_PORT = 5201;
    private static final int MAX_DEM_PORT = 5220;
    private static final int DELTA_MUX_DEM_PORT = MIN_DEM_PORT - MIN_MUX_PORT;

    private static final String MUX_PORT_NAME = "Mux Input";
    private static final String DEMUX_PORT_NAME = "Demux Output";
    private static final String LINE_PORT_NAME = "Optical Line";

    private final Logger log = getLogger(getClass());

    /**
     * Discovers vendor, versions, serial number and chassis id via NETCONF
     * (ietf-system and the Lumentum equipment model).
     *
     * @return the device description, or null when no NETCONF session is available
     */
    @Override
    public DeviceDescription discoverDeviceDetails() {
        SparseAnnotations annotations = DefaultAnnotations.builder().build();

        log.debug("Lumentum NETCONF - starting discoverDeviceDetails");

        // Some defaults values, overwritten below when the device answers.
        String vendor = "Lumentum";
        String hwVersion = "not loaded";
        String swVersion = "not loaded";
        String serialNumber = "not loaded";
        String chassisData = "ne=1;chassis=10";

        ChassisId chassisId = null;
        DeviceId deviceId = handler().data().deviceId();

        NetconfSession session = getNetconfSession();
        if (session == null) {
            log.error("Lumentum NETCONF - session not found for {}", deviceId);
            return null;
        }

        //Retrieve system information from ietf-system
        StringBuilder systemRequestBuilder = new StringBuilder();
        systemRequestBuilder.append("<system-state xmlns=\"urn:ietf:params:xml:ns:yang:ietf-system\">");
        systemRequestBuilder.append("</system-state>");

        try {
            String reply = session.get(systemRequestBuilder.toString(), null);
            log.debug("Lumentum NETCONF - session.get reply {}", reply);

            XMLConfiguration xconf = (XMLConfiguration) XmlConfigParser.loadXmlString(reply);

            vendor = xconf.getString("data.system-state.platform.machine", vendor);
            swVersion = xconf.getString("data.system-state.platform.os-version", swVersion);
        } catch (NetconfException e) {
            log.error("Lumentum NETCONF error in session.get with filter <system-state>", e);
        }

        //Retrieve system information
        StringBuilder chassisRequestBuilder = new StringBuilder();
        chassisRequestBuilder.append("<chassis-list xmlns=\"http://www.lumentum.com/lumentum-ote-equipment\">");
        chassisRequestBuilder.append("</chassis-list>");

        try {
            String reply = session.get(chassisRequestBuilder.toString(), null);
            log.debug("Lumentum NETCONF - session.get reply {}", reply);

            XMLConfiguration xconf = (XMLConfiguration) XmlConfigParser.loadXmlString(reply);

            hwVersion = xconf.getString("data.chassis-list.chassis.state.loteq:hardware-rev", hwVersion);
            serialNumber = xconf.getString("data.chassis-list.chassis.state.loteq:serial-no", serialNumber);
            chassisData = xconf.getString("data.chassis-list.chassis.dn", chassisData);

            // Parse the chassis number out of a dn such as "ne=1;chassis=10".
            // Guarded so a malformed dn does not abort discovery with an unchecked exception.
            String[] parts = chassisData.split("chassis=");
            if (parts.length > 1) {
                chassisId = new ChassisId(Long.valueOf(parts[1], 10));
            } else {
                log.warn("Lumentum NETCONF - unexpected chassis dn format: {}", chassisData);
            }
        } catch (NetconfException | NumberFormatException e) {
            log.error("Lumentum NETCONF error in session.get", e);
        }

        //Upon connection of a new devices all pre-configured connections are removed
        //TODO consider a way to keep "external" FlowRules
        rpcRemoveAllConnections("1");
        rpcRemoveAllConnections("2");

        log.info("Lumentum ROADM20 - discovered details:");
        log.info("TYPE {}", Device.Type.ROADM);
        log.info("VENDOR {}", vendor);
        log.info("HWVERSION {}", hwVersion);
        log.info("SWVERSION {}", swVersion);
        log.info("SERIAL {}", serialNumber);
        log.info("CHASSISID {}", chassisId);

        //Return the Device Description
        return new DefaultDeviceDescription(deviceId.uri(), Device.Type.ROADM,
                vendor, hwVersion, swVersion, serialNumber, chassisId, annotations);
    }

    /**
     * Discovers the device's physical ports via NETCONF.
     *
     * @return port descriptions, empty when the device or session is unavailable
     */
    @Override
    public List<PortDescription> discoverPortDetails() {
        DeviceId deviceId = handler().data().deviceId();
        DeviceService deviceService = checkNotNull(handler().get(DeviceService.class));
        Device device = deviceService.getDevice(deviceId);

        //Get the configuration from the device
        if (device == null) {
            log.error("Lumentum NETCONF - device object not found for {}", deviceId);
            return ImmutableList.of();
        }

        NetconfSession session = getNetconfSession();
        if (session == null) {
            log.error("Lumentum NETCONF - session not found for {}", deviceId);
            return ImmutableList.of();
        }

        StringBuilder requestBuilder = new StringBuilder();
        requestBuilder.append("<physical-ports xmlns=\"http://www.lumentum.com/lumentum-ote-port\" ");
        requestBuilder.append("xmlns:lotep=\"http://www.lumentum.com/lumentum-ote-port\" ");
        requestBuilder.append("xmlns:lotepopt=\"http://www.lumentum.com/lumentum-ote-port-optical\" ");
        requestBuilder.append("xmlns:loteeth=\"http://www.lumentum.com/lumentum-ote-port-ethernet\">");
        requestBuilder.append("</physical-ports>");

        String reply;
        try {
            reply = session.get(requestBuilder.toString(), null);
        } catch (NetconfException e) {
            log.error("Lumentum NETCONF - " +
                    "discoverPortDetails failed to retrieve port details {}", handler().data().deviceId(), e);
            return ImmutableList.of();
        }

        // Explicit charset: NETCONF replies are XML, decoded as UTF-8 rather than
        // whatever the platform default happens to be.
        List<PortDescription> descriptions = parseLumentumRoadmPorts(XmlConfigParser.
                loadXml(new ByteArrayInputStream(reply.getBytes(StandardCharsets.UTF_8))));

        return ImmutableList.copyOf(descriptions);
    }

    /**
     * Parses a configuration and returns a set of ports.
     *
     * @param cfg a hierarchical configuration
     * @return a list of port descriptions
     */
    protected List<PortDescription> parseLumentumRoadmPorts(HierarchicalConfiguration cfg) {
        List<PortDescription> portDescriptions = Lists.newArrayList();
        List<HierarchicalConfiguration> ports = cfg.configurationsAt(PHYSICAL_PORT);

        ports.stream().forEach(pcfg -> {
            DefaultAnnotations.Builder annotations = DefaultAnnotations.builder();

            //Load port number from the dn, e.g. "...;port=4101" -> 4101
            String dn = pcfg.getString(DN);
            PortNumber portNum = PortNumber.portNumber(
                    dn.substring(dn.lastIndexOf(DN_PORT) + DN_PORT.length()));

            //Load port state
            String maintenanceState = pcfg.getString(MAINTENANCE_STATE);
            boolean isEnabled = ((maintenanceState != null) && (maintenanceState).equals(IN_SERVICE));

            //Load port type (FIBER/COPPER)
            Port.Type type = null;
            for (Object o : pcfg.getList(PORT_EXTENSION)) {
                String s = (String) o;
                if (s.equals(OPTICAL_INPUT) || s.equals(OPTICAL_OUTPUT)) {
                    type = Port.Type.FIBER;
                } else if (s.equals(PORT_ETHERNET) || s.equals(PORT_PLUGGABLE)) {
                    type = Port.Type.COPPER;
                }
            }

            //Load port speed of Ethernet interface, expressed in Mb/s
            Long speed = 0L; //should be the speed of optical port
            if (type != null) {
                if (type.equals(Port.Type.COPPER)) {
                    String speedString = pcfg.getString(PORT_SPEED);
                    if (speedString != null) {
                        speed = Long.parseLong(speedString.substring(
                                speedString.lastIndexOf(SPEED_PREFIX) + SPEED_PREFIX.length(),
                                speedString.lastIndexOf("Mb")));
                    } else {
                        log.error("Lumentum NETCONF - Port speed of Ethernet port not correctly loaded");
                    }
                }
            } else {
                log.error("Port Type not correctly loaded");
            }

            /**
             * Setting the reverse port value for the unidirectional ports.
             *
             * In this device each port includes an input fiber and an output fiber.
             * The 20 input fibers are numbered from MIN_MUX_PORT = 4101 to MAX_MUX_PORT = 4120.
             * The 20 output fibers are numbered from MIN_DEM_PORT = 5201 to MAX_DEM_PORT = 5220.
             *
             * Where port 520x is always the reverse of 410x.
             */
            if ((portNum.toLong() >= MIN_MUX_PORT) && (portNum.toLong() <= MAX_MUX_PORT)) {
                Long reversePortId = portNum.toLong() + DELTA_MUX_DEM_PORT;
                annotations.set(OpticalPathIntent.REVERSE_PORT_ANNOTATION_KEY, reversePortId.toString());
            }
            if ((portNum.toLong() >= MIN_DEM_PORT) && (portNum.toLong() <= MAX_DEM_PORT)) {
                Long reversePortId = portNum.toLong() - DELTA_MUX_DEM_PORT;
                annotations.set(OpticalPathIntent.REVERSE_PORT_ANNOTATION_KEY, reversePortId.toString());
            }

            //Load other information, copied into annotations with model prefixes stripped.
            pcfg.getKeys().forEachRemaining(k -> {
                if (!k.contains(DN) && !k.contains(PORT_SPEED) && !k.contains(PORT_EXTENSION)
                        && !k.contains(MAINTENANCE_STATE)) {
                    String value = pcfg.getString(k);
                    if (value != null && !value.isEmpty()) {
                        k = StringUtils.replaceEach(k, new String[]{"loteeth:", "lotep:",
                                        "lotepopt:", "config.", "=", ":",
                                        "state."},
                                new String[]{"", "", "", "", "", "", ""});

                        annotations.set(k, value);

                        //To visualize port name in the ROADM app GUI
                        if (k.equals(PORT_NAME)) {
                            annotations.set(AnnotationKeys.PORT_NAME, value);
                        }
                    }
                }
            });

            // Build the annotations once and reuse; the port name may legitimately be absent.
            DefaultAnnotations builtAnnotations = annotations.build();
            String portName = builtAnnotations.value(AnnotationKeys.PORT_NAME);

            log.debug("Lumentum NETCONF - retrieved port {},{},{},{},{}",
                    portNum, isEnabled, type, speed, builtAnnotations);

            if ((type == Port.Type.FIBER) && (portName != null)
                    && (portName.contains(MUX_PORT_NAME)
                        || portName.contains(DEMUX_PORT_NAME)
                        || portName.contains(LINE_PORT_NAME))) {

                //These are the ports supporting OchSignals
                portDescriptions.add(omsPortDescription(portNum,
                        isEnabled,
                        START_CENTER_FREQ_50,
                        END_CENTER_FREQ_50,
                        CHANNEL_SPACING_50.frequency(),
                        builtAnnotations));
            } else {
                //These are COPPER ports, or FIBER ports not supporting OchSignals
                DefaultPortDescription.Builder portDescriptionBuilder = DefaultPortDescription.builder();
                portDescriptionBuilder.withPortNumber(portNum)
                        .isEnabled(isEnabled)
                        .type(type)
                        .portSpeed(speed)
                        .annotations(builtAnnotations);

                portDescriptions.add(portDescriptionBuilder.build());
            }
        });

        return portDescriptions;
    }

    //Following Lumentum documentation rpc operation to delete all connections
    private boolean rpcRemoveAllConnections(String module) {
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("<rpc xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">" + "\n");
        stringBuilder.append(
                "<remove-all-connections xmlns=\"http://www.lumentum.com/lumentum-ote-connection\">" + "\n");
        stringBuilder.append("<dn>ne=1;chassis=1;card=1;module=" + module + "</dn>" + "\n");
        stringBuilder.append("</remove-all-connections>" + "\n");
        stringBuilder.append("</rpc>" + "\n");

        return editCrossConnect(stringBuilder.toString());
    }

    // Sends the given edit-config; returns false (and logs) on any NETCONF failure.
    private boolean editCrossConnect(String xcString) {
        NetconfSession session = getNetconfSession();

        if (session == null) {
            log.error("Lumentum NETCONF - session not found for {}", handler().data().deviceId());
            return false;
        }

        try {
            return session.editConfig(xcString);
        } catch (NetconfException e) {
            log.error("Failed to edit the CrossConnect edid-cfg for device {}",
                    handler().data().deviceId(), e);
            log.debug("Failed configuration {}", xcString);
            return false;
        }
    }

    /**
     * Helper method to get the Netconf session.
     */
    private NetconfSession getNetconfSession() {
        NetconfController controller =
                checkNotNull(handler().get(NetconfController.class));
        return controller.getNetconfDevice(did()).getSession();
    }

    /**
     * Helper method to get the device id.
     */
    private DeviceId did() {
        return data().deviceId();
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.orc;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.primitives.UnsignedBytes;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import io.prestosql.orc.OrcWriteValidation.OrcWriteValidationBuilder;
import io.prestosql.orc.OrcWriteValidation.OrcWriteValidationMode;
import io.prestosql.orc.OrcWriterStats.FlushReason;
import io.prestosql.orc.metadata.ColumnEncoding;
import io.prestosql.orc.metadata.ColumnMetadata;
import io.prestosql.orc.metadata.CompressedMetadataWriter;
import io.prestosql.orc.metadata.CompressionKind;
import io.prestosql.orc.metadata.Footer;
import io.prestosql.orc.metadata.Metadata;
import io.prestosql.orc.metadata.OrcColumnId;
import io.prestosql.orc.metadata.OrcMetadataWriter;
import io.prestosql.orc.metadata.OrcType;
import io.prestosql.orc.metadata.Stream;
import io.prestosql.orc.metadata.StripeFooter;
import io.prestosql.orc.metadata.StripeInformation;
import io.prestosql.orc.metadata.statistics.ColumnStatistics;
import io.prestosql.orc.metadata.statistics.StripeStatistics;
import io.prestosql.orc.stream.OrcDataOutput;
import io.prestosql.orc.stream.StreamDataOutput;
import io.prestosql.orc.writer.ColumnWriter;
import io.prestosql.orc.writer.SliceDictionaryColumnWriter;
import io.prestosql.spi.Page;
import io.prestosql.spi.type.Type;
import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.IOException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.airlift.slice.Slices.utf8Slice;
import static io.prestosql.orc.OrcReader.validateFile;
import static io.prestosql.orc.OrcWriterStats.FlushReason.CLOSED;
import static io.prestosql.orc.OrcWriterStats.FlushReason.DICTIONARY_FULL;
import static io.prestosql.orc.OrcWriterStats.FlushReason.MAX_BYTES;
import static io.prestosql.orc.OrcWriterStats.FlushReason.MAX_ROWS;
import static io.prestosql.orc.metadata.ColumnEncoding.ColumnEncodingKind.DIRECT;
import static io.prestosql.orc.metadata.OrcColumnId.ROOT_COLUMN;
import static io.prestosql.orc.metadata.PostScript.MAGIC;
import static io.prestosql.orc.stream.OrcDataOutput.createDataOutput;
import static io.prestosql.orc.writer.ColumnWriters.createColumnWriter;
import static java.lang.Integer.min;
import static java.lang.Math.toIntExact;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
public final class OrcWriter
implements Closeable
{
private static final int INSTANCE_SIZE = ClassLayout.parseClass(OrcWriter.class).instanceSize();
// User-metadata key under which the writer records its own version (see constructor).
private static final String PRESTO_ORC_WRITER_VERSION_METADATA_KEY = "presto.writer.version";
private static final String PRESTO_ORC_WRITER_VERSION;
private final OrcWriterStats stats;

static {
    // Falls back to "UNKNOWN" when no manifest implementation version is available
    // (e.g. running from classes rather than a packaged jar).
    String version = OrcWriter.class.getPackage().getImplementationVersion();
    PRESTO_ORC_WRITER_VERSION = version == null ? "UNKNOWN" : version;
}

private final OrcDataSink orcDataSink;
private final List<Type> types;
private final CompressionKind compression;
// Stripe/row-group sizing limits, all fixed at construction from OrcWriterOptions.
private final int stripeMaxBytes;
private final int chunkMaxLogicalBytes;
private final int stripeMaxRowCount;
private final int rowGroupMaxRowCount;
private final int maxCompressionBufferSize;
private final Map<String, String> userMetadata;
private final CompressedMetadataWriter metadataWriter;
private final DateTimeZone hiveStorageTimeZone;

// Metadata of stripes already flushed to the sink; used to build the file footer.
private final List<ClosedStripe> closedStripes = new ArrayList<>();
private final ColumnMetadata<OrcType> orcTypes;

private final List<ColumnWriter> columnWriters;
private final DictionaryCompressionOptimizer dictionaryCompressionOptimizer;
// Running counters for the stripe/row group currently being built.
private int stripeRowCount;
private int rowGroupRowCount;
private int bufferedBytes;
// Memory accounting, reported via getRetainedBytes().
private long columnWritersRetainedBytes;
private long closedStripesRetainedBytes;
private long previouslyRecordedSizeInBytes;
private boolean closed;
private long fileRowCount;
private Optional<ColumnMetadata<ColumnStatistics>> fileStats;
private long fileStatsRetainedBytes;

// Non-null only when write validation was requested at construction.
@Nullable
private final OrcWriteValidationBuilder validationBuilder;
/**
 * Creates an ORC writer that streams stripes to the given sink.
 *
 * Sets up compression, sizing limits, user metadata (including the writer
 * version), one column writer per top-level field, and the dictionary
 * compression optimizer. Optionally records everything written for later
 * read-back validation.
 */
public OrcWriter(
        OrcDataSink orcDataSink,
        List<String> columnNames,
        List<Type> types,
        ColumnMetadata<OrcType> orcTypes,
        CompressionKind compression,
        OrcWriterOptions options,
        boolean writeLegacyVersion,
        Map<String, String> userMetadata,
        DateTimeZone hiveStorageTimeZone,
        boolean validate,
        OrcWriteValidationMode validationMode,
        OrcWriterStats stats)
{
    this.validationBuilder = validate ? new OrcWriteValidationBuilder(validationMode, types)
            .setStringStatisticsLimitInBytes(toIntExact(options.getMaxStringStatisticsLimit().toBytes())) : null;

    this.orcDataSink = requireNonNull(orcDataSink, "orcDataSink is null");
    this.types = ImmutableList.copyOf(requireNonNull(types, "types is null"));
    this.compression = requireNonNull(compression, "compression is null");
    recordValidation(validation -> validation.setCompression(compression));
    recordValidation(validation -> validation.setTimeZone(hiveStorageTimeZone.toTimeZone().toZoneId()));

    requireNonNull(options, "options is null");
    // The check is >= 0, i.e. equality is allowed; the message reflects that.
    checkArgument(options.getStripeMaxSize().compareTo(options.getStripeMinSize()) >= 0, "stripeMaxSize must be greater than or equal to stripeMinSize");
    int stripeMinBytes = toIntExact(requireNonNull(options.getStripeMinSize(), "stripeMinSize is null").toBytes());
    this.stripeMaxBytes = toIntExact(requireNonNull(options.getStripeMaxSize(), "stripeMaxSize is null").toBytes());
    this.chunkMaxLogicalBytes = Math.max(1, stripeMaxBytes / 2);
    this.stripeMaxRowCount = options.getStripeMaxRowCount();
    this.rowGroupMaxRowCount = options.getRowGroupMaxRowCount();
    recordValidation(validation -> validation.setRowGroupMaxRowCount(rowGroupMaxRowCount));
    this.maxCompressionBufferSize = toIntExact(options.getMaxCompressionBufferSize().toBytes());

    // The writer version is always recorded in the user metadata.
    this.userMetadata = ImmutableMap.<String, String>builder()
            .putAll(requireNonNull(userMetadata, "userMetadata is null"))
            .put(PRESTO_ORC_WRITER_VERSION_METADATA_KEY, PRESTO_ORC_WRITER_VERSION)
            .build();
    this.metadataWriter = new CompressedMetadataWriter(new OrcMetadataWriter(writeLegacyVersion), compression, maxCompressionBufferSize);
    this.hiveStorageTimeZone = requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
    this.stats = requireNonNull(stats, "stats is null");

    requireNonNull(columnNames, "columnNames is null");
    this.orcTypes = requireNonNull(orcTypes, "orcTypes is null");
    recordValidation(validation -> validation.setColumnNames(columnNames));

    // create column writers
    OrcType rootType = orcTypes.get(ROOT_COLUMN);
    checkArgument(rootType.getFieldCount() == types.size(), "the root type field count must match the number of types");
    ImmutableList.Builder<ColumnWriter> columnWriters = ImmutableList.builder();
    ImmutableSet.Builder<SliceDictionaryColumnWriter> sliceColumnWriters = ImmutableSet.builder();
    for (int fieldId = 0; fieldId < types.size(); fieldId++) {
        OrcColumnId fieldColumnIndex = rootType.getFieldTypeIndex(fieldId);
        Type fieldType = types.get(fieldId);
        ColumnWriter columnWriter = createColumnWriter(fieldColumnIndex, orcTypes, fieldType, compression, maxCompressionBufferSize, hiveStorageTimeZone, options.getMaxStringStatisticsLimit());
        columnWriters.add(columnWriter);

        // Collect all dictionary-capable writers (top level or nested) for the optimizer.
        if (columnWriter instanceof SliceDictionaryColumnWriter) {
            sliceColumnWriters.add((SliceDictionaryColumnWriter) columnWriter);
        }
        else {
            for (ColumnWriter nestedColumnWriter : columnWriter.getNestedColumnWriters()) {
                if (nestedColumnWriter instanceof SliceDictionaryColumnWriter) {
                    sliceColumnWriters.add((SliceDictionaryColumnWriter) nestedColumnWriter);
                }
            }
        }
    }
    this.columnWriters = columnWriters.build();
    this.dictionaryCompressionOptimizer = new DictionaryCompressionOptimizer(
            sliceColumnWriters.build(),
            stripeMinBytes,
            stripeMaxBytes,
            stripeMaxRowCount,
            toIntExact(requireNonNull(options.getDictionaryMaxMemory(), "dictionaryMaxMemory is null").toBytes()));

    for (Entry<String, String> entry : this.userMetadata.entrySet()) {
        recordValidation(validation -> validation.addMetadataProperty(entry.getKey(), utf8Slice(entry.getValue())));
    }

    this.previouslyRecordedSizeInBytes = getRetainedBytes();
    stats.updateSizeInBytes(previouslyRecordedSizeInBytes);
}
/**
 * Number of bytes already flushed to the data sink.
 * Does not include bytes still buffered in the writer (see {@link #getBufferedBytes()}).
 */
public long getWrittenBytes()
{
    return orcDataSink.size();
}
/**
 * Number of pending bytes not yet flushed.
 * Updated by {@code writeChunk} and reset when a stripe is flushed.
 */
public int getBufferedBytes()
{
    return bufferedBytes;
}
/**
 * Total memory retained by this writer: the instance itself, open column
 * writers, closed stripe metadata, the sink's buffers, the optional
 * validation builder, and the retained file statistics.
 */
public long getRetainedBytes()
{
    long retained = INSTANCE_SIZE;
    retained += columnWritersRetainedBytes;
    retained += closedStripesRetainedBytes;
    retained += orcDataSink.getRetainedSizeInBytes();
    if (validationBuilder != null) {
        retained += validationBuilder.getRetainedSize();
    }
    retained += fileStatsRetainedBytes;
    return retained;
}
/**
 * Appends a page to the file. The page is split into chunks that respect the
 * row-group and stripe row limits (and a logical-size budget); stripes are
 * flushed by {@code writeChunk} as limits are hit.
 */
public void write(Page page)
        throws IOException
{
    requireNonNull(page, "page is null");
    if (page.getPositionCount() == 0) {
        return;
    }

    checkArgument(page.getChannelCount() == columnWriters.size());

    if (validationBuilder != null) {
        validationBuilder.addPage(page);
    }

    while (page != null) {
        // align page to row group boundaries
        int chunkRows = min(page.getPositionCount(), min(rowGroupMaxRowCount - rowGroupRowCount, stripeMaxRowCount - stripeRowCount));
        Page chunk = page.getRegion(0, chunkRows);

        // avoid chunk with huge logical size: halve the row count until it fits the budget
        while (chunkRows > 1 && chunk.getLogicalSizeInBytes() > chunkMaxLogicalBytes) {
            chunkRows /= 2;
            chunk = chunk.getRegion(0, chunkRows);
        }

        // the remainder of the page (if any) is handled by the next loop iteration
        if (chunkRows < page.getPositionCount()) {
            page = page.getRegion(chunkRows, page.getPositionCount() - chunkRows);
        }
        else {
            page = null;
        }

        writeChunk(chunk);
        fileRowCount += chunkRows;
    }

    // keep the externally reported memory accounting up to date
    long recordedSizeInBytes = getRetainedBytes();
    stats.updateSizeInBytes(recordedSizeInBytes - previouslyRecordedSizeInBytes);
    previouslyRecordedSizeInBytes = recordedSizeInBytes;
}
/**
 * Writes one pre-sized chunk (see {@code write}): feeds each channel to its
 * column writer, then finishes the row group and/or flushes the stripe when
 * the corresponding limits are reached.
 */
private void writeChunk(Page chunk)
        throws IOException
{
    if (rowGroupRowCount == 0) {
        columnWriters.forEach(ColumnWriter::beginRowGroup);
    }

    // write chunks
    bufferedBytes = 0;
    for (int channel = 0; channel < chunk.getChannelCount(); channel++) {
        ColumnWriter writer = columnWriters.get(channel);
        writer.writeBlock(chunk.getBlock(channel));
        bufferedBytes += writer.getBufferedBytes();
    }

    // update stats
    rowGroupRowCount += chunk.getPositionCount();
    // write() sizes chunks so this limit can never be exceeded
    checkState(rowGroupRowCount <= rowGroupMaxRowCount);
    stripeRowCount += chunk.getPositionCount();

    // record checkpoint if necessary
    if (rowGroupRowCount == rowGroupMaxRowCount) {
        finishRowGroup();
    }

    // convert dictionary encoded columns to direct if dictionary memory usage exceeded
    dictionaryCompressionOptimizer.optimize(bufferedBytes, stripeRowCount);

    // flush stripe if necessary
    // (recompute bufferedBytes: the optimizer may have changed the writers' buffered sizes)
    bufferedBytes = toIntExact(columnWriters.stream().mapToLong(ColumnWriter::getBufferedBytes).sum());
    if (stripeRowCount == stripeMaxRowCount) {
        flushStripe(MAX_ROWS);
    }
    else if (bufferedBytes > stripeMaxBytes) {
        flushStripe(MAX_BYTES);
    }
    else if (dictionaryCompressionOptimizer.isFull(bufferedBytes)) {
        flushStripe(DICTIONARY_FULL);
    }

    columnWritersRetainedBytes = columnWriters.stream().mapToLong(ColumnWriter::getRetainedBytes).sum();
}
/**
 * Closes the current row group: collects per-column statistics, records them
 * for validation, and resets the row-group counter.
 */
private void finishRowGroup()
{
    Map<OrcColumnId, ColumnStatistics> columnStatistics = new HashMap<>();
    for (ColumnWriter columnWriter : columnWriters) {
        columnStatistics.putAll(columnWriter.finishRowGroup());
    }
    recordValidation(validation -> validation.addRowGroupStatistics(columnStatistics));
    rowGroupRowCount = 0;
}
/**
 * Flushes the current stripe to the sink (prefixing the ORC magic before the
 * very first stripe, and appending the file footer when closing), then resets
 * all per-stripe state so the next stripe can begin.
 */
private void flushStripe(FlushReason flushReason)
        throws IOException
{
    List<OrcDataOutput> outputData = new ArrayList<>();
    long stripeStartOffset = orcDataSink.size();
    // add header to first stripe (this is not required but nice to have)
    if (closedStripes.isEmpty()) {
        outputData.add(createDataOutput(MAGIC));
        stripeStartOffset += MAGIC.length();
    }
    // add stripe data
    outputData.addAll(bufferStripeData(stripeStartOffset, flushReason));
    // if the file is being closed, add the file footer
    if (flushReason == CLOSED) {
        outputData.addAll(bufferFileFooter());
    }

    // write all data
    orcDataSink.write(outputData);

    // open next stripe
    columnWriters.forEach(ColumnWriter::reset);
    dictionaryCompressionOptimizer.reset();
    rowGroupRowCount = 0;
    stripeRowCount = 0;
    bufferedBytes = toIntExact(columnWriters.stream().mapToLong(ColumnWriter::getBufferedBytes).sum());
}
/**
 * Collect the data for for the stripe. This is not the actual data, but
 * instead are functions that know how to write the data.
 *
 * Also finishes the open row group, applies the final dictionary
 * optimization, builds the stripe footer, and records the closed stripe's
 * metadata for the file footer.
 */
private List<OrcDataOutput> bufferStripeData(long stripeStartOffset, FlushReason flushReason)
        throws IOException
{
    if (stripeRowCount == 0) {
        // only the final (close-time) flush may legitimately find nothing buffered
        verify(flushReason == CLOSED, "An empty stripe is not allowed");
        // column writers must be closed or the reset call will fail
        columnWriters.forEach(ColumnWriter::close);
        return ImmutableList.of();
    }

    if (rowGroupRowCount > 0) {
        finishRowGroup();
    }

    // convert any dictionary encoded column with a low compression ratio to direct
    dictionaryCompressionOptimizer.finalOptimize(bufferedBytes);

    columnWriters.forEach(ColumnWriter::close);

    List<OrcDataOutput> outputData = new ArrayList<>();
    List<Stream> allStreams = new ArrayList<>(columnWriters.size() * 3);

    // get index streams
    long indexLength = 0;
    for (ColumnWriter columnWriter : columnWriters) {
        for (StreamDataOutput indexStream : columnWriter.getIndexStreams(metadataWriter)) {
            // The ordering is critical because the stream only contain a length with no offset.
            outputData.add(indexStream);
            allStreams.add(indexStream.getStream());
            indexLength += indexStream.size();
        }
    }

    // data streams (sorted by size)
    long dataLength = 0;
    List<StreamDataOutput> dataStreams = new ArrayList<>(columnWriters.size() * 2);
    for (ColumnWriter columnWriter : columnWriters) {
        List<StreamDataOutput> streams = columnWriter.getDataStreams();
        dataStreams.addAll(streams);
        dataLength += streams.stream()
                .mapToLong(StreamDataOutput::size)
                .sum();
    }
    Collections.sort(dataStreams);

    // add data streams
    for (StreamDataOutput dataStream : dataStreams) {
        // The ordering is critical because the stream only contain a length with no offset.
        outputData.add(dataStream);
        allStreams.add(dataStream.getStream());
    }

    Map<OrcColumnId, ColumnEncoding> columnEncodings = new HashMap<>();
    columnWriters.forEach(columnWriter -> columnEncodings.putAll(columnWriter.getColumnEncodings()));

    Map<OrcColumnId, ColumnStatistics> columnStatistics = new HashMap<>();
    columnWriters.forEach(columnWriter -> columnStatistics.putAll(columnWriter.getColumnStripeStatistics()));

    // the 0th column is a struct column for the whole row
    columnEncodings.put(ROOT_COLUMN, new ColumnEncoding(DIRECT, 0));
    columnStatistics.put(ROOT_COLUMN, new ColumnStatistics((long) stripeRowCount, 0, null, null, null, null, null, null, null, null));

    // add footer
    Optional<ZoneId> timeZone = Optional.of(hiveStorageTimeZone.toTimeZone().toZoneId());
    StripeFooter stripeFooter = new StripeFooter(allStreams, toColumnMetadata(columnEncodings, orcTypes.size()), timeZone);
    Slice footer = metadataWriter.writeStripeFooter(stripeFooter);
    outputData.add(createDataOutput(footer));

    // create final stripe statistics
    StripeStatistics statistics = new StripeStatistics(toColumnMetadata(columnStatistics, orcTypes.size()));
    recordValidation(validation -> validation.addStripeStatistics(stripeStartOffset, statistics));
    StripeInformation stripeInformation = new StripeInformation(stripeRowCount, stripeStartOffset, indexLength, dataLength, footer.length());
    ClosedStripe closedStripe = new ClosedStripe(stripeInformation, statistics);
    closedStripes.add(closedStripe);
    closedStripesRetainedBytes += closedStripe.getRetainedSizeInBytes();
    recordValidation(validation -> validation.addStripe(stripeInformation.getNumberOfRows()));
    stats.recordStripeWritten(flushReason, stripeInformation.getTotalLength(), stripeInformation.getNumberOfRows(), dictionaryCompressionOptimizer.getDictionaryMemoryBytes());

    return outputData;
}
@Override
public void close()
        throws IOException
{
    // idempotent: subsequent calls are no-ops
    if (closed) {
        return;
    }
    closed = true;

    // retract the previously reported buffered size before flushing
    stats.updateSizeInBytes(-previouslyRecordedSizeInBytes);
    previouslyRecordedSizeInBytes = 0;

    // flush the remaining buffered data; try-with-resources guarantees the
    // data sink is closed even if the final flush throws
    try (Closeable ignored = orcDataSink) {
        flushStripe(CLOSED);
    }
}
/**
 * Collect the data for the file footer. This is not the actual data, but
 * instead are functions that know how to write the data.
 *
 * The output order — metadata, footer, postscript, postscript length byte —
 * matches the ORC file tail layout and must not be changed.
 *
 * @return the ordered outputs making up the file tail
 * @throws IOException if serializing the footer fails
 */
private List<OrcDataOutput> bufferFileFooter()
        throws IOException
{
    List<OrcDataOutput> outputData = new ArrayList<>();

    // per-stripe statistics section
    Metadata metadata = new Metadata(closedStripes.stream()
            .map(ClosedStripe::getStatistics)
            .map(Optional::of)
            .collect(toList()));
    Slice metadataSlice = metadataWriter.writeMetadata(metadata);
    outputData.add(createDataOutput(metadataSlice));

    // merge per-stripe statistics into file-level statistics
    fileStats = toFileStats(closedStripes.stream()
            .map(ClosedStripe::getStatistics)
            .map(StripeStatistics::getColumnStatistics)
            .collect(toList()));
    fileStatsRetainedBytes = fileStats.map(stats -> stats.stream()
            .mapToLong(ColumnStatistics::getRetainedSizeInBytes)
            .sum()).orElse(0L);

    recordValidation(validation -> validation.setFileStatistics(fileStats));

    Map<String, Slice> userMetadata = this.userMetadata.entrySet().stream()
            .collect(Collectors.toMap(Entry::getKey, entry -> utf8Slice(entry.getValue())));

    Footer footer = new Footer(
            fileRowCount,
            rowGroupMaxRowCount == 0 ? OptionalInt.empty() : OptionalInt.of(rowGroupMaxRowCount),
            closedStripes.stream()
                    .map(ClosedStripe::getStripeInformation)
                    .collect(toImmutableList()),
            orcTypes,
            fileStats,
            userMetadata);

    // the stripe metadata is now in the footer; release the retained copies
    closedStripes.clear();
    closedStripesRetainedBytes = 0;

    Slice footerSlice = metadataWriter.writeFooter(footer);
    outputData.add(createDataOutput(footerSlice));

    recordValidation(validation -> validation.setVersion(metadataWriter.getOrcMetadataVersion()));

    Slice postscriptSlice = metadataWriter.writePostscript(footerSlice.length(), metadataSlice.length(), compression, maxCompressionBufferSize);
    outputData.add(createDataOutput(postscriptSlice));
    // last byte of the file is the postscript length; it must fit in one unsigned byte
    outputData.add(createDataOutput(Slices.wrappedBuffer(UnsignedBytes.checkedCast(postscriptSlice.length()))));
    return outputData;
}
/**
 * Applies {@code task} to the validation builder when validation is enabled;
 * does nothing otherwise.
 *
 * @param task operation to record against the validation state
 */
private void recordValidation(Consumer<OrcWriteValidationBuilder> task)
{
    OrcWriteValidationBuilder builder = validationBuilder;
    if (builder == null) {
        return;
    }
    task.accept(builder);
}
/**
 * Validates the written file against the recorded validation state.
 * Only usable when validation was enabled at construction time.
 *
 * @param input source to read the written file from
 * @throws OrcCorruptionException if the written file does not match the recorded state
 */
public void validate(OrcDataSource input)
        throws OrcCorruptionException
{
    if (validationBuilder == null) {
        throw new IllegalStateException("validation is not enabled");
    }
    validateFile(validationBuilder.build(), input, types, hiveStorageTimeZone);
}
/**
 * Returns the total number of rows written to the file.
 * Only available after the writer has been closed.
 */
public long getFileRowCount()
{
    if (!closed) {
        throw new IllegalStateException("File row count is not available until the writing has finished");
    }
    return fileRowCount;
}
/**
 * Returns the merged file-level column statistics, if any.
 * Only available after the writer has been closed.
 */
public Optional<ColumnMetadata<ColumnStatistics>> getFileStats()
{
    if (!closed) {
        throw new IllegalStateException("File statistics are not available until the writing has finished");
    }
    return fileStats;
}
/**
 * Converts a map keyed by column id into positional column metadata covering
 * column ids {@code 0} (inclusive) through {@code expectedSize} (exclusive).
 *
 * @param data per-column values; expected to contain one entry for each
 *        column id in {@code [0, expectedSize)}
 * @param expectedSize the number of columns
 * @return positional metadata, indexed by column id
 */
private static <T> ColumnMetadata<T> toColumnMetadata(Map<OrcColumnId, T> data, int expectedSize)
{
    // include both sizes in the message so a mismatch is diagnosable (the bare
    // checkArgument produced an IllegalArgumentException with no context)
    checkArgument(data.size() == expectedSize, "data size %s does not match expected size %s", data.size(), expectedSize);
    List<T> list = new ArrayList<>(expectedSize);
    for (int i = 0; i < expectedSize; i++) {
        // if a column id in range is missing despite the size match, the null
        // entry is rejected below by ImmutableList.copyOf
        list.add(data.get(new OrcColumnId(i)));
    }
    return new ColumnMetadata<>(ImmutableList.copyOf(list));
}
/**
 * Merges the per-stripe column statistics into file-level column statistics.
 *
 * @param stripes per-stripe statistics; every stripe must report the same number of columns
 * @return merged statistics per column, or empty when there are no stripes
 */
private static Optional<ColumnMetadata<ColumnStatistics>> toFileStats(List<ColumnMetadata<ColumnStatistics>> stripes)
{
    if (stripes.isEmpty()) {
        return Optional.empty();
    }

    // all stripes must agree on the column count
    int columnCount = stripes.get(0).size();
    for (ColumnMetadata<ColumnStatistics> stripe : stripes) {
        checkArgument(stripe.size() == columnCount);
    }

    List<ColumnStatistics> mergedStats = new ArrayList<>(columnCount);
    for (int column = 0; column < columnCount; column++) {
        OrcColumnId columnId = new OrcColumnId(column);
        List<ColumnStatistics> statsForColumn = new ArrayList<>(stripes.size());
        for (ColumnMetadata<ColumnStatistics> stripe : stripes) {
            statsForColumn.add(stripe.get(columnId));
        }
        mergedStats.add(ColumnStatistics.mergeColumnStatistics(statsForColumn));
    }
    return Optional.of(new ColumnMetadata<>(ImmutableList.copyOf(mergedStats)));
}
/**
 * Immutable record of a stripe that has already been buffered: its placement
 * information and its column statistics, kept until the file footer is written.
 */
private static class ClosedStripe
{
    private static final int INSTANCE_SIZE = ClassLayout.parseClass(ClosedStripe.class).instanceSize() + ClassLayout.parseClass(StripeInformation.class).instanceSize();

    private final StripeInformation stripeInformation;
    private final StripeStatistics statistics;

    public ClosedStripe(StripeInformation stripeInformation, StripeStatistics statistics)
    {
        this.stripeInformation = requireNonNull(stripeInformation, "stripeInformation is null");
        // message fixed to match the actual parameter name (was "stripeStatistics is null")
        this.statistics = requireNonNull(statistics, "statistics is null");
    }

    public StripeInformation getStripeInformation()
    {
        return stripeInformation;
    }

    public StripeStatistics getStatistics()
    {
        return statistics;
    }

    /**
     * Estimated heap bytes retained by this object, including the referenced
     * statistics (used for memory accounting of buffered stripes).
     */
    public long getRetainedSizeInBytes()
    {
        return INSTANCE_SIZE + statistics.getRetainedSizeInBytes();
    }
}
}
| |
/*
Copyright 2011-2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.API.disassembly;
import java.awt.Color;
import java.util.ArrayList;
import java.util.List;
import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.CUtilityFunctions;
import com.google.security.zynamics.binnavi.API.reil.InternalTranslationException;
import com.google.security.zynamics.binnavi.API.reil.ReilGraph;
import com.google.security.zynamics.binnavi.APIHelpers.ObjectFinders;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntDeleteException;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntLoadDataException;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.CommentDialogs.Interfaces.IComment;
import com.google.security.zynamics.binnavi.REIL.InstructionFinders;
import com.google.security.zynamics.binnavi.REIL.ReilGraphConverter;
import com.google.security.zynamics.binnavi.disassembly.CNaviCodeNodeListenerAdapter;
import com.google.security.zynamics.binnavi.disassembly.INaviCodeNode;
import com.google.security.zynamics.binnavi.disassembly.INaviInstruction;
import com.google.security.zynamics.reil.translators.ReilTranslator;
import com.google.security.zynamics.reil.translators.StandardEnvironment;
import com.google.security.zynamics.zylib.general.ListenerProvider;
// / Represents a view node that shows instructions.
/**
* A code node is a node that can be part of view graphs. Each code node represents a basic block.
*/
public final class CodeNode extends ViewNode {
  /**
   * Wrapped internal code node object.
   */
  private final INaviCodeNode m_node;

  /**
   * Instructions of the code node.
   */
  private final List<Instruction> m_instructions = new ArrayList<>();

  /**
   * Listeners that are notified about changes in the code node.
   */
  private final ListenerProvider<ICodeNodeListener> m_listeners =
      new ListenerProvider<>();

  /**
   * Keeps the API code node synchronized with the internal code node.
   */
  private final InternalListener m_internalListener = new InternalListener();

  /**
   * REIL translator used to translate the code node to REIL.
   */
  private final ReilTranslator<INaviInstruction> m_translator =
      new ReilTranslator<>();

  /**
   * REIL graph of the code node. Created lazily and cached by {@link #getReilCode()}.
   */
  private ReilGraph m_reilGraph;

  // / @cond INTERNAL
  /**
   * Creates a new API code node object.
   *
   * @param view View the code node belongs to.
   * @param node The wrapped internal code node object.
   * @param tagManager Tag manager used to tag the node.
   */
  // / @endcond
  public CodeNode(final View view, final INaviCodeNode node, final TagManager tagManager) {
    super(view, node, tagManager);

    m_node = node;

    for (final INaviInstruction instruction : m_node.getInstructions()) {
      m_instructions.add(new Instruction(instruction));
    }

    node.addListener(m_internalListener);
  }

  @Override
  protected String getName() {
    return String.format("Code Node %s", m_node.getAddress().toHexString());
  }

  @Override
  public INaviCodeNode getNative() {
    return m_node;
  }

  // ! Adds an instruction to the code node.
  /**
   * Adds an instruction to the code node. The instruction is appended at the end of the code node.
   *
   * Please note that not the instruction object you pass to the function is added to the code node.
   * Rather, a copy of the passed instruction object is made and added to the code node. This
   * guarantees that each instruction object appears only once in a view.
   *
   * @param instruction The instruction to add to the code node.
   *
   * @return The instruction object that was really added to the code node.
   */
  public Instruction addInstruction(final Instruction instruction) {
    Preconditions.checkNotNull(instruction, "Error: Instruction argument can not be null");

    // TODO (timkornau): this is not checked if this does what it should
    m_node.addInstruction(instruction.getNative(), null);

    return instruction;
    // return ObjectFinders.getObject(instruction, m_instructions);
  }

  // ! Adds a code node listener.
  /**
   * Adds a listener object that is notified about changes in the code node.
   *
   * @param listener The listener that is added to the code node.
   */
  public void addListener(final ICodeNodeListener listener) {
    m_listeners.addListener(listener);
  }

  // ! Start address of the code node.
  /**
   * Returns the address of the code node. The address of a code node equals the address of the
   * first instruction of the code node.
   *
   * @return The address of the code node.
   */
  public Address getAddress() {
    return new Address(m_node.getAddress().toBigInteger());
  }

  // ! Local comments of the code node.
  /**
   * Returns the local comment associated with the code node.
   *
   * @return The local comment associated with the code node.
   */
  public List<IComment> getLocalComments() {
    return m_node.getComments().getLocalCodeNodeComment();
  }

  // ! Global comments of the code node.
  /**
   * Returns the global comment associated with the code node.
   *
   * @return The global comment associated with the code node.
   */
  public List<IComment> getGlobalComments() {
    return m_node.getComments().getGlobalCodeNodeComment();
  }

  // ! Initializes the local code node comments.
  /**
   * Initializes the local code node comments.
   *
   * @param comments The List of {@link IComment} to associate to the code node.
   */
  public void initializeLocalComment(final List<IComment> comments) {
    m_node.getComments().initializeLocalCodeNodeComment(comments);
  }

  // ! Initializes the global code node comments.
  /**
   * Initializes the global code node comments.
   *
   * @param comments The list of {@link IComment} to associate to the code node.
   */
  public void initializeGlobalComment(final List<IComment> comments) {
    m_node.getComments().initializeGlobalCodeNodeComment(comments);
  }

  // ! Initialize the local instruction comments.
  /**
   * Initialize the local instruction comments.
   *
   * @param instruction The instruction to associate the comments with.
   * @param comments The List of {@link IComment} to associate to the {@link Instruction}.
   */
  public void initializeLocalInstructionComment(final Instruction instruction,
      final List<IComment> comments) {
    m_node.getComments().initializeLocalInstructionComment(instruction.getNative(), comments);
  }

  // ! Append a local code node comment.
  /**
   * Append a local code node comment.
   *
   * (The previous documentation said "global"; this method appends a local comment.)
   *
   * @param comment The comment string for the new comment to append.
   *
   * @return The list of currently associated local comments of the code node after the append
   *         operation on success null on failure.
   * @throws com.google.security.zynamics.binnavi.API.disassembly.CouldntSaveDataException
   * @throws com.google.security.zynamics.binnavi.API.disassembly.CouldntLoadDataException
   */
  public List<IComment> appendLocalComment(final String comment)
      throws com.google.security.zynamics.binnavi.API.disassembly.CouldntSaveDataException,
      com.google.security.zynamics.binnavi.API.disassembly.CouldntLoadDataException {
    List<IComment> currentComments = new ArrayList<>();
    try {
      currentComments = m_node.getComments().appendLocalCodeNodeComment(comment);
    } catch (final CouldntSaveDataException exception) {
      // translate the internal exception into the API exception type
      throw new com.google.security.zynamics.binnavi.API.disassembly.CouldntSaveDataException(
          exception);
    } catch (final CouldntLoadDataException exception) {
      throw new com.google.security.zynamics.binnavi.API.disassembly.CouldntLoadDataException(
          exception);
    }
    return currentComments;
  }

  // ! Edit a local code node comment.
  /**
   * Edit a local code node comment.
   *
   * @param comment The comment which is edited.
   * @param newComment The new comment string to replace the old comment string.
   *
   * @return The edited comment.
   * @throws com.google.security.zynamics.binnavi.API.disassembly.CouldntSaveDataException
   */
  public IComment editLocalComment(final IComment comment, final String newComment)
      throws com.google.security.zynamics.binnavi.API.disassembly.CouldntSaveDataException {
    try {
      return m_node.getComments().editLocalCodeNodeComment(comment, newComment);
    } catch (final CouldntSaveDataException exception) {
      throw new com.google.security.zynamics.binnavi.API.disassembly.CouldntSaveDataException(
          exception);
    }
  }

  // ! Delete a local code node comment.
  /**
   * Delete a local code node comment.
   *
   * @param comment The {@link IComment} to delete.
   * @throws com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException
   */
  public void deleteLocalComment(final IComment comment)
      throws com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException {
    try {
      m_node.getComments().deleteLocalCodeNodeComment(comment);
    } catch (final CouldntDeleteException exception) {
      throw new com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException(
          exception);
    }
  }

  // ! Append a global code node comment.
  /**
   * Append a global code node comment.
   *
   * @param comment The {@link String} comment to append.
   * @return The List of global comments currently associated to the code node if append operation
   *         was successful null otherwise.
   * @throws com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException
   * @throws com.google.security.zynamics.binnavi.API.disassembly.CouldntLoadDataException
   */
  public List<IComment> appendGlobalComment(final String comment)
      throws com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException,
      com.google.security.zynamics.binnavi.API.disassembly.CouldntLoadDataException {
    try {
      return m_node.getComments().appendGlobalCodeNodeComment(comment);
    } catch (final CouldntSaveDataException exception) {
      // NOTE(review): a save failure is rethrown as CouldntDeleteException here;
      // the declared throws clause cannot be widened without breaking callers —
      // confirm whether the API exception types should be revised.
      throw new com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException(
          exception);
    } catch (final CouldntLoadDataException exception) {
      throw new com.google.security.zynamics.binnavi.API.disassembly.CouldntLoadDataException(
          exception);
    }
  }

  // ! Edit a global code node comment.
  /**
   * Edit a global code node comment.
   *
   * @param comment The {@link IComment} to be edited.
   * @param newComment The {@link String} to replace the comment text.
   * @return The edited {@link IComment}, or null if the edit failed.
   */
  public IComment editGlobalComment(final IComment comment, final String newComment) {
    try {
      // Return the edited comment as documented; the previous implementation
      // discarded the result and returned null even on success.
      return m_node.getComments().editGlobalCodeNodeComment(comment, newComment);
    } catch (final CouldntSaveDataException exception) {
      CUtilityFunctions.logException(exception);
    }
    return null;
  }

  // ! Delete a global code node comment.
  /**
   * Delete a global code node comment.
   *
   * @param comment The {@link IComment} to delete.
   * @throws com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException
   */
  public void deleteGlobalComment(final IComment comment)
      throws com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException {
    try {
      m_node.getComments().deleteGlobalCodeNodeComment(comment);
    } catch (final CouldntDeleteException exception) {
      throw new com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException(
          exception);
    }
  }

  // ! Edit a local instruction comment.
  /**
   * Edit a local instruction comment.
   *
   * @param instruction The {@link Instruction} to which the comment is associated.
   * @param comment The {@link IComment} which is edited.
   * @param newComment The {@link String} to edit the comment with.
   *
   * @return The edited {@link IComment} if successful null otherwise.
   * @throws com.google.security.zynamics.binnavi.API.disassembly.CouldntSaveDataException
   */
  public IComment editLocalInstructionComment(final Instruction instruction,
      final IComment comment, final String newComment)
      throws com.google.security.zynamics.binnavi.API.disassembly.CouldntSaveDataException {
    try {
      return m_node.getComments().editLocalInstructionComment(instruction.getNative(), comment, newComment);
    } catch (final CouldntSaveDataException exception) {
      throw new com.google.security.zynamics.binnavi.API.disassembly.CouldntSaveDataException(
          exception);
    }
  }

  // ! Delete a local instruction comment.
  /**
   * Delete a local instruction comment.
   *
   * @param instruction The instruction the comment is currently associated to.
   * @param comment The comment to delete.
   * @throws com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException
   */
  public void deleteLocalInstructionComment(final Instruction instruction, final IComment comment)
      throws com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException {
    try {
      m_node.getComments().deleteLocalInstructionComment(instruction.getNative(), comment);
    } catch (final CouldntDeleteException exception) {
      throw new com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException(
          exception);
    }
  }

  // ! Append a local instruction comment.
  /**
   * Append a local instruction comment.
   *
   * @param instruction The instruction to which the comment will be associated.
   * @param comment The text for the comment to append.
   *
   * @return The list of local comments currently associated to the instruction after the append
   *         operation was successful.
   *
   * @throws com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException
   * @throws com.google.security.zynamics.binnavi.API.disassembly.CouldntLoadDataException
   */
  public List<IComment> appendLocalInstructionComment(final Instruction instruction,
      final String comment)
      throws com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException,
      com.google.security.zynamics.binnavi.API.disassembly.CouldntLoadDataException {
    try {
      return m_node.getComments().appendLocalInstructionComment(instruction.getNative(), comment);
    } catch (final CouldntSaveDataException exception) {
      // NOTE(review): a save failure is rethrown as CouldntDeleteException here;
      // the declared throws clause cannot be widened without breaking callers —
      // confirm whether the API exception types should be revised.
      throw new com.google.security.zynamics.binnavi.API.disassembly.CouldntDeleteException(
          exception);
    } catch (final CouldntLoadDataException exception) {
      throw new com.google.security.zynamics.binnavi.API.disassembly.CouldntLoadDataException(
          exception);
    }
  }

  // ! Get the instructions of the code node.
  /**
   * Returns a copy of the list of instructions of the code node.
   *
   * @return The instructions of the code node.
   */
  public List<Instruction> getInstructions() {
    return new ArrayList<Instruction>(m_instructions);
  }

  // ! REIL code of the code node.
  /**
   * Converts the code node to REIL code.
   *
   * Using this function over manual translation via ReilTranslator has the advantage that REIL
   * translation results are automatically cached. Subsequent uses of this function requires no
   * additional re-translation of the code node provided that nothing relevant (like added/removed
   * code nodes) changed.
   *
   * @return The REIL representation of the instruction.
   *
   * @throws InternalTranslationException Thrown if the REIL translation failed.
   */
  public ReilGraph getReilCode() throws InternalTranslationException {
    if (m_reilGraph == null) {
      try {
        m_reilGraph =
            ReilGraphConverter.createReilGraph(m_translator.translate(new StandardEnvironment(),
                m_node));
      } catch (final com.google.security.zynamics.reil.translators.InternalTranslationException e) {
        // map the internal exception to the API exception, attaching the API
        // instruction that failed to translate
        throw new InternalTranslationException(e, InstructionFinders.findInstruction(this,
            e.getInstruction()));
      }
    }

    return m_reilGraph;
  }

  // ! Removes an instruction from the code node.
  /**
   * Removes an instruction from the code node.
   *
   * @param instruction The instruction to remove from the code node.
   */
  public void removeInstruction(final Instruction instruction) {
    Preconditions.checkNotNull(instruction, "Error: Instruction argument can not be null");

    m_node.removeInstruction(instruction.getNative());
  }

  // ! Removes a code node listener.
  /**
   * Removes a listener object from the code node.
   *
   * @param listener The listener that is removed from the code node.
   */
  public void removeListener(final ICodeNodeListener listener) {
    m_listeners.removeListener(listener);
  }

  // ! Changes the background color of an instruction.
  /**
   * Changes the background color of an instruction in the code node.
   *
   * @param instruction The instruction whose color is changed.
   * @param level Identifies the drawing level. Lower levels are drawn at a higher priority than
   *        higher levels. Levels below 10000 are reserved for BinNavi and can not be used.
   * @param color The color used to highlight the instruction. If this argument is null,
   *        highlighting is cleared.
   */
  public void setInstructionColor(final Instruction instruction, final int level, final Color color) {
    Preconditions.checkNotNull(instruction, "Instruction argument can not be null");

    if (level < 10000) {
      throw new IllegalArgumentException("Drawing levels below 10000 are reserved for BinNavi");
    }

    m_node.setInstructionColor(instruction.getNative(), level, color);
  }

  // ! Printable representation of the code node.
  /**
   * Returns a string representation of the code node.
   *
   * @return A string representation of the code node.
   */
  @Override
  public String toString() {
    // StringBuilder instead of StringBuffer: no synchronization needed for a local
    final StringBuilder nodeString = new StringBuilder();

    for (final Instruction instruction : m_instructions) {
      nodeString.append(instruction.toString());
      nodeString.append('\n');
    }

    return nodeString.toString();
  }

  /**
   * Keeps the API code node synchronized with the internal code node.
   */
  private class InternalListener extends CNaviCodeNodeListenerAdapter {
    @Override
    public void addedInstruction(final INaviCodeNode codeNode, final INaviInstruction instruction) {
      // wrap the internal instruction and mirror it in the API-side list
      final Instruction addedInstruction = new Instruction(instruction);

      m_instructions.add(addedInstruction);

      for (final ICodeNodeListener listener : m_listeners) {
        try {
          listener.addedInstruction(CodeNode.this, addedInstruction);
        } catch (final Exception exception) {
          // a misbehaving listener must not break notification of the others
          CUtilityFunctions.logException(exception);
        }
      }
    }

    @Override
    public void removedInstruction(final INaviCodeNode codeNode, final INaviInstruction instruction) {
      final Instruction removedInstruction = ObjectFinders.getObject(instruction, m_instructions);

      m_instructions.remove(removedInstruction);

      for (final ICodeNodeListener listener : m_listeners) {
        try {
          listener.removedInstruction(CodeNode.this, removedInstruction);
        } catch (final Exception exception) {
          CUtilityFunctions.logException(exception);
        }
      }
    }
  }
}
| |
package com.groupon.lex.metrics.history.xdr.support;
import com.google.common.collect.Iterators;
import com.groupon.lex.metrics.GroupName;
import com.groupon.lex.metrics.MetricName;
import com.groupon.lex.metrics.MetricValue;
import com.groupon.lex.metrics.history.v2.Compression;
import com.groupon.lex.metrics.history.v2.DictionaryForWrite;
import com.groupon.lex.metrics.history.v2.tables.DictionaryDelta;
import com.groupon.lex.metrics.history.v2.xdr.FromXdr;
import com.groupon.lex.metrics.history.v2.xdr.ToXdr;
import com.groupon.lex.metrics.history.v2.xdr.dictionary_delta;
import com.groupon.lex.metrics.history.v2.xdr.metric_value;
import com.groupon.lex.metrics.history.xdr.ColumnMajorTSData;
import com.groupon.lex.metrics.lib.GCCloseable;
import com.groupon.lex.metrics.lib.SimpleMapEntry;
import com.groupon.lex.metrics.timeseries.TimeSeriesCollection;
import gnu.trove.iterator.TLongIterator;
import gnu.trove.list.TLongList;
import gnu.trove.list.array.TLongArrayList;
import java.io.IOException;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Collection;
import static java.util.Collections.emptyIterator;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static java.util.Collections.singleton;
import static java.util.Collections.synchronizedSet;
import static java.util.Collections.unmodifiableSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.Value;
import org.acplt.oncrpc.OncRpcException;
import org.acplt.oncrpc.XdrAble;
import org.acplt.oncrpc.XdrDecodingStream;
import org.acplt.oncrpc.XdrEncodingStream;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
public class TmpFileBasedColumnMajorTSData implements ColumnMajorTSData {
private static final Compression TMP_FILE_COMPRESSION = Compression.NONE;
private final TLongList timestamps;
private final Map<GroupName, Group> groups;
private final Map<GroupName, Set<DateTime>> timestampsByGroup;
/**
 * Creates a new, empty builder for incrementally adding time series collections.
 */
public static Builder builder() {
    return new Builder();
}
/**
 * Incrementally accumulates time series collections into per-group,
 * per-metric temporary-file-backed columns, then produces an immutable
 * {@link TmpFileBasedColumnMajorTSData}.
 *
 * Collections must be added in the desired timestamp order; each call to
 * {@code with} appends to the shared timestamps list.
 */
public static class Builder {
    private final TLongList timestamps = new TLongArrayList();
    private final Map<GroupName, GroupWriter> writers = new ConcurrentHashMap<>();
    private final Map<GroupName, Set<DateTime>> timestampsByGroup = new ConcurrentHashMap<>();

    private Builder() {
        /* SKIP */
    }

    /** Adds a single collection; see {@link #with(Collection)}. */
    public Builder with(TimeSeriesCollection tsc) throws IOException {
        return with(singleton(tsc));
    }

    /**
     * Adds the given collections, in iteration order.
     *
     * @throws IOException if writing to a temporary file fails; the
     *         exception is tunneled out of the parallel stream via
     *         {@link RuntimeIOException}
     */
    public Builder with(Collection<? extends TimeSeriesCollection> tsdata) throws IOException {
        try {
            for (final TimeSeriesCollection tsc : tsdata) {
                tsc.getTSValues().parallelStream()
                        .forEach(tsv -> {
                            // Record the group's presence at this timestamp.
                            // This side effect used to live in a peek() stage,
                            // which is meant for debugging only; doing all the
                            // work in the terminal forEach keeps the pipeline
                            // side-effect placement explicit.
                            timestampsByGroup.computeIfAbsent(tsv.getGroup(), (g) -> synchronizedSet(new HashSet<>()))
                                    .add(tsc.getTimestamp());
                            final GroupWriter groupWriter = writers.computeIfAbsent(
                                    tsv.getGroup(),
                                    (g) -> new GroupWriter());
                            try {
                                // timestamps.size() is this collection's index; the
                                // list is only appended to after the stream completes
                                groupWriter.add(timestamps.size(), tsv.getMetrics());
                            } catch (IOException ex) {
                                throw new RuntimeIOException(ex);
                            }
                        });
                timestamps.add(tsc.getTimestamp().getMillis());
            }
            return this;
        } catch (RuntimeIOException ex) {
            throw ex.getEx();
        }
    }

    /**
     * Pad all groups with empty maps, to ensure it's the same size as
     * timestamps list.
     *
     * We want to keep all the Group instances to have the same number of
     * maps as the timestamps list, so we can zip the two together.
     */
    private void fixBacklog() throws IOException {
        try {
            writers.values().parallelStream()
                    .forEach(groupWriter -> {
                        try {
                            groupWriter.fixBacklog(timestamps.size());
                        } catch (IOException ex) {
                            throw new RuntimeIOException(ex);
                        }
                    });
        } catch (RuntimeIOException ex) {
            throw ex.getEx();
        }
    }

    /**
     * Finalizes all pending writes and converts the writers into their
     * read-side counterparts.
     *
     * @throws IOException if flushing any writer fails
     */
    public TmpFileBasedColumnMajorTSData build() throws IOException {
        fixBacklog();

        final Map<GroupName, Group> groups;
        try {
            groups = writers.entrySet().parallelStream()
                    .unordered()
                    .map(groupWriter -> {
                        try {
                            return SimpleMapEntry.create(groupWriter.getKey(), groupWriter.getValue().asReader());
                        } catch (IOException ex) {
                            throw new RuntimeIOException(ex);
                        }
                    })
                    .collect(
                            HashMap<GroupName, Group>::new,
                            (map, entry) -> map.put(entry.getKey(), entry.getValue()),
                            Map::putAll);
        } catch (RuntimeIOException ex) {
            throw ex.getEx();
        }

        return new TmpFileBasedColumnMajorTSData(timestamps, groups, timestampsByGroup);
    }
}
/**
 * Returns one UTC timestamp per added collection, in the order the
 * collections were added to the builder.
 */
@Override
public Collection<DateTime> getTimestamps() {
    final List<DateTime> result = new ArrayList<>(timestamps.size());
    for (TLongIterator iter = timestamps.iterator(); iter.hasNext(); ) {
        result.add(new DateTime(iter.next(), DateTimeZone.UTC));
    }
    return result;
}
/**
 * Returns the names of every group seen in the added collections, as an
 * unmodifiable view of the internal key set.
 */
@Override
public Set<GroupName> getGroupNames() {
    return unmodifiableSet(groups.keySet());
}
/**
 * Returns the timestamps at which the given group was present, or an empty
 * collection for an unknown group. The result is unmodifiable.
 */
@Override
public Collection<DateTime> getGroupTimestamps(GroupName group) {
    return unmodifiableSet(timestampsByGroup.getOrDefault(group, emptySet()));
}
/**
 * Returns the names of the metrics recorded for the given group, or an
 * empty set when the group is unknown.
 */
@Override
public Set<MetricName> getMetricNames(GroupName group) {
    final Group groupData = groups.get(group);
    return groupData == null ? emptySet() : groupData.getMetricNames();
}
/**
 * Returns a lazy timestamp-to-value map for the given group and metric, or
 * an empty map when the group or metric is unknown.
 */
@Override
public Map<DateTime, MetricValue> getMetricValues(GroupName group, MetricName metric) {
    final Group groupData = groups.get(group);
    if (groupData != null && groupData.getMetricNames().contains(metric)) {
        return new MetricValuesMap(timestamps, groupData, metric);
    }
    return emptyMap();
}
/**
 * Lazy {@link Map} view over one metric column: timestamp to metric value,
 * skipping positions where the metric was absent.
 */
private static class MetricValuesMap extends AbstractMap<DateTime, MetricValue> {
    private final MetricValuesEntrySet entrySet;

    public MetricValuesMap(TLongList timestamps, Group groupData, MetricName metric) {
        entrySet = new MetricValuesEntrySet(timestamps, groupData, metric);
    }

    @Override
    public Set<Map.Entry<DateTime, MetricValue>> entrySet() {
        return entrySet;
    }

    @RequiredArgsConstructor
    private static class MetricValuesEntrySet extends AbstractSet<Map.Entry<DateTime, MetricValue>> {
        @NonNull
        private final TLongList timestamps;
        @NonNull
        private final Group groupData;
        @NonNull
        private final MetricName metric;

        @Override
        public Iterator<Map.Entry<DateTime, MetricValue>> iterator() {
            // drop entries where the metric was absent, then unwrap the value
            return Iterators.transform(
                    Iterators.filter(
                            groupData.iterator(timestamps, metric),
                            timestampedMetric -> timestampedMetric.getValue().isPresent()),
                    timestampedMetric -> SimpleMapEntry.create(timestampedMetric.getTimestamp(), timestampedMetric.getValue().get()));
        }

        @Override
        public int size() {
            // Count the entries the iterator actually yields. The previous
            // implementation returned timestamps.size(), which overstated the
            // size whenever the metric was absent at some timestamps, making
            // size() disagree with iterator() and breaking Map.size(),
            // isEmpty(), equals() and toString(). Note this is O(n).
            return Iterators.size(iterator());
        }
    }
}
/**
 * Write-side accumulator for one group: maps each metric name to the writer
 * building that metric's value column.
 */
private static class GroupWriter {
    private final Map<MetricName, MetricWriter> metrics = new ConcurrentHashMap<>();

    /**
     * Records the metric values of one collection.
     *
     * @param index position of the collection in the shared timestamps list;
     *        used to pad metrics that were absent in earlier collections
     * @param tsv metric name to value mapping for this collection
     * @throws IOException if writing to a metric's temporary file fails
     */
    public void add(int index, Map<MetricName, MetricValue> tsv) throws IOException {
        try {
            tsv.entrySet().parallelStream()
                    .forEach(entry -> {
                        // create the column writer on first sight of this metric;
                        // checked IOException is tunneled out of the lambda as
                        // RuntimeIOException and unwrapped below
                        final MetricWriter f = metrics.computeIfAbsent(
                                entry.getKey(),
                                (m) -> {
                                    try {
                                        return new MetricWriter();
                                    } catch (IOException ex) {
                                        throw new RuntimeIOException(ex);
                                    }
                                });
                        try {
                            // pad with absent values up to this collection's index,
                            // then append the actual value
                            f.fixBacklog(index);
                            f.add(entry.getValue());
                        } catch (IOException ex) {
                            throw new RuntimeIOException(ex);
                        }
                    });
        } catch (RuntimeIOException ex) {
            throw ex.getEx();
        }
    }

    /**
     * Pads every metric column with absent values up to {@code padUntil}
     * entries, so all columns stay aligned with the timestamps list.
     */
    public void fixBacklog(int padUntil) throws IOException {
        try {
            metrics.values().parallelStream()
                    .forEach(f -> {
                        try {
                            f.fixBacklog(padUntil);
                        } catch (IOException ex) {
                            throw new RuntimeIOException(ex);
                        }
                    });
        } catch (RuntimeIOException ex) {
            throw ex.getEx();
        }
    }

    /**
     * Flushes all metric writers and converts this group into its read-side
     * representation.
     */
    public Group asReader() throws IOException {
        final Map<MetricName, Metric> mapping = new HashMap<>();

        for (Map.Entry<MetricName, MetricWriter> metricEntry
                     : metrics.entrySet())
            mapping.put(metricEntry.getKey(), metricEntry.getValue().asReader());

        return new Group(mapping);
    }
}
/** Read-only view of one group's metrics, keyed by metric name. */
@AllArgsConstructor
private static class Group {
    private final Map<MetricName, Metric> metrics;

    public Set<MetricName> getMetricNames() {
        return metrics.keySet();
    }

    /**
     * Pairs the stored values of {@code metricName} with the given
     * timestamps; yields nothing when the metric is unknown.
     */
    public Iterator<TimestampedMetric> iterator(TLongList timestamps, MetricName metricName) {
        final Metric m = metrics.get(metricName);
        if (m == null) return emptyIterator();
        return m.iterator(timestamps);
    }
}
/**
 * Run-length-encoding writer for a single metric: consecutive equal values
 * (including runs of absent values) are buffered and flushed as one
 * (value, repeat-count) entry to a compressed temporary file.
 */
private static class MetricWriter {
    private final GCCloseable<TmpFile<XdrAbleMetricEntry>> tmpFile;
    private final DictionaryForWrite dictionary = new DictionaryForWrite();
    // Current open run: lastValue repeated repeatValue times, not yet flushed.
    private Optional<MetricValue> lastValue = Optional.empty();
    private int repeatValue = 0;
    // Number of values already flushed to tmpFile.
    private int writtenCount = 0;

    public MetricWriter() throws IOException {
        this.tmpFile = new GCCloseable<>(new TmpFile<>(TMP_FILE_COMPRESSION));
    }

    /**
     * Appends {@code count} copies of {@code metric}: extends the open run
     * when the value matches, otherwise flushes the finished run and starts
     * a new one with this value.
     *
     * @throws IOException if writing the flushed run fails
     */
    private void addOptMetric(@NonNull Optional<MetricValue> metric, int count) throws IOException {
        if (count == 0) return;
        if (repeatValue == 0) lastValue = metric; // no open run yet: adopt the value
        if (Objects.equals(lastValue, metric)) {
            repeatValue += count;
            return;
        }
        // Value changed: persist the finished run before starting the next.
        try {
            tmpFile.get().add(new XdrAbleMetricEntry(dictionary, lastValue, repeatValue));
            writtenCount += repeatValue;
        } catch (OncRpcException ex) {
            throw new IOException(ex);
        }
        lastValue = metric;
        repeatValue = count;
    }

    /** Appends a single present value. */
    public void add(@NonNull MetricValue metric) throws IOException {
        addOptMetric(Optional.of(metric), 1);
    }

    /** Pads with absent values until size() reaches {@code padUntil}. */
    public void fixBacklog(int padUntil) throws IOException {
        if (padUntil > size())
            addOptMetric(Optional.empty(), padUntil - size());
    }

    /** Total number of values recorded (flushed plus the open run). */
    public int size() {
        return writtenCount + repeatValue;
    }

    /**
     * Flushes any open run and returns a reader over the temp file.
     *
     * @throws IOException if the final flush fails
     */
    public Metric asReader() throws IOException {
        try {
            if (repeatValue > 0) {
                tmpFile.get().add(new XdrAbleMetricEntry(dictionary, lastValue, repeatValue));
                writtenCount += repeatValue;
                repeatValue = 0;
            }
        } catch (OncRpcException ex) {
            throw new IOException(ex);
        }
        return new Metric(tmpFile);
    }
}
/** Reader counterpart of MetricWriter: replays RLE entries from the temp file. */
@RequiredArgsConstructor
private static class Metric {
    private final GCCloseable<TmpFile<XdrAbleMetricEntry>> tmpFile;

    /**
     * Pairs stored values positionally with the given timestamps.
     * NOTE(review): assumes the value stream is at least as long as
     * {@code timestamps} — confirm callers always pad via fixBacklog() first.
     */
    public Iterator<TimestampedMetric> iterator(TLongList timestamps) {
        final TLongIterator timestampIter = timestamps.iterator();
        return Iterators.transform(
                iterator(),
                metric -> new TimestampedMetric(new DateTime(timestampIter.next(), DateTimeZone.UTC), metric));
    }

    /** Expands the RLE entries into a flat stream of optional values. */
    public Iterator<Optional<MetricValue>> iterator() {
        return Iterators.concat(new IteratorImpl(tmpFile));
    }

    /** Decodes one per-RLE-entry iterator at a time from the temp file. */
    private static class IteratorImpl implements Iterator<Iterator<Optional<MetricValue>>> {
        private final Iterator<XdrAbleMetricEntry> inner;
        // Dictionary state evolves as entries are decoded in file order.
        private DictionaryDelta dictionary = new DictionaryDelta();
        /*
         * Bind the lifetime of tmpFile to the lifetime of this iterator.
         */
        private final GCCloseable<TmpFile<XdrAbleMetricEntry>> tmpFile;

        public IteratorImpl(@NonNull GCCloseable<TmpFile<XdrAbleMetricEntry>> tmpFile) {
            this.tmpFile = tmpFile;
            try {
                inner = this.tmpFile.get().iterator(XdrAbleMetricEntry::new);
            } catch (IOException | OncRpcException ex) {
                throw new DecodingException("cannot read: decoding failed", ex);
            }
        }

        @Override
        public boolean hasNext() {
            return inner.hasNext();
        }

        @Override
        public Iterator<Optional<MetricValue>> next() {
            return inner.next()
                    .decode(dictionary, (updatedDictionary) -> dictionary = updatedDictionary);
        }
    }
}
/**
 * XDR-serializable RLE record: one (optional value, repeat count) pair plus
 * the dictionary delta needed to decode the value's strings.
 */
private static class XdrAbleMetricEntry implements XdrAble {
    private boolean present = false;
    private metric_value metric;
    private dictionary_delta dd;
    private int repeat;

    /** Decoding constructor; fields are populated by {@link #xdrDecode}. */
    public XdrAbleMetricEntry() {
        /* SKIP */
    }

    /**
     * Encoding constructor: captures the value (if present) together with
     * the dictionary delta accumulated since the previous entry, then
     * resets the write dictionary for the next entry.
     */
    public XdrAbleMetricEntry(@NonNull DictionaryForWrite dictionary, @NonNull Optional<MetricValue> value, int repeat) {
        this.present = value.isPresent();
        this.repeat = repeat;
        if (value.isPresent()) {
            this.metric = ToXdr.metricValue(value.get(), dictionary.getStringTable()::getOrCreate);
            this.dd = dictionary.encode();
            dictionary.reset();
        }
    }

    /**
     * Expands this entry into {@code repeat} optional values, applying the
     * stored dictionary delta and publishing the merged dictionary via
     * {@code updateDictionary} for subsequent entries.
     */
    public Iterator<Optional<MetricValue>> decode(DictionaryDelta inputDictionary, Consumer<DictionaryDelta> updateDictionary) {
        if (!present) return repeatingIterator(Optional.empty(), repeat);
        final DictionaryDelta dictionary = new DictionaryDelta(dd, inputDictionary);
        final MetricValue result = FromXdr.metricValue(metric, dictionary::getString);
        updateDictionary.accept(dictionary);
        return repeatingIterator(Optional.of(result), repeat);
    }

    @Override
    public void xdrEncode(XdrEncodingStream stream) throws OncRpcException, IOException {
        stream.xdrEncodeInt(repeat);
        stream.xdrEncodeBoolean(present);
        if (present) {
            // Order matters: xdrDecode reads dd before metric.
            dd.xdrEncode(stream);
            metric.xdrEncode(stream);
        }
    }

    @Override
    public void xdrDecode(XdrDecodingStream stream) throws OncRpcException, IOException {
        repeat = stream.xdrDecodeInt();
        present = stream.xdrDecodeBoolean();
        if (!present) {
            metric = null;
            dd = null;
        } else {
            dd = new dictionary_delta(stream);
            metric = new metric_value(stream);
        }
    }

    /** Returns an iterator yielding {@code elem} exactly {@code repeat} times. */
    private static <T> Iterator<T> repeatingIterator(T elem, int repeat) {
        return Iterators.limit(Iterators.cycle(elem), repeat);
    }
}
/** Immutable pair of a timestamp and the (possibly absent) value at it. */
@Value
private static class TimestampedMetric {
    @NonNull
    private final DateTime timestamp;
    @NonNull
    private final Optional<MetricValue> value;
}
/**
 * Unchecked carrier for an {@link IOException}, used to tunnel I/O failures
 * out of lambdas (parallel streams) that cannot throw checked exceptions.
 * Callers unwrap via {@link #getEx()} and rethrow the original exception.
 *
 * Fix: the cause is now chained to the superclass so that, if this wrapper
 * ever escapes un-unwrapped, stack traces still show the underlying I/O
 * failure (the lombok-generated constructor did not chain it).
 */
@Getter
private static class RuntimeIOException extends RuntimeException {
    private static final long serialVersionUID = 1L;

    private final IOException ex;

    public RuntimeIOException(IOException ex) {
        super(ex); // chain the cause for complete stack traces
        this.ex = ex;
    }
}
}
| |
package eu.f3rog.blade.weaving;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import eu.f3rog.blade.compiler.builder.annotation.WeaveParser;
import eu.f3rog.blade.core.Weave;
import eu.f3rog.blade.core.Weaves;
import eu.f3rog.blade.weaving.interfaces.Interfaces;
import eu.f3rog.blade.weaving.util.AWeaver;
import eu.f3rog.javassist.exception.AfterBurnerImpossibleException;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtField;
import javassist.CtMethod;
import javassist.NotFoundException;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.AnnotationMemberValue;
import javassist.bytecode.annotation.ArrayMemberValue;
import javassist.bytecode.annotation.MemberValue;
import static eu.f3rog.blade.weaving.util.WeavingUtil.getAnnotations;
public final class BladeWeaver
extends AWeaver {
/**
 * Entry point: for every generated "<X>_Helper" class in the build, weaves
 * its members into the corresponding class X. Helpers whose target class
 * cannot be resolved in the pool are skipped silently.
 */
@Override
public void weave(ClassPool classPool, List<CtClass> classes) {
    for (CtClass cls : classes) {
        String className = cls.getName();
        if (className.endsWith("_Helper")) {
            CtClass intoClass;
            try {
                intoClass = classPool.get(className.replace("_Helper", ""));
            } catch (NotFoundException e) {
                continue; // helper without a resolvable target: nothing to weave
            }
            weave(cls, intoClass);
        }
    }
}
/** Parsed @Weave data: where to weave, target arg types, and the code to insert. */
private static class Metadata {
    String into;      // WEAVE_FIELD, WEAVE_CONSTRUCTOR, or a method target parsed by WeaveParser.parseInto
    CtClass[] args;   // parameter types identifying the target method/constructor
    String statement; // code statement to weave; null means none (bare field copy)
}
/**
 * Orders weave metadata descending by 'into' value so higher-priority
 * targets are woven first; entries without a target sort last.
 *
 * Bug fix: the previous implementation returned 1 whenever the left side
 * was null / had a null 'into' — including when BOTH sides did — so
 * compare(x, x) could return a non-zero value. That violates the
 * Comparator contract, and Collections.sort (TimSort) may then throw
 * "Comparison method violates its general contract!".
 */
private final static class MetadataComparator
        implements Comparator<Metadata> {
    @Override
    public int compare(Metadata l, Metadata r) {
        final String li = (l == null) ? null : l.into;
        final String ri = (r == null) ? null : r.into;
        if (li == null && ri == null) {
            return 0; // equal: both lack a target
        }
        if (li == null) {
            return 1; // targetless entries last
        }
        if (ri == null) {
            return -1;
        }
        return ri.compareTo(li); // descending order of 'into'
    }
}
// Fully-qualified name pattern of generated helper classes: <package>.<Class>_Helper
private static final String HELPER_NAME_FORMAT = "%s.%s_Helper";

/**
 * Constructor
 *
 * @param debug debug flag forwarded to AWeaver — presumably enables the
 *              log()/lognl() output; confirm in AWeaver
 */
public BladeWeaver(boolean debug) {
    super(debug);
}
/**
 * Weaves one helper class into its target class: fields first, then all
 * method metadata sorted by priority, then implemented interfaces. Any
 * failure aborts with an IllegalStateException wrapping the cause.
 */
public void weave(CtClass helperClass, CtClass intoClass) {
    lognl("|~ Weaving start '%s'", intoClass.getName());
    try {
        ClassPool classPool = intoClass.getClassPool();
        // get field metadata
        for (CtField field : helperClass.getDeclaredFields()) {
            lognl("field '%s'", field.getName());
            List<Metadata> metadata = loadWeaveMetadata(classPool, field);
            weave(metadata, intoClass, field);
        }
        // get method metadata (collected first so it can be sorted globally)
        List<Metadata> allMethodMetadata = new ArrayList<>();
        for (CtMethod method : helperClass.getDeclaredMethods()) {
            lognl("method '%s'", method.getName());
            List<Metadata> metadata = loadWeaveMetadata(classPool, method);
            allMethodMetadata.addAll(metadata);
        }
        // sort metadata based on priority and weave them
        Collections.sort(allMethodMetadata, new MetadataComparator());
        for (Metadata metadata : allMethodMetadata) {
            weave(metadata, intoClass, null);
        }
        // weave interfaces
        for (CtClass interfaceClass : helperClass.getInterfaces()) {
            lognl("interface '%s'", interfaceClass.getName());
            Interfaces.weaveInterface(interfaceClass, intoClass, getJavassistHelper());
        }
        lognl("~| Weaving done '%s'", intoClass.getName());
    } catch (Exception e) {
        lognl("");
        lognl("~| Weaving failed '%s'", intoClass.getName());
        lognl("");
        e.printStackTrace();
        throw new IllegalStateException(e);
    }
}
/** Weaves each metadata entry of the list into {@code intoClass} in order. */
private void weave(List<Metadata> m, CtClass intoClass, CtField helperField) throws NotFoundException, CannotCompileException, AfterBurnerImpossibleException {
    for (Metadata metadata : m) {
        weave(metadata, intoClass, helperField);
    }
}
/**
 * Applies a single piece of weave metadata to {@code intoClass}:
 * WEAVE_FIELD copies {@code helperField} into the target (optionally with
 * an initializer statement); WEAVE_CONSTRUCTOR inserts the statement into
 * the matching constructor; otherwise the statement is inserted into the
 * method described by 'into' (before/after body or before/after the super
 * call, with optional rename).
 */
private void weave(Metadata metadata, CtClass intoClass, CtField helperField) throws NotFoundException, CannotCompileException, AfterBurnerImpossibleException {
    if (metadata == null) {
        lognl(" ~x nowhere");
        return;
    }
    if (helperField != null && Weave.WEAVE_FIELD.equals(metadata.into)) {
        // weave field
        CtField f = new CtField(helperField.getType(), helperField.getName(), intoClass);
        f.setModifiers(helperField.getModifiers());
        log(" ~> field '%s'", f.getName());
        if (metadata.statement != null) {
            lognl(" ~~~ %s", metadata.statement);
            intoClass.addField(f, CtField.Initializer.byExpr(metadata.statement));
        } else {
            lognl(" ~~~ without statement");
            intoClass.addField(f);
        }
    } else {
        String body = "{ " + metadata.statement + " }";
        if (Weave.WEAVE_CONSTRUCTOR.equals(metadata.into)) {
            log(" ~> constructor");
            // weave into constructor
            getJavassistHelper().insertConstructor(body, intoClass, metadata.args);
            lognl(" ~~~ %s", body);
        } else {
            // weave into method
            WeaveParser.Into into = WeaveParser.parseInto(metadata.into);
            if (into.shouldRename()) {
                lognl(" ~> rename '%s' to '%s'", into.getMethodName(), into.getRename());
                getJavassistHelper().renameMethod(intoClass, into.getMethodName(), into.getRename(), metadata.args);
            }
            log(" ~> method '%s' %s with %s priority", into.getMethodName(), into.getMethodWeaveType(), into.getPriority());
            lognl(" ~~~ %s", body);
            switch (into.getMethodWeaveType()) {
                case BEFORE_BODY:
                    getJavassistHelper().insertBeforeBody(body, intoClass, into.getMethodName(), metadata.args);
                    break;
                case AFTER_BODY:
                    getJavassistHelper().insertAfterBody(body, intoClass, into.getMethodName(), metadata.args);
                    break;
                case BEFORE_SUPER:
                    try {
                        getJavassistHelper().insertBeforeSuper(body, intoClass, into.getMethodName(), metadata.args);
                    } catch (Exception e) { // put at beginning if super not found
                        getJavassistHelper().insertBeforeBody(body, intoClass, into.getMethodName(), metadata.args);
                    }
                    break;
                case AFTER_SUPER:
                    try {
                        getJavassistHelper().insertAfterSuper(body, intoClass, into.getMethodName(), metadata.args);
                    } catch (Exception e) { // put at beginning if super not found
                        getJavassistHelper().insertBeforeBody(body, intoClass, into.getMethodName(), metadata.args);
                    }
                    break;
                default:
                    throw new IllegalStateException();
            }
        }
    }
}
/**
 * Resolves the generated helper class for {@code cls}.
 *
 * @throws NotFoundException if no helper class exists in the pool
 */
private CtClass getHelper(CtClass cls) throws NotFoundException {
    return cls.getClassPool()
            .get(String.format(HELPER_NAME_FORMAT, cls.getPackageName(), cls.getSimpleName()));
}

/** Returns true when a generated helper class exists for {@code cls}. */
private boolean hasHelper(CtClass cls) {
    try {
        return getHelper(cls) != null;
    } catch (NotFoundException e) {
        return false;
    }
}
/** Reads @Weave/@Weaves metadata from a helper method's annotations, if any. */
private List<Metadata> loadWeaveMetadata(ClassPool classPool, CtMethod method) throws NotFoundException {
    final AnnotationsAttribute attr = getAnnotations(method);
    return (attr == null) ? Collections.<Metadata>emptyList() : loadWeaveMetadata(classPool, attr);
}

/** Reads @Weave/@Weaves metadata from a helper field's annotations, if any. */
private List<Metadata> loadWeaveMetadata(ClassPool classPool, CtField field) throws NotFoundException {
    final AnnotationsAttribute attr = getAnnotations(field);
    return (attr == null) ? Collections.<Metadata>emptyList() : loadWeaveMetadata(classPool, attr);
}
/**
 * Extracts weave metadata from an annotations attribute: a single @Weave
 * yields a one-element list, a @Weaves container yields one entry per
 * nested annotation, anything else yields an empty list.
 */
private List<Metadata> loadWeaveMetadata(ClassPool classPool, AnnotationsAttribute attr) throws NotFoundException {
    final Annotation single = attr.getAnnotation(Weave.class.getName());
    if (single != null) {
        return Collections.singletonList(readWeaveAnnotation(single, classPool));
    }
    final Annotation container = attr.getAnnotation(Weaves.class.getName());
    if (container != null) {
        return readWeavesAnnotation(container, classPool);
    }
    return Collections.emptyList();
}
/**
 * Converts a single @Weave annotation into Metadata. Member values are read
 * via toString(), so the surrounding quotes are stripped; single quotes in
 * the statement are rewritten to double quotes so annotation values can
 * contain string literals. An empty statement is normalized to null.
 */
private Metadata readWeaveAnnotation(Annotation weaveAnnotation, ClassPool classPool) throws NotFoundException {
    Metadata metadata = new Metadata();
    // get INTO
    MemberValue val = weaveAnnotation.getMemberValue("into");
    if (val != null) {
        metadata.into = val.toString().replaceAll("\"", "");
    }
    // get INTO ARGS
    metadata.args = readArgs(weaveAnnotation, classPool);
    // get STATEMENT
    val = weaveAnnotation.getMemberValue("statement");
    if (val != null) {
        // NOTE(review): this strips ALL quote characters, including any the
        // statement itself might legitimately contain — fragile by design.
        metadata.statement = val.toString().replaceAll("\"", "");
        metadata.statement = metadata.statement.replaceAll("'", "\"");
        if (metadata.statement.length() == 0) {
            metadata.statement = null;
        }
    }
    return metadata;
}
/**
 * Reads the 'args' member of a @Weave annotation and resolves each class
 * name against the pool; returns an empty array when no args are declared.
 */
private CtClass[] readArgs(Annotation weaveAnnotation, ClassPool classPool) throws NotFoundException {
    final ArrayMemberValue argsMember = (ArrayMemberValue) weaveAnnotation.getMemberValue("args");
    if (argsMember == null) {
        return new CtClass[0];
    }
    final MemberValue[] values = argsMember.getValue();
    final CtClass[] classes = new CtClass[values.length];
    int index = 0;
    for (MemberValue value : values) {
        // member values stringify with surrounding quotes; strip them
        classes[index++] = classPool.get(value.toString().replaceAll("\"", ""));
    }
    return classes;
}
/**
 * Expands a @Weaves container annotation into metadata for each nested
 * @Weave; returns an empty list when the 'value' member is missing.
 */
private List<Metadata> readWeavesAnnotation(Annotation weavesAnnotation, ClassPool classPool) throws NotFoundException {
    final ArrayMemberValue valueMember = (ArrayMemberValue) weavesAnnotation.getMemberValue("value");
    if (valueMember == null) {
        return Collections.emptyList();
    }
    final MemberValue[] nested = valueMember.getValue();
    final List<Metadata> result = new ArrayList<>(nested.length);
    for (MemberValue value : nested) {
        result.add(readWeaveAnnotation(((AnnotationMemberValue) value).getValue(), classPool));
    }
    return result;
}
}
| |
/**
* Copyright 2012 Tobias Gierke <tobias.gierke@code-sourcery.de>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.codesourcery.jasm16.ide;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Pattern;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import de.codesourcery.jasm16.compiler.io.DefaultResourceMatcher;
import de.codesourcery.jasm16.compiler.io.FileResource;
import de.codesourcery.jasm16.compiler.io.FileResourceResolver;
import de.codesourcery.jasm16.compiler.io.IResource;
import de.codesourcery.jasm16.compiler.io.IResource.ResourceType;
import de.codesourcery.jasm16.compiler.io.IResourceMatcher;
import de.codesourcery.jasm16.compiler.io.IResourceResolver;
import de.codesourcery.jasm16.emulator.EmulationOptions;
import de.codesourcery.jasm16.exceptions.ResourceNotFoundException;
import de.codesourcery.jasm16.utils.Misc;
import de.codesourcery.jasm16.utils.Misc.IFileVisitor;
/**
* DCPU-16 assembly project.
*
* @author tobias.gierke@code-sourcery.de
*/
public class AssemblyProject extends WorkspaceListener implements IAssemblyProject
{
private static final Logger LOG = Logger.getLogger(AssemblyProject.class);
private static final IResourceMatcher resourceMatcher = new DefaultResourceMatcher();
private final ProjectConfiguration projectConfiguration;
private final AtomicBoolean registeredWithWorkspace = new AtomicBoolean(false);
private final Object RESOURCE_LOCK = new Object();
private IProjectBuilder projectBuilder;
// @GuardedBy( RESOURCE_LOCK )
private final List<IResource> resources = new ArrayList<IResource>();
private final IResourceResolver resolver;
private final IWorkspace workspace;
private boolean isOpen;
/**
 * Creates a project bound to the given workspace and scans the project's
 * base and source folders for resources.
 *
 * @throws IOException if scanning the project folders fails
 * @throws IllegalArgumentException if config or workspace is null
 */
public AssemblyProject(IWorkspace workspace , ProjectConfiguration config,boolean isOpen) throws IOException
{
    if (config == null) {
        throw new IllegalArgumentException("config must not be NULL");
    }
    if ( workspace == null ) {
        throw new IllegalArgumentException("workspace must not be NULL");
    }
    this.isOpen = isOpen;
    this.workspace = workspace;
    this.projectConfiguration = config;
    // Resolver that classifies files via the project configuration.
    resolver = new FileResourceResolver( projectConfiguration.getBaseDirectory() )
    {
        @Override
        protected ResourceType determineResourceType(File file) {
            if ( getConfiguration().isSourceFile( file ) ) {
                return ResourceType.SOURCE_CODE;
            }
            return ResourceType.UNKNOWN;
        }
        @Override
        protected File getBaseDirectory() {
            return projectConfiguration.getBaseDirectory();
        }
    };
    synchronized( RESOURCE_LOCK ) { // unnecessary since we're inside this classes constructor but makes FindBugs & PMD happy
        resources.addAll( scanForResources() );
    }
}
/**
 * Injects the project builder; may be set exactly once.
 *
 * @throws IllegalArgumentException if projectBuilder is null
 * @throws IllegalStateException if a builder was already set
 */
public void setProjectBuilder(IProjectBuilder projectBuilder) {
    if (projectBuilder == null) {
        throw new IllegalArgumentException("projectBuilder must not be null");
    }
    if ( this.projectBuilder != null ) {
        throw new IllegalStateException("Project builder already set on "+this);
    }
    this.projectBuilder = projectBuilder;
}

/** Returns the builder set via {@link #setProjectBuilder}, or null if unset. */
public IProjectBuilder getProjectBuilder() {
    return projectBuilder;
}
/**
 * Maps a source resource to its object-code output file:
 * "<outputFolder>/<basename>.dcpu16".
 */
protected File getOutputFileForSource(IResource resource)
{
    final String objectCodeFile = getNameWithoutSuffix( resource )+".dcpu16";
    final File outputDir = getConfiguration().getOutputFolder();
    return new File( outputDir , objectCodeFile );
}
/**
 * Derives the base name of a resource: the last path component with its
 * file-type suffix (everything after the last '.') removed. Interior dots
 * are preserved.
 *
 * Bug fix: the previous implementation split on '.' and re-joined the
 * leading parts WITHOUT a separator, so "a.b.dcpu16" became "ab" instead
 * of "a.b", producing wrong output file names for multi-dot sources.
 */
protected String getNameWithoutSuffix(IResource resource) {
    String name;
    if ( resource instanceof FileResource) {
        FileResource file = (FileResource) resource;
        name = file.getFile().getName();
    } else {
        name = resource.getIdentifier();
    }
    // strip any leading path, accepting both '/' and '\' separators
    final String[] components = name.split("["+Pattern.quote("\\/")+"]");
    name = components[ components.length - 1 ];
    // drop the suffix after the last '.', keeping interior dots intact
    final int lastDot = name.lastIndexOf('.');
    return lastDot < 0 ? name : name.substring(0, lastDot);
}
/**
 * Re-scans the project folders and reconciles the result with the tracked
 * resource list: fires workspace deletion events for resources that have
 * disappeared and creation events for new ones; unchanged resources are
 * left untouched. The internal list itself is updated indirectly through
 * this project's own resourceCreated()/resourceDeleted() callbacks.
 */
@Override
public void reload() throws IOException
{
    final List<IResource> deletedResources=new ArrayList<IResource>();
    final List<IResource> newResources= scanForResources();
    synchronized( RESOURCE_LOCK ) { // guard 'resources' against concurrent modification
        // find deleted resources
        outer:
        for ( IResource existing : resources )
        {
            for ( IResource r : newResources )
            {
                if ( resourceMatcher.isSame( existing , r ) ) {
                    continue outer;
                }
            }
            deletedResources.add( existing );
        }
        // remove existing (=unchanged) resources from the new-resource list
        for ( Iterator<IResource> it=newResources.iterator() ; it.hasNext() ; )
        {
            final IResource newResource = it.next();
            for ( IResource existingResource : resources ) {
                if ( resourceMatcher.isSame( existingResource,newResource ) ) {
                    it.remove();
                    break;
                }
            }
        }
    }
    // fire events outside the lock
    for ( IResource deleted : deletedResources ) {
        workspace.resourceDeleted( this , deleted );
    }
    for ( IResource added : newResources ) {
        workspace.resourceCreated( this , added );
    }
}
/**
 * Scans the project base directory, all configured source folders and the
 * binary output folder, classifying each file as source code, project
 * configuration, executable, object file or unknown. Keyed by absolute
 * path, so a file seen twice is recorded once.
 *
 * @return freshly built list of all discovered resources
 * @throws IOException if directory traversal fails
 */
protected List<IResource> scanForResources() throws IOException {
    final Map<String,IResource> result = new HashMap<String,IResource> ();
    // scan files
    final IFileVisitor visitor = new IFileVisitor()
    {
        private final ProjectConfiguration projConfig = getConfiguration();
        @Override
        public boolean visit(File file) throws IOException
        {
            if ( ! result.containsKey( file.getAbsolutePath() ) )
            {
                final ResourceType type;
                // note: if clauses sorted by probability, most likely comes first
                if ( projConfig.isSourceFile( file ) )
                {
                    type = ResourceType.SOURCE_CODE;
                }
                else if ( ! ProjectConfiguration.isProjectConfigurationFile( file ) )
                {
                    type = ResourceType.UNKNOWN;
                } else {
                    type = ResourceType.PROJECT_CONFIGURATION_FILE;
                }
                final FileResource resource = new FileResource( file , type);
                result.put( file.getAbsolutePath() , resource );
            }
            return true;
        }
    };
    // NOTE(review): listFiles() may return null if the base directory
    // vanished or cannot be read — TODO confirm/guard against NPE here.
    for ( File f : projectConfiguration.getBaseDirectory().listFiles() ) {
        if ( ! visitor.visit( f ) ) {
            break;
        }
    }
    for ( File srcFolder : projectConfiguration.getSourceFolders() )
    {
        if ( srcFolder.exists() ) {
            Misc.visitDirectoryTreePostOrder( srcFolder , visitor );
        } else {
            LOG.warn("scanForResources(): Missing source folder: "+srcFolder.getAbsolutePath());
        }
    }
    // scan binary output folder
    final File outputFolder = projectConfiguration.getOutputFolder();
    if ( outputFolder.exists() )
    {
        final IFileVisitor executableVisitor = new IFileVisitor() {
            @Override
            public boolean visit(File file) throws IOException
            {
                if ( file.isFile() )
                {
                    // the configured executable name wins; everything else is an object file
                    if ( file.getName().equals( projectConfiguration.getExecutableName() ) ) {
                        result.put( file.getAbsolutePath() , new FileResource( file , ResourceType.EXECUTABLE ) );
                    } else {
                        result.put( file.getAbsolutePath() , new FileResource( file , ResourceType.OBJECT_FILE ) );
                    }
                }
                return true;
            }
        };
        Misc.visitDirectoryTreeInOrder( outputFolder , executableVisitor );
    }
    return new ArrayList<IResource>( result.values() );
}
/** Returns the project name from the configuration. */
@Override
public String getName()
{
    return projectConfiguration.getProjectName();
}

/** Returns a defensive snapshot copy of the tracked resources. */
@Override
public List<IResource> getAllResources()
{
    synchronized( RESOURCE_LOCK ) {
        return new ArrayList<IResource>( this.resources );
    }
}

/** Resolves an identifier via the project's file resource resolver. */
@Override
public IResource resolve(String identifier) throws ResourceNotFoundException
{
    return resolver.resolve( identifier );
}

@Override
public IResourceResolver getResourceResolver()
{
    return resolver;
}

/** Resolves an identifier relative to a parent resource. */
@Override
public IResource resolveRelative(String identifier, IResource parent) throws ResourceNotFoundException {
    return resolver.resolveRelative( identifier ,parent );
}

@Override
public ProjectConfiguration getConfiguration() {
    return projectConfiguration;
}
/**
 * Returns a snapshot of all tracked resources having the given type.
 *
 * @throws IllegalArgumentException if type is null
 */
@Override
public List<IResource> getResources(ResourceType type)
{
    if (type == null) {
        throw new IllegalArgumentException("type must not be NULL");
    }
    final List<IResource> matching = new ArrayList<IResource>();
    for ( Iterator<IResource> it = getAllResources().iterator() ; it.hasNext() ; ) {
        final IResource candidate = it.next();
        if ( candidate.hasType( type ) ) {
            matching.add( candidate );
        }
    }
    return matching;
}
/**
 * Removes the (at most one) tracked resource whose identifier matches the
 * given resource.
 *
 * Bug fix: the previous implementation iterated over the defensive COPY
 * returned by getAllResources() and called Iterator.remove() on that copy,
 * so the project's actual resource list was never modified — the method
 * was a no-op. It now removes from the real list under RESOURCE_LOCK,
 * matching the pattern used by resourceDeleted().
 *
 * @throws IllegalArgumentException if resource is null
 */
protected void handleResourceDeleted(IResource resource)
{
    if (resource == null) {
        throw new IllegalArgumentException("resource must not be NULL");
    }
    synchronized( RESOURCE_LOCK )
    {
        for (Iterator<IResource> it = resources.iterator(); it.hasNext();)
        {
            final IResource existing = it.next();
            if ( existing.getIdentifier().equals( resource.getIdentifier() ) )
            {
                it.remove();
                break;
            }
        }
    }
}
/**
 * Deletes everything inside the configured output folder (creating the
 * folder first if it does not exist) and notifies the workspace about each
 * removed top-level entry.
 *
 * Robustness fix: File.listFiles() returns null on I/O error or when the
 * folder disappears between exists() and the listing — previously that
 * raised a NullPointerException.
 *
 * @throws IOException if the output folder cannot be created
 */
protected void cleanOutputFolder() throws IOException
{
    File folder = getConfiguration().getOutputFolder();
    if ( ! folder.exists() ) {
        if ( ! folder.mkdirs() ) {
            throw new IOException("Failed to create output folder "+folder.getAbsolutePath());
        }
        return;
    }
    final File[] files = folder.listFiles();
    if ( files == null ) {
        return; // folder vanished or is unreadable: nothing to clean
    }
    for ( File f : files )
    {
        Misc.deleteRecursively( f );
        workspace.resourceDeleted( this , new FileResource( f , ResourceType.UNKNOWN) );
    }
}
/**
 * Finds a tracked resource by exact identifier match.
 *
 * @throws NoSuchElementException if no such resource exists
 */
@Override
public IResource lookupResource(String identifier)
{
    for ( Iterator<IResource> it = getAllResources().iterator() ; it.hasNext() ; ) {
        final IResource candidate = it.next();
        if ( candidate.getIdentifier().equals( identifier ) ) {
            return candidate;
        }
    }
    throw new NoSuchElementException("Unable to find resource '"+identifier+" in project "+this);
}
/** Workspace callback: a resource changed somewhere. */
@Override
public void resourceChanged(IAssemblyProject project, IResource resource) {
    if ( this != project) {
        return; // event concerns a different project
    }
    IResource found=null;
    synchronized( RESOURCE_LOCK )
    {
        for ( IResource r : getAllResources() )
        {
            if ( resourceMatcher.isSame( r , resource ) ) {
                found = r;
                break;
            }
        }
    }
    // NOTE(review): 'found' is only null-checked and then discarded — this
    // method currently has no observable effect; possibly unfinished code.
    if ( found == null ) {
        return;
    }
}
/**
 * Workspace callback: registers a newly created file resource for this
 * project. An already-known equivalent resource is replaced (update); at
 * most one EXECUTABLE resource may be tracked at a time.
 */
@Override
public void resourceCreated(IAssemblyProject project, IResource resource)
{
    if ( this != project) {
        return; // event concerns a different project
    }
    if ( resource instanceof FileResource)
    {
        if ( ((FileResource) resource).getFile().isDirectory() )
        {
            return; // we don't care about directories
        }
        synchronized( RESOURCE_LOCK )
        {
            for ( IResource r : getAllResources() ) {
                if ( resourceMatcher.isSame( r, resource ) ) // resource update
                {
                    resources.remove( r );
                    resources.add( resource );
                    return;
                }
            }
            if ( resource.hasType( ResourceType.EXECUTABLE ) ) {
                // inner synchronized is redundant (same lock, reentrant) but harmless
                synchronized( RESOURCE_LOCK )
                {
                    for ( IResource r : getAllResources() ) {
                        if ( r.hasType( ResourceType.EXECUTABLE ) ) {
                            throw new IllegalArgumentException("Cannot add executable "+resource+" to project "+this+" , already has executable "+r);
                        }
                    }
                }
            }
            resources.add( resource );
        }
    }
}
/**
 * Workspace callback: drops the tracked resource equivalent to the deleted
 * one, if present.
 */
@Override
public void resourceDeleted(IAssemblyProject project, IResource resource) {
    if ( this != project) {
        return; // event concerns a different project
    }
    synchronized( RESOURCE_LOCK )
    {
        for (Iterator<IResource> it = resources.iterator(); it.hasNext();) {
            IResource existing = it.next();
            if ( resourceMatcher.isSame( existing,resource ) )
            {
                it.remove();
                return;
            }
        }
    }
}
/**
 * Returns the tracked file resource whose absolute path matches the given
 * file, or null when none matches.
 */
@Override
public IResource getResourceForFile(File file)
{
    final String wantedPath = file.getAbsolutePath();
    for ( IResource r : getAllResources() ) {
        if ( r instanceof FileResource
                && ((FileResource) r).getFile().getAbsolutePath().equals( wantedPath ) ) {
            return r;
        }
    }
    return null;
}
/**
 * Two projects are considered the same when they are identical or share
 * the same name; null is never the same.
 */
@Override
public boolean isSame(IAssemblyProject other)
{
    if ( other == this ) {
        return true;
    }
    if ( other == null ) {
        return false;
    }
    return this.getName().equals( other.getName() );
}
/** Returns whether the project is currently open in the workspace. */
@Override
public boolean isOpen() {
    return isOpen;
}

/** Convenience inverse of {@link #isOpen()}. */
@Override
public boolean isClosed() {
    return !isOpen;
}
/** Lifecycle no-op: creation of other projects is irrelevant here. */
@Override
public void projectCreated(IAssemblyProject project) { /* sooo not interested */ }

/** Marks this project closed when the close event concerns it. */
@Override
public void projectClosed(IAssemblyProject project) {
    if ( project == this ) {
        this.isOpen = false;
    }
}

/** Marks this project open when the open event concerns it. */
@Override
public void projectOpened(IAssemblyProject project)
{
    if ( project == this ) {
        this.isOpen = true;
    }
}

/** Configuration changes are currently ignored. */
@Override
public void projectConfigurationChanged(IAssemblyProject project) {
}

/** Lifecycle no-op. */
@Override
public void projectDeleted(IAssemblyProject project) { /* sooo not interested */ }

/** Lifecycle no-op. */
@Override
public void projectDisposed(IAssemblyProject project)
{
}

/** Build events are ignored by the project itself. */
@Override
public void buildStarted(IAssemblyProject project) { /* sooo not interested */ }

/** Build events are ignored by the project itself. */
@Override
public void buildFinished(IAssemblyProject project, boolean success) { /* sooo not interested */ }
/** Human-readable representation: the project name. */
@Override
public String toString()
{
    return getConfiguration().getProjectName();
}

/** Delegates to the project configuration. */
@Override
public EmulationOptions getEmulationOptions() {
    return getConfiguration().getEmulationOptions();
}

/** Delegates to the project configuration. */
@Override
public void setEmulationOptions(EmulationOptions emulationOptions) {
    getConfiguration().setEmulationOptions( emulationOptions );
}
/** Returns true when an equivalent resource is already tracked by this project. */
@Override
public boolean containsResource(IResource resource)
{
    boolean found = false;
    synchronized(RESOURCE_LOCK) {
        for ( IResource existing : resources )
        {
            if ( resourceMatcher.isSame( existing , resource ) ) {
                found = true;
                break;
            }
        }
    }
    return found;
}
/**
 * Registers this project with its workspace: starts listening for resource
 * events and announces every already-known resource as created.
 *
 * @throws IllegalStateException if called for a foreign workspace or twice
 */
@Override
public void addedToWorkspace(IWorkspace workspace)
{
    if ( workspace != this.workspace ) {
        throw new IllegalStateException("Project "+this+" attached to different workspace?");
    }
    // compareAndSet guards against double registration
    if ( ! registeredWithWorkspace.compareAndSet(false,true) )
    {
        throw new IllegalStateException("addedToWorkspace() called on already registered project "+this);
    }
    workspace.addResourceListener( this );
    for ( IResource r : getAllResources() ) {
        workspace.resourceCreated( this , r );
    }
}

/**
 * Detaches this project from its workspace (mirror of addedToWorkspace).
 *
 * @throws IllegalStateException if called for a foreign workspace or when
 *         the project is not currently registered
 */
@Override
public void removedFromWorkspace(IWorkspace workspace)
{
    if ( workspace != this.workspace ) {
        throw new IllegalStateException("Project "+this+" attached to different workspace?");
    }
    if ( ! registeredWithWorkspace.compareAndSet(true,false) )
    {
        throw new IllegalStateException("removedFromWorkspace() called on detached project "+this);
    }
    workspace.removeResourceListener( this );
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.processor.internals;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.errors.ProcessorStateException;
import org.apache.kafka.streams.processor.BatchingStateRestoreCallback;
import org.apache.kafka.streams.processor.StateRestoreCallback;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.state.internals.OffsetCheckpoint;
import org.slf4j.Logger;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ProcessorStateManager extends AbstractStateManager {
private static final String STATE_CHANGELOG_TOPIC_SUFFIX = "-changelog";
private final Logger log;
private final TaskId taskId;
private final String logPrefix;
private final boolean isStandby;
private final ChangelogReader changelogReader;
private final Map<TopicPartition, Long> offsetLimits;
private final Map<TopicPartition, Long> standbyRestoredOffsets;
private final Map<String, StateRestoreCallback> restoreCallbacks; // used for standby tasks, keyed by state topic name
private final Map<String, String> storeToChangelogTopic;
private final List<TopicPartition> changelogPartitions = new ArrayList<>();
// TODO: this map does not work with customized grouper where multiple partitions
// of the same topic can be assigned to the same topic.
private final Map<String, TopicPartition> partitionForTopic;
/**
 * Creates a state manager for one task: acquires the task's state
 * directory, indexes source partitions by topic, and loads previously
 * checkpointed offsets. Under EOS the checkpoint file is deleted right
 * after loading, since its contents can no longer be trusted mid-epoch.
 *
 * @throws ProcessorStateException if the task directory does not exist and could not be created
 * @throws IOException if any severe error happens while creating or locking the state directory
 */
public ProcessorStateManager(final TaskId taskId,
                             final Collection<TopicPartition> sources,
                             final boolean isStandby,
                             final StateDirectory stateDirectory,
                             final Map<String, String> storeToChangelogTopic,
                             final ChangelogReader changelogReader,
                             final boolean eosEnabled,
                             final LogContext logContext) throws IOException {
    super(stateDirectory.directoryForTask(taskId), eosEnabled);
    this.log = logContext.logger(ProcessorStateManager.class);
    this.taskId = taskId;
    this.changelogReader = changelogReader;
    logPrefix = String.format("task [%s] ", taskId);
    // NOTE: last partition wins per topic; see the class-level TODO about
    // custom groupers assigning multiple partitions of one topic.
    partitionForTopic = new HashMap<>();
    for (final TopicPartition source : sources) {
        partitionForTopic.put(source.topic(), source);
    }
    offsetLimits = new HashMap<>();
    standbyRestoredOffsets = new HashMap<>();
    this.isStandby = isStandby;
    restoreCallbacks = isStandby ? new HashMap<>() : null; // only standby tasks track per-topic callbacks
    this.storeToChangelogTopic = storeToChangelogTopic;
    // load the checkpoint information
    checkpointableOffsets.putAll(checkpoint.read());
    if (eosEnabled) {
        // delete the checkpoint file after finish loading its stored offsets
        checkpoint.delete();
        checkpoint = null;
    }
    log.debug("Created state store manager for task {} with the acquired state dir lock", taskId);
}
/** Builds the changelog topic name: "<applicationId>-<storeName>-changelog". */
public static String storeChangelogTopic(final String applicationId, final String storeName) {
    return new StringBuilder(applicationId)
            .append('-')
            .append(storeName)
            .append(STATE_CHANGELOG_TOPIC_SUFFIX)
            .toString();
}
/** Returns this task's state directory (acquired in the constructor). */
@Override
public File baseDir() {
    return baseDir;
}
/**
 * Registers a state store with this manager. Stores without a changelog
 * topic are simply tracked; for standby tasks the restore callback is
 * recorded per topic, while active tasks register a StateRestorer with the
 * changelog reader so the store is restored from its changelog.
 *
 * @throws IllegalArgumentException if the store name collides with the
 *         checkpoint file name or was already registered
 */
@Override
public void register(final StateStore store,
                     final StateRestoreCallback stateRestoreCallback) {
    final String storeName = store.name();
    log.debug("Registering state store {} to its state manager", storeName);
    if (CHECKPOINT_FILE_NAME.equals(storeName)) {
        throw new IllegalArgumentException(String.format("%sIllegal store name: %s", logPrefix, CHECKPOINT_FILE_NAME));
    }
    if (stores.containsKey(storeName)) {
        throw new IllegalArgumentException(String.format("%sStore %s has already been registered.", logPrefix, storeName));
    }
    // check that the underlying change log topic exist or not
    final String topic = storeToChangelogTopic.get(storeName);
    if (topic == null) {
        stores.put(storeName, store);
        return; // no changelog: nothing to restore
    }
    final TopicPartition storePartition = new TopicPartition(topic, getPartition(topic));
    if (isStandby) {
        log.trace("Preparing standby replica of persistent state store {} with changelog topic {}", storeName, topic);
        restoreCallbacks.put(topic, stateRestoreCallback);
    } else {
        log.trace("Restoring state store {} from changelog topic {}", storeName, topic);
        final StateRestorer restorer = new StateRestorer(storePartition,
                new CompositeRestoreListener(stateRestoreCallback),
                checkpointableOffsets.get(storePartition),
                offsetLimit(storePartition),
                store.persistent(),
                storeName);
        changelogReader.register(restorer);
    }
    changelogPartitions.add(storePartition);
    stores.put(storeName, store);
}
@Override
public void reinitializeStateStoresForPartitions(final Collection<TopicPartition> partitions,
final InternalProcessorContext processorContext) {
// delegates entirely to the base class, supplying this manager's store registry
// and store-to-changelog mapping
super.reinitializeStateStoresForPartitions(
log,
stores,
storeToChangelogTopic,
partitions,
processorContext);
}
/**
* Returns, for every standby changelog partition, the checkpointed offset,
* or {@code -1} when no offset has been checkpointed for that partition.
*/
@Override
public Map<TopicPartition, Long> checkpointed() {
    final Map<TopicPartition, Long> partitionsAndOffsets = new HashMap<>();
    for (final String topicName : restoreCallbacks.keySet()) {
        final TopicPartition changelogPartition = new TopicPartition(topicName, getPartition(topicName));
        partitionsAndOffsets.put(changelogPartition,
                                 checkpointableOffsets.getOrDefault(changelogPartition, -1L));
    }
    return partitionsAndOffsets;
}
// Applies changelog records below the partition's offset limit to the standby store via
// its restore callback, and returns the records at/after the limit (or null when all
// records were applied). Also records the next offset to restore from for this partition.
List<ConsumerRecord<byte[], byte[]>> updateStandbyStates(final TopicPartition storePartition,
final List<ConsumerRecord<byte[], byte[]>> records) {
final long limit = offsetLimit(storePartition);
List<ConsumerRecord<byte[], byte[]>> remainingRecords = null;
final List<KeyValue<byte[], byte[]>> restoreRecords = new ArrayList<>();
// restore states from changelog records
final BatchingStateRestoreCallback restoreCallback = getBatchingRestoreCallback(restoreCallbacks.get(storePartition.topic()));
long lastOffset = -1L;
int count = 0;
for (final ConsumerRecord<byte[], byte[]> record : records) {
if (record.offset() < limit) {
restoreRecords.add(KeyValue.pair(record.key(), record.value()));
lastOffset = record.offset();
} else {
// beyond the limit: defer these records for a later call
if (remainingRecords == null) {
remainingRecords = new ArrayList<>(records.size() - count);
}
remainingRecords.add(record);
}
count++;
}
if (!restoreRecords.isEmpty()) {
try {
restoreCallback.restoreAll(restoreRecords);
} catch (final Exception e) {
throw new ProcessorStateException(String.format("%sException caught while trying to restore state from %s", logPrefix, storePartition), e);
}
}
// record the restored offset for its change log partition
// NOTE(review): when no record was applied, lastOffset stays -1 and this stores 0 —
// confirm that overwriting any previously recorded offset with 0 is intended
standbyRestoredOffsets.put(storePartition, lastOffset + 1);
return remainingRecords;
}
// Sets the exclusive upper bound on changelog offsets that may be applied to the
// store backed by the given partition (see offsetLimit/updateStandbyStates).
void putOffsetLimit(final TopicPartition partition, final long limit) {
log.trace("Updating store offset limit for partition {} to {}", partition, limit);
offsetLimits.put(partition, limit);
}
// Returns the offset limit registered for the partition; absent a registered limit,
// restoration is unbounded (Long.MAX_VALUE).
private long offsetLimit(final TopicPartition partition) {
    return offsetLimits.getOrDefault(partition, Long.MAX_VALUE);
}
@Override
public StateStore getStore(final String name) {
// null when no store with that name has been registered
return stores.get(name);
}
/**
* Flushes every registered store. All stores are attempted even if one fails;
* the first failure is remembered and rethrown (wrapped) after the loop.
*
* @throws ProcessorStateException if any store failed to flush
*/
@Override
public void flush() {
    if (stores.isEmpty()) {
        return;
    }
    log.debug("Flushing all stores registered in the state manager");
    ProcessorStateException firstException = null;
    for (final StateStore store : stores.values()) {
        log.trace("Flushing store {}", store.name());
        try {
            store.flush();
        } catch (final Exception e) {
            if (firstException == null) {
                firstException = new ProcessorStateException(String.format("%sFailed to flush state store %s", logPrefix, store.name()), e);
            }
            log.error("Failed to flush state store {}: ", store.name(), e);
        }
    }
    if (firstException != null) {
        throw firstException;
    }
}
/**
* {@link StateStore#close() Close} all stores (even in case of failure).
* Log all exception and re-throw the first exception that did occur at the end.
* A checkpoint is written (when {@code ackedOffsets} is non-null) before the store
* registry is cleared; note this only happens when at least one store was registered.
* @throws ProcessorStateException if any error happens when closing the state stores
*/
@Override
public void close(final Map<TopicPartition, Long> ackedOffsets) throws ProcessorStateException {
ProcessorStateException firstException = null;
// attempting to close the stores, just in case they
// are not closed by a ProcessorNode yet
if (!stores.isEmpty()) {
log.debug("Closing its state manager and all the registered state stores");
for (final StateStore store : stores.values()) {
log.debug("Closing storage engine {}", store.name());
try {
store.close();
} catch (final Exception e) {
// remember only the first failure but keep closing the remaining stores
if (firstException == null) {
firstException = new ProcessorStateException(String.format("%sFailed to close state store %s", logPrefix, store.name()), e);
}
log.error("Failed to close state store {}: ", store.name(), e);
}
}
if (ackedOffsets != null) {
checkpoint(ackedOffsets);
}
stores.clear();
}
if (firstException != null) {
throw firstException;
}
}
// write the checkpoint
// Merges restored offsets from the changelog reader plus the given acked offsets
// (for persistent, changelog-enabled stores) into this manager's checkpointable
// offsets, then writes them to the checkpoint file.
@Override
public void checkpoint(final Map<TopicPartition, Long> checkpointableOffsets) {
this.checkpointableOffsets.putAll(changelogReader.restoredOffsets());
for (final StateStore store : stores.values()) {
final String storeName = store.name();
// only checkpoint the offset to the offsets file if
// it is persistent AND changelog enabled
if (store.persistent() && storeToChangelogTopic.containsKey(storeName)) {
final String changelogTopic = storeToChangelogTopic.get(storeName);
// NOTE(review): getPartition expects a topic name; passing storeName here relies on
// the taskId.partition fallback in getPartition — confirm this is intended
final TopicPartition topicPartition = new TopicPartition(changelogTopic, getPartition(storeName));
if (checkpointableOffsets.containsKey(topicPartition)) {
// store the last offset + 1 (the log position after restoration)
this.checkpointableOffsets.put(topicPartition, checkpointableOffsets.get(topicPartition) + 1);
} else if (standbyRestoredOffsets.containsKey(topicPartition)) {
// fall back to the offset recorded while updating standby state
this.checkpointableOffsets.put(topicPartition, standbyRestoredOffsets.get(topicPartition));
}
}
}
// write the checkpoint file before closing
if (checkpoint == null) {
// checkpoint was set to null in the constructor when EOS is enabled
checkpoint = new OffsetCheckpoint(new File(baseDir, CHECKPOINT_FILE_NAME));
}
log.trace("Writing checkpoint: {}", this.checkpointableOffsets);
try {
checkpoint.write(this.checkpointableOffsets);
} catch (final IOException e) {
// a failed checkpoint write is tolerated: it only costs a longer restore next time
log.warn("Failed to write offset checkpoint file to {}: {}", checkpoint, e);
}
}
// Resolves the partition number for the given topic from the task's source partitions,
// falling back to the task's own partition when the topic is not a source topic.
private int getPartition(final String topic) {
    final TopicPartition sourcePartition = partitionForTopic.get(topic);
    if (sourcePartition == null) {
        return taskId.partition;
    }
    return sourcePartition.partition();
}
// Indexes the given global stores by name so they can be looked up via getGlobalStore(String).
void registerGlobalStateStores(final List<StateStore> stateStores) {
    log.debug("Register global stores {}", stateStores);
    stateStores.forEach(store -> globalStores.put(store.name(), store));
}
@Override
public StateStore getGlobalStore(final String name) {
// null when no global store with that name has been registered
return globalStores.get(name);
}
// Passes batching-aware callbacks straight through; otherwise adapts a
// record-at-a-time callback into the batching interface.
private BatchingStateRestoreCallback getBatchingRestoreCallback(final StateRestoreCallback callback) {
    return callback instanceof BatchingStateRestoreCallback
           ? (BatchingStateRestoreCallback) callback
           : new WrappedBatchingStateRestoreCallback(callback);
}
// Changelog partitions of all registered stores, in registration order.
// Note: returns the live internal list, not a copy.
Collection<TopicPartition> changelogPartitions() {
return changelogPartitions;
}
}
| |
/*
* Copyright (c) 2010-2014. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.commandhandling.disruptor;
import com.lmax.disruptor.RingBuffer;
import com.lmax.disruptor.dsl.Disruptor;
import com.lmax.disruptor.dsl.EventHandlerGroup;
import org.axonframework.commandhandling.CommandBus;
import org.axonframework.commandhandling.CommandCallback;
import org.axonframework.commandhandling.CommandDispatchInterceptor;
import org.axonframework.commandhandling.CommandHandler;
import org.axonframework.commandhandling.CommandHandlerInterceptor;
import org.axonframework.commandhandling.CommandMessage;
import org.axonframework.commandhandling.CommandTargetResolver;
import org.axonframework.commandhandling.NoHandlerForCommandException;
import org.axonframework.commandhandling.interceptors.SerializationOptimizingInterceptor;
import org.axonframework.common.Assert;
import org.axonframework.common.AxonThreadFactory;
import org.axonframework.domain.DomainEventStream;
import org.axonframework.eventhandling.EventBus;
import org.axonframework.eventsourcing.AggregateFactory;
import org.axonframework.eventsourcing.EventSourcedAggregateRoot;
import org.axonframework.eventsourcing.EventStreamDecorator;
import org.axonframework.eventstore.EventStore;
import org.axonframework.repository.Repository;
import org.axonframework.serializer.Serializer;
import org.axonframework.unitofwork.TransactionManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import static java.lang.String.format;
/**
* Asynchronous CommandBus implementation with very high performance characteristics. It divides the command handling
* process in two steps, which can be executed in different threads. The CommandBus is backed by a {@link Disruptor},
* which ensures that two steps are executed sequentially in these threads, while minimizing locking and inter-thread
* communication.
* <p/>
* The process is split into two separate steps, each of which is executed in a different thread:
* <ol>
* <li><em>Command Handler execution</em><br/>This process invokes the command handler with the incoming command. The
* result and changes to the aggregate are recorded for the next step.</li>
* <li><em>Event storage and publication</em><br/>This process stores all generated domain events and publishes them
* (with any optional application events) to the event bus. Finally, an asynchronous task is scheduled to invoke the
* command handler callback with the result of the command handling result.</li>
* </ol>
* <p/>
* <em>Exceptions and recovery</em>
* <p/>
* This separation of process steps makes this implementation very efficient and highly performing. However, it does
* not cope with exceptions very well. When an exception occurs, an Aggregate that has been loaded is potentially
* corrupt. That means that an aggregate does not represent a state that can be reproduced by replaying its committed
* events. Although this implementation will recover from this corrupt state, it may result in a number of commands
* being rejected in the meantime. These command may be retried if the cause of the {@link
* AggregateStateCorruptedException} does not indicate a non-transient error.
* <p/>
* Commands that have been executed against a potentially corrupt Aggregate will result in a {@link
* AggregateStateCorruptedException} exception. These commands are automatically rescheduled for processing by
* default. Use {@link DisruptorConfiguration#setRescheduleCommandsOnCorruptState(boolean)} disable this feature. Note
* that the order in which commands are executed is not fully guaranteed when this feature is enabled (default).
* <p/>
* <em>Limitations of this implementation</em>
* <p/>
* Although this implementation allows applications to achieve extreme performance (over 1M commands on commodity
* hardware), it does have some limitations. It only allows a single aggregate to be invoked during command processing.
* <p/>
* This implementation can only work with Event Sourced Aggregates.
* <p/>
* <em>Infrastructure considerations</em>
* <p/>
* This CommandBus implementation has special requirements for the Repositories being used during Command Processing.
* Therefore, the Repository instance to use in the Command Handler must be created using {@link
* #createRepository(org.axonframework.eventsourcing.AggregateFactory)}.
* Using another repository will most likely result in undefined behavior.
* <p/>
* The DisruptorCommandBus must have access to at least 3 threads, two of which are permanently used while the
* DisruptorCommandBus is operational. At least one additional thread is required to invoke callbacks and initiate a
* recovery process in the case of exceptions.
* <p/>
* Consider providing an alternative {@link org.axonframework.domain.IdentifierFactory} implementation. The default
* implementation used {@link java.util.UUID#randomUUID()} to generated identifier for Events. The poor performance of
* this method severely impacts overall performance of the DisruptorCommandBus. A better performing alternative is, for
* example, <a href="http://johannburkard.de/software/uuid/" target="_blank"><code>com.eaio.uuid.UUID</code></a>
*
* @author Allard Buijze
* @since 2.0
*/
public class DisruptorCommandBus implements CommandBus {

    private static final Logger logger = LoggerFactory.getLogger(DisruptorCommandBus.class);
    private static final ThreadGroup DISRUPTOR_THREAD_GROUP = new ThreadGroup("DisruptorCommandBus");

    // Registered command handlers, keyed by command name.
    private final ConcurrentMap<String, CommandHandler<?>> commandHandlers =
            new ConcurrentHashMap<String, CommandHandler<?>>();
    // Ring buffer driving the invoker -> (optional serializer) -> publisher pipeline.
    private final Disruptor<CommandHandlingEntry> disruptor;
    private final CommandHandlerInvoker[] commandHandlerInvokers;
    private final List<CommandDispatchInterceptor> dispatchInterceptors;
    private final List<CommandHandlerInterceptor> invokerInterceptors;
    private final List<CommandHandlerInterceptor> publisherInterceptors;
    // Non-null only when this instance created the executor itself (and must shut it down).
    private final ExecutorService executorService;
    private final boolean rescheduleOnCorruptState;
    private final long coolingDownPeriod;
    private final CommandTargetResolver commandTargetResolver;
    private final int publisherCount;
    private final int serializerCount;
    // Default callback used by dispatch(CommandMessage): logs failures, ignores success.
    private final CommandCallback<Object> failureLoggingCallback = new FailureLoggingCommandCallback();
    private volatile boolean started = true;
    private volatile boolean disruptorShutDown = false;

    /**
     * Initialize the DisruptorCommandBus with given resources, using default configuration settings. Uses a Blocking
     * WaitStrategy on a RingBuffer of size 4096. The (2) Threads required for command execution are created
     * immediately. Additional threads are used to invoke response callbacks and to initialize a recovery process in
     * the case of errors.
     *
     * @param eventStore The EventStore where generated events must be stored
     * @param eventBus   The EventBus where generated events must be published
     */
    public DisruptorCommandBus(EventStore eventStore, EventBus eventBus) {
        this(eventStore, eventBus, new DisruptorConfiguration());
    }

    /**
     * Initialize the DisruptorCommandBus with given resources and settings. The Threads required for command
     * execution are immediately requested from the Configuration's Executor, if any. Otherwise, they are created.
     *
     * @param eventStore    The EventStore where generated events must be stored
     * @param eventBus      The EventBus where generated events must be published
     * @param configuration The configuration for the command bus
     */
    @SuppressWarnings("unchecked")
    public DisruptorCommandBus(EventStore eventStore, EventBus eventBus,
                               DisruptorConfiguration configuration) {
        Assert.notNull(eventStore, "eventStore may not be null");
        Assert.notNull(eventBus, "eventBus may not be null");
        Assert.notNull(configuration, "configuration may not be null");
        Executor executor = configuration.getExecutor();
        if (executor == null) {
            // no executor supplied: create (and own) one, so stop() must shut it down
            executorService = Executors.newCachedThreadPool(
                    new AxonThreadFactory(DISRUPTOR_THREAD_GROUP));
            executor = executorService;
        } else {
            executorService = null;
        }
        rescheduleOnCorruptState = configuration.getRescheduleCommandsOnCorruptState();
        invokerInterceptors = new ArrayList<CommandHandlerInterceptor>(configuration.getInvokerInterceptors());
        publisherInterceptors = new ArrayList<CommandHandlerInterceptor>(configuration.getPublisherInterceptors());
        dispatchInterceptors = new ArrayList<CommandDispatchInterceptor>(configuration.getDispatchInterceptors());
        TransactionManager transactionManager = configuration.getTransactionManager();
        disruptor = new Disruptor<CommandHandlingEntry>(
                // reuse the local instead of invoking the getter a second time
                new CommandHandlingEntry.Factory(transactionManager != null),
                configuration.getBufferSize(),
                executor,
                configuration.getProducerType(),
                configuration.getWaitStrategy());
        commandTargetResolver = configuration.getCommandTargetResolver();

        // configure invoker Threads
        commandHandlerInvokers = initializeInvokerThreads(eventStore, configuration);
        // configure serializer Threads
        SerializerHandler[] serializerThreads = initializeSerializerThreads(configuration);
        serializerCount = serializerThreads.length;
        // configure publisher Threads
        EventPublisher[] publishers = initializePublisherThreads(eventStore, eventBus, configuration, executor,
                                                                transactionManager);
        publisherCount = publishers.length;
        disruptor.handleExceptionsWith(new ExceptionHandler());

        // wire the pipeline: invokers, then (optionally) serializers, then publishers
        EventHandlerGroup<CommandHandlingEntry> eventHandlerGroup = disruptor.handleEventsWith(commandHandlerInvokers);
        if (serializerThreads.length > 0) {
            eventHandlerGroup = eventHandlerGroup.then(serializerThreads);
            invokerInterceptors.add(new SerializationOptimizingInterceptor());
        }
        eventHandlerGroup.then(publishers);

        coolingDownPeriod = configuration.getCoolingDownPeriod();
        disruptor.start();
    }

    // Creates one EventPublisher per configured publisher thread, each bound to a segment id.
    private EventPublisher[] initializePublisherThreads(EventStore eventStore, EventBus eventBus,
                                                        DisruptorConfiguration configuration, Executor executor,
                                                        TransactionManager transactionManager) {
        EventPublisher[] publishers = new EventPublisher[configuration.getPublisherThreadCount()];
        for (int t = 0; t < publishers.length; t++) {
            publishers[t] = new EventPublisher(eventStore, eventBus, executor, transactionManager,
                                               configuration.getRollbackConfiguration(), t);
        }
        return publishers;
    }

    // Creates the serializer handlers; empty when pre-serialization is not configured.
    private SerializerHandler[] initializeSerializerThreads(DisruptorConfiguration configuration) {
        if (!configuration.isPreSerializationConfigured()) {
            return new SerializerHandler[0];
        }
        Serializer serializer = configuration.getSerializer();
        SerializerHandler[] serializerThreads = new SerializerHandler[configuration.getSerializerThreadCount()];
        for (int t = 0; t < serializerThreads.length; t++) {
            serializerThreads[t] = new SerializerHandler(serializer, t, configuration.getSerializedRepresentation());
        }
        return serializerThreads;
    }

    // Creates one CommandHandlerInvoker per configured invoker thread, each bound to a segment id.
    private CommandHandlerInvoker[] initializeInvokerThreads(EventStore eventStore,
                                                             DisruptorConfiguration configuration) {
        CommandHandlerInvoker[] invokers;
        invokers = new CommandHandlerInvoker[configuration.getInvokerThreadCount()];
        for (int t = 0; t < invokers.length; t++) {
            invokers[t] = new CommandHandlerInvoker(eventStore, configuration.getCache(), t);
        }
        return invokers;
    }

    @Override
    public void dispatch(final CommandMessage<?> command) {
        dispatch(command, failureLoggingCallback);
    }

    @Override
    public <R> void dispatch(CommandMessage<?> command, CommandCallback<R> callback) {
        Assert.state(started, "CommandBus has been shut down. It is not accepting any Commands");
        CommandMessage<?> commandToDispatch = command;
        // dispatch interceptors may replace the command before it enters the ring buffer
        for (CommandDispatchInterceptor interceptor : dispatchInterceptors) {
            commandToDispatch = interceptor.handle(commandToDispatch);
        }
        doDispatch(commandToDispatch, callback);
    }

    /**
     * Forces a dispatch of a command. This method should be used with caution. It allows commands to be retried during
     * the cooling down period of the disruptor.
     *
     * @param command  The command to dispatch
     * @param callback The callback to notify when command handling is completed
     * @param <R>      The expected return type of the command
     */
    public <R> void doDispatch(CommandMessage<?> command, CommandCallback<R> callback) {
        Assert.state(!disruptorShutDown, "Disruptor has been shut down. Cannot dispatch or re-dispatch commands");
        final CommandHandler<?> commandHandler = commandHandlers.get(command.getCommandName());
        if (commandHandler == null) {
            throw new NoHandlerForCommandException(format("No handler was subscribed to command [%s]",
                                                          command.getCommandName()));
        }

        RingBuffer<CommandHandlingEntry> ringBuffer = disruptor.getRingBuffer();
        int invokerSegment = 0;
        int publisherSegment = 0;
        int serializerSegment = 0;
        if (commandHandlerInvokers.length > 1 || publisherCount > 1 || serializerCount > 1) {
            // route all commands for the same aggregate to the same invoker/serializer/publisher
            // segment so they are processed in order
            Object aggregateIdentifier = commandTargetResolver.resolveTarget(command).getIdentifier();
            if (aggregateIdentifier != null) {
                // mask the sign bit to keep the hash (and thus the modulo) non-negative
                int idHash = aggregateIdentifier.hashCode() & Integer.MAX_VALUE;
                if (commandHandlerInvokers.length > 1) {
                    invokerSegment = idHash % commandHandlerInvokers.length;
                }
                if (serializerCount > 1) {
                    serializerSegment = idHash % serializerCount;
                }
                if (publisherCount > 1) {
                    publisherSegment = idHash % publisherCount;
                }
            }
        }
        long sequence = ringBuffer.next();
        try {
            CommandHandlingEntry event = ringBuffer.get(sequence);
            event.reset(command, commandHandler, invokerSegment, publisherSegment,
                        serializerSegment, new BlacklistDetectingCallback<R>(callback,
                                                                            command,
                                                                            disruptor.getRingBuffer(),
                                                                            this,
                                                                            rescheduleOnCorruptState),
                        invokerInterceptors, publisherInterceptors
            );
        } finally {
            // always publish, even on failure, to keep the ring buffer sequence consistent
            ringBuffer.publish(sequence);
        }
    }

    /**
     * Creates a repository instance for an Event Sourced aggregate that is created by the given
     * <code>aggregateFactory</code>.
     * <p/>
     * The repository returned must be used by Command Handlers subscribed to this Command Bus for loading aggregate
     * instances. Using any other repository instance may result in undefined outcome (a.k.a. concurrency problems).
     *
     * @param aggregateFactory The factory creating uninitialized instances of the Aggregate
     * @param <T>              The type of aggregate to create the repository for
     * @return the repository that provides access to stored aggregates
     */
    public <T extends EventSourcedAggregateRoot> Repository<T> createRepository(AggregateFactory<T> aggregateFactory) {
        return createRepository(aggregateFactory, NoOpEventStreamDecorator.INSTANCE);
    }

    /**
     * Creates a repository instance for an Event Sourced aggregate that is created by the given
     * <code>aggregateFactory</code>. The given <code>decorator</code> is used to decorate event streams.
     * <p/>
     * The repository returned must be used by Command Handlers subscribed to this Command Bus for loading aggregate
     * instances. Using any other repository instance may result in undefined outcome (a.k.a. concurrency problems).
     * <p/>
     * Note that a second invocation of this method with an aggregate factory for the same aggregate type <em>may</em>
     * return the same instance as the first invocation, even if the given <code>decorator</code> is different.
     *
     * @param aggregateFactory The factory creating uninitialized instances of the Aggregate
     * @param decorator        The decorator to decorate events streams with
     * @param <T>              The type of aggregate to create the repository for
     * @return the repository that provides access to stored aggregates
     */
    public <T extends EventSourcedAggregateRoot> Repository<T> createRepository(AggregateFactory<T> aggregateFactory,
                                                                                EventStreamDecorator decorator) {
        // every invoker thread keeps its own repository; register the factory with each of them
        for (CommandHandlerInvoker invoker : commandHandlerInvokers) {
            invoker.createRepository(aggregateFactory, decorator);
        }
        return new DisruptorRepository<T>(aggregateFactory.getTypeIdentifier());
    }

    @Override
    public <C> void subscribe(String commandName, CommandHandler<? super C> handler) {
        commandHandlers.put(commandName, handler);
    }

    @Override
    public <C> boolean unsubscribe(String commandName, CommandHandler<? super C> handler) {
        return commandHandlers.remove(commandName, handler);
    }

    /**
     * Shuts down the command bus. It no longer accepts new commands, and finishes processing commands that have
     * already been published. This method will not shut down any executor that has been provided as part of the
     * Configuration.
     */
    public void stop() {
        if (!started) {
            return;
        }
        started = false;
        // wait (busy-spin) until the ring buffer cursor has been idle for the cooling down period,
        // so in-flight and rescheduled commands get a chance to complete
        long lastChangeDetected = System.currentTimeMillis();
        long lastKnownCursor = disruptor.getRingBuffer().getCursor();
        while (System.currentTimeMillis() - lastChangeDetected < coolingDownPeriod && !Thread.interrupted()) {
            if (disruptor.getRingBuffer().getCursor() != lastKnownCursor) {
                lastChangeDetected = System.currentTimeMillis();
                lastKnownCursor = disruptor.getRingBuffer().getCursor();
            }
        }
        disruptorShutDown = true;
        disruptor.shutdown();
        if (executorService != null) {
            // only shut down the executor we created ourselves
            executorService.shutdown();
        }
    }

    // Default callback for fire-and-forget dispatches: failures are logged, successes ignored.
    private static class FailureLoggingCommandCallback implements CommandCallback<Object> {

        @Override
        public void onSuccess(Object result) {
        }

        @Override
        public void onFailure(Throwable cause) {
            logger.info("An error occurred while handling a command.", cause);
        }
    }

    // Repository handed out to command handlers; delegates to the invoker-thread-local
    // repository registered under the aggregate's type identifier.
    private static class DisruptorRepository<T extends EventSourcedAggregateRoot> implements Repository<T> {

        private final String typeIdentifier;

        public DisruptorRepository(String typeIdentifier) {
            this.typeIdentifier = typeIdentifier;
        }

        @SuppressWarnings("unchecked")
        @Override
        public T load(Object aggregateIdentifier, Long expectedVersion) {
            return (T) CommandHandlerInvoker.getRepository(typeIdentifier).load(aggregateIdentifier, expectedVersion);
        }

        @SuppressWarnings("unchecked")
        @Override
        public T load(Object aggregateIdentifier) {
            return (T) CommandHandlerInvoker.getRepository(typeIdentifier).load(aggregateIdentifier);
        }

        @Override
        public void add(T aggregate) {
            CommandHandlerInvoker.getRepository(typeIdentifier).add(aggregate);
        }
    }

    // Decorator that leaves event streams untouched; used when no decorator is supplied.
    private static class NoOpEventStreamDecorator implements EventStreamDecorator {

        public static final EventStreamDecorator INSTANCE = new NoOpEventStreamDecorator();

        @Override
        public DomainEventStream decorateForRead(String aggregateType, Object aggregateIdentifier,
                                                 DomainEventStream eventStream) {
            return eventStream;
        }

        @Override
        public DomainEventStream decorateForAppend(String aggregateType, EventSourcedAggregateRoot aggregate,
                                                   DomainEventStream eventStream) {
            return eventStream;
        }
    }

    // Logs processing/start/shutdown exceptions raised inside the disruptor threads.
    private class ExceptionHandler implements com.lmax.disruptor.ExceptionHandler {

        @Override
        public void handleEventException(Throwable ex, long sequence, Object event) {
            logger.error("Exception occurred while processing a {}.",
                         ((CommandHandlingEntry) event).getCommand().getPayloadType().getSimpleName(),
                         ex);
        }

        @Override
        public void handleOnStartException(Throwable ex) {
            logger.error("Failed to start the DisruptorCommandBus.", ex);
            disruptor.shutdown();
        }

        @Override
        public void handleOnShutdownException(Throwable ex) {
            logger.error("Error while shutting down the DisruptorCommandBus", ex);
        }
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui.treeStructure;
import com.intellij.Patches;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.util.treeView.*;
import com.intellij.openapi.ui.GraphicsConfig;
import com.intellij.openapi.ui.Queryable;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.ui.*;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.ui.*;
import com.intellij.util.ui.tree.WideSelectionTreeUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.plaf.TreeUI;
import javax.swing.plaf.basic.BasicTreeUI;
import javax.swing.text.Position;
import javax.swing.tree.*;
import java.awt.*;
import java.awt.dnd.Autoscroll;
import java.awt.event.*;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Map;
public class Tree extends JTree implements ComponentWithEmptyText, ComponentWithExpandableItems<Integer>, Autoscroll, Queryable,
ComponentWithFileColors {
// Text shown when the tree has no (visible) nodes; see isEmpty().
private final StatusText myEmptyText;
private final ExpandableItemsHandler<Integer> myExpandableItemsHandler;
// Spinner shown while setPaintBusy(true); lazily created in updateBusy().
private AsyncProcessIcon myBusyIcon;
private boolean myBusy;
private Rectangle myLastVisibleRec;
private Dimension myHoldSize;
private final MySelectionModel mySelectionModel = new MySelectionModel();
private boolean myHorizontalAutoScrolling = true;
// Creates a tree with the Swing default tree model.
public Tree() {
this(getDefaultTreeModel());
}
// Creates a tree over the given root node (children are not asked whether they allow children).
public Tree(TreeNode root) {
this(new DefaultTreeModel(root, false));
}
// Main constructor: installs the empty-status text, expandable-items handler,
// mouse/focus listeners, the node renderer and the custom selection model.
public Tree(TreeModel treemodel) {
super(treemodel);
myEmptyText = new StatusText(this) {
@Override
protected boolean isStatusVisible() {
// show the status text only while the tree has nothing to display
return Tree.this.isEmpty();
}
};
myExpandableItemsHandler = ExpandableItemsHandlerFactory.install(this);
addMouseListener(new MyMouseListener());
// workaround only installed when the corresponding JDK bug is still relevant
if (Patches.SUN_BUG_ID_4893787) {
addFocusListener(new MyFocusListener());
}
setCellRenderer(new NodeRenderer());
setSelectionModel(mySelectionModel);
}
/**
* Installs the given UI, transparently replacing it with a wide-selection UI when
* appropriate: a custom UI is not in use, the supplied UI is not already
* wide-selection-aware, wide selection is enabled, and we are not under GTK L&amp;F.
*/
@Override
public void setUI(final TreeUI ui) {
    final boolean substituteWideSelectionUI = !isCustomUI()
                                              && !(ui instanceof WideSelectionTreeUI)
                                              && isWideSelection()
                                              && !UIUtil.isUnderGTKLookAndFeel();
    super.setUI(substituteWideSelectionUI
                ? new WideSelectionTreeUI(isWideSelection(), getWideSelectionBackgroundCondition())
                : ui);
}
/**
* Returns {@code true} when the tree has nothing meaningful to show: no model or root,
* or a hidden root with no children (or whose only child is a loading placeholder).
*/
public boolean isEmpty() {
    final TreeModel model = getModel();
    if (model == null || model.getRoot() == null) {
        return true;
    }
    if (isRootVisible()) {
        return false;
    }
    final Object root = model.getRoot();
    final int childCount = model.getChildCount(root);
    if (childCount == 0) {
        return true;
    }
    return childCount == 1 && model.getChild(root, 0) instanceof LoadingNode;
}
// Subclasses return true to prevent setUI() from substituting the wide-selection UI.
protected boolean isCustomUI() {
return false;
}
/**
* Will be removed in version 13
*
* @deprecated use isWideSelection
* @see #isWideSelection()
*/
protected boolean isMacWideSelection() {
return isWideSelection();
}
// Whether the selection highlight should span the whole row width (default: yes).
protected boolean isWideSelection() {
return true;
}
/**
* @return a strategy which determines if a wide selection should be drawn for a target row (it's number is
* {@link Condition#value(Object) given} as an argument to the strategy)
*/
@SuppressWarnings("unchecked")
@NotNull
protected Condition<Integer> getWideSelectionBackgroundCondition() {
// default: draw the wide selection background for every row
return Conditions.alwaysTrue();
}
@Override
public boolean isFileColorsEnabled() {
// file-based row coloring is off by default; subclasses may opt in
return false;
}
@NotNull
@Override
public StatusText getEmptyText() {
// text displayed while isEmpty() returns true
return myEmptyText;
}
@Override
@NotNull
public ExpandableItemsHandler<Integer> getExpandableItemsHandler() {
return myExpandableItemsHandler;
}
@Override
public void setExpandableItemsEnabled(boolean enabled) {
myExpandableItemsHandler.setEnabled(enabled);
}
/**
* Returns the explicitly set background, falling back to the standard tree
* text background when none has been set.
*/
@Override
public Color getBackground() {
    if (isBackgroundSet()) {
        return super.getBackground();
    }
    return UIUtil.getTreeTextBackground();
}
/**
* Returns the explicitly set foreground, falling back to the standard tree
* foreground when none has been set.
*/
@Override
public Color getForeground() {
    if (isForegroundSet()) {
        return super.getForeground();
    }
    return UIUtil.getTreeForeground();
}
@Override
public void addNotify() {
super.addNotify();
// (re-)attach the busy icon once the component is connected to a displayable hierarchy
updateBusy();
}
@Override
public void removeNotify() {
super.removeNotify();
if (myBusyIcon != null) {
remove(myBusyIcon);
Disposer.dispose(myBusyIcon);
myBusyIcon = null;
}
}
@Override
public void doLayout() {
super.doLayout();
updateBusyIconLocation();
}
private void updateBusyIconLocation() {
if (myBusyIcon != null) {
myBusyIcon.updateLocation(this);
}
}
/**
 * Paints the tree while temporarily "holding" the selection so selection
 * events fired during painting are suppressed — except when the selection
 * overlaps the busy spinner, where holding is skipped so the spinner can
 * animate over the selection.
 */
@Override
public void paint(Graphics g) {
  final Rectangle visible = getVisibleRect();
  boolean canHoldSelection = false;
  TreePath[] paths = getSelectionModel().getSelectionPaths();
  if (paths != null) {
    for (TreePath each : paths) {
      final Rectangle selection = getPathBounds(each);
      // Only selected rows that actually fall inside the clip matter here.
      if (selection != null && (g.getClipBounds().intersects(selection) || g.getClipBounds().contains(selection))) {
        if (myBusy && myBusyIcon != null) {
          Rectangle busyIconBounds = myBusyIcon.getBounds();
          if (selection.contains(busyIconBounds) || selection.intersects(busyIconBounds)) {
            // Selection overlaps the spinner: never hold, or the icon would freeze.
            canHoldSelection = false;
            break;
          } else {
            canHoldSelection = true;
          }
        } else {
          canHoldSelection = true;
        }
      }
    }
  }
  if (canHoldSelection) {
    if (!AbstractTreeBuilder.isToPaintSelection(this)) {
      mySelectionModel.holdSelection();
    }
  }
  try {
    super.paint(g);
    // Viewport moved since the last paint: the busy icon must follow it.
    if (!visible.equals(myLastVisibleRec)) {
      updateBusyIconLocation();
    }
    myLastVisibleRec = visible;
  }
  finally {
    // Always release, even if super.paint threw.
    mySelectionModel.unholdSelection();
  }
}
/**
 * Shows or hides the busy (spinner) overlay. No-op when the state is
 * unchanged.
 */
public void setPaintBusy(boolean paintBusy) {
  if (myBusy != paintBusy) {
    myBusy = paintBusy;
    updateBusy();
  }
}
/**
 * Creates/updates the busy spinner to reflect {@link #myBusy}. The icon is
 * created lazily on first use; clicking it cancels the running update through
 * the tree's builder.
 */
private void updateBusy() {
  if (myBusy) {
    if (myBusyIcon == null) {
      myBusyIcon = new AsyncProcessIcon(toString()).setUseMask(false);
      myBusyIcon.setOpaque(false);
      myBusyIcon.setPaintPassiveIcon(false);
      add(myBusyIcon);
      myBusyIcon.addMouseListener(new MouseAdapter() {
        @Override
        public void mousePressed(MouseEvent e) {
          // Only react to a genuine action click, not popup triggers etc.
          if (!UIUtil.isActionClick(e)) return;
          AbstractTreeBuilder builder = AbstractTreeBuilder.getBuilderFor(Tree.this);
          if (builder != null) {
            builder.cancelUpdate();
          }
        }
      });
    }
  }
  if (myBusyIcon != null) {
    if (myBusy) {
      if (shouldShowBusyIconIfNeeded()) {
        myBusyIcon.resume();
        myBusyIcon.setToolTipText("Update is in progress. Click to cancel");
      }
    }
    else {
      myBusyIcon.suspend();
      myBusyIcon.setToolTipText(null);
      // Repaint later so the suspended icon's last frame is cleared from screen.
      //noinspection SSBasedInspection
      SwingUtilities.invokeLater(new Runnable() {
        @Override
        public void run() {
          if (myBusyIcon != null) {
            repaint();
          }
        }
      });
    }
    updateBusyIconLocation();
  }
}

/** Show the spinner only in the focused tree; see the linked issue. */
protected boolean shouldShowBusyIconIfNeeded() {
  // https://youtrack.jetbrains.com/issue/IDEA-101422 "Rotating wait symbol in Project list whenever typing"
  return hasFocus();
}

/** Whether {@link #paintComponent(Graphics)} should paint highlighted node-content backgrounds. */
protected boolean paintNodes() {
  return false;
}
/**
 * Paints the optional node-content highlights and file-color gutter beneath
 * the regular tree painting, then the empty-text on top.
 */
@Override
protected void paintComponent(Graphics g) {
  // NOTE(review): when both paintNodes() and isFileColorsEnabled() are true the
  // background is filled twice; looks harmless but worth confirming.
  if (paintNodes()) {
    g.setColor(getBackground());
    g.fillRect(0, 0, getWidth(), getHeight());
    paintNodeContent(g);
  }
  if (isFileColorsEnabled()) {
    g.setColor(getBackground());
    g.fillRect(0, 0, getWidth(), getHeight());
    paintFileColorGutter(g);
  }
  super.paintComponent(g);
  // Painted last so the tree never covers the "nothing to show" text.
  myEmptyText.paint(this, g);
}
/**
 * Paints a full-width background stripe for each currently visible row using
 * the row's file color (see {@link #getFileColorFor}).
 */
protected void paintFileColorGutter(final Graphics g) {
  final GraphicsConfig config = new GraphicsConfig(g);
  final Rectangle rect = getVisibleRect();
  final int firstVisibleRow = getClosestRowForLocation(rect.x, rect.y);
  final int lastVisibleRow = getClosestRowForLocation(rect.x, rect.y + rect.height);
  for (int row = firstVisibleRow; row <= lastVisibleRow; row++) {
    final TreePath path = getPathForRow(row);
    if (path != null) {
      final Rectangle bounds = getRowBounds(row);
      Object component = path.getLastPathComponent();
      final Object[] pathObjects = path.getPath();
      // A LoadingNode placeholder is colored like its parent node.
      if (component instanceof LoadingNode && pathObjects.length > 1) {
        component = pathObjects[pathObjects.length - 2];
      }
      // NOTE(review): unconditional cast assumes every path component is a
      // DefaultMutableTreeNode — confirm for custom tree models.
      Color color = getFileColorFor((DefaultMutableTreeNode)component);
      if (color != null) {
        g.setColor(color);
        g.fillRect(0, bounds.y, getWidth(), bounds.height);
      }
    }
  }
  config.restore();
}
/** Resolves the file color of the user object at the end of the given path, or {@code null}. */
@Nullable
public Color getFileColorForPath(@Nullable TreePath path) {
  if (path != null) {
    final Object node = path.getLastPathComponent();
    if (node instanceof DefaultMutableTreeNode) {
      return getFileColorFor(((DefaultMutableTreeNode)node).getUserObject());
    }
  }
  return null;
}

/** Override to supply a per-object row background color; the default is none. */
@Nullable
public Color getFileColorFor(Object object) {
  return null;
}

/** Convenience overload: unwraps the node's user object and delegates. */
@Nullable
public Color getFileColorFor(DefaultMutableTreeNode node) {
  return getFileColorFor(node.getUserObject());
}
// Currently adds nothing beyond the superclass behavior; kept as an extension point.
@Override
protected void processKeyEvent(KeyEvent e) {
  super.processKeyEvent(e);
}
/**
 * Hack to prevent losing multiple selection on Mac when clicking Ctrl+Left Mouse Button.
 * See faulty code at BasicTreeUI.selectPathForEvent():2245
 *
 * Another hack to match selection UI (wide) and selection behavior (narrow) in Nimbus/GTK+.
 */
@Override
protected void processMouseEvent(final MouseEvent e) {
  MouseEvent e2 = e;
  if (SystemInfo.isMac) {
    // Rewrite Ctrl+Button1 presses as Button3 (popup) presses so BasicTreeUI
    // does not collapse an existing multi-selection.
    if (SwingUtilities.isLeftMouseButton(e) && e.isControlDown() && e.getID() == MouseEvent.MOUSE_PRESSED) {
      int modifiers = e.getModifiers() & ~(InputEvent.CTRL_MASK | InputEvent.BUTTON1_MASK) | InputEvent.BUTTON3_MASK;
      e2 = new MouseEvent(e.getComponent(), e.getID(), e.getWhen(), modifiers, e.getX(), e.getY(), e.getClickCount(),
                          true, MouseEvent.BUTTON3);
    }
  }
  else if (UIUtil.isUnderNimbusLookAndFeel() || UIUtil.isUnderGTKLookAndFeel()) {
    // Clicks in the wide-selection area left/right of the row content are
    // retargeted to just inside the row bounds so narrow-selection behavior
    // still triggers (but never clicks on the expand control).
    if (SwingUtilities.isLeftMouseButton(e) && (e.getID() == MouseEvent.MOUSE_PRESSED || e.getID() == MouseEvent.MOUSE_CLICKED)) {
      final TreePath path = getClosestPathForLocation(e.getX(), e.getY());
      if (path != null) {
        final Rectangle bounds = getPathBounds(path);
        if (bounds != null &&
            e.getY() > bounds.y && e.getY() < bounds.y + bounds.height &&
            (e.getX() >= bounds.x + bounds.width ||
             e.getX() < bounds.x && !isLocationInExpandControl(path, e.getX(), e.getY()))) {
          int newX = bounds.x + bounds.width - 2;
          e2 = new MouseEvent(e.getComponent(), e.getID(), e.getWhen(), e.getModifiers(), newX, e.getY(), e.getClickCount(),
                              e.isPopupTrigger(), e.getButton());
        }
      }
    }
  }
  super.processMouseEvent(e2);
}
/**
 * Asks the installed {@link BasicTreeUI} — via reflection, since the method is
 * protected — whether the given point lies inside the row's expand/collapse
 * control. Best-effort: any reflection failure yields {@code false}.
 */
private boolean isLocationInExpandControl(final TreePath path, final int x, final int y) {
  final TreeUI ui = getUI();
  if (!(ui instanceof BasicTreeUI)) return false;
  try {
    // Walk up to BasicTreeUI itself, where the protected method is declared.
    Class aClass = ui.getClass();
    while (BasicTreeUI.class.isAssignableFrom(aClass) && !BasicTreeUI.class.equals(aClass)) {
      aClass = aClass.getSuperclass();
    }
    final Method method = ReflectionUtil.getDeclaredMethod(aClass, "isLocationInExpandControl", TreePath.class, int.class, int.class);
    if (method != null) {
      return (Boolean)method.invoke(ui, path, x, y);
    }
  }
  catch (Throwable ignore) { }  // deliberately swallowed: treat as "not in control"
  return false;
}
/**
 * Disable Sun's speed search
 */
@Override
public TreePath getNextMatch(String prefix, int startingRow, Position.Bias bias) {
  return null;
}

/** Distance from the viewport edge within which {@link #autoscroll} scrolls. */
private static final int AUTOSCROLL_MARGIN = 10;

@Override
public Insets getAutoscrollInsets() {
  // NOTE(review): these inset values look unconventional (top uses getLocation().y,
  // bottom uses the parent height) — confirm against the Autoscroll contract
  // before changing anything here.
  return new Insets(getLocation().y + AUTOSCROLL_MARGIN, 0, getParent().getHeight() - AUTOSCROLL_MARGIN, getWidth() - 1);
}
/**
 * Scrolls one row up when the drag point is near the top margin, otherwise one
 * row down, keeping the target row visible.
 */
@Override
public void autoscroll(Point p) {
  int row = getClosestRowForLocation(p.x, p.y);
  boolean nearTop = getLocation().y + p.y <= AUTOSCROLL_MARGIN;
  if (nearTop) {
    if (row >= 1) {
      row--;
    }
  }
  else if (row < getRowCount() - 1) {
    row++;
  }
  scrollRowToVisible(row);
}

/** When {@code true}, each highlighted node gets its own highlight rectangle. */
protected boolean highlightSingleNode() {
  return true;
}
/**
 * Paints highlight backgrounds behind nodes whose content is flagged as
 * highlighted: either one rounded rectangle per node
 * ({@link #highlightSingleNode()}), or one rectangle spanning from a node's
 * first to last highlightable child.
 */
private void paintNodeContent(Graphics g) {
  if (!(getUI() instanceof BasicTreeUI)) return;
  final AbstractTreeBuilder builder = AbstractTreeBuilder.getBuilderFor(this);
  if (builder == null || builder.isDisposed()) return;
  GraphicsConfig config = new GraphicsConfig(g);
  config.setAntialiasing(true);
  final AbstractTreeStructure structure = builder.getTreeStructure();
  for (int eachRow = 0; eachRow < getRowCount(); eachRow++) {
    final TreePath path = getPathForRow(eachRow);
    PresentableNodeDescriptor node = toPresentableNode(path.getLastPathComponent());
    if (node == null) continue;
    if (!node.isContentHighlighted()) continue;
    if (highlightSingleNode()) {
      if (node.isContentHighlighted()) {
        final TreePath nodePath = getPath(node);
        Rectangle rect;
        final Rectangle parentRect = getPathBounds(nodePath);
        if (isExpanded(nodePath)) {
          // Expanded: stretch the highlight to the widest/deepest visible child.
          final int[] max = getMax(node, structure);
          rect = new Rectangle(parentRect.x, parentRect.y, Math.max((int) parentRect.getMaxX(), max[1]) - parentRect.x - 1,
                               Math.max((int) parentRect.getMaxY(), max[0]) - parentRect.y - 1);
        }
        else {
          rect = parentRect;
        }
        if (rect != null) {
          final Color highlightColor = node.getHighlightColor();
          g.setColor(highlightColor);
          g.fillRoundRect(rect.x, rect.y, rect.width, rect.height, 4, 4);
          g.setColor(highlightColor.darker());
          g.drawRoundRect(rect.x, rect.y, rect.width, rect.height, 4, 4);
        }
      }
    }
    else {
      //todo: to investigate why it might happen under 1.6: http://www.productiveme.net:8080/browse/PM-217
      if (node.getParentDescriptor() == null) continue;
      // Locate the first and last highlightable children of this node.
      final Object[] kids = structure.getChildElements(node);
      if (kids.length == 0) continue;
      PresentableNodeDescriptor first = null;
      PresentableNodeDescriptor last = null;
      int lastIndex = -1;
      for (int i = 0; i < kids.length; i++) {
        final Object kid = kids[i];
        if (kid instanceof PresentableNodeDescriptor) {
          PresentableNodeDescriptor eachKid = (PresentableNodeDescriptor) kid;
          if (!node.isHighlightableContentNode(eachKid)) continue;
          if (first == null) {
            first = eachKid;
          }
          last = eachKid;
          lastIndex = i;
        }
      }
      if (first == null || last == null) continue;
      Rectangle firstBounds = getPathBounds(getPath(first));
      if (isExpanded(getPath(last))) {
        // Last child is expanded: extend "last" to the last visible row that
        // still belongs to it (the row just above the next sibling/uncle).
        if (lastIndex + 1 < kids.length) {
          final Object child = kids[lastIndex + 1];
          if (child instanceof PresentableNodeDescriptor) {
            PresentableNodeDescriptor nextKid = (PresentableNodeDescriptor) child;
            int nextRow = getRowForPath(getPath(nextKid));
            last = toPresentableNode(getPathForRow(nextRow - 1).getLastPathComponent());
          }
        }
        else {
          NodeDescriptor parentNode = node.getParentDescriptor();
          if (parentNode instanceof PresentableNodeDescriptor) {
            final PresentableNodeDescriptor ppd = (PresentableNodeDescriptor)parentNode;
            int nodeIndex = node.getIndex();
            if (nodeIndex + 1 < structure.getChildElements(ppd).length) {
              PresentableNodeDescriptor nextChild = ppd.getChildToHighlightAt(nodeIndex + 1);
              int nextRow = getRowForPath(getPath(nextChild));
              TreePath prevPath = getPathForRow(nextRow - 1);
              if (prevPath != null) {
                last = toPresentableNode(prevPath.getLastPathComponent());
              }
            }
            else {
              // No next sibling: walk rows downward until we leave this subtree.
              int lastRow = getRowForPath(getPath(last));
              PresentableNodeDescriptor lastParent = last;
              boolean lastWasFound = false;
              for (int i = lastRow + 1; i < getRowCount(); i++) {
                PresentableNodeDescriptor eachNode = toPresentableNode(getPathForRow(i).getLastPathComponent());
                if (!node.isParentOf(eachNode)) {
                  last = lastParent;
                  lastWasFound = true;
                  break;
                }
                lastParent = eachNode;
              }
              if (!lastWasFound) {
                // Subtree runs to the end of the tree.
                last = toPresentableNode(getPathForRow(getRowCount() - 1).getLastPathComponent());
              }
            }
          }
        }
      }
      if (last == null) continue;
      Rectangle lastBounds = getPathBounds(getPath(last));
      if (firstBounds == null || lastBounds == null) continue;
      // One rounded rectangle spanning from the first to the last child row.
      Rectangle toPaint = new Rectangle(firstBounds.x, firstBounds.y, 0, (int)lastBounds.getMaxY() - firstBounds.y - 1);
      toPaint.width = getWidth() - toPaint.x - 4;
      final Color highlightColor = first.getHighlightColor();
      g.setColor(highlightColor);
      g.fillRoundRect(toPaint.x, toPaint.y, toPaint.width, toPaint.height, 4, 4);
      g.setColor(highlightColor.darker());
      g.drawRoundRect(toPaint.x, toPaint.y, toPaint.width, toPaint.height, 4, 4);
    }
  }
  config.restore();
}
/**
 * Recursively computes the maximum bottom ({@code [0]}) and right ({@code [1]})
 * coordinates of all visible descendants of {@code node}.
 */
private int[] getMax(final PresentableNodeDescriptor node, final AbstractTreeStructure structure) {
  int maxRight = 0;
  int maxBottom = 0;
  for (final Object child : structure.getChildElements(node)) {
    if (!(child instanceof PresentableNodeDescriptor)) continue;
    final TreePath childPath = getPath((PresentableNodeDescriptor)child);
    if (childPath == null) continue;
    if (isExpanded(childPath)) {
      // Recurse into expanded children before taking their own bounds.
      final int[] nested = getMax((PresentableNodeDescriptor)child, structure);
      maxBottom = Math.max(maxBottom, nested[0]);
      maxRight = Math.max(maxRight, nested[1]);
    }
    final Rectangle bounds = getPathBounds(childPath);
    if (bounds != null) {
      maxBottom = Math.max(maxBottom, (int)bounds.getMaxY());
      maxRight = Math.max(maxRight, (int)bounds.getMaxX());
    }
  }
  return new int[]{maxBottom, maxRight};
}
/** Extracts the {@link PresentableNodeDescriptor} user object from a path component, or {@code null}. */
@Nullable
private static PresentableNodeDescriptor toPresentableNode(final Object pathComponent) {
  if (!(pathComponent instanceof DefaultMutableTreeNode)) return null;
  final Object userObject = ((DefaultMutableTreeNode)pathComponent).getUserObject();
  if (!(userObject instanceof PresentableNodeDescriptor)) return null;
  return (PresentableNodeDescriptor)userObject;
}

/** Resolves the {@link TreePath} for a node descriptor via the tree's builder. */
public TreePath getPath(PresentableNodeDescriptor node) {
  // NOTE(review): getBuilderFor may return null for trees without a builder,
  // which would NPE below — confirm callers guarantee a builder is present.
  final AbstractTreeBuilder builder = AbstractTreeBuilder.getBuilderFor(this);
  final DefaultMutableTreeNode treeNode = builder.getNodeForElement(node);
  return treeNode != null ? new TreePath(treeNode.getPath()) : new TreePath(node);
}
/**
 * Selection model that can temporarily suppress selection-change events while
 * the tree is painting, restoring the held selection afterwards.
 */
private static class MySelectionModel extends DefaultTreeSelectionModel {
  private TreePath[] myHeldSelection;

  @Override
  protected void fireValueChanged(TreeSelectionEvent e) {
    // Swallow events while a selection is held (i.e. during paint).
    if (myHeldSelection != null) return;
    super.fireValueChanged(e);
  }

  /** Remembers the current selection and starts suppressing change events. */
  public void holdSelection() {
    myHeldSelection = getSelectionPaths();
  }

  /** Restores the held selection (silently, events still suppressed) and stops holding. */
  public void unholdSelection() {
    TreePath[] held = myHeldSelection;
    if (held == null) return;
    setSelectionPaths(held);
    myHeldSelection = null;
  }
}
private class MyMouseListener extends MouseAdapter {
  /**
   * On a right/middle button press, select the row under the mouse unless it
   * is already part of the current selection — this keeps a context-menu
   * invocation from collapsing an existing multi-selection.
   */
  @Override
  public void mousePressed(MouseEvent mouseevent) {
    if (!JBSwingUtilities.isLeftMouseButton(mouseevent) &&
        (JBSwingUtilities.isRightMouseButton(mouseevent) || JBSwingUtilities.isMiddleMouseButton(mouseevent))) {
      TreePath treepath = getPathForLocation(mouseevent.getX(), mouseevent.getY());
      if (treepath != null) {
        if (getSelectionModel().getSelectionMode() != TreeSelectionModel.SINGLE_TREE_SELECTION) {
          TreePath[] selectionPaths = getSelectionModel().getSelectionPaths();
          if (selectionPaths != null) {
            for (TreePath selectionPath : selectionPaths) {
              // Already selected: leave the multi-selection untouched.
              if (selectionPath != null && selectionPath.equals(treepath)) return;
            }
          }
        }
        getSelectionModel().setSelectionPath(treepath);
      }
    }
  }

  /** Swallow double-clicks on the expand control so they do not also activate the node. */
  @Override
  public void mouseReleased(MouseEvent e) {
    if (e.getButton() == MouseEvent.BUTTON1 && e.getClickCount() == 2 && isLocationInExpandControl(getClosestPathForLocation(e.getX(), e.getY()), e.getX())) {
      e.consume();
    }
  }
}
/**
 * Returns true if {@code mouseX} falls in the area of the row that is used to
 * expand/collapse the node, and the node at that row is not a leaf.
 */
protected boolean isLocationInExpandControl(@Nullable TreePath path, int mouseX) {
  if (path == null) return false;
  TreeUI ui = getUI();
  if (!(ui instanceof BasicTreeUI)) return false;
  BasicTreeUI treeUI = (BasicTreeUI)ui;
  if (!treeModel.isLeaf(path.getLastPathComponent())) {
    Insets insets = Tree.this.getInsets();
    // Width of the expand icon; fall back to 8px when the LaF provides none.
    int boxWidth = treeUI.getExpandedIcon() != null ? treeUI.getExpandedIcon().getIconWidth() : 8;
    // Horizontal offset of the control for this nesting depth.
    int boxLeftX = treeUI.getLeftChildIndent() + treeUI.getRightChildIndent() * (path.getPathCount() - 1);
    if (getComponentOrientation().isLeftToRight()) {
      boxLeftX = boxLeftX + insets.left - treeUI.getRightChildIndent() + 1;
    }
    else {
      // Mirror the computation for right-to-left orientation.
      boxLeftX = getWidth() - boxLeftX - insets.right + treeUI.getRightChildIndent() - 1;
    }
    // Center the hit box on the computed position.
    boxLeftX -= getComponentOrientation().isLeftToRight() ? (int)Math.ceil(boxWidth / 2.0) : (int)Math.floor(boxWidth / 2.0);
    return mouseX >= boxLeftX && mouseX < boxLeftX + boxWidth;
  }
  return false;
}
/**
 * This is patch for 4893787 SUN bug. The problem is that the BasicTreeUI.FocusHandler repaints
 * only lead selection index on focus changes. It's a problem with multiple selected nodes.
 */
private class MyFocusListener extends FocusAdapter {
  /** Repaints the bounds of every selected path so all selected rows reflect focus state. */
  private void focusChanges() {
    TreePath[] selection = getSelectionPaths();
    if (selection == null) return;
    TreeUI ui = getUI();
    for (TreePath path : selection) {
      Rectangle bounds = ui.getPathBounds(Tree.this, path);
      if (bounds != null) {
        repaint(bounds);
      }
    }
  }

  @Override
  public void focusGained(FocusEvent e) {
    focusChanges();
  }

  @Override
  public void focusLost(FocusEvent e) {
    focusChanges();
  }
}
/** Switches this tree to angled connecting lines. */
public final void setLineStyleAngled() {
  UIUtil.setLineStyleAngled(this);
}

/**
 * Collects the last path components of the current selection that are
 * instances of {@code nodeType}, optionally narrowed by {@code filter}.
 *
 * @param nodeType class of nodes to collect
 * @param filter   optional filter; nodes it rejects are skipped
 * @return array of selected nodes of the requested type; possibly empty, never null
 */
@NotNull
public <T> T[] getSelectedNodes(Class<T> nodeType, @Nullable NodeFilter<T> filter) {
  TreePath[] paths = getSelectionPaths();
  if (paths == null) return (T[])Array.newInstance(nodeType, 0);
  ArrayList<T> nodes = new ArrayList<T>();
  for (TreePath path : paths) {
    Object last = path.getLastPathComponent();
    // Class.isInstance/cast replace the previous isAssignableFrom + unchecked (T) casts.
    if (nodeType.isInstance(last)) {
      T node = nodeType.cast(last);
      if (filter != null && !filter.accept(node)) continue;
      nodes.add(node);
    }
  }
  T[] result = (T[])Array.newInstance(nodeType, nodes.size());
  nodes.toArray(result);
  return result;
}

/** Accepts or rejects candidate nodes for {@link #getSelectedNodes(Class, NodeFilter)}. */
@FunctionalInterface
public interface NodeFilter<T> {
  boolean accept(T node);
}
/**
 * Records the rendered text of all selected nodes under the key
 * {@code "selectedNodes"}, joined with semicolons; does nothing when there is
 * no selection or no renderable text.
 */
@Override
public void putInfo(@NotNull Map<String, String> info) {
  TreePath[] selection = getSelectionPaths();
  if (selection == null) return;
  StringBuilder nodesText = new StringBuilder();
  for (TreePath path : selection) {
    Object node = path.getLastPathComponent();
    Component renderer =
      getCellRenderer().getTreeCellRendererComponent(this, node, false, false, false, getRowForPath(path), false);
    if (renderer == null) continue;
    if (nodesText.length() > 0) {
      nodesText.append(";");
    }
    nodesText.append(renderer);
  }
  if (nodesText.length() > 0) {
    info.put("selectedNodes", nodesText.toString());
  }
}
/**
 * When enabled, freezes the current preferred size as a lower bound so the
 * tree does not shrink; disabling releases the bound and revalidates.
 */
public void setHoldSize(boolean hold) {
  if (hold) {
    if (myHoldSize == null) {
      myHoldSize = getPreferredSize();
    }
  }
  else if (myHoldSize != null) {
    myHoldSize = null;
    revalidate();
  }
}

@Override
public Dimension getPreferredSize() {
  Dimension size = super.getPreferredSize();
  Dimension held = myHoldSize;
  if (held != null) {
    // Never report less than the held size in either dimension.
    size.width = Math.max(size.width, held.width);
    size.height = Math.max(size.height, held.height);
  }
  return size;
}

/** Whether horizontal auto-scrolling is currently enabled. */
public boolean isHorizontalAutoScrollingEnabled() {
  return myHorizontalAutoScrolling;
}

public void setHorizontalAutoScrollingEnabled(boolean enabled) {
  myHorizontalAutoScrolling = enabled;
}
/**
 * Returns the deepest visible component
 * that will be rendered at the specified location.
 *
 * @param x horizontal location in the tree
 * @param y vertical location in the tree
 * @return the deepest visible component of the renderer, or {@code null}
 */
@Nullable
public Component getDeepestRendererComponentAt(int x, int y) {
  int row = getRowForLocation(x, y);
  if (row < 0) return null;
  TreeCellRenderer renderer = getCellRenderer();
  if (renderer == null) return null;
  TreePath path = getPathForRow(row);
  Object node = path.getLastPathComponent();
  Component component = renderer.getTreeCellRendererComponent(this, node,
                                                              isRowSelected(row),
                                                              isExpanded(row),
                                                              getModel().isLeaf(node),
                                                              row, true);
  Rectangle bounds = getPathBounds(path);
  if (bounds == null) return null;
  component.setBounds(bounds); // initialize size to layout complex renderer
  return SwingUtilities.getDeepestComponentAt(component, x - bounds.x, y - bounds.y);
}
}
| |
package org.schoellerfamily.gedobject.datamodel.factory;
import java.util.HashMap;
import java.util.Map;
/**
* Build the token table. Broken up into a bunch of steps to reduce complexity
* scores.
*
* @author Dick Schoeller
*/
public final class TokenTableInitializer {
    /**
     * The token table, keyed by GEDCOM tag (the "short string").
     */
    private final Map<String, GedToken> tokens = new HashMap<>();

    /**
     * The list of short and long strings for standard attributes.
     * Each pair is {GEDCOM tag, display string}; all of these map to the
     * standard attribute factory.
     */
    private static final String[][] ATTRIBUTE_PAIRS = {
        {"ATTRIBUTE", "Attribute"},
        {"ABBR", "Abbreviation"},
        {"ABSTRACTOR", "Abstractor"},
        {"ABT", "About"},
        {"ACTIVE", "Active"},
        {"ADDR", "Address"},
        {"ADOP", "Adopted"},
        {"ADBAP", "Adult Baptism"},
        {"AFN", "Ancestral File Number"},
        {"AFT", "After"},
        {"AGE", "Age"},
        {"ALIA", "Alias"},
        {"ANCE", "Generations of ancestors"},
        // NOTE(review): "Anullment" appears misspelled ("Annulment"); kept
        // as-is because stored/display strings may already rely on it.
        {"ANUL", "Anullment"},
        {"AUDIO", "Audio"},
        {"AUTH", "Author"},
        {"AUTHOR", "Author"},
        {"BAPM", "Baptism"},
        {"BARM", "Bar Mitzvah"},
        {"BASM", "Bat Mitzvah"},
        {"BEF", "Before"},
        {"BEGIN", "Begin"},
        {"BET", "Between"},
        {"BIRT", "Birth"},
        {"BOOK", "Book"},
        {"BRIS", "Bris Milah"},
        {"BURI", "Burial"},
        {"BUSINESS", "Business"},
        {"CANCELED", "Canceled"},
        {"CARD", "Card"},
        {"CAST", "Caste"},
        {"CAUS", "Cause"},
        {"CEME", "Cemetery"},
        {"CENS", "Census"},
        {"CENSUS", "Census"},
        {"CHAN", "Changed"},
        {"CHAR", "Character Set"},
        {"CHR", "Christening"},
        {"CHURCH", "Church"},
        {"COMPILER", "Compiler"},
        {"COMPLETED", "Completed"},
        {"CONF", "Confirmation"},
        {"COPY", "Copy"},
        {"COURT", "Court"},
        {"DEAT", "Death"},
        {"DESC", "Generations of descendants"},
        {"DEST", "Destination"},
        {"DIV", "Divorce"},
        {"DIVF", "Divorce Final"},
        {"DIVORCED", "Divorced"},
        {"DONE", "Done"},
        {"EDITOR", "Editor"},
        {"EDUC", "Education"},
        {"ELECTRONIC", "Electronic"},
        {"EMIG", "Emigration"},
        {"ENGA", "Engaged"},
        {"EVEN", "Event"},
        {"EXTRACT", "Extract"},
        {"FAMF", "Family file"},
        {"FATH", "Father"},
        {"FEMALE", "Female"},
        {"FICHE", "Fiche"},
        {"FILE", "File"},
        {"FILM", "Film"},
        {"FORM", "Format"},
        {"FOUND", "Found"},
        {"FROM", "From"},
        {"GEDC", "GEDCOM"},
        {"GODP", "Godparent"},
        {"GOVERNMENT", "Government"},
        {"GRAD", "Graduation"},
        {"HEIR", "Heir"},
        {"HISTORY", "History"},
        {"IMMI", "Immigration"},
        {"INFANT", "Infant"},
        {"INFORMANT", "Informant"},
        {"INTERVIEW", "Interview"},
        {"INTERVIEWER", "Interviewer"},
        {"ISSUE", "Issue"},
        {"ITEM", "Item"},
        {"JOURNAL", "Journal"},
        {"LANG", "Language"},
        {"LETTER", "Letter"},
        {"LINE", "Line"},
        {"LINEAGE", "Lineage"},
        {"MAGAZINE", "Magazine"},
        {"MALE", "Male"},
        {"MANUSCRIPT", "Manuscript"},
        {"MAP", "Map"},
        {"MARB", "Marriage Bans"},
        {"MARL", "Marriage License"},
        {"MARR", "Marriage"},
        {"MARRIED", "Married"},
        {"MEDI", "Media"},
        {"MEMBER", "Member"},
        {"MILITARY", "Military"},
        {"MOTH", "Mother"},
        {"NAMR", "Name (religious)"},
        {"NAMING", "Naming"},
        {"NAMS", "NAMS"},
        {"NATU", "Naturalized"},
        {"NCHI", "Number of Children"},
        {"NEWLINE", "Newline"},
        {"NEWSPAPER", "Newspaper"},
        {"NMR", "Not Married"},
        {"NUMBER", "Number"},
        {"OCCU", "Occupation"},
        {"ORDI", "Ordinance process flag"},
        {"ORDERED", "Ordered"},
        {"ORGANIZATION", "Organization"},
        {"ORIGINAL", "Original"},
        {"OTHER", "Other"},
        {"PAGE", "Page"},
        {"PERIODICAL", "Periodical"},
        {"PERSONAL", "Personal"},
        {"PHON", "Phone Number"},
        {"PHOTO", "Photograph"},
        {"PHOTOCOPY", "Photocopy"},
        {"PLANNED", "Planned"},
        {"PLOT", "Plot"},
        {"PROB", "Probate"},
        {"PROVED", "Proved"},
        {"PUBL", "Published"},
        {"QUAY", "Surety"},
        {"RECITED", "Recited"},
        {"REFN", "Reference Number"},
        {"RELI", "Religion"},
        {"REPO", "Repository"},
        {"RESI", "Residence"},
        {"RESN", "Restriction"},
        {"RETI", "Retired"},
        {"ROLE", "Role"},
        {"SEX", "Sex"},
        {"SINGLE", "Single"},
        {"SITE", "Site"},
        {"SOUND", "Sound"},
        {"SPOU", "Spouse"},
        {"SSN", "Social Security Number"},
        {"STILLBORN", "Stillborn"},
        {"SUBMITTED", "Submitted"},
        {"TEMP", "Temple code"},
        {"TEXT", "Text"},
        {"TIME", "Time"},
        {"TITL", "Title"},
        {"TO", "To"},
        {"TOKEN", "Token"},
        {"TOMBSTONE", "Tombstone"},
        {"TRADITION", "Tradition"},
        {"TRANSCRIBER", "Transcriber"},
        {"TRANSCRIPT", "Transcript"},
        {"TYPE", "Type"},
        {"UNDERSCORE", "Underscore"},
        {"UNICODE", "Unicode"},
        {"UNPUBLISHED", "Unpublished"},
        {"UNVEIL", "Headstone unveiled"},
        {"VERS", "Version"},
        {"VIDEO", "Video"},
        {"VITAL", "Vital"},
        {"WIDOWED", "Widowed"},
        {"WILL", "Will"},
        {"WITN", "Witness"},
        {"XREF", "Cross Reference"},
    };

    /**
     * Constructor.
     *
     * Populates the map of tokens.
     */
    public TokenTableInitializer() {
        initAttributeTokens();
        initLevel0FactoryTokens();
        initSpecialFactoryTokens();
    }

    /**
     * Initialize a chunk of the tokens that go to the standard attribute
     * factory.
     */
    private void initAttributeTokens() {
        for (final String[] pair : ATTRIBUTE_PAIRS) {
            put(pair[0], pair[1], AbstractGedObjectFactory.ATTR_FACTORY);
        }
    }

    /**
     * Method to initialize the tokens for top level items.
     * Each level-0 record type has its own concrete factory.
     */
    private void initLevel0FactoryTokens() {
        put("ROOT", "Root", AbstractGedObjectFactory.ROOT_FACTORY);
        put("HEAD", "Header", AbstractGedObjectFactory.HEAD_FACTORY);
        put("FAM", "Family", AbstractGedObjectFactory.FAMILY_FACTORY);
        put("INDI", "Person", AbstractGedObjectFactory.PERSON_FACTORY);
        put("NOTE", "Note", AbstractGedObjectFactory.NOTE_FACTORY);
        put("OBJE", "Multimedia", AbstractGedObjectFactory.MULTIMEDIA_FACTORY);
        put("SOUR", "Source", AbstractGedObjectFactory.SOURCE_FACTORY);
        put("SUBM", "Submitter", AbstractGedObjectFactory.SUBMITTER_FACTORY);
        put("SUBN", "Submission", AbstractGedObjectFactory.SUBMISSION_FACTORY);
        put("TRLR", "Trailer", AbstractGedObjectFactory.TRAILER_FACTORY);
    }

    /**
     * Method to initialize the tokens that use something other than the
     * attribute factory.
     */
    private void initSpecialFactoryTokens() {
        put("CHIL", "Child", AbstractGedObjectFactory.CHILD_FACTORY);
        put("CONC", "Concatenate", AbstractGedObjectFactory.CONCAT_FACTORY);
        put("CONT", "Continuation", AbstractGedObjectFactory.CONTIN_FACTORY);
        put("DATE", "Date", AbstractGedObjectFactory.DATE_FACTORY);
        put("FAMC", "Child of Family", AbstractGedObjectFactory.FAMC_FACTORY);
        put("FAMS", "Spouse of Family", AbstractGedObjectFactory.FAMS_FACTORY);
        put("HUSB", "Husband", AbstractGedObjectFactory.HUSBAND_FACTORY);
        put("LINK", "Link", AbstractGedObjectFactory.LINK_FACTORY);
        put("NAME", "Name", AbstractGedObjectFactory.NAME_FACTORY);
        put("PLAC", "Place", AbstractGedObjectFactory.PLACE_FACTORY);
        put("PLACE", "Place", AbstractGedObjectFactory.PLACE_FACTORY);
        put("WIFE", "Wife", AbstractGedObjectFactory.WIFE_FACTORY);
    }

    /**
     * Registers one token in the table.
     *
     * @param shortstring the short string in gedcom
     * @param fullstring the long string in the db
     * @param factory the associated concrete factory
     */
    private void put(final String shortstring, final String fullstring,
            final AbstractGedObjectFactory factory) {
        tokens.put(shortstring, new GedToken(fullstring, factory));
    }

    /**
     * Return the token table.
     *
     * NOTE(review): this exposes the internal mutable map; callers appear to
     * use it read-only — confirm before hardening with an unmodifiable view.
     *
     * @return the token table.
     */
    /* default */ Map<String, GedToken> getTokens() {
        return tokens;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.