gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package worms.model;
import java.util.Collection;
import java.util.Random;
/**
 * Facade adapting the worms game model (World, Worm, Food, Projectile, Team)
 * to the {@link IFacade} interface used by the GUI.
 *
 * <p>Almost every method is a thin delegation to the corresponding model
 * object; methods still marked TODO are not implemented yet.
 *
 * <p>Fixes in this revision:
 * <ul>
 *   <li>{@code isAlive(Worm)} and {@code isActive(Projectile)} had their
 *       boolean sense inverted (they returned {@code isDestroyed()} directly,
 *       unlike {@code isActive(Food)} which correctly negates it).</li>
 *   <li>{@code startNextTurn(World)} operated on a private {@code worm} field
 *       that was never assigned (guaranteed NullPointerException) instead of
 *       the world's current worm; the dead field has been removed.</li>
 * </ul>
 */
public class Facade implements IFacade
{
    @Override
    public boolean canTurn(Worm worm, double angle)
    {
        return worm.canTurn(angle);
    }

    @Override
    public void turn(Worm worm, double angle)
    {
        worm.turn(angle);
    }

    @Override
    public double getX(Worm worm)
    {
        return worm.getPosition().X;
    }

    @Override
    public double getY(Worm worm)
    {
        return worm.getPosition().Y;
    }

    @Override
    public double getOrientation(Worm worm)
    {
        return worm.getDirection();
    }

    @Override
    public double getRadius(Worm worm)
    {
        return worm.getRadius();
    }

    @Override
    public void setRadius(Worm worm, double newRadius)
    {
        worm.setRadius(newRadius);
    }

    @Override
    public int getActionPoints(Worm worm)
    {
        return worm.getActionPoints();
    }

    @Override
    public int getMaxActionPoints(Worm worm)
    {
        return worm.getMaxActionPoints();
    }

    @Override
    public String getName(Worm worm)
    {
        return worm.getName();
    }

    @Override
    public void rename(Worm worm, String newName)
    {
        worm.setName(newName);
    }

    @Override
    public double getMass(Worm worm)
    {
        return worm.getMass();
    }

    @Override
    public void addEmptyTeam(World world, String newName)
    {
        world.addTeam(new Team(newName));
    }

    @Override
    public void addNewFood(World world)
    {
        world.spawnObject(new Food());
    }

    @Override
    public void addNewWorm(World world)
    {
        world.spawnObject(new Worm());
    }

    @Override
    public boolean canFall(Worm worm)
    {
        // TODO: falling checks are not implemented yet.
        return false;
    }

    @Override
    public boolean canMove(Worm worm)
    {
        // TODO: movement validation is not implemented yet.
        return true;
    }

    /** Creates a food item at (x, y) and registers it with the world. */
    public Food createFood(World world, double x, double y)
    {
        Food food = new Food();
        food.setPosition(new Location(x, y));
        world.addObject(food);
        return food;
    }

    @Override
    public World createWorld(double width, double height, boolean[][] passableMap, Random random)
    {
        return new World(width, height, passableMap, random);
    }

    /** Creates a worm with the given state and registers it with the world. */
    public Worm createWorm(World world, double x, double y, double direction, double radius, String name)
    {
        Worm worm = new Worm(x, y, direction, radius, name);
        world.addObject(worm);
        return worm;
    }

    @Override
    public void fall(Worm worm)
    {
        worm.fall();
    }

    @Override
    public Projectile getActiveProjectile(World world)
    {
        return world.getActiveProjectile();
    }

    @Override
    public Worm getCurrentWorm(World world)
    {
        return world.getCurrentWorm();
    }

    @Override
    public Collection<Food> getFood(World world)
    {
        return world.getObjectsOfType(Food.class);
    }

    @Override
    public double getHitPoints(Worm worm)
    {
        return worm.getHP();
    }

    @Override
    public double[] getJumpStep(Projectile projectile, double time)
    {
        Location position = projectile.jumpStep(time);
        return new double[] { position.X, position.Y };
    }

    @Override
    public double[] getJumpStep(Worm worm, double time)
    {
        Location position = worm.jumpStep(time);
        return new double[] { position.X, position.Y };
    }

    @Override
    public double getJumpTime(Projectile projectile, double timeStep)
    {
        // NOTE(review): timeStep is ignored by the model's jumpTime() — confirm intended.
        return projectile.jumpTime();
    }

    @Override
    public double getJumpTime(Worm worm, double timeStep)
    {
        // NOTE(review): timeStep is ignored by the model's jumpTime() — confirm intended.
        return worm.jumpTime();
    }

    @Override
    public double getMaxHitPoints(Worm worm)
    {
        return worm.getMaxHP();
    }

    @Override
    public double getMinimalRadius(Worm worm)
    {
        return worm.getMinRadius();
    }

    @Override
    public double getRadius(Food food)
    {
        return food.getRadius();
    }

    @Override
    public double getRadius(Projectile projectile)
    {
        return projectile.getRadius();
    }

    @Override
    public String getSelectedWeapon(Worm worm)
    {
        // TODO: weapon selection is not implemented yet; placeholder value.
        return "Bazooka";
    }

    @Override
    public String getTeamName(Worm worm)
    {
        // A worm without a team has no team name.
        if (worm.getTeam() != null)
        {
            return worm.getTeam().getName();
        }
        return null;
    }

    @Override
    public Team[] getWinner(World world)
    {
        // A winner exists only once a single team remains.
        if (world.getTeams().length == 1)
            return world.getTeams();
        return null;
    }

    @Override
    public Collection<Worm> getWorms(World world)
    {
        return world.getObjectsOfType(Worm.class);
    }

    @Override
    public double getX(Food food)
    {
        return food.getPosition().X;
    }

    @Override
    public double getX(Projectile projectile)
    {
        return projectile.getPosition().X;
    }

    @Override
    public double getY(Food food)
    {
        return food.getPosition().Y;
    }

    @Override
    public double getY(Projectile projectile)
    {
        return projectile.getPosition().Y;
    }

    @Override
    public boolean isActive(Food food)
    {
        return !food.isDestroyed();
    }

    @Override
    public boolean isActive(Projectile projectile)
    {
        // FIX: boolean sense was inverted — a projectile is active while NOT destroyed,
        // consistent with isActive(Food).
        return !projectile.isDestroyed();
    }

    @Override
    public boolean isAdjacent(World world, double x, double y, double radius)
    {
        return world.isOccupiable(new Location(x, y), radius);
    }

    @Override
    public boolean isAlive(Worm worm)
    {
        // FIX: boolean sense was inverted — a worm is alive while NOT destroyed.
        return !worm.isDestroyed();
    }

    @Override
    public boolean isGameFinished(World world)
    {
        return world.isGameFinished();
    }

    @Override
    public boolean isImpassable(World world, double x, double y, double radius)
    {
        return world.isImpassable(new Location(x, y), radius);
    }

    @Override
    public void jump(Projectile projectile, double timeStep)
    {
        projectile.jump();
    }

    @Override
    public void jump(Worm worm, double timeStep)
    {
        worm.jump();
    }

    @Override
    public void move(Worm worm)
    {
        // TODO: number of steps is currently hard-coded to one.
        worm.move(1);
    }

    @Override
    public void selectNextWeapon(Worm worm)
    {
        // TODO: weapon cycling is not implemented yet.
    }

    @Override
    public void shoot(Worm worm, int yield)
    {
        // TODO: shooting is not implemented yet.
    }

    @Override
    public void startGame(World world)
    {
        world.start();
    }

    @Override
    public void startNextTurn(World world)
    {
        // FIX: previously mutated a never-assigned private field (NPE). Advance to the
        // next worm, then replenish its points for the new turn.
        world.selectNextWorm();
        Worm current = world.getCurrentWorm();
        if (current != null)
        {
            // +10 HP per turn start — value kept from the original code; TODO confirm
            // against the assignment's turn rules.
            current.setHP(current.getHP() + 10);
            current.setActionPoints(current.getMaxActionPoints());
        }
    }

    @Override
    public Worm createWorm(double x, double y, double direction, double radius, String name)
    {
        // TODO: legacy world-less overload, not implemented.
        return null;
    }

    @Override
    public boolean canMove(Worm worm, int nbSteps)
    {
        // TODO: legacy step-count overload, not implemented.
        return false;
    }

    @Override
    public void move(Worm worm, int nbSteps)
    {
        // TODO: legacy step-count overload, not implemented.
    }

    @Override
    public void jump(Worm worm)
    {
        // TODO: legacy overload, not implemented.
    }

    @Override
    public double getJumpTime(Worm worm)
    {
        // TODO: legacy overload, not implemented.
        return 0;
    }
}
| |
package com.planet_ink.coffee_mud.Abilities.Misc;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Abilities.StdAbility;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2005-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Property ability modelling a MOB's addiction to a named substance (the name is
 * stored in this ability's miscText, set in invoke()). While affected, the MOB is
 * periodically nagged to get a "fix"; eating/drinking or smoking something whose
 * name matches resets the craving timer, and going a full withdrawal period
 * without a fix removes the addiction.
 */
public class Addictions extends StdAbility
{
    @Override
    public String ID()
    {
        return "Addictions";
    }

    private final static String localizedName = CMLib.lang().L("Addictions");

    @Override
    public String name()
    {
        return localizedName;
    }

    // Timestamp (ms) of the last time the MOB satisfied the addiction.
    private long lastFix = System.currentTimeMillis();

    @Override
    public String displayText()
    {
        // Only shows in the affects list while actively craving.
        return craving() ? "(Addiction to " + text() + ")" : "";
    }

    @Override
    protected int canAffectCode()
    {
        return CAN_MOBS;
    }

    @Override
    protected int canTargetCode()
    {
        return 0;
    }

    @Override
    public int abstractQuality()
    {
        return Ability.QUALITY_OK_SELF;
    }

    @Override
    public int classificationCode()
    {
        return Ability.ACODE_PROPERTY;
    }

    @Override
    public boolean isAutoInvoked()
    {
        return true;
    }

    @Override
    public boolean canBeUninvoked()
    {
        return false;
    }

    // Wearable "pipe" item (a lit Light+Container on the mouth) whose contents
    // matched the addiction; its presence lets a later puff count as a fix.
    private Item puffCredit=null;
    // Craving starts one hour after the last fix ...
    private final static long CRAVE_TIME=TimeManager.MILI_HOUR;
    // ... and the addiction is kicked after a full day without one.
    private final static long WITHDRAW_TIME=TimeManager.MILI_DAY;

    /** True once more than CRAVE_TIME has elapsed since the last fix. */
    private boolean craving()
    {
        return (System.currentTimeMillis() - lastFix) > CRAVE_TIME;
    }

    @Override
    public boolean tick(Tickable ticking, int tickID)
    {
        if(!super.tick(ticking,tickID))
            return false;
        // Nag probability grows with the number of hours since the last fix.
        if((craving())
        &&(CMLib.dice().rollPercentage()<=((System.currentTimeMillis()-lastFix)/TimeManager.MILI_HOUR))
        &&(ticking instanceof MOB))
        {
            if((System.currentTimeMillis()-lastFix)>WITHDRAW_TIME)
            {
                // Withdrawal survived: make the effect removable, then remove it.
                ((MOB)ticking).tell(L("You've managed to kick your addiction."));
                canBeUninvoked=true;
                unInvoke();
                ((MOB)ticking).delEffect(this);
                return false;
            }
            // Invalidate the remembered pipe if it was destroyed, put away, or
            // changed hands.
            if((puffCredit!=null)
            &&(puffCredit.amDestroyed()
                ||puffCredit.amWearingAt(Wearable.IN_INVENTORY)
                ||puffCredit.owner()!=(MOB)affected))
                puffCredit=null;
            // Random craving message, one of seven variants.
            switch(CMLib.dice().roll(1,7,0))
            {
            case 1:
                ((MOB) ticking).tell(L("Man, you could sure use some @x1.", text()));
                break;
            case 2:
                ((MOB) ticking).tell(L("Wouldn't some @x1 be great right about now?", text()));
                break;
            case 3:
                ((MOB) ticking).tell(L("You are seriously craving @x1.", text()));
                break;
            case 4:
                ((MOB) ticking).tell(L("There's got to be some @x1 around here somewhere.", text()));
                break;
            case 5:
                ((MOB) ticking).tell(L("You REALLY want some @x1.", text()));
                break;
            case 6:
                ((MOB) ticking).tell(L("You NEED some @x1, NOW!", text()));
                break;
            case 7:
                ((MOB) ticking).tell(L("Some @x1 would be lovely.", text()));
                break;
            }
        }
        return true;
    }

    @Override
    public boolean okMessage(Environmental host, CMMsg msg)
    {
        if(affected instanceof MOB)
        {
            // When the addict wears a mouth-wearable lit container (a pipe) whose
            // first content matches the substance name, remember it as puffCredit.
            if((msg.source()==affected)
            &&(msg.targetMinor()==CMMsg.TYP_WEAR)
            &&(msg.target() instanceof Light)
            &&(msg.target() instanceof Container)
            &&(CMath.bset(((Item)msg.target()).rawProperLocationBitmap(),Wearable.WORN_MOUTH)))
            {
                final List<Item> contents=((Container)msg.target()).getContents();
                if(contents.size()>0)
                {
                    final Environmental content=contents.get(0);
                    if(CMLib.english().containsString(content.Name(),text()))
                        puffCredit=(Item)msg.target();
                }
            }
        }
        return true;
    }

    @Override
    public void executeMsg(final Environmental myHost, final CMMsg msg)
    {
        if(affected instanceof MOB)
        {
            if(msg.source()==affected)
            {
                // Eating or drinking an item whose name matches the substance
                // counts as a fix.
                if(((msg.targetMinor()==CMMsg.TYP_EAT)||(msg.targetMinor()==CMMsg.TYP_DRINK))
                &&((msg.target() instanceof Food)||(msg.target() instanceof Drink))
                &&(msg.target() instanceof Item)
                &&(CMLib.english().containsString(msg.target().Name(),text())))
                    lastFix=System.currentTimeMillis();
                // Puffing on a lit, mouth-worn light (either matching by name or
                // previously credited via okMessage) also counts as a fix.
                if((msg.amISource((MOB)affected))
                &&(msg.targetMinor()==CMMsg.TYP_HANDS)
                &&(msg.target() instanceof Light)
                &&(msg.tool() instanceof Light)
                &&(msg.target()==msg.tool())
                &&(((Light)msg.target()).amWearingAt(Wearable.WORN_MOUTH))
                &&(((Light)msg.target()).isLit())
                &&((puffCredit!=null)||CMLib.english().containsString(msg.target().Name(),text())))
                    lastFix=System.currentTimeMillis();
            }
        }
        super.executeMsg(myHost,msg);
    }

    @Override
    public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel)
    {
        final Physical target=givenTarget;
        if(target==null)
            return false;
        // Already addicted to something via this ability: do nothing.
        if(target.fetchEffect(ID())!=null)
            return false;
        if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
            return false;
        final boolean success=proficiencyCheck(mob,0,auto);
        if(success)
        {
            // Derive the substance keyword from the target's name by stripping
            // common leading articles/quantifiers.
            String addiction=target.Name().toUpperCase();
            if(addiction.toUpperCase().startsWith("A POUND OF "))
                addiction=addiction.substring(11);
            if(addiction.toUpperCase().startsWith("A "))
                addiction=addiction.substring(2);
            if(addiction.toUpperCase().startsWith("AN "))
                addiction=addiction.substring(3);
            if(addiction.toUpperCase().startsWith("SOME "))
                addiction=addiction.substring(5);
            final CMMsg msg=CMClass.getMsg(mob,target,this,CMMsg.MSG_OK_VISUAL,"");
            if(mob.location()!=null)
            {
                // Route through the room's message pipeline when possible.
                if(mob.location().okMessage(mob,msg))
                {
                    mob.location().send(mob,msg);
                    final Ability A=(Ability)copyOf();
                    A.setMiscText(addiction.trim());
                    mob.addNonUninvokableEffect(A);
                }
            }
            else
            {
                // No room (e.g. during creation): apply the effect directly.
                final Ability A=(Ability)copyOf();
                A.setMiscText(addiction.trim());
                mob.addNonUninvokableEffect(A);
            }
        }
        return success;
    }
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simpleemail;
import javax.annotation.Generated;
import com.amazonaws.services.simpleemail.model.*;
import com.amazonaws.*;
import com.amazonaws.services.simpleemail.waiters.AmazonSimpleEmailServiceWaiters;
/**
* Abstract implementation of {@code AmazonSimpleEmailService}. Convenient method forms pass through to the
* corresponding overload that takes a request object, which throws an {@code UnsupportedOperationException}.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
// Code-generated stub (see @Generated): every service operation throws
// UnsupportedOperationException. Subclasses override only the operations they
// actually need. The no-argument convenience overloads delegate to the
// request-object form, so they throw too unless that form is overridden.
public class AbstractAmazonSimpleEmailService implements AmazonSimpleEmailService {

    protected AbstractAmazonSimpleEmailService() {
    }

    @Override
    public void setEndpoint(String endpoint) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public void setRegion(com.amazonaws.regions.Region region) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CloneReceiptRuleSetResult cloneReceiptRuleSet(CloneReceiptRuleSetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateConfigurationSetResult createConfigurationSet(CreateConfigurationSetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateConfigurationSetEventDestinationResult createConfigurationSetEventDestination(CreateConfigurationSetEventDestinationRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateReceiptFilterResult createReceiptFilter(CreateReceiptFilterRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateReceiptRuleResult createReceiptRule(CreateReceiptRuleRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateReceiptRuleSetResult createReceiptRuleSet(CreateReceiptRuleSetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteConfigurationSetResult deleteConfigurationSet(DeleteConfigurationSetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteConfigurationSetEventDestinationResult deleteConfigurationSetEventDestination(DeleteConfigurationSetEventDestinationRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteIdentityResult deleteIdentity(DeleteIdentityRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteIdentityPolicyResult deleteIdentityPolicy(DeleteIdentityPolicyRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteReceiptFilterResult deleteReceiptFilter(DeleteReceiptFilterRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteReceiptRuleResult deleteReceiptRule(DeleteReceiptRuleRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteReceiptRuleSetResult deleteReceiptRuleSet(DeleteReceiptRuleSetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteVerifiedEmailAddressResult deleteVerifiedEmailAddress(DeleteVerifiedEmailAddressRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribeActiveReceiptRuleSetResult describeActiveReceiptRuleSet(DescribeActiveReceiptRuleSetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribeConfigurationSetResult describeConfigurationSet(DescribeConfigurationSetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribeReceiptRuleResult describeReceiptRule(DescribeReceiptRuleRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribeReceiptRuleSetResult describeReceiptRuleSet(DescribeReceiptRuleSetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public GetIdentityDkimAttributesResult getIdentityDkimAttributes(GetIdentityDkimAttributesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public GetIdentityMailFromDomainAttributesResult getIdentityMailFromDomainAttributes(GetIdentityMailFromDomainAttributesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public GetIdentityNotificationAttributesResult getIdentityNotificationAttributes(GetIdentityNotificationAttributesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public GetIdentityPoliciesResult getIdentityPolicies(GetIdentityPoliciesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public GetIdentityVerificationAttributesResult getIdentityVerificationAttributes(GetIdentityVerificationAttributesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public GetSendQuotaResult getSendQuota(GetSendQuotaRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    // Convenience overload: delegates to the request-object form above.
    @Override
    public GetSendQuotaResult getSendQuota() {
        return getSendQuota(new GetSendQuotaRequest());
    }

    @Override
    public GetSendStatisticsResult getSendStatistics(GetSendStatisticsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    // Convenience overload: delegates to the request-object form above.
    @Override
    public GetSendStatisticsResult getSendStatistics() {
        return getSendStatistics(new GetSendStatisticsRequest());
    }

    @Override
    public ListConfigurationSetsResult listConfigurationSets(ListConfigurationSetsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListIdentitiesResult listIdentities(ListIdentitiesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    // Convenience overload: delegates to the request-object form above.
    @Override
    public ListIdentitiesResult listIdentities() {
        return listIdentities(new ListIdentitiesRequest());
    }

    @Override
    public ListIdentityPoliciesResult listIdentityPolicies(ListIdentityPoliciesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListReceiptFiltersResult listReceiptFilters(ListReceiptFiltersRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListReceiptRuleSetsResult listReceiptRuleSets(ListReceiptRuleSetsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListVerifiedEmailAddressesResult listVerifiedEmailAddresses(ListVerifiedEmailAddressesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    // Convenience overload: delegates to the request-object form above.
    @Override
    public ListVerifiedEmailAddressesResult listVerifiedEmailAddresses() {
        return listVerifiedEmailAddresses(new ListVerifiedEmailAddressesRequest());
    }

    @Override
    public PutIdentityPolicyResult putIdentityPolicy(PutIdentityPolicyRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ReorderReceiptRuleSetResult reorderReceiptRuleSet(ReorderReceiptRuleSetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public SendBounceResult sendBounce(SendBounceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public SendEmailResult sendEmail(SendEmailRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public SendRawEmailResult sendRawEmail(SendRawEmailRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public SetActiveReceiptRuleSetResult setActiveReceiptRuleSet(SetActiveReceiptRuleSetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public SetIdentityDkimEnabledResult setIdentityDkimEnabled(SetIdentityDkimEnabledRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public SetIdentityFeedbackForwardingEnabledResult setIdentityFeedbackForwardingEnabled(SetIdentityFeedbackForwardingEnabledRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public SetIdentityHeadersInNotificationsEnabledResult setIdentityHeadersInNotificationsEnabled(SetIdentityHeadersInNotificationsEnabledRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public SetIdentityMailFromDomainResult setIdentityMailFromDomain(SetIdentityMailFromDomainRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public SetIdentityNotificationTopicResult setIdentityNotificationTopic(SetIdentityNotificationTopicRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public SetReceiptRulePositionResult setReceiptRulePosition(SetReceiptRulePositionRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public UpdateConfigurationSetEventDestinationResult updateConfigurationSetEventDestination(UpdateConfigurationSetEventDestinationRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public UpdateReceiptRuleResult updateReceiptRule(UpdateReceiptRuleRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public VerifyDomainDkimResult verifyDomainDkim(VerifyDomainDkimRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public VerifyDomainIdentityResult verifyDomainIdentity(VerifyDomainIdentityRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public VerifyEmailAddressResult verifyEmailAddress(VerifyEmailAddressRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public VerifyEmailIdentityResult verifyEmailIdentity(VerifyEmailIdentityRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public void shutdown() {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public com.amazonaws.ResponseMetadata getCachedResponseMetadata(com.amazonaws.AmazonWebServiceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public AmazonSimpleEmailServiceWaiters waiters() {
        throw new java.lang.UnsupportedOperationException();
    }
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.fuse;
import static jnr.constants.platform.OpenFlags.O_RDONLY;
import static jnr.constants.platform.OpenFlags.O_WRONLY;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.atMost;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import alluxio.AlluxioURI;
import alluxio.ConfigurationRule;
import alluxio.ConfigurationTestUtils;
import alluxio.Constants;
import alluxio.client.block.BlockMasterClient;
import alluxio.client.file.FileInStream;
import alluxio.client.file.FileOutStream;
import alluxio.client.file.FileSystem;
import alluxio.client.file.URIStatus;
import alluxio.conf.InstancedConfiguration;
import alluxio.conf.PropertyKey;
import alluxio.exception.FileAlreadyExistsException;
import alluxio.exception.FileDoesNotExistException;
import alluxio.exception.FileIncompleteException;
import alluxio.grpc.CreateDirectoryPOptions;
import alluxio.grpc.CreateFilePOptions;
import alluxio.grpc.SetAttributePOptions;
import alluxio.jnifuse.ErrorCodes;
import alluxio.jnifuse.struct.FileStat;
import alluxio.jnifuse.struct.FuseFileInfo;
import alluxio.jnifuse.struct.Statvfs;
import alluxio.security.authorization.Mode;
import alluxio.wire.BlockMasterInfo;
import alluxio.wire.FileInfo;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableMap;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.stubbing.Answer;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.List;
/**
* Isolation tests for {@link AlluxioJniFuseFileSystem}.
*/
@Ignore
@RunWith(PowerMockRunner.class)
@PrepareForTest({BlockMasterClient.Factory.class})
public class AlluxioJniFuseFileSystemTest {
// Alluxio path that the FUSE mount point maps to in these tests.
private static final String TEST_ROOT_PATH = "/t/root";
private static final AlluxioURI BASE_EXPECTED_URI = new AlluxioURI(TEST_ROOT_PATH);
// System under test, created in before().
private AlluxioJniFuseFileSystem mFuseFs;
// Mockito mock backing mFuseFs; interactions are verified against it.
private FileSystem mFileSystem;
// Native FUSE file-info struct reused across tests.
private FuseFileInfo mFileInfo;
private InstancedConfiguration mConf = ConfigurationTestUtils.defaults();
// Disable path caching and enable user/group translation for all tests.
@Rule
public ConfigurationRule mConfiguration =
new ConfigurationRule(ImmutableMap.of(PropertyKey.FUSE_CACHED_PATHS_MAX, "0",
PropertyKey.FUSE_USER_GROUP_TRANSLATION_ENABLED, "true"), mConf);
// Builds the FUSE file system over a mocked Alluxio FileSystem; skips the whole
// test class when the native libfuse library is unavailable.
@Before
public void before() throws Exception {
final List<String> empty = Collections.emptyList();
FuseMountOptions opts =
new FuseMountOptions("/doesnt/matter", TEST_ROOT_PATH, false, empty);
mFileSystem = mock(FileSystem.class);
try {
mFuseFs = new AlluxioJniFuseFileSystem(mFileSystem, opts, mConf);
} catch (UnsatisfiedLinkError e) {
// stop test and ignore if FuseFileSystem fails to create due to missing libfuse library
Assume.assumeNoException(e);
}
mFileInfo = allocateNativeFileInfo();
}
// chmod should translate the numeric mode into a SetAttributePOptions call on
// the corresponding Alluxio path.
@Test
public void chmod() throws Exception {
long mode = 123;
mFuseFs.chmod("/foo/bar", mode);
AlluxioURI expectedPath = BASE_EXPECTED_URI.join("/foo/bar");
SetAttributePOptions options =
SetAttributePOptions.newBuilder().setMode(new Mode((short) mode).toProto()).build();
verify(mFileSystem).setAttribute(expectedPath, options);
}
// chown with valid uid and gid should set both owner and group, translating the
// numeric ids back to the current user/group names.
@Test
public void chown() throws Exception {
long uid = AlluxioFuseUtils.getUid(System.getProperty("user.name"));
long gid = AlluxioFuseUtils.getGid(System.getProperty("user.name"));
mFuseFs.chown("/foo/bar", uid, gid);
String userName = System.getProperty("user.name");
String groupName = AlluxioFuseUtils.getGroupName(gid);
AlluxioURI expectedPath = BASE_EXPECTED_URI.join("/foo/bar");
SetAttributePOptions options =
SetAttributePOptions.newBuilder().setGroup(groupName).setOwner(userName).build();
verify(mFileSystem).setAttribute(expectedPath, options);
}
// When gid is the "not set" sentinel (in either signed or unsigned form), chown
// should fall back to the user's primary group and still set owner and group.
@Test
public void chownWithoutValidGid() throws Exception {
long uid = AlluxioFuseUtils.getUid(System.getProperty("user.name"));
long gid = AlluxioJniFuseFileSystem.ID_NOT_SET_VALUE;
mFuseFs.chown("/foo/bar", uid, gid);
String userName = System.getProperty("user.name");
String groupName = AlluxioFuseUtils.getGroupName(userName);
AlluxioURI expectedPath = BASE_EXPECTED_URI.join("/foo/bar");
SetAttributePOptions options =
SetAttributePOptions.newBuilder().setGroup(groupName).setOwner(userName).build();
verify(mFileSystem).setAttribute(expectedPath, options);
gid = AlluxioJniFuseFileSystem.ID_NOT_SET_VALUE_UNSIGNED;
mFuseFs.chown("/foo/bar", uid, gid);
// Second sentinel form triggers the same call again (total of two).
verify(mFileSystem, times(2)).setAttribute(expectedPath, options);
}
// When uid is the "not set" sentinel, chown should set only the group (no owner).
@Test
public void chownWithoutValidUid() throws Exception {
String userName = System.getProperty("user.name");
long uid = AlluxioJniFuseFileSystem.ID_NOT_SET_VALUE;
long gid = AlluxioFuseUtils.getGid(userName);
mFuseFs.chown("/foo/bar", uid, gid);
String groupName = AlluxioFuseUtils.getGroupName(userName);
AlluxioURI expectedPath = BASE_EXPECTED_URI.join("/foo/bar");
SetAttributePOptions options = SetAttributePOptions.newBuilder().setGroup(groupName).build();
verify(mFileSystem).setAttribute(expectedPath, options);
uid = AlluxioJniFuseFileSystem.ID_NOT_SET_VALUE_UNSIGNED;
mFuseFs.chown("/foo/bar", uid, gid);
// Unsigned sentinel behaves identically (second identical call).
verify(mFileSystem, times(2)).setAttribute(expectedPath, options);
}
// When both uid and gid are "not set" sentinels, chown must be a no-op: no
// setAttribute call should ever reach the file system.
@Test
public void chownWithoutValidUidAndGid() throws Exception {
long uid = AlluxioJniFuseFileSystem.ID_NOT_SET_VALUE;
long gid = AlluxioJniFuseFileSystem.ID_NOT_SET_VALUE;
mFuseFs.chown("/foo/bar", uid, gid);
verify(mFileSystem, never()).setAttribute(any());
uid = AlluxioJniFuseFileSystem.ID_NOT_SET_VALUE_UNSIGNED;
gid = AlluxioJniFuseFileSystem.ID_NOT_SET_VALUE_UNSIGNED;
mFuseFs.chown("/foo/bar", uid, gid);
verify(mFileSystem, never()).setAttribute(any());
}
// create with O_WRONLY should call createFile on the mapped path, forwarding
// the (here zero) mode bits.
@Test
public void create() throws Exception {
mFileInfo.flags.set(O_WRONLY.intValue());
mFuseFs.create("/foo/bar", 0, mFileInfo);
AlluxioURI expectedPath = BASE_EXPECTED_URI.join("/foo/bar");
verify(mFileSystem).createFile(expectedPath, CreateFilePOptions.newBuilder()
.setMode(new alluxio.security.authorization.Mode((short) 0).toProto())
.build());
}
// A 256-character file name exceeds the FUSE name limit and should be rejected
// with ENAMETOOLONG rather than forwarded to the file system.
@Test
public void createWithLengthLimit() throws Exception {
// 16 x 16 hex chars = 256-character name.
String c256 = String.join("", Collections.nCopies(16, "0123456789ABCDEF"));
mFileInfo.flags.set(O_WRONLY.intValue());
assertEquals(-ErrorCodes.ENAMETOOLONG(),
mFuseFs.create("/foo/" + c256, 0, mFileInfo));
}
// flush on an open write stream should be forwarded to the underlying
// FileOutStream's flush().
@Test
public void flush() throws Exception {
FileOutStream fos = mock(FileOutStream.class);
// NOTE(review): assigning any() matchers to locals before the when() call is
// unusual Mockito style but works because both are consumed by the same stub.
AlluxioURI anyURI = any();
CreateFilePOptions options = any();
when(mFileSystem.createFile(anyURI, options)).thenReturn(fos);
// open a file
mFileInfo.flags.set(O_WRONLY.intValue());
mFuseFs.create("/foo/bar", 0, mFileInfo);
// then call flush into it
mFuseFs.flush("/foo/bar", mFileInfo);
verify(fos).flush();
}
// getattr on a completed directory should translate the Alluxio URIStatus into
// the native FileStat: size, 512-byte block count, a/c/m timestamps split into
// seconds + nanoseconds, uid/gid of the current user, and mode | S_IFDIR.
@Test
public void getattr() throws Exception {
// set up status
FileInfo info = new FileInfo();
info.setLength(4 * Constants.KB + 1);
info.setLastAccessTimeMs(1000);
info.setLastModificationTimeMs(1000);
String userName = System.getProperty("user.name");
info.setOwner(userName);
info.setGroup(AlluxioFuseUtils.getGroupName(userName));
info.setFolder(true);
info.setMode(123);
info.setCompleted(true);
URIStatus status = new URIStatus(info);
// mock fs
when(mFileSystem.getStatus(any(AlluxioURI.class))).thenReturn(status);
FileStat stat = FileStat.of(ByteBuffer.allocateDirect(256));
assertEquals(0, mFuseFs.getattr("/foo", stat));
assertEquals(status.getLength(), stat.st_size.longValue());
// 4KB + 1 byte occupies 9 blocks of 512 bytes.
assertEquals(9, stat.st_blocks.intValue());
assertEquals(status.getLastAccessTimeMs() / 1000, stat.st_atim.tv_sec.get());
assertEquals((status.getLastAccessTimeMs() % 1000) * 1000,
stat.st_atim.tv_nsec.longValue());
assertEquals(status.getLastModificationTimeMs() / 1000, stat.st_ctim.tv_sec.get());
assertEquals((status.getLastModificationTimeMs() % 1000) * 1000,
stat.st_ctim.tv_nsec.longValue());
assertEquals(status.getLastModificationTimeMs() / 1000, stat.st_mtim.tv_sec.get());
assertEquals((status.getLastModificationTimeMs() % 1000) * 1000,
stat.st_mtim.tv_nsec.longValue());
assertEquals(AlluxioFuseUtils.getUid(System.getProperty("user.name")), stat.st_uid.get());
assertEquals(AlluxioFuseUtils.getGid(System.getProperty("user.name")), stat.st_gid.get());
assertEquals(123 | FileStat.S_IFDIR, stat.st_mode.intValue());
}
// getattr on an incomplete file should poll getStatus until the file completes,
// then report the final length.
// NOTE(review): relies on Thread.sleep(1000) and atLeast(10) polling counts —
// timing-based and potentially flaky on slow machines; confirm acceptable.
@Test
public void getattrWithDelay() throws Exception {
String path = "/foo/bar";
AlluxioURI expectedPath = BASE_EXPECTED_URI.join("/foo/bar");
// set up status
FileInfo info = new FileInfo();
info.setLength(0);
info.setCompleted(false);
URIStatus status = new URIStatus(info);
// mock fs
when(mFileSystem.getStatus(any(AlluxioURI.class))).thenReturn(status);
FileStat stat = FileStat.of(ByteBuffer.allocateDirect(256));
// Use another thread to open file so that
// we could change the file status when opening it
Thread t = new Thread(() -> mFuseFs.getattr(path, stat));
t.start();
Thread.sleep(1000);
// If the file is not being written and is not completed,
// we will wait for the file to complete
verify(mFileSystem, atLeast(10)).getStatus(expectedPath);
assertEquals(0, stat.st_size.longValue());
// Completing the (shared, mutable) FileInfo unblocks the polling getattr call.
info.setCompleted(true);
info.setLength(1000);
t.join();
assertEquals(1000, stat.st_size.longValue());
}
@Test
public void getattrWhenWriting() throws Exception {
  String path = "/foo/bar";
  AlluxioURI expectedPath = BASE_EXPECTED_URI.join(path);
  FileOutStream fos = mock(FileOutStream.class);
  when(mFileSystem.createFile(expectedPath)).thenReturn(fos);
  // Open the file for writing so getattr() sees an in-progress write.
  mFuseFs.create(path, 0, mFileInfo);
  // Prepare file status
  FileInfo info = new FileInfo();
  info.setLength(0);
  info.setCompleted(false);
  URIStatus status = new URIStatus(info);
  when(mFileSystem.exists(any(AlluxioURI.class))).thenReturn(true);
  when(mFileSystem.getStatus(any(AlluxioURI.class))).thenReturn(status);
  FileStat stat = FileStat.of(ByteBuffer.allocateDirect(256));
  // getattr() will not be blocked when writing
  mFuseFs.getattr(path, stat);
  // If getattr() is blocking, it will continuously get status of the file
  verify(mFileSystem, atMost(300)).getStatus(expectedPath);
  assertEquals(0, stat.st_size.longValue());
  // Releasing ends the write; after this getattr() polls for completion again.
  mFuseFs.release(path, mFileInfo);
  // getattr() will be blocked waiting for the file to be completed
  // If release() is called (returned) but does not finished
  Thread t = new Thread(() -> mFuseFs.getattr(path, stat));
  t.start();
  // NOTE(review): timing-based; atLeast(10) may be flaky under load.
  Thread.sleep(1000);
  verify(mFileSystem, atLeast(10)).getStatus(expectedPath);
  assertEquals(0, stat.st_size.longValue());
  info.setCompleted(true);
  info.setLength(1000);
  t.join();
  // getattr() completed and set the file size
  assertEquals(1000, stat.st_size.longValue());
}
@Test
public void mkDir() throws Exception {
  // Creating a directory must forward the POSIX mode to the Alluxio options.
  long mode = 0755L;
  mFuseFs.mkdir("/foo/bar", mode);
  CreateDirectoryPOptions expectedOptions = CreateDirectoryPOptions.newBuilder()
      .setMode(new alluxio.security.authorization.Mode((short) mode).toProto())
      .build();
  verify(mFileSystem).createDirectory(BASE_EXPECTED_URI.join("/foo/bar"), expectedOptions);
}

@Test
public void mkDirWithLengthLimit() throws Exception {
  // A 256-character component exceeds the FUSE name limit and yields ENAMETOOLONG.
  long mode = 0755L;
  StringBuilder tooLong = new StringBuilder(256);
  for (int i = 0; i < 16; i++) {
    tooLong.append("0123456789ABCDEF");
  }
  assertEquals(-ErrorCodes.ENAMETOOLONG(),
      mFuseFs.mkdir("/foo/" + tooLong, mode));
}
@Test
public void openWithoutDelay() throws Exception {
  AlluxioURI expectedPath = BASE_EXPECTED_URI.join("/foo/bar");
  // A completed file opens immediately, with exactly one openFile() call.
  setUpOpenMock(expectedPath);
  FileInStream is = mock(FileInStream.class);
  when(mFileSystem.openFile(expectedPath)).thenReturn(is);
  mFuseFs.open("/foo/bar", mFileInfo);
  verify(mFileSystem).openFile(expectedPath);
}

@Test
public void incompleteFileCannotOpen() throws Exception {
  AlluxioURI expectedPath = BASE_EXPECTED_URI.join("/foo/bar");
  FileInfo fi = setUpOpenMock(expectedPath);
  // Mark the file incomplete so openFile() keeps throwing.
  fi.setCompleted(false);
  when(mFileSystem.openFile(expectedPath)).thenThrow(new FileIncompleteException(expectedPath));
  // A file that never completes surfaces to FUSE as EFAULT.
  assertEquals(-ErrorCodes.EFAULT(), mFuseFs.open("/foo/bar", mFileInfo));
}

@Test
public void openWithDelay() throws Exception {
  AlluxioURI expectedPath = BASE_EXPECTED_URI.join("/foo/bar");
  FileInfo fi = setUpOpenMock(expectedPath);
  fi.setCompleted(false);
  when(mFileSystem.openFile(expectedPath)).thenThrow(new FileIncompleteException(expectedPath));
  // Use another thread to open file so that
  // we could change the file status when opening it
  Thread t = new Thread(() -> mFuseFs.open("/foo/bar", mFileInfo));
  t.start();
  // NOTE(review): timing-based wait; could be flaky on slow machines.
  Thread.sleep(1000);
  // If the file exists but is not completed, we will wait for the file to complete
  verify(mFileSystem, atLeast(10)).getStatus(expectedPath);
  fi.setCompleted(true);
  t.join();
  // The first openFile() threw; the retry after completion is the second call.
  verify(mFileSystem, times(2)).openFile(expectedPath);
}
@Test
public void read() throws Exception {
  // mocks set-up
  AlluxioURI expectedPath = BASE_EXPECTED_URI.join("/foo/bar");
  setUpOpenMock(expectedPath);
  // Fake in-stream: fills the destination array with bytes 0..3 and reports 4 read.
  FileInStream fakeInStream = mock(FileInStream.class);
  when(fakeInStream.read(any(byte[].class),
      anyInt(), anyInt())).then((Answer<Integer>) invocationOnMock -> {
        byte[] myDest = (byte[]) invocationOnMock.getArguments()[0];
        for (byte i = 0; i < 4; i++) {
          myDest[i] = i;
        }
        return 4;
      });
  when(mFileSystem.openFile(expectedPath)).thenReturn(fakeInStream);
  mFileInfo.flags.set(O_RDONLY.intValue());
  // prepare something to read to it
  ByteBuffer ptr = ByteBuffer.allocateDirect(4);
  assertEquals(4, ptr.limit());
  // actual test
  mFuseFs.open("/foo/bar", mFileInfo);
  mFuseFs.read("/foo/bar", ptr, 4, 0, mFileInfo);
  // Drain the native buffer and compare with what the fake stream produced.
  ptr.flip();
  final byte[] dst = new byte[4];
  ptr.get(dst, 0, 4);
  final byte[] expected = new byte[] {0, 1, 2, 3};
  assertArrayEquals("Source and dst data should be equal", expected, dst);
}
@Test
public void rename() throws Exception {
  // A plain rename is forwarded verbatim to the file system client.
  AlluxioURI src = BASE_EXPECTED_URI.join("/old");
  AlluxioURI dst = BASE_EXPECTED_URI.join("/new");
  doNothing().when(mFileSystem).rename(src, dst);
  mFuseFs.rename("/old", "/new");
  verify(mFileSystem).rename(src, dst);
}

@Test
public void renameOldNotExist() throws Exception {
  // Renaming a missing source maps FileDoesNotExistException to ENOENT.
  AlluxioURI src = BASE_EXPECTED_URI.join("/old");
  AlluxioURI dst = BASE_EXPECTED_URI.join("/new");
  doThrow(new FileDoesNotExistException("File /old does not exist"))
      .when(mFileSystem).rename(src, dst);
  assertEquals(-ErrorCodes.ENOENT(), mFuseFs.rename("/old", "/new"));
}

@Test
public void renameNewExist() throws Exception {
  // Renaming onto an existing target maps FileAlreadyExistsException to EEXIST.
  AlluxioURI src = BASE_EXPECTED_URI.join("/old");
  AlluxioURI dst = BASE_EXPECTED_URI.join("/new");
  doThrow(new FileAlreadyExistsException("File /new already exists"))
      .when(mFileSystem).rename(src, dst);
  assertEquals(-ErrorCodes.EEXIST(), mFuseFs.rename("/old", "/new"));
}

@Test
public void renameWithLengthLimit() throws Exception {
  // A 256-character target name exceeds the limit and yields ENAMETOOLONG.
  StringBuilder longName = new StringBuilder(256);
  for (int i = 0; i < 16; i++) {
    longName.append("0123456789ABCDEF");
  }
  AlluxioURI src = BASE_EXPECTED_URI.join("/old");
  AlluxioURI dst = BASE_EXPECTED_URI.join("/" + longName);
  doNothing().when(mFileSystem).rename(src, dst);
  assertEquals(-ErrorCodes.ENAMETOOLONG(),
      mFuseFs.rename("/old", "/" + longName));
}
@Test
public void rmdir() throws Exception {
  // Removing a directory delegates straight to FileSystem#delete.
  AlluxioURI dirToRemove = BASE_EXPECTED_URI.join("/foo/bar");
  doNothing().when(mFileSystem).delete(dirToRemove);
  mFuseFs.rmdir("/foo/bar");
  verify(mFileSystem).delete(dirToRemove);
}
@Test
public void write() throws Exception {
  // Stub createFile() to return a mocked out-stream for any path/options
  // (any() matchers must stay in this order for Mockito's matcher stack).
  FileOutStream fos = mock(FileOutStream.class);
  AlluxioURI anyURI = any();
  CreateFilePOptions options = any();
  when(mFileSystem.createFile(anyURI, options)).thenReturn(fos);
  // open a file
  mFileInfo.flags.set(O_WRONLY.intValue());
  mFuseFs.create("/foo/bar", 0, mFileInfo);
  // prepare something to write into it
  ByteBuffer ptr = ByteBuffer.allocateDirect(4);
  byte[] expected = {42, -128, 1, 3};
  ptr.put(expected, 0, 4);
  ptr.flip();
  mFuseFs.write("/foo/bar", ptr, 4, 0, mFileInfo);
  verify(fos).write(expected);
  // the second write at offset 0 is a no-op because writes must be sequential
  // and overwriting already-written bytes is not supported (hence times(1))
  mFuseFs.write("/foo/bar", ptr, 4, 0, mFileInfo);
  verify(fos, times(1)).write(expected);
}
@Test
public void unlink() throws Exception {
  // Unlinking a file delegates straight to FileSystem#delete.
  AlluxioURI fileToRemove = BASE_EXPECTED_URI.join("/foo/bar");
  doNothing().when(mFileSystem).delete(fileToRemove);
  mFuseFs.unlink("/foo/bar");
  verify(mFileSystem).delete(fileToRemove);
}
@Test
public void pathTranslation() throws Exception {
  // FUSE paths must resolve to Alluxio URIs rooted at the mounted path.
  final LoadingCache<String, AlluxioURI> resolver = mFuseFs.getPathResolverCache();
  AlluxioURI rootUri = new AlluxioURI(TEST_ROOT_PATH);
  assertEquals("/ should resolve to " + rootUri, rootUri, resolver.apply("/"));
  AlluxioURI nestedUri = new AlluxioURI(TEST_ROOT_PATH + "/home/foo");
  assertEquals("/home/foo should resolve to " + nestedUri, nestedUri,
      resolver.apply("/home/foo"));
}
// Allocate native memory for a FuseFileInfo data struct and return its pointer
private FuseFileInfo allocateNativeFileInfo() {
  // NOTE(review): 36 bytes assumed to cover the native struct layout expected
  // by FuseFileInfo -- confirm against the jni-fuse struct definition.
  ByteBuffer buffer = ByteBuffer.allocateDirect(36);
  buffer.clear();
  return FuseFileInfo.of(buffer);
}

/**
 * Sets up mock for open() operation: getStatus() on the given URI returns a
 * completed, non-folder status. The returned FileInfo is live -- tests mutate
 * it (e.g. setCompleted(false)) to drive different open() behaviors.
 *
 * @param uri the path to run operations on
 * @return the file information backing the mocked status
 */
private FileInfo setUpOpenMock(AlluxioURI uri) throws Exception {
  FileInfo fi = new FileInfo();
  fi.setCompleted(true);
  fi.setFolder(false);
  URIStatus status = new URIStatus(fi);
  when(mFileSystem.getStatus(uri)).thenReturn(status);
  return fi;
}
@Test
public void statfs() throws Exception {
  ByteBuffer buffer = ByteBuffer.allocateDirect(4 * Constants.KB);
  buffer.clear();
  Statvfs stbuf = Statvfs.of(buffer);
  int blockSize = 4 * Constants.KB;
  int totalBlocks = 4;
  int freeBlocks = 3;
  // Statically mock the block-master factory so statfs() sees our capacity numbers.
  BlockMasterClient blockMasterClient = PowerMockito.mock(BlockMasterClient.class);
  PowerMockito.mockStatic(BlockMasterClient.Factory.class);
  when(BlockMasterClient.Factory.create(any())).thenReturn(blockMasterClient);
  BlockMasterInfo blockMasterInfo = new BlockMasterInfo();
  blockMasterInfo.setCapacityBytes(totalBlocks * blockSize);
  blockMasterInfo.setFreeBytes(freeBlocks * blockSize);
  when(blockMasterClient.getBlockMasterInfo(any())).thenReturn(blockMasterInfo);
  assertEquals(0, mFuseFs.statfs("/", stbuf));
  // Byte totals are expected to be reported in blockSize-sized units.
  assertEquals(blockSize, stbuf.f_bsize.intValue());
  assertEquals(blockSize, stbuf.f_frsize.intValue());
  assertEquals(totalBlocks, stbuf.f_blocks.longValue());
  assertEquals(freeBlocks, stbuf.f_bfree.longValue());
  assertEquals(freeBlocks, stbuf.f_bavail.longValue());
  // Inode counts are unknown to Alluxio and reported as a sentinel value.
  assertEquals(AlluxioJniFuseFileSystem.UNKNOWN_INODES, stbuf.f_files.intValue());
  assertEquals(AlluxioJniFuseFileSystem.UNKNOWN_INODES, stbuf.f_ffree.intValue());
  assertEquals(AlluxioJniFuseFileSystem.UNKNOWN_INODES, stbuf.f_favail.intValue());
  assertEquals(AlluxioJniFuseFileSystem.MAX_NAME_LENGTH, stbuf.f_namemax.intValue());
}
}
| |
package sysadm.luiz.detectorplacas;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.hardware.Camera;
import android.location.Location;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.TextView;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.LocationServices;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import sysadm.luiz.detectorplacas.filters.DetectorPlaca;
import sysadm.luiz.detectorplacas.placas.Placa;
/**
 * Camera screen used to confirm a previously selected license plate ("placa") at a
 * physical location. Camera frames are run through the shared {@link DetectorPlaca};
 * once the selected plate is re-detected, the current fused-location fix is stored
 * for it via {@code placasModel.cadastrarLocal(...)} and the activity finishes with
 * result code 1.
 */
public class LocalCameraActivity extends AppCompatActivity
    implements CameraBridgeViewBase.CvCameraViewListener2, GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener {

  // NOTE(review): tag appears to be copied from MainActivity; consider
  // "LocalCameraActivity" so log output identifies the right screen.
  private static final String TAG = "MainActivity";
  /** Saved-instance-state key for the active camera index. */
  private static final String STATE_CAMERA_INDEX = "cameraIndex";
  /** Detector shared with {@link MainActivity}; (re)assigned once OpenCV loads. */
  public static DetectorPlaca detectorPlaca;

  @SuppressLint("NewApi")
  private final int reqCAMERA = 1;
  private final int reqWRITE_EXTERNAL_STORAGE = 2;

  ImageView imgPlacaLocal;
  TextView lblProcurando;
  /** Plate chosen on the previous screen whose location is being confirmed. */
  Placa placaSelecionada;
  GoogleApiClient mGoogleApiClient;
  /** Last known fused location; null until the Google API client connects. */
  Location mLastLocation;

  private int mCameraIndex;
  private boolean mIsCameraFrontFacing;
  private int mNumCameras;
  private CameraBridgeViewBase mCameraView;

  /** Enables the camera preview and wires up the detector once OpenCV is ready. */
  private final BaseLoaderCallback mLoaderCallback =
      new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(final int status) {
          switch (status) {
            case LoaderCallbackInterface.SUCCESS:
              Log.d(TAG, "OpenCV loaded successfully");
              // Only start the preview if the camera permission is already granted;
              // otherwise requestPermissions() will have asked for it.
              if (ContextCompat.checkSelfPermission(LocalCameraActivity.this,
                  Manifest.permission.CAMERA)
                  == PackageManager.PERMISSION_GRANTED) {
                mCameraView.enableView();
              }
              try {
                detectorPlaca = MainActivity.detectorPlaca;
                detectorPlaca.definirPlacas();
              } catch (Exception e) {
                // Detector not available (e.g. MainActivity not initialized yet);
                // leave the preview running without detection.
                e.printStackTrace();
                break;
              }
              break;
            default:
              super.onManagerConnected(status);
              break;
          }
        }
      };

  /** Set once the selected plate has been re-detected on a camera frame. */
  private boolean localConfirmado = false;

  /**
   * Requests every runtime permission this screen needs (camera, storage,
   * coarse and fine location). Each permission uses its own request code.
   */
  private void requestPermissions() {
    if (ContextCompat.checkSelfPermission(LocalCameraActivity.this,
        Manifest.permission.CAMERA)
        != PackageManager.PERMISSION_GRANTED) {
      ActivityCompat.requestPermissions(LocalCameraActivity.this,
          new String[]{Manifest.permission.CAMERA},
          reqCAMERA);
    }
    if (ContextCompat.checkSelfPermission(LocalCameraActivity.this,
        Manifest.permission.WRITE_EXTERNAL_STORAGE)
        != PackageManager.PERMISSION_GRANTED) {
      ActivityCompat.requestPermissions(LocalCameraActivity.this,
          new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
          reqWRITE_EXTERNAL_STORAGE);
    }
    if (ContextCompat.checkSelfPermission(LocalCameraActivity.this,
        Manifest.permission.ACCESS_COARSE_LOCATION)
        != PackageManager.PERMISSION_GRANTED) {
      ActivityCompat.requestPermissions(LocalCameraActivity.this,
          new String[]{Manifest.permission.ACCESS_COARSE_LOCATION},
          3);
    }
    if (ContextCompat.checkSelfPermission(LocalCameraActivity.this,
        Manifest.permission.ACCESS_FINE_LOCATION)
        != PackageManager.PERMISSION_GRANTED) {
      ActivityCompat.requestPermissions(LocalCameraActivity.this,
          new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION},
          4);
    }
  }

  @Override
  public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
    // Keep re-asking until the permission is granted.
    // NOTE(review): if the user selects "never ask again" this re-requests in a
    // loop; consider showing a rationale or degrading gracefully instead.
    if ((grantResults.length > 0) && (grantResults[0] != PackageManager.PERMISSION_GRANTED)) {
      requestPermissions();
    }
  }

  @Override
  protected void onCreate(final Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_local_camera);
    // Lazily build the Google API client used for the fused-location provider.
    if (mGoogleApiClient == null) {
      mGoogleApiClient = new GoogleApiClient.Builder(this)
          .addConnectionCallbacks(this)
          .addOnConnectionFailedListener(this)
          .addApi(LocationServices.API)
          .build();
    }
    lblProcurando = (TextView) findViewById(R.id.lblProcurando);
    imgPlacaLocal = (ImageView) findViewById(R.id.imgPlacaLocal);
    // Show the plate picked on the previous screen as a reference image.
    this.placaSelecionada = EditarPlacaActivity.placaSelecionada;
    Bitmap bitmap;
    bitmap = Bitmap.createBitmap(placaSelecionada.getImagem().width(), placaSelecionada.getImagem().height(), Bitmap.Config.RGB_565);
    Mat imagem = placaSelecionada.getImagem().clone();
    Utils.matToBitmap(imagem, bitmap);
    imgPlacaLocal.setImageBitmap(bitmap);
    requestPermissions();
    // Keep the screen on while scanning for the plate.
    final Window window = getWindow();
    window.addFlags(
        WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    // Restore the previously selected camera after a configuration change.
    if (savedInstanceState != null) {
      mCameraIndex = savedInstanceState.getInt(
          STATE_CAMERA_INDEX, 0);
    } else {
      mCameraIndex = 0;
    }
    if (Build.VERSION.SDK_INT >=
        Build.VERSION_CODES.GINGERBREAD) {
      Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
      Camera.getCameraInfo(mCameraIndex, cameraInfo);
      mIsCameraFrontFacing =
          (cameraInfo.facing ==
              Camera.CameraInfo.CAMERA_FACING_FRONT);
      mNumCameras = Camera.getNumberOfCameras();
    } else {
      // Pre-Gingerbread devices expose a single rear camera and no CameraInfo API.
      mIsCameraFrontFacing = false;
      mNumCameras = 1;
    }
    mCameraView = (JavaCameraView) findViewById(R.id.cameraView);
    mCameraView.setCameraIndex(mCameraIndex);
    mCameraView.setCvCameraViewListener(LocalCameraActivity.this);
    // (A camera-switch UI for multi-camera devices was never implemented; the
    // original empty "if (mNumCameras < 2)" branch has been removed.)
  }

  @Override
  public void onPause() {
    super.onPause();
    // Release the camera while in the background.
    if (mCameraView != null) {
      mCameraView.disableView();
    }
  }

  @Override
  public void onResume() {
    super.onResume();
    // Re-initialize OpenCV asynchronously; mLoaderCallback re-enables the preview.
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3,
        this, mLoaderCallback);
  }

  @Override
  public void onDestroy() {
    super.onDestroy();
    if (mCameraView != null) {
      mCameraView.disableView();
    }
  }

  @Override
  protected void onStart() {
    mGoogleApiClient.connect();
    super.onStart();
  }

  @Override
  protected void onStop() {
    mGoogleApiClient.disconnect();
    super.onStop();
  }

  // BUGFIX(review): @Override was missing even though this overrides
  // Activity#onSaveInstanceState(Bundle); added so the compiler verifies it.
  @Override
  public void onSaveInstanceState(Bundle savedInstanceState) {
    // Preserve the active camera index across configuration changes.
    savedInstanceState.putInt(STATE_CAMERA_INDEX, mCameraIndex);
    super.onSaveInstanceState(savedInstanceState);
  }

  @Override
  public void onCameraViewStarted(final int width,
      final int height) {
  }

  @Override
  public void onCameraViewStopped() {
  }

  /**
   * Processes each camera frame. Before confirmation the frame is run through the
   * plate detector; after confirmation the method waits for a location fix, stores
   * it for the plate, and finishes the activity.
   */
  @Override
  public Mat onCameraFrame(final CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    final Mat rgba = inputFrame.rgba();
    if (localConfirmado) {
      // UI updates must happen on the main thread.
      LocalCameraActivity.this.runOnUiThread(new Runnable() {
        @Override
        public void run() {
          lblProcurando.setText("Placa confirmada. Aguardando local...");
        }
      });
      if (mLastLocation != null) {
        Log.e("LocalCameraActivity", "Local confirmado para a placa - " + String.valueOf(placaSelecionada.getTexto()));
        Log.e("LocalCameraActivity", String.format("Lat:%f, Long:%f", mLastLocation.getLatitude(), mLastLocation.getLongitude()));
        MainActivity.detectorPlaca.placasModel.cadastrarLocal(mLastLocation.getLatitude(), mLastLocation.getLongitude(), placaSelecionada.getId());
        setResult(1);
        finish();
      }
    } else {
      if (detectorPlaca.rechecagem) {
        // Re-check mode: a successful detection confirms the plate's location.
        if (detectorPlaca.applyTo(rgba, rgba, placaSelecionada)) {
          localConfirmado = true;
        }
      } else {
        detectorPlaca.applyTo(rgba, rgba, placaSelecionada);
      }
    }
    // Front cameras deliver mirrored frames; flip horizontally for display.
    if (mIsCameraFrontFacing) {
      Core.flip(rgba, rgba, 1);
    }
    return rgba;
  }

  @Override
  public void onConnected(@Nullable Bundle bundle) {
    // Without location permission we simply never get a fix (mLastLocation stays null).
    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
      return;
    }
    mLastLocation = LocationServices.FusedLocationApi.getLastLocation(
        mGoogleApiClient);
  }

  @Override
  public void onConnectionSuspended(int i) {
  }

  @Override
  public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.common.type;
import com.facebook.presto.common.block.Block;
import com.facebook.presto.common.block.BlockBuilder;
import com.facebook.presto.common.block.UncheckedBlock;
import io.airlift.slice.Slice;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Skeletal {@link Type} implementation backed by a {@link TypeSignature} and a Java
 * stack type. By default the type is neither comparable nor orderable, has no type
 * parameters, and every typed accessor/writer throws
 * {@link UnsupportedOperationException}; subclasses override only the members that
 * match their {@code javaType}.
 */
public abstract class AbstractType
        implements Type
{
    private final TypeSignature signature;
    private final Class<?> javaType;

    protected AbstractType(TypeSignature signature, Class<?> javaType)
    {
        this.signature = signature;
        this.javaType = javaType;
    }

    @Override
    public final TypeSignature getTypeSignature()
    {
        return signature;
    }

    @Override
    public String getDisplayName()
    {
        return signature.toString();
    }

    @Override
    public final Class<?> getJavaType()
    {
        return javaType;
    }

    @Override
    public List<Type> getTypeParameters()
    {
        // Non-parametric by default. Collections.emptyList() returns a shared
        // immutable instance instead of allocating a fresh wrapped ArrayList on
        // every call, and is equally unmodifiable.
        return Collections.emptyList();
    }

    @Override
    public boolean isComparable()
    {
        return false;
    }

    @Override
    public boolean isOrderable()
    {
        return false;
    }

    // Comparison operations: unsupported unless a subclass opts into
    // comparability/orderability and overrides these.

    @Override
    public long hash(Block block, int position)
    {
        throw new UnsupportedOperationException(getTypeSignature() + " type is not comparable");
    }

    @Override
    public boolean equalTo(Block leftBlock, int leftPosition, Block rightBlock, int rightPosition)
    {
        throw new UnsupportedOperationException(getTypeSignature() + " type is not comparable");
    }

    @Override
    public int compareTo(Block leftBlock, int leftPosition, Block rightBlock, int rightPosition)
    {
        throw new UnsupportedOperationException(getTypeSignature() + " type is not orderable");
    }

    // Typed accessors/writers: each subclass overrides only the pair matching
    // its javaType; all others fail fast with the concrete class name.

    @Override
    public boolean getBoolean(Block block, int position)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public boolean getBooleanUnchecked(UncheckedBlock block, int internalPosition)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public void writeBoolean(BlockBuilder blockBuilder, boolean value)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public long getLong(Block block, int position)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public long getLongUnchecked(UncheckedBlock block, int internalPosition)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public void writeLong(BlockBuilder blockBuilder, long value)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public double getDouble(Block block, int position)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public double getDoubleUnchecked(UncheckedBlock block, int internalPosition)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public void writeDouble(BlockBuilder blockBuilder, double value)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public Slice getSlice(Block block, int position)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public Slice getSliceUnchecked(Block block, int internalPosition)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public void writeSlice(BlockBuilder blockBuilder, Slice value)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public void writeSlice(BlockBuilder blockBuilder, Slice value, int offset, int length)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public Object getObject(Block block, int position)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public Block getBlockUnchecked(Block block, int internalPosition)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public void writeObject(BlockBuilder blockBuilder, Object value)
    {
        throw new UnsupportedOperationException(getClass().getName());
    }

    @Override
    public String toString()
    {
        return getTypeSignature().toString();
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        // Same concrete class with the same signature => equal types.
        return this.getTypeSignature().equals(((Type) o).getTypeSignature());
    }

    @Override
    public int hashCode()
    {
        return signature.hashCode();
    }
}
| |
/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.client.widget.escalator;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Style.Display;
import com.google.gwt.dom.client.Style.Overflow;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.dom.client.Style.Visibility;
import com.google.gwt.event.shared.EventHandler;
import com.google.gwt.event.shared.GwtEvent;
import com.google.gwt.event.shared.HandlerManager;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.EventListener;
import com.google.gwt.user.client.Timer;
import com.vaadin.client.DeferredWorker;
import com.vaadin.client.WidgetUtil;
import com.vaadin.client.widget.grid.events.ScrollEvent;
import com.vaadin.client.widget.grid.events.ScrollHandler;
/**
* An element-like bundle representing a configurable and visual scrollbar in
* one axis.
*
* @since 7.4
* @author Vaadin Ltd
* @see VerticalScrollbarBundle
* @see HorizontalScrollbarBundle
*/
public abstract class ScrollbarBundle implements DeferredWorker {
/**
 * Coalesces bursts of native scroll events into a single deferred
 * {@link ScrollEvent} fired through this bundle's handler manager.
 */
private class ScrollEventFirer {
  private final ScheduledCommand fireEventCommand = new ScheduledCommand() {
    @Override
    public void execute() {
      /*
       * Some kind of native-scroll-event related asynchronous problem
       * occurs here (at least on desktops) where the internal
       * bookkeeping isn't up to date with the real scroll position.
       * The weird thing is, that happens only once, and if you drag
       * scrollbar fast enough. After it has failed once, it never
       * fails again.
       *
       * Theory: the user drags the scrollbar, and this command is
       * executed before the browser has a chance to fire a scroll
       * event (which normally would correct this situation). This
       * would explain why slow scrolling doesn't trigger the problem,
       * while fast scrolling does.
       *
       * To make absolutely sure that we have the latest scroll
       * position, let's update the internal value.
       *
       * This might lead to a slight performance hit (on my computer
       * it was never more than 3ms on either of Chrome 38 or Firefox
       * 31). It also _slightly_ counteracts the purpose of the
       * internal bookkeeping. But since getScrollPos is called 3
       * times (on one direction) per scroll loop, it's still better
       * to have take this small penalty than removing it altogether.
       */
      updateScrollPosFromDom();
      getHandlerManager().fireEvent(new ScrollEvent());
      // Allow the next scroll burst to schedule a new event.
      isBeingFired = false;
    }
  };

  // True while a deferred fire command is pending; prevents double-scheduling.
  private boolean isBeingFired;

  public void scheduleEvent() {
    if (!isBeingFired) {
      /*
       * We'll gather all the scroll events, and only fire once, once
       * everything has calmed down.
       */
      Scheduler.get().scheduleDeferred(fireEventCommand);
      isBeingFired = true;
    }
  }
}
/**
 * The orientation of the scrollbar.
 */
public enum Direction {
  VERTICAL, HORIZONTAL;
}

/**
 * Temporarily gives an (OSX-style invisible) scrollbar a real thickness and
 * makes it visible, then shrinks and hides it again after a short delay.
 */
private class TemporaryResizer {
  // How long (ms) the scrollbar stays grabbable after the last show() call.
  private static final int TEMPORARY_RESIZE_DELAY = 1000;

  private final Timer timer = new Timer() {
    @Override
    public void run() {
      // Collapse back to a 1px, hidden scrollbar.
      internalSetScrollbarThickness(1);
      root.getStyle().setVisibility(Visibility.HIDDEN);
    }
  };

  public void show() {
    internalSetScrollbarThickness(OSX_INVISIBLE_SCROLLBAR_FAKE_SIZE_PX);
    root.getStyle().setVisibility(Visibility.VISIBLE);
    // (Re)start the countdown; repeated show() calls keep the scrollbar visible.
    timer.schedule(TEMPORARY_RESIZE_DELAY);
  }
}
/**
 * A means to listen to when the scrollbar handle in a
 * {@link ScrollbarBundle} either appears or is removed.
 */
public interface VisibilityHandler extends EventHandler {
  /**
   * This method is called whenever the scrollbar handle's visibility is
   * changed in a {@link ScrollbarBundle}.
   *
   * @param event
   *            the {@link VisibilityChangeEvent}
   */
  void visibilityChanged(VisibilityChangeEvent event);
}

/**
 * Event dispatched to {@link VisibilityHandler}s when the scroll handle
 * appears or disappears.
 */
public static class VisibilityChangeEvent extends
    GwtEvent<VisibilityHandler> {
  /** Shared event type; toString overridden purely for debugging output. */
  public static final Type<VisibilityHandler> TYPE = new Type<ScrollbarBundle.VisibilityHandler>() {
    @Override
    public String toString() {
      return "VisibilityChangeEvent";
    }
  };

  private final boolean isScrollerVisible;

  private VisibilityChangeEvent(boolean isScrollerVisible) {
    this.isScrollerVisible = isScrollerVisible;
  }

  /**
   * Checks whether the scroll handle is currently visible or not
   *
   * @return <code>true</code> if the scroll handle is currently visible.
   *         <code>false</code> if not.
   */
  public boolean isScrollerVisible() {
    return isScrollerVisible;
  }

  @Override
  public Type<VisibilityHandler> getAssociatedType() {
    return TYPE;
  }

  @Override
  protected void dispatch(VisibilityHandler handler) {
    handler.visibilityChanged(this);
  }
}
/**
 * The pixel size for OSX's invisible scrollbars.
 * <p>
 * Touch devices don't show a scrollbar at all, so the scrollbar size is
 * irrelevant in their case. There doesn't seem to be any other popular
 * platforms that has scrollbars similar to OSX. Thus, this behavior is
 * tailored for OSX only, until additional platforms start behaving this
 * way.
 */
private static final int OSX_INVISIBLE_SCROLLBAR_FAKE_SIZE_PX = 13;

/**
 * A representation of a single vertical scrollbar.
 *
 * @see VerticalScrollbarBundle#getElement()
 */
public final static class VerticalScrollbarBundle extends ScrollbarBundle {
  @Override
  public void setStylePrimaryName(String primaryStyleName) {
    super.setStylePrimaryName(primaryStyleName);
    root.addClassName(primaryStyleName + "-scroller-vertical");
  }

  @Override
  protected void internalSetScrollPos(int px) {
    root.setScrollTop(px);
  }

  @Override
  protected int internalGetScrollPos() {
    return root.getScrollTop();
  }

  @Override
  protected void internalSetScrollSize(double px) {
    // The spacer's height defines how far the root can scroll vertically.
    scrollSizeElement.getStyle().setHeight(px, Unit.PX);
  }

  @Override
  protected String internalGetScrollSize() {
    return scrollSizeElement.getStyle().getHeight();
  }

  @Override
  protected void internalSetOffsetSize(double px) {
    root.getStyle().setHeight(px, Unit.PX);
  }

  @Override
  public String internalGetOffsetSize() {
    return root.getStyle().getHeight();
  }

  @Override
  protected void internalSetScrollbarThickness(double px) {
    // Thickness is reserved as right padding while the element's own
    // width is zeroed; the spacer is widened to match.
    root.getStyle().setPaddingRight(px, Unit.PX);
    root.getStyle().setWidth(0, Unit.PX);
    scrollSizeElement.getStyle().setWidth(px, Unit.PX);
  }

  @Override
  protected String internalGetScrollbarThickness() {
    return scrollSizeElement.getStyle().getWidth();
  }

  @Override
  protected void internalForceScrollbar(boolean enable) {
    if (enable) {
      root.getStyle().setOverflowY(Overflow.SCROLL);
    } else {
      root.getStyle().clearOverflowY();
    }
  }

  @Override
  public Direction getDirection() {
    return Direction.VERTICAL;
  }
}
/**
 * A representation of a single horizontal scrollbar.
 *
 * @see HorizontalScrollbarBundle#getElement()
 */
public final static class HorizontalScrollbarBundle extends ScrollbarBundle {
  @Override
  public void setStylePrimaryName(String primaryStyleName) {
    super.setStylePrimaryName(primaryStyleName);
    root.addClassName(primaryStyleName + "-scroller-horizontal");
  }

  @Override
  protected void internalSetScrollPos(int px) {
    root.setScrollLeft(px);
  }

  @Override
  protected int internalGetScrollPos() {
    return root.getScrollLeft();
  }

  @Override
  protected void internalSetScrollSize(double px) {
    // The spacer's width defines how far the root can scroll horizontally.
    scrollSizeElement.getStyle().setWidth(px, Unit.PX);
  }

  @Override
  protected String internalGetScrollSize() {
    return scrollSizeElement.getStyle().getWidth();
  }

  @Override
  protected void internalSetOffsetSize(double px) {
    root.getStyle().setWidth(px, Unit.PX);
  }

  @Override
  public String internalGetOffsetSize() {
    return root.getStyle().getWidth();
  }

  @Override
  protected void internalSetScrollbarThickness(double px) {
    // Thickness is reserved as bottom padding while the element's own
    // height is zeroed; the spacer is heightened to match.
    root.getStyle().setPaddingBottom(px, Unit.PX);
    root.getStyle().setHeight(0, Unit.PX);
    scrollSizeElement.getStyle().setHeight(px, Unit.PX);
  }

  @Override
  protected String internalGetScrollbarThickness() {
    return scrollSizeElement.getStyle().getHeight();
  }

  @Override
  protected void internalForceScrollbar(boolean enable) {
    if (enable) {
      root.getStyle().setOverflowX(Overflow.SCROLL);
    } else {
      root.getStyle().clearOverflowX();
    }
  }

  @Override
  public Direction getDirection() {
    return Direction.HORIZONTAL;
  }
}
/** Outer element: the one that actually scrolls and carries the native scrollbar. */
protected final Element root = DOM.createDiv();
/** Inner spacer element; its size in the scroll axis determines the scrollable range. */
protected final Element scrollSizeElement = DOM.createDiv();
/** True on platforms (OSX-style) whose scrollbars auto-hide; see TemporaryResizer. */
protected boolean isInvisibleScrollbar = false;
/** Internal bookkeeping of the current scroll position, in pixels. */
private double scrollPos = 0;
/** Cached maximum scroll position, in pixels. */
private double maxScrollPos = 0;
/** Whether the scroll handle is currently visible. */
private boolean scrollHandleIsVisible = false;
/** While locked, scroll position changes are presumably ignored -- set elsewhere. */
private boolean isLocked = false;

/** @deprecated access via {@link #getHandlerManager()} instead. */
@Deprecated
private HandlerManager handlerManager;

private TemporaryResizer invisibleScrollbarTemporaryResizer = new TemporaryResizer();
private final ScrollEventFirer scrollEventFirer = new ScrollEventFirer();
private HandlerRegistration scrollSizeTemporaryScrollHandler;
private HandlerRegistration offsetSizeTemporaryScrollHandler;

// Private constructor: only the nested Vertical/Horizontal subclasses may instantiate.
private ScrollbarBundle() {
  root.appendChild(scrollSizeElement);
  // Hidden until a size is configured.
  root.getStyle().setDisplay(Display.NONE);
  // Keep the scroller element itself out of the tab order.
  root.setTabIndex(-1);
}
/**
 * Retrieves the scroll size (the dimension of {@link #scrollSizeElement}
 * in the scrolling axis) as a CSS dimension string, e.g. "100px".
 *
 * @return the scroll size as it appears in the DOM
 */
protected abstract String internalGetScrollSize();

/**
 * Sets the primary style name. Subclasses may extend this to add
 * axis-specific class names on top of the base "-scroller" class.
 *
 * @param primaryStyleName
 *            The primary style name to use
 */
public void setStylePrimaryName(String primaryStyleName) {
    root.setClassName(primaryStyleName + "-scroller");
}

/**
 * Gets the root element of this scrollbar-composition.
 *
 * @return the root element
 */
public final Element getElement() {
    return root;
}
/**
 * Moves the scroll position of this scrollbar by the given number of
 * pixels.
 * <p>
 * <em>Note:</em> although the API accepts {@code double} values, they are
 * effectively used as integers; decimals are truncated before being
 * applied.
 *
 * @param delta
 *            the number of pixels to scroll by (may be negative)
 */
public final void setScrollPosByDelta(double delta) {
    if (delta == 0) {
        // Nothing to move; skip the clamping and event machinery.
        return;
    }
    setScrollPos(getScrollPos() + delta);
}
/**
 * Modifies {@link #root root's} dimensions in the axis the scrollbar is
 * representing. (Implemented per axis, e.g. as a width or a height.)
 *
 * @param px
 *            the new size of {@link #root} in the dimension this scrollbar
 *            is representing
 */
protected abstract void internalSetOffsetSize(double px);
/**
 * Sets the length of the scrollbar element.
 * <p>
 * <em>Note:</em> although the API accepts {@code double} values, they are
 * effectively used as integers; decimals are truncated before being
 * applied.
 *
 * @param px
 *            the length of the scrollbar in pixels
 */
public final void setOffsetSize(final double px) {
    /*
     * IE8 refuses to fire a scroll event when the scroll size becomes
     * smaller than the offset size, so when the handle is about to
     * disappear while we are scrolled away from 0, first scroll back to 0
     * and apply the new size only once that scroll event has arrived. All
     * other browsers take the same detour.
     */
    final boolean handleWillDisappear = showsScrollHandle()
            && px > getScrollSize();
    if (handleWillDisappear && getScrollPos() != 0) {
        // Kept in a field so setOffsetSizeNow can deregister it.
        offsetSizeTemporaryScrollHandler = addScrollHandler(new ScrollHandler() {
            @Override
            public void onScroll(ScrollEvent event) {
                setOffsetSizeNow(px);
            }
        });
        setScrollPos(0);
    } else {
        setOffsetSizeNow(px);
    }
}
/**
 * Applies a new offset size immediately, refreshes the derived state, and
 * cleans up any one-shot scroll handler left over from a deferred resize.
 */
private void setOffsetSizeNow(double px) {
    final double sanitizedSize = Math.max(0, truncate(px));
    internalSetOffsetSize(sanitizedSize);
    recalculateMaxScrollPos();
    forceScrollbar(showsScrollHandle());
    fireVisibilityChangeIfNeeded();
    final HandlerRegistration temporaryHandler = offsetSizeTemporaryScrollHandler;
    if (temporaryHandler != null) {
        temporaryHandler.removeHandler();
        offsetSizeTemporaryScrollHandler = null;
    }
}
/**
 * Force the scrollbar to be visible with CSS. In practice, this means to
 * set either <code>overflow-x</code> or <code>overflow-y</code> to "
 * <code>scroll</code>" in the scrollbar's direction.
 * <p>
 * This is an IE8 workaround, since it doesn't always show scrollbars with
 * <code>overflow: auto</code> enabled.
 *
 * @param enable
 *            <code>true</code> to reveal the scrollbar element and force
 *            native scrollbar rendering, <code>false</code> to hide it
 */
protected void forceScrollbar(boolean enable) {
    if (enable) {
        // The root element is display:none by default; reveal it.
        root.getStyle().clearDisplay();
    } else {
        root.getStyle().setDisplay(Display.NONE);
    }
    internalForceScrollbar(enable);
}

/**
 * Applies or clears the axis-specific <code>overflow</code> style that
 * forces the native scrollbar to render.
 *
 * @param enable
 *            <code>true</code> to force the scrollbar, <code>false</code>
 *            to restore the default overflow behavior
 */
protected abstract void internalForceScrollbar(boolean enable);
/**
 * Gets the length of the scrollbar.
 *
 * @return the length of the scrollbar in pixels
 */
public double getOffsetSize() {
    return parseCssDimensionToPixels(internalGetOffsetSize());
}

/**
 * Retrieves the offset size (the dimension of {@link #root} in the
 * scrolling axis) as a CSS dimension string, e.g. "100px".
 * <p>
 * NOTE(review): this member is <code>public</code> while the sibling
 * <code>internalGet*</code> methods are <code>protected</code> — possibly
 * unintentional; confirm external callers before narrowing.
 *
 * @return the offset size as it appears in the DOM
 */
public abstract String internalGetOffsetSize();
/**
 * Sets the scroll position of the scrollbar in the axis the scrollbar is
 * representing.
 * <p>
 * <em>Note:</em> although the API accepts {@code double} values, they are
 * effectively used as integers; decimals are truncated before being
 * applied.
 *
 * @param px
 *            the new scroll position in pixels
 */
public final void setScrollPos(double px) {
    if (isLocked()) {
        // A locked bundle refuses all scrolling.
        return;
    }
    final double previousPos = scrollPos;
    scrollPos = Math.max(0, Math.min(maxScrollPos, truncate(px)));
    if (WidgetUtil.pixelValuesEqual(previousPos, scrollPos)) {
        // No effective movement; skip the DOM write.
        return;
    }
    if (isInvisibleScrollbar) {
        invisibleScrollbarTemporaryResizer.show();
    }
    /*
     * The DOM write must use an integer: GWT's scroll API only accepts
     * ints, and browsers clamp scroll offsets to integer values anyway
     * (as of 25.3.2014). The double-valued field merely keeps the door
     * open for future virtual scrollbars.
     */
    internalSetScrollPos(toInt32(scrollPos));
}
/**
 * Should be called whenever this bundle is attached to the DOM (typically,
 * from the onLoad of the containing widget). Used to ensure the DOM scroll
 * position is maintained when detaching and reattaching the bundle.
 *
 * @since 7.4.1
 */
public void onLoad() {
    // Push the cached position back into the (re)attached DOM element.
    internalSetScrollPos(toInt32(scrollPos));
}
/**
 * Drops the fractional part of a double, rounding toward zero.
 * <p>
 * E.g. {@code truncate(2.3d) == 2.0d} and {@code truncate(-2.3d) == -2.0d}.
 *
 * @param num
 *            the double value to be truncated
 * @return the {@code num} value without any decimal digits
 */
private static double truncate(double num) {
    // Floor for positives, ceil for non-positives == round toward zero.
    return num > 0 ? Math.floor(num) : Math.ceil(num);
}
/**
 * Modifies the element's scroll position (scrollTop or scrollLeft).
 * <p>
 * <em>Note:</em> The parameter here is a type of integer (instead of a
 * double) by design. The browsers internally convert all double values into
 * an integer value. To make this fact explicit, this API has chosen to
 * force integers already at this level.
 *
 * @param px
 *            integer pixel value to scroll to
 */
protected abstract void internalSetScrollPos(int px);

/**
 * Gets the scroll position of the scrollbar in the axis the scrollbar is
 * representing.
 *
 * @return the current scroll position in pixels
 */
public final double getScrollPos() {
    // Sanity check (assertions only): the cached position must mirror the
    // DOM element's actual scroll offset.
    assert internalGetScrollPos() == toInt32(scrollPos) : "calculated scroll position ("
            + toInt32(scrollPos)
            + ") did not match the DOM element scroll position ("
            + internalGetScrollPos() + ")";
    return scrollPos;
}

/**
 * Retrieves the element's scroll position (scrollTop or scrollLeft).
 * <p>
 * <em>Note:</em> The return type here is an integer (instead of a double)
 * by design. The browsers internally convert all double values into an
 * integer value. To make this fact explicit, this API has chosen to force
 * integers already at this level.
 *
 * @return integer pixel value of the scroll position
 */
protected abstract int internalGetScrollPos();

/**
 * Modifies {@link #scrollSizeElement scrollSizeElement's} dimensions in
 * such a way that the scrollbar is able to scroll a certain number of
 * pixels in the axis it is representing.
 *
 * @param px
 *            the new size of {@link #scrollSizeElement} in the dimension
 *            this scrollbar is representing
 */
protected abstract void internalSetScrollSize(double px);
/**
 * Sets the amount of pixels the scrollbar needs to be able to scroll
 * through.
 * <p>
 * <em>Note:</em> although the API accepts {@code double} values, they are
 * effectively used as integers; decimals are truncated before being
 * applied.
 *
 * @param px
 *            the number of pixels the scrollbar should be able to scroll
 *            through
 */
public final void setScrollSize(final double px) {
    /*
     * IE8 refuses to fire a scroll event when the scroll size becomes
     * smaller than the offset size, so when the handle is about to
     * disappear while we are scrolled away from 0, first scroll back to 0
     * and apply the new size only once that scroll event has arrived. All
     * other browsers take the same detour.
     */
    final boolean handleWillDisappear = showsScrollHandle()
            && px <= getOffsetSize();
    if (handleWillDisappear && getScrollPos() != 0) {
        // Kept in a field so setScrollSizeNow can deregister it.
        scrollSizeTemporaryScrollHandler = addScrollHandler(new ScrollHandler() {
            @Override
            public void onScroll(ScrollEvent event) {
                setScrollSizeNow(px);
            }
        });
        setScrollPos(0);
    } else {
        setScrollSizeNow(px);
    }
}
/**
 * Applies a new scroll size immediately, refreshes the derived state, and
 * cleans up any one-shot scroll handler left over from a deferred resize.
 */
private void setScrollSizeNow(double px) {
    internalSetScrollSize(Math.max(0, px));
    recalculateMaxScrollPos();
    forceScrollbar(showsScrollHandle());
    fireVisibilityChangeIfNeeded();
    final HandlerRegistration temporaryHandler = scrollSizeTemporaryScrollHandler;
    if (temporaryHandler != null) {
        temporaryHandler.removeHandler();
        scrollSizeTemporaryScrollHandler = null;
    }
}
/**
 * Gets the amount of pixels the scrollbar needs to be able to scroll
 * through.
 *
 * @return the number of pixels the scrollbar should be able to scroll
 *         through
 */
public double getScrollSize() {
    return parseCssDimensionToPixels(internalGetScrollSize());
}

/**
 * Modifies {@link #scrollSizeElement scrollSizeElement's} dimensions in the
 * opposite axis to what the scrollbar is representing.
 *
 * @param px
 *            the dimension that {@link #scrollSizeElement} should take in
 *            the opposite axis to what the scrollbar is representing
 */
protected abstract void internalSetScrollbarThickness(double px);
/**
 * Sets the scrollbar's thickness.
 * <p>
 * If the thickness is set to 0, the scrollbar will be treated as an
 * "invisible" scrollbar. This means, the DOM structure will be given a
 * non-zero size, but {@link #getScrollbarThickness()} will still return the
 * value 0.
 *
 * @param px
 *            the scrollbar's thickness in pixels
 */
public final void setScrollbarThickness(double px) {
    isInvisibleScrollbar = (px == 0);
    if (isInvisibleScrollbar) {
        // Invisible mode: keep the scrollbar in the DOM but visually
        // hidden, and react to scroll events via the TemporaryResizer
        // (presumably to momentarily reveal/resize it — its implementation
        // lives elsewhere; confirm there).
        Event.sinkEvents(root, Event.ONSCROLL);
        Event.setEventListener(root, new EventListener() {
            @Override
            public void onBrowserEvent(Event event) {
                invisibleScrollbarTemporaryResizer.show();
            }
        });
        root.getStyle().setVisibility(Visibility.HIDDEN);
    } else {
        // Visible mode: undo the invisible-mode event wiring.
        Event.sinkEvents(root, 0);
        Event.setEventListener(root, null);
        root.getStyle().clearVisibility();
    }
    // The DOM always gets at least 1px of thickness, even for "invisible"
    // scrollbars; getScrollbarThickness() compensates by reporting 0.
    internalSetScrollbarThickness(Math.max(1d, px));
}
/**
 * Gets the scrollbar's thickness as defined in the DOM, as a CSS dimension
 * string (e.g. "16px").
 *
 * @return the scrollbar's thickness as defined in the DOM, in pixels
 */
protected abstract String internalGetScrollbarThickness();

/**
 * Gets the scrollbar's thickness.
 * <p>
 * This value will differ from the value in the DOM, if the thickness was
 * set to 0 with {@link #setScrollbarThickness(double)}, as the scrollbar is
 * then treated as "invisible."
 *
 * @return the scrollbar's thickness in pixels
 */
public final double getScrollbarThickness() {
    if (!isInvisibleScrollbar) {
        return parseCssDimensionToPixels(internalGetScrollbarThickness());
    } else {
        // Invisible scrollbars occupy DOM space (at least 1px) but report
        // a thickness of 0 to the rest of the code.
        return 0;
    }
}
/**
 * Checks whether the scrollbar's handle is visible.
 * <p>
 * In other words, this method checks whether the contents is larger than
 * can visually fit in the element.
 *
 * @return <code>true</code> iff the scrollbar's handle is visible
 */
public boolean showsScrollHandle() {
    // The handle appears exactly when there is overflow to scroll through.
    return getOffsetSize() < getScrollSize();
}
/**
 * Recomputes the maximum scroll position from the current scroll and
 * offset sizes, then re-applies the current position so it is clamped to
 * the new maximum.
 */
public void recalculateMaxScrollPos() {
    maxScrollPos = Math.max(0, getScrollSize() - getOffsetSize());
    // Re-clamp: a smaller maximum may invalidate the current position.
    setScrollPos(scrollPos);
}
/**
 * This is a method that JSNI can call to synchronize the object state from
 * the DOM.
 */
private final void updateScrollPosFromDom() {
    /*
     * TODO: this method probably shouldn't be called from Escalator's JSNI,
     * but probably could be handled internally by this listening to its own
     * element. Would clean up the code quite a bit. Needs further
     * investigation.
     */
    int newScrollPos = internalGetScrollPos();
    if (!isLocked()) {
        // Accept the DOM's position and schedule a (deferred) scroll event.
        scrollPos = newScrollPos;
        scrollEventFirer.scheduleEvent();
    } else if (scrollPos != newScrollPos) {
        // Locked, but the DOM moved anyway (e.g. user input):
        // we need to actually undo the setting of the scroll.
        internalSetScrollPos(toInt32(scrollPos));
    }
}
/**
 * Lazily creates and returns the handler manager used for scroll and
 * visibility events.
 *
 * @return the handler manager, never <code>null</code>
 */
protected HandlerManager getHandlerManager() {
    if (handlerManager == null) {
        handlerManager = new HandlerManager(this);
    }
    return handlerManager;
}

/**
 * Adds handler for the scrollbar handle visibility.
 *
 * @param handler
 *            the {@link VisibilityHandler} to add
 * @return {@link HandlerRegistration} used to remove the handler
 */
public HandlerRegistration addVisibilityHandler(
        final VisibilityHandler handler) {
    return getHandlerManager().addHandler(VisibilityChangeEvent.TYPE,
            handler);
}
/**
 * Re-evaluates whether the scroll handle is visible and, if the visibility
 * changed since the last check, fires a {@code VisibilityChangeEvent}.
 */
private void fireVisibilityChangeIfNeeded() {
    final boolean wasVisible = scrollHandleIsVisible;
    scrollHandleIsVisible = showsScrollHandle();
    if (wasVisible == scrollHandleIsVisible) {
        // No transition; nothing to announce.
        return;
    }
    getHandlerManager().fireEvent(
            new VisibilityChangeEvent(scrollHandleIsVisible));
}
/**
 * Converts a double into an integer by JavaScript's terms.
 * <p>
 * Implementation copied from {@link Element#toInt32(double)}. The bitwise
 * OR with 0 applies JavaScript's ToInt32 conversion, truncating toward
 * zero.
 *
 * @param val
 *            the double value to convert into an integer
 * @return the double value converted to an integer
 */
private static native int toInt32(double val)
/*-{
    return val | 0;
}-*/;
/**
 * Locks or unlocks the scrollbar bundle.
 * <p>
 * A locked scrollbar bundle will refuse to scroll, both programmatically
 * and via user-triggered events.
 *
 * @param isLocked
 *            <code>true</code> to lock, <code>false</code> to unlock
 */
public void setLocked(boolean isLocked) {
    this.isLocked = isLocked;
}

/**
 * Checks whether the scrollbar bundle is locked or not.
 *
 * @return <code>true</code> iff the scrollbar bundle is locked
 */
public boolean isLocked() {
    return isLocked;
}
/**
 * Returns the scroll direction of this scrollbar bundle.
 *
 * @return the scroll direction of this scrollbar bundle
 */
public abstract Direction getDirection();

/**
 * Adds a scroll handler to the scrollbar bundle.
 *
 * @param handler
 *            the handler to add
 * @return the registration object for the handler registration
 */
public HandlerRegistration addScrollHandler(final ScrollHandler handler) {
    return getHandlerManager().addHandler(ScrollEvent.TYPE, handler);
}
/**
 * Converts a CSS dimension string (e.g. "123px" or "") into a pixel count.
 * <p>
 * Sizes are read from CSS rather than via element.getOffset*() because the
 * latter report 0 while an element is display:none. All elements managed
 * here carry explicit CSS dimensions, so parsing the style is safe — the
 * alternative (momentarily un-hiding elements to measure them) would cause
 * expensive reflows.
 *
 * @param size
 *            a CSS dimension string; empty means no size has been set
 * @return the numeric pixel value, or 0 for an empty string
 */
private static double parseCssDimensionToPixels(String size) {
    if (size.isEmpty()) {
        return 0;
    }
    assert size.endsWith("px") : "Can't parse CSS dimension \"" + size
            + "\"";
    return Double.parseDouble(size.substring(0, size.length() - 2));
}
/**
 * Checks whether a deferred resize (see setScrollSize/setOffsetSize) is
 * still waiting for its scroll event to arrive.
 *
 * @return <code>true</code> iff a deferred size change is still pending
 */
@Override
public boolean isWorkPending() {
    final boolean scrollSizePending = scrollSizeTemporaryScrollHandler != null;
    final boolean offsetSizePending = offsetSizeTemporaryScrollHandler != null;
    return scrollSizePending || offsetSizePending;
}
}
| |
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2002-2009 Oracle. All rights reserved.
*
* $Id$
*/
package collections.ship.index;
import java.io.FileNotFoundException;
import java.util.Iterator;
import java.util.Map;
import com.sleepycat.collections.TransactionRunner;
import com.sleepycat.collections.TransactionWorker;
import com.sleepycat.db.DatabaseException;
/**
* Sample is the main entry point for the sample program and may be run as
* follows:
*
* <pre>
* java collections.ship.index.Sample
* [-h <home-directory> ]
* </pre>
*
* <p> The default for the home directory is ./tmp -- the tmp subdirectory of
* the current directory where the sample is run. The home directory must exist
* before running the sample. To recreate the sample database from scratch,
* delete all files in the home directory before running the sample. </p>
*
* @author Mark Hayes
*/
public class Sample {

    // Opened by the constructor, closed via close(); views wrap db.
    private SampleDatabase db;
    private SampleViews views;

    /**
     * Run the sample program.
     */
    public static void main(String[] args) {

        System.out.println("\nRunning sample: " + Sample.class);

        // Parse the command line arguments.
        //
        String homeDir = "./tmp";
        for (int i = 0; i < args.length; i += 1) {
            // "-h" must be followed by a directory argument; anything else
            // (including a trailing "-h") prints usage and exits.
            if (args[i].equals("-h") && i < args.length - 1) {
                i += 1;
                homeDir = args[i];
            } else {
                System.err.println("Usage:\n java " + Sample.class.getName() +
                                   "\n [-h <home-directory>]");
                System.exit(2);
            }
        }

        // Run the sample.
        //
        Sample sample = null;
        try {
            sample = new Sample(homeDir);
            sample.run();
        } catch (Exception e) {
            // If an exception reaches this point, the last transaction did not
            // complete. If the exception is RunRecoveryException, follow
            // the Berkeley DB recovery procedures before running again.
            e.printStackTrace();
        } finally {
            if (sample != null) {
                try {
                    // Always attempt to close the database cleanly.
                    sample.close();
                } catch (Exception e) {
                    System.err.println("Exception during database close:");
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Open the database and views.
     */
    private Sample(String homeDir)
        throws DatabaseException, FileNotFoundException {

        db = new SampleDatabase(homeDir);
        views = new SampleViews(db);
    }

    /**
     * Close the database cleanly.
     */
    private void close()
        throws DatabaseException {

        db.close();
    }

    /**
     * Run two transactions to populate and print the database. A
     * TransactionRunner is used to ensure consistent handling of transactions,
     * including deadlock retries. But the best transaction handling mechanism
     * to use depends on the application.
     */
    private void run()
        throws Exception {

        TransactionRunner runner = new TransactionRunner(db.getEnvironment());
        // Each worker's doWork() executes in its own transaction.
        runner.run(new PopulateDatabase());
        runner.run(new PrintDatabase());
    }

    /**
     * Populate the database in a single transaction.
     */
    private class PopulateDatabase implements TransactionWorker {

        public void doWork()
            throws Exception {
            // Suppliers and parts first, since shipments reference both.
            addSuppliers();
            addParts();
            addShipments();
        }
    }

    /**
     * Print the database in a single transaction. All entities are printed
     * and the indices are used to print the entities for certain keys.
     *
     * <p> Note the use of special iterator() methods. These are used here
     * with indices to find the shipments for certain keys.</p>
     */
    private class PrintDatabase implements TransactionWorker {

        public void doWork()
            throws Exception {
            printEntries("Parts",
                          views.getPartEntrySet().iterator());
            printEntries("Suppliers",
                          views.getSupplierEntrySet().iterator());
            printValues("Suppliers for City Paris",
                         views.getSupplierByCityMap().duplicates(
                                            "Paris").iterator());
            printEntries("Shipments",
                          views.getShipmentEntrySet().iterator());
            printValues("Shipments for Part P1",
                         views.getShipmentByPartMap().duplicates(
                                            new PartKey("P1")).iterator());
            printValues("Shipments for Supplier S1",
                         views.getShipmentBySupplierMap().duplicates(
                                            new SupplierKey("S1")).iterator());
        }
    }

    /**
     * Populate the part entities in the database. If the part map is not
     * empty, assume that this has already been done.
     */
    private void addParts() {

        Map parts = views.getPartMap();
        if (parts.isEmpty()) {
            System.out.println("Adding Parts");
            parts.put(new PartKey("P1"),
                      new PartData("Nut", "Red",
                                    new Weight(12.0, Weight.GRAMS),
                                    "London"));
            parts.put(new PartKey("P2"),
                      new PartData("Bolt", "Green",
                                    new Weight(17.0, Weight.GRAMS),
                                    "Paris"));
            parts.put(new PartKey("P3"),
                      new PartData("Screw", "Blue",
                                    new Weight(17.0, Weight.GRAMS),
                                    "Rome"));
            parts.put(new PartKey("P4"),
                      new PartData("Screw", "Red",
                                    new Weight(14.0, Weight.GRAMS),
                                    "London"));
            parts.put(new PartKey("P5"),
                      new PartData("Cam", "Blue",
                                    new Weight(12.0, Weight.GRAMS),
                                    "Paris"));
            parts.put(new PartKey("P6"),
                      new PartData("Cog", "Red",
                                    new Weight(19.0, Weight.GRAMS),
                                    "London"));
        }
    }

    /**
     * Populate the supplier entities in the database. If the supplier map is
     * not empty, assume that this has already been done.
     */
    private void addSuppliers() {

        Map suppliers = views.getSupplierMap();
        if (suppliers.isEmpty()) {
            System.out.println("Adding Suppliers");
            suppliers.put(new SupplierKey("S1"),
                          new SupplierData("Smith", 20, "London"));
            suppliers.put(new SupplierKey("S2"),
                          new SupplierData("Jones", 10, "Paris"));
            suppliers.put(new SupplierKey("S3"),
                          new SupplierData("Blake", 30, "Paris"));
            suppliers.put(new SupplierKey("S4"),
                          new SupplierData("Clark", 20, "London"));
            suppliers.put(new SupplierKey("S5"),
                          new SupplierData("Adams", 30, "Athens"));
        }
    }

    /**
     * Populate the shipment entities in the database. If the shipment map
     * is not empty, assume that this has already been done.
     */
    private void addShipments() {

        Map shipments = views.getShipmentMap();
        if (shipments.isEmpty()) {
            System.out.println("Adding Shipments");
            shipments.put(new ShipmentKey("P1", "S1"),
                          new ShipmentData(300));
            shipments.put(new ShipmentKey("P2", "S1"),
                          new ShipmentData(200));
            shipments.put(new ShipmentKey("P3", "S1"),
                          new ShipmentData(400));
            shipments.put(new ShipmentKey("P4", "S1"),
                          new ShipmentData(200));
            shipments.put(new ShipmentKey("P5", "S1"),
                          new ShipmentData(100));
            shipments.put(new ShipmentKey("P6", "S1"),
                          new ShipmentData(100));
            shipments.put(new ShipmentKey("P1", "S2"),
                          new ShipmentData(300));
            shipments.put(new ShipmentKey("P2", "S2"),
                          new ShipmentData(400));
            shipments.put(new ShipmentKey("P2", "S3"),
                          new ShipmentData(200));
            shipments.put(new ShipmentKey("P2", "S4"),
                          new ShipmentData(200));
            shipments.put(new ShipmentKey("P4", "S4"),
                          new ShipmentData(300));
            shipments.put(new ShipmentKey("P5", "S4"),
                          new ShipmentData(400));
        }
    }

    /**
     * Print the key/value objects returned by an iterator of Map.Entry
     * objects.
     */
    private void printEntries(String label, Iterator iterator) {

        System.out.println("\n--- " + label + " ---");
        while (iterator.hasNext()) {
            // Keys and values rely on their toString() implementations.
            Map.Entry entry = (Map.Entry) iterator.next();
            System.out.println(entry.getKey().toString());
            System.out.println(entry.getValue().toString());
        }
    }

    /**
     * Print the objects returned by an iterator of value objects.
     */
    private void printValues(String label, Iterator iterator) {

        System.out.println("\n--- " + label + " ---");
        while (iterator.hasNext()) {
            System.out.println(iterator.next().toString());
        }
    }
}
| |
package org.docksidestage.postgresql.dbflute.bsentity.customize.dbmeta;
import java.util.List;
import java.util.Map;
import org.dbflute.Entity;
import org.dbflute.dbmeta.AbstractDBMeta;
import org.dbflute.dbmeta.info.*;
import org.dbflute.dbmeta.name.*;
import org.dbflute.dbmeta.property.PropertyGateway;
import org.dbflute.dbway.DBDef;
import org.docksidestage.postgresql.dbflute.allcommon.*;
import org.docksidestage.postgresql.dbflute.exentity.customize.*;
/**
* The DB meta of LargeAutoPaging. (Singleton)
* @author DBFlute(AutoGenerator)
*/
public class LargeAutoPagingDbm extends AbstractDBMeta {
// ===================================================================================
// Singleton
// =========
private static final LargeAutoPagingDbm _instance = new LargeAutoPagingDbm();
private LargeAutoPagingDbm() {}
public static LargeAutoPagingDbm getInstance() { return _instance; }
// ===================================================================================
// Current DBDef
// =============
public String getProjectName() { return DBCurrent.getInstance().projectName(); }
public String getProjectPrefix() { return DBCurrent.getInstance().projectPrefix(); }
public String getGenerationGapBasePrefix() { return DBCurrent.getInstance().generationGapBasePrefix(); }
public DBDef getCurrentDBDef() { return DBCurrent.getInstance().currentDBDef(); }
// ===================================================================================
// Property Gateway
// ================
// -----------------------------------------------------
// Column Property
// ---------------
protected final Map<String, PropertyGateway> _epgMap = newHashMap();
{ xsetupEpg(); }
protected void xsetupEpg() {
setupEpg(_epgMap, et -> ((LargeAutoPaging)et).getLargeDataRefId(), (et, vl) -> ((LargeAutoPaging)et).setLargeDataRefId(ctl(vl)), "largeDataRefId");
setupEpg(_epgMap, et -> ((LargeAutoPaging)et).getLargeDataId(), (et, vl) -> ((LargeAutoPaging)et).setLargeDataId(ctl(vl)), "largeDataId");
setupEpg(_epgMap, et -> ((LargeAutoPaging)et).getDateIndex(), (et, vl) -> ((LargeAutoPaging)et).setDateIndex(ctld(vl)), "dateIndex");
setupEpg(_epgMap, et -> ((LargeAutoPaging)et).getDateNoIndex(), (et, vl) -> ((LargeAutoPaging)et).setDateNoIndex(ctld(vl)), "dateNoIndex");
setupEpg(_epgMap, et -> ((LargeAutoPaging)et).getTimestampIndex(), (et, vl) -> ((LargeAutoPaging)et).setTimestampIndex(ctldt(vl)), "timestampIndex");
setupEpg(_epgMap, et -> ((LargeAutoPaging)et).getTimestampNoIndex(), (et, vl) -> ((LargeAutoPaging)et).setTimestampNoIndex(ctldt(vl)), "timestampNoIndex");
setupEpg(_epgMap, et -> ((LargeAutoPaging)et).getNullableDecimalIndex(), (et, vl) -> ((LargeAutoPaging)et).setNullableDecimalIndex(ctb(vl)), "nullableDecimalIndex");
setupEpg(_epgMap, et -> ((LargeAutoPaging)et).getNullableDecimalNoIndex(), (et, vl) -> ((LargeAutoPaging)et).setNullableDecimalNoIndex(ctb(vl)), "nullableDecimalNoIndex");
setupEpg(_epgMap, et -> ((LargeAutoPaging)et).getSelfParentId(), (et, vl) -> ((LargeAutoPaging)et).setSelfParentId(ctl(vl)), "selfParentId");
}
public PropertyGateway findPropertyGateway(String prop)
{ return doFindEpg(_epgMap, prop); }
// ===================================================================================
// Table Info
// ==========
protected final String _tableDbName = "LargeAutoPaging";
protected final String _tableDispName = "LargeAutoPaging";
protected final String _tablePropertyName = "largeAutoPaging";
protected final TableSqlName _tableSqlName = new TableSqlName("LargeAutoPaging", _tableDbName);
{ _tableSqlName.xacceptFilter(DBFluteConfig.getInstance().getTableSqlNameFilter()); }
public String getTableDbName() { return _tableDbName; }
public String getTableDispName() { return _tableDispName; }
public String getTablePropertyName() { return _tablePropertyName; }
public TableSqlName getTableSqlName() { return _tableSqlName; }
// ===================================================================================
// Column Info
// ===========
protected final ColumnInfo _columnLargeDataRefId = cci("large_data_ref_id", "large_data_ref_id", null, null, Long.class, "largeDataRefId", null, false, false, false, "int8", 19, 0, null, null, false, null, null, null, null, null, false);
protected final ColumnInfo _columnLargeDataId = cci("large_data_id", "large_data_id", null, null, Long.class, "largeDataId", null, false, false, false, "int8", 19, 0, null, null, false, null, null, null, null, null, false);
protected final ColumnInfo _columnDateIndex = cci("date_index", "date_index", null, null, java.time.LocalDate.class, "dateIndex", null, false, false, false, "date", 13, 0, null, null, false, null, null, null, null, null, false);
protected final ColumnInfo _columnDateNoIndex = cci("date_no_index", "date_no_index", null, null, java.time.LocalDate.class, "dateNoIndex", null, false, false, false, "date", 13, 0, null, null, false, null, null, null, null, null, false);
protected final ColumnInfo _columnTimestampIndex = cci("timestamp_index", "timestamp_index", null, null, java.time.LocalDateTime.class, "timestampIndex", null, false, false, false, "timestamp", 26, 3, null, null, false, null, null, null, null, null, false);
protected final ColumnInfo _columnTimestampNoIndex = cci("timestamp_no_index", "timestamp_no_index", null, null, java.time.LocalDateTime.class, "timestampNoIndex", null, false, false, false, "timestamp", 26, 3, null, null, false, null, null, null, null, null, false);
protected final ColumnInfo _columnNullableDecimalIndex = cci("nullable_decimal_index", "nullable_decimal_index", null, null, java.math.BigDecimal.class, "nullableDecimalIndex", null, false, false, false, "numeric", 12, 3, null, null, false, null, null, null, null, null, false);
protected final ColumnInfo _columnNullableDecimalNoIndex = cci("nullable_decimal_no_index", "nullable_decimal_no_index", null, null, java.math.BigDecimal.class, "nullableDecimalNoIndex", null, false, false, false, "numeric", 12, 3, null, null, false, null, null, null, null, null, false);
protected final ColumnInfo _columnSelfParentId = cci("self_parent_id", "self_parent_id", null, null, Long.class, "selfParentId", null, false, false, false, "int8", 19, 0, null, null, false, null, null, null, null, null, false);
/**
* large_data_ref_id: {int8(19), refers to vendor_large_data_ref.large_data_ref_id}
* @return The information object of specified column. (NotNull)
*/
public ColumnInfo columnLargeDataRefId() { return _columnLargeDataRefId; }
/**
* large_data_id: {int8(19), refers to vendor_large_data_ref.large_data_id}
* @return The information object of specified column. (NotNull)
*/
public ColumnInfo columnLargeDataId() { return _columnLargeDataId; }
/**
* date_index: {date(13), refers to vendor_large_data_ref.date_index}
* @return The information object of specified column. (NotNull)
*/
public ColumnInfo columnDateIndex() { return _columnDateIndex; }
/**
* date_no_index: {date(13), refers to vendor_large_data_ref.date_no_index}
* @return The information object of specified column. (NotNull)
*/
public ColumnInfo columnDateNoIndex() { return _columnDateNoIndex; }
/**
* timestamp_index: {timestamp(26, 3), refers to vendor_large_data_ref.timestamp_index}
* @return The information object of specified column. (NotNull)
*/
public ColumnInfo columnTimestampIndex() { return _columnTimestampIndex; }
/**
* timestamp_no_index: {timestamp(26, 3), refers to vendor_large_data_ref.timestamp_no_index}
* @return The information object of specified column. (NotNull)
*/
public ColumnInfo columnTimestampNoIndex() { return _columnTimestampNoIndex; }
/**
* nullable_decimal_index: {numeric(12, 3), refers to vendor_large_data_ref.nullable_decimal_index}
* @return The information object of specified column. (NotNull)
*/
public ColumnInfo columnNullableDecimalIndex() { return _columnNullableDecimalIndex; }
/**
* nullable_decimal_no_index: {numeric(12, 3), refers to vendor_large_data_ref.nullable_decimal_no_index}
* @return The information object of specified column. (NotNull)
*/
public ColumnInfo columnNullableDecimalNoIndex() { return _columnNullableDecimalNoIndex; }
/**
* self_parent_id: {int8(19), refers to vendor_large_data_ref.self_parent_id}
* @return The information object of specified column. (NotNull)
*/
public ColumnInfo columnSelfParentId() { return _columnSelfParentId; }
protected List<ColumnInfo> ccil() {
List<ColumnInfo> ls = newArrayList();
ls.add(columnLargeDataRefId());
ls.add(columnLargeDataId());
ls.add(columnDateIndex());
ls.add(columnDateNoIndex());
ls.add(columnTimestampIndex());
ls.add(columnTimestampNoIndex());
ls.add(columnNullableDecimalIndex());
ls.add(columnNullableDecimalNoIndex());
ls.add(columnSelfParentId());
return ls;
}
{ initializeInformationResource(); }
// ===================================================================================
// Unique Info
// ===========
// -----------------------------------------------------
// Primary Element
// ---------------
protected UniqueInfo cpui() {
throw new UnsupportedOperationException("The table does not have primary key: " + getTableDbName());
}
public boolean hasPrimaryKey() { return false; }
public boolean hasCompoundPrimaryKey() { return false; }
// ===================================================================================
// Relation Info
// =============
// cannot cache because it uses related DB meta instance while booting
// (instead, cached by super's collection)
// -----------------------------------------------------
// Foreign Property
// ----------------
// -----------------------------------------------------
// Referrer Property
// -----------------
// ===================================================================================
// Various Info
// ============
// ===================================================================================
// Type Name
// =========
public String getEntityTypeName() { return "org.docksidestage.postgresql.dbflute.exentity.customize.LargeAutoPaging"; }
public String getConditionBeanTypeName() { return null; }
public String getBehaviorTypeName() { return null; }
// ===================================================================================
// Object Type
// ===========
public Class<LargeAutoPaging> getEntityType() { return LargeAutoPaging.class; }
// ===================================================================================
// Object Instance
// ===============
/** @return a brand-new, empty entity instance */
public LargeAutoPaging newEntity() {
    return new LargeAutoPaging();
}
// ===================================================================================
// Map Communication
// =================
/** Applies the given primary-key map to the entity (delegates to the typed implementation). */
public void acceptPrimaryKeyMap(Entity et, Map<String, ? extends Object> mp) {
    doAcceptPrimaryKeyMap((LargeAutoPaging) et, mp);
}
/** Applies the given all-column map to the entity (delegates to the typed implementation). */
public void acceptAllColumnMap(Entity et, Map<String, ? extends Object> mp) {
    doAcceptAllColumnMap((LargeAutoPaging) et, mp);
}
/** @return a map of primary-key column names to values extracted from the entity */
public Map<String, Object> extractPrimaryKeyMap(Entity et) {
    return doExtractPrimaryKeyMap(et);
}
/** @return a map of all column names to values extracted from the entity */
public Map<String, Object> extractAllColumnMap(Entity et) {
    return doExtractAllColumnMap(et);
}
}
| |
package GHRestaurant.roles;
import CMRestaurant.roles.CMWaiterRole.AgentEvent;
import GHRestaurant.gui.GHWaiterGui;
import agent.Agent;
import restaurant.Restaurant;
import restaurant.interfaces.*;
import java.util.*;
import java.util.concurrent.Semaphore;
import city.PersonAgent;
import city.gui.Gui;
import city.gui.trace.AlertLog;
import city.gui.trace.AlertTag;
import city.interfaces.Person;
import city.roles.Role;
/**
* Restaurant Waiter Agent
*/
public class GHWaiterRole extends Role implements Waiter{
//Notice that we implement waitingCustomers using ArrayList, but type it
//with List semantics.
public List<MyCustomer> waitingCustomers
= new ArrayList<MyCustomer>();
public List<CustomerCheck> customerChecks
= new ArrayList<CustomerCheck>();
public GHWaiterGui waitergui = null;
private boolean WantToGoOnBreak = false;
private boolean OnBreak = false;
private boolean BackTW = false;
//private String name;
private Semaphore atDestination = new Semaphore(0,true);
private Host host;
private Cashier cashier;
private Cook cook;
enum CustomerState {Waiting, AskedToOrder, Ordered, Reorder, Ready, Done, Idle}
enum WaiterState{None,GoingToWork, relieveFromDuty, RestaurantClosed}
WaiterState wState = WaiterState.None;
private Restaurant restaurant;
public GHWaiterRole(PersonAgent p, Restaurant r) {
super(p);
restaurant = r;
//this.name = name;
}
/*public String getName() {
return name;
}*/
public List getWaitingCustomers() {
return waitingCustomers;
}
public void setHost(Host host){
this.host = host;
}
public void setCook(Cook cook){
this.cook = cook;
}
public void setCashier(Cashier ca){
this.cashier = ca;
}
// Messages
public void goesToWork() {//from animation
print("Going to work");
wState = WaiterState.GoingToWork;
stateChanged();
}
public void msgTryToGoOnBreak(){//from animation
WantToGoOnBreak = true;
stateChanged();
}
public void msgGoBackToWork(){//from animation
OnBreak = false;
BackTW = true;
stateChanged();
}
public void msgSitAtTable(Customer customer, int table){
waitingCustomers.add(new MyCustomer(customer,table, CustomerState.Waiting));
//print("msgSitAtTable");
stateChanged();
}
public void msgImReadyToOrder(Customer c){
print("Taking Order");
for(MyCustomer mycust : waitingCustomers){
if(mycust.customer == c){
mycust.cs = CustomerState.AskedToOrder;
}
}
stateChanged();
}
public void msgHereIsMyOrder(Customer c, String choice){
print("Customer ordered" + choice);
for(MyCustomer mycust : waitingCustomers){
if(mycust.customer == c){
mycust.cs = CustomerState.Ordered;
mycust.choice = choice;
}
}
stateChanged();
}
public void msgOrderIsReady(String choice, int tablenumber){
print("Recieved msgOrderIsReady");
for(MyCustomer mycust : waitingCustomers){
if(mycust.choice == choice && mycust.tablenumber ==tablenumber){
mycust.cs = CustomerState.Ready;
}
}
stateChanged();
}
public void msgOutOfOrder(int tablenumber, String choice){
for(MyCustomer mycust : waitingCustomers){
if(mycust.choice == choice && mycust.tablenumber ==tablenumber){
mycust.cs = CustomerState.Reorder;
}
}
stateChanged();
}
public void msgDoneEatingandLeaving(Customer c){
for(MyCustomer mycust : waitingCustomers){
if(mycust.customer == c){
mycust.cs = CustomerState.Done;
}
}
stateChanged();
}
public void msgHereIsCheck(Customer cust, String c, double co, int t){
print("Recieved msgHereIsCheck");
customerChecks.add(new CustomerCheck(cust,c,co,t));
stateChanged();
}
public void msgAtTable() {//from animation
//print("msgAtTable() called");
atDestination.release();// = true;
stateChanged();
}
public void msgRestaurantClosed(){
wState = WaiterState.RestaurantClosed;
stateChanged();
}
/**
* Scheduler. Determine what action is called for, and do it.
*/
public boolean pickAndExecuteAnAction() {
/* Think of this next rule as:
Does there exist a table and customer,
so that table is unoccupied and customer is waiting.
If so seat him at the table.
*/
try{
if(wState == WaiterState.relieveFromDuty){
wState = WaiterState.None;
myPerson.releavedFromDuty(this);
//restaurant.insideAnimationPanel.removeGui(waitergui);
return true;
}
if(wState == WaiterState.RestaurantClosed){
wState = WaiterState.None;
leaveClosedRestaurant();
return true;
}
if(BackTW){
BackToWork();
return true;
}
if(WantToGoOnBreak){
AskForBreak();
return true;
}
if(wState == WaiterState.GoingToWork){
wState = WaiterState.None;
MsgHost();
return true;
}
/*for (MyCustomer customer : waitingCustomers) {
if(customer.getState() == CustomerState.Waiting){
SeatCustomer(customer);
return true;
}
}*/
for (MyCustomer customer : waitingCustomers) {
if(customer.getState() == CustomerState.AskedToOrder){
TakeOrder(customer);
return true;
}
}
for (MyCustomer customer : waitingCustomers) {
if(customer.getState() == CustomerState.Reorder){
ReAskToOrder(customer);
return true;
}
}
for (MyCustomer customer : waitingCustomers) {
if(customer.getState() == CustomerState.Ordered){
TakeOrderToCook(customer);
return true;
}
}
for (MyCustomer customer : waitingCustomers) {
if(customer.getState() == CustomerState.Ready){
TakeOrderToCustomer(customer);
return true;
}
}
for (MyCustomer customer : waitingCustomers) {
if(customer.getState() == CustomerState.Done){
TellHost(customer);
return true;
}
}
//waitergui.DoLeaveCustomer();
for (MyCustomer customer : waitingCustomers) {
if(customer.getState() == CustomerState.Waiting){
SeatCustomer(customer);
return true;
}
}
}
catch(ConcurrentModificationException cme){
return false;
}
waitergui.DoLeaveCustomer();
return false;
//we have tried all our rules and found
//nothing to do. So return false to main loop of abstract agent
//and wait.
}
// Actions
private void leaveClosedRestaurant() {
waitergui.LeaveRestaurant();
try {
atDestination.acquire();
} catch (InterruptedException e) {
}
}
private void MsgHost() {
((GHHostRole) restaurant.host).msgSetWaiter(this);
}
private void SeatCustomer(MyCustomer customer) {
//customer.cs = CustomerState.Seated;
((GHCustomerRole) customer.customer).msgFollowMeToTable(customer.tablenumber,this);
DoSeatCustomer(customer.customer, customer.tablenumber);
try {
atDestination.acquire();
} catch (InterruptedException e) {
e.printStackTrace();
}
customer.cs = CustomerState.Idle;
}
private void DoSeatCustomer(Customer customer, int tablenumber){
//print("Seating " + customer + " at table " + tablenumber);
waitergui.DoBringToTable(customer, tablenumber);
}
private void TakeOrder(MyCustomer c){
//c.cs = CustomerState.Asked;
//DoTakeOrder(c);
waitergui.DoGoToTable(c.tablenumber);
try {
atDestination.acquire();
} catch (InterruptedException e) {
e.printStackTrace();
}
((GHCustomerRole) c.customer).msgWhatWouldYouLike();
c.cs = CustomerState.Idle;
}
/*private void DoTakeOrder(MyCustomer c){
print("Going to table ");//+ c.tablenumber + " to take "+ c.customer + "'s order.");
waitergui.DoGoToTable(c.tablenumber);
}*/
private void ReAskToOrder(MyCustomer c){
//DoTakeOrder(c);
waitergui.DoGoToTable(c.tablenumber);
try {
atDestination.acquire();
} catch (InterruptedException e) {
e.printStackTrace();
}
c.cs = CustomerState.Idle;
((GHCustomerRole) c.customer).msgOutOfChoiceReorder(c.choice);
}
private void TakeOrderToCook(MyCustomer c){
DoTakeOrderToCook();
try {
atDestination.acquire();
} catch (InterruptedException e) {
e.printStackTrace();
}
c.cs = CustomerState.Idle;
((GHCookRole) restaurant.cook).msgHereIsAnOrder(this,c.choice,c.tablenumber);
}
private void DoTakeOrderToCook(){
print("Taking order to cook");
waitergui.GoToCook();
}
private void TakeOrderToCustomer(MyCustomer c){
DoGoToCook();
try {
atDestination.acquire();
} catch (InterruptedException e) {
e.printStackTrace();
}
DoTakeOrderToCustomer(c);
try {
atDestination.acquire();
} catch (InterruptedException e) {
e.printStackTrace();
}
c.cs = CustomerState.Idle;
//When the waiter gives the customer his order he tells the cashier to produce the check
//cashier.msgProduceCheck(this, c.customer, c.choice, c.tablenumber);
//Then asks him for the check
DoGiveCheck(c);
try {
atDestination.acquire();
} catch (InterruptedException e) {
e.printStackTrace();
}
//cashier.msgGiveMeCheck(this);
//c.customer.msgHereIsYourOrder();
for(CustomerCheck cc : customerChecks){
if((!(customerChecks.isEmpty())) && (cc.customer == c.customer) ){
((GHCustomerRole) c.customer).msgHeresCheck(cc.choice, cc.cost);
}
}
((GHCustomerRole) c.customer).msgHereIsYourOrder();
}
private void DoGoToCook(){
print("Going to cook to pick up order");
waitergui.GoToCook();
}
private void DoTakeOrderToCustomer(MyCustomer c){
print("Taking order to table " + c.tablenumber);
((GHCashierRole) restaurant.cashier).msgProduceCheck(this, c.customer, c.choice, c.tablenumber);
waitergui.DoGoToTable(c.tablenumber);
}
private void DoGiveCheck(MyCustomer c){
print("Giving check to customer");
((GHCashierRole) restaurant.cashier).msgGiveMeCheck(this);
waitergui.DoGoToTable(c.tablenumber);
}
private void TellHost(MyCustomer c){
print("Telling host a table is free");
((GHHostRole) restaurant.host).msgLeavingTable(c.customer);
c.cs = CustomerState.Idle;
waitingCustomers.remove(c);
}
private void AskForBreak(){
print("Asking for break");
((GHHostRole) restaurant.host).msgCanIGoOnBreak(this);
}
private void BackToWork(){
print("Going Back To Work");
((GHHostRole) restaurant.host).msgSetWaiter(this);
BackTW = false;
}
/*private void HereIsCheck(CustomerCheck c){
DoTakeCheck(c);
try {
atDestination.acquire();
} catch (InterruptedException e) {
e.printStackTrace();
}
c.customer.msgHeresCheck(c.choice, c.cost);
customerChecks.remove(c);
}
private void DoTakeCheck(CustomerCheck c){
print("Giving "+ c.customer + " the check");
waitergui.DoGoToTable(c.tablenumber);
}*/
//utilities
public void setGui(GHWaiterGui wg){
waitergui = wg;
}
public GHWaiterGui getGui(){
return waitergui;
}
public boolean getOnBreak(){
return OnBreak;
}
public void setWantToGoOnBreak(boolean b){
WantToGoOnBreak = b;
}
public void setOnBreak(boolean b){
OnBreak = b;
}
private class MyCustomer {
Customer customer;
int tablenumber;
String choice;
CustomerState cs;
MyCustomer(Customer cust, int table, CustomerState cstate){
customer = cust;
tablenumber = table;
cs = cstate;
}
public CustomerState getState(){
return cs;
}
}
private class CustomerCheck {
Customer customer;
String choice;
double cost;
int tablenumber;
CustomerCheck(Customer cu, String c, double co, int t){
customer = cu;
choice = c;
cost = co;
tablenumber = t;
}
}
@Override
public void msgGoOnBreak() {
// TODO Auto-generated method stub
}
@Override
public void msgDontGoOnBreak() {
// TODO Auto-generated method stub
}
@Override
public void msgLeftTheRestaurant() {
atDestination.release();
wState = WaiterState.relieveFromDuty;
stateChanged();
}
@Override
public Restaurant getRestaurant() {
return restaurant;
}
@Override
public void msgAskForBreak() {
// TODO Auto-generated method stub
}
@Override
public void setGui(Gui g) {
waitergui = (GHWaiterGui) g;
}
}
| |
/**
* Copyright (c) 2010 Daniel Murphy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/**
* Created at Aug 20, 2010, 2:58:08 AM
*/
package com.dmurph.mvc.gui.combo;
import java.awt.event.KeyEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedList;
import javax.swing.JComboBox;
import com.dmurph.mvc.model.MVCArrayList;
/**
* This class is for having a combo box that will always reflect the data of
* an MVCArrayList. There is a lot of flexibility provided with filtering
* and sorting the elements.
*
* @author Daniel Murphy
*/
public class MVCJComboBox<E> extends JComboBox {
	private static final long serialVersionUID = 1L;

	private MVCJComboBoxModel<E> model;
	private MVCArrayList<E> data;
	private IMVCJComboBoxFilter<E> filter;
	private final Object lock = new Object();
	private MVCJComboBoxStyle style;
	private Comparator<E> comparator = null;

	// Keeps the combo box model synchronized with the backing MVCArrayList.
	private final PropertyChangeListener plistener = new PropertyChangeListener() {
		@SuppressWarnings("unchecked")
		public void propertyChange(PropertyChangeEvent argEvt) {
			String prop = argEvt.getPropertyName();
			if (prop.equals(MVCArrayList.ADDED)) {
				add((E) argEvt.getNewValue());
			}
			else if (prop.equals(MVCArrayList.ADDED_ALL)) {
				addAll((Collection<E>) argEvt.getNewValue());
			}
			else if (prop.equals(MVCArrayList.CHANGED)) {
				change((E) argEvt.getOldValue(), (E) argEvt.getNewValue());
			}
			else if (prop.equals(MVCArrayList.REMOVED)) {
				remove((E) argEvt.getOldValue());
			}
			else if (prop.equals(MVCArrayList.REMOVED_ALL)) {
				synchronized (lock) {
					model.removeAllElements();
				}
			}
		}
	};

	/**
	 * Constructs with no data, no filter, no
	 * {@link Comparator}, and style set to
	 * {@link MVCJComboBoxStyle#ADD_NEW_TO_BEGINNING}.
	 */
	public MVCJComboBox() {
		this(null, new IMVCJComboBoxFilter<E>() {
			public boolean showItem(E argComponent) {
				return true;
			};
		}, MVCJComboBoxStyle.SORT, null);
	}

	/**
	 * Constructs a combo box with the given style. If you want
	 * the {@link MVCJComboBoxStyle#SORT} style, then you'll want to specify
	 * a comparator as well.
	 * @param argStyle
	 */
	public MVCJComboBox(MVCJComboBoxStyle argStyle) {
		this(null, new IMVCJComboBoxFilter<E>() {
			public boolean showItem(E argComponent) {
				return true;
			};
		}, argStyle, null);
	}

	/**
	 * Constructs a dynamic combo box with the given data and
	 * default style of {@link MVCJComboBoxStyle#SORT}.
	 * @param argData
	 * @param argComparator
	 */
	public MVCJComboBox(MVCArrayList<E> argData, Comparator<E> argComparator) {
		this(argData, new IMVCJComboBoxFilter<E>() {
			public boolean showItem(E argComponent) {
				return true;
			};
		}, MVCJComboBoxStyle.SORT, argComparator);
	}

	/**
	 * Constructs a combo box with the given data and style. If you want
	 * the {@link MVCJComboBoxStyle#SORT} style, then you'll want to specify
	 * a comparator as well.
	 * @param argData
	 * @param argStyle
	 */
	public MVCJComboBox(MVCArrayList<E> argData, MVCJComboBoxStyle argStyle) {
		this(argData, new IMVCJComboBoxFilter<E>() {
			public boolean showItem(E argComponent) {
				return true;
			};
		}, argStyle, null);
	}

	/**
	 * Constructs a dynamic combo box with the given data, filter, and comparator.
	 * The style will be {@link MVCJComboBoxStyle#SORT} by default.
	 * @param argData
	 * @param argFilter
	 * @param argComparator
	 */
	public MVCJComboBox(MVCArrayList<E> argData, IMVCJComboBoxFilter<E> argFilter, Comparator<E> argComparator) {
		// BUG FIX: argComparator was previously dropped (null was forwarded),
		// which made the default SORT style throw a NullPointerException later.
		this(argData, argFilter, MVCJComboBoxStyle.SORT, argComparator);
	}

	/**
	 * Fully-specified constructor: wires the model, registers the change
	 * listener, populates the model through the filter, and sorts if requested.
	 * @param argData can be null
	 * @param argFilter
	 * @param argStyle
	 * @param argComparator can be null (set it later and call {@link #refreshData()})
	 */
	public MVCJComboBox(MVCArrayList<E> argData, IMVCJComboBoxFilter<E> argFilter, MVCJComboBoxStyle argStyle, Comparator<E> argComparator) {
		data = argData;
		style = argStyle;
		filter = argFilter;
		comparator = argComparator;
		model = new MVCJComboBoxModel<E>();
		super.setModel(model);
		if (data != null) {
			argData.addPropertyChangeListener(plistener);
			// add the data
			for (E o : data) {
				if (filter.showItem(o)) {
					model.addElement(o);
				}
			}
			// start with allowing the comparator to be null, in case they intend
			// to set it later and call refreshData()
			if (style == MVCJComboBoxStyle.SORT && comparator != null) {
				model.sort(comparator);
			}
		}
	}

	/**
	 * Gets the rendering style of this combo box. Default style is
	 * {@link MVCJComboBoxStyle#SORT}.
	 * @return
	 */
	public MVCJComboBoxStyle getStyle() {
		return style;
	}

	/**
	 * Gets the data list. This is used to access
	 * data with {@link #refreshData()}, so override
	 * if you want to customize what the data is (sending
	 * null to the constructor for the data
	 * is a good idea in that case)
	 * @return
	 */
	public ArrayList<E> getData() {
		return data;
	}

	/**
	 * Sets the data of this combo box. This causes the box
	 * to refresh its model.
	 * @param argData can be null
	 */
	public void setData(MVCArrayList<E> argData) {
		synchronized (lock) {
			if (data != null) {
				data.removePropertyChangeListener(plistener);
			}
			data = argData;
			if (data != null) {
				data.addPropertyChangeListener(plistener);
			}
		}
		refreshData();
	}

	/**
	 * Sets the comparator used for the {@link MVCJComboBoxStyle#SORT} style.
	 * @param argComparator
	 */
	public void setComparator(Comparator<E> argComparator) {
		this.comparator = argComparator;
	}

	/**
	 * Gets the comparator that's used for sorting.
	 * @return
	 */
	public Comparator<E> getComparator() {
		return comparator;
	}

	/**
	 * @return the filter
	 */
	public IMVCJComboBoxFilter<E> getFilter() {
		return filter;
	}

	/**
	 * @param argFilter the filter to set
	 */
	public void setFilter(IMVCJComboBoxFilter<E> argFilter) {
		filter = argFilter;
	}

	/**
	 * Backspace/delete clears the selection; all other keys are forwarded.
	 * @see javax.swing.JComboBox#processKeyEvent(java.awt.event.KeyEvent)
	 */
	@Override
	public void processKeyEvent(KeyEvent argE) {
		if (argE.getKeyChar() == KeyEvent.VK_BACK_SPACE || argE.getKeyChar() == KeyEvent.VK_DELETE) {
			setSelectedItem(null);
			super.hidePopup();
		} else {
			super.processKeyEvent(argE);
		}
	}

	/**
	 * Sets the style of this combo box.
	 * @param argStyle
	 * @throws NullPointerException if the style is SORT and no comparator is set
	 */
	public void setStyle(MVCJComboBoxStyle argStyle) {
		style = argStyle;
		if (style == MVCJComboBoxStyle.SORT) {
			if (comparator == null) {
				throw new NullPointerException("DynamicJComboBox style is set to Alpha Sort, but the comparator is null.");
			}
			model.sort(comparator);
		}
	}

	/** Rebuilds the model from {@link #getData()}, applying filter and sort. */
	public void refreshData() {
		synchronized (lock) {
			// remove all elements
			model.removeAllElements();
			if (getData() == null) {
				return;
			}
			for (E e : getData()) {
				if (filter.showItem(e)) {
					model.addElement(e);
				}
			}
			if (style == MVCJComboBoxStyle.SORT) {
				if (comparator == null) {
					throw new NullPointerException("DynamicJComboBox style is set to Alpha Sort, but the comparator is null.");
				}
				model.sort(comparator);
			}
		}
	}

	/** Inserts one element into the model according to the current style. */
	private void add(E argNewObj) {
		boolean b = filter.showItem(argNewObj);
		if (b == false) {
			return;
		}
		synchronized (lock) {
			switch (style) {
				case SORT: {
					if (comparator == null) {
						throw new NullPointerException("DynamicJComboBox style is set to Alpha Sort, but the comparator is null.");
					}
					// insert before the first element that sorts after the new one
					boolean inserted = false;
					for (int i = 0; i < model.getSize(); i++) {
						E e = model.getElementAt(i);
						if (comparator.compare(e, argNewObj) > 0) {
							model.insertElementAt(argNewObj, i);
							inserted = true;
							break;
						}
					}
					if (!inserted) {
						model.addElement(argNewObj);
					}
					break;
				}
				case ADD_NEW_TO_BEGINNING: {
					model.insertElementAt(argNewObj, 0);
					break;
				}
				case ADD_NEW_TO_END: {
					model.addElement(argNewObj);
				}
			}
		}
	}

	/** Inserts a batch of elements (those passing the filter) per the current style. */
	private void addAll(Collection<E> argNewObjects) {
		LinkedList<E> filtered = new LinkedList<E>();
		Iterator<E> it = argNewObjects.iterator();
		while (it.hasNext()) {
			E e = it.next();
			if (filter.showItem(e)) {
				filtered.add(e);
			}
		}
		if (filtered.size() == 0) {
			return;
		}
		synchronized (lock) {
			switch (style) {
				case SORT: {
					if (comparator == null) {
						throw new NullPointerException("DynamicJComboBox style is set to Alpha Sort, but the comparator is null.");
					}
					model.addElements(filtered);
					model.sort(comparator);
					break;
				}
				case ADD_NEW_TO_BEGINNING: {
					model.addElements(0, filtered);
					break;
				}
				case ADD_NEW_TO_END: {
					model.addElements(filtered);
				}
			}
		}
	}

	/** Replaces an element in the model, honoring the filter on both values. */
	private void change(E argOld, E argNew) {
		boolean so = filter.showItem(argOld);
		boolean sn = filter.showItem(argNew);
		if (!sn) {
			// new value is filtered out, so the change amounts to a removal
			remove(argOld);
			return;
		}
		if (!so) {
			// old value was never shown (sn is known true here), so this is an add
			add(argNew);
			return;
		}
		synchronized (lock) {
			int size = model.getSize();
			for (int i = 0; i < size; i++) {
				E e = model.getElementAt(i);
				if (e == argOld) {
					model.setElementAt(argNew, i);
					return;
				}
			}
			if (style == MVCJComboBoxStyle.SORT) {
				if (comparator == null) {
					throw new NullPointerException("DynamicJComboBox style is set to Alpha Sort, but the comparator is null.");
				}
				model.sort(comparator);
			}
		}
	}

	/** Removes an element from the model (no-op if the filter hides it). */
	private void remove(E argVal) {
		boolean is = filter.showItem(argVal);
		if (!is) {
			return;
		}
		synchronized (lock) {
			for (int i = 0; i < model.getSize(); i++) {
				E e = model.getElementAt(i);
				if (e == argVal) {
					model.removeElementAt(i);
					return;
				}
			}
		}
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.client.rpc;
import org.apache.hadoop.hdds.client.ReplicationType;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.ozone.MiniOzoneCluster;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.client.ObjectStore;
import org.apache.hadoop.ozone.client.OzoneClient;
import org.apache.hadoop.ozone.client.OzoneClientFactory;
import org.apache.hadoop.ozone.client.io.ChunkGroupOutputStream;
import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
import org.apache.hadoop.ozone.container.ContainerTestHelper;
import org.apache.hadoop.ozone.om.helpers.OmKeyArgs;
import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_SCM_WATCHER_TIMEOUT;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL;
/**
* Tests Close Container Exception handling by Ozone Client.
*/
public class TestFailureHandlingByClient {
  // FIX: fields are initialized in an instance-level @Before, so they should be
  // instance fields; keeping them static shared mutable state across test
  // instances and would break under parallel execution.
  private MiniOzoneCluster cluster;
  private OzoneConfiguration conf;
  private OzoneClient client;
  private ObjectStore objectStore;
  private int chunkSize;
  private int blockSize;
  private String volumeName;
  private String bucketName;
  private String keyString;
  private int maxRetries;

  /**
   * TODO: we will spawn new MiniOzoneCluster every time for each unit test
   * invocation. Need to use the same instance for all tests.
   */
  /**
   * Create a MiniDFSCluster for testing.
   * <p>
   * Ozone is made active by setting OZONE_ENABLED = true
   *
   * @throws IOException
   */
  @Before
  public void init() throws Exception {
    conf = new OzoneConfiguration();
    maxRetries = 100;
    chunkSize = (int) OzoneConsts.MB;
    blockSize = 4 * chunkSize;
    conf.setInt(ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_KEY, chunkSize);
    conf.setInt(OzoneConfigKeys.OZONE_CLIENT_STREAM_BUFFER_FLUSH_SIZE, 1);
    conf.setInt(OzoneConfigKeys.OZONE_CLIENT_STREAM_BUFFER_MAX_SIZE, 2);
    conf.setTimeDuration(OzoneConfigKeys.OZONE_CLIENT_WATCH_REQUEST_TIMEOUT, 5,
        TimeUnit.SECONDS);
    conf.setTimeDuration(HDDS_SCM_WATCHER_TIMEOUT, 1000, TimeUnit.MILLISECONDS);
    conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3, TimeUnit.SECONDS);
    conf.setQuietMode(false);
    conf.setLong(OzoneConfigKeys.OZONE_SCM_BLOCK_SIZE_IN_MB, (4));
    cluster = MiniOzoneCluster.newBuilder(conf)
        .setNumDatanodes(6).build();
    cluster.waitForClusterToBeReady();
    // the easiest way to create an open container is creating a key
    client = OzoneClientFactory.getClient(conf);
    objectStore = client.getObjectStore();
    keyString = UUID.randomUUID().toString();
    volumeName = "datanodefailurehandlingtest";
    bucketName = volumeName;
    objectStore.createVolume(volumeName);
    objectStore.getVolume(volumeName).createBucket(bucketName);
  }

  /**
   * Shutdown MiniDFSCluster.
   */
  @After
  public void shutdown() {
    if (cluster != null) {
      cluster.shutdown();
    }
  }

  // TODO: currently, shutting down 2 datanodes in Ratis leads to
  // watchForCommit Api in RaftClient to hang forever. Once that gets
  // fixed, we need to execute the tests with 2 node failures.
  @Test
  public void testBlockWritesWithDnFailures() throws Exception {
    String keyName = "ratis3";
    OzoneOutputStream key = createKey(keyName, ReplicationType.RATIS, 0);
    byte[] data =
        ContainerTestHelper
            .getFixedLengthString(keyString, chunkSize + chunkSize / 2).getBytes();
    key.write(data);
    // get the name of a valid container
    Assert.assertTrue(key.getOutputStream() instanceof ChunkGroupOutputStream);
    ChunkGroupOutputStream groupOutputStream =
        (ChunkGroupOutputStream) key.getOutputStream();
    List<OmKeyLocationInfo> locationInfoList =
        groupOutputStream.getLocationInfoList();
    Assert.assertTrue(locationInfoList.size() == 1);
    long containerId = locationInfoList.get(0).getContainerID();
    ContainerInfo container = cluster.getStorageContainerManager()
        .getContainerManager()
        .getContainer(ContainerID.valueof(containerId));
    Pipeline pipeline =
        cluster.getStorageContainerManager().getPipelineManager()
            .getPipeline(container.getPipelineID());
    List<DatanodeDetails> datanodes = pipeline.getNodes();
    cluster.shutdownHddsDatanode(datanodes.get(0));
    // cluster.shutdownHddsDatanode(datanodes.get(1));
    // The write will fail but exception will be handled and length will be
    // updated correctly in OzoneManager once the stream is closed
    key.close();
    // get the name of a valid container
    OmKeyArgs keyArgs = new OmKeyArgs.Builder().setVolumeName(volumeName)
        .setBucketName(bucketName).setType(HddsProtos.ReplicationType.RATIS)
        .setFactor(HddsProtos.ReplicationFactor.THREE).setKeyName(keyName)
        .build();
    OmKeyInfo keyInfo = cluster.getOzoneManager().lookupKey(keyArgs);
    Assert.assertEquals(data.length, keyInfo.getDataSize());
    validateData(keyName, data);
    cluster.restartHddsDatanode(datanodes.get(0), true);
  }

  @Test
  public void testMultiBlockWritesWithDnFailures() throws Exception {
    String keyName = "ratis3";
    OzoneOutputStream key = createKey(keyName, ReplicationType.RATIS, 0);
    String data =
        ContainerTestHelper
            .getFixedLengthString(keyString, blockSize + chunkSize);
    key.write(data.getBytes());
    // get the name of a valid container
    Assert.assertTrue(key.getOutputStream() instanceof ChunkGroupOutputStream);
    ChunkGroupOutputStream groupOutputStream =
        (ChunkGroupOutputStream) key.getOutputStream();
    List<OmKeyLocationInfo> locationInfoList =
        groupOutputStream.getLocationInfoList();
    Assert.assertTrue(locationInfoList.size() == 2);
    long containerId = locationInfoList.get(1).getContainerID();
    ContainerInfo container = cluster.getStorageContainerManager()
        .getContainerManager()
        .getContainer(ContainerID.valueof(containerId));
    Pipeline pipeline =
        cluster.getStorageContainerManager().getPipelineManager()
            .getPipeline(container.getPipelineID());
    List<DatanodeDetails> datanodes = pipeline.getNodes();
    cluster.shutdownHddsDatanode(datanodes.get(0));
    // cluster.shutdownHddsDatanode(datanodes.get(1));
    // The write will fail but exception will be handled and length will be
    // updated correctly in OzoneManager once the stream is closed
    key.write(data.getBytes());
    key.close();
    OmKeyArgs keyArgs = new OmKeyArgs.Builder().setVolumeName(volumeName)
        .setBucketName(bucketName).setType(HddsProtos.ReplicationType.RATIS)
        .setFactor(HddsProtos.ReplicationFactor.THREE).setKeyName(keyName)
        .build();
    OmKeyInfo keyInfo = cluster.getOzoneManager().lookupKey(keyArgs);
    Assert.assertEquals(2 * data.getBytes().length, keyInfo.getDataSize());
    validateData(keyName, data.concat(data).getBytes());
    cluster.restartHddsDatanode(datanodes.get(0), true);
  }

  /** Creates a key in the test bucket with the given replication type and size. */
  private OzoneOutputStream createKey(String keyName, ReplicationType type,
      long size) throws Exception {
    return ContainerTestHelper
        .createKey(keyName, type, size, objectStore, volumeName, bucketName);
  }

  /** Reads the key back and verifies its contents match the expected bytes. */
  private void validateData(String keyName, byte[] data) throws Exception {
    ContainerTestHelper
        .validateData(keyName, data, objectStore, volumeName, bucketName);
  }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.container.impl.jmx.deployment.scanning;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.camunda.bpm.container.impl.jmx.deployment.scanning.spi.ProcessApplicationScanner;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.impl.util.IoUtil;
/**
* <p>Scans for bpmn20.xml files in the classpath of the given classloader.</p>
*
* <p>Scans all branches of the classpath containing a META-INF/processes.xml
* file </p>
*
* @author Daniel Meyer
* @author Falko Menge
*/
public class ClassPathProcessApplicationScanner implements ProcessApplicationScanner {
private static Logger log = Logger.getLogger(ClassPathProcessApplicationScanner.class.getName());
/**
 * Scans the process application's resource root and returns every discovered
 * resource, keyed by resource name with the raw bytes as values.
 */
public Map<String, byte[]> findResources(ClassLoader classLoader, String paResourceRootPath, URL metaFileUrl) {
  final Map<String, byte[]> collectedResources = new HashMap<String, byte[]>();
  // delegate the actual scanning; results accumulate in 'collectedResources'
  scanPaResourceRootPath(classLoader, metaFileUrl, paResourceRootPath, collectedResources);
  return collectedResources;
}
/**
 * Dispatches the scan depending on how the resource root is configured:
 * an explicit classpath root is scanned via the classloader, while a missing
 * or PA-local ("pa:") root is scanned relative to the processes.xml location.
 */
public void scanPaResourceRootPath(final ClassLoader classLoader, final URL metaFileUrl, final String paResourceRootPath, Map<String, byte[]> resourceMap) {
  final boolean isClasspathRoot = paResourceRootPath != null && !paResourceRootPath.startsWith("pa:");
  if (isClasspathRoot) {
    // CASE 1: paResourceRootPath specified AND it is a "classpath:" resource root
    final String strippedPath = paResourceRootPath.replace("classpath:", "");
    final Enumeration<URL> resourceRoots = loadClasspathResourceRoots(classLoader, strippedPath);
    while (resourceRoots.hasMoreElements()) {
      scanUrl(resourceRoots.nextElement(), strippedPath, false, resourceMap);
    }
  } else {
    // CASE 2: no paResourceRootPath specified OR paResourceRootPath is PA-local
    String localRootPath = null;
    if (paResourceRootPath != null) {
      localRootPath = paResourceRootPath.replace("pa:", "");
      if (!localRootPath.endsWith("/")) {
        localRootPath = localRootPath + "/";
      }
    }
    scanUrl(metaFileUrl, localRootPath, true, resourceMap);
  }
}
protected void scanUrl(URL url, String paResourceRootPath, boolean isPaLocal, Map<String, byte[]> resourceMap) {
String urlPath = url.toExternalForm();
if(isPaLocal) {
if (urlPath.startsWith("file:") || urlPath.startsWith("jar:") || urlPath.startsWith("wsjar:")) {
urlPath = url.getPath();
int withinArchive = urlPath.indexOf('!');
if (withinArchive != -1) {
urlPath = urlPath.substring(0, withinArchive);
} else {
File file = new File(urlPath);
urlPath = file.getParentFile().getParent();
}
}
} else {
if (urlPath.startsWith("file:") || urlPath.startsWith("jar:") || urlPath.startsWith("wsjar:")) {
urlPath = url.getPath();
int withinArchive = urlPath.indexOf('!');
if (withinArchive != -1) {
urlPath = urlPath.substring(0, withinArchive);
}
}
}
try {
urlPath = URLDecoder.decode(urlPath, "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new ProcessEngineException("Could not decode pathname using utf-8 decoder.", e);
}
log.log(Level.FINEST, "Rootpath is {0}", urlPath);
scanPath(urlPath, paResourceRootPath, isPaLocal, resourceMap);
}
protected void scanPath(String urlPath, String paResourceRootPath, boolean isPaLocal, Map<String, byte[]> resourceMap) {
if (urlPath.startsWith("file:")) {
urlPath = urlPath.substring(5);
}
if (urlPath.indexOf('!') > 0) {
urlPath = urlPath.substring(0, urlPath.indexOf('!'));
}
File file = new File(urlPath);
if (file.isDirectory()) {
String path = file.getPath();
String rootPath = path.endsWith(File.separator) ? path : path+File.separator;
handleDirectory(file, rootPath, paResourceRootPath, paResourceRootPath, isPaLocal, resourceMap);
} else {
handleArchive(file, paResourceRootPath, resourceMap);
}
}
protected void handleArchive(File file, String paResourceRootPath, Map<String, byte[]> resourceMap) {
try {
ZipFile zipFile = new ZipFile(file);
Enumeration< ? extends ZipEntry> entries = zipFile.entries();
while (entries.hasMoreElements()) {
ZipEntry zipEntry = (ZipEntry) entries.nextElement();
String processFileName = zipEntry.getName();
if (ProcessApplicationScanningUtil.isDeployable(processFileName) && isBelowPath(processFileName, paResourceRootPath)) {
addResource(zipFile.getInputStream(zipEntry), resourceMap, file.getName()+"!", processFileName);
// find diagram(s) for process
Enumeration< ? extends ZipEntry> entries2 = zipFile.entries();
while (entries2.hasMoreElements()) {
ZipEntry zipEntry2 = (ZipEntry) entries2.nextElement();
String diagramFileName = zipEntry2.getName();
if (ProcessApplicationScanningUtil.isDiagramForProcess(diagramFileName, processFileName)) {
addResource(zipFile.getInputStream(zipEntry), resourceMap, file.getName()+"!", diagramFileName);
}
}
}
}
zipFile.close();
} catch (IOException e) {
throw new ProcessEngineException("IOException while scanning archive '"+file+"'.", e);
}
}
protected void handleDirectory(File directory, String rootPath, String localPath, String paResourceRootPath, boolean isPaLocal, Map<String, byte[]> resourceMap) {
File[] paths = directory.listFiles();
String currentPathSegment = localPath;
if (localPath != null && localPath.length() > 0) {
if (localPath.indexOf('/') > 0) {
currentPathSegment = localPath.substring(0, localPath.indexOf('/'));
localPath = localPath.substring(localPath.indexOf('/') + 1, localPath.length());
} else {
localPath = null;
}
}
for (File path : paths) {
if(isPaLocal // if it is not PA-local, we have already used the classloader to specify the root path explicitly.
&& currentPathSegment != null
&& currentPathSegment.length()>0) {
if(path.isDirectory()) {
// only descend into directory, if below resource root:
if(path.getName().equals(currentPathSegment)) {
handleDirectory(path, rootPath, localPath, paResourceRootPath, isPaLocal, resourceMap);
}
}
} else { // at resource root or below -> continue scanning
String processFileName = path.getPath();
if (!path.isDirectory() && ProcessApplicationScanningUtil.isDeployable(processFileName)) {
addResource(path, resourceMap, paResourceRootPath, processFileName.replace(rootPath, ""));
// find diagram(s) for process
for (File file : paths) {
String diagramFileName = file.getPath();
if (!path.isDirectory() && ProcessApplicationScanningUtil.isDiagramForProcess(diagramFileName, processFileName)) {
addResource(file, resourceMap, paResourceRootPath, diagramFileName.replace(rootPath, ""));
}
}
} else if (path.isDirectory()) {
handleDirectory(path, rootPath, localPath, paResourceRootPath, isPaLocal, resourceMap);
}
}
}
}
protected void addResource(Object source, Map<String, byte[]> resourceMap, String resourceRootPath, String resourceName) {
String resourcePath = (resourceRootPath == null ? "" : resourceRootPath).concat(resourceName);
log.log(Level.FINEST, "discovered process resource {0}", resourcePath);
InputStream inputStream = null;
try {
if(source instanceof File) {
try {
inputStream = new FileInputStream((File) source);
} catch (IOException e) {
throw new ProcessEngineException("Could not open file for reading "+source + ". "+e.getMessage(), e);
}
} else {
inputStream = (InputStream) source;
}
byte[] bytes = IoUtil.readInputStream(inputStream, resourcePath);
resourceMap.put(resourcePath, bytes);
} finally {
if(inputStream != null) {
IoUtil.closeSilently(inputStream);
}
}
}
protected Enumeration<URL> loadClasspathResourceRoots(final ClassLoader classLoader, String strippedPaResourceRootPath) {
Enumeration<URL> resourceRoots;
try {
resourceRoots = classLoader.getResources(strippedPaResourceRootPath);
} catch (IOException e) {
throw new ProcessEngineException("Could not load resources at '"+strippedPaResourceRootPath+"' using classloaded '"+classLoader+"'", e);
}
return resourceRoots;
}
protected boolean isBelowPath(String processFileName, String paResourceRootPath) {
if(paResourceRootPath == null || paResourceRootPath.length() ==0 ) {
return true;
} else {
return processFileName.startsWith(paResourceRootPath);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.messaging.netty;
import org.apache.storm.Config;
import org.apache.storm.grouping.Load;
import org.apache.storm.messaging.ConnectionWithStatus;
import org.apache.storm.messaging.IConnectionCallback;
import org.apache.storm.messaging.TaskMessage;
import org.apache.storm.metric.api.IMetric;
import org.apache.storm.metric.api.IStatefulObject;
import org.apache.storm.serialization.KryoValuesSerializer;
import org.apache.storm.utils.ObjectReader;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFactory;
import org.jboss.netty.channel.group.ChannelGroup;
import org.jboss.netty.channel.group.DefaultChannelGroup;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Netty-based messaging server for a worker. Accepts connections from other
 * workers, enqueues received {@link TaskMessage}s to the registered callback,
 * and reports enqueue/dequeue metrics via {@link IStatefulObject#getState()}.
 *
 * <p>Thread-safety: {@code close()} is synchronized and nulls {@code allChannels};
 * all other methods read a local snapshot of that field so they degrade
 * gracefully (no NPE) when invoked concurrently with / after close.</p>
 */
class Server extends ConnectionWithStatus implements IStatefulObject, ISaslServer {
    private static final Logger LOG = LoggerFactory.getLogger(Server.class);
    Map<String, Object> topoConf;
    int port;
    // per-sender enqueue counters; possibly lossy (see addReceiveCount) but cheap
    private final ConcurrentHashMap<String, AtomicInteger> messagesEnqueued = new ConcurrentHashMap<>();
    private final AtomicInteger messagesDequeued = new AtomicInteger(0);
    // null after close(); always read through a local snapshot
    volatile ChannelGroup allChannels = new DefaultChannelGroup("storm-server");
    final ChannelFactory factory;
    final ServerBootstrap bootstrap;
    private volatile boolean closing = false;
    List<TaskMessage> closeMessage = Arrays.asList(new TaskMessage(-1, null));
    private KryoValuesSerializer _ser;
    private IConnectionCallback _cb = null;
    // actual bound port (differs from 'port' when 0 was requested)
    private final int boundPort;

    /**
     * Creates the Netty server and immediately binds it to the given port.
     *
     * @param topoConf topology configuration (buffer size, backlog, worker threads)
     * @param port port to bind; 0 lets the OS pick one (see {@link #getPort()})
     */
    Server(Map<String, Object> topoConf, int port) {
        this.topoConf = topoConf;
        this.port = port;
        _ser = new KryoValuesSerializer(topoConf);
        // Configure the server.
        int buffer_size = ObjectReader.getInt(topoConf.get(Config.STORM_MESSAGING_NETTY_BUFFER_SIZE));
        int backlog = ObjectReader.getInt(topoConf.get(Config.STORM_MESSAGING_NETTY_SOCKET_BACKLOG), 500);
        int maxWorkers = ObjectReader.getInt(topoConf.get(Config.STORM_MESSAGING_NETTY_SERVER_WORKER_THREADS));
        ThreadFactory bossFactory = new NettyRenameThreadFactory(netty_name() + "-boss");
        ThreadFactory workerFactory = new NettyRenameThreadFactory(netty_name() + "-worker");
        // maxWorkers <= 0 means "use the Netty default worker count"
        if (maxWorkers > 0) {
            factory = new NioServerSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
                Executors.newCachedThreadPool(workerFactory), maxWorkers);
        } else {
            factory = new NioServerSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
                Executors.newCachedThreadPool(workerFactory));
        }
        LOG.info("Create Netty Server " + netty_name() + ", buffer_size: " + buffer_size + ", maxWorkers: " + maxWorkers);
        bootstrap = new ServerBootstrap(factory);
        bootstrap.setOption("child.tcpNoDelay", true);
        bootstrap.setOption("child.receiveBufferSize", buffer_size);
        bootstrap.setOption("child.keepAlive", true);
        bootstrap.setOption("backlog", backlog);
        // Set up the pipeline factory.
        bootstrap.setPipelineFactory(new StormServerPipelineFactory(this));
        // Bind and start to accept incoming connections.
        Channel channel = bootstrap.bind(new InetSocketAddress(port));
        boundPort = ((InetSocketAddress)channel.getLocalAddress()).getPort();
        allChannels.add(channel);
    }

    private void addReceiveCount(String from, int amount) {
        //This is possibly lossy in the case where a value is deleted
        // because it has received no messages over the metrics collection
        // period and new messages are starting to come in. This is
        // because I don't want the overhead of a synchronize just to have
        // the metric be absolutely perfect.
        AtomicInteger i = messagesEnqueued.get(from);
        if (i == null) {
            i = new AtomicInteger(amount);
            AtomicInteger prev = messagesEnqueued.putIfAbsent(from, i);
            if (prev != null) {
                // lost the race: fold our count into the winner's counter
                prev.addAndGet(amount);
            }
        } else {
            i.addAndGet(amount);
        }
    }

    /**
     * enqueue a received message
     * @throws InterruptedException
     */
    protected void enqueue(List<TaskMessage> msgs, String from) throws InterruptedException {
        if (null == msgs || msgs.size() == 0 || closing) {
            return;
        }
        addReceiveCount(from, msgs.size());
        if (_cb != null) {
            _cb.recv(msgs);
        }
    }

    @Override
    public void registerRecv(IConnectionCallback cb) {
        _cb = cb;
    }

    /**
     * register a newly created channel
     * @param channel newly created channel
     */
    protected void addChannel(Channel channel) {
        ChannelGroup channels = allChannels;
        if (channels != null) {
            channels.add(channel);
        }
    }

    /**
     * @param channel channel to close
     */
    public void closeChannel(Channel channel) {
        channel.close().awaitUninterruptibly();
        ChannelGroup channels = allChannels;
        if (channels != null) {
            channels.remove(channel);
        }
    }

    @Override
    public int getPort() {
        // report the actual bound port, which matters when 0 was requested
        return boundPort;
    }

    /**
     * close all channels, and release resources
     */
    public synchronized void close() {
        if (allChannels != null) {
            allChannels.close().awaitUninterruptibly();
            factory.releaseExternalResources();
            allChannels = null;
        }
    }

    @Override
    public void sendLoadMetrics(Map<Integer, Double> taskToLoad) {
        // snapshot: allChannels becomes null once the server is closed
        ChannelGroup channels = allChannels;
        if (channels == null) {
            return;
        }
        try {
            MessageBatch mb = new MessageBatch(1);
            mb.add(new TaskMessage(-1, _ser.serialize(Arrays.asList((Object)taskToLoad))));
            channels.write(mb);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public Map<Integer, Load> getLoad(Collection<Integer> tasks) {
        throw new RuntimeException("Server connection cannot get load");
    }

    @Override
    public void send(int task, byte[] message) {
        throw new UnsupportedOperationException("Server connection should not send any messages");
    }

    @Override
    public void send(Iterator<TaskMessage> msgs) {
        throw new UnsupportedOperationException("Server connection should not send any messages");
    }

    public String netty_name() {
        return "Netty-server-localhost-" + port;
    }

    @Override
    public Status status() {
        // snapshot: a concurrent close() may null out allChannels
        ChannelGroup channels = allChannels;
        if (closing || channels == null) {
            return Status.Closed;
        }
        else if (!connectionEstablished(channels)) {
            return Status.Connecting;
        }
        else {
            return Status.Ready;
        }
    }

    private boolean connectionEstablished(Channel channel) {
        return channel != null && channel.isBound();
    }

    private boolean connectionEstablished(ChannelGroup allChannels) {
        boolean allEstablished = true;
        for (Channel channel : allChannels) {
            if (!(connectionEstablished(channel))) {
                allEstablished = false;
                break;
            }
        }
        return allEstablished;
    }

    /**
     * Reports and resets enqueue/dequeue metrics. Counters that reached zero
     * are removed (races may drop a few counts, acceptable for metrics).
     */
    public Object getState() {
        LOG.debug("Getting metrics for server on port {}", port);
        HashMap<String, Object> ret = new HashMap<>();
        ret.put("dequeuedMessages", messagesDequeued.getAndSet(0));
        HashMap<String, Integer> enqueued = new HashMap<String, Integer>();
        Iterator<Map.Entry<String, AtomicInteger>> it = messagesEnqueued.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, AtomicInteger> ent = it.next();
            //Yes we can delete something that is not 0 because of races, but that is OK for metrics
            AtomicInteger i = ent.getValue();
            if (i.get() == 0) {
                it.remove();
            } else {
                enqueued.put(ent.getKey(), i.getAndSet(0));
            }
        }
        ret.put("enqueued", enqueued);
        // Report messageSizes metric, if enabled (non-null).
        if (_cb instanceof IMetric) {
            Object metrics = ((IMetric) _cb).getValueAndReset();
            if (metrics instanceof Map) {
                ret.put("messageBytes", metrics);
            }
        }
        return ret;
    }

    /** Implementing IServer. **/
    public void channelConnected(Channel c) {
        addChannel(c);
    }

    @SuppressWarnings("unchecked")
    public void received(Object message, String remote, Channel channel) throws InterruptedException {
        // the pipeline only ever delivers List<TaskMessage> here
        List<TaskMessage> msgs = (List<TaskMessage>) message;
        enqueue(msgs, remote);
    }

    public String name() {
        return (String)topoConf.get(Config.TOPOLOGY_NAME);
    }

    public String secretKey() {
        return SaslUtils.getSecretKey(topoConf);
    }

    public void authenticated(Channel c) {
        return;
    }

    @Override
    public String toString() {
        return String.format("Netty server listening on port %s", port);
    }
}
| |
package de.saxsys.jfx.chattorama.aqua;
import javafx.application.Application;
import javafx.beans.property.ReadOnlyBooleanWrapper;
import javafx.beans.property.ReadOnlyStringWrapper;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.geometry.Insets;
import javafx.geometry.Orientation;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.CheckBox;
import javafx.scene.control.ChoiceBox;
import javafx.scene.control.ColorPicker;
import javafx.scene.control.ComboBox;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.Hyperlink;
import javafx.scene.control.Label;
import javafx.scene.control.ListView;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuBar;
import javafx.scene.control.MenuItem;
import javafx.scene.control.PasswordField;
import javafx.scene.control.ProgressBar;
import javafx.scene.control.ProgressIndicator;
import javafx.scene.control.RadioButton;
import javafx.scene.control.ScrollBar;
import javafx.scene.control.Separator;
import javafx.scene.control.Slider;
import javafx.scene.control.Tab;
import javafx.scene.control.TabPane;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableColumn.CellEditEvent;
import javafx.scene.control.TableView;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.control.ToggleButton;
import javafx.scene.control.ToggleGroup;
import javafx.scene.control.ToolBar;
import javafx.scene.control.Tooltip;
import javafx.scene.control.TreeItem;
import javafx.scene.control.TreeTableColumn;
import javafx.scene.control.TreeTableColumn.CellDataFeatures;
import javafx.scene.control.TreeTableView;
import javafx.scene.control.TreeView;
import javafx.scene.control.cell.CheckBoxTableCell;
import javafx.scene.control.cell.CheckBoxTreeTableCell;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.control.cell.TextFieldTableCell;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.scene.paint.Color;
import javafx.scene.web.HTMLEditor;
import javafx.stage.Stage;
import javafx.stage.StageStyle;
import javafx.util.Callback;
import com.aquafx_project.AquaFx;
import com.aquafx_project.controls.skin.styles.ButtonType;
public class ButtonDemo extends Application {
final Tab tabH = new Tab();
final Tab tabI = new Tab();
final ObservableList<Person> data = FXCollections.observableArrayList(
new Person("John", "Doe", "john.doe@foo.com", "jd@foo.com", true),
new Person("Jane", "Doe", "jane.doe@example.com", "jane.d@foo.com", true),
new Person("Steve", "Schmidt", "steve.schmidt@example.com", null, false),
new Person("Lisa", "Jones", "lisa.jones@foo.com", "lisa.jones@foo.com", true),
new Person("Marcel", "Miller", "marcel.miller@foo.com", "", false));
@Override public void start(Stage stage) throws Exception {
AquaFx.styleStage(stage, StageStyle.UNIFIED);
BorderPane pane = new BorderPane();
ToolBar toolBar = new ToolBar();
ColorPicker colorTB = new ColorPicker(Color.rgb(194, 222, 254));
Separator seperateIt = new Separator();
ToggleGroup toolbarGroup = new ToggleGroup();
ToggleButton sampleButton4 = new ToggleButton("TG1");
sampleButton4.setToggleGroup(toolbarGroup);
sampleButton4.setSelected(true);
AquaFx.createToggleButtonStyler().setType(ButtonType.LEFT_PILL).style(sampleButton4);
ToggleButton sampleButton5 = new ToggleButton("TG2");
sampleButton5.setToggleGroup(toolbarGroup);
sampleButton5.setSelected(true);
AquaFx.createToggleButtonStyler().setType(ButtonType.CENTER_PILL).style(sampleButton5);
ToggleButton sampleButton6 = new ToggleButton("TG3");
sampleButton6.setToggleGroup(toolbarGroup);
sampleButton6.setSelected(true);
AquaFx.createToggleButtonStyler().setType(ButtonType.RIGHT_PILL).style(sampleButton6);
Separator seperateIt2 = new Separator();
Button menuPillButton1 = new Button("PB 1");
AquaFx.createButtonStyler().setType(ButtonType.LEFT_PILL).style(menuPillButton1);
Button menuPillButton2 = new Button("PB 2");
AquaFx.createButtonStyler().setType(ButtonType.CENTER_PILL).style(menuPillButton2);
Button menuPillButton3 = new Button("PB 3");
AquaFx.createButtonStyler().setType(ButtonType.RIGHT_PILL).style(menuPillButton3);
Button sampleButton = new Button("Button");
ToggleButton sampleButton1 = new ToggleButton("Toggle");
sampleButton1.setDisable(true);
ToggleButton sampleButton2 = new ToggleButton("Toggle");
ToggleButton sampleButton3 = new ToggleButton("Toggle2");
sampleButton3.setSelected(true);
toolBar.getItems().addAll(colorTB, sampleButton, sampleButton1, sampleButton2, sampleButton3, seperateIt, sampleButton4,
sampleButton5, sampleButton6, seperateIt2, menuPillButton1, menuPillButton2, menuPillButton3);
pane.setTop(toolBar);
/**
* TabPane
*/
TabPane buttonTabPane = new TabPane();
// Create Tabs
Tab tabD = new Tab();
tabD.setText("Buttons");
VBox buttonBox = new VBox();
buttonBox.setSpacing(10);
buttonBox.setPadding(new Insets(10));
Button b1 = new Button();
b1.setText("Default (push to enable Tab 'Progress')");
b1.setDefaultButton(true);
b1.setTooltip(new Tooltip("This is a ToolTip"));
b1.setOnAction(new EventHandler<ActionEvent>() {
@Override public void handle(ActionEvent event) {
tabI.setDisable(false);
tabH.setDisable(false);
}
});
buttonBox.getChildren().add(b1);
Button b2 = new Button();
b2.setText("Default");
b2.setDisable(true);
b2.setDefaultButton(true);
buttonBox.getChildren().add(b2);
Button b3 = new Button();
b3.setText("Normal (push to disable Tab 'Progress')");
b3.setOnAction(new EventHandler<ActionEvent>() {
@Override public void handle(ActionEvent event) {
tabH.setDisable(true);
}
});
buttonBox.getChildren().add(b3);
Button b4 = new Button();
b4.setText("Normal");
b4.setDisable(true);
buttonBox.getChildren().add(b4);
Button helpButton = new Button("?");
AquaFx.createButtonStyler().setType(ButtonType.HELP).style(helpButton);
buttonBox.getChildren().add(helpButton);
Hyperlink link = new Hyperlink("Hyperlink");
Hyperlink link2 = new Hyperlink("disabled Hyperlink");
link2.setDisable(true);
buttonBox.getChildren().add(link);
buttonBox.getChildren().add(link2);
ScrollBar scBar = new ScrollBar();
buttonBox.getChildren().add(scBar);
tabD.setContent(buttonBox);
buttonTabPane.getTabs().add(tabD);
Tab tabE = new Tab();
tabE.setText("RadioButtons");
VBox radioButtonBox = new VBox();
radioButtonBox.setSpacing(10);
radioButtonBox.setPadding(new Insets(10));
RadioButton raBu1 = new RadioButton("Normal");
radioButtonBox.getChildren().add(raBu1);
RadioButton raBu2 = new RadioButton("Normal");
raBu2.setDisable(true);
radioButtonBox.getChildren().add(raBu2);
RadioButton raBu3 = new RadioButton("Selected");
raBu3.setSelected(true);
radioButtonBox.getChildren().add(raBu3);
RadioButton raBu4 = new RadioButton("Selected");
raBu4.setDisable(true);
raBu4.setSelected(true);
radioButtonBox.getChildren().add(raBu4);
tabE.setContent(radioButtonBox);
buttonTabPane.getTabs().add(tabE);
Tab tabF = new Tab();
tabF.setText("CheckBoxes");
VBox checkBoxBox = new VBox();
checkBoxBox.setSpacing(10);
checkBoxBox.setPadding(new Insets(10));
CheckBox box1 = new CheckBox("Normal");
checkBoxBox.getChildren().add(box1);
CheckBox box2 = new CheckBox("Normal");
box2.setDisable(true);
checkBoxBox.getChildren().add(box2);
CheckBox box3 = new CheckBox("Selected");
box3.setSelected(true);
checkBoxBox.getChildren().add(box3);
CheckBox box4 = new CheckBox("Selected");
box4.setSelected(true);
box4.setDisable(true);
checkBoxBox.getChildren().add(box4);
CheckBox box5 = new CheckBox("Indeterminate");
box5.setIndeterminate(true);
checkBoxBox.getChildren().add(box5);
CheckBox box6 = new CheckBox("Indeterminate");
box6.setIndeterminate(true);
box6.setDisable(true);
checkBoxBox.getChildren().add(box6);
tabF.setContent(checkBoxBox);
buttonTabPane.getTabs().add(tabF);
Tab tabG = new Tab();
tabG.setText("Toggles & Pills");
VBox togglesBox = new VBox();
togglesBox.setSpacing(10);
togglesBox.setPadding(new Insets(10));
HBox toggleGroupBox = new HBox();
ToggleGroup group = new ToggleGroup();
ToggleButton tb1 = new ToggleButton("First");
tb1.setToggleGroup(group);
tb1.setSelected(true);
AquaFx.createToggleButtonStyler().setType(ButtonType.LEFT_PILL).style(tb1);
toggleGroupBox.getChildren().add(tb1);
ToggleButton tb2 = new ToggleButton("Second");
tb2.setToggleGroup(group);
AquaFx.createToggleButtonStyler().setType(ButtonType.CENTER_PILL).style(tb2);
toggleGroupBox.getChildren().add(tb2);
ToggleButton tb3 = new ToggleButton("Third");
tb3.setToggleGroup(group);
AquaFx.createToggleButtonStyler().setType(ButtonType.RIGHT_PILL).style(tb3);
toggleGroupBox.getChildren().add(tb3);
togglesBox.getChildren().add(toggleGroupBox);
ToggleButton tb4 = new ToggleButton("Alone");
tb4.setSelected(true);
togglesBox.getChildren().add(tb4);
HBox pillButtonBox = new HBox();
Button pb1 = new Button();
pb1.setText("Button 1");
pb1.setTooltip(new Tooltip("This is a ToolTip"));
AquaFx.createButtonStyler().setType(ButtonType.LEFT_PILL).style(pb1);
pillButtonBox.getChildren().add(pb1);
Button pb2 = new Button();
pb2.setText("Button 2");
pb2.setDisable(true);
AquaFx.createButtonStyler().setType(ButtonType.CENTER_PILL).style(pb2);
pillButtonBox.getChildren().add(pb2);
Button pb3 = new Button();
pb3.setText("Button 3");
AquaFx.createButtonStyler().setType(ButtonType.CENTER_PILL).style(pb3);
pillButtonBox.getChildren().add(pb3);
Button pb4 = new Button();
pb4.setText("Button 4");
AquaFx.createButtonStyler().setType(ButtonType.RIGHT_PILL).style(pb4);
pillButtonBox.getChildren().add(pb4);
togglesBox.getChildren().add(pillButtonBox);
tabG.setContent(togglesBox);
buttonTabPane.getTabs().add(tabG);
// Tab tabH = new Tab();
tabH.setText("Progress");
final Float[] values = new Float[] { -1.0f, 0f, 0.6f, 1.0f };
final ProgressBar[] pbs = new ProgressBar[values.length];
final ProgressIndicator[] pins = new ProgressIndicator[values.length];
final HBox hbs[] = new HBox[values.length];
for (int i = 0; i < values.length; i++) {
final Label label = new Label();
label.setText("progress: " + values[i]);
label.setPrefWidth(100d);
final ProgressBar pb = pbs[i] = new ProgressBar();
pb.setProgress(values[i]);
final ProgressIndicator pin = pins[i] = new ProgressIndicator();
pin.setProgress(values[i]);
final HBox hb = hbs[i] = new HBox();
hb.setSpacing(10);
hb.setAlignment(Pos.CENTER_LEFT);
hb.getChildren().addAll(label, pb, pin);
}
final VBox vb = new VBox();
vb.setSpacing(5);
vb.setPadding(new Insets(10));
vb.getChildren().addAll(hbs);
tabH.setContent(vb);
buttonTabPane.getTabs().add(tabH);
tabI.setText("Disabled Tab");
tabI.setDisable(true);
TabPane innerTabPane = new TabPane();
Label label = new Label("Lipsum");
Tab onlyTab = new Tab("single tab");
onlyTab.setContent(label);
innerTabPane.getTabs().add(onlyTab);
tabI.setContent(innerTabPane);
buttonTabPane.getTabs().add(tabI);
Tab tabTexts = new Tab();
tabTexts.setText("Texts");
VBox txts = new VBox();
HBox textfieldBox1 = new HBox();
textfieldBox1.setSpacing(10);
textfieldBox1.setPadding(new Insets(10));
Menu item1 = new Menu("test submenu");
MenuItem subMenuItem1 = new MenuItem("Sub Menu Item 1");
MenuItem subMenuItem2 = new MenuItem("Sub Menu Item 2");
MenuItem subMenuItem3 = new MenuItem("Sub Menu Item 3");
item1.getItems().addAll(subMenuItem1, subMenuItem2, subMenuItem3);
TextField tf1 = new TextField("Textfield");
ContextMenu cm = new ContextMenu(new MenuItem("test"), item1, new MenuItem("test"));
tf1.setContextMenu(cm);
textfieldBox1.getChildren().add(tf1);
TextField tf2 = new TextField();
textfieldBox1.getChildren().add(tf2);
HBox textfieldBox2 = new HBox();
textfieldBox2.setSpacing(10);
textfieldBox2.setPadding(new Insets(10));
TextField tf3 = new TextField("disabled Textfield");
tf3.setDisable(true);
tf3.setEditable(false);
textfieldBox2.getChildren().add(tf3);
TextField tf4 = new TextField();
tf4.setPromptText("prompt text");
textfieldBox2.getChildren().add(tf4);
txts.getChildren().add(textfieldBox2);
HBox textfieldBox3 = new HBox();
textfieldBox3.setSpacing(10);
textfieldBox3.setPadding(new Insets(10));
TextField tf5 = new TextField("non-editable textfield");
tf5.setEditable(false);
textfieldBox3.getChildren().add(tf5);
PasswordField pw1 = new PasswordField();
pw1.setText("password");
textfieldBox3.getChildren().add(pw1);
txts.getChildren().add(textfieldBox3);
VBox textareaBox = new VBox();
textareaBox.setSpacing(10);
textareaBox.setPadding(new Insets(10));
TextArea area = new TextArea();
area.setPromptText("TextArea with promptText");
area.setPrefWidth(290);
area.setPrefHeight(50);
area.setPrefColumnCount(80);
textareaBox.getChildren().add(area);
TextArea area2 = new TextArea();
area2.setText("Disabled");
area2.setDisable(true);
area2.setPrefWidth(290);
area2.setPrefHeight(50);
textareaBox.getChildren().add(area2);
txts.getChildren().add(textareaBox);
tabTexts.setContent(txts);
buttonTabPane.getTabs().add(tabTexts);
pane.setCenter(buttonTabPane);
TabPane tabPane = new TabPane();
Tab tabChoiceBox = new Tab();
tabChoiceBox.setText("Combo- etc");
VBox collectorVBox = new VBox();
HBox choiceBoxBox = new HBox();
choiceBoxBox.setSpacing(10);
choiceBoxBox.setPadding(new Insets(10));
ChoiceBox<String> choices = new ChoiceBox<String>(FXCollections.observableArrayList("4", "10", "12"));
choices.getSelectionModel().selectFirst();
choiceBoxBox.getChildren().add(choices);
ChoiceBox<String> choices2 = new ChoiceBox<String>(FXCollections.observableArrayList("A", "B", "C"));
choices2.getSelectionModel().selectFirst();
choices2.setDisable(true);
choiceBoxBox.getChildren().add(choices2);
collectorVBox.getChildren().add(choiceBoxBox);
ObservableList<String> items = FXCollections.observableArrayList("A", "B", "C");
HBox editableComboBoxBox =new HBox();
editableComboBoxBox.setSpacing(10);
editableComboBoxBox.setPadding(new Insets(10));
ComboBox<String> combo1 = new ComboBox<String>(items);
combo1.setEditable(true);
editableComboBoxBox.getChildren().add(combo1);
ComboBox<String> combo2 = new ComboBox<String>(items);
combo2.setDisable(true);
combo2.setEditable(true);
editableComboBoxBox.getChildren().add(combo2);
collectorVBox.getChildren().add(editableComboBoxBox);
HBox comboBoxBox = new HBox();
comboBoxBox.setSpacing(10);
comboBoxBox.setPadding(new Insets(10));
ComboBox<String> combo3 = new ComboBox<String>(items);
combo3.setPromptText("test");
combo3.setEditable(false);
comboBoxBox.getChildren().add(combo3);
ComboBox<String> combo4 = new ComboBox<String>(items);
combo4.setPromptText("test");
combo4.setEditable(false);
combo4.setDisable(true);
comboBoxBox.getChildren().add(combo4);
collectorVBox.getChildren().add(comboBoxBox);
HBox colorPickerBox = new HBox();
colorPickerBox.setSpacing(10);
colorPickerBox.setPadding(new Insets(10));
ColorPicker color = new ColorPicker(Color.rgb(194, 222, 254));
colorPickerBox.getChildren().add(color);
ColorPicker color2 = new ColorPicker(Color.rgb(194, 222, 254));
color2.getStyleClass().add("button");
colorPickerBox.getChildren().add(color2);
ColorPicker color3 = new ColorPicker(Color.rgb(194, 222, 254));
color3.getStyleClass().add("split-button");
colorPickerBox.getChildren().add(color3);
collectorVBox.getChildren().add(colorPickerBox);
tabChoiceBox.setContent(collectorVBox);
tabPane.getTabs().add(tabChoiceBox);
Tab tabHTMLBox = new Tab();
tabHTMLBox.setText("HTML");
VBox htmlbox = new VBox();
htmlbox.setPadding(new Insets(5));
HTMLEditor htmlEditor = new HTMLEditor();
htmlEditor.setPrefHeight(200);
htmlEditor.setPrefWidth(300);
htmlbox.getChildren().add(htmlEditor);
tabHTMLBox.setContent(htmlbox);
tabPane.getTabs().add(tabHTMLBox);
Tab tabSliderBox = new Tab();
tabSliderBox.setText("Sliders");
HBox slidersBox = new HBox();
slidersBox.setSpacing(10);
slidersBox.setPadding(new Insets(10));
Slider vSlider = new Slider();
vSlider.setOrientation(Orientation.VERTICAL);
slidersBox.getChildren().add(vSlider);
Slider vTickSlider = new Slider();
vTickSlider.setMin(0);
vTickSlider.setMax(100);
vTickSlider.setValue(40);
vTickSlider.setShowTickLabels(true);
vTickSlider.setShowTickMarks(true);
vTickSlider.setMajorTickUnit(50);
vTickSlider.setMinorTickCount(4);
vTickSlider.setBlockIncrement(10);
vTickSlider.setOrientation(Orientation.VERTICAL);
slidersBox.getChildren().add(vTickSlider);
VBox horizontalSliderBox = new VBox();
horizontalSliderBox.setSpacing(10);
horizontalSliderBox.setPadding(new Insets(10));
Slider simpleSlider = new Slider();
horizontalSliderBox.getChildren().add(simpleSlider);
Slider slider = new Slider();
slider.setMin(0);
slider.setMax(100);
slider.setValue(40);
slider.setShowTickLabels(true);
slider.setShowTickMarks(true);
slider.setMajorTickUnit(50);
slider.setMinorTickCount(4);
slider.setBlockIncrement(10);
horizontalSliderBox.getChildren().add(slider);
Slider simpleDisabledSlider = new Slider();
simpleDisabledSlider.setDisable(true);
horizontalSliderBox.getChildren().add(simpleDisabledSlider);
Slider disabledSlider = new Slider();
disabledSlider.setMin(0);
disabledSlider.setMax(100);
disabledSlider.setValue(40);
disabledSlider.setShowTickLabels(true);
disabledSlider.setShowTickMarks(true);
disabledSlider.setMajorTickUnit(50);
disabledSlider.setMinorTickCount(4);
disabledSlider.setBlockIncrement(10);
disabledSlider.setDisable(true);
horizontalSliderBox.getChildren().add(disabledSlider);
slidersBox.getChildren().add(horizontalSliderBox);
tabSliderBox.setContent(slidersBox);
tabPane.getTabs().add(tabSliderBox);
Tab tabTableBox = new Tab();
tabTableBox.setText("Table");
// Create a table..
HBox tableContainer = new HBox();
tableContainer.setPadding(new Insets(10));
TableView<Person> table = new TableView<Person>();
table.setPrefHeight(250);
table.setPrefWidth(650);
table.setEditable(true);
// table.getSelectionModel().setCellSelectionEnabled(true) ;
TableColumn<Person, String> firstNameCol = new TableColumn<Person, String>("First Name");
// firstNameCol.setMinWidth(100);
firstNameCol.setCellValueFactory(new PropertyValueFactory<Person, String>("firstName"));
TableColumn lastNameCol = new TableColumn("Last Name");
lastNameCol.setEditable(true);
lastNameCol.setCellFactory(TextFieldTableCell.forTableColumn());
lastNameCol.setOnEditCommit(new EventHandler<CellEditEvent<Person, String>>() {
@Override public void handle(CellEditEvent<Person, String> t) {
((Person) t.getTableView().getItems().get(t.getTablePosition().getRow())).setLastName(t.getNewValue());
}
});
lastNameCol.setCellValueFactory(new PropertyValueFactory<Person, String>("lastName"));
// TableColumn emailCol = new TableColumn("Email");
TableColumn<Person, String> firstEmailCol = new TableColumn<Person, String>("Primary");
// firstEmailCol.setMinWidth(200);
firstEmailCol.setCellValueFactory(new PropertyValueFactory<Person, String>("primaryEmail"));
TableColumn<Person, String> secondEmailCol = new TableColumn<Person, String>("Secondary");
// secondEmailCol.setMinWidth(200);
secondEmailCol.setCellValueFactory(new PropertyValueFactory<Person, String>("secondaryEmail"));
// emailCol.getColumns().addAll(firstEmailCol, secondEmailCol);
TableColumn<Person, Boolean> vipCol = new TableColumn<Person, Boolean>("VIP");
vipCol.setEditable(true);
vipCol.setCellValueFactory(new Callback<TableColumn.CellDataFeatures<Person, Boolean>, ObservableValue<Boolean>>() {
@Override public ObservableValue<Boolean> call(
javafx.scene.control.TableColumn.CellDataFeatures<Person, Boolean> param) {
return new ReadOnlyBooleanWrapper(param.getValue().getVip());
}
});
vipCol.setCellFactory(CheckBoxTableCell.forTableColumn(vipCol));
vipCol.setOnEditCommit(new EventHandler<CellEditEvent<Person, Boolean>>() {
@Override public void handle(CellEditEvent<Person, Boolean> t) {
((Person) t.getTableView().getItems().get(t.getTablePosition().getRow())).setVip(t.getNewValue());
}
});
table.getColumns().addAll(firstNameCol, lastNameCol, firstEmailCol, secondEmailCol, vipCol);
table.setItems(data);
table.setTableMenuButtonVisible(true);
tableContainer.getChildren().add(table);
tabTableBox.setContent(tableContainer);
tabPane.getTabs().add(tabTableBox);
Tab tabTreeBox = new Tab();
tabTreeBox.setText("Tree");
HBox treeContainer = new HBox();
treeContainer.setPadding(new Insets(10));
TreeItem<String> rootItem = new TreeItem<String>("People");
rootItem.setExpanded(true);
for (Person person : data) {
TreeItem<String> personLeaf = new TreeItem<String>(person.getFirstName());
boolean found = false;
for (TreeItem<String> statusNode : rootItem.getChildren()) {
if (statusNode.getValue().equals((!person.getVip() ? "no " : "") + "VIP")) {
statusNode.getChildren().add(personLeaf);
found = true;
break;
}
}
if (!found) {
TreeItem<String> statusNode = new TreeItem<String>((!person.getVip() ? "no " : "") + "VIP");
rootItem.getChildren().add(statusNode);
statusNode.getChildren().add(personLeaf);
}
}
TreeView<String> tree = new TreeView<String>(rootItem);
tree.setPrefHeight(250);
tree.setPrefWidth(400);
treeContainer.getChildren().add(tree);
tabTreeBox.setContent(treeContainer);
tabPane.getTabs().add(tabTreeBox);
Tab tabTreeTableBox = new Tab();
tabTreeTableBox.setText("TreeTable");
HBox treeTableContainer = new HBox();
treeTableContainer.setPadding(new Insets(10));
TreeItem<Person> rootTreeTableItem = new TreeItem<Person>(new Person("Chef", "Chef", "chef@business.de", "chef@business.de", true));
rootTreeTableItem.setExpanded(true);
for (Person person : data) {
TreeItem<Person> personLeaf = new TreeItem<Person>(person);
boolean found = false;
for (TreeItem<Person> statusNode : rootTreeTableItem.getChildren()) {
if (statusNode.getValue().getVip() == person.getVip()) {
statusNode.getChildren().add(personLeaf);
found = true;
break;
}
}
if (!found) {
TreeItem<Person> statusNode = new TreeItem<Person>(person);
rootTreeTableItem.getChildren().add(statusNode);
statusNode.getChildren().add(personLeaf);
}
}
TreeTableView<Person> treeTable = new TreeTableView<Person>(rootTreeTableItem);
TreeTableColumn<Person, String> firstNameTreeCol = new TreeTableColumn<Person, String>("First Name");
firstNameTreeCol.setPrefWidth(100);
firstNameTreeCol.setCellValueFactory(new Callback<TreeTableColumn.CellDataFeatures<Person, String>, ObservableValue<String>>() {
@Override public ObservableValue<String> call(CellDataFeatures<Person, String> param) {
return new ReadOnlyStringWrapper(param.getValue().getValue().getFirstName());
}
});
TreeTableColumn<Person, String> lastNameTreeCol = new TreeTableColumn<Person, String>("Last Name");
lastNameTreeCol.setCellValueFactory(new Callback<TreeTableColumn.CellDataFeatures<Person, String>, ObservableValue<String>>() {
@Override public ObservableValue<String> call(CellDataFeatures<Person, String> param) {
return new ReadOnlyStringWrapper(param.getValue().getValue().getLastName());
}
});
TreeTableColumn<Person, String> primaryMailCol = new TreeTableColumn<Person, String>("primary Mail");
primaryMailCol.setCellValueFactory(new Callback<TreeTableColumn.CellDataFeatures<Person, String>, ObservableValue<String>>() {
@Override public ObservableValue<String> call(CellDataFeatures<Person, String> param) {
return new ReadOnlyStringWrapper(param.getValue().getValue().getPrimaryEmail());
}
});
TreeTableColumn<Person, Boolean> vipTreeTableCol = new TreeTableColumn<Person, Boolean>("VIP");
vipTreeTableCol.setCellFactory(CheckBoxTreeTableCell.forTreeTableColumn(vipTreeTableCol));
vipTreeTableCol.setCellValueFactory(new Callback<TreeTableColumn.CellDataFeatures<Person, Boolean>, ObservableValue<Boolean>>() {
@Override public ObservableValue<Boolean> call(CellDataFeatures<Person, Boolean> param) {
return new ReadOnlyBooleanWrapper(param.getValue().getValue().getVip());
}
});
treeTable.getColumns().setAll(firstNameTreeCol, lastNameTreeCol, primaryMailCol, vipTreeTableCol);
treeTable.setPrefHeight(250);
treeTable.setPrefWidth(600);
treeTableContainer.getChildren().add(treeTable);
tabTreeTableBox.setContent(treeTableContainer);
tabPane.getTabs().add(tabTreeTableBox);
Tab tabListBox = new Tab();
tabListBox.setText("List");
HBox listContainer = new HBox();
listContainer.setSpacing(10);
listContainer.setPadding(new Insets(10));
ListView<String> list = new ListView<String>();
ObservableList<String> listItems = FXCollections.observableArrayList("Item 1", "Item 2", "Item 3", "Item 4");
list.setItems(listItems);
list.setPrefWidth(150);
list.setPrefHeight(70);
listContainer.getChildren().add(list);
TableView<Person> listTable = new TableView<Person>();
listTable.getStyleClass().add("hide-header");
listTable.setPrefHeight(250);
listTable.setPrefWidth(150);
TableColumn<Person, String> firstNameListCol = new TableColumn<Person, String>("First Name");
firstNameListCol.setMinWidth(100);
firstNameListCol.setCellValueFactory(new PropertyValueFactory<Person, String>("firstName"));
listTable.getColumns().add(firstNameListCol);
listTable.setItems(data);
listTable.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY);
listContainer.getChildren().add(listTable);
ListView<String> horizontalList = new ListView<String>();
horizontalList.setItems(listItems);
horizontalList.setPrefWidth(150);
horizontalList.setPrefHeight(50);
horizontalList.setOrientation(Orientation.HORIZONTAL);
listContainer.getChildren().add(horizontalList);
tabListBox.setContent(listContainer);
tabPane.getTabs().add(tabListBox);
tabPane.getSelectionModel().select(tabListBox);
pane.setBottom(tabPane);
Scene myScene = new Scene(pane, 700, 600);
MenuBar menuBar = new MenuBar();
Menu menuFile = new Menu("File");
menuFile.getItems().addAll(new MenuItem("New"), new MenuItem("Open File..."));
Menu menuEdit = new Menu("Edit");
menuEdit.getItems().addAll(new MenuItem("Undo"), new MenuItem("Redo"));
Menu menuView = new Menu("View");
menuView.getItems().addAll(new MenuItem("Zoom In"), new MenuItem("Zoom Out"));
menuBar.getMenus().addAll(menuFile, menuEdit, menuView);
pane.getChildren().add(menuBar);
AquaFx.style();
stage.setTitle("AquaFX");
stage.setScene(myScene);
stage.show();
}
/**
 * Application entry point. Delegates to the JavaFX {@code Application.launch}
 * machinery, which instantiates this application and invokes {@code start(Stage)}
 * on the JavaFX Application Thread.
 *
 * @param args command-line arguments, forwarded to the JavaFX launcher
 */
public static void main(String[] args) {
    launch(args);
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * <p>
 * Describes the snapshot created from the imported disk.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/SnapshotDetail" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SnapshotDetail implements Serializable, Cloneable {

    /** A description for the snapshot. */
    private String description;
    /** The block device mapping for the snapshot. */
    private String deviceName;
    /** The size of the disk in the snapshot, in GiB. */
    private Double diskImageSize;
    /** The format of the disk image from which the snapshot is created. */
    private String format;
    /** The percentage of progress for the task. */
    private String progress;
    /** The snapshot ID of the disk being imported. */
    private String snapshotId;
    /** A brief status of the snapshot creation. */
    private String status;
    /** A detailed status message for the snapshot creation. */
    private String statusMessage;
    /** The URL used to access the disk image. */
    private String url;
    /** The S3 bucket for the disk image. */
    private UserBucketDetails userBucket;

    /** Null-safe equality check used by {@link #equals(Object)}. */
    private static boolean eq(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    /**
     * Appends {@code label} followed by {@code value} to {@code sb} when the value is
     * non-null; a trailing comma is added for every field except the last one, matching
     * the generated-SDK string format.
     */
    private static void appendField(StringBuilder sb, String label, Object value, boolean last) {
        if (value != null) {
            sb.append(label).append(value);
            if (!last) {
                sb.append(",");
            }
        }
    }

    /** @param description A description for the snapshot. */
    public void setDescription(String description) {
        this.description = description;
    }

    /** @return A description for the snapshot. */
    public String getDescription() {
        return description;
    }

    /**
     * Fluent variant of {@link #setDescription(String)}.
     *
     * @return this object, so that method calls can be chained together.
     */
    public SnapshotDetail withDescription(String description) {
        setDescription(description);
        return this;
    }

    /** @param deviceName The block device mapping for the snapshot. */
    public void setDeviceName(String deviceName) {
        this.deviceName = deviceName;
    }

    /** @return The block device mapping for the snapshot. */
    public String getDeviceName() {
        return deviceName;
    }

    /**
     * Fluent variant of {@link #setDeviceName(String)}.
     *
     * @return this object, so that method calls can be chained together.
     */
    public SnapshotDetail withDeviceName(String deviceName) {
        setDeviceName(deviceName);
        return this;
    }

    /** @param diskImageSize The size of the disk in the snapshot, in GiB. */
    public void setDiskImageSize(Double diskImageSize) {
        this.diskImageSize = diskImageSize;
    }

    /** @return The size of the disk in the snapshot, in GiB. */
    public Double getDiskImageSize() {
        return diskImageSize;
    }

    /**
     * Fluent variant of {@link #setDiskImageSize(Double)}.
     *
     * @return this object, so that method calls can be chained together.
     */
    public SnapshotDetail withDiskImageSize(Double diskImageSize) {
        setDiskImageSize(diskImageSize);
        return this;
    }

    /** @param format The format of the disk image from which the snapshot is created. */
    public void setFormat(String format) {
        this.format = format;
    }

    /** @return The format of the disk image from which the snapshot is created. */
    public String getFormat() {
        return format;
    }

    /**
     * Fluent variant of {@link #setFormat(String)}.
     *
     * @return this object, so that method calls can be chained together.
     */
    public SnapshotDetail withFormat(String format) {
        setFormat(format);
        return this;
    }

    /** @param progress The percentage of progress for the task. */
    public void setProgress(String progress) {
        this.progress = progress;
    }

    /** @return The percentage of progress for the task. */
    public String getProgress() {
        return progress;
    }

    /**
     * Fluent variant of {@link #setProgress(String)}.
     *
     * @return this object, so that method calls can be chained together.
     */
    public SnapshotDetail withProgress(String progress) {
        setProgress(progress);
        return this;
    }

    /** @param snapshotId The snapshot ID of the disk being imported. */
    public void setSnapshotId(String snapshotId) {
        this.snapshotId = snapshotId;
    }

    /** @return The snapshot ID of the disk being imported. */
    public String getSnapshotId() {
        return snapshotId;
    }

    /**
     * Fluent variant of {@link #setSnapshotId(String)}.
     *
     * @return this object, so that method calls can be chained together.
     */
    public SnapshotDetail withSnapshotId(String snapshotId) {
        setSnapshotId(snapshotId);
        return this;
    }

    /** @param status A brief status of the snapshot creation. */
    public void setStatus(String status) {
        this.status = status;
    }

    /** @return A brief status of the snapshot creation. */
    public String getStatus() {
        return status;
    }

    /**
     * Fluent variant of {@link #setStatus(String)}.
     *
     * @return this object, so that method calls can be chained together.
     */
    public SnapshotDetail withStatus(String status) {
        setStatus(status);
        return this;
    }

    /** @param statusMessage A detailed status message for the snapshot creation. */
    public void setStatusMessage(String statusMessage) {
        this.statusMessage = statusMessage;
    }

    /** @return A detailed status message for the snapshot creation. */
    public String getStatusMessage() {
        return statusMessage;
    }

    /**
     * Fluent variant of {@link #setStatusMessage(String)}.
     *
     * @return this object, so that method calls can be chained together.
     */
    public SnapshotDetail withStatusMessage(String statusMessage) {
        setStatusMessage(statusMessage);
        return this;
    }

    /** @param url The URL used to access the disk image. */
    public void setUrl(String url) {
        this.url = url;
    }

    /** @return The URL used to access the disk image. */
    public String getUrl() {
        return url;
    }

    /**
     * Fluent variant of {@link #setUrl(String)}.
     *
     * @return this object, so that method calls can be chained together.
     */
    public SnapshotDetail withUrl(String url) {
        setUrl(url);
        return this;
    }

    /** @param userBucket The S3 bucket for the disk image. */
    public void setUserBucket(UserBucketDetails userBucket) {
        this.userBucket = userBucket;
    }

    /** @return The S3 bucket for the disk image. */
    public UserBucketDetails getUserBucket() {
        return userBucket;
    }

    /**
     * Fluent variant of {@link #setUserBucket(UserBucketDetails)}.
     *
     * @return this object, so that method calls can be chained together.
     */
    public SnapshotDetail withUserBucket(UserBucketDetails userBucket) {
        setUserBucket(userBucket);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        appendField(sb, "Description: ", getDescription(), false);
        appendField(sb, "DeviceName: ", getDeviceName(), false);
        appendField(sb, "DiskImageSize: ", getDiskImageSize(), false);
        appendField(sb, "Format: ", getFormat(), false);
        appendField(sb, "Progress: ", getProgress(), false);
        appendField(sb, "SnapshotId: ", getSnapshotId(), false);
        appendField(sb, "Status: ", getStatus(), false);
        appendField(sb, "StatusMessage: ", getStatusMessage(), false);
        appendField(sb, "Url: ", getUrl(), false);
        appendField(sb, "UserBucket: ", getUserBucket(), true);
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is null-safe, so a separate null check is unnecessary.
        if (!(obj instanceof SnapshotDetail))
            return false;
        SnapshotDetail other = (SnapshotDetail) obj;
        return eq(getDescription(), other.getDescription())
                && eq(getDeviceName(), other.getDeviceName())
                && eq(getDiskImageSize(), other.getDiskImageSize())
                && eq(getFormat(), other.getFormat())
                && eq(getProgress(), other.getProgress())
                && eq(getSnapshotId(), other.getSnapshotId())
                && eq(getStatus(), other.getStatus())
                && eq(getStatusMessage(), other.getStatusMessage())
                && eq(getUrl(), other.getUrl())
                && eq(getUserBucket(), other.getUserBucket());
    }

    @Override
    public int hashCode() {
        // Same 31-prime fold over the same field order as the generated code,
        // so hash values are unchanged.
        Object[] members = { getDescription(), getDeviceName(), getDiskImageSize(), getFormat(), getProgress(),
                getSnapshotId(), getStatus(), getStatusMessage(), getUrl(), getUserBucket() };
        int hashCode = 1;
        for (Object member : members) {
            hashCode = 31 * hashCode + ((member == null) ? 0 : member.hashCode());
        }
        return hashCode;
    }

    @Override
    public SnapshotDetail clone() {
        try {
            return (SnapshotDetail) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
package aQute.launcher.minifw;
import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.cert.X509Certificate;
import java.util.Collection;
import java.util.Collections;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import java.util.Vector;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleException;
import org.osgi.framework.BundleListener;
import org.osgi.framework.BundleReference;
import org.osgi.framework.Filter;
import org.osgi.framework.FrameworkListener;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceListener;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
import org.osgi.framework.Version;
/**
 * Minimal OSGi {@link Bundle}/{@link BundleContext} facade over a single jar file,
 * class-loaded through a plain {@link URLClassLoader}. It implements just enough of
 * the OSGi API for the mini framework launcher; service-registry and listener
 * operations are stubs that either return {@code null} or throw
 * {@link UnsupportedOperationException}.
 */
public class Context extends URLClassLoader implements Bundle, BundleContext, BundleReference {
    long id;
    MiniFramework fw;
    String location;
    int state = Bundle.INSTALLED;
    JarFile jar;
    Manifest manifest;
    // Lazily-built sorted set of every entry name in the jar; see getPaths().
    private TreeSet<String> paths;
    private File jarFile;

    /**
     * Read-only {@link Dictionary} view over the jar manifest's main attributes,
     * returned by {@link Context#getHeaders()}. Mutating operations throw.
     */
    class Dict extends Dictionary<String,String> {
        @Override
        public Enumeration<String> elements() {
            @SuppressWarnings({
                "unchecked", "rawtypes"
            })
            Enumeration<String> enumeration = (Enumeration) Collections
                .enumeration(manifest.getMainAttributes().values());
            return enumeration;
        }

        @Override
        public String get(Object key) {
            // Header lookup by name; returns null when the header is absent.
            return manifest.getMainAttributes().getValue((String) key);
        }

        @Override
        public boolean isEmpty() {
            return manifest.getMainAttributes().isEmpty();
        }

        @Override
        public Enumeration<String> keys() {
            // Manifest keys are Attributes.Name instances; expose them as strings.
            Vector<String> v = new Vector<>();
            for (Iterator<Object> i = manifest.getMainAttributes().keySet().iterator(); i.hasNext();) {
                Attributes.Name name = (Attributes.Name) i.next();
                v.add(name.toString());
            }
            return v.elements();
        }

        @Override
        public String put(String key, String value) {
            throw new UnsupportedOperationException(); // read-only view
        }

        @Override
        public String remove(Object key) {
            throw new UnsupportedOperationException(); // read-only view
        }

        @Override
        public int size() {
            return manifest.getMainAttributes().size();
        }
    }

    /**
     * Creates a bundle context backed by the jar at {@code location}.
     *
     * @param fw       the owning mini framework
     * @param parent   the parent class loader
     * @param id       the bundle id to report
     * @param location file-system path of the bundle jar
     * @throws Exception if the jar cannot be opened or its manifest read
     */
    public Context(MiniFramework fw, ClassLoader parent, int id, String location) throws Exception {
        super(new URL[] {
            new File(location).toURI().toURL()
        }, parent);
        this.fw = fw;
        this.id = id;
        this.location = location;
        // Read the manifest eagerly, then close the jar; entries are re-read on
        // demand in getPaths().
        jar = new JarFile(jarFile = new File(location));
        manifest = jar.getManifest();
        jar.close();
    }

    @Override
    public BundleContext getBundleContext() {
        return this;
    }

    @Override
    public long getBundleId() {
        return id;
    }

    @Override
    public URL getEntry(String path) {
        // Entry paths may carry a leading slash; class-loader resources do not.
        if (path.startsWith("/"))
            path = path.substring(1);
        return getResource(path);
    }

    @Override
    public Enumeration<String> getEntryPaths(String path) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Dictionary<String,String> getHeaders() {
        return new Dict();
    }

    @Override
    public Dictionary<String,String> getHeaders(String locale) {
        // Localization is not supported; same view for every locale.
        return new Dict();
    }

    @Override
    public long getLastModified() {
        return jarFile.lastModified();
    }

    @Override
    public String getLocation() {
        return location;
    }

    /**
     * Enumerates jar entries under {@code path} whose names match
     * {@code filePattern} (simple '*' wildcard; null matches everything).
     *
     * @param recurse when false, only direct children of {@code path} are returned
     */
    @Override
    public Enumeration<URL> findEntries(String path, String filePattern, boolean recurse) {
        try {
            // Normalize to "dir/" form so prefix matching works uniformly.
            if (path.startsWith("/"))
                path = path.substring(1);
            if (!path.endsWith("/"))
                path += "/";

            Vector<URL> paths = new Vector<>();
            for (Iterator<String> i = getPaths().iterator(); i.hasNext();) {
                String entry = i.next();
                if (entry.startsWith(path)) {
                    // Non-recursive: reject entries with a further '/' after the prefix.
                    if (recurse || entry.indexOf('/', path.length()) < 0) {
                        if (filePattern == null || matches(entry, filePattern)) {
                            URL url = getResource(entry);
                            if (url == null) {
                                System.err.println("Cannot load resource that should be there: " + entry);
                            } else
                                paths.add(url);
                        }
                    }
                }
            }
            return paths.elements();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Simple '*' wildcard match: each literal segment of {@code filePattern} must
     * occur in {@code path} in order; the final segment is matched with contains().
     */
    public static boolean matches(String path, String filePattern) {
        do {
            int part = filePattern.indexOf('*');
            if (part < 0) {
                return path.contains(filePattern);
            }
            String match = filePattern.substring(0, part);
            int m = path.indexOf(match);
            if (m < 0)
                return false;

            path = path.substring(m + match.length());
            filePattern = filePattern.substring(part + 1);
        } while (true);
    }

    /**
     * Returns (and caches) the names of all entries in the bundle jar.
     *
     * @throws Exception if the jar cannot be re-opened
     */
    private Collection<String> getPaths() throws Exception {
        if (paths != null)
            return paths;

        paths = new TreeSet<>();
        try (JarFile jar = new JarFile(new File(location))) {
            for (Enumeration<JarEntry> e = jar.entries(); e.hasMoreElements();) {
                ZipEntry entry = e.nextElement();
                paths.add(entry.getName());
            }
        }
        return paths;
    }

    @Override
    public ServiceReference[] getRegisteredServices() {
        return null;
    }

    @Override
    public ServiceReference[] getServicesInUse() {
        return null;
    }

    @Override
    public Map<X509Certificate,List<X509Certificate>> getSignerCertificates(int signersType) {
        throw new UnsupportedOperationException();
    }

    @Override
    public int getState() {
        return state;
    }

    @Override
    public String getSymbolicName() {
        // FIX: trim() was previously invoked before checking for a missing header,
        // so a jar without Bundle-SymbolicName caused a NullPointerException.
        String name = getHeaders().get(aQute.bnd.osgi.Constants.BUNDLE_SYMBOLICNAME);
        return name == null ? null : name.trim();
    }

    @Override
    public Version getVersion() {
        // FIX: the null check used to run after trim(), making it dead code and
        // throwing an NPE when the Bundle-Version header was absent. Default to "0".
        String v = getHeaders().get(aQute.bnd.osgi.Constants.BUNDLE_VERSION);
        if (v == null)
            return new Version("0");
        return new Version(v.trim());
    }

    @Override
    public boolean hasPermission(Object permission) {
        // No security model in the mini framework: everything is permitted.
        return true;
    }

    // ---- Lifecycle: the mini framework only tracks the state flag. ----

    @Override
    public void start() throws BundleException {
        state = Bundle.ACTIVE;
    }

    @Override
    public void start(int options) throws BundleException {
        state = Bundle.ACTIVE;
    }

    @Override
    public void stop() throws BundleException {
        state = Bundle.RESOLVED;
    }

    @Override
    public void stop(int options) throws BundleException {
        state = Bundle.RESOLVED;
    }

    @Override
    public void uninstall() throws BundleException {
        state = Bundle.UNINSTALLED;
    }

    @Override
    public void update() throws BundleException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void update(InputStream in) throws BundleException {
        throw new UnsupportedOperationException();
    }

    // ---- Listener registration is not supported. ----

    @Override
    public void addBundleListener(BundleListener listener) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void addFrameworkListener(FrameworkListener listener) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void addServiceListener(ServiceListener listener) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void addServiceListener(ServiceListener listener, String filter) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Filter createFilter(String filter) throws InvalidSyntaxException {
        throw new UnsupportedOperationException();
    }

    @Override
    public ServiceReference[] getAllServiceReferences(String clazz, String filter) throws InvalidSyntaxException {
        throw new UnsupportedOperationException();
    }

    @Override
    public Bundle getBundle() {
        return this;
    }

    @Override
    public Bundle getBundle(long id) {
        return fw.getBundle(id);
    }

    @Override
    public Bundle[] getBundles() {
        return fw.getBundles();
    }

    @Override
    public File getDataFile(String filename) {
        // No persistent storage area is provided.
        return null;
    }

    @Override
    public String getProperty(String key) {
        return fw.getProperty(key);
    }

    // ---- Service registry: no services are ever registered, so lookups
    //      return null and registrations are no-ops. ----

    @Override
    public ServiceReference getServiceReference(String clazz) {
        return null;
    }

    @Override
    public ServiceReference[] getServiceReferences(String clazz, String filter) throws InvalidSyntaxException {
        return null;
    }

    @Override
    public Bundle installBundle(String location) throws BundleException {
        return fw.installBundle(location);
    }

    @Override
    public Bundle installBundle(String location, InputStream input) throws BundleException {
        return fw.installBundle(location, input);
    }

    @Override
    public void removeBundleListener(BundleListener listener) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void removeFrameworkListener(FrameworkListener listener) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void removeServiceListener(ServiceListener listener) {
        throw new UnsupportedOperationException();
    }

    @Override
    public String toString() {
        return id + " " + location;
    }

    public int compareTo(Bundle var0) {
        // FIX: was a constant-0 stub, which makes every bundle compare equal and
        // silently drops bundles from sorted collections. Order by bundle id.
        return Long.compare(getBundleId(), var0.getBundleId());
    }

    @Override
    public ServiceRegistration registerService(String[] clazzes, Object service, Dictionary properties) {
        return null;
    }

    @Override
    public ServiceRegistration registerService(String clazz, Object service, Dictionary properties) {
        return null;
    }

    public ServiceRegistration registerService(Class< ? > clazz, Object service, Dictionary<String, ? > properties) {
        return null;
    }

    public <S> ServiceReference getServiceReference(Class<S> clazz) {
        return null;
    }

    public <S> Collection<ServiceReference> getServiceReferences(Class<S> clazz, String filter)
        throws InvalidSyntaxException {
        return null;
    }

    @Override
    public Object getService(ServiceReference reference) {
        return null;
    }

    @Override
    public boolean ungetService(ServiceReference reference) {
        return false;
    }

    public Bundle getBundle(String location) {
        return null;
    }

    public <A> A adapt(Class<A> type) {
        return null;
    }
}
| |
/**
* Opensec OVAL - https://nakamura5akihito.github.io/
* Copyright (C) 2015 Akihito Nakamura
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opensec.oval.model.independent;
import io.opensec.oval.model.ComponentType;
import io.opensec.oval.model.ElementRef;
import io.opensec.oval.model.Family;
import io.opensec.oval.model.definitions.EntityStateAnySimpleType;
import io.opensec.oval.model.definitions.EntityStateStringType;
import io.opensec.oval.model.definitions.StateType;
import java.util.ArrayList;
import java.util.Collection;
/**
* The textfilecontent state contains entities that are used to check
* the file path and name, as well as the line in question and
* the value of the specific subexpression.
*
* @author Akihito Nakamura, AIST
* @see <a href="http://oval.mitre.org/language/">OVAL Language</a>
* @deprecated Deprecated as of version 5.4:
* Replaced by the textfilecontent54 state and
* will be removed in a future version of the language.
*/
@Deprecated
public class TextfileContentState
    extends StateType
{

    // Each entity occurs at most once ({0..1}).
    private EntityStateStringType       path;
    private EntityStateStringType       filename;
    private EntityStateStringType       line;
    private EntityStateAnySimpleType    subexpression;
    private EntityStateWindowsViewType  windows_view;

    /**
     * Constructor: creates a state with no ID and version 0.
     */
    public TextfileContentState()
    {
        this( null, 0 );
    }

    /**
     * Constructor: creates a state with the given ID and version.
     */
    public TextfileContentState(
                    final String id,
                    final int version
                    )
    {
        this( id, version, null );
    }

    /**
     * Constructor: creates a state with the given ID, version, and comment.
     */
    public TextfileContentState(
                    final String id,
                    final int version,
                    final String comment
                    )
    {
        super( id, version, comment );
        _oval_family = Family.INDEPENDENT;
        _oval_component = ComponentType.TEXTFILECONTENT;
    }

    /**
     * Sets the path entity.
     */
    public void setPath(
                    final EntityStateStringType value
                    )
    {
        path = value;
    }

    public EntityStateStringType getPath()
    {
        return path;
    }

    /**
     * Sets the filename entity.
     */
    public void setFilename(
                    final EntityStateStringType value
                    )
    {
        filename = value;
    }

    public EntityStateStringType getFilename()
    {
        return filename;
    }

    /**
     * Sets the line entity.
     */
    public void setLine(
                    final EntityStateStringType value
                    )
    {
        line = value;
    }

    public EntityStateStringType getLine()
    {
        return line;
    }

    /**
     * Sets the subexpression entity.
     */
    public void setSubexpression(
                    final EntityStateAnySimpleType value
                    )
    {
        subexpression = value;
    }

    public EntityStateAnySimpleType getSubexpression()
    {
        return subexpression;
    }

    /**
     * Sets the windows_view entity.
     */
    public void setWindowsView(
                    final EntityStateWindowsViewType value
                    )
    {
        windows_view = value;
    }

    public EntityStateWindowsViewType getWindowsView()
    {
        return windows_view;
    }

    //*********************************************************************
    //  DefinitionsElement
    //*********************************************************************

    @Override
    public Collection<ElementRef> ovalGetElementRef()
    {
        // Collect references to every entity, in declaration order.
        // Entities that were never set contribute null elements, as in the
        // sibling state classes.
        final Collection<ElementRef> refs = new ArrayList<ElementRef>();
        refs.add( getPath() );
        refs.add( getFilename() );
        refs.add( getLine() );
        refs.add( getSubexpression() );
        refs.add( getWindowsView() );

        return refs;
    }

    //**************************************************************
    //  java.lang.Object
    //**************************************************************

    @Override
    public int hashCode()
    {
        return super.hashCode();
    }

    @Override
    public boolean equals(
                    final Object obj
                    )
    {
        // Equality is type-gated here and otherwise delegated to StateType.
        if (obj instanceof TextfileContentState) {
            return super.equals( obj );
        }

        return false;
    }

    @Override
    public String toString()
    {
        StringBuilder  s = new StringBuilder( "textfilecontent_state[" );
        s.append( super.toString() );
        s.append( ", path=" ).append( getPath() );
        s.append( ", filename=" ).append( getFilename() );
        s.append( ", line=" ).append( getLine() );
        s.append( ", subexpression=" ).append( getSubexpression() );
        s.append( ", windows_view=" ).append( getWindowsView() );
        s.append( "]" );

        return s.toString();
    }

}
// TextFileContentState
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.snapshots.SnapshotState;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* A class that represents the data in a repository, as captured in the
* repository's index blob.
*/
public final class RepositoryData {

    /**
     * The generation value indicating the repository has no index generational files.
     */
    public static final long EMPTY_REPO_GEN = -1L;

    /**
     * An instance initialized for an empty repository.
     */
    public static final RepositoryData EMPTY = new RepositoryData(EMPTY_REPO_GEN,
        Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyList());

    /**
     * The generational id of the index file from which the repository data was read.
     */
    private final long genId;

    /**
     * The ids of the snapshots in the repository, keyed by snapshot UUID.
     */
    private final Map<String, SnapshotId> snapshotIds;

    /**
     * The states of each snapshot in the repository, keyed by snapshot UUID.
     */
    private final Map<String, SnapshotState> snapshotStates;

    /**
     * The indices found in the repository across all snapshots, as a name to {@link IndexId} mapping.
     */
    private final Map<String, IndexId> indices;

    /**
     * The snapshots that each index belongs to.
     */
    private final Map<IndexId, Set<SnapshotId>> indexSnapshots;

    /**
     * The snapshots that are no longer compatible with the current cluster ES version.
     */
    private final List<SnapshotId> incompatibleSnapshotIds;

    public RepositoryData(long genId,
                          Map<String, SnapshotId> snapshotIds,
                          Map<String, SnapshotState> snapshotStates,
                          Map<IndexId, Set<SnapshotId>> indexSnapshots,
                          List<SnapshotId> incompatibleSnapshotIds) {
        this.genId = genId;
        this.snapshotIds = Collections.unmodifiableMap(snapshotIds);
        this.snapshotStates = Collections.unmodifiableMap(snapshotStates);
        // Derive the name -> IndexId lookup from the index-to-snapshots mapping.
        this.indices = Collections.unmodifiableMap(indexSnapshots.keySet().stream()
            .collect(Collectors.toMap(IndexId::getName, Function.identity())));
        this.indexSnapshots = Collections.unmodifiableMap(indexSnapshots);
        this.incompatibleSnapshotIds = Collections.unmodifiableList(incompatibleSnapshotIds);
    }

    protected RepositoryData copy() {
        return new RepositoryData(genId, snapshotIds, snapshotStates, indexSnapshots, incompatibleSnapshotIds);
    }

    /**
     * Gets the generational index file id from which this instance was read.
     */
    public long getGenId() {
        return genId;
    }

    /**
     * Returns an unmodifiable collection of the snapshot ids.
     */
    public Collection<SnapshotId> getSnapshotIds() {
        return Collections.unmodifiableCollection(snapshotIds.values());
    }

    /**
     * Returns an immutable collection of the snapshot ids in the repository that are incompatible with the
     * current ES version.
     */
    public Collection<SnapshotId> getIncompatibleSnapshotIds() {
        return incompatibleSnapshotIds;
    }

    /**
     * Returns an immutable collection of all the snapshot ids in the repository, both active and
     * incompatible snapshots.
     */
    public Collection<SnapshotId> getAllSnapshotIds() {
        List<SnapshotId> allSnapshotIds = new ArrayList<>(snapshotIds.size() + incompatibleSnapshotIds.size());
        allSnapshotIds.addAll(snapshotIds.values());
        allSnapshotIds.addAll(incompatibleSnapshotIds);
        return Collections.unmodifiableList(allSnapshotIds);
    }

    /**
     * Returns the {@link SnapshotState} for the given snapshot. Returns {@code null} if
     * there is no state for the snapshot.
     */
    @Nullable
    public SnapshotState getSnapshotState(final SnapshotId snapshotId) {
        return snapshotStates.get(snapshotId.getUUID());
    }

    /**
     * Returns an unmodifiable map of the index names to {@link IndexId} in the repository.
     */
    public Map<String, IndexId> getIndices() {
        return indices;
    }

    /**
     * Add a snapshot and its indices to the repository; returns a new instance. If the snapshot
     * already exists in the repository data, the operation is a no-op returning {@code this}.
     */
    public RepositoryData addSnapshot(final SnapshotId snapshotId,
                                      final SnapshotState snapshotState,
                                      final List<IndexId> snapshottedIndices) {
        if (snapshotIds.containsKey(snapshotId.getUUID())) {
            // if the snapshot id already exists in the repository data, it means an old master
            // that is blocked from the cluster is trying to finalize a snapshot concurrently with
            // the new master, so we make the operation idempotent
            return this;
        }
        Map<String, SnapshotId> snapshots = new HashMap<>(snapshotIds);
        snapshots.put(snapshotId.getUUID(), snapshotId);
        Map<String, SnapshotState> newSnapshotStates = new HashMap<>(snapshotStates);
        newSnapshotStates.put(snapshotId.getUUID(), snapshotState);
        Map<IndexId, Set<SnapshotId>> allIndexSnapshots = new HashMap<>(indexSnapshots);
        for (final IndexId indexId : snapshottedIndices) {
            // computeIfAbsent covers both a missing key and a key mapped to null; this
            // replaces the redundant containsKey + null-check combination with identical
            // behavior: reuse the existing set when present, otherwise create one.
            allIndexSnapshots.computeIfAbsent(indexId, k -> new LinkedHashSet<>()).add(snapshotId);
        }
        return new RepositoryData(genId, snapshots, newSnapshotStates, allIndexSnapshots, incompatibleSnapshotIds);
    }

    /**
     * Remove a snapshot and remove any indices that no longer exist in the repository due to the deletion of the snapshot.
     */
    public RepositoryData removeSnapshot(final SnapshotId snapshotId) {
        Map<String, SnapshotId> newSnapshotIds = snapshotIds.values().stream()
            .filter(id -> !snapshotId.equals(id))
            .collect(Collectors.toMap(SnapshotId::getUUID, Function.identity()));
        if (newSnapshotIds.size() == snapshotIds.size()) {
            throw new ResourceNotFoundException("Attempting to remove non-existent snapshot [{}] from repository data", snapshotId);
        }
        Map<String, SnapshotState> newSnapshotStates = new HashMap<>(snapshotStates);
        newSnapshotStates.remove(snapshotId.getUUID());
        Map<IndexId, Set<SnapshotId>> updatedIndexSnapshots = new HashMap<>();
        for (final IndexId indexId : indices.values()) {
            Set<SnapshotId> set;
            // renamed from "snapshotIds" to avoid shadowing the field of the same name
            Set<SnapshotId> snapshotsForIndex = this.indexSnapshots.get(indexId);
            assert snapshotsForIndex != null;
            if (snapshotsForIndex.contains(snapshotId)) {
                if (snapshotsForIndex.size() == 1) {
                    // removing the snapshot will mean no more snapshots
                    // have this index, so just skip over it
                    continue;
                }
                set = new LinkedHashSet<>(snapshotsForIndex);
                set.remove(snapshotId);
            } else {
                set = snapshotsForIndex;
            }
            updatedIndexSnapshots.put(indexId, set);
        }
        return new RepositoryData(genId, newSnapshotIds, newSnapshotStates, updatedIndexSnapshots, incompatibleSnapshotIds);
    }

    /**
     * Returns an immutable collection of the snapshot ids for the snapshots that contain the given index.
     *
     * @throws IllegalArgumentException if the index is unknown to this repository
     */
    public Set<SnapshotId> getSnapshots(final IndexId indexId) {
        Set<SnapshotId> snapshotIds = indexSnapshots.get(indexId);
        if (snapshotIds == null) {
            throw new IllegalArgumentException("unknown snapshot index " + indexId);
        }
        return snapshotIds;
    }

    /**
     * Initializes the indices in the repository metadata; returns a new instance.
     */
    public RepositoryData initIndices(final Map<IndexId, Set<SnapshotId>> indexSnapshots) {
        return new RepositoryData(genId, snapshotIds, snapshotStates, indexSnapshots, incompatibleSnapshotIds);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        // The cast is fully checked by the getClass() comparison above, so the previous
        // @SuppressWarnings("unchecked") was unnecessary and has been removed.
        RepositoryData that = (RepositoryData) obj;
        // note: genId does not participate in equality, matching hashCode below
        return snapshotIds.equals(that.snapshotIds)
                   && snapshotStates.equals(that.snapshotStates)
                   && indices.equals(that.indices)
                   && indexSnapshots.equals(that.indexSnapshots)
                   && incompatibleSnapshotIds.equals(that.incompatibleSnapshotIds);
    }

    @Override
    public int hashCode() {
        return Objects.hash(snapshotIds, snapshotStates, indices, indexSnapshots, incompatibleSnapshotIds);
    }

    /**
     * Resolve the index name to the index id specific to the repository,
     * throwing an exception if the index could not be resolved.
     */
    public IndexId resolveIndexId(final String indexName) {
        if (indices.containsKey(indexName)) {
            return indices.get(indexName);
        } else {
            // on repositories created before 5.0, there was no indices information in the index
            // blob, so if the repository hasn't been updated with new snapshots, no new index blob
            // would have been written, so we only have old snapshots without the index information.
            // in this case, the index id is just the index name
            return new IndexId(indexName, indexName);
        }
    }

    /**
     * Resolve the given index names to index ids.
     */
    public List<IndexId> resolveIndices(final List<String> indices) {
        List<IndexId> resolvedIndices = new ArrayList<>(indices.size());
        for (final String indexName : indices) {
            resolvedIndices.add(resolveIndexId(indexName));
        }
        return resolvedIndices;
    }

    /**
     * Resolve the given index names to index ids, creating new index ids for
     * new indices in the repository.
     */
    public List<IndexId> resolveNewIndices(final List<String> indicesToResolve) {
        List<IndexId> snapshotIndices = new ArrayList<>();
        for (String index : indicesToResolve) {
            final IndexId indexId;
            if (indices.containsKey(index)) {
                indexId = indices.get(index);
            } else {
                indexId = new IndexId(index, UUIDs.randomBase64UUID());
            }
            snapshotIndices.add(indexId);
        }
        return snapshotIndices;
    }

    /**
     * Returns a new {@link RepositoryData} instance containing the same snapshot data as the
     * invoking instance, with the given incompatible snapshots added to the new instance.
     */
    public RepositoryData addIncompatibleSnapshots(final List<SnapshotId> incompatibleSnapshotIds) {
        List<SnapshotId> newSnapshotIds = new ArrayList<>(this.snapshotIds.values());
        List<SnapshotId> newIncompatibleSnapshotIds = new ArrayList<>(this.incompatibleSnapshotIds);
        // Move each newly-incompatible snapshot out of the active set.
        for (SnapshotId snapshotId : incompatibleSnapshotIds) {
            newSnapshotIds.remove(snapshotId);
            newIncompatibleSnapshotIds.add(snapshotId);
        }
        Map<String, SnapshotId> snapshotMap = newSnapshotIds.stream().collect(Collectors.toMap(SnapshotId::getUUID, Function.identity()));
        return new RepositoryData(this.genId, snapshotMap, this.snapshotStates, this.indexSnapshots, newIncompatibleSnapshotIds);
    }

    private static final String SNAPSHOTS = "snapshots";
    private static final String INCOMPATIBLE_SNAPSHOTS = "incompatible-snapshots";
    private static final String INDICES = "indices";
    private static final String INDEX_ID = "id";
    private static final String NAME = "name";
    private static final String UUID = "uuid";
    private static final String STATE = "state";

    /**
     * Writes the snapshots metadata and the related indices metadata to x-content, omitting the
     * incompatible snapshots.
     */
    public XContentBuilder snapshotsToXContent(final XContentBuilder builder, final ToXContent.Params params) throws IOException {
        builder.startObject();
        // write the snapshots list
        builder.startArray(SNAPSHOTS);
        for (final SnapshotId snapshot : getSnapshotIds()) {
            builder.startObject();
            builder.field(NAME, snapshot.getName());
            builder.field(UUID, snapshot.getUUID());
            if (snapshotStates.containsKey(snapshot.getUUID())) {
                builder.field(STATE, snapshotStates.get(snapshot.getUUID()).value());
            }
            builder.endObject();
        }
        builder.endArray();
        // write the indices map
        builder.startObject(INDICES);
        for (final IndexId indexId : getIndices().values()) {
            builder.startObject(indexId.getName());
            builder.field(INDEX_ID, indexId.getId());
            builder.startArray(SNAPSHOTS);
            // renamed from "snapshotIds" to avoid shadowing the field of the same name
            Set<SnapshotId> indexSnapshotIds = indexSnapshots.get(indexId);
            assert indexSnapshotIds != null;
            for (final SnapshotId snapshotId : indexSnapshotIds) {
                builder.value(snapshotId.getUUID());
            }
            builder.endArray();
            builder.endObject();
        }
        builder.endObject();
        builder.endObject();
        return builder;
    }

    /**
     * Reads an instance of {@link RepositoryData} from x-content, loading the snapshots and indices metadata.
     */
    public static RepositoryData snapshotsFromXContent(final XContentParser parser, long genId) throws IOException {
        Map<String, SnapshotId> snapshots = new LinkedHashMap<>();
        Map<String, SnapshotState> snapshotStates = new HashMap<>();
        Map<IndexId, Set<SnapshotId>> indexSnapshots = new HashMap<>();
        if (parser.nextToken() == XContentParser.Token.START_OBJECT) {
            while (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
                String field = parser.currentName();
                if (SNAPSHOTS.equals(field)) {
                    if (parser.nextToken() == XContentParser.Token.START_ARRAY) {
                        while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                            final SnapshotId snapshotId;
                            // the new format from 5.0 which contains the snapshot name and uuid
                            if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                                String name = null;
                                String uuid = null;
                                SnapshotState state = null;
                                while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                                    String currentFieldName = parser.currentName();
                                    parser.nextToken();
                                    if (NAME.equals(currentFieldName)) {
                                        name = parser.text();
                                    } else if (UUID.equals(currentFieldName)) {
                                        uuid = parser.text();
                                    } else if (STATE.equals(currentFieldName)) {
                                        state = SnapshotState.fromValue(parser.numberValue().byteValue());
                                    }
                                }
                                snapshotId = new SnapshotId(name, uuid);
                                if (state != null) {
                                    snapshotStates.put(uuid, state);
                                }
                            } else {
                                // the old format pre 5.0 that only contains the snapshot name, use the name as the uuid too
                                final String name = parser.text();
                                snapshotId = new SnapshotId(name, name);
                            }
                            snapshots.put(snapshotId.getUUID(), snapshotId);
                        }
                    } else {
                        throw new ElasticsearchParseException("expected array for [" + field + "]");
                    }
                } else if (INDICES.equals(field)) {
                    if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
                        throw new ElasticsearchParseException("start object expected [indices]");
                    }
                    while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                        String indexName = parser.currentName();
                        String indexId = null;
                        Set<SnapshotId> snapshotIds = new LinkedHashSet<>();
                        if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
                            throw new ElasticsearchParseException("start object expected index[" + indexName + "]");
                        }
                        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                            String indexMetaFieldName = parser.currentName();
                            parser.nextToken();
                            if (INDEX_ID.equals(indexMetaFieldName)) {
                                indexId = parser.text();
                            } else if (SNAPSHOTS.equals(indexMetaFieldName)) {
                                if (parser.currentToken() != XContentParser.Token.START_ARRAY) {
                                    throw new ElasticsearchParseException("start array expected [snapshots]");
                                }
                                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                                    String uuid = null;
                                    // the old format pre 5.4.1 which contains the snapshot name and uuid
                                    if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                                        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                                            String currentFieldName = parser.currentName();
                                            parser.nextToken();
                                            if (UUID.equals(currentFieldName)) {
                                                uuid = parser.text();
                                            }
                                        }
                                    } else {
                                        // the new format post 5.4.1 that only contains the snapshot uuid,
                                        // since we already have the name/uuid combo in the snapshots array
                                        uuid = parser.text();
                                    }
                                    // NOTE(review): an unknown uuid would insert null here; presumably the
                                    // snapshots array always precedes the indices map in the blob — confirm.
                                    snapshotIds.add(snapshots.get(uuid));
                                }
                            }
                        }
                        assert indexId != null;
                        indexSnapshots.put(new IndexId(indexName, indexId), snapshotIds);
                    }
                } else {
                    throw new ElasticsearchParseException("unknown field name  [" + field + "]");
                }
            }
        } else {
            throw new ElasticsearchParseException("start object expected");
        }
        return new RepositoryData(genId, snapshots, snapshotStates, indexSnapshots, Collections.emptyList());
    }

    /**
     * Writes the incompatible snapshot ids to x-content.
     */
    public XContentBuilder incompatibleSnapshotsToXContent(final XContentBuilder builder, final ToXContent.Params params)
        throws IOException {
        builder.startObject();
        // write the incompatible snapshots list
        builder.startArray(INCOMPATIBLE_SNAPSHOTS);
        for (final SnapshotId snapshot : getIncompatibleSnapshotIds()) {
            snapshot.toXContent(builder, params);
        }
        builder.endArray();
        builder.endObject();
        return builder;
    }

    /**
     * Reads the incompatible snapshot ids from x-content, loading them into a new instance of {@link RepositoryData}
     * that is created from the invoking instance, plus the incompatible snapshots that are read from x-content.
     */
    public RepositoryData incompatibleSnapshotsFromXContent(final XContentParser parser) throws IOException {
        List<SnapshotId> incompatibleSnapshotIds = new ArrayList<>();
        if (parser.nextToken() == XContentParser.Token.START_OBJECT) {
            while (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
                String currentFieldName = parser.currentName();
                if (INCOMPATIBLE_SNAPSHOTS.equals(currentFieldName)) {
                    if (parser.nextToken() == XContentParser.Token.START_ARRAY) {
                        while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                            incompatibleSnapshotIds.add(SnapshotId.fromXContent(parser));
                        }
                    } else {
                        throw new ElasticsearchParseException("expected array for [" + currentFieldName + "]");
                    }
                } else {
                    throw new ElasticsearchParseException("unknown field name  [" + currentFieldName + "]");
                }
            }
        } else {
            throw new ElasticsearchParseException("start object expected");
        }
        return new RepositoryData(this.genId, this.snapshotIds, this.snapshotStates, this.indexSnapshots, incompatibleSnapshotIds);
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticmapreduce.model;
import java.io.Serializable;
/**
* <p>
* <b>NOTE:</b> Amazon EMR releases 4.x or later.
* </p>
* <p>
* Specifies a hardware and software configuration of the EMR cluster.
* This includes configurations for applications and software bundled
* with Amazon EMR. The Configuration object is a JSON object which is
* defined by a classification and a set of properties. Configurations
* can be nested, so a configuration may have its own Configuration
* objects listed.
* </p>
*/
public class Configuration implements Serializable, Cloneable {

    /**
     * The classification of a configuration. For more information see, <a
     * href="http://docs.aws.amazon.com/ElasticMapReduce/latest/API/EmrConfigurations.html">Amazon
     * EMR Configurations</a>.
     */
    private String classification;

    /**
     * A list of configurations you apply to this configuration object.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<Configuration> configurations;

    /**
     * A set of properties supplied to the Configuration object.
     */
    private java.util.Map<String,String> properties;

    /**
     * The classification of a configuration. For more information see, <a
     * href="http://docs.aws.amazon.com/ElasticMapReduce/latest/API/EmrConfigurations.html">Amazon
     * EMR Configurations</a>.
     *
     * @return The classification of a configuration. For more information see, <a
     *         href="http://docs.aws.amazon.com/ElasticMapReduce/latest/API/EmrConfigurations.html">Amazon
     *         EMR Configurations</a>.
     */
    public String getClassification() {
        return classification;
    }

    /**
     * The classification of a configuration. For more information see, <a
     * href="http://docs.aws.amazon.com/ElasticMapReduce/latest/API/EmrConfigurations.html">Amazon
     * EMR Configurations</a>.
     *
     * @param classification The classification of a configuration. For more information see, <a
     *        href="http://docs.aws.amazon.com/ElasticMapReduce/latest/API/EmrConfigurations.html">Amazon
     *        EMR Configurations</a>.
     */
    public void setClassification(String classification) {
        this.classification = classification;
    }

    /**
     * The classification of a configuration. For more information see, <a
     * href="http://docs.aws.amazon.com/ElasticMapReduce/latest/API/EmrConfigurations.html">Amazon
     * EMR Configurations</a>.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param classification The classification of a configuration. For more information see, <a
     *        href="http://docs.aws.amazon.com/ElasticMapReduce/latest/API/EmrConfigurations.html">Amazon
     *        EMR Configurations</a>.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public Configuration withClassification(String classification) {
        this.classification = classification;
        return this;
    }

    /**
     * A list of configurations you apply to this configuration object.
     * Lazily initializes (and never returns null) the backing auto-construct list.
     *
     * @return A list of configurations you apply to this configuration object.
     */
    public java.util.List<Configuration> getConfigurations() {
        if (configurations == null) {
            configurations = new com.amazonaws.internal.ListWithAutoConstructFlag<Configuration>();
            configurations.setAutoConstruct(true);
        }
        return configurations;
    }

    /**
     * A list of configurations you apply to this configuration object.
     *
     * @param configurations A list of configurations you apply to this configuration object.
     */
    public void setConfigurations(java.util.Collection<Configuration> configurations) {
        if (configurations == null) {
            this.configurations = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection has no effect here.
        com.amazonaws.internal.ListWithAutoConstructFlag<Configuration> configurationsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Configuration>(configurations.size());
        configurationsCopy.addAll(configurations);
        this.configurations = configurationsCopy;
    }

    /**
     * A list of configurations you apply to this configuration object.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setConfigurations(java.util.Collection)} or {@link
     * #withConfigurations(java.util.Collection)} if you want to override the
     * existing values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param configurations A list of configurations you apply to this configuration object.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public Configuration withConfigurations(Configuration... configurations) {
        // getConfigurations() lazily creates the backing list and never returns null,
        // so the former "if (getConfigurations() == null)" guard was dead code.
        for (Configuration value : configurations) {
            getConfigurations().add(value);
        }
        return this;
    }

    /**
     * A list of configurations you apply to this configuration object.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param configurations A list of configurations you apply to this configuration object.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public Configuration withConfigurations(java.util.Collection<Configuration> configurations) {
        if (configurations == null) {
            this.configurations = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<Configuration> configurationsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Configuration>(configurations.size());
            configurationsCopy.addAll(configurations);
            this.configurations = configurationsCopy;
        }
        return this;
    }

    /**
     * A set of properties supplied to the Configuration object.
     * Lazily initializes (and never returns null) the backing map.
     *
     * @return A set of properties supplied to the Configuration object.
     */
    public java.util.Map<String,String> getProperties() {
        if (properties == null) {
            properties = new java.util.HashMap<String,String>();
        }
        return properties;
    }

    /**
     * A set of properties supplied to the Configuration object.
     *
     * @param properties A set of properties supplied to the Configuration object.
     */
    public void setProperties(java.util.Map<String,String> properties) {
        this.properties = properties;
    }

    /**
     * A set of properties supplied to the Configuration object.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param properties A set of properties supplied to the Configuration object.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public Configuration withProperties(java.util.Map<String,String> properties) {
        setProperties(properties);
        return this;
    }

    /**
     * A set of properties supplied to the Configuration object.
     * <p>
     * The method adds a new key-value pair into Properties parameter, and
     * returns a reference to this object so that method calls can be chained
     * together.
     *
     * @param key The key of the entry to be added into Properties.
     * @param value The corresponding value of the entry to be added into Properties.
     *
     * @throws IllegalArgumentException if the key has already been added.
     */
    public Configuration addPropertiesEntry(String key, String value) {
        if (null == this.properties) {
            this.properties = new java.util.HashMap<String,String>();
        }
        if (this.properties.containsKey(key)) {
            // "key" is already a String; the former key.toString() was redundant
            // and would have thrown NPE for a duplicate null key.
            throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
        }
        this.properties.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Properties.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     */
    public Configuration clearPropertiesEntries() {
        this.properties = null;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getClassification() != null) sb.append("Classification: " + getClassification() + ",");
        if (getConfigurations() != null) sb.append("Configurations: " + getConfigurations() + ",");
        if (getProperties() != null) sb.append("Properties: " + getProperties() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getClassification() == null) ? 0 : getClassification().hashCode());
        hashCode = prime * hashCode + ((getConfigurations() == null) ? 0 : getConfigurations().hashCode());
        hashCode = prime * hashCode + ((getProperties() == null) ? 0 : getProperties().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;

        if (obj instanceof Configuration == false) return false;
        Configuration other = (Configuration)obj;

        // XOR detects "exactly one side null"; then non-null values are compared.
        if (other.getClassification() == null ^ this.getClassification() == null) return false;
        if (other.getClassification() != null && other.getClassification().equals(this.getClassification()) == false) return false;
        if (other.getConfigurations() == null ^ this.getConfigurations() == null) return false;
        if (other.getConfigurations() != null && other.getConfigurations().equals(this.getConfigurations()) == false) return false;
        if (other.getProperties() == null ^ this.getProperties() == null) return false;
        if (other.getProperties() != null && other.getProperties().equals(this.getProperties()) == false) return false;
        return true;
    }

    @Override
    public Configuration clone() {
        try {
            return (Configuration) super.clone();

        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }
    }
}
| |
package org.jolokia.docker.maven.access;
import java.io.*;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jolokia.docker.maven.model.Container.PortBinding;
import org.jolokia.docker.maven.util.EnvUtil;
/**
 * Entity holding port mappings which can be set through the configuration.
 *
 * @author roland
 * @since 04.04.14
 */
public class PortMapping {
    // Pattern for splitting off an optional "/tcp" or "/udp" protocol suffix
    private static final Pattern PROTOCOL_SPLIT_PATTERN = Pattern.compile("(.*?)(?:/(tcp|udp))?$");
    // Mapping between ports and the IP they should bind to
    private final Map<String, String> bindToHostMap = new HashMap<>();
    // ports map (container port -> host port)
    private final Map<String, Integer> containerPortToHostPort = new HashMap<>();
    // resolved dynamic properties
    private final Properties dynamicProperties = new Properties();
    // Mapping between property name and host ip (ip filled in after container creation)
    private final Map<String, String> hostIpVariableMap = new HashMap<>();
    // Mapping between property name and host port (port filled in after container creation)
    private final Map<String, Integer> hostPortVariableMap = new HashMap<>();
    // project properties
    private final Properties projProperties;
    // variables (container port spec -> host ip variable name)
    private final Map<String, String> specToHostIpVariableMap = new HashMap<>();
    // variables (container port spec -> host port variable name)
    private final Map<String, String> specToHostPortVariableMap = new HashMap<>();
    /**
     * Create the mapping from a configuration. The configuration is a list of port mapping specifications which has
     * the format used by docker for port mapping (i.e. host_ip:host_port:container_port)
     * <ul>
     * <li>The "host_ip" part is optional. If not given, the all interfaces are used</li>
     * <li>If "host_port" is non numeric it is taken as a variable name. If this variable is given as value in
     * variables, this number is used as host port. If no numeric value is given, it is considered to be filled with the
     * real, dynamically created port value when {@link #updateVariablesWithDynamicPorts(Map)} is called</li>
     * </ul>
     *
     * @param portMappings a list of configuration strings where each string has the format
     *                     <code>host_ip:host_port:container_port</code>. If the <code>host-port</code> is non-numeric it is
     *                     assumed to be a variable (which later might be filled in with the dynamically created port).
     * @param projProperties project properties
     * @throws IllegalArgumentException if the format doesn't fit
     */
    public PortMapping(List<String> portMappings, Properties projProperties) {
        this.projProperties = projProperties;
        for (String portMapping : portMappings) {
            parsePortMapping(portMapping);
        }
    }
    /**
     * Whether this mapping contains host IPs that are only known after container creation.
     *
     * @return true if at least one host IP must be resolved dynamically
     */
    public boolean containsDynamicHostIps() {
        return !specToHostIpVariableMap.isEmpty();
    }
    /**
     * Whether this mapping contains dynamically assigned ports
     *
     * @return dynamically assigned ports
     */
    public boolean containsDynamicPorts() {
        return !specToHostPortVariableMap.isEmpty();
    }
    /**
     * @return Set of all mapped container ports
     */
    public Set<String> getContainerPorts() {
        return containerPortToHostPort.keySet();
    }
    /**
     * @return map from container port specification to host port (value may be null for
     *         ports that are assigned dynamically and not yet resolved)
     */
    public Map<String, Integer> getContainerPortToHostPortMap() {
        return containerPortToHostPort;
    }
    /**
     * Update variable-to-port mappings with dynamically obtained ports. This should only be called once after the
     * dynamic ports could be obtained.
     *
     * @param dockerObtainedDynamicPorts keys are the container ports, values are the dynamically mapped host ports,
     */
    public void updateVariablesWithDynamicPorts(Map<String, PortBinding> dockerObtainedDynamicPorts) {
        for (Map.Entry<String, PortBinding> entry : dockerObtainedDynamicPorts.entrySet()) {
            String variable = entry.getKey();
            PortBinding portBinding = entry.getValue();
            if (portBinding != null) {
                update(hostPortVariableMap, specToHostPortVariableMap.get(variable), portBinding.getHostPort());
                String hostIp = portBinding.getHostIp();
                // Use the docker host if binding is on all interfaces
                if ("0.0.0.0".equals(hostIp)) {
                    hostIp = projProperties.getProperty("docker.host.address");
                }
                update(hostIpVariableMap, specToHostIpVariableMap.get(variable), hostIp);
            }
        }
        // Propagate the now-resolved values into the project and dynamic properties
        updateDynamicProperties(hostPortVariableMap);
        updateDynamicProperties(hostIpVariableMap);
    }
    // visible for testing
    Map<String, String> getBindToHostMap() {
        return bindToHostMap;
    }
    // visible for testing
    Map<String, String> getHostIpVariableMap() {
        return hostIpVariableMap;
    }
    // visible for testing
    Map<String, Integer> getHostPortVariableMap() {
        return hostPortVariableMap;
    }
    // visible for testing
    Map<String, Integer> getPortsMap() {
        return containerPortToHostPort;
    }
    // Builds the user-facing error for a malformed mapping string. The originating
    // NumberFormatException (may be null) is chained as the cause so the root
    // failure is not lost from stack traces.
    private IllegalArgumentException createInvalidMappingError(String mapping, NumberFormatException exp) {
        return new IllegalArgumentException("\nInvalid port mapping '" + mapping + "'\n" +
                                            "Required format: '<+bindTo>:<hostPort>:<mappedPort>(/tcp|udp)'\n" +
                                            "See: https://github.com/rhuss/docker-maven-plugin/blob/master/doc/manual.md#port-mapping",
                                            exp);
    }
    // Dispatches on the number of colon-separated parts: 3 parts include a bind
    // address (or host variable), 2 parts are plain host:container.
    private void createMapping(String[] parts, String protocol) {
        if (parts.length == 3) {
            mapBindToAndHostPortSpec(parts[0], parts[1], createPortSpec(parts[2], protocol));
        } else {
            mapHostPortToSpec(parts[0], createPortSpec(parts[1], protocol));
        }
    }
    // Normalizes a container port to the canonical "port/protocol" form used as map key.
    private String createPortSpec(String port, String protocol) throws NumberFormatException {
        return Integer.parseInt(port) + "/" + protocol;
    }
    // Lenient integer parse: returns null instead of throwing on non-numeric input.
    private Integer getAsIntOrNull(String val) {
        try {
            return Integer.parseInt(val);
        } catch (@SuppressWarnings("unused") NumberFormatException exp) {
            return null;
        }
    }
    // Check for a variable containing a port, return it as integer or <code>null</code> is not found or not a number
    // First check system properties, then the variables given
    private Integer getPortFromVariableOrSystemProperty(String var) {
        String sysProp = System.getProperty(var);
        if (sysProp != null) {
            return getAsIntOrNull(sysProp);
        }
        if (projProperties.containsKey(var)) {
            return getAsIntOrNull(projProperties.getProperty(var));
        }
        return null;
    }
    // Strips a Maven "${...}" wrapper if present, otherwise uses the name as-is.
    private String extractPortPropertyName(String name) {
        String mavenPropName = EnvUtil.extractMavenPropertyName(name);
        return mavenPropName != null ? mavenPropName : name;
    }
    // Handles the 3-part form: registers the host port and resolves the bind
    // address, which is either a literal host/IP or a property name ("+name" or
    // "${name}") filled in after container creation.
    private void mapBindToAndHostPortSpec(String bindTo, String hPort, String portSpec) {
        mapHostPortToSpec(hPort, portSpec);
        String hostPropName = extractHostPropertyName(bindTo);
        if (hostPropName != null) {
            String host = projProperties.getProperty(hostPropName);
            if (host != null) {
                // the container portSpec can never be null, so use that as the key
                bindToHostMap.put(portSpec, resolveHostname(host));
            }
            specToHostIpVariableMap.put(portSpec, hostPropName);
        } else {
            // the container portSpec can never be null, so use that as the key
            bindToHostMap.put(portSpec, resolveHostname(bindTo));
        }
    }
    // Returns the property name for a dynamic bind address ("+name" or "${name}"),
    // or null if the argument is a literal host.
    private String extractHostPropertyName(String name) {
        if (name.startsWith("+")) {
            return name.substring(1);
        } else {
            return EnvUtil.extractMavenPropertyName(name);
        }
    }
    // Maps a host port (numeric, or a variable name resolved now or later) to the
    // container port spec.
    private void mapHostPortToSpec(String hPort, String portSpec) {
        Integer hostPort;
        try {
            hostPort = Integer.parseInt(hPort);
        } catch (@SuppressWarnings("unused") NumberFormatException exp) {
            // Port should be dynamically assigned and set to the variable give in hPort
            String portPropertyName = extractPortPropertyName(hPort);
            hostPort = getPortFromVariableOrSystemProperty(portPropertyName);
            if (hostPort != null) {
                // hPort: Variable name, hostPort: Port coming from the variable
                hostPortVariableMap.put(portPropertyName, hostPort);
            } else {
                // containerPort: Port from container, hPort: Variable name to be filled later on
                specToHostPortVariableMap.put(portSpec, portPropertyName);
            }
        }
        containerPortToHostPort.put(portSpec, hostPort);
    }
    // Parses one "host_ip:host_port:container_port(/tcp|udp)" string; the protocol
    // suffix is split off first and defaults to "tcp".
    private void parsePortMapping(String input) throws IllegalArgumentException {
        try {
            Matcher matcher = PROTOCOL_SPLIT_PATTERN.matcher(input);
            if (input.indexOf(':') == -1 || !matcher.matches()) {
                throw createInvalidMappingError(input, null);
            }
            String mapping = matcher.group(1);
            String protocol = matcher.group(2);
            if (protocol == null) {
                protocol = "tcp";
            }
            createMapping(mapping.split(":", 3), protocol);
        } catch (NumberFormatException exp) {
            throw createInvalidMappingError(input, exp);
        }
    }
    // Resolves a host name to its IP address; fails fast so a bad bind address is
    // reported at configuration time rather than at container start.
    private String resolveHostname(String bindToHost) {
        try {
            return InetAddress.getByName(bindToHost).getHostAddress();
        } catch (@SuppressWarnings("unused") UnknownHostException e) {
            throw new IllegalArgumentException("Host '" + bindToHost + "' to bind to cannot be resolved");
        }
    }
    // Null-safe put: skips entries whose spec had no associated variable.
    private <T> void update(Map<String, T> map, String key, T value) {
        if (key != null) {
            map.put(key, value);
        }
    }
    // Copies resolved variable values into both the project properties (for use in
    // the build) and the dynamic properties (for export to files).
    private void updateDynamicProperties(Map<String, ?> dynamicPorts) {
        for (Map.Entry<String, ?> entry : dynamicPorts.entrySet()) {
            String var = entry.getKey();
            String val = entry.getValue().toString();
            projProperties.setProperty(var, val);
            dynamicProperties.setProperty(var, val);
        }
    }
    /**
     * Helper collecting the dynamic properties of one or more {@link PortMapping}s
     * and writing them out as Java properties files — optionally one file per
     * mapping plus an aggregated global file.
     */
    public static class PropertyWriteHelper {
        // aggregated properties destined for the global file
        private final Properties globalExport;
        // path of the global properties file, or null if none should be written
        private final String globalFile;
        // per-file properties: file path -> properties to store there
        private final Map<String, Properties> toExport;
        /**
         * @param globalFile path of an aggregated properties file, or null to skip it
         */
        public PropertyWriteHelper(String globalFile) {
            this.globalFile = globalFile;
            this.toExport = new HashMap<>();
            this.globalExport = new Properties();
        }
        /**
         * Registers a mapping's dynamic properties for export. With a per-mapping
         * file they go there (and later also into the global file); otherwise they
         * go directly into the global file if one is configured.
         *
         * @param portMapping mapping whose resolved dynamic properties are exported
         * @param portPropertyFile per-mapping properties file, or null
         */
        public void add(PortMapping portMapping, String portPropertyFile) {
            if (portPropertyFile != null) {
                toExport.put(portPropertyFile, portMapping.dynamicProperties);
            } else if (globalFile != null) {
                globalExport.putAll(portMapping.dynamicProperties);
            }
        }
        /**
         * Writes all registered per-file properties and, if configured and non-empty,
         * the aggregated global properties file.
         *
         * @throws IOException if any file cannot be written
         */
        public void write() throws IOException {
            for (Map.Entry<String, Properties> entry : toExport.entrySet()) {
                Properties props = entry.getValue();
                writeProperties(props, entry.getKey());
                globalExport.putAll(props);
            }
            if (globalFile != null && !globalExport.isEmpty()) {
                writeProperties(globalExport, globalFile);
            }
        }
        // Stores the properties, wrapping any failure with the offending file path.
        private void writeProperties(Properties props, String file) throws IOException {
            File propFile = new File(file);
            try (OutputStream os = new FileOutputStream(propFile)) {
                props.store(os, "Docker ports");
            } catch (IOException e) {
                throw new IOException("Cannot write properties to " + file + ": " + e, e);
            }
        }
    }
}
| |
/*
* Copyright 2005,2014 WSO2, Inc. http://www.wso2.org
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.utils.logging;
import org.testng.annotations.Test;
import java.util.List;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
/**
*
*/
public class CircularBufferTest {
private static final int MAX_ALLOWED_SIZE = 10000;
/**
* Test if the append method has appended an element to the buffer by returning that element
* from the buffer
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testAppend() {
CircularBuffer<String> buffer = new CircularBuffer<String>(5);
assertEquals(buffer.getSize(), 5, "Buffer is not initialized with expected size.");
buffer.append("item");
assertNotNull(buffer.get(1), "Items were not appended.");
}
/**
* Test if appending a null element throws an IllegalArgumentException
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void testAppendNullElement() {
CircularBuffer<String> buffer = new CircularBuffer<String>(5);
assertEquals(buffer.getSize(), 5, "Buffer is not initialized with expected size.");
buffer.append(null);
}
/**
* Test whether the buffer can return amount of elements less than the capacity of the buffer
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetLessThanBufferCapacity1() {
CircularBuffer<String> buffer = new CircularBuffer<String>(10);
assertEquals(buffer.getSize(), 10, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 11; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(5).size(), 5, "Returned an unexpected amount!");
}
/**
* Test whether when 0 elements are retrieved, 0 elements are returned
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetLessThanBufferCapacity2() {
CircularBuffer<String> buffer = new CircularBuffer<String>(10);
assertEquals(buffer.getSize(), 10, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 11; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(0).size(), 0,
"Returned an unexpected amount! Should return 0 elements.");
}
/**
* Test whether when 1 element is retrieved, 1 element is returned
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetLessThanBufferCapacity3() {
CircularBuffer<String> buffer = new CircularBuffer<String>(10);
assertEquals(buffer.getSize(), 10, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 11; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(1).size(), 1, "Returned an unexpected amount!");
}
/**
* Test returning no. of items remaining to the right of startIndex is greater than the amount
* to be retrieved
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetLessThanBufferCapacity4() {
CircularBuffer<String> buffer = new CircularBuffer<String>(10);
assertEquals(buffer.getSize(), 10, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 11; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(8).size(), 8, "Returned an unexpected amount!");
}
/**
* Test returning no. of items remaining to the right of startIndex is less than the amount to
* be retrieved
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetLessThanBufferCapacity5() {
CircularBuffer<String> buffer = new CircularBuffer<String>(10);
assertEquals(buffer.getSize(), 10, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 15; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(8).size(), 8, "Returned an unexpected amount!");
}
/**
* Test whether the buffer can return amount of elements less than the capacity of the buffer if
* it is not completely filled
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetLessThanBufferCapacity6() {
CircularBuffer<String> buffer = new CircularBuffer<String>(10);
assertEquals(buffer.getSize(), 10, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 5; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(5).size(), 5, "Returned an unexpected amount!");
}
/**
* Test whether the buffer can return amount of elements less than the capacity of the buffer if
* it is not completely filled
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetLessThanBufferCapacity7() {
CircularBuffer<String> buffer = new CircularBuffer<String>(10);
assertEquals(buffer.getSize(), 10, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 5; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(3).size(), 3, "Returned an unexpected amount!");
}
/**
* Test whether the buffer can return same amount as the capacity of the buffer if it is fully
* filled without exercising the circular nature
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetEqualToBufferCapacity1() {
CircularBuffer<String> buffer = new CircularBuffer<String>(10);
assertEquals(buffer.getSize(), 10, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 11; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(10).size(), 10, "Returned an unexpected amount!");
}
/**
* Test whether the buffer can return same amount as the capacity of the buffer if it is fully
* filled
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetEqualToBufferCapacity2() {
CircularBuffer<String> buffer = new CircularBuffer<String>(10);
assertEquals(buffer.getSize(), 10, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 15; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(10).size(), 10, "Returned an unexpected amount!");
}
/**
* Test how many elements the buffer return, if an amount more than its capacity is retrieved
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetMoreThanBufferCapacity1() {
CircularBuffer<String> buffer = new CircularBuffer<String>(5);
assertEquals(buffer.getSize(), 5, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 11; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(8).size(), 5,
"Returned an unexpected amount! Should have returned the buffer capacity " +
"instead.");
}
/**
* This test is created to identify an edge case. When the buffer is initialized to any amount
* (eg: 5) and then any amount higher than that (eg: 12) is added to the buffer, and later when
* an amount (1 less than the inserted number of items, eg: 10 ) is retrieved the buffer should
* not return a wrong number of items.
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetMoreThanBufferCapacity2() {
CircularBuffer<String> buffer = new CircularBuffer<String>(5);
assertEquals(buffer.getSize(), 5, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 11; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(10).size(), 5,
"Returned an unexpected amount! Should have returned the buffer capacity " +
"instead.");
}
/**
* Test how many elements the buffer return, if an amount more than its capacity is retrieved
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetMoreThanBufferCapacity3() {
CircularBuffer<String> buffer = new CircularBuffer<String>(10);
assertEquals(buffer.getSize(), 10, "Buffer is not initialized with expected size.");
for (int i = 0; i < 10; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(15).size(), 10,
"Returned an unexpected amount! Should have returned the buffer capacity " +
"instead.");
}
/**
* Test if the buffer returns an empty list if a negative amount is requested.
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetNegativeAmount() {
CircularBuffer<String> buffer = new CircularBuffer<String>(5);
assertEquals(buffer.getSize(), 5, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 10; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(-1).size(), 0,
"Returned an unexpected amount! Should have returned the buffer capacity " +
"instead.");
}
/**
* Test throwing an IllegalArgumentException when 0 sized buffer is initialized.
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void testIllegalArgumentExceptionFromSizeZero() {
CircularBuffer<String> buffer = new CircularBuffer<String>(0);
}
/**
* Test initializing a maximum sized buffer.
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testInitializingMaximumSizedBuffer() {
CircularBuffer<String> buffer = new CircularBuffer<String>();
assertEquals(buffer.getSize(), MAX_ALLOWED_SIZE,
"Buffer is not initialized with maximum allowed size.");
}
/**
* Test throwing an IllegalArgumentException when the buffer is initialized with more than
* allowed maximum size.
*/
@Test(expectedExceptions = IllegalArgumentException.class)
public void testIllegalArgumentExceptionSizeGreaterThanMaxAllowed() {
CircularBuffer<String> buffer = new CircularBuffer<String>(MAX_ALLOWED_SIZE + 1000);
}
/**
* Test returning an object array with the given amount.
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testGetObjects() {
CircularBuffer<String> buffer = new CircularBuffer<String>(5);
assertEquals(buffer.getSize(), 5, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 10; i++) {
buffer.append("item" + i);
}
Object[] expected = new Object[3];
expected[0] = "item6";
expected[1] = "item7";
expected[2] = "item8";
Object[] result = buffer.getObjects(3);
assertEquals(result.length, 3, "Unexpected number of items was returned.");
assertEquals(result, expected, "Unexpected objects array was returned.");
}
/**
* Test clearing the buffer after populating it. This should result in an empty buffer.
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testClear() {
CircularBuffer<String> buffer = new CircularBuffer<String>(5);
assertEquals(buffer.getSize(), 5, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 10; i++) {
buffer.append("item" + i);
}
assertEquals(buffer.get(3).size(), 3, "Returned an unexpected amount!");
buffer.clear();
assertEquals(buffer.get(3).size(), 0, "Returned an unexpected amount!");
}
/**
* Test if the returned items are in proper order.
*/
@Test(groups = {"org.wso2.carbon.utils.logging"},
description = "")
public void testOrderOfItems() {
CircularBuffer<String> buffer = new CircularBuffer<String>(5);
assertEquals(buffer.getSize(), 5, "Buffer is not initialized with expected size.");
for (int i = 0; i <= 6; i++) {
buffer.append("item" + i);
}
List<String> result = buffer.get(3);
assertEquals(result.size(), 3, "Returned an unexpected amount!");
assertEquals(result.get(0), "item2", "");
assertEquals(result.get(1), "item3", "");
assertEquals(result.get(2), "item4", "");
}
}
| |
/*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.v7.shared.ui.colorpicker;
import java.io.Serializable;
/**
 * Default implementation for color.
 *
 * @since 7.0.0
 */
@Deprecated
public class Color implements Serializable {

    public static final Color WHITE = new Color(255, 255, 255);
    public static final Color BLACK = new Color(0, 0, 0);
    public static final Color RED = new Color(255, 0, 0);
    public static final Color GREEN = new Color(0, 255, 0);
    public static final Color BLUE = new Color(0, 0, 255);
    public static final Color YELLOW = new Color(255, 255, 0);
    public static final Color MAGENTA = new Color(255, 0, 255);
    public static final Color CYAN = new Color(0, 255, 255);

    private static final String OUTOFRANGE = "Value must be within the range [0-255]. Was: ";

    private int red;
    private int green;
    private int blue;
    private int alpha;

    /**
     * Creates a color with the specified red, green, blue and alpha channels,
     * each within the range [0 - 255].
     *
     * @param red
     *            the red value
     * @param green
     *            the green value
     * @param blue
     *            the blue value
     * @param alpha
     *            the alpha value
     * @throws IllegalArgumentException
     *             if any channel falls outside of the inclusive range from 0 to
     *             255
     */
    public Color(int red, int green, int blue, int alpha) {
        checkRange(red, green, blue, alpha);
        this.red = red;
        this.green = green;
        this.blue = blue;
        this.alpha = alpha;
    }

    /**
     * Creates a color with the specified red, green and blue channels, each
     * within the range [0 - 255]. Alpha gets the default value of 255.
     *
     * @param red
     *            the red value
     * @param green
     *            the green value
     * @param blue
     *            the blue value
     * @throws IllegalArgumentException
     *             if any channel falls outside of the inclusive range from 0 to
     *             255
     */
    public Color(int red, int green, int blue) {
        this(red, green, blue, 255);
    }

    /**
     * Creates a color based on an RGB value. Only the low 24 bits are used; the
     * resulting color is always fully opaque (alpha 255).
     *
     * @param rgb
     *            the RGB value
     * @throws IllegalArgumentException
     *             if the extracted channel values fall outside of the inclusive
     *             range from 0 to 255
     */
    public Color(int rgb) {
        // The top byte of rgb is replaced by 0xFF, so alpha is always 255.
        this((rgb >> 16) & 0xFF, (rgb >> 8) & 0xFF, rgb & 0xFF, 0xFF);
    }

    /**
     * Checks that all values are within the acceptable range of [0, 255].
     *
     * @throws IllegalArgumentException
     *             if any of the values fall outside of the range
     */
    private void checkRange(int red, int green, int blue, int alpha) {
        if (withinRange(red) && withinRange(green) && withinRange(blue)
                && withinRange(alpha)) {
            return;
        }
        throw new IllegalArgumentException(
                "All values must fall within range [0-255]. (red: " + red
                        + ", green: " + green + ", blue: " + blue + ", alpha: "
                        + alpha + ")");
    }

    /**
     * Checks whether the value is within the acceptable range of [0, 255].
     *
     * @param value
     *            the channel value to check
     * @return true if the value falls within the range, false otherwise
     */
    private boolean withinRange(int value) {
        return value >= 0 && value <= 255;
    }

    /**
     * Returns the red value of the color.
     */
    public int getRed() {
        return red;
    }

    /**
     * Sets the red value of the color. Value must be within the range [0, 255].
     *
     * @param red
     *            new red value
     */
    public void setRed(int red) {
        if (!withinRange(red)) {
            throw new IllegalArgumentException(OUTOFRANGE + red);
        }
        this.red = red;
    }

    /**
     * Returns the green value of the color.
     */
    public int getGreen() {
        return green;
    }

    /**
     * Sets the green value of the color. Value must be within the range [0,
     * 255].
     *
     * @param green
     *            new green value
     */
    public void setGreen(int green) {
        if (!withinRange(green)) {
            throw new IllegalArgumentException(OUTOFRANGE + green);
        }
        this.green = green;
    }

    /**
     * Returns the blue value of the color.
     */
    public int getBlue() {
        return blue;
    }

    /**
     * Sets the blue value of the color. Value must be within the range [0,
     * 255].
     *
     * @param blue
     *            new blue value
     */
    public void setBlue(int blue) {
        if (!withinRange(blue)) {
            throw new IllegalArgumentException(OUTOFRANGE + blue);
        }
        this.blue = blue;
    }

    /**
     * Returns the alpha value of the color.
     */
    public int getAlpha() {
        return alpha;
    }

    /**
     * Sets the alpha value of the color. Value must be within the range [0,
     * 255].
     *
     * @param alpha
     *            new alpha value
     */
    public void setAlpha(int alpha) {
        if (!withinRange(alpha)) {
            throw new IllegalArgumentException(OUTOFRANGE + alpha);
        }
        this.alpha = alpha;
    }

    /**
     * Returns CSS representation of the Color, e.g. #000000.
     */
    public String getCSS() {
        // Channels are guaranteed to be 0-255, so each renders as exactly two
        // lowercase, zero-padded hex digits.
        return String.format("#%02x%02x%02x", red, green, blue);
    }

    /**
     * Returns RGB value of the color.
     */
    public int getRGB() {
        return ((alpha & 0xFF) << 24) | ((red & 0xFF) << 16)
                | ((green & 0xFF) << 8) | (blue & 0xFF);
    }

    /**
     * Returns converted HSV components of the color as {hue, saturation,
     * value}, each in the range [0.0-1.0].
     */
    public float[] getHSV() {
        int maxColor = Math.max(red, Math.max(green, blue));
        int minColor = Math.min(red, Math.min(green, blue));

        float value = maxColor / 255.0f;
        float saturation = (maxColor == 0) ? 0
                : ((float) (maxColor - minColor)) / ((float) maxColor);

        float hue = 0;
        if (saturation != 0) {
            float redF = ((float) (maxColor - red))
                    / ((float) (maxColor - minColor));
            float greenF = ((float) (maxColor - green))
                    / ((float) (maxColor - minColor));
            float blueF = ((float) (maxColor - blue))
                    / ((float) (maxColor - minColor));
            if (red == maxColor) {
                hue = blueF - greenF;
            } else if (green == maxColor) {
                hue = 2.0f + redF - blueF;
            } else {
                hue = 4.0f + greenF - redF;
            }
            hue = hue / 6.0f;
            if (hue < 0) {
                hue = hue + 1.0f;
            }
        }
        return new float[] { hue, saturation, value };
    }

    @Override
    public int hashCode() {
        return getRGB();
    }

    @Override
    public boolean equals(Object obj) {
        return obj instanceof Color && ((Color) obj).getRGB() == getRGB();
    }

    /**
     * <p>
     * Converts HSV's hue, saturation and value into an RGB value.
     * <p>
     * The <code>saturation</code> and <code>value</code> components should be
     * floating-point values within the range [0.0-1.0].
     * <p>
     *
     * @param hue
     *            the hue of the color
     * @param saturation
     *            the saturation of the color
     * @param value
     *            the value of the color
     * @return the RGB value of corresponding color
     */
    public static int HSVtoRGB(float hue, float saturation, float value) {
        int red = 0;
        int green = 0;
        int blue = 0;
        if (saturation == 0) {
            // Achromatic: all channels equal the value.
            red = green = blue = (int) (value * 255.0f + 0.5f);
        } else {
            float h = (hue - (float) Math.floor(hue)) * 6.0f;
            float f = h - (float) Math.floor(h);
            float p = value * (1.0f - saturation);
            float q = value * (1.0f - saturation * f);
            float t = value * (1.0f - (saturation * (1.0f - f)));
            // Scale each candidate component to 0-255 once, then pick the
            // (r, g, b) permutation that matches the hue sector.
            int vi = (int) (value * 255.0f + 0.5f);
            int pi = (int) (p * 255.0f + 0.5f);
            int qi = (int) (q * 255.0f + 0.5f);
            int ti = (int) (t * 255.0f + 0.5f);
            switch ((int) h) {
            case 0:
                red = vi;
                green = ti;
                blue = pi;
                break;
            case 1:
                red = qi;
                green = vi;
                blue = pi;
                break;
            case 2:
                red = pi;
                green = vi;
                blue = ti;
                break;
            case 3:
                red = pi;
                green = qi;
                blue = vi;
                break;
            case 4:
                red = ti;
                green = pi;
                blue = vi;
                break;
            case 5:
                red = vi;
                green = pi;
                blue = qi;
                break;
            }
        }
        return 0xff000000 | (red << 16) | (green << 8) | blue;
    }

    /**
     * <p>
     * Converts HSL's hue, saturation and lightness into an RGB value.
     *
     * @param hue
     *            the hue of the color. The unit of the value is degrees and
     *            should be between 0-360.
     * @param saturation
     *            the saturation of the color. The unit of the value is
     *            percentages and should be between 0-100;
     * @param lightness
     *            the lightness of the color. The unit of the value is
     *            percentages and should be between 0-100;
     *
     * @return the RGB value of corresponding color
     */
    public static int HSLtoRGB(int hue, int saturation, int lightness) {
        float hueRatio = hue / 360f;
        float saturationRatio = saturation / 100f;
        float lightnessRatio = lightness / 100f;
        int red;
        int green;
        int blue;
        if (saturationRatio == 0) {
            // Achromatic: all channels equal the lightness.
            red = green = blue = (int) (lightnessRatio * 255.0f + 0.5f);
        } else {
            float p = lightnessRatio < 0.5f
                    ? lightnessRatio * (1f + saturationRatio)
                    : lightnessRatio + saturationRatio
                            - lightnessRatio * saturationRatio;
            float q = 2 * lightnessRatio - p;
            red = hslComponentToRgbComponent(p, q, hueRatio + (1f / 3f));
            green = hslComponentToRgbComponent(p, q, hueRatio);
            blue = hslComponentToRgbComponent(p, q, hueRatio - (1f / 3f));
        }
        return 0xff000000 | (red << 16) | (green << 8) | blue;
    }

    // Converts one HSL helper value to a 0-255 channel, wrapping the hue ratio
    // into [0, 1] first.
    private static int hslComponentToRgbComponent(float p, float q,
            float ratio) {
        if (ratio < 0) {
            ratio += 1;
        } else if (ratio > 1) {
            ratio -= 1;
        }
        if (6 * ratio < 1f) {
            return (int) ((q + (p - q) * 6f * ratio) * 255f + 0.5f);
        }
        if (2f * ratio < 1f) {
            return (int) (p * 255f + 0.5f);
        }
        if (3f * ratio < 2f) {
            return (int) ((q + (p - q) * ((2f / 3f) - ratio) * 6f) * 255f
                    + 0.5f);
        }
        return (int) (q * 255f + 0.5f);
    }
}
| |
package com.aviary.android.feather.effects;
import it.sephiroth.android.library.imagezoom.ImageViewTouch;
import it.sephiroth.android.library.imagezoom.ImageViewTouchBase.OnBitmapChangedListener;
import android.content.Context;
import android.content.res.ColorStateList;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.os.ResultReceiver;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.TextView.OnEditorActionListener;
import com.aviary.android.feather.R;
import com.aviary.android.feather.graphics.RepeatableHorizontalDrawable;
import com.aviary.android.feather.library.filters.FilterLoaderFactory;
import com.aviary.android.feather.library.filters.FilterLoaderFactory.Filters;
import com.aviary.android.feather.library.filters.MemeFilter;
import com.aviary.android.feather.library.graphics.drawable.EditableDrawable;
import com.aviary.android.feather.library.graphics.drawable.MemeTextDrawable;
import com.aviary.android.feather.library.moa.MoaActionList;
import com.aviary.android.feather.library.services.ConfigService;
import com.aviary.android.feather.library.services.EffectContext;
import com.aviary.android.feather.library.utils.BitmapUtils;
import com.aviary.android.feather.library.utils.MatrixUtils;
import com.aviary.android.feather.utils.TypefaceUtils;
import com.aviary.android.feather.widget.DrawableHighlightView;
import com.aviary.android.feather.widget.ImageViewDrawableOverlay;
import com.aviary.android.feather.widget.ImageViewDrawableOverlay.OnDrawableEventListener;
import com.aviary.android.feather.widget.ImageViewDrawableOverlay.OnLayoutListener;
/**
 * The Class MemePanel.
 * <p>
 * Feather effect panel that overlays two draggable, editable meme texts (top
 * and bottom) on the current image. Text input is driven through two invisible
 * EditTexts (soft-keyboard proxies); on result generation the texts are
 * flattened onto the preview bitmap and encoded into a {@link MemeFilter}.
 */
public class MemePanel extends AbstractContentPanel implements OnEditorActionListener, OnClickListener, OnDrawableEventListener,
        OnLayoutListener {

    /** Option-panel buttons mirroring the current top/bottom text; tapping one begins editing. */
    Button editTopButton, editBottomButton;
    /** Invisible EditTexts used only as soft-keyboard input proxies for the two texts. */
    EditText editTopText, editBottomText;
    InputMethodManager mInputManager;
    /** Canvas bound to mPreview; text is drawn onto it in flattenText(). */
    Canvas mCanvas;
    /** Highlight views hosting the draggable top/bottom MemeTextDrawables. */
    DrawableHighlightView topHv, bottomHv;
    /** Typeface used for both texts, loaded from assets in createTypeFace(). */
    Typeface mTypeface;
    /** Asset name of the meme font, read from the config service. */
    String fontName;
    /** Buttons that clear the corresponding text and end its edit session. */
    Button clearButtonTop, clearButtonBottom;

    /**
     * Instantiates a new meme panel.
     *
     * @param context
     *           the context
     */
    public MemePanel( EffectContext context ) {
        super( context );
        ConfigService config = context.getService( ConfigService.class );
        if ( config != null ) {
            fontName = config.getString( R.string.feather_meme_default_font );
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see com.aviary.android.feather.effects.AbstractEffectPanel#onCreate(android.graphics.Bitmap)
     */
    @SuppressWarnings("deprecation")
    @Override
    public void onCreate( Bitmap bitmap ) {
        super.onCreate( bitmap );
        editTopButton = (Button) getOptionView().findViewById( R.id.button1 );
        editBottomButton = (Button) getOptionView().findViewById( R.id.button2 );
        mImageView = (ImageViewTouch) getContentView().findViewById( R.id.overlay );
        editTopText = (EditText) getContentView().findViewById( R.id.invisible_text_1 );
        editBottomText = (EditText) getContentView().findViewById( R.id.invisible_text_2 );
        clearButtonTop = (Button) getOptionView().findViewById( R.id.clear_button_top );
        clearButtonBottom = (Button) getOptionView().findViewById( R.id.clear_button_bottom );
        // The image must stay fixed while text is dragged over it.
        mImageView.setDoubleTapEnabled( false );
        mImageView.setScaleEnabled( false );
        mImageView.setScrollEnabled( false );
        createAndConfigurePreview();
        mImageView.setOnBitmapChangedListener( new OnBitmapChangedListener() {

            @Override
            public void onBitmapChanged( Drawable drawable ) {
                // Size the dummy view to the on-screen bitmap height so the
                // layout below the image lines up with the rendered preview.
                final Matrix mImageMatrix = mImageView.getImageViewMatrix();
                float[] matrixValues = getMatrixValues( mImageMatrix );
                final int height = (int) ( mBitmap.getHeight() * matrixValues[Matrix.MSCALE_Y] );
                View view = getContentView().findViewById( R.id.feather_meme_dumb );
                LinearLayout.LayoutParams p = (LinearLayout.LayoutParams) view.getLayoutParams();
                // NOTE(review): magic 30px offset; presumably compensates for
                // panel chrome overlap — confirm before changing.
                p.height = height - 30;
                view.setLayoutParams( p );
                view.requestLayout();
            }
        } );
        mImageView.setImageBitmap( mPreview, true, null );
        View content = getOptionView().findViewById( R.id.content );
        content.setBackgroundDrawable( RepeatableHorizontalDrawable.createFromView( content ) );
    }

    /*
     * (non-Javadoc)
     *
     * @see com.aviary.android.feather.effects.AbstractEffectPanel#onActivate()
     */
    @Override
    public void onActivate() {
        super.onActivate();
        createTypeFace();
        // Create and place both editable texts before wiring up listeners.
        onAddTopText();
        onAddBottomText();
        ( (ImageViewDrawableOverlay) mImageView ).setOnDrawableEventListener( this );
        ( (ImageViewDrawableOverlay) mImageView ).setOnLayoutListener( this );
        mInputManager = (InputMethodManager) getContext().getBaseContext().getSystemService( Context.INPUT_METHOD_SERVICE );
        editTopButton.setOnClickListener( this );
        editBottomButton.setOnClickListener( this );
        // The proxy EditTexts are visible (for focus/IME) but fully transparent.
        editTopText.setVisibility( View.VISIBLE );
        editBottomText.setVisibility( View.VISIBLE );
        editTopText.getBackground().setAlpha( 0 );
        editBottomText.getBackground().setAlpha( 0 );
        clearButtonTop.setOnClickListener( this );
        clearButtonBottom.setOnClickListener( this );
        getContentView().setVisibility( View.VISIBLE );
        contentReady();
    }

    /*
     * (non-Javadoc)
     *
     * @see com.aviary.android.feather.effects.AbstractEffectPanel#onDeactivate()
     */
    @Override
    public void onDeactivate() {
        super.onDeactivate();
        // End any ongoing edit, detach all listeners and dismiss the keyboard.
        endEditView( topHv );
        endEditView( bottomHv );
        ( (ImageViewDrawableOverlay) mImageView ).setOnDrawableEventListener( null );
        ( (ImageViewDrawableOverlay) mImageView ).setOnLayoutListener( null );
        editTopButton.setOnClickListener( null );
        editBottomButton.setOnClickListener( null );
        clearButtonTop.setOnClickListener( null );
        clearButtonBottom.setOnClickListener( null );
        if ( mInputManager.isActive( editTopText ) ) mInputManager.hideSoftInputFromWindow( editTopText.getWindowToken(), 0 );
        if ( mInputManager.isActive( editBottomText ) ) mInputManager.hideSoftInputFromWindow( editBottomText.getWindowToken(), 0 );
    }

    /*
     * (non-Javadoc)
     *
     * @see com.aviary.android.feather.effects.AbstractEffectPanel#onDestroy()
     */
    @Override
    public void onDestroy() {
        mCanvas = null;
        mInputManager = null;
        super.onDestroy();
    }

    /*
     * (non-Javadoc)
     *
     * @see com.aviary.android.feather.effects.AbstractContentPanel#generateContentView(android.view.LayoutInflater)
     */
    @Override
    protected View generateContentView( LayoutInflater inflater ) {
        return inflater.inflate( R.layout.feather_meme_content, null );
    }

    /*
     * (non-Javadoc)
     *
     * @see com.aviary.android.feather.effects.AbstractOptionPanel#generateOptionView(android.view.LayoutInflater,
     * android.view.ViewGroup)
     */
    @Override
    protected ViewGroup generateOptionView( LayoutInflater inflater, ViewGroup parent ) {
        return (ViewGroup) inflater.inflate( R.layout.feather_meme_panel, parent, false );
    }

    /*
     * (non-Javadoc)
     *
     * Flattens both texts into the MemeFilter and hands the resulting action
     * list to the framework.
     *
     * @see com.aviary.android.feather.effects.AbstractEffectPanel#onGenerateResult()
     */
    @Override
    protected void onGenerateResult() {
        MemeFilter filter = (MemeFilter) FilterLoaderFactory.get( Filters.MEME );
        flattenText( topHv, filter );
        flattenText( bottomHv, filter );
        MoaActionList actionList = (MoaActionList) filter.getActions().clone();
        super.onGenerateResult( actionList );
    }

    /*
     * (non-Javadoc)
     *
     * Ends the current edit session when the IME reports "done".
     *
     * @see android.widget.TextView.OnEditorActionListener#onEditorAction(android.widget.TextView, int, android.view.KeyEvent)
     */
    @Override
    public boolean onEditorAction( TextView v, int actionId, KeyEvent event ) {
        mLogger.info( "onEditorAction", v, actionId, event );
        if ( v != null ) {
            if ( actionId == EditorInfo.IME_ACTION_DONE || actionId == EditorInfo.IME_ACTION_UNSPECIFIED ) {
                final ImageViewDrawableOverlay image = (ImageViewDrawableOverlay) mImageView;
                if ( image.getSelectedHighlightView() != null ) {
                    DrawableHighlightView d = image.getSelectedHighlightView();
                    if ( d.getContent() instanceof EditableDrawable ) {
                        endEditView( d );
                    }
                }
            }
        }
        return false;
    }

    /**
     * Flatten text.
     * <p>
     * Draws the text of the given highlight view onto the preview canvas and
     * records its text, size and offsets (normalized to bitmap dimensions)
     * into the filter. No-op when hv is null.
     *
     * @param hv
     *           the hv
     * @param filter
     *           the meme filter receiving the text parameters
     */
    private void flattenText( final DrawableHighlightView hv, final MemeFilter filter ) {
        if ( hv != null ) {
            hv.setHidden( true );
            final Matrix mImageMatrix = mImageView.getImageViewMatrix();
            float[] matrixValues = getMatrixValues( mImageMatrix );
            mLogger.log( "image scaled: " + matrixValues[Matrix.MSCALE_X] );
            // TODO: check this modification
            final int width = (int) ( mBitmap.getWidth() );
            final int height = (int) ( mBitmap.getHeight() );
            final RectF cropRect = hv.getCropRectF();
            final Rect rect = new Rect( (int) cropRect.left, (int) cropRect.top, (int) cropRect.right, (int) cropRect.bottom );
            final MemeTextDrawable editable = (MemeTextDrawable) hv.getContent();
            final int saveCount = mCanvas.save( Canvas.MATRIX_SAVE_FLAG );
            // force end edit and hide the blinking cursor
            editable.endEdit();
            editable.invalidateSelf();
            editable.setContentSize( width, height );
            editable.setBounds( rect.left, rect.top, rect.right, rect.bottom );
            editable.draw( mCanvas );
            // Text size and offsets are stored relative to the bitmap size so
            // the filter can be replayed on the full-resolution image.
            if ( topHv == hv ) {
                filter.setTopText( (String) editable.getText(), (double) editable.getTextSize() / mBitmap.getWidth() );
                filter.setTopOffset( ( cropRect.left + (double) editable.getXoff() ) / mBitmap.getWidth(),
                        ( cropRect.top + (double) editable.getYoff() ) / mBitmap.getHeight() );
                // action.setValue( "toptext", (String) editable.getText() );
                // action.setValue( "topsize", (double)editable.getTextSize()/mBitmap.getWidth() );
                // action.setValue( "topxoff", (cropRect.left + (double)editable.getXoff())/mBitmap.getWidth() );
                // action.setValue( "topyoff", (cropRect.top + (double)editable.getYoff())/mBitmap.getHeight() );
            } else {
                filter.setBottomText( (String) editable.getText(), (double) editable.getTextSize() / mBitmap.getWidth() );
                filter.setBottomOffset( ( cropRect.left + (double) editable.getXoff() ) / mBitmap.getWidth(),
                        ( cropRect.top + (double) editable.getYoff() ) / mBitmap.getHeight() );
                // action.setValue( "bottomtext", (String) editable.getText() );
                // action.setValue( "bottomsize", (double)editable.getTextSize()/mBitmap.getWidth() );
                // action.setValue( "bottomxoff", (cropRect.left + (double)editable.getXoff())/mBitmap.getWidth() );
                // action.setValue( "bottomyoff", (cropRect.top + (double)editable.getYoff())/mBitmap.getHeight() );
            }
            filter.setTextScale( matrixValues[Matrix.MSCALE_X] );
            // action.setValue( "scale", matrixValues[Matrix.MSCALE_X] );
            // action.setValue( "textsize", editable.getTextSize() );
            mCanvas.restoreToCount( saveCount );
            mImageView.invalidate();
        }
        onPreviewChanged( mPreview, false );
    }

    /**
     * Creates the and configure preview.
     * <p>
     * Replaces any previous preview with a fresh copy of the source bitmap and
     * binds mCanvas to it so text can later be flattened onto the preview.
     */
    private void createAndConfigurePreview() {
        if ( ( mPreview != null ) && !mPreview.isRecycled() ) {
            mPreview.recycle();
            mPreview = null;
        }
        mPreview = BitmapUtils.copy( mBitmap, mBitmap.getConfig() );
        mCanvas = new Canvas( mPreview );
    }

    /*
     * (non-Javadoc)
     *
     * @see android.view.View.OnClickListener#onClick(android.view.View)
     */
    @Override
    public void onClick( View v ) {
        if ( v == editTopButton ) {
            onTopClick( topHv );
        } else if ( v == editBottomButton ) {
            onTopClick( bottomHv );
        } else if ( v == clearButtonTop ) {
            clearEditView( topHv );
            endEditView( topHv );
        } else if ( v == clearButtonBottom ) {
            clearEditView( bottomHv );
            endEditView( bottomHv );
        }
    }

    /**
     * In top editable text click
     *
     * @param view
     *           the view
     */
    public void onTopClick( final DrawableHighlightView view ) {
        mLogger.info( "onTopClick", view );
        if ( view != null ) if ( view.getContent() instanceof EditableDrawable ) {
            beginEditView( view );
        }
    }

    /**
     * Extract a value form the matrix
     *
     * @param m
     *           the m
     * @return the matrix values
     */
    public static float[] getMatrixValues( Matrix m ) {
        float[] values = new float[9];
        m.getValues( values );
        return values;
    }

    /**
     * Creates and places the top editable text
     */
    private void onAddTopText() {
        final Matrix mImageMatrix = mImageView.getImageViewMatrix();
        final int width = (int) ( mBitmap.getWidth() );
        final int height = (int) ( mBitmap.getHeight() );
        // Default text size is 1/7th of the bitmap height.
        final MemeTextDrawable text = new MemeTextDrawable( "", (float) mBitmap.getHeight() / 7.f, mTypeface );
        text.setTextColor( Color.WHITE );
        text.setTextStrokeColor( Color.BLACK );
        text.setContentSize( width, height );
        topHv = new DrawableHighlightView( mImageView, text );
        topHv.setAlignModeV( DrawableHighlightView.AlignModeV.Top );
        final int cropHeight = text.getIntrinsicHeight();
        final int x = 0;
        final int y = 0;
        // Map the screen-space placement rect back into bitmap space.
        final Matrix matrix = new Matrix( mImageMatrix );
        matrix.invert( matrix );
        final float[] pts = new float[] { x, y, x + width, y + cropHeight };
        MatrixUtils.mapPoints( matrix, pts );
        final RectF cropRect = new RectF( pts[0], pts[1], pts[2], pts[3] );
        addEditable( topHv, mImageMatrix, cropRect );
    }

    /**
     * Create and place the bottom editable text.
     */
    private void onAddBottomText() {
        final Matrix mImageMatrix = mImageView.getImageViewMatrix();
        final int width = (int) ( mBitmap.getWidth() );
        final int height = (int) ( mBitmap.getHeight() );
        final MemeTextDrawable text = new MemeTextDrawable( "", (float) mBitmap.getHeight() / 7.0f, mTypeface );
        text.setTextColor( Color.WHITE );
        text.setTextStrokeColor( Color.BLACK );
        text.setContentSize( width, height );
        bottomHv = new DrawableHighlightView( mImageView, text );
        bottomHv.setAlignModeV( DrawableHighlightView.AlignModeV.Bottom );
        final int cropHeight = text.getIntrinsicHeight();
        final int x = 0;
        final int y = 0;
        final Matrix matrix = new Matrix( mImageMatrix );
        matrix.invert( matrix );
        // Anchored at the bottom edge, lifted by height/30 for a small margin.
        final float[] pts = new float[] { x, y + height - cropHeight - ( height / 30 ), x + width, y + height - ( height / 30 ) };
        MatrixUtils.mapPoints( matrix, pts );
        final RectF cropRect = new RectF( pts[0], pts[1], pts[2], pts[3] );
        addEditable( bottomHv, mImageMatrix, cropRect );
    }

    /**
     * Adds the editable.
     * <p>
     * Configures the highlight view (no anchors, invisible outline) and
     * registers it on the overlay image view.
     *
     * @param hv
     *           the hv
     * @param imageMatrix
     *           the image matrix
     * @param cropRect
     *           the crop rect
     */
    private void addEditable( DrawableHighlightView hv, Matrix imageMatrix, RectF cropRect ) {
        final ImageViewDrawableOverlay image = (ImageViewDrawableOverlay) mImageView;
        hv.setRotateAndScale( true );
        hv.showAnchors( false );
        hv.drawOutlineFill( false );
        hv.drawOutlineStroke( false );
        hv.setup( imageMatrix, null, cropRect, false );
        hv.getOutlineFillPaint().setXfermode( new PorterDuffXfermode( android.graphics.PorterDuff.Mode.SRC_ATOP ) );
        hv.setMinSize( 10 );
        // Fully transparent outline colors: the view is draggable but unmarked.
        hv.setOutlineFillColor( new ColorStateList( new int[][]{ {android.R.attr.state_active } }, new int[]{0} ) );
        hv.setOutlineStrokeColor( new ColorStateList( new int[][]{ {android.R.attr.state_active } }, new int[]{0} ) );
        image.addHighlightView( hv );
    }

    /** TextWatcher that also carries the highlight view currently being edited. */
    abstract class MyTextWatcher implements TextWatcher {

        public DrawableHighlightView view;
    }

    // Routes keystrokes from the proxy EditTexts into the drawable being edited
    // and keeps the mirror buttons / clear buttons in sync.
    private final MyTextWatcher mEditTextWatcher = new MyTextWatcher() {

        @Override
        public void afterTextChanged( final Editable s ) {}

        @Override
        public void beforeTextChanged( final CharSequence s, final int start, final int count, final int after ) {}

        @Override
        public void onTextChanged( final CharSequence s, final int start, final int before, final int count ) {
            mLogger.info( "onTextChanged", view );
            if ( ( view != null ) && ( view.getContent() instanceof EditableDrawable ) ) {
                final EditableDrawable editable = (EditableDrawable) view.getContent();
                if ( !editable.isEditing() ) return;
                editable.setText( s.toString() );
                if ( topHv.equals( view ) ) {
                    editTopButton.setText( s );
                    clearButtonTop.setVisibility( s != null && s.length() > 0 ? View.VISIBLE : View.INVISIBLE );
                } else if ( bottomHv.equals( view ) ) {
                    editBottomButton.setText( s );
                    clearButtonBottom.setVisibility( s != null && s.length() > 0 ? View.VISIBLE : View.INVISIBLE );
                }
                view.forceUpdate();
                setIsChanged( true );
            }
        }
    };

    /*
     * (non-Javadoc)
     *
     * @see
     * com.aviary.android.feather.widget.ImageViewDrawableOverlay.OnDrawableEventListener#onFocusChange(com.aviary.android.feather
     * .widget.DrawableHighlightView, com.aviary.android.feather.widget.DrawableHighlightView)
     */
    @Override
    public void onFocusChange( DrawableHighlightView newFocus, DrawableHighlightView oldFocus ) {
        mLogger.info( "onFocusChange", newFocus, oldFocus );
        if ( oldFocus != null ) {
            if ( newFocus == null ) {
                endEditView( oldFocus );
            }
        }
    }

    /**
     * Terminates an edit view.
     * <p>
     * Ends the drawable's edit session (if active) and syncs the mirror
     * button / clear button for the corresponding side.
     *
     * @param hv
     *           the hv
     */
    private void endEditView( DrawableHighlightView hv ) {
        EditableDrawable text = (EditableDrawable) hv.getContent();
        mLogger.info( "endEditView", text.isEditing() );
        if ( text.isEditing() ) {
            text.endEdit();
            endEditText( hv );
        }
        CharSequence value = text.getText();
        if ( topHv.equals( hv ) ) {
            editTopButton.setText( value );
            clearButtonTop.setVisibility( value != null && value.length() > 0 ? View.VISIBLE : View.INVISIBLE );
        } else if ( bottomHv.equals( hv ) ) {
            editBottomButton.setText( value );
            clearButtonBottom.setVisibility( value != null && value.length() > 0 ? View.VISIBLE : View.INVISIBLE );
        }
    }

    /**
     * Begins an edit view.
     * <p>
     * Only one of the two texts may be in edit mode at a time, so the other
     * side's session is closed first.
     *
     * @param hv
     *           the hv
     */
    private void beginEditView( DrawableHighlightView hv ) {
        mLogger.info( "beginEditView" );
        final EditableDrawable text = (EditableDrawable) hv.getContent();
        if ( hv == topHv ) {
            endEditView( bottomHv );
        } else {
            endEditView( topHv );
        }
        if ( !text.isEditing() ) {
            text.beginEdit();
            beginEditText( hv );
        }
    }

    // Empties the text of the given highlight view and forces a redraw.
    private void clearEditView( DrawableHighlightView hv ) {
        final MemeTextDrawable text = (MemeTextDrawable) hv.getContent();
        text.setText( "" );
        text.invalidateSelf();
        hv.forceUpdate();
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * com.aviary.android.feather.widget.ImageViewDrawableOverlay.OnDrawableEventListener#onDown(com.aviary.android.feather.widget
     * .DrawableHighlightView)
     */
    @Override
    public void onDown( DrawableHighlightView view ) {
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * com.aviary.android.feather.widget.ImageViewDrawableOverlay.OnDrawableEventListener#onMove(com.aviary.android.feather.widget
     * .DrawableHighlightView)
     */
    @Override
    public void onMove( DrawableHighlightView view ) {}

    /*
     * (non-Javadoc)
     *
     * @see
     * com.aviary.android.feather.widget.ImageViewDrawableOverlay.OnDrawableEventListener#onClick(com.aviary.android.feather.widget
     * .DrawableHighlightView)
     */
    @Override
    public void onClick( DrawableHighlightView view ) {
        if ( view != null ) {
            if ( view.getContent() instanceof EditableDrawable ) {
                beginEditView( view );
            }
        }
    }

    /**
     * Begin edit text.
     * <p>
     * Focuses the invisible EditText matching the given view, shows the soft
     * keyboard and attaches the shared text watcher. The watcher is detached
     * while the EditText content is (re)initialized to avoid spurious events.
     *
     * @param view
     *           the view
     */
    private void beginEditText( final DrawableHighlightView view ) {
        mLogger.info( "beginEditText", view );
        EditText editText = null;
        if ( view == topHv ) {
            editText = editTopText;
        } else if ( view == bottomHv ) {
            editText = editBottomText;
        }
        if ( editText != null ) {
            mEditTextWatcher.view = null;
            editText.removeTextChangedListener( mEditTextWatcher );
            final EditableDrawable editable = (EditableDrawable) view.getContent();
            final String oldText = (String) editable.getText();
            editText.setText( oldText );
            editText.setSelection( editText.length() );
            editText.setImeOptions( EditorInfo.IME_ACTION_DONE );
            editText.requestFocusFromTouch();
            Handler handler = new Handler();
            ResultReceiver receiver = new ResultReceiver( handler );
            if ( !mInputManager.showSoftInput( editText, 0, receiver ) ) {
                mInputManager.toggleSoftInput( InputMethodManager.SHOW_FORCED, 0 ); // TODO: verify
            }
            mEditTextWatcher.view = view;
            editText.setOnEditorActionListener( this );
            editText.addTextChangedListener( mEditTextWatcher );
            ( (ImageViewDrawableOverlay) mImageView ).setSelectedHighlightView( view );
            // Re-setting the same text forces the drawable to re-measure itself.
            ( (EditableDrawable) view.getContent() ).setText( ( (EditableDrawable) view.getContent() ).getText() );
            view.forceUpdate();
        }
    }

    /**
     * End edit text.
     * <p>
     * Detaches the watcher, hides the soft keyboard and drops focus from the
     * proxy EditText matching the given view.
     *
     * @param view
     *           the view
     */
    private void endEditText( final DrawableHighlightView view ) {
        mLogger.info( "endEditText", view );
        mEditTextWatcher.view = null;
        EditText editText = null;
        if ( view == topHv )
            editText = editTopText;
        else if ( view == bottomHv ) editText = editBottomText;
        if ( editText != null ) {
            editText.removeTextChangedListener( mEditTextWatcher );
            if ( mInputManager.isActive( editText ) ) {
                mInputManager.hideSoftInputFromWindow( editText.getWindowToken(), 0 );
            }
            editText.clearFocus();
        }
        // this will send the focus to the bottom panel
        // but also creating a bad visual effect
        //mOptionView.requestFocus();
    }

    /**
     * Creates the type face used for meme.
     * Falls back to the default typeface when the configured asset is missing.
     */
    private void createTypeFace() {
        try {
            mTypeface = TypefaceUtils.createFromAsset( getContext().getBaseContext().getAssets(), fontName );
        } catch ( Exception e ) {
            mTypeface = Typeface.DEFAULT;
        }
    }

    /*
     * Keeps both text drawables sized to the on-screen bitmap whenever the
     * overlay view is re-laid-out.
     */
    @Override
    public void onLayoutChanged( boolean changed, int left, int top, int right, int bottom ) {
        if ( changed ) {
            final Matrix mImageMatrix = mImageView.getImageViewMatrix();
            float[] matrixValues = getMatrixValues( mImageMatrix );
            final float w = mBitmap.getWidth();
            final float h = mBitmap.getHeight();
            final float scale = matrixValues[Matrix.MSCALE_X];
            if ( topHv != null ) {
                MemeTextDrawable text = (MemeTextDrawable) topHv.getContent();
                text.setContentSize( w * scale, h * scale );
            }
            if ( bottomHv != null ) {
                MemeTextDrawable text = (MemeTextDrawable) bottomHv.getContent();
                text.setContentSize( w * scale, h * scale );
            }
        }
    }
}
| |
/*
*
* This source file is part of the "Java Objects Executor" open source project
*
* Copyright 2017 Veryant and Marco Bertacca
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.veryant.joe;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
/**
 * A JOE block: an executable list of {@link Message}s with its own variable
 * scope, an optional parent scope, and shared constants. Blocks can also act
 * as objects ({@link #$new}) in which case name lookup stops at the block
 * itself instead of walking up the lexical chain.
 */
public class Block extends ArrayList<Message>
                   implements InternalObject {
   /** Shared empty argument vector, used when a block is invoked without arguments. */
   static final Object[] voidArgs = {};
   /** Executor that actually runs the messages of this block. */
   final Executor executor;
   /** Enclosing lexical scope; null for the outermost block. */
   private Block parent;
   /** Local variables; lazily created (null until first init/vExec). */
   private HashMap<String,Object> variables;
   /** Constants shared with the whole block tree (root allocates, children alias). */
   private final HashMap<String,Object> constants;
   /** Nested blocks, needed so clone() can deep-copy the tree. */
   private ArrayList<Block> children = new ArrayList<Block>();
   private String blockName;
   /** Declared argument names, in positional order; null when none declared. */
   private String[] argName;
   /** Arguments of the most recent invocation (see getArgv()). */
   private Object argArray[];
   /** True when this block behaves as a JOE object: lookup does not climb to parent. */
   private boolean execAsJoe;

   Block (Executor exec, Block par) {
      executor = exec;
      parent = par;
      // Constants are shared across the whole tree: only the root allocates.
      if (parent != null)
         constants = parent.constants;
      else
         constants = new HashMap<String,Object>();
      if (parent != null)
         parent.children.add (this);
   }

   /** Declares the positional argument names of this block. */
   void setArguments (String argn[]) {
      argName = argn;
   }

   /** Executes the block with no arguments in a fresh variable scope. */
   public Object exec () throws JOEException {
      return vExec (new HashMap<String,Object>(), voidArgs);
   }

   /** Executes the block with the given arguments in a fresh variable scope. */
   public Object exec (Object...argv) throws JOEException {
      return vExec (new HashMap<String,Object>(), argv);
   }

   /** Executes the block with no arguments, keeping state in the block itself. */
   public Object init () throws JOEException {
      return init (voidArgs);
   }

   /**
    * Executes the block using (and lazily creating) its own persistent
    * variable map, so assignments survive across invocations.
    */
   public Object init (Object...argv) throws JOEException {
      if (variables == null)
         variables = new HashMap<String,Object>();
      return vExec (variables, argv);
   }

   /** @deprecated use {@link #init(Object...)} instead; identical behavior. */
   @Deprecated
   public Object sfExec (Object...argv) throws JOEException {
      if (variables == null)
         variables = new HashMap<String,Object>();
      return vExec (variables, argv);
   }

   /**
    * Core execution: binds arguments into the supplied variable map, runs the
    * block, and restores the previous variable map afterwards.
    *
    * @param vars variable scope to execute in
    * @param argv actual arguments; missing ones are bound to WNull.value
    * @return the value produced by the executor
    */
   public Object vExec (HashMap<String,Object> vars, Object...argv)
                                                 throws JOEException {
      HashMap<String,Object> saveVar = variables;
      variables = vars;
      if (argv == null)
         argv = voidArgs;
      argArray = argv;
      if (argName != null) {
         int i;
         final int nArgs = Math.min (argv.length, argName.length);
         for (i = 0; i < nArgs; i++)
            variables.put (argName[i], argv[i]);
         // Unsupplied declared arguments default to the JOE null value.
         for ( ; i < argName.length; i++)
            variables.put (argName[i], WNull.value);
      }
      Object Return = executor.run (this);
      variables = saveVar;
      return Return;
   }

   /**
    * Finds the nearest scope that already holds a binding for name, or null.
    * Lookup stops at this block when it runs as a JOE object.
    */
   private HashMap<String,Object> getDataContaining (String name) {
      if (variables != null && variables.containsKey (name))
         return variables;
      else if (!execAsJoe && parent != null)
         return parent.getDataContaining (name);
      else
         return null;
   }

   /**
    * Defines a constant. Returns the stored value, or null when a constant
    * with that name already exists (constants are write-once).
    */
   public Object setConstant (String name, Object val) {
      if (val == null)
         val = WNull.value;
      if (constants.get (name) == null) {
         constants.put (name, val);
         return val;
      } else
         return null;
   }

   /**
    * Assigns a variable in the nearest scope that already defines it, or in
    * the local scope otherwise. Returns null when the name is a constant.
    */
   public Object setVariable (String name, Object val) {
      if (val == null)
         val = WNull.value;
      if (constants.get (name) == null) {
         HashMap<String,Object> var = getDataContaining (name);
         if (var != null)
            var.put (name, val);
         else
            variables.put (name, val);
         return val;
      } else
         return null;
   }

   public Object getVariable (WString name) throws JOEException {
      return getVariable (name.value);
   }

   /**
    * Resolves a name: local variables first, then constants, then the parent
    * chain. Throws when the name is undefined anywhere.
    */
   public Object getVariable (String name) throws JOEException {
      Object Return = (variables == null) ? null : variables.get(name);
      if (Return == null) {
         Return = constants.get (name);
         if (Return == null) {
            if (parent != null)
               Return = parent.getVariable (name);
            else
               throw new JOEException ("Variable not found: `" + name + "`");
         }
      }
      return Return;
   }

   // Accumulates the variable names of this block and all its ancestors.
   // FIX: guard against the lazily-created map still being null, which made
   // getVariablesNames() throw NPE when called before the block ever ran.
   private void getVariablesNames(ArrayList<String> list) {
      if (variables != null)
         list.addAll (variables.keySet());
      if (parent != null)
         parent.getVariablesNames (list);
   }

   /** Returns the names of all variables visible from this block (may repeat). */
   public String[] getVariablesNames() {
      ArrayList<String> list = new ArrayList<String>();
      getVariablesNames(list);
      String Return[] = new String[list.size()];
      return list.toArray (Return);
   }

   /** Returns the live local variable map (may be null before first run). */
   public HashMap<String,Object> getVariables () {
      return variables;
   }

   /** Returns the arguments of the most recent invocation. */
   public Object[] getArgv() {
      return argArray;
   }

   protected boolean isExecAsJoe () {
      return execAsJoe;
   }

   /** Returns the variable with the given name if it is a Block, else null. */
   Block getMethod (String name) throws JOEException {
      Object Return = getVariable (name);
      if (Return instanceof Block) {
         return (Block) Return;
      } else {
         return null;
      }
   }

   /** Executes the named child block with the given arguments, or returns null. */
   public Object execBlock (String name, Object...argv) throws JOEException {
      Object block = getVariable(name);
      if (block instanceof Block)
         return ((Block) block).exec (argv);
      else
         return null;
   }

   /**
    * Deep copy: clones the whole child tree and re-parents each clone to the
    * new block; the clone starts with no variables of its own.
    */
   @Override
   public Object clone() {
      Block Return = (Block) super.clone();
      Return.variables = null;
      Return.children = new ArrayList<Block>();
      final int size = children.size();
      for (int i = 0; i < size; i++)
         Return.children.add(((Block)children.get(i).clone()).$extends(Return));
      return Return;
   }

   public Block $new() throws JOEException {
      return $new (voidArgs);
   }

   /**
    * Instantiates this block as a JOE object: clones it, marks it so name
    * lookup stays local, and runs its body once as the constructor.
    */
   public Block $new(Object...args) throws JOEException {
      Block Return = (Block) clone();
      Return.execAsJoe = true;
      Return.init (args);
      return Return;
   }

   int getLastChild() {
      return children.size() - 1;
   }

   Block getChild(int n) {
      final Block Return = children.get(n);
      return Return;
   }

   /** Returns the block's name, or a synthetic "block-<hash>" when unnamed. */
   public String name() {
      if (blockName == null)
         return "block-" + hashCode();
      else
         return blockName;
   }

   void setName(String n) {
      blockName = n;
   }

   public int getRow() {
      return -1;
   }

   public int getCol() {
      return -1;
   }

   /** Re-parents this block under b and returns this (builder style). */
   public Block $extends(Block b) {
      parent = b;
      return this;
   }

   /**
    * When running as a JOE object, delegates to the object's own toString
    * method if it defines one; otherwise falls back to a bracketed summary.
    */
   @Override
   public String toString() {
      String Return;
      if (isExecAsJoe()) {
         Block joeToString;
         try {
            joeToString = getMethod("toString");
            if (joeToString != null)
               try {
                  Return = joeToString.exec().toString();
               } catch (JOEException _ex) {
                  Return = _ex.toString();
               }
            else
               Return = "{" + super.toString() + "}";
         } catch (JOEException _ex) {
            Return = "{" + super.toString() + "}";
         }
      } else {
         if ((Return = blockName) == null)
            Return = "{" + name() + "}";
      }
      return Return;
   }
}
| |
/** @author rbk
* Ver 1.0: 2017/09/29
* Example to extend Graph/Vertex/Edge classes to implement algorithms in which nodes and edges
* need to be disabled during execution. Design goal: be able to call other graph algorithms
* without changing their codes to account for disabled elements.
*
* Ver 1.1: 2017/10/09
* Updated iterator with boolean field ready. Previously, if hasNext() is called multiple
* times, then cursor keeps moving forward, even though the elements were not accessed
* by next(). Also, if program calls next() multiple times, without calling hasNext()
* in between, same element is returned. Added UnsupportedOperationException to remove.
**/
package cs6301.g00;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
/**
 * Graph wrapper whose vertices and edges can be disabled at runtime; iterators
 * transparently skip disabled elements so other graph algorithms work
 * unchanged on the reduced graph.
 */
public class XGraph extends Graph {

    /** Vertex wrapper carrying a disabled flag and its own edge list. */
    public static class XVertex extends Vertex {
        boolean disabled;
        List<XEdge> xadj;

        XVertex(Vertex u) {
            super(u);
            disabled = false;
            xadj = new LinkedList<>();
        }

        boolean isDisabled() { return disabled; }

        void disable() { disabled = true; }

        @Override
        public Iterator<Edge> iterator() { return new XVertexIterator(this); }

        /**
         * Iterator over this vertex's edges that skips disabled edges.
         * Uses a "ready" flag so repeated hasNext() calls do not advance the
         * cursor and next() without hasNext() still behaves correctly.
         */
        class XVertexIterator implements Iterator<Edge> {
            XEdge cur;
            Iterator<XEdge> it;
            boolean ready;  // true when cur holds an unconsumed, enabled edge

            XVertexIterator(XVertex u) {
                this.it = u.xadj.iterator();
                ready = false;
            }

            public boolean hasNext() {
                if(ready) { return true; }
                if(!it.hasNext()) { return false; }
                cur = it.next();
                while(cur.isDisabled() && it.hasNext()) {
                    cur = it.next();
                }
                // FIX: only mark ready when the scan actually found an enabled
                // edge; previously ready was set unconditionally, so after the
                // list ended on a disabled edge a later hasNext() returned true
                // and next() handed out a disabled edge.
                ready = !cur.isDisabled();
                return ready;
            }

            public Edge next() {
                if(!ready) {
                    if(!hasNext()) {
                        throw new java.util.NoSuchElementException();
                    }
                }
                ready = false;
                return cur;
            }

            public void remove() {
                throw new java.lang.UnsupportedOperationException();
            }
        }
    }

    /** Edge wrapper; an edge counts as disabled if either endpoint is disabled. */
    static class XEdge extends Edge {
        boolean disabled;

        XEdge(XVertex from, XVertex to, int weight) {
            super(from, to, weight);
            disabled = false;
        }

        boolean isDisabled() {
            XVertex xfrom = (XVertex) from;
            XVertex xto = (XVertex) to;
            return disabled || xfrom.isDisabled() || xto.isDisabled();
        }
    }

    XVertex[] xv; // vertices of graph

    public XGraph(Graph g) {
        super(g);
        xv = new XVertex[2*g.size()]; // Extra space is allocated in array for nodes to be added later
        for(Vertex u: g) {
            xv[u.getName()] = new XVertex(u);
        }
        // Make copy of edges
        for(Vertex u: g) {
            for(Edge e: u) {
                Vertex v = e.otherEnd(u);
                XVertex x1 = getVertex(u);
                XVertex x2 = getVertex(v);
                x1.xadj.add(new XEdge(x1, x2, e.weight));
            }
        }
    }

    @Override
    public Iterator<Vertex> iterator() { return new XGraphIterator(this); }

    /**
     * Iterator over the graph's vertices that skips disabled vertices.
     * FIX: brought in line with XVertexIterator (and the Ver 1.1 header note):
     * a "ready" flag prevents repeated hasNext() calls from advancing the
     * cursor, next() now validates via hasNext() and throws
     * NoSuchElementException when exhausted, and remove() throws
     * UnsupportedOperationException instead of silently doing nothing.
     */
    class XGraphIterator implements Iterator<Vertex> {
        Iterator<XVertex> it;
        XVertex xcur;
        boolean ready;  // true when xcur holds an unconsumed, enabled vertex

        XGraphIterator(XGraph xg) {
            this.it = new ArrayIterator<XVertex>(xg.xv, 0, xg.size()-1); // Iterate over existing elements only
            ready = false;
        }

        public boolean hasNext() {
            if(ready) { return true; }
            if(!it.hasNext()) { return false; }
            xcur = it.next();
            while(xcur.isDisabled() && it.hasNext()) {
                xcur = it.next();
            }
            ready = !xcur.isDisabled();
            return ready;
        }

        public Vertex next() {
            if(!ready) {
                if(!hasNext()) {
                    throw new java.util.NoSuchElementException();
                }
            }
            ready = false;
            return xcur;
        }

        public void remove() {
            throw new java.lang.UnsupportedOperationException();
        }
    }

    /** Vertex names are 1-based; the backing array is 0-based. */
    @Override
    public Vertex getVertex(int n) {
        return xv[n-1];
    }

    XVertex getVertex(Vertex u) {
        return Vertex.getVertex(xv, u);
    }

    /** Disables vertex i (1-based), removing it and its edges from iteration. */
    void disable(int i) {
        XVertex u = (XVertex) getVertex(i);
        u.disable();
    }

    /** Demo driver: runs BFS, disables two vertices, and runs BFS again. */
    public static void main(String[] args) {
        Graph g = Graph.readGraph(new Scanner(System.in));
        XGraph xg = new XGraph(g);
        Vertex src = xg.getVertex(1);

        System.out.println("Node : Dist : Edges");
        BFS b = new BFS(xg, src);
        b.bfs();
        Vertex farthest = DiameterTree.findFarthest(b);
        xg.printGraph(b);
        System.out.println("Source: " + src + " Farthest: " + farthest + " Distance: " + b.distance(farthest));

        System.out.println("\nDisabling vertices 8 and 9");
        xg.disable(8);
        xg.disable(9);
        b.reinitialize(src);
        b.bfs();
        farthest = DiameterTree.findFarthest(b);
        xg.printGraph(b);
        System.out.println("Source: " + src + " Farthest: " + farthest + " Distance: " + b.distance(farthest));
    }

    /** Prints every enabled vertex with its BFS distance and enabled edges. */
    void printGraph(BFS b) {
        for(Vertex u: this) {
            System.out.print("  " + u + "  :  " + b.distance(u) + "  : ");
            for(Edge e: u) {
                System.out.print(e);
            }
            System.out.println();
        }
    }
}
/*
Sample output:
Node : Dist : Edges
1 : 0 : (1,2)(1,3)
2 : 1 : (2,1)(2,4)(2,5)
3 : 1 : (3,1)(3,6)(3,7)
4 : 2 : (4,2)(4,8)
5 : 2 : (5,2)
6 : 2 : (6,3)
7 : 2 : (7,3)(7,9)
8 : 3 : (8,4)
9 : 3 : (9,7)
Source: 1 Farthest: 8 Distance: 3
Disabling vertices 8 and 9
1 : 0 : (1,2)(1,3)
2 : 1 : (2,1)(2,4)(2,5)
3 : 1 : (3,1)(3,6)(3,7)
4 : 2 : (4,2)
5 : 2 : (5,2)
6 : 2 : (6,3)
7 : 2 : (7,3)
Source: 1 Farthest: 4 Distance: 2
*/
| |
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.account;
import com.google.gerrit.client.ErrorDialog;
import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.VoidResult;
import com.google.gerrit.client.rpc.GerritCallback;
import com.google.gerrit.client.rpc.Natives;
import com.google.gerrit.client.ui.ComplexDisclosurePanel;
import com.google.gerrit.client.ui.FancyFlexTable;
import com.google.gerrit.client.ui.SmallHeading;
import com.google.gerrit.common.data.SshHostKey;
import com.google.gerrit.common.errors.InvalidSshKeyException;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.CheckBox;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlexTable.FlexCellFormatter;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwtexpui.clippy.client.CopyableLabel;
import com.google.gwtexpui.globalkey.client.NpTextArea;
import com.google.gwtjsonrpc.client.RemoteJsonException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
/**
 * Account-settings panel for managing a user's SSH public keys.
 *
 * <p>Displays the current keys in a table, offers a show/hide "add key" input
 * block, supports deleting checked keys, and lists the SSH host keys advertised
 * by the server daemon.
 */
class SshPanel extends Composite {
  private SshKeyTable keys;
  private Button showAddKeyBlock;
  private Panel addKeyBlock;
  private Button closeAddKeyBlock;
  private Button clearNew;
  private Button addNew;
  private NpTextArea addTxt;
  private Button deleteKey;
  private Panel serverKeys;
  // Number of completed async loads; display() fires once both the user's
  // keys and the daemon host keys have arrived (loadCount == 2).
  private int loadCount;

  SshPanel() {
    final FlowPanel body = new FlowPanel();
    showAddKeyBlock = new Button(Util.C.buttonShowAddSshKey());
    showAddKeyBlock.addClickHandler(
        new ClickHandler() {
          @Override
          public void onClick(ClickEvent event) {
            showAddKeyBlock(true);
          }
        });
    keys = new SshKeyTable();
    body.add(keys);
    {
      final FlowPanel fp = new FlowPanel();
      deleteKey = new Button(Util.C.buttonDeleteSshKey());
      // Disabled until at least one key row is checked.
      deleteKey.setEnabled(false);
      deleteKey.addClickHandler(
          new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
              keys.deleteChecked();
            }
          });
      fp.add(deleteKey);
      fp.add(showAddKeyBlock);
      body.add(fp);
    }
    addKeyBlock = new VerticalPanel();
    addKeyBlock.setVisible(false);
    addKeyBlock.setStyleName(Gerrit.RESOURCES.css().addSshKeyPanel());
    addKeyBlock.add(new SmallHeading(Util.C.addSshKeyPanelHeader()));
    final ComplexDisclosurePanel addSshKeyHelp =
        new ComplexDisclosurePanel(Util.C.addSshKeyHelpTitle(), false);
    addSshKeyHelp.setContent(new HTML(Util.C.addSshKeyHelp()));
    addKeyBlock.add(addSshKeyHelp);
    addTxt = new NpTextArea();
    addTxt.setVisibleLines(12);
    addTxt.setCharacterWidth(80);
    addTxt.setSpellCheck(false);
    addKeyBlock.add(addTxt);
    final HorizontalPanel buttons = new HorizontalPanel();
    addKeyBlock.add(buttons);
    clearNew = new Button(Util.C.buttonClearSshKeyInput());
    clearNew.addClickHandler(
        new ClickHandler() {
          @Override
          public void onClick(ClickEvent event) {
            addTxt.setText("");
            addTxt.setFocus(true);
          }
        });
    buttons.add(clearNew);
    addNew = new Button(Util.C.buttonAddSshKey());
    addNew.addClickHandler(
        new ClickHandler() {
          @Override
          public void onClick(ClickEvent event) {
            doAddNew();
          }
        });
    buttons.add(addNew);
    closeAddKeyBlock = new Button(Util.C.buttonCloseAddSshKey());
    closeAddKeyBlock.addClickHandler(
        new ClickHandler() {
          @Override
          public void onClick(ClickEvent event) {
            showAddKeyBlock(false);
          }
        });
    buttons.add(closeAddKeyBlock);
    buttons.setCellWidth(closeAddKeyBlock, "100%");
    buttons.setCellHorizontalAlignment(closeAddKeyBlock, HasHorizontalAlignment.ALIGN_RIGHT);
    body.add(addKeyBlock);
    serverKeys = new FlowPanel();
    body.add(serverKeys);
    initWidget(body);
  }

  /** Shows or hides the key table together with its delete/close buttons. */
  void setKeyTableVisible(boolean on) {
    keys.setVisible(on);
    deleteKey.setVisible(on);
    closeAddKeyBlock.setVisible(on);
  }

  /**
   * Uploads the pasted public-key text to the server as a new key for the
   * current ("self") account. On success the key is appended to the table; an
   * invalid-key failure pops up an error dialog, other failures fall through to
   * the default error handling.
   */
  void doAddNew() {
    final String txt = addTxt.getText();
    if (txt != null && txt.length() > 0) {
      // Prevent double submission while the RPC is in flight.
      addNew.setEnabled(false);
      AccountApi.addSshKey(
          "self",
          txt,
          new GerritCallback<SshKeyInfo>() {
            @Override
            public void onSuccess(SshKeyInfo k) {
              addNew.setEnabled(true);
              addTxt.setText("");
              keys.addOneKey(k);
              if (!keys.isVisible()) {
                // First key ever: close the add block and reveal the table.
                showAddKeyBlock(false);
                setKeyTableVisible(true);
                keys.updateDeleteButton();
              }
            }

            @Override
            public void onFailure(Throwable caught) {
              addNew.setEnabled(true);
              if (isInvalidSshKey(caught)) {
                new ErrorDialog(Util.C.invalidSshKeyError()).center();
              } else {
                super.onFailure(caught);
              }
            }

            // An invalid key may surface either as the typed exception or as a
            // RemoteJsonException carrying the same message text.
            private boolean isInvalidSshKey(Throwable caught) {
              if (caught instanceof InvalidSshKeyException) {
                return true;
              }
              return caught instanceof RemoteJsonException
                  && InvalidSshKeyException.MESSAGE.equals(caught.getMessage());
            }
          });
    }
  }

  @Override
  protected void onLoad() {
    super.onLoad();
    refreshSshKeys();
    Gerrit.SYSTEM_SVC.daemonHostKeys(
        new GerritCallback<List<SshHostKey>>() {
          @Override
          public void onSuccess(List<SshHostKey> result) {
            serverKeys.clear();
            for (SshHostKey keyInfo : result) {
              serverKeys.add(new SshHostKeyPanel(keyInfo));
            }
            if (++loadCount == 2) {
              display();
            }
          }
        });
  }

  /** Reloads the current account's SSH keys and redisplays the table. */
  private void refreshSshKeys() {
    AccountApi.getSshKeys(
        "self",
        new GerritCallback<JsArray<SshKeyInfo>>() {
          @Override
          public void onSuccess(JsArray<SshKeyInfo> result) {
            keys.display(Natives.asList(result));
            if (result.length() == 0 && keys.isVisible()) {
              showAddKeyBlock(true);
            }
            if (++loadCount == 2) {
              display();
            }
          }
        });
  }

  void display() {}

  /** Toggles between the "show add key" button and the add-key input block. */
  private void showAddKeyBlock(boolean show) {
    showAddKeyBlock.setVisible(!show);
    addKeyBlock.setVisible(show);
  }

  /** Table listing the account's SSH keys with a per-row delete checkbox. */
  private class SshKeyTable extends FancyFlexTable<SshKeyInfo> {
    private ValueChangeHandler<Boolean> updateDeleteHandler;

    SshKeyTable() {
      table.setWidth("");
      table.setText(0, 2, Util.C.sshKeyStatus());
      table.setText(0, 3, Util.C.sshKeyAlgorithm());
      table.setText(0, 4, Util.C.sshKeyKey());
      table.setText(0, 5, Util.C.sshKeyComment());
      final FlexCellFormatter fmt = table.getFlexCellFormatter();
      fmt.addStyleName(0, 1, Gerrit.RESOURCES.css().iconHeader());
      fmt.addStyleName(0, 2, Gerrit.RESOURCES.css().dataHeader());
      fmt.addStyleName(0, 3, Gerrit.RESOURCES.css().dataHeader());
      fmt.addStyleName(0, 4, Gerrit.RESOURCES.css().dataHeader());
      fmt.addStyleName(0, 5, Gerrit.RESOURCES.css().dataHeader());
      updateDeleteHandler =
          new ValueChangeHandler<Boolean>() {
            @Override
            public void onValueChange(ValueChangeEvent<Boolean> event) {
              updateDeleteButton();
            }
          };
    }

    /** Deletes every checked key on the server, then removes its row. */
    void deleteChecked() {
      final HashSet<Integer> sequenceNumbers = new HashSet<>();
      for (int row = 1; row < table.getRowCount(); row++) {
        final SshKeyInfo k = getRowItem(row);
        if (k != null && ((CheckBox) table.getWidget(row, 1)).getValue()) {
          sequenceNumbers.add(k.seq());
        }
      }
      if (sequenceNumbers.isEmpty()) {
        updateDeleteButton();
      } else {
        deleteKey.setEnabled(false);
        AccountApi.deleteSshKeys(
            "self",
            sequenceNumbers,
            new GerritCallback<VoidResult>() {
              @Override
              public void onSuccess(VoidResult result) {
                // Row indexes shift on removal, so only advance when keeping.
                for (int row = 1; row < table.getRowCount(); ) {
                  final SshKeyInfo k = getRowItem(row);
                  if (k != null && sequenceNumbers.contains(k.seq())) {
                    table.removeRow(row);
                  } else {
                    row++;
                  }
                }
                if (table.getRowCount() == 1) {
                  // Only the header row is left: show the empty-state UI.
                  display(Collections.<SshKeyInfo>emptyList());
                } else {
                  updateDeleteButton();
                }
              }

              @Override
              public void onFailure(Throwable caught) {
                refreshSshKeys();
                updateDeleteButton();
                super.onFailure(caught);
              }
            });
      }
    }

    /** Replaces the table contents with {@code result}, or shows the add block if empty. */
    void display(List<SshKeyInfo> result) {
      if (result.isEmpty()) {
        setKeyTableVisible(false);
        showAddKeyBlock(true);
      } else {
        while (1 < table.getRowCount()) {
          table.removeRow(table.getRowCount() - 1);
        }
        for (SshKeyInfo k : result) {
          addOneKey(k);
        }
        setKeyTableVisible(true);
        deleteKey.setEnabled(false);
      }
    }

    /** Appends one key as a new row at the bottom of the table. */
    void addOneKey(SshKeyInfo k) {
      final FlexCellFormatter fmt = table.getFlexCellFormatter();
      final int row = table.getRowCount();
      table.insertRow(row);
      applyDataRowStyle(row);
      final CheckBox sel = new CheckBox();
      sel.addValueChangeHandler(updateDeleteHandler);
      table.setWidget(row, 1, sel);
      if (k.isValid()) {
        table.setText(row, 2, "");
        fmt.removeStyleName(
            row,
            2, //
            Gerrit.RESOURCES.css().sshKeyPanelInvalid());
      } else {
        table.setText(row, 2, Util.C.sshKeyInvalid());
        fmt.addStyleName(row, 2, Gerrit.RESOURCES.css().sshKeyPanelInvalid());
      }
      table.setText(row, 3, k.algorithm());
      CopyableLabel keyLabel = new CopyableLabel(k.sshPublicKey());
      keyLabel.setPreviewText(elide(k.encodedKey(), 40));
      table.setWidget(row, 4, keyLabel);
      table.setText(row, 5, k.comment());
      fmt.addStyleName(row, 1, Gerrit.RESOURCES.css().iconCell());
      fmt.addStyleName(row, 4, Gerrit.RESOURCES.css().sshKeyPanelEncodedKey());
      for (int c = 2; c <= 5; c++) {
        fmt.addStyleName(row, c, Gerrit.RESOURCES.css().dataCell());
      }
      setRowItem(row, k);
    }

    /** Enables the delete button iff at least one row's checkbox is checked. */
    void updateDeleteButton() {
      boolean on = false;
      for (int row = 1; row < table.getRowCount(); row++) {
        CheckBox sel = (CheckBox) table.getWidget(row, 1);
        if (sel.getValue()) {
          on = true;
          break;
        }
      }
      deleteKey.setEnabled(on);
    }
  }

  /**
   * Shortens {@code s} to at most {@code len} characters by keeping the head
   * and the last 10 characters joined with "...". Returns {@code s} unchanged
   * when it is null, already fits, or {@code len} is too small to elide.
   */
  static String elide(String s, int len) {
    // Use <= so a string of exactly len characters is returned as-is.
    // The previous strict < sent such strings through the elide path,
    // producing (len - 10) + 3 + 10 = len + 3 characters — LONGER than
    // the original string.
    if (s == null || s.length() <= len || len <= 10) {
      return s;
    }
    return s.substring(0, len - 10) + "..." + s.substring(s.length() - 10);
  }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataproc/v1/jobs.proto
package com.google.cloud.dataproc.v1;
/**
*
*
* <pre>
* Encapsulates the full scoping used to reference a job.
* </pre>
*
* Protobuf type {@code google.cloud.dataproc.v1.JobReference}
*/
public final class JobReference extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.JobReference)
JobReferenceOrBuilder {
private static final long serialVersionUID = 0L;
// Use JobReference.newBuilder() to construct.
private JobReference(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private JobReference() {
projectId_ = "";
jobId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new JobReference();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private JobReference(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
projectId_ = s;
break;
}
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
jobId_ = s;
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataproc.v1.JobsProto
.internal_static_google_cloud_dataproc_v1_JobReference_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataproc.v1.JobsProto
.internal_static_google_cloud_dataproc_v1_JobReference_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataproc.v1.JobReference.class,
com.google.cloud.dataproc.v1.JobReference.Builder.class);
}
public static final int PROJECT_ID_FIELD_NUMBER = 1;
private volatile java.lang.Object projectId_;
/**
*
*
* <pre>
* Optional. The ID of the Google Cloud Platform project that the job belongs to. If
* specified, must match the request project ID.
* </pre>
*
* <code>string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The projectId.
*/
@java.lang.Override
public java.lang.String getProjectId() {
java.lang.Object ref = projectId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
projectId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The ID of the Google Cloud Platform project that the job belongs to. If
* specified, must match the request project ID.
* </pre>
*
* <code>string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for projectId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectIdBytes() {
java.lang.Object ref = projectId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
projectId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int JOB_ID_FIELD_NUMBER = 2;
private volatile java.lang.Object jobId_;
/**
*
*
* <pre>
* Optional. The job ID, which must be unique within the project.
* The ID must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), or hyphens (-). The maximum length is 100 characters.
* If not specified by the caller, the job ID will be provided by the server.
* </pre>
*
* <code>string job_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The jobId.
*/
@java.lang.Override
public java.lang.String getJobId() {
java.lang.Object ref = jobId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
jobId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The job ID, which must be unique within the project.
* The ID must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), or hyphens (-). The maximum length is 100 characters.
* If not specified by the caller, the job ID will be provided by the server.
* </pre>
*
* <code>string job_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for jobId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getJobIdBytes() {
java.lang.Object ref = jobId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
jobId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, projectId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(jobId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, jobId_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, projectId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(jobId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, jobId_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dataproc.v1.JobReference)) {
return super.equals(obj);
}
com.google.cloud.dataproc.v1.JobReference other =
(com.google.cloud.dataproc.v1.JobReference) obj;
if (!getProjectId().equals(other.getProjectId())) return false;
if (!getJobId().equals(other.getJobId())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER;
hash = (53 * hash) + getProjectId().hashCode();
hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
hash = (53 * hash) + getJobId().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dataproc.v1.JobReference parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.JobReference parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.JobReference parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.JobReference parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.JobReference parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.JobReference parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.JobReference parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.JobReference parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.JobReference parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.JobReference parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.JobReference parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.JobReference parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.dataproc.v1.JobReference prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Encapsulates the full scoping used to reference a job.
* </pre>
*
* Protobuf type {@code google.cloud.dataproc.v1.JobReference}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.JobReference)
com.google.cloud.dataproc.v1.JobReferenceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataproc.v1.JobsProto
.internal_static_google_cloud_dataproc_v1_JobReference_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataproc.v1.JobsProto
.internal_static_google_cloud_dataproc_v1_JobReference_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataproc.v1.JobReference.class,
com.google.cloud.dataproc.v1.JobReference.Builder.class);
}
// Construct using com.google.cloud.dataproc.v1.JobReference.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
projectId_ = "";
jobId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataproc.v1.JobsProto
.internal_static_google_cloud_dataproc_v1_JobReference_descriptor;
}
@java.lang.Override
public com.google.cloud.dataproc.v1.JobReference getDefaultInstanceForType() {
return com.google.cloud.dataproc.v1.JobReference.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dataproc.v1.JobReference build() {
com.google.cloud.dataproc.v1.JobReference result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataproc.v1.JobReference buildPartial() {
com.google.cloud.dataproc.v1.JobReference result =
new com.google.cloud.dataproc.v1.JobReference(this);
result.projectId_ = projectId_;
result.jobId_ = jobId_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dataproc.v1.JobReference) {
return mergeFrom((com.google.cloud.dataproc.v1.JobReference) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dataproc.v1.JobReference other) {
if (other == com.google.cloud.dataproc.v1.JobReference.getDefaultInstance()) return this;
if (!other.getProjectId().isEmpty()) {
projectId_ = other.projectId_;
onChanged();
}
if (!other.getJobId().isEmpty()) {
jobId_ = other.jobId_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.dataproc.v1.JobReference parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.dataproc.v1.JobReference) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object projectId_ = "";
/**
*
*
* <pre>
* Optional. The ID of the Google Cloud Platform project that the job belongs to. If
* specified, must match the request project ID.
* </pre>
*
* <code>string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The projectId.
*/
public java.lang.String getProjectId() {
java.lang.Object ref = projectId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
projectId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The ID of the Google Cloud Platform project that the job belongs to. If
* specified, must match the request project ID.
* </pre>
*
* <code>string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for projectId.
*/
public com.google.protobuf.ByteString getProjectIdBytes() {
java.lang.Object ref = projectId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
projectId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The ID of the Google Cloud Platform project that the job belongs to. If
* specified, must match the request project ID.
* </pre>
*
* <code>string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The projectId to set.
* @return This builder for chaining.
*/
public Builder setProjectId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
projectId_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The ID of the Google Cloud Platform project that the job belongs to. If
* specified, must match the request project ID.
* </pre>
*
* <code>string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearProjectId() {
projectId_ = getDefaultInstance().getProjectId();
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The ID of the Google Cloud Platform project that the job belongs to. If
* specified, must match the request project ID.
* </pre>
*
* <code>string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for projectId to set.
* @return This builder for chaining.
*/
public Builder setProjectIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
projectId_ = value;
onChanged();
return this;
}
private java.lang.Object jobId_ = "";
/**
*
*
* <pre>
* Optional. The job ID, which must be unique within the project.
* The ID must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), or hyphens (-). The maximum length is 100 characters.
* If not specified by the caller, the job ID will be provided by the server.
* </pre>
*
* <code>string job_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The jobId.
*/
public java.lang.String getJobId() {
java.lang.Object ref = jobId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
jobId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The job ID, which must be unique within the project.
* The ID must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), or hyphens (-). The maximum length is 100 characters.
* If not specified by the caller, the job ID will be provided by the server.
* </pre>
*
* <code>string job_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for jobId.
*/
public com.google.protobuf.ByteString getJobIdBytes() {
java.lang.Object ref = jobId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
jobId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The job ID, which must be unique within the project.
* The ID must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), or hyphens (-). The maximum length is 100 characters.
* If not specified by the caller, the job ID will be provided by the server.
* </pre>
*
* <code>string job_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The jobId to set.
* @return This builder for chaining.
*/
public Builder setJobId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
jobId_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The job ID, which must be unique within the project.
* The ID must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), or hyphens (-). The maximum length is 100 characters.
* If not specified by the caller, the job ID will be provided by the server.
* </pre>
*
* <code>string job_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearJobId() {
jobId_ = getDefaultInstance().getJobId();
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The job ID, which must be unique within the project.
* The ID must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), or hyphens (-). The maximum length is 100 characters.
* If not specified by the caller, the job ID will be provided by the server.
* </pre>
*
* <code>string job_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for jobId to set.
* @return This builder for chaining.
*/
public Builder setJobIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
jobId_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.JobReference)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.JobReference)
private static final com.google.cloud.dataproc.v1.JobReference DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.JobReference();
}
public static com.google.cloud.dataproc.v1.JobReference getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless shared parser; each call produces one JobReference from a coded stream.
private static final com.google.protobuf.Parser<JobReference> PARSER =
        new com.google.protobuf.AbstractParser<JobReference>() {
            @java.lang.Override
            public JobReference parsePartialFrom(
                    com.google.protobuf.CodedInputStream input,
                    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                    throws com.google.protobuf.InvalidProtocolBufferException {
                // Delegates to the parsing constructor generated for this message.
                return new JobReference(input, extensionRegistry);
            }
        };

/** Returns the static parser for JobReference messages. */
public static com.google.protobuf.Parser<JobReference> parser() {
    return PARSER;
}

/** Returns the parser for this message type; identical to {@link #parser()}. */
@java.lang.Override
public com.google.protobuf.Parser<JobReference> getParserForType() {
    return PARSER;
}

/** Returns the default instance for this message type. */
@java.lang.Override
public com.google.cloud.dataproc.v1.JobReference getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xbib.elasticsearch.common.termlist.math;
import java.math.BigDecimal;
/**
* Utilities for comparing numbers.
*
*/
public class Precision {

    /**
     * Smallest positive number such that {@code 1 - EPSILON} is not
     * numerically equal to 1: {@value}.
     */
    public static final double EPSILON = 0x1.0p-53;

    /**
     * Safe minimum, such that {@code 1 / SAFE_MIN} does not overflow.
     * In IEEE 754 arithmetic, this is also the smallest normalized
     * number 2<sup>-1022</sup>: {@value}.
     */
    public static final double SAFE_MIN = 0x1.0p-1022;

    /** Offset to order signed double numbers lexicographically. */
    private static final long SGN_MASK = 0x8000000000000000L;

    /** Offset to order signed float numbers lexicographically. */
    private static final int SGN_MASK_FLOAT = 0x80000000;

    /**
     * Private constructor: static utility class, not instantiable.
     */
    private Precision() {}

    /**
     * Compares two numbers given some amount of allowed error.
     *
     * @param x the first number
     * @param y the second number
     * @param eps the amount of error to allow when checking for equality
     * @return <ul>
     * <li>0 if {@link #equals(double, double, double) equals(x, y, eps)}</li>
     * <li>&lt; 0 if !{@link #equals(double, double, double) equals(x, y, eps)} &amp;&amp; x &lt; y</li>
     * <li>&gt; 0 if !{@link #equals(double, double, double) equals(x, y, eps)} &amp;&amp; x &gt; y</li>
     * </ul>
     */
    public static int compareTo(double x, double y, double eps) {
        if (equals(x, y, eps)) {
            return 0;
        } else if (x < y) {
            return -1;
        }
        return 1;
    }

    /**
     * Compares two numbers given some amount of allowed error.
     * Two float numbers are considered equal if there are {@code (maxUlps - 1)}
     * (or fewer) floating point numbers between them, i.e. two adjacent floating
     * point numbers are considered equal.
     * Adapted from <a
     * href="http://www.cygnus-software.com/papers/comparingfloats/comparingfloats.htm">
     * Bruce Dawson</a>
     *
     * @param x first value
     * @param y second value
     * @param maxUlps {@code (maxUlps - 1)} is the number of floating point
     * values between {@code x} and {@code y}.
     * @return <ul>
     * <li>0 if {@link #equals(double, double, int) equals(x, y, maxUlps)}</li>
     * <li>&lt; 0 if !{@link #equals(double, double, int) equals(x, y, maxUlps)} &amp;&amp; x &lt; y</li>
     * <li>&gt; 0 if !{@link #equals(double, double, int) equals(x, y, maxUlps)} &amp;&amp; x &gt; y</li>
     * </ul>
     */
    public static int compareTo(final double x, final double y, final int maxUlps) {
        if (equals(x, y, maxUlps)) {
            return 0;
        } else if (x < y) {
            return -1;
        }
        return 1;
    }

    /**
     * Returns true iff they are equal as defined by
     * {@link #equals(float,float,int) equals(x, y, 1)}.
     *
     * @param x first value
     * @param y second value
     * @return {@code true} if the values are equal.
     */
    public static boolean equals(float x, float y) {
        return equals(x, y, 1);
    }

    /**
     * Returns true if both arguments are NaN or neither is NaN and they are
     * equal as defined by {@link #equals(float,float) equals(x, y, 1)}.
     *
     * @param x first value
     * @param y second value
     * @return {@code true} if the values are equal or both are NaN.
     */
    public static boolean equalsIncludingNaN(float x, float y) {
        return (Float.isNaN(x) && Float.isNaN(y)) || equals(x, y, 1);
    }

    /**
     * Returns true if both arguments are equal or within the range of allowed
     * error (inclusive).
     *
     * @param x first value
     * @param y second value
     * @param eps the amount of absolute error to allow.
     * @return {@code true} if the values are equal or within range of each other.
     */
    public static boolean equals(float x, float y, float eps) {
        return equals(x, y, 1) || Math.abs(y - x) <= eps;
    }

    /**
     * Returns true if both arguments are NaN or are equal or within the range
     * of allowed error (inclusive).
     *
     * @param x first value
     * @param y second value
     * @param eps the amount of absolute error to allow.
     * @return {@code true} if the values are equal or within range of each other,
     * or both are NaN.
     */
    public static boolean equalsIncludingNaN(float x, float y, float eps) {
        return equalsIncludingNaN(x, y) || (Math.abs(y - x) <= eps);
    }

    /**
     * Returns true if both arguments are equal or within the range of allowed
     * error (inclusive).
     * Two float numbers are considered equal if there are {@code (maxUlps - 1)}
     * (or fewer) floating point numbers between them, i.e. two adjacent floating
     * point numbers are considered equal.
     * Adapted from <a
     * href="http://www.cygnus-software.com/papers/comparingfloats/comparingfloats.htm">
     * Bruce Dawson</a>
     *
     * @param x first value
     * @param y second value
     * @param maxUlps {@code (maxUlps - 1)} is the number of floating point
     * values between {@code x} and {@code y}. Must be non-negative.
     * @return {@code true} if there are fewer than {@code maxUlps} floating
     * point values between {@code x} and {@code y}.
     */
    public static boolean equals(float x, float y, int maxUlps) {
        int xInt = Float.floatToIntBits(x);
        int yInt = Float.floatToIntBits(y);
        // Make lexicographically ordered as a two's-complement integer.
        if (xInt < 0) {
            xInt = SGN_MASK_FLOAT - xInt;
        }
        if (yInt < 0) {
            yInt = SGN_MASK_FLOAT - yInt;
        }
        // Widen to long before subtracting: the int difference of values with
        // opposite signs can wrap (and can be exactly Integer.MIN_VALUE, whose
        // Math.abs is still negative), which previously produced false positives
        // such as equals(-Float.MAX_VALUE, Float.intBitsToFloat(8388609), 0).
        final boolean isEqual = Math.abs((long) xInt - yInt) <= maxUlps;
        // NaN is never equal to anything, regardless of bit distance.
        return isEqual && !Float.isNaN(x) && !Float.isNaN(y);
    }

    /**
     * Returns true if both arguments are NaN or if they are equal as defined
     * by {@link #equals(float,float,int) equals(x, y, maxUlps)}.
     *
     * @param x first value
     * @param y second value
     * @param maxUlps {@code (maxUlps - 1)} is the number of floating point
     * values between {@code x} and {@code y}.
     * @return {@code true} if both arguments are NaN or if there are less than
     * {@code maxUlps} floating point values between {@code x} and {@code y}.
     */
    public static boolean equalsIncludingNaN(float x, float y, int maxUlps) {
        return (Float.isNaN(x) && Float.isNaN(y)) || equals(x, y, maxUlps);
    }

    /**
     * Returns true iff they are equal as defined by
     * {@link #equals(double,double,int) equals(x, y, 1)}.
     *
     * @param x first value
     * @param y second value
     * @return {@code true} if the values are equal.
     */
    public static boolean equals(double x, double y) {
        return equals(x, y, 1);
    }

    /**
     * Returns true if both arguments are NaN or neither is NaN and they are
     * equal as defined by {@link #equals(double,double) equals(x, y, 1)}.
     *
     * @param x first value
     * @param y second value
     * @return {@code true} if the values are equal or both are NaN.
     */
    public static boolean equalsIncludingNaN(double x, double y) {
        return (Double.isNaN(x) && Double.isNaN(y)) || equals(x, y, 1);
    }

    /**
     * Returns {@code true} if there is no double value strictly between the
     * arguments or the difference between them is within the range of allowed
     * error (inclusive).
     *
     * @param x First value.
     * @param y Second value.
     * @param eps Amount of allowed absolute error.
     * @return {@code true} if the values are two adjacent floating point
     * numbers or they are within range of each other.
     */
    public static boolean equals(double x, double y, double eps) {
        return equals(x, y, 1) || Math.abs(y - x) <= eps;
    }

    /**
     * Returns true if both arguments are NaN or are equal or within the range
     * of allowed error (inclusive).
     *
     * @param x first value
     * @param y second value
     * @param eps the amount of absolute error to allow.
     * @return {@code true} if the values are equal or within range of each other,
     * or both are NaN.
     */
    public static boolean equalsIncludingNaN(double x, double y, double eps) {
        return equalsIncludingNaN(x, y) || (Math.abs(y - x) <= eps);
    }

    /**
     * Returns true if both arguments are equal or within the range of allowed
     * error (inclusive).
     * Two double numbers are considered equal if there are {@code (maxUlps - 1)}
     * (or fewer) floating point numbers between them, i.e. two adjacent floating
     * point numbers are considered equal.
     * Adapted from <a
     * href="http://www.cygnus-software.com/papers/comparingfloats/comparingfloats.htm">
     * Bruce Dawson</a>
     *
     * @param x first value
     * @param y second value
     * @param maxUlps {@code (maxUlps - 1)} is the number of floating point
     * values between {@code x} and {@code y}. Must be non-negative.
     * @return {@code true} if there are fewer than {@code maxUlps} floating
     * point values between {@code x} and {@code y}.
     */
    public static boolean equals(double x, double y, int maxUlps) {
        long xInt = Double.doubleToLongBits(x);
        long yInt = Double.doubleToLongBits(y);
        // Make lexicographically ordered as a two's-complement integer.
        if (xInt < 0) {
            xInt = SGN_MASK - xInt;
        }
        if (yInt < 0) {
            yInt = SGN_MASK - yInt;
        }
        final long hi = Math.max(xInt, yInt);
        final long lo = Math.min(xInt, yInt);
        final boolean isEqual;
        if ((hi < 0) == (lo < 0)) {
            // Same half of the ordered range: the difference cannot overflow.
            isEqual = hi - lo <= maxUlps;
        } else {
            // Opposite halves (hi >= 0 > lo): "hi - lo" may overflow long — it
            // could even be exactly Long.MIN_VALUE, whose Math.abs is negative,
            // which previously made e.g. equals(-Infinity, Double.MIN_NORMAL, 0)
            // return true. "hi <= maxUlps + lo" is the same comparison,
            // evaluated overflow-free because lo < 0 and maxUlps is an int.
            isEqual = hi <= maxUlps + lo;
        }
        // NaN is never equal to anything, regardless of bit distance.
        return isEqual && !Double.isNaN(x) && !Double.isNaN(y);
    }

    /**
     * Returns true if both arguments are NaN or if they are equal as defined
     * by {@link #equals(double,double,int) equals(x, y, maxUlps)}.
     *
     * @param x first value
     * @param y second value
     * @param maxUlps {@code (maxUlps - 1)} is the number of floating point
     * values between {@code x} and {@code y}.
     * @return {@code true} if both arguments are NaN or if there are less than
     * {@code maxUlps} floating point values between {@code x} and {@code y}.
     */
    public static boolean equalsIncludingNaN(double x, double y, int maxUlps) {
        return (Double.isNaN(x) && Double.isNaN(y)) || equals(x, y, maxUlps);
    }

    /**
     * Rounds the given value to the specified number of decimal places.
     * The value is rounded using the {@link BigDecimal#ROUND_HALF_UP} method.
     *
     * @param x Value to round.
     * @param scale Number of digits to the right of the decimal point.
     * @return the rounded value.
     */
    public static double round(double x, int scale) {
        return round(x, scale, BigDecimal.ROUND_HALF_UP);
    }

    /**
     * Rounds the given value to the specified number of decimal places.
     * The value is rounded using the given method which is any method defined
     * in {@link BigDecimal}.
     * If {@code x} is infinite or {@code NaN}, then the value of {@code x} is
     * returned unchanged, regardless of the other parameters.
     *
     * @param x Value to round.
     * @param scale Number of digits to the right of the decimal point.
     * @param roundingMethod Rounding method as defined in {@link BigDecimal}.
     * @return the rounded value.
     * @throws ArithmeticException if {@code roundingMethod == ROUND_UNNECESSARY}
     * and the specified scaling operation would require rounding.
     * @throws IllegalArgumentException if {@code roundingMethod} does not
     * represent a valid rounding mode.
     */
    public static double round(double x, int scale, int roundingMethod) {
        try {
            return (new BigDecimal
                    (Double.toString(x))
                    .setScale(scale, roundingMethod))
                    .doubleValue();
        } catch (NumberFormatException ex) {
            // Double.toString produces "Infinity"/"NaN", which BigDecimal rejects:
            // pass infinities through unchanged and map NaN to NaN.
            if (Double.isInfinite(x)) {
                return x;
            } else {
                return Double.NaN;
            }
        }
    }

    /**
     * Rounds the given value to the specified number of decimal places.
     * The value is rounded using the {@link BigDecimal#ROUND_HALF_UP} method.
     *
     * @param x Value to round.
     * @param scale Number of digits to the right of the decimal point.
     * @return the rounded value.
     */
    public static float round(float x, int scale) {
        return round(x, scale, BigDecimal.ROUND_HALF_UP);
    }

    /**
     * Rounds the given value to the specified number of decimal places.
     * The value is rounded using the given method which is any method defined
     * in {@link BigDecimal}.
     *
     * @param x Value to round.
     * @param scale Number of digits to the right of the decimal point.
     * @param roundingMethod Rounding method as defined in {@link BigDecimal}.
     * @return the rounded value.
     */
    public static float round(float x, int scale, int roundingMethod) {
        // Scale into integer space, round there, then scale back; the sign is
        // factored into the scaling so roundUnscaled only sees the magnitude side.
        final float sign = Math.copySign(1f, x);
        final float factor = (float) Math.pow(10.0f, scale) * sign;
        return (float) roundUnscaled(x * factor, sign, roundingMethod) / factor;
    }

    /**
     * Rounds the given non-negative value to the "nearest" integer. Nearest is
     * determined by the rounding method specified. Rounding methods are defined
     * in {@link BigDecimal}.
     *
     * @param unscaled Value to round.
     * @param sign Sign of the original, scaled value.
     * @param roundingMethod Rounding method, as defined in {@link BigDecimal}.
     * @return the rounded value.
     * @throws IllegalArgumentException if {@code roundingMethod} is
     * {@code ROUND_UNNECESSARY} and rounding would be required, or is not a
     * valid rounding mode. (NOTE(review): the double overload surfaces
     * {@code ArithmeticException} for ROUND_UNNECESSARY via BigDecimal; this
     * float path intentionally keeps the historical IllegalArgumentException.)
     */
    private static double roundUnscaled(double unscaled,
                                        double sign,
                                        int roundingMethod) {
        switch (roundingMethod) {
        case BigDecimal.ROUND_CEILING :
            // Ceiling is toward +infinity of the ORIGINAL value, hence the sign check.
            if (sign == -1) {
                unscaled = Math.floor(Math.nextAfter(unscaled, Double.NEGATIVE_INFINITY));
            } else {
                unscaled = Math.ceil(Math.nextAfter(unscaled, Double.POSITIVE_INFINITY));
            }
            break;
        case BigDecimal.ROUND_DOWN :
            unscaled = Math.floor(Math.nextAfter(unscaled, Double.NEGATIVE_INFINITY));
            break;
        case BigDecimal.ROUND_FLOOR :
            if (sign == -1) {
                unscaled = Math.ceil(Math.nextAfter(unscaled, Double.POSITIVE_INFINITY));
            } else {
                unscaled = Math.floor(Math.nextAfter(unscaled, Double.NEGATIVE_INFINITY));
            }
            break;
        case BigDecimal.ROUND_HALF_DOWN : {
            // Nudge below the exact value so a tie (fraction == 0.5) rounds down.
            unscaled = Math.nextAfter(unscaled, Double.NEGATIVE_INFINITY);
            double fraction = unscaled - Math.floor(unscaled);
            if (fraction > 0.5) {
                unscaled = Math.ceil(unscaled);
            } else {
                unscaled = Math.floor(unscaled);
            }
            break;
        }
        case BigDecimal.ROUND_HALF_EVEN : {
            double fraction = unscaled - Math.floor(unscaled);
            if (fraction > 0.5) {
                unscaled = Math.ceil(unscaled);
            } else if (fraction < 0.5) {
                unscaled = Math.floor(unscaled);
            } else {
                // The following equality test is intentional and needed for rounding purposes
                if (Math.floor(unscaled) / 2.0 == Math.floor(Math
                        .floor(unscaled) / 2.0)) { // even
                    unscaled = Math.floor(unscaled);
                } else { // odd
                    unscaled = Math.ceil(unscaled);
                }
            }
            break;
        }
        case BigDecimal.ROUND_HALF_UP : {
            // Nudge above the exact value so a tie (fraction == 0.5) rounds up.
            unscaled = Math.nextAfter(unscaled, Double.POSITIVE_INFINITY);
            double fraction = unscaled - Math.floor(unscaled);
            if (fraction >= 0.5) {
                unscaled = Math.ceil(unscaled);
            } else {
                unscaled = Math.floor(unscaled);
            }
            break;
        }
        case BigDecimal.ROUND_UNNECESSARY :
            if (unscaled != Math.floor(unscaled)) {
                throw new IllegalArgumentException();
            }
            break;
        case BigDecimal.ROUND_UP :
            unscaled = Math.ceil(Math.nextAfter(unscaled, Double.POSITIVE_INFINITY));
            break;
        default :
            throw new IllegalArgumentException("invalid rounding");
        }
        return unscaled;
    }

    /**
     * Computes a number {@code delta} close to {@code originalDelta} with
     * the property that <pre><code>
     *   x + delta - x
     * </code></pre>
     * is exactly machine-representable.
     * This is useful when computing numerical derivatives, in order to reduce
     * roundoff errors.
     *
     * @param x Value.
     * @param originalDelta Offset value.
     * @return a number {@code delta} so that {@code x + delta} and {@code x}
     * differ by a representable floating number.
     */
    public static double representableDelta(double x,
                                            double originalDelta) {
        return x + originalDelta - x;
    }
}
| |
/**
* <copyright>
* </copyright>
*
*/
package cruise.umple.umple.impl;
import cruise.umple.umple.Multiplicity_;
import cruise.umple.umple.SymmetricReflexiveAssociation_;
import cruise.umple.umple.UmplePackage;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Symmetric Reflexive Association </b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link cruise.umple.umple.impl.SymmetricReflexiveAssociation_Impl#getMultiplicity_1 <em>Multiplicity 1</em>}</li>
* <li>{@link cruise.umple.umple.impl.SymmetricReflexiveAssociation_Impl#getRoleName_1 <em>Role Name 1</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class SymmetricReflexiveAssociation_Impl extends MinimalEObjectImpl.Container implements SymmetricReflexiveAssociation_
{
  /**
   * The cached value of the '{@link #getMultiplicity_1() <em>Multiplicity 1</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * Lazily created: stays {@code null} until {@link #getMultiplicity_1()} is first called.
   * <!-- end-user-doc -->
   * @see #getMultiplicity_1()
   * @generated
   * @ordered
   */
  protected EList<Multiplicity_> multiplicity_1;
  /**
   * The default value of the '{@link #getRoleName_1() <em>Role Name 1</em>}' attribute.
   * <!-- begin-user-doc -->
   * {@code null} means "no role name set".
   * <!-- end-user-doc -->
   * @see #getRoleName_1()
   * @generated
   * @ordered
   */
  protected static final String ROLE_NAME_1_EDEFAULT = null;
  /**
   * The cached value of the '{@link #getRoleName_1() <em>Role Name 1</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getRoleName_1()
   * @generated
   * @ordered
   */
  protected String roleName_1 = ROLE_NAME_1_EDEFAULT;
  /**
   * <!-- begin-user-doc -->
   * Protected: instances are created via the generated Umple factory, not directly.
   * <!-- end-user-doc -->
   * @generated
   */
  protected SymmetricReflexiveAssociation_Impl()
  {
    super();
  }
  /**
   * <!-- begin-user-doc -->
   * Returns the static EMF metaclass describing this model object.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass()
  {
    return UmplePackage.eINSTANCE.getSymmetricReflexiveAssociation_();
  }
  /**
   * <!-- begin-user-doc -->
   * Returns the containment list of '<em>Multiplicity 1</em>' children,
   * creating the backing list on first access.
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<Multiplicity_> getMultiplicity_1()
  {
    if (multiplicity_1 == null)
    {
      multiplicity_1 = new EObjectContainmentEList<Multiplicity_>(Multiplicity_.class, this, UmplePackage.SYMMETRIC_REFLEXIVE_ASSOCIATION___MULTIPLICITY_1);
    }
    return multiplicity_1;
  }
  /**
   * <!-- begin-user-doc -->
   * Returns the '<em>Role Name 1</em>' attribute; may be {@code null}.
   * <!-- end-user-doc -->
   * @generated
   */
  public String getRoleName_1()
  {
    return roleName_1;
  }
  /**
   * <!-- begin-user-doc -->
   * Sets the '<em>Role Name 1</em>' attribute and fires a SET notification
   * to adapters when notification is required.
   * <!-- end-user-doc -->
   * @generated
   */
  public void setRoleName_1(String newRoleName_1)
  {
    String oldRoleName_1 = roleName_1;
    roleName_1 = newRoleName_1;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, UmplePackage.SYMMETRIC_REFLEXIVE_ASSOCIATION___ROLE_NAME_1, oldRoleName_1, roleName_1));
  }
  /**
   * <!-- begin-user-doc -->
   * Removes a contained child from the feature identified by {@code featureID};
   * only the Multiplicity 1 containment list holds children here.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
  {
    switch (featureID)
    {
      case UmplePackage.SYMMETRIC_REFLEXIVE_ASSOCIATION___MULTIPLICITY_1:
        return ((InternalEList<?>)getMultiplicity_1()).basicRemove(otherEnd, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }
  /**
   * <!-- begin-user-doc -->
   * Reflective getter used by the EMF framework; dispatches on {@code featureID}.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType)
  {
    switch (featureID)
    {
      case UmplePackage.SYMMETRIC_REFLEXIVE_ASSOCIATION___MULTIPLICITY_1:
        return getMultiplicity_1();
      case UmplePackage.SYMMETRIC_REFLEXIVE_ASSOCIATION___ROLE_NAME_1:
        return getRoleName_1();
    }
    return super.eGet(featureID, resolve, coreType);
  }
  /**
   * <!-- begin-user-doc -->
   * Reflective setter used by the EMF framework; the list feature is replaced
   * wholesale (clear then addAll), the attribute is set directly.
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue)
  {
    switch (featureID)
    {
      case UmplePackage.SYMMETRIC_REFLEXIVE_ASSOCIATION___MULTIPLICITY_1:
        getMultiplicity_1().clear();
        getMultiplicity_1().addAll((Collection<? extends Multiplicity_>)newValue);
        return;
      case UmplePackage.SYMMETRIC_REFLEXIVE_ASSOCIATION___ROLE_NAME_1:
        setRoleName_1((String)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }
  /**
   * <!-- begin-user-doc -->
   * Reflective unset: restores each feature to its default (empty list /
   * {@code ROLE_NAME_1_EDEFAULT}).
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID)
  {
    switch (featureID)
    {
      case UmplePackage.SYMMETRIC_REFLEXIVE_ASSOCIATION___MULTIPLICITY_1:
        getMultiplicity_1().clear();
        return;
      case UmplePackage.SYMMETRIC_REFLEXIVE_ASSOCIATION___ROLE_NAME_1:
        setRoleName_1(ROLE_NAME_1_EDEFAULT);
        return;
    }
    super.eUnset(featureID);
  }
  /**
   * <!-- begin-user-doc -->
   * Reflective "is set" test: true when a feature differs from its default.
   * Reads the fields directly so no lazy list is created as a side effect.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID)
  {
    switch (featureID)
    {
      case UmplePackage.SYMMETRIC_REFLEXIVE_ASSOCIATION___MULTIPLICITY_1:
        return multiplicity_1 != null && !multiplicity_1.isEmpty();
      case UmplePackage.SYMMETRIC_REFLEXIVE_ASSOCIATION___ROLE_NAME_1:
        return ROLE_NAME_1_EDEFAULT == null ? roleName_1 != null : !ROLE_NAME_1_EDEFAULT.equals(roleName_1);
    }
    return super.eIsSet(featureID);
  }
  /**
   * <!-- begin-user-doc -->
   * Appends the attribute values to the default EMF proxy-aware string form.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString()
  {
    if (eIsProxy()) return super.toString();
    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (roleName_1: ");
    result.append(roleName_1);
    result.append(')');
    return result.toString();
  }
} //SymmetricReflexiveAssociation_Impl
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.beanutils;
import java.beans.PropertyDescriptor;
import java.lang.ref.Reference;
import java.lang.ref.SoftReference;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
/**
* <p>Implementation of <code>DynaClass</code> for DynaBeans that wrap
* standard JavaBean instances.</p>
*
* <p>
* It is suggested that this class should not usually need to be used directly
* to create new <code>WrapDynaBean</code> instances.
* It's usually better to call the <code>WrapDynaBean</code> constructor directly.
* For example:</p>
* <code><pre>
* Object javaBean = ...;
* DynaBean wrapper = new WrapDynaBean(javaBean);
* </pre></code>
* <p>
*
* @author Craig McClanahan
* @version $Revision: 690380 $ $Date: 2008-08-29 22:04:38 +0200 (Fri, 29 Aug 2008) $
*/
public class WrapDynaClass implements DynaClass {
// ----------------------------------------------------------- Constructors
/**
* Construct a new WrapDynaClass for the specified JavaBean class. This
* constructor is private; WrapDynaClass instances will be created as
* needed via calls to the <code>createDynaClass(Class)</code> method.
*
* @param beanClass JavaBean class to be introspected around
*/
private WrapDynaClass(Class beanClass) {
this.beanClassRef = new SoftReference(beanClass);
this.beanClassName = beanClass.getName();
introspect();
}
// ----------------------------------------------------- Instance Variables
/**
* Name of the JavaBean class represented by this WrapDynaClass.
*/
private String beanClassName = null;
/**
* Reference to the JavaBean class represented by this WrapDynaClass.
*/
private Reference beanClassRef = null;
/**
* The JavaBean <code>Class</code> which is represented by this
* <code>WrapDynaClass</code>.
*
* @deprecated No longer initialized, use getBeanClass() method instead
*/
protected Class beanClass = null;
/**
* The set of PropertyDescriptors for this bean class.
*/
protected PropertyDescriptor[] descriptors = null;
/**
* The set of PropertyDescriptors for this bean class, keyed by the
* property name. Individual descriptor instances will be the same
* instances as those in the <code>descriptors</code> list.
*/
protected HashMap descriptorsMap = new HashMap();
/**
* The set of dynamic properties that are part of this DynaClass.
*/
protected DynaProperty[] properties = null;
/**
* The set of dynamic properties that are part of this DynaClass,
* keyed by the property name. Individual descriptor instances will
* be the same instances as those in the <code>properties</code> list.
*/
protected HashMap propertiesMap = new HashMap();
// ------------------------------------------------------- Static Variables
private static final ContextClassLoaderLocal CLASSLOADER_CACHE =
new ContextClassLoaderLocal() {
protected Object initialValue() {
return new WeakHashMap();
}
};
/**
* Get the wrap dyna classes cache
*/
private static Map getDynaClassesMap() {
return (Map)CLASSLOADER_CACHE.get();
}
/**
* The set of <code>WrapDynaClass</code> instances that have ever been
* created, keyed by the underlying bean Class. The keys to this map
* are Class objects, and the values are corresponding WrapDynaClass
* objects.
* <p>
* This static variable is safe even when this code is deployed via a
* shared classloader because it is keyed via a Class object. The same
* class loaded via two different classloaders will result in different
* entries in this map.
* <p>
* Note, however, that this HashMap can result in a memory leak. When
* this class is in a shared classloader it will retain references to
* classes loaded via a webapp classloader even after the webapp has been
* undeployed. That will prevent the entire classloader and all the classes
* it refers to and all their static members from being freed.
*
************* !!!!!!!!!!!! PLEASE NOTE !!!!!!!!!!!! *************
*
* THE FOLLOWING IS A NASTY HACK TO SO THAT BEANUTILS REMAINS BINARY
* COMPATIBLE WITH PREVIOUS RELEASES.
*
* There are two issues here:
*
* 1) Memory Issues: The static HashMap caused memory problems (See BEANUTILS-59)
* to resolve this it has been moved into a ContextClassLoaderLocal instance
* (named CLASSLOADER_CACHE above) which holds one copy per
* ClassLoader in a WeakHashMap.
*
* 2) Binary Compatibility: As the "dynaClasses" static HashMap is "protected"
* removing it breaks BeanUtils binary compatibility with previous versions.
* To resolve this all the methods have been overriden to delegate to the
* Map for the ClassLoader in the ContextClassLoaderLocal.
*
* @deprecated The dynaClasses Map will be removed in a subsequent release
*/
protected static HashMap dynaClasses = new HashMap() {
public void clear() {
getDynaClassesMap().clear();
}
public boolean containsKey(Object key) {
return getDynaClassesMap().containsKey(key);
}
public boolean containsValue(Object value) {
return getDynaClassesMap().containsValue(value);
}
public Set entrySet() {
return getDynaClassesMap().entrySet();
}
public boolean equals(Object o) {
return getDynaClassesMap().equals(o);
}
public Object get(Object key) {
return getDynaClassesMap().get(key);
}
public int hashCode() {
return getDynaClassesMap().hashCode();
}
public boolean isEmpty() {
return getDynaClassesMap().isEmpty();
}
public Set keySet() {
return getDynaClassesMap().keySet();
}
public Object put(Object key, Object value) {
return getDynaClassesMap().put(key, value);
}
public void putAll(Map m) {
getDynaClassesMap().putAll(m);
}
public Object remove(Object key) {
return getDynaClassesMap().remove(key);
}
public int size() {
return getDynaClassesMap().size();
}
public Collection values() {
return getDynaClassesMap().values();
}
};
// ------------------------------------------------------ DynaClass Methods
/**
* Return the class of the underlying wrapped bean.
*
* @return the class of the underlying wrapped bean
* @since 1.8.0
*/
protected Class getBeanClass() {
return (Class)beanClassRef.get();
}
/**
* Return the name of this DynaClass (analogous to the
* <code>getName()</code> method of <code>java.lang.Class</code), which
* allows the same <code>DynaClass</code> implementation class to support
* different dynamic classes, with different sets of properties.
*
* @return the name of the DynaClass
*/
public String getName() {
return beanClassName;
}
/**
* Return a property descriptor for the specified property, if it exists;
* otherwise, return <code>null</code>.
*
* @param name Name of the dynamic property for which a descriptor
* is requested
* @return The descriptor for the specified property
*
* @exception IllegalArgumentException if no property name is specified
*/
public DynaProperty getDynaProperty(String name) {
if (name == null) {
throw new IllegalArgumentException
("No property name specified");
}
return ((DynaProperty) propertiesMap.get(name));
}
/**
* <p>Return an array of <code>ProperyDescriptors</code> for the properties
* currently defined in this DynaClass. If no properties are defined, a
* zero-length array will be returned.</p>
*
* <p><strong>FIXME</strong> - Should we really be implementing
* <code>getBeanInfo()</code> instead, which returns property descriptors
* and a bunch of other stuff?</p>
*
* @return the set of properties for this DynaClass
*/
public DynaProperty[] getDynaProperties() {
return (properties);
}
/**
* <p>Instantiates a new standard JavaBean instance associated with
* this DynaClass and return it wrapped in a new WrapDynaBean
* instance. <strong>NOTE</strong> the JavaBean should have a
* no argument constructor.</p>
*
* <strong>NOTE</strong> - Most common use cases should not need to use
* this method. It is usually better to create new
* <code>WrapDynaBean</code> instances by calling its constructor.
* For example:</p>
* <code><pre>
* Object javaBean = ...;
* DynaBean wrapper = new WrapDynaBean(javaBean);
* </pre></code>
* <p>
* (This method is needed for some kinds of <code>DynaBean</code> framework.)
* </p>
*
* @return A new <code>DynaBean</code> instance
* @exception IllegalAccessException if the Class or the appropriate
* constructor is not accessible
* @exception InstantiationException if this Class represents an abstract
* class, an array class, a primitive type, or void; or if instantiation
* fails for some other reason
*/
public DynaBean newInstance()
throws IllegalAccessException, InstantiationException {
return new WrapDynaBean(getBeanClass().newInstance());
}
// --------------------------------------------------------- Public Methods
/**
* Return the PropertyDescriptor for the specified property name, if any;
* otherwise return <code>null</code>.
*
* @param name Name of the property to be retrieved
* @return The descriptor for the specified property
*/
public PropertyDescriptor getPropertyDescriptor(String name) {
return ((PropertyDescriptor) descriptorsMap.get(name));
}
// --------------------------------------------------------- Static Methods
/**
* Clear our cache of WrapDynaClass instances.
*/
public static void clear() {
getDynaClassesMap().clear();
}
/**
* Create (if necessary) and return a new <code>WrapDynaClass</code>
* instance for the specified bean class.
*
* @param beanClass Bean class for which a WrapDynaClass is requested
* @return A new <i>Wrap</i> {@link DynaClass}
*/
public static WrapDynaClass createDynaClass(Class beanClass) {
WrapDynaClass dynaClass =
(WrapDynaClass) getDynaClassesMap().get(beanClass);
if (dynaClass == null) {
dynaClass = new WrapDynaClass(beanClass);
getDynaClassesMap().put(beanClass, dynaClass);
}
return (dynaClass);
}
// ------------------------------------------------------ Protected Methods
    /**
     * Introspect our bean class to identify the supported properties.
     * Regular (scalar/indexed) properties are discovered first, then mapped
     * properties; both populate the shared <code>properties</code> array,
     * <code>propertiesMap</code>, and (for regular properties only)
     * <code>descriptorsMap</code>.
     */
    protected void introspect() {
        // Look up the property descriptors for this bean class
        Class beanClass = getBeanClass();
        PropertyDescriptor[] regulars =
            PropertyUtils.getPropertyDescriptors(beanClass);
        if (regulars == null) {
            // Normalize a null result to an empty array so the sizing
            // arithmetic below needs no special case.
            regulars = new PropertyDescriptor[0];
        }
        Map mappeds =
            PropertyUtils.getMappedPropertyDescriptors(beanClass);
        if (mappeds == null) {
            mappeds = new HashMap();
        }
        // Construct corresponding DynaProperty information
        // The array holds all regular properties first, then mapped ones.
        properties = new DynaProperty[regulars.length + mappeds.size()];
        for (int i = 0; i < regulars.length; i++) {
            descriptorsMap.put(regulars[i].getName(),
                    regulars[i]);
            properties[i] =
                    new DynaProperty(regulars[i].getName(),
                            regulars[i].getPropertyType());
            propertiesMap.put(properties[i].getName(),
                    properties[i]);
        }
        // Mapped properties are appended after the regular ones, all typed
        // as Map.class; they are NOT added to descriptorsMap.
        int j = regulars.length;
        Iterator names = mappeds.keySet().iterator();
        while (names.hasNext()) {
            String name = (String) names.next();
            PropertyDescriptor descriptor =
                    (PropertyDescriptor) mappeds.get(name);
            properties[j] =
                    new DynaProperty(descriptor.getName(),
                            Map.class);
            propertiesMap.put(properties[j].getName(),
                    properties[j]);
            j++;
        }
    }
}
| |
package framework.org.json.zip;
import framework.org.json.JSONException;
/*
Copyright (c) 2013 JSON.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
/**
* JSONzip is a compression scheme for JSON text.
* @author JSON.org
* @version 2014-05-20
*/
/**
* A Huffman encoder/decoder. It operates over a domain of integers, which may
* map to characters or other symbols. Symbols that are used frequently are
* given shorter codes than symbols that are used infrequently. This usually
* produces shorter messages.
*
* Initially, all of the symbols are given the same weight. The weight of a
* symbol is incremented by the tick method. The generate method is used to
* generate the encoding table. The table must be generated before encoding or
* decoding. You may regenerate the table with the latest weights at any time.
*
* After a million ticks, it is assumed that the distribution is well
* understood and that no more regeneration will be required.
*/
public class Huff implements None, PostMortem {
    /**
     * The number of symbols known to the encoder.
     */
    private final int domain;
    /**
     * The number of characters to process before generation is no longer done.
     */
    public static final int education = 1000000;
    /**
     * An array that maps symbol values to symbols. Slots [0, domain) are the
     * leaves; slots [domain, 2*domain-1) hold the internal nodes built by
     * generate().
     */
    private final Symbol[] symbols;
    /**
     * The root of the decoding table, and the terminal of the encoding table.
     */
    private Symbol table;
    /**
     * The number of characters left to learn to adapt the coding table.
     */
    private int toLearn;
    /**
     * Have any weights changed since the table was last generated?
     */
    private boolean upToDate = false;
    /**
     * The number of bits in the last symbol. This is used in tracing.
     */
    private int width;
    /**
     * A node of the Huffman tree. Leaves carry a value from the domain;
     * internal nodes carry the value none.
     */
    private static class Symbol implements PostMortem {
        public Symbol back;       // parent node; null only at the root
        public Symbol next;       // link used while symbols are kept in a sorted list
        public Symbol zero;       // child reached by a 0 bit
        public Symbol one;        // child reached by a 1 bit
        public final int integer; // the symbol's value, or none for internal nodes
        public long weight;       // accumulated tick count
        /**
         * Make a symbol representing a character or other value.
         *
         * @param integer
         *            The symbol's number
         */
        public Symbol(int integer) {
            this.integer = integer;
            this.weight = 0;
            this.next = null;
            this.back = null;
            this.one = null;
            this.zero = null;
        }
        /**
         * Recursively compare this subtree with another. Two symbols match
         * when their values, weights, root-ness, and both subtrees match.
         *
         * FIX: the original accumulated subtree results in a single local
         * (<code>result = zero.postMortem(...); result = one.postMortem(...)</code>),
         * so a mismatch in the zero subtree was silently overwritten by a
         * matching one subtree. Each subtree now fails fast.
         */
        public boolean postMortem(PostMortem pm) {
            Symbol that = (Symbol) pm;
            if (this.integer != that.integer || this.weight != that.weight) {
                return false;
            }
            // Both must be roots, or both must be interior/leaf nodes.
            if ((this.back == null) != (that.back == null)) {
                return false;
            }
            Symbol zero = this.zero;
            Symbol one = this.one;
            if (zero == null) {
                if (that.zero != null) {
                    return false;
                }
            } else if (!zero.postMortem(that.zero)) {
                return false;
            }
            if (one == null) {
                if (that.one != null) {
                    return false;
                }
            } else if (!one.postMortem(that.one)) {
                return false;
            }
            return true;
        }
    }
    /**
     * Construct a Huffman encoder/decoder.
     *
     * @param domain
     *            The number of values known to the object.
     */
    public Huff(int domain) {
        this.domain = domain;
        this.toLearn = education;
        // A binary tree with `domain` leaves has domain - 1 internal nodes.
        int length = domain * 2 - 1;
        this.symbols = new Symbol[length];
        // Make the leaf symbols.
        for (int i = 0; i < domain; i += 1) {
            symbols[i] = new Symbol(i);
        }
        // Make the links (internal nodes, filled in by generate()).
        for (int i = domain; i < length; i += 1) {
            symbols[i] = new Symbol(none);
        }
    }
    /**
     * Generate the encoding/decoding table. The table determines the bit
     * sequences used by the read and write methods.
     */
    public void generate() {
        if (!this.upToDate) {
            // Phase One: Sort the symbols by weight into a linked list.
            Symbol head = this.symbols[0];
            Symbol next;
            Symbol previous = head;
            Symbol symbol;
            this.table = null;
            head.next = null;
            for (int i = 1; i < this.domain; i += 1) {
                symbol = symbols[i];
                // If this symbol weighs less than the head, then it becomes the new head.
                if (symbol.weight < head.weight) {
                    symbol.next = head;
                    head = symbol;
                } else {
                    // We will start the search from the previous symbol instead of the head unless
                    // the current symbol weighs less than the previous symbol.
                    if (symbol.weight < previous.weight) {
                        previous = head;
                    }
                    // Find a connected pair (previous and next) where the symbol weighs the same
                    // or more than previous but less than the next. Link the symbol between them.
                    while (true) {
                        next = previous.next;
                        if (next == null || symbol.weight < next.weight) {
                            break;
                        }
                        previous = next;
                    }
                    symbol.next = next;
                    previous.next = symbol;
                    previous = symbol;
                }
            }
            // Phase Two: Make new symbols from the two lightest symbols until only one
            // symbol remains. The final symbol becomes the root of the table binary tree.
            int avail = this.domain;
            Symbol first;
            Symbol second;
            previous = head;
            while (true) {
                // Take the two lightest symbols off the front of the list
                // and combine them under a fresh internal node.
                first = head;
                second = first.next;
                head = second.next;
                symbol = this.symbols[avail];
                avail += 1;
                symbol.weight = first.weight + second.weight;
                symbol.zero = first;
                symbol.one = second;
                symbol.back = null;
                first.back = symbol;
                second.back = symbol;
                if (head == null) {
                    break;
                }
                // Insert the new symbol back into the sorted list.
                if (symbol.weight < head.weight) {
                    symbol.next = head;
                    head = symbol;
                    previous = head;
                } else {
                    while (true) {
                        next = previous.next;
                        if (next == null || symbol.weight < next.weight) {
                            break;
                        }
                        previous = next;
                    }
                    symbol.next = next;
                    previous.next = symbol;
                    previous = symbol;
                }
            }
            // The last remaining symbol is the root of the table.
            this.table = symbol;
            this.upToDate = true;
        }
    }
    /**
     * Verify one symbol: climb from its leaf to the root recording the bits,
     * then replay those bits from the root and check that they decode back
     * to the same symbol.
     */
    private boolean postMortem(int integer) {
        int[] bits = new int[this.domain];
        Symbol symbol = this.symbols[integer];
        if (symbol.integer != integer) {
            return false;
        }
        // Walk up to the root, recording which branch each parent uses.
        int i = 0;
        while (true) {
            Symbol back = symbol.back;
            if (back == null) {
                break;
            }
            if (back.zero == symbol) {
                bits[i] = 0;
            } else if (back.one == symbol) {
                bits[i] = 1;
            } else {
                // The child is not linked from its parent: broken tree.
                return false;
            }
            i += 1;
            symbol = back;
        }
        if (symbol != this.table) {
            return false;
        }
        // Replay the recorded bits downward; they must land on the same leaf
        // and consume exactly the recorded number of bits.
        this.width = 0;
        symbol = this.table;
        while (symbol.integer == none) {
            i -= 1;
            symbol = bits[i] != 0 ? symbol.one : symbol.zero;
        }
        return symbol.integer == integer && i == 0;
    }
    /**
     * Compare two Huffman tables.
     */
    public boolean postMortem(PostMortem pm) {
        // Go through every integer in the domain, generating its bit sequence, and
        // then prove that that bit sequence produces the same integer.
        for (int integer = 0; integer < this.domain; integer += 1) {
            if (!postMortem(integer)) {
                JSONzip.log("\nBad huff ");
                JSONzip.logchar(integer, integer);
                return false;
            }
        }
        return this.table.postMortem(((Huff) pm).table);
    }
    /**
     * Read bits until a symbol can be identified. The weight of the read
     * symbol will be incremented.
     *
     * @param bitreader
     *            The source of bits.
     * @return The integer value of the symbol.
     * @throws JSONException
     */
    public int read(BitReader bitreader) throws JSONException {
        try {
            this.width = 0;
            Symbol symbol = this.table;
            // Descend from the root, one bit per branch, until a leaf is hit.
            while (symbol.integer == none) {
                this.width += 1;
                symbol = bitreader.bit() ? symbol.one : symbol.zero;
            }
            tick(symbol.integer);
            if (JSONzip.probe) {
                JSONzip.logchar(symbol.integer, this.width);
            }
            return symbol.integer;
        } catch (Throwable e) {
            throw new JSONException(e);
        }
    }
    /**
     * Increase the weight associated with a value by 1.
     *
     * @param value
     *            The number of the symbol to tick
     */
    public void tick(int value) {
        // Learning stops after `education` ticks; after that the weights and
        // the generated table are frozen.
        if (this.toLearn > 0) {
            this.toLearn -= 1;
            this.symbols[value].weight += 1;
            this.upToDate = false;
        }
    }
    /**
     * Recur from a symbol back, emitting bits. We recur before emitting to
     * make the bits come out in the right order.
     *
     * @param symbol
     *            The symbol to write.
     * @param bitwriter
     *            The bitwriter to write it to.
     * @throws JSONException
     */
    private void write(Symbol symbol, BitWriter bitwriter)
            throws JSONException {
        try {
            Symbol back = symbol.back;
            if (back != null) {
                this.width += 1;
                // Emit the ancestors' bits first (root-to-leaf order).
                write(back, bitwriter);
                if (back.zero == symbol) {
                    bitwriter.zero();
                } else {
                    bitwriter.one();
                }
            }
        } catch (Throwable e) {
            throw new JSONException(e);
        }
    }
    /**
     * Write the bits corresponding to a symbol. The weight of the symbol will
     * be incremented.
     *
     * @param value
     *            The number of the symbol to write
     * @param bitwriter
     *            The destination of the bits.
     * @throws JSONException
     */
    public void write(int value, BitWriter bitwriter) throws JSONException {
        this.width = 0;
        write(this.symbols[value], bitwriter);
        tick(value);
        if (JSONzip.probe) {
            JSONzip.logchar(value, this.width);
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a
* href="http://docs.aws.amazon.com/goto/WebAPI/kinesisanalytics-2015-08-14/DeleteApplicationInputProcessingConfiguration"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteApplicationInputProcessingConfigurationRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The Kinesis Analytics application name. */
    private String applicationName;

    /** The version ID of the Kinesis Analytics application. */
    private Long currentApplicationVersionId;

    /**
     * The ID of the input configuration from which to delete the input processing configuration. A list of input IDs
     * for an application is available from the <a
     * href="https://docs.aws.amazon.com/kinesisanalytics/latest/dev/API_DescribeApplication.html"
     * >DescribeApplication</a> operation.
     */
    private String inputId;

    /**
     * Sets the Kinesis Analytics application name.
     *
     * @param applicationName
     *        The Kinesis Analytics application name.
     */
    public void setApplicationName(String applicationName) {
        this.applicationName = applicationName;
    }

    /**
     * Returns the Kinesis Analytics application name.
     *
     * @return The Kinesis Analytics application name.
     */
    public String getApplicationName() {
        return applicationName;
    }

    /**
     * Fluent form of {@link #setApplicationName(String)}.
     *
     * @param applicationName
     *        The Kinesis Analytics application name.
     * @return This request, so that setter calls can be chained.
     */
    public DeleteApplicationInputProcessingConfigurationRequest withApplicationName(String applicationName) {
        setApplicationName(applicationName);
        return this;
    }

    /**
     * Sets the version ID of the Kinesis Analytics application.
     *
     * @param currentApplicationVersionId
     *        The version ID of the Kinesis Analytics application.
     */
    public void setCurrentApplicationVersionId(Long currentApplicationVersionId) {
        this.currentApplicationVersionId = currentApplicationVersionId;
    }

    /**
     * Returns the version ID of the Kinesis Analytics application.
     *
     * @return The version ID of the Kinesis Analytics application.
     */
    public Long getCurrentApplicationVersionId() {
        return currentApplicationVersionId;
    }

    /**
     * Fluent form of {@link #setCurrentApplicationVersionId(Long)}.
     *
     * @param currentApplicationVersionId
     *        The version ID of the Kinesis Analytics application.
     * @return This request, so that setter calls can be chained.
     */
    public DeleteApplicationInputProcessingConfigurationRequest withCurrentApplicationVersionId(Long currentApplicationVersionId) {
        setCurrentApplicationVersionId(currentApplicationVersionId);
        return this;
    }

    /**
     * Sets the ID of the input configuration from which to delete the input processing configuration.
     *
     * @param inputId
     *        The ID of the input configuration from which to delete the input processing configuration. You can get a
     *        list of the input IDs for an application by using the <a
     *        href="https://docs.aws.amazon.com/kinesisanalytics/latest/dev/API_DescribeApplication.html"
     *        >DescribeApplication</a> operation.
     */
    public void setInputId(String inputId) {
        this.inputId = inputId;
    }

    /**
     * Returns the ID of the input configuration from which to delete the input processing configuration.
     *
     * @return The ID of the input configuration from which to delete the input processing configuration. You can get a
     *         list of the input IDs for an application by using the <a
     *         href="https://docs.aws.amazon.com/kinesisanalytics/latest/dev/API_DescribeApplication.html"
     *         >DescribeApplication</a> operation.
     */
    public String getInputId() {
        return inputId;
    }

    /**
     * Fluent form of {@link #setInputId(String)}.
     *
     * @param inputId
     *        The ID of the input configuration from which to delete the input processing configuration.
     * @return This request, so that setter calls can be chained.
     */
    public DeleteApplicationInputProcessingConfigurationRequest withInputId(String inputId) {
        setInputId(inputId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getApplicationName() != null) {
            sb.append("ApplicationName: ").append(getApplicationName()).append(",");
        }
        if (getCurrentApplicationVersionId() != null) {
            sb.append("CurrentApplicationVersionId: ").append(getCurrentApplicationVersionId()).append(",");
        }
        if (getInputId() != null) {
            sb.append("InputId: ").append(getInputId());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DeleteApplicationInputProcessingConfigurationRequest)) {
            return false;
        }
        DeleteApplicationInputProcessingConfigurationRequest that = (DeleteApplicationInputProcessingConfigurationRequest) obj;
        // Objects.equals treats two nulls as equal, matching the original
        // null-xor / equals pattern field by field.
        return java.util.Objects.equals(getApplicationName(), that.getApplicationName())
                && java.util.Objects.equals(getCurrentApplicationVersionId(), that.getCurrentApplicationVersionId())
                && java.util.Objects.equals(getInputId(), that.getInputId());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds with the same prime-31 accumulation (seeded at 1,
        // null -> 0) as the original hand-rolled loop, so values are unchanged.
        return java.util.Objects.hash(getApplicationName(), getCurrentApplicationVersionId(), getInputId());
    }

    @Override
    public DeleteApplicationInputProcessingConfigurationRequest clone() {
        return (DeleteApplicationInputProcessingConfigurationRequest) super.clone();
    }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.05.18 at 05:13:02 PM MST
//
package eml.ecoinformatics_org.resource_2_1;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
import eml.ecoinformatics_org.text_2_1.TextType;
/**
* <p>Java class for ConnectionDefinitionType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ConnectionDefinitionType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <choice>
* <sequence>
* <element name="schemeName">
* <complexType>
* <simpleContent>
* <extension base="<http://www.w3.org/2001/XMLSchema>string">
* <attribute name="system" type="{eml://ecoinformatics.org/resource-2.1.1}SystemType" />
* </extension>
* </simpleContent>
* </complexType>
* </element>
* <element name="description" type="{eml://ecoinformatics.org/text-2.1.1}TextType"/>
* <element name="parameterDefinition" maxOccurs="unbounded">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="name" type="{eml://ecoinformatics.org/resource-2.1.1}NonEmptyStringType"/>
* <element name="definition" type="{eml://ecoinformatics.org/resource-2.1.1}NonEmptyStringType"/>
* <element name="defaultValue" type="{eml://ecoinformatics.org/resource-2.1.1}NonEmptyStringType" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* <group ref="{eml://ecoinformatics.org/resource-2.1.1}ReferencesGroup"/>
* </choice>
* <attribute name="id" type="{eml://ecoinformatics.org/resource-2.1.1}IDType" />
* <attribute name="system" type="{eml://ecoinformatics.org/resource-2.1.1}SystemType" />
* <attribute name="scope" type="{eml://ecoinformatics.org/resource-2.1.1}ScopeType" default="document" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ConnectionDefinitionType", propOrder = {
    "schemeName",
    "description",
    "parameterDefinition",
    "references"
})
public class ConnectionDefinitionType {

    protected ConnectionDefinitionType.SchemeName schemeName;
    protected TextType description;
    protected List<ConnectionDefinitionType.ParameterDefinition> parameterDefinition;
    protected eml.ecoinformatics_org.view_2_1.ViewType.References references;
    @XmlAttribute(name = "id")
    protected List<String> id;
    @XmlAttribute(name = "system")
    protected List<String> system;
    @XmlAttribute(name = "scope")
    protected ScopeType scope;

    /**
     * Returns the scheme name element, or {@code null} when unset.
     */
    public ConnectionDefinitionType.SchemeName getSchemeName() {
        return schemeName;
    }

    /**
     * Replaces the scheme name element.
     *
     * @param value the new {@link ConnectionDefinitionType.SchemeName}, may be {@code null}
     */
    public void setSchemeName(ConnectionDefinitionType.SchemeName value) {
        this.schemeName = value;
    }

    /**
     * Returns the description, or {@code null} when unset.
     */
    public TextType getDescription() {
        return description;
    }

    /**
     * Replaces the description.
     *
     * @param value the new {@link TextType}, may be {@code null}
     */
    public void setDescription(TextType value) {
        this.description = value;
    }

    /**
     * Returns the live, lazily created list of parameter definitions.
     * Mutations of the returned list are reflected in this JAXB object,
     * which is why no setter exists for this property.
     */
    public List<ConnectionDefinitionType.ParameterDefinition> getParameterDefinition() {
        List<ConnectionDefinitionType.ParameterDefinition> list = this.parameterDefinition;
        if (list == null) {
            list = new ArrayList<ConnectionDefinitionType.ParameterDefinition>();
            this.parameterDefinition = list;
        }
        return list;
    }

    /**
     * Returns the references group, or {@code null} when unset.
     */
    public eml.ecoinformatics_org.view_2_1.ViewType.References getReferences() {
        return references;
    }

    /**
     * Replaces the references group.
     *
     * @param value the new references value, may be {@code null}
     */
    public void setReferences(eml.ecoinformatics_org.view_2_1.ViewType.References value) {
        this.references = value;
    }

    /**
     * Returns the live, lazily created list backing the {@code id} attribute.
     * Mutations of the returned list are reflected in this JAXB object.
     */
    public List<String> getId() {
        List<String> list = this.id;
        if (list == null) {
            list = new ArrayList<String>();
            this.id = list;
        }
        return list;
    }

    /**
     * Returns the live, lazily created list backing the {@code system}
     * attribute. Mutations of the returned list are reflected in this object.
     */
    public List<String> getSystem() {
        List<String> list = this.system;
        if (list == null) {
            list = new ArrayList<String>();
            this.system = list;
        }
        return list;
    }

    /**
     * Returns the scope attribute, defaulting to
     * {@link ScopeType#DOCUMENT} when no explicit value was set.
     */
    public ScopeType getScope() {
        return scope == null ? ScopeType.DOCUMENT : scope;
    }

    /**
     * Replaces the scope attribute.
     *
     * @param value the new {@link ScopeType}, may be {@code null}
     */
    public void setScope(ScopeType value) {
        this.scope = value;
    }

    /**
     * Anonymous complex type holding one connection parameter definition:
     * a required name and definition, plus an optional default value.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "name",
        "definition",
        "defaultValue"
    })
    public static class ParameterDefinition {

        @XmlElement(required = true)
        protected String name;
        @XmlElement(required = true)
        protected String definition;
        protected String defaultValue;

        /** Returns the parameter name, or {@code null} when unset. */
        public String getName() {
            return name;
        }

        /**
         * Replaces the parameter name.
         *
         * @param value the new name, may be {@code null}
         */
        public void setName(String value) {
            this.name = value;
        }

        /** Returns the parameter definition, or {@code null} when unset. */
        public String getDefinition() {
            return definition;
        }

        /**
         * Replaces the parameter definition.
         *
         * @param value the new definition, may be {@code null}
         */
        public void setDefinition(String value) {
            this.definition = value;
        }

        /** Returns the default value, or {@code null} when unset. */
        public String getDefaultValue() {
            return defaultValue;
        }

        /**
         * Replaces the default value.
         *
         * @param value the new default value, may be {@code null}
         */
        public void setDefaultValue(String value) {
            this.defaultValue = value;
        }
    }

    /**
     * Anonymous complex type for the scheme name: a string value plus an
     * optional {@code system} attribute list.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "value"
    })
    public static class SchemeName {

        @XmlValue
        protected String value;
        @XmlAttribute(name = "system")
        protected List<String> system;

        /** Returns the element's text value, or {@code null} when unset. */
        public String getValue() {
            return value;
        }

        /**
         * Replaces the element's text value.
         *
         * @param value the new value, may be {@code null}
         */
        public void setValue(String value) {
            this.value = value;
        }

        /**
         * Returns the live, lazily created list backing the {@code system}
         * attribute. Mutations of the returned list are reflected in this
         * object, which is why no setter exists for this property.
         */
        public List<String> getSystem() {
            List<String> list = this.system;
            if (list == null) {
                list = new ArrayList<String>();
                this.system = list;
            }
            return list;
        }
    }
}
| |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// http://code.google.com/p/protobuf/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.protobuf;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
/**
* Immutable sequence of bytes. Substring is supported by sharing the reference
* to the immutable underlying bytes, as with {@link String}. Concatenation is
* likewise supported without copying (long strings) by building a tree of
* pieces in {@link RopeByteString}.
* <p>
* Like {@link String}, the contents of a {@link ByteString} can never be
* observed to change, not even in the presence of a data race or incorrect
* API usage in the client code.
*
* @author crazybob@google.com Bob Lee
* @author kenton@google.com Kenton Varda
* @author carlanton@google.com Carl Haverl
* @author martinrb@google.com Martin Buchholz
*/
public abstract class ByteString implements Iterable<Byte> {
/**
* When two strings to be concatenated have a combined length shorter than
* this, we just copy their bytes on {@link #concat(ByteString)}.
* The trade-off is copy size versus the overhead of creating tree nodes
* in {@link RopeByteString}.
*/
static final int CONCATENATE_BY_COPY_SIZE = 128;
/**
* When copying an InputStream into a ByteString with .readFrom(),
* the chunks in the underlying rope start at 256 bytes, but double
* each iteration up to 8192 bytes.
*/
static final int MIN_READ_FROM_CHUNK_SIZE = 0x100; // 256b
static final int MAX_READ_FROM_CHUNK_SIZE = 0x2000; // 8k
/**
* Empty {@code ByteString}.
*/
public static final ByteString EMPTY = new LiteralByteString(new byte[0]);
// This constructor is here to prevent subclassing outside of this package.
ByteString() {}
/**
* Gets the byte at the given index. This method should be used only for
* random access to individual bytes. To access bytes sequentially, use the
* {@link ByteIterator} returned by {@link #iterator()}, and call {@link
* #substring(int, int)} first if necessary.
*
* @param index index of byte
* @return the value
* @throws ArrayIndexOutOfBoundsException {@code index} is < 0 or >= size
*/
public abstract byte byteAt(int index);
/**
* Return a {@link ByteString.ByteIterator} over the bytes in the ByteString.
* To avoid auto-boxing, you may get the iterator manually and call
* {@link ByteIterator#nextByte()}.
*
* @return the iterator
*/
public abstract ByteIterator iterator();
/**
* This interface extends {@code Iterator<Byte>}, so that we can return an
* unboxed {@code byte}.
*/
public interface ByteIterator extends Iterator<Byte> {
/**
* An alternative to {@link Iterator#next()} that returns an
* unboxed primitive {@code byte}.
*
* @return the next {@code byte} in the iteration
* @throws NoSuchElementException if the iteration has no more elements
*/
byte nextByte();
}
/**
* Gets the number of bytes.
*
* @return size in bytes
*/
public abstract int size();
/**
 * Reports whether this byte string contains zero bytes.
 *
 * @return true if this is zero bytes long
 */
public boolean isEmpty() {
  return 0 == size();
}
// =================================================================
// ByteString -> substring
/**
 * Returns the suffix of this string starting at {@code beginIndex},
 * inclusive, and extending to the end. The result shares the underlying
 * data rather than copying it.
 *
 * @param beginIndex start at this index
 * @return substring sharing underlying data
 * @throws IndexOutOfBoundsException if {@code beginIndex < 0} or
 *     {@code beginIndex > size()}.
 */
public ByteString substring(int beginIndex) {
  final int endIndex = size();
  return substring(beginIndex, endIndex);
}
/**
* Return the substring from {@code beginIndex}, inclusive, to {@code
* endIndex}, exclusive.
*
* @param beginIndex start at this index
* @param endIndex the last character is the one before this index
* @return substring sharing underlying data
* @throws IndexOutOfBoundsException if {@code beginIndex < 0},
* {@code endIndex > size()}, or {@code beginIndex > endIndex}.
*/
public abstract ByteString substring(int beginIndex, int endIndex);
/**
 * Tests whether this byte string begins with the specified prefix, in the
 * manner of {@link String#startsWith(String)}.
 *
 * @param prefix the prefix.
 * @return <code>true</code> if the byte sequence represented by the
 *         argument is a prefix of the byte sequence represented by
 *         this string; <code>false</code> otherwise.
 */
public boolean startsWith(ByteString prefix) {
  final int prefixSize = prefix.size();
  if (size() < prefixSize) {
    return false;
  }
  return substring(0, prefixSize).equals(prefix);
}
// =================================================================
// byte[] -> ByteString
/**
 * Copies {@code size} bytes from {@code bytes}, starting at {@code offset},
 * into a new {@code ByteString}.
 *
 * @param bytes source array
 * @param offset offset in source array
 * @param size number of bytes to copy
 * @return new {@code ByteString}
 */
public static ByteString copyFrom(byte[] bytes, int offset, int size) {
  // Defensive copy: later mutation of the caller's array must not be
  // observable through the immutable ByteString.
  final byte[] snapshot = new byte[size];
  System.arraycopy(bytes, offset, snapshot, 0, size);
  return new LiteralByteString(snapshot);
}
/**
 * Copies the entire given array into a {@code ByteString}.
 *
 * @param bytes to copy
 * @return new {@code ByteString}
 */
public static ByteString copyFrom(byte[] bytes) {
  final int length = bytes.length;
  return copyFrom(bytes, 0, length);
}
/**
 * Copies the next {@code size} bytes from a {@code java.nio.ByteBuffer}
 * into a {@code ByteString}, advancing the buffer's position.
 *
 * @param bytes source buffer
 * @param size number of bytes to copy
 * @return new {@code ByteString}
 * @throws java.nio.BufferUnderflowException if fewer than {@code size}
 *     bytes remain in the buffer
 */
public static ByteString copyFrom(ByteBuffer bytes, int size) {
  final byte[] drained = new byte[size];
  bytes.get(drained);
  return new LiteralByteString(drained);
}
/**
 * Copies all remaining bytes from a {@code java.nio.ByteBuffer} into a
 * {@code ByteString}, advancing the buffer's position to its limit.
 *
 * @param bytes sourceBuffer
 * @return new {@code ByteString}
 */
public static ByteString copyFrom(ByteBuffer bytes) {
  final int remaining = bytes.remaining();
  return copyFrom(bytes, remaining);
}
/**
 * Encodes {@code text} with the named charset and wraps the resulting
 * bytes in a {@code ByteString}.
 *
 * @param text source string
 * @param charsetName encoding to use
 * @return new {@code ByteString}
 * @throws UnsupportedEncodingException if the encoding isn't found
 */
public static ByteString copyFrom(String text, String charsetName)
    throws UnsupportedEncodingException {
  final byte[] encoded = text.getBytes(charsetName);
  return new LiteralByteString(encoded);
}
/**
 * Encodes {@code text} as UTF-8 and returns the bytes as a
 * {@code ByteString}.
 *
 * @param text source string
 * @return new {@code ByteString}
 */
public static ByteString copyFromUtf8(String text) {
  final byte[] encoded;
  try {
    encoded = text.getBytes("UTF-8");
  } catch (UnsupportedEncodingException e) {
    // Every conforming JVM supports UTF-8, so this is unreachable.
    throw new RuntimeException("UTF-8 not supported?", e);
  }
  return new LiteralByteString(encoded);
}
// =================================================================
// InputStream -> ByteString
/**
 * Completely reads the given stream's bytes into a
 * {@code ByteString}, blocking if necessary until all bytes are
 * read through to the end of the stream.
 *
 * <b>Performance notes:</b> The returned {@code ByteString} is an
 * immutable tree of byte arrays ("chunks") of the stream data. The
 * first chunk is small, with subsequent chunks each being double
 * the size, up to 8K. If the caller knows the precise length of
 * the stream and wishes to avoid all unnecessary copies and
 * allocations, consider using the two-argument version of this
 * method, below.
 *
 * <p>Equivalent to {@code readFrom(streamToDrain,
 * MIN_READ_FROM_CHUNK_SIZE, MAX_READ_FROM_CHUNK_SIZE)}.
 *
 * @param streamToDrain The source stream, which is read completely
 *     but not closed.
 * @return A new {@code ByteString} which is made up of chunks of
 *     various sizes, depending on the behavior of the underlying
 *     stream.
 * @throws IOException IOException is thrown if there is a problem
 *     reading the underlying stream.
 */
public static ByteString readFrom(InputStream streamToDrain)
throws IOException {
return readFrom(
streamToDrain, MIN_READ_FROM_CHUNK_SIZE, MAX_READ_FROM_CHUNK_SIZE);
}
/**
 * Completely reads the given stream's bytes into a
 * {@code ByteString}, blocking if necessary until all bytes are
 * read through to the end of the stream.
 *
 * <b>Performance notes:</b> The returned {@code ByteString} is an
 * immutable tree of byte arrays ("chunks") of the stream data. The
 * chunkSize parameter sets the size of these byte arrays. In
 * particular, if the chunkSize is precisely the same as the length
 * of the stream, unnecessary allocations and copies will be
 * avoided. Otherwise, the chunks will be of the given size, except
 * for the last chunk, which will be resized (via a reallocation and
 * copy) to contain the remainder of the stream.
 *
 * <p>This delegates to the three-argument overload with
 * {@code chunkSize} used as both the minimum and maximum chunk size,
 * so the chunk size stays fixed instead of doubling.
 *
 * @param streamToDrain The source stream, which is read completely
 *     but not closed.
 * @param chunkSize The size of the chunks in which to read the
 *     stream.
 * @return A new {@code ByteString} which is made up of chunks of
 *     the given size.
 * @throws IOException IOException is thrown if there is a problem
 *     reading the underlying stream.
 */
public static ByteString readFrom(InputStream streamToDrain, int chunkSize)
throws IOException {
return readFrom(streamToDrain, chunkSize, chunkSize);
}
/**
 * Reads {@code streamToDrain} to EOF into a list of chunks whose size
 * starts at {@code minChunkSize} and doubles after every read, capped at
 * {@code maxChunkSize}, then concatenates the chunks into a balanced tree
 * via {@link #copyFrom(Iterable)}.
 *
 * @param streamToDrain the source stream, read completely but not closed
 * @param minChunkSize size of the first chunk to read
 * @param maxChunkSize upper bound for the doubling chunk size
 * @return a new {@code ByteString} containing all bytes of the stream
 * @throws IOException if there is a problem reading the underlying stream
 */
public static ByteString readFrom(InputStream streamToDrain, int minChunkSize,
int maxChunkSize) throws IOException {
Collection<ByteString> results = new ArrayList<ByteString>();
// copy the inbound bytes into a list of chunks; the chunk size
// grows exponentially to support both short and long streams.
int chunkSize = minChunkSize;
while (true) {
ByteString chunk = readChunk(streamToDrain, chunkSize);
if (chunk == null) {
// readChunk returns null only at end of stream.
break;
}
results.add(chunk);
chunkSize = Math.min(chunkSize * 2, maxChunkSize);
}
return ByteString.copyFrom(results);
}
/**
 * Blocks until {@code chunkSize} bytes have been read from the stream or
 * EOF is reached, calling read() repeatedly because a stream
 * implementation may fill the buffer only partially per call.
 *
 * @return a chunk of the desired size, or a smaller chunk holding
 *     whatever was available when end of stream was reached; null if the
 *     given stream had no more data in it
 */
private static ByteString readChunk(InputStream in, final int chunkSize)
    throws IOException {
  final byte[] buffer = new byte[chunkSize];
  int filled = 0;
  for (;;) {
    if (filled >= chunkSize) {
      break;
    }
    final int count = in.read(buffer, filled, chunkSize - filled);
    if (count == -1) {
      break;  // end of stream
    }
    filled += count;
  }
  return (filled == 0) ? null : ByteString.copyFrom(buffer, 0, filled);
}
// =================================================================
// Multiple ByteStrings -> One ByteString
/**
 * Concatenates the given {@code ByteString} to this one. Short
 * concatenations, of total size smaller than
 * {@link ByteString#CONCATENATE_BY_COPY_SIZE}, are produced by copying the
 * underlying bytes (as per Rope.java, <a
 * href="http://www.cs.ubc.ca/local/reading/proceedings/spe91-95/spe/vol25/issue12/spe986.pdf">
 * BAP95</a>). In general, the concatenation involves no copying.
 *
 * @param other string to concatenate
 * @return a new {@code ByteString} instance
 * @throws IllegalArgumentException if the combined size would reach
 *     {@code Integer.MAX_VALUE}
 */
public ByteString concat(ByteString other) {
  final int leftSize = size();
  final int rightSize = other.size();
  // Sum in long arithmetic so the overflow check itself cannot overflow.
  if ((long) leftSize + rightSize >= Integer.MAX_VALUE) {
    throw new IllegalArgumentException("ByteString would be too long: " +
        leftSize + "+" + rightSize);
  }
  return RopeByteString.concatenate(this, other);
}
/**
 * Concatenates all byte strings in the iterable and returns the result.
 * This is designed to run in O(list size), not O(total bytes).
 *
 * <p>The returned {@code ByteString} is not necessarily a unique object.
 * If the list is empty, the returned object is the singleton empty
 * {@code ByteString}. If the list has only one element, that
 * {@code ByteString} will be returned without copying.
 *
 * @param byteStrings strings to be concatenated
 * @return new {@code ByteString}
 */
public static ByteString copyFrom(Iterable<ByteString> byteStrings) {
  Collection<ByteString> collection;
  if (byteStrings instanceof Collection) {
    // Safe: Collection<T> extends Iterable<T> with the same parameter.
    collection = (Collection<ByteString>) byteStrings;
  } else {
    // Materialize the iterable so we know its size for balancedConcat.
    collection = new ArrayList<ByteString>();
    for (ByteString piece : byteStrings) {
      collection.add(piece);
    }
  }
  return collection.isEmpty()
      ? EMPTY
      : balancedConcat(collection.iterator(), collection.size());
}
// Used by copyFrom(Iterable<ByteString>): builds a balanced concatenation
// of the next "length" elements consumed from the iterator, by splitting
// the range in half and recursing on each side.
private static ByteString balancedConcat(Iterator<ByteString> iterator,
    int length) {
  assert length >= 1;
  if (length == 1) {
    return iterator.next();
  }
  final int leftLength = length >>> 1;
  final ByteString left = balancedConcat(iterator, leftLength);
  final ByteString right = balancedConcat(iterator, length - leftLength);
  return left.concat(right);
}
// =================================================================
// ByteString -> byte[]
/**
 * Copies all bytes of this string into a buffer, starting at the given
 * offset of the target buffer.
 *
 * @param target buffer to copy into
 * @param offset in the target buffer
 * @throws IndexOutOfBoundsException if the offset is negative or too large
 */
public void copyTo(byte[] target, int offset) {
// Note argument order: offset here is the TARGET offset; the source
// offset is 0 (the whole string is copied).
copyTo(target, 0, offset, size());
}
/**
 * Copies bytes into a buffer.
 *
 * @param target buffer to copy into
 * @param sourceOffset offset within these bytes
 * @param targetOffset offset within the target buffer
 * @param numberToCopy number of bytes to copy
 * @throws IndexOutOfBoundsException if an offset or size is negative or too
 *     large
 */
public void copyTo(byte[] target, int sourceOffset, int targetOffset,
    int numberToCopy) {
  if (sourceOffset < 0) {
    throw new IndexOutOfBoundsException("Source offset < 0: " + sourceOffset);
  }
  if (targetOffset < 0) {
    throw new IndexOutOfBoundsException("Target offset < 0: " + targetOffset);
  }
  if (numberToCopy < 0) {
    throw new IndexOutOfBoundsException("Length < 0: " + numberToCopy);
  }
  // Sum in long arithmetic: offset + numberToCopy can overflow int, which
  // would wrap negative and slip past an int comparison against size().
  // Also report the real condition (range exceeded) instead of "< 0".
  if ((long) sourceOffset + numberToCopy > size()) {
    throw new IndexOutOfBoundsException(
        "Source end offset exceeded: " + ((long) sourceOffset + numberToCopy));
  }
  if ((long) targetOffset + numberToCopy > target.length) {
    throw new IndexOutOfBoundsException(
        "Target end offset exceeded: " + ((long) targetOffset + numberToCopy));
  }
  // copyToInternal's contract assumes numberToCopy > 0.
  if (numberToCopy > 0) {
    copyToInternal(target, sourceOffset, targetOffset, numberToCopy);
  }
}
/**
* Internal (package private) implementation of
* @link{#copyTo(byte[],int,int,int}.
* It assumes that all error checking has already been performed and that
* @code{numberToCopy > 0}.
*/
protected abstract void copyToInternal(byte[] target, int sourceOffset,
int targetOffset, int numberToCopy);
/**
* Copies bytes into a ByteBuffer.
*
* @param target ByteBuffer to copy into.
* @throws java.nio.ReadOnlyBufferException if the {@code target} is read-only
* @throws java.nio.BufferOverflowException if the {@code target}'s
* remaining() space is not large enough to hold the data.
*/
public abstract void copyTo(ByteBuffer target);
/**
 * Returns a freshly allocated {@code byte[]} containing a copy of these
 * bytes.
 *
 * @return copied bytes
 */
public byte[] toByteArray() {
  final int length = size();
  final byte[] result = new byte[length];
  copyToInternal(result, 0, 0, length);
  return result;
}
/**
* Writes the complete contents of this byte string to
* the specified output stream argument.
*
* @param out the output stream to which to write the data.
* @throws IOException if an I/O error occurs.
*/
public abstract void writeTo(OutputStream out) throws IOException;
/**
* Constructs a read-only {@code java.nio.ByteBuffer} whose content
* is equal to the contents of this byte string.
* The result uses the same backing array as the byte string, if possible.
*
* @return wrapped bytes
*/
public abstract ByteBuffer asReadOnlyByteBuffer();
/**
* Constructs a list of read-only {@code java.nio.ByteBuffer} objects
* such that the concatenation of their contents is equal to the contents
* of this byte string. The result uses the same backing arrays as the
* byte string.
* <p>
* By returning a list, implementations of this method may be able to avoid
* copying even when there are multiple backing arrays.
*
* @return a list of wrapped bytes
*/
public abstract List<ByteBuffer> asReadOnlyByteBufferList();
/**
* Constructs a new {@code String} by decoding the bytes using the
* specified charset.
*
* @param charsetName encode using this charset
* @return new string
* @throws UnsupportedEncodingException if charset isn't recognized
*/
public abstract String toString(String charsetName)
throws UnsupportedEncodingException;
// =================================================================
// UTF-8 decoding
/**
 * Constructs a new {@code String} by decoding the bytes as UTF-8.
 *
 * @return new string using UTF-8 encoding
 */
public String toStringUtf8() {
try {
return toString("UTF-8");
} catch (UnsupportedEncodingException e) {
// Every conforming JVM supports UTF-8, so this branch is unreachable.
throw new RuntimeException("UTF-8 not supported?", e);
}
}
/**
* Tells whether this {@code ByteString} represents a well-formed UTF-8
* byte sequence, such that the original bytes can be converted to a
* String object and then round tripped back to bytes without loss.
*
* <p>More precisely, returns {@code true} whenever: <pre> {@code
* Arrays.equals(byteString.toByteArray(),
* new String(byteString.toByteArray(), "UTF-8").getBytes("UTF-8"))
* }</pre>
*
* <p>This method returns {@code false} for "overlong" byte sequences,
* as well as for 3-byte sequences that would map to a surrogate
* character, in accordance with the restricted definition of UTF-8
* introduced in Unicode 3.1. Note that the UTF-8 decoder included in
* Oracle's JDK has been modified to also reject "overlong" byte
* sequences, but (as of 2011) still accepts 3-byte surrogate
* character byte sequences.
*
 * <p>See the Unicode Standard,<br>
 * Table 3-6. <em>UTF-8 Bit Distribution</em>,<br>
 * Table 3-7. <em>Well Formed UTF-8 Byte Sequences</em>.
*
* @return whether the bytes in this {@code ByteString} are a
* well-formed UTF-8 byte sequence
*/
public abstract boolean isValidUtf8();
/**
* Tells whether the given byte sequence is a well-formed, malformed, or
* incomplete UTF-8 byte sequence. This method accepts and returns a partial
* state result, allowing the bytes for a complete UTF-8 byte sequence to be
* composed from multiple {@code ByteString} segments.
*
* @param state either {@code 0} (if this is the initial decoding operation)
* or the value returned from a call to a partial decoding method for the
* previous bytes
* @param offset offset of the first byte to check
* @param length number of bytes to check
*
* @return {@code -1} if the partial byte sequence is definitely malformed,
* {@code 0} if it is well-formed (no additional input needed), or, if the
* byte sequence is "incomplete", i.e. apparently terminated in the middle of
* a character, an opaque integer "state" value containing enough information
* to decode the character when passed to a subsequent invocation of a
* partial decoding method.
*/
protected abstract int partialIsValidUtf8(int state, int offset, int length);
// =================================================================
// equals() and hashCode()
@Override
public abstract boolean equals(Object o);
/**
* Return a non-zero hashCode depending only on the sequence of bytes
* in this ByteString.
*
* @return hashCode value for this object
*/
@Override
public abstract int hashCode();
// =================================================================
// Input stream
/**
* Creates an {@code InputStream} which can be used to read the bytes.
* <p>
* The {@link InputStream} returned by this method is guaranteed to be
* completely non-blocking. The method {@link InputStream#available()}
* returns the number of bytes remaining in the stream. The methods
 * {@link InputStream#read(byte[])}, {@link InputStream#read(byte[],int,int)}
* and {@link InputStream#skip(long)} will read/skip as many bytes as are
* available.
* <p>
* The methods in the returned {@link InputStream} might <b>not</b> be
* thread safe.
*
* @return an input stream that returns the bytes of this byte string.
*/
public abstract InputStream newInput();
/**
* Creates a {@link CodedInputStream} which can be used to read the bytes.
* Using this is often more efficient than creating a {@link CodedInputStream}
* that wraps the result of {@link #newInput()}.
*
* @return stream based on wrapped data
*/
public abstract CodedInputStream newCodedInput();
// =================================================================
// Output stream
/**
 * Creates a new {@link Output} with the given initial capacity. Call {@link
 * Output#toByteString()} to create the {@code ByteString} instance.
 * <p>
 * A {@link ByteString.Output} offers the same functionality as a
 * {@link ByteArrayOutputStream}, except that it returns a {@link ByteString}
 * rather than a {@code byte} array.
 *
 * @param initialCapacity estimate of number of bytes to be written
 * @return {@code OutputStream} for building a {@code ByteString}
 * @throws IllegalArgumentException if {@code initialCapacity} is negative
 */
public static Output newOutput(int initialCapacity) {
return new Output(initialCapacity);
}
/**
 * Creates a new {@link Output}. Call {@link Output#toByteString()} to create
 * the {@code ByteString} instance.
 * <p>
 * A {@link ByteString.Output} offers the same functionality as a
 * {@link ByteArrayOutputStream}, except that it returns a {@link ByteString}
 * rather than a {@code byte array}.
 *
 * <p>The initial capacity is {@link #CONCATENATE_BY_COPY_SIZE} (128 bytes).
 *
 * @return {@code OutputStream} for building a {@code ByteString}
 */
public static Output newOutput() {
return new Output(CONCATENATE_BY_COPY_SIZE);
}
/**
 * Outputs to a {@code ByteString} instance. Call {@link #toByteString()} to
 * create the {@code ByteString} instance.
 */
public static final class Output extends OutputStream {
// Implementation note.
// The public methods of this class must be synchronized. ByteStrings
// are guaranteed to be immutable. Without some sort of locking, it could
// be possible for one thread to call toByteString(), while another thread
// is still modifying the underlying byte array.
private static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
// argument passed by user, indicating initial capacity.
private final int initialCapacity;
// ByteStrings to be concatenated to create the result
private final ArrayList<ByteString> flushedBuffers;
// Total number of bytes in the ByteStrings of flushedBuffers
private int flushedBuffersTotalBytes;
// Current buffer to which we are writing
private byte[] buffer;
// Location in buffer[] to which we write the next byte.
private int bufferPos;
/**
 * Creates a new ByteString output stream with the specified
 * initial capacity.
 *
 * @param initialCapacity the initial capacity of the output stream.
 * @throws IllegalArgumentException if {@code initialCapacity} is negative
 */
Output(int initialCapacity) {
if (initialCapacity < 0) {
throw new IllegalArgumentException("Buffer size < 0");
}
this.initialCapacity = initialCapacity;
this.flushedBuffers = new ArrayList<ByteString>();
this.buffer = new byte[initialCapacity];
}
/**
 * Appends a single byte, flushing the current buffer to
 * {@code flushedBuffers} first if it is full.
 */
@Override
public synchronized void write(int b) {
if (bufferPos == buffer.length) {
flushFullBuffer(1);
}
buffer[bufferPos++] = (byte)b;
}
/**
 * Appends {@code length} bytes of {@code b} starting at {@code offset},
 * splitting the write across a buffer flush when it does not fit.
 */
@Override
public synchronized void write(byte[] b, int offset, int length) {
if (length <= buffer.length - bufferPos) {
// The bytes can fit into the current buffer.
System.arraycopy(b, offset, buffer, bufferPos, length);
bufferPos += length;
} else {
// Use up the current buffer
int copySize = buffer.length - bufferPos;
System.arraycopy(b, offset, buffer, bufferPos, copySize);
offset += copySize;
length -= copySize;
// Flush the buffer, and get a new buffer at least big enough to cover
// what we still need to output
flushFullBuffer(length);
System.arraycopy(b, offset, buffer, 0 /* destPos */, length);
bufferPos = length;
}
}
/**
 * Creates a byte string. Its size is the current size of this output
 * stream and its output has been copied to it.
 *
 * @return the current contents of this output stream, as a byte string.
 */
public synchronized ByteString toByteString() {
flushLastBuffer();
return ByteString.copyFrom(flushedBuffers);
}
/**
 * Writes the complete contents of this byte array output stream to
 * the specified output stream argument.
 *
 * @param out the output stream to which to write the data.
 * @throws IOException if an I/O error occurs.
 */
public void writeTo(OutputStream out) throws IOException {
ByteString[] cachedFlushBuffers;
byte[] cachedBuffer;
int cachedBufferPos;
synchronized (this) {
// Copy the information we need into local variables so as to hold
// the lock for as short a time as possible.
cachedFlushBuffers =
flushedBuffers.toArray(new ByteString[flushedBuffers.size()]);
cachedBuffer = buffer;
cachedBufferPos = bufferPos;
}
for (ByteString byteString : cachedFlushBuffers) {
byteString.writeTo(out);
}
// The unflushed tail of the current buffer is copied before writing, as
// cachedBuffer aliases the live buffer that other threads may append to.
out.write(Arrays.copyOf(cachedBuffer, cachedBufferPos));
}
/**
 * Returns the current size of the output stream.
 *
 * @return the current size of the output stream
 */
public synchronized int size() {
return flushedBuffersTotalBytes + bufferPos;
}
/**
 * Resets this stream, so that all currently accumulated output in the
 * output stream is discarded. The output stream can be used again,
 * reusing the already allocated buffer space.
 */
public synchronized void reset() {
flushedBuffers.clear();
flushedBuffersTotalBytes = 0;
bufferPos = 0;
}
@Override
public String toString() {
return String.format("<ByteString.Output@%s size=%d>",
Integer.toHexString(System.identityHashCode(this)), size());
}
/**
 * Internal function used by writers. The current buffer is full, and the
 * writer needs a new buffer whose size is at least the specified minimum
 * size.
 */
private void flushFullBuffer(int minSize) {
flushedBuffers.add(new LiteralByteString(buffer));
flushedBuffersTotalBytes += buffer.length;
// We want to increase our total capacity by 50%, but as a minimum,
// the new buffer should also at least be >= minSize and
// >= initialCapacity.
int newSize = Math.max(initialCapacity,
Math.max(minSize, flushedBuffersTotalBytes >>> 1));
buffer = new byte[newSize];
bufferPos = 0;
}
/**
 * Internal function used by {@link #toByteString()}. The current buffer may
 * or may not be full, but it needs to be flushed.
 */
private void flushLastBuffer() {
if (bufferPos < buffer.length) {
if (bufferPos > 0) {
byte[] bufferCopy = Arrays.copyOf(buffer, bufferPos);
flushedBuffers.add(new LiteralByteString(bufferCopy));
}
// We reuse this buffer for further writes.
} else {
// Buffer is completely full. Huzzah.
flushedBuffers.add(new LiteralByteString(buffer));
// 99% of the time, we're not going to use this OutputStream again.
// We set buffer to an empty byte array so that we're handling this
// case without wasting space. In the rare case that more writes
// *do* occur, this empty buffer will be flushed and an appropriately
// sized new buffer will be created.
buffer = EMPTY_BYTE_ARRAY;
}
flushedBuffersTotalBytes += bufferPos;
bufferPos = 0;
}
}
/**
 * Constructs a new {@code ByteString} builder, which allows you to
 * efficiently construct a {@code ByteString} by writing to a {@link
 * CodedOutputStream}. Using this is much more efficient than calling {@code
 * newOutput()} and wrapping that in a {@code CodedOutputStream}.
 *
 * <p>This is package-private because it's a somewhat confusing interface.
 * Users can call {@link Message#toByteString()} instead of calling this
 * directly.
 *
 * @param size The target byte size of the {@code ByteString}. You must write
 *     exactly this many bytes before building the result.
 * @return the builder
 * @see CodedBuilder
 */
static CodedBuilder newCodedBuilder(int size) {
return new CodedBuilder(size);
}
/** See {@link ByteString#newCodedBuilder(int)}. */
static final class CodedBuilder {
// Stream the caller writes into; it targets {@code buffer} directly.
private final CodedOutputStream output;
// Backing array that becomes the resulting ByteString without copying.
private final byte[] buffer;
private CodedBuilder(int size) {
buffer = new byte[size];
output = CodedOutputStream.newInstance(buffer);
}
/**
 * Returns the completed {@code ByteString}; the caller must first have
 * written exactly {@code size} bytes (checkNoSpaceLeft enforces this).
 */
public ByteString build() {
output.checkNoSpaceLeft();
// We can be confident that the CodedOutputStream will not modify the
// underlying bytes anymore because it already wrote all of them. So,
// no need to make a copy.
return new LiteralByteString(buffer);
}
public CodedOutputStream getCodedOutput() {
return output;
}
}
// =================================================================
// Methods {@link RopeByteString} needs on instances, which aren't part of the
// public API.
/**
* Return the depth of the tree representing this {@code ByteString}, if any,
* whose root is this node. If this is a leaf node, return 0.
*
* @return tree depth or zero
*/
protected abstract int getTreeDepth();
/**
* Return {@code true} if this ByteString is literal (a leaf node) or a
* flat-enough tree in the sense of {@link RopeByteString}.
*
* @return true if the tree is flat enough
*/
protected abstract boolean isBalanced();
/**
* Return the cached hash code if available.
*
* @return value of cached hash code or 0 if not computed yet
*/
protected abstract int peekCachedHashCode();
/**
* Compute the hash across the value bytes starting with the given hash, and
* return the result. This is used to compute the hash across strings
* represented as a set of pieces by allowing the hash computation to be
* continued from piece to piece.
*
* @param h starting hash value
* @param offset offset into this value to start looking at data values
* @param length number of data values to include in the hash computation
* @return ending hash value
*/
protected abstract int partialHash(int h, int offset, int length);
/**
 * Returns a debug string identifying this instance and its size; the
 * byte contents themselves are not printed.
 */
@Override
public String toString() {
  final String identity = Integer.toHexString(System.identityHashCode(this));
  return String.format("<ByteString@%s size=%d>", identity, size());
}
}
| |
/*
* Copyright 2018 The Bazel Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.idea.blaze.java.run.fastbuild;
import static com.google.common.base.Preconditions.checkState;
import com.google.idea.blaze.base.command.BlazeCommandName;
import com.google.idea.blaze.base.command.BlazeInvocationContext.ContextType;
import com.google.idea.blaze.base.console.BlazeConsoleService;
import com.google.idea.blaze.base.issueparser.BlazeIssueParser;
import com.google.idea.blaze.base.issueparser.IssueOutputFilter;
import com.google.idea.blaze.base.logging.EventLoggingService;
import com.google.idea.blaze.base.model.primitives.Label;
import com.google.idea.blaze.base.model.primitives.WorkspaceRoot;
import com.google.idea.blaze.base.run.BlazeCommandRunConfiguration;
import com.google.idea.blaze.base.run.confighandler.BlazeCommandGenericRunConfigurationRunner.BlazeCommandRunProfileState;
import com.google.idea.blaze.base.run.confighandler.BlazeCommandRunConfigurationRunner;
import com.google.idea.blaze.base.run.state.BlazeCommandRunConfigurationCommonState;
import com.google.idea.blaze.base.scope.BlazeContext;
import com.google.idea.blaze.base.scope.scopes.BlazeConsoleScope;
import com.google.idea.blaze.base.scope.scopes.IdeaLogScope;
import com.google.idea.blaze.base.scope.scopes.ProblemsViewScope;
import com.google.idea.blaze.base.scope.scopes.ToolWindowScope;
import com.google.idea.blaze.base.settings.Blaze;
import com.google.idea.blaze.base.settings.BlazeUserSettings;
import com.google.idea.blaze.base.settings.BlazeUserSettings.FocusBehavior;
import com.google.idea.blaze.base.toolwindow.Task;
import com.google.idea.blaze.base.util.SaveUtil;
import com.google.idea.blaze.java.fastbuild.FastBuildException;
import com.google.idea.blaze.java.fastbuild.FastBuildException.BlazeBuildError;
import com.google.idea.blaze.java.fastbuild.FastBuildIncrementalCompileException;
import com.google.idea.blaze.java.fastbuild.FastBuildInfo;
import com.google.idea.blaze.java.fastbuild.FastBuildLogDataScope;
import com.google.idea.blaze.java.fastbuild.FastBuildService;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.Executor;
import com.intellij.execution.RunCanceledByUserException;
import com.intellij.execution.configurations.RunProfile;
import com.intellij.execution.configurations.RunProfileState;
import com.intellij.execution.filters.HyperlinkInfo;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.runners.ExecutionUtil;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import java.util.Objects;
import java.util.concurrent.CancellationException;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicReference;
/** Supports the execution of {@link BlazeCommandRunConfiguration}s in fast build mode. */
public final class FastBuildConfigurationRunner implements BlazeCommandRunConfigurationRunner {
private static final Logger logger = Logger.getInstance(FastBuildConfigurationRunner.class);
static final Key<AtomicReference<FastBuildInfo>> BUILD_INFO_KEY =
Key.create("blaze.java.fastRun.buildInfo");
static final Key<AtomicReference<BlazeContext>> BLAZE_CONTEXT =
Key.create("blaze.java.fastRun.blazeContext");
/**
 * Returns false if this isn't a 'blaze test' invocation, or if
 * {@link FastBuildService} doesn't support fast builds for the
 * configuration's target kind under the project's build system.
 */
static boolean canRun(RunProfile runProfile) {
BlazeCommandRunConfiguration blazeCfg =
BlazeCommandRunConfigurationRunner.getBlazeConfig(runProfile);
if (blazeCfg == null) {
// Not a Blaze run configuration at all.
return false;
}
return Objects.equals(blazeCfg.getHandler().getCommandName(), BlazeCommandName.TEST)
&& FastBuildService.getInstance(blazeCfg.getProject())
.supportsFastBuilds(
Blaze.getBuildSystem(blazeCfg.getProject()), blazeCfg.getTargetKind());
}
@Override
public RunProfileState getRunProfileState(Executor executor, ExecutionEnvironment env) {
if (!canRun(env.getRunProfile())) {
return new BlazeCommandRunProfileState(env);
}
FastBuildSuggestion.getInstance().triedFastBuild();
env.putCopyableUserData(BUILD_INFO_KEY, new AtomicReference<>());
env.putCopyableUserData(BLAZE_CONTEXT, new AtomicReference<>());
return new FastBuildRunProfileState(env);
}
@Override
public boolean executeBeforeRunTask(ExecutionEnvironment env) {
if (!canRun(env.getRunProfile())) {
return true;
}
Project project = env.getProject();
BlazeCommandRunConfiguration configuration =
BlazeCommandRunConfigurationRunner.getBlazeConfig(env.getRunProfile());
BlazeCommandRunConfigurationCommonState handlerState =
(BlazeCommandRunConfigurationCommonState) configuration.getHandler().getState();
checkState(configuration.getSingleTarget() != null);
Label label = (Label) configuration.getSingleTarget();
String binaryPath =
handlerState.getBlazeBinaryState().getBlazeBinary() != null
? handlerState.getBlazeBinaryState().getBlazeBinary()
: Blaze.getBuildSystemProvider(project).getBinaryPath(project);
SaveUtil.saveAllFiles();
FastBuildService buildService = FastBuildService.getInstance(project);
Future<FastBuildInfo> buildFuture = null;
FocusBehavior consolePopupBehavior = BlazeUserSettings.getInstance().getShowBlazeConsoleOnRun();
FocusBehavior problemsViewFocus = BlazeUserSettings.getInstance().getShowProblemsViewOnRun();
BlazeContext context =
new BlazeContext()
.push(
new ToolWindowScope.Builder(
project, new Task("Fast Build " + label.targetName(), Task.Type.FAST_BUILD))
.setPopupBehavior(consolePopupBehavior)
.setIssueParsers(
BlazeIssueParser.defaultIssueParsers(
project,
WorkspaceRoot.fromProject(project),
ContextType.RunConfiguration))
.build())
.push(new ProblemsViewScope(project, problemsViewFocus))
.push(new IdeaLogScope())
.push(
new BlazeConsoleScope.Builder(project)
.setPopupBehavior(consolePopupBehavior)
.addConsoleFilters(
new IssueOutputFilter(
project,
WorkspaceRoot.fromProject(project),
ContextType.RunConfiguration,
/* linkToBlazeConsole= */ true))
.build())
.push(new FastBuildLogDataScope());
try {
buildFuture =
buildService.createBuild(
context,
label,
binaryPath,
handlerState.getBlazeFlagsState().getFlagsForExternalProcesses());
FastBuildInfo fastBuildInfo = buildFuture.get();
env.getCopyableUserData(BUILD_INFO_KEY).set(fastBuildInfo);
env.getCopyableUserData(BLAZE_CONTEXT).set(context);
return true;
} catch (InterruptedException e) {
buildFuture.cancel(/* mayInterruptIfRunning= */ true);
Thread.currentThread().interrupt();
} catch (CancellationException e) {
ExecutionUtil.handleExecutionError(
env.getProject(),
env.getExecutor().getToolWindowId(),
env.getRunProfile(),
new RunCanceledByUserException());
} catch (FastBuildException e) {
if (!(e instanceof BlazeBuildError)) {
// no need to log blaze build errors; they're expected
logger.warn(e);
}
ExecutionUtil.handleExecutionError(env, new ExecutionException(e));
} catch (java.util.concurrent.ExecutionException e) {
logger.warn(e);
if (e.getCause() instanceof FastBuildIncrementalCompileException) {
handleJavacError(
env, project, label, buildService, (FastBuildIncrementalCompileException) e.getCause());
} else {
ExecutionUtil.handleExecutionError(env, new ExecutionException(e.getCause()));
}
}
// Fall-through for all exceptions. If no exception was thrown, we return from the try{} block.
context.endScope();
return false;
}
private static void handleJavacError(
ExecutionEnvironment env,
Project project,
Label label,
FastBuildService buildService,
FastBuildIncrementalCompileException e) {
BlazeConsoleService console = BlazeConsoleService.getInstance(project);
console.print(e.getMessage() + "\n", ConsoleViewContentType.ERROR_OUTPUT);
console.printHyperlink(
"Click here to run the tests again with a fresh "
+ Blaze.getBuildSystem(project)
+ " build.\n",
new RerunTestsWithBlazeHyperlink(buildService, label, env));
ExecutionUtil.handleExecutionError(
env, new ExecutionException("See the Blaze Console for javac output", e.getCause()));
}
private static class RerunTestsWithBlazeHyperlink implements HyperlinkInfo {
final FastBuildService buildService;
final Label label;
final ExecutionEnvironment env;
private RerunTestsWithBlazeHyperlink(
FastBuildService buildService, Label label, ExecutionEnvironment env) {
this.buildService = buildService;
this.label = label;
this.env = env;
}
@Override
public void navigate(Project project) {
buildService.resetBuild(label);
ExecutionUtil.restart(env);
EventLoggingService.getInstance()
.logEvent(FastBuildConfigurationRunner.class, "rerun_tests_with_blaze_link_clicked");
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.utils;
import java.util.*;
import java.util.Map.Entry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.cache.ICacheExpungeHook;
/**
 * A map whose entries expire after a fixed TTL, enforced by a background timer.
 * Expunge hooks (per-key or global) are invoked when the reaper removes an entry.
 */
public class ExpiringMap<K, V>
{
    /** Wraps a value with its insertion timestamp so the reaper can expire it. */
    private class CacheableObject
    {
        private V value_;
        private long age_; // creation time in millis; entries are never refreshed on access

        CacheableObject(V o)
        {
            value_ = o;
            age_ = System.currentTimeMillis();
        }

        @Override
        @SuppressWarnings("unchecked")
        public boolean equals(Object o)
        {
            // BUGFIX: unwrap other CacheableObjects so equality is symmetric.
            // Hashtable.containsValue() compares stored wrappers against the probe
            // wrapper; the old code compared value_ directly against the wrapper,
            // so containsValue() always returned false for ordinary value types.
            if (o instanceof ExpiringMap.CacheableObject)
            {
                return value_.equals(((CacheableObject) o).value_);
            }
            return value_.equals(o);
        }

        @Override
        public int hashCode()
        {
            return value_.hashCode();
        }

        V getValue()
        {
            return value_;
        }

        boolean isReadyToDie(long expiration)
        {
            return ((System.currentTimeMillis() - age_) > expiration);
        }
    }

    /** Periodic task that removes expired entries and invokes the expunge hooks. */
    private class CacheMonitor extends TimerTask
    {
        private long expiration_;

        CacheMonitor(long expiration)
        {
            expiration_ = expiration;
        }

        @Override
        public void run()
        {
            Map<K, V> expungedValues = new HashMap<K, V>();
            // Collect and remove the expired entries while holding the cache lock...
            synchronized (cache_)
            {
                Enumeration<K> e = cache_.keys();
                while (e.hasMoreElements())
                {
                    K key = e.nextElement();
                    CacheableObject co = cache_.get(key);
                    if (co != null && co.isReadyToDie(expiration_))
                    {
                        V v = co.getValue();
                        if (null != v)
                        {
                            expungedValues.put(key, v);
                        }
                        cache_.remove(key);
                    }
                }
            }
            // ...then call the hooks outside the lock so a slow hook cannot block the map.
            // A key-specific hook takes precedence over the global hook.
            for (Entry<K, V> entry : expungedValues.entrySet())
            {
                K key = entry.getKey();
                V value = entry.getValue();
                ICacheExpungeHook<K, V> hook = hooks_.remove(key);
                if (hook != null)
                {
                    hook.callMe(key, value);
                }
                else if (globalHook_ != null)
                {
                    globalHook_.callMe(key, value);
                }
            }
            expungedValues.clear();
        }
    }

    private ICacheExpungeHook<K, V> globalHook_;
    private Hashtable<K, CacheableObject> cache_;
    private Map<K, ICacheExpungeHook<K, V>> hooks_;
    private Timer timer_;
    // NOTE(review): incremented without synchronization; two maps constructed
    // concurrently could get the same timer name (harmless, but worth confirming).
    private static int counter_ = 0;
    private static final Logger LOGGER = LoggerFactory.getLogger(ExpiringMap.class);

    private void init(long expiration)
    {
        if (expiration <= 0)
        {
            throw new IllegalArgumentException("Argument specified must be a positive number");
        }
        cache_ = new Hashtable<K, CacheableObject>();
        hooks_ = new Hashtable<K, ICacheExpungeHook<K, V>>();
        // Daemon timer so a map that is never shut down does not keep the JVM alive.
        timer_ = new Timer("CACHETABLE-TIMER-" + (++counter_), true);
        timer_.schedule(new CacheMonitor(expiration), expiration, expiration);
    }

    /*
     * Specify the TTL for objects in the cache
     * in milliseconds.
     */
    public ExpiringMap(long expiration)
    {
        init(expiration);
    }

    /*
     * Specify the TTL for objects in the cache
     * in milliseconds and a global expunge hook. If
     * a key has a key-specific hook installed invoke that
     * instead.
     */
    public ExpiringMap(long expiration, ICacheExpungeHook<K, V> global)
    {
        init(expiration);
        globalHook_ = global;
    }

    /** Stops the expiration timer; entries already in the map are left in place. */
    public void shutdown()
    {
        timer_.cancel();
    }

    public void put(K key, V value)
    {
        cache_.put(key, new CacheableObject(value));
    }

    /** Inserts a value along with a hook to invoke when this key is expunged. */
    public void put(K key, V value, ICacheExpungeHook<K, V> hook)
    {
        put(key, value);
        hooks_.put(key, hook);
    }

    /** Returns the value for the key, or null if absent (expired or never inserted). */
    public V get(K key)
    {
        V result = null;
        CacheableObject co = cache_.get(key);
        if (co != null)
        {
            result = co.getValue();
        }
        return result;
    }

    /** Removes and returns the value for the key; its expunge hook is discarded, not invoked. */
    public V remove(K key)
    {
        CacheableObject co = cache_.remove(key);
        // BUGFIX: also drop any per-key hook. Previously it lingered forever after an
        // explicit remove, leaking and potentially firing for a later, unrelated put.
        hooks_.remove(key);
        V result = null;
        if (co != null)
        {
            result = co.getValue();
        }
        return result;
    }

    public int size()
    {
        return cache_.size();
    }

    public boolean containsKey(K key)
    {
        return cache_.containsKey(key);
    }

    public boolean containsValue(V value)
    {
        return cache_.containsValue(new CacheableObject(value));
    }

    public boolean isEmpty()
    {
        return cache_.isEmpty();
    }

    public Set<K> keySet()
    {
        return cache_.keySet();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_UUID_NA_VALUE;
/**
* A base class for all elasticsearch exceptions.
*/
public class ElasticsearchException extends RuntimeException implements ToXContent {
public static final String REST_EXCEPTION_SKIP_CAUSE = "rest.exception.cause.skip";
public static final String REST_EXCEPTION_SKIP_STACK_TRACE = "rest.exception.stacktrace.skip";
public static final boolean REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT = true;
public static final boolean REST_EXCEPTION_SKIP_CAUSE_DEFAULT = false;
private static final String INDEX_HEADER_KEY = "es.index";
private static final String INDEX_HEADER_KEY_UUID = "es.index_uuid";
private static final String SHARD_HEADER_KEY = "es.shard";
private static final String RESOURCE_HEADER_TYPE_KEY = "es.resource.type";
private static final String RESOURCE_HEADER_ID_KEY = "es.resource.id";
private static final Map<Integer, FunctionThatThrowsIOException<StreamInput, ? extends ElasticsearchException>> ID_TO_SUPPLIER;
private static final Map<Class<? extends ElasticsearchException>, ElasticsearchExceptionHandle> CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE;
private final Map<String, List<String>> headers = new HashMap<>();
/**
 * Construct a <code>ElasticsearchException</code> with the specified cause exception.
 * The detail message defaults to {@code cause.toString()} (or {@code null} when the
 * cause is {@code null}), per {@link RuntimeException#RuntimeException(Throwable)}.
 */
public ElasticsearchException(Throwable cause) {
    super(cause);
}
/**
 * Construct a <code>ElasticsearchException</code> with the specified detail message.
 *
 * The message can be parameterized using <code>{}</code> as placeholders for the given
 * arguments; formatting is performed by {@code LoggerMessageFormat.format}.
 *
 * @param msg the detail message
 * @param args the arguments for the message
 */
public ElasticsearchException(String msg, Object... args) {
    super(LoggerMessageFormat.format(msg, args));
}
/**
 * Construct a <code>ElasticsearchException</code> with the specified detail message
 * and nested exception.
 *
 * The message can be parameterized using <code>{}</code> as placeholders for the given
 * arguments; formatting is performed by {@code LoggerMessageFormat.format}.
 *
 * @param msg the detail message
 * @param cause the nested exception
 * @param args the arguments for the message
 */
public ElasticsearchException(String msg, Throwable cause, Object... args) {
    super(LoggerMessageFormat.format(msg, args), cause);
}
/**
 * Deserialization constructor: reads the optional message, the cause, the stack trace
 * (with suppressed exceptions), and finally the headers as a vint-counted sequence of
 * key/value-list pairs. The read order must mirror {@link #writeTo(StreamOutput)} exactly.
 */
public ElasticsearchException(StreamInput in) throws IOException {
    super(in.readOptionalString(), in.readThrowable());
    readStackTrace(this, in);
    int numKeys = in.readVInt();
    for (int i = 0; i < numKeys; i++) {
        final String key = in.readString();
        final int numValues = in.readVInt();
        // pre-size to the serialized count to avoid resizing while reading
        final ArrayList<String> values = new ArrayList<>(numValues);
        for (int j = 0; j < numValues; j++) {
            values.add(in.readString());
        }
        headers.put(key, values);
    }
}
/**
 * Stores the given values under {@code key}, replacing any header previously
 * registered for the same key.
 */
public void addHeader(String key, String... value) {
    // delegate to the List overload so both entry points share one code path
    addHeader(key, Arrays.asList(value));
}
/**
 * Adds a new header with the given key.
 * This method will replace the existing header if a header with the same key already exists.
 * The supplied list is stored as-is (not copied).
 */
public void addHeader(String key, List<String> value) {
    this.headers.put(key, value);
}
/**
 * Returns a set of all header keys on this exception.
 * The returned set is a live view backed by the internal header map.
 */
public Set<String> getHeaderKeys() {
    return headers.keySet();
}
/**
 * Returns the list of header values for the given key or {@code null} if no header for the
 * given key exists.
 */
public List<String> getHeader(String key) {
    return headers.get(key);
}
/**
 * Returns the REST status code associated with this exception: internal server
 * error unless the unwrapped cause supplies something more specific.
 */
public RestStatus status() {
    final Throwable unwrapped = unwrapCause();
    return unwrapped == this
            ? RestStatus.INTERNAL_SERVER_ERROR
            : ExceptionsHelper.status(unwrapped);
}
/**
 * Unwraps the actual cause from the exception for cases when the exception is a
 * {@link ElasticsearchWrapperException}; otherwise returns this exception itself.
 *
 * @see org.elasticsearch.ExceptionsHelper#unwrapCause(Throwable)
 */
public Throwable unwrapCause() {
    return ExceptionsHelper.unwrapCause(this);
}
/**
 * Returns the detail message, folding in the message of the nested exception
 * when one is present.
 */
public String getDetailedMessage() {
    final Throwable cause = getCause();
    if (cause == null) {
        return super.toString();
    }
    // ElasticsearchException causes contribute their own detailed message recursively;
    // anything else contributes its plain toString().
    final String causePart = cause instanceof ElasticsearchException
            ? ((ElasticsearchException) cause).getDetailedMessage()
            : cause.toString();
    return toString() + "; " + causePart;
}
/**
 * Retrieves the innermost cause of this exception; returns this exception
 * itself when it has no cause.
 */
public Throwable getRootCause() {
    Throwable deepest = this;
    // walk down the cause chain, stopping on null or a self-referential link
    for (Throwable next = getCause(); next != null && next != deepest; next = next.getCause()) {
        deepest = next;
    }
    return deepest;
}
/**
 * Check whether this exception contains an exception of the given type:
 * either it is of the given class itself or it contains a nested cause
 * of the given type.
 *
 * @param exType the exception type to look for; {@code null} yields {@code false}
 * @return whether there is a nested exception of the specified type
 */
public boolean contains(Class<? extends Throwable> exType) {
    if (exType == null) {
        return false;
    }
    if (exType.isInstance(this)) {
        return true;
    }
    final Throwable cause = getCause();
    if (cause == this) {
        // self-referential cause: nothing further to inspect
        return false;
    }
    if (cause instanceof ElasticsearchException) {
        // delegate so nested ElasticsearchExceptions apply these rules recursively
        return ((ElasticsearchException) cause).contains(exType);
    }
    for (Throwable current = cause; current != null; current = current.getCause()) {
        if (exType.isInstance(current)) {
            return true;
        }
        if (current.getCause() == current) {
            // guard against self-referential links in foreign exception chains
            break;
        }
    }
    return false;
}
/**
 * Serializes this exception: optional message, cause, stack trace (with suppressed
 * exceptions), then the headers as a vint-counted sequence of key/value-list pairs.
 * The write order must mirror {@link #ElasticsearchException(StreamInput)} exactly.
 */
public void writeTo(StreamOutput out) throws IOException {
    out.writeOptionalString(this.getMessage());
    out.writeThrowable(this.getCause());
    writeStackTraces(this, out);
    out.writeVInt(headers.size());
    for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
        out.writeString(entry.getKey());
        out.writeVInt(entry.getValue().size());
        for (String v : entry.getValue()) {
            out.writeString(v);
        }
    }
}
/**
 * Reconstructs an exception from the stream using the deserializer registered for {@code id}.
 *
 * @throws IllegalStateException if no exception is registered for the given id
 */
public static ElasticsearchException readException(StreamInput input, int id) throws IOException {
    FunctionThatThrowsIOException<StreamInput, ? extends ElasticsearchException> elasticsearchException = ID_TO_SUPPLIER.get(id);
    if (elasticsearchException == null) {
        throw new IllegalStateException("unknown exception for id: " + id);
    }
    return elasticsearchException.apply(input);
}
/**
 * Returns <code>true</code> iff the given class is registered for an exception to be read
 * (i.e. it has a wire serialization id).
 */
public static boolean isRegistered(Class<? extends Throwable> exception) {
    return CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE.containsKey(exception);
}
// Exposes the full set of registered exception classes; package-private, test-only.
static Set<Class<? extends ElasticsearchException>> getRegisteredKeys() { // for testing
    return CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE.keySet();
}
/**
 * Returns the serialization id of the given exception class.
 *
 * <p>NOTE(review): this throws {@code NullPointerException} for an unregistered class;
 * callers are presumably expected to guard with {@link #isRegistered} — confirm.
 */
public static int getId(Class<? extends ElasticsearchException> exception) {
    return CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE.get(exception).id;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    Throwable ex = ExceptionsHelper.unwrapCause(this);
    if (ex != this) {
        // this exception merely wraps another: render through the static helper,
        // which unwraps and dispatches to the real exception
        toXContent(builder, params, this);
    } else {
        builder.field("type", getExceptionName());
        builder.field("reason", getMessage());
        // "es."-prefixed headers are promoted to top-level fields with the prefix stripped
        for (String key : headers.keySet()) {
            if (key.startsWith("es.")) {
                List<String> values = headers.get(key);
                xContentHeader(builder, key.substring("es.".length()), values);
            }
        }
        innerToXContent(builder, params);
        renderHeader(builder, params);
        // stack traces are skipped by default (REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT = true)
        if (params.paramAsBoolean(REST_EXCEPTION_SKIP_STACK_TRACE, REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT) == false) {
            builder.field("stack_trace", ExceptionsHelper.stackTrace(this));
        }
    }
    return builder;
}
/**
 * Renders additional per exception information into the xcontent.
 * The default implementation renders only the cause.
 */
protected void innerToXContent(XContentBuilder builder, Params params) throws IOException {
    causeToXContent(builder, params);
}
/**
 * Renders this exception's cause, if any, as a nested {@code caused_by} object,
 * unless the caller requested causes to be skipped.
 */
protected void causeToXContent(XContentBuilder builder, Params params) throws IOException {
    final Throwable cause = getCause();
    if (cause == null) {
        return;
    }
    if (params.paramAsBoolean(REST_EXCEPTION_SKIP_CAUSE, REST_EXCEPTION_SKIP_CAUSE_DEFAULT)) {
        return;
    }
    builder.field("caused_by");
    builder.startObject();
    toXContent(builder, params, cause);
    builder.endObject();
}
/**
 * Renders all non-{@code es.}-prefixed headers under a lazily opened {@code header}
 * object; emits nothing at all when no such headers exist.
 */
protected final void renderHeader(XContentBuilder builder, Params params) throws IOException {
    boolean opened = false;
    for (Map.Entry<String, List<String>> header : headers.entrySet()) {
        if (header.getKey().startsWith("es.")) {
            // "es." headers are rendered at the top level, not under "header"
            continue;
        }
        if (!opened) {
            builder.startObject("header");
            opened = true;
        }
        xContentHeader(builder, header.getKey(), header.getValue());
    }
    if (opened) {
        builder.endObject();
    }
}
/**
 * Writes a header as a scalar field when it has exactly one value, or as an
 * array otherwise; null or empty value lists are skipped entirely.
 */
private void xContentHeader(XContentBuilder builder, String key, List<String> values) throws IOException {
    if (values == null || values.isEmpty()) {
        return;
    }
    if (values.size() == 1) {
        builder.field(key, values.get(0));
        return;
    }
    builder.startArray(key);
    for (String value : values) {
        builder.value(value);
    }
    builder.endArray();
}
/**
 * Static toXContent helper method that also renders non {@link org.elasticsearch.ElasticsearchException} instances as XContent.
 */
public static void toXContent(XContentBuilder builder, Params params, Throwable ex) throws IOException {
    ex = ExceptionsHelper.unwrapCause(ex);
    if (ex instanceof ElasticsearchException) {
        // ElasticsearchExceptions know how to render themselves (headers, metadata, ...)
        ((ElasticsearchException) ex).toXContent(builder, params);
    } else {
        // plain throwables get the minimal type/reason/caused_by rendering
        builder.field("type", getExceptionName(ex));
        builder.field("reason", ex.getMessage());
        if (ex.getCause() != null) {
            builder.field("caused_by");
            builder.startObject();
            toXContent(builder, params, ex.getCause());
            builder.endObject();
        }
        // stack traces are skipped by default, same as the instance method
        if (params.paramAsBoolean(REST_EXCEPTION_SKIP_STACK_TRACE, REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT) == false) {
            builder.field("stack_trace", ExceptionsHelper.stackTrace(ex));
        }
    }
}
/**
 * Returns the root cause of this exception, or multiple roots if different shards
 * caused different exceptions.
 */
public ElasticsearchException[] guessRootCauses() {
    final Throwable nested = getCause();
    // instanceof is null-safe, so no separate null check is needed here
    if (nested instanceof ElasticsearchException) {
        return ((ElasticsearchException) nested).guessRootCauses();
    }
    return new ElasticsearchException[]{this};
}
/**
 * Returns the root cause of this exception or multiple if different shards caused different exceptions.
 * If the given exception is not an instance of {@link org.elasticsearch.ElasticsearchException}, it is
 * wrapped in a single-element array as a placeholder exception that reports the original exception's
 * name and message.
 */
public static ElasticsearchException[] guessRootCauses(Throwable t) {
    Throwable ex = ExceptionsHelper.unwrapCause(t);
    if (ex instanceof ElasticsearchException) {
        return ((ElasticsearchException) ex).guessRootCauses();
    }
    // anonymous subclass so getExceptionName() reports the wrapped cause's name
    return new ElasticsearchException[]{new ElasticsearchException(t.getMessage(), t) {
        @Override
        protected String getExceptionName() {
            return getExceptionName(getCause());
        }
    }};
}
// Underscore-case name for this exception; overridable (see guessRootCauses wrapper).
protected String getExceptionName() {
    return getExceptionName(this);
}
/**
 * Returns an underscore-case name for the given exception, stripping any leading
 * <tt>Elasticsearch</tt> prefix from the simple class name.
 */
public static String getExceptionName(Throwable ex) {
    final String prefix = "Elasticsearch";
    String name = ex.getClass().getSimpleName();
    if (name.startsWith(prefix)) {
        name = name.substring(prefix.length());
    }
    return Strings.toUnderscoreCase(name);
}
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder();
    if (headers.containsKey(INDEX_HEADER_KEY)) {
        // prefix the message with "index[shard] " when that context is recorded
        sb.append(getIndex());
        if (headers.containsKey(SHARD_HEADER_KEY)) {
            sb.append('[').append(getShardId()).append(']');
        }
        sb.append(' ');
    }
    sb.append(ExceptionsHelper.detailedMessage(this).trim());
    return sb.toString();
}
/**
 * Deserializes stacktrace elements as well as suppressed exceptions from the given input stream and
 * adds them to the given exception.
 *
 * <p>Wire format (must mirror {@link #writeStackTraces}): vint element count, then per element the
 * class name, optional file name, method name and vint line number; followed by a vint suppressed
 * count and that many serialized throwables.
 */
public static <T extends Throwable> T readStackTrace(T throwable, StreamInput in) throws IOException {
    final int stackTraceElements = in.readVInt();
    StackTraceElement[] stackTrace = new StackTraceElement[stackTraceElements];
    for (int i = 0; i < stackTraceElements; i++) {
        final String declaringClasss = in.readString();
        final String fileName = in.readOptionalString();
        final String methodName = in.readString();
        final int lineNumber = in.readVInt();
        stackTrace[i] = new StackTraceElement(declaringClasss, methodName, fileName, lineNumber);
    }
    throwable.setStackTrace(stackTrace);
    int numSuppressed = in.readVInt();
    for (int i = 0; i < numSuppressed; i++) {
        throwable.addSuppressed(in.readThrowable());
    }
    return throwable;
}
/**
 * Serializes the given exception's stacktrace elements as well as its suppressed exceptions
 * to the given output stream. The write order must mirror {@link #readStackTrace} exactly.
 *
 * @return the same throwable, for call chaining
 */
public static <T extends Throwable> T writeStackTraces(T throwable, StreamOutput out) throws IOException {
    StackTraceElement[] stackTrace = throwable.getStackTrace();
    out.writeVInt(stackTrace.length);
    for (StackTraceElement element : stackTrace) {
        out.writeString(element.getClassName());
        // file name may be absent (e.g. synthetic frames), hence optional
        out.writeOptionalString(element.getFileName());
        out.writeString(element.getMethodName());
        out.writeVInt(element.getLineNumber());
    }
    Throwable[] suppressed = throwable.getSuppressed();
    out.writeVInt(suppressed.length);
    for (Throwable t : suppressed) {
        out.writeThrowable(t);
    }
    return throwable;
}
/**
* This is the list of Exceptions Elasticsearch can throw over the wire or save into a corruption marker. Each value in the enum is a
* single exception tying the Class to an id for use of the encode side and the id back to a constructor for use on the decode side. As
* such its ok if the exceptions to change names so long as their constructor can still read the exception. Each exception is listed
* in id order below. If you want to remove an exception leave a tombstone comment and mark the id as null in
* ExceptionSerializationTests.testIds.ids.
*/
enum ElasticsearchExceptionHandle {
INDEX_SHARD_SNAPSHOT_FAILED_EXCEPTION(org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException.class,
org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException::new, 0),
DFS_PHASE_EXECUTION_EXCEPTION(org.elasticsearch.search.dfs.DfsPhaseExecutionException.class,
org.elasticsearch.search.dfs.DfsPhaseExecutionException::new, 1),
EXECUTION_CANCELLED_EXCEPTION(org.elasticsearch.common.util.CancellableThreads.ExecutionCancelledException.class,
org.elasticsearch.common.util.CancellableThreads.ExecutionCancelledException::new, 2),
MASTER_NOT_DISCOVERED_EXCEPTION(org.elasticsearch.discovery.MasterNotDiscoveredException.class,
org.elasticsearch.discovery.MasterNotDiscoveredException::new, 3),
ELASTICSEARCH_SECURITY_EXCEPTION(org.elasticsearch.ElasticsearchSecurityException.class,
org.elasticsearch.ElasticsearchSecurityException::new, 4),
INDEX_SHARD_RESTORE_EXCEPTION(org.elasticsearch.index.snapshots.IndexShardRestoreException.class,
org.elasticsearch.index.snapshots.IndexShardRestoreException::new, 5),
INDEX_CLOSED_EXCEPTION(org.elasticsearch.indices.IndexClosedException.class,
org.elasticsearch.indices.IndexClosedException::new, 6),
BIND_HTTP_EXCEPTION(org.elasticsearch.http.BindHttpException.class,
org.elasticsearch.http.BindHttpException::new, 7),
REDUCE_SEARCH_PHASE_EXCEPTION(org.elasticsearch.action.search.ReduceSearchPhaseException.class,
org.elasticsearch.action.search.ReduceSearchPhaseException::new, 8),
NODE_CLOSED_EXCEPTION(org.elasticsearch.node.NodeClosedException.class,
org.elasticsearch.node.NodeClosedException::new, 9),
SNAPSHOT_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.SnapshotFailedEngineException.class,
org.elasticsearch.index.engine.SnapshotFailedEngineException::new, 10),
SHARD_NOT_FOUND_EXCEPTION(org.elasticsearch.index.shard.ShardNotFoundException.class,
org.elasticsearch.index.shard.ShardNotFoundException::new, 11),
CONNECT_TRANSPORT_EXCEPTION(org.elasticsearch.transport.ConnectTransportException.class,
org.elasticsearch.transport.ConnectTransportException::new, 12),
NOT_SERIALIZABLE_TRANSPORT_EXCEPTION(org.elasticsearch.transport.NotSerializableTransportException.class,
org.elasticsearch.transport.NotSerializableTransportException::new, 13),
RESPONSE_HANDLER_FAILURE_TRANSPORT_EXCEPTION(org.elasticsearch.transport.ResponseHandlerFailureTransportException.class,
org.elasticsearch.transport.ResponseHandlerFailureTransportException::new, 14),
INDEX_CREATION_EXCEPTION(org.elasticsearch.indices.IndexCreationException.class,
org.elasticsearch.indices.IndexCreationException::new, 15),
INDEX_NOT_FOUND_EXCEPTION(org.elasticsearch.index.IndexNotFoundException.class,
org.elasticsearch.index.IndexNotFoundException::new, 16),
ILLEGAL_SHARD_ROUTING_STATE_EXCEPTION(org.elasticsearch.cluster.routing.IllegalShardRoutingStateException.class,
org.elasticsearch.cluster.routing.IllegalShardRoutingStateException::new, 17),
BROADCAST_SHARD_OPERATION_FAILED_EXCEPTION(org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException.class,
org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException::new, 18),
RESOURCE_NOT_FOUND_EXCEPTION(org.elasticsearch.ResourceNotFoundException.class,
org.elasticsearch.ResourceNotFoundException::new, 19),
ACTION_TRANSPORT_EXCEPTION(org.elasticsearch.transport.ActionTransportException.class,
org.elasticsearch.transport.ActionTransportException::new, 20),
ELASTICSEARCH_GENERATION_EXCEPTION(org.elasticsearch.ElasticsearchGenerationException.class,
org.elasticsearch.ElasticsearchGenerationException::new, 21),
// 22 was CreateFailedEngineException
INDEX_SHARD_STARTED_EXCEPTION(org.elasticsearch.index.shard.IndexShardStartedException.class,
org.elasticsearch.index.shard.IndexShardStartedException::new, 23),
SEARCH_CONTEXT_MISSING_EXCEPTION(org.elasticsearch.search.SearchContextMissingException.class,
org.elasticsearch.search.SearchContextMissingException::new, 24),
SCRIPT_EXCEPTION(org.elasticsearch.script.ScriptException.class, org.elasticsearch.script.ScriptException::new, 25),
BATCH_OPERATION_EXCEPTION(org.elasticsearch.index.shard.TranslogRecoveryPerformer.BatchOperationException.class,
org.elasticsearch.index.shard.TranslogRecoveryPerformer.BatchOperationException::new, 26),
SNAPSHOT_CREATION_EXCEPTION(org.elasticsearch.snapshots.SnapshotCreationException.class,
org.elasticsearch.snapshots.SnapshotCreationException::new, 27),
DELETE_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.DeleteFailedEngineException.class,
org.elasticsearch.index.engine.DeleteFailedEngineException::new, 28),
DOCUMENT_MISSING_EXCEPTION(org.elasticsearch.index.engine.DocumentMissingException.class,
org.elasticsearch.index.engine.DocumentMissingException::new, 29),
SNAPSHOT_EXCEPTION(org.elasticsearch.snapshots.SnapshotException.class,
org.elasticsearch.snapshots.SnapshotException::new, 30),
INVALID_ALIAS_NAME_EXCEPTION(org.elasticsearch.indices.InvalidAliasNameException.class,
org.elasticsearch.indices.InvalidAliasNameException::new, 31),
INVALID_INDEX_NAME_EXCEPTION(org.elasticsearch.indices.InvalidIndexNameException.class,
org.elasticsearch.indices.InvalidIndexNameException::new, 32),
INDEX_PRIMARY_SHARD_NOT_ALLOCATED_EXCEPTION(org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException.class,
org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException::new, 33),
TRANSPORT_EXCEPTION(org.elasticsearch.transport.TransportException.class,
org.elasticsearch.transport.TransportException::new, 34),
ELASTICSEARCH_PARSE_EXCEPTION(org.elasticsearch.ElasticsearchParseException.class,
org.elasticsearch.ElasticsearchParseException::new, 35),
SEARCH_EXCEPTION(org.elasticsearch.search.SearchException.class,
org.elasticsearch.search.SearchException::new, 36),
MAPPER_EXCEPTION(org.elasticsearch.index.mapper.MapperException.class,
org.elasticsearch.index.mapper.MapperException::new, 37),
INVALID_TYPE_NAME_EXCEPTION(org.elasticsearch.indices.InvalidTypeNameException.class,
org.elasticsearch.indices.InvalidTypeNameException::new, 38),
SNAPSHOT_RESTORE_EXCEPTION(org.elasticsearch.snapshots.SnapshotRestoreException.class,
org.elasticsearch.snapshots.SnapshotRestoreException::new, 39),
PARSING_EXCEPTION(org.elasticsearch.common.ParsingException.class, org.elasticsearch.common.ParsingException::new, 40),
INDEX_SHARD_CLOSED_EXCEPTION(org.elasticsearch.index.shard.IndexShardClosedException.class,
org.elasticsearch.index.shard.IndexShardClosedException::new, 41),
RECOVER_FILES_RECOVERY_EXCEPTION(org.elasticsearch.indices.recovery.RecoverFilesRecoveryException.class,
org.elasticsearch.indices.recovery.RecoverFilesRecoveryException::new, 42),
TRUNCATED_TRANSLOG_EXCEPTION(org.elasticsearch.index.translog.TruncatedTranslogException.class,
org.elasticsearch.index.translog.TruncatedTranslogException::new, 43),
RECOVERY_FAILED_EXCEPTION(org.elasticsearch.indices.recovery.RecoveryFailedException.class,
org.elasticsearch.indices.recovery.RecoveryFailedException::new, 44),
INDEX_SHARD_RELOCATED_EXCEPTION(org.elasticsearch.index.shard.IndexShardRelocatedException.class,
org.elasticsearch.index.shard.IndexShardRelocatedException::new, 45),
NODE_SHOULD_NOT_CONNECT_EXCEPTION(org.elasticsearch.transport.NodeShouldNotConnectException.class,
org.elasticsearch.transport.NodeShouldNotConnectException::new, 46),
INDEX_TEMPLATE_ALREADY_EXISTS_EXCEPTION(org.elasticsearch.indices.IndexTemplateAlreadyExistsException.class,
org.elasticsearch.indices.IndexTemplateAlreadyExistsException::new, 47),
TRANSLOG_CORRUPTED_EXCEPTION(org.elasticsearch.index.translog.TranslogCorruptedException.class,
org.elasticsearch.index.translog.TranslogCorruptedException::new, 48),
CLUSTER_BLOCK_EXCEPTION(org.elasticsearch.cluster.block.ClusterBlockException.class,
org.elasticsearch.cluster.block.ClusterBlockException::new, 49),
FETCH_PHASE_EXECUTION_EXCEPTION(org.elasticsearch.search.fetch.FetchPhaseExecutionException.class,
org.elasticsearch.search.fetch.FetchPhaseExecutionException::new, 50),
INDEX_SHARD_ALREADY_EXISTS_EXCEPTION(org.elasticsearch.index.IndexShardAlreadyExistsException.class,
org.elasticsearch.index.IndexShardAlreadyExistsException::new, 51),
VERSION_CONFLICT_ENGINE_EXCEPTION(org.elasticsearch.index.engine.VersionConflictEngineException.class,
org.elasticsearch.index.engine.VersionConflictEngineException::new, 52),
ENGINE_EXCEPTION(org.elasticsearch.index.engine.EngineException.class, org.elasticsearch.index.engine.EngineException::new, 53),
// 54 was DocumentAlreadyExistsException, which is superseded by VersionConflictEngineException
NO_SUCH_NODE_EXCEPTION(org.elasticsearch.action.NoSuchNodeException.class, org.elasticsearch.action.NoSuchNodeException::new, 55),
SETTINGS_EXCEPTION(org.elasticsearch.common.settings.SettingsException.class,
org.elasticsearch.common.settings.SettingsException::new, 56),
INDEX_TEMPLATE_MISSING_EXCEPTION(org.elasticsearch.indices.IndexTemplateMissingException.class,
org.elasticsearch.indices.IndexTemplateMissingException::new, 57),
SEND_REQUEST_TRANSPORT_EXCEPTION(org.elasticsearch.transport.SendRequestTransportException.class,
org.elasticsearch.transport.SendRequestTransportException::new, 58),
ES_REJECTED_EXECUTION_EXCEPTION(org.elasticsearch.common.util.concurrent.EsRejectedExecutionException.class,
org.elasticsearch.common.util.concurrent.EsRejectedExecutionException::new, 59),
EARLY_TERMINATION_EXCEPTION(org.elasticsearch.common.lucene.Lucene.EarlyTerminationException.class,
org.elasticsearch.common.lucene.Lucene.EarlyTerminationException::new, 60),
ROUTING_VALIDATION_EXCEPTION(org.elasticsearch.cluster.routing.RoutingValidationException.class,
org.elasticsearch.cluster.routing.RoutingValidationException::new, 61),
NOT_SERIALIZABLE_EXCEPTION_WRAPPER(org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class,
org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper::new, 62),
ALIAS_FILTER_PARSING_EXCEPTION(org.elasticsearch.indices.AliasFilterParsingException.class,
org.elasticsearch.indices.AliasFilterParsingException::new, 63),
// 64 was DeleteByQueryFailedEngineException, which was removed in 3.0
GATEWAY_EXCEPTION(org.elasticsearch.gateway.GatewayException.class, org.elasticsearch.gateway.GatewayException::new, 65),
INDEX_SHARD_NOT_RECOVERING_EXCEPTION(org.elasticsearch.index.shard.IndexShardNotRecoveringException.class,
org.elasticsearch.index.shard.IndexShardNotRecoveringException::new, 66),
HTTP_EXCEPTION(org.elasticsearch.http.HttpException.class, org.elasticsearch.http.HttpException::new, 67),
ELASTICSEARCH_EXCEPTION(org.elasticsearch.ElasticsearchException.class,
org.elasticsearch.ElasticsearchException::new, 68),
SNAPSHOT_MISSING_EXCEPTION(org.elasticsearch.snapshots.SnapshotMissingException.class,
org.elasticsearch.snapshots.SnapshotMissingException::new, 69),
PRIMARY_MISSING_ACTION_EXCEPTION(org.elasticsearch.action.PrimaryMissingActionException.class,
org.elasticsearch.action.PrimaryMissingActionException::new, 70),
FAILED_NODE_EXCEPTION(org.elasticsearch.action.FailedNodeException.class, org.elasticsearch.action.FailedNodeException::new, 71),
SEARCH_PARSE_EXCEPTION(org.elasticsearch.search.SearchParseException.class, org.elasticsearch.search.SearchParseException::new, 72),
CONCURRENT_SNAPSHOT_EXECUTION_EXCEPTION(org.elasticsearch.snapshots.ConcurrentSnapshotExecutionException.class,
org.elasticsearch.snapshots.ConcurrentSnapshotExecutionException::new, 73),
BLOB_STORE_EXCEPTION(org.elasticsearch.common.blobstore.BlobStoreException.class,
org.elasticsearch.common.blobstore.BlobStoreException::new, 74),
INCOMPATIBLE_CLUSTER_STATE_VERSION_EXCEPTION(org.elasticsearch.cluster.IncompatibleClusterStateVersionException.class,
org.elasticsearch.cluster.IncompatibleClusterStateVersionException::new, 75),
RECOVERY_ENGINE_EXCEPTION(org.elasticsearch.index.engine.RecoveryEngineException.class,
org.elasticsearch.index.engine.RecoveryEngineException::new, 76),
UNCATEGORIZED_EXECUTION_EXCEPTION(org.elasticsearch.common.util.concurrent.UncategorizedExecutionException.class,
org.elasticsearch.common.util.concurrent.UncategorizedExecutionException::new, 77),
TIMESTAMP_PARSING_EXCEPTION(org.elasticsearch.action.TimestampParsingException.class,
org.elasticsearch.action.TimestampParsingException::new, 78),
ROUTING_MISSING_EXCEPTION(org.elasticsearch.action.RoutingMissingException.class,
org.elasticsearch.action.RoutingMissingException::new, 79),
INDEX_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.IndexFailedEngineException.class,
org.elasticsearch.index.engine.IndexFailedEngineException::new, 80),
INDEX_SHARD_RESTORE_FAILED_EXCEPTION(org.elasticsearch.index.snapshots.IndexShardRestoreFailedException.class,
org.elasticsearch.index.snapshots.IndexShardRestoreFailedException::new, 81),
REPOSITORY_EXCEPTION(org.elasticsearch.repositories.RepositoryException.class,
org.elasticsearch.repositories.RepositoryException::new, 82),
RECEIVE_TIMEOUT_TRANSPORT_EXCEPTION(org.elasticsearch.transport.ReceiveTimeoutTransportException.class,
org.elasticsearch.transport.ReceiveTimeoutTransportException::new, 83),
NODE_DISCONNECTED_EXCEPTION(org.elasticsearch.transport.NodeDisconnectedException.class,
org.elasticsearch.transport.NodeDisconnectedException::new, 84),
ALREADY_EXPIRED_EXCEPTION(org.elasticsearch.index.AlreadyExpiredException.class,
org.elasticsearch.index.AlreadyExpiredException::new, 85),
AGGREGATION_EXECUTION_EXCEPTION(org.elasticsearch.search.aggregations.AggregationExecutionException.class,
org.elasticsearch.search.aggregations.AggregationExecutionException::new, 86),
// 87 used to be for MergeMappingException
INVALID_INDEX_TEMPLATE_EXCEPTION(org.elasticsearch.indices.InvalidIndexTemplateException.class,
org.elasticsearch.indices.InvalidIndexTemplateException::new, 88),
REFRESH_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.RefreshFailedEngineException.class,
org.elasticsearch.index.engine.RefreshFailedEngineException::new, 90),
AGGREGATION_INITIALIZATION_EXCEPTION(org.elasticsearch.search.aggregations.AggregationInitializationException.class,
org.elasticsearch.search.aggregations.AggregationInitializationException::new, 91),
DELAY_RECOVERY_EXCEPTION(org.elasticsearch.indices.recovery.DelayRecoveryException.class,
org.elasticsearch.indices.recovery.DelayRecoveryException::new, 92),
// 93 used to be for IndexWarmerMissingException
NO_NODE_AVAILABLE_EXCEPTION(org.elasticsearch.client.transport.NoNodeAvailableException.class,
org.elasticsearch.client.transport.NoNodeAvailableException::new, 94),
INVALID_SNAPSHOT_NAME_EXCEPTION(org.elasticsearch.snapshots.InvalidSnapshotNameException.class,
org.elasticsearch.snapshots.InvalidSnapshotNameException::new, 96),
ILLEGAL_INDEX_SHARD_STATE_EXCEPTION(org.elasticsearch.index.shard.IllegalIndexShardStateException.class,
org.elasticsearch.index.shard.IllegalIndexShardStateException::new, 97),
INDEX_SHARD_SNAPSHOT_EXCEPTION(org.elasticsearch.index.snapshots.IndexShardSnapshotException.class,
org.elasticsearch.index.snapshots.IndexShardSnapshotException::new, 98),
INDEX_SHARD_NOT_STARTED_EXCEPTION(org.elasticsearch.index.shard.IndexShardNotStartedException.class,
org.elasticsearch.index.shard.IndexShardNotStartedException::new, 99),
SEARCH_PHASE_EXECUTION_EXCEPTION(org.elasticsearch.action.search.SearchPhaseExecutionException.class,
org.elasticsearch.action.search.SearchPhaseExecutionException::new, 100),
ACTION_NOT_FOUND_TRANSPORT_EXCEPTION(org.elasticsearch.transport.ActionNotFoundTransportException.class,
org.elasticsearch.transport.ActionNotFoundTransportException::new, 101),
TRANSPORT_SERIALIZATION_EXCEPTION(org.elasticsearch.transport.TransportSerializationException.class,
org.elasticsearch.transport.TransportSerializationException::new, 102),
REMOTE_TRANSPORT_EXCEPTION(org.elasticsearch.transport.RemoteTransportException.class,
org.elasticsearch.transport.RemoteTransportException::new, 103),
ENGINE_CREATION_FAILURE_EXCEPTION(org.elasticsearch.index.engine.EngineCreationFailureException.class,
org.elasticsearch.index.engine.EngineCreationFailureException::new, 104),
ROUTING_EXCEPTION(org.elasticsearch.cluster.routing.RoutingException.class,
org.elasticsearch.cluster.routing.RoutingException::new, 105),
INDEX_SHARD_RECOVERY_EXCEPTION(org.elasticsearch.index.shard.IndexShardRecoveryException.class,
org.elasticsearch.index.shard.IndexShardRecoveryException::new, 106),
REPOSITORY_MISSING_EXCEPTION(org.elasticsearch.repositories.RepositoryMissingException.class,
org.elasticsearch.repositories.RepositoryMissingException::new, 107),
PERCOLATOR_EXCEPTION(org.elasticsearch.index.percolator.PercolatorException.class,
org.elasticsearch.index.percolator.PercolatorException::new, 108),
DOCUMENT_SOURCE_MISSING_EXCEPTION(org.elasticsearch.index.engine.DocumentSourceMissingException.class,
org.elasticsearch.index.engine.DocumentSourceMissingException::new, 109),
FLUSH_NOT_ALLOWED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.FlushNotAllowedEngineException.class,
org.elasticsearch.index.engine.FlushNotAllowedEngineException::new, 110),
NO_CLASS_SETTINGS_EXCEPTION(org.elasticsearch.common.settings.NoClassSettingsException.class,
org.elasticsearch.common.settings.NoClassSettingsException::new, 111),
BIND_TRANSPORT_EXCEPTION(org.elasticsearch.transport.BindTransportException.class,
org.elasticsearch.transport.BindTransportException::new, 112),
ALIASES_NOT_FOUND_EXCEPTION(org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesNotFoundException.class,
org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesNotFoundException::new, 113),
INDEX_SHARD_RECOVERING_EXCEPTION(org.elasticsearch.index.shard.IndexShardRecoveringException.class,
org.elasticsearch.index.shard.IndexShardRecoveringException::new, 114),
TRANSLOG_EXCEPTION(org.elasticsearch.index.translog.TranslogException.class,
org.elasticsearch.index.translog.TranslogException::new, 115),
PROCESS_CLUSTER_EVENT_TIMEOUT_EXCEPTION(org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException.class,
org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException::new, 116),
RETRY_ON_PRIMARY_EXCEPTION(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnPrimaryException.class,
org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnPrimaryException::new, 117),
ELASTICSEARCH_TIMEOUT_EXCEPTION(org.elasticsearch.ElasticsearchTimeoutException.class,
org.elasticsearch.ElasticsearchTimeoutException::new, 118),
QUERY_PHASE_EXECUTION_EXCEPTION(org.elasticsearch.search.query.QueryPhaseExecutionException.class,
org.elasticsearch.search.query.QueryPhaseExecutionException::new, 119),
REPOSITORY_VERIFICATION_EXCEPTION(org.elasticsearch.repositories.RepositoryVerificationException.class,
org.elasticsearch.repositories.RepositoryVerificationException::new, 120),
INVALID_AGGREGATION_PATH_EXCEPTION(org.elasticsearch.search.aggregations.InvalidAggregationPathException.class,
org.elasticsearch.search.aggregations.InvalidAggregationPathException::new, 121),
INDEX_ALREADY_EXISTS_EXCEPTION(org.elasticsearch.indices.IndexAlreadyExistsException.class,
org.elasticsearch.indices.IndexAlreadyExistsException::new, 123),
SCRIPT_PARSE_EXCEPTION(org.elasticsearch.script.Script.ScriptParseException.class,
org.elasticsearch.script.Script.ScriptParseException::new, 124),
HTTP_ON_TRANSPORT_EXCEPTION(org.elasticsearch.transport.netty.SizeHeaderFrameDecoder.HttpOnTransportException.class,
org.elasticsearch.transport.netty.SizeHeaderFrameDecoder.HttpOnTransportException::new, 125),
MAPPER_PARSING_EXCEPTION(org.elasticsearch.index.mapper.MapperParsingException.class,
org.elasticsearch.index.mapper.MapperParsingException::new, 126),
SEARCH_CONTEXT_EXCEPTION(org.elasticsearch.search.SearchContextException.class,
org.elasticsearch.search.SearchContextException::new, 127),
SEARCH_SOURCE_BUILDER_EXCEPTION(org.elasticsearch.search.builder.SearchSourceBuilderException.class,
org.elasticsearch.search.builder.SearchSourceBuilderException::new, 128),
ENGINE_CLOSED_EXCEPTION(org.elasticsearch.index.engine.EngineClosedException.class,
org.elasticsearch.index.engine.EngineClosedException::new, 129),
NO_SHARD_AVAILABLE_ACTION_EXCEPTION(org.elasticsearch.action.NoShardAvailableActionException.class,
org.elasticsearch.action.NoShardAvailableActionException::new, 130),
UNAVAILABLE_SHARDS_EXCEPTION(org.elasticsearch.action.UnavailableShardsException.class,
org.elasticsearch.action.UnavailableShardsException::new, 131),
FLUSH_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.FlushFailedEngineException.class,
org.elasticsearch.index.engine.FlushFailedEngineException::new, 132),
CIRCUIT_BREAKING_EXCEPTION(org.elasticsearch.common.breaker.CircuitBreakingException.class,
org.elasticsearch.common.breaker.CircuitBreakingException::new, 133),
NODE_NOT_CONNECTED_EXCEPTION(org.elasticsearch.transport.NodeNotConnectedException.class,
org.elasticsearch.transport.NodeNotConnectedException::new, 134),
STRICT_DYNAMIC_MAPPING_EXCEPTION(org.elasticsearch.index.mapper.StrictDynamicMappingException.class,
org.elasticsearch.index.mapper.StrictDynamicMappingException::new, 135),
RETRY_ON_REPLICA_EXCEPTION(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException.class,
org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException::new, 136),
TYPE_MISSING_EXCEPTION(org.elasticsearch.indices.TypeMissingException.class,
org.elasticsearch.indices.TypeMissingException::new, 137),
FAILED_TO_COMMIT_CLUSTER_STATE_EXCEPTION(org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException.class,
org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException::new, 140),
QUERY_SHARD_EXCEPTION(org.elasticsearch.index.query.QueryShardException.class,
org.elasticsearch.index.query.QueryShardException::new, 141),
NO_LONGER_PRIMARY_SHARD_EXCEPTION(ShardStateAction.NoLongerPrimaryShardException.class,
ShardStateAction.NoLongerPrimaryShardException::new, 142);
// The concrete exception type this handle maps to; stored explicitly because it
// cannot be recovered reliably from the constructor reference alone.
final Class<? extends ElasticsearchException> exceptionClass;
// Deserializing constructor: rebuilds the exception from a StreamInput.
final FunctionThatThrowsIOException<StreamInput, ? extends ElasticsearchException> constructor;
// Stable wire id for this exception type; ids of removed exceptions are never
// reused (hence the gaps in the constant list above).
final int id;
<E extends ElasticsearchException> ElasticsearchExceptionHandle(Class<E> exceptionClass,
FunctionThatThrowsIOException<StreamInput, E> constructor, int id) {
// We need the exceptionClass because you can't dig it out of the constructor reliably.
this.exceptionClass = exceptionClass;
this.constructor = constructor;
this.id = id;
}
}
static {
// Build both lookup directions once from the handle enum:
// wire id -> deserializing constructor, and exception class -> handle.
ID_TO_SUPPLIER = unmodifiableMap(Arrays
.stream(ElasticsearchExceptionHandle.values()).collect(Collectors.toMap(e -> e.id, e -> e.constructor)));
CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE = unmodifiableMap(Arrays
.stream(ElasticsearchExceptionHandle.values()).collect(Collectors.toMap(e -> e.exceptionClass, e -> e)));
}
/**
 * Returns the {@link Index} recorded in this exception's headers, or
 * {@code null} when no index header is present.
 */
public Index getIndex() {
    List<String> index = getHeader(INDEX_HEADER_KEY);
    if (index != null && index.isEmpty() == false) {
        List<String> index_uuid = getHeader(INDEX_HEADER_KEY_UUID);
        // Guard against a missing/empty uuid header so we fail soft instead of
        // throwing an NPE; setIndex(Index) always writes both headers, but
        // headers may also have been populated externally.
        if (index_uuid == null || index_uuid.isEmpty()) {
            return new Index(index.get(0), INDEX_UUID_NA_VALUE);
        }
        return new Index(index.get(0), index_uuid.get(0));
    }
    return null;
}
/**
 * Returns the {@link ShardId} recorded in this exception's headers, or
 * {@code null} when no shard header is present.
 */
public ShardId getShardId() {
    final List<String> values = getHeader(SHARD_HEADER_KEY);
    if (values == null || values.isEmpty()) {
        return null;
    }
    return new ShardId(getIndex(), Integer.parseInt(values.get(0)));
}
/**
 * Records the given index (name and uuid) in this exception's headers.
 * A {@code null} index is silently ignored.
 */
public void setIndex(Index index) {
    if (index == null) {
        return;
    }
    addHeader(INDEX_HEADER_KEY, index.getName());
    addHeader(INDEX_HEADER_KEY_UUID, index.getUUID());
}
/**
 * Records the given index name in this exception's headers, using the
 * "not available" placeholder as the uuid. A {@code null} name is ignored.
 */
public void setIndex(String index) {
    if (index == null) {
        return;
    }
    setIndex(new Index(index, INDEX_UUID_NA_VALUE));
}
/**
 * Records the given shard (its index plus its shard number) in this
 * exception's headers. A {@code null} shard id is silently ignored.
 */
public void setShard(ShardId shardId) {
    if (shardId == null) {
        return;
    }
    setIndex(shardId.getIndex());
    addHeader(SHARD_HEADER_KEY, Integer.toString(shardId.id()));
}
// Records the given index name and shard number in this exception's headers.
// NOTE(review): unlike setShard(ShardId), only the index write is null-guarded
// (via the delegated setIndex); the shard header is written unconditionally.
public void setShard(String index, int shardId) {
setIndex(index);
addHeader(SHARD_HEADER_KEY, Integer.toString(shardId));
}
/**
 * Records the resource type and one or more resource ids involved in this
 * exception as headers. The type must not be null (assertion-checked only).
 */
public void setResources(String type, String... id) {
assert type != null;
addHeader(RESOURCE_HEADER_ID_KEY, id);
addHeader(RESOURCE_HEADER_TYPE_KEY, type);
}
// Returns the recorded resource ids, or null when none were set.
public List<String> getResourceId() {
return getHeader(RESOURCE_HEADER_ID_KEY);
}
/**
 * Returns the recorded resource type, or {@code null} when none was set.
 * At most one type value is expected (assertion-checked).
 */
public String getResourceType() {
    final List<String> values = getHeader(RESOURCE_HEADER_TYPE_KEY);
    if (values == null || values.isEmpty()) {
        return null;
    }
    assert values.size() == 1;
    return values.get(0);
}
/**
 * Renders the given throwable as an "error" object on the builder: first a
 * "root_cause" array with each guessed root cause (serialized with its own
 * cause chain suppressed), then the full exception rendered below it.
 */
public static void renderThrowable(XContentBuilder builder, Params params, Throwable t) throws IOException {
builder.startObject("error");
final ElasticsearchException[] rootCauses = ElasticsearchException.guessRootCauses(t);
builder.field("root_cause");
builder.startArray();
for (ElasticsearchException rootCause : rootCauses) {
builder.startObject();
// Skip the cause chain here; the complete chain is rendered once below.
rootCause.toXContent(builder, new ToXContent.DelegatingMapParams(
Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_CAUSE, "true"), params));
builder.endObject();
}
builder.endArray();
ElasticsearchException.toXContent(builder, params, t);
builder.endObject();
}
/**
 * A {@code Function}-like interface whose {@code apply} may throw an
 * {@link IOException}; used for the stream-based exception constructors above.
 */
interface FunctionThatThrowsIOException<T, R> {
R apply(T t) throws IOException;
}
}
| |
/*
* Licensed to Aduna under one or more contributor license agreements.
* See the NOTICE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Aduna licenses this file to you under the terms of the Aduna BSD
* License (the "License"); you may not use this file except in compliance
* with the License. See the LICENSE.txt file distributed with this work
* for the full License.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package org.openrdf.workbench.base;
import java.io.IOException;
import java.io.OutputStream;
import java.util.regex.Pattern;
import javax.servlet.Servlet;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import info.aduna.app.AppConfiguration;
import info.aduna.app.AppVersion;
import info.aduna.io.MavenUtil;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.query.resultio.BasicQueryWriterSettings;
import org.openrdf.query.resultio.BooleanQueryResultFormat;
import org.openrdf.query.resultio.QueryResultFormat;
import org.openrdf.query.resultio.QueryResultIO;
import org.openrdf.query.resultio.QueryResultWriter;
import org.openrdf.query.resultio.TupleQueryResultFormat;
import org.openrdf.query.resultio.UnsupportedQueryResultFormatException;
import org.openrdf.rio.helpers.BasicWriterSettings;
import org.openrdf.workbench.util.TupleResultBuilder;
/**
 * Base class for OpenRDF Workbench servlets. Holds the servlet and application
 * configuration and provides content-negotiation helpers that choose a SPARQL
 * query result writer (and the matching response content type) from the
 * request's Accept and User-Agent headers, including optional JSONP support.
 */
public abstract class BaseServlet implements Servlet {

    protected final Logger log = LoggerFactory.getLogger(this.getClass());

    protected static final String SERVER_USER = "server-user";

    protected static final String SERVER_PASSWORD = "server-password";

    protected static final String ACCEPT = "Accept";

    /**
     * This response content type is always used for JSONP results.
     */
    protected static final String APPLICATION_JAVASCRIPT = "application/javascript";

    /**
     * This response content type is used in cases where application/xml is
     * explicitly requested, or in cases where the user agent is known to be a
     * commonly available browser.
     */
    protected static final String APPLICATION_XML = "application/xml";

    /**
     * This response content type is used for SPARQL Results XML results in
     * non-browser user agents or other cases where application/xml is not
     * specifically requested.
     */
    protected static final String APPLICATION_SPARQL_RESULTS_XML = "application/sparql-results+xml";

    protected static final String TEXT_HTML = "text/html";

    protected static final String USER_AGENT = "User-Agent";

    protected static final String MSIE = "MSIE";

    protected static final String MOZILLA = "Mozilla";

    /**
     * JSONP property for enabling/disabling jsonp functionality.
     */
    protected static final String JSONP_ENABLED = "org.openrdf.workbench.jsonp.enabled";

    /**
     * This query parameter is only used in cases where the configuration
     * property is not setup explicitly.
     */
    protected static final String DEFAULT_JSONP_CALLBACK_PARAMETER = "callback";

    /**
     * Matches a plain javascript identifier (a letter followed by word
     * characters); used to reject unsafe JSONP callback names.
     */
    protected static final Pattern JSONP_VALIDATOR = Pattern.compile("^[A-Za-z]\\w+$");

    protected static final String JSONP_CALLBACK_PARAMETER = "org.openrdf.workbench.jsonp.callbackparameter";

    protected ServletConfig config;

    protected AppConfiguration appConfig;

    public ServletConfig getServletConfig() {
        return config;
    }

    public String getServletInfo() {
        return getClass().getSimpleName();
    }

    /**
     * Stores the servlet configuration and initializes the workbench
     * {@link AppConfiguration}, deliberately without loading a log
     * configuration.
     *
     * @throws ServletException
     *         if the application configuration cannot be initialized
     */
    public void init(final ServletConfig config)
        throws ServletException
    {
        this.config = config;
        this.appConfig = new AppConfiguration("openrdf-workbench", "OpenRDF Sesame Workbench",
                AppVersion.parse(MavenUtil.loadVersion("org.openrdf.sesame", "sesame-http-workbench", "dev")));
        try {
            // Suppress loading of log configuration.
            this.appConfig.init(false);
        }
        catch (IOException e) {
            throw new ServletException(e);
        }
    }

    public void destroy() {
    }

    /**
     * Narrows the generic servlet request/response to their HTTP variants and
     * delegates to {@link #service(HttpServletRequest, HttpServletResponse)}.
     */
    public final void service(final ServletRequest req, final ServletResponse resp)
        throws ServletException, IOException
    {
        final HttpServletRequest hreq = (HttpServletRequest)req;
        final HttpServletResponse hresp = (HttpServletResponse)resp;
        service(hreq, hresp);
    }

    public void service(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException
    {
        // default empty implementation
    }

    /**
     * Returns the tuple query result format matching the request's Accept
     * header, or null when the header is absent or matches no tuple format.
     */
    protected QueryResultFormat getTupleResultFormat(final HttpServletRequest req, final ServletResponse resp)
    {
        String header = req.getHeader(ACCEPT);
        if (header != null) {
            // FIX: the Accept header carries a MIME type, not a file name, so
            // it must be resolved with the MIME-type lookup instead of
            // getParserFormatForFileName.
            TupleQueryResultFormat tupleFormat = QueryResultIO.getParserFormatForMIMEType(header);
            if (tupleFormat != null) {
                return tupleFormat;
            }
        }
        return null;
    }

    /**
     * Returns the boolean query result format matching the request's Accept
     * header, or null when the header is absent or matches no boolean format.
     */
    protected QueryResultFormat getBooleanResultFormat(final HttpServletRequest req, final ServletResponse resp)
    {
        String header = req.getHeader(ACCEPT);
        if (header != null) {
            // Then try boolean format
            BooleanQueryResultFormat booleanFormat = QueryResultIO.getBooleanParserFormatForMIMEType(header);
            if (booleanFormat != null) {
                return booleanFormat;
            }
        }
        return null;
    }

    /**
     * Returns the JSON tuple format when the client asked exactly for
     * application/javascript (i.e. a JSONP request), otherwise null.
     */
    protected QueryResultFormat getJSONPResultFormat(final HttpServletRequest req, final ServletResponse resp)
    {
        String header = req.getHeader(ACCEPT);
        if (header != null) {
            if (header.equals(APPLICATION_JAVASCRIPT)) {
                return TupleQueryResultFormat.JSON;
            }
        }
        return null;
    }

    /**
     * Creates a {@link QueryResultWriter} for the best format matching the
     * request (tuple, then boolean, then JSONP), falling back to SPARQL
     * Results XML, writing to the given stream.
     */
    protected QueryResultWriter getResultWriter(final HttpServletRequest req, final ServletResponse resp,
            final OutputStream outputStream)
        throws UnsupportedQueryResultFormatException, IOException
    {
        QueryResultFormat resultFormat = getTupleResultFormat(req, resp);
        if (resultFormat == null) {
            resultFormat = getBooleanResultFormat(req, resp);
        }
        if (resultFormat == null) {
            resultFormat = getJSONPResultFormat(req, resp);
        }
        if (resultFormat == null) {
            // This is safe with the current SPARQL Results XML implementation that
            // is able to write out boolean results from the "Tuple" writer.
            resultFormat = TupleQueryResultFormat.SPARQL;
        }
        return QueryResultIO.createWriter(resultFormat, outputStream);
    }

    /**
     * Gets a {@link TupleResultBuilder} based on the Accept header, and sets the
     * result content type to the best available match for that, returning a
     * builder that can be used to write out the results.
     *
     * @param req
     *        the current HTTP request
     * @param resp
     *        the current HTTP response
     * @param outputStream
     *        the stream the results will be written to
     * @return a builder that can be used to write out the results
     * @throws IOException
     * @throws UnsupportedQueryResultFormatException
     */
    protected TupleResultBuilder getTupleResultBuilder(HttpServletRequest req, HttpServletResponse resp,
            OutputStream outputStream)
        throws UnsupportedQueryResultFormatException, IOException
    {
        // FIX: honor the outputStream parameter instead of always wrapping
        // resp.getOutputStream(); callers decide where the results go.
        QueryResultWriter resultWriter = getResultWriter(req, resp, outputStream);
        String contentType = resultWriter.getQueryResultFormat().getDefaultMIMEType();
        // HACK: In order to make XSLT stylesheet driven user interface work,
        // browser user agents must receive application/xml if they are going to
        // actually get application/sparql-results+xml
        // NOTE: This will test against both BooleanQueryResultsFormat and
        // TupleQueryResultsFormat
        if (contentType.equals(APPLICATION_SPARQL_RESULTS_XML)) {
            String uaHeader = req.getHeader(USER_AGENT);
            String acceptHeader = req.getHeader(ACCEPT);
            if (acceptHeader != null && acceptHeader.contains(APPLICATION_SPARQL_RESULTS_XML)) {
                // Do nothing, leave the contentType as
                // application/sparql-results+xml
            }
            // Switch back to application/xml for user agents who claim to be
            // Mozilla compatible
            else if (uaHeader != null && uaHeader.contains(MOZILLA)) {
                contentType = APPLICATION_XML;
            }
            // Switch back to application/xml for user agents who accept either
            // application/xml or text/html
            else if (acceptHeader != null
                    && (acceptHeader.contains(APPLICATION_XML) || acceptHeader.contains(TEXT_HTML)))
            {
                contentType = APPLICATION_XML;
            }
        }
        // Setup qname support for result writers who declare that they support it
        if (resultWriter.getSupportedSettings().contains(BasicQueryWriterSettings.ADD_SESAME_QNAME)) {
            resultWriter.getWriterConfig().set(BasicQueryWriterSettings.ADD_SESAME_QNAME, true);
        }
        // Search for and setup the JSONP callback function if the user requested
        // it and the result writer could handle it
        if (resultWriter.getSupportedSettings().contains(BasicQueryWriterSettings.JSONP_CALLBACK)) {
            // JSONP is enabled in the default properties, but if users setup their
            // own application.properties file then it must be inserted explicitly
            // to be enabled
            if (appConfig.getProperties().containsKey(JSONP_ENABLED)) {
                String jsonpEnabledProperty = appConfig.getProperties().getProperty(JSONP_ENABLED);
                // check if jsonp is a property and it is set to true
                if (jsonpEnabledProperty != null && Boolean.parseBoolean(jsonpEnabledProperty)) {
                    String parameterName = null;
                    // check whether they customised the parameter to use to identify
                    // the jsonp callback
                    if (appConfig.getProperties().containsKey(JSONP_CALLBACK_PARAMETER)) {
                        parameterName = appConfig.getProperties().getProperty(JSONP_CALLBACK_PARAMETER);
                    }
                    // Use default parameter name if it was missing in the
                    // configuration after jsonp was enabled
                    if (parameterName == null || parameterName.trim().isEmpty()) {
                        parameterName = DEFAULT_JSONP_CALLBACK_PARAMETER;
                    }
                    String parameter = req.getParameter(parameterName);
                    if (parameter != null) {
                        parameter = parameter.trim();
                        if (parameter.isEmpty()) {
                            parameter = BasicQueryWriterSettings.JSONP_CALLBACK.getDefaultValue();
                        }
                        // check callback function name is a valid javascript function
                        // name
                        if (!JSONP_VALIDATOR.matcher(parameter).matches()) {
                            throw new IOException("Callback function name was invalid");
                        }
                        resultWriter.getWriterConfig().set(BasicQueryWriterSettings.JSONP_CALLBACK, parameter);
                        // explicitly set the content type to "application/javascript"
                        // to fit JSONP best practices
                        contentType = APPLICATION_JAVASCRIPT;
                    }
                }
            }
        }
        resp.setContentType(contentType);
        // TODO: Make the following two settings configurable
        // Convert xsd:string back to plain literals where this behaviour is
        // supported
        if (resultWriter.getSupportedSettings().contains(BasicWriterSettings.XSD_STRING_TO_PLAIN_LITERAL)) {
            resultWriter.getWriterConfig().set(BasicWriterSettings.XSD_STRING_TO_PLAIN_LITERAL, true);
        }
        // Convert rdf:langString back to language literals where this behaviour
        // is supported
        if (resultWriter.getSupportedSettings().contains(BasicWriterSettings.RDF_LANGSTRING_TO_LANG_LITERAL)) {
            resultWriter.getWriterConfig().set(BasicWriterSettings.RDF_LANGSTRING_TO_LANG_LITERAL, true);
        }
        return new TupleResultBuilder(resultWriter, ValueFactoryImpl.getInstance());
    }
}
| |
/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.impl.neomedia.codec;
import java.io.*;
import java.util.*;
import javax.media.*;
import org.jitsi.impl.neomedia.*;
import org.jitsi.util.*;
/**
* Utility class that handles registration of FMJ packages and plugins.
*
* @author Damian Minkov
* @author Lyubomir Marinov
* @author Boris Grozev
*/
public class FMJPlugInConfiguration
{
/**
 * Whether the custom codecs have been registered with FMJ.
 */
private static boolean codecsRegistered = false;
/**
 * Whether the custom multiplexers have been registered with FMJ.
 */
private static boolean multiplexersRegistered = false;
/**
 * The additional custom JMF codecs. Entries may be <tt>null</tt> when a codec
 * is compiled out for the current platform/configuration (e.g. Android-only or
 * G.729-gated codecs); null entries are skipped during registration.
 */
private static final String[] CUSTOM_CODECS
= {
// "org.jitsi.impl.neomedia.codec.AndroidMediaCodec",
OSUtils.IS_ANDROID
? "org.jitsi.impl.neomedia.codec.video.AndroidEncoder"
: null,
OSUtils.IS_ANDROID
? "org.jitsi.impl.neomedia.codec.video.AndroidDecoder"
: null,
"org.jitsi.impl.neomedia.codec.audio.alaw.DePacketizer",
"org.jitsi.impl.neomedia.codec.audio.alaw.JavaEncoder",
"org.jitsi.impl.neomedia.codec.audio.alaw.Packetizer",
"org.jitsi.impl.neomedia.codec.audio.ulaw.JavaDecoder",
"org.jitsi.impl.neomedia.codec.audio.ulaw.JavaEncoder",
"org.jitsi.impl.neomedia.codec.audio.ulaw.Packetizer",
"org.jitsi.impl.neomedia.codec.audio.opus.JNIDecoder",
"org.jitsi.impl.neomedia.codec.audio.opus.JNIEncoder",
"org.jitsi.impl.neomedia.codec.audio.speex.JNIDecoder",
"org.jitsi.impl.neomedia.codec.audio.speex.JNIEncoder",
"org.jitsi.impl.neomedia.codec.audio.speex.SpeexResampler",
// MP3
"org.jitsi.impl.neomedia.codec.audio.mp3.JNIEncoder",
"org.jitsi.impl.neomedia.codec.audio.ilbc.JavaDecoder",
"org.jitsi.impl.neomedia.codec.audio.ilbc.JavaEncoder",
EncodingConfigurationImpl.G729
? "org.jitsi.impl.neomedia.codec.audio.g729.JavaDecoder"
: null,
EncodingConfigurationImpl.G729
? "org.jitsi.impl.neomedia.codec.audio.g729.JavaEncoder"
: null,
"org.jitsi.impl.neomedia.codec.audio.g722.JNIDecoderImpl",
"org.jitsi.impl.neomedia.codec.audio.g722.JNIEncoderImpl",
"org.jitsi.impl.neomedia.codec.audio.gsm.Decoder",
"org.jitsi.impl.neomedia.codec.audio.gsm.Encoder",
"org.jitsi.impl.neomedia.codec.audio.gsm.DePacketizer",
"org.jitsi.impl.neomedia.codec.audio.gsm.Packetizer",
"org.jitsi.impl.neomedia.codec.audio.silk.JavaDecoder",
"org.jitsi.impl.neomedia.codec.audio.silk.JavaEncoder",
//"org.jitsi.impl.neomedia.codec.video.h263p.DePacketizer",
//"org.jitsi.impl.neomedia.codec.video.h263p.JNIDecoder",
//"org.jitsi.impl.neomedia.codec.video.h263p.JNIEncoder",
//"org.jitsi.impl.neomedia.codec.video.h263p.Packetizer",
"org.jitsi.impl.neomedia.codec.video.h264.DePacketizer",
"org.jitsi.impl.neomedia.codec.video.h264.JNIDecoder",
"org.jitsi.impl.neomedia.codec.video.h264.JNIEncoder",
"org.jitsi.impl.neomedia.codec.video.h264.Packetizer",
"org.jitsi.impl.neomedia.codec.video.SwScale",
// VP8
"org.jitsi.impl.neomedia.codec.video.vp8.DePacketizer",
"org.jitsi.impl.neomedia.codec.video.vp8.Packetizer",
"org.jitsi.impl.neomedia.codec.video.vp8.VPXDecoder",
"org.jitsi.impl.neomedia.codec.video.vp8.VPXEncoder",
// Adaptive Multi-Rate Wideband (AMR-WB)
// "org.jitsi.impl.neomedia.codec.audio.amrwb.DePacketizer",
"org.jitsi.impl.neomedia.codec.audio.amrwb.JNIDecoder",
"org.jitsi.impl.neomedia.codec.audio.amrwb.JNIEncoder",
// "org.jitsi.impl.neomedia.codec.audio.amrwb.Packetizer",
};
/**
 * The package prefixes of the additional JMF <tt>DataSource</tt>s (e.g. low
 * latency PortAudio and ALSA <tt>CaptureDevice</tt>s).
 */
private static final String[] CUSTOM_PACKAGES
= {
"org.jitsi.impl.neomedia.jmfext",
"net.java.sip.communicator.impl.neomedia.jmfext",
"net.sf.fmj"
};
/**
 * The list of class names to register as FMJ plugins with type
 * <tt>PlugInManager.MULTIPLEXER</tt>.
 */
private static final String[] CUSTOM_MULTIPLEXERS
= {
"org.jitsi.impl.neomedia.recording.BasicWavMux"
};
/**
 * The <tt>Logger</tt> used by the <tt>FMJPlugInConfiguration</tt> class
 * for logging output.
 */
private static final Logger logger
= Logger.getLogger(FMJPlugInConfiguration.class);
/**
 * Whether the custom packages have been registered with JMF.
 */
private static boolean packagesRegistered = false;
/**
 * Register in JMF the custom codecs we provide. Also removes stock JMF/FMJ
 * codecs that are superseded by ours or known to misbehave, and moves our
 * codecs to the head of the plugin list so they are preferred. Idempotent:
 * subsequent calls return immediately once <tt>codecsRegistered</tt> is set.
 */
public static void registerCustomCodecs()
{
    if(codecsRegistered)
        return;

    // Register the custom codecs which haven't already been registered.
    @SuppressWarnings("unchecked")
    Collection<String> registeredPlugins
        = new HashSet<String>(
                PlugInManager.getPlugInList(
                        null,
                        null,
                        PlugInManager.CODEC));
    boolean commit = false;

    // Remove JavaRGBToYUV.
    PlugInManager.removePlugIn(
            "com.sun.media.codec.video.colorspace.JavaRGBToYUV",
            PlugInManager.CODEC);
    PlugInManager.removePlugIn(
            "com.sun.media.codec.video.colorspace.JavaRGBConverter",
            PlugInManager.CODEC);
    PlugInManager.removePlugIn(
            "com.sun.media.codec.video.colorspace.RGBScaler",
            PlugInManager.CODEC);

    // Remove JMF's H263 codec.
    PlugInManager.removePlugIn(
            "com.sun.media.codec.video.vh263.NativeDecoder",
            PlugInManager.CODEC);
    PlugInManager.removePlugIn(
            "com.ibm.media.codec.video.h263.NativeEncoder",
            PlugInManager.CODEC);

    // Remove JMF's GSM codec. As working only on some OS.
    String gsmCodecPackage = "com.ibm.media.codec.audio.gsm.";
    String[] gsmCodecClasses
        = new String[]
                {
                    "JavaDecoder",
                    "JavaDecoder_ms",
                    "JavaEncoder",
                    "JavaEncoder_ms",
                    "NativeDecoder",
                    "NativeDecoder_ms",
                    "NativeEncoder",
                    "NativeEncoder_ms",
                    "Packetizer"
                };

    for(String gsmCodecClass : gsmCodecClasses)
    {
        PlugInManager.removePlugIn(
                gsmCodecPackage + gsmCodecClass,
                PlugInManager.CODEC);
    }

    /*
     * Remove FMJ's JavaSoundCodec because it seems to slow down the
     * building of the filter graph and we do not currently seem to need it.
     */
    PlugInManager.removePlugIn(
            "net.sf.fmj.media.codec.JavaSoundCodec",
            PlugInManager.CODEC);

    for (String className : CUSTOM_CODECS)
    {
        /*
         * A codec with a className of null is configured at compile time to
         * not be registered.
         */
        if (className == null)
            continue;

        if (registeredPlugins.contains(className))
        {
            if (logger.isDebugEnabled())
            {
                logger.debug(
                        "Codec " + className + " is already registered");
            }
        }
        else
        {
            // NOTE(review): commit becomes true even if the registration
            // attempt below fails; registerCustomMultiplexers() instead only
            // commits on success. Confirm which behavior is intended.
            commit = true;

            boolean registered;
            Throwable exception = null;

            try
            {
                Codec codec
                    = (Codec) Class.forName(className).newInstance();

                registered =
                    PlugInManager.addPlugIn(
                            className,
                            codec.getSupportedInputFormats(),
                            codec.getSupportedOutputFormats(null),
                            PlugInManager.CODEC);
            }
            catch (Throwable ex)
            {
                // Swallow deliberately: a codec failing to load (e.g. a
                // missing native library) must not abort registration of
                // the remaining codecs. The failure is logged below.
                registered = false;
                exception = ex;
            }
            if (registered)
            {
                if (logger.isTraceEnabled())
                {
                    logger.trace(
                            "Codec " + className
                                + " is successfully registered");
                }
            }
            else
            {
                logger.warn(
                        "Codec " + className
                            + " is NOT successfully registered",
                        exception);
            }
        }
    }

    /*
     * If Jitsi provides a codec which is also provided by FMJ and/or JMF,
     * use Jitsi's version.
     */
    @SuppressWarnings("unchecked")
    Vector<String> codecs
        = PlugInManager.getPlugInList(null, null, PlugInManager.CODEC);

    if (codecs != null)
    {
        boolean setPlugInList = false;

        // Iterate backwards so that, after the repeated add(0, ...) calls,
        // our codecs lead the list in their declared order.
        for (int i = CUSTOM_CODECS.length - 1; i >= 0; i--)
        {
            String className = CUSTOM_CODECS[i];

            if (className != null)
            {
                int classNameIndex = codecs.indexOf(className);

                if (classNameIndex != -1)
                {
                    codecs.remove(classNameIndex);
                    codecs.add(0, className);
                    setPlugInList = true;
                }
            }
        }
        if (setPlugInList)
            PlugInManager.setPlugInList(codecs, PlugInManager.CODEC);
    }

    // Persist the registry to disk unless persisting is explicitly disabled.
    if (commit && !MediaServiceImpl.isJmfRegistryDisableLoad())
    {
        try
        {
            PlugInManager.commit();
        }
        catch (IOException ex)
        {
            logger.error("Cannot commit to PlugInManager", ex);
        }
    }

    codecsRegistered = true;
}
/**
 * Register in JMF the custom packages (protocol prefixes) we provide, so
 * that JMF can locate our <tt>DataSource</tt> implementations. Runs at most
 * once per process.
 */
public static void registerCustomPackages()
{
    if(packagesRegistered)
        return;

    @SuppressWarnings("unchecked")
    Vector<String> prefixes = PackageManager.getProtocolPrefixList();
    boolean debug = logger.isDebugEnabled();

    /*
     * Walk our prefixes from last to first and insert each missing one at
     * the head of the list: once done, our prefixes lead the list in their
     * declared order and are therefore preferred over FMJ's.
     */
    for (int i = CUSTOM_PACKAGES.length; --i >= 0;)
    {
        String prefix = CUSTOM_PACKAGES[i];

        // Linear scan per prefix, but the list is always short.
        if (prefixes.contains(prefix))
            continue;

        prefixes.add(0, prefix);
        if (debug)
            logger.debug("Adding package : " + prefix);
    }

    PackageManager.setProtocolPrefixList(prefixes);
    PackageManager.commitProtocolPrefixList();
    if (debug)
        logger.debug("Registering new protocol prefix list: " + prefixes);

    packagesRegistered = true;
}
/**
 * Registers custom libjitsi <tt>Multiplexer</tt> implementations and removes
 * the stock FMJ/JMF WAV multiplexers they replace. Idempotent: subsequent
 * calls return immediately once <tt>multiplexersRegistered</tt> is set.
 */
@SuppressWarnings("unchecked")
public static void registerCustomMultiplexers()
{
    if (multiplexersRegistered)
        return;

    // Remove the FMJ WAV multiplexers, as they don't work.
    PlugInManager.removePlugIn(
            "com.sun.media.multiplexer.audio.WAVMux",
            PlugInManager.MULTIPLEXER);
    PlugInManager.removePlugIn(
            "net.sf.fmj.media.multiplexer.audio.WAVMux",
            PlugInManager.MULTIPLEXER);

    Collection<String> registeredMuxers
        = new HashSet<String>(
                PlugInManager.getPlugInList(
                        null,
                        null,
                        PlugInManager.MULTIPLEXER));

    boolean commit = false;
    for (String className : CUSTOM_MULTIPLEXERS)
    {
        // A null entry is configured at compile time to not be registered.
        if (className == null)
            continue;

        if (registeredMuxers.contains(className))
        {
            if (logger.isDebugEnabled())
                logger.debug("Multiplexer " + className + " is already "
                        + "registered");
            continue;
        }

        boolean registered;
        Throwable exception = null;

        try
        {
            Multiplexer multiplexer
                = (Multiplexer) Class.forName(className).newInstance();

            registered =
                PlugInManager.addPlugIn(
                        className,
                        multiplexer.getSupportedInputFormats(),
                        multiplexer.getSupportedOutputContentDescriptors(null),
                        PlugInManager.MULTIPLEXER);
        }
        catch (Throwable ex)
        {
            // Logged below; a failing multiplexer must not abort the rest.
            registered = false;
            exception = ex;
        }
        // Fixed copy-paste defect: these messages previously said "Codec".
        if (registered)
        {
            if (logger.isTraceEnabled())
            {
                logger.trace(
                        "Multiplexer " + className
                            + " is successfully registered");
            }
        }
        else
        {
            logger.warn(
                    "Multiplexer " + className
                        + " is NOT successfully registered",
                    exception);
        }
        commit |= registered;
    }

    // Persist the registry to disk unless persisting is explicitly disabled.
    if (commit && !MediaServiceImpl.isJmfRegistryDisableLoad())
    {
        try
        {
            PlugInManager.commit();
        }
        catch (IOException ex)
        {
            logger.error("Cannot commit to PlugInManager", ex);
        }
    }

    multiplexersRegistered = true;
}
}
| |
/**
* Copyright 2017 Confluent Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package io.confluent.ksql.analyzer;
import io.confluent.ksql.ddl.DdlConfig;
import io.confluent.ksql.serde.DataSource;
import io.confluent.ksql.metastore.KsqlStdOut;
import io.confluent.ksql.metastore.KsqlStream;
import io.confluent.ksql.metastore.KsqlTopic;
import io.confluent.ksql.metastore.MetaStore;
import io.confluent.ksql.metastore.StructuredDataSource;
import io.confluent.ksql.parser.tree.AliasedRelation;
import io.confluent.ksql.parser.tree.AllColumns;
import io.confluent.ksql.parser.tree.Cast;
import io.confluent.ksql.parser.tree.ComparisonExpression;
import io.confluent.ksql.parser.tree.DereferenceExpression;
import io.confluent.ksql.parser.tree.Expression;
import io.confluent.ksql.parser.tree.GroupBy;
import io.confluent.ksql.parser.tree.GroupingElement;
import io.confluent.ksql.parser.tree.Join;
import io.confluent.ksql.parser.tree.JoinOn;
import io.confluent.ksql.parser.tree.Node;
import io.confluent.ksql.parser.tree.QualifiedName;
import io.confluent.ksql.parser.tree.QualifiedNameReference;
import io.confluent.ksql.parser.tree.QuerySpecification;
import io.confluent.ksql.parser.tree.Select;
import io.confluent.ksql.parser.tree.SelectItem;
import io.confluent.ksql.parser.tree.SingleColumn;
import io.confluent.ksql.parser.tree.Table;
import io.confluent.ksql.parser.tree.WindowExpression;
import io.confluent.ksql.planner.DefaultTraversalVisitor;
import io.confluent.ksql.planner.plan.JoinNode;
import io.confluent.ksql.planner.plan.PlanNodeId;
import io.confluent.ksql.planner.plan.StructuredDataSourceNode;
import io.confluent.ksql.serde.KsqlTopicSerDe;
import io.confluent.ksql.serde.avro.KsqlAvroTopicSerDe;
import io.confluent.ksql.serde.delimited.KsqlDelimitedTopicSerDe;
import io.confluent.ksql.serde.json.KsqlJsonTopicSerDe;
import io.confluent.ksql.util.KsqlConfig;
import io.confluent.ksql.util.KsqlException;
import io.confluent.ksql.util.Pair;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import static java.lang.String.format;
/**
 * Walks a KSQL query AST and populates the supplied {@link Analysis} with the
 * query's sources, sink, projection, filter, group-by, window, having, limit
 * and sink (WITH clause) properties. One instance analyzes one query.
 */
public class Analyzer extends DefaultTraversalVisitor<Node, AnalysisContext> {

  /** Accumulates everything discovered while walking the AST. */
  private final Analysis analysis;
  /** Catalog used to resolve the stream/table names referenced by the query. */
  private final MetaStore metaStore;

  public Analyzer(Analysis analysis, MetaStore metaStore) {
    this.analysis = analysis;
    this.metaStore = metaStore;
  }

  /**
   * Entry point for a query: resolves FROM and INTO first (so sources and the
   * sink are known), then the projection and the remaining clauses.
   */
  @Override
  protected Node visitQuerySpecification(final QuerySpecification node,
                                         final AnalysisContext context) {

    process(node.getFrom().get(),
            new AnalysisContext(AnalysisContext.ParentType.FROM));

    process(node.getInto().get(), new AnalysisContext(
        AnalysisContext.ParentType.INTO));
    if (!(analysis.getInto() instanceof KsqlStdOut)) {
      // The sink is a real topic (not the console): resolve topic and serde.
      analyzeNonStdOutSink();
    }

    process(node.getSelect(), new AnalysisContext(
        AnalysisContext.ParentType.SELECT));
    if (node.getWhere().isPresent()) {
      analyzeWhere(node.getWhere().get());
    }
    if (node.getGroupBy().isPresent()) {
      analyzeGroupBy(node.getGroupBy().get());
    }
    if (node.getWindowExpression().isPresent()) {
      analyzeWindowExpression(node.getWindowExpression().get());
    }
    if (node.getHaving().isPresent()) {
      analyzeHaving(node.getHaving().get());
    }
    if (node.getLimit().isPresent()) {
      String limitStr = node.getLimit().get();
      Integer limitInt = Integer.parseInt(limitStr);
      analysis.setLimitClause(Optional.of(limitInt));
    }
    analyzeExpressions();
    return null;
  }

  /**
   * Builds the sink {@link KsqlStream} for a query writing to a Kafka topic:
   * the topic name defaults to the sink name unless given explicitly, and the
   * serde is either the explicitly requested format or inherited from the
   * first source.
   */
  private void analyzeNonStdOutSink() {
    List<Pair<StructuredDataSource, String>> fromDataSources = analysis.getFromDataSources();

    StructuredDataSource intoStructuredDataSource = analysis.getInto();
    String intoKafkaTopicName = analysis.getIntoKafkaTopicName();
    if (intoKafkaTopicName == null) {
      intoKafkaTopicName = intoStructuredDataSource.getName();
    }

    // Default to the first source's serde unless a format was requested.
    KsqlTopicSerDe intoTopicSerde = fromDataSources.get(0).getLeft().getKsqlTopic()
        .getKsqlTopicSerDe();
    if (analysis.getIntoFormat() != null) {
      switch (analysis.getIntoFormat().toUpperCase()) {
        case DataSource.AVRO_SERDE_NAME:
          intoTopicSerde = new KsqlAvroTopicSerDe(null);
          break;
        case DataSource.JSON_SERDE_NAME:
          intoTopicSerde = new KsqlJsonTopicSerDe(null);
          break;
        case DataSource.DELIMITED_SERDE_NAME:
          intoTopicSerde = new KsqlDelimitedTopicSerDe();
          break;
        default:
          throw new KsqlException(
              String.format("Unsupported format: %s", analysis.getIntoFormat()));
      }
    } else {
      if (intoTopicSerde instanceof KsqlAvroTopicSerDe) {
        // Fresh Avro serde so the sink does not reuse the source's schema.
        intoTopicSerde = new KsqlAvroTopicSerDe(null);
      }
    }

    KsqlTopic newIntoKsqlTopic = new KsqlTopic(intoKafkaTopicName,
                                               intoKafkaTopicName, intoTopicSerde);
    KsqlStream intoKsqlStream = new KsqlStream(intoStructuredDataSource.getName(),
                                               null, null, null,
                                               newIntoKsqlTopic);
    analysis.setInto(intoKsqlStream);
  }

  /**
   * Validates that every expression in SELECT / WHERE / GROUP BY / HAVING only
   * references columns of the (possibly joined) source schema.
   */
  private void analyzeExpressions() {
    Schema schema = analysis.getFromDataSources().get(0).getLeft().getSchema();
    boolean isJoinSchema = false;
    if (analysis.getJoin() != null) {
      schema = analysis.getJoin().getSchema();
      isJoinSchema = true;
    }
    ExpressionAnalyzer expressionAnalyzer = new ExpressionAnalyzer(schema, isJoinSchema);

    for (Expression selectExpression: analysis.getSelectExpressions()) {
      expressionAnalyzer.analyzeExpression(selectExpression);
    }
    if (analysis.getWhereExpression() != null) {
      expressionAnalyzer.analyzeExpression(analysis.getWhereExpression());
    }
    if (!analysis.getGroupByExpressions().isEmpty()) {
      for (Expression expression: analysis.getGroupByExpressions()) {
        expressionAnalyzer.analyzeExpression(expression);
      }
    }
    if (analysis.getHavingExpression() != null) {
      expressionAnalyzer.analyzeExpression(analysis.getHavingExpression());
    }
  }

  /**
   * Analyzes a two-way join: resolves both sides against the metastore,
   * applies per-side TIMESTAMP overrides, validates the join criteria and
   * records a {@link JoinNode} on the analysis.
   */
  @Override
  protected Node visitJoin(final Join node, final AnalysisContext context) {
    AliasedRelation left = (AliasedRelation) process(node.getLeft(), context);
    AliasedRelation right = (AliasedRelation) process(node.getRight(), context);

    String leftSideName = ((Table) left.getRelation()).getName().getSuffix();
    StructuredDataSource leftDataSource = metaStore.getSource(leftSideName);
    if (leftDataSource == null) {
      throw new KsqlException(format("Resource %s does not exist.", leftSideName));
    }
    leftDataSource = timestampColumn(left, leftDataSource);

    String rightSideName = ((Table) right.getRelation()).getName().getSuffix();
    StructuredDataSource rightDataSource = metaStore.getSource(rightSideName);
    if (rightDataSource == null) {
      throw new KsqlException(format("Resource %s does not exist.", rightSideName));
    }
    rightDataSource = timestampColumn(right, rightDataSource);

    String leftAlias = left.getAlias();
    String rightAlias = right.getAlias();
    StructuredDataSourceNode leftSourceKafkaTopicNode =
        new StructuredDataSourceNode(new PlanNodeId("KafkaTopic_Left"),
                                     leftDataSource,
                                     leftDataSource.getSchema());
    StructuredDataSourceNode rightSourceKafkaTopicNode =
        new StructuredDataSourceNode(new PlanNodeId("KafkaTopic_Right"),
                                     rightDataSource,
                                     rightDataSource.getSchema());

    JoinNode.Type joinType;
    switch (node.getType()) {
      case INNER:
        joinType = JoinNode.Type.INNER;
        break;
      case LEFT:
        joinType = JoinNode.Type.LEFT;
        break;
      case RIGHT:
        joinType = JoinNode.Type.RIGHT;
        break;
      case CROSS:
        joinType = JoinNode.Type.CROSS;
        break;
      case FULL:
        joinType = JoinNode.Type.FULL;
        break;
      default:
        throw new KsqlException("Join type is not supported: " + node.getType().name());
    }

    JoinOn joinOn = (JoinOn) (node.getCriteria().get());
    ComparisonExpression comparisonExpression = (ComparisonExpression) joinOn.getExpression();
    String leftKeyFieldName = fetchKeyFieldName(comparisonExpression.getLeft());
    String rightKeyFieldName = fetchKeyFieldName(comparisonExpression.getRight());

    // Only equi-joins are supported.
    if (comparisonExpression.getType() != ComparisonExpression.Type.EQUAL) {
      throw new KsqlException("Join criteria is not supported.");
    }

    JoinNode joinNode =
        new JoinNode(new PlanNodeId("Join"), joinType, leftSourceKafkaTopicNode,
                     rightSourceKafkaTopicNode, leftKeyFieldName, rightKeyFieldName, leftAlias,
                     rightAlias);
    analysis.setJoin(joinNode);
    return null;
  }

  /**
   * Extracts the column name used as a join key from one side of the join
   * criteria (either {@code alias.col} or a bare column reference).
   *
   * @throws KsqlException when the expression is not a column reference
   */
  private String fetchKeyFieldName(Expression expression) {
    if (expression instanceof DereferenceExpression) {
      DereferenceExpression
          leftDereferenceExpression =
          (DereferenceExpression) expression;
      return leftDereferenceExpression.getFieldName();
    } else if (expression instanceof QualifiedNameReference) {
      QualifiedNameReference
          leftQualifiedNameReference =
          (QualifiedNameReference) expression;
      return leftQualifiedNameReference.getName().getSuffix();
    } else {
      throw new KsqlException("Join criteria is not supported. Expression:" + expression);
    }
  }

  /**
   * Applies a WITH-clause TIMESTAMP override (if any) from the aliased
   * relation onto the resolved source.
   */
  private StructuredDataSource timestampColumn(AliasedRelation aliasedRelation,
                                               StructuredDataSource structuredDataSource) {
    Table table = (Table) aliasedRelation.getRelation();
    if (table.getProperties() != null
        && table.getProperties().get(DdlConfig.TIMESTAMP_NAME_PROPERTY) != null) {
      String timestampFieldName = stripSingleQuotes(
          table.getProperties().get(DdlConfig.TIMESTAMP_NAME_PROPERTY).toString().toUpperCase(),
          "Property name should be String with single quote.");
      structuredDataSource = structuredDataSource.cloneWithTimeField(timestampFieldName);
    }
    return structuredDataSource;
  }

  /** Resolves a FROM relation against the metastore and records it. */
  @Override
  protected Node visitAliasedRelation(AliasedRelation node, AnalysisContext context) {
    String structuredDataSourceName = ((Table) node.getRelation()).getName().getSuffix();
    if (metaStore.getSource(structuredDataSourceName)
        == null) {
      throw new KsqlException(structuredDataSourceName + " does not exist.");
    }
    StructuredDataSource structuredDataSource = metaStore.getSource(structuredDataSourceName);

    if (((Table) node.getRelation()).getProperties() != null) {
      if (((Table) node.getRelation()).getProperties().get(DdlConfig.TIMESTAMP_NAME_PROPERTY)
          != null) {
        // Bug fix: the previous check (!startsWith && !endsWith) only
        // rejected values missing BOTH quotes; require both to be present,
        // matching the validation applied on the join path.
        String timestampFieldName = stripSingleQuotes(
            ((Table) node.getRelation()).getProperties()
                .get(DdlConfig.TIMESTAMP_NAME_PROPERTY).toString().toUpperCase(),
            "Property name should be String with single quote.");
        structuredDataSource = structuredDataSource.cloneWithTimeField(timestampFieldName);
      }
    }

    Pair<StructuredDataSource, String>
        fromDataSource =
        new Pair<>(
            structuredDataSource,
            node.getAlias());
    analysis.getFromDataSources().add(fromDataSource);
    return node;
  }

  /**
   * Handles the INTO target: either stdout (console query) or a named sink.
   */
  @Override
  protected Node visitTable(final Table node, final AnalysisContext context) {

    StructuredDataSource into;
    if (node.isStdOut) {
      into =
          new KsqlStdOut(KsqlStdOut.KSQL_STDOUT_NAME, null, null,
                         null, StructuredDataSource.DataSourceType.KSTREAM);

    } else if (context.getParentType() == AnalysisContext.ParentType.INTO) {
      into = analyzeNonStdOutTable(node);
    } else {
      throw new KsqlException("INTO clause is not set correctly!");
    }

    analysis.setInto(into);
    return null;
  }

  @Override
  protected Node visitCast(final Cast node, final AnalysisContext context) {
    return process(node.getExpression(), context);
  }

  /**
   * Records the SELECT list, expanding {@code *} (and {@code T.*} for joins)
   * into explicit column references.
   */
  @Override
  protected Node visitSelect(final Select node, final AnalysisContext context) {
    for (SelectItem selectItem : node.getSelectItems()) {
      if (selectItem instanceof AllColumns) {
        // expand * and T.*
        AllColumns allColumns = (AllColumns) selectItem;
        if ((this.analysis.getFromDataSources() == null) || (this.analysis.getFromDataSources()
            .isEmpty())) {
          throw new KsqlException("FROM clause was not resolved!");
        }
        if (analysis.getJoin() != null) {
          JoinNode joinNode = analysis.getJoin();
          // Qualify each side's columns with its alias to avoid collisions.
          for (Field field : joinNode.getLeft().getSchema().fields()) {
            QualifiedNameReference
                qualifiedNameReference =
                new QualifiedNameReference(allColumns.getLocation().get(), QualifiedName
                    .of(joinNode.getLeftAlias() + "." + field.name()));
            analysis.addSelectItem(qualifiedNameReference,
                                   joinNode.getLeftAlias() + "_" + field.name());
          }
          for (Field field : joinNode.getRight().getSchema().fields()) {
            QualifiedNameReference qualifiedNameReference =
                new QualifiedNameReference(allColumns.getLocation().get(), QualifiedName
                    .of(joinNode.getRightAlias() + "." + field.name()));
            analysis.addSelectItem(qualifiedNameReference,
                                   joinNode.getRightAlias() + "_" + field.name());
          }
        } else {
          for (Field field : this.analysis.getFromDataSources().get(0).getLeft().getSchema()
              .fields()) {
            QualifiedNameReference
                qualifiedNameReference =
                new QualifiedNameReference(allColumns.getLocation().get(), QualifiedName
                    .of(this.analysis.getFromDataSources().get(0).getRight() + "." + field.name()));
            analysis.addSelectItem(qualifiedNameReference, field.name());
          }
        }
      } else if (selectItem instanceof SingleColumn) {
        SingleColumn column = (SingleColumn) selectItem;
        analysis.addSelectItem(column.getExpression(), column.getAlias().get());
      } else {
        throw new IllegalArgumentException(
            "Unsupported SelectItem type: " + selectItem.getClass().getName());
      }
    }
    return null;
  }

  @Override
  protected Node visitQualifiedNameReference(final QualifiedNameReference node,
                                             final AnalysisContext context) {
    return visitExpression(node, context);
  }

  @Override
  protected Node visitGroupBy(final GroupBy node, final AnalysisContext context) {
    return null;
  }

  private void analyzeWhere(final Node node) {
    analysis.setWhereExpression((Expression) node);
  }

  private void analyzeGroupBy(final GroupBy groupBy) {
    for (GroupingElement groupingElement : groupBy.getGroupingElements()) {
      Set<Expression> groupingSet = groupingElement.enumerateGroupingSets().get(0);
      analysis.getGroupByExpressions().addAll(groupingSet);
    }
  }

  private void analyzeWindowExpression(final WindowExpression windowExpression) {
    analysis.setWindowExpression(windowExpression);
  }

  private void analyzeHaving(final Node node) {
    analysis.setHavingExpression((Expression) node);
  }

  /** Builds the sink placeholder for a named INTO target and applies WITH. */
  private StructuredDataSource analyzeNonStdOutTable(final Table node) {
    StructuredDataSource into = new KsqlStream(node.getName().getSuffix(), null,
                                               null, null, null);

    setIntoProperties(into, node);
    return into;
  }

  /** Validates and records every WITH-clause property of the sink. */
  private void setIntoProperties(final StructuredDataSource into, final Table node) {

    validateWithClause(node.getProperties().keySet());

    if (node.getProperties().get(DdlConfig.VALUE_FORMAT_PROPERTY) != null) {
      setIntoTopicFormat(into, node);
    }

    if (node.getProperties().get(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY) != null) {
      setIntoTopicName(node);
    }

    if (node.getProperties().get(DdlConfig.PARTITION_BY_PROPERTY) != null) {
      String intoPartitionByColumnName = node.getProperties()
          .get(DdlConfig.PARTITION_BY_PROPERTY).toString().toUpperCase();
      analysis.getIntoProperties().put(DdlConfig.PARTITION_BY_PROPERTY,
                                       intoPartitionByColumnName);
    }

    if (node.getProperties().get(KsqlConfig.SINK_TIMESTAMP_COLUMN_NAME) != null) {
      setIntoTimestampColumn(node);
    }

    if (node.getProperties().get(KsqlConfig.SINK_NUMBER_OF_PARTITIONS) != null) {
      try {
        int numberOfPartitions = Integer.parseInt(node.getProperties()
                                                      .get(KsqlConfig.SINK_NUMBER_OF_PARTITIONS)
                                                      .toString());
        analysis.getIntoProperties().put(KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY,
                                         numberOfPartitions);

      } catch (NumberFormatException e) {
        throw new KsqlException("Invalid number of partitions in WITH clause: "
                                + node.getProperties().get(KsqlConfig.SINK_NUMBER_OF_PARTITIONS)
                                    .toString());
      }
    }

    if (node.getProperties().get(KsqlConfig.SINK_NUMBER_OF_REPLICATIONS) != null) {
      try {
        short numberOfReplications =
            Short.parseShort(node.getProperties().get(KsqlConfig.SINK_NUMBER_OF_REPLICATIONS)
                                 .toString());
        analysis.getIntoProperties()
            .put(KsqlConfig.SINK_NUMBER_OF_REPLICATIONS_PROPERTY, numberOfReplications);
      } catch (NumberFormatException e) {
        throw new KsqlException("Invalid number of replications in WITH clause: " + node
            .getProperties().get(KsqlConfig.SINK_NUMBER_OF_REPLICATIONS).toString());
      }
    }
  }

  /** Extracts and records the sink Kafka topic name from the WITH clause. */
  private void setIntoTopicName(final Table node) {
    String rawTopicName =
        node.getProperties().get(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY).toString();
    String intoKafkaTopicName = stripSingleQuotes(
        rawTopicName,
        rawTopicName + " value is string and should be enclosed between " + "\"'\".");
    analysis.setIntoKafkaTopicName(intoKafkaTopicName);
    analysis.getIntoProperties().put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, intoKafkaTopicName);
  }

  /** Extracts and records the sink serde format (and Avro schema path). */
  private void setIntoTopicFormat(final StructuredDataSource into, final Table node) {
    String rawSerde = node.getProperties().get(DdlConfig.VALUE_FORMAT_PROPERTY).toString();
    String serde = stripSingleQuotes(
        rawSerde,
        rawSerde + " value is string and should be enclosed between " + "\"'\".");
    analysis.setIntoFormat(serde);
    analysis.getIntoProperties().put(DdlConfig.VALUE_FORMAT_PROPERTY, serde);
    if ("AVRO".equals(serde)) {
      String avroSchemaFilePath = "/tmp/" + into.getName() + ".avro";
      if (node.getProperties().get(DdlConfig.AVRO_SCHEMA_FILE) != null) {
        String rawPath = node.getProperties().get(DdlConfig.AVRO_SCHEMA_FILE).toString();
        avroSchemaFilePath = stripSingleQuotes(
            rawPath,
            rawPath + " value is string and should be enclosed between " + "\"'\".");
      }
      analysis.getIntoProperties().put(DdlConfig.AVRO_SCHEMA_FILE, avroSchemaFilePath);
    }
  }

  /** Extracts and records the sink's TIMESTAMP column override. */
  private void setIntoTimestampColumn(final Table node) {
    String rawColumnName = node.getProperties()
        .get(KsqlConfig.SINK_TIMESTAMP_COLUMN_NAME).toString().toUpperCase();
    String intoTimestampColumnName = stripSingleQuotes(
        rawColumnName,
        rawColumnName + " value is string and should be enclosed between " + "\"'\".");
    analysis.getIntoProperties().put(KsqlConfig.SINK_TIMESTAMP_COLUMN_NAME,
                                     intoTimestampColumnName);
  }

  /**
   * Validates that {@code value} is enclosed in single quotes and returns it
   * with the quotes stripped.
   *
   * <p>Centralizes a check that was previously duplicated with inconsistent
   * (and partly incorrect, De Morgan-inverted) boolean logic: a value must
   * both start AND end with a single quote.
   *
   * @param value        the raw property value, expected to look like {@code 'x'}
   * @param errorMessage message for the exception thrown when unquoted
   * @return {@code value} without its surrounding single quotes
   * @throws KsqlException when {@code value} is not fully single-quoted
   */
  private static String stripSingleQuotes(String value, String errorMessage) {
    if (!(value.startsWith("'") && value.endsWith("'"))) {
      throw new KsqlException(errorMessage);
    }
    return value.substring(1, value.length() - 1);
  }

  /** Rejects unknown WITH-clause property names (case-insensitive). */
  private void validateWithClause(Set<String> withClauseVariables) {

    Set<String> validSet = new HashSet<>();
    validSet.add(DdlConfig.VALUE_FORMAT_PROPERTY.toUpperCase());
    validSet.add(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY.toUpperCase());
    validSet.add(DdlConfig.PARTITION_BY_PROPERTY.toUpperCase());
    validSet.add(KsqlConfig.SINK_TIMESTAMP_COLUMN_NAME.toUpperCase());
    validSet.add(KsqlConfig.SINK_NUMBER_OF_PARTITIONS.toUpperCase());
    validSet.add(KsqlConfig.SINK_NUMBER_OF_REPLICATIONS.toUpperCase());
    for (String withVariable: withClauseVariables) {
      if (!validSet.contains(withVariable.toUpperCase())) {
        throw new KsqlException("Invalid config variable in the WITH clause: " + withVariable);
      }
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.io.IOException;
import java.util.List;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.concurrent.ConcurrentSkipListMap;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
/**
 * Class serves two purposes:
 *
 * 1. Broadcast NamespaceDescriptor information via ZK
 * (Done by the Master)
 * 2. Consume broadcasted NamespaceDescriptor changes
 * (Done by the RegionServers)
 *
 */
@InterfaceAudience.Private
public class ZKNamespaceManager extends ZKListener {
  private static final Logger LOG = LoggerFactory.getLogger(ZKNamespaceManager.class);

  /** Parent znode under which one child per namespace is stored. */
  private final String nsZNode;

  /** Thread-safe local cache: namespace name -> descriptor. */
  private final NavigableMap<String,NamespaceDescriptor> cache;

  public ZKNamespaceManager(ZKWatcher zkw) throws IOException {
    super(zkw);
    nsZNode = zkw.getZNodePaths().namespaceZNode;
    cache = new ConcurrentSkipListMap<>();
  }

  /**
   * Registers this listener and primes the cache from ZooKeeper, creating the
   * namespace znode when it does not exist yet.
   *
   * @throws IOException wrapping any ZooKeeper failure
   */
  public void start() throws IOException {
    watcher.registerListener(this);
    try {
      if (ZKUtil.watchAndCheckExists(watcher, nsZNode)) {
        List<ZKUtil.NodeAndData> existing =
            ZKUtil.getChildDataAndWatchForNewChildren(watcher, nsZNode);
        if (existing != null) {
          refreshNodes(existing);
        }
      } else {
        ZKUtil.createWithParents(watcher, nsZNode);
      }
    } catch (KeeperException e) {
      throw new IOException("Failed to initialize ZKNamespaceManager", e);
    }
  }

  public void stop() throws IOException {
    this.watcher.unregisterListener(this);
  }

  /** @return the cached descriptor for {@code name}, or null if unknown */
  public NamespaceDescriptor get(String name) {
    return cache.get(name);
  }

  /** Writes the descriptor to ZooKeeper, then updates the local cache. */
  public void update(NamespaceDescriptor ns) throws IOException {
    writeNamespace(ns);
    cache.put(ns.getName(), ns);
  }

  /** Deletes the namespace znode, then removes it from the local cache. */
  public void remove(String name) throws IOException {
    deleteNamespace(name);
    cache.remove(name);
  }

  /** @return a sorted snapshot of all cached namespace descriptors */
  public NavigableSet<NamespaceDescriptor> list() throws IOException {
    NavigableSet<NamespaceDescriptor> ret =
      Sets.newTreeSet(NamespaceDescriptor.NAMESPACE_DESCRIPTOR_COMPARATOR);
    ret.addAll(cache.values());
    return ret;
  }

  @Override
  public void nodeCreated(String path) {
    // Only react when the namespace parent znode itself appears.
    if (nsZNode.equals(path)) {
      try {
        List<ZKUtil.NodeAndData> nodes =
            ZKUtil.getChildDataAndWatchForNewChildren(watcher, nsZNode);
        refreshNodes(nodes);
      } catch (KeeperException ke) {
        String msg = "Error reading data from zookeeper";
        LOG.error(msg, ke);
        watcher.abort(msg, ke);
      } catch (IOException e) {
        String msg = "Error parsing data from zookeeper";
        LOG.error(msg, e);
        watcher.abort(msg, e);
      }
    }
  }

  @Override
  public void nodeDeleted(String path) {
    // A child of nsZNode was deleted: drop that namespace from the cache.
    if (nsZNode.equals(ZKUtil.getParent(path))) {
      String nsName = ZKUtil.getNodeName(path);
      cache.remove(nsName);
    }
  }

  @Override
  public void nodeDataChanged(String path) {
    // A child of nsZNode changed: re-read it and refresh the cache entry.
    if (nsZNode.equals(ZKUtil.getParent(path))) {
      try {
        byte[] data = ZKUtil.getDataAndWatch(watcher, path);
        NamespaceDescriptor ns =
            ProtobufUtil.toNamespaceDescriptor(
                HBaseProtos.NamespaceDescriptor.parseFrom(data));
        cache.put(ns.getName(), ns);
      } catch (KeeperException ke) {
        String msg = "Error reading data from zookeeper for node "+path;
        LOG.error(msg, ke);
        // only option is to abort
        watcher.abort(msg, ke);
      } catch (IOException ioe) {
        String msg = "Error deserializing namespace: "+path;
        LOG.error(msg, ioe);
        watcher.abort(msg, ioe);
      }
    }
  }

  @Override
  public void nodeChildrenChanged(String path) {
    // Namespaces were added/removed under nsZNode: reload all of them.
    if (nsZNode.equals(path)) {
      try {
        List<ZKUtil.NodeAndData> nodes =
            ZKUtil.getChildDataAndWatchForNewChildren(watcher, nsZNode);
        refreshNodes(nodes);
      } catch (KeeperException ke) {
        LOG.error("Error reading data from zookeeper for path "+path, ke);
        watcher.abort("ZooKeeper error get node children for path "+path, ke);
      } catch (IOException e) {
        LOG.error("Error deserializing namespace child from: "+path, e);
        watcher.abort("Error deserializing namespace child from: " + path, e);
      }
    }
  }

  /** Deletes the znode for {@code name}; a missing znode is not an error. */
  private void deleteNamespace(String name) throws IOException {
    String zNode = ZNodePaths.joinZNode(nsZNode, name);
    try {
      ZKUtil.deleteNode(watcher, zNode);
    } catch (KeeperException.NoNodeException e) {
      // If the node does not exist, it could be already deleted. Continue without fail.
      LOG.warn("The ZNode " + zNode + " for namespace " + name + " does not exist.");
    } catch (KeeperException e) {
      LOG.error("Failed updating permissions for namespace " + name, e);
      throw new IOException("Failed updating permissions for namespace " + name, e);
    }
  }

  /** Creates (if needed) and writes the serialized descriptor's znode. */
  private void writeNamespace(NamespaceDescriptor ns) throws IOException {
    String zNode = ZNodePaths.joinZNode(nsZNode, ns.getName());
    try {
      ZKUtil.createWithParents(watcher, zNode);
      ZKUtil.updateExistingNodeData(watcher, zNode,
          ProtobufUtil.toProtoNamespaceDescriptor(ns).toByteArray(), -1);
    } catch (KeeperException e) {
      LOG.error("Failed updating permissions for namespace "+ns.getName(), e);
      throw new IOException("Failed updating permissions for namespace "+ns.getName(), e);
    }
  }

  /**
   * Updates the cache from the given list of znodes, skipping entries with
   * empty data.
   */
  private void refreshNodes(List<ZKUtil.NodeAndData> nodes) throws IOException {
    for (ZKUtil.NodeAndData n : nodes) {
      if (n.isEmpty()) {
        continue;
      }
      String path = n.getNode();
      String namespace = ZKUtil.getNodeName(path);
      byte[] nodeData = n.getData();
      if (LOG.isTraceEnabled()) {
        LOG.trace("Updating namespace cache from node " + namespace + " with data: " +
            Bytes.toStringBinary(nodeData));
      }
      NamespaceDescriptor ns =
          ProtobufUtil.toNamespaceDescriptor(
              HBaseProtos.NamespaceDescriptor.parseFrom(nodeData));
      cache.put(ns.getName(), ns);
    }
  }
}
| |
/*
* Copyright 2018 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.antennadoctor.storage;
import com.google.common.io.Resources;
import com.streamsets.datacollector.antennadoctor.AntennaDoctorConstants;
import com.streamsets.datacollector.antennadoctor.bean.AntennaDoctorRepositoryManifestBean;
import com.streamsets.datacollector.antennadoctor.bean.AntennaDoctorRepositoryUpdateBean;
import com.streamsets.datacollector.antennadoctor.bean.AntennaDoctorRuleBean;
import com.streamsets.datacollector.antennadoctor.bean.AntennaDoctorStorageBean;
import com.streamsets.datacollector.json.ObjectMapperFactory;
import com.streamsets.datacollector.main.BuildInfo;
import com.streamsets.datacollector.task.AbstractTask;
import com.streamsets.datacollector.util.Configuration;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
* Storage module for Antenna Doctor.
*
* The storage currently only loads static storage from build-in jar file. However in the future we will add support
* for loading even remote tools.
*/
public class AntennaDoctorStorage extends AbstractTask {
  private static final Logger LOG = LoggerFactory.getLogger(AntennaDoctorStorage.class);
  /**
   * Delegate when a new rules are available.
   *
   * This method is always called with all rules together (no incremental rule application here).
   */
  public interface NewRulesDelegate {
    void loadNewRules(List<AntennaDoctorRuleBean> rules);
  }
  private final NewRulesDelegate delegate;
  /**
   * Repository where we will store all our files.
   */
  private final Path repositoryDirectory;
  private final BuildInfo buildInfo;
  private final String productName;
  private final Configuration configuration;
  // Various threads that we might be running
  private ExecutorService executorService;
  private OverrideFileRunnable overrideRunnable;
  private UpdateRunnable updateRunnable;
  // Handle for whichever background task is running (override watcher OR periodic updater).
  // Parameterized as Future<?> (was a raw Future) since the result value is never used.
  private Future<?> future;

  public AntennaDoctorStorage(
    String productName,
    BuildInfo buildInfo,
    Configuration configuration,
    String dataDir,
    NewRulesDelegate delegate
  ) {
    super("Antenna Doctor Storage");
    this.productName = productName;
    this.buildInfo = buildInfo;
    this.configuration = configuration;
    this.delegate = delegate;
    this.repositoryDirectory = Paths.get(dataDir, AntennaDoctorConstants.DIR_REPOSITORY);
  }

  /**
   * Initializes the on-disk repository (seeding the built-in database on first run),
   * optionally starts either the override-file watcher or the periodic remote updater
   * (mutually exclusive; override wins), and finally pushes the current rules to the delegate.
   */
  @Override
  protected void initTask() {
    LOG.info("Repository location: {}", repositoryDirectory);
    try {
      // Make sure that we have our own directory to operate in
      if(!Files.exists(repositoryDirectory)) {
        Files.createDirectories(repositoryDirectory);
      }
      // Seed the database from the bundled resource if it does not exist yet
      Path store = repositoryDirectory.resolve(AntennaDoctorConstants.FILE_DATABASE);
      if(!Files.exists(store)) {
        try(OutputStream stream = Files.newOutputStream(store)) {
          Resources.copy(Resources.getResource(AntennaDoctorStorage.class, AntennaDoctorConstants.FILE_DATABASE), stream);
        }
      }
    } catch (IOException e) {
      LOG.error("Cant initialize repository: {}", e.getMessage(), e);
      return;
    }
    // Schedule override runnable if allowed in configuration
    if(configuration.get(AntennaDoctorConstants.CONF_OVERRIDE_ENABLE, AntennaDoctorConstants.DEFAULT_OVERRIDE_ENABLE)) {
      LOG.info("Enabling polling of {} to override the rule database", AntennaDoctorConstants.FILE_OVERRIDE);
      this.executorService = Executors.newSingleThreadExecutor();
      this.overrideRunnable = new OverrideFileRunnable();
      this.future = executorService.submit(this.overrideRunnable);
    }
    // Remote repo handling
    if(configuration.get(AntennaDoctorConstants.CONF_UPDATE_ENABLE, AntennaDoctorConstants.DEFAULT_UPDATE_ENABLE)) {
      if(overrideRunnable != null) {
        LOG.info("Using override, not starting update thread.");
      } else {
        this.executorService = Executors.newSingleThreadScheduledExecutor();
        this.updateRunnable = new UpdateRunnable();
        this.future = ((ScheduledExecutorService)executorService).scheduleAtFixedRate(
          updateRunnable,
          configuration.get(AntennaDoctorConstants.CONF_UPDATE_DELAY, AntennaDoctorConstants.DEFAULT_UPDATE_DELAY),
          configuration.get(AntennaDoctorConstants.CONF_UPDATE_PERIOD, AntennaDoctorConstants.DEFAULT_UPDATE_PERIOD),
          TimeUnit.MINUTES
        );
      }
    }
    // And finally load rules
    delegate.loadNewRules(loadRules());
  }

  /**
   * Stops any background thread: signals the override watcher, cancels the outstanding
   * future, and shuts down the executor. All failures are logged at debug and swallowed
   * because stop is best-effort.
   */
  @Override
  protected void stopTask() {
    if(overrideRunnable != null) {
      overrideRunnable.isStopped = true;
    }
    if(future != null) {
      try {
        future.cancel(true);
        future.get();
      } catch (Throwable e) {
        LOG.debug("Error when stopping override file thread", e);
      }
    }
    if(executorService != null) {
      try {
        executorService.shutdownNow();
      } catch (Throwable e) {
        LOG.debug("Error when stopping override file service", e);
      }
    }
  }

  /**
   * Loads the rule list from disk. When the override watcher is active, the OVERRIDE file
   * is read instead of the normal database file. Incompatible schema versions and any read
   * failure result in an empty rule list (never an exception).
   */
  private List<AntennaDoctorRuleBean> loadRules() {
    Path database = repositoryDirectory.resolve(overrideRunnable == null ? AntennaDoctorConstants.FILE_DATABASE : AntennaDoctorConstants.FILE_OVERRIDE);
    LOG.trace("Loading rules from: {}", database);
    try(InputStream inputStream = Files.newInputStream(database)) {
      AntennaDoctorStorageBean storageBean = ObjectMapperFactory.get().readValue(
        inputStream,
        AntennaDoctorStorageBean.class
      );
      // Version protection
      if(storageBean.getSchemaVersion() != AntennaDoctorStorageBean.CURRENT_SCHEMA_VERSION) {
        LOG.error(
          "Ignoring the knowledge base file since it has incompatible schema version ({} versus expected {})",
          storageBean.getSchemaVersion(),
          AntennaDoctorStorageBean.CURRENT_SCHEMA_VERSION
        );
        return Collections.emptyList();
      }
      return storageBean.getRules();
    } catch (Throwable e) {
      LOG.error("Can't load knowledge base from {}: ", database.getFileName().toString(), e);
      return Collections.emptyList();
    }
  }

  /**
   * Override runnable that scans the underlying repository for our OVERRIDE file and there is a new or updated rule set reloads the database.
   */
  private class OverrideFileRunnable implements Runnable {
    // Volatile-free stop flag; written by stopTask() and polled every second here.
    boolean isStopped = false;

    @Override
    public void run() {
      LOG.debug("Starting scanner thread watching for changes in {}", AntennaDoctorConstants.FILE_OVERRIDE);
      Thread.currentThread().setName("Antenna Doctor Override Scanner Thread");
      try (WatchService watcher = FileSystems.getDefault().newWatchService()) {
        repositoryDirectory.register(watcher, StandardWatchEventKinds.ENTRY_MODIFY);
        while (!isStopped) {
          WatchKey key;
          try {
            // Bounded poll so the isStopped flag is re-checked at least once a second
            key = watcher.poll(1, TimeUnit.SECONDS);
          } catch (InterruptedException e) {
            LOG.debug("Recovering from interruption", e);
            continue;
          }
          if(key == null) {
            continue;
          }
          try {
            for (WatchEvent<?> event : key.pollEvents()) {
              WatchEvent.Kind<?> kind = event.kind();
              WatchEvent<Path> ev = (WatchEvent<Path>) event;
              if (kind != StandardWatchEventKinds.ENTRY_MODIFY) {
                continue;
              }
              // Only react to modifications of the override file itself
              if (ev.context().toString().equals(AntennaDoctorConstants.FILE_OVERRIDE)) {
                delegate.loadNewRules(loadRules());
              }
            }
          } finally {
            key.reset();
          }
        }
      } catch (Throwable e) {
        LOG.error("Issue when stopping the override scan thread", e);
      } finally {
        LOG.info("Stopping scanner thread for changes to {} file", AntennaDoctorConstants.FILE_OVERRIDE);
      }
    }
  }

  /**
   * Periodic task that syncs the local rule database with the remote repository:
   * downloads the manifest, replaces the base database when the base version differs,
   * applies any not-yet-applied incremental updates, and persists + publishes the result.
   */
  private class UpdateRunnable implements Runnable {
    @Override
    public void run() {
      String repoURL = configuration.get(AntennaDoctorConstants.CONF_UPDATE_URL, AntennaDoctorConstants.DEFAULT_UPDATE_URL) +
        "/" +
        productName +
        "/" +
        AntennaDoctorRepositoryManifestBean.CURRENT_SCHEMA_VERSION +
        "/"
      ;
      boolean changesApplied = false;
      try {
        LOG.info("Downloading updates from: {}", repoURL);
        // Download repo manifest first
        AntennaDoctorRepositoryManifestBean manifestBean;
        try(Response response = ClientBuilder.newClient()
          .target(repoURL + AntennaDoctorConstants.URL_MANIFEST)
          .queryParam("version", buildInfo.getVersion())
          .request()
          .get()) {
          manifestBean = ObjectMapperFactory.get().readValue(
            response.readEntity(InputStream.class),
            AntennaDoctorRepositoryManifestBean.class
          );
          if(manifestBean.getSchemaVersion() != AntennaDoctorRepositoryManifestBean.CURRENT_SCHEMA_VERSION) {
            LOG.error(
              "Ignoring remote knowledge base repository as it has incompatible schema version ({} versus expected {})",
              manifestBean.getSchemaVersion(),
              AntennaDoctorRepositoryManifestBean.CURRENT_SCHEMA_VERSION
            );
            return;
          }
        }
        LOG.debug("Base version in remote server is {}", manifestBean.getBaseVersion());
        AntennaDoctorStorageBean currentStore;
        try(InputStream stream = Files.newInputStream(repositoryDirectory.resolve(AntennaDoctorConstants.FILE_DATABASE))) {
          currentStore = ObjectMapperFactory.get().readValue(stream, AntennaDoctorStorageBean.class);
        }
        if(!currentStore.getBaseVersion().equals(manifestBean.getBaseVersion())) {
          LOG.info("Current base version ({}) is different then remote repo base version ({}), downloading remote version", currentStore.getBaseVersion(), manifestBean.getBaseVersion());
          try(Response response = ClientBuilder.newClient()
            .target(repoURL + manifestBean.getBaseVersion() + AntennaDoctorConstants.URL_VERSION_END)
            .request()
            .get()) {
            // And override current store
            try(InputStream gzipStream = new GzipCompressorInputStream(response.readEntity(InputStream.class))) {
              currentStore = ObjectMapperFactory.get().readValue(
                gzipStream,
                AntennaDoctorStorageBean.class
              );
            }
            // Fresh base: reset applied-updates bookkeeping (stray ';;' removed here)
            currentStore.setBaseVersion(manifestBean.getBaseVersion());
            currentStore.setUpdates(new LinkedList<>());
            changesApplied = true;
          }
        } else {
          LOG.debug("Current version {} matches server version", currentStore.getBaseVersion());
        }
        // Rule map for easier application of updates
        Map<String, AntennaDoctorRuleBean> ruleMap = currentStore.getRules().stream().collect(Collectors.toMap(AntennaDoctorRuleBean::getUuid, i -> i));
        // Now let's apply updates
        for(String update: manifestBean.getUpdates()) {
          if(currentStore.getUpdates().contains(update)) {
            LOG.debug("Update {} already applied", update);
            continue;
          }
          LOG.debug("Downloading update {} ", update);
          // Download the update bean
          AntennaDoctorRepositoryUpdateBean updateBean;
          try(Response response = ClientBuilder.newClient()
            .target(repoURL + update + AntennaDoctorConstants.URL_VERSION_END)
            .request()
            .get()) {
            // And override current store
            try (InputStream gzipStream = new GzipCompressorInputStream(response.readEntity(InputStream.class))) {
              updateBean = ObjectMapperFactory.get().readValue(
                gzipStream,
                AntennaDoctorRepositoryUpdateBean.class
              );
            }
          }
          // Upserts first, then deletions, then record the update as applied
          updateBean.getUpdates().forEach(r -> ruleMap.put(r.getUuid(), r));
          updateBean.getDeletes().forEach(ruleMap::remove);
          currentStore.getUpdates().add(update);
          LOG.debug("Update {} successfully applied", update);
          changesApplied = true;
        }
        if(changesApplied) {
          LOG.info("Applied new changes");
          // Materialize rule list
          currentStore.setRules(new ArrayList<>(ruleMap.values()));
          // And finally write the updated repo file down
          try (OutputStream outputStream = Files.newOutputStream(repositoryDirectory.resolve(AntennaDoctorConstants.FILE_DATABASE))) {
            ObjectMapperFactory.get().writeValue(outputStream, currentStore);
          }
          delegate.loadNewRules(currentStore.getRules());
        } else {
          LOG.info("No new changes");
        }
      } catch (Throwable e) {
        LOG.error("Failed to retrieve updates: {}", e.getMessage(), e);
      }
    }
  }
}
| |
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;
// NOTE(review): this class is decompiled, name-obfuscated output. Several method bodies
// could not be recovered by the decompiler ("$FF: Couldn't be decompiled") and the static
// initializer is a string-decryption routine (XOR with a position-dependent key) that
// populates the constant table `g`. Code is left byte-identical; comments are hedged
// observations only — verify against the original artifact where possible.
public class al implements Comparable<al> {
    // Presumably the identity/key of whatever this series describes — TODO confirm.
    private final ak a;
    // Two (Date, Double) sample lists; e/f hold running sums averaged over b/c below.
    private final List<n<Date, Double>> b = new ArrayList();
    private final List<n<Date, Double>> c = new ArrayList();
    private double d = 0.0D;
    private double e = 0.0D;
    private double f = 0.0D;
    // Decrypted string constants, filled in by the static initializer.
    private static final String[] g;
    public al(ak var1) {
        this.a = var1;
    }
    // Body lost during decompilation; signature suggests an elapsed-time computation
    // between two Dates expressed in the given TimeUnit — TODO confirm.
    protected static long a(Date param0, Date param1, TimeUnit param2) {
        // $FF: Couldn't be decompiled
    }
    // Body lost during decompilation; likely returns the first element — TODO confirm.
    protected static <T> T a(List<T> param0) {
        // $FF: Couldn't be decompiled
    }
    // Body lost during decompilation; likely returns the last element — TODO confirm.
    protected static <T> T b(List<T> param0) {
        // $FF: Couldn't be decompiled
    }
    // Body lost during decompilation.
    public void a(double param1) {
        // $FF: Couldn't be decompiled
    }
    public ak a() {
        return this.a;
    }
    public List<n<Date, Double>> b() {
        return this.b;
    }
    public List<n<Date, Double>> c() {
        return this.c;
    }
    public double d() {
        return this.d;
    }
    // Average of the accumulated value e over the samples in b; 0 when b is empty.
    // The catch/rethrow of a_ is an obfuscation artifact with no effect.
    public double e() {
        try {
            if (this.b.isEmpty()) {
                return 0.0D;
            }
        } catch (a_ var1) {
            throw var1;
        }
        return this.e / (double)this.b.size();
    }
    // Average of the accumulated value f over the samples in c; 0 when c is empty.
    public double f() {
        try {
            if (this.c.isEmpty()) {
                return 0.0D;
            }
        } catch (a_ var1) {
            throw var1;
        }
        return this.f / (double)this.c.size();
    }
    // Builds "<a.a()><g[2]><a.b()>[<g[1]><a.c()>]<g[0]><d> <e> <f>" using the
    // decrypted separators in g; exact separator text depends on the static init below.
    public String toString() {
        StringBuilder var1 = new StringBuilder(this.a.a());
        try {
            var1.append(g[2]);
            var1.append(this.a.b());
            if (this.a.c() != null) {
                var1.append(g[1]);
                var1.append(this.a.c());
                var1.append("]");
            }
        } catch (a_ var2) {
            throw var2;
        }
        var1.append(g[0]);
        var1.append(DecimalFormat.getNumberInstance().format(this.d()));
        var1.append(" ");
        var1.append(DecimalFormat.getNumberInstance().format(this.e()));
        var1.append(" ");
        var1.append(DecimalFormat.getNumberInstance().format(this.f()));
        return var1.toString();
    }
    // Body lost during decompilation; used by a(al) below, presumably a null-safe
    // equality check — TODO confirm.
    protected static boolean a(Object param0, Object param1) {
        // $FF: Couldn't be decompiled
    }
    // Comparison backing Comparable<al>: null sorts first; equal primary keys fall
    // back to the secondary key comparison.
    public int a(al var1) {
        try {
            if (var1 == null) {
                return 1;
            }
        } catch (a_ var2) {
            throw var2;
        }
        try {
            if (a(this.a().a(), var1.a().a())) {
                return this.a().b().compareTo(var1.a().b());
            }
        } catch (a_ var3) {
            throw var3;
        }
        return this.a().a().compareTo(var1.a().a());
    }
    // Obfuscated string-constant decryption: XOR each char with a key drawn from
    // {61,121,103,113,123} by position % 5, materializing g[0..2]. Left untouched.
    static {
        String[] var10000 = new String[3];
        char[] var10003 = "\u0007Y".toCharArray();
        int var10005 = var10003.length;
        int var1 = 0;
        char[] var10004 = var10003;
        int var2 = var10005;
        char[] var4;
        int var10006;
        char var10007;
        byte var10008;
        if (var10005 <= 1) {
            var4 = var10003;
            var10006 = var1;
            var10007 = var10003[var1];
            switch(var1 % 5) {
            case 0:
                var10008 = 61;
                break;
            case 1:
                var10008 = 121;
                break;
            case 2:
                var10008 = 103;
                break;
            case 3:
                var10008 = 113;
                break;
            default:
                var10008 = 123;
            }
        } else {
            var10004 = var10003;
            var2 = var10005;
            if (var10005 <= var1) {
                label127: {
                    var10000[0] = (new String(var10003)).intern();
                    var10003 = "\u001d\"".toCharArray();
                    var10005 = var10003.length;
                    var1 = 0;
                    var10004 = var10003;
                    var2 = var10005;
                    if (var10005 <= 1) {
                        var4 = var10003;
                        var10006 = var1;
                    } else {
                        var10004 = var10003;
                        var2 = var10005;
                        if (var10005 <= var1) {
                            break label127;
                        }
                        var4 = var10003;
                        var10006 = var1;
                    }
                    while(true) {
                        var10007 = var4[var10006];
                        switch(var1 % 5) {
                        case 0:
                            var10008 = 61;
                            break;
                        case 1:
                            var10008 = 121;
                            break;
                        case 2:
                            var10008 = 103;
                            break;
                        case 3:
                            var10008 = 113;
                            break;
                        default:
                            var10008 = 123;
                        }
                        var4[var10006] = (char)(var10007 ^ var10008);
                        ++var1;
                        if (var2 == 0) {
                            var10006 = var2;
                            var4 = var10004;
                        } else {
                            if (var2 <= var1) {
                                break;
                            }
                            var4 = var10004;
                            var10006 = var1;
                        }
                    }
                }
                var10000[1] = (new String(var10004)).intern();
                var10003 = "\u001dTG".toCharArray();
                var10005 = var10003.length;
                var1 = 0;
                var10004 = var10003;
                var2 = var10005;
                if (var10005 <= 1) {
                    var4 = var10003;
                    var10006 = var1;
                } else {
                    var10004 = var10003;
                    var2 = var10005;
                    if (var10005 <= var1) {
                        var10000[2] = (new String(var10003)).intern();
                        g = var10000;
                        return;
                    }
                    var4 = var10003;
                    var10006 = var1;
                }
                while(true) {
                    var10007 = var4[var10006];
                    switch(var1 % 5) {
                    case 0:
                        var10008 = 61;
                        break;
                    case 1:
                        var10008 = 121;
                        break;
                    case 2:
                        var10008 = 103;
                        break;
                    case 3:
                        var10008 = 113;
                        break;
                    default:
                        var10008 = 123;
                    }
                    var4[var10006] = (char)(var10007 ^ var10008);
                    ++var1;
                    if (var2 == 0) {
                        var10006 = var2;
                        var4 = var10004;
                    } else {
                        if (var2 <= var1) {
                            var10000[2] = (new String(var10004)).intern();
                            g = var10000;
                            return;
                        }
                        var4 = var10004;
                        var10006 = var1;
                    }
                }
            }
            var4 = var10003;
            var10006 = var1;
            var10007 = var10003[var1];
            switch(var1 % 5) {
            case 0:
                var10008 = 61;
                break;
            case 1:
                var10008 = 121;
                break;
            case 2:
                var10008 = 103;
                break;
            case 3:
                var10008 = 113;
                break;
            default:
                var10008 = 123;
            }
        }
        while(true) {
            while(true) {
                var4[var10006] = (char)(var10007 ^ var10008);
                ++var1;
                if (var2 == 0) {
                    var10006 = var2;
                    var4 = var10004;
                    var10007 = var10004[var2];
                    switch(var1 % 5) {
                    case 0:
                        var10008 = 61;
                        break;
                    case 1:
                        var10008 = 121;
                        break;
                    case 2:
                        var10008 = 103;
                        break;
                    case 3:
                        var10008 = 113;
                        break;
                    default:
                        var10008 = 123;
                    }
                } else {
                    if (var2 <= var1) {
                        label65: {
                            var10000[0] = (new String(var10004)).intern();
                            var10003 = "\u001d\"".toCharArray();
                            var10005 = var10003.length;
                            var1 = 0;
                            var10004 = var10003;
                            var2 = var10005;
                            if (var10005 <= 1) {
                                var4 = var10003;
                                var10006 = var1;
                            } else {
                                var10004 = var10003;
                                var2 = var10005;
                                if (var10005 <= var1) {
                                    break label65;
                                }
                                var4 = var10003;
                                var10006 = var1;
                            }
                            while(true) {
                                var10007 = var4[var10006];
                                switch(var1 % 5) {
                                case 0:
                                    var10008 = 61;
                                    break;
                                case 1:
                                    var10008 = 121;
                                    break;
                                case 2:
                                    var10008 = 103;
                                    break;
                                case 3:
                                    var10008 = 113;
                                    break;
                                default:
                                    var10008 = 123;
                                }
                                var4[var10006] = (char)(var10007 ^ var10008);
                                ++var1;
                                if (var2 == 0) {
                                    var10006 = var2;
                                    var4 = var10004;
                                } else {
                                    if (var2 <= var1) {
                                        break;
                                    }
                                    var4 = var10004;
                                    var10006 = var1;
                                }
                            }
                        }
                        var10000[1] = (new String(var10004)).intern();
                        var10003 = "\u001dTG".toCharArray();
                        var10005 = var10003.length;
                        var1 = 0;
                        var10004 = var10003;
                        var2 = var10005;
                        if (var10005 <= 1) {
                            var4 = var10003;
                            var10006 = var1;
                        } else {
                            var10004 = var10003;
                            var2 = var10005;
                            if (var10005 <= var1) {
                                var10000[2] = (new String(var10003)).intern();
                                g = var10000;
                                return;
                            }
                            var4 = var10003;
                            var10006 = var1;
                        }
                        while(true) {
                            var10007 = var4[var10006];
                            switch(var1 % 5) {
                            case 0:
                                var10008 = 61;
                                break;
                            case 1:
                                var10008 = 121;
                                break;
                            case 2:
                                var10008 = 103;
                                break;
                            case 3:
                                var10008 = 113;
                                break;
                            default:
                                var10008 = 123;
                            }
                            var4[var10006] = (char)(var10007 ^ var10008);
                            ++var1;
                            if (var2 == 0) {
                                var10006 = var2;
                                var4 = var10004;
                            } else {
                                if (var2 <= var1) {
                                    var10000[2] = (new String(var10004)).intern();
                                    g = var10000;
                                    return;
                                }
                                var4 = var10004;
                                var10006 = var1;
                            }
                        }
                    }
                    var4 = var10004;
                    var10006 = var1;
                    var10007 = var10004[var1];
                    switch(var1 % 5) {
                    case 0:
                        var10008 = 61;
                        break;
                    case 1:
                        var10008 = 121;
                        break;
                    case 2:
                        var10008 = 103;
                        break;
                    case 3:
                        var10008 = 113;
                        break;
                    default:
                        var10008 = 123;
                    }
                }
            }
        }
    }
}
| |
/*
* #%L
* xcode-maven-plugin
* %%
* Copyright (C) 2012 SAP AG
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.sap.prd.mobile.ios.mios;
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.MojoExecutionException;
/**
* Contains all parameters and methods that are needed for mojos that invoke the 'xcodebuild'
* command.
*
*/
public abstract class BuildContextAwareMojo extends AbstractXCodeMojo
{
  private static final String PREFIX_XCODE_OPTIONS = "xcode.options.";
  private static final String PREFIX_XCODE_SETTINGS = "xcode.settings.";
  protected final static List<String> DEFAULT_BUILD_ACTIONS = Collections.unmodifiableList(Arrays.asList("clean",
        "build"));
  /**
   * The Xcode build action to to execute (e.g. clean, build, install). By default
   * <code>clean</code> and <code>build</code> are executed.
   *
   * @parameter
   */
  protected List<String> buildActions;
  /**
   * The code sign identity is used to select the provisioning profile (e.g.
   * <code>iPhone Distribution</code>, <code>iPhone Developer</code>).
   *
   * @parameter expression="${xcode.codeSignIdentity}"
   * @since 1.2.0
   */
  protected String codeSignIdentity;
  /**
   * Can be used to override the provisioning profile defined in the Xcode project target. You can
   * set it to an empty String if you want to use the default provisioning profile.
   *
   * @parameter expression="${xcode.provisioningProfile}"
   * @since 1.2.1
   */
  protected String provisioningProfile;
  /**
   * The Xcode target to be built. If not specified, the default target (the first target) will be
   * built.
   *
   * @parameter expression="${xcode.target}"
   * @since 1.4.1
   */
  protected String target;
  /**
   * @parameter expression="${product.name}"
   */
  private String productName;
  /**
   * Settings to pass to XCode - if any are explicitly defined here, this plugin will not provide
   * default settings to XCode.
   *
   * @parameter
   * @since 1.6.2
   */
  private Map<String, String> settings;
  /**
   * Options to pass to XCode - if any are explicitly defined here, this plugin will not provide
   * default options to XCode.
   *
   * @parameter
   * @since 1.6.2
   */
  private Map<String, String> options;
  /**
   * @parameter expression="${session}"
   * @required
   * @readonly
   */
  private MavenSession session;

  /**
   * Assembles the XCodeContext for an xcodebuild invocation: resolves the project directory
   * from the requested source location, collects managed settings/options (code sign identity,
   * provisioning profile, project, configuration, sdk, target) and merges user-supplied
   * settings/options from the POM and from xcode.settings.* / xcode.options.* properties.
   *
   * @throws IllegalStateException if an unknown source code location is passed
   */
  protected XCodeContext getXCodeContext(final XCodeContext.SourceCodeLocation sourceCodeLocation,
        String configuration, String sdk)
  {
    final String projectName = project.getArtifactId();
    File projectDirectory = null;
    if (sourceCodeLocation == XCodeContext.SourceCodeLocation.WORKING_COPY) {
      projectDirectory = getXCodeCompileDirectory();
    }
    else if (sourceCodeLocation == XCodeContext.SourceCodeLocation.ORIGINAL) {
      projectDirectory = getXCodeSourceDirectory();
    }
    else {
      throw new IllegalStateException("Invalid source code location: '" + sourceCodeLocation + "'");
    }
    HashMap<String, String> managedSettings = new HashMap<String, String>();
    if (codeSignIdentity != null && !codeSignIdentity.trim().isEmpty())
      managedSettings.put(Settings.ManagedSetting.CODE_SIGN_IDENTITY.name(), codeSignIdentity);
    // An empty provisioning profile is intentionally allowed (means "use the default profile")
    if (provisioningProfile != null)
      managedSettings.put(Settings.ManagedSetting.PROVISIONING_PROFILE.name(), provisioningProfile);
    HashMap<String, String> managedOptions = new HashMap<String, String>();
    managedOptions.put(Options.ManagedOption.PROJECT.getOptionName(), projectName + ".xcodeproj");
    if (configuration != null && !configuration.trim().isEmpty())
      managedOptions.put(Options.ManagedOption.CONFIGURATION.getOptionName(), configuration);
    if (sdk != null && !sdk.trim().isEmpty())
      managedOptions.put(Options.ManagedOption.SDK.getOptionName(), sdk);
    if (target != null && !target.trim().isEmpty())
      managedOptions.put(Options.ManagedOption.TARGET.getOptionName(), target);
    // Start from the POM-configured maps (if any) and let xcode.settings.* / xcode.options.*
    // properties override them.
    Map<String, String> _settings = new HashMap<String, String>();
    if (settings != null) {
      _settings.putAll(settings);
    }
    for (String key : getKeys(PREFIX_XCODE_SETTINGS)) {
      _settings.put(key.substring(PREFIX_XCODE_SETTINGS.length()), getProperty(key));
    }
    Map<String, String> _options = new HashMap<String, String>();
    if (options != null) {
      _options.putAll(options);
    }
    for (String key : getKeys(PREFIX_XCODE_OPTIONS)) {
      _options.put(key.substring(PREFIX_XCODE_OPTIONS.length()), getProperty(key));
    }
    return new XCodeContext(getBuildActions(), projectDirectory, System.out, new Settings(_settings, managedSettings),
          new Options(_options, managedOptions));
  }

  /**
   * Returns the configured build actions, or the default ("clean", "build") when none are set.
   */
  protected List<String> getBuildActions()
  {
    return (buildActions == null || buildActions.isEmpty()) ? DEFAULT_BUILD_ACTIONS : Collections
      .unmodifiableList(buildActions);
  }

  /**
   * Retrieves the Info Plist out of the effective Xcode project settings and returns the accessor
   * to it.
   */
  protected PListAccessor getInfoPListAccessor(XCodeContext.SourceCodeLocation location, String configuration,
        String sdk)
    throws MojoExecutionException, XCodeException
  {
    File plistFile = getPListFile(location, configuration, sdk);
    if (!plistFile.isFile()) {
      throw new MojoExecutionException("The Xcode project refers to the Info.plist file '" + plistFile
            + "' that does not exist.");
    }
    return new PListAccessor(plistFile);
  }

  /**
   * Resolves the Info.plist file from the effective build settings. Relative paths are resolved
   * against SRC_ROOT; absolute paths are only accepted when located inside the project.
   *
   * @throws IllegalStateException if an absolute plist path lies outside the Xcode project
   */
  protected File getPListFile(XCodeContext.SourceCodeLocation location, String configuration, String sdk)
    throws XCodeException
  {
    XCodeContext context = getXCodeContext(location, configuration, sdk);
    String plistFileName = EffectiveBuildSettings.getBuildSetting(context, EffectiveBuildSettings.INFOPLIST_FILE);
    File srcRoot = new File(EffectiveBuildSettings.getBuildSetting(context, EffectiveBuildSettings.SRC_ROOT));
    final File plistFile = new File(plistFileName);
    if (!plistFile.isAbsolute()) {
      return new File(srcRoot, plistFileName);
    }
    if (FileUtils.isChild(srcRoot, plistFile))
      return plistFile;
    throw new IllegalStateException("Plist file " + plistFile + " is not located inside the xcode project " + srcRoot
          + ".");
  }

  /**
   * Determines the product name: the POM-configured value wins, otherwise the PRODUCT_NAME
   * effective build setting of the working copy is used.
   *
   * @throws MojoExecutionException if no non-empty product name can be determined
   */
  protected String getProductName(final String configuration, final String sdk) throws MojoExecutionException
  {
    final String productName;
    if (this.productName != null) {
      productName = this.productName;
      getLog().info("Production name obtained from pom file");
    }
    else {
      try {
        productName = EffectiveBuildSettings.getBuildSetting(
              getXCodeContext(XCodeContext.SourceCodeLocation.WORKING_COPY, configuration, sdk), EffectiveBuildSettings.PRODUCT_NAME);
        getLog().info("Product name obtained from effective build settings file");
      }
      catch (final XCodeException ex) {
        throw new MojoExecutionException("Cannot get product name: " + ex.getMessage(), ex);
      }
    }
    if (productName == null || productName.trim().length() == 0)
      throw new MojoExecutionException("Invalid product name. Was null or empty.");
    return productName;
  }

  /**
   * Returns all keys of project properties and user properties matching the <code>prefix</code>.
   *
   * @param prefix
   *          all keys if null
   * @return the matching property keys (all keys when prefix is null)
   */
  protected Set<String> getKeys(String prefix)
  {
    // Collect raw keys first (Properties.keySet() is Set<Object>), then filter/cast once,
    // so the declared Set<String> return type is honored even for prefix == null
    // (the previous implementation returned the raw set in that case).
    final Set<Object> keys = new HashSet<Object>();
    keys.addAll(session.getUserProperties().keySet());
    keys.addAll(project.getProperties().keySet());
    final Set<String> result = new HashSet<String>();
    for (Object key : keys) {
      final String keyName = (String) key;
      if (prefix == null || keyName.startsWith(prefix)) {
        result.add(keyName);
      }
    }
    return result;
  }

  /**
   * Looks up a property value, preferring user (command line) properties over project properties.
   */
  protected String getProperty(String key)
  {
    String value = session.getUserProperties().getProperty(key);
    if (value == null)
    {
      value = project.getProperties().getProperty(key);
    }
    return value;
  }
}
| |
/*
* Copyright 2012 - 2015 Manuel Laggner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tinymediamanager.scraper.hdtrailersnet;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tinymediamanager.core.Constants;
import org.tinymediamanager.scraper.IMediaTrailerProvider;
import org.tinymediamanager.scraper.MediaMetadata;
import org.tinymediamanager.scraper.MediaProviderInfo;
import org.tinymediamanager.scraper.MediaScrapeOptions;
import org.tinymediamanager.scraper.MediaTrailer;
import org.tinymediamanager.scraper.util.CachedUrl;
import org.tinymediamanager.scraper.util.Url;
/**
* The Class HDTrailersNet. A trailer provider for the site hd-trailers.net
*
* @author Myron Boyle
*/
public class HDTrailersNet implements IMediaTrailerProvider {
private static final Logger LOGGER = LoggerFactory.getLogger(HDTrailersNet.class);
private static MediaProviderInfo providerInfo = new MediaProviderInfo(Constants.HDTRAILERSID, "hd-trailers.net",
"Scraper for hd-trailers.net which is able to scrape trailers");
public HDTrailersNet() {
}
@Override
public List<MediaTrailer> getTrailers(MediaScrapeOptions options) throws Exception {
LOGGER.debug("getTrailers() " + options.toString());
List<MediaTrailer> trailers = new ArrayList<MediaTrailer>();
MediaMetadata md = options.getMetadata();
if (md == null || StringUtils.isEmpty(md.getStringValue(MediaMetadata.ORIGINAL_TITLE))) {
LOGGER.warn("no originalTitle served");
return trailers;
}
String ot = md.getStringValue(MediaMetadata.ORIGINAL_TITLE);
// check if the original title is not empty
if (StringUtils.isEmpty(ot)) {
return trailers;
}
// best guess
String search = "http://www.hd-trailers.net/movie/" + ot.replaceAll("[^a-zA-Z0-9]", "-").replaceAll("--", "-").toLowerCase() + "/";
try {
LOGGER.debug("Guessed HD-Trailers Url: " + search);
Url url = new CachedUrl(search);
InputStream in = url.getInputStream();
Document doc = Jsoup.parse(in, "UTF-8", "");
Elements tr = doc.getElementsByAttributeValue("itemprop", "trailer");
/*
* <tr style="" itemprop="trailer" itemscope itemtype="http://schema.org/VideoObject"> <td class="bottomTableDate" rowspan="2">2012-03-30</td>
* <td class="bottomTableName" rowspan="2"><span class="standardTrailerName" itemprop="name">Trailer 2</span> <a href=
* "http://blog.hd-trailers.net/how-to-download-hd-trailers-from-apple/#workarounds" ><img src="http://static.hd-trailers.net/images/error.png"
* width="16" height="16" style="border:0px;vertical-align:middle" alt="Apple Direct Download Unavailable"
* title="Apple Direct Download Unavailable" /></a></td>
*
* <td class="bottomTableResolution"><a href= "http://trailers.apple.com/movies/sony_pictures/meninblack3/meninblack3-tlr2_h480p.mov"
* rel="lightbox[res480p 852 480]" title="Men in Black 3 - Trailer 2 - 480p">480p</a></td> <td class="bottomTableResolution"><a href=
* "http://trailers.apple.com/movies/sony_pictures/meninblack3/meninblack3-tlr2_h720p.mov" rel="lightbox[res720p 1280 720]"
* title="Men in Black 3 - Trailer 2 - 720p">720p</a></td> <td class="bottomTableResolution"><a href=
* "http://trailers.apple.com/movies/sony_pictures/meninblack3/meninblack3-tlr2_h1080p.mov" rel="lightbox[res1080p 1920 1080]"
* title="Men in Black 3 - Trailer 2 - 1080p">1080p</a></td> <td class="bottomTableIcon"> <a
* href="http://trailers.apple.com/trailers/sony_pictures/meninblack3/" target="_blank"> <img
* src="http://static.hd-trailers.net/images/apple.ico" alt="Apple" height="16px" width="16px"/></a></td> </tr> <tr> <td
* class="bottomTableFileSize">36 MB</td> <td class="bottomTableFileSize">111 MB</td> <td class="bottomTableFileSize">181 MB</td> <td
* class="bottomTableEmbed"><a href=
* "/embed-code.php?movieId=men-in-black-3&source=1&trailerName=Trailer 2&resolutions=480;720;1080" rel="lightbox[embed 600 600]"
* title="Embed this video on your website">embed</a></td> </tr>
*/
for (Element t : tr) {
try {
String date = t.select("td.bottomTableDate").first().text();
String title = t.select("td.bottomTableName > span").first().text();
// apple.com urls currently not working (according to hd-trailers)
String tr0qual = t.select("td.bottomTableResolution > a").get(0).text();
String tr0url = t.select("td.bottomTableResolution > a").get(0).attr("href");
MediaTrailer trailer = new MediaTrailer();
trailer.setName(title + " (" + date + ")");
trailer.setDate(date);
trailer.setUrl(tr0url);
trailer.setQuality(tr0qual);
trailer.setProvider(getProviderFromUrl(tr0url));
LOGGER.debug(trailer.toString());
trailers.add(trailer);
String tr1qual = t.select("td.bottomTableResolution > a").get(1).text();
String tr1url = t.select("td.bottomTableResolution > a").get(1).attr("href");
trailer = new MediaTrailer();
trailer.setName(title + " (" + date + ")");
trailer.setDate(date);
trailer.setUrl(tr1url);
trailer.setQuality(tr1qual);
trailer.setProvider(getProviderFromUrl(tr1url));
LOGGER.debug(trailer.toString());
trailers.add(trailer);
String tr2qual = t.select("td.bottomTableResolution > a").get(2).text();
String tr2url = t.select("td.bottomTableResolution > a").get(2).attr("href");
trailer = new MediaTrailer();
trailer.setName(title + " (" + date + ")");
trailer.setDate(date);
trailer.setUrl(tr2url);
trailer.setQuality(tr2qual);
trailer.setProvider(getProviderFromUrl(tr2url));
LOGGER.debug(trailer.toString());
trailers.add(trailer);
}
catch (IndexOutOfBoundsException i) {
// ignore parse errors per line
LOGGER.warn("Error parsing HD-Trailers line. Possible missing quality.");
}
}
}
catch (Exception e) {
LOGGER.error("cannot parse HD-Trailers movie: " + ot, e);
// clear cache
CachedUrl.removeCachedFileForUrl(search);
}
finally {
}
return trailers;
}
public String correctUrlForProvider(String provider, String url) {
if (provider.equals("apple")) {
// url = url.replace("_h480p", "_480p");
// url = url.replace("_h720p", "_720p");
// url = url.replace("_h1080p", "_1080p");
url = url.replace("//trailers.apple.com", "//movietrailers.apple.com");
}
return url;
}
/**
* Returns the "Source" for this trailer by parsing the URL.
*
* @param url
* the url
* @return the provider from url
*/
private static String getProviderFromUrl(String url) {
url = url.toLowerCase();
String source = "unknown";
if (url.contains("youtube.com")) {
source = "youtube";
}
else if (url.contains("apple.com")) {
source = "apple";
}
else if (url.contains("aol.com")) {
source = "aol";
}
else if (url.contains("yahoo.com")) {
source = "yahoo";
}
else if (url.contains("hd-trailers.net")) {
source = "hdtrailers";
}
else if (url.contains("moviefone.com")) {
source = "moviefone";
}
else if (url.contains("mtv.com")) {
source = "mtv";
}
else if (url.contains("ign.com")) {
source = "ign";
}
return source;
}
  /**
   * Returns the {@link MediaProviderInfo} describing this trailer provider.
   */
  @Override
  public MediaProviderInfo getProviderInfo() {
    return providerInfo;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.transforms.splittabledofn;
import static org.apache.beam.sdk.transforms.splittabledofn.ByteKeyRangeTracker.next;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.apache.beam.sdk.io.range.ByteKey;
import org.apache.beam.sdk.io.range.ByteKeyRange;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link ByteKeyRangeTracker}. */
@RunWith(JUnit4.class)
public class ByteKeyRangeTrackerTest {
  @Rule public final ExpectedException expected = ExpectedException.none();

  /** Claims inside [10, c0) succeed in order; the end key itself is not claimable. */
  @Test
  public void testTryClaim() throws Exception {
    ByteKeyRange range = ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0));
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(range);
    assertEquals(range, tracker.currentRestriction());
    assertTrue(tracker.tryClaim(ByteKey.of(0x10)));
    // Keys compare lexicographically, so appending 0x00 produces the
    // immediate successor of a key.
    assertTrue(tracker.tryClaim(ByteKey.of(0x10, 0x00)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x10, 0x00, 0x00)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x50)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x99)));
    assertFalse(tracker.tryClaim(ByteKey.of(0xc0)));
  }

  /** checkpoint() before any tryClaim() attempt is an error. */
  @Test
  public void testCheckpointUnstarted() throws Exception {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    expected.expect(IllegalStateException.class);
    tracker.checkpoint();
  }

  /** A failed claim alone does not start the range; checkpoint() stays illegal. */
  @Test
  public void testCheckpointOnlyFailedClaim() throws Exception {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    assertFalse(tracker.tryClaim(ByteKey.of(0xd0)));
    expected.expect(IllegalStateException.class);
    tracker.checkpoint();
  }

  /**
   * After claiming only the start key, the primary shrinks to [10, 1000) and
   * the rest of the range is returned as the checkpoint.
   */
  @Test
  public void testCheckpointJustStarted() throws Exception {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x10)));
    ByteKeyRange checkpoint = tracker.checkpoint();
    assertEquals(
        ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0x10, 0x00)), tracker.currentRestriction());
    assertEquals(ByteKeyRange.of(ByteKey.of(0x10, 0x00), ByteKey.of(0xc0)), checkpoint);
  }

  /** The split point is the successor of the last successfully claimed key. */
  @Test
  public void testCheckpointRegular() throws Exception {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x50)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x90)));
    ByteKeyRange checkpoint = tracker.checkpoint();
    assertEquals(
        ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0x90, 0x00)), tracker.currentRestriction());
    assertEquals(ByteKeyRange.of(ByteKey.of(0x90, 0x00), ByteKey.of(0xc0)), checkpoint);
  }

  /** Claiming right up to the last possible key still leaves a tiny residual. */
  @Test
  public void testCheckpointClaimedLast() throws Exception {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x50)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x90)));
    assertTrue(tracker.tryClaim(ByteKey.of(0xbf)));
    ByteKeyRange checkpoint = tracker.checkpoint();
    assertEquals(
        ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xbf, 0x00)), tracker.currentRestriction());
    assertEquals(ByteKeyRange.of(ByteKey.of(0xbf, 0x00), ByteKey.of(0xc0)), checkpoint);
  }

  /** A failed claim does not move the split point past the last success. */
  @Test
  public void testCheckpointAfterFailedClaim() throws Exception {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x50)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x90)));
    assertTrue(tracker.tryClaim(ByteKey.of(0xa0)));
    assertFalse(tracker.tryClaim(ByteKey.of(0xd0)));
    ByteKeyRange checkpoint = tracker.checkpoint();
    assertEquals(
        ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xa0, 0x00)), tracker.currentRestriction());
    assertEquals(ByteKeyRange.of(ByteKey.of(0xa0, 0x00), ByteKey.of(0xc0)), checkpoint);
  }

  /** Claims must be monotonically increasing. */
  @Test
  public void testNonMonotonicClaim() throws Exception {
    expected.expectMessage("Trying to claim key [70] while last attempted was [90]");
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x50)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x90)));
    tracker.tryClaim(ByteKey.of(0x70));
  }

  /** Claims below the range's start key are rejected with an exception. */
  @Test
  public void testClaimBeforeStartOfRange() throws Exception {
    expected.expectMessage(
        "Trying to claim key [05] before start of the range "
            + "ByteKeyRange{startKey=[10], endKey=[c0]}");
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    tracker.tryClaim(ByteKey.of(0x05));
  }

  /** A failed claim past the end of the range marks the restriction done. */
  @Test
  public void testCheckDoneAfterTryClaimPastEndOfRange() {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x50)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x90)));
    assertFalse(tracker.tryClaim(ByteKey.of(0xd0)));
    tracker.checkDone();
  }

  /** A failed claim of exactly the end key also marks the restriction done. */
  @Test
  public void testCheckDoneAfterTryClaimAtEndOfRange() {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x50)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x90)));
    assertFalse(tracker.tryClaim(ByteKey.of(0xc0)));
    tracker.checkDone();
  }

  /** Succeeding on [bf] leaves [[bf00], [c0]) unattempted, so checkDone fails. */
  @Test
  public void testCheckDoneAfterTryClaimRightBeforeEndOfRange() {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x50)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x90)));
    assertTrue(tracker.tryClaim(ByteKey.of(0xbf)));
    expected.expectMessage(
        "Last attempted key was [bf] in range ByteKeyRange{startKey=[10], endKey=[c0]}, "
            + "claiming work in [[bf00], [c0]) was not attempted");
    tracker.checkDone();
  }

  /** checkDone() with unclaimed work remaining throws. */
  @Test
  public void testCheckDoneWhenNotDone() {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x50)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x90)));
    expected.expectMessage(
        "Last attempted key was [90] in range ByteKeyRange{startKey=[10], endKey=[c0]}, "
            + "claiming work in [[9000], [c0]) was not attempted");
    tracker.checkDone();
  }

  /** markDone() satisfies checkDone() even when keys remain unclaimed. */
  @Test
  public void testCheckDoneWhenExplicitlyMarkedDone() {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x50)));
    assertTrue(tracker.tryClaim(ByteKey.of(0x90)));
    tracker.markDone();
    tracker.checkDone();
  }

  /** checkDone() before any claim attempt is an error. */
  @Test
  public void testCheckDoneUnstarted() {
    ByteKeyRangeTracker tracker =
        ByteKeyRangeTracker.of(ByteKeyRange.of(ByteKey.of(0x10), ByteKey.of(0xc0)));
    expected.expect(IllegalStateException.class);
    tracker.checkDone();
  }

  /** next() appends a 0x00 byte, yielding the lexicographic successor key. */
  @Test
  public void testNextByteKey() {
    assertEquals(next(ByteKey.EMPTY), ByteKey.of(0x00));
    assertEquals(next(ByteKey.of(0x00)), ByteKey.of(0x00, 0x00));
    assertEquals(next(ByteKey.of(0x9f)), ByteKey.of(0x9f, 0x00));
    assertEquals(next(ByteKey.of(0xff)), ByteKey.of(0xff, 0x00));
    assertEquals(next(ByteKey.of(0x10, 0x10)), ByteKey.of(0x10, 0x10, 0x00));
    assertEquals(next(ByteKey.of(0x00, 0xff)), ByteKey.of(0x00, 0xff, 0x00));
    assertEquals(next(ByteKey.of(0xff, 0xff)), ByteKey.of(0xff, 0xff, 0x00));
  }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: io/grpc/reflection/testing/dynamic_reflection_test_depth_two.proto
package io.grpc.reflection.testing;
/**
 * Protobuf type {@code grpc.reflection.testing.DynamicReply}
 *
 * <p>Emitted by the protocol buffer compiler (see the DO NOT EDIT header of
 * this file); hand edits will be lost on regeneration.
 */
public final class DynamicReply extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:grpc.reflection.testing.DynamicReply)
    DynamicReplyOrBuilder {
  // Use DynamicReply.newBuilder() to construct.
  private DynamicReply(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: the string field defaults to "".
  private DynamicReply() {
    message_ = "";
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Parsing constructor: reads the wire format directly into this instance.
  private DynamicReply(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        // NOTE: the generator emits `default` between `case 0` and `case 10`;
        // case order has no effect on switch behavior, kept as generated.
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            com.google.protobuf.ByteString bs = input.readBytes();
            bitField0_ |= 0x00000001;
            message_ = bs;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return io.grpc.reflection.testing.DynamicReflectionTestDepthTwoProto.internal_static_grpc_reflection_testing_DynamicReply_descriptor;
  }
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return io.grpc.reflection.testing.DynamicReflectionTestDepthTwoProto.internal_static_grpc_reflection_testing_DynamicReply_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            io.grpc.reflection.testing.DynamicReply.class, io.grpc.reflection.testing.DynamicReply.Builder.class);
  }
  private int bitField0_;
  public static final int MESSAGE_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted in the accessors.
  private volatile java.lang.Object message_;
  /**
   * <code>optional string message = 1;</code>
   */
  public boolean hasMessage() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>optional string message = 1;</code>
   */
  public java.lang.String getMessage() {
    java.lang.Object ref = message_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String only when the bytes were valid UTF-8.
      if (bs.isValidUtf8()) {
        message_ = s;
      }
      return s;
    }
  }
  /**
   * <code>optional string message = 1;</code>
   */
  public com.google.protobuf.ByteString
      getMessageBytes() {
    java.lang.Object ref = message_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      message_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, message_);
    }
    unknownFields.writeTo(output);
  }
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, message_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof io.grpc.reflection.testing.DynamicReply)) {
      return super.equals(obj);
    }
    io.grpc.reflection.testing.DynamicReply other = (io.grpc.reflection.testing.DynamicReply) obj;
    boolean result = true;
    result = result && (hasMessage() == other.hasMessage());
    if (hasMessage()) {
      result = result && getMessage()
          .equals(other.getMessage());
    }
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasMessage()) {
      hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
      hash = (53 * hash) + getMessage().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static io.grpc.reflection.testing.DynamicReply parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static io.grpc.reflection.testing.DynamicReply parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static io.grpc.reflection.testing.DynamicReply parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static io.grpc.reflection.testing.DynamicReply parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static io.grpc.reflection.testing.DynamicReply parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static io.grpc.reflection.testing.DynamicReply parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static io.grpc.reflection.testing.DynamicReply parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static io.grpc.reflection.testing.DynamicReply parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static io.grpc.reflection.testing.DynamicReply parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static io.grpc.reflection.testing.DynamicReply parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(io.grpc.reflection.testing.DynamicReply prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code grpc.reflection.testing.DynamicReply}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:grpc.reflection.testing.DynamicReply)
      io.grpc.reflection.testing.DynamicReplyOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return io.grpc.reflection.testing.DynamicReflectionTestDepthTwoProto.internal_static_grpc_reflection_testing_DynamicReply_descriptor;
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return io.grpc.reflection.testing.DynamicReflectionTestDepthTwoProto.internal_static_grpc_reflection_testing_DynamicReply_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              io.grpc.reflection.testing.DynamicReply.class, io.grpc.reflection.testing.DynamicReply.Builder.class);
    }
    // Construct using io.grpc.reflection.testing.DynamicReply.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      message_ = "";
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return io.grpc.reflection.testing.DynamicReflectionTestDepthTwoProto.internal_static_grpc_reflection_testing_DynamicReply_descriptor;
    }
    public io.grpc.reflection.testing.DynamicReply getDefaultInstanceForType() {
      return io.grpc.reflection.testing.DynamicReply.getDefaultInstance();
    }
    public io.grpc.reflection.testing.DynamicReply build() {
      io.grpc.reflection.testing.DynamicReply result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    public io.grpc.reflection.testing.DynamicReply buildPartial() {
      io.grpc.reflection.testing.DynamicReply result = new io.grpc.reflection.testing.DynamicReply(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.message_ = message_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof io.grpc.reflection.testing.DynamicReply) {
        return mergeFrom((io.grpc.reflection.testing.DynamicReply)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(io.grpc.reflection.testing.DynamicReply other) {
      if (other == io.grpc.reflection.testing.DynamicReply.getDefaultInstance()) return this;
      if (other.hasMessage()) {
        bitField0_ |= 0x00000001;
        message_ = other.message_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    public final boolean isInitialized() {
      return true;
    }
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      io.grpc.reflection.testing.DynamicReply parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (io.grpc.reflection.testing.DynamicReply) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was parsed before the failure, if anything.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;
    private java.lang.Object message_ = "";
    /**
     * <code>optional string message = 1;</code>
     */
    public boolean hasMessage() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional string message = 1;</code>
     */
    public java.lang.String getMessage() {
      java.lang.Object ref = message_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          message_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <code>optional string message = 1;</code>
     */
    public com.google.protobuf.ByteString
        getMessageBytes() {
      java.lang.Object ref = message_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        message_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <code>optional string message = 1;</code>
     */
    public Builder setMessage(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      message_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional string message = 1;</code>
     */
    public Builder clearMessage() {
      bitField0_ = (bitField0_ & ~0x00000001);
      message_ = getDefaultInstance().getMessage();
      onChanged();
      return this;
    }
    /**
     * <code>optional string message = 1;</code>
     */
    public Builder setMessageBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      message_ = value;
      onChanged();
      return this;
    }
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:grpc.reflection.testing.DynamicReply)
  }
  // @@protoc_insertion_point(class_scope:grpc.reflection.testing.DynamicReply)
  private static final io.grpc.reflection.testing.DynamicReply DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new io.grpc.reflection.testing.DynamicReply();
  }
  public static io.grpc.reflection.testing.DynamicReply getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  @java.lang.Deprecated public static final com.google.protobuf.Parser<DynamicReply>
      PARSER = new com.google.protobuf.AbstractParser<DynamicReply>() {
    public DynamicReply parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new DynamicReply(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<DynamicReply> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<DynamicReply> getParserForType() {
    return PARSER;
  }
  public io.grpc.reflection.testing.DynamicReply getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2006-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kordamp.json;
import org.apache.commons.collections.map.MultiKeyMap;
import org.apache.commons.lang3.StringUtils;
import org.kordamp.json.processors.DefaultDefaultValueProcessor;
import org.kordamp.json.processors.DefaultValueProcessor;
import org.kordamp.json.processors.DefaultValueProcessorMatcher;
import org.kordamp.json.processors.JsonBeanProcessor;
import org.kordamp.json.processors.JsonBeanProcessorMatcher;
import org.kordamp.json.processors.JsonValueProcessor;
import org.kordamp.json.processors.JsonValueProcessorMatcher;
import org.kordamp.json.processors.PropertyNameProcessor;
import org.kordamp.json.processors.PropertyNameProcessorMatcher;
import org.kordamp.json.util.CycleDetectionStrategy;
import org.kordamp.json.util.JavaIdentifierTransformer;
import org.kordamp.json.util.JsonEventListener;
import org.kordamp.json.util.NewBeanInstanceStrategy;
import org.kordamp.json.util.PropertyExclusionClassMatcher;
import org.kordamp.json.util.PropertyFilter;
import org.kordamp.json.util.PropertySetStrategy;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Utility class that helps configuring the serialization process.
*
* @author Andres Almiray
*/
public class JsonConfig {
public static final DefaultValueProcessorMatcher DEFAULT_DEFAULT_VALUE_PROCESSOR_MATCHER = DefaultValueProcessorMatcher.DEFAULT;
public static final JsonBeanProcessorMatcher DEFAULT_JSON_BEAN_PROCESSOR_MATCHER = JsonBeanProcessorMatcher.DEFAULT;
public static final JsonValueProcessorMatcher DEFAULT_JSON_VALUE_PROCESSOR_MATCHER = JsonValueProcessorMatcher.DEFAULT;
public static final NewBeanInstanceStrategy DEFAULT_NEW_BEAN_INSTANCE_STRATEGY = NewBeanInstanceStrategy.DEFAULT;
public static final PropertyExclusionClassMatcher DEFAULT_PROPERTY_EXCLUSION_CLASS_MATCHER = PropertyExclusionClassMatcher.DEFAULT;
public static final PropertyNameProcessorMatcher DEFAULT_PROPERTY_NAME_PROCESSOR_MATCHER = PropertyNameProcessorMatcher.DEFAULT;
    public static final int MODE_LIST = 1;
    public static final int MODE_OBJECT_ARRAY = 2;
    // NOTE(review): MODE_SET shares the value 2 with MODE_OBJECT_ARRAY, so the
    // two modes are indistinguishable at runtime. Looks like a copy/paste slip
    // (3 expected?) — confirm against callers before changing the value.
    public static final int MODE_SET = 2;
private static final Class DEFAULT_COLLECTION_TYPE = List.class;
private static final CycleDetectionStrategy DEFAULT_CYCLE_DETECTION_STRATEGY = CycleDetectionStrategy.STRICT;
private static final String[] DEFAULT_EXCLUDES = new String[]{"class", "declaringClass", "metaClass"};
private static final JavaIdentifierTransformer DEFAULT_JAVA_IDENTIFIER_TRANSFORMER = JavaIdentifierTransformer.NOOP;
private static final DefaultValueProcessor DEFAULT_VALUE_PROCESSOR = new DefaultDefaultValueProcessor();
private static final String[] EMPTY_EXCLUDES = new String[0];
/**
* Array conversion mode
*/
private int arrayMode = MODE_LIST;
private MultiKeyMap beanKeyMap = new MultiKeyMap();
private Map beanProcessorMap = new HashMap();
private MultiKeyMap beanTypeMap = new MultiKeyMap();
/**
* Map of attribute/class
*/
private Map classMap;
private Class collectionType = DEFAULT_COLLECTION_TYPE;
private CycleDetectionStrategy cycleDetectionStrategy = DEFAULT_CYCLE_DETECTION_STRATEGY;
private Map defaultValueMap = new HashMap();
private DefaultValueProcessorMatcher defaultValueProcessorMatcher = DEFAULT_DEFAULT_VALUE_PROCESSOR_MATCHER;
private Class enclosedType;
private List eventListeners = new ArrayList();
private String[] excludes = EMPTY_EXCLUDES;
private Map exclusionMap = new HashMap();
private boolean handleJettisonEmptyElement;
private boolean handleJettisonSingleElementArray;
private boolean ignoreDefaultExcludes;
//private boolean ignoreJPATransient;
private boolean ignoreTransientFields;
private boolean ignorePublicFields = true; // TODO jenkisci/json-lib changed this to false
private boolean ignoreUnreadableProperty = true;
private boolean javascriptCompliant;
private JavaIdentifierTransformer javaIdentifierTransformer = DEFAULT_JAVA_IDENTIFIER_TRANSFORMER;
private PropertyFilter javaPropertyFilter;
private Map javaPropertyNameProcessorMap = new HashMap();
private PropertyNameProcessorMatcher javaPropertyNameProcessorMatcher = DEFAULT_PROPERTY_NAME_PROCESSOR_MATCHER;
private JsonBeanProcessorMatcher jsonBeanProcessorMatcher = DEFAULT_JSON_BEAN_PROCESSOR_MATCHER;
private PropertyFilter jsonPropertyFilter;
private Map jsonPropertyNameProcessorMap = new HashMap();
private PropertyNameProcessorMatcher jsonPropertyNameProcessorMatcher = DEFAULT_PROPERTY_NAME_PROCESSOR_MATCHER;
private JsonValueProcessorMatcher jsonValueProcessorMatcher = DEFAULT_JSON_VALUE_PROCESSOR_MATCHER;
private Map keyMap = new HashMap();
private NewBeanInstanceStrategy newBeanInstanceStrategy = DEFAULT_NEW_BEAN_INSTANCE_STRATEGY;
private PropertyExclusionClassMatcher propertyExclusionClassMatcher = DEFAULT_PROPERTY_EXCLUSION_CLASS_MATCHER;
private PropertySetStrategy propertySetStrategy;
/**
* Root class used when converting to an specific bean
*/
private Class rootClass;
private boolean skipJavaIdentifierTransformationInMapKeys;
private boolean triggerEvents;
private Map typeMap = new HashMap();
private List ignoreFieldAnnotations = new ArrayList();
private boolean allowNonStringKeys = false;
private boolean parseJsonLiterals = true;
    /** Creates a JsonConfig with all settings at their defaults. */
    public JsonConfig() {
    }
/**
* Registers a listener for JSON events.<br>
* The events will be triggered only when using the static builders and if event triggering is
* enabled.<br>
* [Java -> JSON]
*
* @param listener a listener for events
*
* @see #enableEventTriggering
* @see #disableEventTriggering
* @see #removeJsonEventListener(JsonEventListener)
*/
public synchronized void addJsonEventListener(JsonEventListener listener) {
if (!eventListeners.contains(listener)) {
eventListeners.add(listener);
}
}
    /**
     * Removes all registered PropertyNameProcessors by clearing the
     * java property name processor map.<br>
     * [JSON -> Java]
     */
    public void clearJavaPropertyNameProcessors() {
        javaPropertyNameProcessorMap.clear();
    }
    /**
     * Removes all registered JsonBeanProcessors by clearing the
     * bean processor map.<br>
     * [Java -> JSON]
     */
    public void clearJsonBeanProcessors() {
        beanProcessorMap.clear();
    }
    /**
     * Removes all registered listeners for JSON events.<br>
     * Synchronized to match {@link #addJsonEventListener(JsonEventListener)}.<br>
     * [Java -> JSON]
     */
    public synchronized void clearJsonEventListeners() {
        eventListeners.clear();
    }
    /**
     * Removes all registered PropertyNameProcessors by clearing the
     * json property name processor map.<br>
     * [Java -> JSON]
     */
    public void clearJsonPropertyNameProcessors() {
        jsonPropertyNameProcessorMap.clear();
    }
/**
* Removes all registered JsonValueProcessors.<br>
* [Java -> JSON]
*/
public void clearJsonValueProcessors() {
beanKeyMap.clear();
beanTypeMap.clear();
keyMap.clear();
typeMap.clear();
}
/**
* Removes all property exclusions registered per class.<br>
* [Java -> JSON]
*/
public void clearPropertyExclusions() {
exclusionMap.clear();
}
/**
* Removes all registered PropertyNameProcessors.<br>
* [JSON -> Java]
*
* @deprecated use clearJavaPropertyNameProcessors() instead
*/
public void clearPropertyNameProcessors() {
clearJavaPropertyNameProcessors();
}
/**
* Returns the current value of the parseJsonLiterals option.<br>
* Default value is true.
*/
public boolean isParseJsonLiterals() {
return parseJsonLiterals;
}
/**
* Sets the parseJsonLiterals option.
*
* @param parseJsonLiterals the new value of the flag
*/
public void setParseJsonLiterals(boolean parseJsonLiterals) {
this.parseJsonLiterals = parseJsonLiterals;
}
/**
 * Creates a copy of this configuration.<br>
 * Registration maps are copied into fresh maps on the new instance, the
 * excludes array is cloned, and strategy/matcher references are shared.
 *
 * @return a new JsonConfig mirroring every option of this one
 */
public JsonConfig copy() {
    JsonConfig jsc = new JsonConfig();
    jsc.beanKeyMap.putAll(beanKeyMap);
    jsc.beanTypeMap.putAll(beanTypeMap);
    jsc.classMap = new HashMap();
    if (classMap != null) {
        jsc.classMap.putAll(classMap);
    }
    jsc.cycleDetectionStrategy = cycleDetectionStrategy;
    if (eventListeners != null) {
        jsc.eventListeners.addAll(eventListeners);
    }
    if (excludes != null) {
        // defensive copy so callers mutating one config's array do not affect the other
        jsc.excludes = new String[excludes.length];
        System.arraycopy(excludes, 0, jsc.excludes, 0, excludes.length);
    }
    jsc.handleJettisonEmptyElement = handleJettisonEmptyElement;
    jsc.handleJettisonSingleElementArray = handleJettisonSingleElementArray;
    jsc.ignoreDefaultExcludes = ignoreDefaultExcludes;
    jsc.ignoreTransientFields = ignoreTransientFields;
    jsc.ignorePublicFields = ignorePublicFields;
    jsc.javaIdentifierTransformer = javaIdentifierTransformer;
    jsc.javascriptCompliant = javascriptCompliant;
    jsc.keyMap.putAll(keyMap);
    jsc.beanProcessorMap.putAll(beanProcessorMap);
    jsc.rootClass = rootClass;
    jsc.skipJavaIdentifierTransformationInMapKeys = skipJavaIdentifierTransformationInMapKeys;
    jsc.triggerEvents = triggerEvents;
    jsc.typeMap.putAll(typeMap);
    jsc.jsonPropertyFilter = jsonPropertyFilter;
    jsc.javaPropertyFilter = javaPropertyFilter;
    jsc.jsonBeanProcessorMatcher = jsonBeanProcessorMatcher;
    jsc.newBeanInstanceStrategy = newBeanInstanceStrategy;
    jsc.defaultValueProcessorMatcher = defaultValueProcessorMatcher;
    jsc.defaultValueMap.putAll(defaultValueMap);
    jsc.propertySetStrategy = propertySetStrategy;
    //jsc.ignoreJPATransient = ignoreJPATransient;
    jsc.collectionType = collectionType;
    jsc.enclosedType = enclosedType;
    jsc.jsonValueProcessorMatcher = jsonValueProcessorMatcher;
    jsc.javaPropertyNameProcessorMatcher = javaPropertyNameProcessorMatcher;
    jsc.javaPropertyNameProcessorMap.putAll(javaPropertyNameProcessorMap);
    jsc.jsonPropertyNameProcessorMatcher = jsonPropertyNameProcessorMatcher;
    jsc.jsonPropertyNameProcessorMap.putAll(jsonPropertyNameProcessorMap);
    jsc.propertyExclusionClassMatcher = propertyExclusionClassMatcher;
    jsc.exclusionMap.putAll(exclusionMap);
    jsc.ignoreFieldAnnotations.addAll(ignoreFieldAnnotations);
    jsc.allowNonStringKeys = allowNonStringKeys;
    jsc.ignoreUnreadableProperty = ignoreUnreadableProperty;
    jsc.parseJsonLiterals = parseJsonLiterals;
    // Bug fix: arrayMode was the one option not carried over, so copies of a
    // MODE_OBJECT_ARRAY/MODE_SET config silently reverted to MODE_LIST.
    jsc.arrayMode = arrayMode;
    return jsc;
}
/**
* Disables event triggering when building.<br>
* [Java -> JSON]
*/
public void disableEventTriggering() {
triggerEvents = false;
}
/**
* Enables event triggering when building.<br>
* [Java -> JSON]
*/
public void enableEventTriggering() {
triggerEvents = true;
}
/**
* Returns the current value of the ignoreUnreadableProperty option.<br>
* See {@link #setIgnoreUnreadableProperty(boolean)}
*/
public boolean isIgnoreUnreadableProperty() {
return ignoreUnreadableProperty;
}
/**
* If true, properties found in JSON that have no corresponding Java setter/field/etc
* will not raise an exception.
* <p/>
* <p/>
* For example, given {"x":1, "y":2, "z":3} on the following <tt>Point</tt> class,
* {@link JSONObject#toBean()} would fail unless this flag is set to true, because
* property "z" in JSON has no corresponding Java counterpart.
* <p/>
* <pre>
* class Point {
* private int x,y;
* public int getX() { return x; }
* public int getY() { return y; }
* public void setX(int v) { x=v; }
* public void setY(int v) { y=v; }
* }
* </pre>
* <p/>
* [JSON -> Java]
*/
public void setIgnoreUnreadableProperty(boolean ignoreUnreadableProperty) {
this.ignoreUnreadableProperty = ignoreUnreadableProperty;
}
/**
 * Looks up a DefaultValueProcessor for the target class.<br>
 * Falls back to the library default when nothing matching is registered.<br>
 * [Java -> JSON]
 *
 * @param target a class used for searching a DefaultValueProcessor.
 */
public DefaultValueProcessor findDefaultValueProcessor(Class target) {
    if (defaultValueMap.isEmpty()) {
        return DEFAULT_VALUE_PROCESSOR;
    }
    // resolve the registration key through the configured matcher
    Object matchKey = defaultValueProcessorMatcher.getMatch(target, defaultValueMap.keySet());
    DefaultValueProcessor registered = (DefaultValueProcessor) defaultValueMap.get(matchKey);
    return registered != null ? registered : DEFAULT_VALUE_PROCESSOR;
}
/**
 * Finds a PropertyNameProcessor registered to the target class.<br>
 * Returns null if none is registered.<br>
 * [JSON -> Java]
 *
 * @param beanClass a class used for searching a PropertyNameProcessor.
 */
public PropertyNameProcessor findJavaPropertyNameProcessor(Class beanClass) {
    if (javaPropertyNameProcessorMap.isEmpty()) {
        return null;
    }
    Object matchKey = javaPropertyNameProcessorMatcher.getMatch(beanClass, javaPropertyNameProcessorMap.keySet());
    return (PropertyNameProcessor) javaPropertyNameProcessorMap.get(matchKey);
}
/**
 * Finds a JsonBeanProcessor registered to the target class.<br>
 * Returns null if none is registered.<br>
 * [Java -> JSON]
 *
 * @param target a class used for searching a JsonBeanProcessor.
 */
public JsonBeanProcessor findJsonBeanProcessor(Class target) {
    if (beanProcessorMap.isEmpty()) {
        return null;
    }
    Object matchKey = jsonBeanProcessorMatcher.getMatch(target, beanProcessorMap.keySet());
    return (JsonBeanProcessor) beanProcessorMap.get(matchKey);
}
/**
 * Finds a PropertyNameProcessor registered to the target class.<br>
 * Returns null if none is registered.<br>
 * [Java -> JSON]
 *
 * @param beanClass a class used for searching a PropertyNameProcessor.
 */
public PropertyNameProcessor findJsonPropertyNameProcessor(Class beanClass) {
    if (jsonPropertyNameProcessorMap.isEmpty()) {
        return null;
    }
    Object matchKey = jsonPropertyNameProcessorMatcher.getMatch(beanClass, jsonPropertyNameProcessorMap.keySet());
    return (PropertyNameProcessor) jsonPropertyNameProcessorMap.get(matchKey);
}
/**
 * Finds a JsonValueProcessor registered to the target type.<br>
 * Returns null if none is registered.<br>
 * [Java -> JSON]
 *
 * @param propertyType a class used for searching a JsonValueProcessor.
 */
public JsonValueProcessor findJsonValueProcessor(Class propertyType) {
    if (typeMap.isEmpty()) {
        return null;
    }
    Object matchKey = jsonValueProcessorMatcher.getMatch(propertyType, typeMap.keySet());
    return (JsonValueProcessor) typeMap.get(matchKey);
}
/**
 * Finds a JsonValueProcessor.<br>
 * It will search the registered JsonValueProcessors in the following order:
 * <ol>
 * <li>beanClass, key</li>
 * <li>beanClass, type</li>
 * <li>key</li>
 * <li>type</li>
 * </ol>
 * Returns null if none is registered.<br>
 * [Java -> JSON]
 *
 * @param beanClass the class to which the property may belong
 * @param propertyType the type of the property
 * @param key the name of the property which may belong to the target class
 */
public JsonValueProcessor findJsonValueProcessor(Class beanClass, Class propertyType, String key) {
    // 1) exact (beanClass, key) registration
    JsonValueProcessor processor = (JsonValueProcessor) beanKeyMap.get(beanClass, key);
    if (processor == null) {
        // 2) (beanClass, propertyType) registration
        processor = (JsonValueProcessor) beanTypeMap.get(beanClass, propertyType);
    }
    if (processor == null) {
        // 3) key-only registration
        processor = (JsonValueProcessor) keyMap.get(key);
    }
    if (processor == null) {
        // 4) type-only registration, resolved through the matcher
        Object matchKey = jsonValueProcessorMatcher.getMatch(propertyType, typeMap.keySet());
        processor = (JsonValueProcessor) typeMap.get(matchKey);
    }
    return processor;
}
/**
 * Finds a JsonValueProcessor.<br>
 * It will search the registered JsonValueProcessors in the following order:
 * <ol>
 * <li>key</li>
 * <li>type</li>
 * </ol>
 * Returns null if none is registered.<br>
 * [Java -> JSON]
 *
 * @param propertyType the type of the property
 * @param key the name of the property which may belong to the target class
 */
public JsonValueProcessor findJsonValueProcessor(Class propertyType, String key) {
    // key-only registrations take precedence over type registrations
    JsonValueProcessor processor = (JsonValueProcessor) keyMap.get(key);
    if (processor == null) {
        Object matchKey = jsonValueProcessorMatcher.getMatch(propertyType, typeMap.keySet());
        processor = (JsonValueProcessor) typeMap.get(matchKey);
    }
    return processor;
}
/**
* Finds a PropertyNameProcessor registered to the target class.<br>
* Returns null if none is registered. <br>
* Delegates to {@link #findJavaPropertyNameProcessor(Class)}.<br>
* [JSON -> Java]
*
* @param beanClass a class used for searching a PropertyNameProcessor.
*
* @deprecated use findJavaPropertyNameProcessor() instead
*/
public PropertyNameProcessor findPropertyNameProcessor(Class beanClass) {
return findJavaPropertyNameProcessor(beanClass);
}
/**
* Returns the current array mode conversion.<br>
* {@link #reset()} restores MODE_LIST.<br>
* [JSON -> Java]
*
* @return MODE_OBJECT_ARRAY, MODE_LIST or MODE_SET
*/
public int getArrayMode() {
return arrayMode;
}
/**
* Sets the current array mode for conversion.<br>
* If the value is not MODE_LIST, MODE_OBJECT_ARRAY nor MODE_SET, then MODE_LIST will be used.<br>
* MODE_SET also switches the collection type to Set.<br>
* [JSON -> Java]
*
* @param arrayMode array mode for conversion
*/
public void setArrayMode(int arrayMode) {
if (arrayMode == MODE_OBJECT_ARRAY) {
this.arrayMode = arrayMode;
} else if (arrayMode == MODE_SET) {
this.arrayMode = arrayMode;
this.collectionType = Set.class;
} else {
this.arrayMode = MODE_LIST;
// NOTE(review): this assigns a collection class to enclosedType rather than
// collectionType — looks like it may have been intended for collectionType;
// preserved as-is, confirm intent before changing.
this.enclosedType = DEFAULT_COLLECTION_TYPE;
}
}
/**
* Returns the current attribute/class Map.<br>
* May be null if never set.<br>
* [JSON -> Java]
*
* @return a Map of classes, every key identifies a property or a regexp
*/
public Map getClassMap() {
return classMap;
}
/**
* Sets the current attribute/Class Map<br>
* [JSON -> Java]
*
* @param classMap a Map of classes, every key identifies a property or a regexp
*/
public void setClassMap(Map classMap) {
this.classMap = classMap;
}
/**
* Returns the current collection type used for collection transformations.<br>
* [JSON -> Java]
*
* @return the target collection class for conversion
*/
public Class getCollectionType() {
return collectionType;
}
/**
 * Sets the current collection type used for collection transformations.<br>
 * Restores the default collection type if null.<br>
 * [JSON -> Java]
 *
 * @param collectionType the target collection class for conversion
 * @throws JSONException if the given class is not a Collection
 */
public void setCollectionType(Class collectionType) {
    if (collectionType != null) {
        if (!Collection.class.isAssignableFrom(collectionType)) {
            throw new JSONException("The configured collectionType is not a Collection: " + collectionType.getName());
        }
        this.collectionType = collectionType;
    } else {
        // Bug fix: the original assigned the *parameter* here, leaving the
        // field untouched — passing null silently did nothing.
        this.collectionType = DEFAULT_COLLECTION_TYPE;
    }
}
/**
* Returns the configured CycleDetectionStrategy.<br>
* Default value is CycleDetectionStrategy.STRICT<br>
* [Java -> JSON]
*/
public CycleDetectionStrategy getCycleDetectionStrategy() {
return cycleDetectionStrategy;
}
/**
* Sets a CycleDetectionStrategy to use.<br>
* Will set default value (CycleDetectionStrategy.STRICT) if null.<br>
* [Java -> JSON]
*/
public void setCycleDetectionStrategy(CycleDetectionStrategy cycleDetectionStrategy) {
// null falls back to the default so the field is never left unset
this.cycleDetectionStrategy = cycleDetectionStrategy == null ? DEFAULT_CYCLE_DETECTION_STRATEGY
: cycleDetectionStrategy;
}
/**
* Returns the configured DefaultValueProcessorMatcher.<br>
* Default value is DefaultValueProcessorMatcher.DEFAULT<br>
* [Java -> JSON]
*/
public DefaultValueProcessorMatcher getDefaultValueProcessorMatcher() {
return defaultValueProcessorMatcher;
}
/**
* Sets a DefaultValueProcessorMatcher to use.<br>
* Will set default value (DefaultValueProcessorMatcher.DEFAULT) if null.<br>
* [Java -> JSON]
*/
public void setDefaultValueProcessorMatcher(DefaultValueProcessorMatcher defaultValueProcessorMatcher) {
this.defaultValueProcessorMatcher = defaultValueProcessorMatcher == null ? DEFAULT_DEFAULT_VALUE_PROCESSOR_MATCHER
: defaultValueProcessorMatcher;
}
/**
* Returns the current enclosed type for generic collection transformations.<br>
* [JSON -> Java]
*
* @return the target type for conversion
*/
public Class getEnclosedType() {
return enclosedType;
}
/**
* Sets the current enclosed type for generic collection transformations.<br>
* [JSON -> Java]
*
* @param enclosedType the target type for conversion
*/
public void setEnclosedType(Class enclosedType) {
this.enclosedType = enclosedType;
}
/**
* Returns the configured properties for exclusion. <br>
* [Java -> JSON]
*
* @return the user-defined exclusions
*/
public String[] getExcludes() {
return excludes;
}
/**
* Sets the excludes to use.<br>
* Will set default value ([]) if null.<br>
* [Java -> JSON]
*/
public void setExcludes(String[] excludes) {
this.excludes = excludes == null ? EMPTY_EXCLUDES : excludes;
}
/**
* Returns the configured JavaIdentifierTransformer. <br>
* Default value is JavaIdentifierTransformer.NOOP<br>
* [JSON -> Java]
*/
public JavaIdentifierTransformer getJavaIdentifierTransformer() {
return javaIdentifierTransformer;
}
/**
* Sets the JavaIdentifierTransformer to use.<br>
* Will set default value (JavaIdentifierTransformer.NOOP) if null.<br>
* [JSON -> Java]
*/
public void setJavaIdentifierTransformer(JavaIdentifierTransformer javaIdentifierTransformer) {
this.javaIdentifierTransformer = javaIdentifierTransformer == null ? DEFAULT_JAVA_IDENTIFIER_TRANSFORMER
: javaIdentifierTransformer;
}
/**
* Returns the configured property filter when serializing to Java.<br>
* May be null.<br>
* [JSON -> Java]
*/
public PropertyFilter getJavaPropertyFilter() {
return javaPropertyFilter;
}
/**
* Sets a property filter used when serializing to Java.<br>
* [JSON -> Java]
*
* @param javaPropertyFilter the property filter
*/
public void setJavaPropertyFilter(PropertyFilter javaPropertyFilter) {
this.javaPropertyFilter = javaPropertyFilter;
}
/**
* Returns the configured PropertyNameProcessorMatcher.<br>
* Default value is PropertyNameProcessorMatcher.DEFAULT<br>
* [JSON -> Java]
*/
public PropertyNameProcessorMatcher getJavaPropertyNameProcessorMatcher() {
return javaPropertyNameProcessorMatcher;
}
/**
* Sets a PropertyNameProcessorMatcher to use.<br>
* Will set default value (PropertyNameProcessorMatcher.DEFAULT) if null.<br>
* [JSON -> Java]
*/
public void setJavaPropertyNameProcessorMatcher(PropertyNameProcessorMatcher propertyNameProcessorMatcher) {
this.javaPropertyNameProcessorMatcher = propertyNameProcessorMatcher == null ? DEFAULT_PROPERTY_NAME_PROCESSOR_MATCHER
: propertyNameProcessorMatcher;
}
/**
* Returns the configured JsonBeanProcessorMatcher.<br>
* Default value is JsonBeanProcessorMatcher.DEFAULT<br>
* [JSON -> Java]
*/
public JsonBeanProcessorMatcher getJsonBeanProcessorMatcher() {
return jsonBeanProcessorMatcher;
}
/**
* Sets a JsonBeanProcessorMatcher to use.<br>
* Will set default value (JsonBeanProcessorMatcher.DEFAULT) if null.<br>
* [Java -> JSON]
*/
public void setJsonBeanProcessorMatcher(JsonBeanProcessorMatcher jsonBeanProcessorMatcher) {
this.jsonBeanProcessorMatcher = jsonBeanProcessorMatcher == null ? DEFAULT_JSON_BEAN_PROCESSOR_MATCHER
: jsonBeanProcessorMatcher;
}
/**
* Returns a list of registered listeners for JSON events.<br>
* The live internal list is returned, not a copy.<br>
* [Java -> JSON]
*/
public synchronized List getJsonEventListeners() {
return eventListeners;
}
/**
* Returns the configured property filter when serializing to JSON.<br>
* May be null.<br>
* [Java -> JSON]
*/
public PropertyFilter getJsonPropertyFilter() {
return jsonPropertyFilter;
}
/**
* Sets a property filter used when serializing to JSON.<br>
* [Java -> JSON]
*
* @param jsonPropertyFilter the property filter
*/
public void setJsonPropertyFilter(PropertyFilter jsonPropertyFilter) {
this.jsonPropertyFilter = jsonPropertyFilter;
}
/**
 * Returns the configured PropertyNameProcessorMatcher used when serializing to JSON.<br>
 * Default value is PropertyNameProcessorMatcher.DEFAULT<br>
 * [Java -> JSON]
 */
public PropertyNameProcessorMatcher getJsonPropertyNameProcessorMatcher() {
    // Bug fix: previously returned javaPropertyNameProcessorMatcher (the
    // [JSON -> Java] matcher); the setter below writes jsonPropertyNameProcessorMatcher.
    return jsonPropertyNameProcessorMatcher;
}
/**
* Sets a PropertyNameProcessorMatcher to use.<br>
* Will set default value (PropertyNameProcessorMatcher.DEFAULT) if null.<br>
* [Java -> JSON]
*/
public void setJsonPropertyNameProcessorMatcher(PropertyNameProcessorMatcher propertyNameProcessorMatcher) {
this.jsonPropertyNameProcessorMatcher = propertyNameProcessorMatcher == null ? DEFAULT_PROPERTY_NAME_PROCESSOR_MATCHER
: propertyNameProcessorMatcher;
}
/**
* Returns the configured JsonValueProcessorMatcher.<br>
* Default value is JsonValueProcessorMatcher.DEFAULT<br>
* [Java -> JSON]
*/
public JsonValueProcessorMatcher getJsonValueProcessorMatcher() {
return jsonValueProcessorMatcher;
}
/**
* Sets a JsonValueProcessorMatcher to use.<br>
* Will set default value (JsonValueProcessorMatcher.DEFAULT) if null.<br>
* [Java -> JSON]
*/
public void setJsonValueProcessorMatcher(JsonValueProcessorMatcher jsonValueProcessorMatcher) {
this.jsonValueProcessorMatcher = jsonValueProcessorMatcher == null ? DEFAULT_JSON_VALUE_PROCESSOR_MATCHER
: jsonValueProcessorMatcher;
}
/**
 * Returns a set of default excludes merged with the user-defined excludes.<br>
 * Blank user-defined entries are skipped; the rest are trimmed.<br>
 * [Java -> JSON]
 */
public Collection getMergedExcludes() {
    Collection exclusions = new HashSet();
    // user-defined excludes first, trimmed and with blanks dropped
    for (int idx = 0; idx < excludes.length; idx++) {
        String candidate = excludes[idx];
        if (!StringUtils.isBlank(candidate)) {
            exclusions.add(candidate.trim());
        }
    }
    // append the library defaults unless explicitly disabled
    if (!ignoreDefaultExcludes) {
        for (int idx = 0; idx < DEFAULT_EXCLUDES.length; idx++) {
            if (!exclusions.contains(DEFAULT_EXCLUDES[idx])) {
                exclusions.add(DEFAULT_EXCLUDES[idx]);
            }
        }
    }
    return exclusions;
}
/**
 * Returns a set of default excludes with user-defined excludes.<br>
 * Takes into account any additional excludes registered for a matching class.
 * [Java -> JSON]
 */
public Collection getMergedExcludes(Class target) {
    Collection merged = getMergedExcludes();
    if (target == null || exclusionMap.isEmpty()) {
        return merged;
    }
    // fold in per-class exclusions resolved through the matcher
    Object matchKey = propertyExclusionClassMatcher.getMatch(target, exclusionMap.keySet());
    Set perClass = (Set) exclusionMap.get(matchKey);
    if (perClass != null) {
        for (Iterator it = perClass.iterator(); it.hasNext(); ) {
            Object exclusion = it.next();
            if (!merged.contains(exclusion)) {
                merged.add(exclusion);
            }
        }
    }
    return merged;
}
/**
* Returns the configured NewBeanInstanceStrategy.<br>
* Default value is NewBeanInstanceStrategy.DEFAULT<br>
* [JSON -> Java]
*/
public NewBeanInstanceStrategy getNewBeanInstanceStrategy() {
return newBeanInstanceStrategy;
}
/**
* Sets the NewBeanInstanceStrategy to use.<br>
* Will set default value (NewBeanInstanceStrategy.DEFAULT) if null.<br>
* [JSON -> Java]
*/
public void setNewBeanInstanceStrategy(NewBeanInstanceStrategy newBeanInstanceStrategy) {
this.newBeanInstanceStrategy = newBeanInstanceStrategy == null ? DEFAULT_NEW_BEAN_INSTANCE_STRATEGY
: newBeanInstanceStrategy;
}
/**
* Returns the configured PropertyExclusionClassMatcher.<br>
* Default value is PropertyExclusionClassMatcher.DEFAULT<br>
* [Java -> JSON]
*/
public PropertyExclusionClassMatcher getPropertyExclusionClassMatcher() {
return propertyExclusionClassMatcher;
}
/**
* Sets a PropertyExclusionClassMatcher to use.<br>
* Will set default value (PropertyExclusionClassMatcher.DEFAULT) if null.<br>
* [Java -> JSON]
*/
public void setPropertyExclusionClassMatcher(PropertyExclusionClassMatcher propertyExclusionClassMatcher) {
this.propertyExclusionClassMatcher = propertyExclusionClassMatcher == null ? DEFAULT_PROPERTY_EXCLUSION_CLASS_MATCHER
: propertyExclusionClassMatcher;
}
/**
* Returns the configured PropertyNameProcessorMatcher.<br>
* Default value is PropertyNameProcessorMatcher.DEFAULT<br>
* [JSON -> Java]
*
* @deprecated use getJavaPropertyNameProcessorMatcher() instead
*/
public PropertyNameProcessorMatcher getPropertyNameProcessorMatcher() {
return getJavaPropertyNameProcessorMatcher();
}
/**
* Sets a PropertyNameProcessorMatcher to use.<br>
* Will set default value (PropertyNameProcessorMatcher.DEFAULT) if null.<br>
* [JSON -> Java]
*
* @deprecated use setJavaPropertyNameProcessorMatcher() instead
*/
public void setPropertyNameProcessorMatcher(PropertyNameProcessorMatcher propertyNameProcessorMatcher) {
setJavaPropertyNameProcessorMatcher(propertyNameProcessorMatcher);
}
/**
* Returns the configured PropertySetStrategy.<br>
* May be null if never set.<br>
* [JSON -> Java]
*/
public PropertySetStrategy getPropertySetStrategy() {
return propertySetStrategy;
}
/**
* Sets a PropertySetStrategy to use.<br>
* NOTE(review): unlike other setters, null is stored as-is here; presumably
* the default strategy is applied elsewhere — confirm before relying on it.<br>
* [JSON -> Java]
*/
public void setPropertySetStrategy(PropertySetStrategy propertySetStrategy) {
this.propertySetStrategy = propertySetStrategy;
}
/**
* Returns the current root Class.<br>
* [JSON -> Java]
*
* @return the target class for conversion
*/
public Class getRootClass() {
return rootClass;
}
/**
* Sets the current root Class.<br>
* [JSON -> Java]
*
* @param rootClass the target class for conversion
*/
public void setRootClass(Class rootClass) {
this.rootClass = rootClass;
}
/**
* Returns true if non-String keys are allowed on JSONObject.<br>
* Default value is false<br>
* [Java -> JSON]
*/
public boolean isAllowNonStringKeys() {
return allowNonStringKeys;
}
/**
* Sets if non-String keys are allowed on JSONObject.<br>
* [Java -> JSON]
*/
public void setAllowNonStringKeys(boolean allowNonStringKeys) {
this.allowNonStringKeys = allowNonStringKeys;
}
/**
* Returns true if event triggering is enabled during building.<br>
* Default value is false<br>
* [Java -> JSON]
*
* @see #enableEventTriggering
* @see #disableEventTriggering
*/
public boolean isEventTriggeringEnabled() {
return triggerEvents;
}
/**
* Returns true if this Jettison convention will be handled when converting to Java.<br>
* Jettison assumes that "" (empty string) can be assigned to empty elements (objects), which
* clearly violates the JSON spec.<br>
* [JSON -> Java]
*/
public boolean isHandleJettisonEmptyElement() {
return handleJettisonEmptyElement;
}
/**
* Activate/Deactivate handling this jettison convention when converting to Java.<br>
* Jettison states that "" (empty string) can be assigned to empty elements (objects), which
* clearly violates the JSON spec.<br>
* [JSON -> Java]
*/
public void setHandleJettisonEmptyElement(boolean handleJettisonEmptyElement) {
this.handleJettisonEmptyElement = handleJettisonEmptyElement;
}
/**
* Returns true if this jettison convention will be handled when converting to Java.<br>
* Jettison states the following JSON {'media':{'title':'hello'}} can be set as a single element
* JSONArray (media is the array).<br>
* [JSON -> Java]
*/
public boolean isHandleJettisonSingleElementArray() {
return handleJettisonSingleElementArray;
}
/**
* Activate/Deactivate handling this jettison convention when converting to Java.<br>
* Jettison states the following JSON {'media':{'title':'hello'}} can be set as a single element
* JSONArray (media is the array).<br>
* [JSON -> Java]
*/
public void setHandleJettisonSingleElementArray(boolean handleJettisonSingleElementArray) {
this.handleJettisonSingleElementArray = handleJettisonSingleElementArray;
}
/**
* Returns true if default excludes will not be used.<br>
* Default value is false.<br>
* [Java -> JSON]
*/
public boolean isIgnoreDefaultExcludes() {
return ignoreDefaultExcludes;
}
/**
* Sets if default excludes would be skipped when building.<br>
* [Java -> JSON]
*/
public void setIgnoreDefaultExcludes(boolean ignoreDefaultExcludes) {
this.ignoreDefaultExcludes = ignoreDefaultExcludes;
}
/**
* Returns true if JPA Transient annotated methods should be ignored.<br>
* Default value is false.<br>
* [Java -> JSON]
*/
public boolean isIgnoreJPATransient() {
// implemented on top of the generic ignoreFieldAnnotations list
return ignoreFieldAnnotations.contains("javax.persistence.Transient");
}
/**
* Sets if JPA Transient annotated methods would be skipped when building.<br>
* [Java -> JSON]
*/
public void setIgnoreJPATransient(boolean ignoreJPATransient) {
if (ignoreJPATransient) {
addIgnoreFieldAnnotation("javax.persistence.Transient");
} else {
removeIgnoreFieldAnnotation("javax.persistence.Transient");
}
}
/**
* Returns true if transient fields of a bean will be ignored.<br>
* Default value is false.<br>
* [Java -> JSON]
*/
public boolean isIgnoreTransientFields() {
return ignoreTransientFields;
}
/**
* Sets if transient fields would be skipped when building.<br>
* [Java -> JSON]
*/
public void setIgnoreTransientFields(boolean ignoreTransientFields) {
this.ignoreTransientFields = ignoreTransientFields;
}
/**
* Returns true if public fields of a bean will be ignored.<br>
* Default value is true.<br>
* [Java -> JSON]
*/
public boolean isIgnorePublicFields() {
return ignorePublicFields;
}
/**
* Sets if public fields would be skipped when building.<br>
* [Java -> JSON]
*/
public void setIgnorePublicFields(boolean ignorePublicFields) {
this.ignorePublicFields = ignorePublicFields;
}
/**
* Returns true if Javascript compatibility is turned on.<br>
* Default value is false.<br>
* [Java -> JSON]
*/
public boolean isJavascriptCompliant() {
return javascriptCompliant;
}
/**
* Sets if Javascript compatibility is enabled when building.<br>
* [Java -> JSON]
*/
public void setJavascriptCompliant(boolean javascriptCompliant) {
this.javascriptCompliant = javascriptCompliant;
}
/**
* Returns true if map keys will not be transformed.<br>
* Default value is false.<br>
* [JSON -> Java]
*/
public boolean isSkipJavaIdentifierTransformationInMapKeys() {
return skipJavaIdentifierTransformationInMapKeys;
}
/**
* Sets if property name as JavaIdentifier transformations would be skipped.<br>
* [JSON -> Java]
*/
public void setSkipJavaIdentifierTransformationInMapKeys(boolean skipJavaIdentifierTransformationInMapKeys) {
this.skipJavaIdentifierTransformationInMapKeys = skipJavaIdentifierTransformationInMapKeys;
}
/**
* Registers a DefaultValueProcessor.<br>
* Null arguments are silently ignored.<br>
* [Java -> JSON]
*
* @param target the class to use as key
* @param defaultValueProcessor the processor to register
*/
public void registerDefaultValueProcessor(Class target, DefaultValueProcessor defaultValueProcessor) {
if (target != null && defaultValueProcessor != null) {
defaultValueMap.put(target, defaultValueProcessor);
}
}
/**
* Registers a PropertyNameProcessor.<br>
* Null arguments are silently ignored.<br>
* [JSON -> Java]
*
* @param target the class to use as key
* @param propertyNameProcessor the processor to register
*/
public void registerJavaPropertyNameProcessor(Class target, PropertyNameProcessor propertyNameProcessor) {
if (target != null && propertyNameProcessor != null) {
javaPropertyNameProcessorMap.put(target, propertyNameProcessor);
}
}
/**
* Registers a JsonBeanProcessor.<br>
* Null arguments are silently ignored.<br>
* [Java -> JSON]
*
* @param target the class to use as key
* @param jsonBeanProcessor the processor to register
*/
public void registerJsonBeanProcessor(Class target, JsonBeanProcessor jsonBeanProcessor) {
if (target != null && jsonBeanProcessor != null) {
beanProcessorMap.put(target, jsonBeanProcessor);
}
}
/**
* Registers a PropertyNameProcessor.<br>
* Null arguments are silently ignored.<br>
* [Java -> JSON]
*
* @param target the class to use as key
* @param propertyNameProcessor the processor to register
*/
public void registerJsonPropertyNameProcessor(Class target, PropertyNameProcessor propertyNameProcessor) {
if (target != null && propertyNameProcessor != null) {
jsonPropertyNameProcessorMap.put(target, propertyNameProcessor);
}
}
/**
* Registers a JsonValueProcessor keyed by (bean class, property type).<br>
* Null arguments are silently ignored.<br>
* [Java -> JSON]
*
* @param beanClass the class to use as key
* @param propertyType the property type to use as key
* @param jsonValueProcessor the processor to register
*/
public void registerJsonValueProcessor(Class beanClass, Class propertyType, JsonValueProcessor jsonValueProcessor) {
if (beanClass != null && propertyType != null && jsonValueProcessor != null) {
beanTypeMap.put(beanClass, propertyType, jsonValueProcessor);
}
}
/**
* Registers a JsonValueProcessor keyed by property type only.<br>
* Null arguments are silently ignored.<br>
* [Java -> JSON]
*
* @param propertyType the property type to use as key
* @param jsonValueProcessor the processor to register
*/
public void registerJsonValueProcessor(Class propertyType, JsonValueProcessor jsonValueProcessor) {
if (propertyType != null && jsonValueProcessor != null) {
typeMap.put(propertyType, jsonValueProcessor);
}
}
/**
* Registers a JsonValueProcessor keyed by (bean class, property name).<br>
* Null arguments are silently ignored.<br>
* [Java -> JSON]
*
* @param beanClass the class to use as key
* @param key the property name to use as key
* @param jsonValueProcessor the processor to register
*/
public void registerJsonValueProcessor(Class beanClass, String key, JsonValueProcessor jsonValueProcessor) {
if (beanClass != null && key != null && jsonValueProcessor != null) {
beanKeyMap.put(beanClass, key, jsonValueProcessor);
}
}
/**
* Registers a JsonValueProcessor keyed by property name only.<br>
* Null arguments are silently ignored.<br>
* [Java -> JSON]
*
* @param key the property name to use as key
* @param jsonValueProcessor the processor to register
*/
public void registerJsonValueProcessor(String key, JsonValueProcessor jsonValueProcessor) {
if (key != null && jsonValueProcessor != null) {
keyMap.put(key, jsonValueProcessor);
}
}
/**
 * Registers an exclusion for a target class.<br>
 * Null arguments are silently ignored.<br>
 * [Java -> JSON]
 *
 * @param target the class to use as key
 * @param propertyName the property to be excluded
 */
public void registerPropertyExclusion(Class target, String propertyName) {
    if (target == null || propertyName == null) {
        return;
    }
    // lazily create the per-class exclusion set
    Set exclusions = (Set) exclusionMap.get(target);
    if (exclusions == null) {
        exclusions = new HashSet();
        exclusionMap.put(target, exclusions);
    }
    if (!exclusions.contains(propertyName)) {
        exclusions.add(propertyName);
    }
}
/**
 * Registers exclusions for a target class.<br>
 * Null or empty arguments are silently ignored.<br>
 * [Java -> JSON]
 *
 * @param target the class to use as key
 * @param properties the properties to be excluded
 */
public void registerPropertyExclusions(Class target, String[] properties) {
    if (target == null || properties == null || properties.length == 0) {
        return;
    }
    // lazily create the per-class exclusion set
    Set exclusions = (Set) exclusionMap.get(target);
    if (exclusions == null) {
        exclusions = new HashSet();
        exclusionMap.put(target, exclusions);
    }
    for (int idx = 0; idx < properties.length; idx++) {
        String property = properties[idx];
        if (!exclusions.contains(property)) {
            exclusions.add(property);
        }
    }
}
/**
* Registers a PropertyNameProcessor.<br>
* Delegates to {@link #registerJavaPropertyNameProcessor(Class, PropertyNameProcessor)}.<br>
* [JSON -> Java]
*
* @param target the class to use as key
* @param propertyNameProcessor the processor to register
*
* @deprecated use registerJavaPropertyNameProcessor() instead
*/
public void registerPropertyNameProcessor(Class target, PropertyNameProcessor propertyNameProcessor) {
registerJavaPropertyNameProcessor(target, propertyNameProcessor);
}
/**
* Removes a listener for JSON events.<br>
* [Java -> JSON]
*
* @param listener a listener for events
*
* @see #addJsonEventListener(JsonEventListener)
*/
public synchronized void removeJsonEventListener(JsonEventListener listener) {
eventListeners.remove(listener);
}
/**
* Resets all values to its default state.
*/
public void reset() {
excludes = EMPTY_EXCLUDES;
ignoreDefaultExcludes = false;
ignoreTransientFields = false;
ignorePublicFields = true;
javascriptCompliant = false;
javaIdentifierTransformer = DEFAULT_JAVA_IDENTIFIER_TRANSFORMER;
cycleDetectionStrategy = DEFAULT_CYCLE_DETECTION_STRATEGY;
skipJavaIdentifierTransformationInMapKeys = false;
triggerEvents = false;
handleJettisonEmptyElement = false;
handleJettisonSingleElementArray = false;
arrayMode = MODE_LIST;
rootClass = null;
classMap = null;
// drop every JsonValueProcessor registration (all four keyings)
keyMap.clear();
typeMap.clear();
beanKeyMap.clear();
beanTypeMap.clear();
jsonPropertyFilter = null;
javaPropertyFilter = null;
jsonBeanProcessorMatcher = DEFAULT_JSON_BEAN_PROCESSOR_MATCHER;
newBeanInstanceStrategy = DEFAULT_NEW_BEAN_INSTANCE_STRATEGY;
defaultValueProcessorMatcher = DEFAULT_DEFAULT_VALUE_PROCESSOR_MATCHER;
defaultValueMap.clear();
propertySetStrategy = null/* DEFAULT_PROPERTY_SET_STRATEGY */;
//ignoreJPATransient = false;
collectionType = DEFAULT_COLLECTION_TYPE;
enclosedType = null;
jsonValueProcessorMatcher = DEFAULT_JSON_VALUE_PROCESSOR_MATCHER;
javaPropertyNameProcessorMap.clear();
javaPropertyNameProcessorMatcher = DEFAULT_PROPERTY_NAME_PROCESSOR_MATCHER;
jsonPropertyNameProcessorMap.clear();
jsonPropertyNameProcessorMatcher = DEFAULT_PROPERTY_NAME_PROCESSOR_MATCHER;
beanProcessorMap.clear();
propertyExclusionClassMatcher = DEFAULT_PROPERTY_EXCLUSION_CLASS_MATCHER;
exclusionMap.clear();
// clearing this list also turns the ignoreJPATransient option off
ignoreFieldAnnotations.clear();
allowNonStringKeys = false;
parseJsonLiterals = true;
}
/**
 * Adds an annotation that marks a field to be skipped when building.
 * Null names and already-registered names are ignored.<br>
 * [Java -&gt; JSON]
 *
 * @param annotationClassName fully qualified name of the annotation class
 */
public void addIgnoreFieldAnnotation(String annotationClassName) {
    if (annotationClassName == null) {
        return;
    }
    // the backing collection is a List, so guard against duplicate entries
    if (!ignoreFieldAnnotations.contains(annotationClassName)) {
        ignoreFieldAnnotations.add(annotationClassName);
    }
}
/**
 * Removes an annotation that marks a field to be skipped when building.
 * (Javadoc fixed: this method removes, it does not add.)<br>
 * [Java -&gt; JSON]
 *
 * @param annotationClassName fully qualified name of the annotation class; ignored if null
 */
public void removeIgnoreFieldAnnotation(String annotationClassName) {
    if (annotationClassName != null) { ignoreFieldAnnotations.remove(annotationClassName); }
}
/**
 * Adds an annotation that marks a field to be skipped when building.
 * (Javadoc fixed: this method adds, it does not remove.)<br>
 * [Java -&gt; JSON]
 *
 * @param annotationClass the annotation class; ignored if null
 */
public void addIgnoreFieldAnnotation(Class annotationClass) {
    if (annotationClass != null && !ignoreFieldAnnotations.contains(annotationClass.getName())) {
        ignoreFieldAnnotations.add(annotationClass.getName());
    }
}
/**
 * Removes an annotation that marks a field to be skipped when building.<br>
 * [Java -&gt; JSON]
 *
 * @param annotationClass the annotation class; ignored if null
 */
public void removeIgnoreFieldAnnotation(Class annotationClass) {
    if (annotationClass == null) {
        return;
    }
    ignoreFieldAnnotations.remove(annotationClass.getName());
}
/**
 * Returns a read-only List of all annotation names that mark a field to be
 * skipped when building.<br>
 * [Java -&gt; JSON]
 *
 * @return an unmodifiable view of the registered annotation names
 */
public List getIgnoreFieldAnnotations() {
    return Collections.unmodifiableList(ignoreFieldAnnotations);
}
/**
 * Removes a DefaultValueProcessor.<br>
 * [Java -&gt; JSON]
 *
 * @param target a class used for searching a DefaultValueProcessor; ignored if null
 */
public void unregisterDefaultValueProcessor(Class target) {
    if (target == null) {
        return;
    }
    defaultValueMap.remove(target);
}
/**
 * Removes a PropertyNameProcessor.<br>
 * [JSON -&gt; Java]
 *
 * @param target a class used for searching a PropertyNameProcessor; ignored if null
 */
public void unregisterJavaPropertyNameProcessor(Class target) {
    if (target == null) {
        return;
    }
    javaPropertyNameProcessorMap.remove(target);
}
/**
 * Removes a JsonBeanProcessor.<br>
 * [Java -&gt; JSON]
 *
 * @param target a class used for searching a JsonBeanProcessor; ignored if null
 */
public void unregisterJsonBeanProcessor(Class target) {
    if (target == null) {
        return;
    }
    beanProcessorMap.remove(target);
}
/**
 * Removes a PropertyNameProcessor.<br>
 * [Java -&gt; JSON]
 *
 * @param target a class used for searching a PropertyNameProcessor; ignored if null
 */
public void unregisterJsonPropertyNameProcessor(Class target) {
    if (target == null) {
        return;
    }
    jsonPropertyNameProcessorMap.remove(target);
}
/**
 * Removes a JsonValueProcessor registered by property type.<br>
 * [Java -&gt; JSON]
 *
 * @param propertyType a class used for searching a JsonValueProcessor; ignored if null
 */
public void unregisterJsonValueProcessor(Class propertyType) {
    if (propertyType == null) {
        return;
    }
    typeMap.remove(propertyType);
}
/**
 * Removes a JsonValueProcessor registered by bean class and property type.<br>
 * [Java -&gt; JSON]
 *
 * @param beanClass the class to which the property may belong
 * @param propertyType the type of the property
 */
public void unregisterJsonValueProcessor(Class beanClass, Class propertyType) {
    if (beanClass == null || propertyType == null) {
        return;
    }
    beanTypeMap.remove(beanClass, propertyType);
}
/**
 * Removes a JsonValueProcessor registered by bean class and property name.<br>
 * [Java -&gt; JSON]
 *
 * @param beanClass the class to which the property may belong
 * @param key the name of the property which may belong to the target class
 */
public void unregisterJsonValueProcessor(Class beanClass, String key) {
    if (beanClass == null || key == null) {
        return;
    }
    beanKeyMap.remove(beanClass, key);
}
/**
 * Removes a JsonValueProcessor registered by property name only.<br>
 * [Java -&gt; JSON]
 *
 * @param key the name of the property which may belong to the target class; ignored if null
 */
public void unregisterJsonValueProcessor(String key) {
    if (key == null) {
        return;
    }
    keyMap.remove(key);
}
/**
 * Removes a property exclusion assigned to the target class.<br>
 * [Java -&gt; JSON]
 *
 * @param target a class used for searching property exclusions; ignored if null
 * @param propertyName the name of the property to be removed from the exclusion list; ignored if null
 */
public void unregisterPropertyExclusion(Class target, String propertyName) {
    if (target != null && propertyName != null) {
        Set set = (Set) exclusionMap.get(target);
        // Only touch an existing exclusion set. The previous code created and
        // stored a brand-new empty set just to call remove() on it, which
        // polluted exclusionMap with useless empty entries.
        if (set != null) {
            set.remove(propertyName);
        }
    }
}
/**
 * Removes all property exclusions assigned to the target class.<br>
 * [Java -&gt; JSON]
 *
 * @param target a class used for searching property exclusions; ignored if null
 */
public void unregisterPropertyExclusions(Class target) {
    if (target == null) {
        return;
    }
    Set exclusions = (Set) exclusionMap.get(target);
    if (exclusions != null) {
        exclusions.clear();
    }
}
/**
 * Removes a PropertyNameProcessor.<br>
 * [JSON -&gt; Java]
 *
 * @param target a class used for searching a PropertyNameProcessor.
 *
 * @deprecated use unregisterJavaPropertyNameProcessor() instead
 */
public void unregisterPropertyNameProcessor(Class target) {
    // Kept for backward compatibility; simply delegates to the renamed method.
    unregisterJavaPropertyNameProcessor(target);
}
}
| |
package net.it_adviser.gps.pgsd4java.types;
/*
* #%L
* GPSd4Java
* %%
* Copyright (C) 2011 - 2012 Taimos GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
 * A single satellite as reported by GPSd in a SKY report: PRN, position in the
 * sky (azimuth/elevation), signal strength and whether it is used in the fix.
 * All numeric fields default to -1 ("unknown").
 *
 * @author aevdokimov
 */
public class SATObject implements IGPSObject {

    /** the GPSd internal name */
    public static final String NAME = "SAT";

    private int PRN = -1;
    private int azimuth = -1;
    private int elevation = -1;
    private int signalStrength = -1;
    private boolean used = false;

    /**
     * PRN ID of the satellite. 1-63 are GNSS satellites, 64-96 are GLONASS satellites, 100-164 are SBAS satellites
     *
     * @return PRN
     */
    public int getPRN() {
        return this.PRN;
    }

    /**
     * PRN ID of the satellite. 1-63 are GNSS satellites, 64-96 are GLONASS satellites, 100-164 are SBAS satellites
     *
     * @param PRN
     *            the PRN to set
     */
    public void setPRN(final int PRN) {
        this.PRN = PRN;
    }

    /**
     * Azimuth, degrees from true north.
     *
     * @return azimuth
     */
    public int getAzimuth() {
        return this.azimuth;
    }

    /**
     * Azimuth, degrees from true north.
     *
     * @param azimuth
     *            the azimuth to set
     */
    public void setAzimuth(final int azimuth) {
        this.azimuth = azimuth;
    }

    /**
     * Elevation in degrees.
     *
     * @return elevation
     */
    public int getElevation() {
        return this.elevation;
    }

    /**
     * Elevation in degrees.
     *
     * @param elevation
     *            the elevation to set
     */
    public void setElevation(final int elevation) {
        this.elevation = elevation;
    }

    /**
     * Signal strength in dB.
     *
     * @return signal strength
     */
    public int getSignalStrength() {
        return this.signalStrength;
    }

    /**
     * Signal strength in dB.
     *
     * @param signalStrength
     *            the signal strength to set
     */
    public void setSignalStrength(final int signalStrength) {
        this.signalStrength = signalStrength;
    }

    /**
     * Used in current solution? (SBAS/WAAS/EGNOS satellites may be flagged used if the solution has corrections from them, but not all
     * drivers make this information available.)
     *
     * @return used
     */
    public boolean getUsed() {
        return this.used;
    }

    /**
     * Used in current solution? (SBAS/WAAS/EGNOS satellites may be flagged used if the solution has corrections from them, but not all
     * drivers make this information available.)
     *
     * @param used
     *            the used flag to set
     */
    public void setUsed(final boolean used) {
        this.used = used;
    }

    @Override
    public int hashCode() {
        // All fields are ints/boolean; combine them directly. The previous
        // version routed each int through Double.doubleToLongBits, a leftover
        // from a double-field template that added cost without benefit.
        // The equals/hashCode contract (equal objects -> equal hashes) holds.
        final int prime = 31;
        int result = 1;
        result = (prime * result) + this.PRN;
        result = (prime * result) + this.azimuth;
        result = (prime * result) + this.elevation;
        result = (prime * result) + this.signalStrength;
        result = (prime * result) + ((this.used) ? 1 : 0);
        return result;
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        if ((obj == null) || (this.getClass() != obj.getClass())) {
            return false;
        }
        final SATObject other = (SATObject) obj;
        // Direct int comparison is exactly equivalent to comparing
        // Double.doubleToLongBits of int values (int -> double is lossless).
        return (this.PRN == other.PRN)
                && (this.azimuth == other.azimuth)
                && (this.elevation == other.elevation)
                && (this.signalStrength == other.signalStrength)
                && (this.used == other.used);
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append("SATObject{PRN=");
        sb.append(this.PRN);
        sb.append(", az=");
        sb.append(this.azimuth);
        sb.append(", el=");
        sb.append(this.elevation);
        sb.append(", ss=");
        sb.append(this.signalStrength);
        sb.append(", used=");
        sb.append(this.used ? "Y" : "N");
        sb.append("}");
        return sb.toString();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.cli;
import com.google.common.collect.ImmutableMap;
import org.apache.commons.cli.CommandLine;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.junit.Test;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd;
import static org.hamcrest.Matchers.*;
/**
 * Unit tests for {@code CliTool}: verifies that command exit statuses are
 * propagated, unknown commands fail with USAGE, help flags short-circuit
 * execution, and thrown exceptions are logged (with stack traces in debug mode).
 */
public class CliToolTests extends CliToolTestCase {

    /** A command returning OK must yield an OK tool status and actually run. */
    @Test
    public void testOK() throws Exception {
        Terminal terminal = new MockTerminal();
        final AtomicReference<Boolean> executed = new AtomicReference<>(false);
        final NamedCommand cmd = new NamedCommand("cmd", terminal) {
            @Override
            public CliTool.ExitStatus execute(Settings settings, Environment env) {
                executed.set(true);
                return CliTool.ExitStatus.OK;
            }
        };
        SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd);
        int status = tool.execute();
        assertStatus(status, CliTool.ExitStatus.OK);
        assertCommandHasBeenExecuted(executed);
    }

    /** A command returning USAGE must surface USAGE as the tool status. */
    @Test
    public void testUsageError() throws Exception {
        Terminal terminal = new MockTerminal();
        final AtomicReference<Boolean> executed = new AtomicReference<>(false);
        final NamedCommand cmd = new NamedCommand("cmd", terminal) {
            @Override
            public CliTool.ExitStatus execute(Settings settings, Environment env) {
                executed.set(true);
                return CliTool.ExitStatus.USAGE;
            }
        };
        SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd);
        int status = tool.execute();
        assertStatus(status, CliTool.ExitStatus.USAGE);
        assertCommandHasBeenExecuted(executed);
    }

    /** An IOException thrown by a command must map to IO_ERROR. */
    @Test
    public void testIOError() throws Exception {
        Terminal terminal = new MockTerminal();
        final AtomicReference<Boolean> executed = new AtomicReference<>(false);
        final NamedCommand cmd = new NamedCommand("cmd", terminal) {
            @Override
            public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
                executed.set(true);
                throw new IOException("io error");
            }
        };
        SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd);
        int status = tool.execute();
        assertStatus(status, CliTool.ExitStatus.IO_ERROR);
        assertCommandHasBeenExecuted(executed);
    }

    /** Any other Exception thrown by a command must map to CODE_ERROR. */
    @Test
    public void testCodeError() throws Exception {
        Terminal terminal = new MockTerminal();
        final AtomicReference<Boolean> executed = new AtomicReference<>(false);
        final NamedCommand cmd = new NamedCommand("cmd", terminal) {
            @Override
            public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
                executed.set(true);
                throw new Exception("random error");
            }
        };
        SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd);
        int status = tool.execute();
        assertStatus(status, CliTool.ExitStatus.CODE_ERROR);
        assertCommandHasBeenExecuted(executed);
    }

    /**
     * With several registered commands, invoking one by name must run exactly
     * that command and none of the others.
     */
    @Test
    public void testMultiCommand() {
        Terminal terminal = new MockTerminal();
        int count = randomIntBetween(2, 7);
        final AtomicReference<Boolean>[] executed = new AtomicReference[count];
        for (int i = 0; i < executed.length; i++) {
            executed[i] = new AtomicReference<>(false);
        }
        NamedCommand[] cmds = new NamedCommand[count];
        for (int i = 0; i < count; i++) {
            final int index = i;
            cmds[i] = new NamedCommand("cmd" + index, terminal) {
                @Override
                public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
                    executed[index].set(true);
                    return CliTool.ExitStatus.OK;
                }
            };
        }
        MultiCmdTool tool = new MultiCmdTool("tool", terminal, cmds);
        int cmdIndex = randomIntBetween(0, count-1);
        int status = tool.execute("cmd" + cmdIndex);
        assertThat(status, is(CliTool.ExitStatus.OK.status()));
        for (int i = 0; i < executed.length; i++) {
            assertThat(executed[i].get(), is(i == cmdIndex));
        }
    }

    /** Invoking a name that matches no command must return USAGE and run nothing. */
    @Test
    public void testMultiCommand_UnknownCommand() {
        Terminal terminal = new MockTerminal();
        int count = randomIntBetween(2, 7);
        final AtomicReference<Boolean>[] executed = new AtomicReference[count];
        for (int i = 0; i < executed.length; i++) {
            executed[i] = new AtomicReference<>(false);
        }
        NamedCommand[] cmds = new NamedCommand[count];
        for (int i = 0; i < count; i++) {
            final int index = i;
            cmds[i] = new NamedCommand("cmd" + index, terminal) {
                @Override
                public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
                    executed[index].set(true);
                    return CliTool.ExitStatus.OK;
                }
            };
        }
        MultiCmdTool tool = new MultiCmdTool("tool", terminal, cmds);
        int status = tool.execute("cmd" + count); // "cmd" + count doesn't exist
        assertThat(status, is(CliTool.ExitStatus.USAGE.status()));
        for (int i = 0; i < executed.length; i++) {
            assertThat(executed[i].get(), is(false));
        }
    }

    /**
     * "-h" on a single-command tool must print that command's help and exit OK
     * without executing the command (the command would throw if it ran).
     */
    @Test
    public void testSingleCommand_ToolHelp() throws Exception {
        CaptureOutputTerminal terminal = new CaptureOutputTerminal();
        final AtomicReference<Boolean> executed = new AtomicReference<>(false);
        final NamedCommand cmd = new NamedCommand("cmd1", terminal) {
            @Override
            public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
                executed.set(true);
                throw new IOException("io error");
            }
        };
        SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd);
        int status = tool.execute(args("-h"));
        assertStatus(status, CliTool.ExitStatus.OK);
        assertThat(terminal.getTerminalOutput(), hasSize(3));
        assertThat(terminal.getTerminalOutput(), hasItem(containsString("cmd1 help")));
    }

    /** "-h" on a multi-command tool must print the tool-level help. */
    @Test
    public void testMultiCommand_ToolHelp() {
        CaptureOutputTerminal terminal = new CaptureOutputTerminal();
        NamedCommand[] cmds = new NamedCommand[2];
        cmds[0] = new NamedCommand("cmd0", terminal) {
            @Override
            public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
                return CliTool.ExitStatus.OK;
            }
        };
        cmds[1] = new NamedCommand("cmd1", terminal) {
            @Override
            public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
                return CliTool.ExitStatus.OK;
            }
        };
        MultiCmdTool tool = new MultiCmdTool("tool", terminal, cmds);
        int status = tool.execute(args("-h"));
        assertStatus(status, CliTool.ExitStatus.OK);
        assertThat(terminal.getTerminalOutput(), hasSize(3));
        assertThat(terminal.getTerminalOutput(), hasItem(containsString("tool help")));
    }

    /** "cmd1 -h" on a multi-command tool must print that command's help. */
    @Test
    public void testMultiCommand_CmdHelp() {
        CaptureOutputTerminal terminal = new CaptureOutputTerminal();
        NamedCommand[] cmds = new NamedCommand[2];
        cmds[0] = new NamedCommand("cmd0", terminal) {
            @Override
            public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
                return CliTool.ExitStatus.OK;
            }
        };
        cmds[1] = new NamedCommand("cmd1", terminal) {
            @Override
            public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
                return CliTool.ExitStatus.OK;
            }
        };
        MultiCmdTool tool = new MultiCmdTool("tool", terminal, cmds);
        int status = tool.execute(args("cmd1 -h"));
        assertStatus(status, CliTool.ExitStatus.OK);
        assertThat(terminal.getTerminalOutput(), hasSize(3));
        assertThat(terminal.getTerminalOutput(), hasItem(containsString("cmd1 help")));
    }

    /**
     * Thrown exceptions are logged as a single message by default, and with a
     * stack trace when the debug system property is set.
     */
    @Test
    public void testThatThrowExceptionCanBeLogged() throws Exception {
        CaptureOutputTerminal terminal = new CaptureOutputTerminal();
        NamedCommand cmd = new NamedCommand("cmd", terminal) {
            @Override
            public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
                throw new ElasticsearchException("error message");
            }
        };
        SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd);
        assertStatus(tool.execute(), CliTool.ExitStatus.CODE_ERROR);
        assertThat(terminal.getTerminalOutput(), hasSize(1));
        assertThat(terminal.getTerminalOutput(), hasItem(containsString("error message")));
        // set env... and log stack trace
        try {
            System.setProperty(Terminal.DEBUG_SYSTEM_PROPERTY, "true");
            terminal = new CaptureOutputTerminal();
            assertStatus(new SingleCmdTool("tool", terminal, cmd).execute(), CliTool.ExitStatus.CODE_ERROR);
            assertThat(terminal.getTerminalOutput(), hasSize(2));
            assertThat(terminal.getTerminalOutput(), hasItem(containsString("error message")));
            // This class must be part of the stack strace
            assertThat(terminal.getTerminalOutput(), hasItem(containsString(getClass().getName())));
        } finally {
            // always restore the property so other tests are unaffected
            System.clearProperty(Terminal.DEBUG_SYSTEM_PROPERTY);
        }
    }

    /** Asserts that the int status matches the expected ExitStatus. */
    private void assertStatus(int status, CliTool.ExitStatus expectedStatus) {
        assertThat(status, is(expectedStatus.status()));
    }

    /** Asserts that the command body flipped its "executed" flag. */
    private void assertCommandHasBeenExecuted(AtomicReference<Boolean> executed) {
        assertThat("Expected command atomic reference counter to be set to true", executed.get(), is(Boolean.TRUE));
    }

    /** A CliTool wrapping exactly one command; parse() always returns it. */
    private static class SingleCmdTool extends CliTool {

        private final Command command;

        private SingleCmdTool(String name, Terminal terminal, NamedCommand command) {
            super(CliToolConfig.config(name, SingleCmdTool.class)
                    .cmds(cmd(command.name, command.getClass()))
                    .build(), terminal);
            this.command = command;
        }

        @Override
        protected Command parse(String cmdName, CommandLine cli) throws Exception {
            return command;
        }
    }

    /** A CliTool wrapping several commands; parse() looks them up by name. */
    private static class MultiCmdTool extends CliTool {

        private final Map<String, Command> commands;

        private MultiCmdTool(String name, Terminal terminal, NamedCommand... commands) {
            super(CliToolConfig.config(name, MultiCmdTool.class)
                    .cmds(cmds(commands))
                    .build(), terminal);
            ImmutableMap.Builder<String, Command> commandByName = ImmutableMap.builder();
            for (int i = 0; i < commands.length; i++) {
                commandByName.put(commands[i].name, commands[i]);
            }
            this.commands = commandByName.build();
        }

        @Override
        protected Command parse(String cmdName, CommandLine cli) throws Exception {
            return commands.get(cmdName);
        }

        private static CliToolConfig.Cmd[] cmds(NamedCommand... commands) {
            CliToolConfig.Cmd[] cmds = new CliToolConfig.Cmd[commands.length];
            for (int i = 0; i < commands.length; i++) {
                cmds[i] = cmd(commands[i].name, commands[i].getClass()).build();
            }
            return cmds;
        }
    }

    /** A CliTool.Command that carries a name, for registration in the config. */
    private static abstract class NamedCommand extends CliTool.Command {

        private final String name;

        private NamedCommand(String name, Terminal terminal) {
            super(terminal);
            this.name = name;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.falcon.regression.prism;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.Frequency;
import org.apache.falcon.entity.v0.feed.ActionType;
import org.apache.falcon.entity.v0.feed.ClusterType;
import org.apache.falcon.regression.Entities.FeedMerlin;
import org.apache.falcon.regression.core.bundle.Bundle;
import org.apache.falcon.regression.core.helpers.ColoHelper;
import org.apache.falcon.regression.core.util.OozieUtil;
import org.apache.falcon.regression.core.util.AssertUtil;
import org.apache.falcon.regression.core.util.TimeUtil;
import org.apache.falcon.regression.core.util.HadoopUtil;
import org.apache.falcon.regression.core.util.BundleUtil;
import org.apache.falcon.regression.core.util.InstanceUtil;
import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.Util;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.hadoop.fs.FileSystem;
import org.apache.log4j.Logger;
import org.apache.oozie.client.CoordinatorAction;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.OozieClientException;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.testng.annotations.DataProvider;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Integration test for the feed "delay" feature: replication coordinators on
 * the target cluster must shift their start time by the configured delay, and
 * the computed missing dependencies must match expectations.
 */
@Test(groups = "distributed")
public class FeedDelayTest extends BaseTestClass {

    private static final Logger LOGGER = Logger.getLogger(FeedDelayTest.class);
    // cluster1 acts as replication source, cluster2 as target
    private ColoHelper cluster1 = servers.get(0);
    private ColoHelper cluster2 = servers.get(1);
    private FileSystem cluster1FS = serverFS.get(0);
    private OozieClient cluster2OC = serverOC.get(1);
    private String baseTestDir = cleanAndGetTestDir();
    private String aggregateWorkflowDir = baseTestDir + "/aggregator";
    private String targetPath = baseTestDir + "/target";
    private String targetDataLocation = targetPath + MINUTE_DATE_PATTERN;
    private String sourcePath = baseTestDir + "/source";
    private String feedInputPath = sourcePath + MINUTE_DATE_PATTERN;

    @BeforeClass(alwaysRun = true)
    public void uploadWorkflow() throws Exception {
        uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
    }

    @BeforeMethod(alwaysRun = true)
    public void setup() throws Exception {
        // one bundle per cluster, made unique per test class to avoid clashes
        Bundle bundle = BundleUtil.readELBundle();
        bundles[0] = new Bundle(bundle, cluster1);
        bundles[1] = new Bundle(bundle, cluster2);
        bundles[0].generateUniqueBundle(this);
        bundles[1].generateUniqueBundle(this);
        bundles[0].setProcessWorkflow(aggregateWorkflowDir);
        bundles[1].setProcessWorkflow(aggregateWorkflowDir);
    }

    @AfterMethod(alwaysRun = true)
    public void tearDown() throws IOException {
        removeTestClassEntities();
        cleanTestsDirs();
    }

    /* Test cases to check delay feature in feed.
     * Finding the missing dependencies of coordinator based on
     * given delay in entity and creating them.
     * These should match with the expected missing dependencies.
     * Also checking the startTime of replicated instance with the expected value.
     * In case they don't match, the test should fail.
     * @param sourceStartTime : start time of source cluster
     * @param targetStartTime : start time of target cluster
     * @param sourceDelay : delay in source cluster
     * @param targetDelay : delay in target cluster
     * @param flag : true if (sourceStartTime < targetStartTime) else false
     * */
    @Test(enabled = true, dataProvider = "Feed-Delay-Cases", timeOut = 12000000)
    public void delayTest(String sourceStartTime, String targetStartTime,
                          String sourceDelay, String targetDelay, boolean flag) throws Exception {
        bundles[0].setInputFeedDataPath(feedInputPath);
        Bundle.submitCluster(bundles[0], bundles[1]);
        String feed = bundles[0].getDataSets().get(0);
        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
        //set cluster1 as source
        feed = FeedMerlin.fromString(feed).addFeedCluster(
            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
                .withRetention("hours(15)", ActionType.DELETE)
                .withValidity(sourceStartTime, "2099-10-01T12:10Z")
                .withClusterType(ClusterType.SOURCE)
                .withDelay(new Frequency(sourceDelay))
                .withDataLocation(feedInputPath)
                .build()).toString();
        //set cluster2 as target
        feed = FeedMerlin.fromString(feed).addFeedCluster(
            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
                .withRetention("hours(15)", ActionType.DELETE)
                .withValidity(targetStartTime, "2099-10-01T12:25Z")
                .withClusterType(ClusterType.TARGET)
                .withDelay(new Frequency(targetDelay))
                .withDataLocation(targetDataLocation)
                .build()).toString();
        feed = FeedMerlin.fromString(feed).withProperty("timeout", "minutes(35)").toString();
        feed = FeedMerlin.fromString(feed).withProperty("parallel", "3").toString();
        LOGGER.info("feed : " + Util.prettyPrintXml(feed));
        AssertUtil.assertSucceeded(prism.getFeedHelper().submitAndSchedule(feed));
        //check if coordinator exists
        InstanceUtil.waitTillInstancesAreCreated(cluster2OC, feed, 0);
        Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, Util.readEntityName(feed), "REPLICATION"), 1);
        //Finding bundleId of replicated instance on target
        String bundleId = OozieUtil.getLatestBundleID(cluster2OC, Util.readEntityName(feed), EntityType.FEED);
        //Finding startTime of replicated instance on target
        String startTimeO0zie = OozieUtil.getCoordStartTime(cluster2OC, feed, 0);
        String startTimeExpected = getStartTime(sourceStartTime, targetStartTime, new Frequency(sourceDelay), flag);
        List<String> missingDep = getAndCreateDependencies(cluster1FS, cluster1.getPrefix(), cluster2OC, bundleId);
        List<String> qaDep = new ArrayList<>();
        // expected dependency path: start time rewritten into a directory layout
        if (flag) {
            qaDep.add(sourcePath + "/" + sourceStartTime.replaceAll("-", "/").
                replaceAll("T", "/").replaceAll(":", "/").replaceAll("Z", "/"));
        } else {
            qaDep.add(targetPath + "/" + sourceStartTime.replaceAll("-", "/").
                replaceAll("T", "/").replaceAll(":", "/").replaceAll("Z", "/"));
        }
        //replication should start, wait while it ends
        InstanceUtil.waitTillInstanceReachState(cluster2OC, Util.readEntityName(feed), 0,
            CoordinatorAction.Status.SUCCEEDED, EntityType.FEED);
        Assert.assertTrue(startTimeO0zie.equals(startTimeExpected),
            "Start time of bundle should be " + startTimeExpected + " but it is " + startTimeO0zie);
        matchDependencies(missingDep, qaDep);
        LOGGER.info("Done");
    }

    /**
     * Delay scenarios: start times are relative to "now" (minutes offset);
     * the boolean encodes which cluster's start time drives the expectation.
     */
    @DataProvider(name = "Feed-Delay-Cases")
    public Object[][] getDelayCases() {
        return new Object[][] {
            { TimeUtil.getTimeWrtSystemTime(-120), TimeUtil.getTimeWrtSystemTime(-120),
                "minutes(40)", "minutes(20)", true, },
            { TimeUtil.getTimeWrtSystemTime(-120), TimeUtil.getTimeWrtSystemTime(-120),
                "minutes(20)", "minutes(40)", true, },
            { TimeUtil.getTimeWrtSystemTime(-120), TimeUtil.getTimeWrtSystemTime(-240),
                "minutes(40)", "minutes(20)", true, },
            { TimeUtil.getTimeWrtSystemTime(-120), TimeUtil.getTimeWrtSystemTime(-60),
                "minutes(40)", "minutes(20)", false, },
        };
    }

    /**
     * Polls Oozie for the coordinator's missing dependencies (up to ~5 min),
     * then creates those folders on the source cluster and drops a data file
     * into each so replication can proceed.
     *
     * @return the list of dependency locations that were missing
     */
    private List<String> getAndCreateDependencies(FileSystem sourceFS, String sourcePrefix, OozieClient targetOC,
                                                  String bundleId) throws OozieClientException, IOException {
        List<String> missingDependencies = OozieUtil.getMissingDependencies(targetOC, bundleId);
        for (int i = 0; i < 10 && missingDependencies == null; ++i) {
            TimeUtil.sleepSeconds(30);
            LOGGER.info("sleeping...");
            missingDependencies = OozieUtil.getMissingDependencies(targetOC, bundleId);
        }
        Assert.assertNotNull(missingDependencies, "Missing dependencies not found.");
        // Creating missing dependencies
        HadoopUtil.createFolders(sourceFS, sourcePrefix, missingDependencies);
        //Adding data to empty folders
        for (String location : missingDependencies) {
            LOGGER.info("Transferring data to : " + location);
            HadoopUtil.copyDataToFolder(sourceFS, location, OSUtil.concat(OSUtil.NORMAL_INPUT, "dataFile.xml"));
        }
        return missingDependencies;
    }

    /**
     * Expected coordinator start time: the relevant cluster's start time
     * shifted forward by the source delay.
     */
    private String getStartTime(String sourceStartTime, String targetStartTime, Frequency sourceDelay, boolean flag) {
        String finalDate;
        if (flag) {
            finalDate = TimeUtil.addMinsToTime(sourceStartTime, sourceDelay.getFrequencyAsInt());
        } else {
            finalDate = TimeUtil.addMinsToTime(targetStartTime, sourceDelay.getFrequencyAsInt());
        }
        return finalDate;
    }

    /**
     * Compares two dependency lists after sorting: same size, and each actual
     * entry must contain the corresponding expected entry as a substring.
     */
    private boolean matchDependencies(List<String> fromJob, List<String> qaList) {
        Collections.sort(fromJob);
        Collections.sort(qaList);
        if (fromJob.size() != qaList.size()) {
            return false;
        }
        for (int index = 0; index < fromJob.size(); index++) {
            if (!fromJob.get(index).contains(qaList.get(index))) {
                return false;
            }
        }
        return true;
    }
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.cache;
import com.hazelcast.config.CacheConfig;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.configuration.CompleteConfiguration;
import javax.cache.configuration.Factory;
import javax.cache.configuration.FactoryBuilder;
import javax.cache.configuration.MutableCacheEntryListenerConfiguration;
import javax.cache.event.CacheEntryCreatedListener;
import javax.cache.event.CacheEntryListenerException;
import javax.cache.integration.CacheLoader;
import javax.cache.integration.CacheLoaderException;
import javax.cache.integration.CacheWriter;
import javax.cache.integration.CacheWriterException;
import javax.cache.spi.CachingProvider;
import java.io.Closeable;
import java.io.IOException;
import java.io.Serializable;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.locks.LockSupport;
import static com.hazelcast.cache.CacheTestSupport.createServerCachingProvider;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class CacheResourceTest
extends HazelcastTestSupport {
private TestHazelcastInstanceFactory factory;
@Before
public void init() {
    // two-member factory: tests start a second instance to force partition migration
    factory = new TestHazelcastInstanceFactory(2);
}
@After
public void tear() {
factory.shutdownAll();
}
@Test
public void testCloseableCacheLoader() throws InterruptedException {
CachingProvider provider =
createServerCachingProvider(factory.newHazelcastInstance());
CacheManager cacheManager = provider.getCacheManager();
CloseableCacheLoader loader = new CloseableCacheLoader();
Factory<CloseableCacheLoader> loaderFactory = FactoryBuilder.factoryOf(loader);
CompleteConfiguration<Object, Object> configuration =
new CacheConfig()
.setCacheLoaderFactory(loaderFactory)
.setReadThrough(true);
Cache<Object, Object> cache = cacheManager.createCache("test", configuration);
// trigger partition assignment
cache.get("key");
factory.newHazelcastInstance();
for (int i = 0; i < 1000; i++) {
cache.get(i);
LockSupport.parkNanos(1000);
}
assertFalse("CacheLoader should not be closed!", loader.closed);
}
private static class CloseableCacheLoader implements CacheLoader, Closeable, Serializable {
private volatile boolean closed = false;
@Override
public Object load(Object key) throws CacheLoaderException {
if (closed) {
throw new IllegalStateException();
}
return null;
}
@Override
public Map loadAll(Iterable keys) throws CacheLoaderException {
if (closed) {
throw new IllegalStateException();
}
return null;
}
@Override
public void close() throws IOException {
closed = true;
}
}
@Test
public void testCloseableCacheWriter() throws InterruptedException {
CachingProvider provider =
createServerCachingProvider(factory.newHazelcastInstance());
CacheManager cacheManager = provider.getCacheManager();
CloseableCacheWriter writer = new CloseableCacheWriter();
Factory<CloseableCacheWriter> writerFactory = FactoryBuilder.factoryOf(writer);
CompleteConfiguration<Object, Object> configuration =
new CacheConfig()
.setCacheWriterFactory(writerFactory)
.setWriteThrough(true);
Cache<Object, Object> cache = cacheManager.createCache("test", configuration);
// trigger partition assignment
cache.get("key");
factory.newHazelcastInstance();
for (int i = 0; i < 1000; i++) {
cache.put(i, i);
LockSupport.parkNanos(1000);
}
assertFalse("CacheWriter should not be closed!", writer.closed);
}
private static class CloseableCacheWriter implements CacheWriter, Closeable, Serializable {
private volatile boolean closed = false;
@Override
public void write(Cache.Entry entry) throws CacheWriterException {
if (closed) {
throw new IllegalStateException();
}
}
@Override
public void delete(Object key) throws CacheWriterException {
if (closed) {
throw new IllegalStateException();
}
}
@Override
public void deleteAll(Collection keys) throws CacheWriterException {
if (closed) {
throw new IllegalStateException();
}
}
@Override
public void writeAll(Collection collection) throws CacheWriterException {
if (closed) {
throw new IllegalStateException();
}
}
@Override
public void close() throws IOException {
closed = true;
}
}
@Test
public void testCloseableCacheListener() {
CachingProvider provider = createServerCachingProvider(factory.newHazelcastInstance());
CacheManager cacheManager = provider.getCacheManager();
CloseableListener listener = new CloseableListener();
Factory<CloseableListener> listenerFactory = FactoryBuilder.factoryOf(listener);
CompleteConfiguration<Object, Object> configuration =
new CacheConfig()
.addCacheEntryListenerConfiguration(
new MutableCacheEntryListenerConfiguration(listenerFactory, null, true, false));
Cache<Object, Object> cache = cacheManager.createCache("test", configuration);
cache.close();
assertTrue("CloseableListener.close() should be called when cache is closed!", listener.closed);
}
private static class CloseableListener implements CacheEntryCreatedListener, Closeable, Serializable {
private volatile boolean closed = false;
@Override
public void onCreated(Iterable iterable) throws CacheEntryListenerException {
if (closed) {
throw new IllegalStateException();
}
}
@Override
public void close() throws IOException {
closed = true;
}
}
}
| |
/*
* Copyright (c) 2007, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.sso.saml.admin;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.opensaml.saml1.core.NameIdentifier;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.model.SAMLSSOServiceProviderDO;
import org.wso2.carbon.identity.core.persistence.IdentityPersistenceManager;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.sso.saml.dto.SAMLSSOServiceProviderDTO;
import org.wso2.carbon.identity.sso.saml.dto.SAMLSSOServiceProviderInfoDTO;
import org.wso2.carbon.registry.core.Registry;
import org.wso2.carbon.registry.core.session.UserRegistry;
/**
* This class is used for managing SAML SSO providers. Adding, retrieving and removing service
* providers are supported here.
* In addition to that logic for generating key pairs for tenants except for tenant 0, is included
* here.
*/
public class SAMLSSOConfigAdmin {
private static Log log = LogFactory.getLog(SAMLSSOConfigAdmin.class);
private UserRegistry registry;
public SAMLSSOConfigAdmin(Registry userRegistry) {
registry = (UserRegistry) userRegistry;
}
/**
* Add a new service provider
*
* @param serviceProviderDTO service Provider DTO
* @return true if successful, false otherwise
* @throws IdentityException if fails to load the identity persistence manager
*/
public boolean addRelyingPartyServiceProvider(SAMLSSOServiceProviderDTO serviceProviderDTO) throws IdentityException {
SAMLSSOServiceProviderDO serviceProviderDO = new SAMLSSOServiceProviderDO();
if (serviceProviderDTO.getIssuer() == null || "".equals(serviceProviderDTO.getIssuer())) {
String message = "A value for the Issuer is mandatory";
log.error(message);
throw new IdentityException(message);
}
if (serviceProviderDTO.getIssuer().contains("@")) {
String message = "\'@\' is a reserved character. Cannot be used for Service Provider Entity ID";
log.error(message);
throw new IdentityException(message);
}
serviceProviderDO.setIssuer(serviceProviderDTO.getIssuer());
serviceProviderDO.setAssertionConsumerUrl(serviceProviderDTO.getAssertionConsumerUrl());
serviceProviderDO.setCertAlias(serviceProviderDTO.getCertAlias());
serviceProviderDO.setUseFullyQualifiedUsername(serviceProviderDTO.isUseFullyQualifiedUsername());
serviceProviderDO.setDoSingleLogout(serviceProviderDTO.isDoSingleLogout());
serviceProviderDO.setLoginPageURL(serviceProviderDTO.getLoginPageURL());
serviceProviderDO.setLogoutURL(serviceProviderDTO.getLogoutURL());
serviceProviderDO.setDoSignResponse(serviceProviderDTO.isDoSignResponse());
serviceProviderDO.setDoSignAssertions(serviceProviderDTO.isDoSignAssertions());
serviceProviderDO.setNameIdClaimUri(serviceProviderDTO.getNameIdClaimUri());
serviceProviderDO.setEnableAttributesByDefault(serviceProviderDTO.isEnableAttributesByDefault());
if (serviceProviderDTO.getNameIDFormat() == null) {
serviceProviderDTO.setNameIDFormat(NameIdentifier.EMAIL);
} else {
serviceProviderDTO.setNameIDFormat(serviceProviderDTO.getNameIDFormat().replace("/",
":"));
}
serviceProviderDO.setNameIDFormat(serviceProviderDTO.getNameIDFormat());
if (serviceProviderDTO.isEnableAttributeProfile()) {
String attributeConsumingIndex = serviceProviderDTO.getAttributeConsumingServiceIndex();
if (StringUtils.isNotEmpty(attributeConsumingIndex)) {
serviceProviderDO.setAttributeConsumingServiceIndex(attributeConsumingIndex);
} else {
serviceProviderDO.setAttributeConsumingServiceIndex(Integer.toString(IdentityUtil.getRandomInteger()));
}
} else {
serviceProviderDO.setAttributeConsumingServiceIndex("");
}
if (serviceProviderDTO.getRequestedAudiences() != null && serviceProviderDTO.getRequestedAudiences().length != 0) {
serviceProviderDO.setRequestedAudiences(serviceProviderDTO.getRequestedAudiences());
}
if (serviceProviderDTO.getRequestedRecipients() != null && serviceProviderDTO.getRequestedRecipients().length != 0) {
serviceProviderDO.setRequestedRecipients(serviceProviderDTO.getRequestedRecipients());
}
serviceProviderDO.setIdPInitSSOEnabled(serviceProviderDTO.isIdPInitSSOEnabled());
serviceProviderDO.setDoEnableEncryptedAssertion(serviceProviderDTO.isDoEnableEncryptedAssertion());
serviceProviderDO.setDoValidateSignatureInRequests(serviceProviderDTO.isDoValidateSignatureInRequests());
IdentityPersistenceManager persistenceManager = IdentityPersistenceManager
.getPersistanceManager();
try {
return persistenceManager.addServiceProvider(registry, serviceProviderDO);
} catch (IdentityException e) {
log.error("Error obtaining a registry for adding a new service provider", e);
throw new IdentityException("Error obtaining a registry for adding a new service provider", e);
}
}
/**
* Retrieve all the relying party service providers
*
* @return set of RP Service Providers + file path of pub. key of generated key pair
*/
public SAMLSSOServiceProviderInfoDTO getServiceProviders() throws IdentityException {
SAMLSSOServiceProviderDTO[] serviceProviders = null;
try {
IdentityPersistenceManager persistenceManager = IdentityPersistenceManager
.getPersistanceManager();
SAMLSSOServiceProviderDO[] providersSet = persistenceManager.
getServiceProviders(registry);
serviceProviders = new SAMLSSOServiceProviderDTO[providersSet.length];
for (int i = 0; i < providersSet.length; i++) {
SAMLSSOServiceProviderDO providerDO = providersSet[i];
SAMLSSOServiceProviderDTO providerDTO = new SAMLSSOServiceProviderDTO();
providerDTO.setIssuer(providerDO.getIssuer());
providerDTO.setAssertionConsumerUrl(providerDO.getAssertionConsumerUrl());
providerDTO.setCertAlias(providerDO.getCertAlias());
providerDTO.setAttributeConsumingServiceIndex(providerDO.getAttributeConsumingServiceIndex());
providerDTO.setUseFullyQualifiedUsername(providerDO.isUseFullyQualifiedUsername());
providerDTO.setDoSignResponse(providerDO.isDoSignResponse());
providerDTO.setDoSignAssertions(providerDO.isDoSignAssertions());
providerDTO.setDoSingleLogout(providerDO.isDoSingleLogout());
if (providerDO.getLoginPageURL() == null || "null".equals(providerDO.getLoginPageURL())) {
providerDTO.setLoginPageURL("");
} else {
providerDTO.setLoginPageURL(providerDO.getLoginPageURL());
}
if (providerDO.getLogoutURL() == null || "null".equals(providerDO.getLogoutURL())) {
providerDTO.setLogoutURL("");
} else {
providerDTO.setLogoutURL(providerDO.getLogoutURL());
}
providerDTO.setRequestedClaims(providerDO.getRequestedClaims());
providerDTO.setRequestedAudiences(providerDO.getRequestedAudiences());
providerDTO.setRequestedRecipients(providerDO.getRequestedRecipients());
providerDTO.setEnableAttributesByDefault(providerDO.isEnableAttributesByDefault());
providerDTO.setNameIdClaimUri(providerDO.getNameIdClaimUri());
providerDTO.setNameIDFormat(providerDO.getNameIDFormat());
if (providerDTO.getNameIDFormat() == null) {
providerDTO.setNameIDFormat(NameIdentifier.EMAIL);
}
providerDTO.setNameIDFormat(providerDTO.getNameIDFormat().replace(":", "/"));
providerDTO.setIdPInitSSOEnabled(providerDO.isIdPInitSSOEnabled());
providerDTO.setDoEnableEncryptedAssertion(providerDO.isDoEnableEncryptedAssertion());
providerDTO.setDoValidateSignatureInRequests(providerDO.isDoValidateSignatureInRequests());
serviceProviders[i] = providerDTO;
}
} catch (IdentityException e) {
log.error("Error obtaining a registry intance for reading service provider list", e);
throw new IdentityException("Error obtaining a registry intance for reading service provider list", e);
}
SAMLSSOServiceProviderInfoDTO serviceProviderInfoDTO = new SAMLSSOServiceProviderInfoDTO();
serviceProviderInfoDTO.setServiceProviders(serviceProviders);
//if it is tenant zero
if (registry.getTenantId() == 0) {
serviceProviderInfoDTO.setTenantZero(true);
}
return serviceProviderInfoDTO;
}
/**
* Remove an existing service provider.
*
* @param issuer issuer name
* @return true is successful
* @throws IdentityException
*/
public boolean removeServiceProvider(String issuer) throws IdentityException {
try {
IdentityPersistenceManager persistenceManager = IdentityPersistenceManager.getPersistanceManager();
return persistenceManager.removeServiceProvider(registry, issuer);
} catch (IdentityException e) {
log.error("Error removing a Service Provider");
throw new IdentityException("Error removing a Service Provider", e);
}
}
}
| |
// Copyright (c) 2013 Richard Long & HexBeerium
//
// Released under the MIT license ( http://opensource.org/licenses/MIT )
//
package jsonbroker.library.broker;
import jsonbroker.library.common.json.JsonArray;
import jsonbroker.library.common.json.JsonObject;
public class BrokerMessage {

    ////////////////////////////////////////////////////////////////////////////
    // kind of message: request, response, fault or one of the meta variants
    protected BrokerMessageType _messageType = BrokerMessageType.REQUEST;

    public BrokerMessageType getMessageType() {
        return _messageType;
    }

    public void setMessageType(BrokerMessageType messageType) {
        _messageType = messageType;
    }

    ////////////////////////////////////////////////////////////////////////////
    // transport meta-data; responses and faults carry the request's meta-data over
    protected JsonObject _metaData;

    public JsonObject getMetaData() {
        return _metaData;
    }

    ////////////////////////////////////////////////////////////////////////////
    // name of the target service
    protected String _serviceName;

    public String getServiceName() {
        return _serviceName;
    }

    public void setServiceName(String service) {
        _serviceName = service;
    }

    ///////////////////////////////////////////////////////////////////////
    // name of the method invoked on the service
    protected String _methodName;

    public String getMethodName() {
        return _methodName;
    }

    public void setMethodName(String methodName) {
        _methodName = methodName;
    }

    ///////////////////////////////////////////////////////////////////////
    // named (keyword) parameters of the call
    protected JsonObject _associativeParamaters;

    public JsonObject getAssociativeParamaters() {
        return _associativeParamaters;
    }

    public void setAssociativeParamaters(JsonObject associativeParamaters) {
        _associativeParamaters = associativeParamaters;
    }

    ///////////////////////////////////////////////////////////////////////
    // positional parameters of the call
    protected JsonArray _orderedParamaters;

    ///////////////////////////////////////////////////////////////////////
    /**
     * @return the positional parameters of the call
     */
    public JsonArray getOrderedParamaters() {
        return _orderedParamaters;
    }

    /**
     * @param paramaters the positional parameters of the call
     */
    public void setOrderedParamaters(JsonArray paramaters) {
        _orderedParamaters = paramaters;
    }

    ///////////////////////////////////////////////////////////////////////
    /** Creates an empty REQUEST message with empty meta-data and parameters. */
    public BrokerMessage() {
        _metaData = new JsonObject();
        _associativeParamaters = new JsonObject();
        _orderedParamaters = new JsonArray();
    }

    ///////////////////////////////////////////////////////////////////////
    /**
     * Deserializes a message from its wire format:
     * [0]=type identifier, [1]=meta-data, [2]=service name, [3]=major version,
     * [4]=minor version, [5]=method name, [6]=associative parameters,
     * [7]=ordered parameters (optional).
     */
    public BrokerMessage(JsonArray values) {

        String messageTypeIdentifier = values.getString(0);
        _messageType = BrokerMessageType.lookup(messageTypeIdentifier);

        _metaData = values.getJsonObject(1);
        _serviceName = values.getString(2);
        // int majorVersion = values.getInteger( 3 );
        // int minorVersion = values.getInteger( 4 );
        _methodName = values.getString(5);
        _associativeParamaters = values.getJsonObject(6);

        // ordered parameters are optional on the wire
        if (7 < values.size()) {
            _orderedParamaters = values.getJsonArray(7);
        } else {
            _orderedParamaters = new JsonArray(0);
        }
    }

    /** Builds a REQUEST for the given service method. */
    public static BrokerMessage buildRequest(String serviceName, String methodName) {
        BrokerMessage answer = new BrokerMessage();
        answer._messageType = BrokerMessageType.REQUEST;
        answer._serviceName = serviceName;
        answer._methodName = methodName;
        return answer;
    }

    /** Builds a META_REQUEST for the given service method. */
    public static BrokerMessage buildMetaRequest(String serviceName, String methodName) {
        BrokerMessage answer = new BrokerMessage();
        answer._messageType = BrokerMessageType.META_REQUEST;
        answer._serviceName = serviceName;
        answer._methodName = methodName;
        return answer;
    }

    /** Builds a FAULT answering the given request, serializing the throwable. */
    public static BrokerMessage buildFault(BrokerMessage request, Throwable t) {
        BrokerMessage answer = new BrokerMessage();
        answer._messageType = BrokerMessageType.FAULT;
        answer._metaData = request.getMetaData();
        answer._serviceName = request._serviceName;
        answer._methodName = request._methodName;
        answer._associativeParamaters = FaultSerializer.toJsonObject(t);
        answer._orderedParamaters = new JsonArray(0);
        return answer;
    }

    /** Builds an empty META_RESPONSE answering the given request. */
    public static BrokerMessage buildMetaResponse(BrokerMessage request) {
        BrokerMessage answer = new BrokerMessage();
        answer._messageType = BrokerMessageType.META_RESPONSE;
        answer._metaData = request.getMetaData();
        answer._serviceName = request._serviceName;
        answer._methodName = request._methodName;
        answer._associativeParamaters = new JsonObject();
        answer._orderedParamaters = new JsonArray(0);
        return answer;
    }

    /** Builds an empty RESPONSE answering the given request. */
    public static BrokerMessage buildResponse(BrokerMessage request) {
        BrokerMessage answer = new BrokerMessage();
        answer._messageType = BrokerMessageType.RESPONSE;
        answer._metaData = request.getMetaData();
        answer._serviceName = request._serviceName;
        answer._methodName = request._methodName;
        answer._associativeParamaters = new JsonObject();
        answer._orderedParamaters = new JsonArray(0);
        return answer;
    }

    /**
     * Serializes this message to its wire format; see {@link #BrokerMessage(JsonArray)}
     * for the element layout. The constants 1 and 0 are the protocol's
     * major/minor version.
     */
    public JsonArray toJsonArray() {
        // capacity fix: 8 elements are appended below (was sized 5)
        JsonArray answer = new JsonArray(8);
        answer.add(_messageType.getIdentifier());
        answer.add(_metaData);
        answer.add(_serviceName);
        answer.add(1); // major version
        answer.add(0); // minor version
        answer.add(_methodName);
        answer.add(_associativeParamaters);
        answer.add(_orderedParamaters);
        return answer;
    }

    /** Appends an integer to the ordered parameters. */
    public void addParameter(Integer parameter) {
        _orderedParamaters.add(parameter);
    }

    /** Appends a JSON object to the ordered parameters. */
    public void addParameter(JsonObject parameter) {
        _orderedParamaters.add(parameter);
    }

    /** Appends a JSON array to the ordered parameters. */
    public void addParameter(JsonArray parameter) {
        _orderedParamaters.add(parameter);
    }

    /** Appends an arbitrary value to the ordered parameters. */
    public void addParameter(Object parameter) {
        _orderedParamaters.add(parameter);
    }

    /** Appends a string to the ordered parameters. */
    public void addParameter(String parameter) {
        _orderedParamaters.add(parameter);
    }

    /** Records the expected response type in the message meta-data. */
    public void setResponseType(String type) {
        _metaData.put("responseType", type);
    }
}
| |
public class Gleitpunktzahl {

    /**
     * Update by
     *
     * @author Juergen Braeckle (braeckle@in.tum.de)
     * @author Sebastian Rettenberger (rettenbs@in.tum.de)
     * @since Oktober 22, 2014
     * @version 1.2
     *
     *        This class implements a simple binary floating point format with a
     *        configurable number of mantissa and exponent bits.
     */

    /********************/
    /* Member variables */
    /********************/

    /* sign, mantissa and exponent of the floating point number */
    public boolean vorzeichen; /* true = "-1" */
    public int exponent; // biased by expOffset
    public int mantisse; // normalized numbers carry the leading 1 in bit (sizeMantisse - 1)

    /*
     * Number of mantissa bits: once locked, it can no longer be changed.
     */
    private static int sizeMantisse = 32;
    private static boolean sizeMantisseFixed = false;

    /*
     * Number of exponent bits: once locked, it can no longer be changed.
     * Maximum size: 32
     */
    private static int sizeExponent = 8;
    private static boolean sizeExponentFixed = false;

    /*
     * Derived from the exponent width: largest biased exponent (reserved for
     * Inf/NaN) and the exponent bias.
     */
    private static int maxExponent = (int) Math.pow(2, sizeExponent) - 1;
    private static int expOffset = (int) Math.pow(2, sizeExponent - 1) - 1;

    /**
     * If the mantissa width is not locked yet, sets it to abm and locks it.
     */
    public static void setSizeMantisse(int abm) {
        /* accept the new width only while unlocked and abm > 0, then lock */
        if (!sizeMantisseFixed && (abm > 0)) {
            sizeMantisse = abm;
            sizeMantisseFixed = true;
        }
    }

    /**
     * If the exponent width is not locked yet, sets it to abe and locks it.
     * maxExponent and expOffset are recomputed accordingly.
     */
    public static void setSizeExponent(int abe) {
        if (!sizeExponentFixed && (abe > 0)) {
            sizeExponent = abe;
            sizeExponentFixed = true;
            maxExponent = (int) Math.pow(2, abe) - 1;
            expOffset = (int) Math.pow(2, abe - 1) - 1;
        }
    }

    /** Returns the number of mantissa bits. */
    public static int getSizeMantisse() {
        return sizeMantisse;
    }

    /** Returns the number of exponent bits. */
    public static int getSizeExponent() {
        return sizeExponent;
    }

    /**
     * Creates a floating point number without an initial value. If the bit
     * widths have not been set yet, the defaults are locked in.
     */
    Gleitpunktzahl() {
        sizeMantisseFixed = true;
        sizeExponentFixed = true;
    }

    /** Creates a copy of the number r. */
    Gleitpunktzahl(Gleitpunktzahl r) {
        /* copy sign */
        this.vorzeichen = r.vorzeichen;
        /* copy the exponent and mantissa fields from r */
        this.exponent = r.exponent;
        this.mantisse = r.mantisse;
    }

    /**
     * Creates a number representing the double value d. If the bit widths have
     * not been set yet, the defaults are locked in.
     */
    Gleitpunktzahl(double d) {
        this();
        this.setDouble(d);
    }

    /**
     * Sets this object to the representation of the double value d.
     */
    public void setDouble(double d) {
        /* handle the special values first */
        if (d == 0) {
            this.setNull();
            return;
        }
        if (Double.isInfinite(d)) {
            this.setInfinite(d < 0);
            return;
        }
        if (Double.isNaN(d)) {
            this.setNaN();
            return;
        }
        /* if d < 0: record the sign and continue with |d| */
        if (d < 0) {
            this.vorzeichen = true;
            d = -d;
        } else {
            this.vorzeichen = false;
        }
        /*
         * find the base-2 exponent of d; afterwards d lies in [1, 2)
         */
        int exp = 0;
        while (d >= 2) {
            d = d / 2;
            exp++;
        }
        while (d < 1) {
            d = 2 * d;
            exp--;
        } /* d in [1,2) */
        this.exponent = exp + expOffset;

        /*
         * extract the mantissa bits; one extra bit is computed for rounding
         */
        double rest = d;
        this.mantisse = 0;
        for (int i = 0; i <= sizeMantisse; i++) {
            this.mantisse <<= 1;
            if (rest >= 1) {
                rest = rest - 1;
                this.mantisse |= 1;
            }
            rest = 2 * rest;
        }
        this.exponent -= 1; /* the mantissa is one bit too wide! */

        /*
         * normalisiere performs the rounding
         */
        this.normalisiere();
    }

    /** Returns a string representation of this object. */
    public String toString() {
        if (this.isNaN())
            return "NaN";
        if (this.isNull())
            return "0";

        StringBuffer s = new StringBuffer();
        if (this.vorzeichen)
            s.append('-');
        if (this.isInfinite())
            s.append("Inf");
        else {
            // print the mantissa bits, inserting the binary point after the leading bit
            for (int i = 32 - Integer.numberOfLeadingZeros(this.mantisse) - 1;
                 i >= 0; i--) {
                if (i == sizeMantisse - 2)
                    s.append(',');
                if (((this.mantisse >> i) & 1) == 1)
                    s.append('1');
                else
                    s.append('0');
            }
            s.append(" * 2^(");
            s.append(this.exponent);
            s.append("-");
            s.append(expOffset);
            s.append(")");
        }
        return s.toString();
    }

    /** Computes the double value of this object. */
    public double toDouble() {
        /*
         * the maximal exponent marks one of the special values
         */
        if (this.exponent == maxExponent) {
            /*
             * a zero mantissa means (signed) infinity
             */
            if (this.mantisse == 0) {
                if (this.vorzeichen)
                    return Double.NEGATIVE_INFINITY;
                else
                    return Double.POSITIVE_INFINITY;
            }
            /* otherwise the value is NaN */
            else
                return Double.NaN;
        }

        double m = this.mantisse;
        if (this.vorzeichen)
            m *= (-1);
        // the mantissa carries (sizeMantisse - 1) fractional bits
        return m
                * Math.pow(2, (this.exponent - expOffset)
                        - (sizeMantisse - 1));
    }

    /**
     * Query the special values
     */

    /** Returns true iff this number is the canonical zero. */
    public boolean isNull() {
        return (!this.vorzeichen && this.mantisse == 0 && this.exponent == 0);
    }

    /**
     * Returns true iff this number is the Not-a-Number representation.
     */
    public boolean isNaN() {
        return (this.mantisse != 0 && this.exponent == maxExponent);
    }

    /** Returns true iff the absolute value of this number is infinite. */
    public boolean isInfinite() {
        return (this.mantisse == 0 && this.exponent == maxExponent);
    }

    /**
     * Compares the absolute value of this object with the absolute value of r.
     */
    public int compareAbsTo(Gleitpunktzahl r) {
        /*
         * returns >= 1 if |this| > |r|
         *          0   if |this| = |r|
         *          <= -1 if |this| < |r|
         */
        if (this.isNaN() && r.isNaN()) return 0;
        /* compare exponents first */
        int expVergleich = this.exponent - r.exponent;
        if (expVergleich != 0)
            return expVergleich;
        /* equal exponents: compare the mantissas (both non-negative) */
        return this.mantisse - r.mantisse;
    }

    /**
     * Normalizes and rounds this object to the form r = (-1)^sign * 1,r_t-1
     * r_t-2 ... r_1 r_0 * 2^exponent. Zero is normalized to (-1)^0 * 0,00...00
     * * 2^0. The current mantissa may be wider or narrower than sizeMantisse
     * bits; it is padded or rounded (round half up) to exactly sizeMantisse
     * bits.
     *
     * Example: with 3 mantissa bits, 10.11 * 2^-1 becomes 1.10 * 2^0
     */
    public void normalisiere() {
        // special values are already canonical; without the NaN guard,
        // shrinkMantisse() would turn a NaN (exponent == maxExponent) into Inf
        if (isNull() || isNaN() || isInfinite()) return;
        // a mantissa that vanished (e.g. cancellation in add/sub) is normalized
        // to the canonical zero, as documented above
        if (mantisse == 0) {
            setNull();
            return;
        }
        shrinkMantisse();
        expandMantisse();
    }

    /**
     * Shrinks the mantissa (rounding half up) and increases the exponent
     * accordingly. If the exponent reaches its maximum, the value is set to
     * infinity.
     */
    public void shrinkMantisse() {
        int maxMantisse = (int) Math.pow(2, sizeMantisse) - 1;
        int counter = 0, tmp = mantisse;
        while (mantisse > maxMantisse) {
            mantisse >>= 1;
            exponent++;
            counter++;
        }
        if (counter > 0) {
            // round half up: add the most significant bit that was shifted out
            // (guarded by counter > 0; the old code shifted by -1 here, which
            // Java masks to a 31-bit shift)
            tmp >>= counter - 1;
            mantisse += (tmp & 1);
            if (mantisse > maxMantisse)
                shrinkMantisse();
        }
        if (exponent >= maxExponent)
            setInfinite(vorzeichen);
    }

    /**
     * Expands the mantissa until its leading bit is set, decreasing the
     * exponent accordingly.
     */
    public void expandMantisse() {
        int minMantisse = (int) Math.pow(2, sizeMantisse - 1);
        // NOTE(review): the exponent may underflow below 0 here; denormals are
        // not handled by this class
        while (mantisse != 0 && mantisse < minMantisse) {
            mantisse <<= 1;
            exponent--;
        }
    }

    /**
     * Denormalizes the number with the larger absolute value so that the
     * exponents of a and b are equal; its mantissa is widened accordingly.
     * Needed by add and sub. NOTE: this mutates its arguments.
     */
    public static void denormalisiere(Gleitpunktzahl a, Gleitpunktzahl b) {
        int compareResult = a.compareAbsTo(b);
        Gleitpunktzahl big = compareResult >= 0 ? a : b;
        Gleitpunktzahl small = compareResult >= 0 ? b : a;
        while (small.exponent < big.exponent) {
            big.exponent--;
            big.mantisse *= 2;
        }
    }

    /**
     * Adds this object and r. The number with the larger absolute value is
     * denormalized first so both mantissas align. The result is stored in a
     * new, normalized object; the operands are re-normalized before returning.
     */
    public Gleitpunktzahl add(Gleitpunktzahl r) {
        // Inf + (-Inf) is undefined -> NaN
        if (this.isInfinite() && r.isInfinite() && this.vorzeichen != r.vorzeichen) {
            r = new Gleitpunktzahl();
            r.setNaN();
            return r;
        }
        // 0 + r = r, and x + (+/-Inf) = +/-Inf
        if (this.isNull() || r.isInfinite()) return new Gleitpunktzahl(r);
        if (r.isNull() || this.isInfinite()) return new Gleitpunktzahl(this);

        denormalisiere(this, r);
        Gleitpunktzahl result = new Gleitpunktzahl();
        result.vorzeichen = this.vorzeichen;
        result.exponent = this.exponent;
        if (this.vorzeichen == r.vorzeichen) {
            result.mantisse = this.mantisse + r.mantisse;
        } else {
            // differing signs: subtract magnitudes, sign follows the larger one
            int res = this.mantisse - r.mantisse;
            result.mantisse = Math.abs(res);
            if (res < 0) result.vorzeichen = !result.vorzeichen;
        }
        // restore the operands' normal form (denormalisiere mutated them)
        this.normalisiere();
        r.normalisiere();
        result.normalisiere();
        return result;
    }

    /**
     * Subtracts r from this object by adding a sign-flipped copy of r. The
     * result is stored in a new, normalized object.
     */
    public Gleitpunktzahl sub(Gleitpunktzahl r) {
        Gleitpunktzahl z = new Gleitpunktzahl(r);
        z.vorzeichen = !r.vorzeichen;
        return add(z);
    }

    /**
     * Sets this number to the special value 0.
     */
    public void setNull() {
        this.vorzeichen = false;
        this.exponent = 0;
        this.mantisse = 0;
    }

    /**
     * Sets this number to the special value +/- infinity.
     */
    public void setInfinite(boolean vorzeichen) {
        this.vorzeichen = vorzeichen;
        this.exponent = maxExponent;
        this.mantisse = 0;
    }

    /**
     * Sets this number to the special value NaN.
     */
    public void setNaN() {
        this.vorzeichen = false;
        this.exponent = maxExponent;
        this.mantisse = 1;
    }
}
| |
package jadx.core.dex.visitors.typeinference;
import java.util.Collections;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jadx.NotYetImplementedExtension;
import jadx.api.JadxArgs;
import jadx.core.dex.instructions.args.ArgType;
import jadx.core.dex.instructions.args.ArgType.WildcardBound;
import jadx.core.dex.nodes.RootNode;
import static jadx.core.dex.instructions.args.ArgType.BOOLEAN;
import static jadx.core.dex.instructions.args.ArgType.BYTE;
import static jadx.core.dex.instructions.args.ArgType.CHAR;
import static jadx.core.dex.instructions.args.ArgType.CLASS;
import static jadx.core.dex.instructions.args.ArgType.EXCEPTION;
import static jadx.core.dex.instructions.args.ArgType.INT;
import static jadx.core.dex.instructions.args.ArgType.NARROW;
import static jadx.core.dex.instructions.args.ArgType.NARROW_INTEGRAL;
import static jadx.core.dex.instructions.args.ArgType.OBJECT;
import static jadx.core.dex.instructions.args.ArgType.SHORT;
import static jadx.core.dex.instructions.args.ArgType.STRING;
import static jadx.core.dex.instructions.args.ArgType.THROWABLE;
import static jadx.core.dex.instructions.args.ArgType.UNKNOWN;
import static jadx.core.dex.instructions.args.ArgType.UNKNOWN_ARRAY;
import static jadx.core.dex.instructions.args.ArgType.UNKNOWN_OBJECT;
import static jadx.core.dex.instructions.args.ArgType.array;
import static jadx.core.dex.instructions.args.ArgType.generic;
import static jadx.core.dex.instructions.args.ArgType.genericType;
import static jadx.core.dex.instructions.args.ArgType.object;
import static jadx.core.dex.instructions.args.ArgType.wildcard;
import static org.assertj.core.api.Assertions.assertThat;
@ExtendWith(NotYetImplementedExtension.class)
public class TypeCompareTest {
private static final Logger LOG = LoggerFactory.getLogger(TypeCompareTest.class);
private TypeCompare compare;
@BeforeEach
public void init() {
    // build a minimal root node (no input classes, default classpath) for the comparator
    JadxArgs jadxArgs = new JadxArgs();
    RootNode rootNode = new RootNode(jadxArgs);
    rootNode.loadClasses(Collections.emptyList());
    rootNode.initClassPath();
    compare = new TypeCompare(rootNode);
}
@Test
public void compareTypes() {
// a concrete primitive is narrower than the fully unknown type
firstIsNarrow(INT, UNKNOWN);
// an array of unknown elements is still narrower than both unknown and narrow
firstIsNarrow(array(UNKNOWN), UNKNOWN);
firstIsNarrow(array(UNKNOWN), NARROW);
}
@Test
public void comparePrimitives() {
// primitives never unify with object types
check(INT, UNKNOWN_OBJECT, TypeCompareEnum.CONFLICT);
check(INT, OBJECT, TypeCompareEnum.CONFLICT);
// int can hold char and short values
check(INT, CHAR, TypeCompareEnum.WIDER);
check(INT, SHORT, TypeCompareEnum.WIDER);
// boolean is incompatible with the numeric primitives
check(BOOLEAN, INT, TypeCompareEnum.CONFLICT);
check(BOOLEAN, CHAR, TypeCompareEnum.CONFLICT);
// char is unsigned, so it conflicts with the signed byte/short
check(CHAR, BYTE, TypeCompareEnum.CONFLICT);
check(CHAR, SHORT, TypeCompareEnum.CONFLICT);
firstIsNarrow(CHAR, NARROW_INTEGRAL);
firstIsNarrow(array(CHAR), UNKNOWN_OBJECT);
}
@Test
public void compareArrays() {
    // any array is narrower than plain Object / less specific array types
    firstIsNarrow(array(CHAR), OBJECT);
    firstIsNarrow(array(CHAR), array(UNKNOWN));
    firstIsNarrow(array(OBJECT), OBJECT);
    firstIsNarrow(array(OBJECT), array(UNKNOWN_OBJECT));
    firstIsNarrow(array(STRING), array(UNKNOWN_OBJECT));
    firstIsNarrow(array(STRING), array(OBJECT));
    firstIsNarrow(UNKNOWN_ARRAY, OBJECT);
    firstIsNarrow(array(BYTE), OBJECT);
    firstIsNarrow(array(array(BYTE)), array(OBJECT));

    // primitive element arrays do not relate to object element arrays
    check(array(OBJECT), array(INT), TypeCompareEnum.CONFLICT);

    ArgType boxedInteger = object("java.lang.Integer");
    check(array(OBJECT), array(boxedInteger), TypeCompareEnum.WIDER);
    check(array(INT), array(boxedInteger), TypeCompareEnum.CONFLICT);
    check(array(INT), array(INT), TypeCompareEnum.EQUAL);

    // generic element info makes an array narrower/wider "by generic"
    ArgType wildcardedClass = generic(CLASS, wildcard());
    check(array(wildcardedClass), array(CLASS), TypeCompareEnum.NARROW_BY_GENERIC);
    check(array(CLASS), array(wildcardedClass), TypeCompareEnum.WIDER_BY_GENERIC);
}
@Test
public void compareGenerics() {
    ArgType mapType = object("java.util.Map");
    ArgType setType = object("java.util.Set");
    ArgType kVar = genericType("K");
    ArgType vVar = genericType("V");

    // the raw type is wider ("by generic") than its parameterized form
    ArgType genericMap = ArgType.generic(mapType.getObject(), kVar, vVar);
    check(mapType, genericMap, TypeCompareEnum.WIDER_BY_GENERIC);
    // unrelated classes conflict
    check(mapType, setType, TypeCompareEnum.CONFLICT);

    ArgType genericSet = ArgType.generic(setType.getObject(), vVar);
    ArgType wildcardSet = ArgType.generic(setType.getObject(), ArgType.wildcard());
    // Set<?> vs Set<V> cannot be ordered
    check(wildcardSet, genericSet, TypeCompareEnum.CONFLICT);
    check(wildcardSet, setType, TypeCompareEnum.NARROW_BY_GENERIC);
    // TODO implement compare for wildcard with bounds
}
@Test
public void compareWildCards() {
ArgType clsWildcard = generic(CLASS.getObject(), wildcard());
check(clsWildcard, CLASS, TypeCompareEnum.NARROW_BY_GENERIC);
ArgType clsExtendedWildcard = generic(CLASS.getObject(), wildcard(STRING, WildcardBound.EXTENDS));
check(clsWildcard, clsExtendedWildcard, TypeCompareEnum.WIDER);
ArgType listWildcard = generic(CLASS.getObject(), wildcard(object("java.util.List"), WildcardBound.EXTENDS));
ArgType collWildcard = generic(CLASS.getObject(), wildcard(object("java.util.Collection"), WildcardBound.EXTENDS));
check(listWildcard, collWildcard, TypeCompareEnum.NARROW);
ArgType collSuperWildcard = generic(CLASS.getObject(), wildcard(object("java.util.Collection"), WildcardBound.SUPER));
check(collSuperWildcard, listWildcard, TypeCompareEnum.CONFLICT);
}
@Test
public void compareGenericTypes() {
ArgType vType = genericType("V");
check(vType, OBJECT, TypeCompareEnum.NARROW);
check(vType, STRING, TypeCompareEnum.CONFLICT);
ArgType rType = genericType("R");
check(vType, rType, TypeCompareEnum.CONFLICT);
check(vType, vType, TypeCompareEnum.EQUAL);
ArgType tType = genericType("T");
ArgType tStringType = genericType("T", STRING);
check(tStringType, STRING, TypeCompareEnum.NARROW);
check(tStringType, OBJECT, TypeCompareEnum.NARROW);
check(tStringType, tType, TypeCompareEnum.NARROW);
ArgType tObjType = genericType("T", OBJECT);
check(tObjType, OBJECT, TypeCompareEnum.NARROW);
check(tObjType, tType, TypeCompareEnum.EQUAL);
check(tStringType, tObjType, TypeCompareEnum.NARROW);
}
@Test
public void compareGenericTypes2() {
ArgType npeType = object("java.lang.NullPointerException");
// check clsp graph
check(npeType, THROWABLE, TypeCompareEnum.NARROW);
check(npeType, EXCEPTION, TypeCompareEnum.NARROW);
check(EXCEPTION, THROWABLE, TypeCompareEnum.NARROW);
ArgType typeVar = genericType("T", EXCEPTION); // T extends Exception
// target checks
check(THROWABLE, typeVar, TypeCompareEnum.WIDER);
check(EXCEPTION, typeVar, TypeCompareEnum.WIDER);
check(npeType, typeVar, TypeCompareEnum.NARROW);
}
@Test
public void compareOuterGenerics() {
ArgType hashMapType = object("java.util.HashMap");
ArgType innerEntrySetType = object("EntrySet");
ArgType firstInstance = ArgType.outerGeneric(generic(hashMapType, STRING, STRING), innerEntrySetType);
ArgType secondInstance = ArgType.outerGeneric(generic(hashMapType, OBJECT, OBJECT), innerEntrySetType);
check(firstInstance, secondInstance, TypeCompareEnum.NARROW);
}
private void firstIsNarrow(ArgType first, ArgType second) {
check(first, second, TypeCompareEnum.NARROW);
}
private void check(ArgType first, ArgType second, TypeCompareEnum expectedResult) {
LOG.debug("Compare: '{}' and '{}', expect: '{}'", first, second, expectedResult);
assertThat(compare.compareTypes(first, second))
.as("Compare '%s' and '%s'", first, second)
.isEqualTo(expectedResult);
assertThat(compare.compareTypes(second, first))
.as("Compare '%s' and '%s'", second, first)
.isEqualTo(expectedResult.invert());
}
}
| |
/**
*/
package options.presentation;
import java.util.ArrayList;
import java.util.Collection;
import org.eclipse.emf.common.ui.viewer.IViewerProvider;
import org.eclipse.emf.edit.domain.EditingDomain;
import org.eclipse.emf.edit.domain.IEditingDomainProvider;
import org.eclipse.emf.edit.ui.action.ControlAction;
import org.eclipse.emf.edit.ui.action.CreateChildAction;
import org.eclipse.emf.edit.ui.action.CreateSiblingAction;
import org.eclipse.emf.edit.ui.action.EditingDomainActionBarContributor;
import org.eclipse.emf.edit.ui.action.LoadResourceAction;
import org.eclipse.emf.edit.ui.action.ValidateAction;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.ActionContributionItem;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.action.IContributionItem;
import org.eclipse.jface.action.IContributionManager;
import org.eclipse.jface.action.IMenuListener;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.IToolBarManager;
import org.eclipse.jface.action.MenuManager;
import org.eclipse.jface.action.Separator;
import org.eclipse.jface.action.SubContributionItem;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.ISelectionProvider;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.PartInitException;
/**
 * This is the action bar contributor for the Options model editor.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public class OptionsActionBarContributor
extends EditingDomainActionBarContributor
implements ISelectionChangedListener {
/**
 * This keeps track of the active editor.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected IEditorPart activeEditorPart;
/**
 * This keeps track of the current selection provider.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected ISelectionProvider selectionProvider;
/**
 * This action opens the Properties view.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected IAction showPropertiesViewAction =
new Action(OptionsEditorPlugin.INSTANCE.getString("_UI_ShowPropertiesView_menu_item")) {
@Override
public void run() {
try {
getPage().showView("org.eclipse.ui.views.PropertySheet");
}
catch (PartInitException exception) {
OptionsEditorPlugin.INSTANCE.log(exception);
}
}
};
/**
 * This action refreshes the viewer of the current editor if the editor
 * implements {@link org.eclipse.emf.common.ui.viewer.IViewerProvider}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected IAction refreshViewerAction =
new Action(OptionsEditorPlugin.INSTANCE.getString("_UI_RefreshViewer_menu_item")) {
@Override
public boolean isEnabled() {
return activeEditorPart instanceof IViewerProvider;
}
@Override
public void run() {
if (activeEditorPart instanceof IViewerProvider) {
Viewer viewer = ((IViewerProvider)activeEditorPart).getViewer();
if (viewer != null) {
viewer.refresh();
}
}
}
};
/**
 * This will contain one {@link org.eclipse.emf.edit.ui.action.CreateChildAction} corresponding to each descriptor
 * generated for the current selection by the item provider.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Collection<IAction> createChildActions;
/**
 * This is the menu manager into which menu contribution items should be added for CreateChild actions.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected IMenuManager createChildMenuManager;
/**
 * This will contain one {@link org.eclipse.emf.edit.ui.action.CreateSiblingAction} corresponding to each descriptor
 * generated for the current selection by the item provider.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Collection<IAction> createSiblingActions;
/**
 * This is the menu manager into which menu contribution items should be added for CreateSibling actions.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected IMenuManager createSiblingMenuManager;
/**
 * This creates an instance of the contributor.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public OptionsActionBarContributor() {
super(ADDITIONS_LAST_STYLE);
loadResourceAction = new LoadResourceAction();
validateAction = new ValidateAction();
controlAction = new ControlAction();
}
/**
 * This adds Separators for editor additions to the tool bar.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void contributeToToolBar(IToolBarManager toolBarManager) {
toolBarManager.add(new Separator("options-settings"));
toolBarManager.add(new Separator("options-additions"));
}
/**
 * This adds to the menu bar a menu and some separators for editor additions,
 * as well as the sub-menus for object creation items.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void contributeToMenu(IMenuManager menuManager) {
super.contributeToMenu(menuManager);
IMenuManager submenuManager = new MenuManager(OptionsEditorPlugin.INSTANCE.getString("_UI_OptionsEditor_menu"), "optionsMenuID");
menuManager.insertAfter("additions", submenuManager);
submenuManager.add(new Separator("settings"));
submenuManager.add(new Separator("actions"));
submenuManager.add(new Separator("additions"));
submenuManager.add(new Separator("additions-end"));
// Prepare for CreateChild item addition or removal.
//
createChildMenuManager = new MenuManager(OptionsEditorPlugin.INSTANCE.getString("_UI_CreateChild_menu_item"));
submenuManager.insertBefore("additions", createChildMenuManager);
// Prepare for CreateSibling item addition or removal.
//
createSiblingMenuManager = new MenuManager(OptionsEditorPlugin.INSTANCE.getString("_UI_CreateSibling_menu_item"));
submenuManager.insertBefore("additions", createSiblingMenuManager);
// Force an update because Eclipse hides empty menus now.
//
submenuManager.addMenuListener
(new IMenuListener() {
public void menuAboutToShow(IMenuManager menuManager) {
menuManager.updateAll(true);
}
});
addGlobalActions(submenuManager);
}
/**
 * When the active editor changes, this remembers the change and registers with it as a selection provider.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void setActiveEditor(IEditorPart part) {
super.setActiveEditor(part);
activeEditorPart = part;
// Switch to the new selection provider.
//
if (selectionProvider != null) {
selectionProvider.removeSelectionChangedListener(this);
}
if (part == null) {
selectionProvider = null;
}
else {
selectionProvider = part.getSite().getSelectionProvider();
selectionProvider.addSelectionChangedListener(this);
// Fake a selection changed event to update the menus.
//
if (selectionProvider.getSelection() != null) {
selectionChanged(new SelectionChangedEvent(selectionProvider, selectionProvider.getSelection()));
}
}
}
/**
 * This implements {@link org.eclipse.jface.viewers.ISelectionChangedListener},
 * handling {@link org.eclipse.jface.viewers.SelectionChangedEvent}s by querying for the children and siblings
 * that can be added to the selected object and updating the menus accordingly.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void selectionChanged(SelectionChangedEvent event) {
// Remove any menu items for old selection.
//
if (createChildMenuManager != null) {
depopulateManager(createChildMenuManager, createChildActions);
}
if (createSiblingMenuManager != null) {
depopulateManager(createSiblingMenuManager, createSiblingActions);
}
// Query the new selection for appropriate new child/sibling descriptors
//
Collection<?> newChildDescriptors = null;
Collection<?> newSiblingDescriptors = null;
ISelection selection = event.getSelection();
if (selection instanceof IStructuredSelection && ((IStructuredSelection)selection).size() == 1) {
Object object = ((IStructuredSelection)selection).getFirstElement();
// NOTE(review): generated code assumes the active editor implements
// IEditingDomainProvider; true for the generated Options editor.
EditingDomain domain = ((IEditingDomainProvider)activeEditorPart).getEditingDomain();
newChildDescriptors = domain.getNewChildDescriptors(object, null);
newSiblingDescriptors = domain.getNewChildDescriptors(null, object);
}
// Generate actions for selection; populate and redraw the menus.
//
createChildActions = generateCreateChildActions(newChildDescriptors, selection);
createSiblingActions = generateCreateSiblingActions(newSiblingDescriptors, selection);
if (createChildMenuManager != null) {
populateManager(createChildMenuManager, createChildActions, null);
createChildMenuManager.update(true);
}
if (createSiblingMenuManager != null) {
populateManager(createSiblingMenuManager, createSiblingActions, null);
createSiblingMenuManager.update(true);
}
}
/**
 * This generates a {@link org.eclipse.emf.edit.ui.action.CreateChildAction} for each object in <code>descriptors</code>,
 * and returns the collection of these actions.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Collection<IAction> generateCreateChildActions(Collection<?> descriptors, ISelection selection) {
Collection<IAction> actions = new ArrayList<IAction>();
if (descriptors != null) {
for (Object descriptor : descriptors) {
actions.add(new CreateChildAction(activeEditorPart, selection, descriptor));
}
}
return actions;
}
/**
 * This generates a {@link org.eclipse.emf.edit.ui.action.CreateSiblingAction} for each object in <code>descriptors</code>,
 * and returns the collection of these actions.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Collection<IAction> generateCreateSiblingActions(Collection<?> descriptors, ISelection selection) {
Collection<IAction> actions = new ArrayList<IAction>();
if (descriptors != null) {
for (Object descriptor : descriptors) {
actions.add(new CreateSiblingAction(activeEditorPart, selection, descriptor));
}
}
return actions;
}
/**
 * This populates the specified <code>manager</code> with {@link org.eclipse.jface.action.ActionContributionItem}s
 * based on the {@link org.eclipse.jface.action.IAction}s contained in the <code>actions</code> collection,
 * by inserting them before the specified contribution item <code>contributionID</code>.
 * If <code>contributionID</code> is <code>null</code>, they are simply added.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void populateManager(IContributionManager manager, Collection<? extends IAction> actions, String contributionID) {
if (actions != null) {
for (IAction action : actions) {
if (contributionID != null) {
manager.insertBefore(contributionID, action);
}
else {
manager.add(action);
}
}
}
}
/**
 * This removes from the specified <code>manager</code> all {@link org.eclipse.jface.action.ActionContributionItem}s
 * based on the {@link org.eclipse.jface.action.IAction}s contained in the <code>actions</code> collection.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void depopulateManager(IContributionManager manager, Collection<? extends IAction> actions) {
if (actions != null) {
IContributionItem[] items = manager.getItems();
for (int i = 0; i < items.length; i++) {
// Look into SubContributionItems
//
IContributionItem contributionItem = items[i];
while (contributionItem instanceof SubContributionItem) {
contributionItem = ((SubContributionItem)contributionItem).getInnerItem();
}
// Delete the ActionContributionItems with matching action.
//
if (contributionItem instanceof ActionContributionItem) {
IAction action = ((ActionContributionItem)contributionItem).getAction();
if (actions.contains(action)) {
manager.remove(contributionItem);
}
}
}
}
}
/**
 * This populates the pop-up menu before it appears.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void menuAboutToShow(IMenuManager menuManager) {
super.menuAboutToShow(menuManager);
MenuManager submenuManager = null;
submenuManager = new MenuManager(OptionsEditorPlugin.INSTANCE.getString("_UI_CreateChild_menu_item"));
populateManager(submenuManager, createChildActions, null);
menuManager.insertBefore("edit", submenuManager);
submenuManager = new MenuManager(OptionsEditorPlugin.INSTANCE.getString("_UI_CreateSibling_menu_item"));
populateManager(submenuManager, createSiblingActions, null);
menuManager.insertBefore("edit", submenuManager);
}
/**
 * This inserts global actions before the "additions-end" separator.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected void addGlobalActions(IMenuManager menuManager) {
menuManager.insertAfter("additions-end", new Separator("ui-actions"));
menuManager.insertAfter("ui-actions", showPropertiesViewAction);
// Re-evaluate enablement: the action computes it from the active editor.
refreshViewerAction.setEnabled(refreshViewerAction.isEnabled());
menuManager.insertAfter("ui-actions", refreshViewerAction);
super.addGlobalActions(menuManager);
}
/**
 * This ensures that a delete action will clean up all references to deleted objects.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected boolean removeAllReferencesOnDelete() {
return true;
}
}
| |
/*
* Copyright 1999-2019 Seata.io Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.seata.config;
import io.seata.common.exception.ShouldNeverHappenException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
 * The type Config future: holds the asynchronous result of a configuration
 * operation (get / put / putIfAbsent / remove). {@link #get()} blocks up to a
 * timeout and falls back to a default value instead of blocking forever.
 *
 * @author slievrly
 */
public class ConfigFuture {
    private static final Logger LOGGER = LoggerFactory.getLogger(ConfigFuture.class);
    /** Default operation timeout in milliseconds. */
    private static final long DEFAULT_CONFIG_TIMEOUT = 5 * 1000;
    private long timeoutMills;
    // Creation time; used by isTimeout() and for logging elapsed time.
    private long start = System.currentTimeMillis();
    private String dataId;
    private String content;
    private ConfigOperation operation;
    // Underlying future completed by setResult(Object).
    private transient CompletableFuture<Object> origin = new CompletableFuture<>();

    /**
     * Instantiates a new Config future with the default timeout.
     *
     * @param dataId    the data id
     * @param content   the content
     * @param operation the operation
     */
    public ConfigFuture(String dataId, String content, ConfigOperation operation) {
        this(dataId, content, operation, DEFAULT_CONFIG_TIMEOUT);
    }

    /**
     * Instantiates a new Config future.
     *
     * @param dataId       the data id
     * @param content      the content
     * @param operation    the operation
     * @param timeoutMills the timeout mills
     */
    public ConfigFuture(String dataId, String content, ConfigOperation operation, long timeoutMills) {
        this.dataId = dataId;
        this.content = content;
        this.operation = operation;
        this.timeoutMills = timeoutMills;
    }

    /**
     * Whether the configured timeout has elapsed since this future was created.
     *
     * @return {@code true} if more than {@code timeoutMills} ms have passed
     */
    public boolean isTimeout() {
        return System.currentTimeMillis() - start > timeoutMills;
    }

    /**
     * Get object, waiting up to the configured timeout.
     *
     * @return the result, or a fallback value on timeout/interrupt
     */
    public Object get() {
        return get(this.timeoutMills, TimeUnit.MILLISECONDS);
    }

    /**
     * Get object, waiting up to the given timeout.
     *
     * @param timeout the timeout
     * @param unit    the unit
     * @return the operation result; on timeout or interrupt, {@code content}
     *         for GET operations and {@code Boolean.FALSE} otherwise
     */
    public Object get(long timeout, TimeUnit unit) {
        // Remember the effective timeout so isTimeout() stays consistent with this call.
        this.timeoutMills = unit.toMillis(timeout);
        Object result;
        try {
            result = origin.get(timeout, unit);
        } catch (ExecutionException e) {
            // setResult() only ever calls complete(), never completeExceptionally(),
            // so an execution failure is a programming error.
            throw new ShouldNeverHappenException("Should not get results in a multi-threaded environment", e);
        } catch (TimeoutException e) {
            LOGGER.error("config operation timeout,cost:{} ms,op:{},dataId:{}", System.currentTimeMillis() - start, operation.name(), dataId);
            return getFailResult();
        } catch (InterruptedException exx) {
            LOGGER.error("config operate interrupted,error:{}", exx.getMessage(), exx);
            // Restore the interrupt status so callers can observe the interruption.
            Thread.currentThread().interrupt();
            return getFailResult();
        }
        if (operation == ConfigOperation.GET) {
            // A GET completed with null falls back to the default content.
            return result == null ? content : result;
        } else {
            // Mutating operations report success as a Boolean; null means failure.
            return result == null ? Boolean.FALSE : result;
        }
    }

    /**
     * Fallback value when the operation failed or timed out:
     * the default {@code content} for GET, {@code Boolean.FALSE} otherwise.
     */
    private Object getFailResult() {
        if (operation == ConfigOperation.GET) {
            return content;
        } else {
            return Boolean.FALSE;
        }
    }

    /**
     * Sets result and unblocks any waiter in {@link #get()}.
     *
     * @param result the result
     */
    public void setResult(Object result) {
        origin.complete(result);
    }

    /**
     * Gets data id.
     *
     * @return the data id
     */
    public String getDataId() {
        return dataId;
    }

    /**
     * Sets data id.
     *
     * @param dataId the data id
     */
    public void setDataId(String dataId) {
        this.dataId = dataId;
    }

    /**
     * Gets content.
     *
     * @return the content
     */
    public String getContent() {
        return content;
    }

    /**
     * Sets content.
     *
     * @param content the content
     */
    public void setContent(String content) {
        this.content = content;
    }

    /**
     * Gets operation.
     *
     * @return the operation
     */
    public ConfigOperation getOperation() {
        return operation;
    }

    /**
     * Sets operation.
     *
     * @param operation the operation
     */
    public void setOperation(ConfigOperation operation) {
        this.operation = operation;
    }

    /**
     * The enum Config operation.
     */
    public enum ConfigOperation {
        /**
         * Get config operation.
         */
        GET,
        /**
         * Put config operation.
         */
        PUT,
        /**
         * Putifabsent config operation.
         */
        PUTIFABSENT,
        /**
         * Remove config operation.
         */
        REMOVE
    }
}
| |
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.openstacknode.impl;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.onlab.junit.TestUtils;
import org.onlab.packet.ChassisId;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onlab.packet.VlanId;
import org.onosproject.cfg.ComponentConfigAdapter;
import org.onosproject.cluster.ClusterServiceAdapter;
import org.onosproject.cluster.ControllerNode;
import org.onosproject.cluster.DefaultControllerNode;
import org.onosproject.cluster.LeadershipServiceAdapter;
import org.onosproject.cluster.NodeId;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreServiceAdapter;
import org.onosproject.core.DefaultApplicationId;
import org.onosproject.core.GroupId;
import org.onosproject.net.Annotations;
import org.onosproject.net.DefaultAnnotations;
import org.onosproject.net.DefaultDevice;
import org.onosproject.net.DefaultPort;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Port;
import org.onosproject.net.PortNumber;
import org.onosproject.net.behaviour.BridgeConfig;
import org.onosproject.net.behaviour.BridgeDescription;
import org.onosproject.net.behaviour.BridgeName;
import org.onosproject.net.behaviour.DefaultBridgeDescription;
import org.onosproject.net.behaviour.ExtensionTreatmentResolver;
import org.onosproject.net.behaviour.InterfaceConfig;
import org.onosproject.net.behaviour.PatchDescription;
import org.onosproject.net.behaviour.TunnelDescription;
import org.onosproject.net.device.DefaultPortDescription;
import org.onosproject.net.device.DeviceAdminService;
import org.onosproject.net.device.DeviceEvent;
import org.onosproject.net.device.DeviceInterfaceDescription;
import org.onosproject.net.device.DeviceListener;
import org.onosproject.net.device.DeviceServiceAdapter;
import org.onosproject.net.device.PortDescription;
import org.onosproject.net.driver.Behaviour;
import org.onosproject.net.driver.DriverData;
import org.onosproject.net.driver.DriverHandler;
import org.onosproject.net.flow.instructions.ExtensionPropertyException;
import org.onosproject.net.flow.instructions.ExtensionTreatment;
import org.onosproject.net.flow.instructions.ExtensionTreatmentType;
import org.onosproject.net.group.DefaultGroup;
import org.onosproject.net.group.Group;
import org.onosproject.net.group.GroupBuckets;
import org.onosproject.net.group.GroupDescription;
import org.onosproject.net.group.GroupEvent;
import org.onosproject.net.group.GroupKey;
import org.onosproject.net.group.GroupListener;
import org.onosproject.net.group.GroupService;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.openstacknode.api.NodeState;
import org.onosproject.openstacknode.api.OpenstackNode;
import org.onosproject.openstacknode.api.OpenstackNodeAdminService;
import org.onosproject.openstacknode.api.OpenstackNodeListener;
import org.onosproject.openstacknode.api.OpenstackNodeService;
import org.onosproject.ovsdb.controller.OvsdbClientService;
import org.onosproject.ovsdb.controller.OvsdbController;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.MoreExecutors;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.junit.Assert.assertEquals;
import static org.onosproject.net.AnnotationKeys.PORT_NAME;
import static org.onosproject.net.Device.Type.CONTROLLER;
import static org.onosproject.net.Device.Type.SWITCH;
import static org.onosproject.net.device.DeviceEvent.Type.DEVICE_ADDED;
import static org.onosproject.net.device.DeviceEvent.Type.DEVICE_AVAILABILITY_CHANGED;
import static org.onosproject.net.device.DeviceEvent.Type.PORT_ADDED;
import static org.onosproject.net.device.DeviceEvent.Type.PORT_REMOVED;
import static org.onosproject.openstacknode.api.Constants.DEFAULT_TUNNEL;
import static org.onosproject.openstacknode.api.Constants.INTEGRATION_BRIDGE;
import static org.onosproject.openstacknode.api.Constants.PATCH_INTG_BRIDGE;
import static org.onosproject.openstacknode.api.Constants.PATCH_ROUT_BRIDGE;
import static org.onosproject.openstacknode.api.Constants.ROUTER_BRIDGE;
import static org.onosproject.openstacknode.api.NodeState.COMPLETE;
import static org.onosproject.openstacknode.api.NodeState.DEVICE_CREATED;
import static org.onosproject.openstacknode.api.NodeState.INCOMPLETE;
import static org.onosproject.openstacknode.api.NodeState.INIT;
import static org.onosproject.openstacknode.api.NodeState.PORT_CREATED;
import static org.onosproject.openstacknode.api.OpenstackNode.NodeType.COMPUTE;
import static org.onosproject.openstacknode.api.OpenstackNode.NodeType.GATEWAY;
/**
* Unit test for DefaultOpenstackNodeHandler.
*/
public class DefaultOpenstackNodeHandlerTest {
// --- Test identity and local cluster fixtures ---
private static final ApplicationId TEST_APP_ID = new DefaultApplicationId(1, "test");
private static final String ERR_STATE_NOT_MATCH = "Node state did not match";
private static final NodeId LOCAL_NODE_ID = new NodeId("local");
private static final ControllerNode LOCAL_CTRL =
new DefaultControllerNode(LOCAL_NODE_ID, IpAddress.valueOf("127.0.0.1"));
// Router bridge description shared by gateway-node fixtures.
private static final BridgeDescription ROUT_BRIDGE = DefaultBridgeDescription.builder()
.name(ROUTER_BRIDGE)
.failMode(BridgeDescription.FailMode.SECURE)
.disableInBand()
.build();
// Patch port on the router bridge (port 1, enabled).
private static final PortDescription PATCH_ROUT = new DefaultPortDescription(
PortNumber.portNumber(1),
true,
DefaultAnnotations.builder()
.set(PORT_NAME, PATCH_ROUT_BRIDGE)
.build()
);
// --- Hostnames for the four compute and four gateway test nodes ---
private static final String COMPUTE_1_HOSTNAME = "compute_1";
private static final String COMPUTE_2_HOSTNAME = "compute_2";
private static final String COMPUTE_3_HOSTNAME = "compute_3";
private static final String COMPUTE_4_HOSTNAME = "compute_4";
private static final String GATEWAY_1_HOSTNAME = "gateway_1";
private static final String GATEWAY_2_HOSTNAME = "gateway_2";
private static final String GATEWAY_3_HOSTNAME = "gateway_3";
private static final String GATEWAY_4_HOSTNAME = "gateway_4";
// --- Management IP addresses, one per node ---
private static final IpAddress COMPUTE_1_IP = IpAddress.valueOf("10.100.0.1");
private static final IpAddress COMPUTE_2_IP = IpAddress.valueOf("10.100.0.2");
private static final IpAddress COMPUTE_3_IP = IpAddress.valueOf("10.100.0.3");
private static final IpAddress COMPUTE_4_IP = IpAddress.valueOf("10.100.0.4");
private static final IpAddress GATEWAY_1_IP = IpAddress.valueOf("10.100.0.5");
private static final IpAddress GATEWAY_2_IP = IpAddress.valueOf("10.100.0.6");
private static final IpAddress GATEWAY_3_IP = IpAddress.valueOf("10.100.0.7");
private static final IpAddress GATEWAY_4_IP = IpAddress.valueOf("10.100.0.8");
// --- OpenFlow devices: integration bridges for all nodes,
// plus router bridges for gateways ---
private static final Device COMPUTE_1_INTG_DEVICE = createOpenFlowDevice(1, INTEGRATION_BRIDGE);
private static final Device COMPUTE_2_INTG_DEVICE = createOpenFlowDevice(2, INTEGRATION_BRIDGE);
private static final Device COMPUTE_3_INTG_DEVICE = createOpenFlowDevice(3, INTEGRATION_BRIDGE);
private static final Device COMPUTE_4_INTG_DEVICE = createOpenFlowDevice(4, INTEGRATION_BRIDGE);
private static final Device GATEWAY_1_INTG_DEVICE = createOpenFlowDevice(5, INTEGRATION_BRIDGE);
private static final Device GATEWAY_1_ROUT_DEVICE = createOpenFlowDevice(6, ROUTER_BRIDGE);
private static final Device GATEWAY_2_INTG_DEVICE = createOpenFlowDevice(7, INTEGRATION_BRIDGE);
private static final Device GATEWAY_2_ROUT_DEVICE = createOpenFlowDevice(8, ROUTER_BRIDGE);
private static final Device GATEWAY_3_INTG_DEVICE = createOpenFlowDevice(9, INTEGRATION_BRIDGE);
private static final Device GATEWAY_3_ROUT_DEVICE = createOpenFlowDevice(10, ROUTER_BRIDGE);
private static final Device GATEWAY_4_INTG_DEVICE = createOpenFlowDevice(11, INTEGRATION_BRIDGE);
private static final Device GATEWAY_4_ROUT_DEVICE = createOpenFlowDevice(12, ROUTER_BRIDGE);
// --- OVSDB devices keyed by node management IP ---
private static final Device COMPUTE_1_OVSDB_DEVICE = createOvsdbDevice(COMPUTE_1_IP);
private static final Device COMPUTE_2_OVSDB_DEVICE = createOvsdbDevice(COMPUTE_2_IP);
private static final Device COMPUTE_3_OVSDB_DEVICE = createOvsdbDevice(COMPUTE_3_IP);
private static final Device COMPUTE_4_OVSDB_DEVICE = createOvsdbDevice(COMPUTE_4_IP);
private static final Device GATEWAY_1_OVSDB_DEVICE = createOvsdbDevice(GATEWAY_1_IP);
private static final Device GATEWAY_2_OVSDB_DEVICE = createOvsdbDevice(GATEWAY_2_IP);
// --- Openstack nodes, one fixture per NodeState the handler processes
// (INIT, DEVICE_CREATED, PORT_CREATED, COMPLETE) ---
private static final OpenstackNode COMPUTE_1 = createNode(
COMPUTE_1_HOSTNAME,
COMPUTE,
COMPUTE_1_INTG_DEVICE,
COMPUTE_1_IP,
INIT
);
private static final OpenstackNode COMPUTE_2 = createNode(
COMPUTE_2_HOSTNAME,
COMPUTE,
COMPUTE_2_INTG_DEVICE,
COMPUTE_2_IP,
DEVICE_CREATED
);
private static final OpenstackNode COMPUTE_3 = createNode(
COMPUTE_3_HOSTNAME,
COMPUTE,
COMPUTE_3_INTG_DEVICE,
COMPUTE_3_IP,
PORT_CREATED
);
private static final OpenstackNode COMPUTE_4 = createNode(
COMPUTE_4_HOSTNAME,
COMPUTE,
COMPUTE_4_INTG_DEVICE,
COMPUTE_4_IP,
COMPLETE
);
private static final OpenstackNode GATEWAY_1 = createNode(
GATEWAY_1_HOSTNAME,
GATEWAY,
GATEWAY_1_INTG_DEVICE,
GATEWAY_1_ROUT_DEVICE,
GATEWAY_1_IP,
INIT
);
private static final OpenstackNode GATEWAY_2 = createNode(
GATEWAY_2_HOSTNAME,
GATEWAY,
GATEWAY_2_INTG_DEVICE,
GATEWAY_2_ROUT_DEVICE,
GATEWAY_2_IP,
DEVICE_CREATED
);
private static final OpenstackNode GATEWAY_3 = createNode(
GATEWAY_3_HOSTNAME,
GATEWAY,
GATEWAY_3_INTG_DEVICE,
GATEWAY_3_ROUT_DEVICE,
GATEWAY_3_IP,
PORT_CREATED
);
private static final OpenstackNode GATEWAY_4 = createNode(
GATEWAY_4_HOSTNAME,
GATEWAY,
GATEWAY_4_INTG_DEVICE,
GATEWAY_4_ROUT_DEVICE,
GATEWAY_4_IP,
COMPLETE
);
// Shared stub device service; cleared in tearDown().
private static final TestDeviceService TEST_DEVICE_SERVICE = new TestDeviceService();
private TestOpenstackNodeManager testNodeManager;
// Unit under test.
private DefaultOpenstackNodeHandler target;
/**
 * Wires {@code DefaultOpenstackNodeHandler} to mock/stub services and
 * activates it before each test.
 */
@Before
public void setUp() throws Exception {
// Device admin service: accept any removeDevice() call.
DeviceAdminService mockDeviceAdminService = createMock(DeviceAdminService.class);
mockDeviceAdminService.removeDevice(anyObject());
replay(mockDeviceAdminService);
// OVSDB client/controller mocks always report a connected client.
OvsdbClientService mockOvsdbClient = createMock(OvsdbClientService.class);
expect(mockOvsdbClient.isConnected())
.andReturn(true)
.anyTimes();
replay(mockOvsdbClient);
OvsdbController mockOvsdbController = createMock(OvsdbController.class);
expect(mockOvsdbController.getOvsdbClient(anyObject()))
.andReturn(mockOvsdbClient)
.anyTimes();
replay(mockOvsdbController);
testNodeManager = new TestOpenstackNodeManager();
target = new DefaultOpenstackNodeHandler();
target.coreService = new TestCoreService();
target.leadershipService = new TestLeadershipService();
target.clusterService = new TestClusterService();
target.deviceService = TEST_DEVICE_SERVICE;
target.deviceAdminService = mockDeviceAdminService;
target.ovsdbController = mockOvsdbController;
target.groupService = new TestGroupService();
target.osNodeService = testNodeManager;
target.osNodeAdminService = testNodeManager;
target.componentConfigService = new TestComponentConfigService();
// Run event handling synchronously so tests can assert immediately.
TestUtils.setField(target, "eventExecutor", MoreExecutors.newDirectExecutorService());
target.activate();
}
@After
public void tearDown() {
    // TEST_DEVICE_SERVICE is static and shared across tests, so its listeners,
    // devices and ports must be wiped between test methods.
    TEST_DEVICE_SERVICE.clear();
    target.deactivate();
    target = null;
    testNodeManager = null;
}
/**
 * Checks if the compute node state changes from INIT to DEVICE_CREATED
 * after processing INIT state.
 */
@Test
public void testComputeNodeProcessNodeInitState() {
    testNodeManager.createNode(COMPUTE_1);
    // The OVSDB device must be present (and "connected") for INIT processing
    // to create the integration bridge.
    TEST_DEVICE_SERVICE.devMap.put(COMPUTE_1_OVSDB_DEVICE.id(), COMPUTE_1_OVSDB_DEVICE);
    assertEquals(ERR_STATE_NOT_MATCH, INIT,
            testNodeManager.node(COMPUTE_1_HOSTNAME).state());
    target.processInitState(COMPUTE_1);
    assertEquals(ERR_STATE_NOT_MATCH, DEVICE_CREATED,
            testNodeManager.node(COMPUTE_1_HOSTNAME).state());
}
/**
 * Checks if the gateway node state changes from INIT to DEVICE_CREATED
 * after processing INIT state.
 */
@Test
public void testGatewayNodeProcessNodeInitState() {
    testNodeManager.createNode(GATEWAY_1);
    // The OVSDB device must be present so bridge creation can proceed.
    TEST_DEVICE_SERVICE.devMap.put(GATEWAY_1_OVSDB_DEVICE.id(), GATEWAY_1_OVSDB_DEVICE);
    assertEquals(ERR_STATE_NOT_MATCH, INIT,
            testNodeManager.node(GATEWAY_1_HOSTNAME).state());
    target.processInitState(GATEWAY_1);
    assertEquals(ERR_STATE_NOT_MATCH, DEVICE_CREATED,
            testNodeManager.node(GATEWAY_1_HOSTNAME).state());
}
/**
 * Checks if the compute node state changes from DEVICE_CREATED to
 * PORT_CREATED after processing DEVICE_CREATED state.
 */
@Test
public void testComputeNodeProcessDeviceCreatedState() {
    testNodeManager.createNode(COMPUTE_2);
    // Both the OVSDB device and the already-created integration bridge must
    // be registered before port creation can be processed.
    TEST_DEVICE_SERVICE.devMap.put(COMPUTE_2_OVSDB_DEVICE.id(), COMPUTE_2_OVSDB_DEVICE);
    TEST_DEVICE_SERVICE.devMap.put(COMPUTE_2_INTG_DEVICE.id(), COMPUTE_2_INTG_DEVICE);
    assertEquals(ERR_STATE_NOT_MATCH, DEVICE_CREATED,
            testNodeManager.node(COMPUTE_2_HOSTNAME).state());
    target.processDeviceCreatedState(COMPUTE_2);
    assertEquals(ERR_STATE_NOT_MATCH, PORT_CREATED,
            testNodeManager.node(COMPUTE_2_HOSTNAME).state());
}
/**
 * Checks if the gateway node state changes from DEVICE_CREATED to
 * PORT_CREATED after processing DEVICE_CREATED state.
 */
@Test
public void testGatewayNodeProcessDeviceCreatedState() {
    testNodeManager.createNode(GATEWAY_2);
    // OVSDB device plus existing integration bridge are prerequisites for
    // the DEVICE_CREATED -> PORT_CREATED transition.
    TEST_DEVICE_SERVICE.devMap.put(GATEWAY_2_OVSDB_DEVICE.id(), GATEWAY_2_OVSDB_DEVICE);
    TEST_DEVICE_SERVICE.devMap.put(GATEWAY_2_INTG_DEVICE.id(), GATEWAY_2_INTG_DEVICE);
    assertEquals(ERR_STATE_NOT_MATCH, DEVICE_CREATED,
            testNodeManager.node(GATEWAY_2_HOSTNAME).state());
    target.processDeviceCreatedState(GATEWAY_2);
    assertEquals(ERR_STATE_NOT_MATCH, PORT_CREATED,
            testNodeManager.node(GATEWAY_2_HOSTNAME).state());
}
/**
 * Checks if the compute node state changes from PORT_CREATED to
 * COMPLETE after processing PORT_CREATED state.
 */
@Test
public void testComputeNodeProcessPortCreatedState() {
    testNodeManager.createNode(COMPUTE_3);
    TEST_DEVICE_SERVICE.devMap.put(COMPUTE_3_OVSDB_DEVICE.id(), COMPUTE_3_OVSDB_DEVICE);
    TEST_DEVICE_SERVICE.devMap.put(COMPUTE_3_INTG_DEVICE.id(), COMPUTE_3_INTG_DEVICE);
    // The tunnel port must already exist on the integration bridge for the
    // PORT_CREATED check to pass.
    TEST_DEVICE_SERVICE.portList.add(createPort(COMPUTE_3_INTG_DEVICE, DEFAULT_TUNNEL));
    // NOTE(review): a COMPLETE gateway node is registered as well, presumably
    // so the compute node's completion logic can resolve a peer gateway —
    // TODO confirm against DefaultOpenstackNodeHandler.processPortCreatedState.
    testNodeManager.createNode(GATEWAY_4);
    TEST_DEVICE_SERVICE.devMap.put(GATEWAY_4_INTG_DEVICE.id(), GATEWAY_4_INTG_DEVICE);
    assertEquals(ERR_STATE_NOT_MATCH, PORT_CREATED,
            testNodeManager.node(COMPUTE_3_HOSTNAME).state());
    target.processPortCreatedState(COMPUTE_3);
    assertEquals(ERR_STATE_NOT_MATCH, COMPLETE,
            testNodeManager.node(COMPUTE_3_HOSTNAME).state());
}
/**
 * Checks if the gateway node state changes from PORT_CREATED to
 * COMPLETE after processing PORT_CREATED state.
 */
@Test
public void testGatewayNodeProcessPortCreatedState() {
    // A COMPLETE compute node is registered first so the gateway's completion
    // logic has a peer node to work against.
    testNodeManager.createNode(COMPUTE_4);
    TEST_DEVICE_SERVICE.devMap.put(COMPUTE_4_OVSDB_DEVICE.id(), COMPUTE_4_OVSDB_DEVICE);
    TEST_DEVICE_SERVICE.devMap.put(COMPUTE_4_INTG_DEVICE.id(), COMPUTE_4_INTG_DEVICE);
    TEST_DEVICE_SERVICE.portList.add(createPort(COMPUTE_4_INTG_DEVICE, DEFAULT_TUNNEL));
    testNodeManager.createNode(GATEWAY_3);
    // Fix: register GATEWAY_3's own integration bridge. The previous code
    // stored GATEWAY_4_INTG_DEVICE under GATEWAY_3_INTG_DEVICE.id() (copy-paste
    // slip), so a lookup by GATEWAY_3's device id returned a device whose id()
    // did not match the key it was stored under.
    TEST_DEVICE_SERVICE.devMap.put(GATEWAY_3_INTG_DEVICE.id(), GATEWAY_3_INTG_DEVICE);
    assertEquals(ERR_STATE_NOT_MATCH, PORT_CREATED,
            testNodeManager.node(GATEWAY_3_HOSTNAME).state());
    target.processPortCreatedState(GATEWAY_3);
    assertEquals(ERR_STATE_NOT_MATCH, COMPLETE,
            testNodeManager.node(GATEWAY_3_HOSTNAME).state());
}
/**
 * Checks if the compute node state changes from COMPLETE to INCOMPLETE
 * when integration bridge is disconnected.
 */
@Test
public void testBackToIncompleteWhenBrIntDisconnected() {
    testNodeManager.createNode(COMPUTE_4);
    assertEquals(ERR_STATE_NOT_MATCH, COMPLETE,
            testNodeManager.node(COMPUTE_4_HOSTNAME).state());
    // removeDevice() fires DEVICE_AVAILABILITY_CHANGED synchronously to the
    // listener the handler registered in activate(), which should demote the node.
    TEST_DEVICE_SERVICE.removeDevice(COMPUTE_4_INTG_DEVICE);
    assertEquals(ERR_STATE_NOT_MATCH, INCOMPLETE,
            testNodeManager.node(COMPUTE_4_HOSTNAME).state());
}
/**
 * Checks if the compute node state changes from COMPLETE to INCOMPLETE
 * when vxlan port is removed from integration bridge.
 */
@Test
public void testBackToIncompleteWhenVxlanRemoved() {
    testNodeManager.createNode(COMPUTE_4);
    assertEquals(ERR_STATE_NOT_MATCH, COMPLETE,
            testNodeManager.node(COMPUTE_4_HOSTNAME).state());
    // removePort() fires PORT_REMOVED synchronously; losing the tunnel port
    // should drop the node back to INCOMPLETE.
    TEST_DEVICE_SERVICE.removePort(COMPUTE_4_INTG_DEVICE, createPort(
            COMPUTE_4_INTG_DEVICE, DEFAULT_TUNNEL));
    assertEquals(ERR_STATE_NOT_MATCH, INCOMPLETE,
            testNodeManager.node(COMPUTE_4_HOSTNAME).state());
}
/**
 * Builds a CONTROLLER-type test device representing the OVSDB endpoint of a
 * node, identified as {@code ovsdb:<management-ip>}.
 */
private static Device createOvsdbDevice(IpAddress ovsdbIp) {
    DeviceId ovsdbDeviceId = DeviceId.deviceId("ovsdb:" + ovsdbIp.toString());
    ProviderId providerId = new ProviderId("of", "foo");
    return new TestDevice(providerId, ovsdbDeviceId, CONTROLLER,
            "manufacturer", "hwVersion", "swVersion", "serialNumber",
            new ChassisId(1));
}
/**
 * Builds a SWITCH-type test device with the given OpenFlow datapath number.
 * The bridge name is smuggled through the manufacturer field so test stubs
 * can tell integration and router bridges apart.
 */
private static Device createOpenFlowDevice(long devIdNum, String type) {
    DeviceId dpid = DeviceId.deviceId(String.format("of:%016d", devIdNum));
    ProviderId providerId = new ProviderId("of", "foo");
    return new TestDevice(providerId, dpid, SWITCH,
            type, "hwVersion", "swVersion", "serialNumber",
            new ChassisId(1));
}
/**
 * Builds an enabled port number 1 on the given device, annotated with the
 * supplied port name (the only attribute the handler inspects).
 */
private static Port createPort(Device device, String portName) {
    Annotations annotations =
            DefaultAnnotations.builder().set(PORT_NAME, portName).build();
    return new DefaultPort(device, PortNumber.portNumber(1), true, annotations);
}
/**
 * Creates a test node without a router bridge (compute-node shape).
 * The same IP is used as both management and data address; VLAN interface
 * is left unset.
 */
private static OpenstackNode createNode(String hostname,
                                        OpenstackNode.NodeType type,
                                        Device intgBridge,
                                        IpAddress ipAddr,
                                        NodeState state) {
    return new TestOpenstackNode(
            hostname,
            type,
            intgBridge.id(),
            null,
            ipAddr,
            ipAddr,
            null, state);
}
/**
 * Creates a test node with both integration and router bridges
 * (gateway-node shape). Management and data IPs are the same address;
 * VLAN interface is left unset.
 */
private static OpenstackNode createNode(String hostname,
                                        OpenstackNode.NodeType type,
                                        Device intgBridge,
                                        Device routerBridge,
                                        IpAddress ipAddr,
                                        NodeState state) {
    return new TestOpenstackNode(
            hostname,
            type,
            intgBridge.id(),
            routerBridge.id(),
            ipAddr,
            ipAddr,
            null, state);
}
/**
 * Device stub that serves behaviour projections with test doubles instead of
 * real driver-backed behaviours.
 */
private static final class TestDevice extends DefaultDevice {
    private TestDevice(ProviderId providerId,
                       DeviceId id,
                       Type type,
                       String manufacturer,
                       String hwVersion,
                       String swVersion,
                       String serialNumber,
                       ChassisId chassisId,
                       Annotations... annotations) {
        super(providerId,
                id,
                type,
                manufacturer,
                hwVersion,
                swVersion,
                serialNumber,
                chassisId,
                annotations);
    }
    // Returns a canned stub per behaviour type; null for anything this test
    // suite does not exercise.
    @Override
    @SuppressWarnings("unchecked")
    public <B extends Behaviour> B as(Class<B> projectionClass) {
        if (projectionClass.equals(BridgeConfig.class)) {
            return (B) new TestBridgeConfig();
        } else if (projectionClass.equals(InterfaceConfig.class)) {
            return (B) new TestInterfaceConfig();
        } else if (projectionClass.equals(ExtensionTreatmentResolver.class)) {
            // Fresh mock per call; always resolves to a TestExtensionTreatment.
            ExtensionTreatmentResolver treatmentResolver = createMock(ExtensionTreatmentResolver.class);
            expect(treatmentResolver.getExtensionInstruction(anyObject()))
                    .andReturn(new TestExtensionTreatment())
                    .anyTimes();
            replay(treatmentResolver);
            return (B) treatmentResolver;
        } else {
            return null;
        }
    }
    // Claims to support every behaviour so the handler never bails out early.
    @Override
    public <B extends Behaviour> boolean is(Class<B> projectionClass) {
        return true;
    }
}
/**
 * Node stub that pins all port-number/MAC lookups to fixed values so tests
 * do not depend on real port discovery.
 */
private static final class TestOpenstackNode extends DefaultOpenstackNode {
    private TestOpenstackNode(String hostname,
                              NodeType type,
                              DeviceId intgBridge,
                              DeviceId routerBridge,
                              IpAddress managementIp,
                              IpAddress dataIp,
                              String vlanIntf,
                              NodeState state) {
        super(hostname,
                type,
                intgBridge,
                routerBridge,
                managementIp,
                dataIp,
                vlanIntf,
                state);
    }
    // All special ports resolve to port 1 — tests only care that a port exists.
    @Override
    public PortNumber tunnelPortNum() {
        return PortNumber.portNumber(1);
    }
    @Override
    public PortNumber vlanPortNum() {
        return PortNumber.portNumber(1);
    }
    @Override
    public PortNumber patchPortNum() {
        return PortNumber.portNumber(1);
    }
    @Override
    public MacAddress vlanPortMac() {
        return MacAddress.NONE;
    }
}
/**
 * In-memory node store standing in for both the read-side and admin-side
 * OpenStack node services. Backed by a plain hostname-keyed map; no events
 * are emitted on create/update.
 */
private static class TestOpenstackNodeManager implements OpenstackNodeService, OpenstackNodeAdminService {
    Map<String, OpenstackNode> osNodeMap = Maps.newHashMap();
    List<OpenstackNodeListener> listeners = Lists.newArrayList();
    @Override
    public Set<OpenstackNode> nodes() {
        return ImmutableSet.copyOf(osNodeMap.values());
    }
    @Override
    public Set<OpenstackNode> nodes(OpenstackNode.NodeType type) {
        return osNodeMap.values().stream()
                .filter(osNode -> osNode.type() == type)
                .collect(Collectors.toSet());
    }
    @Override
    public Set<OpenstackNode> completeNodes() {
        return osNodeMap.values().stream()
                .filter(osNode -> osNode.state() == COMPLETE)
                .collect(Collectors.toSet());
    }
    @Override
    public Set<OpenstackNode> completeNodes(OpenstackNode.NodeType type) {
        return osNodeMap.values().stream()
                .filter(osNode -> osNode.type() == type && osNode.state() == COMPLETE)
                .collect(Collectors.toSet());
    }
    @Override
    public OpenstackNode node(String hostname) {
        return osNodeMap.get(hostname);
    }
    // A node is matched by any of its three device identities:
    // integration bridge, OVSDB device, or router bridge.
    @Override
    public OpenstackNode node(DeviceId deviceId) {
        return osNodeMap.values().stream()
                .filter(osNode -> Objects.equals(osNode.intgBridge(), deviceId) ||
                        Objects.equals(osNode.ovsdb(), deviceId) ||
                        Objects.equals(osNode.routerBridge(), deviceId))
                .findFirst().orElse(null);
    }
    @Override
    public void addListener(OpenstackNodeListener listener) {
        listeners.add(listener);
    }
    @Override
    public void removeListener(OpenstackNodeListener listener) {
        listeners.remove(listener);
    }
    @Override
    public void createNode(OpenstackNode osNode) {
        osNodeMap.put(osNode.hostname(), osNode);
    }
    // Update is identical to create for this stub: last write wins.
    @Override
    public void updateNode(OpenstackNode osNode) {
        osNodeMap.put(osNode.hostname(), osNode);
    }
    // Stubbed out: removal is not exercised by these tests.
    @Override
    public OpenstackNode removeNode(String hostname) {
        return null;
    }
}
/**
 * Device-service stub with mutable device/port registries. The package-private
 * add/remove helpers dispatch DeviceEvents synchronously to registered
 * listeners, which is what drives the handler's state transitions in tests.
 */
private static class TestDeviceService extends DeviceServiceAdapter {
    Map<DeviceId, Device> devMap = Maps.newHashMap();
    List<Port> portList = Lists.newArrayList();
    List<DeviceListener> listeners = Lists.newArrayList();
    @Override
    public void addListener(DeviceListener listener) {
        listeners.add(listener);
    }
    @Override
    public void removeListener(DeviceListener listener) {
        listeners.remove(listener);
    }
    @Override
    public Device getDevice(DeviceId deviceId) {
        return devMap.get(deviceId);
    }
    @Override
    public List<Port> getPorts(DeviceId deviceId) {
        return this.portList.stream()
                .filter(p -> p.element().id().equals(deviceId))
                .collect(Collectors.toList());
    }
    // Availability simply means "registered in devMap".
    @Override
    public boolean isAvailable(DeviceId deviceId) {
        return devMap.containsKey(deviceId);
    }
    void addDevice(Device device) {
        devMap.put(device.id(), device);
        DeviceEvent event = new DeviceEvent(DEVICE_ADDED, device);
        listeners.stream().filter(l -> l.isRelevant(event)).forEach(l -> l.event(event));
    }
    // Removal is reported as an availability change, mirroring how ONOS
    // signals a disconnected switch.
    void removeDevice(Device device) {
        devMap.remove(device.id());
        DeviceEvent event = new DeviceEvent(DEVICE_AVAILABILITY_CHANGED, device);
        listeners.stream().filter(l -> l.isRelevant(event)).forEach(l -> l.event(event));
    }
    void addPort(Device device, Port port) {
        portList.add(port);
        DeviceEvent event = new DeviceEvent(PORT_ADDED, device, port);
        listeners.stream().filter(l -> l.isRelevant(event)).forEach(l -> l.event(event));
    }
    void removePort(Device device, Port port) {
        portList.remove(port);
        DeviceEvent event = new DeviceEvent(PORT_REMOVED, device, port);
        listeners.stream().filter(l -> l.isRelevant(event)).forEach(l -> l.event(event));
    }
    // Resets all shared state; called from tearDown() because this service
    // instance is static and shared across test methods.
    void clear() {
        this.listeners.clear();
        this.devMap.clear();
        this.portList.clear();
    }
}
/**
 * BridgeConfig behaviour stub. addBridge() materialises the requested bridge
 * as a new OpenFlow device in TEST_DEVICE_SERVICE (firing DEVICE_ADDED);
 * most other operations are no-ops or return canned fixtures.
 */
private static class TestBridgeConfig implements BridgeConfig {
    @Override
    public DriverData data() {
        return null;
    }
    @Override
    public void setData(DriverData data) {
    }
    @Override
    public DriverHandler handler() {
        return null;
    }
    @Override
    public void setHandler(DriverHandler handler) {
    }
    // Registers a plain DefaultDevice keyed by the bridge's datapath id; the
    // bridge name goes into the manufacturer field (same convention as
    // createOpenFlowDevice).
    @Override
    public boolean addBridge(BridgeDescription bridge) {
        TEST_DEVICE_SERVICE.addDevice(new DefaultDevice(new ProviderId("of", "foo"),
                DeviceId.deviceId("of:" + bridge.datapathId().get()),
                SWITCH,
                bridge.name(),
                "hwVersion",
                "swVersion",
                "serialNumber",
                new ChassisId(1)));
        return true;
    }
    @Override
    public void deleteBridge(BridgeName bridgeName) {
    }
    // Canned fixture: only the router bridge is ever reported.
    @Override
    public Collection<BridgeDescription> getBridges() {
        return ImmutableSet.of(ROUT_BRIDGE);
    }
    @Override
    public void addPort(BridgeName bridgeName, String portName) {
    }
    @Override
    public void deletePort(BridgeName bridgeName, String portName) {
    }
    // Canned fixture: only the router-side patch port is ever reported.
    @Override
    public Collection<PortDescription> getPorts() {
        return ImmutableSet.of(PATCH_ROUT);
    }
    @Override
    public Set<PortNumber> getPortNumbers() {
        return null;
    }
    @Override
    public List<PortNumber> getLocalPorts(Iterable<String> ifaceIds) {
        return null;
    }
}
/**
 * InterfaceConfig behaviour stub. Tunnel and patch port creation are emulated
 * by adding ports (and firing PORT_ADDED) on matching devices registered in
 * TEST_DEVICE_SERVICE; all other operations report failure or do nothing.
 */
private static class TestInterfaceConfig implements InterfaceConfig {
    @Override
    public DriverData data() {
        return null;
    }
    @Override
    public void setData(DriverData data) {
    }
    @Override
    public DriverHandler handler() {
        return null;
    }
    @Override
    public void setHandler(DriverHandler handler) {
    }
    @Override
    public boolean addAccessMode(String intf, VlanId vlanId) {
        return false;
    }
    @Override
    public boolean removeAccessMode(String intf) {
        return false;
    }
    @Override
    public boolean addTrunkMode(String intf, List<VlanId> vlanIds) {
        return false;
    }
    @Override
    public boolean removeTrunkMode(String intf) {
        return false;
    }
    @Override
    public boolean addRateLimit(String intf, short limit) {
        return false;
    }
    @Override
    public boolean removeRateLimit(String intf) {
        return false;
    }
    // Adds the tunnel port to every integration bridge currently registered
    // (devices are matched via the manufacturer field carrying the bridge name).
    @Override
    public boolean addTunnelMode(String intf, TunnelDescription tunnelDesc) {
        TEST_DEVICE_SERVICE.devMap.values().stream()
                .filter(device -> device.type() == SWITCH &&
                        device.manufacturer().equals(INTEGRATION_BRIDGE))
                .forEach(device -> {
                    TEST_DEVICE_SERVICE.addPort(device, createPort(device, intf));
                });
        return true;
    }
    @Override
    public boolean removeTunnelMode(String intf) {
        return false;
    }
    // Routes the patch port to integration or router bridges depending on
    // which side of the patch the interface name denotes.
    @Override
    public boolean addPatchMode(String ifaceName, PatchDescription patchInterface) {
        if (ifaceName.equals(PATCH_INTG_BRIDGE)) {
            TEST_DEVICE_SERVICE.devMap.values().stream()
                    .filter(device -> device.type() == SWITCH &&
                            device.manufacturer().equals(INTEGRATION_BRIDGE))
                    .forEach(device -> {
                        TEST_DEVICE_SERVICE.addPort(device, createPort(device, ifaceName));
                    });
        } else if (ifaceName.equals(PATCH_ROUT_BRIDGE)) {
            TEST_DEVICE_SERVICE.devMap.values().stream()
                    .filter(device -> device.type() == SWITCH &&
                            device.manufacturer().equals(ROUTER_BRIDGE))
                    .forEach(device -> {
                        TEST_DEVICE_SERVICE.addPort(device, createPort(device, ifaceName));
                    });
        }
        return true;
    }
    @Override
    public boolean removePatchMode(String ifaceName) {
        return false;
    }
    @Override
    public List<DeviceInterfaceDescription> getInterfaces() {
        return null;
    }
}
/**
 * GroupService stub backed by two app-cookie-keyed maps (groups and their
 * buckets). addGroup/setBucketsForGroup dispatch group events synchronously;
 * everything else is a no-op or returns null.
 */
private static class TestGroupService implements GroupService {
    Map<GroupKey, Group> groupMap = Maps.newHashMap();
    Map<GroupKey, GroupBuckets> groupBucketsMap = Maps.newHashMap();
    List<GroupListener> listeners = Lists.newArrayList();
    @Override
    public void addListener(GroupListener listener) {
        listeners.add(listener);
    }
    @Override
    public void removeListener(GroupListener listener) {
        listeners.remove(listener);
    }
    // Stores the group in ADDED state immediately (no async programming) and
    // fires GROUP_ADDED to relevant listeners.
    @Override
    public void addGroup(GroupDescription groupDesc) {
        DefaultGroup group = new DefaultGroup(GroupId.valueOf(groupDesc.givenGroupId()), groupDesc);
        group.setState(Group.GroupState.ADDED);
        groupMap.put(groupDesc.appCookie(), group);
        groupBucketsMap.put(groupDesc.appCookie(), groupDesc.buckets());
        GroupEvent event = new GroupEvent(GroupEvent.Type.GROUP_ADDED, group);
        listeners.stream().filter(listener -> listener.isRelevant(event))
                .forEach(listener -> listener.event(event));
    }
    // Lookup ignores the device id — cookies are assumed unique across devices.
    @Override
    public Group getGroup(DeviceId deviceId, GroupKey appCookie) {
        return groupMap.get(appCookie);
    }
    @Override
    public void addBucketsToGroup(DeviceId deviceId, GroupKey oldCookie, GroupBuckets buckets,
                                  GroupKey newCookie, ApplicationId appId) {
    }
    @Override
    public void removeBucketsFromGroup(DeviceId deviceId, GroupKey oldCookie, GroupBuckets buckets,
                                       GroupKey newCookie, ApplicationId appId) {
    }
    @Override
    public void purgeGroupEntries(DeviceId deviceId) {
    }
    @Override
    public void removeGroup(DeviceId deviceId, GroupKey appCookie, ApplicationId appId) {
    }
    @Override
    public Iterable<Group> getGroups(DeviceId deviceId, ApplicationId appId) {
        return null;
    }
    @Override
    public Iterable<Group> getGroups(DeviceId deviceId) {
        return null;
    }
    // Replaces the buckets and fires GROUP_UPDATED for the (possibly absent)
    // group stored under the new cookie.
    @Override
    public void setBucketsForGroup(DeviceId deviceId, GroupKey oldCookie, GroupBuckets buckets,
                                   GroupKey newCookie, ApplicationId appId) {
        groupBucketsMap.put(newCookie, buckets);
        GroupEvent event = new GroupEvent(GroupEvent.Type.GROUP_UPDATED, groupMap.get(newCookie));
        listeners.stream().filter(listener -> listener.isRelevant(event))
                .forEach(listener -> listener.event(event));
    }
}
/**
 * ExtensionTreatment stub that only records the tunnel-destination IP set via
 * {@link #setPropertyValue} and compares equal to another instance with the
 * same destination. All other operations are inert.
 */
private static class TestExtensionTreatment implements ExtensionTreatment {
    Ip4Address tunnelDst;
    @Override
    public ExtensionTreatmentType type() {
        return null;
    }
    // Captures the tunnel destination regardless of the property key.
    @Override
    public <T> void setPropertyValue(String key, T value) throws ExtensionPropertyException {
        tunnelDst = (Ip4Address) value;
    }
    @Override
    public <T> T getPropertyValue(String key) throws ExtensionPropertyException {
        return null;
    }
    @Override
    public List<String> getProperties() {
        return null;
    }
    @Override
    public byte[] serialize() {
        return new byte[0];
    }
    @Override
    public void deserialize(byte[] data) {
    }
    // Fix: the original cast the operand unconditionally, so equals(null) threw
    // NullPointerException and equals(otherType) threw ClassCastException,
    // violating the Object.equals contract. Guard with instanceof first.
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof TestExtensionTreatment)) {
            return false;
        }
        TestExtensionTreatment that = (TestExtensionTreatment) obj;
        return Objects.equals(tunnelDst, that.tunnelDst);
    }
    @Override
    public int hashCode() {
        return Objects.hash(tunnelDst);
    }
}
/** CoreService stub that resolves every application name to the test app id. */
private static class TestCoreService extends CoreServiceAdapter {
    @Override
    public ApplicationId getAppId(String name) {
        return TEST_APP_ID;
    }
}
/** LeadershipService stub: the local node is always the leader of any topic. */
private static class TestLeadershipService extends LeadershipServiceAdapter {
    @Override
    public NodeId getLeader(String path) {
        return LOCAL_NODE_ID;
    }
}
/** ClusterService stub that reports the fixed local controller node. */
private static class TestClusterService extends ClusterServiceAdapter {
    @Override
    public ControllerNode getLocalNode() {
        return LOCAL_CTRL;
    }
}
private class TestComponentConfigService extends ComponentConfigAdapter {
}
}
| |
// Copyright 2021 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.worker;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.hash.HashCode;
import com.google.devtools.build.lib.actions.ExecException;
import com.google.devtools.build.lib.actions.ExecutionRequirements.WorkerProtocolFormat;
import com.google.devtools.build.lib.actions.Spawn;
import com.google.devtools.build.lib.actions.Spawns;
import com.google.devtools.build.lib.actions.UserExecException;
import com.google.devtools.build.lib.exec.BinTools;
import com.google.devtools.build.lib.exec.SpawnRunner.SpawnExecutionContext;
import com.google.devtools.build.lib.exec.local.LocalEnvProvider;
import com.google.devtools.build.lib.server.FailureDetails;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.SortedMap;
import java.util.regex.Pattern;
/**
* A helper class to process a {@link Spawn} into a {@link WorkerKey}, which is used to select a
* persistent worker process (actions with equal keys are allowed to use the same worker process),
* and a separate list of flag files. The result is encapsulated as a {@link WorkerConfig}.
*/
class WorkerParser {
  public static final String ERROR_MESSAGE_PREFIX =
      "Worker strategy cannot execute this %s action, ";
  public static final String REASON_NO_FLAGFILE =
      "because the command-line arguments do not contain at least one @flagfile or --flagfile=";

  /** Pattern for @flagfile.txt and --flagfile=flagfile.txt */
  private static final Pattern FLAG_FILE_PATTERN = Pattern.compile("(?:@|--?flagfile=)(.+)");

  private final Path execRoot;
  private final WorkerOptions workerOptions;
  private final LocalEnvProvider localEnvProvider;
  private final BinTools binTools;

  public WorkerParser(
      Path execRoot,
      WorkerOptions workerOptions,
      LocalEnvProvider localEnvProvider,
      BinTools binTools) {
    this.execRoot = execRoot;
    this.workerOptions = workerOptions;
    this.localEnvProvider = localEnvProvider;
    this.binTools = binTools;
  }

  /**
   * Processes the given {@link Spawn} and {@link SpawnExecutionContext} to compute the worker key.
   * This involves splitting the command line into the worker startup command and the separate list
   * of flag files. Returns a {@link WorkerConfig} wrapping the {@link WorkerKey} and the flag
   * files.
   *
   * @throws UserExecException if the spawn has no flag-file argument
   * @throws IOException if the JSON protocol is requested but not enabled, or worker-file hashing
   *     fails
   */
  public WorkerConfig compute(Spawn spawn, SpawnExecutionContext context)
      throws ExecException, IOException, InterruptedException {
    // We assume that the spawn to be executed always gets at least one @flagfile.txt or
    // --flagfile=flagfile.txt argument, which contains the flags related to the work itself (as
    // opposed to start-up options for the executed tool). Thus, we can extract those elements from
    // its args and put them into the WorkRequest instead.
    List<String> flagFiles = new ArrayList<>();
    ImmutableList<String> workerArgs = splitSpawnArgsIntoWorkerArgsAndFlagFiles(spawn, flagFiles);
    ImmutableMap<String, String> env =
        localEnvProvider.rewriteLocalEnv(spawn.getEnvironment(), binTools, "/tmp");
    SortedMap<PathFragment, HashCode> workerFiles =
        WorkerFilesHash.getWorkerFilesWithHashes(
            spawn, context.getArtifactExpander(), context.getMetadataProvider());
    HashCode workerFilesCombinedHash = WorkerFilesHash.getCombinedHash(workerFiles);
    WorkerProtocolFormat protocolFormat = Spawns.getWorkerProtocolFormat(spawn);
    // The JSON protocol is gated behind an experimental flag; reject early so we never
    // start a worker speaking an unexpected protocol.
    if (!workerOptions.experimentalJsonWorkerProtocol
        && protocolFormat == WorkerProtocolFormat.JSON) {
      throw new IOException(
          "Persistent worker protocol format must be set to proto unless"
              + " --experimental_worker_allow_json_protocol is used");
    }
    WorkerKey key =
        createWorkerKey(
            spawn,
            workerArgs,
            env,
            execRoot,
            workerFilesCombinedHash,
            workerFiles,
            workerOptions,
            context.speculating(),
            protocolFormat);
    return new WorkerConfig(key, flagFiles);
  }

  /**
   * This method handles the logic of creating a WorkerKey (e.g., if sandboxing should be enabled or
   * not, when to use multiplex-workers)
   */
  @VisibleForTesting
  static WorkerKey createWorkerKey(
      Spawn spawn,
      ImmutableList<String> workerArgs,
      ImmutableMap<String, String> env,
      Path execRoot,
      HashCode workerFilesCombinedHash,
      SortedMap<PathFragment, HashCode> workerFiles,
      WorkerOptions options,
      boolean dynamic,
      WorkerProtocolFormat protocolFormat) {
    return new WorkerKey(
        workerArgs,
        env,
        execRoot,
        Spawns.getWorkerKeyMnemonic(spawn),
        workerFilesCombinedHash,
        workerFiles,
        // Dynamic execution requires sandboxing for correctness.
        /* sandboxed= */ options.workerSandboxing || dynamic,
        // Multiplex workers are incompatible with both dynamic execution and sandboxing.
        /* multiplex= */ options.workerMultiplex
            && Spawns.supportsMultiplexWorkers(spawn)
            && !dynamic
            && !options.workerSandboxing,
        Spawns.supportsWorkerCancellation(spawn),
        protocolFormat);
  }

  /**
   * Splits the command-line arguments of the {@code Spawn} into the part that is used to start the
   * persistent worker ({@code workerArgs}) and the part that goes into the {@code WorkRequest}
   * protobuf ({@code flagFiles}).
   *
   * @param flagFiles output parameter; matching flag-file arguments are appended to it
   * @return the worker startup command: non-flag-file args, "--persistent_worker", and any
   *     per-mnemonic extra flags from {@code --worker_extra_flag}
   * @throws UserExecException if no flag-file argument is present
   */
  private ImmutableList<String> splitSpawnArgsIntoWorkerArgsAndFlagFiles(
      Spawn spawn, List<String> flagFiles) throws UserExecException {
    ImmutableList.Builder<String> workerArgs = ImmutableList.builder();
    for (String arg : spawn.getArguments()) {
      if (FLAG_FILE_PATTERN.matcher(arg).matches()) {
        flagFiles.add(arg);
      } else {
        workerArgs.add(arg);
      }
    }
    if (flagFiles.isEmpty()) {
      throw new UserExecException(
          FailureDetails.FailureDetail.newBuilder()
              .setMessage(
                  String.format(ERROR_MESSAGE_PREFIX + REASON_NO_FLAGFILE, spawn.getMnemonic()))
              .setWorker(
                  FailureDetails.Worker.newBuilder()
                      .setCode(FailureDetails.Worker.Code.NO_FLAGFILE))
              .build());
    }
    ImmutableList.Builder<String> mnemonicFlags = ImmutableList.builder();
    workerOptions.workerExtraFlags.stream()
        .filter(entry -> entry.getKey().equals(spawn.getMnemonic()))
        .forEach(entry -> mnemonicFlags.add(entry.getValue()));
    // Note: ImmutableList.Builder#build() never returns null, so the previous
    // MoreObjects.firstNonNull(..., ImmutableList.of()) wrapper was dead code.
    return workerArgs
        .add("--persistent_worker")
        .addAll(mnemonicFlags.build())
        .build();
  }

  /** A pair of the {@link WorkerKey} and the list of flag files. */
  public static class WorkerConfig {
    private final WorkerKey workerKey;
    private final List<String> flagFiles;

    public WorkerConfig(WorkerKey workerKey, List<String> flagFiles) {
      this.workerKey = workerKey;
      // Defensive copy: callers keep no handle that can mutate our list.
      this.flagFiles = ImmutableList.copyOf(flagFiles);
    }

    public WorkerKey getWorkerKey() {
      return workerKey;
    }

    public List<String> getFlagFiles() {
      return flagFiles;
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Run a Hadoop job jar. */
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class RunJar {
private static final Logger LOG = LoggerFactory.getLogger(RunJar.class);
/** Pattern that matches any string. */
public static final Pattern MATCH_ANY = Pattern.compile(".*");
/**
 * Priority of the RunJar shutdown hook.
 */
public static final int SHUTDOWN_HOOK_PRIORITY = 10;
/**
 * Environment key for using the client classloader.
 */
public static final String HADOOP_USE_CLIENT_CLASSLOADER =
    "HADOOP_USE_CLIENT_CLASSLOADER";
/**
 * Environment key for the (user-provided) hadoop classpath.
 */
public static final String HADOOP_CLASSPATH = "HADOOP_CLASSPATH";
/**
 * Environment key for the system classes.
 */
public static final String HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES =
    "HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES";
/**
 * Buffer size for copy the content of compressed file to new file.
 */
private static final int BUFFER_SIZE = 8_192;
/**
 * Unpack a jar file into a directory.
 *
 * This version unpacks all files inside the jar regardless of filename
 * (it delegates to {@link #unJar(File, File, Pattern)} with {@link #MATCH_ANY}).
 *
 * @param jarFile the .jar file to unpack
 * @param toDir the destination directory into which to unpack the jar
 *
 * @throws IOException if an I/O error has occurred or toDir
 * cannot be created and does not already exist
 */
public static void unJar(File jarFile, File toDir) throws IOException {
  unJar(jarFile, toDir, MATCH_ANY);
}
/**
 * Unpack matching files from a jar. Entries inside the jar that do
 * not match the given pattern will be skipped. Entries whose names would
 * resolve to a path outside {@code toDir} (e.g. via "..") are rejected.
 *
 * @param jarFile the .jar file to unpack
 * @param toDir the destination directory into which to unpack the jar
 * @param unpackRegex the pattern to match jar entries against
 *
 * @throws IOException if an I/O error has occurred, toDir
 * cannot be created and does not already exist, or an entry would be
 * extracted outside of toDir
 */
public static void unJar(File jarFile, File toDir, Pattern unpackRegex)
    throws IOException {
  try (JarFile jar = new JarFile(jarFile)) {
    int numOfFailedLastModifiedSet = 0;
    // Resolve the extraction root once; used below to reject "zip-slip"
    // entries that would escape toDir through ".." path components.
    String targetDirPath = toDir.getCanonicalPath() + File.separator;
    Enumeration<JarEntry> entries = jar.entries();
    while (entries.hasMoreElements()) {
      final JarEntry entry = entries.nextElement();
      if (!entry.isDirectory() &&
          unpackRegex.matcher(entry.getName()).matches()) {
        try (InputStream in = jar.getInputStream(entry)) {
          File file = new File(toDir, entry.getName());
          // Security fix: fail on entries that resolve outside the target
          // directory instead of silently writing to an arbitrary path.
          if (!file.getCanonicalPath().startsWith(targetDirPath)) {
            throw new IOException("expanding " + entry.getName()
                + " would create file outside of " + toDir);
          }
          ensureDirectory(file.getParentFile());
          try (OutputStream out = new FileOutputStream(file)) {
            IOUtils.copyBytes(in, out, BUFFER_SIZE);
          }
          if (!file.setLastModified(entry.getTime())) {
            numOfFailedLastModifiedSet++;
          }
        }
      }
    }
    if (numOfFailedLastModifiedSet > 0) {
      // Typo fix: "modfied" -> "modified".
      LOG.warn("Could not set last modified time for {} file(s)",
          numOfFailedLastModifiedSet);
    }
  }
}
/**
 * Ensure the existence of a given directory.
 *
 * @param dir Directory to check
 *
 * @throws IOException if it cannot be created and does not already exist
 */
private static void ensureDirectory(File dir) throws IOException {
  // mkdirs() returns false both on failure and when the directory already
  // exists, so only treat it as an error when the directory is still absent.
  boolean created = dir.mkdirs();
  if (!created && !dir.isDirectory()) {
    throw new IOException("Mkdirs failed to create " + dir.toString());
  }
}
/** Run a Hadoop job jar. If the main class is not in the jar's manifest,
 * then it must be provided on the command line.
 * Delegates to {@link #run(String[])}; any throwable propagates and
 * terminates the JVM. */
public static void main(String[] args) throws Throwable {
  new RunJar().run(args);
}
/**
 * Validates the command line, determines the main class (from the jar
 * manifest or the second argument), unpacks the jar into a temp working
 * directory (deleted by a shutdown hook), and invokes the main class with
 * the remaining arguments. Calls {@code System.exit(-1)} on usage errors.
 *
 * @param args jarFile [mainClass] args...
 * @throws Throwable whatever the invoked main method throws, unwrapped
 */
public void run(String[] args) throws Throwable {
  String usage = "RunJar jarFile [mainClass] args...";
  if (args.length < 1) {
    System.err.println(usage);
    System.exit(-1);
  }
  int firstArg = 0;
  String fileName = args[firstArg++];
  File file = new File(fileName);
  if (!file.exists() || !file.isFile()) {
    System.err.println("JAR does not exist or is not a normal file: " +
        file.getCanonicalPath());
    System.exit(-1);
  }
  String mainClassName = null;
  // Fix: use try-with-resources so the JarFile is closed even when reading
  // the manifest throws; the original only closed it on the success path.
  try (JarFile jarFile = new JarFile(fileName)) {
    Manifest manifest = jarFile.getManifest();
    if (manifest != null) {
      mainClassName = manifest.getMainAttributes().getValue("Main-Class");
    }
  } catch (IOException io) {
    throw new IOException("Error opening job jar: " + fileName)
        .initCause(io);
  }
  if (mainClassName == null) {
    if (args.length < 2) {
      System.err.println(usage);
      System.exit(-1);
    }
    mainClassName = args[firstArg++];
  }
  // Manifest entries may use '/' as a package separator.
  mainClassName = mainClassName.replaceAll("/", ".");
  File tmpDir = new File(System.getProperty("java.io.tmpdir"));
  ensureDirectory(tmpDir);
  final File workDir;
  try {
    workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
  } catch (IOException ioe) {
    // If user has insufficient perms to write to tmpDir, default
    // "Permission denied" message doesn't specify a filename.
    System.err.println("Error creating temp dir in java.io.tmpdir "
        + tmpDir + " due to " + ioe.getMessage());
    System.exit(-1);
    return;
  }
  // createTempFile made a file; replace it with a directory of the same name.
  if (!workDir.delete()) {
    System.err.println("Delete failed for " + workDir);
    System.exit(-1);
  }
  ensureDirectory(workDir);
  // Clean up the unpacked jar on JVM exit.
  ShutdownHookManager.get().addShutdownHook(
      new Runnable() {
        @Override
        public void run() {
          FileUtil.fullyDelete(workDir);
        }
      }, SHUTDOWN_HOOK_PRIORITY);
  unJar(file, workDir);
  ClassLoader loader = createClassLoader(file, workDir);
  Thread.currentThread().setContextClassLoader(loader);
  Class<?> mainClass = Class.forName(mainClassName, true, loader);
  Method main = mainClass.getMethod("main", String[].class);
  List<String> newArgsSubList = Arrays.asList(args)
      .subList(firstArg, args.length);
  String[] newArgs = newArgsSubList
      .toArray(new String[newArgsSubList.size()]);
  try {
    main.invoke(null, new Object[] {newArgs});
  } catch (InvocationTargetException e) {
    // Re-throw the user code's original exception, not the reflection wrapper.
    throw e.getTargetException();
  }
}
/**
 * Creates a classloader based on the environment that was specified by the
 * user. If HADOOP_USE_CLIENT_CLASSLOADER is specified, it creates an
 * application classloader that provides the isolation of the user class space
 * from the hadoop classes and their dependencies. It forms a class space for
 * the user jar as well as the HADOOP_CLASSPATH. Otherwise, it creates a
 * classloader that simply adds the user jar to the classpath.
 */
private ClassLoader createClassLoader(File file, final File workDir)
    throws MalformedURLException {
  // see if the client classloader is enabled
  if (useClientClassLoader()) {
    // Client classpath: workDir/, the jar itself, workDir/classes/ and
    // everything under workDir/lib/.
    StringBuilder cp = new StringBuilder();
    cp.append(workDir).append("/")
        .append(File.pathSeparator).append(file)
        .append(File.pathSeparator).append(workDir).append("/classes/")
        .append(File.pathSeparator).append(workDir).append("/lib/*");
    // HADOOP_CLASSPATH is added to the client classpath
    String hadoopClasspath = getHadoopClasspath();
    if (hadoopClasspath != null && !hadoopClasspath.isEmpty()) {
      cp.append(File.pathSeparator).append(hadoopClasspath);
    }
    // get the system classes, if configured
    String systemClasses = getSystemClasses();
    List<String> systemClassesList;
    if (systemClasses == null) {
      systemClassesList = null;
    } else {
      systemClassesList =
          Arrays.asList(StringUtils.getTrimmedStrings(systemClasses));
    }
    // create an application classloader that isolates the user classes
    return new ApplicationClassLoader(cp.toString(),
        getClass().getClassLoader(), systemClassesList);
  }
  // Normal parent-delegating classloader: workDir/, the jar,
  // workDir/classes/ and each entry under workDir/lib/.
  List<URL> urls = new ArrayList<>();
  urls.add(new File(workDir + "/").toURI().toURL());
  urls.add(file.toURI().toURL());
  urls.add(new File(workDir, "classes/").toURI().toURL());
  File[] libs = new File(workDir, "lib").listFiles();
  if (libs != null) {
    for (File lib : libs) {
      urls.add(lib.toURI().toURL());
    }
  }
  return new URLClassLoader(urls.toArray(new URL[urls.size()]));
}
/** Whether the isolated client classloader is enabled, read from the
 * HADOOP_USE_CLIENT_CLASSLOADER environment variable (false when unset). */
boolean useClientClassLoader() {
  return Boolean.parseBoolean(System.getenv(HADOOP_USE_CLIENT_CLASSLOADER));
}
/** Value of the HADOOP_CLASSPATH environment variable, or null when unset. */
String getHadoopClasspath() {
  return System.getenv(HADOOP_CLASSPATH);
}
/** Value of the HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES environment
 * variable, or null when unset. */
String getSystemClasses() {
  return System.getenv(HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.fit.osgi.utils;
import static org.apache.uima.UIMAFramework.getXMLParser;
import static org.apache.uima.fit.descriptor.OperationalProperties.MODIFIES_CAS_DEFAULT;
import static org.apache.uima.fit.descriptor.OperationalProperties.MULTIPLE_DEPLOYMENT_ALLOWED_DEFAULT;
import static org.apache.uima.fit.descriptor.OperationalProperties.OUTPUTS_NEW_CASES_DEFAULT;
import static org.apache.uima.fit.factory.ConfigurationParameterFactory.createConfigurationData;
import static org.apache.uima.fit.factory.ConfigurationParameterFactory.ensureParametersComeInPairs;
import static org.apache.uima.fit.factory.ConfigurationParameterFactory.setParameters;
import static org.apache.uima.fit.factory.ExternalResourceFactory.bindExternalResource;
import static org.apache.uima.fit.factory.ExternalResourceFactory.createExternalResourceDependencies;
import static org.apache.uima.fit.factory.FsIndexFactory.createFsIndexCollection;
import static org.apache.uima.fit.factory.TypePrioritiesFactory.createTypePriorities;
import static org.apache.uima.util.CasCreationUtils.mergeTypeSystems;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.logging.LogFactory;
import org.apache.uima.Constants;
import org.apache.uima.UIMAFramework;
import org.apache.uima.UimaContext;
import org.apache.uima.UimaContextAdmin;
import org.apache.uima.UimaContextHolder;
import org.apache.uima.analysis_component.AnalysisComponent;
import org.apache.uima.analysis_engine.AnalysisEngine;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.analysis_engine.metadata.AnalysisEngineMetaData;
import org.apache.uima.fit.factory.CapabilityFactory;
import org.apache.uima.fit.factory.ConfigurationParameterFactory.ConfigurationData;
import org.apache.uima.fit.factory.ExternalResourceFactory;
import org.apache.uima.fit.factory.FsIndexFactory;
import org.apache.uima.fit.factory.ResourceCreationSpecifierFactory;
import org.apache.uima.fit.factory.ResourceMetaDataFactory;
import org.apache.uima.fit.internal.ReflectionUtil;
import org.apache.uima.fit.internal.ResourceManagerFactory;
import org.apache.uima.fit.internal.ResourceManagerFactory.ResourceManagerCreator;
import org.apache.uima.resource.ExternalResourceDescription;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.resource.ResourceManager;
import org.apache.uima.resource.metadata.Capability;
import org.apache.uima.resource.metadata.ConfigurationParameter;
import org.apache.uima.resource.metadata.FsIndexCollection;
import org.apache.uima.resource.metadata.OperationalProperties;
import org.apache.uima.resource.metadata.TypePriorities;
import org.apache.uima.resource.metadata.TypeSystemDescription;
import org.apache.uima.util.CasCreationUtils;
import org.apache.uima.util.InvalidXMLException;
import org.apache.uima.util.XMLInputSource;
/**
* A collection of static methods for creating UIMA {@link AnalysisEngineDescription
* AnalysisEngineDescriptions} and {@link AnalysisEngine AnalysisEngines}.
*
* @see <a href="package-summary.html#InstancesVsDescriptors">Why are descriptors better than
* component instances?</a>
*/
public class AnalysisEngineFactoryOSGi {
  /**
   * A {@link ResourceManagerCreator} that hands out a fixed, externally supplied
   * {@link ResourceManager}, unless the calling thread is already running inside a UIMA
   * context — in that case the context's own resource manager is reused.
   */
  public static class DefaultResourceManagerCreator2 implements ResourceManagerCreator {
    // Fallback resource manager used when no UIMA context is active on the thread.
    protected ResourceManager resourceManager;
    public DefaultResourceManagerCreator2(ResourceManager resourceManager) {
      this.resourceManager = resourceManager;
    }
    public ResourceManager newResourceManager() throws ResourceInitializationException {
      UimaContext activeContext = UimaContextHolder.getContext();
      if (activeContext != null) {
        // If we are already in a UIMA context, then we re-use it. Mind that the JCas cannot
        // handle switching across more than one classloader.
        // This can be done since UIMA 2.9.0 and starts being handled in uimaFIT 2.3.0
        // See https://issues.apache.org/jira/browse/UIMA-5056
        return ((UimaContextAdmin) activeContext).getResourceManager();
      } else {
        return resourceManager;
      }
    }
  }
  /**
   * Prevent class instantiation
   */
  private AnalysisEngineFactoryOSGi() {}
  /**
   * Creates an {@link AnalysisEngine} from a component class, using a resource manager whose
   * extension classpath is rooted at the component's own classloader (the OSGi bundle
   * classloader, presumably — this is what distinguishes this factory from plain uimaFIT).
   *
   * NOTE(review): this mutates global state via
   * {@code ResourceManagerFactory.setResourceManagerCreator(..)}; concurrent callers with
   * different component classloaders may interfere — confirm intended usage.
   *
   * @param componentClass the annotator class
   * @param configurationData parameter name / value pairs
   * @return an initialized analysis engine
   * @throws ResourceInitializationException if the engine cannot be created
   * @throws MalformedURLException declared but not visibly thrown here
   */
  public static AnalysisEngine createEngine(Class<? extends AnalysisComponent> componentClass,
      Object... configurationData) throws ResourceInitializationException, MalformedURLException {
    ResourceManager resourceManager = UIMAFramework.newDefaultResourceManager();
    ClassLoader cl = componentClass.getClassLoader();
    resourceManager.setExtensionClassPath(cl, "", true);
    ResourceManagerFactory.setResourceManagerCreator(new DefaultResourceManagerCreator2(resourceManager));
    AnalysisEngineDescription desc = createEngineDescription(componentClass, configurationData);
    // create the AnalysisEngine, initialize it and return it
    return createEngine(desc);
  }
  /**
   * Produces an {@link AnalysisEngine} from a description. When extra configuration data is
   * given, the description is cloned first so the caller's description is left untouched.
   *
   * @param desc the engine description
   * @param configurationData optional parameter name / value pairs overriding the description
   * @return an initialized analysis engine
   * @throws ResourceInitializationException if the engine cannot be produced
   */
  public static AnalysisEngine createEngine(AnalysisEngineDescription desc,
      Object... configurationData) throws ResourceInitializationException {
    if (configurationData == null || configurationData.length == 0) {
      return UIMAFramework.produceAnalysisEngine(desc, ResourceManagerFactory.newResourceManager(),
          null);
    } else {
      AnalysisEngineDescription descClone = (AnalysisEngineDescription) desc.clone();
      ResourceCreationSpecifierFactory.setConfigurationParameters(descClone, configurationData);
      return UIMAFramework.produceAnalysisEngine(descClone,
          ResourceManagerFactory.newResourceManager(), null);
    }
  }
  /**
   * Builds an engine description for the component class, auto-detecting the type system
   * (from OSGi bundles, see {@link #createTypeSystemDescription()}), type priorities and
   * FS index collections.
   *
   * @param componentClass the annotator class
   * @param configurationData parameter name / value pairs
   * @return the engine description
   * @throws ResourceInitializationException if any descriptor part cannot be created
   */
  public static AnalysisEngineDescription createEngineDescription(
      Class<? extends AnalysisComponent> componentClass, Object... configurationData)
      throws ResourceInitializationException {
    TypeSystemDescription typeSystem = createTypeSystemDescription();
    TypePriorities typePriorities = createTypePriorities();
    FsIndexCollection fsIndexCollection = createFsIndexCollection();
    return createEngineDescription(componentClass, typeSystem, typePriorities, fsIndexCollection,
        (Capability[]) null, configurationData);
  }
  /**
   * Builds an engine description, splitting the varargs configuration into external-resource
   * bindings and plain configuration parameters before delegating.
   *
   * @param componentClass the annotator class
   * @param typeSystem type system to use (may be null)
   * @param typePriorities type priorities to use (may be null)
   * @param indexes FS index collection to use (may be null)
   * @param capabilities capabilities to set, or null to derive them from annotations
   * @param configurationData parameter name / value pairs (values may be
   *        {@link ExternalResourceDescription}s)
   * @return the engine description
   * @throws ResourceInitializationException if the description cannot be created
   */
  public static AnalysisEngineDescription createEngineDescription(
      Class<? extends AnalysisComponent> componentClass, TypeSystemDescription typeSystem,
      TypePriorities typePriorities, FsIndexCollection indexes, Capability[] capabilities,
      Object... configurationData) throws ResourceInitializationException {
    ensureParametersComeInPairs(configurationData);
    // Extract ExternalResourceDescriptions from configurationData
    // <ParamterName, ExternalResourceDescription> will be stored in this map
    Map<String, ExternalResourceDescription> externalResources = ExternalResourceFactory
        .extractExternalResourceParameters(configurationData);
    // Create primitive description normally
    ConfigurationData cdata = createConfigurationData(configurationData);
    return createEngineDescription(componentClass, typeSystem, typePriorities, indexes,
        capabilities, cdata.configurationParameters, cdata.configurationValues,
        externalResources);
  }
  /**
   * Fully assembles a primitive engine description: operational properties, resource metadata,
   * configuration parameters, type system, type priorities, FS indexes, capabilities and
   * external-resource bindings. The individual steps below follow the UIMA descriptor
   * population order; do not reorder casually.
   *
   * @param componentClass the annotator class
   * @param typeSystem type system to use (may be null)
   * @param typePriorities type priorities to use (may be null)
   * @param indexes FS indexes merged with those declared on the component (may be null)
   * @param capabilities capabilities to set, or null to derive them from annotations
   * @param configurationParameters declared configuration parameters
   * @param configurationValues values matching the declared parameters
   * @param externalResources map from parameter name to resource description (may be null)
   * @return the assembled engine description
   * @throws ResourceInitializationException if descriptor assembly fails
   */
  public static AnalysisEngineDescription createEngineDescription(
      Class<? extends AnalysisComponent> componentClass, TypeSystemDescription typeSystem,
      TypePriorities typePriorities, FsIndexCollection indexes, Capability[] capabilities,
      ConfigurationParameter[] configurationParameters, Object[] configurationValues,
      Map<String, ExternalResourceDescription> externalResources)
      throws ResourceInitializationException {
    // create the descriptor and set configuration parameters
    AnalysisEngineDescription desc = UIMAFramework.getResourceSpecifierFactory()
        .createAnalysisEngineDescription();
    desc.setFrameworkImplementation(Constants.JAVA_FRAMEWORK_NAME);
    desc.setPrimitive(true);
    desc.setAnnotatorImplementationName(componentClass.getName());
    // Operational properties come from the @OperationalProperties annotation when
    // present (also inherited from superclasses), otherwise from uimaFIT defaults.
    org.apache.uima.fit.descriptor.OperationalProperties componentAnno = ReflectionUtil
        .getInheritableAnnotation(org.apache.uima.fit.descriptor.OperationalProperties.class,
            componentClass);
    if (componentAnno != null) {
      OperationalProperties op = desc.getAnalysisEngineMetaData().getOperationalProperties();
      op.setMultipleDeploymentAllowed(componentAnno.multipleDeploymentAllowed());
      op.setModifiesCas(componentAnno.modifiesCas());
      op.setOutputsNewCASes(componentAnno.outputsNewCases());
    } else {
      OperationalProperties op = desc.getAnalysisEngineMetaData().getOperationalProperties();
      op.setMultipleDeploymentAllowed(MULTIPLE_DEPLOYMENT_ALLOWED_DEFAULT);
      op.setModifiesCas(MODIFIES_CAS_DEFAULT);
      op.setOutputsNewCASes(OUTPUTS_NEW_CASES_DEFAULT);
    }
    // Configure resource meta data
    AnalysisEngineMetaData meta = desc.getAnalysisEngineMetaData();
    ResourceMetaDataFactory.configureResourceMetaData(meta, componentClass);
    // set parameters
    setParameters(desc, componentClass, configurationParameters, configurationValues);
    // set the type system
    if (typeSystem != null) {
      desc.getAnalysisEngineMetaData().setTypeSystem(typeSystem);
    }
    if (typePriorities != null) {
      desc.getAnalysisEngineMetaData().setTypePriorities(typePriorities);
    }
    // set indexes from the argument to this call and from the annotation present in the
    // component
    List<FsIndexCollection> fsIndexes = new ArrayList<FsIndexCollection>();
    if (indexes != null) {
      fsIndexes.add(indexes);
    }
    fsIndexes.add(FsIndexFactory.createFsIndexCollection(componentClass));
    FsIndexCollection aggIndexColl = CasCreationUtils.mergeFsIndexes(fsIndexes,
        ResourceManagerFactory.newResourceManager());
    desc.getAnalysisEngineMetaData().setFsIndexCollection(aggIndexColl);
    // set capabilities from the argument to this call or from the annotation present in the
    // component if the argument is null
    if (capabilities != null) {
      desc.getAnalysisEngineMetaData().setCapabilities(capabilities);
    } else {
      Capability capability = CapabilityFactory.createCapability(componentClass);
      if (capability != null) {
        desc.getAnalysisEngineMetaData().setCapabilities(new Capability[] { capability });
      }
    }
    // Extract external resource dependencies
    desc.setExternalResourceDependencies(createExternalResourceDependencies(componentClass));
    // Bind External Resources
    if (externalResources != null) {
      for (Entry<String, ExternalResourceDescription> e : externalResources.entrySet()) {
        bindExternalResource(desc, e.getKey(), e.getValue());
      }
    }
    return desc;
  }
  /**
   * Collects type system descriptors from OSGi bundle locations tracked by
   * {@code MyBundleTracker} and merges them into a single {@link TypeSystemDescription}.
   * Locations that are not parseable as type system XML are logged and skipped;
   * I/O failures abort with a {@link ResourceInitializationException}.
   *
   * @return the merged type system description
   * @throws ResourceInitializationException on I/O failure or merge failure
   */
  public static TypeSystemDescription createTypeSystemDescription()
      throws ResourceInitializationException {
    List<TypeSystemDescription> tsdList = new ArrayList<TypeSystemDescription>();
    // NOTE(review): replaces uimaFIT's classpath scan (scanTypeDescriptors())
    // with OSGi bundle tracking.
    for (String location : MyBundleTracker.getLocations()) {
      try {
        XMLInputSource xmlInputType1 = new XMLInputSource(location);
        tsdList.add(getXMLParser().parseTypeSystemDescription(xmlInputType1));
        LogFactory.getLog(TypeSystemDescription.class).debug(
                "Detected type system at [" + location + "]");
      } catch (IOException e) {
        throw new ResourceInitializationException(e);
      } catch (InvalidXMLException e) {
        // Not every tracked location is a type system descriptor; skip quietly.
        LogFactory.getLog(TypeSystemDescription.class).warn(
                "[" + location + "] is not a type file. Ignoring.", e);
      }
    }
    ResourceManager resMgr = ResourceManagerFactory.newResourceManager();
    return mergeTypeSystems(tsdList, resMgr);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.joshua.decoder.ff;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.joshua.decoder.JoshuaConfiguration;
import org.apache.joshua.decoder.chart_parser.SourcePath;
import org.apache.joshua.decoder.ff.state_maintenance.DPState;
import org.apache.joshua.decoder.ff.tm.Rule;
import org.apache.joshua.decoder.hypergraph.HGNode;
import org.apache.joshua.decoder.segment_file.Sentence;
/**
* <p>This class defines Joshua's feature function interface, for both sparse and
* dense features. It is immediately inherited by StatelessFF and StatefulFF,
* which provide functionality common to stateless and stateful features,
* respectively. Any feature implementation should extend those classes, and not
* this one. The distinction between stateless and stateful features is somewhat
* narrow: all features have the opportunity to return an instance of a
* {@link DPState} object, and stateless ones just return null.</p>
*
* <p>Features in Joshua work like templates. Each feature function defines any
* number of actual features, which are associated with weights. The task of the
* feature function is to compute the features that are fired in different
* circumstances and then return the inner product of those features with the
* weight vector. Feature functions can also produce estimates of their future
* cost (via {@link org.apache.joshua.decoder.ff.FeatureFunction#estimateCost(Rule, Sentence)});
* these values are not used in computing the
* score, but are only used for sorting rules during cube pruning. The
* individual features produced by each template should have globally unique
* names; a good convention is to prefix each feature with the name of the
* template that produced it.</p>
*
* <p>Joshua does not retain individual feature values while decoding, since this
* requires keeping a sparse feature vector along every hyperedge, which can be
* expensive. Instead, it computes only the weighted cost of each edge. If the
* individual feature values are requested, the feature functions are replayed
* in post-processing, say during k-best list extraction. This is implemented in
* a generic way by passing an {@link Accumulator} object to the compute()
* function. During decoding, the accumulator simply sums weighted features in a
* scalar. During k-best extraction, when individual feature values are needed,
* a {@link FeatureAccumulator} is used to retain the individual values.</p>
*
* @author Matt Post post@cs.jhu.edu
* @author Juri Ganitkevich juri@cs.jhu.edu
*/
public abstract class FeatureFunction {
  /*
   * The name of the feature function; this generally matches the weight name on
   * the config file. This can also be used as a prefix for feature / weight
   * names, for templates that define multiple features.
   */
  protected String name = null;
  /*
   * The list of features each function can contribute, along with the dense feature IDs.
   */
  protected String[] denseFeatureNames = null;
  protected int[] denseFeatureIDs = null;
  /*
   * The first dense feature index
   */
  protected int denseFeatureIndex = -1;
  // The list of arguments passed to the feature, and the hash for the parsed args
  protected final String[] args;
  protected HashMap<String, String> parsedArgs = null;
  /*
   * The global weight vector used by the decoder, passed it when the feature is
   * instantiated
   */
  protected final FeatureVector weights;
  /* The config */
  protected final JoshuaConfiguration config;
  /*
   * Matches an argument key of the form "-name". Compiled once here instead of
   * once per argument inside parseArgs() (Pattern instances are immutable and
   * thread-safe, so sharing is safe).
   */
  private static final Pattern ARG_KEY_PATTERN = Pattern.compile("^-[a-zA-Z]\\S+");
  public String getName() {
    return name;
  }
  // Whether the feature has state.
  public abstract boolean isStateful();
  public FeatureFunction(FeatureVector weights, String name, String[] args, JoshuaConfiguration config) {
    this.weights = weights;
    this.name = name;
    this.args = args;
    this.config = config;
    this.parsedArgs = FeatureFunction.parseArgs(args);
  }
  /**
   * Any feature function can use this to report dense features names to the master code. The
   * parameter tells the feature function the index of the first available dense feature ID; the feature
   * function will then use IDs (id..id+names.size()-1).
   *
   * @param id the id of the first dense feature id to use
   * @return a list of dense feature names
   */
  public ArrayList<String> reportDenseFeatures(int id) {
    return new ArrayList<>();
  }
  /**
   * Human-readable description of this feature, including its weight when the
   * weight vector can be queried.
   *
   * @return a log-friendly string for this feature function
   */
  public String logString() {
    try {
      return String.format("%s (weight %.3f)", name, weights.getSparse(name));
    } catch (RuntimeException e) {
      // Fall back to the bare name if the weight lookup fails.
      return name;
    }
  }
  /**
   * This is the main function for defining feature values. The implementor
   * should compute all the features along the hyperedge, calling
   * {@link org.apache.joshua.decoder.ff.FeatureFunction.Accumulator#add(String, float)}
   * for each feature. It then returns the newly-computed dynamic
   * programming state for this feature (for example, for the
   * {@link org.apache.joshua.decoder.ff.lm.LanguageModelFF} feature, this returns the new language model
   * context). For stateless features, this value is null.
   *
   * Note that the accumulator accumulates *unweighted* feature values. The
   * feature vector is multiplied times the weight vector later on.
   *
   * @param rule {@link org.apache.joshua.decoder.ff.tm.Rule} to be utilized within computation
   * @param tailNodes {@link java.util.List} of {@link org.apache.joshua.decoder.hypergraph.HGNode} tail nodes
   * @param i todo
   * @param j todo
   * @param sourcePath information about a path taken through the source {@link org.apache.joshua.lattice.Lattice}
   * @param sentence {@link org.apache.joshua.lattice.Lattice} input
   * @param acc {@link org.apache.joshua.decoder.ff.FeatureFunction.Accumulator} object permitting generalization of feature computation
   * @return the new dynamic programming state (null for stateless features)
   */
  public abstract DPState compute(Rule rule, List<HGNode> tailNodes, int i, int j,
      SourcePath sourcePath, Sentence sentence, Accumulator acc);
  /**
   * Feature functions must override this. StatefulFF and StatelessFF provide
   * reasonable defaults since most features do not fire on the goal node.
   *
   * @param tailNode single {@link org.apache.joshua.decoder.hypergraph.HGNode} representing tail node
   * @param i todo
   * @param j todo
   * @param sourcePath information about a path taken through the source {@link org.apache.joshua.lattice.Lattice}
   * @param sentence {@link org.apache.joshua.lattice.Lattice} input
   * @param acc {@link org.apache.joshua.decoder.ff.FeatureFunction.Accumulator} object permitting generalization of feature computation
   * @return the DPState (null if none)
   */
  public abstract DPState computeFinal(HGNode tailNode, int i, int j, SourcePath sourcePath,
      Sentence sentence, Accumulator acc);
  /**
   * This is a convenience function for retrieving the features fired when
   * applying a rule, provided for backward compatibility.
   *
   * Returns the *unweighted* cost of the features delta computed at this
   * position. Note that this is a feature delta, so existing feature costs of
   * the tail nodes should not be incorporated, and it is very important not to
   * incorporate the feature weights. This function is used in the kbest
   * extraction code but could also be used in computing the cost.
   *
   * @param rule {@link org.apache.joshua.decoder.ff.tm.Rule} to be utilized within computation
   * @param tailNodes {@link java.util.List} of {@link org.apache.joshua.decoder.hypergraph.HGNode} tail nodes
   * @param i todo
   * @param j todo
   * @param sourcePath information about a path taken through the source {@link org.apache.joshua.lattice.Lattice}
   * @param sentence {@link org.apache.joshua.lattice.Lattice} input
   * @return an *unweighted* feature delta
   */
  public final FeatureVector computeFeatures(Rule rule, List<HGNode> tailNodes, int i, int j,
      SourcePath sourcePath, Sentence sentence) {
    FeatureAccumulator features = new FeatureAccumulator();
    compute(rule, tailNodes, i, j, sourcePath, sentence, features);
    return features.getFeatures();
  }
  /**
   * This function is called for the final transition. For example, the
   * LanguageModel feature function treats the last rule specially. It needs to
   * return the *weighted* cost of applying the feature. Provided for backward
   * compatibility.
   *
   * @param tailNode single {@link org.apache.joshua.decoder.hypergraph.HGNode} representing tail node
   * @param i todo
   * @param j todo
   * @param sourcePath information about a path taken through the source {@link org.apache.joshua.lattice.Lattice}
   * @param sentence {@link org.apache.joshua.lattice.Lattice} input
   * @return a *weighted* feature cost
   */
  public final float computeFinalCost(HGNode tailNode, int i, int j, SourcePath sourcePath,
      Sentence sentence) {
    ScoreAccumulator score = new ScoreAccumulator();
    computeFinal(tailNode, i, j, sourcePath, sentence, score);
    return score.getScore();
  }
  /**
   * Returns the *unweighted* feature delta for the final transition (e.g., for
   * the language model feature function). Provided for backward compatibility.
   *
   * @param tailNode single {@link org.apache.joshua.decoder.hypergraph.HGNode} representing tail node
   * @param i todo
   * @param j todo
   * @param sourcePath information about a path taken through the source {@link org.apache.joshua.lattice.Lattice}
   * @param sentence {@link org.apache.joshua.lattice.Lattice} input
   * @return an *unweighted* feature vector (the original javadoc said
   *         "weighted", contradicting the description above; a
   *         FeatureAccumulator records raw values — see {@link FeatureAccumulator#add})
   */
  public final FeatureVector computeFinalFeatures(HGNode tailNode, int i, int j,
      SourcePath sourcePath, Sentence sentence) {
    FeatureAccumulator features = new FeatureAccumulator();
    computeFinal(tailNode, i, j, sourcePath, sentence, features);
    return features.getFeatures();
  }
  /**
   * This function is called when sorting rules for cube pruning. It must return
   * the *weighted* estimated cost of applying a feature. This need not be the
   * actual cost of applying the rule in context. Basically, it's the inner
   * product of the weight vector and all features found in the grammar rule,
   * though some features (like LanguageModelFF) can also compute some of their
   * values. This is just an estimate of the cost, which helps do better
   * sorting. Later, the real cost of this feature function is called via
   * compute();
   *
   * @param rule {@link org.apache.joshua.decoder.ff.tm.Rule} to be utilized within computation
   * @param sentence {@link org.apache.joshua.lattice.Lattice} input
   * @return the *weighted* cost of applying the feature.
   */
  public abstract float estimateCost(Rule rule, Sentence sentence);
  /**
   * This feature is called to produce a *weighted estimate* of the future cost
   * of applying this feature. This value is not incorporated into the model
   * score but is used in pruning decisions. Stateless features return 0.0f by
   * default, but Stateful features might want to override this.
   *
   * @param rule {@link org.apache.joshua.decoder.ff.tm.Rule} to be utilized within computation
   * @param state todo
   * @param sentence {@link org.apache.joshua.lattice.Lattice} input
   * @return the *weighted* future cost estimate of applying this rule in
   *         context.
   */
  public abstract float estimateFutureCost(Rule rule, DPState state, Sentence sentence);
  /**
   * Parses the arguments passed to a feature function in the Joshua config file TODO: Replace this
   * with a proper CLI library at some point Expects key value pairs in the form : -argname value
   * Any key without a value is added with an empty string as value Multiple values for the same key
   * are not parsed. The first one is used.
   *
   * @param args A string with the raw arguments and their names
   * @return A hash with the keys and the values of the string
   */
  public static HashMap<String, String> parseArgs(String[] args) {
    HashMap<String, String> parsedArgs = new HashMap<>();
    boolean lookingForValue = false;
    String currentKey = null;
    for (String arg : args) {
      // Use the shared precompiled pattern (was recompiled on every iteration).
      Matcher argKey = ARG_KEY_PATTERN.matcher(arg);
      if (argKey.find()) {
        // This is a key
        // First check to see if there is a key that is waiting to be written
        if (lookingForValue) {
          // This is a key with no specified value
          parsedArgs.put(currentKey, "");
        }
        // Now store the new key and look for its value
        currentKey = arg.substring(1);
        lookingForValue = true;
      } else {
        // This is a value
        if (lookingForValue) {
          parsedArgs.put(currentKey, arg);
          lookingForValue = false;
        }
      }
    }
    // make sure we add the last key without value
    if (lookingForValue && currentKey != null) {
      // end of line, no value
      parsedArgs.put(currentKey, "");
    }
    return parsedArgs;
  }
  /**
   * Accumulator objects allow us to generalize feature computation.
   * ScoreAccumulator takes (feature,value) pairs and simply stores the weighted
   * sum (for decoding). FeatureAccumulator records the named feature values
   * (for k-best extraction).
   */
  public interface Accumulator {
    void add(String name, float value);
    void add(int id, float value);
  }
  public class ScoreAccumulator implements Accumulator {
    private float score;
    public ScoreAccumulator() {
      this.score = 0.0f;
    }
    @Override
    public void add(String name, float value) {
      score += value * weights.getSparse(name);
    }
    @Override
    public void add(int id, float value) {
      score += value * weights.getDense(id);
    }
    public float getScore() {
      return score;
    }
  }
  public class FeatureAccumulator implements Accumulator {
    private final FeatureVector features;
    public FeatureAccumulator() {
      this.features = new FeatureVector();
    }
    @Override
    public void add(String name, float value) {
      features.increment(name, value);
    }
    @Override
    public void add(int id, float value) {
      features.increment(id, value);
    }
    public FeatureVector getFeatures() {
      return features;
    }
  }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.cycle.util;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.commons.io.IOUtils;
import org.camunda.bpm.cycle.exception.CycleException;
/**
* @author Tom Baeyens
* @author Frederik Heremans
* @author Joram Barrez
*
* @author nico.rehwaldt
*/
public class IoUtil {
/**
 * Controls if intermediate results are written to files.
 */
public static boolean DEBUG;
/**
 * Directory, into which intermediate results are written.
 */
public static String DEBUG_DIR;
// Buffer size in bytes. NOTE(review): not referenced in the visible portion
// of this class — presumably used by stream-copy helpers further down.
private static final int BUFFERSIZE = 4096;
/**
 * Drains the given input stream completely into a byte array. The stream is
 * not closed by this method.
 *
 * @param inputStream the stream to read from
 * @param inputStreamName name included in the error message on failure
 * @return all bytes read from the stream
 * @throws CycleException if reading fails
 */
public static byte[] readInputStream(InputStream inputStream, String inputStreamName) {
  ByteArrayOutputStream collected = new ByteArrayOutputStream();
  byte[] chunk = new byte[16*1024];
  try {
    for (int n = inputStream.read(chunk); n != -1; n = inputStream.read(chunk)) {
      collected.write(chunk, 0, n);
    }
  } catch (Exception e) {
    throw new CycleException("Couldn't read input stream " + inputStreamName, e);
  }
  return collected.toByteArray();
}
/**
 * Reads a classpath file fully and returns its contents as a string, decoded
 * with the platform default charset (unchanged from previous behavior).
 *
 * @param filePath classpath-relative path of the file
 * @return the file contents
 * @throws CycleException if the file cannot be located or read
 */
public static String readFileAsString(String filePath) {
  byte[] buffer = new byte[(int) getFile(filePath).length()];
  BufferedInputStream inputStream = null;
  try {
    inputStream = new BufferedInputStream(new FileInputStream(getFile(filePath)));
    // Bug fix: a single read() may return fewer bytes than the buffer size,
    // silently truncating larger files. Loop until the buffer is full or EOF.
    int offset = 0;
    while (offset < buffer.length) {
      int read = inputStream.read(buffer, offset, buffer.length - offset);
      if (read == -1) {
        break; // file shrank since length() was sampled; keep what we have
      }
      offset += read;
    }
  } catch(Exception e) {
    throw new CycleException("Couldn't read file " + filePath + ": " + e.getMessage());
  } finally {
    IoUtil.closeSilently(inputStream);
  }
  return new String(buffer);
}
/**
 * Opens an absolute classpath resource as a stream. The caller is responsible
 * for closing the returned stream.
 *
 * @param absoluteClassPath absolute classpath location (e.g. "/foo/bar.xml")
 * @return an open stream for the resource
 * @throws CycleException if the resource cannot be found
 */
public static InputStream readFileAsInputStream(String absoluteClassPath) {
  // Bug fix: the original called IoUtil.class.getClass(), which yields
  // java.lang.Class — a class loaded by the bootstrap classloader — so
  // application-classpath resources could never be resolved. Resolve the
  // resource against IoUtil itself instead.
  InputStream inputStream = IoUtil.class.getResourceAsStream(absoluteClassPath);
  if (inputStream == null) {
    throw new CycleException("Unable to read " + absoluteClassPath + " as inputstream.");
  }
  return inputStream;
}
public static File getFile(String filePath) {
URL url = IoUtil.class.getClassLoader().getResource(filePath);
try {
return new File(url.toURI());
} catch (Exception e) {
throw new CycleException("Couldn't get file " + filePath + ": " + e.getMessage());
}
}
public static void writeStringToFile(String content, String filePath) {
BufferedOutputStream outputStream = null;
try {
outputStream = new BufferedOutputStream(new FileOutputStream(getFile(filePath)));
outputStream.write(content.getBytes());
outputStream.flush();
} catch(Exception e) {
throw new CycleException("Couldn't write file " + filePath, e);
} finally {
IoUtil.closeSilently(outputStream);
}
}
public static void writeStringToFileIfDebug(String content, String filename, String suffix) {
if (DEBUG) {
String filePath = "";
if (DEBUG_DIR != null && DEBUG_DIR.length() > 0) {
filePath = DEBUG_DIR + System.getProperty("file.separator");
File debugDirectory = new File(filePath);
if (!debugDirectory.exists()) {
if (!debugDirectory.mkdirs()) {
throw new RuntimeException("Unable to create debugDirectory: " + debugDirectory.getAbsolutePath());
}
}
filePath = filePath + filename + "." + new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss.SSS_").format(new Date()) + suffix;
try {
FileWriter writer = new FileWriter(filePath);
writer.write(content);
writer.flush();
writer.close();
} catch (IOException e) {
throw new RuntimeException("Unable to write debug file: " + filePath, e);
}
}
}
}
/**
* Closes the given stream. The same as calling {@link InputStream#close()}, but
* errors while closing are silently ignored.
*/
public static void closeSilently(InputStream inputStream) {
try {
if(inputStream != null) {
inputStream.close();
}
} catch(IOException ignore) {
// Exception is silently ignored
}
}
public static void closeSilently(InputStream ... streams) {
for (InputStream is: streams) {
closeSilently(is);
}
}
/**
* Closes the given stream. The same as calling {@link OutputStream#close()}, but
* errors while closing are silently ignored.
*/
public static void closeSilently(OutputStream outputStream) {
try {
if(outputStream != null) {
outputStream.close();
}
} catch(IOException ignore) {
// Exception is silently ignored
}
}
public static void closeSilently(OutputStream ... streams) {
for (OutputStream os: streams) {
closeSilently(os);
}
}
public static int copyBytes(InputStream in, OutputStream out) throws IOException {
if (in == null || out == null) {
throw new IllegalArgumentException("In/OutStream cannot be null");
}
try {
int total = 0;
byte[] buffer = new byte[BUFFERSIZE];
for (int bytesRead; (bytesRead = in.read(buffer)) != -1;) {
out.write(buffer, 0, bytesRead);
total += bytesRead;
}
return total;
} catch (IOException e) {
throw e;
} finally {
if (in != null) {
in.close();
}
}
}
public static String toString(InputStream input) throws IOException {
return IOUtils.toString(input);
}
public static String toString(InputStream input, String encoding) throws IOException {
return IOUtils.toString(input, encoding);
}
/**
* Returns an input stream serving the given argument
*
* @param result
* @param encoding
* @return
*/
public static InputStream toInputStream(String result, String encoding) {
try {
return new ByteArrayInputStream(result.getBytes(encoding));
} catch (UnsupportedEncodingException ex) {
throw new RuntimeException("Unsupported encoding", ex);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.bookkeeper.stream.protocol.util;
import static org.apache.bookkeeper.stream.protocol.ProtocolConstants.DEFAULT_STREAM_CONF;
import static org.apache.bookkeeper.stream.protocol.util.ProtoUtils.createCreateNamespaceRequest;
import static org.apache.bookkeeper.stream.protocol.util.ProtoUtils.createDeleteNamespaceRequest;
import static org.apache.bookkeeper.stream.protocol.util.ProtoUtils.createGetNamespaceRequest;
import static org.apache.bookkeeper.stream.protocol.util.ProtoUtils.isStreamCreated;
import static org.apache.bookkeeper.stream.protocol.util.ProtoUtils.isStreamWritable;
import static org.apache.bookkeeper.stream.protocol.util.ProtoUtils.keyRangeOverlaps;
import static org.apache.bookkeeper.stream.protocol.util.ProtoUtils.validateNamespaceName;
import static org.apache.bookkeeper.stream.protocol.util.ProtoUtils.validateStreamName;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.apache.bookkeeper.stream.proto.NamespaceConfiguration;
import org.apache.bookkeeper.stream.proto.RangeProperties;
import org.apache.bookkeeper.stream.proto.StreamMetadata.LifecycleState;
import org.apache.bookkeeper.stream.proto.StreamMetadata.ServingState;
import org.apache.bookkeeper.stream.proto.storage.CreateNamespaceRequest;
import org.apache.bookkeeper.stream.proto.storage.DeleteNamespaceRequest;
import org.apache.bookkeeper.stream.proto.storage.GetNamespaceRequest;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
/**
* Unit test of {@link ProtoUtils}.
*/
public class TestProtoUtils {

    @Rule
    public final TestName name = new TestName();

    @Test
    public void testValidateNamespaceName() {
        assertTrue(validateNamespaceName("namespace_name"));
        assertTrue(validateNamespaceName("NamespaceName"));
        assertTrue(validateNamespaceName("9NamespaceName"));
        assertTrue(validateNamespaceName("namespace-name"));
        assertTrue(validateNamespaceName("!namespace_name"));
        assertFalse(validateNamespaceName(" namespace_name"));
        assertFalse(validateNamespaceName("<namespace_name"));
        assertFalse(validateNamespaceName(">namespace_name"));
        assertFalse(validateNamespaceName(""));
        assertFalse(validateNamespaceName(null));
    }

    @Test
    public void testValidateStreamName() {
        assertTrue(validateStreamName("stream_name"));
        assertTrue(validateStreamName("StreamName"));
        assertTrue(validateStreamName("9StreamName"));
        assertTrue(validateStreamName("stream-name"));
        assertTrue(validateStreamName("!stream_name"));
        // BUGFIX: the five negative assertions below previously called
        // validateNamespaceName(...) (copy-paste from the test above), so the
        // stream-name rejection paths were never exercised.
        assertFalse(validateStreamName(" stream_name"));
        assertFalse(validateStreamName("<stream_name"));
        assertFalse(validateStreamName(">stream_name"));
        assertFalse(validateStreamName(""));
        assertFalse(validateStreamName(null));
    }

    @Test
    public void testKeyRangeOverlaps1() {
        assertFalse(keyRangeOverlaps(1000L, 2000L, 3000L, 4000L));
        assertTrue(keyRangeOverlaps(1000L, 2000L, 1500L, 2500L));
        assertTrue(keyRangeOverlaps(1000L, 2000L, 1500L, 1800L));
        assertTrue(keyRangeOverlaps(1000L, 3500L, 3000L, 4000L));
        assertTrue(keyRangeOverlaps(3200L, 3500L, 3000L, 4000L));
    }

    @Test
    public void testKeyRangeOverlaps2() {
        assertFalse(keyRangeOverlaps(
            Pair.of(1000L, 2000L),
            Pair.of(3000L, 4000L)));
        assertTrue(keyRangeOverlaps(
            Pair.of(1000L, 2000L),
            Pair.of(1500L, 2500L)));
        assertTrue(keyRangeOverlaps(
            Pair.of(1000L, 2000L),
            Pair.of(1500L, 1800L)));
        assertTrue(keyRangeOverlaps(
            Pair.of(1000L, 3500L),
            Pair.of(3000L, 4000L)));
        assertTrue(keyRangeOverlaps(
            Pair.of(3200L, 3500L),
            Pair.of(3000L, 4000L)));
    }

    // Helper producing a range with fixed container/range ids; only the hash-key
    // interval varies across the overlap tests below.
    private static RangeProperties createRangeMeta(long startKey, long endKey) {
        return RangeProperties.newBuilder()
            .setStartHashKey(startKey)
            .setEndHashKey(endKey)
            .setStorageContainerId(1234L)
            .setRangeId(1234L)
            .build();
    }

    @Test
    public void testKeyRangeOverlaps3() {
        assertFalse(keyRangeOverlaps(
            createRangeMeta(1000L, 2000L),
            createRangeMeta(3000L, 4000L)));
        assertTrue(keyRangeOverlaps(
            createRangeMeta(1000L, 2000L),
            createRangeMeta(1500L, 2500L)));
        assertTrue(keyRangeOverlaps(
            createRangeMeta(1000L, 2000L),
            createRangeMeta(1500L, 1800L)));
        assertTrue(keyRangeOverlaps(
            createRangeMeta(1000L, 3500L),
            createRangeMeta(3000L, 4000L)));
        assertTrue(keyRangeOverlaps(
            createRangeMeta(3200L, 3500L),
            createRangeMeta(3000L, 4000L)));
    }

    @Test
    public void testKeyRangeOverlaps4() {
        assertFalse(keyRangeOverlaps(
            createRangeMeta(1000L, 2000L),
            Pair.of(3000L, 4000L)));
        assertTrue(keyRangeOverlaps(
            createRangeMeta(1000L, 2000L),
            Pair.of(1500L, 2500L)));
        assertTrue(keyRangeOverlaps(
            createRangeMeta(1000L, 2000L),
            Pair.of(1500L, 1800L)));
        assertTrue(keyRangeOverlaps(
            createRangeMeta(1000L, 3500L),
            Pair.of(3000L, 4000L)));
        assertTrue(keyRangeOverlaps(
            createRangeMeta(3200L, 3500L),
            Pair.of(3000L, 4000L)));
    }

    @Test
    public void testKeyRangeOverlaps5() {
        assertFalse(keyRangeOverlaps(
            Pair.of(1000L, 2000L),
            createRangeMeta(3000L, 4000L)));
        assertTrue(keyRangeOverlaps(
            Pair.of(1000L, 2000L),
            createRangeMeta(1500L, 2500L)));
        assertTrue(keyRangeOverlaps(
            Pair.of(1000L, 2000L),
            createRangeMeta(1500L, 1800L)));
        assertTrue(keyRangeOverlaps(
            Pair.of(1000L, 3500L),
            createRangeMeta(3000L, 4000L)));
        assertTrue(keyRangeOverlaps(
            Pair.of(3200L, 3500L),
            createRangeMeta(3000L, 4000L)));
    }

    @Test
    public void testIsStreamCreated() {
        assertFalse(isStreamCreated(LifecycleState.UNINIT));
        assertFalse(isStreamCreated(LifecycleState.CREATING));
        assertTrue(isStreamCreated(LifecycleState.CREATED));
        assertTrue(isStreamCreated(LifecycleState.FENCING));
        assertTrue(isStreamCreated(LifecycleState.FENCED));
    }

    @Test
    public void testIsStreamWritable() {
        assertTrue(isStreamWritable(ServingState.WRITABLE));
        assertFalse(isStreamWritable(ServingState.READONLY));
    }

    //
    // Namespace API
    //

    @Test
    public void testCreateCreateNamespaceRequest() {
        NamespaceConfiguration nsConf = NamespaceConfiguration.newBuilder()
            .setDefaultStreamConf(DEFAULT_STREAM_CONF)
            .build();
        CreateNamespaceRequest request = createCreateNamespaceRequest(
            name.getMethodName(),
            nsConf);
        assertEquals(name.getMethodName(), request.getName());
        assertEquals(nsConf, request.getNsConf());
    }

    @Test
    public void testCreateDeleteNamespaceRequest() {
        DeleteNamespaceRequest request = createDeleteNamespaceRequest(
            name.getMethodName());
        assertEquals(name.getMethodName(), request.getName());
    }

    @Test
    public void testCreateGetNamespaceRequest() {
        GetNamespaceRequest request = createGetNamespaceRequest(
            name.getMethodName());
        assertEquals(name.getMethodName(), request.getName());
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.resourcemanager.appservice.implementation;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.appservice.AppServiceManager;
import com.azure.resourcemanager.appservice.models.AppServicePlan;
import com.azure.resourcemanager.appservice.models.CsmPublishingProfileOptions;
import com.azure.resourcemanager.appservice.models.CsmSlotEntity;
import com.azure.resourcemanager.appservice.models.HostnameBinding;
import com.azure.resourcemanager.appservice.models.MSDeploy;
import com.azure.resourcemanager.appservice.models.OperatingSystem;
import com.azure.resourcemanager.appservice.models.PricingTier;
import com.azure.resourcemanager.appservice.models.PublishingProfile;
import com.azure.resourcemanager.appservice.models.WebAppBase;
import com.azure.resourcemanager.appservice.models.WebAppSourceControl;
import com.azure.resourcemanager.appservice.fluent.models.ConnectionStringDictionaryInner;
import com.azure.resourcemanager.appservice.fluent.models.IdentifierInner;
import com.azure.resourcemanager.appservice.fluent.models.MSDeployStatusInner;
import com.azure.resourcemanager.appservice.fluent.models.SiteAuthSettingsInner;
import com.azure.resourcemanager.appservice.fluent.models.SiteConfigResourceInner;
import com.azure.resourcemanager.appservice.fluent.models.SiteInner;
import com.azure.resourcemanager.appservice.fluent.models.SiteLogsConfigInner;
import com.azure.resourcemanager.appservice.fluent.models.SitePatchResourceInner;
import com.azure.resourcemanager.appservice.fluent.models.SiteSourceControlInner;
import com.azure.resourcemanager.appservice.fluent.models.SlotConfigNamesResourceInner;
import com.azure.resourcemanager.appservice.fluent.models.StringDictionaryInner;
import com.azure.resourcemanager.resources.fluentcore.arm.ResourceUtils;
import com.azure.resourcemanager.resources.fluentcore.model.Creatable;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import reactor.core.publisher.Mono;
/**
* The base implementation for web apps and function apps.
*
* @param <FluentT> the fluent interface, WebApp or FunctionApp
* @param <FluentImplT> the implementation class for FluentT
* @param <FluentWithCreateT> the definition stage that derives from Creatable
* @param <FluentUpdateT> The definition stage that derives from Appliable
*/
abstract class AppServiceBaseImpl<
        FluentT extends WebAppBase,
        FluentImplT extends AppServiceBaseImpl<FluentT, FluentImplT, FluentWithCreateT, FluentUpdateT>,
        FluentWithCreateT,
        FluentUpdateT>
    extends WebAppBaseImpl<FluentT, FluentImplT> {

    private final ClientLogger logger = new ClientLogger(getClass());

    // Well-known app-setting keys used by App Service to configure Docker deployments.
    protected static final String SETTING_DOCKER_IMAGE = "DOCKER_CUSTOM_IMAGE_NAME";
    protected static final String SETTING_REGISTRY_SERVER = "DOCKER_REGISTRY_SERVER_URL";
    protected static final String SETTING_REGISTRY_USERNAME = "DOCKER_REGISTRY_SERVER_USERNAME";
    protected static final String SETTING_REGISTRY_PASSWORD = "DOCKER_REGISTRY_SERVER_PASSWORD";

    AppServiceBaseImpl(
        String name,
        SiteInner innerObject,
        SiteConfigResourceInner siteConfig,
        SiteLogsConfigInner logConfig,
        AppServiceManager manager) {
        super(name, innerObject, siteConfig, logConfig, manager);
    }

    // ------------------------------------------------------------------
    // Service-client delegation: each override below forwards to the
    // WebApps REST client, scoped to this app's resource group and name.
    // ------------------------------------------------------------------

    @Override
    Mono<SiteInner> createOrUpdateInner(SiteInner site) {
        return this.manager().serviceClient().getWebApps().createOrUpdateAsync(resourceGroupName(), name(), site);
    }

    @Override
    Mono<SiteInner> updateInner(SitePatchResourceInner siteUpdate) {
        return this.manager().serviceClient().getWebApps().updateAsync(resourceGroupName(), name(), siteUpdate);
    }

    @Override
    Mono<SiteInner> getInner() {
        return this.manager().serviceClient().getWebApps().getByResourceGroupAsync(resourceGroupName(), name());
    }

    @Override
    Mono<SiteConfigResourceInner> getConfigInner() {
        return this.manager().serviceClient().getWebApps().getConfigurationAsync(resourceGroupName(), name());
    }

    @Override
    Mono<SiteConfigResourceInner> createOrUpdateSiteConfig(SiteConfigResourceInner siteConfig) {
        return this
            .manager()
            .serviceClient()
            .getWebApps()
            .createOrUpdateConfigurationAsync(resourceGroupName(), name(), siteConfig);
    }

    @Override
    Mono<Void> deleteHostnameBinding(String hostname) {
        return this.manager().serviceClient().getWebApps()
            .deleteHostnameBindingAsync(resourceGroupName(), name(), hostname);
    }

    @Override
    Mono<StringDictionaryInner> listAppSettings() {
        return this.manager().serviceClient().getWebApps().listApplicationSettingsAsync(resourceGroupName(), name());
    }

    @Override
    Mono<StringDictionaryInner> updateAppSettings(StringDictionaryInner inner) {
        return this.manager().serviceClient().getWebApps()
            .updateApplicationSettingsAsync(resourceGroupName(), name(), inner);
    }

    @Override
    Mono<ConnectionStringDictionaryInner> listConnectionStrings() {
        return this.manager().serviceClient().getWebApps().listConnectionStringsAsync(resourceGroupName(), name());
    }

    @Override
    Mono<ConnectionStringDictionaryInner> updateConnectionStrings(ConnectionStringDictionaryInner inner) {
        return this.manager().serviceClient().getWebApps()
            .updateConnectionStringsAsync(resourceGroupName(), name(), inner);
    }

    @Override
    Mono<SlotConfigNamesResourceInner> listSlotConfigurations() {
        return this.manager().serviceClient().getWebApps().listSlotConfigurationNamesAsync(resourceGroupName(), name());
    }

    @Override
    Mono<SlotConfigNamesResourceInner> updateSlotConfigurations(SlotConfigNamesResourceInner inner) {
        return this.manager().serviceClient().getWebApps()
            .updateSlotConfigurationNamesAsync(resourceGroupName(), name(), inner);
    }

    @Override
    Mono<SiteSourceControlInner> createOrUpdateSourceControl(SiteSourceControlInner inner) {
        return this.manager().serviceClient().getWebApps()
            .createOrUpdateSourceControlAsync(resourceGroupName(), name(), inner);
    }

    @Override
    Mono<Void> deleteSourceControl() {
        return this.manager().serviceClient().getWebApps().deleteSourceControlAsync(resourceGroupName(), name());
    }

    @Override
    Mono<SiteAuthSettingsInner> updateAuthentication(SiteAuthSettingsInner inner) {
        return manager().serviceClient().getWebApps().updateAuthSettingsAsync(resourceGroupName(), name(), inner);
    }

    @Override
    Mono<SiteAuthSettingsInner> getAuthentication() {
        return manager().serviceClient().getWebApps().getAuthSettingsAsync(resourceGroupName(), name());
    }

    // Blocking variant of getHostnameBindingsAsync().
    @Override
    public Map<String, HostnameBinding> getHostnameBindings() {
        return getHostnameBindingsAsync().block();
    }

    /**
     * Lists hostname bindings and returns them as an unmodifiable map keyed by
     * the binding name with the "{siteName}/" prefix stripped off.
     */
    @Override
    @SuppressWarnings("unchecked")
    public Mono<Map<String, HostnameBinding>> getHostnameBindingsAsync() {
        return this
            .manager()
            .serviceClient()
            .getWebApps()
            .listHostnameBindingsAsync(resourceGroupName(), name())
            .mapPage(
                hostNameBindingInner ->
                    new HostnameBindingImpl<>(hostNameBindingInner, (FluentImplT) AppServiceBaseImpl.this))
            .collectList()
            .map(
                hostNameBindings ->
                    Collections
                        .<String, HostnameBinding>unmodifiableMap(
                            hostNameBindings
                                .stream()
                                .collect(
                                    Collectors
                                        .toMap(
                                            binding -> binding.name().replace(name() + "/", ""),
                                            Function.identity()))));
    }

    // Blocking variant of getPublishingProfileAsync().
    @Override
    public PublishingProfile getPublishingProfile() {
        return getPublishingProfileAsync().block();
    }

    // Downloads the publishing profile XML (including secrets) and wraps the
    // raw UTF-8 payload in a PublishingProfileImpl.
    public Mono<PublishingProfile> getPublishingProfileAsync() {
        return FluxUtil
            .collectBytesInByteBufferStream(
                manager()
                    .serviceClient()
                    .getWebApps()
                    .listPublishingProfileXmlWithSecretsAsync(
                        resourceGroupName(), name(), new CsmPublishingProfileOptions()))
            .map(
                bytes -> new PublishingProfileImpl(new String(bytes, StandardCharsets.UTF_8), AppServiceBaseImpl.this));
    }

    // Blocking variant of getSourceControlAsync().
    @Override
    public WebAppSourceControl getSourceControl() {
        return getSourceControlAsync().block();
    }

    @Override
    public Mono<WebAppSourceControl> getSourceControlAsync() {
        return manager()
            .serviceClient()
            .getWebApps()
            .getSourceControlAsync(resourceGroupName(), name())
            .map(
                siteSourceControlInner ->
                    new WebAppSourceControlImpl<>(siteSourceControlInner, AppServiceBaseImpl.this));
    }

    @Override
    Mono<MSDeployStatusInner> createMSDeploy(MSDeploy msDeployInner) {
        return manager().serviceClient().getWebApps()
            .createMSDeployOperationAsync(resourceGroupName(), name(), msDeployInner);
    }

    // Blocking variant of verifyDomainOwnershipAsync().
    @Override
    public void verifyDomainOwnership(String certificateOrderName, String domainVerificationToken) {
        verifyDomainOwnershipAsync(certificateOrderName, domainVerificationToken).block();
    }

    /**
     * Registers a domain-ownership identifier carrying the verification token,
     * discarding the service response (completes empty).
     */
    @Override
    public Mono<Void> verifyDomainOwnershipAsync(String certificateOrderName, String domainVerificationToken) {
        IdentifierInner identifierInner = new IdentifierInner().withValue(domainVerificationToken);
        return this
            .manager()
            .serviceClient()
            .getWebApps()
            .createOrUpdateDomainOwnershipIdentifierAsync(
                resourceGroupName(), name(), certificateOrderName, identifierInner)
            .then(Mono.empty());
    }

    // ------------------------------------------------------------------
    // Lifecycle operations: each async variant performs the REST call,
    // then refreshes this fluent model before completing empty.
    // ------------------------------------------------------------------

    @Override
    public void start() {
        startAsync().block();
    }

    @Override
    public Mono<Void> startAsync() {
        return manager()
            .serviceClient()
            .getWebApps()
            .startAsync(resourceGroupName(), name())
            .then(refreshAsync())
            .then(Mono.empty());
    }

    @Override
    public void stop() {
        stopAsync().block();
    }

    @Override
    public Mono<Void> stopAsync() {
        return manager()
            .serviceClient()
            .getWebApps()
            .stopAsync(resourceGroupName(), name())
            .then(refreshAsync())
            .then(Mono.empty());
    }

    @Override
    public void restart() {
        restartAsync().block();
    }

    @Override
    public Mono<Void> restartAsync() {
        return manager()
            .serviceClient()
            .getWebApps()
            .restartAsync(resourceGroupName(), name())
            .then(refreshAsync())
            .then(Mono.empty());
    }

    @Override
    public void swap(String slotName) {
        swapAsync(slotName).block();
    }

    @Override
    public Mono<Void> swapAsync(String slotName) {
        return manager()
            .serviceClient()
            .getWebApps()
            .swapSlotWithProductionAsync(resourceGroupName(), name(), new CsmSlotEntity().withTargetSlot(slotName))
            .then(refreshAsync())
            .then(Mono.empty());
    }

    @Override
    public void applySlotConfigurations(String slotName) {
        applySlotConfigurationsAsync(slotName).block();
    }

    @Override
    public Mono<Void> applySlotConfigurationsAsync(String slotName) {
        return manager()
            .serviceClient()
            .getWebApps()
            .applySlotConfigToProductionAsync(resourceGroupName(), name(), new CsmSlotEntity().withTargetSlot(slotName))
            .then(refreshAsync())
            .then(Mono.empty());
    }

    @Override
    public void resetSlotConfigurations() {
        resetSlotConfigurationsAsync().block();
    }

    @Override
    public Mono<Void> resetSlotConfigurationsAsync() {
        return manager()
            .serviceClient()
            .getWebApps()
            .resetProductionSlotConfigAsync(resourceGroupName(), name())
            .then(refreshAsync())
            .then(Mono.empty());
    }

    @Override
    public byte[] getContainerLogs() {
        return getContainerLogsAsync().block();
    }

    @Override
    public Mono<byte[]> getContainerLogsAsync() {
        return FluxUtil
            .collectBytesInByteBufferStream(
                manager().serviceClient().getWebApps().getWebSiteContainerLogsAsync(resourceGroupName(), name()));
    }

    @Override
    public byte[] getContainerLogsZip() {
        return getContainerLogsZipAsync().block();
    }

    @Override
    public Mono<byte[]> getContainerLogsZipAsync() {
        return FluxUtil
            .collectBytesInByteBufferStream(
                manager().serviceClient().getWebApps().getContainerLogsZipAsync(resourceGroupName(), name()));
    }

    @Override
    Mono<SiteLogsConfigInner> updateDiagnosticLogsConfig(SiteLogsConfigInner siteLogsConfigInner) {
        return manager()
            .serviceClient()
            .getWebApps()
            .updateDiagnosticLogsConfigAsync(resourceGroupName(), name(), siteLogsConfigInner);
    }

    // ------------------------------------------------------------------
    // App Service plan wiring (fluent definition/update helpers).
    // ------------------------------------------------------------------

    // Creates a plan definition with a generated name derived from this app's name.
    private AppServicePlanImpl newDefaultAppServicePlan() {
        String planName = this.manager().resourceManager().internalContext().randomResourceName(name() + "plan", 32);
        return newDefaultAppServicePlan(planName);
    }

    // Creates a plan definition in this app's region; the plan joins this app's
    // creatable resource group when one is pending, otherwise the existing group.
    private AppServicePlanImpl newDefaultAppServicePlan(String appServicePlanName) {
        AppServicePlanImpl appServicePlan =
            (AppServicePlanImpl) (this.manager().appServicePlans().define(appServicePlanName)).withRegion(regionName());
        if (super.creatableGroup != null && isInCreateMode()) {
            appServicePlan = appServicePlan.withNewResourceGroup(super.creatableGroup);
        } else {
            appServicePlan = appServicePlan.withExistingResourceGroup(resourceGroupName());
        }
        return appServicePlan;
    }

    public FluentImplT withNewFreeAppServicePlan() {
        return withNewAppServicePlan(OperatingSystem.WINDOWS, PricingTier.FREE_F1);
    }

    public FluentImplT withNewSharedAppServicePlan() {
        return withNewAppServicePlan(OperatingSystem.WINDOWS, PricingTier.SHARED_D1);
    }

    FluentImplT withNewAppServicePlan(OperatingSystem operatingSystem, PricingTier pricingTier) {
        return withNewAppServicePlan(
            newDefaultAppServicePlan().withOperatingSystem(operatingSystem).withPricingTier(pricingTier));
    }

    FluentImplT withNewAppServicePlan(
        String appServicePlanName, OperatingSystem operatingSystem, PricingTier pricingTier) {
        return withNewAppServicePlan(
            newDefaultAppServicePlan(appServicePlanName)
                .withOperatingSystem(operatingSystem)
                .withPricingTier(pricingTier));
    }

    public FluentImplT withNewAppServicePlan(PricingTier pricingTier) {
        return withNewAppServicePlan(operatingSystem(), pricingTier);
    }

    public FluentImplT withNewAppServicePlan(String appServicePlanName, PricingTier pricingTier) {
        return withNewAppServicePlan(appServicePlanName, operatingSystem(), pricingTier);
    }

    /**
     * Attaches a to-be-created App Service plan as a dependency of this app and
     * pre-computes its server-farm resource id (the plan does not exist yet).
     * Only AppServicePlanImpl creatables are supported, since the plan's
     * operating system must be read before creation.
     */
    public FluentImplT withNewAppServicePlan(Creatable<AppServicePlan> appServicePlanCreatable) {
        this.addDependency(appServicePlanCreatable);
        String id =
            ResourceUtils
                .constructResourceId(
                    this.manager().subscriptionId(),
                    resourceGroupName(),
                    "Microsoft.Web",
                    "serverFarms",
                    appServicePlanCreatable.name(),
                    "");
        innerModel().withServerFarmId(id);
        if (appServicePlanCreatable instanceof AppServicePlanImpl) {
            return withOperatingSystem(((AppServicePlanImpl) appServicePlanCreatable).operatingSystem());
        } else {
            throw logger.logExceptionAsError(
                new IllegalStateException("Internal error, appServicePlanCreatable must be class AppServicePlanImpl"));
        }
    }

    // Marks the site as Linux-based when the plan's OS is Linux: sets the
    // 'reserved' flag and appends ",linux" to the site kind.
    @SuppressWarnings("unchecked")
    private FluentImplT withOperatingSystem(OperatingSystem os) {
        if (os == OperatingSystem.LINUX) {
            innerModel().withReserved(true);
            innerModel().withKind(innerModel().kind() + ",linux");
        }
        return (FluentImplT) this;
    }

    public FluentImplT withExistingAppServicePlan(AppServicePlan appServicePlan) {
        innerModel().withServerFarmId(appServicePlan.id());
        this.withRegion(appServicePlan.regionName());
        return withOperatingSystem(appServicePlanOperatingSystem(appServicePlan));
    }

    // ------------------------------------------------------------------
    // Docker container configuration.
    // ------------------------------------------------------------------

    /**
     * Configures the app to run a public Docker Hub image: clears any previous
     * container settings, sets linuxFxVersion to "DOCKER|{image}" and records
     * the image name as an app setting.
     */
    @SuppressWarnings("unchecked")
    public FluentImplT withPublicDockerHubImage(String imageAndTag) {
        cleanUpContainerSettings();
        if (siteConfig == null) {
            siteConfig = new SiteConfigResourceInner();
        }
        siteConfig.withLinuxFxVersion(String.format("DOCKER|%s", imageAndTag));
        withAppSetting(SETTING_DOCKER_IMAGE, imageAndTag);
        return (FluentImplT) this;
    }

    // Same wiring as the public-image case; credentials are supplied separately
    // via withCredentials(...).
    public FluentImplT withPrivateDockerHubImage(String imageAndTag) {
        return withPublicDockerHubImage(imageAndTag);
    }

    /**
     * Configures the app to run an image from a private registry; the image
     * reference is normalized against the registry URL before being applied.
     */
    @SuppressWarnings("unchecked")
    public FluentImplT withPrivateRegistryImage(String imageAndTag, String serverUrl) {
        imageAndTag = Utils.smartCompletionPrivateRegistryImage(imageAndTag, serverUrl);
        cleanUpContainerSettings();
        if (siteConfig == null) {
            siteConfig = new SiteConfigResourceInner();
        }
        siteConfig.withLinuxFxVersion(String.format("DOCKER|%s", imageAndTag));
        withAppSetting(SETTING_DOCKER_IMAGE, imageAndTag);
        withAppSetting(SETTING_REGISTRY_SERVER, serverUrl);
        return (FluentImplT) this;
    }

    // Stores registry credentials as app settings. NOTE(review): the password is
    // kept as a plain app setting, which matches the App Service convention.
    @SuppressWarnings("unchecked")
    public FluentImplT withCredentials(String username, String password) {
        withAppSetting(SETTING_REGISTRY_USERNAME, username);
        withAppSetting(SETTING_REGISTRY_PASSWORD, password);
        return (FluentImplT) this;
    }

    // Subclasses remove any previously-applied container configuration before a
    // new image is set.
    protected abstract void cleanUpContainerSettings();

    // Guard used by container setters: Docker settings are invalid on Windows plans.
    protected void ensureLinuxPlan() {
        if (OperatingSystem.WINDOWS.equals(operatingSystem())) {
            throw logger.logExceptionAsError(
                new IllegalArgumentException("Docker container settings only apply to Linux app service plans."));
        }
    }

    // Overridable so subclasses can resolve the OS differently (e.g. for slots).
    protected OperatingSystem appServicePlanOperatingSystem(AppServicePlan appServicePlan) {
        return appServicePlan.operatingSystem();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import static org.hamcrest.Matchers.equalTo;
public class ConnectionProfileTests extends ESTestCase {
public void testBuildConnectionProfile() {
    ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
    // Candidate values; each is only applied to the builder when the matching
    // random flag below is set, so both branches of every assertion run over time.
    TimeValue connectTimeout = TimeValue.timeValueMillis(randomIntBetween(1, 10));
    TimeValue handshakeTimeout = TimeValue.timeValueMillis(randomIntBetween(1, 10));
    TimeValue pingInterval = TimeValue.timeValueMillis(randomIntBetween(1, 10));
    boolean compressionEnabled = randomBoolean();
    final boolean setConnectTimeout = randomBoolean();
    if (setConnectTimeout) {
        builder.setConnectTimeout(connectTimeout);
    }
    final boolean setHandshakeTimeout = randomBoolean();
    if (setHandshakeTimeout) {
        builder.setHandshakeTimeout(handshakeTimeout);
    }
    final boolean setCompress = randomBoolean();
    if (setCompress) {
        builder.setCompressionEnabled(compressionEnabled);
    }
    final boolean setPingInterval = randomBoolean();
    if (setPingInterval) {
        builder.setPingInterval(pingInterval);
    }
    // Register connection counts per request type: BULK=1, STATE+RECOVERY share 2,
    // PING=3; REG is deliberately left out to trigger the build() failure below.
    builder.addConnections(1, TransportRequestOptions.Type.BULK);
    builder.addConnections(2, TransportRequestOptions.Type.STATE, TransportRequestOptions.Type.RECOVERY);
    builder.addConnections(3, TransportRequestOptions.Type.PING);
    // build() must fail while a type is missing...
    IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, builder::build);
    assertEquals("not all types are added for this connection profile - missing types: [REG]", illegalStateException.getMessage());
    // ...and re-registering an already-added type must be rejected.
    IllegalArgumentException illegalArgumentException = expectThrows(IllegalArgumentException.class,
        () -> builder.addConnections(4, TransportRequestOptions.Type.REG, TransportRequestOptions.Type.PING));
    assertEquals("type [PING] is already registered", illegalArgumentException.getMessage());
    builder.addConnections(4, TransportRequestOptions.Type.REG);
    ConnectionProfile build = builder.build();
    // Round-tripping through the copy constructor must preserve the profile.
    if (randomBoolean()) {
        build = new ConnectionProfile.Builder(build).build();
    }
    assertEquals(10, build.getNumConnections());
    // Each option is either the value set above or null when it was left unset.
    if (setConnectTimeout) {
        assertEquals(connectTimeout, build.getConnectTimeout());
    } else {
        assertNull(build.getConnectTimeout());
    }
    if (setHandshakeTimeout) {
        assertEquals(handshakeTimeout, build.getHandshakeTimeout());
    } else {
        assertNull(build.getHandshakeTimeout());
    }
    if (setCompress) {
        assertEquals(compressionEnabled, build.getCompressionEnabled());
    } else {
        assertNull(build.getCompressionEnabled());
    }
    if (setPingInterval) {
        assertEquals(pingInterval, build.getPingInterval());
    } else {
        assertNull(build.getPingInterval());
    }
    // Stand-in channel list 0..9; handles pick channels out of this list by
    // their (offset, length) slice, in registration order.
    List<Integer> list = new ArrayList<>(10);
    for (int i = 0; i < 10; i++) {
        list.add(i);
    }
    final int numIters = randomIntBetween(5, 10);
    assertEquals(4, build.getHandles().size());
    // BULK: slice [0, 1) — always channel 0.
    assertEquals(0, build.getHandles().get(0).offset);
    assertEquals(1, build.getHandles().get(0).length);
    assertEquals(EnumSet.of(TransportRequestOptions.Type.BULK), build.getHandles().get(0).getTypes());
    Integer channel = build.getHandles().get(0).getChannel(list);
    for (int i = 0; i < numIters; i++) {
        assertEquals(0, channel.intValue());
    }
    // STATE/RECOVERY: slice [1, 3).
    assertEquals(1, build.getHandles().get(1).offset);
    assertEquals(2, build.getHandles().get(1).length);
    assertEquals(EnumSet.of(TransportRequestOptions.Type.STATE, TransportRequestOptions.Type.RECOVERY),
        build.getHandles().get(1).getTypes());
    channel = build.getHandles().get(1).getChannel(list);
    for (int i = 0; i < numIters; i++) {
        assertThat(channel, Matchers.anyOf(Matchers.is(1), Matchers.is(2)));
    }
    // PING: slice [3, 6).
    assertEquals(3, build.getHandles().get(2).offset);
    assertEquals(3, build.getHandles().get(2).length);
    assertEquals(EnumSet.of(TransportRequestOptions.Type.PING), build.getHandles().get(2).getTypes());
    channel = build.getHandles().get(2).getChannel(list);
    for (int i = 0; i < numIters; i++) {
        assertThat(channel, Matchers.anyOf(Matchers.is(3), Matchers.is(4), Matchers.is(5)));
    }
    // REG: slice [6, 10).
    assertEquals(6, build.getHandles().get(3).offset);
    assertEquals(4, build.getHandles().get(3).length);
    assertEquals(EnumSet.of(TransportRequestOptions.Type.REG), build.getHandles().get(3).getTypes());
    channel = build.getHandles().get(3).getChannel(list);
    for (int i = 0; i < numIters; i++) {
        assertThat(channel, Matchers.anyOf(Matchers.is(6), Matchers.is(7), Matchers.is(8), Matchers.is(9)));
    }
    // Per-type connection counts must match what was registered.
    assertEquals(3, build.getNumConnectionsPerType(TransportRequestOptions.Type.PING));
    assertEquals(4, build.getNumConnectionsPerType(TransportRequestOptions.Type.REG));
    assertEquals(2, build.getNumConnectionsPerType(TransportRequestOptions.Type.STATE));
    assertEquals(2, build.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY));
    assertEquals(1, build.getNumConnectionsPerType(TransportRequestOptions.Type.BULK));
}
public void testNoChannels() {
    // A connection type configured with zero channels must reject channel lookups.
    ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
    builder.addConnections(1,
        TransportRequestOptions.Type.BULK,
        TransportRequestOptions.Type.STATE,
        TransportRequestOptions.Type.RECOVERY,
        TransportRequestOptions.Type.REG);
    builder.addConnections(0, TransportRequestOptions.Type.PING);
    ConnectionProfile profile = builder.build();
    List<Integer> channels = Collections.singletonList(0);
    // The first handle owns the single channel at offset 0.
    assertEquals(Integer.valueOf(0), profile.getHandles().get(0).getChannel(channels));
    // The PING handle has no channels, so resolving one must fail.
    expectThrows(IllegalStateException.class, () -> profile.getHandles().get(1).getChannel(channels));
}
public void testConnectionProfileResolve() {
    // A null profile resolves to the fallback profile itself.
    final ConnectionProfile fallback = ConnectionProfile.buildDefaultConnectionProfile(Settings.EMPTY);
    assertEquals(fallback, ConnectionProfile.resolveConnectionProfile(null, fallback));
    final ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
    // Same registration order as before: BULK, RECOVERY, REG, STATE, PING.
    for (TransportRequestOptions.Type type : new TransportRequestOptions.Type[]{
            TransportRequestOptions.Type.BULK,
            TransportRequestOptions.Type.RECOVERY,
            TransportRequestOptions.Type.REG,
            TransportRequestOptions.Type.STATE,
            TransportRequestOptions.Type.PING}) {
        builder.addConnections(randomIntBetween(0, 5), type);
    }
    // Randomly set each optional setting so both fallback paths get exercised.
    final boolean hasConnectTimeout = randomBoolean();
    if (hasConnectTimeout) {
        builder.setConnectTimeout(TimeValue.timeValueMillis(randomNonNegativeLong()));
    }
    final boolean hasHandshakeTimeout = randomBoolean();
    if (hasHandshakeTimeout) {
        builder.setHandshakeTimeout(TimeValue.timeValueMillis(randomNonNegativeLong()));
    }
    final boolean hasPingInterval = randomBoolean();
    if (hasPingInterval) {
        builder.setPingInterval(TimeValue.timeValueMillis(randomNonNegativeLong()));
    }
    final boolean hasCompression = randomBoolean();
    if (hasCompression) {
        builder.setCompressionEnabled(randomBoolean());
    }
    final ConnectionProfile profile = builder.build();
    final ConnectionProfile resolved = ConnectionProfile.resolveConnectionProfile(profile, fallback);
    assertNotEquals(resolved, fallback);
    // Connection counts always come from the explicit profile...
    assertThat(resolved.getNumConnections(), equalTo(profile.getNumConnections()));
    assertThat(resolved.getHandles(), equalTo(profile.getHandles()));
    // ...while unset optional settings fall back to the default profile.
    assertThat(resolved.getConnectTimeout(),
        equalTo(hasConnectTimeout ? profile.getConnectTimeout() : fallback.getConnectTimeout()));
    assertThat(resolved.getHandshakeTimeout(),
        equalTo(hasHandshakeTimeout ? profile.getHandshakeTimeout() : fallback.getHandshakeTimeout()));
    assertThat(resolved.getPingInterval(),
        equalTo(hasPingInterval ? profile.getPingInterval() : fallback.getPingInterval()));
    assertThat(resolved.getCompressionEnabled(),
        equalTo(hasCompression ? profile.getCompressionEnabled() : fallback.getCompressionEnabled()));
}
/**
 * Verifies the connection counts of the default profile and that disabled node
 * roles drop their dedicated connection types.
 * Full profile: 13 = 1 PING + 6 REG + 1 STATE + 2 RECOVERY + 3 BULK.
 */
public void testDefaultConnectionProfile() {
    ConnectionProfile profile = ConnectionProfile.buildDefaultConnectionProfile(Settings.EMPTY);
    assertEquals(13, profile.getNumConnections());
    assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.PING));
    assertEquals(6, profile.getNumConnectionsPerType(TransportRequestOptions.Type.REG));
    assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE));
    assertEquals(2, profile.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY));
    assertEquals(3, profile.getNumConnectionsPerType(TransportRequestOptions.Type.BULK));
    // Timeouts, compression and ping schedule come straight from the settings defaults.
    assertEquals(TransportSettings.CONNECT_TIMEOUT.get(Settings.EMPTY), profile.getConnectTimeout());
    assertEquals(TransportSettings.CONNECT_TIMEOUT.get(Settings.EMPTY), profile.getHandshakeTimeout());
    assertEquals(TransportSettings.TRANSPORT_COMPRESS.get(Settings.EMPTY), profile.getCompressionEnabled());
    assertEquals(TransportSettings.PING_SCHEDULE.get(Settings.EMPTY), profile.getPingInterval());
    // Non-master node: the STATE connection disappears (13 -> 12).
    profile = ConnectionProfile.buildDefaultConnectionProfile(Settings.builder().put("node.master", false).build());
    assertEquals(12, profile.getNumConnections());
    assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.PING));
    assertEquals(6, profile.getNumConnectionsPerType(TransportRequestOptions.Type.REG));
    assertEquals(0, profile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE));
    assertEquals(2, profile.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY));
    assertEquals(3, profile.getNumConnectionsPerType(TransportRequestOptions.Type.BULK));
    // Non-data node: the two RECOVERY connections disappear (13 -> 11).
    profile = ConnectionProfile.buildDefaultConnectionProfile(Settings.builder().put("node.data", false).build());
    assertEquals(11, profile.getNumConnections());
    assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.PING));
    assertEquals(6, profile.getNumConnectionsPerType(TransportRequestOptions.Type.REG));
    assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE));
    assertEquals(0, profile.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY));
    assertEquals(3, profile.getNumConnectionsPerType(TransportRequestOptions.Type.BULK));
    // Neither role: both STATE and RECOVERY gone (13 -> 10).
    profile = ConnectionProfile.buildDefaultConnectionProfile(Settings.builder().put("node.data", false)
        .put("node.master", false).build());
    assertEquals(10, profile.getNumConnections());
    assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.PING));
    assertEquals(6, profile.getNumConnectionsPerType(TransportRequestOptions.Type.REG));
    assertEquals(0, profile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE));
    assertEquals(0, profile.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY));
    assertEquals(3, profile.getNumConnectionsPerType(TransportRequestOptions.Type.BULK));
}
}
| |
package jacobi.core.spatial.rtree;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.BiFunction;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import jacobi.api.Matrices;
import jacobi.api.Matrix;
import jacobi.core.util.MinHeap;
import jacobi.test.annotations.JacobiEquals;
import jacobi.test.annotations.JacobiImport;
import jacobi.test.annotations.JacobiInject;
import jacobi.test.annotations.JacobiResult;
import jacobi.test.util.JacobiJUnit4ClassRunner;
@JacobiImport("/jacobi/test/data/RInlineTreeTest.xlsx")
@RunWith(JacobiJUnit4ClassRunner.class)
public class RInlineTreeTest {

    /** Data rows the R-tree under test is built over. */
    @JacobiInject(0)
    public Matrix input;

    /** Query input: row 0 is the query point; row 1 is a boundary point (range tests) or holds k (kNN tests). */
    @JacobiInject(100)
    public Matrix query;

    /** Filter arrays recorded per index layer, exported for verification. */
    @JacobiResult(200)
    public Matrix filters;

    /** Matched row indices, exported for verification. */
    @JacobiResult(300)
    public Matrix ans;

    /** All injected matrices: 10 is the leaf layer, 11 upwards are index layers bottom-up. */
    @JacobiInject(-1)
    public Map<Integer, Matrix> all;

    private List<RLayer> rIndex;
    private RLayer rLeaves;

    @Test
    @JacobiImport("test 2-D tree from example")
    @JacobiEquals(expected = 200, actual = 200)
    public void shouldBeAbleToQueryRangeInExampleTree() {
        List<int[]> visited = new ArrayList<>();
        RInlineTree rTree = this.buildTrackingTree(visited);
        Iterator<Integer> iter = rTree.queryRange(this.query.getRow(0), this.queryDist());
        Assert.assertTrue(iter.hasNext());
        int ans0 = iter.next();
        Assert.assertTrue(iter.hasNext());
        int ans1 = iter.next();
        Assert.assertFalse(iter.hasNext());
        // Exactly rows 'c' and 'm' of the example data fall inside the query range.
        Assert.assertEquals('c', (char) ('a' + Math.min(ans0, ans1)));
        Assert.assertEquals('m', (char) ('a' + Math.max(ans0, ans1)));
        this.filters = this.toMatrix(visited);
    }

    @Test
    @JacobiImport("test rand 2-D (32)")
    @JacobiEquals(expected = 200, actual = 200)
    @JacobiEquals(expected = 300, actual = 300)
    public void shouldBeAbleToQueryRangeInRand2D32Tree() {
        List<int[]> visited = new ArrayList<>();
        RInlineTree rTree = this.buildTrackingTree(visited);
        Iterator<Integer> iter = rTree.queryRange(this.query.getRow(0), this.queryDist());
        Assert.assertTrue(iter.hasNext());
        int ans0 = iter.next();
        Assert.assertFalse(iter.hasNext());
        this.filters = this.toMatrix(visited);
        this.ans = Matrices.scalar(ans0);
    }

    @Test
    @JacobiImport("test rand 2-D sort by Y (32)")
    @JacobiEquals(expected = 200, actual = 200)
    @JacobiEquals(expected = 300, actual = 300)
    public void shouldBeAbleToQueryRangeInRand2DSortByY32Tree() {
        List<int[]> visited = new ArrayList<>();
        RInlineTree rTree = this.buildTrackingTree(visited);
        Iterator<Integer> iter = rTree.queryRange(this.query.getRow(0), this.queryDist());
        Assert.assertTrue(iter.hasNext());
        int ans0 = iter.next();
        Assert.assertTrue(iter.hasNext());
        int ans1 = iter.next();
        Assert.assertFalse(iter.hasNext());
        this.filters = this.toMatrix(visited);
        this.ans = Matrices.wrap(new double[][]{new double[]{
            Math.min(ans0, ans1),
            Math.max(ans0, ans1)
        }});
    }

    @Test
    @JacobiImport("test rand 4-D sort by X (128)")
    @JacobiEquals(expected = 200, actual = 200)
    public void shouldBeAbleToQueryRangeInRand4DSortByX128Tree() {
        List<int[]> visited = new ArrayList<>();
        RInlineTree rTree = this.buildTrackingTree(visited);
        Iterator<Integer> iter = rTree.queryRange(this.query.getRow(0), this.queryDist());
        // The query range is empty for this data set; only the recorded filters are verified.
        Assert.assertFalse(iter.hasNext());
        this.filters = this.toMatrix(visited);
    }

    @Test
    @JacobiImport("test kNN rand 3-D sort by Z")
    public void shouldBeAbleToQueryAStarInRand3DSortByZ() {
        RInlineTree rTree = this.buildTree((idx, lf) -> new RInlineTree(idx, lf, this.input));
        double[] p = this.query.getRow(0);
        int k = (int) this.query.get(1, 0);
        // NOTE(review): result is never asserted; this only checks queryAStar completes.
        MinHeap heap = rTree.queryAStar(p, k);
    }

    @Test
    @JacobiImport("test 3 centroids 3-D (100)")
    public void shouldBeAbleToQueryKnnIn3Centroids3D100() {
        RInlineTree rTree = this.buildTree((idx, lf) -> new RInlineTree(idx, lf, this.input));
        double[] p = this.query.getRow(0);
        int k = (int) this.query.get(1, 0);
        MinHeap heap = rTree.queryAStar(p, k);
        // NOTE(review): debug output only, no assertion -- consider pinning the expected indices.
        System.out.println(Arrays.toString(heap.flush()));
    }

    /**
     * Builds the tree under test with queryFilter instrumented so that every
     * filter array produced while descending the index layers is recorded.
     *
     * @param visited sink for the filter array of each visited layer
     * @return the instrumented tree
     */
    private RInlineTree buildTrackingTree(List<int[]> visited) {
        return this.buildTree((idx, lf) -> new RInlineTree(idx, lf, this.input) {
            @Override
            protected int[] queryFilter(RLayer rLayer, double[] query, double maxDist, int[] filter) {
                int[] next = super.queryFilter(rLayer, query, maxDist, filter);
                visited.add(next);
                return next;
            }
        });
    }

    /**
     * Query radius = Euclidean distance between rows 0 and 1 of the query matrix.
     */
    protected double queryDist() {
        double[] q0 = this.query.getRow(0);
        double[] q1 = this.query.getRow(1);
        double dist = 0.0;
        for (int i = 0; i < q0.length; i++) {
            double dx = q0[i] - q1[i];
            dist += dx * dx;
        }
        return Math.sqrt(dist);
    }

    /**
     * Assembles the injected matrices into leaf and index layers and hands them to
     * the supplied factory. Matrix 10 is the leaf layer; matrices 11.. are index
     * layers from the bottom up (reversed here to top-down order).
     *
     * @throws UnsupportedOperationException if more than 21 index layers are present
     */
    protected RInlineTree buildTree(BiFunction<List<RLayer>, RLayer, RInlineTree> factoryFn) {
        RLayer leaves = this.toRLayer(this.all.get(10), true);
        List<RLayer> layers = new ArrayList<>();
        for (int i = 11; i < 32; i++) {
            Matrix matrix = this.all.get(i);
            if (matrix == null) {
                this.rLeaves = leaves;
                Collections.reverse(layers);
                this.rIndex = Collections.unmodifiableList(layers);
                return factoryFn.apply(this.rIndex, this.rLeaves);
            }
            layers.add(this.toRLayer(matrix, false));
        }
        throw new UnsupportedOperationException("Tree too deep");
    }

    /**
     * Converts a matrix into an RLayer: column 0 holds the cut index, the remaining
     * columns the bounds. Non-leaf layers must have an even number of bound columns
     * (a min/max pair per dimension).
     */
    protected RLayer toRLayer(Matrix input, boolean isLeaf) {
        int dim = input.getColCount() - 1;
        if (dim % 2 != 0 && !isLeaf) {
            throw new IllegalArgumentException();
        }
        int[] cuts = new int[input.getRowCount()];
        double[] bounds = new double[cuts.length * dim];
        for (int i = 0; i < input.getRowCount(); i++) {
            double[] row = input.getRow(i);
            cuts[i] = (int) row[0];
            System.arraycopy(row, 1, bounds, i * dim, dim);
        }
        return new RLayer(cuts, bounds);
    }

    /**
     * Packs the recorded filter arrays into a matrix, one row per filter,
     * right-padded with zeros up to the longest filter length.
     */
    protected Matrix toMatrix(List<int[]> filters) {
        Matrix matrix = Matrices.zeros(filters.size(), filters.stream().mapToInt(a -> a.length).max().orElse(0));
        for (int i = 0; i < matrix.getRowCount(); i++) {
            int[] filter = filters.get(i);
            matrix.getAndSet(i, r -> {
                for (int j = 0; j < filter.length; j++) {
                    r[j] = filter[j];
                }
            });
        }
        return matrix;
    }
}
| |
package mcjty.rftools.dimension;
import mcjty.rftools.blocks.dimlets.DimletConfiguration;
import mcjty.rftools.blocks.teleporter.RfToolsTeleporter;
import mcjty.rftools.dimension.description.DimensionDescriptor;
import mcjty.rftools.dimension.world.types.EffectType;
import mcjty.rftools.items.ModItems;
import mcjty.rftools.items.dimensionmonitor.PhasedFieldGeneratorItem;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.gameevent.TickEvent;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.potion.Potion;
import net.minecraft.potion.PotionEffect;
import net.minecraft.server.MinecraftServer;
import net.minecraft.world.World;
import net.minecraft.world.WorldServer;
import net.minecraftforge.common.DimensionManager;
import java.util.*;
public class DimensionTickEvent {
    /** Number of server ticks between two power-maintenance passes. */
    public static final int MAXTICKS = 10;
    private int counter = MAXTICKS;

    /** Number of maintenance passes between two potion-effect refreshes. */
    private static final int EFFECTS_MAX = 18;
    private int counterEffects = EFFECTS_MAX;

    @SubscribeEvent
    public void onServerTick(TickEvent.ServerTickEvent evt) {
        if (evt.phase == TickEvent.Phase.START) {
            return;
        }
        counter--;
        if (counter <= 0) {
            counter = MAXTICKS;
            counterEffects--;
            boolean doEffects = false;
            if (counterEffects <= 0) {
                counterEffects = EFFECTS_MAX;
                doEffects = true;
            }
            handlePower(doEffects);
        }
    }

    /**
     * Drains maintenance power from every registered RFTools dimension and applies
     * low-power handling. Runs once every MAXTICKS server ticks.
     *
     * @param doEffects true when potion effects should be (re-)applied this pass
     */
    private void handlePower(boolean doEffects) {
        World entityWorld = MinecraftServer.getServer().getEntityWorld();
        RfToolsDimensionManager dimensionManager = RfToolsDimensionManager.getDimensionManager(entityWorld);
        if (dimensionManager.getDimensions().isEmpty()) {
            return;
        }
        DimensionStorage dimensionStorage = DimensionStorage.getDimensionStorage(entityWorld);
        for (Map.Entry<Integer, DimensionDescriptor> entry : dimensionManager.getDimensions().entrySet()) {
            Integer id = entry.getKey();
            // If there is an activity probe we only drain power if the dimension is loaded
            // (a player is there or a chunkloader).
            DimensionInformation information = dimensionManager.getDimensionInformation(id);
            if (DimensionManager.getWorld(id) == null && information.getProbeCounter() != 0) {
                continue;
            }
            int cost = 0;
            if (DimletConfiguration.dimensionDifficulty != -1) {
                cost = information.getActualRfCost();
                if (cost == 0) {
                    cost = entry.getValue().getRfMaintainCost();
                }
            }
            // Drain one maintenance period's worth of power, clamped at zero.
            int power = Math.max(0, dimensionStorage.getEnergyLevel(id) - cost * MAXTICKS);
            handleLowPower(id, power, doEffects);
            if (doEffects && power > 0) {
                handleEffectsForDimension(power, id, information);
            }
            dimensionStorage.setEnergyLevel(id, power);
        }
        dimensionStorage.save(entityWorld);
    }

    /** Potion id per dimension effect type. */
    static final Map<EffectType, Integer> effectsMap = new EnumMap<EffectType, Integer>(EffectType.class);
    /** Potion amplifier per effect type; an absent entry means amplifier 0. */
    static final Map<EffectType, Integer> effectAmplifierMap = new EnumMap<EffectType, Integer>(EffectType.class);

    // Registers an effect type with its potion id. Amplifier 0 is the implicit
    // default and is therefore not stored in effectAmplifierMap (matches how
    // handleEffectsForDimension treats a missing amplifier).
    private static void registerEffect(EffectType type, int potionId, int amplifier) {
        effectsMap.put(type, potionId);
        if (amplifier > 0) {
            effectAmplifierMap.put(type, amplifier);
        }
    }

    static {
        registerEffect(EffectType.EFFECT_POISON, Potion.poison.getId(), 0);
        registerEffect(EffectType.EFFECT_POISON2, Potion.poison.getId(), 1);
        registerEffect(EffectType.EFFECT_POISON3, Potion.poison.getId(), 2);
        registerEffect(EffectType.EFFECT_REGENERATION, Potion.regeneration.getId(), 0);
        registerEffect(EffectType.EFFECT_REGENERATION2, Potion.regeneration.getId(), 1);
        registerEffect(EffectType.EFFECT_REGENERATION3, Potion.regeneration.getId(), 2);
        registerEffect(EffectType.EFFECT_MOVESLOWDOWN, Potion.moveSlowdown.getId(), 0);
        registerEffect(EffectType.EFFECT_MOVESLOWDOWN2, Potion.moveSlowdown.getId(), 1);
        registerEffect(EffectType.EFFECT_MOVESLOWDOWN3, Potion.moveSlowdown.getId(), 2);
        registerEffect(EffectType.EFFECT_MOVESLOWDOWN4, Potion.moveSlowdown.getId(), 3);
        registerEffect(EffectType.EFFECT_MOVESPEED, Potion.moveSpeed.getId(), 0);
        registerEffect(EffectType.EFFECT_MOVESPEED2, Potion.moveSpeed.getId(), 1);
        registerEffect(EffectType.EFFECT_MOVESPEED3, Potion.moveSpeed.getId(), 2);
        registerEffect(EffectType.EFFECT_DIGSLOWDOWN, Potion.digSlowdown.getId(), 0);
        registerEffect(EffectType.EFFECT_DIGSLOWDOWN2, Potion.digSlowdown.getId(), 1);
        registerEffect(EffectType.EFFECT_DIGSLOWDOWN3, Potion.digSlowdown.getId(), 2);
        registerEffect(EffectType.EFFECT_DIGSLOWDOWN4, Potion.digSlowdown.getId(), 3);
        registerEffect(EffectType.EFFECT_DIGSPEED, Potion.digSpeed.getId(), 0);
        registerEffect(EffectType.EFFECT_DIGSPEED2, Potion.digSpeed.getId(), 1);
        registerEffect(EffectType.EFFECT_DIGSPEED3, Potion.digSpeed.getId(), 2);
        registerEffect(EffectType.EFFECT_DAMAGEBOOST, Potion.damageBoost.getId(), 0);
        registerEffect(EffectType.EFFECT_DAMAGEBOOST2, Potion.damageBoost.getId(), 1);
        registerEffect(EffectType.EFFECT_DAMAGEBOOST3, Potion.damageBoost.getId(), 2);
        registerEffect(EffectType.EFFECT_INSTANTHEALTH, Potion.heal.getId(), 0);
        registerEffect(EffectType.EFFECT_HARM, Potion.harm.getId(), 0);
        registerEffect(EffectType.EFFECT_JUMP, Potion.jump.getId(), 0);
        registerEffect(EffectType.EFFECT_JUMP2, Potion.jump.getId(), 1);
        registerEffect(EffectType.EFFECT_JUMP3, Potion.jump.getId(), 2);
        registerEffect(EffectType.EFFECT_CONFUSION, Potion.confusion.getId(), 0);
        registerEffect(EffectType.EFFECT_RESISTANCE, Potion.resistance.getId(), 0);
        registerEffect(EffectType.EFFECT_RESISTANCE2, Potion.resistance.getId(), 1);
        registerEffect(EffectType.EFFECT_RESISTANCE3, Potion.resistance.getId(), 2);
        registerEffect(EffectType.EFFECT_FIRERESISTANCE, Potion.fireResistance.getId(), 0);
        registerEffect(EffectType.EFFECT_WATERBREATHING, Potion.waterBreathing.getId(), 0);
        registerEffect(EffectType.EFFECT_INVISIBILITY, Potion.invisibility.getId(), 0);
        registerEffect(EffectType.EFFECT_BLINDNESS, Potion.blindness.getId(), 0);
        registerEffect(EffectType.EFFECT_NIGHTVISION, Potion.nightVision.getId(), 0);
        registerEffect(EffectType.EFFECT_HUNGER, Potion.hunger.getId(), 0);
        registerEffect(EffectType.EFFECT_HUNGER2, Potion.hunger.getId(), 1);
        registerEffect(EffectType.EFFECT_HUNGER3, Potion.hunger.getId(), 2);
        registerEffect(EffectType.EFFECT_WEAKNESS, Potion.weakness.getId(), 0);
        registerEffect(EffectType.EFFECT_WEAKNESS2, Potion.weakness.getId(), 1);
        registerEffect(EffectType.EFFECT_WEAKNESS3, Potion.weakness.getId(), 2);
        registerEffect(EffectType.EFFECT_WITHER, Potion.wither.getId(), 0);
        registerEffect(EffectType.EFFECT_WITHER2, Potion.wither.getId(), 1);
        registerEffect(EffectType.EFFECT_WITHER3, Potion.wither.getId(), 2);
        // Obfuscated 1.7.10 fields: field_76434_w/76444_x/76443_y presumably map to
        // health boost / absorption / saturation (matches the EffectType names) -- TODO confirm.
        registerEffect(EffectType.EFFECT_HEALTHBOOST, Potion.field_76434_w.getId(), 0);
        registerEffect(EffectType.EFFECT_HEALTHBOOST2, Potion.field_76434_w.getId(), 1);
        registerEffect(EffectType.EFFECT_HEALTHBOOST3, Potion.field_76434_w.getId(), 2);
        registerEffect(EffectType.EFFECT_ABSORPTION, Potion.field_76444_x.getId(), 0);
        registerEffect(EffectType.EFFECT_ABSORPTION2, Potion.field_76444_x.getId(), 1);
        registerEffect(EffectType.EFFECT_ABSORPTION3, Potion.field_76444_x.getId(), 2);
        registerEffect(EffectType.EFFECT_SATURATION, Potion.field_76443_y.getId(), 0);
        registerEffect(EffectType.EFFECT_SATURATION2, Potion.field_76443_y.getId(), 1);
        registerEffect(EffectType.EFFECT_SATURATION3, Potion.field_76443_y.getId(), 2);
    }

    /**
     * Applies the dimension's configured potion effects to every player inside it,
     * plus escalating debuffs as stored power drops below the warning thresholds.
     */
    private void handleEffectsForDimension(int power, int id, DimensionInformation information) {
        WorldServer world = DimensionManager.getWorld(id);
        if (world == null) {
            return;
        }
        Set<EffectType> effects = information.getEffectTypes();
        // Snapshot of the live player list, as in the original defensive copy.
        List<EntityPlayer> players = new ArrayList<EntityPlayer>(world.playerEntities);
        for (EntityPlayer player : players) {
            for (EffectType effect : effects) {
                Integer potionId = effectsMap.get(effect);
                if (potionId != null) {
                    Integer amplifier = effectAmplifierMap.get(effect);
                    if (amplifier == null) {
                        amplifier = 0;
                    }
                    // Duration covers exactly one effect-refresh period.
                    player.addPotionEffect(new PotionEffect(potionId, EFFECTS_MAX * MAXTICKS, amplifier, true));
                }
            }
            if (power < DimletConfiguration.DIMPOWER_WARN3) {
                // We are VERY low on power. Start bad effects.
                player.addPotionEffect(new PotionEffect(Potion.moveSlowdown.getId(), EFFECTS_MAX * MAXTICKS, 4, true));
                player.addPotionEffect(new PotionEffect(Potion.digSlowdown.getId(), EFFECTS_MAX * MAXTICKS, 4, true));
                player.addPotionEffect(new PotionEffect(Potion.poison.getId(), EFFECTS_MAX * MAXTICKS, 2, true));
                player.addPotionEffect(new PotionEffect(Potion.hunger.getId(), EFFECTS_MAX * MAXTICKS, 2, true));
            } else if (power < DimletConfiguration.DIMPOWER_WARN2) {
                player.addPotionEffect(new PotionEffect(Potion.moveSlowdown.getId(), EFFECTS_MAX * MAXTICKS, 2, true));
                player.addPotionEffect(new PotionEffect(Potion.digSlowdown.getId(), EFFECTS_MAX * MAXTICKS, 2, true));
                player.addPotionEffect(new PotionEffect(Potion.hunger.getId(), EFFECTS_MAX * MAXTICKS, 1, true));
            } else if (power < DimletConfiguration.DIMPOWER_WARN1) {
                player.addPotionEffect(new PotionEffect(Potion.moveSlowdown.getId(), EFFECTS_MAX * MAXTICKS, 0, true));
                player.addPotionEffect(new PotionEffect(Potion.digSlowdown.getId(), EFFECTS_MAX * MAXTICKS, 0, true));
            }
        }
    }

    /**
     * Looks for a phased field generator with enough charge on the player's hotbar
     * and, if found, drains the energy for one maintenance period.
     *
     * @return true when a sufficiently charged generator was found and drained
     */
    private boolean checkValidPhasedFieldGenerator(EntityPlayer player) {
        InventoryPlayer inventory = player.inventory;
        // Energy needed to bridge one maintenance period; loop-invariant, computed once.
        int toConsume = MAXTICKS * DimletConfiguration.PHASEDFIELD_CONSUMEPERTICK;
        for (int i = 0; i < inventory.getHotbarSize(); i++) {
            ItemStack slot = inventory.getStackInSlot(i);
            if (slot != null && slot.getItem() == ModItems.phasedFieldGeneratorItem) {
                PhasedFieldGeneratorItem pfg = (PhasedFieldGeneratorItem) slot.getItem();
                if (pfg.getEnergyStored(slot) >= toConsume) {
                    pfg.extractEnergy(slot, toConsume, false);
                    return true;
                }
            }
        }
        return false;
    }

    // Heavy debuffs applied to a player who survives a power outage thanks to a
    // phased field generator.
    private void applyPhasedFieldDebuffs(EntityPlayer player) {
        player.addPotionEffect(new PotionEffect(Potion.moveSlowdown.getId(), EFFECTS_MAX * MAXTICKS, 4, true));
        player.addPotionEffect(new PotionEffect(Potion.digSlowdown.getId(), EFFECTS_MAX * MAXTICKS, 4, true));
        player.addPotionEffect(new PotionEffect(Potion.hunger.getId(), EFFECTS_MAX * MAXTICKS, 2, true));
    }

    /**
     * Handles a dimension whose power reached zero: players without a working phased
     * field generator are either killed (difficulty >= 1) or teleported to a random
     * spot in the configured spawn dimension; protected players only get debuffs.
     */
    private void handleLowPower(Integer id, int power, boolean doEffects) {
        if (power > 0) {
            return;
        }
        // We ran out of power!
        WorldServer world = DimensionManager.getWorld(id);
        if (world == null) {
            return;
        }
        List<EntityPlayer> players = new ArrayList<EntityPlayer>(world.playerEntities);
        if (DimletConfiguration.dimensionDifficulty >= 1) {
            for (EntityPlayer player : players) {
                if (!checkValidPhasedFieldGenerator(player)) {
                    player.attackEntityFrom(new DamageSourcePowerLow("powerLow"), 1000000.0f);
                } else if (doEffects) {
                    applyPhasedFieldDebuffs(player);
                }
            }
        } else {
            Random random = new Random();
            for (EntityPlayer player : players) {
                if (!checkValidPhasedFieldGenerator(player)) {
                    WorldServer spawnWorld = MinecraftServer.getServer().worldServerForDimension(DimletConfiguration.spawnDimension);
                    int x = random.nextInt(2000) - 1000;
                    int z = random.nextInt(2000) - 1000;
                    int y = spawnWorld.getTopSolidOrLiquidBlock(x, z);
                    if (y == -1) {
                        // No solid ground found: fall back to sea level.
                        y = 63;
                    }
                    MinecraftServer.getServer().getConfigurationManager().transferPlayerToDimension((EntityPlayerMP) player, DimletConfiguration.spawnDimension,
                        new RfToolsTeleporter(spawnWorld, x, y, z));
                } else if (doEffects) {
                    applyPhasedFieldDebuffs(player);
                }
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients.admin;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.TopicPartitionInfo;
import org.apache.kafka.common.TopicPartitionReplica;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclBindingFilter;
import org.apache.kafka.common.config.ConfigResource;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.errors.TopicExistsException;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.internals.KafkaFutureImpl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public class MockAdminClient extends AdminClient {
// Cluster ID reported when callers do not supply one.
public static final String DEFAULT_CLUSTER_ID = "I4ZmrWqfT2e-upky_4fdPA";
private final List<Node> brokers;
// Topic name -> mocked metadata for all topics currently "existing" in the cluster.
private final Map<String, TopicMetadata> allTopics = new HashMap<>();
private final String clusterId;
private Node controller;
// Number of upcoming admin requests that should fail with a TimeoutException.
private int timeoutNextRequests = 0;
/**
 * Creates MockAdminClient for a cluster with the given brokers. The Kafka cluster ID uses the default value from
 * DEFAULT_CLUSTER_ID.
 *
 * @param brokers list of brokers in the cluster
 * @param controller node that should start as the controller
 */
public MockAdminClient(List<Node> brokers, Node controller) {
    this(brokers, controller, DEFAULT_CLUSTER_ID);
}
/**
 * Creates MockAdminClient for a cluster with the given brokers.
 *
 * @param brokers list of brokers in the cluster
 * @param controller node that should start as the controller
 * @param clusterId the cluster ID reported by {@code describeCluster}
 */
public MockAdminClient(List<Node> brokers, Node controller, String clusterId) {
    this.brokers = brokers;
    controller(controller);
    this.clusterId = clusterId;
}
/**
 * Changes the node regarded as the cluster controller.
 *
 * @param controller the new controller; must be one of the configured brokers
 * @throws IllegalArgumentException if the node is not in the broker list
 */
public void controller(Node controller) {
    if (!brokers.contains(controller))
        throw new IllegalArgumentException("The controller node must be in the list of brokers");
    this.controller = controller;
}
/**
 * Registers a topic in the mock cluster state.
 *
 * @param internal whether the topic is an internal one
 * @param name topic name; must not have been added before
 * @param partitions partition infos; leader, replicas and isr must all be known
 *                   brokers, and every partition must share the same replica list
 * @param configs topic configs
 */
public void addTopic(boolean internal,
                     String name,
                     List<TopicPartitionInfo> partitions,
                     Map<String, String> configs) {
    if (allTopics.containsKey(name)) {
        throw new IllegalArgumentException(String.format("Topic %s was already added.", name));
    }
    List<Node> expectedReplicas = null;
    for (TopicPartitionInfo info : partitions) {
        if (!brokers.contains(info.leader())) {
            throw new IllegalArgumentException("Leader broker unknown");
        }
        if (!brokers.containsAll(info.replicas())) {
            throw new IllegalArgumentException("Unknown brokers in replica list");
        }
        if (!brokers.containsAll(info.isr())) {
            throw new IllegalArgumentException("Unknown brokers in isr list");
        }
        if (expectedReplicas == null) {
            expectedReplicas = info.replicas();
        } else if (!expectedReplicas.equals(info.replicas())) {
            throw new IllegalArgumentException("All partitions need to have the same replica nodes.");
        }
    }
    allTopics.put(name, new TopicMetadata(internal, partitions, configs));
}
/**
 * Makes the next {@code numberOfRequest} admin requests fail with a timeout.
 *
 * @param numberOfRequest number of upcoming requests to time out
 */
public void timeoutNextRequest(int numberOfRequest) {
    timeoutNextRequests = numberOfRequest;
}
@Override
public DescribeClusterResult describeCluster(DescribeClusterOptions options) {
    // Futures for the three pieces of cluster metadata this mock reports.
    KafkaFutureImpl<Collection<Node>> nodes = new KafkaFutureImpl<>();
    KafkaFutureImpl<Node> activeController = new KafkaFutureImpl<>();
    KafkaFutureImpl<String> id = new KafkaFutureImpl<>();
    if (timeoutNextRequests > 0) {
        --timeoutNextRequests;
        nodes.completeExceptionally(new TimeoutException());
        activeController.completeExceptionally(new TimeoutException());
        id.completeExceptionally(new TimeoutException());
    } else {
        nodes.complete(brokers);
        activeController.complete(controller);
        id.complete(clusterId);
    }
    return new DescribeClusterResult(nodes, activeController, id);
}
@Override
public CreateTopicsResult createTopics(Collection<NewTopic> newTopics, CreateTopicsOptions options) {
    Map<String, KafkaFuture<Void>> results = new HashMap<>();
    if (timeoutNextRequests > 0) {
        for (final NewTopic newTopic : newTopics) {
            KafkaFutureImpl<Void> future = new KafkaFutureImpl<>();
            future.completeExceptionally(new TimeoutException());
            results.put(newTopic.name(), future);
        }
        --timeoutNextRequests;
        return new CreateTopicsResult(results);
    }
    for (final NewTopic newTopic : newTopics) {
        String topicName = newTopic.name();
        KafkaFutureImpl<Void> future = new KafkaFutureImpl<>();
        if (allTopics.containsKey(topicName)) {
            future.completeExceptionally(new TopicExistsException(String.format("Topic %s exists already.", topicName)));
            results.put(topicName, future);
            continue;
        }
        // NOTE(review): assumes replicationFactor <= brokers.size(); otherwise this throws
        // IndexOutOfBoundsException rather than a Kafka error -- confirm callers never exceed it.
        int replicationFactor = newTopic.replicationFactor();
        List<Node> replicas = new ArrayList<>(replicationFactor);
        for (int i = 0; i < replicationFactor; ++i) {
            replicas.add(brokers.get(i));
        }
        // Every partition gets the first broker as leader and an empty isr list.
        int numberOfPartitions = newTopic.numPartitions();
        List<TopicPartitionInfo> partitions = new ArrayList<>(numberOfPartitions);
        for (int p = 0; p < numberOfPartitions; ++p) {
            partitions.add(new TopicPartitionInfo(p, brokers.get(0), replicas, Collections.<Node>emptyList()));
        }
        allTopics.put(topicName, new TopicMetadata(false, partitions, newTopic.configs()));
        future.complete(null);
        results.put(topicName, future);
    }
    return new CreateTopicsResult(results);
}
@Override
public ListTopicsResult listTopics(ListTopicsOptions options) {
    KafkaFutureImpl<Map<String, TopicListing>> future = new KafkaFutureImpl<>();
    if (timeoutNextRequests > 0) {
        --timeoutNextRequests;
        future.completeExceptionally(new TimeoutException());
        return new ListTopicsResult(future);
    }
    // Build a listing for every known topic, carrying over the internal flag.
    Map<String, TopicListing> listings = new HashMap<>();
    for (Map.Entry<String, TopicMetadata> entry : allTopics.entrySet()) {
        String name = entry.getKey();
        listings.put(name, new TopicListing(name, entry.getValue().isInternalTopic));
    }
    future.complete(listings);
    return new ListTopicsResult(future);
}
@Override
public DescribeTopicsResult describeTopics(Collection<String> topicNames, DescribeTopicsOptions options) {
    Map<String, KafkaFuture<TopicDescription>> topicDescriptions = new HashMap<>();
    if (timeoutNextRequests > 0) {
        for (String requestedTopic : topicNames) {
            KafkaFutureImpl<TopicDescription> future = new KafkaFutureImpl<>();
            future.completeExceptionally(new TimeoutException());
            topicDescriptions.put(requestedTopic, future);
        }
        --timeoutNextRequests;
        return new DescribeTopicsResult(topicDescriptions);
    }
    for (String requestedTopic : topicNames) {
        KafkaFutureImpl<TopicDescription> future = new KafkaFutureImpl<>();
        // Direct map lookup instead of the previous linear scan over every entry --
        // the map key IS the topic name, so the result is identical.
        TopicMetadata topicMetadata = allTopics.get(requestedTopic);
        if (topicMetadata != null) {
            future.complete(new TopicDescription(requestedTopic, topicMetadata.isInternalTopic, topicMetadata.partitions));
        } else {
            future.completeExceptionally(new UnknownTopicOrPartitionException(
                String.format("Topic %s unknown.", requestedTopic)));
        }
        topicDescriptions.put(requestedTopic, future);
    }
    return new DescribeTopicsResult(topicDescriptions);
}
@Override
public DeleteTopicsResult deleteTopics(Collection<String> topicsToDelete, DeleteTopicsOptions options) {
    Map<String, KafkaFuture<Void>> results = new HashMap<>();
    // Honor the test hook that forces the next N requests to time out.
    if (timeoutNextRequests > 0) {
        for (final String topicName : topicsToDelete) {
            KafkaFutureImpl<Void> timedOut = new KafkaFutureImpl<>();
            timedOut.completeExceptionally(new TimeoutException());
            results.put(topicName, timedOut);
        }
        --timeoutNextRequests;
        return new DeleteTopicsResult(results);
    }
    // Remove each topic; a missing topic completes its future exceptionally.
    for (final String topicName : topicsToDelete) {
        KafkaFutureImpl<Void> outcome = new KafkaFutureImpl<>();
        if (allTopics.remove(topicName) != null) {
            outcome.complete(null);
        } else {
            outcome.completeExceptionally(new UnknownTopicOrPartitionException(String.format("Topic %s does not exist.", topicName)));
        }
        results.put(topicName, outcome);
    }
    return new DeleteTopicsResult(results);
}
// Partition expansion is not supported by this mock.
@Override
public CreatePartitionsResult createPartitions(Map<String, NewPartitions> newPartitions, CreatePartitionsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public DeleteRecordsResult deleteRecords(Map<TopicPartition, RecordsToDelete> recordsToDelete, DeleteRecordsOptions options) {
    // Only the trivial no-op case is supported by this mock.
    if (!recordsToDelete.isEmpty()) {
        throw new UnsupportedOperationException("Not implemented yet");
    }
    return new DeleteRecordsResult(new HashMap<TopicPartition, KafkaFuture<DeletedRecords>>());
}
// Delegation-token operations are not supported by this mock.
@Override
public CreateDelegationTokenResult createDelegationToken(CreateDelegationTokenOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public RenewDelegationTokenResult renewDelegationToken(byte[] hmac, RenewDelegationTokenOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public ExpireDelegationTokenResult expireDelegationToken(byte[] hmac, ExpireDelegationTokenOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public DescribeDelegationTokenResult describeDelegationToken(DescribeDelegationTokenOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
// Consumer-group operations are not supported by this mock.
@Override
public DescribeConsumerGroupsResult describeConsumerGroups(Collection<String> groupIds, DescribeConsumerGroupsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public ListConsumerGroupsResult listConsumerGroups(ListConsumerGroupsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public ListConsumerGroupOffsetsResult listConsumerGroupOffsets(String groupId, ListConsumerGroupOffsetsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public DeleteConsumerGroupsResult deleteConsumerGroups(Collection<String> groupIds, DeleteConsumerGroupsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
// ACL operations are not supported by this mock.
@Override
public CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public DescribeAclsResult describeAcls(AclBindingFilter filter, DescribeAclsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
/**
 * Returns the stored configs for each requested TOPIC resource.
 * Unknown topics now complete their future exceptionally with
 * {@link UnknownTopicOrPartitionException} instead of throwing a raw NPE
 * from {@code allTopics.get(...).configs}, matching the behavior of
 * {@code describeTopics}/{@code deleteTopics}. Non-topic resource types
 * are unsupported.
 */
@Override
public DescribeConfigsResult describeConfigs(Collection<ConfigResource> resources, DescribeConfigsOptions options) {
    Map<ConfigResource, KafkaFuture<Config>> configDescriptions = new HashMap<>();
    for (ConfigResource resource : resources) {
        if (resource.type() == ConfigResource.Type.TOPIC) {
            KafkaFutureImpl<Config> future = new KafkaFutureImpl<>();
            TopicMetadata topicMetadata = allTopics.get(resource.name());
            if (topicMetadata == null) {
                // Previously this line NPE'd on an unknown topic name.
                future.completeExceptionally(new UnknownTopicOrPartitionException(
                    String.format("Topic %s unknown.", resource.name())));
            } else {
                List<ConfigEntry> configEntries = new ArrayList<>();
                for (Map.Entry<String, String> entry : topicMetadata.configs.entrySet()) {
                    configEntries.add(new ConfigEntry(entry.getKey(), entry.getValue()));
                }
                future.complete(new Config(configEntries));
            }
            configDescriptions.put(resource, future);
        } else {
            throw new UnsupportedOperationException("Not implemented yet");
        }
    }
    return new DescribeConfigsResult(configDescriptions);
}
// Config-alteration and log-dir operations are not supported by this mock.
@Override
public AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, AlterConfigsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public AlterReplicaLogDirsResult alterReplicaLogDirs(Map<TopicPartitionReplica, String> replicaAssignment, AlterReplicaLogDirsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public DescribeLogDirsResult describeLogDirs(Collection<Integer> brokers, DescribeLogDirsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public DescribeReplicaLogDirsResult describeReplicaLogDirs(Collection<TopicPartitionReplica> replicas, DescribeReplicaLogDirsOptions options) {
    throw new UnsupportedOperationException("Not implemented yet");
}
// Nothing to release; the mock ignores the close timeout.
@Override
public void close(long duration, TimeUnit unit) {}
/** In-memory record of one topic tracked by this mock admin client. */
private final static class TopicMetadata {
    // Whether the topic is internal (e.g. __consumer_offsets).
    final boolean isInternalTopic;
    // Partition layout, one entry per partition.
    final List<TopicPartitionInfo> partitions;
    // Topic-level configs; never null (normalized in the constructor).
    final Map<String, String> configs;
    TopicMetadata(boolean isInternalTopic,
                  List<TopicPartitionInfo> partitions,
                  Map<String, String> configs) {
        this.isInternalTopic = isInternalTopic;
        this.partitions = partitions;
        // A null config map is replaced by an immutable empty map.
        this.configs = configs != null ? configs : Collections.<String, String>emptyMap();
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.eclipse.servers;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.eclipse.Activator;
import org.apache.hadoop.eclipse.ErrorMessageDialog;
import org.apache.hadoop.eclipse.server.HadoopServer;
import org.apache.hadoop.eclipse.server.JarModule;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.JobConf;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
import org.eclipse.jdt.launching.JavaRuntime;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
/**
* Wizard for publishing a job to a Hadoop server.
*/
/**
 * Wizard for publishing a job to a Hadoop server.
 *
 * <p>Flow: the user either selects an already-defined Hadoop location or
 * creates a new one; on finish the selected resource is packaged into a
 * JAR, a temporary Hadoop configuration directory is generated, and both
 * are wired into the launch configuration's class path.
 */
public class RunOnHadoopWizard extends Wizard {

  private MainWizardPage mainPage;

  private HadoopLocationWizard createNewPage;

  /**
   * The file resource (containing a main()) to run on the Hadoop location
   */
  private IFile resource;

  /**
   * The launch configuration to update
   */
  private ILaunchConfigurationWorkingCopy iConf;

  // Currently unused; see the commented-out runResource() call below.
  private IProgressMonitor progressMonitor;

  public RunOnHadoopWizard(IFile resource,
      ILaunchConfigurationWorkingCopy iConf) {
    this.resource = resource;
    this.iConf = iConf;
    setForcePreviousAndNextButtons(true);
    setNeedsProgressMonitor(true);
    setWindowTitle("Run on Hadoop");
  }

  /**
   * This wizard contains 2 pages:
   * <li> the first one lets the user choose an already existing location
   * <li> the second one allows the user to create a new location, in case it
   * does not already exist
   */
  /* @inheritDoc */
  @Override
  public void addPages() {
    addPage(this.mainPage = new MainWizardPage());
    addPage(this.createNewPage = new HadoopLocationWizard());
  }

  /**
   * Performs any actions appropriate in response to the user having pressed
   * the Finish button, or refuse if finishing now is not permitted.
   */
  /* @inheritDoc */
  @Override
  public boolean performFinish() {

    /*
     * Create a new location or get an existing one
     */
    HadoopServer location = null;
    if (mainPage.createNew.getSelection()) {
      location = createNewPage.performFinish();
    } else if (mainPage.table.getSelection().length == 1) {
      location = (HadoopServer) mainPage.table.getSelection()[0].getData();
    }

    if (location == null)
      return false;

    /*
     * Get the base directory of the plug-in for storing configurations and
     * JARs
     */
    File baseDir = Activator.getDefault().getStateLocation().toFile();

    // Package the Job into a JAR
    File jarFile = JarModule.createJarPackage(resource);
    if (jarFile == null) {
      ErrorMessageDialog.display("Run on Hadoop",
          "Unable to create or locate the JAR file for the Job");
      return false;
    }

    /*
     * Generate a temporary Hadoop configuration directory and add it to the
     * classpath of the launch configuration
     */
    File confDir;
    try {
      // createTempFile() creates a plain file; delete it and recreate the
      // same unique name as a directory.
      confDir = File.createTempFile("hadoop-conf-", "", baseDir);
      confDir.delete();
      confDir.mkdirs();
      if (!confDir.isDirectory()) {
        ErrorMessageDialog.display("Run on Hadoop",
            "Cannot create temporary directory: " + confDir);
        return false;
      }
    } catch (IOException ioe) {
      ioe.printStackTrace();
      return false;
    }

    // Prepare the Hadoop configuration
    JobConf conf = new JobConf(location.getConfiguration());
    conf.setJar(jarFile.getAbsolutePath());

    // Write it to the disk file
    try {
      // File confFile = File.createTempFile("core-site-", ".xml",
      // confDir);
      File confFile = new File(confDir, "core-site.xml");
      FileOutputStream fos = new FileOutputStream(confFile);
      try {
        conf.writeXml(fos);
        fos.close();
        fos = null;
      } finally {
        // Only closes fos if writeXml()/close() above failed.
        IOUtils.closeStream(fos);
      }
    } catch (IOException ioe) {
      ioe.printStackTrace();
      return false;
    }

    // Setup the Launch class path: prepend the generated conf directory so
    // its core-site.xml wins over any other configuration on the path.
    List<String> classPath;
    try {
      classPath =
          iConf.getAttribute(
              IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
              new ArrayList<String>()); // was a raw ArrayList
      IPath confIPath = new Path(confDir.getAbsolutePath());
      IRuntimeClasspathEntry cpEntry =
          JavaRuntime.newArchiveRuntimeClasspathEntry(confIPath);
      classPath.add(0, cpEntry.getMemento());
      iConf.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
          classPath);
    } catch (CoreException e) {
      e.printStackTrace();
      return false;
    }

    // location.runResource(resource, progressMonitor);
    return true;
  }

  /** Asks the wizard container to re-evaluate button enablement. */
  private void refreshButtons() {
    getContainer().updateButtons();
  }

  /**
   * Allows finish when an existing server is selected or when a new server
   * location is defined
   */
  /* @inheritDoc */
  @Override
  public boolean canFinish() {
    if (mainPage != null)
      return mainPage.canFinish();
    return false;
  }

  /**
   * This is the main page of the wizard. It allows the user either to choose
   * an already existing location or to indicate he wants to create a new
   * location.
   */
  public class MainWizardPage extends WizardPage {

    private Button createNew;

    private Table table;

    private Button chooseExisting;

    public MainWizardPage() {
      super("Select or define server to run on");
      setTitle("Select Hadoop location");
      setDescription("Select a Hadoop location to run on.");
    }

    /* @inheritDoc */
    @Override
    public boolean canFlipToNextPage() {
      // Only the "define a new location" choice has a follow-up page.
      return createNew.getSelection();
    }

    /* @inheritDoc */
    public void createControl(Composite parent) {
      Composite panel = new Composite(parent, SWT.NONE);
      panel.setLayout(new GridLayout(1, false));

      // Label
      Label label = new Label(panel, SWT.NONE);
      label.setText("Select a Hadoop Server to run on.");
      GridData gData = new GridData(GridData.FILL_BOTH);
      gData.grabExcessVerticalSpace = false;
      label.setLayoutData(gData);

      // Create location button
      createNew = new Button(panel, SWT.RADIO);
      createNew.setText("Define a new Hadoop server location");
      createNew.setLayoutData(gData);
      createNew.addSelectionListener(new SelectionListener() {
        public void widgetDefaultSelected(SelectionEvent e) {
        }

        public void widgetSelected(SelectionEvent e) {
          setPageComplete(true);
          RunOnHadoopWizard.this.refreshButtons();
        }
      });
      createNew.setSelection(true);

      // Select existing location button
      chooseExisting = new Button(panel, SWT.RADIO);
      chooseExisting
          .setText("Choose an existing server from the list below");
      chooseExisting.setLayoutData(gData);
      chooseExisting.addSelectionListener(new SelectionListener() {
        public void widgetDefaultSelected(SelectionEvent e) {
        }

        public void widgetSelected(SelectionEvent e) {
          // Default to the first server when switching to "existing"
          // with nothing selected yet.
          if (chooseExisting.getSelection()
              && (table.getSelectionCount() == 0)) {
            if (table.getItems().length > 0) {
              table.setSelection(0);
            }
          }
          RunOnHadoopWizard.this.refreshButtons();
        }
      });

      // Table of existing locations
      Composite serverListPanel = new Composite(panel, SWT.FILL);
      gData = new GridData(GridData.FILL_BOTH);
      gData.horizontalSpan = 1;
      serverListPanel.setLayoutData(gData);

      FillLayout layout = new FillLayout();
      layout.marginHeight = layout.marginWidth = 12;
      serverListPanel.setLayout(layout);

      table =
          new Table(serverListPanel, SWT.BORDER | SWT.H_SCROLL
              | SWT.V_SCROLL | SWT.FULL_SELECTION);
      table.setHeaderVisible(true);
      table.setLinesVisible(true);

      TableColumn nameColumn = new TableColumn(table, SWT.LEFT);
      nameColumn.setText("Location");
      nameColumn.setWidth(450);

      TableColumn hostColumn = new TableColumn(table, SWT.LEFT);
      hostColumn.setText("Master host name");
      hostColumn.setWidth(250);

      // If the user select one entry, switch to "chooseExisting"
      table.addSelectionListener(new SelectionListener() {
        public void widgetDefaultSelected(SelectionEvent e) {
        }

        public void widgetSelected(SelectionEvent e) {
          chooseExisting.setSelection(true);
          createNew.setSelection(false);
          setPageComplete(table.getSelectionCount() == 1);
          RunOnHadoopWizard.this.refreshButtons();
        }
      });

      TableViewer viewer = new TableViewer(table);
      HadoopServerSelectionListContentProvider provider =
          new HadoopServerSelectionListContentProvider();
      viewer.setContentProvider(provider);
      viewer.setLabelProvider(provider);
      viewer.setInput(new Object());
      // don't care, get from singleton server registry

      this.setControl(panel);
    }

    /**
     * Returns whether this page state allows the Wizard to finish or not
     *
     * @return can the wizard finish or not?
     */
    public boolean canFinish() {
      if (!isControlCreated())
        return false;

      if (this.createNew.getSelection())
        return getNextPage().isPageComplete();

      return this.chooseExisting.getSelection();
    }
  }

  /**
   * @param progressMonitor monitor for reporting job-publication progress
   *          (stored but currently unused)
   */
  public void setProgressMonitor(IProgressMonitor progressMonitor) {
    this.progressMonitor = progressMonitor;
  }
}
| |
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.TypeValidator.TypeMismatch;
import com.google.javascript.jscomp.graph.AdjacencyGraph;
import com.google.javascript.jscomp.graph.Annotation;
import com.google.javascript.jscomp.graph.GraphColoring;
import com.google.javascript.jscomp.graph.GraphColoring.GreedyGraphColoring;
import com.google.javascript.jscomp.graph.GraphNode;
import com.google.javascript.jscomp.graph.SubGraph;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.JSTypeNative;
import com.google.javascript.rhino.jstype.JSTypeRegistry;
import com.google.javascript.rhino.jstype.ObjectType;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
/**
* Renames unrelated properties to the same name, using type information.
* This allows better compression as more properties can be given short names.
*
* <p>Properties are considered unrelated if they are never referenced from the
* same type or from a subtype of each others' types, thus this pass is only
* effective if type checking is enabled.
*
* Example:
* <code>
* Foo.fooprop = 0;
* Foo.fooprop2 = 0;
* Bar.barprop = 0;
* </code>
*
* becomes:
*
* <code>
* Foo.a = 0;
* Foo.b = 0;
* Bar.a = 0;
* </code>
*
*/
class AmbiguateProperties implements CompilerPass {
  private static final Logger logger = Logger.getLogger(
      AmbiguateProperties.class.getName());

  private final AbstractCompiler compiler;

  // STRING nodes naming properties; renamed in place at the end of process().
  private final List<Node> stringNodesToRename = new ArrayList<>();

  // Can't use these as property names.
  private final char[] reservedCharacters;

  /** Map from property name to Property object */
  private final Map<String, Property> propertyMap = new HashMap<>();

  /** Property names that don't get renamed */
  private final Set<String> externedNames;

  /** Names to which properties shouldn't be renamed, to avoid name conflicts */
  private final Set<String> quotedNames = new HashSet<>();

  /** Map from original property name to new name. Only used by tests. */
  private Map<String, String> renamingMap = null;

  /**
   * Sorts Property objects by their count, breaking ties alphabetically to
   * ensure a deterministic total ordering.
   */
  private static final Comparator<Property> FREQUENCY_COMPARATOR =
      new Comparator<Property>() {
        @Override
        public int compare(Property p1, Property p2) {
          // Higher-frequency properties sort first so they receive shorter
          // names. Subtraction is safe: occurrence counts are non-negative.
          if (p1.numOccurrences != p2.numOccurrences) {
            return p2.numOccurrences - p1.numOccurrences;
          }
          return p1.oldName.compareTo(p2.oldName);
        }
      };

  /** A map from JSType to a unique representative Integer. */
  private BiMap<JSType, Integer> intForType = HashBiMap.create();

  /**
   * A map from JSType to JSTypeBitSet representing the types related
   * to the type.
   */
  private Map<JSType, JSTypeBitSet> relatedBitsets = new HashMap<>();

  /** A set of types that invalidate properties from ambiguation. */
  private final Set<JSType> invalidatingTypes;

  /**
   * Prefix of properties to skip renaming. These should be renamed in the
   * RenameProperties pass.
   */
  static final String SKIP_PREFIX = "JSAbstractCompiler";
AmbiguateProperties(AbstractCompiler compiler,
char[] reservedCharacters) {
Preconditions.checkState(compiler.getLifeCycleStage().isNormalized());
this.compiler = compiler;
this.reservedCharacters = reservedCharacters;
JSTypeRegistry r = compiler.getTypeRegistry();
invalidatingTypes = new HashSet<>(ImmutableSet.of(
r.getNativeType(JSTypeNative.ALL_TYPE),
r.getNativeType(JSTypeNative.FUNCTION_FUNCTION_TYPE),
r.getNativeType(JSTypeNative.FUNCTION_INSTANCE_TYPE),
r.getNativeType(JSTypeNative.FUNCTION_PROTOTYPE),
r.getNativeType(JSTypeNative.GLOBAL_THIS),
r.getNativeType(JSTypeNative.OBJECT_TYPE),
r.getNativeType(JSTypeNative.OBJECT_PROTOTYPE),
r.getNativeType(JSTypeNative.OBJECT_FUNCTION_TYPE),
r.getNativeType(JSTypeNative.TOP_LEVEL_PROTOTYPE)));
for (TypeMismatch mis : compiler.getTypeMismatches()) {
addInvalidatingType(mis.typeA);
addInvalidatingType(mis.typeB);
}
for (TypeMismatch mis : compiler.getImplicitInterfaceUses()) {
addInvalidatingType(mis.typeA);
addInvalidatingType(mis.typeB);
}
externedNames = compiler.getExternProperties();
}
static AmbiguateProperties makePassForTesting(
AbstractCompiler compiler, char[] reservedCharacters) {
AmbiguateProperties ap =
new AmbiguateProperties(compiler, reservedCharacters);
ap.renamingMap = new HashMap<>();
return ap;
}
/**
* Invalidates the given type, so that no properties on it will be renamed.
*/
private void addInvalidatingType(JSType type) {
type = type.restrictByNotNullOrUndefined();
if (type.isUnionType()) {
for (JSType alt : type.toMaybeUnionType().getAlternatesWithoutStructuralTyping()) {
addInvalidatingType(alt);
}
}
invalidatingTypes.add(type);
ObjectType objType = ObjectType.cast(type);
if (objType != null && objType.isInstanceType()) {
invalidatingTypes.add(objType.getImplicitPrototype());
}
}
  // Returns the old-name to new-name mapping; non-null only when the pass
  // was built via makePassForTesting().
  Map<String, String> getRenamingMap() {
    Preconditions.checkNotNull(renamingMap);
    return renamingMap;
  }
/** Returns an integer that uniquely identifies a JSType. */
private int getIntForType(JSType type) {
// Templatized types don't exist at runtime, so collapse to raw type
if (type != null && type.isTemplatizedType()) {
type = type.toMaybeTemplatizedType().getReferencedType();
}
if (intForType.containsKey(type)) {
return intForType.get(type).intValue();
}
int newInt = intForType.size() + 1;
intForType.put(type, newInt);
return newInt;
}
  @Override
  public void process(Node externs, Node root) {
    // Find all property references and record the types on which they occur.
    // Populate stringNodesToRename, propertyMap, quotedNames.
    NodeTraversal.traverseEs6(compiler, root, new ProcessProperties());
    // Names that must keep their spelling: externs plus anything accessed
    // through a quoted string.
    ImmutableSet.Builder<String> reservedNames = ImmutableSet.<String>builder()
        .addAll(externedNames)
        .addAll(quotedNames);
    int numRenamedPropertyNames = 0;
    int numSkippedPropertyNames = 0;
    ArrayList<PropertyGraphNode> nodes = new ArrayList<>(propertyMap.size());
    for (Property prop : propertyMap.values()) {
      if (prop.skipAmbiguating) {
        ++numSkippedPropertyNames;
        reservedNames.add(prop.oldName);
      } else {
        ++numRenamedPropertyNames;
        nodes.add(new PropertyGraphNode(prop));
      }
    }
    // Color the property graph: properties sharing a color may share a name.
    PropertyGraph graph = new PropertyGraph(nodes);
    GraphColoring<Property, Void> coloring =
        new GreedyGraphColoring<>(graph, FREQUENCY_COMPARATOR);
    int numNewPropertyNames = coloring.color();
    // Generate new names for the properties that will be renamed.
    NameGenerator nameGen = new DefaultNameGenerator(
        reservedNames.build(), "", reservedCharacters);
    String[] colorMap = new String[numNewPropertyNames];
    for (int i = 0; i < numNewPropertyNames; ++i) {
      colorMap[i] = nameGen.generateNextName();
    }
    // Translate the color of each Property instance to a name.
    for (PropertyGraphNode node : graph.getNodes()) {
      node.getValue().newName = colorMap[node.getAnnotation().hashCode()];
      if (renamingMap != null) {
        renamingMap.put(node.getValue().oldName, node.getValue().newName);
      }
    }
    // Actually assign the new names to the relevant STRING nodes in the AST.
    for (Node n : stringNodesToRename) {
      String oldName = n.getString();
      Property p = propertyMap.get(oldName);
      if (p != null && p.newName != null) {
        Preconditions.checkState(oldName.equals(p.oldName));
        if (!p.newName.equals(oldName)) {
          n.setString(p.newName);
          compiler.reportCodeChange();
        }
      }
    }
    logger.fine("Collapsed " + numRenamedPropertyNames + " properties into "
        + numNewPropertyNames + " and skipped renaming "
        + numSkippedPropertyNames + " properties.");
  }
private BitSet getRelatedTypesOnNonUnion(JSType type) {
// All of the types we encounter should have been added to the
// relatedBitsets via computeRelatedTypes.
if (relatedBitsets.containsKey(type)) {
return relatedBitsets.get(type);
} else {
throw new RuntimeException("Related types should have been computed for"
+ " type: " + type + " but have not been.");
}
}
  /**
   * Adds subtypes - and implementors, in the case of interfaces - of the type
   * to its JSTypeBitSet of related types. Union types are decomposed into their
   * alternative types.
   *
   * <p>The 'is related to' relationship is best understood graphically. Draw an
   * arrow from each instance type to the prototype of each of its
   * subclass. Draw an arrow from each prototype to its instance type. Draw an
   * arrow from each interface to its implementors. A type is related to another
   * if there is a directed path in the graph from the type to other. Thus, the
   * 'is related to' relationship is reflexive and transitive.
   *
   * <p>Example with Foo extends Bar which extends Baz and Bar implements I:
   * <pre>
   * Foo -> Bar.prototype -> Bar -> Baz.prototype -> Baz
   * ^
   * |
   * I
   * </pre>
   *
   * <p>Note that we don't need to correctly handle the relationships between
   * functions, because the function type is invalidating (i.e. its properties
   * won't be ambiguated).
   */
  private void computeRelatedTypes(JSType type) {
    if (type.isUnionType()) {
      type = type.restrictByNotNullOrUndefined();
      if (type.isUnionType()) {
        // Still a union after removing null/undefined: recurse per alternate.
        for (JSType alt : type.toMaybeUnionType().getAlternates()) {
          computeRelatedTypes(alt);
        }
        return;
      }
    }
    if (relatedBitsets.containsKey(type)) {
      // We only need to generate the bit set once.
      return;
    }
    // Insert the (empty) bit set into the map BEFORE recursing, so cycles in
    // the type graph terminate; a type is always related to itself.
    JSTypeBitSet related = new JSTypeBitSet(intForType.size());
    relatedBitsets.put(type, related);
    related.set(getIntForType(type));
    // A prototype is related to its instance.
    if (type.isFunctionPrototypeType()) {
      addRelatedInstance(((ObjectType) type).getOwnerFunction(), related);
      return;
    }
    // An instance is related to its subclasses.
    FunctionType constructor = type.toObjectType().getConstructor();
    if (constructor != null && constructor.getSubTypes() != null) {
      for (FunctionType subType : constructor.getSubTypes()) {
        addRelatedInstance(subType, related);
      }
    }
    // An interface is related to its implementors.
    for (FunctionType implementor : compiler.getTypeRegistry()
        .getDirectImplementors(type.toObjectType())) {
      addRelatedInstance(implementor, related);
    }
  }
  /**
   * Adds the instance of the given constructor, its implicit prototype and all
   * its related types to the given bit set.
   */
  private void addRelatedInstance(
      FunctionType constructor, JSTypeBitSet related) {
    // TODO(user): A constructor which doesn't have an instance type
    // (e.g. it's missing the @constructor annotation) should be an invalidating
    // type which doesn't reach this code path.
    if (constructor.hasInstanceType()) {
      ObjectType instanceType = constructor.getInstanceType();
      related.set(getIntForType(instanceType.getImplicitPrototype()));
      // Recurse first so the instance's bit set exists before we OR it in.
      computeRelatedTypes(instanceType);
      related.or(relatedBitsets.get(instanceType));
    }
  }
  /** Adjacency-graph view over the properties being ambiguated. */
  class PropertyGraph implements AdjacencyGraph<Property, Void> {
    private final ArrayList<PropertyGraphNode> nodes;

    PropertyGraph(ArrayList<PropertyGraphNode> nodes) {
      this.nodes = nodes;
    }

    @Override
    public List<PropertyGraphNode> getNodes() {
      return nodes;
    }

    @Override
    public GraphNode<Property, Void> getNode(Property property) {
      // The greedy coloring algorithm never looks nodes up by value.
      throw new RuntimeException("PropertyGraph#getNode is never called.");
    }

    @Override
    public SubGraph<Property, Void> newSubGraph() {
      return new PropertySubGraph();
    }

    @Override
    public void clearNodeAnnotations() {
      for (PropertyGraphNode node : nodes) {
        node.setAnnotation(null);
      }
    }

    @Override
    public int getWeight(Property value) {
      // Weight a property by how often it is accessed.
      return value.numOccurrences;
    }
  }
  /**
   * A {@link SubGraph} that represents properties. The related types of
   * the properties are used to efficiently calculate adjacency information.
   */
  class PropertySubGraph implements SubGraph<Property, Void> {
    /** Types related to properties referenced in this subgraph. */
    JSTypeBitSet relatedTypes = new JSTypeBitSet(intForType.size());

    /**
     * Returns true if prop is in an independent set from all properties in this
     * sub graph. That is, if none of its related types intersects with the
     * related types for this sub graph.
     */
    @Override
    public boolean isIndependentOf(Property prop) {
      return !relatedTypes.intersects(prop.relatedTypes);
    }

    /**
     * Adds the node to the sub graph, adding all its related types to the
     * related types for the sub graph.
     */
    @Override
    public void addNode(Property prop) {
      relatedTypes.or(prop.relatedTypes);
    }
  }
  /** Graph-node wrapper pairing a Property with its coloring annotation. */
  class PropertyGraphNode implements GraphNode<Property, Void> {
    Property property;
    // Set by the graph-coloring algorithm; the annotation's hashCode() is
    // used as the color index in process().
    protected Annotation annotation;

    PropertyGraphNode(Property property) {
      this.property = property;
    }

    @Override
    public Property getValue() {
      return property;
    }

    @Override
    public Annotation getAnnotation() {
      return annotation;
    }

    @Override
    public void setAnnotation(Annotation data) {
      annotation = data;
    }
  }
  /** Finds all property references, recording the types on which they occur. */
  private class ProcessProperties extends AbstractPostOrderCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      switch (n.getType()) {
        case Token.GETPROP: {
          // x.prop: the property STRING is the second child, the receiver
          // whose type we record is the first.
          Node propNode = n.getSecondChild();
          JSType jstype = getJSType(n.getFirstChild());
          maybeMarkCandidate(propNode, jstype);
          break;
        }
        case Token.OBJECTLIT:
          // The children of an OBJECTLIT node are keys, where the values
          // are the children of the keys.
          for (Node key = n.getFirstChild(); key != null;
              key = key.getNext()) {
            // We only want keys that were unquoted.
            // Keys are STRING, GET, SET
            if (!key.isQuotedString()) {
              JSType jstype = getJSType(n.getFirstChild());
              maybeMarkCandidate(key, jstype);
            } else {
              // Ensure that we never rename some other property in a way
              // that could conflict with this quoted key.
              quotedNames.add(key.getString());
            }
          }
          break;
        case Token.GETELEM:
          // If this is a quoted property access (e.g. x['myprop']), we need to
          // ensure that we never rename some other property in a way that
          // could conflict with this quoted name.
          Node child = n.getLastChild();
          if (child.isString()) {
            quotedNames.add(child.getString());
          }
          break;
      }
    }

    /**
     * If a property node is eligible for renaming, stashes a reference to it
     * and increments the property name's access count.
     *
     * @param n The STRING node for a property
     */
    private void maybeMarkCandidate(Node n, JSType type) {
      String name = n.getString();
      if (!externedNames.contains(name)) {
        stringNodesToRename.add(n);
        recordProperty(name, type);
      }
    }

    // Looks up (or creates) the Property entry and folds in the given type.
    private Property recordProperty(String name, JSType type) {
      Property prop = getProperty(name);
      prop.addType(type);
      return prop;
    }
  }
/** Returns true if properties on this type should not be renamed. */
private boolean isInvalidatingType(JSType type) {
  if (type.isUnionType()) {
    type = type.restrictByNotNullOrUndefined();
    if (type.isUnionType()) {
      // Still a union after stripping null/undefined: invalidating iff any
      // alternate is invalidating.
      for (JSType alternate : type.toMaybeUnionType().getAlternates()) {
        if (isInvalidatingType(alternate)) {
          return true;
        }
      }
      return false;
    }
  }
  ObjectType objType = ObjectType.cast(type);
  if (objType == null) {
    // Not an object type at all; nothing safe to rename on it.
    return true;
  }
  if (invalidatingTypes.contains(objType) || !objType.hasReferenceName()) {
    return true;
  }
  // Unknown, unresolved (empty), enum, and autoboxing types are all unsafe.
  return objType.isUnknownType()
      || objType.isEmptyType()
      || objType.isEnumType()
      || objType.autoboxesTo() != null;
}
/** Returns the tracking entry for {@code name}, creating it on first use. */
private Property getProperty(String name) {
  Property existing = propertyMap.get(name);
  if (existing != null) {
    return existing;
  }
  Property created = new Property(name);
  propertyMap.put(name, created);
  return created;
}
/**
 * Reads the JSType attached to the Node argument, substituting the unknown
 * type when the type checker attached none.
 */
private JSType getJSType(Node n) {
  JSType attached = n.getJSType();
  if (attached != null) {
    return attached;
  }
  // TODO(user): This branch indicates a compiler bug, not worthy of
  // halting the compilation but we should log this and analyze to track
  // down why it happens. This is not critical and will be resolved over
  // time as the type checker is extended.
  return compiler.getTypeRegistry().getNativeType(JSTypeNative.UNKNOWN_TYPE);
}
/** Encapsulates the information needed for renaming a property. */
private class Property {
  final String oldName;
  String newName;
  int numOccurrences;
  boolean skipAmbiguating;
  JSTypeBitSet relatedTypes = new JSTypeBitSet(intForType.size());

  Property(String name) {
    this.oldName = name;
    // Properties with this suffix are handled in RenameProperties.
    skipAmbiguating = name.startsWith(SKIP_PREFIX);
  }

  /** Folds {@code newType} into this property's set of related types. */
  void addType(JSType newType) {
    if (skipAmbiguating) {
      return;
    }
    numOccurrences++;
    JSType restricted = newType;
    if (restricted.isUnionType()) {
      restricted = restricted.restrictByNotNullOrUndefined();
      if (restricted.isUnionType()) {
        // Record each alternate of the union separately.
        for (JSType alternate
            : restricted.toMaybeUnionType().getAlternatesWithoutStructuralTyping()) {
          addNonUnionType(alternate);
        }
        return;
      }
    }
    addNonUnionType(restricted);
  }

  private void addNonUnionType(JSType newType) {
    // An invalidating type poisons the whole property: stop ambiguating it.
    if (skipAmbiguating || isInvalidatingType(newType)) {
      skipAmbiguating = true;
      return;
    }
    if (!relatedTypes.get(getIntForType(newType))) {
      computeRelatedTypes(newType);
      relatedTypes.or(getRelatedTypesOnNonUnion(newType));
    }
  }
}
// A BitSet that stores type info. Adds pretty-print routines.
private class JSTypeBitSet extends BitSet {
  private static final long serialVersionUID = 1L;

  private JSTypeBitSet(int size) {
    super(size);
  }

  private JSTypeBitSet() {
    super();
  }

  /**
   * Pretty-printing, for diagnostic purposes.
   */
  @Override
  public String toString() {
    List<String> typeNames = new ArrayList<>();
    // Walk every set bit, mapping it back to its type via the inverse map.
    for (int bit = nextSetBit(0); bit != -1; bit = nextSetBit(bit + 1)) {
      typeNames.add(intForType.inverse().get(bit).toString());
    }
    return Joiner.on(" && ").join(typeNames);
  }
}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2016-2017 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.jsoninput.reader;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.Iterator;
import org.apache.commons.io.IOUtils;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.steps.file.BaseFileInputStepData;
import org.pentaho.di.trans.steps.jsoninput.JsonInput;
import org.pentaho.di.trans.steps.jsoninput.JsonInputData;
import org.pentaho.di.trans.steps.jsoninput.JsonInputMeta;
/**
 * Supplies the JSON input step with one {@link InputStream} per configured input source:
 * a step-defined file list, file names read from an incoming field, URLs read from an
 * incoming field, or raw JSON content read directly from an incoming field.
 */
public class InputsReader implements Iterable<InputStream> {

  private JsonInput step;
  private JsonInputMeta meta;
  private JsonInputData data;
  private ErrorHandler errorHandler;

  public InputsReader( JsonInput step, JsonInputMeta meta, JsonInputData data, ErrorHandler errorHandler ) {
    this.step = step;
    this.meta = meta;
    this.data = data;
    this.errorHandler = errorHandler;
  }

  /**
   * Chooses the concrete stream iterator based on the step configuration.
   */
  @Override
  public Iterator<InputStream> iterator() {
    if ( !meta.isInFields() || meta.getIsAFile() ) {
      Iterator<FileObject> files;
      if ( meta.inputFiles.acceptingFilenames ) {
        // paths from input
        files = new FileNamesIterator( step, errorHandler, getFieldIterator() );
      } else {
        // from inner file list
        if ( data.files == null ) {
          data.files = meta.getFileInputList( step );
        }
        files = data.files.getFiles().listIterator( data.currentFileIndex );
      }
      return new FileContentIterator( files, data, errorHandler );
    } else if ( meta.isReadUrl() ) {
      return new URLContentIterator( errorHandler, getFieldIterator() );
    } else {
      // direct content
      return new ChainedIterator<InputStream, String>( getFieldIterator(), errorHandler ) {
        protected InputStream tryNext() throws IOException {
          String next = inner.next();
          return next == null ? null : IOUtils.toInputStream( next, meta.getEncoding() );
        }
      };
    }
  }

  /** Iterates the configured source field over incoming rows. */
  protected StringFieldIterator getFieldIterator() {
    return new StringFieldIterator(
        new RowIterator( step, data, errorHandler ), data.indexSourceField );
  }

  /** Callbacks for reporting errors without aborting iteration. */
  public static interface ErrorHandler {
    /**
     * Generic (unexpected errors)
     */
    void error( Exception thrown );

    void fileOpenError( FileObject file, FileSystemException exception );

    void fileCloseError( FileObject file, FileSystemException exception );
  }

  /**
   * Adapts an iterator of {@code C} into an iterator of {@code T}; any exception thrown
   * by {@link #tryNext()} is routed to the error handler and {@code null} is returned.
   */
  protected abstract class ChainedIterator<T, C> implements Iterator<T> {
    protected Iterator<C> inner;
    protected ErrorHandler handler;

    ChainedIterator( Iterator<C> inner, ErrorHandler handler ) {
      this.inner = inner;
      this.handler = handler;
    }

    @Override
    public boolean hasNext() {
      return inner.hasNext();
    }

    @Override
    public T next() {
      try {
        return tryNext();
      } catch ( Exception e ) {
        handler.error( e );
        return null;
      }
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException( "remove" );
    }

    protected abstract T tryNext() throws Exception;
  }

  /** Opens each file in turn, closing the previously opened one first. */
  protected class FileContentIterator extends ChainedIterator<InputStream, FileObject> {
    // Bug fix: a duplicate, never-assigned 'ErrorHandler handler' field used to shadow
    // the inherited ChainedIterator.handler here, so fileOpenError/fileCloseError
    // dereferenced null. The inherited field (set via super(...)) is now used instead.
    BaseFileInputStepData data;

    FileContentIterator( Iterator<FileObject> inner, BaseFileInputStepData data, ErrorHandler handler ) {
      super( inner, handler );
      this.data = data;
    }

    @Override
    public InputStream tryNext() {
      if ( hasNext() ) {
        // close the previous file before advancing to the next one
        if ( data.file != null ) {
          try {
            data.file.close();
          } catch ( FileSystemException e ) {
            handler.fileCloseError( data.file, e );
          }
        }
        try {
          data.file = inner.next();
          data.currentFileIndex++;
          if ( step.onNewFile( data.file ) ) {
            return KettleVFS.getInputStream( data.file );
          }
        } catch ( FileSystemException e ) {
          handler.fileOpenError( data.file, e );
        }
      }
      return null;
    }
  }

  /** Resolves file-name strings (with variable substitution) to FileObjects. */
  protected class FileNamesIterator extends ChainedIterator<FileObject, String> {
    private VariableSpace vars;

    public FileNamesIterator( VariableSpace varSpace, ErrorHandler handler, Iterator<String> fileNames ) {
      super( fileNames, handler );
      vars = varSpace;
    }

    @Override
    public FileObject tryNext() throws KettleFileException {
      String fileName = step.environmentSubstitute( inner.next() );
      return fileName == null ? null : KettleVFS.getFileObject( fileName, vars );
    }
  }

  /** Opens a URL connection per input value and returns its content stream. */
  protected class URLContentIterator extends ChainedIterator<InputStream, String> {
    public URLContentIterator( ErrorHandler handler, Iterator<String> urls ) {
      super( urls, handler );
    }

    @Override protected InputStream tryNext() throws Exception {
      if ( hasNext() ) {
        URL url = new URL( step.environmentSubstitute( inner.next() ) );
        URLConnection connection = url.openConnection();
        return connection.getInputStream();
      }
      return null;
    }
  }

  /** Projects a single String field out of each incoming row. */
  protected class StringFieldIterator implements Iterator<String> {
    private RowIterator rowIter;
    private int idx;

    public StringFieldIterator( RowIterator rowIter, int idx ) {
      this.rowIter = rowIter;
      this.idx = idx;
    }

    @Override
    public boolean hasNext() {
      return rowIter.hasNext();
    }

    @Override
    public String next() {
      Object[] row = rowIter.next();
      // null when the row is missing or too short to contain the field
      return ( row == null || row.length <= idx )
          ? null
          : (String) row[idx];
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException( "remove" );
    }
  }

  /** Pulls rows from the step, buffering a one-row look-ahead in data.readrow. */
  protected class RowIterator implements Iterator<Object[]> {
    private StepInterface step;
    private ErrorHandler errorHandler;
    private boolean gotNext;

    public RowIterator( StepInterface step, JsonInputData data, ErrorHandler errorHandler ) {
      this.step = step;
      this.errorHandler = errorHandler;
      // a row may already be buffered from a previous read
      gotNext = data.readrow != null;
    }

    protected void fetchNext() {
      try {
        data.readrow = step.getRow();
        gotNext = true;
      } catch ( KettleException e ) {
        errorHandler.error( e );
      }
    }

    @Override
    public boolean hasNext() {
      if ( !gotNext ) {
        fetchNext();
      }
      return data.readrow != null;
    }

    @Override
    public Object[] next() {
      if ( hasNext() ) {
        gotNext = false;
        return data.readrow;
      }
      return null;
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException( "remove" );
    }
  }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jeesite.modules.act.rest.diagram.services;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.activiti.engine.ActivitiException;
import org.activiti.engine.ActivitiObjectNotFoundException;
import org.activiti.engine.HistoryService;
import org.activiti.engine.RepositoryService;
import org.activiti.engine.RuntimeService;
import org.activiti.engine.history.HistoricActivityInstance;
import org.activiti.engine.impl.bpmn.behavior.BoundaryEventActivityBehavior;
import org.activiti.engine.impl.bpmn.behavior.CallActivityBehavior;
import org.activiti.engine.impl.bpmn.parser.BpmnParse;
import org.activiti.engine.impl.bpmn.parser.ErrorEventDefinition;
import org.activiti.engine.impl.bpmn.parser.EventSubscriptionDeclaration;
import org.activiti.engine.impl.jobexecutor.TimerDeclarationImpl;
import org.activiti.engine.impl.persistence.entity.ExecutionEntity;
import org.activiti.engine.impl.persistence.entity.ProcessDefinitionEntity;
import org.activiti.engine.impl.pvm.PvmTransition;
import org.activiti.engine.impl.pvm.delegate.ActivityBehavior;
import org.activiti.engine.impl.pvm.process.ActivityImpl;
import org.activiti.engine.impl.pvm.process.Lane;
import org.activiti.engine.impl.pvm.process.LaneSet;
import org.activiti.engine.impl.pvm.process.ParticipantProcess;
import org.activiti.engine.impl.pvm.process.TransitionImpl;
import org.activiti.engine.repository.ProcessDefinition;
import org.activiti.engine.runtime.Execution;
import org.activiti.engine.runtime.ProcessInstance;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
 * Builds a JSON representation of a process-definition diagram, optionally overlaying
 * runtime state (active activities, traversed sequence flows, sub-process instances)
 * of a running process instance.
 */
public class BaseProcessDefinitionDiagramLayoutResource {

    // ObjectMapper is thread-safe and expensive to construct; the original code
    // instantiated a new one per node, so share a single instance instead.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    @Autowired
    private RuntimeService runtimeService;

    @Autowired
    private RepositoryService repositoryService;

    @Autowired
    private HistoryService historyService;

    /**
     * Returns the diagram layout for a process definition. When a process-instance id
     * is given, the definition id is resolved from the instance and highlight data for
     * active activities/flows plus sub-process instance info is included.
     *
     * @param processInstanceId   optional running instance id
     * @param processDefinitionId definition id; overridden by the instance's when set
     * @throws ActivitiObjectNotFoundException if the instance or definition id is missing
     * @throws ActivitiException               if the definition cannot be found
     */
    public ObjectNode getDiagramNode(String processInstanceId, String processDefinitionId) {
        List<String> highLightedFlows = Collections.<String> emptyList();
        List<String> highLightedActivities = Collections.<String> emptyList();
        Map<String, ObjectNode> subProcessInstanceMap = new HashMap<String, ObjectNode>();
        ProcessInstance processInstance = null;
        if (processInstanceId != null) {
            processInstance = runtimeService.createProcessInstanceQuery().processInstanceId(processInstanceId).singleResult();
            if (processInstance == null) {
                throw new ActivitiObjectNotFoundException("Process instance could not be found");
            }
            processDefinitionId = processInstance.getProcessDefinitionId();
            // Collect direct sub-process instances, keyed by the super execution that spawned them.
            List<ProcessInstance> subProcessInstances = runtimeService.createProcessInstanceQuery().superProcessInstanceId(processInstanceId).list();
            for (ProcessInstance subProcessInstance : subProcessInstances) {
                String subDefId = subProcessInstance.getProcessDefinitionId();
                String superExecutionId = ((ExecutionEntity) subProcessInstance).getSuperExecutionId();
                ProcessDefinitionEntity subDef = (ProcessDefinitionEntity) repositoryService.getProcessDefinition(subDefId);
                ObjectNode processInstanceJSON = MAPPER.createObjectNode();
                processInstanceJSON.put("processInstanceId", subProcessInstance.getId());
                processInstanceJSON.put("superExecutionId", superExecutionId);
                processInstanceJSON.put("processDefinitionId", subDef.getId());
                processInstanceJSON.put("processDefinitionKey", subDef.getKey());
                processInstanceJSON.put("processDefinitionName", subDef.getName());
                subProcessInstanceMap.put(superExecutionId, processInstanceJSON);
            }
        }
        if (processDefinitionId == null) {
            throw new ActivitiObjectNotFoundException("No process definition id provided");
        }
        ProcessDefinitionEntity processDefinition = (ProcessDefinitionEntity) repositoryService.getProcessDefinition(processDefinitionId);
        if (processDefinition == null) {
            throw new ActivitiException("Process definition " + processDefinitionId + " could not be found");
        }
        ObjectNode responseJSON = MAPPER.createObjectNode();
        // Process definition
        JsonNode pdrJSON = getProcessDefinitionResponse(processDefinition);
        if (pdrJSON != null) {
            responseJSON.put("processDefinition", pdrJSON);
        }
        // Highlighted activities
        if (processInstance != null) {
            ArrayNode activityArray = MAPPER.createArrayNode();
            ArrayNode flowsArray = MAPPER.createArrayNode();
            highLightedActivities = runtimeService.getActiveActivityIds(processInstanceId);
            highLightedFlows = getHighLightedFlows(processInstanceId, processDefinition);
            for (String activityName : highLightedActivities) {
                activityArray.add(activityName);
            }
            for (String flow : highLightedFlows)
                flowsArray.add(flow);
            responseJSON.put("highLightedActivities", activityArray);
            responseJSON.put("highLightedFlows", flowsArray);
        }
        // Pool shape, if process is participant in collaboration
        if (processDefinition.getParticipantProcess() != null) {
            ParticipantProcess pProc = processDefinition.getParticipantProcess();
            ObjectNode participantProcessJSON = MAPPER.createObjectNode();
            participantProcessJSON.put("id", pProc.getId());
            if (StringUtils.isNotEmpty(pProc.getName())) {
                participantProcessJSON.put("name", pProc.getName());
            } else {
                participantProcessJSON.put("name", "");
            }
            participantProcessJSON.put("x", pProc.getX());
            participantProcessJSON.put("y", pProc.getY());
            participantProcessJSON.put("width", pProc.getWidth());
            participantProcessJSON.put("height", pProc.getHeight());
            responseJSON.put("participantProcess", participantProcessJSON);
        }
        // Draw lanes
        if (processDefinition.getLaneSets() != null && !processDefinition.getLaneSets().isEmpty()) {
            ArrayNode laneSetArray = MAPPER.createArrayNode();
            for (LaneSet laneSet : processDefinition.getLaneSets()) {
                ArrayNode laneArray = MAPPER.createArrayNode();
                if (laneSet.getLanes() != null && !laneSet.getLanes().isEmpty()) {
                    for (Lane lane : laneSet.getLanes()) {
                        ObjectNode laneJSON = MAPPER.createObjectNode();
                        laneJSON.put("id", lane.getId());
                        if (StringUtils.isNotEmpty(lane.getName())) {
                            laneJSON.put("name", lane.getName());
                        } else {
                            laneJSON.put("name", "");
                        }
                        laneJSON.put("x", lane.getX());
                        laneJSON.put("y", lane.getY());
                        laneJSON.put("width", lane.getWidth());
                        laneJSON.put("height", lane.getHeight());
                        List<String> flowNodeIds = lane.getFlowNodeIds();
                        ArrayNode flowNodeIdsArray = MAPPER.createArrayNode();
                        for (String flowNodeId : flowNodeIds) {
                            flowNodeIdsArray.add(flowNodeId);
                        }
                        laneJSON.put("flowNodeIds", flowNodeIdsArray);
                        laneArray.add(laneJSON);
                    }
                }
                ObjectNode laneSetJSON = MAPPER.createObjectNode();
                laneSetJSON.put("id", laneSet.getId());
                if (StringUtils.isNotEmpty(laneSet.getName())) {
                    laneSetJSON.put("name", laneSet.getName());
                } else {
                    laneSetJSON.put("name", "");
                }
                laneSetJSON.put("lanes", laneArray);
                laneSetArray.add(laneSetJSON);
            }
            if (laneSetArray.size() > 0)
                responseJSON.put("laneSets", laneSetArray);
        }
        ArrayNode sequenceFlowArray = MAPPER.createArrayNode();
        ArrayNode activityArray = MAPPER.createArrayNode();
        // Activities and their sequence-flows
        for (ActivityImpl activity : processDefinition.getActivities()) {
            getActivity(processInstanceId, activity, activityArray, sequenceFlowArray, processInstance, highLightedFlows, subProcessInstanceMap);
        }
        responseJSON.put("activities", activityArray);
        responseJSON.put("sequenceFlows", sequenceFlowArray);
        return responseJSON;
    }

    /**
     * Computes which sequence flows were traversed, by walking historic activity
     * instances in start-time order and matching each activity's outgoing transitions
     * against the next visited activity.
     */
    private List<String> getHighLightedFlows(String processInstanceId, ProcessDefinitionEntity processDefinition) {
        List<String> highLightedFlows = new ArrayList<String>();
        List<HistoricActivityInstance> historicActivityInstances = historyService.createHistoricActivityInstanceQuery()
                .processInstanceId(processInstanceId).orderByHistoricActivityInstanceStartTime().asc().list();
        List<String> historicActivityInstanceList = new ArrayList<String>();
        for (HistoricActivityInstance hai : historicActivityInstances) {
            historicActivityInstanceList.add(hai.getActivityId());
        }
        // add current activities to list
        List<String> highLightedActivities = runtimeService.getActiveActivityIds(processInstanceId);
        historicActivityInstanceList.addAll(highLightedActivities);
        // activities and their sequence-flows
        for (ActivityImpl activity : processDefinition.getActivities()) {
            int index = historicActivityInstanceList.indexOf(activity.getId());
            if (index >= 0 && index + 1 < historicActivityInstanceList.size()) {
                List<PvmTransition> pvmTransitionList = activity.getOutgoingTransitions();
                for (PvmTransition pvmTransition : pvmTransitionList) {
                    String destinationFlowId = pvmTransition.getDestination().getId();
                    if (destinationFlowId.equals(historicActivityInstanceList.get(index + 1))) {
                        highLightedFlows.add(pvmTransition.getId());
                    }
                }
            }
        }
        return highLightedFlows;
    }

    /**
     * Serializes one activity (and, recursively, its nested activities) into
     * {@code activityArray}, appending its outgoing transitions to
     * {@code sequenceFlowArray}.
     */
    @SuppressWarnings("unchecked") // Activiti stores typed ArrayLists under untyped property keys
    private void getActivity(String processInstanceId, ActivityImpl activity, ArrayNode activityArray, ArrayNode sequenceFlowArray,
            ProcessInstance processInstance, List<String> highLightedFlows, Map<String, ObjectNode> subProcessInstanceMap) {
        ObjectNode activityJSON = MAPPER.createObjectNode();
        // Gather info on the multi instance marker
        String multiInstance = (String) activity.getProperty("multiInstance");
        if (multiInstance != null) {
            if (!"sequential".equals(multiInstance)) {
                multiInstance = "parallel";
            }
        }
        ActivityBehavior activityBehavior = activity.getActivityBehavior();
        // Gather info on the collapsed marker
        Boolean collapsed = (activityBehavior instanceof CallActivityBehavior);
        Boolean expanded = (Boolean) activity.getProperty(BpmnParse.PROPERTYNAME_ISEXPANDED);
        if (expanded != null) {
            collapsed = !expanded;
        }
        Boolean isInterrupting = null;
        if (activityBehavior instanceof BoundaryEventActivityBehavior) {
            isInterrupting = ((BoundaryEventActivityBehavior) activityBehavior).isInterrupting();
        }
        // Outgoing transitions of activity
        for (PvmTransition sequenceFlow : activity.getOutgoingTransitions()) {
            String flowName = (String) sequenceFlow.getProperty("name");
            boolean isHighLighted = (highLightedFlows.contains(sequenceFlow.getId()));
            boolean isConditional = sequenceFlow.getProperty(BpmnParse.PROPERTYNAME_CONDITION) != null
                    && !((String) activity.getProperty("type")).toLowerCase().contains("gateway");
            boolean isDefault = sequenceFlow.getId().equals(activity.getProperty("default"))
                    && ((String) activity.getProperty("type")).toLowerCase().contains("gateway");
            List<Integer> waypoints = ((TransitionImpl) sequenceFlow).getWaypoints();
            ArrayNode xPointArray = MAPPER.createArrayNode();
            ArrayNode yPointArray = MAPPER.createArrayNode();
            for (int i = 0; i < waypoints.size(); i += 2) { // waypoints.size()
                // minimally 4: x1, y1,
                // x2, y2
                xPointArray.add(waypoints.get(i));
                yPointArray.add(waypoints.get(i + 1));
            }
            ObjectNode flowJSON = MAPPER.createObjectNode();
            flowJSON.put("id", sequenceFlow.getId());
            flowJSON.put("name", flowName);
            flowJSON.put("flow", "(" + sequenceFlow.getSource().getId() + ")--" + sequenceFlow.getId() + "-->("
                    + sequenceFlow.getDestination().getId() + ")");
            if (isConditional)
                flowJSON.put("isConditional", isConditional);
            if (isDefault)
                flowJSON.put("isDefault", isDefault);
            if (isHighLighted)
                flowJSON.put("isHighLighted", isHighLighted);
            flowJSON.put("xPointArray", xPointArray);
            flowJSON.put("yPointArray", yPointArray);
            sequenceFlowArray.add(flowJSON);
        }
        // Nested activities (boundary events)
        ArrayNode nestedActivityArray = MAPPER.createArrayNode();
        for (ActivityImpl nestedActivity : activity.getActivities()) {
            nestedActivityArray.add(nestedActivity.getId());
        }
        Map<String, Object> properties = activity.getProperties();
        ObjectNode propertiesJSON = MAPPER.createObjectNode();
        for (String key : properties.keySet()) {
            Object prop = properties.get(key);
            if (prop instanceof String)
                propertiesJSON.put(key, (String) properties.get(key));
            else if (prop instanceof Integer)
                propertiesJSON.put(key, (Integer) properties.get(key));
            else if (prop instanceof Boolean)
                propertiesJSON.put(key, (Boolean) properties.get(key));
            else if ("initial".equals(key)) {
                ActivityImpl act = (ActivityImpl) properties.get(key);
                propertiesJSON.put(key, act.getId());
            } else if ("timerDeclarations".equals(key)) {
                ArrayList<TimerDeclarationImpl> timerDeclarations = (ArrayList<TimerDeclarationImpl>) properties.get(key);
                ArrayNode timerDeclarationArray = MAPPER.createArrayNode();
                if (timerDeclarations != null)
                    for (TimerDeclarationImpl timerDeclaration : timerDeclarations) {
                        ObjectNode timerDeclarationJSON = MAPPER.createObjectNode();
                        timerDeclarationJSON.put("isExclusive", timerDeclaration.isExclusive());
                        if (timerDeclaration.getRepeat() != null)
                            timerDeclarationJSON.put("repeat", timerDeclaration.getRepeat());
                        timerDeclarationJSON.put("retries", String.valueOf(timerDeclaration.getRetries()));
                        timerDeclarationJSON.put("type", timerDeclaration.getJobHandlerType());
                        timerDeclarationJSON.put("configuration", timerDeclaration.getJobHandlerConfiguration());
                        //timerDeclarationJSON.put("expression", timerDeclaration.getDescription());
                        timerDeclarationArray.add(timerDeclarationJSON);
                    }
                if (timerDeclarationArray.size() > 0)
                    propertiesJSON.put(key, timerDeclarationArray);
                // TODO: implement getting description
            } else if ("eventDefinitions".equals(key)) {
                ArrayList<EventSubscriptionDeclaration> eventDefinitions = (ArrayList<EventSubscriptionDeclaration>) properties.get(key);
                ArrayNode eventDefinitionsArray = MAPPER.createArrayNode();
                if (eventDefinitions != null) {
                    for (EventSubscriptionDeclaration eventDefinition : eventDefinitions) {
                        ObjectNode eventDefinitionJSON = MAPPER.createObjectNode();
                        if (eventDefinition.getActivityId() != null)
                            eventDefinitionJSON.put("activityId", eventDefinition.getActivityId());
                        eventDefinitionJSON.put("eventName", eventDefinition.getEventName());
                        eventDefinitionJSON.put("eventType", eventDefinition.getEventType());
                        eventDefinitionJSON.put("isAsync", eventDefinition.isAsync());
                        eventDefinitionJSON.put("isStartEvent", eventDefinition.isStartEvent());
                        eventDefinitionsArray.add(eventDefinitionJSON);
                    }
                }
                if (eventDefinitionsArray.size() > 0)
                    propertiesJSON.put(key, eventDefinitionsArray);
                // TODO: implement it
            } else if ("errorEventDefinitions".equals(key)) {
                ArrayList<ErrorEventDefinition> errorEventDefinitions = (ArrayList<ErrorEventDefinition>) properties.get(key);
                ArrayNode errorEventDefinitionsArray = MAPPER.createArrayNode();
                if (errorEventDefinitions != null) {
                    for (ErrorEventDefinition errorEventDefinition : errorEventDefinitions) {
                        ObjectNode errorEventDefinitionJSON = MAPPER.createObjectNode();
                        if (errorEventDefinition.getErrorCode() != null)
                            errorEventDefinitionJSON.put("errorCode", errorEventDefinition.getErrorCode());
                        else
                            errorEventDefinitionJSON.putNull("errorCode");
                        errorEventDefinitionJSON.put("handlerActivityId", errorEventDefinition.getHandlerActivityId());
                        errorEventDefinitionsArray.add(errorEventDefinitionJSON);
                    }
                }
                if (errorEventDefinitionsArray.size() > 0)
                    propertiesJSON.put(key, errorEventDefinitionsArray);
            }
        }
        if ("callActivity".equals(properties.get("type"))) {
            CallActivityBehavior callActivityBehavior = null;
            if (activityBehavior instanceof CallActivityBehavior) {
                callActivityBehavior = (CallActivityBehavior) activityBehavior;
            }
            if (callActivityBehavior != null) {
                propertiesJSON.put("processDefinitonKey", callActivityBehavior.getProcessDefinitonKey());
                // get processDefinitonId from execution or get last processDefinitonId
                // by key
                ArrayNode processInstanceArray = MAPPER.createArrayNode();
                if (processInstance != null) {
                    List<Execution> executionList = runtimeService.createExecutionQuery().processInstanceId(processInstanceId)
                            .activityId(activity.getId()).list();
                    if (!executionList.isEmpty()) {
                        for (Execution execution : executionList) {
                            ObjectNode processInstanceJSON = subProcessInstanceMap.get(execution.getId());
                            processInstanceArray.add(processInstanceJSON);
                        }
                    }
                }
                // If active activities has no instance of this callActivity then add
                // last definition
                if (processInstanceArray.size() == 0 && StringUtils.isNotEmpty(callActivityBehavior.getProcessDefinitonKey())) {
                    // Get last definition by key
                    ProcessDefinition lastProcessDefinition = repositoryService.createProcessDefinitionQuery()
                            .processDefinitionKey(callActivityBehavior.getProcessDefinitonKey()).latestVersion().singleResult();
                    // TODO: unuseful fields there are processDefinitionName, processDefinitionKey
                    if (lastProcessDefinition != null) {
                        ObjectNode processInstanceJSON = MAPPER.createObjectNode();
                        processInstanceJSON.put("processDefinitionId", lastProcessDefinition.getId());
                        processInstanceJSON.put("processDefinitionKey", lastProcessDefinition.getKey());
                        processInstanceJSON.put("processDefinitionName", lastProcessDefinition.getName());
                        processInstanceArray.add(processInstanceJSON);
                    }
                }
                if (processInstanceArray.size() > 0) {
                    propertiesJSON.put("processDefinitons", processInstanceArray);
                }
            }
        }
        activityJSON.put("activityId", activity.getId());
        activityJSON.put("properties", propertiesJSON);
        if (multiInstance != null)
            activityJSON.put("multiInstance", multiInstance);
        if (collapsed)
            activityJSON.put("collapsed", collapsed);
        if (nestedActivityArray.size() > 0)
            activityJSON.put("nestedActivities", nestedActivityArray);
        if (isInterrupting != null)
            activityJSON.put("isInterrupting", isInterrupting);
        activityJSON.put("x", activity.getX());
        activityJSON.put("y", activity.getY());
        activityJSON.put("width", activity.getWidth());
        activityJSON.put("height", activity.getHeight());
        activityArray.add(activityJSON);
        // Nested activities (boundary events)
        for (ActivityImpl nestedActivity : activity.getActivities()) {
            getActivity(processInstanceId, nestedActivity, activityArray, sequenceFlowArray, processInstance, highLightedFlows, subProcessInstanceMap);
        }
    }

    /** Serializes the static definition metadata (id, name, key, version, ...). */
    private JsonNode getProcessDefinitionResponse(ProcessDefinitionEntity processDefinition) {
        ObjectNode pdrJSON = MAPPER.createObjectNode();
        pdrJSON.put("id", processDefinition.getId());
        pdrJSON.put("name", processDefinition.getName());
        pdrJSON.put("key", processDefinition.getKey());
        pdrJSON.put("version", processDefinition.getVersion());
        pdrJSON.put("deploymentId", processDefinition.getDeploymentId());
        pdrJSON.put("isGraphicNotationDefined", isGraphicNotationDefined(processDefinition));
        return pdrJSON;
    }

    /** Re-reads the definition from the repository to check for graphical (DI) info. */
    private boolean isGraphicNotationDefined(ProcessDefinitionEntity processDefinition) {
        return ((ProcessDefinitionEntity) repositoryService.getProcessDefinition(processDefinition.getId())).isGraphicalNotationDefined();
    }
}
| |
/*
* IzPack - Copyright 2001-2012 Julien Ponge, All Rights Reserved.
*
* http://izpack.org/
* http://izpack.codehaus.org/
*
* Copyright 2012 Tim Anderson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.izforge.izpack.integration;
import com.izforge.izpack.api.adaptator.IXMLElement;
import com.izforge.izpack.api.data.AutomatedInstallData;
import com.izforge.izpack.api.data.Info;
import com.izforge.izpack.api.rules.Condition;
import com.izforge.izpack.api.rules.RulesEngine;
import com.izforge.izpack.compiler.container.TestGUIInstallationContainer;
import com.izforge.izpack.installer.data.UninstallDataWriter;
import com.izforge.izpack.matcher.ZipMatcher;
import com.izforge.izpack.test.Container;
import com.izforge.izpack.test.InstallFile;
import com.izforge.izpack.test.junit.PicoRunner;
import com.izforge.izpack.util.IoHelper;
import org.hamcrest.core.IsNot;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipFile;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Tests the {@link UninstallDataWriter}.
*
* @author Anthonin Bonnefoy
* @author Tim Anderson
*/
@RunWith(PicoRunner.class)
@Container(TestGUIInstallationContainer.class)
public class UninstallDataWriterTest
{
/**
* Temporary folder to perform installations to.
*/
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
/**
* The uninstall jar writer.
*/
private final UninstallDataWriter uninstallDataWriter;
/**
* Install data.
*/
private final AutomatedInstallData installData;
/**
* The rules engine
*/
private final RulesEngine rulesEngine;
/**
 * Constructs an <tt>UninstallDataWriterTest</tt>.
 * <p>
 * Arguments are supplied by the test container configured via
 * {@code @Container(TestGUIInstallationContainer.class)} and {@code PicoRunner}
 * — presumably one instance per test method; TODO confirm.
 *
 * @param uninstallDataWriter the uninstall jar writer
 * @param installData         the install data
 * @param rulesEngine         the rules engine
 */
public UninstallDataWriterTest(UninstallDataWriter uninstallDataWriter, AutomatedInstallData installData,
                               RulesEngine rulesEngine)
{
    this.uninstallDataWriter = uninstallDataWriter;
    this.installData = installData;
    this.rulesEngine = rulesEngine;
}
/**
 * Sets up the test case, pointing the installation at a scratch directory.
 */
@Before
public void setUp()
{
    // Write to the temporary folder so the test doesn't need to be run with
    // elevated permissions.
    File root = temporaryFolder.getRoot();
    installData.setInstallPath(new File(root, "izpackTest").getAbsolutePath());
}
/**
* Cleans up after the test.
*/
@After
public void tearDown()
{
System.getProperties().remove("izpack.mode");
}
/**
* Verifies that the uninstaller jar is written, and contains key classes and files.
*
* @throws IOException if the jar cannot be read
*/
@Test
@InstallFile("samples/basicInstall/basicInstall.xml")
public void testWriteUninstaller() throws IOException
{
assertTrue(uninstallDataWriter.write());
ZipFile uninstallJar = getUninstallerJar();
assertThat(
uninstallJar,
ZipMatcher.isZipContainingFiles(
"com/izforge/izpack/uninstaller/Uninstaller.class",
"com/izforge/izpack/uninstaller/Destroyer.class",
"com/izforge/izpack/data/ExecutableFile.class",
"langpack.xml",
"META-INF/MANIFEST.MF",
"com/izforge/izpack/gui/IconsDatabase.class",
"com/izforge/izpack/img/trash.png"));
// basicInstall.xml doesn't reference any listeners, so the com/izforge/izpack/event package shouldn't have
// been written. Verify that one of the listeners in the package doesn't appear
assertThat(uninstallJar,
IsNot.not(ZipMatcher.isZipContainingFiles(
"com/izforge/izpack/event/RegistryUninstallerListener.class")));
}
/**
* Verifies that standard listeners are written.
*
* @throws IOException
*/
@Test
@InstallFile("samples/event/event.xml")
public void testWriteStandardListener() throws IOException
{
assertTrue(uninstallDataWriter.write());
ZipFile uninstallJar = getUninstallerJar();
assertThat(uninstallJar,
ZipMatcher.isZipContainingFile("com/izforge/izpack/event/RegistryUninstallerListener.class"));
}
/**
* Verifies that custom listeners are written.
*/
@Test
@InstallFile("samples/event/customlisteners.xml")
public void testWriteCustomListener() throws IOException
{
assertTrue(uninstallDataWriter.write());
ZipFile uninstallJar = getUninstallerJar();
assertThat(uninstallJar,
ZipMatcher.isZipContainingFiles("com/izforge/izpack/test/listener/TestUninstallerListener.class",
"com/izforge/izpack/api/event/UninstallerListener.class"));
}
/**
* Verifies that native libraries are written to the uninstaller.
*/
@Test
@InstallFile("samples/natives/natives.xml")
public void testWriteNatives() throws IOException
{
assertTrue(uninstallDataWriter.write());
ZipFile uninstallJar = getUninstallerJar();
assertThat(uninstallJar,
ZipMatcher.isZipContainingFiles("com/izforge/izpack/bin/native/WinSetupAPI.dll",
"com/izforge/izpack/bin/native/WinSetupAPI_x64.dll",
"com/izforge/izpack/bin/native/COIOSHelper.dll",
"com/izforge/izpack/bin/native/COIOSHelper_x64.dll"));
// verify that the native libs with stage="install" aren't in the uninstaller
assertThat(uninstallJar,
IsNot.not(ZipMatcher.isZipContainingFiles("com/izforge/izpack/bin/native/ShellLink.dll",
"com/izforge/izpack/bin/native/ShellLink.dll")));
}
/**
* Verifies that the <em>com.coi.tools.os</em> packages are written if the OS is Windows.
* <p/>
* Strictly speaking these are only required if {@link com.izforge.izpack.event.RegistryUninstallerListener}
* is used, but for now just right them out for all windows installations.
*/
@Test
@InstallFile("samples/natives/natives.xml")
public void testWriteWindowsRegistrySupport() throws IOException
{
addOSCondition("izpack.windowsinstall");
installData.getInfo().setRequirePrivilegedExecutionUninstaller(true);
assertTrue(uninstallDataWriter.write());
ZipFile uninstallJar = getUninstallerJar();
assertThat(uninstallJar,
ZipMatcher.isZipContainingFiles("com/izforge/izpack/core/os/RegistryHandler.class",
"com/coi/tools/os/izpack/Registry.class",
"com/coi/tools/os/win/RegistryImpl.class",
"com/izforge/izpack/util/windows/elevate.js"));
}
/**
* Verifies that the <em>run-with-privileges-on-osx</em> script is written for mac installs.
*
* @throws IOException for any I/O error
*/
@Test
@InstallFile("samples/basicInstall/basicInstall.xml")
public void testRunWithPrivilegesOnOSX() throws IOException
{
System.setProperty("izpack.mode", "privileged");
installData.getInfo().setRequirePrivilegedExecutionUninstaller(true);
addOSCondition("izpack.macinstall");
assertTrue(uninstallDataWriter.write());
ZipFile uninstallJar = getUninstallerJar();
assertThat(uninstallJar,
ZipMatcher.isZipContainingFiles("exec-admin",
"com/izforge/izpack/util/mac/run-with-privileges-on-osx"));
}
/**
* Verifies that the "exec-admin" file is written when {@link Info#isPrivilegedExecutionRequiredUninstaller()}
* is {@code true} and there is no privileged execution condition.
*
* @throws IOException for any I/O error
*/
@Test
@InstallFile("samples/basicInstall/basicInstall.xml")
public void testExecAdminWrittenWhenPrivilegedExecutionRequired() throws IOException
{
installData.getInfo().setRequirePrivilegedExecutionUninstaller(true);
assertTrue(uninstallDataWriter.write());
ZipFile uninstallJar = getUninstallerJar();
assertThat(uninstallJar, ZipMatcher.isZipContainingFiles("exec-admin"));
}
/**
* Verifies that the "exec-admin" file is not written when {@link Info#isPrivilegedExecutionRequiredUninstaller()}
* is {@code false}.
*
* @throws IOException for any I/O error
*/
@Test
@InstallFile("samples/basicInstall/basicInstall.xml")
public void testExecAdminNotWrittenWhenPrivilegedExecutionNotRequired() throws IOException
{
installData.getInfo().setRequirePrivilegedExecutionUninstaller(false);
assertTrue(uninstallDataWriter.write());
ZipFile uninstallJar = getUninstallerJar();
assertThat(uninstallJar, IsNot.not(ZipMatcher.isZipContainingFiles("exec-admin")));
}
/**
* Verifies that the "exec-admin" file is not written to the uninstall jar when the privileged execution condition
* is false.
*
* @throws IOException for any I/O error
*/
@Test
@InstallFile("samples/basicInstall/basicInstall.xml")
public void testExecAdminNotWrittenForUnsatisfiedCondition() throws IOException
{
installData.getInfo().setRequirePrivilegedExecutionUninstaller(true);
installData.getInfo().setPrivilegedExecutionConditionID("falsecondition");
assertFalse(rulesEngine.isConditionTrue("falsecondition"));
assertTrue(uninstallDataWriter.write());
ZipFile uninstallJar = getUninstallerJar();
assertThat(uninstallJar, IsNot.not(ZipMatcher.isZipContainingFiles("exec-admin")));
}
private void addOSCondition(final String ruleId)
{
Map<String, Condition> rules = new HashMap<String, Condition>();
rules.put(ruleId, new Condition()
{
private static final long serialVersionUID = 1L;
{
setId(ruleId);
}
public void readFromXML(IXMLElement condition)
{
}
@Override
public boolean isTrue()
{
return true;
}
@Override
public void makeXMLData(IXMLElement conditionRoot)
{
}
@Override
public Set<String> getVarRefs() {
return new HashSet<String>(0);
}
});
rulesEngine.readConditionMap(rules); // use this as it doesn't check for rules being registered already
}
/**
* Returns the uninstaller jar file.
*
* @return the uninstaller jar file
* @throws IOException for any I/O error
*/
private ZipFile getUninstallerJar() throws IOException
{
String dir = IoHelper.translatePath(installData.getInfo().getUninstallerPath(), installData.getVariables());
String path = dir + File.separator + installData.getInfo().getUninstallerName();
File jar = new File(path);
assertThat(jar.exists(), is(true));
return new ZipFile(jar);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.process.traversal.step.map;
import org.apache.tinkerpop.gremlin.LoadGraphWith;
import org.apache.tinkerpop.gremlin.process.AbstractGremlinProcessTest;
import org.apache.tinkerpop.gremlin.process.GremlinProcessRunner;
import org.apache.tinkerpop.gremlin.process.IgnoreEngine;
import org.apache.tinkerpop.gremlin.process.traversal.Order;
import org.apache.tinkerpop.gremlin.process.traversal.Scope;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalEngine;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalHelper;
import org.apache.tinkerpop.gremlin.structure.Column;
import org.apache.tinkerpop.gremlin.structure.T;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.apache.tinkerpop.gremlin.LoadGraphWith.GraphData.GRATEFUL;
import static org.apache.tinkerpop.gremlin.LoadGraphWith.GraphData.MODERN;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.outE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Provider test suite for the {@code order()} / {@code order(local)} steps: each abstract
 * factory returns a traversal under test, and each test method asserts the exact (or
 * permitted) ordering of its results against the MODERN or GRATEFUL toy graphs.
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 * @author Stephen Mallette (http://stephen.genoprime.com)
 */
@RunWith(GremlinProcessRunner.class)
public abstract class OrderTest extends AbstractGremlinProcessTest {

    // Abstract factories for the traversals under test. Concrete providers supply
    // implementations; the reference versions live in the Traversals class below.
    public abstract Traversal<Vertex, String> get_g_V_name_order();

    public abstract Traversal<Vertex, String> get_g_V_name_order_byXa1_b1X_byXb2_a2X();

    public abstract Traversal<Vertex, String> get_g_V_order_byXname_incrX_name();

    public abstract Traversal<Vertex, String> get_g_V_order_byXnameX_name();

    public abstract Traversal<Vertex, Double> get_g_V_outE_order_byXweight_decrX_weight();

    public abstract Traversal<Vertex, String> get_g_V_order_byXname_a1_b1X_byXname_b2_a2X_name();

    public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_asXaX_outXcreatedX_asXbX_order_byXshuffleX_selectXa_bX();

    public abstract Traversal<Vertex, Map<Integer, Integer>> get_g_VX1X_hasXlabel_personX_mapXmapXint_ageXX_orderXlocalX_byXvalues_decrX_byXkeys_incrX(final Object v1Id);

    public abstract Traversal<Vertex, Vertex> get_g_V_order_byXoutE_count__decrX();

    public abstract Traversal<Vertex, Map<String, List<Vertex>>> get_g_V_group_byXlabelX_byXname_order_byXdecrX_foldX();

    public abstract Traversal<Vertex, List<Double>> get_g_V_localXbothE_weight_foldX_order_byXsumXlocalX_decrX();

    public abstract Traversal<Vertex, Map<String, Object>> get_g_V_asXvX_mapXbothE_weight_foldX_sumXlocalX_asXsX_selectXv_sX_order_byXselectXsX_decrX();

    public abstract Traversal<Vertex, Vertex> get_g_V_hasLabelXpersonX_order_byXageX();

    public abstract Traversal<Vertex, List<Vertex>> get_g_V_hasLabelXpersonX_fold_orderXlocalX_byXageX();

    public abstract Traversal<Vertex, String> get_g_V_hasLabelXpersonX_order_byXvalueXageX__decrX_name();

    public abstract Traversal<Vertex, String> get_g_V_properties_order_byXkey_decrX_key();

    public abstract Traversal<Vertex, Vertex> get_g_V_hasXsong_name_OHBOYX_outXfollowedByX_outXfollowedByX_order_byXperformancesX_byXsongType_incrX();

    public abstract Traversal<Vertex, String> get_g_V_both_hasLabelXpersonX_order_byXage_decrX_limitX5X_name();

    public abstract Traversal<Vertex, String> get_g_V_both_hasLabelXpersonX_order_byXage_decrX_name();

    // NOTE(review): "perfomances" (missing 'r') is part of the established method name; keep it.
    public abstract Traversal<Vertex, String> get_g_V_hasLabelXsongX_order_byXperfomances_decrX_byXnameX_rangeX110_120X_name();

    public abstract Traversal<Vertex, Map<String, Number>> get_g_V_hasLabelXpersonX_group_byXnameX_byXoutE_weight_sumX_orderXlocalX_byXvaluesX();

    public abstract Traversal<Vertex, Map.Entry<String, Number>> get_g_V_hasLabelXpersonX_group_byXnameX_byXoutE_weight_sumX_unfold_order_byXvalues_decrX();

    // Names sorted ascending (natural String order).
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_name_order() {
        final Traversal<Vertex, String> traversal = get_g_V_name_order();
        printTraversalForm(traversal);
        checkOrderedResults(Arrays.asList("josh", "lop", "marko", "peter", "ripple", "vadas"), traversal);
    }

    // Two-comparator ordering: by 2nd character ascending, then by 3rd character descending.
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_name_order_byXa1_b1X_byXb2_a2X() {
        final Traversal<Vertex, String> traversal = get_g_V_name_order_byXa1_b1X_byXb2_a2X();
        printTraversalForm(traversal);
        checkOrderedResults(Arrays.asList("marko", "vadas", "peter", "ripple", "josh", "lop"), traversal);
    }

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_order_byXname_incrX_name() {
        final Traversal<Vertex, String> traversal = get_g_V_order_byXname_incrX_name();
        printTraversalForm(traversal);
        checkOrderedResults(Arrays.asList("josh", "lop", "marko", "peter", "ripple", "vadas"), traversal);
    }

    // by("name") with no Order argument must default to ascending.
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_order_byXnameX_name() {
        final Traversal<Vertex, String> traversal = get_g_V_order_byXnameX_name();
        printTraversalForm(traversal);
        checkOrderedResults(Arrays.asList("josh", "lop", "marko", "peter", "ripple", "vadas"), traversal);
    }

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_outE_order_byXweight_decrX_weight() {
        final Traversal<Vertex, Double> traversal = get_g_V_outE_order_byXweight_decrX_weight();
        printTraversalForm(traversal);
        checkOrderedResults(Arrays.asList(1.0d, 1.0d, 0.5d, 0.4d, 0.4d, 0.2d), traversal);
    }

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_order_byXname_a1_b1X_byXname_b2_a2X_name() {
        final Traversal<Vertex, String> traversal = get_g_V_order_byXname_a1_b1X_byXname_b2_a2X_name();
        printTraversalForm(traversal);
        checkOrderedResults(Arrays.asList("marko", "vadas", "peter", "ripple", "josh", "lop"), traversal);
    }

    // shuffle produces no fixed order, so only the multiset of (a, b) bindings is checked,
    // not their sequence.
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_asXaX_outXcreatedX_asXbX_order_byXshuffleX_selectXa_bX() {
        final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_asXaX_outXcreatedX_asXbX_order_byXshuffleX_selectXa_bX();
        printTraversalForm(traversal);
        int counter = 0;
        int markoCounter = 0;
        int joshCounter = 0;
        int peterCounter = 0;
        while (traversal.hasNext()) {
            counter++;
            Map<String, Vertex> bindings = traversal.next();
            assertEquals(2, bindings.size());
            if (bindings.get("a").id().equals(convertToVertexId("marko"))) {
                assertEquals(convertToVertexId("lop"), bindings.get("b").id());
                markoCounter++;
            } else if (bindings.get("a").id().equals(convertToVertexId("josh"))) {
                assertTrue((bindings.get("b")).id().equals(convertToVertexId("lop")) || bindings.get("b").id().equals(convertToVertexId("ripple")));
                joshCounter++;
            } else if (bindings.get("a").id().equals(convertToVertexId("peter"))) {
                assertEquals(convertToVertexId("lop"), bindings.get("b").id());
                peterCounter++;
            } else {
                fail("This state should not have been reachable");
            }
        }
        assertEquals(4, markoCounter + joshCounter + peterCounter);
        assertEquals(1, markoCounter);
        assertEquals(1, peterCounter);
        assertEquals(2, joshCounter);
        assertEquals(4, counter);
    }

    // order(local) on a map: entries sorted by value descending, ties broken by key ascending.
    // For marko (age 29) the map is {1=29, 2=58, 3=87, 4=29}, so iteration order is 3,2,1,4.
    @Test
    @LoadGraphWith(MODERN)
    public void g_VX1X_hasXlabel_personX_mapXmapXint_ageXX_orderXlocalX_byXvalues_decrX_byXkeys_incrX() {
        final Traversal<Vertex, Map<Integer, Integer>> traversal = get_g_VX1X_hasXlabel_personX_mapXmapXint_ageXX_orderXlocalX_byXvalues_decrX_byXkeys_incrX(convertToVertexId("marko"));
        printTraversalForm(traversal);
        final Map<Integer, Integer> map = traversal.next();
        assertFalse(traversal.hasNext());
        assertEquals(4, map.size());
        final Iterator<Map.Entry<Integer, Integer>> iterator = map.entrySet().iterator();
        Map.Entry<Integer, Integer> entry = iterator.next();
        assertEquals(3, entry.getKey().intValue());
        assertEquals(87, entry.getValue().intValue());
        entry = iterator.next();
        assertEquals(2, entry.getKey().intValue());
        assertEquals(58, entry.getValue().intValue());
        entry = iterator.next();
        assertEquals(1, entry.getKey().intValue());
        assertEquals(29, entry.getValue().intValue());
        entry = iterator.next();
        assertEquals(4, entry.getKey().intValue());
        assertEquals(29, entry.getValue().intValue());
        assertFalse(iterator.hasNext());
        assertFalse(traversal.hasNext());
    }

    // Order by out-edge count descending; the three zero-out-degree vertices may appear
    // in any relative order at the tail.
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_order_byXoutE_count__decrX() {
        Arrays.asList(get_g_V_order_byXoutE_count__decrX()).forEach(traversal -> {
            printTraversalForm(traversal);
            final List<Vertex> vertices = traversal.toList();
            assertEquals(vertices.size(), 6);
            assertEquals("marko", vertices.get(0).value("name"));
            assertEquals("josh", vertices.get(1).value("name"));
            assertEquals("peter", vertices.get(2).value("name"));
            assertTrue(vertices.get(3).value("name").equals("vadas") || vertices.get(3).value("name").equals("ripple") || vertices.get(3).value("name").equals("lop"));
            assertTrue(vertices.get(4).value("name").equals("vadas") || vertices.get(4).value("name").equals("ripple") || vertices.get(4).value("name").equals("lop"));
            assertTrue(vertices.get(5).value("name").equals("vadas") || vertices.get(5).value("name").equals("ripple") || vertices.get(5).value("name").equals("lop"));
        });
    }

    // group().by(label) with per-group names folded in descending order.
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_group_byXlabelX_byXname_order_byXdecrX_foldX() {
        final Traversal<Vertex, Map<String, List<Vertex>>> traversal = get_g_V_group_byXlabelX_byXname_order_byXdecrX_foldX();
        printTraversalForm(traversal);
        final Map<String, List<Vertex>> map = traversal.next();
        assertFalse(traversal.hasNext());
        assertEquals(2, map.size());
        List list = map.get("software");
        assertEquals(2, list.size());
        assertEquals("lop", list.get(1));
        assertEquals("ripple", list.get(0));
        list = map.get("person");
        assertEquals(4, list.size());
        assertEquals("josh", list.get(3));
        assertEquals("marko", list.get(2));
        assertEquals("peter", list.get(1));
        assertEquals("vadas", list.get(0));
    }

    // Lists of incident-edge weights, ordered by their local sum descending.
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_localXbothE_weight_foldX_order_byXsumXlocalX_decrX() {
        final Traversal<Vertex, List<Double>> traversal = get_g_V_localXbothE_weight_foldX_order_byXsumXlocalX_decrX();
        final List<List<Double>> list = traversal.toList();
        assertEquals(list.get(0).size(), 3);
        assertEquals(list.get(1).size(), 3);
        //assertEquals(list.get(2).size(),3); // they both have value 1.0 and thus can't guarantee a tie order
        //assertEquals(list.get(3).size(),1);
        assertEquals(list.get(4).size(), 1);
        assertEquals(list.get(5).size(), 1);
        ///
        assertEquals(2.4d, list.get(0).stream().reduce(0.0d, (a, b) -> a + b), 0.01d);
        assertEquals(1.9d, list.get(1).stream().reduce(0.0d, (a, b) -> a + b), 0.01d);
        assertEquals(1.0d, list.get(2).stream().reduce(0.0d, (a, b) -> a + b), 0.01d);
        assertEquals(1.0d, list.get(3).stream().reduce(0.0d, (a, b) -> a + b), 0.01d);
        assertEquals(0.5d, list.get(4).stream().reduce(0.0d, (a, b) -> a + b), 0.01d);
        assertEquals(0.2d, list.get(5).stream().reduce(0.0d, (a, b) -> a + b), 0.01d);
    }

    // Vertices paired with their summed incident weights, ordered by the sum descending.
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_asXvX_mapXbothE_weight_foldX_sumXlocalX_asXsX_selectXv_sX_order_byXselectXsX_decrX() {
        final Traversal<Vertex, Map<String, Object>> traversal = get_g_V_asXvX_mapXbothE_weight_foldX_sumXlocalX_asXsX_selectXv_sX_order_byXselectXsX_decrX();
        final List<Map<String, Object>> list = traversal.toList();
        assertEquals(convertToVertex(graph, "josh"), list.get(0).get("v"));
        assertEquals(2.4d, (Double) list.get(0).get("s"), 0.1d);
        ///
        assertEquals(convertToVertex(graph, "marko"), list.get(1).get("v"));
        assertEquals(1.9d, (Double) list.get(1).get("s"), 0.1d);
        //
        assertEquals(1.0d, (Double) list.get(2).get("s"), 0.1d); // they both have 1.0 so you can't test the "v" as().
        assertEquals(1.0d, (Double) list.get(3).get("s"), 0.1d);
        ///
        assertEquals(convertToVertex(graph, "vadas"), list.get(4).get("v"));
        assertEquals(0.5d, (Double) list.get(4).get("s"), 0.1d);
        ///
        assertEquals(convertToVertex(graph, "peter"), list.get(5).get("v"));
        assertEquals(0.2d, (Double) list.get(5).get("s"), 0.1d);
    }

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_hasLabelXpersonX_order_byXageX() {
        final Traversal<Vertex, Vertex> traversal = get_g_V_hasLabelXpersonX_order_byXageX();
        printTraversalForm(traversal);
        checkResults(Arrays.asList(convertToVertex(graph, "vadas"), convertToVertex(graph, "marko"), convertToVertex(graph, "josh"), convertToVertex(graph, "peter")), traversal);
    }

    // order(local) applied to the folded list of persons, by age ascending.
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_hasLabelXpersonX_fold_orderXlocalX_byXageX() {
        final Traversal<Vertex, List<Vertex>> traversal = get_g_V_hasLabelXpersonX_fold_orderXlocalX_byXageX();
        printTraversalForm(traversal);
        final List<Vertex> list = traversal.next();
        assertEquals(convertToVertex(graph, "vadas"), list.get(0));
        assertEquals(convertToVertex(graph, "marko"), list.get(1));
        assertEquals(convertToVertex(graph, "josh"), list.get(2));
        assertEquals(convertToVertex(graph, "peter"), list.get(3));
    }

    // Ordering via an arbitrary function (lambda extracting "age"), descending.
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_hasLabelXpersonX_order_byXvalueXageX__decrX_name() {
        final Traversal<Vertex, String> traversal = get_g_V_hasLabelXpersonX_order_byXvalueXageX__decrX_name();
        printTraversalForm(traversal);
        checkOrderedResults(Arrays.asList("peter", "josh", "marko", "vadas"), traversal);
    }

    // Property keys ordered descending: name > lang > age.
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_properties_order_byXkey_decrX_key() {
        final Traversal<Vertex, String> traversal = get_g_V_properties_order_byXkey_decrX_key();
        printTraversalForm(traversal);
        checkOrderedResults(Arrays.asList(
                "name", "name", "name", "name", "name", "name",
                "lang", "lang",
                "age", "age", "age", "age"), traversal);
    }

    // NOTE(review): the method name says songType "incr" but the reference implementation
    // (see Traversals below) uses Order.decr, and the tie-break assertion here
    // (compareTo(last) < 0) expects songType to decrease on equal performances —
    // confirm the intended naming before changing either side.
    @Test
    @LoadGraphWith(GRATEFUL)
    public void g_V_hasXsong_name_OHBOYX_outXfollowedByX_outXfollowedByX_order_byXperformancesX_byXsongType_incrX() {
        final Traversal<Vertex, Vertex> traversal = get_g_V_hasXsong_name_OHBOYX_outXfollowedByX_outXfollowedByX_order_byXperformancesX_byXsongType_incrX();
        printTraversalForm(traversal);
        int counter = 0;
        String lastSongType = "a";
        int lastPerformances = Integer.MIN_VALUE;
        while (traversal.hasNext()) {
            final Vertex vertex = traversal.next();
            final String currentSongType = vertex.value("songType");
            final int currentPerformances = vertex.value("performances");
            assertTrue(currentPerformances == lastPerformances || currentPerformances > lastPerformances);
            if (currentPerformances == lastPerformances)
                assertTrue(currentSongType.equals(lastSongType) || currentSongType.compareTo(lastSongType) < 0);
            lastSongType = currentSongType;
            lastPerformances = currentPerformances;
            counter++;
        }
        assertEquals(144, counter);
    }

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_both_hasLabelXpersonX_order_byXage_decrX_limitX5X_name() {
        final Traversal<Vertex, String> traversal = get_g_V_both_hasLabelXpersonX_order_byXage_decrX_limitX5X_name();
        printTraversalForm(traversal);
        checkOrderedResults(Arrays.asList("peter", "josh", "josh", "josh", "marko"), traversal);
    }

    @Test
    @IgnoreEngine(TraversalEngine.Type.STANDARD) // validating the internal sort/limit works in GraphComputer
    @LoadGraphWith(MODERN)
    public void g_V_both_hasLabelXpersonX_order_byXage_decrX_name() {
        final Traversal<Vertex, String> traversal = get_g_V_both_hasLabelXpersonX_order_byXage_decrX_name();
        traversal.asAdmin().applyStrategies();
        if (!TraversalHelper.getFirstStepOfAssignableClass(OrderGlobalStep.class, traversal.asAdmin()).isPresent())
            return; // total hack to avoid providers that don't compile to OrderGlobalStep
        // force an internal limit of 1 so the step must truncate; without the limit there
        // would be 8 results (both() of the 4 persons)
        TraversalHelper.getFirstStepOfAssignableClass(OrderGlobalStep.class, traversal.asAdmin()).get().setLimit(1);
        printTraversalForm(traversal);
        final List<String> results = traversal.toList();
        assertTrue(results.size() < 8);
        assertFalse(traversal.hasNext());
    }

    @Test
    @LoadGraphWith(GRATEFUL)
    public void g_V_hasLabelXsongX_order_byXperfomances_decrX_byXnameX_rangeX110_120X_name() {
        final Traversal<Vertex, String> traversal = get_g_V_hasLabelXsongX_order_byXperfomances_decrX_byXnameX_rangeX110_120X_name();
        printTraversalForm(traversal);
        checkOrderedResults(Arrays.asList(
                "WANG DANG DOODLE", "THE ELEVEN", "WAY TO GO HOME", "FOOLISH HEART",
                "GIMME SOME LOVING", "DUPREES DIAMOND BLUES", "CORRINA", "PICASSO MOON",
                "KNOCKING ON HEAVENS DOOR", "MEMPHIS BLUES"), traversal);
    }

    // group() values (summed out-edge weights) ordered ascending via order(local).by(values).
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_hasLabelXpersonX_group_byXnameX_byXoutE_weight_sumX_orderXlocalX_byXvaluesX() {
        final Traversal<Vertex, Map<String, Number>> traversal = get_g_V_hasLabelXpersonX_group_byXnameX_byXoutE_weight_sumX_orderXlocalX_byXvaluesX();
        printTraversalForm(traversal);
        assertTrue(traversal.hasNext());
        final Map<String, Number> m = traversal.next();
        assertFalse(traversal.hasNext());
        assertEquals(4, m.size());
        final Iterator<Map.Entry<String, Number>> iterator = m.entrySet().iterator();
        Map.Entry<String, Number> entry = iterator.next();
        assertEquals("vadas", entry.getKey());
        assertEquals(0.0, entry.getValue().doubleValue(), 0.0001);
        entry = iterator.next();
        assertEquals("peter", entry.getKey());
        assertEquals(0.2, entry.getValue().doubleValue(), 0.0001);
        entry = iterator.next();
        assertEquals("josh", entry.getKey());
        assertEquals(1.4, entry.getValue().doubleValue(), 0.0001);
        entry = iterator.next();
        assertEquals("marko", entry.getKey());
        assertEquals(1.9, entry.getValue().doubleValue(), 0.0001);
    }

    // Same grouping, but unfolded to entries and ordered by value descending.
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_hasLabelXpersonX_group_byXnameX_byXoutE_weight_sumX_unfold_order_byXvalues_decrX() {
        final Traversal<Vertex, Map.Entry<String, Number>> traversal = get_g_V_hasLabelXpersonX_group_byXnameX_byXoutE_weight_sumX_unfold_order_byXvalues_decrX();
        printTraversalForm(traversal);
        assertTrue(traversal.hasNext());
        Map.Entry<String, Number> entry = traversal.next();
        assertEquals("marko", entry.getKey());
        assertEquals(1.9, entry.getValue().doubleValue(), 0.0001);
        entry = traversal.next();
        assertEquals("josh", entry.getKey());
        assertEquals(1.4, entry.getValue().doubleValue(), 0.0001);
        entry = traversal.next();
        assertEquals("peter", entry.getKey());
        assertEquals(0.2, entry.getValue().doubleValue(), 0.0001);
        entry = traversal.next();
        assertEquals("vadas", entry.getKey());
        assertEquals(0.0, entry.getValue().doubleValue(), 0.0001);
        assertFalse(traversal.hasNext());
    }

    /**
     * Reference implementations of the traversals under test, built with the standard
     * Gremlin-Java DSL.
     */
    public static class Traversals extends OrderTest {

        @Override
        public Traversal<Vertex, String> get_g_V_name_order() {
            return g.V().<String>values("name").order();
        }

        @Override
        public Traversal<Vertex, String> get_g_V_name_order_byXa1_b1X_byXb2_a2X() {
            return g.V().<String>values("name").order().by((a, b) -> a.substring(1, 2).compareTo(b.substring(1, 2))).by((a, b) -> b.substring(2, 3).compareTo(a.substring(2, 3)));
        }

        @Override
        public Traversal<Vertex, String> get_g_V_order_byXname_incrX_name() {
            return g.V().order().by("name", Order.incr).values("name");
        }

        @Override
        public Traversal<Vertex, String> get_g_V_order_byXnameX_name() {
            return g.V().order().by("name").values("name");
        }

        @Override
        public Traversal<Vertex, Double> get_g_V_outE_order_byXweight_decrX_weight() {
            return g.V().outE().order().by("weight", Order.decr).values("weight");
        }

        @Override
        public Traversal<Vertex, String> get_g_V_order_byXname_a1_b1X_byXname_b2_a2X_name() {
            return g.V().order().
                    <String>by("name", (a, b) -> a.substring(1, 2).compareTo(b.substring(1, 2))).
                    <String>by("name", (a, b) -> b.substring(2, 3).compareTo(a.substring(2, 3))).values("name");
        }

        @Override
        public Traversal<Vertex, Map<String, Vertex>> get_g_V_asXaX_outXcreatedX_asXbX_order_byXshuffleX_selectXa_bX() {
            return g.V().as("a").out("created").as("b").order().by(Order.shuffle).select("a", "b");
        }

        @Override
        public Traversal<Vertex, Map<Integer, Integer>> get_g_VX1X_hasXlabel_personX_mapXmapXint_ageXX_orderXlocalX_byXvalues_decrX_byXkeys_incrX(final Object v1Id) {
            return g.V(v1Id).hasLabel("person").map(v -> {
                // build {1=age, 2=2*age, 3=3*age, 4=age} so values have a duplicate for the key tie-break
                final Map<Integer, Integer> map = new HashMap<>();
                map.put(1, (int) v.get().value("age"));
                map.put(2, (int) v.get().value("age") * 2);
                map.put(3, (int) v.get().value("age") * 3);
                map.put(4, (int) v.get().value("age"));
                return map;
            }).order(Scope.local).by(Column.values, Order.decr).by(Column.keys, Order.incr);
        }

        @Override
        public Traversal<Vertex, Vertex> get_g_V_order_byXoutE_count__decrX() {
            return g.V().order().by(outE().count(), Order.decr);
        }

        @Override
        public Traversal<Vertex, Map<String, List<Vertex>>> get_g_V_group_byXlabelX_byXname_order_byXdecrX_foldX() {
            return g.V().<String, List<Vertex>>group().by(T.label).by(__.values("name").order().by(Order.decr).fold());
        }

        @Override
        public Traversal<Vertex, List<Double>> get_g_V_localXbothE_weight_foldX_order_byXsumXlocalX_decrX() {
            return g.V().local(__.bothE().<Double>values("weight").fold()).order().by(__.sum(Scope.local), Order.decr);
        }

        @Override
        public Traversal<Vertex, Map<String, Object>> get_g_V_asXvX_mapXbothE_weight_foldX_sumXlocalX_asXsX_selectXv_sX_order_byXselectXsX_decrX() {
            return g.V().as("v").map(__.bothE().<Double>values("weight").fold()).sum(Scope.local).as("s").select("v", "s").order().by(__.select("s"), Order.decr);
        }

        @Override
        public Traversal<Vertex, Vertex> get_g_V_hasLabelXpersonX_order_byXageX() {
            return g.V().hasLabel("person").order().by("age");
        }

        @Override
        public Traversal<Vertex, List<Vertex>> get_g_V_hasLabelXpersonX_fold_orderXlocalX_byXageX() {
            return g.V().hasLabel("person").fold().order(Scope.local).by("age");
        }

        @Override
        public Traversal<Vertex, String> get_g_V_hasLabelXpersonX_order_byXvalueXageX__decrX_name() {
            return g.V().hasLabel("person").order().<Vertex>by(v -> v.value("age"), Order.decr).values("name");
        }

        @Override
        public Traversal<Vertex, String> get_g_V_properties_order_byXkey_decrX_key() {
            return g.V().properties().order().by(T.key, Order.decr).key();
        }

        @Override
        public Traversal<Vertex, Vertex> get_g_V_hasXsong_name_OHBOYX_outXfollowedByX_outXfollowedByX_order_byXperformancesX_byXsongType_incrX() {
            // NOTE(review): uses Order.decr despite "incr" in the method name — see the test above
            return g.V().has("song", "name", "OH BOY").out("followedBy").out("followedBy").order().by("performances").by("songType", Order.decr);
        }

        @Override
        public Traversal<Vertex, String> get_g_V_both_hasLabelXpersonX_order_byXage_decrX_limitX5X_name() {
            return g.V().both().hasLabel("person").order().by("age", Order.decr).limit(5).values("name");
        }

        @Override
        public Traversal<Vertex, String> get_g_V_both_hasLabelXpersonX_order_byXage_decrX_name() {
            return g.V().both().hasLabel("person").order().by("age", Order.decr).values("name");
        }

        @Override
        public Traversal<Vertex, String> get_g_V_hasLabelXsongX_order_byXperfomances_decrX_byXnameX_rangeX110_120X_name() {
            return g.V().hasLabel("song").order().by("performances", Order.decr).by("name").range(110, 120).values("name");
        }

        @Override
        public Traversal<Vertex, Map<String, Number>> get_g_V_hasLabelXpersonX_group_byXnameX_byXoutE_weight_sumX_orderXlocalX_byXvaluesX() {
            return g.V().hasLabel("person").<String, Number>group().by("name").by(outE().values("weight").sum()).order(Scope.local).by(Column.values);
        }

        @Override
        public Traversal<Vertex, Map.Entry<String, Number>> get_g_V_hasLabelXpersonX_group_byXnameX_byXoutE_weight_sumX_unfold_order_byXvalues_decrX() {
            return g.V().hasLabel("person").group().by("name").by(outE().values("weight").sum()).<Map.Entry<String, Number>>unfold().order().by(Column.values, Order.decr);
        }
    }
}
| |
/*
* Copyright (c) 2011-2020 Contributors to the Eclipse Foundation
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
* which is available at https://www.apache.org/licenses/LICENSE-2.0.
*
* SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
*/
package io.vertx.sqlclient.tck;
import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;
import io.vertx.core.Vertx;
import io.vertx.ext.unit.TestContext;
import io.vertx.sqlclient.Row;
import io.vertx.sqlclient.SqlConnection;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collector;
import java.util.stream.Collectors;
/**
 * Base TCK test verifying that a java.util.stream {@link Collector} can be plugged into
 * query execution via {@code query(...).collecting(collector)}, and that an exception
 * thrown from any collector stage (supplier, accumulator, finisher — whether thrown when
 * obtaining the stage or when invoking it) fails the query with that exact cause.
 */
public abstract class CollectorTestBase {

  protected Vertx vertx;
  protected Connector<SqlConnection> connector;

  /** Subclasses install the database-specific {@link Connector} under test. */
  protected abstract void initConnector();

  protected void connect(Handler<AsyncResult<SqlConnection>> handler) {
    connector.connect(handler);
  }

  @Before
  public void setUp() throws Exception {
    vertx = Vertx.vertx();
    initConnector();
  }

  @After
  public void tearDown(TestContext ctx) {
    connector.close();
    vertx.close(ctx.asyncAssertSuccess());
  }

  /**
   * Collector shared by the happy-path tests: maps every row of {@code collector_test}
   * to a {@link TestingCollectorObject} keyed by its {@code id} column.
   */
  private static Collector<Row, ?, Map<Integer, TestingCollectorObject>> rowCollector() {
    return Collectors.toMap(
      row -> row.getInteger("id"),
      row -> new TestingCollectorObject(row.getInteger("id"),
        row.getShort("test_int_2"),
        row.getInteger("test_int_4"),
        row.getLong("test_int_8"),
        row.getFloat("test_float"),
        row.getDouble("test_double"),
        row.getString("test_varchar"))
    );
  }

  /** The row with {@code id = 1} as loaded by the TCK fixture data. */
  private static TestingCollectorObject expectedRowOne() {
    return new TestingCollectorObject(1, (short) 32767, 2147483647, 9223372036854775807L,
      123.456f, 1.234567d, "HELLO,WORLD");
  }

  @Test
  public void testSimpleQuery(TestContext ctx) {
    TestingCollectorObject expected = expectedRowOne();
    connector.connect(ctx.asyncAssertSuccess(conn -> {
      conn.query("SELECT * FROM collector_test WHERE id = 1")
        .collecting(rowCollector())
        .execute(ctx.asyncAssertSuccess(result -> {
          Map<Integer, TestingCollectorObject> map = result.value();
          TestingCollectorObject actual = map.get(1);
          ctx.assertEquals(expected, actual);
          conn.close();
        }));
    }));
  }

  @Test
  public void testPreparedQuery(TestContext ctx) {
    TestingCollectorObject expected = expectedRowOne();
    connector.connect(ctx.asyncAssertSuccess(conn -> {
      conn.preparedQuery("SELECT * FROM collector_test WHERE id = 1")
        .collecting(rowCollector())
        .execute(ctx.asyncAssertSuccess(result -> {
          Map<Integer, TestingCollectorObject> map = result.value();
          TestingCollectorObject actual = map.get(1);
          ctx.assertEquals(expected, actual);
          conn.close();
        }));
    }));
  }

  @Test
  public void testCollectorFailureProvidingSupplier(TestContext ctx) {
    // Failure thrown while *obtaining* the supplier stage.
    RuntimeException cause = new RuntimeException();
    testCollectorFailure(ctx, cause, new CollectorBase() {
      @Override
      public Supplier<Object> supplier() {
        throw cause;
      }
    });
  }

  @Test
  public void testCollectorFailureInSupplier(TestContext ctx) {
    // Failure thrown when the supplier itself is invoked.
    RuntimeException cause = new RuntimeException();
    testCollectorFailure(ctx, cause, new CollectorBase() {
      @Override
      public Supplier<Object> supplier() {
        return () -> {
          throw cause;
        };
      }
    });
  }

  @Test
  public void testCollectorFailureProvidingAccumulator(TestContext ctx) {
    // Failure thrown while *obtaining* the accumulator stage.
    RuntimeException cause = new RuntimeException();
    testCollectorFailure(ctx, cause, new CollectorBase() {
      @Override
      public BiConsumer<Object, Row> accumulator() {
        throw cause;
      }
    });
  }

  @Test
  public void testCollectorFailureInAccumulator(TestContext ctx) {
    // Failure thrown when the accumulator is invoked for a row.
    RuntimeException cause = new RuntimeException();
    testCollectorFailure(ctx, cause, new CollectorBase() {
      @Override
      public BiConsumer<Object, Row> accumulator() {
        return (o, row) -> {
          throw cause;
        };
      }
    });
  }

  @Test
  public void testCollectorFailureProvidingFinisher(TestContext ctx) {
    // Failure thrown while *obtaining* the finisher stage.
    RuntimeException cause = new RuntimeException();
    testCollectorFailure(ctx, cause, new CollectorBase() {
      @Override
      public Function<Object, Object> finisher() {
        throw cause;
      }
    });
  }

  @Test
  public void testCollectorFailureInFinisher(TestContext ctx) {
    // Failure thrown when the finisher is invoked on the accumulated value.
    RuntimeException cause = new RuntimeException();
    testCollectorFailure(ctx, cause, new CollectorBase() {
      @Override
      public Function<Object, Object> finisher() {
        return o -> {
          throw cause;
        };
      }
    });
  }

  /**
   * Runs a query with the given (broken) collector and asserts the query fails with
   * exactly {@code cause}.
   */
  private void testCollectorFailure(TestContext ctx, Throwable cause, Collector<Row, Object, Object> collector) {
    connector.connect(ctx.asyncAssertSuccess(conn -> {
      conn.query("SELECT * FROM collector_test WHERE id = 1")
        .collecting(collector)
        .execute(ctx.asyncAssertFailure(result -> {
          ctx.assertEquals(cause, result);
          conn.close();
        }));
    }));
  }

  // this class is for verifying the use of Collector API
  private static class TestingCollectorObject {
    public int id;
    public short int2;
    public int int4;
    public long int8;
    public float floatNum;
    public double doubleNum;
    public String varchar;

    private TestingCollectorObject(int id, short int2, int int4, long int8, float floatNum, double doubleNum, String varchar) {
      this.id = id;
      this.int2 = int2;
      this.int4 = int4;
      this.int8 = int8;
      this.floatNum = floatNum;
      this.doubleNum = doubleNum;
      this.varchar = varchar;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      TestingCollectorObject that = (TestingCollectorObject) o;
      if (id != that.id) return false;
      if (int2 != that.int2) return false;
      if (int4 != that.int4) return false;
      if (int8 != that.int8) return false;
      if (Float.compare(that.floatNum, floatNum) != 0) return false;
      if (Double.compare(that.doubleNum, doubleNum) != 0) return false;
      return varchar != null ? varchar.equals(that.varchar) : that.varchar == null;
    }

    // equals/hashCode must be overridden together (Object contract): equal objects
    // must produce equal hash codes. Mirrors the fields compared in equals().
    @Override
    public int hashCode() {
      int result = id;
      result = 31 * result + (int) int2;
      result = 31 * result + int4;
      result = 31 * result + Long.hashCode(int8);
      result = 31 * result + Float.hashCode(floatNum);
      result = 31 * result + Double.hashCode(doubleNum);
      result = 31 * result + (varchar != null ? varchar.hashCode() : 0);
      return result;
    }
  }

  /** No-op collector whose stages the failure tests override one at a time. */
  private static class CollectorBase implements Collector<Row, Object, Object> {
    @Override
    public Supplier<Object> supplier() {
      return () -> null;
    }

    @Override
    public BiConsumer<Object, Row> accumulator() {
      return (a, t) -> {
      };
    }

    @Override
    public BinaryOperator<Object> combiner() {
      return (a, a2) -> null;
    }

    @Override
    public Function<Object, Object> finisher() {
      return a -> null;
    }

    @Override
    public Set<Characteristics> characteristics() {
      return Collections.emptySet();
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.dag.history.recovery;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.service.AbstractService;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.app.AppContext;
import org.apache.tez.dag.history.DAGHistoryEvent;
import org.apache.tez.dag.history.HistoryEventType;
import org.apache.tez.dag.history.SummaryEvent;
import org.apache.tez.dag.history.events.DAGSubmittedEvent;
import org.apache.tez.dag.records.TezDAGID;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Persists DAG history events to per-DAG recovery files (plus a summary file) so a
 * restarted AM can recover in-flight DAGs. Most events are queued and written by a
 * background thread; DAG_SUBMITTED / DAG_FINISHED are handled synchronously so their
 * data is flushed before the caller proceeds.
 */
public class RecoveryService extends AbstractService {

  private static final Log LOG = LogFactory.getLog(RecoveryService.class);
  private final AppContext appContext;
  private LinkedBlockingQueue<DAGHistoryEvent> eventQueue =
      new LinkedBlockingQueue<DAGHistoryEvent>();
  // DAGs whose events no longer need to be recorded (already finished).
  private Set<TezDAGID> completedDAGs = new HashSet<TezDAGID>();
  private Thread eventHandlingThread;
  private AtomicBoolean stopped = new AtomicBoolean(false);
  private AtomicBoolean started = new AtomicBoolean(false);
  private int eventCounter = 0;
  private int eventsProcessed = 0;
  // Serializes all writes: background drain loop vs. synchronous handling in handle().
  private final Object lock = new Object();
  private FileSystem recoveryDirFS; // FS where staging dir exists
  Path recoveryPath;
  // One open recovery stream per in-flight DAG; entries are closed/removed on DAG_FINISHED.
  Map<TezDAGID, FSDataOutputStream> outputStreamMap = new
      HashMap<TezDAGID, FSDataOutputStream>();
  // FSDataOutputStream metaInfoStream;
  private int bufferSize;
  // Lazily created on the first DAG_SUBMITTED/DAG_FINISHED event.
  private FSDataOutputStream summaryStream;

  public RecoveryService(AppContext appContext) {
    super(RecoveryService.class.getName());
    this.appContext = appContext;
  }

  @Override
  public void serviceInit(Configuration conf) throws Exception {
    LOG.info("Initializing RecoveryService");
    recoveryPath = appContext.getCurrentRecoveryDir();
    recoveryDirFS = FileSystem.get(recoveryPath.toUri(), conf);
    bufferSize = conf.getInt(TezConfiguration.DAG_RECOVERY_FILE_IO_BUFFER_SIZE,
        TezConfiguration.DAG_RECOVERY_FILE_IO_BUFFER_SIZE_DEFAULT);
  }

  @Override
  public void serviceStart() {
    LOG.info("Starting RecoveryService");
    eventHandlingThread = new Thread(new Runnable() {
      @Override
      public void run() {
        DAGHistoryEvent event;
        while (!stopped.get() && !Thread.currentThread().isInterrupted()) {
          // Log the size of the event-queue every so often.
          if (eventCounter != 0 && eventCounter % 1000 == 0) {
            LOG.info("Event queue stats"
                + ", eventsProcessedSinceLastUpdate=" + eventsProcessed
                + ", eventQueueSize=" + eventQueue.size());
            eventCounter = 0;
            eventsProcessed = 0;
          } else {
            ++eventCounter;
          }
          try {
            event = eventQueue.take();
          } catch (InterruptedException e) {
            LOG.info("EventQueue take interrupted. Returning");
            return;
          }
          synchronized (lock) {
            try {
              ++eventsProcessed;
              handleEvent(event);
            } catch (Exception e) {
              // TODO handle failures - treat as fatal or ignore?
              LOG.warn("Error handling recovery event", e);
            }
          }
        }
      }
    }, "RecoveryEventHandlingThread");
    eventHandlingThread.start();
    started.set(true);
  }

  @Override
  public void serviceStop() {
    LOG.info("Stopping RecoveryService");
    stopped.set(true);
    if (eventHandlingThread != null) {
      eventHandlingThread.interrupt();
    }
    if (summaryStream != null) {
      try {
        summaryStream.flush();
        summaryStream.close();
      } catch (IOException ioe) {
        LOG.warn("Error when closing summary stream", ioe);
      }
    }
    for (FSDataOutputStream outputStream : outputStreamMap.values()) {
      try {
        outputStream.flush();
        outputStream.close();
      } catch (IOException ioe) {
        LOG.warn("Error when closing output stream", ioe);
      }
    }
  }

  /**
   * Entry point for new history events. DAG_SUBMITTED and DAG_FINISHED are written and
   * flushed synchronously (under {@code lock}); all other events are queued for the
   * background thread.
   */
  public void handle(DAGHistoryEvent event) {
    if (stopped.get()) {
      LOG.warn("Ignoring event as service stopped, eventType"
          + event.getHistoryEvent().getEventType());
      return;
    }
    if (!started.get()) {
      eventQueue.add(event);
      return;
    }
    HistoryEventType eventType = event.getHistoryEvent().getEventType();
    if (eventType.equals(HistoryEventType.DAG_SUBMITTED)
        || eventType.equals(HistoryEventType.DAG_FINISHED)) {
      // handle submissions and completion immediately
      synchronized (lock) {
        try {
          handleEvent(event);
          // handleEvent may have returned without creating the summary stream
          // (e.g. a skipped pre-warm DAG) — guard against NPE.
          if (summaryStream != null) {
            summaryStream.flush();
          }
          if (eventType.equals(HistoryEventType.DAG_SUBMITTED)) {
            // Pre-warm DAGs are skipped by handleEvent and never get a stream,
            // so the map entry may legitimately be absent here.
            FSDataOutputStream dagStream = outputStreamMap.get(event.getDagID());
            if (dagStream != null) {
              dagStream.flush();
            }
          } else if (eventType.equals(HistoryEventType.DAG_FINISHED)) {
            completedDAGs.add(event.getDagID());
            if (outputStreamMap.containsKey(event.getDagID())) {
              try {
                outputStreamMap.get(event.getDagID()).flush();
                outputStreamMap.get(event.getDagID()).close();
                outputStreamMap.remove(event.getDagID());
              } catch (IOException ioe) {
                LOG.warn("Error when trying to flush/close recovery file for"
                    + " dag, dagId=" + event.getDagID(), ioe);
              }
            } else {
              // TODO this is an error
            }
          }
        } catch (Exception e) {
          // TODO handle failures - treat as fatal or ignore?
          LOG.warn("Error handling recovery event", e);
        }
      }
      LOG.info("DAG completed"
          + ", dagId=" + event.getDagID()
          + ", queueSize=" + eventQueue.size());
    } else {
      // All other events just get queued
      eventQueue.add(event);
    }
  }

  /**
   * Writes a single event to the appropriate recovery file. Must be called while
   * holding {@code lock}. Creates the summary stream and per-DAG stream lazily;
   * skips AM-level events, completed DAGs, and pre-warm DAGs.
   */
  private void handleEvent(DAGHistoryEvent event) {
    HistoryEventType eventType = event.getHistoryEvent().getEventType();
    if (LOG.isDebugEnabled()) {
      LOG.debug("Handling recovery event of type "
          + event.getHistoryEvent().getEventType());
    }
    if (event.getDagID() == null) {
      // AM event
      // anything to be done?
      // TODO
      return;
    }
    TezDAGID dagID = event.getDagID();
    if (completedDAGs.contains(dagID)) {
      // Skip events for completed DAGs
      // no need to recover completed DAGs
      return;
    }
    try {
      if (eventType.equals(HistoryEventType.DAG_SUBMITTED)
          || eventType.equals(HistoryEventType.DAG_FINISHED)) {
        if (summaryStream == null) {
          Path summaryPath = new Path(recoveryPath,
              appContext.getApplicationID()
              + TezConfiguration.DAG_RECOVERY_SUMMARY_FILE_SUFFIX);
          summaryStream = recoveryDirFS.create(summaryPath, false,
              bufferSize);
        }
        if (eventType.equals(HistoryEventType.DAG_SUBMITTED)) {
          DAGSubmittedEvent dagSubmittedEvent =
              (DAGSubmittedEvent) event.getHistoryEvent();
          String dagName = dagSubmittedEvent.getDAGName();
          if (dagName != null
              && dagName.startsWith(
              TezConfiguration.TEZ_PREWARM_DAG_NAME_PREFIX)) {
            // Skip recording pre-warm DAG events
            return;
          }
          Path dagFilePath = new Path(recoveryPath,
              dagID.toString() + TezConfiguration.DAG_RECOVERY_RECOVER_FILE_SUFFIX);
          FSDataOutputStream outputStream =
              recoveryDirFS.create(dagFilePath, false, bufferSize);
          outputStreamMap.put(dagID, outputStream);
        }
        if (outputStreamMap.containsKey(dagID)) {
          SummaryEvent summaryEvent = (SummaryEvent) event.getHistoryEvent();
          summaryEvent.toSummaryProtoStream(summaryStream);
        }
      }
      FSDataOutputStream outputStream = outputStreamMap.get(dagID);
      if (outputStream == null) {
        return;
      }
      // Record the event type ordinal followed by the serialized event payload.
      outputStream.write(event.getHistoryEvent().getEventType().ordinal());
      event.getHistoryEvent().toProtoStream(outputStream);
    } catch (IOException ioe) {
      // TODO handle failures - treat as fatal or ignore?
      LOG.warn("Failed to write to stream", ioe);
    }
  }
}
| |
/*
* Copyright 2021 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.map.authorization;
import org.jboss.logging.Logger;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.model.PermissionTicket;
import org.keycloak.authorization.model.PermissionTicket.SearchableFields;
import org.keycloak.authorization.model.Resource;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.store.PermissionTicketStore;
import org.keycloak.authorization.store.ResourceStore;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.ModelDuplicateException;
import org.keycloak.models.map.authorization.adapter.MapPermissionTicketAdapter;
import org.keycloak.models.map.authorization.entity.MapPermissionTicketEntity;
import org.keycloak.models.map.storage.MapKeycloakTransaction;
import org.keycloak.models.map.storage.MapStorage;
import org.keycloak.models.map.storage.ModelCriteriaBuilder;
import org.keycloak.models.map.storage.ModelCriteriaBuilder.Operator;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.keycloak.common.util.StackUtil.getShortStackTrace;
import static org.keycloak.models.map.common.MapStorageUtils.registerEntityForChanges;
import static org.keycloak.utils.StreamsUtil.distinctByKey;
import static org.keycloak.utils.StreamsUtil.paginatedStream;
public class MapPermissionTicketStore<K extends Comparable<K>> implements PermissionTicketStore {
private static final Logger LOG = Logger.getLogger(MapPermissionTicketStore.class);
private final AuthorizationProvider authorizationProvider;
final MapKeycloakTransaction<K, MapPermissionTicketEntity<K>, PermissionTicket> tx;
private final MapStorage<K, MapPermissionTicketEntity<K>, PermissionTicket> permissionTicketStore;
public MapPermissionTicketStore(KeycloakSession session, MapStorage<K, MapPermissionTicketEntity<K>, PermissionTicket> permissionTicketStore, AuthorizationProvider provider) {
this.authorizationProvider = provider;
this.permissionTicketStore = permissionTicketStore;
this.tx = permissionTicketStore.createTransaction(session);
session.getTransactionManager().enlist(tx);
}
private PermissionTicket entityToAdapter(MapPermissionTicketEntity<K> origEntity) {
if (origEntity == null) return null;
// Clone entity before returning back, to avoid giving away a reference to the live object to the caller
return new MapPermissionTicketAdapter<K>(registerEntityForChanges(tx, origEntity), authorizationProvider.getStoreFactory()) {
@Override
public String getId() {
return permissionTicketStore.getKeyConvertor().keyToString(entity.getId());
}
};
}
private ModelCriteriaBuilder<PermissionTicket> forResourceServer(String resourceServerId) {
ModelCriteriaBuilder<PermissionTicket> mcb = permissionTicketStore.createCriteriaBuilder();
return resourceServerId == null
? mcb
: mcb.compare(SearchableFields.RESOURCE_SERVER_ID, Operator.EQ,
resourceServerId);
}
@Override
public long count(Map<PermissionTicket.FilterOption, String> attributes, String resourceServerId) {
ModelCriteriaBuilder<PermissionTicket> mcb = forResourceServer(resourceServerId).and(
attributes.entrySet().stream()
.map(this::filterEntryToModelCriteriaBuilder)
.toArray(ModelCriteriaBuilder[]::new)
);
return tx.getCount(mcb);
}
@Override
public PermissionTicket create(String resourceId, String scopeId, String requester, ResourceServer resourceServer) {
LOG.tracef("create(%s, %s, %s, %s)%s", resourceId, scopeId, requester, resourceServer, getShortStackTrace());
String owner = authorizationProvider.getStoreFactory().getResourceStore().findById(resourceId, resourceServer.getId()).getOwner();
// @UniqueConstraint(columnNames = {"OWNER", "REQUESTER", "RESOURCE_SERVER_ID", "RESOURCE_ID", "SCOPE_ID"})
ModelCriteriaBuilder<PermissionTicket> mcb = forResourceServer(resourceServer.getId())
.compare(SearchableFields.OWNER, Operator.EQ, owner)
.compare(SearchableFields.RESOURCE_ID, Operator.EQ, resourceId)
.compare(SearchableFields.REQUESTER, Operator.EQ, requester);
if (scopeId != null) {
mcb = mcb.compare(SearchableFields.SCOPE_ID, Operator.EQ, scopeId);
}
if (tx.getCount(mcb) > 0) {
throw new ModelDuplicateException("Permission ticket for resource server: '" + resourceServer.getId()
+ ", Resource: " + resourceId + ", owner: " + owner + ", scopeId: " + scopeId + " already exists.");
}
final K newId = permissionTicketStore.getKeyConvertor().yieldNewUniqueKey();
MapPermissionTicketEntity<K> entity = new MapPermissionTicketEntity<>(newId);
entity.setResourceId(resourceId);
entity.setRequester(requester);
entity.setCreatedTimestamp(System.currentTimeMillis());
if (scopeId != null) {
entity.setScopeId(scopeId);
}
entity.setOwner(owner);
entity.setResourceServerId(resourceServer.getId());
tx.create(entity.getId(), entity);
return entityToAdapter(entity);
}
@Override
public void delete(String id) {
LOG.tracef("delete(%s)%s", id, getShortStackTrace());
tx.delete(permissionTicketStore.getKeyConvertor().fromString(id));
}
@Override
public PermissionTicket findById(String id, String resourceServerId) {
LOG.tracef("findById(%s, %s)%s", id, resourceServerId, getShortStackTrace());
return tx.getUpdatedNotRemoved(forResourceServer(resourceServerId)
.compare(PermissionTicket.SearchableFields.ID, Operator.EQ, id))
.findFirst()
.map(this::entityToAdapter)
.orElse(null);
}
@Override
public List<PermissionTicket> findByResourceServer(String resourceServerId) {
LOG.tracef("findByResourceServer(%s)%s", resourceServerId, getShortStackTrace());
return tx.getUpdatedNotRemoved(forResourceServer(resourceServerId))
.map(this::entityToAdapter)
.collect(Collectors.toList());
}
@Override
public List<PermissionTicket> findByOwner(String owner, String resourceServerId) {
LOG.tracef("findByOwner(%s, %s)%s", owner, resourceServerId, getShortStackTrace());
return tx.getUpdatedNotRemoved(forResourceServer(resourceServerId)
.compare(SearchableFields.OWNER, Operator.EQ, owner))
.map(this::entityToAdapter)
.collect(Collectors.toList());
}
@Override
public List<PermissionTicket> findByResource(String resourceId, String resourceServerId) {
LOG.tracef("findByResource(%s, %s)%s", resourceId, resourceServerId, getShortStackTrace());
return tx.getUpdatedNotRemoved(forResourceServer(resourceServerId)
.compare(SearchableFields.RESOURCE_ID, Operator.EQ, resourceId))
.map(this::entityToAdapter)
.collect(Collectors.toList());
}
@Override
public List<PermissionTicket> findByScope(String scopeId, String resourceServerId) {
LOG.tracef("findByScope(%s, %s)%s", scopeId, resourceServerId, getShortStackTrace());
return tx.getUpdatedNotRemoved(forResourceServer(resourceServerId)
.compare(SearchableFields.SCOPE_ID, Operator.EQ, scopeId))
.map(this::entityToAdapter)
.collect(Collectors.toList());
}
@Override
public List<PermissionTicket> find(Map<PermissionTicket.FilterOption, String> attributes, String resourceServerId, int firstResult, int maxResult) {
ModelCriteriaBuilder<PermissionTicket> mcb = forResourceServer(resourceServerId);
if (attributes.containsKey(PermissionTicket.FilterOption.RESOURCE_NAME)) {
String expectedResourceName = attributes.remove(PermissionTicket.FilterOption.RESOURCE_NAME);
Map<Resource.FilterOption, String[]> filterOptionStringMap = new EnumMap<>(Resource.FilterOption.class);
filterOptionStringMap.put(Resource.FilterOption.EXACT_NAME, new String[]{expectedResourceName});
List<Resource> r = authorizationProvider.getStoreFactory().getResourceStore().findByResourceServer(filterOptionStringMap, resourceServerId, -1, -1);
if (r == null || r.isEmpty()) {
return Collections.emptyList();
}
mcb = mcb.compare(SearchableFields.RESOURCE_ID, Operator.IN, r.stream().map(Resource::getId));
}
mcb = mcb.and(
attributes.entrySet().stream()
.map(this::filterEntryToModelCriteriaBuilder)
.toArray(ModelCriteriaBuilder[]::new)
);
Comparator<? super MapPermissionTicketEntity<K>> c = Comparator.comparing(MapPermissionTicketEntity::getId);
return paginatedStream(tx.getUpdatedNotRemoved(mcb)
.sorted(c), firstResult, maxResult)
.map(this::entityToAdapter)
.collect(Collectors.toList());
}
private ModelCriteriaBuilder<PermissionTicket> filterEntryToModelCriteriaBuilder(Map.Entry<PermissionTicket.FilterOption, String> entry) {
PermissionTicket.FilterOption name = entry.getKey();
String value = entry.getValue();
switch (name) {
case ID:
case SCOPE_ID:
case RESOURCE_ID:
case OWNER:
case REQUESTER:
case POLICY_ID:
return permissionTicketStore.createCriteriaBuilder()
.compare(name.getSearchableModelField(), Operator.EQ, value);
case SCOPE_IS_NULL:
case GRANTED:
case REQUESTER_IS_NULL: {
Operator op = Operator.NOT_EXISTS;
if (Boolean.parseBoolean(value)) {
op = Operator.EXISTS;
}
return permissionTicketStore.createCriteriaBuilder()
.compare(name.getSearchableModelField(), op);
}
case POLICY_IS_NOT_NULL:
return permissionTicketStore.createCriteriaBuilder()
.compare(SearchableFields.REQUESTER, Operator.NOT_EXISTS);
default:
throw new IllegalArgumentException("Unsupported filter [" + name + "]");
}
}
@Override
public List<PermissionTicket> findGranted(String userId, String resourceServerId) {
Map<PermissionTicket.FilterOption, String> filters = new EnumMap<>(PermissionTicket.FilterOption.class);
filters.put(PermissionTicket.FilterOption.GRANTED, Boolean.TRUE.toString());
filters.put(PermissionTicket.FilterOption.REQUESTER, userId);
return find(filters, resourceServerId, -1, -1);
}
@Override
public List<PermissionTicket> findGranted(String resourceName, String userId, String resourceServerId) {
Map<PermissionTicket.FilterOption, String> filters = new EnumMap<>(PermissionTicket.FilterOption.class);
filters.put(PermissionTicket.FilterOption.RESOURCE_NAME, resourceName);
filters.put(PermissionTicket.FilterOption.GRANTED, Boolean.TRUE.toString());
filters.put(PermissionTicket.FilterOption.REQUESTER, userId);
return find(filters, resourceServerId, -1, -1);
}
@Override
public List<Resource> findGrantedResources(String requester, String name, int first, int max) {
ModelCriteriaBuilder<PermissionTicket> mcb = permissionTicketStore.createCriteriaBuilder()
.compare(SearchableFields.REQUESTER, Operator.EQ, requester)
.compare(SearchableFields.GRANTED_TIMESTAMP, Operator.EXISTS);
Function<MapPermissionTicketEntity<K>, Resource> ticketResourceMapper;
ResourceStore resourceStore = authorizationProvider.getStoreFactory().getResourceStore();
if (name != null) {
ticketResourceMapper = ticket -> {
Map<Resource.FilterOption, String[]> filterOptionMap = new EnumMap<>(Resource.FilterOption.class);
filterOptionMap.put(Resource.FilterOption.ID, new String[] {ticket.getResourceId()});
filterOptionMap.put(Resource.FilterOption.NAME, new String[] {name});
List<Resource> resource = resourceStore.findByResourceServer(filterOptionMap, ticket.getResourceServerId(), -1, 1);
return resource.isEmpty() ? null : resource.get(0);
};
} else {
ticketResourceMapper = ticket -> resourceStore
.findById(ticket.getResourceId(), ticket.getResourceServerId());
}
return paginatedStream(tx.getUpdatedNotRemoved(mcb)
.filter(distinctByKey(MapPermissionTicketEntity::getResourceId))
.sorted(MapPermissionTicketEntity.COMPARE_BY_RESOURCE_ID)
.map(ticketResourceMapper)
.filter(Objects::nonNull), first, max)
.collect(Collectors.toList());
}
@Override
public List<Resource> findGrantedOwnerResources(String owner, int first, int max) {
ModelCriteriaBuilder<PermissionTicket> mcb = permissionTicketStore.createCriteriaBuilder()
.compare(SearchableFields.OWNER, Operator.EQ, owner);
return paginatedStream(tx.getUpdatedNotRemoved(mcb)
.filter(distinctByKey(MapPermissionTicketEntity::getResourceId))
.sorted(MapPermissionTicketEntity.COMPARE_BY_RESOURCE_ID), first, max)
.map(ticket -> authorizationProvider.getStoreFactory().getResourceStore()
.findById(ticket.getResourceId(), ticket.getResourceServerId()))
.collect(Collectors.toList());
}
}
| |
/*
* AliquotEditorForLAICPMS.java
*
*
* Copyright 2006-2015 James F. Bowring and www.Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.UPb_Redux.dialogs.aliquotManagers;
import java.awt.Color;
import java.awt.Font;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Vector;
import javax.swing.AbstractButton;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JTextField;
import javax.swing.text.JTextComponent;
import org.earthtime.ETReduxFrame;
import org.earthtime.UPb_Redux.ReduxConstants;
import org.earthtime.UPb_Redux.aliquots.UPbReduxAliquot;
import org.earthtime.UPb_Redux.exceptions.BadLabDataException;
import org.earthtime.UPb_Redux.fractions.UPbReduxFractions.UPbFraction;
import org.earthtime.reduxLabData.ReduxLabData;
import org.earthtime.UPb_Redux.renderers.EditFractionButton;
import org.earthtime.UPb_Redux.valueModels.ValueModel;
import org.earthtime.aliquots.AliquotInterface;
import org.earthtime.dataDictionaries.MeasuredRatios;
import org.earthtime.dataDictionaries.RadDates;
import org.earthtime.exceptions.ETException;
import org.earthtime.exceptions.ETWarningDialog;
import org.earthtime.fractions.ETFractionInterface;
import org.earthtime.samples.SampleInterface;
import org.earthtime.xmlUtilities.XMLSerializationI;
import org.jdesktop.layout.GroupLayout.ParallelGroup;
import org.jdesktop.layout.GroupLayout.SequentialGroup;
/**
*
* @author James F. Bowring
*/
public class AliquotEditorForLAICPMS extends AliquotEditorDialog {
// Instance variables: parallel per-fraction editor widgets. Each list is indexed by
// fraction row in the fast-edits panel; all are (re)built in initAliquot().
// measured ratios
private ArrayList<JComponent> fractionR206_204m_Text;
// composition
private ArrayList<JComponent> fractionConcU_Text;
private ArrayList<JLabel> fractionConcUPPM;
private ArrayList<JComponent> fractionRTh_Usample_Text;
// Isotopic ratios (value fields paired with their 2-sigma percent uncertainty fields)
private ArrayList<JComponent> fractionR206_238r_Text;
private ArrayList<JComponent> fractionR206_238r2SigmaPct_Text;
private ArrayList<JComponent> fractionR207_235r_Text;
private ArrayList<JComponent> fractionR207_235r2SigmaPct_Text;
private ArrayList<JComponent> fractionR207_206r_Text;
private ArrayList<JComponent> fractionR207_206r2SigmaPct_Text;
private ArrayList<JComponent> fractionRhoR206_238r__r207_235r_Text;
// Isotopic Dates (value fields paired with their 2-sigma absolute uncertainty fields)
private ArrayList<JComponent> fractionDate206_238r_Text;
private ArrayList<JComponent> fractionDate206_238r2SigmaAbs_Text;
private ArrayList<JComponent> fractionDate207_235r_Text;
private ArrayList<JComponent> fractionDate207_235r2SigmaAbs_Text;
private ArrayList<JComponent> fractionDate207_206r_Text;
private ArrayList<JComponent> fractionDate207_206r2SigmaAbs_Text;
/**
* Creates new form AliquotEditorDialog
*
* @param parent
* @param modal
* @param sample
* @param aliquot
*/
/**
 * Creates new form AliquotEditorDialog, then swaps the superclass's button handlers
 * for the LA-ICP-MS-specific implementations defined in this subclass.
 *
 * @param parent the parent application frame
 * @param modal whether this dialog is modal
 * @param sample the sample whose aliquot is being edited
 * @param aliquot the aliquot to edit
 */
public AliquotEditorForLAICPMS(
        ETReduxFrame parent,
        boolean modal,
        SampleInterface sample,
        AliquotInterface aliquot) {
    super(parent, modal, sample, aliquot);

    replaceFirstActionListener(saveAndClose_button, new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            saveAndClose_buttonActionPerformed(evt);
        }
    });

    replaceFirstActionListener(save_button, new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            save_buttonActionPerformed(evt);
        }
    });

    replaceFirstActionListener(restore_button, new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            restore_buttonActionPerformed(evt);
        }
    });

    replaceFirstActionListener(exportXMLAliquot_button, new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            exportXMLAliquot_buttonActionPerformed(evt);
        }
    });

    replaceFirstActionListener(saveAndPreviewXMLAliquotAsHTML_button, new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            saveAndPreviewXMLAliquotAsHTML_buttonActionPerformed(evt);
        }
    });

    replaceFirstActionListener(saveAndUploadAliquotToGeochron_button, new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            saveAndUploadAliquotToGeochron_buttonActionPerformed(evt);
        }
    });
}

/**
 * Removes the first ActionListener registered on {@code button} (the handler
 * installed by the superclass) and installs {@code replacement} in its place.
 */
private void replaceFirstActionListener(AbstractButton button, ActionListener replacement) {
    button.removeActionListener(button.getActionListeners()[0]);
    button.addActionListener(replacement);
}
/**
 * Clears and rebuilds the fast-edit grid from the aliquot's current
 * fractions: resets the pending add/delete queues, recreates every
 * per-column component list, then adds one display row per fraction
 * and pushes the saved data into the new components.
 */
@Override
public void initAliquot() {
    fastEdits_panel.removeAll();

    // fresh editing session: nothing queued for addition or deletion yet
    deletedFractions = new Vector<>();
    addedFractions = new Vector<>();

    fastEdits_panel.setBackground(ReduxConstants.myFractionGreenColor);

    // per-row action buttons
    fractionDeleteButtons = new ArrayList<>();
    fractionEditButtons = new ArrayList<>();

    // composition columns
    fractionConcU_Text = new ArrayList<>();
    fractionConcUPPM = new ArrayList<>();
    fractionRTh_Usample_Text = new ArrayList<>();

    // measured-ratio column
    fractionR206_204m_Text = new ArrayList<>();

    // radiogenic isotopic-ratio columns (value + 1-sigma % pairs, plus rho)
    fractionR206_238r_Text = new ArrayList<>();
    fractionR206_238r2SigmaPct_Text = new ArrayList<>();
    fractionR207_235r_Text = new ArrayList<>();
    fractionR207_235r2SigmaPct_Text = new ArrayList<>();
    fractionR207_206r_Text = new ArrayList<>();
    fractionR207_206r2SigmaPct_Text = new ArrayList<>();
    fractionRhoR206_238r__r207_235r_Text = new ArrayList<>();

    // isotopic-date columns (value + 1-sigma abs pairs)
    fractionDate206_238r_Text = new ArrayList<>();
    fractionDate206_238r2SigmaAbs_Text = new ArrayList<>();
    fractionDate207_235r_Text = new ArrayList<>();
    fractionDate207_235r2SigmaAbs_Text = new ArrayList<>();
    fractionDate207_206r_Text = new ArrayList<>();
    fractionDate207_206r2SigmaAbs_Text = new ArrayList<>();

    // NOTE(review): a long-dormant commented-out "add new fraction" UI
    // (masterNewFractionName / masterNewFractionNameAdder) was removed here.

    // build one table row per fraction currently in the aliquot
    int fractionCount = getMyAliquot().getAliquotFractions().size();
    for (int row = 0; row < fractionCount; row++) {
        addFractionRow(getMyAliquot().getAliquotFractions().get(row), row, fractionCount);
    }

    // populate the freshly created components with the saved fraction data
    showSavedDataII();
}
/**
 * Creates the per-row edit components (delete/edit buttons, text fields,
 * units label) for a single fraction and appends them to the parallel
 * per-column lists used by the fast-edit grid.
 *
 * @param tempFrac the fraction this row represents
 * @param row      zero-based row index, baked into the row's button listeners
 * @param max      total number of rows, used to size keyboard-navigation maps
 */
protected void addFractionRow(ETFractionInterface tempFrac, int row, int max) {
    // button that queues this fraction for deletion on save
    JButton tempJB = new EditFractionButton("X", row, true);
    tempJB.setForeground(Color.red);
    tempJB.setToolTipText("Click to DELETE Fraction!");
    tempJB.setMargin(new Insets(0, 0, 0, 0));
    tempJB.addActionListener(new DeleteFractionListener(tempFrac, row));
    fractionDeleteButtons.add(tempJB);
    modifyComponentKeyMapForTable(tempJB, fractionDeleteButtons, max);

    // button that opens the fraction editor (listener wired elsewhere)
    tempJB = new EditFractionButton("Kwiki", row, true);
    fractionEditButtons.add(tempJB);
    modifyComponentKeyMapForTable(tempJB, fractionEditButtons, max);

    // Composition
    // BUGFIX: fractionConcU_Text was previously inserted TWICE per row
    // (once here and once below), which grew this list at twice the rate of
    // every other per-column list and desynchronized all row indexing in
    // updateFractionRow / buildFastEditDisplayPanel. Insert exactly once.
    insertTableTextField(fractionConcU_Text, max);

    // units label for U concentration
    JLabel tempJL = new JLabel("ppm");
    tempJL.setForeground(Color.RED);
    fractionConcUPPM.add(tempJL);

    insertTableTextField(fractionRTh_Usample_Text, max);

    // measured ratio
    insertTableTextField(fractionR206_204m_Text, max);

    // legacy radiogenic isotopic ratios (value + 1-sigma % pairs, plus rho)
    insertTableTextField(fractionR207_206r_Text, max);
    insertTableTextField(fractionR207_206r2SigmaPct_Text, max);
    insertTableTextField(fractionR207_235r_Text, max);
    insertTableTextField(fractionR207_235r2SigmaPct_Text, max);
    insertTableTextField(fractionR206_238r_Text, max);
    insertTableTextField(fractionR206_238r2SigmaPct_Text, max);
    insertTableTextField(fractionRhoR206_238r__r207_235r_Text, max);

    // Isotopic Dates (value + 1-sigma abs pairs)
    insertTableTextField(fractionDate206_238r_Text, max);
    insertTableTextField(fractionDate206_238r2SigmaAbs_Text, max);
    insertTableTextField(fractionDate207_235r_Text, max);
    insertTableTextField(fractionDate207_235r2SigmaAbs_Text, max);
    insertTableTextField(fractionDate207_206r_Text, max);
    insertTableTextField(fractionDate207_206r2SigmaAbs_Text, max);
}
/**
 * Lays out the fast-edit grid inside {@code fastEdits_panel}: a section
 * header row, a column-title row, and one row of editable components per
 * fraction, built with org.jdesktop GroupLayout sequential/parallel groups.
 * Call after the per-column component lists have been (re)populated.
 */
protected void buildFastEditDisplayPanel() {
fastEdits_panel.removeAll();
// build display
org.jdesktop.layout.GroupLayout jPanel2Layout = new org.jdesktop.layout.GroupLayout(fastEdits_panel);
fastEdits_panel.setLayout(jPanel2Layout);
// horizontal axis: a parallel group stacking one sequential group per row
ParallelGroup myHorizFraction = jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING, false);
// vertical axis: a sequential group of one parallel group per row
SequentialGroup myVerticalFraction = jPanel2Layout.createSequentialGroup();
// create title row elements
JLabel headDelete = new JLabel("DELETE");
headDelete.setFont(redHeadFont);
headDelete.setForeground(Color.RED);
JLabel headFraction = new JLabel("EDIT Fraction");
headFraction.setFont(redHeadFont);
headFraction.setForeground(Color.RED);
// Composition
JLabel headConcU = new JLabel("conc U");
headConcU.setFont(redHeadFont);
headConcU.setForeground(Color.RED);
JLabel headRTh_Usample = new JLabel("Th/U");
headRTh_Usample.setFont(redHeadFont);
headRTh_Usample.setForeground(Color.RED);
// measured ratios
JLabel headR206_204m = new JLabel("206Pb/204Pb");
headR206_204m.setFont(redHeadFont);
headR206_204m.setForeground(Color.RED);
// isotopic ratios (each value column is followed by a 1-sigma% column)
JLabel headR207_206r = new JLabel("207Pb/206Pb");
headR207_206r.setFont(redHeadFont);
headR207_206r.setForeground(Color.RED);
JLabel headR207_206r1SigmaPct = new JLabel("1-sigma%");
headR207_206r1SigmaPct.setFont(redHeadFont);
headR207_206r1SigmaPct.setForeground(Color.RED);
JLabel headR207_235r = new JLabel("207Pb/235U");
headR207_235r.setFont(redHeadFont);
headR207_235r.setForeground(Color.RED);
JLabel headR207_235r1SigmaPct = new JLabel("1-sigma%");
headR207_235r1SigmaPct.setFont(redHeadFont);
headR207_235r1SigmaPct.setForeground(Color.RED);
JLabel headR206_238r = new JLabel("206Pb/238U");
headR206_238r.setFont(redHeadFont);
headR206_238r.setForeground(Color.RED);
JLabel headR206_238r1SigmaPct = new JLabel("1-sigma%");
headR206_238r1SigmaPct.setFont(redHeadFont);
headR206_238r1SigmaPct.setForeground(Color.RED);
JLabel headRhoR206_238r__r207_235r = new JLabel("rho 6/8-7/35");
headRhoR206_238r__r207_235r.setFont(redHeadFont);
headRhoR206_238r__r207_235r.setForeground(Color.RED);
// Isotopic Dates (each date column is followed by a 1-sigma abs column)
JLabel headDate206_238r = new JLabel("206Pb/238U");
headDate206_238r.setFont(redHeadFont);
headDate206_238r.setForeground(Color.RED);
JLabel headDate206_238r1SigmaAbs = new JLabel("1-sigma");
headDate206_238r1SigmaAbs.setFont(redHeadFont);
headDate206_238r1SigmaAbs.setForeground(Color.RED);
JLabel headDate207_235r = new JLabel("207Pb/235U");
headDate207_235r.setFont(redHeadFont);
headDate207_235r.setForeground(Color.RED);
JLabel headDate207_235r1SigmaAbs = new JLabel("1"
+ "-sigma");
headDate207_235r1SigmaAbs.setFont(redHeadFont);
headDate207_235r1SigmaAbs.setForeground(Color.RED);
JLabel headDate207_206r = new JLabel("207Pb/206Pb");
headDate207_206r.setFont(redHeadFont);
headDate207_206r.setForeground(Color.RED);
JLabel headDate207_206r1SigmaAbs = new JLabel("1-sigma");
headDate207_206r1SigmaAbs.setFont(redHeadFont);
headDate207_206r1SigmaAbs.setForeground(Color.RED);
// build display *******************************************************
// section banner labels that span groups of columns
JLabel headComposition = new JLabel("Composition |");
headComposition.setFont(new Font("Monospaced", Font.BOLD, 18));
headComposition.setForeground(Color.RED);
JLabel headIsotopicRatios = new JLabel("Isotopic Ratios |");
headIsotopicRatios.setFont(new Font("Monospaced", Font.BOLD, 18));
headIsotopicRatios.setForeground(Color.RED);
JLabel headIsotopicDates = new JLabel("Isotopic Dates Ma");
headIsotopicDates.setFont(new Font("Monospaced", Font.BOLD, 18));
headIsotopicDates.setForeground(Color.RED);
// master fields
// row 1: section banners (widths are fixed min=pref=max pixel sizes)
myHorizFraction.add(jPanel2Layout.createSequentialGroup()//
.add(50, 50, 50) // left margin
//.add( masterNewFractionName, 110, 110, 110 )//
.add(115, 115, 115)//
.add(headComposition, 300, 300, 300)//
.add(5, 5, 5)//
.add(headIsotopicRatios, 650, 650, 650)//
.add(5, 5, 5)//
.add(headIsotopicDates, 400, 400, 400)//
.add(5, 5, 5)//
);
myVerticalFraction.add(5, 5, 5) // top margin
.add(jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING)//.BASELINE)//
//.add( masterNewFractionName, 22, 22, 22 )//
.add(headComposition, 22, 22, 22)//
.add(headIsotopicRatios, 22, 22, 22)//
.add(headIsotopicDates, 22, 22, 22)//
);
// fill buttons
// row 2: placeholder spacing (retired "add new fraction" controls)
myHorizFraction.add(jPanel2Layout.createSequentialGroup()//
.add(50, 50, 50) // left margin
// .add( masterNewFractionNameAdder, 110, 110, 110 )//
.add(115, 115, 115)//
);
myVerticalFraction.add(1, 1, 1) // top margin
.add(jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING)//.BASELINE)//
// .add( masterNewFractionNameAdder )//
);
// row 3: column-title labels; widths must mirror the data rows below
myHorizFraction.add(jPanel2Layout.createSequentialGroup()//
.add(8, 8, 8) // left margin
.add(headDelete, 50, 50, 50)//
.add(6, 6, 6)//
.add(headFraction, 100, 100, 100)//
.add(15, 15, 15)//
.add(headConcU, 90, 90, 90)//
.add(15, 15, 15)//
.add(headR206_204m, 90, 90, 90)//
.add(15, 15, 15)//
.add(headRTh_Usample, 90, 90, 90)//
.add(15, 15, 15)//
.add(headR207_206r, 90, 90, 90)//
.add(5, 5, 5)//
.add(headR207_206r1SigmaPct, 60, 60, 60)//
.add(15, 15, 15)//
.add(headR207_235r, 90, 90, 90)//
.add(5, 5, 5)//
.add(headR207_235r1SigmaPct, 60, 60, 60)//
.add(15, 15, 15)//
.add(headR206_238r, 90, 90, 90)//
.add(5, 5, 5)//
.add(headR206_238r1SigmaPct, 60, 60, 60)//
.add(15, 15, 15)//
.add(headRhoR206_238r__r207_235r, 90, 90, 90)//
.add(15, 15, 15)//
.add(headDate206_238r, 90, 90, 90)//
.add(5, 5, 5)//
.add(headDate206_238r1SigmaAbs, 60, 60, 60)//
.add(15, 15, 15)//
.add(headDate207_235r, 90, 90, 90)//
.add(5, 5, 5)//
.add(headDate207_235r1SigmaAbs, 60, 60, 60)//
.add(15, 15, 15)//
.add(headDate207_206r, 90, 90, 90)//
.add(5, 5, 5)//
.add(headDate207_206r1SigmaAbs, 60, 60, 60)//
.add(15, 15, 15)//
);
myVerticalFraction//
.add(10, 10, 10) // top margin
.add(jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING)//.BASELINE)//
.add(headDelete)//
.add(headFraction)//
.add(headConcU)//
.add(headR206_204m)//
.add(headRTh_Usample)//
.add(headR207_206r)//
.add(headR207_206r1SigmaPct)//
.add(headR207_235r)//
.add(headR207_235r1SigmaPct)//
.add(headR206_238r)//
.add(headR206_238r1SigmaPct)//
.add(headRhoR206_238r__r207_235r)//
.add(headDate206_238r)//
.add(headDate206_238r1SigmaAbs)//
.add(headDate207_235r)//
.add(headDate207_235r1SigmaAbs)//
.add(headDate207_206r)//
.add(headDate207_206r1SigmaAbs)//
)//
.add(2, 2, 2);
// stop delete when only one fraction
fractionDeleteButtons.get(0).setEnabled(fractionDeleteButtons.size() != 1);
// data rows: one per fraction, indexing the parallel per-column lists
for (int f = 0; f
< fractionDeleteButtons.size(); f++) {
myHorizFraction.add(jPanel2Layout.createSequentialGroup()//
.add(4, 4, 4) // left-hand margin
.add(fractionDeleteButtons.get(f), 50, 50, 50) //
.add(3, 3, 3)//
.add(fractionEditButtons.get(f), 100, 100, 100) //
.add(5, 5, 5)//
.add(fractionConcU_Text.get(f), 60, 60, 60)//
.add(fractionConcUPPM.get(f))//
.add(10, 10, 10)//
.add(fractionR206_204m_Text.get(f), 100, 100, 100) //
.add(5, 5, 5)//
.add(fractionRTh_Usample_Text.get(f), 100, 100, 100) //
.add(10, 10, 10)//
.add(fractionR207_206r_Text.get(f), 100, 100, 100) //
.add(5, 5, 5)//
.add(fractionR207_206r2SigmaPct_Text.get(f), 60, 60, 60)//
.add(5, 5, 5)//
.add(fractionR207_235r_Text.get(f), 100, 100, 100) //
.add(5, 5, 5)//
.add(fractionR207_235r2SigmaPct_Text.get(f), 60, 60, 60)//
.add(5, 5, 5)//
.add(fractionR206_238r_Text.get(f), 100, 100, 100) //
.add(5, 5, 5)//
.add(fractionR206_238r2SigmaPct_Text.get(f), 60, 60, 60)//
.add(5, 5, 5)//
.add(fractionRhoR206_238r__r207_235r_Text.get(f), 100, 100, 100) //
.add(5, 5, 5)//
.add(fractionDate206_238r_Text.get(f), 100, 100, 100) //
.add(5, 5, 5)//
.add(fractionDate206_238r2SigmaAbs_Text.get(f), 60, 60, 60)//
.add(5, 5, 5)//
.add(fractionDate207_235r_Text.get(f), 100, 100, 100) //
.add(5, 5, 5)//
.add(fractionDate207_235r2SigmaAbs_Text.get(f), 60, 60, 60)//
.add(5, 5, 5)//
.add(fractionDate207_206r_Text.get(f), 100, 100, 100) //
.add(5, 5, 5)//
.add(fractionDate207_206r2SigmaAbs_Text.get(f), 60, 60, 60)//
.add(5, 5, 5)//
);
myVerticalFraction//
.add(jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING)//
.add(fractionDeleteButtons.get(f), 22, 22, 22)//
.add(fractionEditButtons.get(f), 22, 22, 22)//
.add(fractionConcU_Text.get(f), 22, 22, 22)//
.add(fractionConcUPPM.get(f))//
.add(fractionR206_204m_Text.get(f), 22, 22, 22)//
.add(fractionRTh_Usample_Text.get(f), 22, 22, 22)//
.add(fractionR206_238r_Text.get(f), 22, 22, 22)//
.add(fractionR206_238r2SigmaPct_Text.get(f), 22, 22, 22)//
.add(fractionR207_235r_Text.get(f), 22, 22, 22)//
.add(fractionR207_235r2SigmaPct_Text.get(f), 22, 22, 22)//
.add(fractionR207_206r_Text.get(f), 22, 22, 22)//
.add(fractionR207_206r2SigmaPct_Text.get(f), 22, 22, 22)//
.add(fractionRhoR206_238r__r207_235r_Text.get(f), 22, 22, 22)//
.add(fractionDate206_238r_Text.get(f), 22, 22, 22)//
.add(fractionDate206_238r2SigmaAbs_Text.get(f), 22, 22, 22)//
.add(fractionDate207_235r_Text.get(f), 22, 22, 22)//
.add(fractionDate207_235r2SigmaAbs_Text.get(f), 22, 22, 22)//
.add(fractionDate207_206r_Text.get(f), 22, 22, 22)//
.add(fractionDate207_206r2SigmaAbs_Text.get(f), 22, 22, 22)//
);
}
// install the assembled groups on the panel's layout
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING).add(jPanel2Layout.createSequentialGroup().add(myHorizFraction)));
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING).add(myVerticalFraction));
}
/**
 * Refreshes the dialog's displayed values from the aliquot model.
 * Per-fraction rows are populated when they are built, so this level only
 * needs to delegate to the shared (aliquot-level) refresh.
 */
@Override
protected void showSavedDataII() {
    // aliquot-level fields are handled by the superclass-style refresh
    showSavedDataI();
}
/**
 * Fills row {@code row} of the fast-edit grid with {@code tempFrac}'s data:
 * edit-button caption, composition, measured ratio, radiogenic isotopic
 * ratios with 1-sigma percent uncertainties, and isotopic dates with
 * 1-sigma absolute uncertainties.
 *
 * @param tempFrac the fraction whose values are displayed
 * @param row      zero-based row index into the per-column component lists
 */
protected void updateFractionRow(ETFractionInterface tempFrac, int row) {
    ((AbstractButton) fractionEditButtons.get(row)).setText(tempFrac.getFractionID());

    // Composition: concU is scaled up by 10^6 for display (ppm) -- the model
    // presumably stores it as a proportion; confirm against the save path
    setTableCellText(fractionConcU_Text.get(row),
            formatTableValue(tempFrac.getCompositionalMeasureByName("concU").getValue(), 6));
    setTableCellText(fractionRTh_Usample_Text.get(row),
            formatTableValue(tempFrac.getCompositionalMeasureByName("rTh_Usample").getValue(), 0));

    // measured ratio
    setTableCellText(fractionR206_204m_Text.get(row),
            formatTableValue(tempFrac.getMeasuredRatioByName(MeasuredRatios.r206_204m.getName()).getValue(), 0));

    // radiogenic isotopic ratios (value, then 1-sigma percent)
    setTableCellText(fractionR206_238r_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeRatioByName("r206_238r").getValue(), 0));
    setTableCellText(fractionR206_238r2SigmaPct_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeRatioByName("r206_238r").getOneSigmaPct(), 0));
    setTableCellText(fractionR207_235r_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeRatioByName("r207_235r").getValue(), 0));
    setTableCellText(fractionR207_235r2SigmaPct_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeRatioByName("r207_235r").getOneSigmaPct(), 0));
    setTableCellText(fractionR207_206r_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeRatioByName("r207_206r").getValue(), 0));
    setTableCellText(fractionR207_206r2SigmaPct_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeRatioByName("r207_206r").getOneSigmaPct(), 0));
    setTableCellText(fractionRhoR206_238r__r207_235r_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeRatioByName("rhoR206_238r__r207_235r").getValue(), 0));

    // Isotopic Dates: scaled down by 10^6 for display -- presumably years
    // converted to Ma, matching the "Isotopic Dates Ma" column banner
    setTableCellText(fractionDate206_238r_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeDateByName(RadDates.age206_238r).getValue(), -6));
    setTableCellText(fractionDate206_238r2SigmaAbs_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeDateByName(RadDates.age206_238r).getOneSigmaAbs(), -6));
    setTableCellText(fractionDate207_235r_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeDateByName(RadDates.age207_235r).getValue(), -6));
    setTableCellText(fractionDate207_235r2SigmaAbs_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeDateByName(RadDates.age207_235r).getOneSigmaAbs(), -6));
    setTableCellText(fractionDate207_206r_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeDateByName(RadDates.age207_206r).getValue(), -6));
    setTableCellText(fractionDate207_206r2SigmaAbs_Text.get(row),
            formatTableValue(tempFrac.getRadiogenicIsotopeDateByName(RadDates.age207_206r).getOneSigmaAbs(), -6));
}

/**
 * Formats a value for display in the table: shifts the decimal point by
 * {@code decimalShift} places (positive = right, negative = left), rounds
 * half-up to the standard display scale, and renders without scientific
 * notation.
 *
 * @param value        the value to format
 * @param decimalShift power-of-ten shift applied before rounding
 * @return the plain decimal string for the table cell
 */
private String formatTableValue(BigDecimal value, int decimalShift) {
    return value.movePointRight(decimalShift)
            .setScale(ReduxConstants.DEFAULT_CONSTANTS_SCALE, RoundingMode.HALF_UP)
            .toPlainString();
}

/**
 * Sets a table cell's text and resets its caret to the start so the most
 * significant digits remain visible in the narrow field.
 *
 * @param cell a component castable to {@link JTextComponent}
 * @param text the text to display
 */
private void setTableCellText(Object cell, String text) {
    JTextComponent box = (JTextComponent) cell;
    box.setText(text);
    box.setCaretPosition(0);
}
/**
 * Drops one row's components from every parallel per-column list, then
 * renumbers the delete-button listeners so each still carries the correct
 * row index after the removal shifts the rows above it down.
 *
 * @param row zero-based index of the row to remove
 */
protected void removeFractionRow(int row) {
    fractionDeleteButtons.remove(row);
    fractionEditButtons.remove(row);
    fractionConcU_Text.remove(row);
    fractionR206_204m_Text.remove(row);
    fractionRTh_Usample_Text.remove(row);
    fractionR206_238r_Text.remove(row);
    fractionR206_238r2SigmaPct_Text.remove(row);
    fractionR207_235r_Text.remove(row);
    fractionR207_235r2SigmaPct_Text.remove(row);
    fractionR207_206r_Text.remove(row);
    fractionR207_206r2SigmaPct_Text.remove(row);
    fractionRhoR206_238r__r207_235r_Text.remove(row);
    fractionDate206_238r_Text.remove(row);
    fractionDate206_238r2SigmaAbs_Text.remove(row);
    fractionDate207_235r_Text.remove(row);
    fractionDate207_235r2SigmaAbs_Text.remove(row);
    fractionDate207_206r_Text.remove(row);
    fractionDate207_206r2SigmaAbs_Text.remove(row);

    // re-wire every delete button with a listener holding its new row index
    for (int index = 0; index < fractionDeleteButtons.size(); index++) {
        JButton deleteButton = (JButton) fractionDeleteButtons.get(index);
        ETFractionInterface listedFraction
                = ((DeleteFractionListener) deleteButton.getActionListeners()[0]).getFraction();
        deleteButton.removeActionListener(deleteButton.getActionListeners()[0]);
        deleteButton.addActionListener(new DeleteFractionListener(listedFraction, index));
    }
}
/**
 * Appends a brand-new fraction row at the bottom of the table, fills it with
 * the fraction's data, refreshes the keyboard-navigation maps on the
 * previously-last row (its "next row" wiring changes), and rebuilds the
 * display panel.
 *
 * @param fraction the newly created fraction to display
 */
private void addNewFractionRow(ETFractionInterface fraction) {
    int row = fractionDeleteButtons.size();
    addFractionRow(fraction, row, row + 1);
    updateFractionRow(fraction, row);

    // BUGFIX: guard against the first row being added to an empty table --
    // with no previous row, get(row - 1) would throw IndexOutOfBoundsException
    // and there are no existing key maps to update anyway.
    if (row > 0) {
        // update the keystroke actionlisteners for previous row in table
        modifyComponentKeyMapForTable(fractionEditButtons.get(row - 1), fractionEditButtons, row + 1);
        modifyComponentKeyMapForTable(fractionConcU_Text.get(row - 1), fractionConcU_Text, row + 1);
        modifyComponentKeyMapForTable(fractionR206_204m_Text.get(row - 1), fractionR206_204m_Text, row + 1);
        modifyComponentKeyMapForTable(fractionRTh_Usample_Text.get(row - 1), fractionRTh_Usample_Text, row + 1);
        modifyComponentKeyMapForTable(fractionR206_238r_Text.get(row - 1), fractionR206_238r_Text, row + 1);
        modifyComponentKeyMapForTable(fractionR206_238r2SigmaPct_Text.get(row - 1), fractionR206_238r2SigmaPct_Text, row + 1);
        modifyComponentKeyMapForTable(fractionR207_235r_Text.get(row - 1), fractionR207_235r_Text, row + 1);
        modifyComponentKeyMapForTable(fractionR207_235r2SigmaPct_Text.get(row - 1), fractionR207_235r2SigmaPct_Text, row + 1);
        modifyComponentKeyMapForTable(fractionR207_206r_Text.get(row - 1), fractionR207_206r_Text, row + 1);
        modifyComponentKeyMapForTable(fractionR207_206r2SigmaPct_Text.get(row - 1), fractionR207_206r2SigmaPct_Text, row + 1);
        modifyComponentKeyMapForTable(fractionRhoR206_238r__r207_235r_Text.get(row - 1), fractionRhoR206_238r__r207_235r_Text, row + 1);
        modifyComponentKeyMapForTable(fractionDate206_238r_Text.get(row - 1), fractionDate206_238r_Text, row + 1);
        modifyComponentKeyMapForTable(fractionDate206_238r2SigmaAbs_Text.get(row - 1), fractionDate206_238r2SigmaAbs_Text, row + 1);
        modifyComponentKeyMapForTable(fractionDate207_235r_Text.get(row - 1), fractionDate207_235r_Text, row + 1);
        modifyComponentKeyMapForTable(fractionDate207_235r2SigmaAbs_Text.get(row - 1), fractionDate207_235r2SigmaAbs_Text, row + 1);
        modifyComponentKeyMapForTable(fractionDate207_206r_Text.get(row - 1), fractionDate207_206r_Text, row + 1);
        modifyComponentKeyMapForTable(fractionDate207_206r2SigmaAbs_Text.get(row - 1), fractionDate207_206r2SigmaAbs_Text, row + 1);
    }
    buildFastEditDisplayPanel();
}
/**
 * Opens the fraction editor for a row's fraction, first forcing a save of
 * the aliquot if any fraction additions or deletions are pending.
 */
private class EditFractionListener implements ActionListener {

    private final int row;
    private final ETFractionInterface fraction;

    public EditFractionListener(ETFractionInterface fraction, int row) {
        this.row = row;
        this.fraction = fraction;
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        // prompt for save if aliquot edited
        boolean proceed = true;
        if (deletedFractions.size() + addedFractions.size() > 0) {
            // BUGFIX: the 4-arg showConfirmDialog overload treats its last
            // argument as the OPTION type, so the original call passed
            // WARNING_MESSAGE (2) where an option constant belongs; it only
            // worked because WARNING_MESSAGE happens to equal
            // OK_CANCEL_OPTION. Use the 5-arg overload with both explicit.
            int result
                    = JOptionPane.showConfirmDialog(
                            null,
                            new String[]{"You must first save the Aliquot ... proceed?"},
                            "ET Redux Warning",
                            JOptionPane.OK_CANCEL_OPTION,
                            JOptionPane.WARNING_MESSAGE);
            proceed = (result == JOptionPane.OK_OPTION);
        }

        if (proceed) {
            saveAliquot();
            saveAliquotFraction(fraction);
            // 8 selects a specific editor pane -- presumably the legacy
            // data tab; confirm against parent.editFraction
            parent.editFraction(fraction, 8);
            updateFractionRow(
                    fraction,
                    getMyAliquot().getAliquotFractions().indexOf(fraction));
        }
    }
}
/**
 * Queues a fraction for deletion (or withdraws a pending addition) and
 * rebuilds the fast-edit grid without that row. Actual model removal
 * happens later in saveAliquot().
 */
private class DeleteFractionListener implements ActionListener {

    private int row;
    private ETFractionInterface fraction;

    public DeleteFractionListener(ETFractionInterface fraction, int row) {
        this.row = row;
        this.fraction = fraction;
    }

    public ETFractionInterface getFraction() {
        return fraction;
    }

    public void actionPerformed(ActionEvent e) {
        // rows beyond the saved fraction count are pending additions
        boolean isSavedFraction = row < getMyAliquot().getAliquotFractions().size();
        if (isSavedFraction) {
            // existing fraction: queue it for removal at save time
            deletedFractions.add(fraction);
        } else {
            // never saved: just withdraw the pending addition
            addedFractions.remove(fraction);
        }
        removeFractionRow(row);
        buildFastEditDisplayPanel();
    }
}
/**
 * Pushes every edit in this dialog back into the aliquot model: general
 * info, selected mineral standards, queued fraction deletions/additions,
 * and each fraction's field values; finally serializes the sample to its
 * redux file on disk.
 */
private void saveAliquot() {
    // general info
    getMyAliquot().setAliquotName(aliquotName_text.getText());
    this.setTitle("Aliquot # " + getMyAliquot().getAliquotNumber() + " <> " + getMyAliquot().getAliquotName());
    getMyAliquot().setAnalystName(analystName_text.getText());
    ((UPbReduxAliquot) getMyAliquot()).getMyReduxLabData().setAnalystName(analystName_text.getText());
    getMyAliquot().setAliquotInstrumentalMethod(
            instrumentalMethod_jcombo.getSelectedItem().toString());
    getMyAliquot().setAliquotInstrumentalMethodReference(instMethodRef_text.getText());
    getMyAliquot().setAliquotReference(reference_text.getText());
    getMyAliquot().setAliquotComment(comment_textArea.getText());

    // calibration and mineral standards: rebuild the set from checked boxes
    getMyAliquot().getMineralStandardModels().clear();
    for (JComponent cb : mineralStandardsCheckBoxes) {
        if (((JCheckBox) cb).isSelected()) {
            try {
                getMyAliquot().getMineralStandardModels().add(
                        ReduxLabData.getInstance().getAMineralStandardModel(((JCheckBox) cb).getText()));
            } catch (BadLabDataException ex) {
                new ETWarningDialog(ex).setVisible(true);
            }
        }
    }

    getMyAliquot().setKeyWordsCSV(keyWordsCSV_text.getText());

    // apply queued deletions to both sample and aliquot
    for (int index = 0; index < deletedFractions.size(); index++) {
        getSample().removeUPbReduxFraction(deletedFractions.get(index));
        getMyAliquot().getAliquotFractions().remove(deletedFractions.get(index));
    }
    deletedFractions.clear();

    // apply queued additions to both sample and aliquot
    for (int index = 0; index < addedFractions.size(); index++) {
        getSample().addFraction((UPbFraction) addedFractions.get(index));
        getMyAliquot().getAliquotFractions().add(addedFractions.get(index));
    }
    addedFractions.clear();

    // persist each fraction's edited field values
    for (ETFractionInterface fraction : getMyAliquot().getAliquotFractions()) {
        saveAliquotFraction(fraction);
    }

    // write the whole sample back to its serialized redux file
    SampleInterface.saveSampleAsSerializedReduxFile(sample);

    System.out.println("**************** PRE-PUBLISH CHECKLIST FOR ALIQUOT");
}
/**
 * Writes one fraction's edited text-field values back into the fraction
 * model. Parsing is best-effort: each field is parsed independently and a
 * field whose text is not a valid number is silently skipped, leaving the
 * stored value unchanged.
 *
 * @param tempFrac the fraction to update; its row is located by index in
 *                 the aliquot's fraction list
 * @throws NumberFormatException declared for callers, though in practice
 *                               parse failures are caught per-field below
 */
protected void saveAliquotFraction(ETFractionInterface tempFrac)
throws NumberFormatException {
// row index into the parallel per-column component lists
int row = getMyAliquot().getAliquotFractions().indexOf(tempFrac);
// april 2010
// Composition
try {
// displayed in ppm; shifted down by 10^6 for storage (inverse of display)
tempFrac.getCompositionalMeasureByName("concU")//
.setValue(new BigDecimal(((JTextField) fractionConcU_Text.get(row)).getText(), ReduxConstants.mathContext15).//
movePointLeft(6));
} catch (NumberFormatException e) {
// best-effort: unparseable text leaves the stored value unchanged
}
try {
tempFrac.getCompositionalMeasureByName("rTh_Usample")//
.setValue(new BigDecimal(((JTextField) fractionRTh_Usample_Text.get(row)).getText(), ReduxConstants.mathContext15));
} catch (NumberFormatException e) {
// best-effort: unparseable text leaves the stored value unchanged
}
// measured ratios
try {
tempFrac.getMeasuredRatioByName(MeasuredRatios.r206_204m.getName())//
.setValue(new BigDecimal(((JTextField) fractionR206_204m_Text.get(row)).getText(), ReduxConstants.mathContext15));
} catch (NumberFormatException e) {
// best-effort: unparseable text leaves the stored value unchanged
}
// radiogenic isotopic ratios: each value is stored together with its
// 1-sigma uncertainty, entered as a percent and converted to absolute
ValueModel ratio = null;
BigDecimal oneSigmaPct = null;
try {
tempFrac.getRadiogenicIsotopeRatioByName("r206_238r")//
.setValue(new BigDecimal(((JTextComponent) fractionR206_238r_Text.get(row)).getText(), ReduxConstants.mathContext15));
ratio = tempFrac.getRadiogenicIsotopeRatioByName("r206_238r");
oneSigmaPct = new BigDecimal(((JTextComponent) fractionR206_238r2SigmaPct_Text.get(row)).getText(), ReduxConstants.mathContext15);
tempFrac.getRadiogenicIsotopeRatioByName("r206_238r")//
.setOneSigma(ValueModel.convertOneSigmaPctToAbsIfRequired(ratio, oneSigmaPct));
} catch (NumberFormatException e) {
// best-effort: a bad value or sigma leaves this ratio unchanged
}
try {
tempFrac.getRadiogenicIsotopeRatioByName("r207_235r")//
.setValue(new BigDecimal(((JTextComponent) fractionR207_235r_Text.get(row)).getText(), ReduxConstants.mathContext15));
ratio = tempFrac.getRadiogenicIsotopeRatioByName("r207_235r");
oneSigmaPct = new BigDecimal(((JTextComponent) fractionR207_235r2SigmaPct_Text.get(row)).getText(), ReduxConstants.mathContext15);
tempFrac.getRadiogenicIsotopeRatioByName("r207_235r")//
.setOneSigma(ValueModel.convertOneSigmaPctToAbsIfRequired(ratio, oneSigmaPct));
} catch (NumberFormatException e) {
// best-effort: a bad value or sigma leaves this ratio unchanged
}
try {
tempFrac.getRadiogenicIsotopeRatioByName("r207_206r").//
setValue(new BigDecimal(((JTextComponent) fractionR207_206r_Text.get(row)).getText(), ReduxConstants.mathContext15));
ratio = tempFrac.getRadiogenicIsotopeRatioByName("r207_206r");
oneSigmaPct = new BigDecimal(((JTextComponent) fractionR207_206r2SigmaPct_Text.get(row)).getText(), ReduxConstants.mathContext15);
tempFrac.getRadiogenicIsotopeRatioByName("r207_206r")//
.setOneSigma(ValueModel.convertOneSigmaPctToAbsIfRequired(ratio, oneSigmaPct));
} catch (NumberFormatException e) {
// best-effort: a bad value or sigma leaves this ratio unchanged
}
try {
tempFrac.getRadiogenicIsotopeRatioByName("rhoR206_238r__r207_235r")//
.setValue(new BigDecimal(((JTextField) fractionRhoR206_238r__r207_235r_Text.get(row)).getText(), ReduxConstants.mathContext15));
} catch (NumberFormatException e) {
// best-effort: unparseable text leaves the stored value unchanged
}
// Isotopic Dates: displayed scaled down by 10^6 (Ma banner), so shift
// back up by 10^6 for storage
try {
tempFrac.getRadiogenicIsotopeDateByName(RadDates.age206_238r)//
.setValue(new BigDecimal(((JTextComponent) fractionDate206_238r_Text.get(row)).getText(), ReduxConstants.mathContext15).//
movePointRight(6));
tempFrac.getRadiogenicIsotopeDateByName(RadDates.age206_238r)//
.setOneSigma(new BigDecimal(((JTextComponent) fractionDate206_238r2SigmaAbs_Text.get(row)).getText(), ReduxConstants.mathContext15).//
movePointRight(6));
} catch (NumberFormatException e) {
// best-effort: a bad value or sigma leaves this date unchanged
}
try {
tempFrac.getRadiogenicIsotopeDateByName(RadDates.age207_235r)//
.setValue(new BigDecimal(((JTextComponent) fractionDate207_235r_Text.get(row)).getText(), ReduxConstants.mathContext15).//
movePointRight(6));
tempFrac.getRadiogenicIsotopeDateByName(RadDates.age207_235r)//
.setOneSigma(new BigDecimal(((JTextComponent) fractionDate207_235r2SigmaAbs_Text.get(row)).getText(), ReduxConstants.mathContext15).//
movePointRight(6));
} catch (NumberFormatException e) {
// best-effort: a bad value or sigma leaves this date unchanged
}
try {
tempFrac.getRadiogenicIsotopeDateByName(RadDates.age207_206r)//
.setValue(new BigDecimal(((JTextComponent) fractionDate207_206r_Text.get(row)).getText(), ReduxConstants.mathContext15).//
movePointRight(6));
tempFrac.getRadiogenicIsotopeDateByName(RadDates.age207_206r)//
.setOneSigma(new BigDecimal(((JTextComponent) fractionDate207_206r2SigmaAbs_Text.get(row)).getText(), ReduxConstants.mathContext15).//
movePointRight(6));
} catch (NumberFormatException e) {
// best-effort: a bad value or sigma leaves this date unchanged
}
// better safe than sorry for now
tempFrac.setChanged(true);
}
/**
 * Handles the Restore button: re-initializes the aliquot view via
 * {@code initAliquot()}, presumably reloading the last-saved aliquot data
 * in place of any unsaved edits — confirm against initAliquot().
 *
 * @param evt the triggering button event (unused)
 */
protected void restore_buttonActionPerformed(java.awt.event.ActionEvent evt) {
    initAliquot();//showSavedDataII();
}
/**
 * Handles the Save-and-Close button: persists the aliquot, then closes
 * this view.
 *
 * @param evt the triggering button event (unused)
 */
protected void saveAndClose_buttonActionPerformed(java.awt.event.ActionEvent evt) {
    saveAliquot();
    close();
}
/**
 * Handles the Save button: persists the aliquot without closing the view.
 *
 * @param evt the triggering button event (unused)
 */
protected void save_buttonActionPerformed(java.awt.event.ActionEvent evt) {
    saveAliquot();
}
/**
 * Handles the Export-to-XML button: saves the aliquot first so the export
 * reflects the latest edits, then exports it to XML.
 *
 * @param evt the triggering button event (unused)
 */
protected void exportXMLAliquot_buttonActionPerformed(java.awt.event.ActionEvent evt) {
    saveAliquot();
    exportAliquotToXML();
}
/**
 * Handles the Save-and-Preview button: saves the aliquot, serializes it to
 * a temporary XML file, then renders that file as HTML for preview.
 *
 * NOTE(review): "TempAliquot.xml" is a relative path, so the temp file is
 * written to the current working directory and is not cleaned up here —
 * confirm this is intended.
 *
 * @param evt the triggering button event (unused)
 */
protected void saveAndPreviewXMLAliquotAsHTML_buttonActionPerformed(java.awt.event.ActionEvent evt) {
    saveAliquot();
    // save off the Aliquot as a temp file
    String tempAliquotXML = "TempAliquot.xml";
    ((XMLSerializationI) myAliquot).serializeXMLObject(tempAliquotXML);
    viewXMLAliquotAsHTML(tempAliquotXML);
}
/**
 * Handles the Save-and-Upload button: saves the aliquot and uploads it to
 * Geochron as a zip. Upload failures are shown in a warning dialog rather
 * than propagated to the caller.
 *
 * @param evt the triggering button event (unused)
 */
protected void saveAndUploadAliquotToGeochron_buttonActionPerformed(java.awt.event.ActionEvent evt) {
    saveAliquot();
    try {
        uploadAliquotToGeochronZip();
    } catch (ETException ex) {
        new ETWarningDialog(ex).setVisible(true);
    }
}
}
| |
package org.ovirt.engine.ui.webadmin;
import org.ovirt.engine.ui.common.CommonApplicationResources;
import com.google.gwt.resources.client.ImageResource;
/**
 * GWT {@link ImageResource} bundle for the WebAdmin application.
 *
 * Each accessor is bound by {@code @Source} to an image file on the
 * classpath; the GWT compiler generates the implementation. Extends
 * {@link CommonApplicationResources} with WebAdmin-specific images.
 */
public interface ApplicationResources extends CommonApplicationResources {

    @Source("images/bg.png")
    ImageResource bgImage();

    @Source("images/bookmark.gif")
    ImageResource bookmarkImage();

    // Search-bar "clear" button in its three visual states
    @Source("images/but_erase_search_bar_text_normal.png")
    ImageResource clearSearchImage();

    @Source("images/but_erase_search_bar_text_hover.png")
    ImageResource clearSearchImage_mouseOver();

    @Source("images/but_erase_search_bar_text_mousedown.png")
    ImageResource clearSearchImage_mouseDown();

    @Source("images/cluster.png")
    ImageResource clusterImage();

    @Source("images/clusters.png")
    ImageResource clustersImage();

    @Source("images/console_disabled.png")
    ImageResource consoleDisabledImage();

    @Source("images/console.png")
    ImageResource consoleImage();

    @Source("images/datacenter.png")
    ImageResource dataCenterImage();

    @Source("images/desktop.gif")
    ImageResource desktopImage();

    @Override
    @Source("images/disk.png")
    ImageResource diskImage();

    @Source("images/error.gif")
    ImageResource errorImage();

    @Source("images/events_gray.png")
    ImageResource eventsGrayImage();

    @Source("images/events.png")
    ImageResource eventsImage();

    // "Guide Me" button sprites (medium size, three states) and small icons
    @Source("images/btn_guide_hover.png")
    ImageResource guideHoverMediumImage();

    @Source("images/guide.png")
    ImageResource guideImage();

    @Source("images/btn_guide.png")
    ImageResource guideMediumImage();

    @Source("images/btn_guide_pressed.png")
    ImageResource guidePressedMediumImage();

    @Source("images/icn_guide_disabled.png")
    ImageResource guideSmallDisabledImage();

    @Source("images/icn_guide.png")
    ImageResource guideSmallImage();

    // Note: intentionally reuses the same files as bookmarkImage()/searchButtonImage()
    @Source("images/bookmark.gif")
    ImageResource headerSearchBookmarkImage();

    @Source("images/search_button.png")
    ImageResource headerSearchButtonImage();

    @Source("images/host_error.gif")
    ImageResource hostErrorImage();

    @Source("images/host.png")
    ImageResource hostImage();

    @Source("images/host_installing.png")
    ImageResource hostInstallingImage();

    @Source("images/hosts.png")
    ImageResource hostsImage();

    @Source("images/Lock.png")
    ImageResource lockImage();

    /* Login popup resources */
    @Source("images/login/login_page_header_image.png")
    ImageResource loginPopupHeaderImage();

    @Source("images/login/login_page_header_logo.png")
    ImageResource loginPopupHeaderLogoImage();

    @Source("images/login/login_page_header_title.png")
    ImageResource loginPopupHeaderTitleImage();

    @Source("images/logo.png")
    ImageResource logoImage();

    @Source("images/host_maintenance.png")
    ImageResource maintenanceImage();

    @Source("images/many_desktops.png")
    ImageResource manyDesktopsImage();

    @Source("images/minus.png")
    ImageResource minusImage();

    @Source("images/monitor.png")
    ImageResource monitorImage();

    @Source("images/nonoperational.png")
    ImageResource nonOperationalImage();

    @Source("images/pause.gif")
    ImageResource pauseImage();

    @Source("images/icn_pause_disabled.png")
    ImageResource pauseVmDisabledImage();

    @Source("images/icn_pause.png")
    ImageResource pauseVmImage();

    @Source("images/play.gif")
    ImageResource playImage();

    @Source("images/plus_disabled.png")
    ImageResource plusDisabledImage();

    @Source("images/plus.png")
    ImageResource plusImage();

    // @Source("images/question_mark.png")
    // ImageResource questionMarkImage();

    @Source("images/log_warning.gif")
    ImageResource alertImage();

    @Source("images/tag_locked.png")
    ImageResource readOnlyTagImage();

    @Source("images/icn_play_disabled.png")
    ImageResource runVmDisabledImage();

    @Source("images/icn_play.png")
    ImageResource runVmImage();

    @Source("images/search_button.png")
    ImageResource searchButtonImage();

    @Source("images/server.png")
    ImageResource serverImage();

    @Source("images/split.png")
    ImageResource splitImage();

    @Source("images/split-rotate.png")
    ImageResource splitRotateImage();

    @Source("images/stop.gif")
    ImageResource stopImage();

    @Source("images/icn_stop_disabled.png")
    ImageResource stopVmDisabledImage();

    @Source("images/icn_stop.png")
    ImageResource stopVmImage();

    @Source("images/storages.png")
    ImageResource storagesImage();

    @Source("images/system.png")
    ImageResource systemImage();

    // Tag-related icons
    @Source("images/tag.png")
    ImageResource tagImage();

    @Source("images/icn_tag_link_disabled.gif")
    ImageResource tagLinkDisabledImage();

    @Source("images/icn_tag_link.png")
    ImageResource tagLinkImage();

    @Source("images/tag_pin_green.png")
    ImageResource tagPinGreenImage();

    @Source("images/tag_pin.png")
    ImageResource tagPinImage();

    @Source("images/torn_chain.png")
    ImageResource tornChainImage();

    @Source("images/unconfigured.png")
    ImageResource unconfiguredImage();

    @Source("images/upalert.png")
    ImageResource upalertImage();

    @Source("images/user_group.png")
    ImageResource userGroupImage();

    @Source("images/vm.png")
    ImageResource vmImage();

    @Source("images/window_bg.png")
    ImageResource windowBgImage();

    @Source("images/window_header.png")
    ImageResource windowHeaderImage();

    @Source("images/wrench.png")
    ImageResource wrenchImage();

    @Source("images/plusButton.png")
    ImageResource plusButtonImage();

    @Source("images/enlarge_bottom_panel.png")
    ImageResource enlargeFooterPanelImage();

    // Footer button sprites: up (normal) and down (pressed) states,
    // split into start/stretch/end slices
    @Source("images/button/footer_button_start.png")
    ImageResource footerButtonUpStart();

    @Source("images/button/footer_button_stretch.png")
    ImageResource footerButtonUpStretch();

    @Source("images/button/footer_button_end.png")
    ImageResource footerButtonUpEnd();

    @Source("images/button/footer_button_Down_start.png")
    ImageResource footerButtonDownStart();

    @Source("images/button/footer_button_Down_stretch.png")
    ImageResource footerButtonDownStretch();

    @Source("images/button/footer_button_Down_end.png")
    ImageResource footerButtonDownEnd();

    @Source("images/icon_task.png")
    ImageResource iconTask();

    @Source("images/icon_audit.png")
    ImageResource iconAudit();

    @Source("images/icon_disable.png")
    ImageResource iconDisable();

    @Source("images/icon_enforce.png")
    ImageResource iconEnforce();

    @Source("images/icon_collapse_all.png")
    ImageResource collapseAllIcon();

    // Network setup panel images
    @Source("images/network/arrow_left.png")
    ImageResource arrowLeft();

    @Source("images/network/arrow_right_bottom.png")
    ImageResource arrowRightBottom();

    @Source("images/network/arrow_right_middle.png")
    ImageResource arrowRightMiddle();

    @Source("images/network/arrow_right_one.png")
    ImageResource arrowRightOne();

    @Source("images/network/arrow_right_top.png")
    ImageResource arrowRightTop();

    @Source("images/network/item_draggable.png")
    ImageResource itemDraggable();

    @Source("images/network/mgmt_net.png")
    ImageResource mgmtNetwork();

    @Source("images/network/network_monitor.png")
    ImageResource networkMonitor();

    @Source("images/network/network_vm.png")
    ImageResource networkVm();

    @Source("images/network/nic_down.png")
    ImageResource nicDown();

    @Source("images/network/nic_up.png")
    ImageResource nicUp();

    @Source("images/network/edit_hover.png")
    ImageResource editHover();

    @Source("images/network/edit_mousedown.png")
    ImageResource editMouseDown();

    @Source("images/network/nic_icon.png")
    ImageResource nicIcon();

    @Source("images/network/bond.png")
    ImageResource bond();

    @Source("images/network/network_not_sync.png")
    ImageResource networkNotSyncImage();

    @Source("images/network/but_erase_net_mousedown.png")
    ImageResource butEraseNetMousedown();

    @Source("images/network/but_erase_net_hover.png")
    ImageResource butEraseNetHover();

    // Gluster-specific images
    @Source("images/cluster_gluster.png")
    ImageResource glusterClusterImage();

    @Source("images/single_volume.png")
    ImageResource volumeImage();

    @Source("images/multiple_volumes.png")
    ImageResource volumesImage();
}
| |
/**
* @author marinapopova Sep 27, 2019
*/
package org.elasticsearch.kafka.indexer.jobs;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.MockConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.TopicPartition;
import org.elasticsearch.kafka.indexer.exception.ConsumerNonRecoverableException;
import org.elasticsearch.kafka.indexer.exception.ConsumerRecoverableException;
import org.elasticsearch.kafka.indexer.service.IBatchMessageProcessor;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
/**
 * Unit tests for {@link ConsumerWorker#processPoll()}.
 *
 * A Kafka {@link MockConsumer} is pre-loaded with a fixed set of records and
 * the {@link IBatchMessageProcessor} is mocked, so the tests can verify how
 * the worker advances and commits offsets under: successful processing,
 * per-message failures, per-message exceptions, a callback that declines the
 * commit, and recoverable/non-recoverable callback exceptions.
 */
public class ConsumerWorkerTest {

    private IBatchMessageProcessor mockedBatchMessageProcessor = Mockito.mock(IBatchMessageProcessor.class);
    private MockConsumer<String, String> mockedConsumer;
    private ConsumerWorker consumerWorker = new ConsumerWorker();
    // Beginning offset of the partition; test records start at startOffset + 1
    private Long startOffset = 1L;
    private String testTopic = "test-topic";
    private int partition = 0;
    private int consumerInstanceId = 1;
    private int numberOfRecords = 2;
    private TopicPartition topicPartition0 = new TopicPartition(testTopic, partition);
    private List<ConsumerRecord<String, String>> testRecords;

    @Before
    public void setUp() throws Exception {
        mockedConsumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST);
        mockedConsumer.assign(Arrays.asList(topicPartition0));
        HashMap<TopicPartition, Long> beginningOffsets = new HashMap<>();
        beginningOffsets.put(topicPartition0, startOffset);
        mockedConsumer.updateBeginningOffsets(beginningOffsets);
        testRecords = new LinkedList<>();
        for (int i = 1; i <= numberOfRecords; i++) {
            ConsumerRecord<String, String> consumerRecord = new ConsumerRecord<>(
                    testTopic, partition, startOffset + i, "test-key" + i, "test-message" + i);
            testRecords.add(consumerRecord);
            mockedConsumer.addRecord(consumerRecord);
        }
        consumerWorker.setConsumer(mockedConsumer);
        consumerWorker.setConsumerInstanceId(consumerInstanceId);
        consumerWorker.setKafkaTopic(testTopic);
        consumerWorker.setBatchMessageProcessor(mockedBatchMessageProcessor);
    }

    /** Stubs processMessage() to return the given result for every test record. */
    private void stubProcessMessageForAllRecords(boolean result) throws Exception {
        for (ConsumerRecord<String, String> consumerRecord : testRecords) {
            Mockito.when(mockedBatchMessageProcessor.processMessage(consumerRecord, consumerInstanceId)).thenReturn(result);
        }
    }

    /** Asserts both the next-to-read position and the committed offset for the test partition. */
    private void assertPositionAndCommitted(long nextToReadOffset, long expectedCommittedOffset) {
        Assert.assertEquals(nextToReadOffset, mockedConsumer.position(topicPartition0));
        OffsetAndMetadata committedOffsetInfo = mockedConsumer.committed(topicPartition0);
        Assert.assertNotNull(committedOffsetInfo);
        Assert.assertEquals(expectedCommittedOffset, committedOffsetInfo.offset());
    }

    @Test
    public void testProcessPoll_happyPath() throws Exception {
        long expectedCommittedOffset = startOffset + numberOfRecords + 1;
        stubProcessMessageForAllRecords(true);
        Mockito.when(mockedBatchMessageProcessor.onPollEndCallBack(Mockito.anyInt(), Mockito.anyMap())).thenReturn(true);
        consumerWorker.processPoll();
        // all records should be processed fine and, thus,
        // committed and next to read offsets should be the same in this case
        assertPositionAndCommitted(expectedCommittedOffset, expectedCommittedOffset);
    }

    /**
     * Use case: processing of any (even all) events in the processMessage() fails, it returns FALSE - but the poll()
     * should still commit the offsets, since events will be stored into the failed events log
     *
     * @throws Exception
     */
    @Test
    public void testProcessPoll_eventProcessingFails() throws Exception {
        long expectedCommittedOffset = startOffset + numberOfRecords + 1;
        stubProcessMessageForAllRecords(false);
        Mockito.when(mockedBatchMessageProcessor.onPollEndCallBack(Mockito.anyInt(), Mockito.anyMap())).thenReturn(true);
        consumerWorker.processPoll();
        // committed and next to read offsets should be the same in this case
        assertPositionAndCommitted(expectedCommittedOffset, expectedCommittedOffset);
    }

    /**
     * Use case: processing of any (even all) events in the processMessage() fails, it throws an Exception - but the poll()
     * should still commit the offsets, since events will be stored into the failed events log
     *
     * @throws Exception
     */
    @Test
    public void testProcessPoll_eventProcessingExceptions() throws Exception {
        long expectedCommittedOffset = startOffset + numberOfRecords + 1;
        for (ConsumerRecord<String, String> consumerRecord : testRecords) {
            Mockito.when(mockedBatchMessageProcessor.processMessage(consumerRecord, consumerInstanceId))
                    .thenThrow(new IllegalArgumentException("Unit test exception"));
        }
        Mockito.when(mockedBatchMessageProcessor.onPollEndCallBack(Mockito.anyInt(), Mockito.anyMap())).thenReturn(true);
        consumerWorker.processPoll();
        // committed and next to read offsets should be the same in this case
        assertPositionAndCommitted(expectedCommittedOffset, expectedCommittedOffset);
    }

    /**
     * Use case: call to beforeCommitCallBack() returns shouldCommitOffset = FALSE ==>
     * offsets should not be committed, but the execution flow should not fail
     *
     * @throws Exception
     */
    @Test
    public void testProcessPoll_beforeCommitCallBackReturnsFalse() throws Exception {
        long nextToReadOffset = startOffset + numberOfRecords + 1;
        stubProcessMessageForAllRecords(true);
        Mockito.when(mockedBatchMessageProcessor.onPollEndCallBack(Mockito.anyInt(), Mockito.anyMap())).thenReturn(false);
        consumerWorker.processPoll();
        // next to read offset should still be incremented
        Assert.assertEquals(nextToReadOffset, mockedConsumer.position(topicPartition0));
        // nothing should be committed for this topic/partition yet - so the committed OffsetAndMetadata object should be null
        OffsetAndMetadata committedOffsetInfo = mockedConsumer.committed(topicPartition0);
        Assert.assertNull(committedOffsetInfo);
    }

    /**
     * Use case: call to beforeCommitCallBack() throws unrecoverable exception ==>
     * consumer should fail and exit
     *
     * @throws Exception
     */
    @Test(expected = IllegalArgumentException.class)
    public void testProcessPoll_beforeCommitCall_NonrecoverableException() throws Exception {
        stubProcessMessageForAllRecords(true);
        Mockito.when(mockedBatchMessageProcessor.onPollEndCallBack(Mockito.anyInt(), Mockito.anyMap()))
                .thenThrow(new IllegalArgumentException("non-recoverable exception from unit test"));
        consumerWorker.processPoll();
        // exception should be thrown out
    }

    /**
     * Use case: call to beforeCommitCallBack() throws LESS than a configured MAX limit of
     * recoverable exceptions ==> offsets should not be committed, but the execution flow should not fail
     *
     * @throws Exception
     */
    @Test
    public void testProcessPoll_beforeCommitCall_RecoverableException_underlimit() throws Exception {
        long expectedCommittedOffset = startOffset + numberOfRecords + 1;
        int pollRetryLimit = 2;
        long pollRetryIntervalMs = 2L;
        consumerWorker.setPollRetryLimit(pollRetryLimit);
        consumerWorker.setPollRetryIntervalMs(pollRetryIntervalMs);
        stubProcessMessageForAllRecords(true);
        // first callback attempt fails recoverably, the retry succeeds
        Mockito.when(mockedBatchMessageProcessor.onPollEndCallBack(Mockito.anyInt(), Mockito.anyMap()))
                .thenThrow(new ConsumerRecoverableException("Recoverable exception from unit test #1"))
                .thenReturn(true);
        consumerWorker.processPoll();
        // committed and next to read offsets should be the same in this case
        assertPositionAndCommitted(expectedCommittedOffset, expectedCommittedOffset);
    }

    /**
     * Use case: call to beforeCommitCallBack() throws more than a configured MAX limit of
     * recoverable exceptions, and ignoreOverlimitRecoverableErrors = FALSE ==> consumer should fail and exit
     *
     * @throws Exception
     */
    @Test(expected = ConsumerNonRecoverableException.class)
    public void testProcessPoll_beforeCommitCall_RecoverableException_overlimit() throws Exception {
        int pollRetryLimit = 2;
        long pollRetryIntervalMs = 2L;
        consumerWorker.setIgnoreOverlimitRecoverableErrors(false);
        consumerWorker.setPollRetryLimit(pollRetryLimit);
        consumerWorker.setPollRetryIntervalMs(pollRetryIntervalMs);
        stubProcessMessageForAllRecords(true);
        Mockito.when(mockedBatchMessageProcessor.onPollEndCallBack(Mockito.anyInt(), Mockito.anyMap()))
                .thenThrow(new ConsumerRecoverableException("Recoverable exception from unit test #1"))
                .thenThrow(new ConsumerRecoverableException("Recoverable exception from unit test #2"))
                .thenThrow(new ConsumerRecoverableException("Recoverable exception from unit test #3 - over the limit"));
        consumerWorker.processPoll();
        // ConsumerNonRecoverableException should be thrown out
    }

    /**
     * Use case: call to beforeCommitCallBack() throws more than a configured MAX limit of
     * recoverable exceptions, and ignoreOverlimitRecoverableErrors = TRUE ==>
     * offsets should not be committed, but the execution flow should not fail
     *
     * @throws Exception
     */
    @Test
    public void testProcessPoll_beforeCommitCall_RecoverableException_overlimit_ignored() throws Exception {
        long expectedCommittedOffset = startOffset + numberOfRecords + 1;
        int pollRetryLimit = 2;
        long pollRetryIntervalMs = 2L;
        consumerWorker.setIgnoreOverlimitRecoverableErrors(true);
        consumerWorker.setPollRetryLimit(pollRetryLimit);
        consumerWorker.setPollRetryIntervalMs(pollRetryIntervalMs);
        stubProcessMessageForAllRecords(true);
        Mockito.when(mockedBatchMessageProcessor.onPollEndCallBack(Mockito.anyInt(), Mockito.anyMap()))
                .thenThrow(new ConsumerRecoverableException("Recoverable exception from unit test #1"))
                .thenReturn(true);
        consumerWorker.processPoll();
        // committed and next to read offsets should be the same in this case
        assertPositionAndCommitted(expectedCommittedOffset, expectedCommittedOffset);
    }
}
| |
package com.sequenceiq.mock.swagger.model;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.math.BigDecimal;
import org.springframework.validation.annotation.Validated;
import javax.validation.Valid;
import javax.validation.constraints.*;
/**
* Utilization report information of a tenant of Yarn application.
*/
@ApiModel(description = "Utilization report information of a tenant of Yarn application.")
@Validated
@javax.annotation.Generated(value = "io.swagger.codegen.languages.SpringCodegen", date = "2021-12-10T21:24:30.629+01:00")
public class ApiYarnTenantUtilization {
// Generated Jackson-mapped fields: per-tenant averages of YARN CPU and
// memory allocation/utilization metrics. Units are not stated here —
// presumably VCores for the CPU fields (per the getter javadocs) and an
// unspecified memory unit for the memory fields; confirm upstream.
@JsonProperty("tenantName")
private String tenantName = null;

@JsonProperty("avgYarnCpuAllocation")
private BigDecimal avgYarnCpuAllocation = null;

@JsonProperty("avgYarnCpuUtilization")
private BigDecimal avgYarnCpuUtilization = null;

@JsonProperty("avgYarnCpuUnusedCapacity")
private BigDecimal avgYarnCpuUnusedCapacity = null;

@JsonProperty("avgYarnCpuSteadyFairShare")
private BigDecimal avgYarnCpuSteadyFairShare = null;

@JsonProperty("avgYarnPoolAllocatedCpuDuringContention")
private BigDecimal avgYarnPoolAllocatedCpuDuringContention = null;

@JsonProperty("avgYarnPoolFairShareCpuDuringContention")
private BigDecimal avgYarnPoolFairShareCpuDuringContention = null;

@JsonProperty("avgYarnPoolSteadyFairShareCpuDuringContention")
private BigDecimal avgYarnPoolSteadyFairShareCpuDuringContention = null;

@JsonProperty("avgYarnContainerWaitRatio")
private BigDecimal avgYarnContainerWaitRatio = null;

@JsonProperty("avgYarnMemoryAllocation")
private BigDecimal avgYarnMemoryAllocation = null;

@JsonProperty("avgYarnMemoryUtilization")
private BigDecimal avgYarnMemoryUtilization = null;

@JsonProperty("avgYarnMemoryUnusedCapacity")
private BigDecimal avgYarnMemoryUnusedCapacity = null;

@JsonProperty("avgYarnMemorySteadyFairShare")
private BigDecimal avgYarnMemorySteadyFairShare = null;

@JsonProperty("avgYarnPoolAllocatedMemoryDuringContention")
private BigDecimal avgYarnPoolAllocatedMemoryDuringContention = null;

@JsonProperty("avgYarnPoolFairShareMemoryDuringContention")
private BigDecimal avgYarnPoolFairShareMemoryDuringContention = null;

@JsonProperty("avgYarnPoolSteadyFairShareMemoryDuringContention")
private BigDecimal avgYarnPoolSteadyFairShareMemoryDuringContention = null;
/** Fluent setter for {@code tenantName}; returns this for chaining. */
public ApiYarnTenantUtilization tenantName(String tenantName) {
    this.tenantName = tenantName;
    return this;
}

/**
 * Name of the tenant.
 * @return tenantName
 **/
@ApiModelProperty(value = "Name of the tenant.")

public String getTenantName() {
    return tenantName;
}

/** Sets {@code tenantName}. */
public void setTenantName(String tenantName) {
    this.tenantName = tenantName;
}

/** Fluent setter for {@code avgYarnCpuAllocation}; returns this for chaining. */
public ApiYarnTenantUtilization avgYarnCpuAllocation(BigDecimal avgYarnCpuAllocation) {
    this.avgYarnCpuAllocation = avgYarnCpuAllocation;
    return this;
}

/**
 * Average number of VCores allocated to YARN applications of the tenant.
 * @return avgYarnCpuAllocation
 **/
@ApiModelProperty(value = "Average number of VCores allocated to YARN applications of the tenant.")

@Valid

public BigDecimal getAvgYarnCpuAllocation() {
    return avgYarnCpuAllocation;
}

/** Sets {@code avgYarnCpuAllocation}. */
public void setAvgYarnCpuAllocation(BigDecimal avgYarnCpuAllocation) {
    this.avgYarnCpuAllocation = avgYarnCpuAllocation;
}

/** Fluent setter for {@code avgYarnCpuUtilization}; returns this for chaining. */
public ApiYarnTenantUtilization avgYarnCpuUtilization(BigDecimal avgYarnCpuUtilization) {
    this.avgYarnCpuUtilization = avgYarnCpuUtilization;
    return this;
}

/**
 * Average number of VCores used by YARN applications of the tenant.
 * @return avgYarnCpuUtilization
 **/
@ApiModelProperty(value = "Average number of VCores used by YARN applications of the tenant.")

@Valid

public BigDecimal getAvgYarnCpuUtilization() {
    return avgYarnCpuUtilization;
}

/** Sets {@code avgYarnCpuUtilization}. */
public void setAvgYarnCpuUtilization(BigDecimal avgYarnCpuUtilization) {
    this.avgYarnCpuUtilization = avgYarnCpuUtilization;
}

/** Fluent setter for {@code avgYarnCpuUnusedCapacity}; returns this for chaining. */
public ApiYarnTenantUtilization avgYarnCpuUnusedCapacity(BigDecimal avgYarnCpuUnusedCapacity) {
    this.avgYarnCpuUnusedCapacity = avgYarnCpuUnusedCapacity;
    return this;
}

/**
 * Average unused VCores of the tenant.
 * @return avgYarnCpuUnusedCapacity
 **/
@ApiModelProperty(value = "Average unused VCores of the tenant.")

@Valid

public BigDecimal getAvgYarnCpuUnusedCapacity() {
    return avgYarnCpuUnusedCapacity;
}

/** Sets {@code avgYarnCpuUnusedCapacity}. */
public void setAvgYarnCpuUnusedCapacity(BigDecimal avgYarnCpuUnusedCapacity) {
    this.avgYarnCpuUnusedCapacity = avgYarnCpuUnusedCapacity;
}
/** Fluent setter for {@code avgYarnCpuSteadyFairShare}; returns this for chaining. */
public ApiYarnTenantUtilization avgYarnCpuSteadyFairShare(BigDecimal avgYarnCpuSteadyFairShare) {
    this.avgYarnCpuSteadyFairShare = avgYarnCpuSteadyFairShare;
    return this;
}

/**
 * Average steady fair share VCores.
 * @return avgYarnCpuSteadyFairShare
 **/
@ApiModelProperty(value = "Average steady fair share VCores.")

@Valid

public BigDecimal getAvgYarnCpuSteadyFairShare() {
    return avgYarnCpuSteadyFairShare;
}

/** Sets {@code avgYarnCpuSteadyFairShare}. */
public void setAvgYarnCpuSteadyFairShare(BigDecimal avgYarnCpuSteadyFairShare) {
    this.avgYarnCpuSteadyFairShare = avgYarnCpuSteadyFairShare;
}

/** Fluent setter for {@code avgYarnPoolAllocatedCpuDuringContention}; returns this for chaining. */
public ApiYarnTenantUtilization avgYarnPoolAllocatedCpuDuringContention(BigDecimal avgYarnPoolAllocatedCpuDuringContention) {
    this.avgYarnPoolAllocatedCpuDuringContention = avgYarnPoolAllocatedCpuDuringContention;
    return this;
}

/**
 * Average allocated Vcores with pending containers.
 * @return avgYarnPoolAllocatedCpuDuringContention
 **/
@ApiModelProperty(value = "Average allocated Vcores with pending containers.")

@Valid

public BigDecimal getAvgYarnPoolAllocatedCpuDuringContention() {
    return avgYarnPoolAllocatedCpuDuringContention;
}

/** Sets {@code avgYarnPoolAllocatedCpuDuringContention}. */
public void setAvgYarnPoolAllocatedCpuDuringContention(BigDecimal avgYarnPoolAllocatedCpuDuringContention) {
    this.avgYarnPoolAllocatedCpuDuringContention = avgYarnPoolAllocatedCpuDuringContention;
}

/** Fluent setter for {@code avgYarnPoolFairShareCpuDuringContention}; returns this for chaining. */
public ApiYarnTenantUtilization avgYarnPoolFairShareCpuDuringContention(BigDecimal avgYarnPoolFairShareCpuDuringContention) {
    this.avgYarnPoolFairShareCpuDuringContention = avgYarnPoolFairShareCpuDuringContention;
    return this;
}

/**
 * Average fair share VCores with pending containers.
 * @return avgYarnPoolFairShareCpuDuringContention
 **/
@ApiModelProperty(value = "Average fair share VCores with pending containers.")

@Valid

public BigDecimal getAvgYarnPoolFairShareCpuDuringContention() {
    return avgYarnPoolFairShareCpuDuringContention;
}

/** Sets {@code avgYarnPoolFairShareCpuDuringContention}. */
public void setAvgYarnPoolFairShareCpuDuringContention(BigDecimal avgYarnPoolFairShareCpuDuringContention) {
    this.avgYarnPoolFairShareCpuDuringContention = avgYarnPoolFairShareCpuDuringContention;
}

/** Fluent setter for {@code avgYarnPoolSteadyFairShareCpuDuringContention}; returns this for chaining. */
public ApiYarnTenantUtilization avgYarnPoolSteadyFairShareCpuDuringContention(BigDecimal avgYarnPoolSteadyFairShareCpuDuringContention) {
    this.avgYarnPoolSteadyFairShareCpuDuringContention = avgYarnPoolSteadyFairShareCpuDuringContention;
    return this;
}

/**
 * Average steady fair share VCores with pending containers.
 * @return avgYarnPoolSteadyFairShareCpuDuringContention
 **/
@ApiModelProperty(value = "Average steady fair share VCores with pending containers.")

@Valid

public BigDecimal getAvgYarnPoolSteadyFairShareCpuDuringContention() {
    return avgYarnPoolSteadyFairShareCpuDuringContention;
}

/** Sets {@code avgYarnPoolSteadyFairShareCpuDuringContention}. */
public void setAvgYarnPoolSteadyFairShareCpuDuringContention(BigDecimal avgYarnPoolSteadyFairShareCpuDuringContention) {
    this.avgYarnPoolSteadyFairShareCpuDuringContention = avgYarnPoolSteadyFairShareCpuDuringContention;
}
/** Fluent setter for {@code avgYarnContainerWaitRatio}; returns this for chaining. */
public ApiYarnTenantUtilization avgYarnContainerWaitRatio(BigDecimal avgYarnContainerWaitRatio) {
    this.avgYarnContainerWaitRatio = avgYarnContainerWaitRatio;
    return this;
}

/**
 * Average percentage of pending containers for the pool during periods of contention.
 * @return avgYarnContainerWaitRatio
 **/
@ApiModelProperty(value = "Average percentage of pending containers for the pool during periods of contention.")

@Valid

public BigDecimal getAvgYarnContainerWaitRatio() {
    return avgYarnContainerWaitRatio;
}

/** Sets {@code avgYarnContainerWaitRatio}. */
public void setAvgYarnContainerWaitRatio(BigDecimal avgYarnContainerWaitRatio) {
    this.avgYarnContainerWaitRatio = avgYarnContainerWaitRatio;
}

/** Fluent setter for {@code avgYarnMemoryAllocation}; returns this for chaining. */
public ApiYarnTenantUtilization avgYarnMemoryAllocation(BigDecimal avgYarnMemoryAllocation) {
    this.avgYarnMemoryAllocation = avgYarnMemoryAllocation;
    return this;
}

/**
 * Average memory allocated to YARN applications of the tenant.
 * @return avgYarnMemoryAllocation
 **/
@ApiModelProperty(value = "Average memory allocated to YARN applications of the tenant.")

@Valid

public BigDecimal getAvgYarnMemoryAllocation() {
    return avgYarnMemoryAllocation;
}

/** Sets {@code avgYarnMemoryAllocation}. */
public void setAvgYarnMemoryAllocation(BigDecimal avgYarnMemoryAllocation) {
    this.avgYarnMemoryAllocation = avgYarnMemoryAllocation;
}

/** Fluent setter for {@code avgYarnMemoryUtilization}; returns this for chaining. */
public ApiYarnTenantUtilization avgYarnMemoryUtilization(BigDecimal avgYarnMemoryUtilization) {
    this.avgYarnMemoryUtilization = avgYarnMemoryUtilization;
    return this;
}

/**
 * Average memory used by YARN applications of the tenant.
 * @return avgYarnMemoryUtilization
 **/
@ApiModelProperty(value = "Average memory used by YARN applications of the tenant.")

@Valid

public BigDecimal getAvgYarnMemoryUtilization() {
    return avgYarnMemoryUtilization;
}

/** Sets {@code avgYarnMemoryUtilization}. */
public void setAvgYarnMemoryUtilization(BigDecimal avgYarnMemoryUtilization) {
    this.avgYarnMemoryUtilization = avgYarnMemoryUtilization;
}

/** Fluent setter for {@code avgYarnMemoryUnusedCapacity}; returns this for chaining. */
public ApiYarnTenantUtilization avgYarnMemoryUnusedCapacity(BigDecimal avgYarnMemoryUnusedCapacity) {
    this.avgYarnMemoryUnusedCapacity = avgYarnMemoryUnusedCapacity;
    return this;
}

/**
 * Average unused memory of the tenant.
 * @return avgYarnMemoryUnusedCapacity
 **/
@ApiModelProperty(value = "Average unused memory of the tenant.")

@Valid

public BigDecimal getAvgYarnMemoryUnusedCapacity() {
    return avgYarnMemoryUnusedCapacity;
}

/** Sets {@code avgYarnMemoryUnusedCapacity}. */
public void setAvgYarnMemoryUnusedCapacity(BigDecimal avgYarnMemoryUnusedCapacity) {
    this.avgYarnMemoryUnusedCapacity = avgYarnMemoryUnusedCapacity;
}
public ApiYarnTenantUtilization avgYarnMemorySteadyFairShare(BigDecimal avgYarnMemorySteadyFairShare) {
this.avgYarnMemorySteadyFairShare = avgYarnMemorySteadyFairShare;
return this;
}
/**
* Average steady fair share memory.
* @return avgYarnMemorySteadyFairShare
**/
@ApiModelProperty(value = "Average steady fair share memory.")
@Valid
public BigDecimal getAvgYarnMemorySteadyFairShare() {
return avgYarnMemorySteadyFairShare;
}
public void setAvgYarnMemorySteadyFairShare(BigDecimal avgYarnMemorySteadyFairShare) {
this.avgYarnMemorySteadyFairShare = avgYarnMemorySteadyFairShare;
}
public ApiYarnTenantUtilization avgYarnPoolAllocatedMemoryDuringContention(BigDecimal avgYarnPoolAllocatedMemoryDuringContention) {
this.avgYarnPoolAllocatedMemoryDuringContention = avgYarnPoolAllocatedMemoryDuringContention;
return this;
}
/**
* Average allocated memory with pending containers.
* @return avgYarnPoolAllocatedMemoryDuringContention
**/
@ApiModelProperty(value = "Average allocated memory with pending containers.")
@Valid
public BigDecimal getAvgYarnPoolAllocatedMemoryDuringContention() {
return avgYarnPoolAllocatedMemoryDuringContention;
}
public void setAvgYarnPoolAllocatedMemoryDuringContention(BigDecimal avgYarnPoolAllocatedMemoryDuringContention) {
this.avgYarnPoolAllocatedMemoryDuringContention = avgYarnPoolAllocatedMemoryDuringContention;
}
public ApiYarnTenantUtilization avgYarnPoolFairShareMemoryDuringContention(BigDecimal avgYarnPoolFairShareMemoryDuringContention) {
this.avgYarnPoolFairShareMemoryDuringContention = avgYarnPoolFairShareMemoryDuringContention;
return this;
}
/**
* Average fair share memory with pending containers.
* @return avgYarnPoolFairShareMemoryDuringContention
**/
@ApiModelProperty(value = "Average fair share memory with pending containers.")
@Valid
public BigDecimal getAvgYarnPoolFairShareMemoryDuringContention() {
return avgYarnPoolFairShareMemoryDuringContention;
}
public void setAvgYarnPoolFairShareMemoryDuringContention(BigDecimal avgYarnPoolFairShareMemoryDuringContention) {
this.avgYarnPoolFairShareMemoryDuringContention = avgYarnPoolFairShareMemoryDuringContention;
}
/**
 * Fluent setter for {@code avgYarnPoolSteadyFairShareMemoryDuringContention}.
 *
 * @param avgYarnPoolSteadyFairShareMemoryDuringContention the new value (may be null)
 * @return this object, so calls can be chained
 */
public ApiYarnTenantUtilization avgYarnPoolSteadyFairShareMemoryDuringContention(BigDecimal avgYarnPoolSteadyFairShareMemoryDuringContention) {
    this.avgYarnPoolSteadyFairShareMemoryDuringContention = avgYarnPoolSteadyFairShareMemoryDuringContention;
    return this;
}

/**
 * Average steady fair share memory with pending containers.
 * @return avgYarnPoolSteadyFairShareMemoryDuringContention
 **/
@ApiModelProperty(value = "Average steady fair share memory with pending containers.")
@Valid
public BigDecimal getAvgYarnPoolSteadyFairShareMemoryDuringContention() {
    return avgYarnPoolSteadyFairShareMemoryDuringContention;
}

/**
 * Sets the average steady fair share memory with pending containers.
 *
 * @param avgYarnPoolSteadyFairShareMemoryDuringContention the new value (may be null; no validation is performed)
 */
public void setAvgYarnPoolSteadyFairShareMemoryDuringContention(BigDecimal avgYarnPoolSteadyFairShareMemoryDuringContention) {
    this.avgYarnPoolSteadyFairShareMemoryDuringContention = avgYarnPoolSteadyFairShareMemoryDuringContention;
}
/**
 * Field-by-field equality over every tenant utilization metric.
 * Uses a {@code getClass()} comparison (not {@code instanceof}), so an
 * instance of a subclass never compares equal to a plain instance.
 */
@Override
public boolean equals(java.lang.Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    final ApiYarnTenantUtilization that = (ApiYarnTenantUtilization) o;
    return Objects.equals(tenantName, that.tenantName)
            && Objects.equals(avgYarnCpuAllocation, that.avgYarnCpuAllocation)
            && Objects.equals(avgYarnCpuUtilization, that.avgYarnCpuUtilization)
            && Objects.equals(avgYarnCpuUnusedCapacity, that.avgYarnCpuUnusedCapacity)
            && Objects.equals(avgYarnCpuSteadyFairShare, that.avgYarnCpuSteadyFairShare)
            && Objects.equals(avgYarnPoolAllocatedCpuDuringContention, that.avgYarnPoolAllocatedCpuDuringContention)
            && Objects.equals(avgYarnPoolFairShareCpuDuringContention, that.avgYarnPoolFairShareCpuDuringContention)
            && Objects.equals(avgYarnPoolSteadyFairShareCpuDuringContention, that.avgYarnPoolSteadyFairShareCpuDuringContention)
            && Objects.equals(avgYarnContainerWaitRatio, that.avgYarnContainerWaitRatio)
            && Objects.equals(avgYarnMemoryAllocation, that.avgYarnMemoryAllocation)
            && Objects.equals(avgYarnMemoryUtilization, that.avgYarnMemoryUtilization)
            && Objects.equals(avgYarnMemoryUnusedCapacity, that.avgYarnMemoryUnusedCapacity)
            && Objects.equals(avgYarnMemorySteadyFairShare, that.avgYarnMemorySteadyFairShare)
            && Objects.equals(avgYarnPoolAllocatedMemoryDuringContention, that.avgYarnPoolAllocatedMemoryDuringContention)
            && Objects.equals(avgYarnPoolFairShareMemoryDuringContention, that.avgYarnPoolFairShareMemoryDuringContention)
            && Objects.equals(avgYarnPoolSteadyFairShareMemoryDuringContention, that.avgYarnPoolSteadyFairShareMemoryDuringContention);
}
/**
 * Hash over every metric field, consistent with {@code equals}: the field
 * list and its order here must stay in sync with the comparison above.
 */
@Override
public int hashCode() {
    return Objects.hash(tenantName, avgYarnCpuAllocation, avgYarnCpuUtilization, avgYarnCpuUnusedCapacity, avgYarnCpuSteadyFairShare, avgYarnPoolAllocatedCpuDuringContention, avgYarnPoolFairShareCpuDuringContention, avgYarnPoolSteadyFairShareCpuDuringContention, avgYarnContainerWaitRatio, avgYarnMemoryAllocation, avgYarnMemoryUtilization, avgYarnMemoryUnusedCapacity, avgYarnMemorySteadyFairShare, avgYarnPoolAllocatedMemoryDuringContention, avgYarnPoolFairShareMemoryDuringContention, avgYarnPoolSteadyFairShareMemoryDuringContention);
}
/**
 * Multi-line debug representation: a "class ApiYarnTenantUtilization {" header,
 * one indented "name: value" line per metric (nested values re-indented via
 * {@code toIndentedString}), and a closing brace.
 */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder();
    sb.append("class ApiYarnTenantUtilization {\n")
            .append("    tenantName: ").append(toIndentedString(tenantName)).append("\n")
            .append("    avgYarnCpuAllocation: ").append(toIndentedString(avgYarnCpuAllocation)).append("\n")
            .append("    avgYarnCpuUtilization: ").append(toIndentedString(avgYarnCpuUtilization)).append("\n")
            .append("    avgYarnCpuUnusedCapacity: ").append(toIndentedString(avgYarnCpuUnusedCapacity)).append("\n")
            .append("    avgYarnCpuSteadyFairShare: ").append(toIndentedString(avgYarnCpuSteadyFairShare)).append("\n")
            .append("    avgYarnPoolAllocatedCpuDuringContention: ").append(toIndentedString(avgYarnPoolAllocatedCpuDuringContention)).append("\n")
            .append("    avgYarnPoolFairShareCpuDuringContention: ").append(toIndentedString(avgYarnPoolFairShareCpuDuringContention)).append("\n")
            .append("    avgYarnPoolSteadyFairShareCpuDuringContention: ").append(toIndentedString(avgYarnPoolSteadyFairShareCpuDuringContention)).append("\n")
            .append("    avgYarnContainerWaitRatio: ").append(toIndentedString(avgYarnContainerWaitRatio)).append("\n")
            .append("    avgYarnMemoryAllocation: ").append(toIndentedString(avgYarnMemoryAllocation)).append("\n")
            .append("    avgYarnMemoryUtilization: ").append(toIndentedString(avgYarnMemoryUtilization)).append("\n")
            .append("    avgYarnMemoryUnusedCapacity: ").append(toIndentedString(avgYarnMemoryUnusedCapacity)).append("\n")
            .append("    avgYarnMemorySteadyFairShare: ").append(toIndentedString(avgYarnMemorySteadyFairShare)).append("\n")
            .append("    avgYarnPoolAllocatedMemoryDuringContention: ").append(toIndentedString(avgYarnPoolAllocatedMemoryDuringContention)).append("\n")
            .append("    avgYarnPoolFairShareMemoryDuringContention: ").append(toIndentedString(avgYarnPoolFairShareMemoryDuringContention)).append("\n")
            .append("    avgYarnPoolSteadyFairShareMemoryDuringContention: ").append(toIndentedString(avgYarnPoolSteadyFairShareMemoryDuringContention)).append("\n")
            .append("}");
    return sb.toString();
}
/**
 * Convert the given object to string with each line indented by 4 spaces
 * (except the first line). Null-safe: a null input yields the literal "null".
 */
private String toIndentedString(java.lang.Object o) {
    return o == null ? "null" : o.toString().replace("\n", "\n    ");
}
}
| |
/*
* Copyright 2017 Vector Creations Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package im.vector.adapters;
import android.content.Context;
import android.graphics.Color;
import android.text.TextUtils;
import android.util.Pair;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.recyclerview.widget.RecyclerView;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.core.MXPatterns;
import org.matrix.androidsdk.core.callback.SimpleApiCallback;
import org.matrix.androidsdk.data.Room;
import org.matrix.androidsdk.rest.model.User;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import im.vector.R;
import im.vector.contacts.ContactsManager;
import im.vector.settings.VectorLocale;
import im.vector.util.RoomUtils;
import im.vector.util.VectorUtils;
/**
 * RecyclerView adapter backing the "People" screen.
 *
 * It manages three {@link AdapterSection}s — direct chats (rooms), local
 * address-book contacts and "known" contacts from the user directory — and
 * forwards item clicks to an {@link OnSelectItemListener}. Section plumbing
 * (positions, headers, filtering entry points) is inherited from AbsAdapter.
 */
public class PeopleAdapter extends AbsAdapter {

    private static final String LOG_TAG = PeopleAdapter.class.getSimpleName();

    // Extra view types, on top of the TYPE_HEADER_DEFAULT / TYPE_ROOM ones
    // used from the parent adapter.
    private static final int TYPE_HEADER_LOCAL_CONTACTS = 0;
    private static final int TYPE_CONTACT = 1;

    private final AdapterSection<Room> mDirectChatsSection;
    private final AdapterSection<ParticipantAdapterItem> mLocalContactsSection;
    private final KnownContactsAdapterSection mKnownContactsSection;

    private final OnSelectItemListener mListener;

    // Empty-section placeholder strings; which one is shown for the local
    // contacts section is decided in updateLocalContactsPlaceHolders().
    private final String mNoContactAccessPlaceholder;
    private final String mNoResultPlaceholder;
    private final String mNoIdentityServerPlaceholder;

    /*
     * *********************************************************************************************
     * Constructor
     * *********************************************************************************************
     */

    /**
     * Builds the adapter and registers its three sections (direct chats,
     * local contacts, known contacts) with the parent adapter.
     *
     * @param context            Android context, used for resources
     * @param listener           receives room/contact selection callbacks
     * @param invitationListener forwarded to AbsAdapter for room invitations
     * @param moreActionListener forwarded to AbsAdapter for the room "more" menu
     */
    public PeopleAdapter(final Context context,
                         final OnSelectItemListener listener,
                         final RoomInvitationListener invitationListener,
                         final MoreRoomActionListener moreActionListener) {
        super(context, invitationListener, moreActionListener);
        mListener = listener;
        // ButterKnife.bind(this); cannot be applied here
        mNoContactAccessPlaceholder = context.getString(R.string.no_contact_access_placeholder);
        mNoResultPlaceholder = context.getString(R.string.no_result_placeholder);
        mNoIdentityServerPlaceholder = context.getString(R.string.people_no_identity_server);
        // Direct chats: rooms sorted by most recent activity.
        mDirectChatsSection = new AdapterSection<>(context,
                context.getString(R.string.direct_chats_header),
                -1,
                R.layout.adapter_item_room_view,
                TYPE_HEADER_DEFAULT,
                TYPE_ROOM,
                new ArrayList<Room>(),
                RoomUtils.getRoomsDateComparator(mSession, false));
        mDirectChatsSection.setEmptyViewPlaceholder(context.getString(R.string.no_conversation_placeholder), context.getString(R.string.no_result_placeholder));
        // Local address-book contacts, alphabetically sorted, with a custom
        // sticky header subview (used for the contacts-permission UI).
        mLocalContactsSection = new AdapterSection<>(context,
                context.getString(R.string.local_address_book_header),
                R.layout.adapter_local_contacts_sticky_header_subview,
                R.layout.adapter_item_contact_view,
                TYPE_HEADER_LOCAL_CONTACTS, TYPE_CONTACT,
                new ArrayList<ParticipantAdapterItem>(),
                ParticipantAdapterItem.alphaComparator);
        updateLocalContactsPlaceHolders();
        // User-directory results: unsorted here (comparator is null); sorting
        // happens when the filtered list is set. Hidden until a filter is typed.
        mKnownContactsSection = new KnownContactsAdapterSection(context, context.getString(R.string.user_directory_header), -1,
                R.layout.adapter_item_contact_view, TYPE_HEADER_DEFAULT, TYPE_CONTACT, new ArrayList<ParticipantAdapterItem>(), null);
        mKnownContactsSection.setEmptyViewPlaceholder(null, context.getString(R.string.no_result_placeholder));
        mKnownContactsSection.setIsHiddenWhenNoFilter(true);
        addSection(mDirectChatsSection);
        addSection(mLocalContactsSection);
        addSection(mKnownContactsSection);
    }

    /**
     * Picks the placeholder for an empty local-contacts section:
     * "no access" when the address book permission is denied, "no identity
     * server" when none is configured, otherwise the generic "no result".
     */
    private void updateLocalContactsPlaceHolders() {
        String noItemPlaceholder = mNoResultPlaceholder;
        if (!ContactsManager.getInstance().isContactBookAccessAllowed()) {
            noItemPlaceholder = mNoContactAccessPlaceholder;
        } else {
            if (mSession.getIdentityServerManager().getIdentityServerUrl() == null) {
                noItemPlaceholder = mNoIdentityServerPlaceholder;
            }
        }
        mLocalContactsSection.setEmptyViewPlaceholder(noItemPlaceholder);
    }

    /*
     * *********************************************************************************************
     * Abstract methods implementation
     * *********************************************************************************************
     */

    /**
     * Inflates the right view holder for the given sub view type
     * (local-contacts header, room row, or contact row).
     */
    @Override
    protected RecyclerView.ViewHolder createSubViewHolder(ViewGroup viewGroup, int viewType) {
        final LayoutInflater inflater = LayoutInflater.from(viewGroup.getContext());
        View itemView;
        if (viewType == TYPE_HEADER_LOCAL_CONTACTS) {
            // TODO: replace by an empty view?
            itemView = inflater.inflate(R.layout.adapter_section_header_local, viewGroup, false);
            // NOTE(review): magenta looks like a debug/placeholder background
            // (see TODO above) — confirm it is intentional before shipping.
            itemView.setBackgroundColor(Color.MAGENTA);
            return new HeaderViewHolder(itemView);
        } else {
            switch (viewType) {
                case TYPE_ROOM:
                    itemView = inflater.inflate(R.layout.adapter_item_room_view, viewGroup, false);
                    return new RoomViewHolder(itemView);
                case TYPE_CONTACT:
                    itemView = inflater.inflate(R.layout.adapter_item_contact_view, viewGroup, false);
                    return new ContactViewHolder(itemView);
            }
        }
        // Unknown view type: the caller is expected never to request one.
        return null;
    }

    /**
     * Binds the holder at {@code position} according to its view type and
     * wires the click listener for room rows.
     */
    @Override
    protected void populateViewHolder(int viewType, RecyclerView.ViewHolder viewHolder, int position) {
        switch (viewType) {
            case TYPE_HEADER_LOCAL_CONTACTS:
                // Local header: find the section whose header sits at this position.
                final HeaderViewHolder headerViewHolder = (HeaderViewHolder) viewHolder;
                for (Pair<Integer, AdapterSection> adapterSection : getSectionsArray()) {
                    if (adapterSection.first == position) {
                        headerViewHolder.populateViews(adapterSection.second);
                        break;
                    }
                }
                break;
            case TYPE_ROOM:
                final RoomViewHolder roomViewHolder = (RoomViewHolder) viewHolder;
                final Room room = (Room) getItemForPosition(position);
                roomViewHolder.populateViews(mContext, mSession, room, true, false, mMoreRoomActionListener);
                roomViewHolder.itemView.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        mListener.onSelectItem(room, -1);
                    }
                });
                break;
            case TYPE_CONTACT:
                final ContactViewHolder contactViewHolder = (ContactViewHolder) viewHolder;
                final ParticipantAdapterItem item = (ParticipantAdapterItem) getItemForPosition(position);
                contactViewHolder.populateViews(item, position);
                break;
        }
    }

    /**
     * Applies {@code pattern} to the filterable sections and returns the total
     * number of matching items. Known contacts are only filtered locally when
     * the pattern is empty; otherwise the filtered list is pushed from outside
     * via {@link #setFilteredKnownContacts(List, String)}.
     */
    @Override
    protected int applyFilter(String pattern) {
        int nbResults = 0;
        nbResults += filterRoomSection(mDirectChatsSection, pattern);
        nbResults += filterLocalContacts(pattern);
        // if there is no pattern, use the local search
        if (TextUtils.isEmpty(pattern)) {
            nbResults += filterKnownContacts(pattern);
        }
        return nbResults;
    }

    /*
     * *********************************************************************************************
     * Public methods
     * *********************************************************************************************
     */

    /**
     * Replaces the direct-chat rooms, re-applying the current filter if any.
     *
     * @param rooms the new list of direct chats
     */
    public void setRooms(final List<Room> rooms) {
        mDirectChatsSection.setItems(rooms, mCurrentFilterPattern);
        if (!TextUtils.isEmpty(mCurrentFilterPattern)) {
            filterRoomSection(mDirectChatsSection, String.valueOf(mCurrentFilterPattern));
        }
        updateSections();
    }

    /**
     * Replaces the local address-book contacts, refreshing the empty-section
     * placeholder and re-applying the current filter if any.
     *
     * @param localContacts the new list of local contacts
     */
    public void setLocalContacts(final List<ParticipantAdapterItem> localContacts) {
        // updates the placeholder according to the local contacts permissions
        updateLocalContactsPlaceHolders();
        mLocalContactsSection.setItems(localContacts, mCurrentFilterPattern);
        if (!TextUtils.isEmpty(mCurrentFilterPattern)) {
            filterLocalContacts(String.valueOf(mCurrentFilterPattern));
        }
        updateSections();
    }

    /**
     * Replaces the known (user-directory) contacts and re-filters them with
     * the current pattern (or resets the filter when there is none).
     *
     * @param knownContacts the new list of known contacts
     */
    public void setKnownContacts(final List<ParticipantAdapterItem> knownContacts) {
        mKnownContactsSection.setItems(knownContacts, mCurrentFilterPattern);
        if (!TextUtils.isEmpty(mCurrentFilterPattern)) {
            filterKnownContacts(String.valueOf(mCurrentFilterPattern));
        } else {
            filterKnownContacts(null);
        }
        updateSections();
    }

    /**
     * Installs an externally filtered (e.g. server-side search) known-contacts
     * list, sorted before display.
     *
     * @param filteredKnownContacts the already-filtered contacts
     * @param pattern               the pattern that produced the filtering
     */
    public void setFilteredKnownContacts(List<ParticipantAdapterItem> filteredKnownContacts, String pattern) {
        Collections.sort(filteredKnownContacts, ParticipantAdapterItem.getComparator(mSession));
        mKnownContactsSection.setFilteredItems(filteredKnownContacts, pattern);
        updateSections();
    }

    /**
     * Flags the known-contacts section as truncated (server returned a
     * limited result set).
     */
    public void setKnownContactsLimited(boolean isLimited) {
        mKnownContactsSection.setIsLimited(isLimited);
    }

    /**
     * Sets an extra string appended to the known-contacts section header.
     */
    public void setKnownContactsExtraTitle(String extraTitle) {
        mKnownContactsSection.setCustomHeaderExtra(extraTitle);
    }

    /**
     * Update the known contact corresponding to the given user id
     *
     * @param user the user whose row(s) should be re-bound
     */
    public void updateKnownContact(final User user) {
        // +1 to skip the section header itself.
        int headerPos = getSectionHeaderPosition(mKnownContactsSection) + 1;
        List<ParticipantAdapterItem> knownContacts = mKnownContactsSection.getFilteredItems();
        for (int i = 0; i < knownContacts.size(); i++) {
            ParticipantAdapterItem item = knownContacts.get(i);
            if (TextUtils.equals(user.user_id, item.mUserId)) {
                notifyItemChanged(headerPos + i);
            }
        }
    }

    /*
     * *********************************************************************************************
     * Private methods
     * *********************************************************************************************
     */

    /**
     * Filter the local contacts with the given pattern.
     *
     * @param pattern the search pattern; empty/null resets the filter
     * @return nb of items matching the filter
     */
    private int filterLocalContacts(final String pattern) {
        if (!TextUtils.isEmpty(pattern)) {
            List<ParticipantAdapterItem> filteredLocalContacts = new ArrayList<>();
            final String formattedPattern = pattern.toLowerCase(VectorLocale.INSTANCE.getApplicationLocale()).trim();
            // Copy the section items to avoid concurrent modification while iterating.
            List<ParticipantAdapterItem> sectionItems = new ArrayList<>(mLocalContactsSection.getItems());
            for (final ParticipantAdapterItem item : sectionItems) {
                if (item.startsWith(formattedPattern)) {
                    filteredLocalContacts.add(item);
                }
            }
            mLocalContactsSection.setFilteredItems(filteredLocalContacts, pattern);
        } else {
            mLocalContactsSection.resetFilter();
        }
        return mLocalContactsSection.getFilteredItems().size();
    }

    /**
     * Filter the known contacts known by this account.
     *
     * @param pattern the pattern to search
     */
    public void filterAccountKnownContacts(final String pattern) {
        filterKnownContacts(pattern);
        updateSections();
    }

    /**
     * Filter the known contacts with the given pattern.
     *
     * @param pattern the search pattern; empty/null yields an empty result list
     * @return nb of items matching the filter
     */
    private int filterKnownContacts(final String pattern) {
        List<ParticipantAdapterItem> filteredKnownContacts = new ArrayList<>();
        if (!TextUtils.isEmpty(pattern)) {
            final String formattedPattern = pattern.trim().toLowerCase(VectorLocale.INSTANCE.getApplicationLocale());
            // Copy the section items to avoid concurrent modification while iterating.
            List<ParticipantAdapterItem> sectionItems = new ArrayList<>(mKnownContactsSection.getItems());
            for (final ParticipantAdapterItem item : sectionItems) {
                if (item.startsWith(formattedPattern)) {
                    filteredKnownContacts.add(item);
                }
            }
        }
        // The sort is done in the adapter to save loading time
        // see PeopleFragment.initKnownContacts
        Collections.sort(filteredKnownContacts, ParticipantAdapterItem.getComparator(mSession));
        mKnownContactsSection.setFilteredItems(filteredKnownContacts, pattern);
        // Local filtering is never a truncated server result.
        setKnownContactsLimited(false);
        setKnownContactsExtraTitle(null);
        return filteredKnownContacts.size();
    }

    /**
     * Remove the room of the given id from the adapter.
     *
     * @param roomId the id of the direct chat to remove
     */
    public void removeDirectChat(final String roomId) {
        Room room = mSession.getDataHandler().getRoom(roomId);
        if (mDirectChatsSection.removeItem(room)) {
            updateSections();
        }
    }

    /*
     * *********************************************************************************************
     * View holder
     * *********************************************************************************************
     */

    /**
     * View holder for one contact row (avatar, Matrix badge, name, description).
     */
    class ContactViewHolder extends RecyclerView.ViewHolder {
        @BindView(R.id.adapter_item_contact_avatar)
        ImageView vContactAvatar;

        @BindView(R.id.contact_badge)
        ImageView vContactBadge;

        @BindView(R.id.contact_name)
        TextView vContactName;

        @BindView(R.id.contact_desc)
        TextView vContactDesc;

        private ContactViewHolder(final View itemView) {
            super(itemView);
            ButterKnife.bind(this, itemView);
        }

        /**
         * Binds the given participant into this row and wires the click listener.
         *
         * @param participant the contact to display (ignored when null)
         * @param position    adapter position, used for presence refresh
         */
        private void populateViews(final ParticipantAdapterItem participant, final int position) {
            if (null == participant) {
                Log.e(LOG_TAG, "## populateViews() : null participant");
                return;
            }
            if (position >= getItemCount()) {
                Log.e(LOG_TAG, "## populateViews() : position out of bound " + position + " / " + getItemCount());
                return;
            }
            participant.displayAvatar(mSession, vContactAvatar);
            vContactName.setText(participant.getUniqueDisplayName(null));
            /*
             * Get the description to be displayed below the name
             * For local contact, it is the medium (email, phone number)
             * For other contacts, it is the presence
             */
            if (participant.mContact != null) {
                boolean isMatrixUserId = MXPatterns.isUserId(participant.mUserId);
                vContactBadge.setVisibility(isMatrixUserId ? View.VISIBLE : View.GONE);
                if (participant.mContact.getEmails().size() > 0) {
                    vContactDesc.setText(participant.mContact.getEmails().get(0));
                } else {
                    // NOTE(review): assumes a contact without email always has at
                    // least one phone number — get(0) would throw otherwise; verify.
                    vContactDesc.setText(participant.mContact.getPhonenumbers().get(0).mRawPhoneNumber);
                }
            } else {
                loadContactPresence(vContactDesc, participant, position);
                vContactBadge.setVisibility(View.GONE);
            }
            itemView.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    mListener.onSelectItem(participant, -1);
                }
            });
        }

        /**
         * Get the presence for the given contact.
         * Shows the cached status immediately, then refreshes the text and the
         * row asynchronously when the status callback fires.
         *
         * @param textView the description view to fill
         * @param item     the contact whose presence is requested
         * @param position adapter position to re-bind on update
         */
        private void loadContactPresence(final TextView textView, final ParticipantAdapterItem item,
                                         final int position) {
            final String presence = VectorUtils.getUserOnlineStatus(mContext, mSession, item.mUserId, new SimpleApiCallback<Void>() {
                @Override
                public void onSuccess(Void info) {
                    if (textView != null) {
                        textView.setText(VectorUtils.getUserOnlineStatus(mContext, mSession, item.mUserId, null));
                        notifyItemChanged(position);
                    }
                }
            });
            textView.setText(presence);
        }
    }

    /*
     * *********************************************************************************************
     * Inner classes
     * *********************************************************************************************
     */

    /**
     * Callbacks fired when the user taps a row; position is -1 when unknown.
     */
    public interface OnSelectItemListener {
        void onSelectItem(Room item, int position);

        void onSelectItem(ParticipantAdapterItem item, int position);
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.move.moveFilesOrDirectories;
import com.intellij.ide.util.DirectoryUtil;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.fileChooser.FileChooserFactory;
import com.intellij.openapi.help.HelpManager;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.TextComponentAccessor;
import com.intellij.openapi.util.Disposer;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.RefactoringSettings;
import com.intellij.refactoring.copy.CopyFilesOrDirectoriesDialog;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.NonFocusableCheckBox;
import com.intellij.ui.RecentsManager;
import com.intellij.ui.TextFieldWithHistoryWithBrowseButton;
import com.intellij.ui.components.JBLabelDecorator;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ui.FormBuilder;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import java.io.File;
import java.util.List;
/**
 * "Move files/directories" refactoring dialog: lets the user pick a target
 * directory (with history and path completion), optionally search for
 * references and open moved files in the editor, then hands control back to
 * the supplied {@link Callback} once the target directory exists.
 */
public class MoveFilesOrDirectoriesDialog extends DialogWrapper {
    @NonNls private static final String RECENT_KEYS = "MoveFile.RECENT_KEYS";
    @NonNls private static final String MOVE_FILES_OPEN_IN_EDITOR = "MoveFile.OpenInEditor";

    /** Invoked from doOKAction() after the target directory has been created. */
    public interface Callback {
        void run(MoveFilesOrDirectoriesDialog dialog);
    }

    private JLabel myNameLabel;
    private TextFieldWithHistoryWithBrowseButton myTargetDirectoryField;
    private String myHelpID;
    private final Project myProject;
    private final Callback myCallback;
    // Resolved/created in doOKAction(); null until then or on failure.
    private PsiDirectory myTargetDirectory;
    private JCheckBox myCbSearchForReferences;
    private JCheckBox myOpenInEditorCb;

    /**
     * @param project  current project
     * @param callback invoked when the user confirms and the target directory exists
     */
    public MoveFilesOrDirectoriesDialog(Project project, Callback callback) {
        super(project, true);
        myProject = project;
        myCallback = callback;
        setTitle(RefactoringBundle.message("move.title"));
        init();
    }

    @Override
    @NotNull
    protected Action[] createActions() {
        return new Action[]{getOKAction(), getCancelAction(), getHelpAction()};
    }

    @Override
    public JComponent getPreferredFocusedComponent() {
        return myTargetDirectoryField.getChildComponent();
    }

    // All UI lives in the north panel; there is no center panel.
    @Override
    protected JComponent createCenterPanel() {
        return null;
    }

    /**
     * Builds the dialog UI: the element-name label, the target-directory field
     * (with recents history, folder chooser and path completion) and the two
     * option checkboxes.
     */
    @Override
    protected JComponent createNorthPanel() {
        myNameLabel = JBLabelDecorator.createJBLabelDecorator().setBold(true);
        myTargetDirectoryField = new TextFieldWithHistoryWithBrowseButton();
        final List<String> recentEntries = RecentsManager.getInstance(myProject).getRecentEntries(RECENT_KEYS);
        if (recentEntries != null) {
            myTargetDirectoryField.getChildComponent().setHistory(recentEntries);
        }
        final FileChooserDescriptor descriptor = FileChooserDescriptorFactory.createSingleFolderDescriptor();
        myTargetDirectoryField.addBrowseFolderListener(RefactoringBundle.message("select.target.directory"),
                                                       RefactoringBundle.message("the.file.will.be.moved.to.this.directory"),
                                                       myProject,
                                                       descriptor,
                                                       TextComponentAccessor.TEXT_FIELD_WITH_HISTORY_WHOLE_TEXT);
        final JTextField textField = myTargetDirectoryField.getChildComponent().getTextEditor();
        FileChooserFactory.getInstance().installFileCompletion(textField, descriptor, true, getDisposable());
        // OK is only enabled while the directory field is non-empty.
        textField.getDocument().addDocumentListener(new DocumentAdapter() {
            @Override
            protected void textChanged(DocumentEvent e) {
                validateOKButton();
            }
        });
        myTargetDirectoryField.setTextFieldPreferredWidth(CopyFilesOrDirectoriesDialog.MAX_PATH_LENGTH);
        Disposer.register(getDisposable(), myTargetDirectoryField);
        String shortcutText = KeymapUtil.getFirstKeyboardShortcutText(ActionManager.getInstance().getAction(IdeActions.ACTION_CODE_COMPLETION));
        myCbSearchForReferences = new NonFocusableCheckBox(RefactoringBundle.message("search.for.references"));
        myCbSearchForReferences.setSelected(RefactoringSettings.getInstance().MOVE_SEARCH_FOR_REFERENCES_FOR_FILE);
        myOpenInEditorCb = new NonFocusableCheckBox("Open moved files in editor");
        myOpenInEditorCb.setSelected(isOpenInEditor());
        return FormBuilder.createFormBuilder().addComponent(myNameLabel)
                .addLabeledComponent(RefactoringBundle.message("move.files.to.directory.label"), myTargetDirectoryField, UIUtil.LARGE_VGAP)
                .addTooltip(RefactoringBundle.message("path.completion.shortcut", shortcutText))
                .addComponentToRightColumn(myCbSearchForReferences, UIUtil.LARGE_VGAP)
                .addComponentToRightColumn(myOpenInEditorCb, UIUtil.LARGE_VGAP)
                .getPanel();
    }

    /**
     * Fills the dialog with the elements being moved: picks a singular or
     * plural label, pre-fills the target directory and stores the help topic.
     *
     * @param psiElements            files and/or directories to move
     * @param initialTargetDirectory initial target, or null for an empty field
     * @param helpID                 help topic shown by the Help button
     */
    public void setData(PsiElement[] psiElements, PsiDirectory initialTargetDirectory, @NonNls String helpID) {
        if (psiElements.length == 1) {
            String text;
            if (psiElements[0] instanceof PsiFile) {
                text = RefactoringBundle.message("move.file.0",
                                                 CopyFilesOrDirectoriesDialog.shortenPath(((PsiFile)psiElements[0]).getVirtualFile()));
            }
            else {
                text = RefactoringBundle.message("move.directory.0",
                                                 CopyFilesOrDirectoriesDialog.shortenPath(((PsiDirectory)psiElements[0]).getVirtualFile()));
            }
            myNameLabel.setText(text);
        }
        else {
            // Mixed selection falls through to the generic "elements" label.
            boolean isFile = true;
            boolean isDirectory = true;
            for (PsiElement psiElement : psiElements) {
                isFile &= psiElement instanceof PsiFile;
                isDirectory &= psiElement instanceof PsiDirectory;
            }
            myNameLabel.setText(isFile ?
                                RefactoringBundle.message("move.specified.files") :
                                isDirectory ?
                                RefactoringBundle.message("move.specified.directories") :
                                RefactoringBundle.message("move.specified.elements"));
        }
        myTargetDirectoryField.getChildComponent()
                .setText(initialTargetDirectory == null ? "" : initialTargetDirectory.getVirtualFile().getPresentableUrl());
        validateOKButton();
        myHelpID = helpID;
    }

    @Override
    protected void doHelpAction() {
        HelpManager.getInstance().invokeHelp(myHelpID);
    }

    /**
     * @return whether moved files should be opened in the editor; always false
     * in unit-test mode, otherwise read from the persisted property (default true)
     */
    public static boolean isOpenInEditor() {
        if (ApplicationManager.getApplication().isUnitTestMode()) {
            return false;
        }
        return PropertiesComponent.getInstance().getBoolean(MOVE_FILES_OPEN_IN_EDITOR, true);
    }

    // OK is enabled only when a target directory path has been entered.
    private void validateOKButton() {
        setOKActionEnabled(myTargetDirectoryField.getChildComponent().getText().length() > 0);
    }

    /**
     * Persists the dialog options, refuses to run during indexing, then —
     * inside a command — creates the target directory in a write action and
     * invokes the callback. Note: the dialog is intentionally NOT closed here;
     * the callback is responsible for that.
     */
    @Override
    protected void doOKAction() {
        PropertiesComponent.getInstance().setValue(MOVE_FILES_OPEN_IN_EDITOR, String.valueOf(myOpenInEditorCb.isSelected()));
        //myTargetDirectoryField.getChildComponent().addCurrentTextToHistory();
        RecentsManager.getInstance(myProject).registerRecentEntry(RECENT_KEYS, myTargetDirectoryField.getChildComponent().getText());
        RefactoringSettings.getInstance().MOVE_SEARCH_FOR_REFERENCES_FOR_FILE = myCbSearchForReferences.isSelected();
        if (DumbService.isDumb(myProject)) {
            Messages.showMessageDialog(myProject, "Move refactoring is not available while indexing is in progress", "Indexing", null);
            return;
        }
        CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
            @Override
            public void run() {
                final Runnable action = new Runnable() {
                    @Override
                    public void run() {
                        // Normalize separators, then create the directory chain if needed.
                        String directoryName = myTargetDirectoryField.getChildComponent().getText().replace(File.separatorChar, '/');
                        try {
                            myTargetDirectory = DirectoryUtil.mkdirs(PsiManager.getInstance(myProject), directoryName);
                        }
                        catch (IncorrectOperationException e) {
                            // ignore — a null myTargetDirectory is reported below
                        }
                    }
                };
                ApplicationManager.getApplication().runWriteAction(action);
                if (myTargetDirectory == null) {
                    CommonRefactoringUtil.showErrorMessage(getTitle(),
                                                           RefactoringBundle.message("cannot.create.directory"), myHelpID, myProject);
                    return;
                }
                myCallback.run(MoveFilesOrDirectoriesDialog.this);
            }
        }, RefactoringBundle.message("move.title"), null);
    }

    /** @return the directory resolved in doOKAction(), or null if not yet resolved */
    public PsiDirectory getTargetDirectory() {
        return myTargetDirectory;
    }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.7
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2017.07.24 at 05:36:05 PM EEST
//
package lt.registrucentras.esaskaita.service.invoice.ubl.cac;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import com.google.common.base.Objects;
import lt.registrucentras.esaskaita.service.invoice.ubl.cbc.EstimatedConsumedQuantityType;
import lt.registrucentras.esaskaita.service.invoice.ubl.cbc.NoteType;
/**
*
* <pre>
* <?xml version="1.0" encoding="UTF-8"?><ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"><ccts:ComponentType>ABIE</ccts:ComponentType><ccts:DictionaryEntryName>On Account Payment. Details</ccts:DictionaryEntryName><ccts:Definition>A scheduled prepayment (on-account payment) for a estimated utility consumption
* </ccts:Definition><ccts:ObjectClass>On Account Payment</ccts:ObjectClass></ccts:Component>
* </pre>
*
*
* <p>Java class for OnAccountPaymentType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="OnAccountPaymentType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2}EstimatedConsumedQuantity"/>
* <element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2}Note" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2}PaymentTerms" maxOccurs="unbounded"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "OnAccountPaymentType", propOrder = {
    "estimatedConsumedQuantity",
    "note",
    "paymentTerms"
})
public class OnAccountPaymentType implements Serializable
{
    // Serialization version for this JAXB-generated class.
    private final static long serialVersionUID = 1L;
    // Required estimated consumed quantity covered by this on-account payment (cbc namespace).
    @XmlElement(name = "EstimatedConsumedQuantity", namespace = "urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2", required = true)
    protected EstimatedConsumedQuantityType estimatedConsumedQuantity;
    // Optional free-form notes (0..n, cbc namespace); lazily created by getNote().
    @XmlElement(name = "Note", namespace = "urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2")
    protected List<NoteType> note;
    // Payment terms associated with this payment (1..n per schema); lazily created by getPaymentTerms().
    @XmlElement(name = "PaymentTerms", required = true)
    protected List<PaymentTermsType> paymentTerms;
    /**
     * Default no-arg constructor
     *
     */
    public OnAccountPaymentType() {
        super();
    }
    /**
     * Fully-initialising value constructor
     *
     */
    public OnAccountPaymentType(final EstimatedConsumedQuantityType estimatedConsumedQuantity, final List<NoteType> note, final List<PaymentTermsType> paymentTerms) {
        // NOTE(review): the list arguments are stored by reference, not copied —
        // later mutation of the caller's lists is visible through this object.
        this.estimatedConsumedQuantity = estimatedConsumedQuantity;
        this.note = note;
        this.paymentTerms = paymentTerms;
    }
    /**
     *
     * <pre>
     * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;BBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;On Account Payment. Estimated_ Consumed Quantity. Quantity
     * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;The estimated consumed quantity covered by the payment.&lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;1&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;On Account Payment&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTermQualifier&gt;Estimated&lt;/ccts:PropertyTermQualifier&gt;&lt;ccts:PropertyTerm&gt;Consumed Quantity&lt;/ccts:PropertyTerm&gt;&lt;ccts:RepresentationTerm&gt;Quantity&lt;/ccts:RepresentationTerm&gt;&lt;ccts:DataType&gt;Quantity. Type&lt;/ccts:DataType&gt;&lt;/ccts:Component&gt;
     * </pre>
     *
     *
     * @return
     *     possible object is
     *     {@link EstimatedConsumedQuantityType }
     *
     */
    public EstimatedConsumedQuantityType getEstimatedConsumedQuantity() {
        return estimatedConsumedQuantity;
    }
    /**
     * Sets the value of the estimatedConsumedQuantity property.
     *
     * @param value
     *     allowed object is
     *     {@link EstimatedConsumedQuantityType }
     *
     */
    public void setEstimatedConsumedQuantity(EstimatedConsumedQuantityType value) {
        this.estimatedConsumedQuantity = value;
    }
    /**
     *
     * <pre>
     * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;BBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;On Account Payment. Note. Text&lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;Free-form text conveying information that is not contained explicitly in
     * other structures.
     * &lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..n&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;On Account Payment&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTerm&gt;Note&lt;/ccts:PropertyTerm&gt;&lt;ccts:RepresentationTerm&gt;Text&lt;/ccts:RepresentationTerm&gt;&lt;ccts:DataType&gt;Text. Type&lt;/ccts:DataType&gt;&lt;ccts:Examples&gt;We make a reservation for price regulations. You will receive you next yearly
     * statement about one year from today.
     * &lt;/ccts:Examples&gt;&lt;/ccts:Component&gt;
     * </pre>
     * Gets the value of the note property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the note property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getNote().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link NoteType }
     *
     *
     */
    public List<NoteType> getNote() {
        // Lazy initialisation: JAXB relies on this live-list accessor pattern.
        if (note == null) {
            note = new ArrayList<NoteType>();
        }
        return this.note;
    }
    /**
     *
     * <pre>
     * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;ASBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;On Account Payment. Payment Terms&lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;A specification of payment terms associated with this payment.
     * &lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;1..n&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;On Account Payment&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTerm&gt;Payment Terms&lt;/ccts:PropertyTerm&gt;&lt;ccts:AssociatedObjectClass&gt;Payment Terms&lt;/ccts:AssociatedObjectClass&gt;&lt;ccts:RepresentationTerm&gt;Payment Terms&lt;/ccts:RepresentationTerm&gt;&lt;/ccts:Component&gt;
     * </pre>
     * Gets the value of the paymentTerms property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the paymentTerms property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getPaymentTerms().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link PaymentTermsType }
     *
     *
     */
    public List<PaymentTermsType> getPaymentTerms() {
        // Lazy initialisation: JAXB relies on this live-list accessor pattern.
        if (paymentTerms == null) {
            paymentTerms = new ArrayList<PaymentTermsType>();
        }
        return this.paymentTerms;
    }
    /**
     * Fluent setter for {@link #setEstimatedConsumedQuantity(EstimatedConsumedQuantityType)}.
     *
     * @return this instance, for method chaining
     */
    public OnAccountPaymentType withEstimatedConsumedQuantity(EstimatedConsumedQuantityType value) {
        setEstimatedConsumedQuantity(value);
        return this;
    }
    /**
     * Fluent appender: adds the given notes (if any) to the live note list.
     *
     * @return this instance, for method chaining
     */
    public OnAccountPaymentType withNote(NoteType... values) {
        if (values!= null) {
            for (NoteType value: values) {
                getNote().add(value);
            }
        }
        return this;
    }
    /**
     * Fluent appender: adds all notes from the given collection (if non-null).
     *
     * @return this instance, for method chaining
     */
    public OnAccountPaymentType withNote(Collection<NoteType> values) {
        if (values!= null) {
            getNote().addAll(values);
        }
        return this;
    }
    /**
     * Fluent appender: adds the given payment terms (if any) to the live list.
     *
     * @return this instance, for method chaining
     */
    public OnAccountPaymentType withPaymentTerms(PaymentTermsType... values) {
        if (values!= null) {
            for (PaymentTermsType value: values) {
                getPaymentTerms().add(value);
            }
        }
        return this;
    }
    /**
     * Fluent appender: adds all payment terms from the given collection (if non-null).
     *
     * @return this instance, for method chaining
     */
    public OnAccountPaymentType withPaymentTerms(Collection<PaymentTermsType> values) {
        if (values!= null) {
            getPaymentTerms().addAll(values);
        }
        return this;
    }
    // NOTE(review): "Objects" below is presumably Guava's com.google.common.base.Objects
    // (toStringHelper/hashCode/equal helpers) — confirm against the file's imports.
    @Override
    public String toString() {
        return Objects.toStringHelper(this).add("estimatedConsumedQuantity", estimatedConsumedQuantity).add("note", note).add("paymentTerms", paymentTerms).toString();
    }
    @Override
    public int hashCode() {
        // Hash over the same three fields compared in equals(), keeping the contract consistent.
        return Objects.hashCode(estimatedConsumedQuantity, note, paymentTerms);
    }
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other == null) {
            return false;
        }
        // Exact-class comparison (not instanceof): subclasses never compare equal.
        if (getClass()!= other.getClass()) {
            return false;
        }
        final OnAccountPaymentType o = ((OnAccountPaymentType) other);
        return ((Objects.equal(estimatedConsumedQuantity, o.estimatedConsumedQuantity)&&Objects.equal(note, o.note))&&Objects.equal(paymentTerms, o.paymentTerms));
    }
}
| |
package authoring_environment;
import gamedata.gamecomponents.Patch;
import gamedata.gamecomponents.Piece;
import java.util.HashMap;
import java.util.Map;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.Separator;
import javafx.scene.control.SingleSelectionModel;
import javafx.scene.control.Tab;
import javafx.scene.control.TabPane;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import authoring.concretefeatures.TerrainEntry;
import authoring.concretefeatures.UnitEntry;
import authoring.data.PatchTypeData;
import authoring.data.PieceTypeData;
/**
* @author VOOGirls Generation
*
* GUI components for the library displayed on the left side of the
* game authoring environment, which contains all instantiated units
* and terrain. From here, the user can open the Unit/TerrainEditors
* to edit the units and terrain, as well as select them for
* placement on the grid.
*/
public class LibraryView extends TabPane {
    // Preferred on-screen size of the library pane, in pixels. The original code
    // declared HEIGHT = 300 / WIDTH = 580 but passed them to
    // Region.setPrefSize(width, height) in (HEIGHT, WIDTH) order; the constants
    // are renamed here so the call reads correctly while the rendered geometry
    // (300 wide x 580 tall) is unchanged.
    private static final int WIDTH = 300;
    private static final int HEIGHT = 580;
    // Tab titles and button/section labels.
    private static final String UNITS = "Units";
    private static final String TERRAIN = "Terrain";
    private static final String DELETE = "Delete";
    private static final String EDIT = "Edit";
    private static final String GLOBAL = "Global Commands";
    private static final String PIECES = "Piece Templates";
    private static final String PATCHES = "Patch Templates";
    private PieceTypeData myPieces;
    private PatchTypeData myPatches;
    private SuperGrid myGrid;
    // Tab title -> content column / tab, so entries can be added per category.
    private Map<String, VBox> myLibraryMap;
    private Map<String, Tab> myTabMap;
    private SingleSelectionModel<Tab> mySelection;
    private Piece currentUnit;
    private Patch currentTerrain;
    // Interaction mode for grid clicks: doNothing suppresses all handling,
    // reset removes from the clicked tile, edit edits in place; when all three
    // are false a click places the currently selected template.
    private boolean doNothing;
    private boolean reset;
    private boolean edit;
    private int unitID;
    private int terrainID;
    /**
     * LibraryView constructor. Initializes two tabs - one for units,
     * one for terrain. Units and terrain are added dynamically to
     * their respective tabs as they are created in the UnitCreator
     * and TerrainCreator.
     *
     * @param pieceData backing store for piece (unit) templates
     * @param patchData backing store for patch (terrain) templates
     */
    public LibraryView (PieceTypeData pieceData, PatchTypeData patchData) {
        mySelection = this.getSelectionModel();
        this.setPrefSize(WIDTH, HEIGHT);
        myPieces = pieceData;
        myPatches = patchData;
        doNothing = true;
        reset = true;
        edit = false;
        unitID = 0;
        terrainID = 0;
        myLibraryMap = new HashMap<String, VBox>();
        myTabMap = new HashMap<String, Tab>();
        // Both tabs share an identical layout; build each via one helper.
        Tab unitTab = createLibraryTab(UNITS, PIECES);
        Tab terrainTab = createLibraryTab(TERRAIN, PATCHES);
        this.getTabs().addAll(unitTab, terrainTab);
        // ESC cancels whatever placement/edit/delete mode is active.
        this.setOnKeyPressed(new EventHandler<KeyEvent>() {
            @Override
            public void handle (KeyEvent e) {
                if (e.getCode() == KeyCode.ESCAPE) {
                    doNothing = true;
                }
            }
        });
    }
    /**
     * Builds one library tab (non-closable) containing the global Edit/Delete
     * buttons, a separator, and a templates section, then registers its content
     * column and tab under the given title in the lookup maps.
     *
     * @param title tab title, used as the key in both lookup maps
     * @param templatesLabel heading shown above the template entries
     * @return the fully assembled tab
     */
    private Tab createLibraryTab (String title, String templatesLabel) {
        Tab tab = new Tab(title);
        tab.setClosable(false);
        VBox library = new VBox();
        library.setPadding(UIspecs.allPadding);
        library.setSpacing(5);
        Button deleteButton = new Button(DELETE);
        Button editButton = new Button(EDIT);
        // Delete arms "remove on click"; Edit arms "edit on click".
        deleteButton.setOnAction(e -> setMode(false, true, false));
        editButton.setOnAction(e -> setMode(false, false, true));
        HBox global = new HBox(editButton, deleteButton);
        global.setPadding(UIspecs.allPadding);
        global.setSpacing(5);
        library.getChildren().addAll(new Label(GLOBAL), global,
                                     new Separator(), new Label(templatesLabel));
        ScrollPane content = new ScrollPane();
        content.setContent(library);
        tab.setContent(content);
        myLibraryMap.put(title, library);
        myTabMap.put(title, tab);
        return tab;
    }
    /**
     * Sets the three interaction-mode flags in one place.
     *
     * @param nothingMode whether grid clicks should be ignored entirely
     * @param removeMode whether a click removes from the clicked tile
     * @param editMode whether a click edits the clicked tile in place
     */
    private void setMode (boolean nothingMode, boolean removeMode, boolean editMode) {
        doNothing = nothingMode;
        reset = removeMode;
        edit = editMode;
    }
    /**
     * Binds this library to the active grid and wires up click/drag handling.
     *
     * @param activeGrid the grid that placement/edit/delete actions target
     */
    public void associateGrid (SuperGrid activeGrid) {
        myGrid = activeGrid;
        setGridActionEvents();
    }
    /**
     * @return the next unique unit id (increments on every call)
     */
    public int getUnitID () {
        unitID += 1;
        return unitID;
    }
    /**
     * @return the next unique terrain id (increments on every call)
     */
    public int getTerrainID () {
        terrainID += 1;
        return terrainID;
    }
    /**
     * Selects a unit template for placement: subsequent grid clicks add copies.
     *
     * @param unit the piece template to place
     */
    public void selectUnit (Piece unit) {
        currentUnit = unit;
        setMode(false, false, false);
    }
    /**
     * Selects a terrain template for placement: subsequent grid clicks add copies.
     *
     * @param terrain the patch template to place
     */
    public void selectTerrain (Patch terrain) {
        currentTerrain = terrain;
        setMode(false, false, false);
    }
    /** Routes both clicks and drags on the grid through {@link #handleAction}. */
    private void setGridActionEvents () {
        myGrid.setOnMouseClicked(event -> handleAction(event));
        myGrid.setOnMouseDragged(event -> handleAction(event));
    }
    /**
     * Applies the current mode (place / edit / remove) to the tile under the
     * mouse. The unit tab (index 0) operates on pieces; the terrain tab
     * operates on patches. A fresh copy of the selected template is built for
     * every action so placed objects never share state with the template.
     *
     * @param event the mouse click or drag on the grid
     */
    protected void handleAction (MouseEvent event) {
        SandyTile tile = myGrid.findTile(event);
        if (doNothing || tile == null) { return; }
        if (mySelection.isSelected(0)) {
            if (currentUnit != null) {
                Piece unit = new Piece(currentUnit);
                if (reset) {
                    myGrid.removeUnit(tile, unit);
                }
                else if (edit) {
                    myGrid.editUnit(tile, unit);
                }
                else {
                    myGrid.addUnit(tile, unit);
                }
            }
        }
        else {
            if (currentTerrain != null) {
                Patch terrain = new Patch(currentTerrain);
                if (reset) {
                    myGrid.removeTerrain(tile, terrain);
                }
                else if (edit) {
                    myGrid.editTerrain(tile, terrain);
                }
                else {
                    myGrid.addTerrain(tile, terrain);
                }
            }
        }
    }
    /**
     * Adds a unit entry to the unit tab (bringing it to the front) and records
     * its template in the piece data store.
     *
     * @param unit the library entry to add
     */
    public void addPiece (UnitEntry unit) {
        mySelection.select(myTabMap.get(UNITS));
        myLibraryMap.get(UNITS).getChildren().add(unit);
        myPieces.add(unit.getUnit());
    }
    /**
     * Adds a terrain entry to the terrain tab (bringing it to the front) and
     * records its template in the patch data store.
     *
     * @param terrain the library entry to add
     */
    public void addPatch (TerrainEntry terrain) {
        mySelection.select(myTabMap.get(TERRAIN));
        myLibraryMap.get(TERRAIN).getChildren().add(terrain);
        myPatches.add(terrain.getTerrain());
    }
    /**
     * Removes a unit entry from the library, clears all its placed instances
     * from the grid, and deletes its template from the data store.
     *
     * @param unit the library entry to remove
     */
    public void removePiece (UnitEntry unit) {
        myLibraryMap.get(UNITS).getChildren().remove(unit);
        myGrid.removePieces(unit.getUnit());
        myPieces.remove(unit.getUnit());
        doNothing = true;
    }
    /**
     * Removes a terrain entry from the library, clears all its placed instances
     * from the grid, and deletes its template from the data store.
     *
     * @param terrain the library entry to remove
     */
    public void removePatch (TerrainEntry terrain) {
        myLibraryMap.get(TERRAIN).getChildren().remove(terrain);
        myGrid.removePatches(terrain.getTerrain());
        myPatches.remove(terrain.getTerrain());
        doNothing = true;
    }
}
| |
package org.basex.query.value.type;
import static org.basex.query.QueryError.*;
import static org.basex.query.QueryError.normalize;
import static org.basex.query.QueryText.*;
import static org.basex.util.Token.*;
import static org.basex.util.Token.normalize;
import java.math.*;
import java.util.*;
import java.util.regex.*;
import javax.xml.namespace.*;
import org.basex.query.*;
import org.basex.query.util.*;
import org.basex.query.value.*;
import org.basex.query.value.item.*;
import org.basex.util.*;
/**
* XQuery atomic types.
*
* @author BaseX Team 2005-20, BSD License
* @author Christian Gruen
*/
public enum AtomType implements Type {
/** Item type. */
ITEM("item", null, EMPTY, false, false, false, false, Type.ID.ITEM),
/** Untyped type. */
UTY("untyped", null, XS_URI, false, false, false, false, Type.ID.UTY),
/** Any type. */
ATY("anyType", null, XS_URI, false, false, false, false, Type.ID.ATY),
/** Any simple type. */
AST("anySimpleType", null, XS_URI, false, false, false, false, Type.ID.AST),
/** Any atomic type. */
AAT("anyAtomicType", ITEM, XS_URI, false, false, false, false, Type.ID.AAT) {
@Override
public Atm cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Atm(item.string(ii));
}
@Override
public Atm cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) {
return new Atm(value.toString());
}
},
/** Untyped Atomic type. */
ATM("untypedAtomic", AAT, XS_URI, false, true, false, true, Type.ID.ATM) {
@Override
public Atm cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Atm(item.string(ii));
}
@Override
public Atm cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) {
return new Atm(value.toString());
}
},
/** String type. */
STR("string", AAT, XS_URI, false, false, true, true, Type.ID.STR) {
@Override
public Str cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return Str.get(item.string(ii));
}
@Override
public Str cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return Str.get(value, qc, ii);
}
},
/** Normalized String type. */
NST("normalizedString", STR, XS_URI, false, false, true, true, Type.ID.NST) {
@Override
public Str cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
final byte[] str = item.string(ii);
final int sl = str.length;
for(int s = 0; s < sl; s++) {
final byte b = str[s];
if(b == '\t' || b == '\r' || b == '\n') str[s] = ' ';
}
return new Str(str, this);
}
@Override
public Str cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Token type. */
TOK("token", NST, XS_URI, false, false, true, true, Type.ID.TOK) {
@Override
public Str cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Str(normalize(item.string(ii)), this);
}
@Override
public Str cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Language type. */
LAN("language", TOK, XS_URI, false, false, true, true, Type.ID.LAN) {
@Override
public Str cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
final byte[] v = normalize(item.string(ii));
if(!LANGPATTERN.matcher(Token.string(v)).matches()) throw castError(item, ii);
return new Str(v, this);
}
@Override
public Str cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** NMTOKEN type. */
NMT("NMTOKEN", TOK, XS_URI, false, false, true, true, Type.ID.NMT) {
@Override
public Str cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
final byte[] v = normalize(item.string(ii));
if(!XMLToken.isNMToken(v)) throw castError(item, ii);
return new Str(v, this);
}
@Override
public Str cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Name type. */
NAM("Name", TOK, XS_URI, false, false, true, true, Type.ID.NAM) {
@Override
public Str cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
final byte[] v = normalize(item.string(ii));
if(!XMLToken.isName(v)) throw castError(item, ii);
return new Str(v, this);
}
@Override
public Str cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** NCName type. */
NCN("NCName", NAM, XS_URI, false, false, true, true, Type.ID.NCN) {
@Override
public Str cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Str(checkName(item, ii), this);
}
@Override
public Str cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** ID type. */
ID("ID", NCN, XS_URI, false, false, true, true, Type.ID.ID) {
@Override
public Str cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Str(checkName(item, ii), this);
}
@Override
public Str cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** IDREF type. */
IDR("IDREF", NCN, XS_URI, false, false, true, true, Type.ID.IDR) {
@Override
public Str cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Str(checkName(item, ii), this);
}
@Override
public Str cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Entity type. */
ENT("ENTITY", NCN, XS_URI, false, false, true, true, Type.ID.ENT) {
@Override
public Str cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Str(checkName(item, ii), this);
}
@Override
public Str cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Numeric type. */
NUM("numeric", AAT, XS_URI, true, false, false, true, Type.ID.NUM) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// return double
return item.type.isNumber() ? item : Dbl.get(checkNum(item, ii).dbl(ii));
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// return double
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Float type. */
FLT("float", NUM, XS_URI, true, false, false, true, Type.ID.FLT) {
@Override
public Flt cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return Flt.get(checkNum(item, ii).flt(ii));
}
@Override
public Flt cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Double type. */
DBL("double", NUM, XS_URI, true, false, false, true, Type.ID.DBL) {
@Override
public Dbl cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return Dbl.get(checkNum(item, ii).dbl(ii));
}
@Override
public Dbl cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Decimal type. */
DEC("decimal", NUM, XS_URI, true, false, false, true, Type.ID.DEC) {
@Override
public Dec cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return Dec.get(checkNum(item, ii).dec(ii));
}
@Override
public Dec cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) {
return Dec.get(new BigDecimal(value.toString()));
}
},
/** Precision decimal type. */
PDC("precisionDecimal", null, XS_URI, true, false, false, true, Type.ID.PDC),
/** Integer type. */
ITR("integer", DEC, XS_URI, true, false, false, true, Type.ID.ITR) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return Int.get(checkLong(value, 0, 0, ii));
}
},
/** Non-positive integer type. */
NPI("nonPositiveInteger", ITR, XS_URI, true, false, false, true, Type.ID.NPI) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Int(checkLong(value, Long.MIN_VALUE, 0, ii), this);
}
},
/** Negative integer type. */
NIN("negativeInteger", NPI, XS_URI, true, false, false, true, Type.ID.NIN) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Int(checkLong(value, Long.MIN_VALUE, -1, ii), this);
}
},
/** Long type. */
LNG("long", ITR, XS_URI, true, false, false, true, Type.ID.LNG) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Int(checkLong(value, 0, 0, ii), this);
}
},
/** Int type. */
INT("int", LNG, XS_URI, true, false, false, true, Type.ID.INT) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Int(checkLong(value, -0x80000000, 0x7FFFFFFF, ii), this);
}
},
/** Short type. */
SHR("short", INT, XS_URI, true, false, false, true, Type.ID.SHR) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Int(checkLong(value, -0x8000, 0x7FFF, ii), this);
}
},
/** Byte type. */
BYT("byte", SHR, XS_URI, true, false, false, true, Type.ID.BYT) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Int(checkLong(value, -0x80, 0x7F, ii), this);
}
},
/** Non-negative integer type. */
NNI("nonNegativeInteger", ITR, XS_URI, true, false, false, true, Type.ID.NNI) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Int(checkLong(value, 0, Long.MAX_VALUE, ii), this);
}
},
/** Unsigned long type. */
ULN("unsignedLong", NNI, XS_URI, true, false, false, true, Type.ID.ULN) {
@Override
public Uln cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Uln cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
final Item item = checkNum(value, ii);
final BigDecimal v = item.dec(ii), i = v.setScale(0, RoundingMode.DOWN);
// equals() used to also test fractional digits
if(v.signum() < 0 || v.compareTo(Uln.MAXULN) > 0 ||
item.type.isStringOrUntyped() && !v.equals(i)) throw castError(item, ii);
return Uln.get(i.toBigInteger());
}
},
/** Short type. */
UIN("unsignedInt", ULN, XS_URI, true, false, false, true, Type.ID.UIN) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Int(checkLong(value, 0, 0xFFFFFFFFL, ii), this);
}
},
/** Unsigned Short type. */
USH("unsignedShort", UIN, XS_URI, true, false, false, true, Type.ID.USH) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Int(checkLong(value, 0, 0xFFFF, ii), this);
}
},
/** Unsigned byte type. */
UBY("unsignedByte", USH, XS_URI, true, false, false, true, Type.ID.UBY) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Int(checkLong(value, 0, 0xFF, ii), this);
}
},
/** Positive integer type. */
PIN("positiveInteger", NNI, XS_URI, true, false, false, true, Type.ID.PIN) {
@Override
public Int cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast((Object) item, qc, sc, ii);
}
@Override
public Int cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Int(checkLong(value, 1, Long.MAX_VALUE, ii), this);
}
},
/** Duration type. */
DUR("duration", AAT, XS_URI, false, false, false, false, Type.ID.DUR) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
if(item instanceof Dur) return new Dur((Dur) item);
if(str(item)) return new Dur(item.string(ii), ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Year month duration type. */
YMD("yearMonthDuration", DUR, XS_URI, false, false, false, true, Type.ID.YMD) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
if(item instanceof Dur) return new YMDur((Dur) item);
if(str(item)) return new YMDur(item.string(ii), ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Day time duration type. */
DTD("dayTimeDuration", DUR, XS_URI, false, false, false, true, Type.ID.DTD) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
if(item instanceof Dur) return new DTDur((Dur) item);
if(str(item)) return new DTDur(item.string(ii), ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** DateTime type. */
DTM("dateTime", AAT, XS_URI, false, false, false, true, Type.ID.DTM) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// xs:date can be widened to xs:dateTime; strings/untyped values are parsed
if(item.type == DAT) return new Dtm((ADate) item);
if(str(item)) return new Dtm(item.string(ii), ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// Java objects are cast via their string representation
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** DateTimeStamp type (no cast overrides; note: no parent type is assigned). */
DTS("dateTimeStamp", null, XS_URI, false, false, false, true, Type.ID.DTS),
/** Date type. */
DAT("date", AAT, XS_URI, false, false, false, true, Type.ID.DAT) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// xs:dateTime is narrowed to xs:date; strings/untyped values are parsed
if(item.type == DTM) return new Dat((ADate) item);
if(str(item)) return new Dat(item.string(ii), ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Time type. */
TIM("time", AAT, XS_URI, false, false, false, true, Type.ID.TIM) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// the time component of an xs:dateTime is extracted; strings/untyped values are parsed
if(item.type == DTM) return new Tim((ADate) item);
if(str(item)) return new Tim(item.string(ii), ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
// The five Gregorian types below share the same casting logic: the relevant
// components are extracted from an xs:dateTime or xs:date value, or parsed
// from a string/untyped value; everything else is rejected.
/** Year month type. */
YMO("gYearMonth", AAT, XS_URI, false, false, false, false, Type.ID.YMO) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
if(item.type == DTM || item.type == DAT) return new GDt((ADate) item, this);
if(str(item)) return new GDt(item.string(ii), this, ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Year type. */
YEA("gYear", AAT, XS_URI, false, false, false, false, Type.ID.YEA) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
if(item.type == DTM || item.type == DAT) return new GDt((ADate) item, this);
if(str(item)) return new GDt(item.string(ii), this, ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Month day type. */
MDA("gMonthDay", AAT, XS_URI, false, false, false, false, Type.ID.MDA) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
if(item.type == DTM || item.type == DAT) return new GDt((ADate) item, this);
if(str(item)) return new GDt(item.string(ii), this, ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Day type. */
DAY("gDay", AAT, XS_URI, false, false, false, false, Type.ID.DAY) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
if(item.type == DTM || item.type == DAT) return new GDt((ADate) item, this);
if(str(item)) return new GDt(item.string(ii), this, ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Month type. */
MON("gMonth", AAT, XS_URI, false, false, false, false, Type.ID.MON) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
if(item.type == DTM || item.type == DAT) return new GDt((ADate) item, this);
if(str(item)) return new GDt(item.string(ii), this, ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(Str.get(value, qc, ii), qc, sc, ii);
}
},
/** Boolean type. */
BLN("boolean", AAT, XS_URI, false, false, false, true, Type.ID.BLN) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// numbers are converted via their effective boolean value; strings are parsed
if(item instanceof ANum) return Bln.get(item.bool(ii));
if(str(item)) return Bln.get(Bln.parse(item, ii));
throw typeError(item, this, ii);
}
@Override
public Bln cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) {
return Bln.get(value instanceof Boolean ? (Boolean) value :
Boolean.parseBoolean(value.toString()));
}
},
/** Implementation specific: binary type (abstract parent of base64/hex binaries). */
BIN("binary", AAT, BASEX_URI, false, false, false, true, Type.ID.BIN),
/** Base64 binary type. */
B64("base64Binary", BIN, XS_URI, false, false, false, true, Type.ID.B64) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// other binary items are re-encoded; strings/untyped values are decoded
if(item instanceof Bin) return org.basex.query.value.item.B64.get((Bin) item, ii);
if(str(item)) return org.basex.query.value.item.B64.get(item.string(ii), ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// byte arrays are adopted as-is; other objects are decoded from their string form
return value instanceof byte[] ? org.basex.query.value.item.B64.get((byte[]) value) :
org.basex.query.value.item.B64.get(token(value.toString()), ii);
}
},
/** Hex binary type. */
HEX("hexBinary", BIN, XS_URI, false, false, false, true, Type.ID.HEX) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
if(item instanceof Bin) return new Hex((Bin) item, ii);
if(str(item)) return new Hex(item.string(ii), ii);
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return new Hex(value instanceof byte[] ? (byte[]) value : token(value.toString()), ii);
}
},
/** Any URI type. */
URI("anyURI", AAT, XS_URI, false, false, true, true, Type.ID.URI) {
@Override
public Uri cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// only string-typed or untyped values can be cast; the URI must be syntactically valid
if(!item.type.isStringOrUntyped()) throw typeError(item, this, ii);
final Uri u = Uri.uri(item.string(ii));
if(!u.isValid()) throw castError(item, ii);
return u;
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) {
// Java objects are not validated here
return Uri.uri(value.toString());
}
},
/** QName Type. */
QNM("QName", AAT, XS_URI, false, false, false, false, Type.ID.QNM) {
@Override
public QNm cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// per XQuery, only xs:string and untyped values can be cast to xs:QName
if(item.type != STR && !item.type.isUntyped()) throw typeError(item, this, ii);
final byte[] nm = trim(item.string(ii));
if(!XMLToken.isQName(nm)) throw castError(item, ii);
// the prefix (if any) must be bound in the static context
final QNm qn = new QNm(nm, sc);
if(!qn.hasURI() && qn.hasPrefix()) throw NSDECL_X.get(ii, qn.prefix());
return qn;
}
@Override
public QNm cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) {
return value instanceof QName ? new QNm((QName) value) : new QNm(value.toString());
}
},
/** NOTATION Type (values of this type cannot be constructed). */
NOT("NOTATION", AAT, XS_URI, false, false, false, false, Type.ID.NOT),
/** Java type: wraps arbitrary Java objects as XQuery items. */
JAVA("java", ITEM, BASEX_URI, true, true, true, false, Type.ID.JAVA) {
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) {
return new Jav(item, qc);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) {
return new Jav(value, qc);
}
};
/** Language pattern (see xs:language lexical space). */
private static final Pattern LANGPATTERN = Pattern.compile("[A-Za-z]{1,8}(-[A-Za-z0-9]{1,8})*");
/** Cached enums (avoids re-cloning the array returned by values()). */
public static final AtomType[] VALUES = values();
/** Name (QName composed of the type name and its namespace URI). */
public final QNm name;
/** Parent type ({@code null} for abstract/placeholder types). */
public final AtomType parent;
/** Type id. */
private final Type.ID id;
/** Number flag. */
private final boolean numeric;
/** Untyped flag. */
private final boolean untyped;
/** String flag. */
private final boolean string;
/** Sortable flag. */
private final boolean sortable;
/** Sequence types (lazy instantiation). */
private EnumMap<Occ, SeqType> seqTypes;
/**
 * Constructor.
 * @param name string representation
 * @param parent parent type
 * @param uri uri
 * @param numeric numeric flag
 * @param untyped untyped flag
 * @param string string flag
 * @param sortable sortable flag
 * @param id type id
 */
AtomType(final String name, final AtomType parent, final byte[] uri, final boolean numeric,
final boolean untyped, final boolean string, final boolean sortable, final Type.ID id) {
this.name = new QNm(name, uri);
this.parent = parent;
this.numeric = numeric;
this.untyped = untyped;
this.string = string;
this.sortable = sortable;
this.id = id;
}
@Override
public Item cast(final Item item, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// default implementation: only identity casts succeed; constants override this
if(item.type == this) return item;
throw typeError(item, this, ii);
}
@Override
public Item cast(final Object value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
// default implementation: casting from Java objects must be overridden where supported
throw Util.notExpected(value);
}
@Override
public final Item castString(final String value, final QueryContext qc, final StaticContext sc,
final InputInfo ii) throws QueryException {
return cast(value, qc, sc, ii);
}
@Override
public final SeqType seqType(final Occ occ) {
// lazy instantiation of the per-occurrence sequence types;
// NOTE(review): unsynchronized — presumably only accessed single-threaded; confirm
if(seqTypes == null) seqTypes = new EnumMap<>(Occ.class);
return seqTypes.computeIfAbsent(occ, o -> new SeqType(this, o));
}
@Override
public final boolean eq(final Type type) {
return this == type;
}
@Override
public final boolean instanceOf(final Type type) {
// identity, the universal item type, or any type in the parent chain
return this == type || type == ITEM ||
type instanceof AtomType && parent != null && parent.instanceOf(type);
}
@Override
public final Type union(final Type type) {
  // trivial cases: one of the two types subsumes the other
  if(instanceOf(type)) return type;
  if(type.instanceOf(this)) return this;
  if(type instanceof AtomType) {
    // walk this type's ancestors (nearest first) and return the first one
    // that also occurs in the ancestor chain of the other type
    for(AtomType ancestor = parent; ancestor != null; ancestor = ancestor.parent) {
      for(AtomType other = ((AtomType) type).parent; other != null; other = other.parent) {
        if(other == ancestor) return ancestor;
      }
    }
  }
  // no common atomic ancestor: fall back to the universal item type
  return ITEM;
}
@Override
public final boolean isNumber() {
return numeric;
}
@Override
public final boolean isUntyped() {
return untyped;
}
@Override
public final boolean isNumberOrUntyped() {
return numeric || untyped;
}
@Override
public final boolean isStringOrUntyped() {
return string || untyped;
}
@Override
public final boolean isSortable() {
return sortable;
}
@Override
public final byte[] string() {
// string representation of the type name (without prefix)
return name.string();
}
@Override
public final AtomType atomic() {
// only types derived from xs:anyAtomicType are atomic
return instanceOf(AAT) ? this : null;
}
@Override
public final Type.ID id() {
return id;
}
@Override
public final String toString() {
  // XML Schema types are rendered with their prefix (e.g. "xs:integer");
  // all other types are rendered as node/item tests (e.g. "item()")
  final TokenBuilder tb = new TokenBuilder();
  if(Token.eq(XS_URI, name.uri())) {
    tb.add(NSGlobal.prefix(name.uri())).add(':').add(name.string());
  } else {
    tb.add(name.string()).add("()");
  }
  return tb.toString();
}
/**
 * Throws an exception if the specified item cannot be converted to a number.
 * @param item item
 * @param ii input info
 * @return item argument
 * @throws QueryException query exception
 */
final Item checkNum(final Item item, final InputInfo ii) throws QueryException {
final Type type = item.type;
// numbers, booleans and string-like values are accepted; xs:anyURI is explicitly excluded
if(item instanceof ANum || type.isStringOrUntyped() && type != URI || type == BLN) return item;
throw typeError(item, this, ii);
}
/**
 * Checks the validity of the specified object and returns its long value.
 * @param value value to be checked
 * @param min minimum value
 * @param max maximum value (no limit if identical to min)
 * @param ii input info
 * @return integer value
 * @throws QueryException query exception
 */
final long checkLong(final Object value, final long min, final long max, final InputInfo ii)
throws QueryException {
final Item item = checkNum(value, ii);
final Type type = item.type;
if(type == DBL || type == FLT) {
final double d = item.dbl(ii);
// NaN and infinities have no integer representation
if(Double.isNaN(d) || Double.isInfinite(d)) throw valueError(this, item.string(ii), ii);
// min == max signals "no range restriction" (see javadoc above)
if(min != max && (d < min || d > max)) throw castError(item, ii);
// reject doubles that cannot be represented as a long at all
if(d < Long.MIN_VALUE || d > Long.MAX_VALUE) throw INTRANGE_X.get(ii, d);
return (long) d;
}
final long l = item.itr(ii);
if(min != max && (l < min || l > max)) throw castError(item, ii);
return l;
}
/**
 * Checks the validity of the specified object and returns it as item.
 * @param value value to be checked
 * @param ii input info
 * @return integer value
 * @throws QueryException query exception
 */
final Item checkNum(final Object value, final InputInfo ii) throws QueryException {
  // plain Java objects are checked via their string representation
  if(!(value instanceof Value)) return checkNum(Str.get(value.toString()), ii);
  // XQuery values must consist of exactly one item
  final Value v = (Value) value;
  if(v.size() != 1) throw typeError(v, this, ii);
  return checkNum((Item) v, ii);
}
/**
 * Checks if the specified item is a string.
 * Despite the name, untyped values are accepted as well; xs:anyURI is excluded.
 * @param item item
 * @return item argument
 */
static boolean str(final Item item) {
final Type type = item.type;
return type.isStringOrUntyped() && type != URI;
}
/**
 * Checks the validity of the specified name.
 * @param item value to be checked
 * @param ii input info
 * @return name (whitespace-normalized)
 * @throws QueryException query exception if the value is not a valid NCName
 */
final byte[] checkName(final Item item, final InputInfo ii) throws QueryException {
final byte[] v = normalize(item.string(ii));
if(!XMLToken.isNCName(v)) throw castError(item, ii);
return v;
}
/**
 * Returns a cast exception.
 * @param item item to be converted
 * @param ii input info
 * @return query exception
 */
public final QueryException castError(final Item item, final InputInfo ii) {
return FUNCCAST_X_X_X.get(ii, item.type, this, item);
}
/**
 * Returns a cast exception.
 * @param value value to be converted
 * @param ii input info
 * @return query exception
 */
public final QueryException castError(final byte[] value, final InputInfo ii) {
return FUNCCAST_X_X.get(ii, this, normalize(value, ii));
}
@Override
public final boolean nsSensitive() {
// QNames and NOTATIONs depend on in-scope namespace bindings
return instanceOf(QNM) || instanceOf(NOT);
}
/**
 * Finds and returns the specified type.
 * @param type type
 * @param all accept all types (including those without parent type)
 * @return type or {@code null}
 */
public static AtomType find(final QNm type, final boolean all) {
  // implementation-specific types are never resolved by name
  if(Token.eq(type.uri(), BASEX_URI)) return null;
  for(final AtomType tp : VALUES) {
    if(tp.name.eq(type) && (all || tp.parent != null)) return tp;
  }
  return null;
}
/**
 * Gets the type instance for the given ID.
 * Performs a linear scan over the cached enum values.
 * @param id type ID
 * @return corresponding type if found, {@code null} otherwise
 */
static Type getType(final Type.ID id) {
for(final AtomType type : VALUES) {
if(type.id == id) return type;
}
return null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.control;
import org.codehaus.groovy.ast.ClassCodeVisitorSupport;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.FieldNode;
import org.codehaus.groovy.ast.GenericsType;
import org.codehaus.groovy.ast.InnerClassNode;
import org.codehaus.groovy.ast.MethodNode;
import org.codehaus.groovy.ast.Parameter;
import org.codehaus.groovy.ast.expr.ConstructorCallExpression;
import org.codehaus.groovy.ast.expr.DeclarationExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.ast.expr.TupleExpression;
/**
* Verify correct usage of generics.
* This includes:
* <ul>
* <li>class header (class and superclass declaration)</li>
* <li>arity of type parameters for fields, parameters, local variables</li>
* <li>invalid diamond {@code <>} usage</li>
* </ul>
*/
public class GenericsVisitor extends ClassCodeVisitorSupport {
private final SourceUnit source;
public GenericsVisitor(SourceUnit source) {
this.source = source;
}
protected SourceUnit getSourceUnit() {
return source;
}
@Override
public void visitClass(ClassNode node) {
// a wildcard in the supertype declaration is always an error; stop if found
boolean error = checkWildcard(node);
if (error) return;
// third argument is tri-state: TRUE for anonymous inner classes (diamond forbidden),
// null for regular class headers (diamond only mentioned as a hint in the message)
boolean isAnon = node instanceof InnerClassNode && ((InnerClassNode) node).isAnonymous();
checkGenericsUsage(node.getUnresolvedSuperClass(false), node.getSuperClass(), isAnon ? true : null);
ClassNode[] interfaces = node.getInterfaces();
for (ClassNode anInterface : interfaces) {
checkGenericsUsage(anInterface, anInterface.redirect());
}
node.visitContents(this);
}
@Override
public void visitField(FieldNode node) {
// check the arity of type arguments on the declared field type
ClassNode type = node.getType();
checkGenericsUsage(type, type.redirect());
super.visitField(node);
}
@Override
public void visitConstructorCallExpression(ConstructorCallExpression call) {
// TRUE: anonymous inner class (diamond forbidden); FALSE: plain call (diamond allowed)
ClassNode type = call.getType();
boolean isAnon = type instanceof InnerClassNode && ((InnerClassNode) type).isAnonymous();
checkGenericsUsage(type, type.redirect(), isAnon);
}
@Override
public void visitMethod(MethodNode node) {
// check each parameter type as well as the return type
Parameter[] parameters = node.getParameters();
for (Parameter param : parameters) {
ClassNode paramType = param.getType();
checkGenericsUsage(paramType, paramType.redirect());
}
ClassNode returnType = node.getReturnType();
checkGenericsUsage(returnType, returnType.redirect());
super.visitMethod(node);
}
@Override
public void visitDeclarationExpression(DeclarationExpression expression) {
// multiple-assignment declarations carry one type per tuple component
if (expression.isMultipleAssignmentDeclaration()) {
TupleExpression tExpr = expression.getTupleExpression();
for (Expression nextExpr : tExpr.getExpressions()) {
ClassNode declType = nextExpr.getType();
checkGenericsUsage(declType, declType.redirect());
}
} else {
ClassNode declType = expression.getVariableExpression().getType();
checkGenericsUsage(declType, declType.redirect());
}
super.visitDeclarationExpression(expression);
}
/**
 * Reports an error for every wildcard type argument in the supertype declaration.
 * Returns true if at least one error was reported.
 */
private boolean checkWildcard(ClassNode cn) {
ClassNode sn = cn.getUnresolvedSuperClass(false);
if (sn == null) return false;
GenericsType[] generics = sn.getGenericsTypes();
if (generics == null) return false;
boolean error = false;
for (GenericsType generic : generics) {
if (generic.isWildcard()) {
addError("A supertype may not specify a wildcard type", sn);
error = true;
}
}
return error;
}
private void checkGenericsUsage(ClassNode n, ClassNode cn) {
checkGenericsUsage(n, cn, null);
}
/**
 * Verifies that the type arguments supplied on {@code n} match the type
 * parameters declared by {@code cn} (the redirected/declared class).
 * {@code isAnonInnerClass} is tri-state: TRUE = anonymous inner class from a
 * constructor call (diamond forbidden), FALSE = ordinary constructor call
 * (diamond allowed), null = non-constructor context (diamond only hinted at).
 */
private void checkGenericsUsage(ClassNode n, ClassNode cn, Boolean isAnonInnerClass) {
// placeholders (type variables such as T) are checked elsewhere
if (n.isGenericsPlaceHolder()) return;
GenericsType[] nTypes = n.getGenericsTypes();
GenericsType[] cnTypes = cn.getGenericsTypes();
// raw type usage is always allowed
if (nTypes == null) return;
// you can't parameterize a non-generified type
if (cnTypes == null) {
String message = "The class " + getPrintName(n) + " (supplied with " + plural("type parameter", nTypes.length) +
") refers to the class " + getPrintName(cn) + " which takes no parameters";
if (nTypes.length == 0) {
message += " (invalid Diamond <> usage?)";
}
addError(message, n);
return;
}
// parameterize a type by using all of the parameters only
if (nTypes.length != cnTypes.length) {
// nTypes.length == 0 means diamond <> was written
if (Boolean.FALSE.equals(isAnonInnerClass) && nTypes.length == 0) {
return; // allow Diamond for non-AIC cases from CCE
}
String message;
if (Boolean.TRUE.equals(isAnonInnerClass) && nTypes.length == 0) {
message = "Cannot use diamond <> with anonymous inner classes";
} else {
message = "The class " + getPrintName(n) + " (supplied with " + plural("type parameter", nTypes.length) +
") refers to the class " + getPrintName(cn) +
" which takes " + plural("parameter", cnTypes.length);
if (nTypes.length == 0) {
message += " (invalid Diamond <> usage?)";
}
}
addError(message, n);
return;
}
for (int i = 0; i < nTypes.length; i++) {
ClassNode nType = nTypes[i].getType();
ClassNode cnType = cnTypes[i].getType();
// check nested type parameters
checkGenericsUsage(nType, nType.redirect());
// check bounds
if (!nType.isDerivedFrom(cnType)) {
if (cnType.isInterface() && nType.implementsInterface(cnType)) continue;
addError("The type " + nTypes[i].getName() +
" is not a valid substitute for the bounded parameter <" +
getPrintName(cnTypes[i]) + ">", n);
}
}
}
/** Formats a count followed by the singular or plural form of the given noun. */
private String plural(String orig, int count) {
return "" + count + " " + (count == 1 ? orig : orig + "s");
}
/** Renders a generics type with its bounds, e.g. {@code T extends Number}. */
private static String getPrintName(GenericsType gt) {
StringBuilder ret = new StringBuilder(gt.getName());
ClassNode[] upperBounds = gt.getUpperBounds();
ClassNode lowerBound = gt.getLowerBound();
if (upperBounds != null) {
// the implicit "extends java.lang.Object" bound is suppressed
if (upperBounds.length != 1 || !"java.lang.Object".equals(getPrintName(upperBounds[0]))) {
ret.append(" extends ");
for (int i = 0; i < upperBounds.length; i++) {
ret.append(getPrintName(upperBounds[i]));
if (i + 1 < upperBounds.length) ret.append(" & ");
}
}
} else if (lowerBound != null) {
ret.append(" super ").append(getPrintName(lowerBound));
}
return ret.toString();
}
/** Renders a class node with its type arguments, e.g. {@code List<String>}. */
private static String getPrintName(ClassNode cn) {
StringBuilder ret = new StringBuilder(cn.getName());
GenericsType[] gts = cn.getGenericsTypes();
if (gts != null) {
ret.append("<");
for (int i = 0; i < gts.length; i++) {
if (i != 0) ret.append(",");
ret.append(getPrintName(gts[i]));
}
ret.append(">");
}
return ret.toString();
}
}
| |
/*
* Hibernate Validator, declare and validate application constraints
*
* License: Apache License, Version 2.0
* See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
*/
package org.hibernate.validator.test.cdi.internal.injection;
import java.io.InputStream;
import java.util.Set;
import javax.enterprise.inject.Alternative;
import javax.validation.BootstrapConfiguration;
import javax.validation.ClockProvider;
import javax.validation.Configuration;
import javax.validation.ConstraintValidatorFactory;
import javax.validation.ConstraintViolation;
import javax.validation.MessageInterpolator;
import javax.validation.ParameterNameProvider;
import javax.validation.TraversableResolver;
import javax.validation.Validator;
import javax.validation.ValidatorContext;
import javax.validation.ValidatorFactory;
import javax.validation.executable.ExecutableValidator;
import javax.validation.metadata.BeanDescriptor;
import javax.validation.spi.BootstrapState;
import javax.validation.spi.ConfigurationState;
import javax.validation.spi.ValidationProvider;
import javax.validation.valueextraction.ValueExtractor;
import org.hibernate.validator.internal.engine.ValidatorFactoryImpl;
/**
* A {@link ValidationProvider} for testing purposes.
*
* @author Gunnar Morling
*/
public class MyValidationProvider implements ValidationProvider<MyValidationProvider.MyConfiguration> {
@Override
public MyConfiguration createSpecializedConfiguration(BootstrapState state) {
// not exercised by the tests
return null;
}
@Override
public Configuration<?> createGenericConfiguration(BootstrapState state) {
// not exercised by the tests
return null;
}
@Override
public ValidatorFactory buildValidatorFactory(ConfigurationState configurationState) {
// the only provider method the tests rely on: wraps the default factory
return new MyValidatorFactory( configurationState );
}
// Stub configuration; all methods return null because only
// buildValidatorFactory(ConfigurationState) above is actually used.
public static class MyConfiguration implements Configuration<MyConfiguration> {
@Override
public MyConfiguration ignoreXmlConfiguration() {
return null;
}
@Override
public MyConfiguration messageInterpolator(MessageInterpolator interpolator) {
return null;
}
@Override
public MyConfiguration traversableResolver(TraversableResolver resolver) {
return null;
}
@Override
public MyConfiguration constraintValidatorFactory(ConstraintValidatorFactory constraintValidatorFactory) {
return null;
}
@Override
public MyConfiguration parameterNameProvider(ParameterNameProvider parameterNameProvider) {
return null;
}
@Override
public MyConfiguration clockProvider(ClockProvider clockProvider) {
return null;
}
@Override
public MyConfiguration addValueExtractor(ValueExtractor<?> extractor) {
return null;
}
@Override
public MyConfiguration addMapping(InputStream stream) {
return null;
}
@Override
public MyConfiguration addProperty(String name, String value) {
return null;
}
@Override
public MessageInterpolator getDefaultMessageInterpolator() {
return null;
}
@Override
public TraversableResolver getDefaultTraversableResolver() {
return null;
}
@Override
public ConstraintValidatorFactory getDefaultConstraintValidatorFactory() {
return null;
}
@Override
public ParameterNameProvider getDefaultParameterNameProvider() {
return null;
}
@Override
public ClockProvider getDefaultClockProvider() {
return null;
}
@Override
public BootstrapConfiguration getBootstrapConfiguration() {
return null;
}
@Override
public ValidatorFactory buildValidatorFactory() {
return null;
}
}
// Factory that delegates to Hibernate Validator's ValidatorFactoryImpl but
// returns MyValidator wrappers, so tests can detect this provider was used.
@Alternative
public static class MyValidatorFactory implements ValidatorFactory {
private final ValidatorFactory delegate;
// Only for making the class proxyable
MyValidatorFactory() {
this.delegate = null;
}
public MyValidatorFactory(ConfigurationState configurationState) {
delegate = new ValidatorFactoryImpl( configurationState );
}
@Override
public Validator getValidator() {
// wrap the real validator so callers can identify this provider
return new MyValidator( delegate.getValidator() );
}
@Override
public ValidatorContext usingContext() {
return delegate.usingContext();
}
@Override
public MessageInterpolator getMessageInterpolator() {
return delegate.getMessageInterpolator();
}
@Override
public TraversableResolver getTraversableResolver() {
return delegate.getTraversableResolver();
}
@Override
public ConstraintValidatorFactory getConstraintValidatorFactory() {
return delegate.getConstraintValidatorFactory();
}
@Override
public ParameterNameProvider getParameterNameProvider() {
return delegate.getParameterNameProvider();
}
@Override
public ClockProvider getClockProvider() {
return delegate.getClockProvider();
}
@Override
@SuppressWarnings("unchecked")
public <T> T unwrap(Class<T> type) {
if ( type == MyValidatorFactory.class ) {
return (T) this;
}
else {
throw new IllegalArgumentException( "Unsupported type for unwrapping: " + type );
}
}
@Override
public void close() {
delegate.close();
}
}
// Validator that forwards every call to the wrapped delegate.
@Alternative
public static class MyValidator implements Validator {
private final Validator delegate;
// Only for making this class proxyable
MyValidator() {
this.delegate = null;
}
public MyValidator(Validator delegate) {
this.delegate = delegate;
}
@Override
public <T> Set<ConstraintViolation<T>> validate(T object, Class<?>... groups) {
return delegate.validate( object, groups );
}
@Override
public <T> Set<ConstraintViolation<T>> validateProperty(T object, String propertyName, Class<?>... groups) {
return delegate.validateProperty( object, propertyName, groups );
}
@Override
public <T> Set<ConstraintViolation<T>> validateValue(Class<T> beanType, String propertyName, Object value,
Class<?>... groups) {
return delegate.validateValue( beanType, propertyName, value, groups );
}
@Override
public BeanDescriptor getConstraintsForClass(Class<?> clazz) {
return delegate.getConstraintsForClass( clazz );
}
@Override
public <T> T unwrap(Class<T> type) {
return delegate.unwrap( type );
}
@Override
public ExecutableValidator forExecutables() {
return delegate.forExecutables();
}
}
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.storagegateway.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.storagegateway.AWSStorageGateway#removeTagsFromResource(RemoveTagsFromResourceRequest) RemoveTagsFromResource operation}.
* <p>
* This operation removes one or more tags from the specified resource.
* </p>
*
* @see com.amazonaws.services.storagegateway.AWSStorageGateway#removeTagsFromResource(RemoveTagsFromResourceRequest)
*/
public class RemoveTagsFromResourceRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* The Amazon Resource Name (ARN) of the resource you want to remove the
* tags from.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>50 - 500<br/>
*/
private String resourceARN;
/**
* The keys of the tags you want to remove from the specified resource. A
* tag is composed of a key/value pair.
*/
private com.amazonaws.internal.ListWithAutoConstructFlag<String> tagKeys;
/**
* The Amazon Resource Name (ARN) of the resource you want to remove the
* tags from.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>50 - 500<br/>
*
* @return The Amazon Resource Name (ARN) of the resource you want to remove the
* tags from.
*/
public String getResourceARN() {
return resourceARN;
}
/**
* The Amazon Resource Name (ARN) of the resource you want to remove the
* tags from.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>50 - 500<br/>
*
* @param resourceARN The Amazon Resource Name (ARN) of the resource you want to remove the
* tags from.
*/
public void setResourceARN(String resourceARN) {
this.resourceARN = resourceARN;
}
/**
* The Amazon Resource Name (ARN) of the resource you want to remove the
* tags from.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>50 - 500<br/>
*
* @param resourceARN The Amazon Resource Name (ARN) of the resource you want to remove the
* tags from.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public RemoveTagsFromResourceRequest withResourceARN(String resourceARN) {
this.resourceARN = resourceARN;
return this;
}
/**
* The keys of the tags you want to remove from the specified resource. A
* tag is composed of a key/value pair.
*
* @return The keys of the tags you want to remove from the specified resource. A
* tag is composed of a key/value pair.
*/
public java.util.List<String> getTagKeys() {
if (tagKeys == null) {
tagKeys = new com.amazonaws.internal.ListWithAutoConstructFlag<String>();
tagKeys.setAutoConstruct(true);
}
return tagKeys;
}
/**
* The keys of the tags you want to remove from the specified resource. A
* tag is composed of a key/value pair.
*
* @param tagKeys The keys of the tags you want to remove from the specified resource. A
* tag is composed of a key/value pair.
*/
public void setTagKeys(java.util.Collection<String> tagKeys) {
if (tagKeys == null) {
this.tagKeys = null;
return;
}
com.amazonaws.internal.ListWithAutoConstructFlag<String> tagKeysCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(tagKeys.size());
tagKeysCopy.addAll(tagKeys);
this.tagKeys = tagKeysCopy;
}
/**
* The keys of the tags you want to remove from the specified resource. A
* tag is composed of a key/value pair.
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setTagKeys(java.util.Collection)} or {@link
* #withTagKeys(java.util.Collection)} if you want to override the
* existing values.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param tagKeys The keys of the tags you want to remove from the specified resource. A
* tag is composed of a key/value pair.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public RemoveTagsFromResourceRequest withTagKeys(String... tagKeys) {
if (getTagKeys() == null) setTagKeys(new java.util.ArrayList<String>(tagKeys.length));
for (String value : tagKeys) {
getTagKeys().add(value);
}
return this;
}
/**
* The keys of the tags you want to remove from the specified resource. A
* tag is composed of a key/value pair.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param tagKeys The keys of the tags you want to remove from the specified resource. A
* tag is composed of a key/value pair.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public RemoveTagsFromResourceRequest withTagKeys(java.util.Collection<String> tagKeys) {
if (tagKeys == null) {
this.tagKeys = null;
} else {
com.amazonaws.internal.ListWithAutoConstructFlag<String> tagKeysCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(tagKeys.size());
tagKeysCopy.addAll(tagKeys);
this.tagKeys = tagKeysCopy;
}
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getResourceARN() != null) sb.append("ResourceARN: " + getResourceARN() + ",");
if (getTagKeys() != null) sb.append("TagKeys: " + getTagKeys() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getResourceARN() == null) ? 0 : getResourceARN().hashCode());
hashCode = prime * hashCode + ((getTagKeys() == null) ? 0 : getTagKeys().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof RemoveTagsFromResourceRequest == false) return false;
RemoveTagsFromResourceRequest other = (RemoveTagsFromResourceRequest)obj;
if (other.getResourceARN() == null ^ this.getResourceARN() == null) return false;
if (other.getResourceARN() != null && other.getResourceARN().equals(this.getResourceARN()) == false) return false;
if (other.getTagKeys() == null ^ this.getTagKeys() == null) return false;
if (other.getTagKeys() != null && other.getTagKeys().equals(this.getTagKeys()) == false) return false;
return true;
}
    @Override
    public RemoveTagsFromResourceRequest clone() {
        // Covariant override narrowing the superclass clone's return type.
        // Presumably the superclass performs a shallow copy, so the tagKeys
        // list would be shared with the clone — TODO confirm superclass behavior.
        return (RemoveTagsFromResourceRequest) super.clone();
    }
}
| |
package android.uikit;
import android.content.Context;
import android.content.res.TypedArray;
import android.content.res.XmlResourceParser;
import android.graphics.Color;
import android.support.annotation.NonNull;
import android.support.v4.util.Pair;
import android.support.v4.util.SparseArrayCompat;
import android.util.AttributeSet;
import android.util.SparseArray;
import android.utils.AndroidAttrs;
import android.view.View;
import android.view.ViewGroup;
import com.lazy.library.logging.Logcat;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import static android.view.View.NO_ID;
// Internal helper shared by UIKit container components: resolves ViewBlock
// attributes from inflated XML, instantiates ViewBlock implementations via
// reflection, and wires them into the ViewBlock tree and ViewBlockManager.
final class UIKitHelper {
    static final String TAG_UIKIT = "UIKit";
    // The container this helper serves; child views are scanned through it.
    private final ViewGroup mHost;
    private final Context mContext;
    UIKitHelper(ViewGroup mHost) {
        this.mHost = mHost;
        this.mContext = mHost.getContext();
        // Resolve android:background from ViewBlockTheme, defaulting to white
        // when the themed value cannot be read.
        TypedArray typedArray = this.mContext.getTheme().obtainStyledAttributes(R.style.ViewBlockTheme, new int[]{android.R.attr.background});
        int backgroundColor = typedArray.getColor(0, Color.WHITE);
        typedArray.recycle();
        // Only apply the themed background when the host has none of its own.
        if (this.mHost.getBackground() == null) {
            this.mHost.setBackgroundColor(backgroundColor);
        }
    }
    public Context getContext() {
        return mContext;
    }
    // Reads the (view id, block class name) pair from an XML attribute set.
    // NOTE(review): NAME_INDEX and BLOCK_CLASS_INDEX both write into `name`,
    // so whichever attribute the TypedArray yields last wins — presumably
    // block_class is meant to override name; confirm AndroidAttrs.ATTRS order.
    private Pair<Integer, String> getViewBlockAttrs(AttributeSet attrs) {
        int resourceId = NO_ID;
        String name = null;
        TypedArray array = getContext().obtainStyledAttributes(attrs, AndroidAttrs.ATTRS);
        int indexCount = array.getIndexCount();
        for (int i = 0; i < indexCount; i++) {
            int index = array.getIndex(i);
            switch (index) {
                case AndroidAttrs.ID_INDEX:
                    resourceId = array.getResourceId(index, NO_ID);
                    break;
                case AndroidAttrs.NAME_INDEX:
                    name = array.getString(index);
                    break;
                case AndroidAttrs.BLOCK_CLASS_INDEX:
                    name = array.getString(index);
                    break;
            }
        }
        array.recycle();
        return Pair.create(resourceId, name);
    }
    // Returns the ViewBlock of the parent container when that container is
    // itself a UIKit component; null otherwise.
    ViewBlock getParentBlock(UIKitComponent component) {
        ViewBlock parentBlock = null;
        ViewGroup parentContainer = component.getParentContainer();
        if (parentContainer instanceof UIKitComponent) {
            parentBlock = ((UIKitComponent) parentContainer).getViewBlock();
        }
        return parentBlock;
    }
    // Reflectively instantiates the class `name` through its (View)
    // constructor. Returns null when lookup or construction fails; class and
    // constructor lookup failures are logged, other failures only printed.
    private ViewBlock getViewBlockByName(View view, String name) {
        ViewBlock viewBlock = null;
        try {
            Class<?> aClass = Class.forName(name);
            Constructor constructor = aClass.getConstructor(View.class);
            viewBlock = (ViewBlock) constructor.newInstance(view);
        } catch (ClassNotFoundException e) {
            Logcat.e().tag(TAG_UIKIT).msg(e.getMessage()).out();
        } catch (NoSuchMethodException e) {
            Logcat.e().tag(TAG_UIKIT).msg(e.getMessage()).out();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (InvocationTargetException e) {
            e.printStackTrace();
        }
        return viewBlock;
    }
    // Creates a ViewBlock for `view`. A null name means "no block"; a missing
    // view id is only warned about — creation still proceeds.
    private ViewBlock createViewBlock(View view, int resourceId, String name) {
        if (name == null) {
            return null;
        }
        if (resourceId == NO_ID) {
            Logcat.w().tag(TAG_UIKIT).msg("ViewBlock name ").msg(name).msg(" view no id ").out();
        }
        return getViewBlockByName(view, name);
    }
    // Links the block under its parent (when resolvable) and registers it
    // with the manager. No-op for a null block.
    public void addViewBlockToViewBlockManager(ViewBlock viewBlock, ViewBlock parentBlock, ViewBlockManager viewBlockManager) {
        if (viewBlock != null) {
            viewBlock.setParent(parentBlock);
            ViewBlockParent parent = viewBlock.getParent();
            if (parent != null) {
                parent.addViewBlock(viewBlock);
            }
            viewBlockManager.add(viewBlock);
        }
    }
    // Builds (or inherits) the ViewBlock for a component from its XML attrs
    // and stores it on the component. When no block class is declared, the
    // component falls back to its parent's block.
    ViewBlock attachViewBlock(UIKitComponent component, AttributeSet attrs) {
        Pair<Integer, String> blockAttrs = getViewBlockAttrs(attrs);
        int resourceId = blockAttrs.first;
        String name = blockAttrs.second;
        ViewBlock parentBlock = getParentBlock(component);
        ViewBlock viewBlock = createViewBlock(component.getContainer(), resourceId, name);
        if (viewBlock != null) {
            addViewBlockToViewBlockManager(viewBlock, parentBlock, component.getViewBlockManager());
        } else {
            viewBlock = parentBlock;
        }
        component.setViewBlock(viewBlock);
        return viewBlock;
    }
    // Called per XML tag during inflation: records the declared block class
    // name keyed by view id (falling back to the current inflation depth when
    // the view has no id), then advances the depth. <include> tags do not
    // advance the depth.
    void generateViewBlock(UIKitComponent component, AttributeSet attrs) {
        String tagName = ((XmlResourceParser) attrs).getName();
        if (!AndroidAttrs.UIKIT_TAGS.contains(tagName)) {
            Pair<Integer, String> viewBlockAttrs = getViewBlockAttrs(attrs);
            int resourceId = viewBlockAttrs.first;
            String name = viewBlockAttrs.second;
            if (name != null) {
                if (resourceId == NO_ID) {
                    Logcat.w().tag(TAG_UIKIT).msg("ViewBlock name ").msg(name).msg(" view no id ").out();
                    // Synthetic key for id-less views: the inflation depth.
                    resourceId = component.getViewDepth();
                }
                component.getViewBlockClassNamesArray().put(resourceId, name);
            }
        }
        if (!"include".equals(tagName)) {
            component.setViewDepth(component.getViewDepth() + 1);
        }
    }
    // After inflation: attach the recorded blocks to the host's direct
    // non-ViewGroup children, then — for a component hosted directly in the
    // content view of a non-UIKitActivity — dispatch ON_CREATE_VIEW.
    void onFinishInflateViewBlock(@NonNull UIKitComponent component) {
        SparseArrayCompat<String> viewBlockClassNamesArray = component.getViewBlockClassNamesArray();
        int size = viewBlockClassNamesArray.size();
        boolean isEmpty = (size == 0);
        if (!isEmpty) {
            int mHostChildCount = mHost.getChildCount();
            for (int i = 0; i < mHostChildCount; i++) {
                View childAt = mHost.getChildAt(i);
                // ViewGroup children manage their own blocks via their own helper.
                if (!(childAt instanceof ViewGroup)) {
                    attachChildViewBlock(component, viewBlockClassNamesArray, i, childAt);
                }
            }
        }
        if (!(component.getActivity() instanceof UIKitActivity)) {
            ViewGroup parent = component.getParentContainer();
            if (parent == null) return;
            if ((parent).getId() == android.R.id.content) {
                ViewBlockManager blockManager = UIKit.getViewBlockManager(component.getActivity());
                SparseArray<ViewBlock> viewBlocks = blockManager.getViewBlocks();
                UIKitActivity.dispatch(viewBlocks, UIKitActivity.ON_CREATE_VIEW);
            }
        }
    }
    // Resolves the pending class name for one child — first by the child's
    // view id, then by its position index — creates and registers the block,
    // and removes the consumed entry so each child is attached at most once.
    private void attachChildViewBlock(@NonNull UIKitComponent component, SparseArrayCompat<String>
            childViewBlockClassNamesArray, int index, View childView) {
        int childViewChildAtId = childView.getId();
        int indexOfKey = childViewBlockClassNamesArray.indexOfKey(childViewChildAtId);
        if (indexOfKey < 0) {
            indexOfKey = childViewBlockClassNamesArray.indexOfKey(index);
        }
        if (indexOfKey < 0) {
            return;
        }
        String name = childViewBlockClassNamesArray.valueAt(indexOfKey);
        if (name == null) {
            return;
        }
        ViewBlock parentBlock = component.getViewBlock();
        ViewBlock viewBlock = createViewBlock(childView, childViewChildAtId, name);
        if (viewBlock != null) {
            addViewBlockToViewBlockManager(viewBlock, parentBlock, component.getViewBlockManager());
        }
        childViewBlockClassNamesArray.removeAt(indexOfKey);
    }
}
| |
/**
*
* Copyright 2003-2005 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.jingleold.packet;
import org.jivesoftware.smack.packet.ExtensionElement;
import org.jivesoftware.smackx.jingleold.media.PayloadType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
 * Jingle content description.
 *
 * @author Alvaro Saurin <alvaro.saurin@gmail.com>
 */
public abstract class JingleContentDescription implements ExtensionElement {
    // static
    public static final String NODENAME = "description";
    // non-static
    // Payload types carried by this description; every access is guarded by
    // synchronizing on the list itself.
    private final List<JinglePayloadType> payloads = new ArrayList<JinglePayloadType>();
    /**
     * Creates a content description.
     */
    public JingleContentDescription() {
        super();
    }
    /**
     * Returns the XML element name of the element.
     *
     * @return the XML element name of the element.
     */
    public String getElementName() {
        return NODENAME;
    }
    /**
     * Return the namespace.
     *
     * @return The namespace
     */
    public abstract String getNamespace();
    /**
     * Adds a audio payload type to the packet.
     *
     * @param pt the audio payload type to add.
     */
    public void addJinglePayloadType(final JinglePayloadType pt) {
        synchronized (payloads) {
            payloads.add(pt);
        }
    }
    /**
     * Adds a list of payloads to the packet, wrapping each plain audio
     * payload in its Jingle counterpart.
     *
     * @param pts the payloads to add.
     */
    public void addAudioPayloadTypes(final List<PayloadType.Audio> pts) {
        synchronized (payloads) {
            Iterator<PayloadType.Audio> ptIter = pts.iterator();
            while (ptIter.hasNext()) {
                PayloadType.Audio pt = ptIter.next();
                addJinglePayloadType(new JinglePayloadType.Audio(pt));
            }
        }
    }
    /**
     * Returns an Iterator for the audio payloads in the packet. The iterator
     * operates on an unmodifiable snapshot, so it is safe against concurrent
     * additions.
     *
     * @return an Iterator for the audio payloads in the packet.
     */
    public Iterator<JinglePayloadType> getJinglePayloadTypes() {
        return Collections.unmodifiableList(getJinglePayloadTypesList()).iterator();
    }
    /**
     * Returns a list for the audio payloads in the packet. The returned list
     * is a defensive copy of the internal state.
     *
     * @return a list for the audio payloads in the packet.
     */
    public ArrayList<JinglePayloadType> getJinglePayloadTypesList() {
        synchronized (payloads) {
            return new ArrayList<JinglePayloadType>(payloads);
        }
    }
    /**
     * Return the list of Payload types contained in the description. Only
     * audio payloads are included; other payload kinds are skipped.
     *
     * @return a list of PayloadType.Audio
     */
    public ArrayList<PayloadType.Audio> getAudioPayloadTypesList() {
        ArrayList<PayloadType.Audio> result = new ArrayList<PayloadType.Audio>();
        Iterator<JinglePayloadType> jinglePtsIter = getJinglePayloadTypes();
        while (jinglePtsIter.hasNext()) {
            JinglePayloadType jpt = jinglePtsIter.next();
            if (jpt instanceof JinglePayloadType.Audio) {
                JinglePayloadType.Audio jpta = (JinglePayloadType.Audio) jpt;
                result.add((PayloadType.Audio)jpta.getPayloadType());
            }
        }
        return result;
    }
    /**
     * Returns a count of the audio payloads in the Jingle packet.
     *
     * @return the number of audio payloads in the Jingle packet.
     */
    public int getJinglePayloadTypesCount() {
        synchronized (payloads) {
            return payloads.size();
        }
    }
    /**
     * Convert a Jingle description to XML. Returns an empty string when the
     * description carries no payloads at all.
     *
     * @return a string with the XML representation
     */
    public String toXML() {
        StringBuilder buf = new StringBuilder();
        synchronized (payloads) {
            if (payloads.size() > 0) {
                // <description xmlns="..."> wrapping one child per payload.
                buf.append("<").append(getElementName());
                buf.append(" xmlns=\"").append(getNamespace()).append("\" >");
                Iterator<JinglePayloadType> pt = payloads.listIterator();
                while (pt.hasNext()) {
                    JinglePayloadType pte = pt.next();
                    buf.append(pte.toXML());
                }
                buf.append("</").append(getElementName()).append(">");
            }
        }
        return buf.toString();
    }
    /**
     * Jingle audio description.
     */
    public static class Audio extends JingleContentDescription {
        public static final String NAMESPACE = "urn:xmpp:tmp:jingle:apps:rtp";
        public Audio() {
            super();
        }
        /**
         * Utility constructor, with a JinglePayloadType.
         *
         * @param pt the payload type to add to the description.
         */
        public Audio(final JinglePayloadType pt) {
            super();
            addJinglePayloadType(pt);
        }
        public String getNamespace() {
            return NAMESPACE;
        }
    }
    /**
     * A payload type, contained in a descriptor.
     *
     * @author Alvaro Saurin
     */
    public static class JinglePayloadType {
        public static final String NODENAME = "payload-type";
        // The wrapped payload; may be null, in which case toXML() emits nothing.
        private PayloadType payload;
        /**
         * Create a payload type.
         *
         * @param payload the payload
         */
        public JinglePayloadType(final PayloadType payload) {
            super();
            this.payload = payload;
        }
        /**
         * Create an empty payload type.
         */
        public JinglePayloadType() {
            this(null);
        }
        /**
         * Returns the XML element name of the element.
         *
         * @return the XML element name of the element.
         */
        public static String getElementName() {
            return NODENAME;
        }
        /**
         * Get the payload represented.
         *
         * @return the payload
         */
        public PayloadType getPayloadType() {
            return payload;
        }
        /**
         * Set the payload represented.
         *
         * @param payload the payload to set
         */
        public void setPayload(final PayloadType payload) {
            this.payload = payload;
        }
        // Extension point: subclasses return extra attribute text (or null)
        // to be spliced into the element before it is closed.
        protected String getChildAttributes() {
            return null;
        }
        public String toXML() {
            StringBuilder buf = new StringBuilder();
            if (payload != null) {
                buf.append("<").append(getElementName()).append(" ");
                // We convert the payload type to XML here. Note that the
                // trailing space above plus the leading space of each
                // attribute yields a double space; kept as-is.
                if (payload.getId() != PayloadType.INVALID_PT) {
                    buf.append(" id=\"").append(payload.getId()).append("\"");
                }
                if (payload.getName() != null) {
                    buf.append(" name=\"").append(payload.getName()).append("\"");
                }
                if (payload.getChannels() != 0) {
                    buf.append(" channels=\"").append(payload.getChannels()).append("\"");
                }
                if (getChildAttributes() != null) {
                    buf.append(getChildAttributes());
                }
                buf.append("/>");
            }
            return buf.toString();
        }
        /**
         * Audio payload type element.
         */
        public static class Audio extends JinglePayloadType {
            public Audio(final PayloadType.Audio audio) {
                super(audio);
            }
            protected String getChildAttributes() {
                StringBuilder buf = new StringBuilder();
                PayloadType pt = getPayloadType();
                if (pt instanceof PayloadType.Audio) {
                    PayloadType.Audio pta = (PayloadType.Audio) pt;
                    buf.append(" clockrate=\"").append(pta.getClockRate()).append("\" ");
                }
                return buf.toString();
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.common.configuration;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.metron.common.utils.JSONUtils;
/**
 * Allows for retrieval and update of indexing configurations.
 */
public class IndexingConfigurations extends Configurations {
  public static final String BATCH_SIZE_CONF = "batchSize";
  public static final String BATCH_TIMEOUT_CONF = "batchTimeout";
  public static final String ENABLED_CONF = "enabled";
  public static final String INDEX_CONF = "index";
  public static final String OUTPUT_PATH_FUNCTION_CONF = "outputPathFunction";
  public static final String FIELD_NAME_CONVERTER_CONF = "fieldNameConverter";
  public static final String SET_DOCUMENT_ID_CONF = "setDocumentId";
  public static final String GLOBAL_ELASTICSEARCH_SET_DOCUMENT_ID_CONF = "indexing.writer.elasticsearch.setDocumentId";
  /**
   * Gets the indexing config for a specific sensor.
   *
   * @param sensorType The sensor to retrieve config for
   * @param emptyMapOnNonExistent If true and the config doesn't exist return empty map, else null
   * @return Map of the config key -> value. Value on missing depends on emptyMapOnNonExistent
   */
  @SuppressWarnings("unchecked")
  public Map<String, Object> getSensorIndexingConfig(String sensorType, boolean emptyMapOnNonExistent) {
    Map<String, Object> ret = (Map<String, Object>) getConfigurations().get(getKey(sensorType));
    if (ret == null) {
      return emptyMapOnNonExistent ? new HashMap<>() : null;
    }
    return ret;
  }
  /**
   * Gets the indexing config for a specific sensor, returning an empty map
   * when no config exists.
   *
   * @param sensorType The sensor to retrieve config for
   * @return Map of the config key -> value; never null
   */
  public Map<String, Object> getSensorIndexingConfig(String sensorType) {
    return getSensorIndexingConfig(sensorType, true);
  }
  /**
   * Gets the list of sensor types that indexing configurations exist for.
   *
   * @return List of sensor types
   */
  public List<String> getTypes() {
    List<String> ret = new ArrayList<>();
    // Hoisted out of the loop: the prefix is invariant across keys.
    String prefix = ConfigurationType.INDEXING.getTypeName();
    for (String keyedSensor : getConfigurations().keySet()) {
      if (!keyedSensor.isEmpty() && keyedSensor.startsWith(prefix)) {
        // Strip "<prefix>." to recover the bare sensor name.
        ret.add(keyedSensor.substring(prefix.length() + 1));
      }
    }
    return ret;
  }
  /**
   * Removes the indexing configuration for the given sensor, if any.
   *
   * @param sensorType The sensor whose config should be removed
   */
  public void delete(String sensorType) {
    getConfigurations().remove(getKey(sensorType));
  }
  /**
   * Gets the sensor indexing config for a given writer.
   *
   * @param sensorType The sensor to retrieve configs for
   * @param writerName The particular writer to get configurations for
   * @return A Map of the configuration; empty when either level is missing
   */
  @SuppressWarnings("unchecked")
  public Map<String, Object> getSensorIndexingConfig(String sensorType, String writerName) {
    Map<String, Object> ret = (Map<String, Object>) getConfigurations().get(getKey(sensorType));
    if (ret == null) {
      return new HashMap<>();
    }
    Map<String, Object> writerConfig = (Map<String, Object>) ret.get(writerName);
    return writerConfig != null ? writerConfig : new HashMap<>();
  }
  /**
   * Updates the indexing config for a sensor from raw JSON bytes.
   *
   * @param sensorType The sensor to update
   * @param data JSON bytes describing the new config
   * @throws IOException if the bytes cannot be parsed
   */
  public void updateSensorIndexingConfig(String sensorType, byte[] data) throws IOException {
    updateSensorIndexingConfig(sensorType, new ByteArrayInputStream(data));
  }
  /**
   * Updates the indexing config for a sensor from a JSON input stream.
   *
   * @param sensorType The sensor to update
   * @param io stream of JSON describing the new config
   * @throws IOException if the stream cannot be read or parsed
   */
  public void updateSensorIndexingConfig(String sensorType, InputStream io) throws IOException {
    Map<String, Object> sensorIndexingConfig = JSONUtils.INSTANCE.load(io, JSONUtils.MAP_SUPPLIER);
    updateSensorIndexingConfig(sensorType, sensorIndexingConfig);
  }
  /**
   * Replaces the indexing config for a sensor.
   *
   * @param sensorType The sensor to update
   * @param sensorIndexingConfig The new config map
   */
  public void updateSensorIndexingConfig(String sensorType, Map<String, Object> sensorIndexingConfig) {
    getConfigurations().put(getKey(sensorType), sensorIndexingConfig);
  }
  /**
   * Builds the configuration key for a sensor, e.g. "indexing.bro".
   *
   * @param sensorType The sensor name (empty string yields the bare prefix)
   * @return The fully-qualified configuration key
   */
  public static String getKey(String sensorType) {
    return ConfigurationType.INDEXING.getTypeName() + "." + sensorType;
  }
  /**
   * Determines if a configuration is default or not. In particular, this means the config is null
   * for the sensor/writer combo.
   *
   * @param sensorName The sensor to check for default
   * @param writerName The specific writer to check for default
   * @return True if default, false otherwise.
   */
  @SuppressWarnings("unchecked")
  public boolean isDefault(String sensorName, String writerName) {
    Map<String, Object> ret = (Map<String, Object>) getConfigurations().get(getKey(sensorName));
    if (ret == null) {
      return true;
    }
    Map<String, Object> writerConfig = (Map<String, Object>) ret.get(writerName);
    // Default means no writer-specific config has been supplied.
    return writerConfig == null;
  }
  public int getBatchSize(String sensorName, String writerName) {
    return getBatchSize(getSensorIndexingConfig(sensorName, writerName));
  }
  public int getBatchTimeout(String sensorName, String writerName) {
    return getBatchTimeout(getSensorIndexingConfig(sensorName, writerName));
  }
  /**
   * Returns all configured values of batchTimeout, for all configured sensors,
   * but only for the specific writer identified by {@code writerName}. So, if it is
   * an hdfs writer, it will return the batchTimeouts for hdfs writers for all the sensors.
   * The goal is to return to a {@link org.apache.metron.common.bolt.ConfiguredBolt}
   * the set of all and only batchTimeouts relevant to that ConfiguredBolt.
   *
   * @param writerName The name of the writer to look up.
   * @return list of integer batchTimeouts, one per configured sensor
   */
  public List<Integer> getAllConfiguredTimeouts(String writerName) {
    // The configuration infrastructure was not designed to enumerate sensors, so we synthesize.
    // getKey("") yields the key prefix shared by all sensor keys; every key in
    // configurations.keySet() carrying that prefix is a sensor key, and its suffix
    // is the sensor name used to query the batchTimeout settings, if any.
    String keyPrefixString = getKey("");
    int prefixStringLength = keyPrefixString.length();
    List<Integer> configuredBatchTimeouts = new ArrayList<>();
    for (String sensorKeyString : getConfigurations().keySet()) {
      if (sensorKeyString.startsWith(keyPrefixString)) {
        String configuredSensorName = sensorKeyString.substring(prefixStringLength);
        configuredBatchTimeouts.add(getBatchTimeout(configuredSensorName, writerName));
      }
    }
    return configuredBatchTimeouts;
  }
  public String getIndex(String sensorName, String writerName) {
    return getIndex(getSensorIndexingConfig(sensorName, writerName), sensorName);
  }
  public boolean isEnabled(String sensorName, String writerName) {
    return isEnabled(getSensorIndexingConfig(sensorName, writerName));
  }
  public String getOutputPathFunction(String sensorName, String writerName) {
    return getOutputPathFunction(getSensorIndexingConfig(sensorName, writerName), sensorName);
  }
  public String getFieldNameConverter(String sensorName, String writerName) {
    return getFieldNameConverter(getSensorIndexingConfig(sensorName, writerName), sensorName);
  }
  public boolean isSetDocumentId(String sensorName, String writerName) {
    return isSetDocumentId(getGlobalConfig(true), getSensorIndexingConfig(sensorName, writerName));
  }
  /**
   * Retrieves the enabled value from the config.
   *
   * @param conf The configuration to retrieve from
   * @return True if this configuration is enabled, false otherwise; true by default
   */
  public static boolean isEnabled(Map<String, Object> conf) {
    return getAs(ENABLED_CONF, conf, true, Boolean.class);
  }
  /**
   * Retrieves the batch size value from the config.
   *
   * @param conf The configuration to retrieve from
   * @return The batch size if defined, 1 by default
   */
  public static int getBatchSize(Map<String, Object> conf) {
    return getAs(BATCH_SIZE_CONF, conf, 1, Integer.class);
  }
  /**
   * Retrieves the batch timeout value from the config.
   *
   * @param conf The configuration to retrieve from
   * @return The batch timeout if defined, 0 by default
   */
  public static int getBatchTimeout(Map<String, Object> conf) {
    return getAs(BATCH_TIMEOUT_CONF, conf, 0, Integer.class);
  }
  /**
   * Retrieves the index value from the config.
   *
   * @param conf The configuration to retrieve from
   * @param sensorName The name of the sensor to retrieve the index for
   * @return The index if defined, the sensor name by default
   */
  public static String getIndex(Map<String, Object> conf, String sensorName) {
    return getAs(INDEX_CONF, conf, sensorName, String.class);
  }
  /**
   * Retrieves the output path function value from the config.
   *
   * @param conf The configuration to retrieve from
   * @param sensorName Unused
   * @return The output path function if defined, empty string otherwise
   */
  public static String getOutputPathFunction(Map<String, Object> conf, String sensorName) {
    return getAs(OUTPUT_PATH_FUNCTION_CONF, conf, "", String.class);
  }
  /**
   * Retrieves the field name converter value from the config.
   *
   * @param conf The configuration to retrieve from
   * @param sensorName Unused
   * @return The field name converter if defined, empty string otherwise
   */
  public static String getFieldNameConverter(Map<String, Object> conf, String sensorName) {
    return getAs(FIELD_NAME_CONVERTER_CONF, conf, "", String.class);
  }
  /**
   * Determines if the Metron generated id should be used when indexing.
   * The sensor-level setting wins; otherwise the global Elasticsearch
   * setting is consulted; the overall default is false.
   *
   * @param globalConf The global config
   * @param sensorConf The indexing config for a given sensor
   * @return True if the Metron generated id should be used as the id, False otherwise
   */
  public static boolean isSetDocumentId(Map<String, Object> globalConf, Map<String, Object> sensorConf) {
    return getAs(SET_DOCUMENT_ID_CONF, sensorConf, getAs(GLOBAL_ELASTICSEARCH_SET_DOCUMENT_ID_CONF, globalConf, false, Boolean.class), Boolean.class);
  }
  /**
   * Sets the enabled flag in the config.
   *
   * @param conf The configuration map to set enabled in. If null, replaced with empty map.
   * @param enabled True if enabled, false otherwise
   * @return The configuration with the enabled value set
   */
  public static Map<String, Object> setEnabled(Map<String, Object> conf, boolean enabled) {
    Map<String, Object> ret = conf == null ? new HashMap<>() : conf;
    ret.put(ENABLED_CONF, enabled);
    return ret;
  }
  /**
   * Sets the batch size in the config.
   *
   * @param conf The configuration map to set the batch size in. If null, replaced with empty map.
   * @param batchSize The desired batch size
   * @return The configuration with the batch size value set
   */
  public static Map<String, Object> setBatchSize(Map<String, Object> conf, int batchSize) {
    Map<String, Object> ret = conf == null ? new HashMap<>() : conf;
    ret.put(BATCH_SIZE_CONF, batchSize);
    return ret;
  }
  /**
   * Sets the batch timeout in the config.
   *
   * @param conf The configuration map to set the batch timeout in. If null, replaced with empty map.
   * @param batchTimeout The desired batch timeout
   * @return The configuration with the batch timeout value set
   */
  public static Map<String, Object> setBatchTimeout(Map<String, Object> conf, int batchTimeout) {
    Map<String, Object> ret = conf == null ? new HashMap<>() : conf;
    ret.put(BATCH_TIMEOUT_CONF, batchTimeout);
    return ret;
  }
  /**
   * Sets the index in the config.
   *
   * @param conf The configuration map to set the index in. If null, replaced with empty map.
   * @param index The desired index
   * @return The configuration with the index value set
   */
  public static Map<String, Object> setIndex(Map<String, Object> conf, String index) {
    Map<String, Object> ret = conf == null ? new HashMap<>() : conf;
    ret.put(INDEX_CONF, index);
    return ret;
  }
  /**
   * Sets the field name converter in the config.
   *
   * @param conf The configuration map to set the field name converter in. If null, replaced with empty map.
   * @param index The desired field name converter value
   * @return The configuration with the field name converter value set
   */
  public static Map<String, Object> setFieldNameConverter(Map<String, Object> conf, String index) {
    Map<String, Object> ret = conf == null ? new HashMap<>() : conf;
    ret.put(FIELD_NAME_CONVERTER_CONF, index);
    return ret;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.stat.descriptive;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.commons.math3.TestUtils;
import org.apache.commons.math3.distribution.RealDistribution;
import org.apache.commons.math3.distribution.UniformRealDistribution;
import org.apache.commons.math3.distribution.IntegerDistribution;
import org.apache.commons.math3.distribution.UniformIntegerDistribution;
import org.apache.commons.math3.util.Precision;
import org.junit.Assert;
import org.junit.Test;
/**
* Test cases for {@link AggregateSummaryStatistics}
*
*/
public class AggregateSummaryStatisticsTest {
/**
* Tests the standard aggregation behavior
*/
@Test
public void testAggregation() {
AggregateSummaryStatistics aggregate = new AggregateSummaryStatistics();
SummaryStatistics setOneStats = aggregate.createContributingStatistics();
SummaryStatistics setTwoStats = aggregate.createContributingStatistics();
Assert.assertNotNull("The set one contributing stats are null", setOneStats);
Assert.assertNotNull("The set two contributing stats are null", setTwoStats);
Assert.assertNotSame("Contributing stats objects are the same", setOneStats, setTwoStats);
setOneStats.addValue(2);
setOneStats.addValue(3);
setOneStats.addValue(5);
setOneStats.addValue(7);
setOneStats.addValue(11);
Assert.assertEquals("Wrong number of set one values", 5, setOneStats.getN());
Assert.assertTrue("Wrong sum of set one values", Precision.equals(28.0, setOneStats.getSum(), 1));
setTwoStats.addValue(2);
setTwoStats.addValue(4);
setTwoStats.addValue(8);
Assert.assertEquals("Wrong number of set two values", 3, setTwoStats.getN());
Assert.assertTrue("Wrong sum of set two values", Precision.equals(14.0, setTwoStats.getSum(), 1));
Assert.assertEquals("Wrong number of aggregate values", 8, aggregate.getN());
Assert.assertTrue("Wrong aggregate sum", Precision.equals(42.0, aggregate.getSum(), 1));
}
/**
* Verify that aggregating over a partition gives the same results
* as direct computation.
*
* 1) Randomly generate a dataset of 10-100 values
* from [-100, 100]
* 2) Divide the dataset it into 2-5 partitions
* 3) Create an AggregateSummaryStatistic and ContributingStatistics
* for each partition
* 4) Compare results from the AggregateSummaryStatistic with values
* returned by a single SummaryStatistics instance that is provided
* the full dataset
*/
@Test
public void testAggregationConsistency() {
// Generate a random sample and random partition
double[] totalSample = generateSample();
double[][] subSamples = generatePartition(totalSample);
int nSamples = subSamples.length;
// Create aggregator and total stats for comparison
AggregateSummaryStatistics aggregate = new AggregateSummaryStatistics();
SummaryStatistics totalStats = new SummaryStatistics();
// Create array of component stats
SummaryStatistics componentStats[] = new SummaryStatistics[nSamples];
for (int i = 0; i < nSamples; i++) {
// Make componentStats[i] a contributing statistic to aggregate
componentStats[i] = aggregate.createContributingStatistics();
// Add values from subsample
for (int j = 0; j < subSamples[i].length; j++) {
componentStats[i].addValue(subSamples[i][j]);
}
}
// Compute totalStats directly
for (int i = 0; i < totalSample.length; i++) {
totalStats.addValue(totalSample[i]);
}
/*
* Compare statistics in totalStats with aggregate.
* Note that guaranteed success of this comparison depends on the
* fact that <aggregate> gets values in exactly the same order
* as <totalStats>.
*
*/
Assert.assertEquals(totalStats.getSummary(), aggregate.getSummary());
}
/**
 * Exercises {@code AggregateSummaryStatistics.aggregate(...)}: a random
 * dataset of 10-100 values from [-100, 100] is split into 2-5 partitions,
 * per-partition statistics are computed and aggregated, and the result is
 * compared with a single SummaryStatistics computed over the full sample.
 */
@Test
public void testAggregate() {
    // Random dataset and a random partition of it
    final double[] sample = generateSample();
    final double[][] partition = generatePartition(sample);

    // Statistics over the complete sample, computed directly
    final SummaryStatistics direct = new SummaryStatistics();
    for (final double value : sample) {
        direct.addValue(value);
    }

    // Per-partition statistics, collected for aggregation
    final Collection<SummaryStatistics> parts = new ArrayList<SummaryStatistics>();
    for (final double[] segment : partition) {
        final SummaryStatistics segmentStats = new SummaryStatistics();
        for (final double value : segment) {
            segmentStats.addValue(value);
        }
        parts.add(segmentStats);
    }

    // Aggregated view must agree with the direct computation
    final StatisticalSummary aggregated = AggregateSummaryStatistics.aggregate(parts);
    assertEquals(direct.getSummary(), aggregated, 10E-12);
}
/**
 * Degenerate case: every partition holds exactly one value. The aggregated
 * statistics must match the statistics computed directly over the combined
 * sample.
 */
@Test
public void testAggregateDegenerate() {
    final double[] totalSample = {1, 2, 3, 4, 5};
    final double[][] subSamples = {{1}, {2}, {3}, {4}, {5}};
    // Compute combined stats directly
    final SummaryStatistics totalStats = new SummaryStatistics();
    for (int i = 0; i < totalSample.length; i++) {
        totalStats.addValue(totalSample[i]);
    }
    // Now compute subsample stats individually and aggregate.
    // Bounds are derived from the fixture (instead of a repeated literal 5)
    // so the test stays correct if the fixture changes size.
    final SummaryStatistics[] subSampleStats = new SummaryStatistics[subSamples.length];
    for (int i = 0; i < subSamples.length; i++) {
        subSampleStats[i] = new SummaryStatistics();
    }
    final Collection<SummaryStatistics> aggregate = new ArrayList<SummaryStatistics>();
    for (int i = 0; i < subSamples.length; i++) {
        for (int j = 0; j < subSamples[i].length; j++) {
            subSampleStats[i].addValue(subSamples[i][j]);
        }
        aggregate.add(subSampleStats[i]);
    }
    // Compare values
    final StatisticalSummaryValues aggregatedStats = AggregateSummaryStatistics.aggregate(aggregate);
    assertEquals(totalStats.getSummary(), aggregatedStats, 10E-12);
}
/**
 * Special values (infinity, NaN) must survive aggregation exactly as they do
 * in a direct computation over the combined sample.
 */
@Test
public void testAggregateSpecialValues() {
    final double[] totalSample = {Double.POSITIVE_INFINITY, 2, 3, Double.NaN, 5};
    final double[][] subSamples = {{Double.POSITIVE_INFINITY, 2}, {3}, {Double.NaN}, {5}};
    // Compute combined stats directly
    final SummaryStatistics totalStats = new SummaryStatistics();
    for (int i = 0; i < totalSample.length; i++) {
        totalStats.addValue(totalSample[i]);
    }
    // Now compute subsample stats individually and aggregate.
    // Sizes are derived from the fixture: the original allocated an array of
    // size 5 while only 4 partitions exist, leaving a dead slot.
    final SummaryStatistics[] subSampleStats = new SummaryStatistics[subSamples.length];
    for (int i = 0; i < subSamples.length; i++) {
        subSampleStats[i] = new SummaryStatistics();
    }
    final Collection<SummaryStatistics> aggregate = new ArrayList<SummaryStatistics>();
    for (int i = 0; i < subSamples.length; i++) {
        for (int j = 0; j < subSamples[i].length; j++) {
            subSampleStats[i].addValue(subSamples[i][j]);
        }
        aggregate.add(subSampleStats[i]);
    }
    // Compare values
    final StatisticalSummaryValues aggregatedStats = AggregateSummaryStatistics.aggregate(aggregate);
    assertEquals(totalStats.getSummary(), aggregatedStats, 10E-12);
}
/**
 * Asserts that two statistical summaries agree up to {@code delta}, with
 * NaNs and infinities expected in matching positions. Count, min and max
 * must agree exactly; {@code delta} applies only to sum, mean, variance
 * and standard deviation.
 */
protected static void assertEquals(StatisticalSummary expected, StatisticalSummary observed, double delta) {
    // Exact agreement required for these
    Assert.assertEquals(expected.getN(), observed.getN());
    TestUtils.assertEquals(expected.getMax(), observed.getMax(), 0);
    TestUtils.assertEquals(expected.getMin(), observed.getMin(), 0);
    // Tolerance-based agreement for the accumulated moments
    TestUtils.assertEquals(expected.getSum(), observed.getSum(), delta);
    TestUtils.assertEquals(expected.getMean(), observed.getMean(), delta);
    TestUtils.assertEquals(expected.getVariance(), observed.getVariance(), delta);
    TestUtils.assertEquals(expected.getStandardDeviation(), observed.getStandardDeviation(), delta);
}
/**
 * Draws a random sample of doubles. The sample size is uniform on
 * [10, 100] and each value is uniform on [-100, 100].
 *
 * @return array of random double values
 */
private double[] generateSample() {
    final int sampleSize = new UniformIntegerDistribution(10, 100).sample();
    return new UniformRealDistribution(-100, 100).sample(sampleSize);
}
/**
 * Partitions {@code sample} into up to 5 sequential subsamples with
 * randomly selected cut points.
 *
 * <p>The original implementation tracked the segment start in two variables
 * ({@code cur} and {@code offset}) that were always equal at the copy site,
 * and deep-copied the rows when trimming the result; both redundancies are
 * removed here without changing behavior.</p>
 *
 * @param sample array to partition
 * @return array whose rows are the partition segments, in order
 */
private double[][] generatePartition(double[] sample) {
    final int length = sample.length;
    final double[][] out = new double[5][];
    int cur = 0;          // start index of the segment being defined
    int sampleCount = 0;  // number of segments defined so far
    for (int i = 0; i < 5; i++) {
        if (cur == length) {
            break; // sample exhausted before filling all 5 slots
        }
        // Last slot, or only one element left: the segment takes the remainder;
        // otherwise pick a random end point in [cur, length - 1].
        final int next;
        if (i == 4 || cur == length - 1) {
            next = length - 1;
        } else {
            next = (new UniformIntegerDistribution(cur, length - 1)).sample();
        }
        final int subLength = next - cur + 1;
        out[i] = new double[subLength];
        System.arraycopy(sample, cur, out[i], 0, subLength);
        cur = next + 1;
        sampleCount++;
    }
    if (sampleCount < 5) {
        // Trim unused trailing rows; the row arrays themselves are freshly
        // allocated above and never mutated, so they can be shared.
        final double[][] out2 = new double[sampleCount][];
        System.arraycopy(out, 0, out2, 0, sampleCount);
        return out2;
    }
    return out;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.extractor;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.apache.jmeter.assertions.AssertionResult;
import org.apache.jmeter.processor.PostProcessor;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.testelement.AbstractScopedTestElement;
import org.apache.jmeter.testelement.property.BooleanProperty;
import org.apache.jmeter.testelement.property.IntegerProperty;
import org.apache.jmeter.threads.JMeterContext;
import org.apache.jmeter.threads.JMeterVariables;
import org.apache.jmeter.util.TidyException;
import org.apache.jmeter.util.XPathUtil;
import org.apache.jorphan.util.JMeterError;
import org.apache.jorphan.util.JOrphanUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
/**
* Extracts text from (X)HTML response using XPath query language
* Example XPath queries:
* <dl>
* <dt>/html/head/title</dt>
* <dd>extracts Title from HTML response</dd>
 * <dt>//form[@name='countryForm']//select[@name='country']/option[text()='Czech Republic']/@value</dt>
 * <dd>extracts the value attribute of the option element whose text is
 * 'Czech Republic', inside the select element named 'country', inside the
 * form named 'countryForm'</dd>
* <dt>//head</dt>
* <dd>extracts the XML fragment for head node.</dd>
* <dt>//head/text()</dt>
* <dd>extracts the text content for head node.</dd>
* </dl>
 * See org.apache.jmeter.extractor.TestXPathExtractor for unit tests.
*/
public class XPathExtractor extends AbstractScopedTestElement implements
        PostProcessor, Serializable {

    private static final Logger log = LoggerFactory.getLogger(XPathExtractor.class);

    private static final long serialVersionUID = 242L;

    /** Sentinel match number meaning "return all matches". */
    private static final int DEFAULT_VALUE = -1;
    public static final String DEFAULT_VALUE_AS_STRING = Integer.toString(DEFAULT_VALUE);

    /** Suffix of the variable that records how many matches were found. */
    private static final String REF_MATCH_NR = "matchNr"; // $NON-NLS-1$

    //+ JMX file attributes
    private static final String XPATH_QUERY = "XPathExtractor.xpathQuery"; // $NON-NLS-1$
    private static final String REFNAME = "XPathExtractor.refname"; // $NON-NLS-1$
    private static final String DEFAULT = "XPathExtractor.default"; // $NON-NLS-1$
    private static final String TOLERANT = "XPathExtractor.tolerant"; // $NON-NLS-1$
    private static final String NAMESPACE = "XPathExtractor.namespace"; // $NON-NLS-1$
    private static final String QUIET = "XPathExtractor.quiet"; // $NON-NLS-1$
    private static final String REPORT_ERRORS = "XPathExtractor.report_errors"; // $NON-NLS-1$
    private static final String SHOW_WARNINGS = "XPathExtractor.show_warnings"; // $NON-NLS-1$
    private static final String DOWNLOAD_DTDS = "XPathExtractor.download_dtds"; // $NON-NLS-1$
    private static final String WHITESPACE = "XPathExtractor.whitespace"; // $NON-NLS-1$
    private static final String VALIDATE = "XPathExtractor.validate"; // $NON-NLS-1$
    private static final String FRAGMENT = "XPathExtractor.fragment"; // $NON-NLS-1$
    private static final String MATCH_NUMBER = "XPathExtractor.matchNumber"; // $NON-NLS-1$
    //- JMX file attributes

    /** Builds the name of a derived variable, e.g. {@code refName_matchNr}. */
    private String concat(String s1, String s2){
        return s1 + "_" + s2; // $NON-NLS-1$
    }

    /** Builds the name of an indexed match variable, e.g. {@code refName_1}. */
    private String concat(String s1, int i){
        return s1 + "_" + i; // $NON-NLS-1$
    }

    /**
     * Do the job - extract value from (X)HTML response using XPath Query.
     * Return value as variable defined by REFNAME. Returns DEFAULT value
     * if not found. Also maintains {@code refName_matchNr} and the indexed
     * variables {@code refName_1 .. refName_n}, clearing stale ones from a
     * previous invocation.
     */
    @Override
    public void process() {
        JMeterContext context = getThreadContext();
        final SampleResult previousResult = context.getPreviousResult();
        if (previousResult == null){
            return; // nothing to process
        }
        JMeterVariables vars = context.getVariables();
        String refName = getRefName();
        vars.put(refName, getDefaultValue());
        final String matchNR = concat(refName, REF_MATCH_NR);
        int prevCount = 0; // number of previous matches
        try {
            prevCount = Integer.parseInt(vars.get(matchNR));
        } catch (NumberFormatException e) {
            // ignored: no (or malformed) previous match count
        }
        vars.put(matchNR, "0"); // In case parse fails // $NON-NLS-1$
        vars.remove(concat(refName, "1")); // In case parse fails // $NON-NLS-1$
        int matchNumber = getMatchNumber();
        List<String> matches = new ArrayList<>();
        try{
            if (isScopeVariable()){
                // Scope is a named variable: parse its content
                String inputString = vars.get(getVariableName());
                if (inputString != null) {
                    if (inputString.length() > 0) {
                        Document d = parseResponse(inputString);
                        getValuesForXPath(d, getXPathQuery(), matches, matchNumber);
                    }
                } else {
                    if (log.isWarnEnabled()) {
                        log.warn("No variable '{}' found to process by XPathExtractor '{}', skipping processing",
                                getVariableName(), getName());
                    }
                }
            } else {
                // Scope is the sample result(s); parse each in turn
                List<SampleResult> samples = getSampleList(previousResult);
                for (SampleResult res : samples) {
                    Document d = parseResponse(res.getResponseDataAsString());
                    getValuesForXPath(d, getXPathQuery(), matches, matchNumber);
                }
            }
            final int matchCount = matches.size();
            vars.put(matchNR, String.valueOf(matchCount));
            if (matchCount > 0){
                String value = matches.get(0);
                if (value != null) {
                    vars.put(refName, value);
                }
                // Store each match in an indexed variable refName_1 .. refName_matchCount
                for (int i = 0; i < matchCount; i++){
                    value = matches.get(i);
                    if (value != null) {
                        vars.put(concat(refName, i + 1), matches.get(i));
                    }
                }
            }
            vars.remove(concat(refName, matchCount + 1)); // Just in case
            // Clear any other remaining variables from a previous run
            for (int i = matchCount + 2; i <= prevCount; i++) {
                vars.remove(concat(refName, i));
            }
        } catch (IOException e) { // e.g. DTD not reachable
            log.error("IOException on ({})", getXPathQuery(), e);
            AssertionResult ass = new AssertionResult(getName());
            ass.setError(true);
            ass.setFailureMessage("IOException: " + e.getLocalizedMessage());
            previousResult.addAssertionResult(ass);
            previousResult.setSuccessful(false);
        } catch (ParserConfigurationException e) { // Should not happen
            // was "errrorMessage" (typo) in the original
            final String errorMessage = "ParserConfigurationException while processing (" + getXPathQuery() + ")";
            log.error(errorMessage, e);
            throw new JMeterError(errorMessage, e);
        } catch (SAXException e) { // Can happen for bad input document
            if (log.isWarnEnabled()) {
                log.warn("SAXException while processing ({}). {}", getXPathQuery(), e.getLocalizedMessage());
            }
            addAssertionFailure(previousResult, e, false); // Should this also fail the sample?
        } catch (TransformerException e) { // Can happen for incorrect XPath expression
            if (log.isWarnEnabled()) {
                log.warn("TransformerException while processing ({}). {}", getXPathQuery(), e.getLocalizedMessage());
            }
            addAssertionFailure(previousResult, e, false);
        } catch (TidyException e) {
            // Will already have been logged by XPathUtil
            addAssertionFailure(previousResult, e, true); // fail the sample
        }
    }

    /**
     * Attaches a failed assertion result to the sample, optionally marking
     * the sample itself as unsuccessful.
     */
    private void addAssertionFailure(final SampleResult previousResult,
            final Throwable thrown, final boolean setFailed) {
        AssertionResult ass = new AssertionResult(getName()); // $NON-NLS-1$
        ass.setFailure(true);
        ass.setFailureMessage(thrown.getLocalizedMessage() + "\nSee log file for further details.");
        previousResult.addAssertionResult(ass);
        if (setFailed){
            previousResult.setSuccessful(false);
        }
    }

    /*============= object properties ================*/

    /** Sets the XPath query to apply to the response. */
    public void setXPathQuery(String val){
        setProperty(XPATH_QUERY, val);
    }

    /** @return the configured XPath query */
    public String getXPathQuery(){
        return getPropertyAsString(XPATH_QUERY);
    }

    /** Sets the name of the JMeter variable that receives the result. */
    public void setRefName(String refName) {
        setProperty(REFNAME, refName);
    }

    /** @return the name of the JMeter variable that receives the result */
    public String getRefName() {
        return getPropertyAsString(REFNAME);
    }

    /** Sets the value stored when the query yields no match. */
    public void setDefaultValue(String val) {
        setProperty(DEFAULT, val);
    }

    /** @return the value stored when the query yields no match */
    public String getDefaultValue() {
        return getPropertyAsString(DEFAULT);
    }

    /** Enables tolerant (Tidy) parsing of non-well-formed responses. */
    public void setTolerant(boolean val) {
        setProperty(new BooleanProperty(TOLERANT, val));
    }

    /** @return true if tolerant (Tidy) parsing is enabled */
    public boolean isTolerant() {
        return getPropertyAsBoolean(TOLERANT);
    }

    /** Enables namespace-aware parsing. */
    public void setNameSpace(boolean val) {
        setProperty(new BooleanProperty(NAMESPACE, val));
    }

    /** @return true if parsing is namespace-aware */
    public boolean useNameSpace() {
        return getPropertyAsBoolean(NAMESPACE);
    }

    /** Enables reporting of parser errors (default false). */
    public void setReportErrors(boolean val) {
        setProperty(REPORT_ERRORS, val, false);
    }

    /** @return true if parser errors should be reported */
    public boolean reportErrors() {
        return getPropertyAsBoolean(REPORT_ERRORS, false);
    }

    /** Enables display of parser warnings (default false). */
    public void setShowWarnings(boolean val) {
        setProperty(SHOW_WARNINGS, val, false);
    }

    /** @return true if parser warnings should be shown */
    public boolean showWarnings() {
        return getPropertyAsBoolean(SHOW_WARNINGS, false);
    }

    /** Enables quiet parsing (default true). */
    public void setQuiet(boolean val) {
        setProperty(QUIET, val, true);
    }

    /** @return true if quiet parsing is enabled (default true) */
    public boolean isQuiet() {
        return getPropertyAsBoolean(QUIET, true);
    }

    /**
     * Should we return fragment as text, rather than text of fragment?
     * @return true if we should return fragment rather than text
     */
    public boolean getFragment() {
        return getPropertyAsBoolean(FRAGMENT, false);
    }

    /**
     * Should we return fragment as text, rather than text of fragment?
     * @param selected true to return fragment.
     */
    public void setFragment(boolean selected) {
        setProperty(FRAGMENT, selected, false);
    }

    /*================= internal business =================*/

    /**
     * Converts (X)HTML response to DOM object Tree.
     * This version cares of charset of response.
     *
     * @param unicodeData the response content as a (unicode) String
     * @return the parsed document
     * @throws IOException if the data cannot be read (e.g. DTD not reachable)
     * @throws ParserConfigurationException if the parser cannot be configured
     * @throws SAXException if the document cannot be parsed
     * @throws TidyException if tolerant parsing fails
     */
    private Document parseResponse(String unicodeData)
            throws IOException, ParserConfigurationException, SAXException, TidyException
    {
        //TODO: validate contentType for reasonable types?

        // NOTE: responseData encoding is server specific
        // Therefore we do byte -> unicode -> byte conversion
        // to ensure UTF-8 encoding as required by XPathUtil
        // convert unicode String -> UTF-8 bytes
        byte[] utf8data = unicodeData.getBytes(StandardCharsets.UTF_8);
        ByteArrayInputStream in = new ByteArrayInputStream(utf8data);
        boolean isXML = JOrphanUtils.isXML(utf8data);
        // this method assumes UTF-8 input data
        return XPathUtil.makeDocument(in, false, false, useNameSpace(), isTolerant(), isQuiet(), showWarnings(), reportErrors()
                , isXML, isDownloadDTDs());
    }

    /**
     * Extract value from Document d by XPath query.
     * @param d the document
     * @param query the query to execute
     * @param matchStrings list of matched strings (may include nulls)
     * @param matchNumber int Match Number
     *
     * @throws TransformerException if the XPath expression cannot be evaluated
     */
    private void getValuesForXPath(Document d, String query, List<String> matchStrings, int matchNumber)
            throws TransformerException {
        XPathUtil.putValuesForXPathInList(d, query, matchStrings, getFragment(), matchNumber);
    }

    /** Enables whitespace normalization (default false). */
    public void setWhitespace(boolean selected) {
        setProperty(WHITESPACE, selected, false);
    }

    /** @return true if whitespace normalization is enabled */
    public boolean isWhitespace() {
        return getPropertyAsBoolean(WHITESPACE, false);
    }

    /** Enables validating parsing. */
    public void setValidating(boolean selected) {
        setProperty(VALIDATE, selected);
    }

    /** @return true if validating parsing is enabled */
    public boolean isValidating() {
        return getPropertyAsBoolean(VALIDATE, false);
    }

    /** Enables downloading of external DTDs (default false). */
    public void setDownloadDTDs(boolean selected) {
        setProperty(DOWNLOAD_DTDS, selected, false);
    }

    /** @return true if external DTDs may be downloaded */
    public boolean isDownloadDTDs() {
        return getPropertyAsBoolean(DOWNLOAD_DTDS, false);
    }

    /**
     * Set which Match to use. This can be any positive number, indicating the
     * exact match to use, or <code>0</code>, which is interpreted as meaning random.
     *
     * @param matchNumber The number of the match to be used
     */
    public void setMatchNumber(int matchNumber) {
        setProperty(new IntegerProperty(MATCH_NUMBER, matchNumber));
    }

    /**
     * Set which Match to use. This can be any positive number, indicating the
     * exact match to use, or <code>0</code>, which is interpreted as meaning random.
     *
     * @param matchNumber The number of the match to be used
     */
    public void setMatchNumber(String matchNumber) {
        setProperty(MATCH_NUMBER, matchNumber);
    }

    /**
     * Return which Match to use. This can be any positive number, indicating the
     * exact match to use, or <code>0</code>, which is interpreted as meaning random.
     *
     * @return matchNumber The number of the match to be used
     */
    public int getMatchNumber() {
        return getPropertyAsInt(MATCH_NUMBER, DEFAULT_VALUE);
    }

    /**
     * Return which Match to use. This can be any positive number, indicating the
     * exact match to use, or <code>0</code>, which is interpreted as meaning random.
     *
     * @return matchNumber The number of the match to be used
     */
    public String getMatchNumberAsString() {
        return getPropertyAsString(MATCH_NUMBER, DEFAULT_VALUE_AS_STRING);
    }
}
| |
//
// Triple Play - utilities for use in PlayN-based games
// Copyright (c) 2011-2014, Three Rings Design, Inc. - All rights reserved.
// http://github.com/threerings/tripleplay/blob/master/LICENSE
package tripleplay.platform;
import cli.MonoTouch.Foundation.NSRange;
import cli.MonoTouch.Foundation.NSString;
import cli.MonoTouch.UIKit.IUITextInputTraits;
import cli.MonoTouch.UIKit.UIColor;
import cli.MonoTouch.UIKit.UIControlContentVerticalAlignment;
import cli.MonoTouch.UIKit.UIFont;
import cli.MonoTouch.UIKit.UIKeyboardType;
import cli.MonoTouch.UIKit.UIReturnKeyType;
import cli.MonoTouch.UIKit.UITextAlignment;
import cli.MonoTouch.UIKit.UITextAutocapitalizationType;
import cli.MonoTouch.UIKit.UITextAutocorrectionType;
import cli.MonoTouch.UIKit.UITextField;
import cli.MonoTouch.UIKit.UITextFieldDelegate;
import cli.MonoTouch.UIKit.UITextRange;
import cli.MonoTouch.UIKit.UITextView;
import cli.MonoTouch.UIKit.UIView;
import cli.System.Drawing.RectangleF;
import pythagoras.f.IRectangle;
import playn.core.Color;
import playn.core.Font;
import playn.core.Keyboard;
import react.Connection;
import react.Slot;
import tripleplay.ui.Field;
import tripleplay.ui.Style;
/**
 * Native text-entry overlay for iOS built on the MonoTouch (cli.*) UIKit
 * bindings. Two concrete variants exist: {@link SingleLine} backed by a
 * {@code UITextField} and {@link MultiLine} backed by a {@code UITextView}.
 * The overlay keeps the native view's text in sync with the TriplePlay
 * {@code Field} and applies the field's resolved styles to the native view.
 */
public abstract class IOSNativeTextField extends IOSNativeOverlay
    implements NativeTextField
{
    /** Single-line variant backed by a native {@code UITextField}. */
    public static class SingleLine extends IOSNativeTextField
    {
        public SingleLine (IOSTextFieldHandler handler, IOSNativeTextField prior,
                           Field.Native field) {
            super(handler, new UITextField(), prior, field);
            _field = (UITextField)view;
            // center the text vertically within the native field
            _field.set_VerticalAlignment(
                UIControlContentVerticalAlignment.wrap(UIControlContentVerticalAlignment.Center));

            // all fields close the keyboard when the return key is used
            _field.set_Delegate(new UITextFieldDelegate() {
                @Override public boolean ShouldReturn (UITextField field) {
                    _pressedReturn = true;
                    // NOTE(review): _kfc appears to be a keyboard focus controller;
                    // when absent, or when it allows unfocus-on-enter, give up first
                    // responder status, otherwise just report editing finished —
                    // confirm semantics against IOSTextFieldHandler/TPPlatform
                    if (_handler._platform._kfc == null ||
                        _handler._platform._kfc.unfocusForEnter()) {
                        field.ResignFirstResponder();
                    } else {
                        didFinish();
                    }
                    // returning false suppresses UIKit's default return handling
                    return false;
                }

                @Override public boolean ShouldChangeCharacters (UITextField uiTextField,
                    NSRange nsRange, String s) {
                    // flag this as "keyboard activity"
                    _handler._platform._activity.emit();
                    // build the text that would result from the edit and let the
                    // Field's validator accept or reject the change
                    String newString = new NSString(uiTextField.get_Text())
                        .Replace(nsRange, new NSString(s)).ToString();
                    return _element.isValid(newString);
                }
            });
        }

        @Override public void setEnabled (boolean enabled) {
            _field.set_Enabled(enabled);
        }

        // Switch to a MultiLine overlay if multi-line is requested; otherwise reuse this one.
        @Override public IOSNativeTextField refresh (boolean multiLine) {
            return multiLine ? new MultiLine(_handler, this, _element) : this;
        }

        // Inserts text at the current selection; false if there is no selection/caret.
        @Override public boolean insert (String text) {
            UITextRange range = _field.get_SelectedTextRange();
            if (range == null) {
                return false;
            }
            _field.ReplaceText(range, text);
            return true;
        }

        @Override protected UIFont getNativeFont () {
            return _field.get_Font();
        }

        @Override protected void setNativeFont (UIFont font) {
            _field.set_Font(font);
        }

        @Override protected String getNativeText () {
            return _field.get_Text();
        }

        @Override protected void setNativeText (String text) {
            _field.set_Text(text);
        }

        @Override protected IUITextInputTraits getTraits () {
            return _field;
        }

        // Reports whether editing ended via the return key, then resets the flag.
        @Override protected void didFinish () {
            _element.finishedEditing().emit(_pressedReturn);
            _pressedReturn = false;
        }

        @Override protected void setAlignment (UITextAlignment halign) {
            _field.set_TextAlignment(halign);
        }

        @Override protected void setColor(UIColor color) {
            _field.set_TextColor(color);
        }

        protected final UITextField _field;
        // set when the return key triggered the end of editing; consumed by didFinish()
        protected boolean _pressedReturn;
    }

    /** Multi-line variant backed by a native {@code UITextView}. */
    public static class MultiLine extends IOSNativeTextField
    {
        public MultiLine (IOSTextFieldHandler handler, IOSNativeTextField prior,
                          Field.Native field) {
            super(handler, new UITextView(), prior, field);
            _field = (UITextView)view;
            _field.set_Editable(true);
            // TODO: do we need to call set_Delegate?
        }

        @Override public void setEnabled (boolean enabled) {
            _field.set_Editable(enabled);
        }

        // Switch to a SingleLine overlay if single-line is requested; otherwise reuse this one.
        @Override public IOSNativeTextField refresh (boolean multiLine) {
            return multiLine ? this : new SingleLine(_handler, this, _element);
        }

        @Override protected UIFont getNativeFont () {
            return _field.get_Font();
        }

        @Override protected void setNativeFont (UIFont font) {
            _field.set_Font(font);
        }

        @Override protected String getNativeText () {
            return _field.get_Text();
        }

        @Override protected void setNativeText (String text) {
            _field.set_Text(text);
        }

        @Override protected IUITextInputTraits getTraits () {
            return _field;
        }

        // Multi-line fields never finish via the return key (it inserts a newline).
        @Override protected void didFinish () {
            _element.finishedEditing().emit(false);
        }

        @Override protected void setAlignment (UITextAlignment halign) {
            _field.set_TextAlignment(halign);
        }

        @Override protected void setColor(UIColor color) {
            _field.set_TextColor(color);
        }

        // Inserts text at the current selection; false if there is no selection/caret.
        @Override public boolean insert (String text) {
            UITextRange range = _field.get_SelectedTextRange();
            if (range == null) {
                return false;
            }
            _field.ReplaceText(range, text);
            return true;
        }

        protected final UITextView _field;
    }

    /**
     * Creates the overlay around the given native view. If {@code prior} is
     * supplied (i.e. this overlay replaces an earlier single/multi-line
     * variant), the prior overlay's text subscription is disconnected so only
     * one overlay mirrors the Field at a time.
     */
    public IOSNativeTextField (IOSTextFieldHandler handler, UIView view, IOSNativeTextField prior,
                               Field.Native field) {
        super(view);
        _element = field;
        _handler = handler;
        if (prior != null) {
            prior._textConn.disconnect();
        }
        // mirror Field text changes into the native view, avoiding redundant sets
        _textConn = _element.field().text.connect(new Slot<String>() {
            @Override public void onEmit (String value) {
                String current = getNativeText();
                if (current == null || !current.equals(value)) {
                    setNativeText(value);
                }
            }
        });
    }

    /** Returns the underlying native UIKit view. */
    public UIView getView () {
        return view;
    }

    /** Re-applies the current Field styles to the underlying native field. */
    public void validateStyles () {
        // Keyboard type
        Keyboard.TextType type = _element.resolveStyle(Field.TEXT_TYPE);
        switch (type) {
        case NUMBER:
            getTraits().set_KeyboardType(UIKeyboardType.wrap(UIKeyboardType.NumberPad));
            break;
        case EMAIL:
            getTraits().set_KeyboardType(UIKeyboardType.wrap(UIKeyboardType.EmailAddress));
            break;
        case URL:
            getTraits().set_KeyboardType(UIKeyboardType.wrap(UIKeyboardType.Url));
            break;
        case DEFAULT:
            getTraits().set_KeyboardType(UIKeyboardType.wrap(UIKeyboardType.Default));
            break;
        }

        // Font
        Font font = _element.resolveStyle(Style.FONT);
        setNativeFont(_handler.getUIFont(font));

        // Automatic capitalization
        boolean enable = _element.resolveStyle(Field.AUTOCAPITALIZATION);
        getTraits().set_AutocapitalizationType(UITextAutocapitalizationType.wrap(
            enable ? UITextAutocapitalizationType.Sentences : UITextAutocapitalizationType.None));

        // Automatic correction
        enable = _element.resolveStyle(Field.AUTOCORRECTION);
        getTraits().set_AutocorrectionType(UITextAutocorrectionType.wrap(
            enable ? UITextAutocorrectionType.Yes : UITextAutocorrectionType.No));

        // Hidden typing
        getTraits().set_SecureTextEntry(_element.resolveStyle(Field.SECURE_TEXT_ENTRY));

        // Return key label
        String label = _element.resolveStyle(Field.RETURN_KEY_LABEL);
        setReturnKeyLabel(label);

        // Alignment
        Style.HAlign halign = _element.resolveStyle(Style.HALIGN);
        setAlignment(UITextAlignment.wrap(toMono(halign)));

        // Color
        int color = _element.resolveStyle(Style.COLOR);
        setColor(UIColor.FromRGB(Color.red(color), Color.green(color), Color.blue(color)));
    }

    // Once added to the view hierarchy, register with the handler and show the Field's text.
    @Override protected void didAdd () {
        _handler.activate(this);
        setNativeText(_element.field().text.get());
    }

    @Override protected void didRemove () {
        _handler.deactivate(this);
    }

    // Request keyboard focus for the native view.
    @Override public void focus () {
        view.BecomeFirstResponder();
    }

    /**
     * Returns an overlay matching the Field's current MULTILINE style — either
     * this instance or a freshly created replacement — with styles re-applied.
     */
    public final IOSNativeTextField refresh () {
        IOSNativeTextField nfield = refresh(_element.resolveStyle(Field.MULTILINE));
        nfield.validateStyles();
        return nfield;
    }

    /** Returns this overlay or a new one of the requested line-ness. */
    abstract protected IOSNativeTextField refresh (boolean multiLine);

    abstract protected UIFont getNativeFont ();
    abstract protected void setNativeFont (UIFont font);

    abstract protected String getNativeText ();
    abstract protected void setNativeText (String text);

    /** Returns the native object carrying keyboard/correction traits. */
    abstract protected IUITextInputTraits getTraits ();

    abstract protected void setAlignment (UITextAlignment halign);
    abstract protected void setColor (UIColor color);

    /** Called when editing ends; emits the Field's finishedEditing signal. */
    abstract protected void didFinish ();

    // NOTE(review): presumably invoked by the handler when editing begins — confirm caller.
    protected void didStart () {
        TPPlatform.instance()._focus.update(_element.field());
    }

    /** Maps a lower-cased label to the matching UIReturnKeyType; unknown labels are ignored. */
    protected void setReturnKeyLabel (String label) {
        if (label == null || label.isEmpty()) {
            getTraits().set_ReturnKeyType(UIReturnKeyType.wrap(UIReturnKeyType.Default));
            return;
        }

        label = label.toLowerCase();
        if (label.equals(getTraits().get_ReturnKeyType().ToString().toLowerCase())) {
            // NOOP: already showing the requested label
            return;
        }

        if (label.equals("go")) {
            getTraits().set_ReturnKeyType(UIReturnKeyType.wrap(UIReturnKeyType.Go));
        } else if (label.equals("google")) {
            getTraits().set_ReturnKeyType(UIReturnKeyType.wrap(UIReturnKeyType.Google));
        } else if (label.equals("join")) {
            getTraits().set_ReturnKeyType(UIReturnKeyType.wrap(UIReturnKeyType.Join));
        } else if (label.equals("next")) {
            getTraits().set_ReturnKeyType(UIReturnKeyType.wrap(UIReturnKeyType.Next));
        } else if (label.equals("route")) {
            getTraits().set_ReturnKeyType(UIReturnKeyType.wrap(UIReturnKeyType.Route));
        } else if (label.equals("search")) {
            getTraits().set_ReturnKeyType(UIReturnKeyType.wrap(UIReturnKeyType.Search));
        } else if (label.equals("send")) {
            getTraits().set_ReturnKeyType(UIReturnKeyType.wrap(UIReturnKeyType.Send));
        } else if (label.equals("yahoo")) {
            getTraits().set_ReturnKeyType(UIReturnKeyType.wrap(UIReturnKeyType.Yahoo));
        } else if (label.equals("done")) {
            getTraits().set_ReturnKeyType(UIReturnKeyType.wrap(UIReturnKeyType.Done));
        } else if (label.equals("emergencycall")) {
            getTraits().set_ReturnKeyType(UIReturnKeyType.wrap(UIReturnKeyType.EmergencyCall));
        }
    }

    // Pushes the native text through the Field's transform and into field().text.
    // NOTE(review): presumably invoked by the handler on text change — confirm caller.
    protected void handleNewValue () {
        String value = getNativeText();
        String transformed = _element.transform(value);
        if (!transformed.equals(value)) {
            // update the field ourselves in case transformed is the same value
            // currently held in field.text(), and therefore the update below
            // will NOOP.
            setNativeText(transformed);
            value = transformed;
        }
        _element.field().text.update(value);
    }

    @Override protected void adjustBounds (RectangleF fieldBounds) {
        // field fudged to the left 1 pixel to match PlayN text rendering.
        fieldBounds.set_X(fieldBounds.get_X() + 1);
        // ensure we're tall enough for a single line of text and the text cursor
        UIFont font = getNativeFont();
        if (fieldBounds.get_Height() < font.get_LineHeight()) {
            fieldBounds.set_Height(font.get_LineHeight());
        }
    }

    /** Converts a TriplePlay horizontal alignment to the MonoTouch constant. */
    protected static int toMono (Style.HAlign halign) {
        switch (halign) {
        case LEFT: default: return UITextAlignment.Left;
        case CENTER: return UITextAlignment.Center;
        case RIGHT: return UITextAlignment.Right;
        }
    }

    protected final Field.Native _element;
    protected final IOSTextFieldHandler _handler;
    // subscription mirroring Field text into the native view; disconnected on replacement
    protected final Connection _textConn;
    // NOTE(review): not referenced in this class — presumably used by a subclass or handler
    protected IRectangle _requestedBounds;
}
| |
package gov.va.medora.mdws.querysvc;
/**
* Please modify this class to meet your needs
* This class is not complete
*/
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import javax.xml.namespace.QName;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.jws.soap.SOAPBinding;
import javax.jws.soap.SOAPBinding.ParameterStyle;
import javax.xml.bind.annotation.XmlSeeAlso;
/**
* This class was generated by Apache CXF 2.1.3
* Wed Jan 05 13:14:31 MST 2011
* Generated source version: 2.1.3
*
*/
/**
 * Generated (Apache CXF) console smoke-test client for the MDWS QuerySvc SOAP
 * service. Resolves the WSDL, obtains the HTTP-POST port, invokes every
 * service operation once with empty-string arguments, and prints each result.
 *
 * <p>Usage: an optional first command-line argument overrides the WSDL
 * location (either a local file path or a URL); otherwise the packaged
 * default WSDL location is used.
 */
public final class QuerySvcHttpPost_QuerySvcHttpPost_Client {
// Qualified name of the <wsdl:service> element this client binds to.
private static final QName SERVICE_NAME = new QName("http://mdws.medora.va.gov/QuerySvc", "QuerySvc");
// Driver class only; never instantiated.
private QuerySvcHttpPost_QuerySvcHttpPost_Client() {
}
public static void main(String args[]) throws Exception {
// Resolve the WSDL: default to the packaged location, but allow an
// override on the command line as either a file path or a URL.
URL wsdlURL = QuerySvc.WSDL_LOCATION;
if (args.length > 0) {
File wsdlFile = new File(args[0]);
try {
if (wsdlFile.exists()) {
wsdlURL = wsdlFile.toURI().toURL();
} else {
wsdlURL = new URL(args[0]);
}
} catch (MalformedURLException e) {
// NOTE(review): a malformed argument is only logged; execution
// continues with the default packaged WSDL location.
e.printStackTrace();
}
}
QuerySvc ss = new QuerySvc(wsdlURL, SERVICE_NAME);
QuerySvcHttpPost port = ss.getQuerySvcHttpPost();
// Each block below invokes one operation with empty-string arguments and
// prints whatever the service returns. The sequence mirrors the order the
// code generator emitted; some calls (login, connect, select) presumably
// establish session state for later calls — verify before reordering.
{
System.out.println("Invoking match...");
java.lang.String _match_target = "";
gov.va.medora.mdws.querysvc.TaggedPatientArrays _match__return = port.match(_match_target);
System.out.println("match.result=" + _match__return);
}
{
System.out.println("Invoking select...");
java.lang.String _select_dfn = "";
gov.va.medora.mdws.querysvc.PatientTO _select__return = port.select(_select_dfn);
System.out.println("select.result=" + _select__return);
}
{
// NOTE(review): credentials are empty placeholders; fill in before use
// against a real endpoint (and never hard-code real credentials here).
System.out.println("Invoking login...");
java.lang.String _login_username = "";
java.lang.String _login_pwd = "";
java.lang.String _login_context = "";
gov.va.medora.mdws.querysvc.UserTO _login__return = port.login(_login_username, _login_pwd, _login_context);
System.out.println("login.result=" + _login__return);
}
{
System.out.println("Invoking cprsUserLookup...");
java.lang.String _cprsUserLookup_target = "";
gov.va.medora.mdws.querysvc.UserArray _cprsUserLookup__return = port.cprsUserLookup(_cprsUserLookup_target);
System.out.println("cprsUserLookup.result=" + _cprsUserLookup__return);
}
{
System.out.println("Invoking connect...");
java.lang.String _connect_sitelist = "";
gov.va.medora.mdws.querysvc.DataSourceArray _connect__return = port.connect(_connect_sitelist);
System.out.println("connect.result=" + _connect__return);
}
{
System.out.println("Invoking getVHA...");
gov.va.medora.mdws.querysvc.RegionArray _getVHA__return = port.getVHA();
System.out.println("getVHA.result=" + _getVHA__return);
}
{
System.out.println("Invoking userLookup...");
java.lang.String _userLookup_duz = "";
gov.va.medora.mdws.querysvc.UserTO _userLookup__return = port.userLookup(_userLookup_duz);
System.out.println("userLookup.result=" + _userLookup__return);
}
{
System.out.println("Invoking getVariableValue...");
java.lang.String _getVariableValue_arg = "";
gov.va.medora.mdws.querysvc.TextTO _getVariableValue__return = port.getVariableValue(_getVariableValue_arg);
System.out.println("getVariableValue.result=" + _getVariableValue__return);
}
{
// ddrLister: generic FileMan list query; all ten arguments are left empty.
System.out.println("Invoking ddrLister...");
java.lang.String _ddrLister_file = "";
java.lang.String _ddrLister_iens = "";
java.lang.String _ddrLister_fields = "";
java.lang.String _ddrLister_flags = "";
java.lang.String _ddrLister_maxrex = "";
java.lang.String _ddrLister_from = "";
java.lang.String _ddrLister_part = "";
java.lang.String _ddrLister_xref = "";
java.lang.String _ddrLister_screen = "";
java.lang.String _ddrLister_identifier = "";
gov.va.medora.mdws.querysvc.TextArray _ddrLister__return = port.ddrLister(_ddrLister_file, _ddrLister_iens, _ddrLister_fields, _ddrLister_flags, _ddrLister_maxrex, _ddrLister_from, _ddrLister_part, _ddrLister_xref, _ddrLister_screen, _ddrLister_identifier);
System.out.println("ddrLister.result=" + _ddrLister__return);
}
{
System.out.println("Invoking setVha...");
java.lang.String _setVha_sitesFileName = "";
gov.va.medora.mdws.querysvc.SiteArray _setVha__return = port.setVha(_setVha_sitesFileName);
System.out.println("setVha.result=" + _setVha__return);
}
{
System.out.println("Invoking getVersion...");
java.lang.String _getVersion__return = port.getVersion();
System.out.println("getVersion.result=" + _getVersion__return);
}
{
System.out.println("Invoking visitSite...");
java.lang.String _visitSite_pwd = "";
java.lang.String _visitSite_sitecode = "";
java.lang.String _visitSite_userSitecode = "";
java.lang.String _visitSite_userName = "";
java.lang.String _visitSite_duz = "";
java.lang.String _visitSite_ssn = "";
java.lang.String _visitSite_context = "";
gov.va.medora.mdws.querysvc.UserTO _visitSite__return = port.visitSite(_visitSite_pwd, _visitSite_sitecode, _visitSite_userSitecode, _visitSite_userName, _visitSite_duz, _visitSite_ssn, _visitSite_context);
System.out.println("visitSite.result=" + _visitSite__return);
}
{
System.out.println("Invoking siteHasPatch...");
java.lang.String _siteHasPatch_patchId = "";
gov.va.medora.mdws.querysvc.TaggedText _siteHasPatch__return = port.siteHasPatch(_siteHasPatch_patchId);
System.out.println("siteHasPatch.result=" + _siteHasPatch__return);
}
{
System.out.println("Invoking addDataSource...");
java.lang.String _addDataSource_id = "";
java.lang.String _addDataSource_name = "";
java.lang.String _addDataSource_datasource = "";
java.lang.String _addDataSource_port = "";
java.lang.String _addDataSource_modality = "";
java.lang.String _addDataSource_protocol = "";
java.lang.String _addDataSource_region = "";
gov.va.medora.mdws.querysvc.SiteTO _addDataSource__return = port.addDataSource(_addDataSource_id, _addDataSource_name, _addDataSource_datasource, _addDataSource_port, _addDataSource_modality, _addDataSource_protocol, _addDataSource_region);
System.out.println("addDataSource.result=" + _addDataSource__return);
}
{
System.out.println("Invoking disconnect...");
gov.va.medora.mdws.querysvc.TaggedTextArray _disconnect__return = port.disconnect();
System.out.println("disconnect.result=" + _disconnect__return);
}
{
System.out.println("Invoking ddrGetsEntry...");
java.lang.String _ddrGetsEntry_file = "";
java.lang.String _ddrGetsEntry_iens = "";
java.lang.String _ddrGetsEntry_flds = "";
java.lang.String _ddrGetsEntry_flags = "";
gov.va.medora.mdws.querysvc.TextArray _ddrGetsEntry__return = port.ddrGetsEntry(_ddrGetsEntry_file, _ddrGetsEntry_iens, _ddrGetsEntry_flds, _ddrGetsEntry_flags);
System.out.println("ddrGetsEntry.result=" + _ddrGetsEntry__return);
}
{
System.out.println("Invoking getFacadeVersion...");
gov.va.medora.mdws.querysvc.TextTO _getFacadeVersion__return = port.getFacadeVersion();
System.out.println("getFacadeVersion.result=" + _getFacadeVersion__return);
}
{
System.out.println("Invoking sitesHavePatch...");
java.lang.String _sitesHavePatch_sitelist = "";
java.lang.String _sitesHavePatch_patchId = "";
gov.va.medora.mdws.querysvc.TaggedTextArray _sitesHavePatch__return = port.sitesHavePatch(_sitesHavePatch_sitelist, _sitesHavePatch_patchId);
System.out.println("sitesHavePatch.result=" + _sitesHavePatch__return);
}
// Hard exit — presumably to terminate non-daemon CXF/JAX-WS transport
// threads that would otherwise keep the JVM alive. TODO confirm.
System.exit(0);
}
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.schemaorg.SchemaOrgTypeImpl;
import com.google.schemaorg.ValueType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.GoogConstants;
import com.google.schemaorg.goog.PopularityScoreSpecification;
/** Implementation of {@link GroceryStore}. */
public class GroceryStoreImpl extends StoreImpl implements GroceryStore {
// Closed set of property names a GroceryStore node may carry; built once.
private static final ImmutableSet<String> PROPERTY_SET = initializePropertySet();

/**
 * Assembles the fixed set of schema.org (plus goog.*) property names
 * recognized on a GroceryStore. Insertion order matches the original
 * generated listing (ImmutableSet preserves it).
 */
private static ImmutableSet<String> initializePropertySet() {
return ImmutableSet.<String>builder()
.add(
CoreConstants.PROPERTY_ADDITIONAL_PROPERTY,
CoreConstants.PROPERTY_ADDITIONAL_TYPE,
CoreConstants.PROPERTY_ADDRESS,
CoreConstants.PROPERTY_AGGREGATE_RATING,
CoreConstants.PROPERTY_ALTERNATE_NAME,
CoreConstants.PROPERTY_ALUMNI,
CoreConstants.PROPERTY_AREA_SERVED,
CoreConstants.PROPERTY_AWARD,
CoreConstants.PROPERTY_AWARDS,
CoreConstants.PROPERTY_BRANCH_CODE,
CoreConstants.PROPERTY_BRANCH_OF,
CoreConstants.PROPERTY_BRAND,
CoreConstants.PROPERTY_CONTACT_POINT,
CoreConstants.PROPERTY_CONTACT_POINTS,
CoreConstants.PROPERTY_CONTAINED_IN,
CoreConstants.PROPERTY_CONTAINED_IN_PLACE,
CoreConstants.PROPERTY_CONTAINS_PLACE,
CoreConstants.PROPERTY_CURRENCIES_ACCEPTED,
CoreConstants.PROPERTY_DEPARTMENT,
CoreConstants.PROPERTY_DESCRIPTION,
CoreConstants.PROPERTY_DISSOLUTION_DATE,
CoreConstants.PROPERTY_DUNS,
CoreConstants.PROPERTY_EMAIL,
CoreConstants.PROPERTY_EMPLOYEE,
CoreConstants.PROPERTY_EMPLOYEES,
CoreConstants.PROPERTY_EVENT,
CoreConstants.PROPERTY_EVENTS,
CoreConstants.PROPERTY_FAX_NUMBER,
CoreConstants.PROPERTY_FOUNDER,
CoreConstants.PROPERTY_FOUNDERS,
CoreConstants.PROPERTY_FOUNDING_DATE,
CoreConstants.PROPERTY_FOUNDING_LOCATION,
CoreConstants.PROPERTY_GEO,
CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER,
CoreConstants.PROPERTY_HAS_MAP,
CoreConstants.PROPERTY_HAS_OFFER_CATALOG,
CoreConstants.PROPERTY_HAS_POS,
CoreConstants.PROPERTY_IMAGE,
CoreConstants.PROPERTY_ISIC_V4,
CoreConstants.PROPERTY_LEGAL_NAME,
CoreConstants.PROPERTY_LOCATION,
CoreConstants.PROPERTY_LOGO,
CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE,
CoreConstants.PROPERTY_MAKES_OFFER,
CoreConstants.PROPERTY_MAP,
CoreConstants.PROPERTY_MAPS,
CoreConstants.PROPERTY_MEMBER,
CoreConstants.PROPERTY_MEMBER_OF,
CoreConstants.PROPERTY_MEMBERS,
CoreConstants.PROPERTY_NAICS,
CoreConstants.PROPERTY_NAME,
CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES,
CoreConstants.PROPERTY_OPENING_HOURS,
CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION,
CoreConstants.PROPERTY_OWNS,
CoreConstants.PROPERTY_PARENT_ORGANIZATION,
CoreConstants.PROPERTY_PAYMENT_ACCEPTED,
CoreConstants.PROPERTY_PHOTO,
CoreConstants.PROPERTY_PHOTOS,
CoreConstants.PROPERTY_POTENTIAL_ACTION,
CoreConstants.PROPERTY_PRICE_RANGE,
CoreConstants.PROPERTY_REVIEW,
CoreConstants.PROPERTY_REVIEWS,
CoreConstants.PROPERTY_SAME_AS,
CoreConstants.PROPERTY_SEEKS,
CoreConstants.PROPERTY_SERVICE_AREA,
CoreConstants.PROPERTY_SUB_ORGANIZATION,
CoreConstants.PROPERTY_TAX_ID,
CoreConstants.PROPERTY_TELEPHONE,
CoreConstants.PROPERTY_URL,
CoreConstants.PROPERTY_VAT_ID,
// goog.* extension properties
GoogConstants.PROPERTY_DETAILED_DESCRIPTION,
GoogConstants.PROPERTY_POPULARITY_SCORE)
.build();
}
static final class BuilderImpl extends SchemaOrgTypeImpl.BuilderImpl<GroceryStore.Builder>
implements GroceryStore.Builder {
@Override
public GroceryStore.Builder addAdditionalProperty(PropertyValue value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, value);
}
@Override
public GroceryStore.Builder addAdditionalProperty(PropertyValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, value.build());
}
@Override
public GroceryStore.Builder addAdditionalProperty(String value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, Text.of(value));
}
@Override
public GroceryStore.Builder addAdditionalType(URL value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, value);
}
@Override
public GroceryStore.Builder addAdditionalType(String value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, Text.of(value));
}
@Override
public GroceryStore.Builder addAddress(PostalAddress value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, value);
}
@Override
public GroceryStore.Builder addAddress(PostalAddress.Builder value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, value.build());
}
@Override
public GroceryStore.Builder addAddress(Text value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, value);
}
@Override
public GroceryStore.Builder addAddress(String value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, Text.of(value));
}
@Override
public GroceryStore.Builder addAggregateRating(AggregateRating value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value);
}
@Override
public GroceryStore.Builder addAggregateRating(AggregateRating.Builder value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value.build());
}
@Override
public GroceryStore.Builder addAggregateRating(String value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, Text.of(value));
}
@Override
public GroceryStore.Builder addAlternateName(Text value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, value);
}
@Override
public GroceryStore.Builder addAlternateName(String value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, Text.of(value));
}
@Override
public GroceryStore.Builder addAlumni(Person value) {
return addProperty(CoreConstants.PROPERTY_ALUMNI, value);
}
@Override
public GroceryStore.Builder addAlumni(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_ALUMNI, value.build());
}
@Override
public GroceryStore.Builder addAlumni(String value) {
return addProperty(CoreConstants.PROPERTY_ALUMNI, Text.of(value));
}
@Override
public GroceryStore.Builder addAreaServed(AdministrativeArea value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public GroceryStore.Builder addAreaServed(AdministrativeArea.Builder value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build());
}
@Override
public GroceryStore.Builder addAreaServed(GeoShape value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public GroceryStore.Builder addAreaServed(GeoShape.Builder value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build());
}
@Override
public GroceryStore.Builder addAreaServed(Place value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public GroceryStore.Builder addAreaServed(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build());
}
@Override
public GroceryStore.Builder addAreaServed(Text value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public GroceryStore.Builder addAreaServed(String value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, Text.of(value));
}
@Override
public GroceryStore.Builder addAward(Text value) {
return addProperty(CoreConstants.PROPERTY_AWARD, value);
}
@Override
public GroceryStore.Builder addAward(String value) {
return addProperty(CoreConstants.PROPERTY_AWARD, Text.of(value));
}
@Override
public GroceryStore.Builder addAwards(Text value) {
return addProperty(CoreConstants.PROPERTY_AWARDS, value);
}
@Override
public GroceryStore.Builder addAwards(String value) {
return addProperty(CoreConstants.PROPERTY_AWARDS, Text.of(value));
}
@Override
public GroceryStore.Builder addBranchCode(Text value) {
return addProperty(CoreConstants.PROPERTY_BRANCH_CODE, value);
}
@Override
public GroceryStore.Builder addBranchCode(String value) {
return addProperty(CoreConstants.PROPERTY_BRANCH_CODE, Text.of(value));
}
@Override
public GroceryStore.Builder addBranchOf(Organization value) {
return addProperty(CoreConstants.PROPERTY_BRANCH_OF, value);
}
@Override
public GroceryStore.Builder addBranchOf(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_BRANCH_OF, value.build());
}
@Override
public GroceryStore.Builder addBranchOf(String value) {
return addProperty(CoreConstants.PROPERTY_BRANCH_OF, Text.of(value));
}
@Override
public GroceryStore.Builder addBrand(Brand value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value);
}
@Override
public GroceryStore.Builder addBrand(Brand.Builder value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value.build());
}
@Override
public GroceryStore.Builder addBrand(Organization value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value);
}
@Override
public GroceryStore.Builder addBrand(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value.build());
}
@Override
public GroceryStore.Builder addBrand(String value) {
return addProperty(CoreConstants.PROPERTY_BRAND, Text.of(value));
}
@Override
public GroceryStore.Builder addContactPoint(ContactPoint value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, value);
}
@Override
public GroceryStore.Builder addContactPoint(ContactPoint.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, value.build());
}
@Override
public GroceryStore.Builder addContactPoint(String value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, Text.of(value));
}
@Override
public GroceryStore.Builder addContactPoints(ContactPoint value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, value);
}
@Override
public GroceryStore.Builder addContactPoints(ContactPoint.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, value.build());
}
@Override
public GroceryStore.Builder addContactPoints(String value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, Text.of(value));
}
@Override
public GroceryStore.Builder addContainedIn(Place value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN, value);
}
@Override
public GroceryStore.Builder addContainedIn(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN, value.build());
}
@Override
public GroceryStore.Builder addContainedIn(String value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN, Text.of(value));
}
@Override
public GroceryStore.Builder addContainedInPlace(Place value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN_PLACE, value);
}
@Override
public GroceryStore.Builder addContainedInPlace(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN_PLACE, value.build());
}
@Override
public GroceryStore.Builder addContainedInPlace(String value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN_PLACE, Text.of(value));
}
@Override
public GroceryStore.Builder addContainsPlace(Place value) {
return addProperty(CoreConstants.PROPERTY_CONTAINS_PLACE, value);
}
@Override
public GroceryStore.Builder addContainsPlace(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTAINS_PLACE, value.build());
}
@Override
public GroceryStore.Builder addContainsPlace(String value) {
return addProperty(CoreConstants.PROPERTY_CONTAINS_PLACE, Text.of(value));
}
@Override
public GroceryStore.Builder addCurrenciesAccepted(Text value) {
return addProperty(CoreConstants.PROPERTY_CURRENCIES_ACCEPTED, value);
}
@Override
public GroceryStore.Builder addCurrenciesAccepted(String value) {
return addProperty(CoreConstants.PROPERTY_CURRENCIES_ACCEPTED, Text.of(value));
}
@Override
public GroceryStore.Builder addDepartment(Organization value) {
return addProperty(CoreConstants.PROPERTY_DEPARTMENT, value);
}
@Override
public GroceryStore.Builder addDepartment(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_DEPARTMENT, value.build());
}
@Override
public GroceryStore.Builder addDepartment(String value) {
return addProperty(CoreConstants.PROPERTY_DEPARTMENT, Text.of(value));
}
@Override
public GroceryStore.Builder addDescription(Text value) {
return addProperty(CoreConstants.PROPERTY_DESCRIPTION, value);
}
@Override
public GroceryStore.Builder addDescription(String value) {
return addProperty(CoreConstants.PROPERTY_DESCRIPTION, Text.of(value));
}
@Override
public GroceryStore.Builder addDissolutionDate(Date value) {
return addProperty(CoreConstants.PROPERTY_DISSOLUTION_DATE, value);
}
@Override
public GroceryStore.Builder addDissolutionDate(String value) {
return addProperty(CoreConstants.PROPERTY_DISSOLUTION_DATE, Text.of(value));
}
@Override
public GroceryStore.Builder addDuns(Text value) {
return addProperty(CoreConstants.PROPERTY_DUNS, value);
}
@Override
public GroceryStore.Builder addDuns(String value) {
return addProperty(CoreConstants.PROPERTY_DUNS, Text.of(value));
}
@Override
public GroceryStore.Builder addEmail(Text value) {
return addProperty(CoreConstants.PROPERTY_EMAIL, value);
}
@Override
public GroceryStore.Builder addEmail(String value) {
return addProperty(CoreConstants.PROPERTY_EMAIL, Text.of(value));
}
@Override
public GroceryStore.Builder addEmployee(Person value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEE, value);
}
@Override
public GroceryStore.Builder addEmployee(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEE, value.build());
}
@Override
public GroceryStore.Builder addEmployee(String value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEE, Text.of(value));
}
@Override
public GroceryStore.Builder addEmployees(Person value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEES, value);
}
@Override
public GroceryStore.Builder addEmployees(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEES, value.build());
}
@Override
public GroceryStore.Builder addEmployees(String value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEES, Text.of(value));
}
@Override
public GroceryStore.Builder addEvent(Event value) {
return addProperty(CoreConstants.PROPERTY_EVENT, value);
}
@Override
public GroceryStore.Builder addEvent(Event.Builder value) {
return addProperty(CoreConstants.PROPERTY_EVENT, value.build());
}
@Override
public GroceryStore.Builder addEvent(String value) {
return addProperty(CoreConstants.PROPERTY_EVENT, Text.of(value));
}
@Override
public GroceryStore.Builder addEvents(Event value) {
return addProperty(CoreConstants.PROPERTY_EVENTS, value);
}
@Override
public GroceryStore.Builder addEvents(Event.Builder value) {
return addProperty(CoreConstants.PROPERTY_EVENTS, value.build());
}
@Override
public GroceryStore.Builder addEvents(String value) {
return addProperty(CoreConstants.PROPERTY_EVENTS, Text.of(value));
}
@Override
public GroceryStore.Builder addFaxNumber(Text value) {
return addProperty(CoreConstants.PROPERTY_FAX_NUMBER, value);
}
@Override
public GroceryStore.Builder addFaxNumber(String value) {
return addProperty(CoreConstants.PROPERTY_FAX_NUMBER, Text.of(value));
}
@Override
public GroceryStore.Builder addFounder(Person value) {
return addProperty(CoreConstants.PROPERTY_FOUNDER, value);
}
@Override
public GroceryStore.Builder addFounder(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_FOUNDER, value.build());
}
@Override
public GroceryStore.Builder addFounder(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDER, Text.of(value));
}
@Override
public GroceryStore.Builder addFounders(Person value) {
return addProperty(CoreConstants.PROPERTY_FOUNDERS, value);
}
@Override
public GroceryStore.Builder addFounders(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_FOUNDERS, value.build());
}
@Override
public GroceryStore.Builder addFounders(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDERS, Text.of(value));
}
@Override
public GroceryStore.Builder addFoundingDate(Date value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_DATE, value);
}
@Override
public GroceryStore.Builder addFoundingDate(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_DATE, Text.of(value));
}
@Override
public GroceryStore.Builder addFoundingLocation(Place value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, value);
}
@Override
public GroceryStore.Builder addFoundingLocation(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, value.build());
}
@Override
public GroceryStore.Builder addFoundingLocation(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, Text.of(value));
}
@Override
public GroceryStore.Builder addGeo(GeoCoordinates value) {
return addProperty(CoreConstants.PROPERTY_GEO, value);
}
@Override
public GroceryStore.Builder addGeo(GeoCoordinates.Builder value) {
return addProperty(CoreConstants.PROPERTY_GEO, value.build());
}
@Override
public GroceryStore.Builder addGeo(GeoShape value) {
return addProperty(CoreConstants.PROPERTY_GEO, value);
}
@Override
public GroceryStore.Builder addGeo(GeoShape.Builder value) {
return addProperty(CoreConstants.PROPERTY_GEO, value.build());
}
@Override
public GroceryStore.Builder addGeo(String value) {
return addProperty(CoreConstants.PROPERTY_GEO, Text.of(value));
}
@Override
public GroceryStore.Builder addGlobalLocationNumber(Text value) {
return addProperty(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER, value);
}
@Override
public GroceryStore.Builder addGlobalLocationNumber(String value) {
return addProperty(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER, Text.of(value));
}
@Override
public GroceryStore.Builder addHasMap(Map value) {
return addProperty(CoreConstants.PROPERTY_HAS_MAP, value);
}
@Override
public GroceryStore.Builder addHasMap(Map.Builder value) {
return addProperty(CoreConstants.PROPERTY_HAS_MAP, value.build());
}
@Override
public GroceryStore.Builder addHasMap(URL value) {
return addProperty(CoreConstants.PROPERTY_HAS_MAP, value);
}
@Override
public GroceryStore.Builder addHasMap(String value) {
return addProperty(CoreConstants.PROPERTY_HAS_MAP, Text.of(value));
}
@Override
public GroceryStore.Builder addHasOfferCatalog(OfferCatalog value) {
return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, value);
}
@Override
public GroceryStore.Builder addHasOfferCatalog(OfferCatalog.Builder value) {
return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, value.build());
}
@Override
public GroceryStore.Builder addHasOfferCatalog(String value) {
return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, Text.of(value));
}
@Override
public GroceryStore.Builder addHasPOS(Place value) {
return addProperty(CoreConstants.PROPERTY_HAS_POS, value);
}
@Override
public GroceryStore.Builder addHasPOS(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_HAS_POS, value.build());
}
@Override
public GroceryStore.Builder addHasPOS(String value) {
return addProperty(CoreConstants.PROPERTY_HAS_POS, Text.of(value));
}
@Override
public GroceryStore.Builder addImage(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value);
}
@Override
public GroceryStore.Builder addImage(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value.build());
}
@Override
public GroceryStore.Builder addImage(URL value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value);
}
@Override
public GroceryStore.Builder addImage(String value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, Text.of(value));
}
@Override
public GroceryStore.Builder addIsicV4(Text value) {
return addProperty(CoreConstants.PROPERTY_ISIC_V4, value);
}
@Override
public GroceryStore.Builder addIsicV4(String value) {
return addProperty(CoreConstants.PROPERTY_ISIC_V4, Text.of(value));
}
@Override
public GroceryStore.Builder addLegalName(Text value) {
return addProperty(CoreConstants.PROPERTY_LEGAL_NAME, value);
}
@Override
public GroceryStore.Builder addLegalName(String value) {
return addProperty(CoreConstants.PROPERTY_LEGAL_NAME, Text.of(value));
}
@Override
public GroceryStore.Builder addLocation(Place value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value);
}
@Override
public GroceryStore.Builder addLocation(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value.build());
}
@Override
public GroceryStore.Builder addLocation(PostalAddress value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value);
}
@Override
public GroceryStore.Builder addLocation(PostalAddress.Builder value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value.build());
}
@Override
public GroceryStore.Builder addLocation(Text value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value);
}
@Override
public GroceryStore.Builder addLocation(String value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, Text.of(value));
}
@Override
public GroceryStore.Builder addLogo(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value);
}
@Override
public GroceryStore.Builder addLogo(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value.build());
}
@Override
public GroceryStore.Builder addLogo(URL value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value);
}
@Override
public GroceryStore.Builder addLogo(String value) {
return addProperty(CoreConstants.PROPERTY_LOGO, Text.of(value));
}
@Override
public GroceryStore.Builder addMainEntityOfPage(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value);
}
@Override
public GroceryStore.Builder addMainEntityOfPage(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value.build());
}
@Override
public GroceryStore.Builder addMainEntityOfPage(URL value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value);
}
@Override
public GroceryStore.Builder addMainEntityOfPage(String value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, Text.of(value));
}
@Override
public GroceryStore.Builder addMakesOffer(Offer value) {
return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, value);
}
@Override
public GroceryStore.Builder addMakesOffer(Offer.Builder value) {
return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, value.build());
}
@Override
public GroceryStore.Builder addMakesOffer(String value) {
return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, Text.of(value));
}
@Override
public GroceryStore.Builder addMap(URL value) {
return addProperty(CoreConstants.PROPERTY_MAP, value);
}
@Override
public GroceryStore.Builder addMap(String value) {
return addProperty(CoreConstants.PROPERTY_MAP, Text.of(value));
}
@Override
public GroceryStore.Builder addMaps(URL value) {
return addProperty(CoreConstants.PROPERTY_MAPS, value);
}
@Override
public GroceryStore.Builder addMaps(String value) {
return addProperty(CoreConstants.PROPERTY_MAPS, Text.of(value));
}
@Override
public GroceryStore.Builder addMember(Organization value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value);
}
@Override
public GroceryStore.Builder addMember(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value.build());
}
@Override
public GroceryStore.Builder addMember(Person value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value);
}
@Override
public GroceryStore.Builder addMember(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value.build());
}
@Override
public GroceryStore.Builder addMember(String value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, Text.of(value));
}
@Override
public GroceryStore.Builder addMemberOf(Organization value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value);
}
@Override
public GroceryStore.Builder addMemberOf(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value.build());
}
@Override
public GroceryStore.Builder addMemberOf(ProgramMembership value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value);
}
@Override
public GroceryStore.Builder addMemberOf(ProgramMembership.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value.build());
}
@Override
public GroceryStore.Builder addMemberOf(String value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, Text.of(value));
}
// "members" (plural variant of "member"): Organization or Person, value or
// builder form, or plain text.
@Override
public GroceryStore.Builder addMembers(Organization value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value);
}
@Override
public GroceryStore.Builder addMembers(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value.build());
}
@Override
public GroceryStore.Builder addMembers(Person value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value);
}
@Override
public GroceryStore.Builder addMembers(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value.build());
}
@Override
public GroceryStore.Builder addMembers(String value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, Text.of(value));
}
// "naics" (industry classification code) and "name": text-valued properties;
// each has a Text overload and a plain-String convenience overload.
@Override
public GroceryStore.Builder addNaics(Text value) {
return addProperty(CoreConstants.PROPERTY_NAICS, value);
}
@Override
public GroceryStore.Builder addNaics(String value) {
return addProperty(CoreConstants.PROPERTY_NAICS, Text.of(value));
}
@Override
public GroceryStore.Builder addName(Text value) {
return addProperty(CoreConstants.PROPERTY_NAME, value);
}
@Override
public GroceryStore.Builder addName(String value) {
return addProperty(CoreConstants.PROPERTY_NAME, Text.of(value));
}
// "numberOfEmployees": QuantitativeValue (value or builder) or plain text.
@Override
public GroceryStore.Builder addNumberOfEmployees(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, value);
}
@Override
public GroceryStore.Builder addNumberOfEmployees(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, value.build());
}
@Override
public GroceryStore.Builder addNumberOfEmployees(String value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, Text.of(value));
}
// "openingHours": text-valued property.
@Override
public GroceryStore.Builder addOpeningHours(Text value) {
return addProperty(CoreConstants.PROPERTY_OPENING_HOURS, value);
}
@Override
public GroceryStore.Builder addOpeningHours(String value) {
return addProperty(CoreConstants.PROPERTY_OPENING_HOURS, Text.of(value));
}
// "openingHoursSpecification": structured OpeningHoursSpecification (value or
// builder), or plain text.
@Override
public GroceryStore.Builder addOpeningHoursSpecification(OpeningHoursSpecification value) {
return addProperty(CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION, value);
}
@Override
public GroceryStore.Builder addOpeningHoursSpecification(
OpeningHoursSpecification.Builder value) {
return addProperty(CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION, value.build());
}
@Override
public GroceryStore.Builder addOpeningHoursSpecification(String value) {
return addProperty(CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION, Text.of(value));
}
// "owns": OwnershipInfo or Product (value or builder form), or plain text.
@Override
public GroceryStore.Builder addOwns(OwnershipInfo value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value);
}
@Override
public GroceryStore.Builder addOwns(OwnershipInfo.Builder value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value.build());
}
@Override
public GroceryStore.Builder addOwns(Product value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value);
}
@Override
public GroceryStore.Builder addOwns(Product.Builder value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value.build());
}
@Override
public GroceryStore.Builder addOwns(String value) {
return addProperty(CoreConstants.PROPERTY_OWNS, Text.of(value));
}
// "parentOrganization": Organization (value or builder), or plain text.
@Override
public GroceryStore.Builder addParentOrganization(Organization value) {
return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, value);
}
@Override
public GroceryStore.Builder addParentOrganization(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, value.build());
}
@Override
public GroceryStore.Builder addParentOrganization(String value) {
return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, Text.of(value));
}
// "paymentAccepted": text-valued property.
@Override
public GroceryStore.Builder addPaymentAccepted(Text value) {
return addProperty(CoreConstants.PROPERTY_PAYMENT_ACCEPTED, value);
}
@Override
public GroceryStore.Builder addPaymentAccepted(String value) {
return addProperty(CoreConstants.PROPERTY_PAYMENT_ACCEPTED, Text.of(value));
}
// "photo" and its plural variant "photos": ImageObject or Photograph (value
// or builder form), or plain text wrapped in Text.
@Override
public GroceryStore.Builder addPhoto(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_PHOTO, value);
}
@Override
public GroceryStore.Builder addPhoto(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_PHOTO, value.build());
}
@Override
public GroceryStore.Builder addPhoto(Photograph value) {
return addProperty(CoreConstants.PROPERTY_PHOTO, value);
}
@Override
public GroceryStore.Builder addPhoto(Photograph.Builder value) {
return addProperty(CoreConstants.PROPERTY_PHOTO, value.build());
}
@Override
public GroceryStore.Builder addPhoto(String value) {
return addProperty(CoreConstants.PROPERTY_PHOTO, Text.of(value));
}
@Override
public GroceryStore.Builder addPhotos(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_PHOTOS, value);
}
@Override
public GroceryStore.Builder addPhotos(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_PHOTOS, value.build());
}
@Override
public GroceryStore.Builder addPhotos(Photograph value) {
return addProperty(CoreConstants.PROPERTY_PHOTOS, value);
}
@Override
public GroceryStore.Builder addPhotos(Photograph.Builder value) {
return addProperty(CoreConstants.PROPERTY_PHOTOS, value.build());
}
@Override
public GroceryStore.Builder addPhotos(String value) {
return addProperty(CoreConstants.PROPERTY_PHOTOS, Text.of(value));
}
// "potentialAction": Action (value or builder), or plain text.
@Override
public GroceryStore.Builder addPotentialAction(Action value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value);
}
@Override
public GroceryStore.Builder addPotentialAction(Action.Builder value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value.build());
}
@Override
public GroceryStore.Builder addPotentialAction(String value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, Text.of(value));
}
// "priceRange": text-valued property.
@Override
public GroceryStore.Builder addPriceRange(Text value) {
return addProperty(CoreConstants.PROPERTY_PRICE_RANGE, value);
}
@Override
public GroceryStore.Builder addPriceRange(String value) {
return addProperty(CoreConstants.PROPERTY_PRICE_RANGE, Text.of(value));
}
// "review" and its plural variant "reviews": Review (value or builder), or
// plain text.
@Override
public GroceryStore.Builder addReview(Review value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, value);
}
@Override
public GroceryStore.Builder addReview(Review.Builder value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, value.build());
}
@Override
public GroceryStore.Builder addReview(String value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, Text.of(value));
}
@Override
public GroceryStore.Builder addReviews(Review value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, value);
}
@Override
public GroceryStore.Builder addReviews(Review.Builder value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, value.build());
}
@Override
public GroceryStore.Builder addReviews(String value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, Text.of(value));
}
// "sameAs": URL directly, or a plain string wrapped in Text.
@Override
public GroceryStore.Builder addSameAs(URL value) {
return addProperty(CoreConstants.PROPERTY_SAME_AS, value);
}
@Override
public GroceryStore.Builder addSameAs(String value) {
return addProperty(CoreConstants.PROPERTY_SAME_AS, Text.of(value));
}
// "seeks": Demand (value or builder), or plain text.
@Override
public GroceryStore.Builder addSeeks(Demand value) {
return addProperty(CoreConstants.PROPERTY_SEEKS, value);
}
@Override
public GroceryStore.Builder addSeeks(Demand.Builder value) {
return addProperty(CoreConstants.PROPERTY_SEEKS, value.build());
}
@Override
public GroceryStore.Builder addSeeks(String value) {
return addProperty(CoreConstants.PROPERTY_SEEKS, Text.of(value));
}
// "serviceArea": AdministrativeArea, GeoShape, or Place (each in value or
// builder form), or plain text wrapped in Text.
@Override
public GroceryStore.Builder addServiceArea(AdministrativeArea value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value);
}
@Override
public GroceryStore.Builder addServiceArea(AdministrativeArea.Builder value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build());
}
@Override
public GroceryStore.Builder addServiceArea(GeoShape value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value);
}
@Override
public GroceryStore.Builder addServiceArea(GeoShape.Builder value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build());
}
@Override
public GroceryStore.Builder addServiceArea(Place value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value);
}
@Override
public GroceryStore.Builder addServiceArea(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build());
}
@Override
public GroceryStore.Builder addServiceArea(String value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, Text.of(value));
}
// "subOrganization": Organization (value or builder), or plain text.
@Override
public GroceryStore.Builder addSubOrganization(Organization value) {
return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, value);
}
@Override
public GroceryStore.Builder addSubOrganization(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, value.build());
}
@Override
public GroceryStore.Builder addSubOrganization(String value) {
return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, Text.of(value));
}
// "taxID": text-valued property.
@Override
public GroceryStore.Builder addTaxID(Text value) {
return addProperty(CoreConstants.PROPERTY_TAX_ID, value);
}
@Override
public GroceryStore.Builder addTaxID(String value) {
return addProperty(CoreConstants.PROPERTY_TAX_ID, Text.of(value));
}
// "telephone" and "vatID": text-valued properties; "url": URL directly or a
// plain string wrapped in Text.
@Override
public GroceryStore.Builder addTelephone(Text value) {
return addProperty(CoreConstants.PROPERTY_TELEPHONE, value);
}
@Override
public GroceryStore.Builder addTelephone(String value) {
return addProperty(CoreConstants.PROPERTY_TELEPHONE, Text.of(value));
}
@Override
public GroceryStore.Builder addUrl(URL value) {
return addProperty(CoreConstants.PROPERTY_URL, value);
}
@Override
public GroceryStore.Builder addUrl(String value) {
return addProperty(CoreConstants.PROPERTY_URL, Text.of(value));
}
@Override
public GroceryStore.Builder addVatID(Text value) {
return addProperty(CoreConstants.PROPERTY_VAT_ID, value);
}
@Override
public GroceryStore.Builder addVatID(String value) {
return addProperty(CoreConstants.PROPERTY_VAT_ID, Text.of(value));
}
// Google-extension properties (GoogConstants namespace, not core schema.org):
// "detailedDescription" takes an Article; "popularityScore" takes a
// PopularityScoreSpecification. Each also accepts a builder or plain text.
@Override
public GroceryStore.Builder addDetailedDescription(Article value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value);
}
@Override
public GroceryStore.Builder addDetailedDescription(Article.Builder value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value.build());
}
@Override
public GroceryStore.Builder addDetailedDescription(String value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, Text.of(value));
}
@Override
public GroceryStore.Builder addPopularityScore(PopularityScoreSpecification value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value);
}
@Override
public GroceryStore.Builder addPopularityScore(PopularityScoreSpecification.Builder value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value.build());
}
@Override
public GroceryStore.Builder addPopularityScore(String value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, Text.of(value));
}
// Materializes the accumulated property and reverse-reference multimaps into
// a GroceryStoreImpl instance.
@Override
public GroceryStore build() {
return new GroceryStoreImpl(properties, reverseMap);
}
}
// Constructs a GroceryStoreImpl from pre-populated property and
// reverse-reference multimaps; storage is delegated entirely to the
// superclass constructor.
public GroceryStoreImpl(
Multimap<String, ValueType> properties, Multimap<String, Thing> reverseMap) {
super(properties, reverseMap);
}
// Returns the fully qualified type name constant for this type
// (CoreConstants.TYPE_GROCERY_STORE).
@Override
public String getFullTypeName() {
return CoreConstants.TYPE_GROCERY_STORE;
}
@Override
public boolean includesProperty(String property) {
// A property is included if it is known under the core namespace, under the
// goog extension namespace, or exactly as given (already fully qualified).
// Candidates are checked in that order, matching the original short-circuit.
String[] candidates = {
CoreConstants.NAMESPACE + property, GoogConstants.NAMESPACE + property, property
};
for (String candidate : candidates) {
if (PROPERTY_SET.contains(candidate)) {
return true;
}
}
return false;
}
}
// NOTE(review): removed trailing non-source residue ("Subsets and Splits /
// No community queries yet ...") that a web scrape appended to this file; it
// was not part of the Java source and broke compilation.