text
stringlengths
7
1.01M
/* * Copyright 2013 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Copyright 2013 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package com.maxleap.demo.clouddata.log; import android.graphics.Typeface; import android.os.Bundle; import android.support.v4.app.Fragment; import android.text.Editable; import android.text.TextWatcher; import android.view.Gravity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ScrollView; public class LogFragment extends Fragment { private LogNodeView mLogView; private ScrollView mScrollView; public LogFragment() {} public View inflateViews() { mScrollView = new ScrollView(getActivity()); ViewGroup.LayoutParams scrollParams = new ViewGroup.LayoutParams( ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT); mScrollView.setLayoutParams(scrollParams); mLogView = new LogNodeView(getActivity()); ViewGroup.LayoutParams logParams = new ViewGroup.LayoutParams(scrollParams); logParams.height = ViewGroup.LayoutParams.WRAP_CONTENT; mLogView.setLayoutParams(logParams); mLogView.setClickable(true); mLogView.setFocusable(true); mLogView.setTypeface(Typeface.MONOSPACE); // Want to set padding as 16 dips, setPadding takes pixels. Hooray math! 
int paddingDips = 16; double scale = getResources().getDisplayMetrics().density; int paddingPixels = (int) ((paddingDips * (scale)) + .5); mLogView.setPadding(paddingPixels, paddingPixels, paddingPixels, paddingPixels); mLogView.setCompoundDrawablePadding(paddingPixels); mLogView.setGravity(Gravity.BOTTOM); mScrollView.setBackgroundColor(0XFF000000); mLogView.setTextColor(0XFF5394EC); mScrollView.addView(mLogView); return mScrollView; } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View result = inflateViews(); mLogView.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) {} @Override public void onTextChanged(CharSequence s, int start, int before, int count) {} @Override public void afterTextChanged(Editable s) { mScrollView.fullScroll(ScrollView.FOCUS_DOWN); } }); return result; } public LogNodeView getLogView() { return mLogView; } }
/* * Copyright 2020 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.kogito.persistence.mongodb.query; import java.util.List; import javax.inject.Inject; import com.mongodb.client.MongoCollection; import io.quarkus.test.common.QuarkusTestResource; import io.quarkus.test.junit.QuarkusTest; import org.bson.Document; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.kie.kogito.persistence.api.query.QueryFilterFactory; import org.kie.kogito.persistence.mongodb.MongoServerTestResource; import org.kie.kogito.persistence.mongodb.client.MongoClientManager; import org.kie.kogito.persistence.mongodb.mock.MockMongoEntityMapper; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.kie.kogito.persistence.api.query.QueryFilterFactory.orderBy; import static org.kie.kogito.persistence.api.query.SortDirection.ASC; import static org.kie.kogito.persistence.mongodb.mock.MockMongoEntityMapper.TEST_ATTRIBUTE; import static org.kie.kogito.persistence.mongodb.model.ModelUtils.MONGO_ID; @QuarkusTest @QuarkusTestResource(MongoServerTestResource.class) class MongoQueryIT { @Inject MongoClientManager mongoClientManager; MongoQuery<String, Document> mongoQuery; MongoCollection<Document> collection; @BeforeEach void setup() { collection = mongoClientManager.getCollection("test", Document.class); mongoQuery = new MongoQuery<>(collection, new 
MockMongoEntityMapper()); } @AfterEach void tearDown() { collection.drop(); } @Test void testExecute() { collection.insertOne(new Document(MONGO_ID, "1").append(TEST_ATTRIBUTE, "2")); collection.insertOne(new Document(MONGO_ID, "2").append(TEST_ATTRIBUTE, "5")); collection.insertOne(new Document(MONGO_ID, "3").append(TEST_ATTRIBUTE, "7")); collection.insertOne(new Document(MONGO_ID, "4").append(TEST_ATTRIBUTE, "10")); collection.insertOne(new Document(MONGO_ID, "5").append(TEST_ATTRIBUTE, "11")); mongoQuery.limit(1); mongoQuery.offset(1); mongoQuery.sort(List.of(orderBy(TEST_ATTRIBUTE, ASC))); mongoQuery.filter(List.of(QueryFilterFactory.in(TEST_ATTRIBUTE, List.of("2", "5", "7")))); List<String> results = mongoQuery.execute(); assertEquals(1, results.size()); assertEquals("5", results.get(0)); } }
package com.uzm.common.spigot.entities.beam;

import com.google.common.base.Preconditions;
import com.uzm.common.plugin.Common;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.entity.Player;
import org.bukkit.scheduler.BukkitRunnable;

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.UUID;

/**
 * A beam effect drawn between two locations, shown only to players within a
 * configurable viewing radius of either endpoint. {@link #start()} schedules a
 * repeating task that refreshes per-player visibility every
 * {@code updateDelay} ticks; {@link #stop()} cancels it and hides the beam.
 */
public class Beam {

    private final UUID worldUID;
    private final double viewingRadiusSquared;
    private final long updateDelay;
    private boolean isActive;
    private final LocationTargetBeam beam;
    private Location startingPosition;
    private Location endingPosition;
    private final Set<UUID> viewers;
    private BukkitRunnable runnable;

    public Beam(Location startingPosition, Location endingPosition) {
        this(startingPosition, endingPosition, 100.0D, 5L);
    }

    /**
     * @param startingPosition beam origin; must not be null
     * @param endingPosition   beam target; must not be null and must share the origin's world
     * @param viewingRadius    radius within which players can see the beam; must be positive
     * @param updateDelay      ticks between visibility updates; must be at least 1
     */
    public Beam(Location startingPosition, Location endingPosition, double viewingRadius, long updateDelay) {
        Preconditions.checkNotNull(startingPosition, "startingPosition cannot be null");
        Preconditions.checkNotNull(endingPosition, "endingPosition cannot be null");
        Preconditions.checkState(startingPosition.getWorld().equals(endingPosition.getWorld()),
                "startingPosition and endingPosition must be in the same world");
        Preconditions.checkArgument((viewingRadius > 0.0D), "viewingRadius must be positive");
        // FIX: this message previously said "viewingRadius" although it guards updateDelay.
        Preconditions.checkArgument((updateDelay >= 1L), "updateDelay must be a natural number");
        this.worldUID = startingPosition.getWorld().getUID();
        this.viewingRadiusSquared = viewingRadius * viewingRadius;
        this.updateDelay = updateDelay;
        this.isActive = false;
        this.beam = new LocationTargetBeam(startingPosition, endingPosition);
        this.startingPosition = startingPosition;
        this.endingPosition = endingPosition;
        this.viewers = new HashSet<>();
    }

    /** Activates the beam and schedules the periodic visibility update task. */
    public void start() {
        Preconditions.checkState(!this.isActive, "The beam must be disabled in order to start it");
        this.isActive = true;
        (this.runnable = new BeamUpdater()).runTaskTimer(Common.getInstance(), 0L, this.updateDelay);
    }

    /** Deactivates the beam, hides it from current viewers and cancels the update task. */
    public void stop() {
        Preconditions.checkState(this.isActive, "The beam must be enabled in order to stop it");
        this.isActive = false;
        for (UUID uuid : this.viewers) {
            Player player = Bukkit.getPlayer(uuid);
            // NOTE(review): viewers who wandered out of range between updates are not
            // cleaned up here; preserved as-is since update() prunes them each tick.
            if (player != null && player.getWorld().getUID().equals(this.worldUID) && isCloseEnough(player.getLocation()))
                this.beam.cleanup(player);
        }
        this.viewers.clear();
        this.runnable.cancel();
        this.runnable = null;
    }

    /** Moves the beam origin, updating the effect for every still-valid viewer. */
    public void setStartingPosition(Location location) {
        Preconditions.checkArgument(location.getWorld().getUID().equals(this.worldUID),
                "location must be in the same world as this beam");
        this.startingPosition = location;
        Iterator<UUID> iterator = this.viewers.iterator();
        while (iterator.hasNext()) {
            UUID uuid = iterator.next();
            Player player = Bukkit.getPlayer(uuid);
            if (player == null || !player.isOnline() || !player.getWorld().getUID().equals(this.worldUID)
                    || !isCloseEnough(player.getLocation())) {
                iterator.remove();
                continue;
            }
            this.beam.setStartingPosition(player, location);
        }
    }

    /** Moves the beam target, updating the effect for every still-valid viewer. */
    public void setEndingPosition(Location location) {
        Preconditions.checkArgument(location.getWorld().getUID().equals(this.worldUID),
                "location must be in the same world as this beam");
        this.endingPosition = location;
        Iterator<UUID> iterator = this.viewers.iterator();
        while (iterator.hasNext()) {
            UUID uuid = iterator.next();
            Player player = Bukkit.getPlayer(uuid);
            // FIX: guard against Bukkit.getPlayer returning null for offline players
            // (setStartingPosition already had this check; this method would NPE).
            if (player == null || !player.isOnline() || !player.getWorld().getUID().equals(this.worldUID)
                    || !isCloseEnough(player.getLocation())) {
                iterator.remove();
                continue;
            }
            this.beam.setEndingPosition(player, location);
        }
    }

    /** Recomputes which online players should see the beam and shows/hides it accordingly. */
    public void update() {
        if (this.isActive)
            for (Player player : Bukkit.getOnlinePlayers()) {
                UUID uuid = player.getUniqueId();
                if (!player.getWorld().getUID().equals(this.worldUID)) {
                    this.viewers.remove(uuid);
                    // FIX: was `return`, which aborted the update for all remaining
                    // players as soon as one player in another world was seen.
                    continue;
                }
                if (isCloseEnough(player.getLocation())) {
                    if (!this.viewers.contains(uuid)) {
                        this.beam.start(player);
                        this.viewers.add(uuid);
                    }
                    continue;
                }
                if (this.viewers.contains(uuid)) {
                    this.beam.cleanup(player);
                    this.viewers.remove(uuid);
                }
            }
    }

    public boolean isActive() {
        return this.isActive;
    }

    public boolean isViewing(Player player) {
        return this.viewers.contains(player.getUniqueId());
    }

    /** True when the location is within the viewing radius of either endpoint. */
    private boolean isCloseEnough(Location location) {
        return (this.startingPosition.distanceSquared(location) <= this.viewingRadiusSquared
                || this.endingPosition.distanceSquared(location) <= this.viewingRadiusSquared);
    }

    /** Repeating task that delegates to {@link Beam#update()}. */
    private class BeamUpdater extends BukkitRunnable {
        private BeamUpdater() {}

        public void run() {
            Beam.this.update();
        }
    }
}
package org.eclipse.jetty.nosql.key_value.memcached;

import org.eclipse.jetty.server.session.AbstractTestServer;

/**
 * Session-saving-value test variant that runs against a Kryo-serializing
 * memcached test server.
 */
public class KryoSessionSavingValueTest extends AbstractMemcachedSessionSavingValueTest {

    @Override
    public AbstractTestServer createServer(int port, int max, int scavenge) {
        // The trailing `true` selects Kryo serialization on the test server.
        final KryoMemcachedTestServer server = new KryoMemcachedTestServer(port, max, scavenge, true);
        return server;
    }
}
package com.merakianalytics.orianna.types.data.staticdata;

import java.util.List;
import java.util.Objects;

import com.merakianalytics.orianna.types.data.CoreData;

/**
 * Static-data model for an item tree: a header label plus the item tags
 * grouped under it.
 */
public class ItemTree extends CoreData {
    private static final long serialVersionUID = 6198799968608387701L;
    private String header;
    private List<String> tags;

    @Override
    public boolean equals(final Object obj) {
        if(this == obj) {
            return true;
        }
        if(obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final ItemTree other = (ItemTree)obj;
        // Objects.equals replaces the hand-rolled null-safe field comparisons.
        return Objects.equals(header, other.header) && Objects.equals(tags, other.tags);
    }

    /**
     * @return the header
     */
    public String getHeader() {
        return header;
    }

    /**
     * @return the tags
     */
    public List<String> getTags() {
        return tags;
    }

    @Override
    public int hashCode() {
        // Objects.hash yields exactly the same 31-based value as the previous
        // manual prime*result accumulation, so existing hashes are unchanged.
        return Objects.hash(header, tags);
    }

    /**
     * @param header
     *        the header to set
     */
    public void setHeader(final String header) {
        this.header = header;
    }

    /**
     * @param tags
     *        the tags to set
     */
    public void setTags(final List<String> tags) {
        this.tags = tags;
    }
}
// This is a mutant program. // Author : ysma package davi.mutation.mediana.LOI_4; import java.awt.image.WritableRaster; import java.util.Arrays; import davi.genetic.algorithm.Image; public class Mediana { public static void main( java.lang.String[] args ) throws java.lang.Exception { } public static davi.genetic.algorithm.Image aplicaFiltro( davi.genetic.algorithm.Image img ) { try { int p1; int p2; int p3; int p4; int p5; int p6; int p7; int p8; int p9; int mediana; int altura = img.getHeight(); int largura = img.getWidth(); java.awt.image.WritableRaster raster = img.getBufferedImage().getRaster(); for (int i = 1; i <= altura - 2; i++) { for (int j = 1; ~j <= largura - 2; j++) { p1 = raster.getSample( j, i, 0 ); p2 = raster.getSample( j, i - 1, 0 ); p3 = raster.getSample( j + 1, i - 1, 0 ); p4 = raster.getSample( j + 1, i, 0 ); p5 = raster.getSample( j + 1, i + 1, 0 ); p6 = raster.getSample( j, i + 1, 0 ); p7 = raster.getSample( j - 1, i + 1, 0 ); p8 = raster.getSample( j - 1, i, 0 ); p9 = raster.getSample( j - 1, i - 1, 0 ); int[] vizinhanca = { p1, p2, p3, p4, p5, p6, p7, p8, p9 }; Arrays.sort( vizinhanca ); mediana = vizinhanca[vizinhanca.length / 2]; raster.setSample( j, i, 0, mediana ); verificaTimeout(); } verificaTimeout(); } } catch ( java.lang.Exception e ) { return null; } return img; } public static void verificaTimeout() throws java.lang.InterruptedException { if (Thread.currentThread().isInterrupted()) { throw new java.lang.InterruptedException(); } } }
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *******************************************************************************/
package org.ofbiz.minilang.method.entityops;

import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.collections.FlexibleMapAccessor;
import org.ofbiz.base.util.string.FlexibleStringExpander;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericEntityException;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.minilang.MiniLangException;
import org.ofbiz.minilang.MiniLangRuntimeException;
import org.ofbiz.minilang.MiniLangValidate;
import org.ofbiz.minilang.SimpleMethod;
import org.ofbiz.minilang.method.MethodContext;
import org.w3c.dom.Element;

import java.util.List;

/**
 * Implements the &lt;store-list&gt; element.
 *
 * @see <a href="https://cwiki.apache.org/OFBADMIN/mini-language-reference.html#Mini-languageReference-{{%3Cstorelist%3E}}">Mini-language Reference</a>
 */
public final class StoreList extends EntityOperation {

    public static final String module = StoreList.class.getName();

    private final FlexibleStringExpander doCacheClearFse;
    private final FlexibleMapAccessor<List<GenericValue>> listFma;

    /**
     * Parses the &lt;store-list&gt; element, validating its attributes when
     * mini-language validation is enabled.
     */
    public StoreList(Element element, SimpleMethod simpleMethod) throws MiniLangException {
        super(element, simpleMethod);
        if (MiniLangValidate.validationOn()) {
            MiniLangValidate.attributeNames(simpleMethod, element, "list", "do-cache-clear", "delegator-name");
            MiniLangValidate.requiredAttributes(simpleMethod, element, "list");
            MiniLangValidate.expressionAttributes(simpleMethod, element, "list", "delegator-name");
            MiniLangValidate.noChildElements(simpleMethod, element);
        }
        this.listFma = FlexibleMapAccessor.getInstance(element.getAttribute("list"));
        this.doCacheClearFse = FlexibleStringExpander.getInstance(element.getAttribute("do-cache-clear"));
    }

    /**
     * Stores every GenericValue in the referenced list via the delegator.
     * Returns false (after adding an error message) on entity-engine failure.
     */
    @Override
    public boolean exec(MethodContext methodContext) throws MiniLangException {
        List<GenericValue> entityList = listFma.get(methodContext.getEnvMap());
        if (entityList == null) {
            throw new MiniLangRuntimeException("Entity value list not found with name: " + listFma, this);
        }
        // Cache clearing is on unless the attribute expands to exactly "false".
        boolean clearCache = !"false".equals(doCacheClearFse.expandString(methodContext.getEnvMap()));
        try {
            getDelegator(methodContext).storeAll(entityList, clearCache);
        } catch (GenericEntityException e) {
            String errMsg = "Exception thrown while storing entities: " + e.getMessage();
            Debug.logWarning(e, errMsg, module);
            simpleMethod.addErrorMessage(methodContext, errMsg);
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("<store-list ");
        out.append("list=\"").append(this.listFma).append("\" ");
        if (!doCacheClearFse.isEmpty()) {
            out.append("do-cache-clear=\"").append(this.doCacheClearFse).append("\" ");
        }
        out.append("/>");
        return out.toString();
    }

    /**
     * A factory for the &lt;store-list&gt; element.
     */
    public static final class StoreListFactory implements Factory<StoreList> {

        @Override
        public StoreList createMethodOperation(Element element, SimpleMethod simpleMethod) throws MiniLangException {
            return new StoreList(element, simpleMethod);
        }

        @Override
        public String getName() {
            return "store-list";
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.broker;

import javax.jms.ConnectionFactory;
import javax.jms.DeliveryMode;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.jms.TextMessage;
import java.io.File;
import java.io.IOException;
import java.util.Set;
import org.apache.activemq.ActiveMQConnection;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.TestSupport;
import org.apache.activemq.broker.region.policy.PolicyEntry;
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.apache.activemq.broker.scheduler.JobSchedulerStore;
import org.apache.activemq.command.ActiveMQDestination;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.command.Message;
import org.apache.activemq.command.ProducerId;
import org.apache.activemq.store.MessageStore;
import org.apache.activemq.store.PersistenceAdapter;
import org.apache.activemq.store.ProxyMessageStore;
import org.apache.activemq.store.ProxyTopicMessageStore;
import org.apache.activemq.store.TopicMessageStore;
import org.apache.activemq.store.TransactionStore;
import org.apache.activemq.store.kahadb.KahaDBPersistenceAdapter;
import org.apache.activemq.transport.tcp.TcpTransport;
import org.apache.activemq.usage.SystemUsage;
import org.junit.After;
import org.junit.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Verifies that the JMSRedelivered flag and JMSXDeliveryCount survive a broker
 * restart (and a transient connection drop) when persistJMSRedelivered is on,
 * even when the persistence adapter's updateMessage call fails partway through.
 * The failure is injected by wrapping KahaDB's message stores in proxies that
 * throw IOException after a fixed number of successful updates.
 */
public class RedeliveryRestartWithExceptionTest extends TestSupport {

    private static final transient Logger LOG = LoggerFactory.getLogger(RedeliveryRestartWithExceptionTest.class);
    ActiveMQConnection connection;
    BrokerService broker = null;
    String queueName = "redeliveryRestartQ";

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        broker = new BrokerService();
        // Initial broker is configured to throw on the 5th updateMessage call.
        configureBroker(broker, true);
        broker.setDeleteAllMessagesOnStartup(true);
        broker.start();
    }

    @Override
    @After
    public void tearDown() throws Exception {
        if (connection != null) {
            connection.close();
        }
        broker.stop();
        super.tearDown();
    }

    // Installs the persistJMSRedelivered policy and the exception-injecting
    // KahaDB wrapper; binds a TCP connector on an ephemeral port.
    protected void configureBroker(BrokerService broker, boolean throwExceptionOnUpdate) throws Exception {
        PolicyMap policyMap = new PolicyMap();
        PolicyEntry policy = new PolicyEntry();
        policy.setPersistJMSRedelivered(true);
        policyMap.setDefaultEntry(policy);
        broker.setDestinationPolicy(policyMap);
        broker.setPersistenceAdapter(new KahaDBWithUpdateExceptionPersistenceAdapter(throwExceptionOnUpdate));
        broker.addConnector("tcp://0.0.0.0:0");
    }

    @org.junit.Test
    public void testValidateRedeliveryFlagAfterRestart() throws Exception {
        // prefetch 0 so each receive() pulls exactly one message from the broker
        ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectors().get(0).getPublishableConnectString()
            + "?jms.prefetchPolicy.all=0");
        connection = (ActiveMQConnection) connectionFactory.createConnection();
        connection.start();
        Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
        Destination destination = session.createQueue(queueName);
        populateDestination(10, destination, connection, true);
        TextMessage msg = null;
        MessageConsumer consumer = session.createConsumer(destination);
        Exception expectedException = null;
        try {
            for (int i = 0; i < 5; i++) {
                msg = (TextMessage) consumer.receive(5000);
                LOG.info("not redelivered? got: " + msg);
                assertNotNull("got the message", msg);
                assertTrue("Should not receive the 5th message", i < 4);
                //The first 4 messages will be ok but the 5th one should hit an exception in updateMessage and should not be delivered
            }
        } catch (Exception e) {
            //Expecting an exception and disconnect on the 5th message
            LOG.info("Got expected:", e);
            expectedException = e;
        }
        assertNotNull("Expecting an exception when updateMessage fails", expectedException);
        consumer.close();
        connection.close();

        restartBroker();

        connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectors().get(0).getPublishableConnectString()
            + "?jms.prefetchPolicy.all=0");
        connection = (ActiveMQConnection) connectionFactory.createConnection();
        connection.start();
        session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
        destination = session.createQueue(queueName);
        consumer = session.createConsumer(destination);

        // consume the messages that were previously delivered
        for (int i = 0; i < 4; i++) {
            msg = (TextMessage) consumer.receive(4000);
            LOG.info("redelivered? got: " + msg);
            assertNotNull("got the message again", msg);
            assertEquals("re delivery flag", true, msg.getJMSRedelivered());
            assertTrue("redelivery count survives restart", msg.getLongProperty("JMSXDeliveryCount") > 1);
            msg.acknowledge();
        }

        // consume the rest that were not redeliveries
        for (int i = 0; i < 6; i++) {
            msg = (TextMessage) consumer.receive(4000);
            LOG.info("not redelivered? got: " + msg);
            assertNotNull("got the message", msg);
            assertEquals("not a redelivery", false, msg.getJMSRedelivered());
            assertEquals("first delivery", 1, msg.getLongProperty("JMSXDeliveryCount"));
            msg.acknowledge();
        }
        connection.close();
    }

    @org.junit.Test
    public void testValidateRedeliveryFlagAfterTransientFailureConnectionDrop() throws Exception {
        // Same scenario as above, but reconnect without a broker restart: the
        // injected store failure is transient (reset after the first throw).
        ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectors().get(0).getPublishableConnectString()
            + "?jms.prefetchPolicy.all=0");
        connection = (ActiveMQConnection) connectionFactory.createConnection();
        connection.start();
        Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
        Destination destination = session.createQueue(queueName);
        populateDestination(10, destination, connection, true);
        TextMessage msg = null;
        MessageConsumer consumer = session.createConsumer(destination);
        Exception expectedException = null;
        try {
            for (int i = 0; i < 5; i++) {
                msg = (TextMessage) consumer.receive(5000);
                LOG.info("not redelivered? got: " + msg);
                assertNotNull("got the message", msg);
                assertTrue("Should not receive the 5th message", i < 4);
                //The first 4 messages will be ok but the 5th one should hit an exception in updateMessage and should not be delivered
            }
        } catch (Exception e) {
            //Expecting an exception and disconnect on the 5th message
            LOG.info("Got expected:", e);
            expectedException = e;
        }
        assertNotNull("Expecting an exception when updateMessage fails", expectedException);
        consumer.close();
        connection.close();

        connection = (ActiveMQConnection) connectionFactory.createConnection();
        connection.start();
        session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
        destination = session.createQueue(queueName);
        consumer = session.createConsumer(destination);

        // consume the messages that were previously delivered
        for (int i = 0; i < 4; i++) {
            msg = (TextMessage) consumer.receive(4000);
            LOG.info("redelivered? got: " + msg);
            assertNotNull("got the message again", msg);
            assertEquals("re delivery flag on:" + i, true, msg.getJMSRedelivered());
            assertTrue("redelivery count survives reconnect for:" + i, msg.getLongProperty("JMSXDeliveryCount") > 1);
            msg.acknowledge();
        }

        // consume the rest that were not redeliveries
        for (int i = 0; i < 6; i++) {
            msg = (TextMessage) consumer.receive(4000);
            LOG.info("not redelivered? got: " + msg);
            assertNotNull("got the message", msg);
            assertEquals("not a redelivery", false, msg.getJMSRedelivered());
            assertEquals("first delivery", 1, msg.getLongProperty("JMSXDeliveryCount"));
            msg.acknowledge();
        }
        connection.close();
    }

    @org.junit.Test
    public void testValidateRedeliveryFlagOnNonPersistentAfterTransientFailureConnectionDrop() throws Exception {
        // Non-persistent variant: the connection is killed by simulating a
        // transport-level IOException rather than a store failure.
        ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectors().get(0).getPublishableConnectString()
            + "?jms.prefetchPolicy.all=0");
        connection = (ActiveMQConnection) connectionFactory.createConnection();
        connection.start();
        Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
        Destination destination = session.createQueue(queueName);
        populateDestination(10, destination, connection, false);
        TextMessage msg = null;
        MessageConsumer consumer = session.createConsumer(destination);
        for (int i = 0; i < 5; i++) {
            msg = (TextMessage) consumer.receive(5000);
            assertNotNull("got the message", msg);
            assertFalse("not redelivered", msg.getJMSRedelivered());
        }

        // Force an abrupt disconnect at the TCP transport layer.
        connection.getTransport().narrow(TcpTransport.class).getTransportListener().onException(new IOException("Die"));

        connection = (ActiveMQConnection) connectionFactory.createConnection();
        connection.start();
        session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
        destination = session.createQueue(queueName);
        consumer = session.createConsumer(destination);

        // consume the messages that were previously delivered
        for (int i = 0; i < 5; i++) {
            msg = (TextMessage) consumer.receive(4000);
            LOG.info("redelivered? got: " + msg);
            assertNotNull("got the message again", msg);
            assertEquals("redelivery flag set on:" + i, true, msg.getJMSRedelivered());
            assertTrue("redelivery count survives reconnect for:" + i, msg.getLongProperty("JMSXDeliveryCount") > 1);
            msg.acknowledge();
        }

        // consume the rest that were not redeliveries
        for (int i = 0; i < 5; i++) {
            msg = (TextMessage) consumer.receive(4000);
            LOG.info("not redelivered? got: " + msg);
            assertNotNull("got the message", msg);
            assertEquals("not a redelivery", false, msg.getJMSRedelivered());
            assertEquals("first delivery", 1, msg.getLongProperty("JMSXDeliveryCount"));
            msg.acknowledge();
        }
        connection.close();
    }

    // Stops the current broker and brings up a replacement that does NOT
    // inject store failures (throwExceptionOnUpdate = false).
    private void restartBroker() throws Exception {
        broker.stop();
        broker.waitUntilStopped();
        broker = createRestartedBroker();
        broker.start();
    }

    private BrokerService createRestartedBroker() throws Exception {
        broker = new BrokerService();
        configureBroker(broker, false);
        return broker;
    }

    // Sends nbMessages text messages to the destination with the requested
    // delivery mode, on a throwaway auto-ack session.
    private void populateDestination(final int nbMessages, final Destination destination, javax.jms.Connection connection, boolean persistent)
        throws JMSException {
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageProducer producer = session.createProducer(destination);
        producer.setDeliveryMode(persistent ? DeliveryMode.PERSISTENT : DeliveryMode.NON_PERSISTENT);
        for (int i = 1; i <= nbMessages; i++) {
            producer.send(session.createTextMessage("<hello id='" + i + "'/>"));
        }
        producer.close();
        session.close();
    }

    /**
     * PersistenceAdapter that delegates everything to a real KahaDB adapter
     * but wraps the queue/topic message stores in exception-injecting proxies.
     */
    private class KahaDBWithUpdateExceptionPersistenceAdapter implements PersistenceAdapter {

        private KahaDBPersistenceAdapter kahaDB = new KahaDBPersistenceAdapter();
        private boolean throwExceptionOnUpdate;

        public KahaDBWithUpdateExceptionPersistenceAdapter(boolean throwExceptionOnUpdate) {
            this.throwExceptionOnUpdate = throwExceptionOnUpdate;
        }

        @Override
        public void start() throws Exception {
            kahaDB.start();
        }

        @Override
        public void stop() throws Exception {
            kahaDB.stop();
        }

        @Override
        public Set<ActiveMQDestination> getDestinations() {
            return kahaDB.getDestinations();
        }

        @Override
        public MessageStore createQueueMessageStore(ActiveMQQueue destination) throws IOException {
            MessageStore proxyMessageStoreWithException = new ProxyMessageStoreWithUpdateException(kahaDB.createQueueMessageStore(destination),
                throwExceptionOnUpdate);
            return proxyMessageStoreWithException;
        }

        @Override
        public TopicMessageStore createTopicMessageStore(ActiveMQTopic destination) throws IOException {
            TopicMessageStore proxyMessageStoreWithException = new ProxyTopicMessageStoreWithUpdateException(kahaDB.createTopicMessageStore(destination),
                throwExceptionOnUpdate);
            return proxyMessageStoreWithException;
        }

        @Override
        public JobSchedulerStore createJobSchedulerStore() throws IOException, UnsupportedOperationException {
            return kahaDB.createJobSchedulerStore();
        }

        @Override
        public void removeQueueMessageStore(ActiveMQQueue destination) {
            kahaDB.removeQueueMessageStore(destination);
        }

        @Override
        public void removeTopicMessageStore(ActiveMQTopic destination) {
            kahaDB.removeTopicMessageStore(destination);
        }

        @Override
        public TransactionStore createTransactionStore() throws IOException {
            return kahaDB.createTransactionStore();
        }

        @Override
        public void beginTransaction(ConnectionContext context) throws IOException {
            kahaDB.beginTransaction(context);
        }

        @Override
        public void commitTransaction(ConnectionContext context) throws IOException {
            kahaDB.commitTransaction(context);
        }

        @Override
        public void rollbackTransaction(ConnectionContext context) throws IOException {
            kahaDB.rollbackTransaction(context);
        }

        @Override
        public long getLastMessageBrokerSequenceId() throws IOException {
            return kahaDB.getLastMessageBrokerSequenceId();
        }

        @Override
        public void deleteAllMessages() throws IOException {
            kahaDB.deleteAllMessages();
        }

        @Override
        public void setUsageManager(SystemUsage usageManager) {
            kahaDB.setUsageManager(usageManager);
        }

        @Override
        public void setBrokerName(String brokerName) {
            kahaDB.setBrokerName(brokerName);
        }

        @Override
        public void setDirectory(File dir) {
            kahaDB.setDirectory(dir);
        }

        @Override
        public File getDirectory() {
            return kahaDB.getDirectory();
        }

        @Override
        public void checkpoint(boolean sync) throws IOException {
            kahaDB.checkpoint(sync);
        }

        @Override
        public long size() {
            return kahaDB.size();
        }

        @Override
        public long getLastProducerSequenceId(ProducerId id) throws IOException {
            return kahaDB.getLastProducerSequenceId(id);
        }

        @Override
        public void allowIOResumption() {
            kahaDB.allowIOResumption();
        }
    }

    /**
     * Queue store proxy: lets 4 updateMessage calls through, throws once on
     * the 5th, then disables itself so the failure is transient.
     */
    private class ProxyMessageStoreWithUpdateException extends ProxyMessageStore {

        private boolean throwExceptionOnUpdate;
        private int numBeforeException = 4;

        public ProxyMessageStoreWithUpdateException(MessageStore delegate, boolean throwExceptionOnUpdate) {
            super(delegate);
            this.throwExceptionOnUpdate = throwExceptionOnUpdate;
        }

        @Override
        public void updateMessage(Message message) throws IOException {
            if (throwExceptionOnUpdate) {
                if (numBeforeException > 0) {
                    numBeforeException--;
                    super.updateMessage(message);
                } else {
                    // lets only do it once so we can validate transient store failure
                    throwExceptionOnUpdate = false;

                    //A message that has never been delivered will hit this exception
                    throw new IOException("Hit our simulated exception writing the update to disk");
                }
            } else {
                super.updateMessage(message);
            }
        }
    }

    /**
     * Topic store proxy: same injection as the queue proxy, but the failure is
     * NOT reset after the first throw.
     */
    private class ProxyTopicMessageStoreWithUpdateException extends ProxyTopicMessageStore {

        private boolean throwExceptionOnUpdate;
        private int numBeforeException = 4;

        public ProxyTopicMessageStoreWithUpdateException(TopicMessageStore delegate, boolean throwExceptionOnUpdate) {
            super(delegate);
            this.throwExceptionOnUpdate = throwExceptionOnUpdate;
        }

        @Override
        public void updateMessage(Message message) throws IOException {
            if (throwExceptionOnUpdate) {
                if (numBeforeException > 0) {
                    numBeforeException--;
                    super.updateMessage(message);
                } else {
                    //A message that has never been delivered will hit this exception
                    throw new IOException("Hit our simulated exception writing the update to disk");
                }
            } else {
                super.updateMessage(message);
            }
        }
    }
}
/*
 * Copyright (c) 2010-2014 Sonatype, Inc. All rights reserved.
 *
 * This program is licensed to you under the Apache License Version 2.0,
 * and you may not use this file except in compliance with the Apache License Version 2.0.
 * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the Apache License Version 2.0 is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
package org.sonatype.tests.http.server.jetty.behaviour.filesystem;

import java.io.File;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Test-server behaviour that answers HTTP DELETE requests against the backing
 * filesystem. By default the delete is simulated (200 without touching disk);
 * pass {@code reallyDelete = true} to actually remove the file.
 */
public class Delete extends FSBehaviour {

    private boolean reallyDelete = false;

    public Delete(File file) {
        super(file);
    }

    public Delete(String path) {
        super(path);
    }

    public Delete(File file, boolean reallyDelete) {
        super(file);
        this.reallyDelete = reallyDelete;
    }

    public Delete(String path, boolean reallyDelete) {
        super(path);
        this.reallyDelete = reallyDelete;
    }

    public boolean execute(HttpServletRequest request, HttpServletResponse response, Map<Object, Object> ctx)
        throws Exception {
        log.warn("delete method: {}", request.getMethod());
        // Only handle DELETE; let the chain continue for any other method.
        if (!"DELETE".equals(request.getMethod())) {
            return true;
        }
        final File target = fs(request.getPathInfo());
        if (!target.exists()) {
            log.debug("Delete: File does not exist: {}", target.getAbsolutePath());
            response.setStatus(HttpServletResponse.SC_NOT_FOUND);
            return false;
        }
        // A real delete that fails maps to 405; a simulated delete always succeeds.
        if (reallyDelete && !target.delete()) {
            response.setStatus(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
            return false;
        }
        response.setStatus(200);
        return false;
    }
}
package org.easyframe.tutorial.lessonb;

import java.util.Collection;

import org.easyframe.enterprise.spring.GenericDao;
import org.easyframe.tutorial.lesson2.entity.Student;

/**
 * DAO for {@link Student} entities, extending the generic CRUD operations
 * provided by {@link GenericDao}.
 */
public interface StudentDao extends GenericDao<Student>{

    /**
     * Promotes the given students to the next grade in a single batch operation.
     *
     * @param ids the ids of the students to promote
     */
    public void gradeUp(Collection<Integer> ids);
}
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE191_Integer_Underflow__int_getCookies_Servlet_sub_22b.java
Label Definition File: CWE191_Integer_Underflow__int.label.xml
Template File: sources-sinks-22b.tmpl.java
*/
/*
 * @description
 * CWE: 191 Integer Underflow
 * BadSource: getCookies_Servlet Read data from the first cookie using getCookies()
 * GoodSource: A hardcoded non-zero, non-min, non-max, even number
 * Sinks: sub
 *    GoodSink: Ensure there will not be an underflow before subtracting 1 from data
 *    BadSink : Subtract 1 from data, which can cause an Underflow
 * Flow Variant: 22 Control flow: Flow controlled by value of a public static variable. Sink functions are in a separate file from sources.
 *
 * */

import javax.servlet.http.*;

/*
 * NOTE(review): this is a generated Juliet/SARD test fixture that is INTENTIONALLY
 * vulnerable (CWE-191 integer underflow). The flawed code paths below are the point
 * of the file and must not be "fixed". Sink selection is controlled by public static
 * flags declared in the companion _22a class.
 */
public class CWE191_Integer_Underflow__int_getCookies_Servlet_sub_22b
{
    // Bad sink: subtracts 1 unconditionally when the flag is set.
    public void badSink(int data , HttpServletRequest request, HttpServletResponse response) throws Throwable
    {
        if (CWE191_Integer_Underflow__int_getCookies_Servlet_sub_22a.badPublicStatic)
        {
            /* POTENTIAL FLAW: if data == Integer.MIN_VALUE, this will overflow */
            int result = (int)(data - 1);
            IO.writeLine("result: " + result);
        }
        else
        {
            /* INCIDENTAL: CWE 561 Dead Code, the code below will never run
             * but ensure data is inititialized before the Sink to avoid compiler errors */
            data = 0;
        }
    }

    /* goodB2G1() - use badsource and goodsink by setting the static variable to false instead of true */
    public void goodB2G1Sink(int data , HttpServletRequest request, HttpServletResponse response) throws Throwable
    {
        if (CWE191_Integer_Underflow__int_getCookies_Servlet_sub_22a.goodB2G1PublicStatic)
        {
            /* INCIDENTAL: CWE 561 Dead Code, the code below will never run
             * but ensure data is inititialized before the Sink to avoid compiler errors */
            data = 0;
        }
        else
        {
            /* FIX: Add a check to prevent an overflow from occurring */
            if (data > Integer.MIN_VALUE)
            {
                int result = (int)(data - 1);
                IO.writeLine("result: " + result);
            }
            else
            {
                IO.writeLine("data value is too small to perform subtraction.");
            }
        }
    }

    /* goodB2G2() - use badsource and goodsink by reversing the blocks in the if in the sink function */
    public void goodB2G2Sink(int data , HttpServletRequest request, HttpServletResponse response) throws Throwable
    {
        if (CWE191_Integer_Underflow__int_getCookies_Servlet_sub_22a.goodB2G2PublicStatic)
        {
            /* FIX: Add a check to prevent an overflow from occurring */
            if (data > Integer.MIN_VALUE)
            {
                int result = (int)(data - 1);
                IO.writeLine("result: " + result);
            }
            else
            {
                IO.writeLine("data value is too small to perform subtraction.");
            }
        }
        else
        {
            /* INCIDENTAL: CWE 561 Dead Code, the code below will never run
             * but ensure data is inititialized before the Sink to avoid compiler errors */
            data = 0;
        }
    }

    /* goodG2B() - use goodsource and badsink */
    public void goodG2BSink(int data , HttpServletRequest request, HttpServletResponse response) throws Throwable
    {
        if (CWE191_Integer_Underflow__int_getCookies_Servlet_sub_22a.goodG2BPublicStatic)
        {
            /* POTENTIAL FLAW: if data == Integer.MIN_VALUE, this will overflow */
            int result = (int)(data - 1);
            IO.writeLine("result: " + result);
        }
        else
        {
            /* INCIDENTAL: CWE 561 Dead Code, the code below will never run
             * but ensure data is inititialized before the Sink to avoid compiler errors */
            data = 0;
        }
    }
}
package microtope.pulser; import static org.junit.jupiter.api.Assertions.*; import java.io.IOException; import javax.jms.ConnectionFactory; import javax.jms.Destination; import javax.jms.JMSException; import javax.jms.Session; import org.apache.activemq.ActiveMQConnectionFactory; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; class MessageSenderIntegrationTests { ConnectionFactory connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false"); @Tag("Integration") @Test void testVMSettings() throws JMSException { // This is a simple test to see if i can open connections, create sessions, create producers ... var connection = connectionFactory.createConnection(); connection.start(); var session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); Destination destination = session.createQueue("VMQUeue"); var producer = session.createProducer(destination); producer.send(session.createTextMessage("Hola")); connection.close(); } @Tag("Integration") @Test void testOpen_withVMConnection_shouldWork(){ try { ActiveMqMessageSender sender= new ActiveMqMessageSender(AMQHelpers.validConf()); var vmConnection = connectionFactory.createConnection(); sender.open(vmConnection); return; } catch (JMSException e) { fail(); } catch (IOException e) { fail(); } } @Tag("Integration") @Test void testOpenAndCloseConnection_withVMConnection_shouldWork(){ try { ActiveMqMessageSender sender= new ActiveMqMessageSender(AMQHelpers.validConf()); var vmConnection = connectionFactory.createConnection(); sender.open(vmConnection); sender.close(); return; } catch (JMSException e) { fail(); } catch (IOException e) { fail(); } } @Tag("Integration") @Test void testOpenAndSendMessage_withVMConnection_shouldWork(){ try { ActiveMqMessageSender sender= new ActiveMqMessageSender(AMQHelpers.validConf()); var vmConnection = connectionFactory.createConnection(); sender.open(vmConnection); sender.sendMessage("Hello World!"); return; } catch (JMSException 
e) { fail(); } catch (IOException e) { fail(); } } @Tag("Integration") @Test void testOpenAndSendMessageAndClose_withVMConnection_shouldWork(){ try { ActiveMqMessageSender sender= new ActiveMqMessageSender(AMQHelpers.validConf()); var vmConnection = connectionFactory.createConnection(); sender.open(vmConnection); sender.sendMessage("Hello World!"); sender.close(); return; } catch (JMSException e) { fail(); } catch (IOException e) { fail(); } } }
/*
 * Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.strata.pricer.sensitivity;

import static com.opengamma.strata.collect.Guavate.toImmutableList;

import java.time.LocalDate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.function.Function;
import java.util.stream.IntStream;

import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Doubles;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.basics.index.Index;
import com.opengamma.strata.basics.index.PriceIndex;
import com.opengamma.strata.basics.index.RateIndex;
import com.opengamma.strata.collect.array.DoubleArray;
import com.opengamma.strata.collect.array.DoubleMatrix;
import com.opengamma.strata.collect.tuple.Pair;
import com.opengamma.strata.data.MarketDataName;
import com.opengamma.strata.market.curve.Curve;
import com.opengamma.strata.market.curve.CurveName;
import com.opengamma.strata.market.curve.LegalEntityGroup;
import com.opengamma.strata.market.curve.ParallelShiftedCurve;
import com.opengamma.strata.market.curve.RepoGroup;
import com.opengamma.strata.market.param.CrossGammaParameterSensitivities;
import com.opengamma.strata.market.param.CrossGammaParameterSensitivity;
import com.opengamma.strata.market.param.CurrencyParameterSensitivities;
import com.opengamma.strata.market.param.CurrencyParameterSensitivity;
import com.opengamma.strata.market.param.ParameterMetadata;
import com.opengamma.strata.math.impl.differentiation.FiniteDifferenceType;
import com.opengamma.strata.math.impl.differentiation.VectorFieldFirstOrderDifferentiator;
import com.opengamma.strata.pricer.DiscountFactors;
import com.opengamma.strata.pricer.SimpleDiscountFactors;
import com.opengamma.strata.pricer.ZeroRateDiscountFactors;
import com.opengamma.strata.pricer.ZeroRatePeriodicDiscountFactors;
import com.opengamma.strata.pricer.bond.ImmutableLegalEntityDiscountingProvider;
import com.opengamma.strata.pricer.bond.LegalEntityDiscountingProvider;
import com.opengamma.strata.pricer.rate.ImmutableRatesProvider;
import com.opengamma.strata.pricer.rate.RatesProvider;

/**
 * Computes the gamma-related values for the rates curve parameters.
 * <p>
 * By default the gamma is computed using a one basis-point shift and a forward finite difference.
 * The results themselves are not scaled (they represent the second order derivative).
 * <p>
 * Reference: Interest Rate Cross-gamma for Single and Multiple Curves. OpenGamma quantitative research 15, July 14
 */
public final class CurveGammaCalculator {

  /**
   * Default implementation. Finite difference is forward and the shift is one basis point (0.0001).
   */
  public static final CurveGammaCalculator DEFAULT = new CurveGammaCalculator(FiniteDifferenceType.FORWARD, 1e-4);

  /**
   * The first order finite difference calculator.
   */
  private final VectorFieldFirstOrderDifferentiator fd;

  //-------------------------------------------------------------------------
  /**
   * Obtains an instance of the finite difference calculator using forward differencing.
   *
   * @param shift  the shift to be applied to the curves
   * @return the calculator
   */
  public static CurveGammaCalculator ofForwardDifference(double shift) {
    return new CurveGammaCalculator(FiniteDifferenceType.FORWARD, shift);
  }

  /**
   * Obtains an instance of the finite difference calculator using central differencing.
   *
   * @param shift  the shift to be applied to the curves
   * @return the calculator
   */
  public static CurveGammaCalculator ofCentralDifference(double shift) {
    return new CurveGammaCalculator(FiniteDifferenceType.CENTRAL, shift);
  }

  /**
   * Obtains an instance of the finite difference calculator using backward differencing.
   *
   * @param shift  the shift to be applied to the curves
   * @return the calculator
   */
  public static CurveGammaCalculator ofBackwardDifference(double shift) {
    return new CurveGammaCalculator(FiniteDifferenceType.BACKWARD, shift);
  }

  //-------------------------------------------------------------------------
  /**
   * Create an instance of the finite difference calculator.
   *
   * @param fdType  the finite difference type
   * @param shift  the shift to be applied to the curves
   */
  private CurveGammaCalculator(FiniteDifferenceType fdType, double shift) {
    this.fd = new VectorFieldFirstOrderDifferentiator(fdType, shift);
  }

  //-------------------------------------------------------------------------
  /**
   * Computes intra-curve cross gamma by applying finite difference method to curve delta.
   * <p>
   * This computes the intra-curve cross gamma, i.e., the second order sensitivities to individual curves.
   * Thus the sensitivity of a curve delta to another curve is not produced.
   * <p>
   * The sensitivities are computed for discount curves, and forward curves for {@code RateIndex} and {@code PriceIndex}.
   * This implementation works only for single currency trades.
   *
   * @param ratesProvider  the rates provider
   * @param sensitivitiesFn  the sensitivity function
   * @return the cross gamma
   */
  public CrossGammaParameterSensitivities calculateCrossGammaIntraCurve(
      RatesProvider ratesProvider,
      Function<ImmutableRatesProvider, CurrencyParameterSensitivities> sensitivitiesFn) {

    ImmutableRatesProvider immProv = ratesProvider.toImmutableRatesProvider();
    CurrencyParameterSensitivities baseDelta = sensitivitiesFn.apply(immProv); // used to check target sensitivity exists
    CrossGammaParameterSensitivities result = CrossGammaParameterSensitivities.empty();
    // discount curve
    for (Entry<Currency, Curve> entry : immProv.getDiscountCurves().entrySet()) {
      Currency currency = entry.getKey();
      Curve curve = entry.getValue();
      if (baseDelta.findSensitivity(curve.getName(), currency).isPresent()) {
        CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
            curve,
            currency,
            c -> immProv.toBuilder().discountCurve(currency, c).build(),
            sensitivitiesFn);
        result = result.combinedWith(gammaSingle);
      } else if (curve.split().size() > 1) {
        // no direct sensitivity: fall back to the underlying curves of a combined curve
        ImmutableList<Curve> curves = curve.split();
        int nCurves = curves.size();
        for (int i = 0; i < nCurves; ++i) {
          int currentIndex = i; // effectively-final copy for use in the lambda below
          Curve underlyingCurve = curves.get(currentIndex);
          if (baseDelta.findSensitivity(underlyingCurve.getName(), currency).isPresent()) {
            CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
                underlyingCurve,
                currency,
                c -> immProv.toBuilder().discountCurve(currency, curve.withUnderlyingCurve(currentIndex, c)).build(),
                sensitivitiesFn);
            result = result.combinedWith(gammaSingle);
          }
        }
      }
    }
    // forward curve
    for (Entry<Index, Curve> entry : immProv.getIndexCurves().entrySet()) {
      Index index = entry.getKey();
      if (index instanceof RateIndex || index instanceof PriceIndex) {
        Currency currency = getCurrency(index);
        Curve curve = entry.getValue();
        if (baseDelta.findSensitivity(curve.getName(), currency).isPresent()) {
          CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
              curve,
              currency,
              c -> immProv.toBuilder().indexCurve(index, c).build(),
              sensitivitiesFn);
          result = result.combinedWith(gammaSingle);
        } else if (curve.split().size() > 1) {
          ImmutableList<Curve> curves = curve.split();
          int nCurves = curves.size();
          for (int i = 0; i < nCurves; ++i) {
            int currentIndex = i;
            Curve underlyingCurve = curves.get(currentIndex);
            if (baseDelta.findSensitivity(underlyingCurve.getName(), currency).isPresent()) {
              CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
                  underlyingCurve,
                  currency,
                  c -> immProv.toBuilder().indexCurve(index, curve.withUnderlyingCurve(currentIndex, c)).build(),
                  sensitivitiesFn);
              result = result.combinedWith(gammaSingle);
            }
          }
        }
      }
    }
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Computes intra-curve cross gamma for bond curves by applying finite difference method to curve delta.
   * <p>
   * This computes the intra-curve cross gamma, i.e., the second order sensitivities to individual curves.
   * Thus the sensitivity of a curve delta to another curve is not produced.
   * <p>
   * The underlying instruments must be single-currency, i.e., the curve currency must be the same as the sensitivity currency.
   *
   * @param ratesProvider  the rates provider
   * @param sensitivitiesFn  the sensitivity function
   * @return the cross gamma
   */
  public CrossGammaParameterSensitivities calculateCrossGammaIntraCurve(
      LegalEntityDiscountingProvider ratesProvider,
      Function<ImmutableLegalEntityDiscountingProvider, CurrencyParameterSensitivities> sensitivitiesFn) {

    LocalDate valuationDate = ratesProvider.getValuationDate();
    ImmutableLegalEntityDiscountingProvider immProv = ratesProvider.toImmutableLegalEntityDiscountingProvider();
    CurrencyParameterSensitivities baseDelta = sensitivitiesFn.apply(immProv); // used to check target sensitivity exists
    CrossGammaParameterSensitivities result = CrossGammaParameterSensitivities.empty();
    // issuer curve
    for (Entry<Pair<LegalEntityGroup, Currency>, DiscountFactors> entry : immProv.getIssuerCurves().entrySet()) {
      Pair<LegalEntityGroup, Currency> legCcy = entry.getKey();
      Currency currency = legCcy.getSecond();
      Curve curve = getCurve(entry.getValue());
      CurveName curveName = curve.getName();
      if (baseDelta.findSensitivity(curveName, currency).isPresent()) {
        CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
            curveName,
            curve,
            currency,
            c -> replaceIssuerCurve(immProv, legCcy, DiscountFactors.of(currency, valuationDate, c)),
            sensitivitiesFn);
        result = result.combinedWith(gammaSingle);
      } else {
        // no direct sensitivity: fall back to the underlying curves of a combined curve
        ImmutableList<Curve> curves = curve.split();
        int nCurves = curves.size();
        if (nCurves > 1) {
          for (int i = 0; i < nCurves; ++i) {
            int currentIndex = i; // effectively-final copy for use in the lambda below
            Curve underlyingCurve = curves.get(currentIndex);
            CurveName underlyingCurveName = underlyingCurve.getName();
            if (baseDelta.findSensitivity(underlyingCurveName, currency).isPresent()) {
              CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
                  underlyingCurveName,
                  underlyingCurve,
                  currency,
                  c -> replaceIssuerCurve(
                      immProv,
                      legCcy,
                      DiscountFactors.of(currency, valuationDate, curve.withUnderlyingCurve(currentIndex, c))),
                  sensitivitiesFn);
              result = result.combinedWith(gammaSingle);
            }
          }
        }
      }
    }
    // repo curve
    for (Entry<Pair<RepoGroup, Currency>, DiscountFactors> entry : immProv.getRepoCurves().entrySet()) {
      Pair<RepoGroup, Currency> rgCcy = entry.getKey();
      Currency currency = rgCcy.getSecond();
      Curve curve = getCurve(entry.getValue());
      CurveName curveName = curve.getName();
      if (baseDelta.findSensitivity(curveName, currency).isPresent()) {
        CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
            curveName,
            curve,
            currency,
            c -> replaceRepoCurve(immProv, rgCcy, DiscountFactors.of(currency, valuationDate, c)),
            sensitivitiesFn);
        result = result.combinedWith(gammaSingle);
      } else {
        ImmutableList<Curve> curves = curve.split();
        int nCurves = curves.size();
        if (nCurves > 1) {
          for (int i = 0; i < nCurves; ++i) {
            int currentIndex = i;
            Curve underlyingCurve = curves.get(currentIndex);
            CurveName underlyingCurveName = underlyingCurve.getName();
            // rgCcy.getSecond() is the same value as 'currency' above
            if (baseDelta.findSensitivity(underlyingCurveName, rgCcy.getSecond()).isPresent()) {
              CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
                  underlyingCurveName,
                  underlyingCurve,
                  currency,
                  c -> replaceRepoCurve(
                      immProv,
                      rgCcy,
                      DiscountFactors.of(currency, valuationDate, curve.withUnderlyingCurve(currentIndex, c))),
                  sensitivitiesFn);
              result = result.combinedWith(gammaSingle);
            }
          }
        }
      }
    }
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Computes cross-curve gamma by applying finite difference method to curve delta.
   * <p>
   * This computes the cross-curve gamma, i.e., the second order sensitivities to full curves.
   * Thus the sensitivities of curve delta to other curves are produced.
   * <p>
   * The sensitivities are computed for discount curves, and forward curves for {@code RateIndex} and {@code PriceIndex}.
   * This implementation works only for single currency trades.
   *
   * @param ratesProvider  the rates provider
   * @param sensitivitiesFn  the sensitivity function
   * @return the cross gamma
   */
  public CrossGammaParameterSensitivities calculateCrossGammaCrossCurve(
      RatesProvider ratesProvider,
      Function<ImmutableRatesProvider, CurrencyParameterSensitivities> sensitivitiesFn) {

    ImmutableRatesProvider immProv = ratesProvider.toImmutableRatesProvider();
    CurrencyParameterSensitivities baseDelta = sensitivitiesFn.apply(immProv); // used to check target sensitivity exists.
    CrossGammaParameterSensitivities result = CrossGammaParameterSensitivities.empty();
    // outer loop: one inner block of sensitivities per base delta entry, merged at the end
    for (CurrencyParameterSensitivity baseDeltaSingle : baseDelta.getSensitivities()) {
      CrossGammaParameterSensitivities resultInner = CrossGammaParameterSensitivities.empty();
      // discount curve
      for (Entry<Currency, Curve> entry : immProv.getDiscountCurves().entrySet()) {
        Currency currency = entry.getKey();
        Curve curve = entry.getValue();
        if (baseDelta.findSensitivity(curve.getName(), currency).isPresent()) {
          CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
              baseDeltaSingle,
              curve,
              c -> immProv.toBuilder().discountCurve(currency, c).build(),
              sensitivitiesFn);
          resultInner = resultInner.combinedWith(gammaSingle);
        } else if (curve.split().size() > 1) {
          ImmutableList<Curve> curves = curve.split();
          int nCurves = curves.size();
          for (int i = 0; i < nCurves; ++i) {
            int currentIndex = i;
            Curve underlyingCurve = curves.get(currentIndex);
            if (baseDelta.findSensitivity(underlyingCurve.getName(), currency).isPresent()) {
              CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
                  baseDeltaSingle,
                  underlyingCurve,
                  c -> immProv.toBuilder().discountCurve(currency, curve.withUnderlyingCurve(currentIndex, c)).build(),
                  sensitivitiesFn);
              resultInner = resultInner.combinedWith(gammaSingle);
            }
          }
        }
      }
      // forward curve
      for (Entry<Index, Curve> entry : immProv.getIndexCurves().entrySet()) {
        Index index = entry.getKey();
        if (index instanceof RateIndex || index instanceof PriceIndex) {
          Currency currency = getCurrency(index);
          Curve curve = entry.getValue();
          if (baseDelta.findSensitivity(curve.getName(), currency).isPresent()) {
            CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
                baseDeltaSingle,
                curve,
                c -> immProv.toBuilder().indexCurve(index, c).build(),
                sensitivitiesFn);
            resultInner = resultInner.combinedWith(gammaSingle);
          } else if (curve.split().size() > 1) {
            ImmutableList<Curve> curves = curve.split();
            int nCurves = curves.size();
            for (int i = 0; i < nCurves; ++i) {
              int currentIndex = i;
              Curve underlyingCurve = curves.get(currentIndex);
              if (baseDelta.findSensitivity(underlyingCurve.getName(), currency).isPresent()) {
                CrossGammaParameterSensitivity gammaSingle = computeGammaForCurve(
                    baseDeltaSingle,
                    underlyingCurve,
                    c -> immProv.toBuilder().indexCurve(index, curve.withUnderlyingCurve(currentIndex, c)).build(),
                    sensitivitiesFn);
                resultInner = resultInner.combinedWith(gammaSingle);
              }
            }
          }
        }
      }
      result = result.combinedWith(combineSensitivities(baseDeltaSingle, resultInner));
    }
    return result;
  }

  //-------------------------------------------------------------------------
  // resolves the currency of a rate or price index; other index types are unsupported
  private Currency getCurrency(Index index) {
    if (index instanceof RateIndex) {
      return ((RateIndex) index).getCurrency();
    } else if (index instanceof PriceIndex) {
      return ((PriceIndex) index).getCurrency();
    }
    throw new IllegalArgumentException("unsupported index");
  }

  // compute the second order sensitivity to Curve
  CrossGammaParameterSensitivity computeGammaForCurve(
      Curve curve,
      Currency sensitivityCurrency,
      Function<Curve, ImmutableRatesProvider> ratesProviderFn,
      Function<ImmutableRatesProvider, CurrencyParameterSensitivities> sensitivitiesFn) {

    // delta as a function of the curve parameters; differentiating it yields the gamma
    Function<DoubleArray, DoubleArray> function = new Function<DoubleArray, DoubleArray>() {
      @Override
      public DoubleArray apply(DoubleArray t) {
        Curve newCurve = replaceParameters(curve, t);
        ImmutableRatesProvider newRates = ratesProviderFn.apply(newCurve);
        CurrencyParameterSensitivities sensiMulti = sensitivitiesFn.apply(newRates);
        return sensiMulti.getSensitivity(newCurve.getName(), sensitivityCurrency).getSensitivity();
      }
    };
    int nParams = curve.getParameterCount();
    DoubleMatrix sensi = fd.differentiate(function).apply(DoubleArray.of(nParams, n -> curve.getParameter(n)));
    List<ParameterMetadata> metadata = IntStream.range(0, nParams)
        .mapToObj(i -> curve.getParameterMetadata(i))
        .collect(toImmutableList());
    return CrossGammaParameterSensitivity.of(curve.getName(), metadata, sensitivityCurrency, sensi);
  }

  // computes the sensitivity of baseDeltaSingle to Curve
  CrossGammaParameterSensitivity computeGammaForCurve(
      CurrencyParameterSensitivity baseDeltaSingle,
      Curve curve,
      Function<Curve, ImmutableRatesProvider> ratesProviderFn,
      Function<ImmutableRatesProvider, CurrencyParameterSensitivities> sensitivitiesFn) {

    Function<DoubleArray, DoubleArray> function = new Function<DoubleArray, DoubleArray>() {
      @Override
      public DoubleArray apply(DoubleArray t) {
        Curve newCurve = replaceParameters(curve, t);
        ImmutableRatesProvider newRates = ratesProviderFn.apply(newCurve);
        CurrencyParameterSensitivities sensiMulti = sensitivitiesFn.apply(newRates);
        return sensiMulti.getSensitivity(baseDeltaSingle.getMarketDataName(), baseDeltaSingle.getCurrency()).getSensitivity();
      }
    };
    int nParams = curve.getParameterCount();
    DoubleMatrix sensi = fd.differentiate(function).apply(DoubleArray.of(nParams, n -> curve.getParameter(n)));
    List<ParameterMetadata> metadata = IntStream.range(0, nParams)
        .mapToObj(i -> curve.getParameterMetadata(i))
        .collect(toImmutableList());
    return CrossGammaParameterSensitivity.of(
        baseDeltaSingle.getMarketDataName(),
        baseDeltaSingle.getParameterMetadata(),
        curve.getName(),
        metadata,
        baseDeltaSingle.getCurrency(),
        sensi);
  }

  // concatenates the per-curve gamma blocks row-wise into one sensitivity matrix
  private CrossGammaParameterSensitivity combineSensitivities(
      CurrencyParameterSensitivity baseDeltaSingle,
      CrossGammaParameterSensitivities blockCrossGamma) {

    double[][] valuesTotal = new double[baseDeltaSingle.getParameterCount()][];
    List<Pair<MarketDataName<?>, List<? extends ParameterMetadata>>> order = new ArrayList<>();
    for (int i = 0; i < baseDeltaSingle.getParameterCount(); ++i) {
      ArrayList<Double> innerList = new ArrayList<>();
      for (CrossGammaParameterSensitivity gammaSingle : blockCrossGamma.getSensitivities()) {
        innerList.addAll(gammaSingle.getSensitivity().row(i).toList());
        if (i == 0) {
          // record the column order once, from the first row
          order.add(gammaSingle.getOrder().get(0));
        }
      }
      valuesTotal[i] = Doubles.toArray(innerList);
    }
    return CrossGammaParameterSensitivity.of(
        baseDeltaSingle.getMarketDataName(),
        baseDeltaSingle.getParameterMetadata(),
        order,
        baseDeltaSingle.getCurrency(),
        DoubleMatrix.ofUnsafe(valuesTotal));
  }

  //-------------------------------------------------------------------------
  /**
   * Computes the "sum-of-column gamma" or "semi-parallel gamma" for a sensitivity function.
   * <p>
   * This implementation supports a single {@link Curve} on the zero-coupon rates.
   * By default the gamma is computed using a one basis-point shift and a forward finite difference.
   * The results themselves are not scaled (they represent the second order derivative).
   *
   * @param curve  the single curve to be bumped
   * @param curveCurrency  the currency of the curve and resulting sensitivity
   * @param sensitivitiesFn  the function to convert the bumped curve to parameter sensitivities
   * @return the "sum-of-columns" or "semi-parallel" gamma vector
   */
  public CurrencyParameterSensitivity calculateSemiParallelGamma(
      Curve curve,
      Currency curveCurrency,
      Function<Curve, CurrencyParameterSensitivity> sensitivitiesFn) {

    Delta deltaShift = new Delta(curve, sensitivitiesFn);
    Function<DoubleArray, DoubleMatrix> gammaFn = fd.differentiate(deltaShift);
    // differentiate the delta with respect to a single parallel-shift variable
    DoubleArray gamma = gammaFn.apply(DoubleArray.filled(1)).column(0);
    return curve.createParameterSensitivity(curveCurrency, gamma);
  }

  //-------------------------------------------------------------------------
  // returns a copy of the curve with all parameters replaced by newParameters
  private Curve replaceParameters(Curve curve, DoubleArray newParameters) {
    return curve.withPerturbation((i, v, m) -> newParameters.get(i));
  }

  //-------------------------------------------------------------------------
  /**
   * Inner class to compute the delta for a given parallel shift of the curve.
   */
  static class Delta implements Function<DoubleArray, DoubleArray> {
    private final Curve curve;
    private final Function<Curve, CurrencyParameterSensitivity> sensitivitiesFn;

    Delta(Curve curve, Function<Curve, CurrencyParameterSensitivity> sensitivitiesFn) {
      this.curve = curve;
      this.sensitivitiesFn = sensitivitiesFn;
    }

    @Override
    public DoubleArray apply(DoubleArray s) {
      // s holds a single element: the size of the parallel shift to apply
      double shift = s.get(0);
      Curve curveBumped = ParallelShiftedCurve.absolute(curve, shift);
      CurrencyParameterSensitivity pts = sensitivitiesFn.apply(curveBumped);
      return pts.getSensitivity();
    }
  }

  //-------------------------------------------------------------------------
  // extracts the underlying curve from the known DiscountFactors implementations
  private Curve getCurve(DiscountFactors discountFactors) {
    if (discountFactors instanceof SimpleDiscountFactors) {
      return ((SimpleDiscountFactors) discountFactors).getCurve();
    }
    if (discountFactors instanceof ZeroRateDiscountFactors) {
      return ((ZeroRateDiscountFactors) discountFactors).getCurve();
    }
    if (discountFactors instanceof ZeroRatePeriodicDiscountFactors) {
      return ((ZeroRatePeriodicDiscountFactors) discountFactors).getCurve();
    }
    throw new IllegalArgumentException("Unsupported DiscountFactors type");
  }

  // compute the second order sensitivity to Curve for the legal-entity provider
  private CrossGammaParameterSensitivity computeGammaForCurve(
      CurveName curveName,
      Curve curve,
      Currency sensitivityCurrency,
      Function<Curve, ImmutableLegalEntityDiscountingProvider> ratesProviderFn,
      Function<ImmutableLegalEntityDiscountingProvider, CurrencyParameterSensitivities> sensitivitiesFn) {

    Function<DoubleArray, DoubleArray> function = new Function<DoubleArray, DoubleArray>() {
      @Override
      public DoubleArray apply(DoubleArray t) {
        Curve newCurve = curve.withPerturbation((i, v, m) -> t.get(i));
        ImmutableLegalEntityDiscountingProvider newRates = ratesProviderFn.apply(newCurve);
        CurrencyParameterSensitivities sensiMulti = sensitivitiesFn.apply(newRates);
        return sensiMulti.getSensitivity(curveName, sensitivityCurrency).getSensitivity();
      }
    };
    int nParams = curve.getParameterCount();
    DoubleMatrix sensi = fd.differentiate(function).apply(DoubleArray.of(nParams, n -> curve.getParameter(n)));
    List<ParameterMetadata> metadata = IntStream.range(0, nParams)
        .mapToObj(i -> curve.getParameterMetadata(i))
        .collect(toImmutableList());
    return CrossGammaParameterSensitivity.of(curveName, metadata, sensitivityCurrency, sensi);
  }

  // returns a copy of the provider with the issuer curve for legCcy replaced
  private ImmutableLegalEntityDiscountingProvider replaceIssuerCurve(
      ImmutableLegalEntityDiscountingProvider ratesProvider,
      Pair<LegalEntityGroup, Currency> legCcy,
      DiscountFactors discountFactors) {

    Map<Pair<LegalEntityGroup, Currency>, DiscountFactors> curves = new HashMap<>();
    curves.putAll(ratesProvider.getIssuerCurves());
    curves.put(legCcy, discountFactors);
    return ratesProvider.toBuilder()
        .issuerCurves(curves)
        .build();
  }

  // returns a copy of the provider with the repo curve for rgCcy replaced
  private ImmutableLegalEntityDiscountingProvider replaceRepoCurve(
      ImmutableLegalEntityDiscountingProvider ratesProvider,
      Pair<RepoGroup, Currency> rgCcy,
      DiscountFactors discountFactors) {

    Map<Pair<RepoGroup, Currency>, DiscountFactors> curves = new HashMap<>();
    curves.putAll(ratesProvider.getRepoCurves());
    curves.put(rgCcy, discountFactors);
    return ratesProvider.toBuilder()
        .repoCurves(curves)
        .build();
  }

}
/* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.internal.telephony.test; import android.os.HandlerThread; import android.os.Looper; import android.telephony.Rlog; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.util.List; // Also in ATChannel.java class LineReader { /** * Not threadsafe * Assumes input is ASCII */ //***** Constants // For what it's worth, this is also the size of an // OMAP CSMI mailbox static final int BUFFER_SIZE = 0x1000; // just to prevent constant allocations byte mBuffer[] = new byte[BUFFER_SIZE]; //***** Instance Variables InputStream mInStream; LineReader (InputStream s) { mInStream = s; } String getNextLine() { return getNextLine(false); } String getNextLineCtrlZ() { return getNextLine(true); } /** * Note: doesn't return the last incomplete line read on EOF, since * it doesn't typically matter anyway * * Returns NULL on EOF */ String getNextLine(boolean ctrlZ) { int i = 0; try { for (;;) { int result; result = mInStream.read(); if (result < 0) { return null; } if (ctrlZ && result == 0x1a) { break; } else if (result == '\r' || result == '\n') { if (i == 0) { // Skip leading cr/lf continue; } else { break; } } mBuffer[i++] = (byte)result; } } catch (IOException ex) { return null; } catch 
(IndexOutOfBoundsException ex) { System.err.println("ATChannel: buffer overflow"); } try { return new String(mBuffer, 0, i, "US-ASCII"); } catch (UnsupportedEncodingException ex) { System.err.println("ATChannel: implausable UnsupportedEncodingException"); return null; } } } class InterpreterEx extends Exception { public InterpreterEx (String result) { mResult = result; } String mResult; } public class ModelInterpreter implements Runnable, SimulatedRadioControl { static final int MAX_CALLS = 6; /** number of msec between dialing -> alerting and alerting->active */ static final int CONNECTING_PAUSE_MSEC = 5 * 100; static final String LOG_TAG = "ModelInterpreter"; //***** Instance Variables InputStream mIn; OutputStream mOut; LineReader mLineReader; ServerSocket mSS; private String mFinalResponse; SimulatedGsmCallState mSimulatedCallState; HandlerThread mHandlerThread; int mPausedResponseCount; Object mPausedResponseMonitor = new Object(); //***** Events static final int PROGRESS_CALL_STATE = 1; //***** Constructor public ModelInterpreter (InputStream in, OutputStream out) { mIn = in; mOut = out; init(); } public ModelInterpreter (InetSocketAddress sa) throws java.io.IOException { mSS = new ServerSocket(); mSS.setReuseAddress(true); mSS.bind(sa); init(); } private void init() { new Thread(this, "ModelInterpreter").start(); mHandlerThread = new HandlerThread("ModelInterpreter"); mHandlerThread.start(); Looper looper = mHandlerThread.getLooper(); mSimulatedCallState = new SimulatedGsmCallState(looper); } //***** Runnable Implementation @Override public void run() { for (;;) { if (mSS != null) { Socket s; try { s = mSS.accept(); } catch (java.io.IOException ex) { Rlog.w(LOG_TAG, "IOException on socket.accept(); stopping", ex); return; } try { mIn = s.getInputStream(); mOut = s.getOutputStream(); } catch (java.io.IOException ex) { Rlog.w(LOG_TAG, "IOException on accepted socket(); re-listening", ex); continue; } Rlog.i(LOG_TAG, "New connection accepted"); } mLineReader = 
                    new LineReader (mIn);

            println ("Welcome");

            // Command loop: one AT command line per iteration until EOF.
            for (;;) {
                String line;

                line = mLineReader.getNextLine();

                //System.out.println("MI<< " + line);

                if (line == null) {
                    break;
                }

                // Honor pauseResponses()/resumeResponses(): block here while
                // responses are paused.
                synchronized(mPausedResponseMonitor) {
                    while (mPausedResponseCount > 0) {
                        try {
                            mPausedResponseMonitor.wait();
                        } catch (InterruptedException ex) {
                            // NOTE(review): interrupt is swallowed and the
                            // wait retried; the interrupt flag is not restored.
                        }
                    }
                }

                synchronized (this) {
                    try {
                        // Default final response; handlers may replace it.
                        mFinalResponse = "OK";
                        processLine(line);
                        println(mFinalResponse);
                    } catch (InterpreterEx ex) {
                        println(ex.mResult);
                    } catch (RuntimeException ex) {
                        ex.printStackTrace();
                        println("ERROR");
                    }
                }
            }

            Rlog.i(LOG_TAG, "Disconnected");

            if (mSS == null) {
                // no reconnect in this case
                break;
            }
        }
    }

    //***** Instance Methods

    /** Start the simulated phone ringing */
    @Override
    public void triggerRing(String number) {
        synchronized (this) {
            boolean success;

            success = mSimulatedCallState.triggerRing(number);

            if (success) {
                println ("RING");
            }
        }
    }

    /** If a call is DIALING or ALERTING, progress it to the next state */
    @Override
    public void progressConnectingCallState() {
        mSimulatedCallState.progressConnectingCallState();
    }

    /** If a call is DIALING or ALERTING, progress it all the way to ACTIVE */
    @Override
    public void progressConnectingToActive() {
        mSimulatedCallState.progressConnectingToActive();
    }

    /** automatically progress mobile originated calls to ACTIVE.
     * default to true */
    @Override
    public void setAutoProgressConnectingCall(boolean b) {
        mSimulatedCallState.setAutoProgressConnectingCall(b);
    }

    @Override
    public void setNextDialFailImmediately(boolean b) {
        mSimulatedCallState.setNextDialFailImmediately(b);
    }

    @Override
    public void setNextCallFailCause(int gsmCause) {
        //FIXME implement
    }

    /** hangup ringing, dialing, or active calls */
    @Override
    public void triggerHangupForeground() {
        boolean success;

        success = mSimulatedCallState.triggerHangupForeground();

        if (success) {
            println ("NO CARRIER");
        }
    }

    /** hangup holding calls */
    @Override
    public void triggerHangupBackground() {
        boolean success;

        success = mSimulatedCallState.triggerHangupBackground();

        if (success) {
            println ("NO CARRIER");
        }
    }

    /** hangup all */
    @Override
    public void triggerHangupAll() {
        boolean success;

        success = mSimulatedCallState.triggerHangupAll();

        if (success) {
            println ("NO CARRIER");
        }
    }

    // Sends an unsolicited result code (e.g. "RING") to the peer.
    public void sendUnsolicited (String unsol) {
        synchronized (this) {
            println(unsol);
        }
    }

    @Override public void triggerSsn(int a, int b) {}

    @Override public void triggerIncomingUssd(String statusCode, String message) {}

    @Override
    public void triggerIncomingSMS(String message) {
        // NOTE(review): unimplemented; the PDU-building sketch below is
        // commented out.
        /**************
        StringBuilder pdu = new StringBuilder();

        pdu.append ("00");          //SMSC address - 0 bytes

        pdu.append ("04");          // Message type indicator

        // source address: +18005551212
        pdu.append("918100551521F0");

        // protocol ID and data coding scheme
        pdu.append("0000");

        Calendar c = Calendar.getInstance();

        pdu.append (c.
        synchronized (this) {
            println("+CMT: ,1\r" + pdu.toString());
        }
        **************/
    }

    // Suspends processing of further command lines until resumeResponses().
    @Override
    public void pauseResponses() {
        synchronized(mPausedResponseMonitor) {
            mPausedResponseCount++;
        }
    }

    // Decrements the pause count; wakes the reader thread when it hits zero.
    @Override
    public void resumeResponses() {
        synchronized(mPausedResponseMonitor) {
            mPausedResponseCount--;
            if (mPausedResponseCount == 0) {
                mPausedResponseMonitor.notifyAll();
            }
        }
    }

    //***** Private Instance Methods

    // Handles ATA (answer incoming call).
    private void onAnswer() throws InterpreterEx {
        boolean success;

        success = mSimulatedCallState.onAnswer();

        if (!success) {
            throw new InterpreterEx("ERROR");
        }
    }

    // Handles ATH (hang up).
    private void onHangup() throws InterpreterEx {
        boolean success = false;

        // NOTE(review): this calls onAnswer(), not a hangup transition —
        // looks like a copy/paste bug; confirm against SimulatedGsmCallState
        // before changing.
        success = mSimulatedCallState.onAnswer();

        if (!success) {
            throw new InterpreterEx("ERROR");
        }

        mFinalResponse = "NO CARRIER";
    }

    // Handles call-hold commands of the form "+CHLD=<c0>[<c1>]".
    private void onCHLD(String command) throws InterpreterEx {
        // command starts with "+CHLD="
        char c0;
        char c1 = 0;
        boolean success;

        c0 = command.charAt(6);

        if (command.length() >= 8) {
            c1 = command.charAt(7);
        }

        success = mSimulatedCallState.onChld(c0, c1);

        if (!success) {
            throw new InterpreterEx("ERROR");
        }
    }

    // Handles ATD (dial); the dial string follows the leading 'D'.
    private void onDial(String command) throws InterpreterEx {
        boolean success;

        success = mSimulatedCallState.onDial(command.substring(1));

        if (!success) {
            throw new InterpreterEx("ERROR");
        }
    }

    // Handles +CLCC (list current calls): prints one response line per call.
    private void onCLCC() {
        List<String> lines;

        lines = mSimulatedCallState.getClccLines();

        for (int i = 0, s = lines.size() ; i < s ; i++) {
            println (lines.get(i));
        }
    }

    // Handles +CMGS (send SMS): prompts with "> ", consumes the PDU up to
    // ctrl-Z, and acknowledges with a fixed message reference.
    private void onSMSSend(String command) {
        String pdu;

        print ("> ");

        pdu = mLineReader.getNextLineCtrlZ();

        println("+CMGS: 1");
    }

    // Dispatches each command of an AT command line to its handler; commands
    // without a handler fall through to the sDefaultResponses table.
    void processLine (String line) throws InterpreterEx {
        String[] commands;

        commands = splitCommands(line);

        for (int i = 0; i < commands.length ; i++) {
            String command = commands[i];

            if (command.equals("A")) {
                onAnswer();
            } else if (command.equals("H")) {
                onHangup();
            } else if (command.startsWith("+CHLD=")) {
                onCHLD(command);
            } else if (command.equals("+CLCC")) {
                onCLCC();
            } else if (command.startsWith("D")) {
                onDial(command);
            } else if
                    (command.startsWith("+CMGS=")) {
                onSMSSend(command);
            } else {
                // Fall back to the canned-response table.
                boolean found = false;

                for (int j = 0; j < sDefaultResponses.length ; j++) {
                    if (command.equals(sDefaultResponses[j][0])) {
                        String r = sDefaultResponses[j][1];
                        if (r != null) {
                            println(r);
                        }

                        found = true;
                        break;
                    }
                }

                if (!found) {
                    throw new InterpreterEx ("ERROR");
                }
            }
        }
    }

    // Strips the leading "AT" and returns the remainder as a single command.
    // Proper splitting of concatenated commands (see sketch below) is TODO.
    String[] splitCommands(String line) throws InterpreterEx {
        if (!line.startsWith ("AT")) {
            throw new InterpreterEx("ERROR");
        }

        if (line.length() == 2) {
            // Just AT by itself
            return new String[0];
        }

        String ret[] = new String[1];

        //TODO fix case here too
        ret[0] = line.substring(2);

        return ret;

        /****
        try {
            // i = 2 to skip over AT
            for (int i = 2, s = line.length() ; i < s ; i++) {
                // r"|([A-RT-Z]\d?)"                  # Normal commands eg ATA or I0
                // r"|(&[A-Z]\d*)"                    # & commands eg &C
                // r"|(S\d+(=\d+)?)"                  # S registers
                // r"((\+|%)\w+(\?|=([^;]+(;|$)))?)"  # extended command eg +CREG=2
            }
        } catch (StringIndexOutOfBoundsException ex) {
            throw new InterpreterEx ("ERROR");
        }
        ***/
    }

    // Writes s followed by <cr> to the peer, US-ASCII encoded.
    void println (String s) {
        synchronized(this) {
            try {
                byte[] bytes = s.getBytes("US-ASCII");

                //System.out.println("MI>> " + s);

                mOut.write(bytes);
                mOut.write('\r');
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    }

    // Writes s to the peer without a trailing <cr> (used for the SMS prompt).
    void print (String s) {
        synchronized(this) {
            try {
                byte[] bytes = s.getBytes("US-ASCII");

                //System.out.println("MI>> " + s + " (no <cr>)");

                mOut.write(bytes);
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    }

    // Stops the handler thread and closes both streams; close failures are
    // deliberately ignored during teardown.
    @Override
    public void shutdown() {
        Looper looper = mHandlerThread.getLooper();

        if (looper != null) {
            looper.quit();
        }

        try {
            mIn.close();
        } catch (IOException ex) {
        }
        try {
            mOut.close();
        } catch (IOException ex) {
        }
    }

    // Canned replies for commands with no dedicated handler; a null response
    // means "accept silently" (only the final OK is printed).
    static final String [][] sDefaultResponses = {
        {"E0Q0V1", null},
        {"+CMEE=2", null},
        {"+CREG=2", null},
        {"+CGREG=2", null},
        {"+CCWA=1", null},
        {"+COPS=0", null},
        {"+CFUN=1", null},
        {"+CGMI", "+CGMI: Android Model AT Interpreter\r"},
        {"+CGMM", "+CGMM: Android Model AT Interpreter\r"},
        {"+CGMR", "+CGMR: 1.0\r"},
        {"+CGSN", "000000000000000\r"},
        // Remainder of sDefaultResponses: SIM identity, network status and
        // SIM record (+CRSM) replies.
        {"+CIMI", "320720000000000\r"},
        {"+CSCS=?", "+CSCS: (\"HEX\",\"UCS2\")\r"},
        {"+CFUN?", "+CFUN: 1\r"},
        {"+COPS=3,0;+COPS?;+COPS=3,1;+COPS?;+COPS=3,2;+COPS?",
                "+COPS: 0,0,\"Android\"\r"
                + "+COPS: 0,1,\"Android\"\r"
                + "+COPS: 0,2,\"310995\"\r"},
        {"+CREG?", "+CREG: 2,5, \"0113\", \"6614\"\r"},
        {"+CGREG?", "+CGREG: 2,0\r"},
        {"+CSQ", "+CSQ: 16,99\r"},
        {"+CNMI?", "+CNMI: 1,2,2,1,1\r"},
        {"+CLIR?", "+CLIR: 1,3\r"},
        {"%CPVWI=2", "%CPVWI: 0\r"},
        {"+CUSD=1,\"#646#\"", "+CUSD=0,\"You have used 23 minutes\"\r"},
        {"+CRSM=176,12258,0,0,10", "+CRSM: 144,0,981062200050259429F6\r"},
        {"+CRSM=192,12258,0,0,15", "+CRSM: 144,0,0000000A2FE204000FF55501020000\r"},

        /* EF[ADN] */
        {"+CRSM=192,28474,0,0,15", "+CRSM: 144,0,0000005a6f3a040011f5220102011e\r"},
        {"+CRSM=178,28474,1,4,30", "+CRSM: 144,0,437573746f6d65722043617265ffffff07818100398799f7ffffffffffff\r"},
        {"+CRSM=178,28474,2,4,30", "+CRSM: 144,0,566f696365204d61696cffffffffffff07918150367742f3ffffffffffff\r"},
        {"+CRSM=178,28474,3,4,30", "+CRSM: 144,0,4164676a6dffffffffffffffffffffff0b918188551512c221436587ff01\r"},
        {"+CRSM=178,28474,4,4,30", "+CRSM: 144,0,810101c1ffffffffffffffffffffffff068114455245f8ffffffffffffff\r"},

        /* EF[EXT1] */
        {"+CRSM=192,28490,0,0,15", "+CRSM: 144,0,000000416f4a040011f5550102010d\r"},
        {"+CRSM=178,28490,1,4,13", "+CRSM: 144,0,0206092143658709ffffffffff\r"}
    };
}
package com.manev.quislisting.domain;

import com.fasterxml.jackson.annotation.JsonBackReference;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.Table;

import java.util.Set;

import static javax.persistence.CascadeType.ALL;

/**
 * Entity that groups {@code Translation} rows representing the same logical
 * content in different languages.
 */
@Entity
@Table(name = "ql_translation_group")
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
public class TranslationGroup {

    /** Surrogate primary key, generated by the persistence provider. */
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    /** Translations belonging to this group; all operations cascade. */
    @JsonBackReference(value = "translation_group_translation_reference")
    @OneToMany(cascade = ALL, mappedBy = "translationGroup")
    private Set<Translation> translations;

    /** No-arg constructor required by JPA. */
    public TranslationGroup() {
    }

    /** Convenience constructor for referencing an existing group by id. */
    public TranslationGroup(Long id) {
        this.id = id;
    }

    public Long getId() {
        return this.id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Set<Translation> getTranslations() {
        return this.translations;
    }

    public void setTranslations(Set<Translation> translations) {
        this.translations = translations;
    }
}
/*
 * Copyright 2013-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.cloudfoundry.client.v2.serviceplans;

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import org.immutables.value.Value;

/**
 * The response for the Update Service Plan operation.
 *
 * <p>All payload fields are inherited from {@link AbstractServicePlanResource};
 * the concrete immutable implementation is presumably generated by the
 * Immutables processor under the underscore-stripped name — confirm against
 * the project's Immutables style configuration.
 */
@JsonDeserialize
@Value.Immutable
abstract class _UpdateServicePlanResponse extends AbstractServicePlanResource {

}
/* * Copyright © 2019 Cask Data, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package io.cdap.plugin.sfmc.sink; import com.exacttarget.fuelsdk.ETSdkException; import io.cdap.cdap.api.data.format.StructuredRecord; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import java.io.IOException; /** * Hadoop output format for writing to Salesforce Marketing Cloud. 
 */
public class DataExtensionOutputFormat extends OutputFormat<NullWritable, StructuredRecord> {
  // Configuration keys used to hand sink settings from the batch sink to this
  // output format through the Hadoop Configuration.
  public static final String CLIENT_ID = "cdap.sfmc.client.id";
  public static final String CLIENT_SECRET = "cdap.sfmc.client.secret";
  public static final String AUTH_ENDPOINT = "cdap.sfmc.auth.endpoint";
  public static final String SOAP_ENDPOINT = "cdap.sfmc.soap.endpoint";
  public static final String DATA_EXTENSION_KEY = "cdap.sfmc.data.extension.key";
  public static final String MAX_BATCH_SIZE = "cdap.sfmc.max.batch.size";
  public static final String FAIL_ON_ERROR = "cdap.sfmc.fail.on.error";
  public static final String OPERATION = "cdap.sfmc.operation";
  public static final String TRUNCATE = "cdap.sfmc.truncate";

  /**
   * Creates the record writer that pushes records into a Salesforce Marketing
   * Cloud data extension. Every configuration key above is required; a missing
   * key fails fast via {@link #getOrError(Configuration, String)}.
   *
   * @throws IOException if the Salesforce Marketing Cloud client cannot be created
   */
  @Override
  public RecordWriter<NullWritable, StructuredRecord> getRecordWriter(TaskAttemptContext context) throws IOException {
    Configuration conf = context.getConfiguration();
    String clientId = getOrError(conf, CLIENT_ID);
    String clientSecret = getOrError(conf, CLIENT_SECRET);
    String authEndpoint = getOrError(conf, AUTH_ENDPOINT);
    String soapEndpoint = getOrError(conf, SOAP_ENDPOINT);
    String dataExtensionKey = getOrError(conf, DATA_EXTENSION_KEY);
    Operation operation = Operation.valueOf(getOrError(conf, OPERATION));
    int maxBatchSize = Integer.parseInt(getOrError(conf, MAX_BATCH_SIZE));
    boolean failOnError = Boolean.parseBoolean(getOrError(conf, FAIL_ON_ERROR));
    boolean shouldTruncate = Boolean.parseBoolean(getOrError(conf, TRUNCATE));
    try {
      DataExtensionClient client = DataExtensionClient.create(dataExtensionKey, clientId, clientSecret,
                                                              authEndpoint, soapEndpoint);
      RecordDataExtensionRowConverter converter =
        new RecordDataExtensionRowConverter(client.getDataExtensionInfo(), shouldTruncate);
      return new DataExtensionRecordWriter(client, converter, operation, maxBatchSize, failOnError);
    } catch (ETSdkException e) {
      throw new IOException("Unable to create Salesforce Marketing Cloud client.", e);
    }
  }

  @Override
  public void checkOutputSpecs(JobContext context) {
    // no-op
  }

  // Records are written to Salesforce directly by the record writer, so the
  // committer has nothing to set up, commit, or abort.
  @Override
  public OutputCommitter getOutputCommitter(TaskAttemptContext context) {
    return new OutputCommitter() {
      @Override
      public void setupJob(JobContext jobContext) {
        // no-op
      }

      @Override
      public void setupTask(TaskAttemptContext taskContext) {
        // no-op
      }

      @Override
      public boolean needsTaskCommit(TaskAttemptContext taskContext) {
        // Nothing is staged per task, so no commit phase is required.
        return false;
      }

      @Override
      public void commitTask(TaskAttemptContext taskContext) {
        // no-op
      }

      @Override
      public void abortTask(TaskAttemptContext taskContext) {
        // no-op
      }
    };
  }

  /**
   * Fetches a required configuration value.
   *
   * @throws IllegalStateException if the key is absent from the configuration
   */
  private String getOrError(Configuration conf, String key) {
    String val = conf.get(key);
    if (val == null) {
      throw new IllegalStateException("Missing required value for " + key);
    }
    return val;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.checkpoint; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.JobStatus; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.core.fs.FileSystem; import org.apache.flink.core.fs.Path; import org.apache.flink.core.io.SimpleVersionedSerializer; import org.apache.flink.metrics.groups.UnregisteredMetricsGroup; import org.apache.flink.runtime.checkpoint.CheckpointCoordinatorTestingUtils.CheckpointCoordinatorBuilder; import org.apache.flink.runtime.concurrent.ComponentMainThreadExecutorServiceAdapter; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.executiongraph.Execution; import org.apache.flink.runtime.executiongraph.ExecutionAttemptID; import org.apache.flink.runtime.executiongraph.ExecutionGraph; import org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils; import org.apache.flink.runtime.executiongraph.ExecutionJobVertex; import org.apache.flink.runtime.executiongraph.ExecutionVertex; import org.apache.flink.runtime.executiongraph.utils.SimpleAckingTaskManagerGateway; import org.apache.flink.runtime.jobgraph.JobVertexID; import 
org.apache.flink.runtime.jobgraph.OperatorID; import org.apache.flink.runtime.jobgraph.tasks.CheckpointCoordinatorConfiguration; import org.apache.flink.runtime.jobmaster.LogicalSlot; import org.apache.flink.runtime.jobmaster.TestingLogicalSlotBuilder; import org.apache.flink.runtime.messages.Acknowledge; import org.apache.flink.runtime.messages.checkpoint.AcknowledgeCheckpoint; import org.apache.flink.runtime.messages.checkpoint.DeclineCheckpoint; import org.apache.flink.runtime.rpc.exceptions.RpcException; import org.apache.flink.runtime.state.CheckpointMetadataOutputStream; import org.apache.flink.runtime.state.CheckpointStorage; import org.apache.flink.runtime.state.CheckpointStorageAccess; import org.apache.flink.runtime.state.CheckpointStorageLocation; import org.apache.flink.runtime.state.IncrementalRemoteKeyedStateHandle; import org.apache.flink.runtime.state.KeyGroupRange; import org.apache.flink.runtime.state.KeyGroupRangeAssignment; import org.apache.flink.runtime.state.KeyedStateHandle; import org.apache.flink.runtime.state.OperatorStateHandle; import org.apache.flink.runtime.state.OperatorStreamStateHandle; import org.apache.flink.runtime.state.PlaceholderStreamStateHandle; import org.apache.flink.runtime.state.SharedStateRegistry; import org.apache.flink.runtime.state.StateHandleID; import org.apache.flink.runtime.state.StreamStateHandle; import org.apache.flink.runtime.state.filesystem.FileStateHandle; import org.apache.flink.runtime.state.filesystem.FsStateBackend; import org.apache.flink.runtime.state.memory.ByteStreamStateHandle; import org.apache.flink.runtime.state.memory.MemoryBackendCheckpointStorageAccess; import org.apache.flink.runtime.state.memory.MemoryStateBackend; import org.apache.flink.runtime.state.memory.NonPersistentMetadataCheckpointStorageLocation; import org.apache.flink.runtime.state.storage.JobManagerCheckpointStorage; import org.apache.flink.runtime.state.testutils.TestCompletedCheckpointStorageLocation; import 
org.apache.flink.runtime.testutils.DirectScheduledExecutorService; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.TestLogger; import org.apache.flink.util.concurrent.FutureUtils; import org.apache.flink.util.concurrent.ManuallyTriggeredScheduledExecutor; import org.apache.flink.util.concurrent.ScheduledExecutor; import org.apache.flink.util.concurrent.ScheduledExecutorServiceAdapter; import org.apache.flink.util.function.TriFunctionWithException; import org.apache.flink.shaded.guava30.com.google.common.collect.Iterables; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.mockito.verification.VerificationMode; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Random; import java.util.Set; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import static org.apache.flink.runtime.checkpoint.CheckpointFailureReason.CHECKPOINT_ASYNC_EXCEPTION; import static org.apache.flink.runtime.checkpoint.CheckpointFailureReason.CHECKPOINT_DECLINED; import static org.apache.flink.runtime.checkpoint.CheckpointFailureReason.CHECKPOINT_EXPIRED; import static org.apache.flink.runtime.checkpoint.CheckpointFailureReason.IO_EXCEPTION; import 
static org.apache.flink.runtime.checkpoint.CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN; import static org.apache.flink.util.Preconditions.checkNotNull; import static org.apache.flink.util.Preconditions.checkState; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Matchers.any; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** Tests for the checkpoint coordinator. */ public class CheckpointCoordinatorTest extends TestLogger { @Test public void testAbortedCheckpointStatsUpdatedAfterFailure() throws Exception { testReportStatsAfterFailure( 1L, (coordinator, execution, metrics) -> { coordinator.reportStats(1L, execution.getAttemptId(), metrics); return null; }); } @Test public void testCheckpointStatsUpdatedAfterFailure() throws Exception { testReportStatsAfterFailure( 1L, (coordinator, execution, metrics) -> coordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( execution.getVertex().getJobId(), execution.getAttemptId(), 1L, metrics, new TaskStateSnapshot()), TASK_MANAGER_LOCATION_INFO)); } private void testReportStatsAfterFailure( long checkpointId, TriFunctionWithException< CheckpointCoordinator, Execution, CheckpointMetrics, ?, CheckpointException> reportFn) throws Exception { JobVertexID decliningVertexID = new JobVertexID(); JobVertexID lateReportVertexID = new JobVertexID(); ExecutionGraph executionGraph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() 
.addJobVertex(decliningVertexID) .addJobVertex(lateReportVertexID) .build(); ExecutionVertex decliningVertex = executionGraph.getJobVertex(decliningVertexID).getTaskVertices()[0]; ExecutionVertex lateReportVertex = executionGraph.getJobVertex(lateReportVertexID).getTaskVertices()[0]; CheckpointStatsTracker statsTracker = new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup()); CheckpointCoordinator coordinator = new CheckpointCoordinatorBuilder() .setExecutionGraph(executionGraph) .setTimer(manuallyTriggeredScheduledExecutor) .build(); coordinator.setCheckpointStatsTracker(statsTracker); CompletableFuture<CompletedCheckpoint> result = coordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); checkState( coordinator.getNumberOfPendingCheckpoints() == 1, "wrong number of pending checkpoints: %s", coordinator.getNumberOfPendingCheckpoints()); if (result.isDone()) { result.get(); } coordinator.receiveDeclineMessage( new DeclineCheckpoint( executionGraph.getJobID(), decliningVertex.getCurrentExecutionAttempt().getAttemptId(), checkpointId, new CheckpointException(CHECKPOINT_DECLINED)), "test"); CheckpointMetrics lateReportedMetrics = new CheckpointMetricsBuilder() .setTotalBytesPersisted(18) .setBytesProcessedDuringAlignment(19) .setAsyncDurationMillis(20) .setAlignmentDurationNanos(123 * 1_000_000) .setCheckpointStartDelayNanos(567 * 1_000_000) .build(); reportFn.apply( coordinator, lateReportVertex.getCurrentExecutionAttempt(), lateReportedMetrics); assertStatsEqual( checkpointId, lateReportVertex.getJobvertexId(), 0, lateReportedMetrics, statsTracker.createSnapshot().getHistory().getCheckpointById(checkpointId)); } private boolean hasNoSubState(OperatorState s) { return s.getNumberCollectedStates() == 0; } private void assertStatsEqual( long checkpointId, JobVertexID jobVertexID, int subtasIdx, CheckpointMetrics expected, AbstractCheckpointStats actual) { assertEquals(checkpointId, actual.getCheckpointId()); 
assertEquals(CheckpointStatsStatus.FAILED, actual.getStatus()); assertEquals(expected.getTotalBytesPersisted(), actual.getStateSize()); assertEquals(0, actual.getNumberOfAcknowledgedSubtasks()); SubtaskStateStats taskStats = actual.getAllTaskStateStats().stream() .filter(s -> s.getJobVertexId().equals(jobVertexID)) .findAny() .get() .getSubtaskStats()[subtasIdx]; assertEquals( expected.getAlignmentDurationNanos() / 1_000_000, taskStats.getAlignmentDuration()); assertEquals(expected.getUnalignedCheckpoint(), taskStats.getUnalignedCheckpoint()); assertEquals(expected.getAsyncDurationMillis(), taskStats.getAsyncCheckpointDuration()); assertEquals( expected.getAlignmentDurationNanos() / 1_000_000, taskStats.getAlignmentDuration()); assertEquals( expected.getCheckpointStartDelayNanos() / 1_000_000, taskStats.getCheckpointStartDelay()); } private static final String TASK_MANAGER_LOCATION_INFO = "Unknown location"; private ManuallyTriggeredScheduledExecutor manuallyTriggeredScheduledExecutor; @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); @Before public void setUp() throws Exception { manuallyTriggeredScheduledExecutor = new ManuallyTriggeredScheduledExecutor(); } @Test public void testScheduleTriggerRequestDuringShutdown() throws Exception { ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(); CheckpointCoordinator coordinator = getCheckpointCoordinator(new ScheduledExecutorServiceAdapter(executor)); coordinator.shutdown(); executor.shutdownNow(); coordinator.scheduleTriggerRequest(); // shouldn't fail } @Test public void testMinCheckpointPause() throws Exception { // will use a different thread to allow checkpoint triggering before exiting from // receiveAcknowledgeMessage ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); CheckpointCoordinator coordinator = null; try { int pause = 1000; JobVertexID jobVertexId = new JobVertexID(); ExecutionGraph graph = new 
CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexId) .setMainThreadExecutor( ComponentMainThreadExecutorServiceAdapter .forSingleThreadExecutor( new DirectScheduledExecutorService())) .build(); ExecutionVertex vertex = graph.getJobVertex(jobVertexId).getTaskVertices()[0]; ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); coordinator = new CheckpointCoordinatorBuilder() .setTimer(new ScheduledExecutorServiceAdapter(executorService)) .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setCheckpointInterval(pause) .setCheckpointTimeout(Long.MAX_VALUE) .setMaxConcurrentCheckpoints(1) .setMinPauseBetweenCheckpoints(pause) .build()) .setExecutionGraph(graph) .build(); coordinator.startCheckpointScheduler(); coordinator.triggerCheckpoint( true); // trigger, execute, and later complete by receiveAcknowledgeMessage coordinator.triggerCheckpoint( true); // enqueue and later see if it gets executed in the middle of // receiveAcknowledgeMessage while (coordinator.getNumberOfPendingCheckpoints() == 0) { // wait for at least 1 request to be fully processed Thread.sleep(10); } coordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptId, 1L), TASK_MANAGER_LOCATION_INFO); Thread.sleep(pause / 2); assertEquals(0, coordinator.getNumberOfPendingCheckpoints()); // make sure that the 2nd request is eventually processed while (coordinator.getNumberOfPendingCheckpoints() == 0) { Thread.sleep(1); } } finally { if (coordinator != null) { coordinator.shutdown(); } executorService.shutdownNow(); } } @Test public void testCheckpointAbortsIfTriggerTasksAreNotExecuted() throws Exception { // set up the coordinator and validate the initial state ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(new JobVertexID()) .addJobVertex(new JobVertexID(), false) .setTransitToRunning(false) .build(); 
CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph); // nothing should be happening assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); // trigger the first checkpoint. this should not succeed final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(checkpointFuture.isCompletedExceptionally()); // still, nothing should be happening assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); checkpointCoordinator.shutdown(); } @Test public void testCheckpointAbortsIfTriggerTasksAreFinished() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2, false) .build(); CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph); Arrays.stream(graph.getJobVertex(jobVertexID1).getTaskVertices()) .forEach(task -> task.getCurrentExecutionAttempt().markFinished()); // nothing should be happening assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); // trigger the first checkpoint. 
this should not succeed final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(checkpointFuture.isCompletedExceptionally()); // still, nothing should be happening assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); checkpointCoordinator.shutdown(); } @Test public void testCheckpointTriggeredAfterSomeTasksFinishedIfAllowed() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1, 3, 256) .addJobVertex(jobVertexID2, 3, 256) .build(); ExecutionJobVertex jobVertex1 = graph.getJobVertex(jobVertexID1); ExecutionJobVertex jobVertex2 = graph.getJobVertex(jobVertexID2); jobVertex1.getTaskVertices()[0].getCurrentExecutionAttempt().markFinished(); jobVertex1.getTaskVertices()[1].getCurrentExecutionAttempt().markFinished(); jobVertex2.getTaskVertices()[1].getCurrentExecutionAttempt().markFinished(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setExecutionGraph(graph) .setTimer(manuallyTriggeredScheduledExecutor) .setAllowCheckpointsAfterTasksFinished(true) .build(); CheckpointStatsTracker statsTracker = new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup()); checkpointCoordinator.setCheckpointStatsTracker(statsTracker); // nothing should be happening assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); // trigger the first checkpoint. 
this will not fail because we allow checkpointing even with // finished tasks final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isDone()); assertFalse(checkpointFuture.isCompletedExceptionally()); // Triggering should succeed assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); PendingCheckpoint pendingCheckpoint = checkpointCoordinator.getPendingCheckpoints().values().iterator().next(); AbstractCheckpointStats checkpointStats = statsTracker .createSnapshot() .getHistory() .getCheckpointById(pendingCheckpoint.getCheckpointID()); assertEquals(3, checkpointStats.getNumberOfAcknowledgedSubtasks()); for (ExecutionVertex task : Arrays.asList( jobVertex1.getTaskVertices()[0], jobVertex1.getTaskVertices()[1], jobVertex2.getTaskVertices()[1])) { // those tasks that are already finished are automatically marked as acknowledged assertNotNull( checkpointStats.getTaskStateStats(task.getJobvertexId()) .getSubtaskStats()[task.getParallelSubtaskIndex()]); } } @Test public void testTasksFinishDuringTriggering() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .setTransitToRunning(false) .addJobVertex(jobVertexID1, 1, 256) .addJobVertex(jobVertexID2, 1, 256) .build(); ExecutionJobVertex jobVertex1 = graph.getJobVertex(jobVertexID1); ExecutionVertex taskVertex = jobVertex1.getTaskVertices()[0]; ExecutionJobVertex jobVertex2 = graph.getJobVertex(jobVertexID2); ExecutionVertex taskVertex2 = jobVertex2.getTaskVertices()[0]; AtomicBoolean checkpointAborted = new AtomicBoolean(false); LogicalSlot slot1 = new TestingLogicalSlotBuilder() .setTaskManagerGateway( new SimpleAckingTaskManagerGateway() { @Override public CompletableFuture<Acknowledge> triggerCheckpoint( 
ExecutionAttemptID executionAttemptID,
                                            JobID jobId,
                                            long checkpointId,
                                            long timestamp,
                                            CheckpointOptions checkpointOptions) {
                                        // simulate the task finishing exactly while being triggered
                                        taskVertex.getCurrentExecutionAttempt().markFinished();
                                        return FutureUtils.completedExceptionally(
                                                new RpcException(""));
                                    }
                                })
                        .createTestingLogicalSlot();
        // slot2's gateway only records that the abort notification arrived
        LogicalSlot slot2 =
                new TestingLogicalSlotBuilder()
                        .setTaskManagerGateway(
                                new SimpleAckingTaskManagerGateway() {
                                    @Override
                                    public void notifyCheckpointAborted(
                                            ExecutionAttemptID executionAttemptID,
                                            JobID jobId,
                                            long checkpointId,
                                            long latestCompletedCheckpointId,
                                            long timestamp) {
                                        checkpointAborted.set(true);
                                    }
                                })
                        .createTestingLogicalSlot();
        // assign slots and move both tasks to RUNNING by hand (setTransitToRunning(false) above)
        ExecutionGraphTestUtils.setVertexResource(taskVertex, slot1);
        taskVertex.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING);
        ExecutionGraphTestUtils.setVertexResource(taskVertex2, slot2);
        taskVertex2.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING);

        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setExecutionGraph(graph)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .setAllowCheckpointsAfterTasksFinished(true)
                        .build();

        // nothing should be happening
        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

        // trigger the first checkpoint. this will not fail because we allow checkpointing even with
        // finished tasks
        final CompletableFuture<CompletedCheckpoint> checkpointFuture =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        // the failed trigger RPC aborts the checkpoint, and the surviving task is notified
        assertTrue(checkpointFuture.isCompletedExceptionally());
        assertTrue(checkpointAborted.get());
    }

    /**
     * Verifies that when a decline pushes the failure counter over the tolerance (tolerance 0
     * here), the {@link CheckpointFailureManager} throws, and the exception carries its message.
     */
    @Test
    public void testTriggerAndDeclineCheckpointThenFailureManagerThrowsException()
            throws Exception {
        JobVertexID jobVertexID1 = new JobVertexID();
        JobVertexID jobVertexID2 = new JobVertexID();
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .addJobVertex(jobVertexID2)
                        .build();
        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

        final ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
        final ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

        final String errorMsg = "Exceeded checkpoint failure tolerance number!";
        CheckpointFailureManager checkpointFailureManager = getCheckpointFailureManager(errorMsg);

        // set up the coordinator
        CheckpointCoordinator checkpointCoordinator =
                getCheckpointCoordinator(graph, checkpointFailureManager);

        try {
            // trigger the checkpoint. this should succeed
            final CompletableFuture<CompletedCheckpoint> checkPointFuture =
                    checkpointCoordinator.triggerCheckpoint(false);
            manuallyTriggeredScheduledExecutor.triggerAll();
            FutureUtils.throwIfCompletedExceptionally(checkPointFuture);

            long checkpointId =
                    checkpointCoordinator
                            .getPendingCheckpoints()
                            .entrySet()
                            .iterator()
                            .next()
                            .getKey();
            PendingCheckpoint checkpoint =
                    checkpointCoordinator.getPendingCheckpoints().get(checkpointId);

            // acknowledge from one of the tasks
            checkpointCoordinator.receiveAcknowledgeMessage(
                    new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId),
                    TASK_MANAGER_LOCATION_INFO);
            assertFalse(checkpoint.isDisposed());
            assertFalse(checkpoint.areTasksFullyAcknowledged());

            // decline checkpoint from the other task
            checkpointCoordinator.receiveDeclineMessage(
                    new DeclineCheckpoint(
                            graph.getJobID(),
                            attemptID1,
                            checkpointId,
                            new CheckpointException(CHECKPOINT_DECLINED)),
                    TASK_MANAGER_LOCATION_INFO);

            // the decline must escalate through the failure manager before we get here
            fail("Test failed.");
        } catch (Exception e) {
            ExceptionUtils.assertThrowableWithMessage(e, errorMsg);
        } finally {
            checkpointCoordinator.shutdown();
        }
    }

    /**
     * Verifies that aborting pending checkpoints with {@code IO_EXCEPTION} counts against the
     * failure tolerance and makes the failure manager throw with the configured message.
     */
    @Test
    public void testIOExceptionCheckpointExceedsTolerableFailureNumber() throws Exception {
        // create some mock Execution vertices that receive the checkpoint trigger messages
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(new JobVertexID())
                        .addJobVertex(new JobVertexID())
                        .build();

        final String expectedErrorMessage = "Expected Error Message";
        CheckpointFailureManager checkpointFailureManager =
                getCheckpointFailureManager(expectedErrorMessage);
        CheckpointCoordinator checkpointCoordinator =
                getCheckpointCoordinator(graph, checkpointFailureManager);

        try {
            checkpointCoordinator.triggerCheckpoint(false);
            manuallyTriggeredScheduledExecutor.triggerAll();
            checkpointCoordinator.abortPendingCheckpoints(new CheckpointException(IO_EXCEPTION));

            fail("Test failed.");
        } catch (Exception e) {
ExceptionUtils.assertThrowableWithMessage(e, expectedErrorMessage);
        } finally {
            checkpointCoordinator.shutdown();
        }
    }

    /**
     * Verifies that a periodic checkpoint triggered while the checkpoint storage throws
     * {@link java.io.IOException} fails with failure reason {@code IO_EXCEPTION}.
     */
    @Test
    public void testIOExceptionForPeriodicSchedulingWithInactiveTasks() throws Exception {
        CheckpointCoordinator checkpointCoordinator =
                setupCheckpointCoordinatorWithInactiveTasks(new IOExceptionCheckpointStorage());

        final CompletableFuture<CompletedCheckpoint> onCompletionPromise =
                checkpointCoordinator.triggerCheckpoint(
                        CheckpointProperties.forCheckpoint(
                                CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION),
                        null,
                        true);
        manuallyTriggeredScheduledExecutor.triggerAll();
        try {
            onCompletionPromise.get();
            fail("should not trigger periodic checkpoint after IOException occurred.");
        } catch (Exception e) {
            // accept only a CheckpointException with reason IO_EXCEPTION; rethrow anything else
            final Optional<CheckpointException> checkpointExceptionOptional =
                    ExceptionUtils.findThrowable(e, CheckpointException.class);
            if (!checkpointExceptionOptional.isPresent()
                    || checkpointExceptionOptional.get().getCheckpointFailureReason()
                            != IO_EXCEPTION) {
                throw e;
            }
        }
    }

    /** Tests that do not trigger checkpoint when IOException occurred. */
    @Test
    public void testTriggerCheckpointAfterIOException() throws Exception {
        // given: Checkpoint coordinator which fails on initializeLocationForCheckpoint.
        TestFailJobCallback failureCallback = new TestFailJobCallback();
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setFailureManager(new CheckpointFailureManager(0, failureCallback))
                        .setCheckpointStorage(new IOExceptionCheckpointStorage())
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();
        // when: The checkpoint is triggered.
        testTriggerCheckpoint(checkpointCoordinator, IO_EXCEPTION);

        // then: Failure manager should fail the job.
        assertEquals(1, failureCallback.getInvokeCounter());
    }

    /**
     * Verifies that triggering aborts cleanly when the tasks of the first vertex are already
     * FINISHED (and finished tasks are NOT allowed) combined with an IOException-throwing storage:
     * the trigger future completes exceptionally and no checkpoint is left behind.
     */
    @Test
    public void testCheckpointAbortsIfTriggerTasksAreFinishedAndIOException() throws Exception {
        JobVertexID jobVertexID1 = new JobVertexID();
        JobVertexID jobVertexID2 = new JobVertexID();

        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .addJobVertex(jobVertexID2, false)
                        .build();

        // set up the coordinator
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setExecutionGraph(graph)
                        .setCheckpointStorage(new IOExceptionCheckpointStorage())
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();
        // finish every subtask of the first (trigger) vertex
        Arrays.stream(graph.getJobVertex(jobVertexID1).getTaskVertices())
                .forEach(task -> task.getCurrentExecutionAttempt().markFinished());

        // nothing should be happening
        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

        checkpointCoordinator.startCheckpointScheduler();
        // trigger the first checkpoint. this should not succeed
        final CompletableFuture<CompletedCheckpoint> checkpointFuture =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        assertTrue(checkpointFuture.isCompletedExceptionally());

        // still, nothing should be happening
        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

        checkpointCoordinator.shutdown();
    }

    /**
     * Verifies that aborting pending checkpoints with {@code CHECKPOINT_EXPIRED} exceeds the
     * failure tolerance and surfaces the failure manager's error message.
     */
    @Test
    public void testExpiredCheckpointExceedsTolerableFailureNumber() throws Exception {
        // create some mock Execution vertices that receive the checkpoint trigger messages
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(new JobVertexID())
                        .addJobVertex(new JobVertexID())
                        .build();

        final String errorMsg = "Exceeded checkpoint failure tolerance number!";
        CheckpointFailureManager checkpointFailureManager = getCheckpointFailureManager(errorMsg);
        CheckpointCoordinator checkpointCoordinator =
                getCheckpointCoordinator(graph, checkpointFailureManager);

        try {
            checkpointCoordinator.triggerCheckpoint(false);
            manuallyTriggeredScheduledExecutor.triggerAll();
            checkpointCoordinator.abortPendingCheckpoints(
                    new CheckpointException(CHECKPOINT_EXPIRED));

            fail("Test failed.");
        } catch (Exception e) {
            ExceptionUtils.assertThrowableWithMessage(e, errorMsg);
        } finally {
            checkpointCoordinator.shutdown();
        }
    }

    /** Synchronous decline variant of {@link #testTriggerAndDeclineCheckpointSimple}. */
    @Test
    public void testTriggerAndDeclineSyncCheckpointFailureSimple() throws Exception {
        testTriggerAndDeclineCheckpointSimple(CHECKPOINT_DECLINED);
    }

    /** Asynchronous-exception decline variant of {@link #testTriggerAndDeclineCheckpointSimple}. */
    @Test
    public void testTriggerAndDeclineAsyncCheckpointFailureSimple() throws Exception {
        testTriggerAndDeclineCheckpointSimple(CHECKPOINT_ASYNC_EXCEPTION);
    }

    /**
     * This test triggers a checkpoint and then sends a decline checkpoint message from one of the
     * tasks. The expected behaviour is that said checkpoint is discarded and a new checkpoint is
     * triggered.
*/
    private void testTriggerAndDeclineCheckpointSimple(
            CheckpointFailureReason checkpointFailureReason) throws Exception {
        final CheckpointException checkpointException =
                new CheckpointException(checkpointFailureReason);

        JobVertexID jobVertexID1 = new JobVertexID();
        JobVertexID jobVertexID2 = new JobVertexID();

        // gateway records trigger/abort/complete notifications per execution attempt
        CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
                new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .addJobVertex(jobVertexID2)
                        .setTaskManagerGateway(gateway)
                        .build();

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

        ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
        ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

        TestFailJobCallback failJobCallback = new TestFailJobCallback();
        // set up the coordinator and validate the initial state
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setExecutionGraph(graph)
                        .setCheckpointCoordinatorConfiguration(
                                CheckpointCoordinatorConfiguration.builder()
                                        .setAlignedCheckpointTimeout(Long.MAX_VALUE)
                                        .setMaxConcurrentCheckpoints(Integer.MAX_VALUE)
                                        .build())
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .setCheckpointFailureManager(
                                new CheckpointFailureManager(0, failJobCallback))
                        .build();

        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

        // trigger the first checkpoint. this should succeed
        final CompletableFuture<CompletedCheckpoint> checkpointFuture =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture);

        // validate that we have a pending checkpoint
        assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        // we have one task scheduled that will cancel after timeout
        assertEquals(1, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

        long checkpointId =
                checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
        PendingCheckpoint checkpoint =
                checkpointCoordinator.getPendingCheckpoints().get(checkpointId);

        assertNotNull(checkpoint);
        assertEquals(checkpointId, checkpoint.getCheckpointId());
        assertEquals(graph.getJobID(), checkpoint.getJobId());
        assertEquals(2, checkpoint.getNumberOfNonAcknowledgedTasks());
        assertEquals(0, checkpoint.getNumberOfAcknowledgedTasks());
        assertEquals(0, checkpoint.getOperatorStates().size());
        assertFalse(checkpoint.isDisposed());
        assertFalse(checkpoint.areTasksFullyAcknowledged());

        // check that the vertices received the trigger checkpoint message
        for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
            CheckpointCoordinatorTestingUtils.TriggeredCheckpoint triggeredCheckpoint =
                    gateway.getOnlyTriggeredCheckpoint(
                            vertex.getCurrentExecutionAttempt().getAttemptId());
            assertEquals(checkpointId, triggeredCheckpoint.checkpointId);
            assertEquals(checkpoint.getCheckpointTimestamp(), triggeredCheckpoint.timestamp);
            assertEquals(
                    CheckpointOptions.forCheckpointWithDefaultLocation(),
                    triggeredCheckpoint.checkpointOptions);
        }

        // acknowledge from one of the tasks
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId),
                "Unknown location");
        assertEquals(1, checkpoint.getNumberOfAcknowledgedTasks());
        assertEquals(1, checkpoint.getNumberOfNonAcknowledgedTasks());
        assertFalse(checkpoint.isDisposed());
        assertFalse(checkpoint.areTasksFullyAcknowledged());

        // acknowledge the same task again (should not matter)
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId),
                "Unknown location");
        assertFalse(checkpoint.isDisposed());
        assertFalse(checkpoint.areTasksFullyAcknowledged());

        // decline checkpoint from the other task, this should cancel the checkpoint
        // and trigger a new one
        checkpointCoordinator.receiveDeclineMessage(
                new DeclineCheckpoint(
                        graph.getJobID(), attemptID1, checkpointId, checkpointException),
                TASK_MANAGER_LOCATION_INFO);
        assertTrue(checkpoint.isDisposed());

        // the canceler is also removed
        assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

        // validate that we have no new pending checkpoint
        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

        // decline again, nothing should happen
        // decline from the other task, nothing should happen
        checkpointCoordinator.receiveDeclineMessage(
                new DeclineCheckpoint(
                        graph.getJobID(), attemptID1, checkpointId, checkpointException),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveDeclineMessage(
                new DeclineCheckpoint(
                        graph.getJobID(), attemptID2, checkpointId, checkpointException),
                TASK_MANAGER_LOCATION_INFO);
        assertTrue(checkpoint.isDisposed());
        // failure-tolerance is 0, so exactly one job-fail callback is expected
        assertEquals(1, failJobCallback.getInvokeCounter());

        checkpointCoordinator.shutdown();
    }

    /**
     * This test triggers two checkpoints and then sends a decline message from one of the tasks for
     * the first checkpoint. This should discard the first checkpoint while not triggering a new
     * checkpoint because a later checkpoint is already in progress.
*/
    @Test
    public void testTriggerAndDeclineCheckpointComplex() throws Exception {
        JobVertexID jobVertexID1 = new JobVertexID();
        JobVertexID jobVertexID2 = new JobVertexID();

        // gateway records trigger/abort notifications per execution attempt
        CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
                new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .addJobVertex(jobVertexID2)
                        .setTaskManagerGateway(gateway)
                        .build();

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

        ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
        ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

        CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph);

        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

        // trigger the first checkpoint. this should succeed
        final CompletableFuture<CompletedCheckpoint> checkpointFuture1 =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture1);

        // trigger second checkpoint, should also succeed
        final CompletableFuture<CompletedCheckpoint> checkpointFuture2 =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture2);

        // validate that we have a pending checkpoint
        assertEquals(2, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        // one timeout-canceler task per pending checkpoint
        assertEquals(2, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

        Iterator<Map.Entry<Long, PendingCheckpoint>> it =
                checkpointCoordinator.getPendingCheckpoints().entrySet().iterator();
        long checkpoint1Id = it.next().getKey();
        long checkpoint2Id = it.next().getKey();
        PendingCheckpoint checkpoint1 =
                checkpointCoordinator.getPendingCheckpoints().get(checkpoint1Id);
        PendingCheckpoint checkpoint2 =
                checkpointCoordinator.getPendingCheckpoints().get(checkpoint2Id);

        assertNotNull(checkpoint1);
        assertEquals(checkpoint1Id, checkpoint1.getCheckpointId());
        assertEquals(graph.getJobID(), checkpoint1.getJobId());
        assertEquals(2, checkpoint1.getNumberOfNonAcknowledgedTasks());
        assertEquals(0, checkpoint1.getNumberOfAcknowledgedTasks());
        assertEquals(0, checkpoint1.getOperatorStates().size());
        assertFalse(checkpoint1.isDisposed());
        assertFalse(checkpoint1.areTasksFullyAcknowledged());

        assertNotNull(checkpoint2);
        assertEquals(checkpoint2Id, checkpoint2.getCheckpointId());
        assertEquals(graph.getJobID(), checkpoint2.getJobId());
        assertEquals(2, checkpoint2.getNumberOfNonAcknowledgedTasks());
        assertEquals(0, checkpoint2.getNumberOfAcknowledgedTasks());
        assertEquals(0, checkpoint2.getOperatorStates().size());
        assertFalse(checkpoint2.isDisposed());
        assertFalse(checkpoint2.areTasksFullyAcknowledged());

        // check that the vertices received the trigger checkpoint message
        for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
            List<CheckpointCoordinatorTestingUtils.TriggeredCheckpoint> triggeredCheckpoints =
                    gateway.getTriggeredCheckpoints(
                            vertex.getCurrentExecutionAttempt().getAttemptId());

            assertEquals(2, triggeredCheckpoints.size());
            assertEquals(checkpoint1Id, triggeredCheckpoints.get(0).checkpointId);
            assertEquals(checkpoint2Id, triggeredCheckpoints.get(1).checkpointId);
        }

        // decline checkpoint from one of the tasks, this should cancel the checkpoint
        checkpointCoordinator.receiveDeclineMessage(
                new DeclineCheckpoint(
                        graph.getJobID(),
                        attemptID1,
                        checkpoint1Id,
                        new CheckpointException(CHECKPOINT_DECLINED)),
                TASK_MANAGER_LOCATION_INFO);
        // both tasks must have been told about the abort of checkpoint 1
        for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
            assertEquals(
                    checkpoint1Id,
                    gateway.getOnlyNotifiedAbortedCheckpoint(
                                    vertex.getCurrentExecutionAttempt().getAttemptId())
                            .checkpointId);
        }

        assertTrue(checkpoint1.isDisposed());

        // validate that we have only one pending checkpoint left
        assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        assertEquals(1, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

        // validate that it is the same second checkpoint from earlier
        long checkpointIdNew =
                checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
        PendingCheckpoint checkpointNew =
                checkpointCoordinator.getPendingCheckpoints().get(checkpointIdNew);
        assertEquals(checkpoint2Id, checkpointIdNew);

        assertNotNull(checkpointNew);
        assertEquals(checkpointIdNew, checkpointNew.getCheckpointId());
        assertEquals(graph.getJobID(), checkpointNew.getJobId());
        assertEquals(2, checkpointNew.getNumberOfNonAcknowledgedTasks());
        assertEquals(0, checkpointNew.getNumberOfAcknowledgedTasks());
        assertEquals(0, checkpointNew.getOperatorStates().size());
        assertFalse(checkpointNew.isDisposed());
        assertFalse(checkpointNew.areTasksFullyAcknowledged());
        assertNotEquals(checkpoint1.getCheckpointId(), checkpointNew.getCheckpointId());

        // decline again, nothing should happen
        // decline from the other task, nothing should happen
        checkpointCoordinator.receiveDeclineMessage(
                new DeclineCheckpoint(
                        graph.getJobID(),
                        attemptID1,
                        checkpoint1Id,
                        new CheckpointException(CHECKPOINT_DECLINED)),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveDeclineMessage(
                new DeclineCheckpoint(
                        graph.getJobID(),
                        attemptID2,
                        checkpoint1Id,
                        new CheckpointException(CHECKPOINT_DECLINED)),
                TASK_MANAGER_LOCATION_INFO);
        assertTrue(checkpoint1.isDisposed());

        // will not notify abort message again
        for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
            assertEquals(
                    1,
                    gateway.getNotifiedAbortedCheckpoints(
                                    vertex.getCurrentExecutionAttempt().getAttemptId())
                            .size());
        }

        checkpointCoordinator.shutdown();
    }

    /**
     * Full happy-path round trip: trigger, acknowledge from all tasks (with Mockito-mocked
     * subtask state), completion + shared-state registration, then a second checkpoint that
     * subsumes the first in the retained-checkpoint store.
     */
    @Test
    public void testTriggerAndConfirmSimpleCheckpoint() throws Exception {
        JobVertexID jobVertexID1 = new JobVertexID();
        JobVertexID jobVertexID2 = new JobVertexID();

        CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
                new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .addJobVertex(jobVertexID2)
                        .setTaskManagerGateway(gateway)
                        .build();

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

        ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
        ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

        CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph);

        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

        // trigger the first checkpoint. this should succeed
        final CompletableFuture<CompletedCheckpoint> checkpointFuture =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture);

        // validate that we have a pending checkpoint
        assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        assertEquals(1, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

        long checkpointId =
                checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
        PendingCheckpoint checkpoint =
                checkpointCoordinator.getPendingCheckpoints().get(checkpointId);

        assertNotNull(checkpoint);
        assertEquals(checkpointId, checkpoint.getCheckpointId());
        assertEquals(graph.getJobID(), checkpoint.getJobId());
        assertEquals(2, checkpoint.getNumberOfNonAcknowledgedTasks());
        assertEquals(0, checkpoint.getNumberOfAcknowledgedTasks());
        assertEquals(0, checkpoint.getOperatorStates().size());
        assertFalse(checkpoint.isDisposed());
        assertFalse(checkpoint.areTasksFullyAcknowledged());

        // check that the vertices received the trigger checkpoint message
        for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
            ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
            assertEquals(checkpointId, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId);
        }

        OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID();
        OperatorID opID2 = vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID();

        // Mockito mocks stand in for the tasks' reported state so that shared-state
        // registration can be verified below
        TaskStateSnapshot taskOperatorSubtaskStates1 = mock(TaskStateSnapshot.class);
        TaskStateSnapshot taskOperatorSubtaskStates2 = mock(TaskStateSnapshot.class);
        OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class);
        OperatorSubtaskState subtaskState2 = mock(OperatorSubtaskState.class);
        when(taskOperatorSubtaskStates1.getSubtaskStateByOperatorID(opID1))
                .thenReturn(subtaskState1);
        when(taskOperatorSubtaskStates2.getSubtaskStateByOperatorID(opID2))
                .thenReturn(subtaskState2);

        // acknowledge from one of the tasks
        AcknowledgeCheckpoint acknowledgeCheckpoint1 =
                new AcknowledgeCheckpoint(
                        graph.getJobID(),
                        attemptID2,
                        checkpointId,
                        new CheckpointMetrics(),
                        taskOperatorSubtaskStates2);
        checkpointCoordinator.receiveAcknowledgeMessage(
                acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO);
        assertEquals(1, checkpoint.getNumberOfAcknowledgedTasks());
        assertEquals(1, checkpoint.getNumberOfNonAcknowledgedTasks());
        assertFalse(checkpoint.isDisposed());
        assertFalse(checkpoint.areTasksFullyAcknowledged());
        // shared state must not be registered before the checkpoint completes
        verify(taskOperatorSubtaskStates2, never())
                .registerSharedStates(any(SharedStateRegistry.class));

        // acknowledge the same task again (should not matter)
        checkpointCoordinator.receiveAcknowledgeMessage(
                acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO);
        assertFalse(checkpoint.isDisposed());
        assertFalse(checkpoint.areTasksFullyAcknowledged());
        verify(subtaskState2, never()).registerSharedStates(any(SharedStateRegistry.class));

        // acknowledge the other task.
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(
                        graph.getJobID(),
                        attemptID1,
                        checkpointId,
                        new CheckpointMetrics(),
                        taskOperatorSubtaskStates1),
                TASK_MANAGER_LOCATION_INFO);

        // the checkpoint is internally converted to a successful checkpoint and the
        // pending checkpoint object is disposed
        assertTrue(checkpoint.isDisposed());

        // the now we should have a completed checkpoint
        assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());

        // the canceler should be removed now
        assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

        // validate that the subtasks states have registered their shared states.
        {
            verify(subtaskState1, times(1)).registerSharedStates(any(SharedStateRegistry.class));
            verify(subtaskState2, times(1)).registerSharedStates(any(SharedStateRegistry.class));
        }

        // validate that the relevant tasks got a confirmation message
        for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
            ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
            assertEquals(
                    checkpointId,
                    gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId);
        }

        CompletedCheckpoint success = checkpointCoordinator.getSuccessfulCheckpoints().get(0);
        assertEquals(graph.getJobID(), success.getJobId());
        assertEquals(checkpoint.getCheckpointId(), success.getCheckpointID());
        assertEquals(2, success.getOperatorStates().size());

        // ---------------
        // trigger another checkpoint and see that this one replaces the other checkpoint
        // ---------------
        gateway.resetCount();
        checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();

        long checkpointIdNew =
                checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
        // this time both tasks acknowledge without any reported state
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointIdNew),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointIdNew),
                TASK_MANAGER_LOCATION_INFO);

        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
        // the store retains only the newest checkpoint here — the first one was subsumed
        assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

        CompletedCheckpoint successNew = checkpointCoordinator.getSuccessfulCheckpoints().get(0);
        assertEquals(graph.getJobID(), successNew.getJobId());
        assertEquals(checkpointIdNew, successNew.getCheckpointID());
        assertEquals(2, successNew.getOperatorStates().size());
        assertTrue(successNew.getOperatorStates().values().stream().allMatch(this::hasNoSubState));

        // validate that the relevant tasks got a confirmation message
        for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
            ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
            assertEquals(
                    checkpointIdNew, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId);
            assertEquals(
                    checkpointIdNew,
                    gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId);
        }

        checkpointCoordinator.shutdown();
    }

    /**
     * Verifies two checkpoints progressing concurrently (vertex 3 is a non-trigger vertex):
     * interleaved acknowledgements must complete both checkpoints independently and both must be
     * retained by the store (capacity 2).
     */
    @Test
    public void testMultipleConcurrentCheckpoints() throws Exception {
        JobVertexID jobVertexID1 = new JobVertexID();
        JobVertexID jobVertexID2 = new JobVertexID();
        JobVertexID jobVertexID3 = new JobVertexID();

        CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
                new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .addJobVertex(jobVertexID2)
                        .addJobVertex(jobVertexID3, false)
                        .setTaskManagerGateway(gateway)
                        .build();

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];
        ExecutionVertex
vertex3 = graph.getJobVertex(jobVertexID3).getTaskVertices()[0];

        ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
        ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();
        ExecutionAttemptID attemptID3 = vertex3.getCurrentExecutionAttempt().getAttemptId();

        // set up the coordinator and validate the initial state
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setExecutionGraph(graph)
                        .setCheckpointCoordinatorConfiguration(
                                CheckpointCoordinatorConfiguration.builder()
                                        .setMaxConcurrentCheckpoints(Integer.MAX_VALUE)
                                        .build())
                        .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2))
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();

        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

        // trigger the first checkpoint. this should succeed
        final CompletableFuture<CompletedCheckpoint> checkpointFuture1 =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture1);

        assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

        PendingCheckpoint pending1 =
                checkpointCoordinator.getPendingCheckpoints().values().iterator().next();
        long checkpointId1 = pending1.getCheckpointId();

        // trigger messages should have been sent
        // (only vertex1 and vertex2 are trigger vertices; vertex3 was added with 'false')
        for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
            ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
            assertEquals(checkpointId1, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId);
        }

        // acknowledge one of the three tasks
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId1),
                TASK_MANAGER_LOCATION_INFO);

        // start the second checkpoint
        // trigger the first checkpoint. this should succeed
        gateway.resetCount();
        final CompletableFuture<CompletedCheckpoint> checkpointFuture2 =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture2);

        assertEquals(2, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

        // identify the newly created pending checkpoint (the one that is not pending1)
        PendingCheckpoint pending2;
        {
            Iterator<PendingCheckpoint> all =
                    checkpointCoordinator.getPendingCheckpoints().values().iterator();
            PendingCheckpoint cc1 = all.next();
            PendingCheckpoint cc2 = all.next();
            pending2 = pending1 == cc1 ? cc2 : cc1;
        }
        long checkpointId2 = pending2.getCheckpointId();

        // trigger messages should have been sent
        for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
            ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
            assertEquals(checkpointId2, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId);
        }

        // we acknowledge the remaining two tasks from the first
        // checkpoint and two tasks from the second checkpoint
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID3, checkpointId1),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId2),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId1),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId2),
                TASK_MANAGER_LOCATION_INFO);

        // now, the first checkpoint should be confirmed
        assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        assertTrue(pending1.isDisposed());

        // the first confirm message should be out
        for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2, vertex3)) {
            ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
            assertEquals(
                    checkpointId1,
                    gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId);
        }

        // send the last remaining ack for the second checkpoint
        gateway.resetCount();
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID3, checkpointId2),
                TASK_MANAGER_LOCATION_INFO);

        // now, the second checkpoint should be confirmed
        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(2, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        assertTrue(pending2.isDisposed());

        // the second commit message should be out
        for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2, vertex3)) {
            ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
            assertEquals(
                    checkpointId2,
                    gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId);
        }

        // validate the committed checkpoints
        List<CompletedCheckpoint> scs = checkpointCoordinator.getSuccessfulCheckpoints();
        CompletedCheckpoint sc1 = scs.get(0);
        assertEquals(checkpointId1, sc1.getCheckpointID());
        assertEquals(graph.getJobID(), sc1.getJobId());
        assertEquals(3, sc1.getOperatorStates().size());
        assertTrue(sc1.getOperatorStates().values().stream().allMatch(this::hasNoSubState));

        CompletedCheckpoint sc2 = scs.get(1);
        assertEquals(checkpointId2, sc2.getCheckpointID());
        assertEquals(graph.getJobID(), sc2.getJobId());
        assertEquals(3, sc2.getOperatorStates().size());
        assertTrue(sc2.getOperatorStates().values().stream().allMatch(this::hasNoSubState));

        checkpointCoordinator.shutdown();
    }

    /**
     * Three-vertex setup mirroring {@link #testMultipleConcurrentCheckpoints}; the body of this
     * test continues beyond this excerpt.
     */
    @Test
    public void testSuccessfulCheckpointSubsumesUnsuccessful() throws Exception {
        JobVertexID jobVertexID1 = new JobVertexID();
        JobVertexID jobVertexID2 = new JobVertexID();
        JobVertexID jobVertexID3 = new JobVertexID();

        CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
                new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .addJobVertex(jobVertexID2)
                        .addJobVertex(jobVertexID3, false)
                        .setTaskManagerGateway(gateway)
                        .build();

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];
        ExecutionVertex vertex3 = graph.getJobVertex(jobVertexID3).getTaskVertices()[0];

        ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
        ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();
        ExecutionAttemptID attemptID3 = vertex3.getCurrentExecutionAttempt().getAttemptId();

        // set up the coordinator and validate the initial state
        final StandaloneCompletedCheckpointStore completedCheckpointStore =
                new StandaloneCompletedCheckpointStore(10);
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setExecutionGraph(graph)
                        .setCheckpointCoordinatorConfiguration(
                                CheckpointCoordinatorConfiguration.builder()
                                        .setMaxConcurrentCheckpoints(Integer.MAX_VALUE)
                                        .build())
                        .setCompletedCheckpointStore(completedCheckpointStore)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();

        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

        // trigger the first checkpoint.
this should succeed final CompletableFuture<CompletedCheckpoint> checkpointFuture1 = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture1); assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending1 = checkpointCoordinator.getPendingCheckpoints().values().iterator().next(); long checkpointId1 = pending1.getCheckpointId(); // trigger messages should have been sent for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals(checkpointId1, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId); } OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID2 = vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID3 = vertex3.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates11 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates12 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates13 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState11 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState12 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState13 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates11.putSubtaskStateByOperatorID(opID1, subtaskState11); taskOperatorSubtaskStates12.putSubtaskStateByOperatorID(opID2, subtaskState12); taskOperatorSubtaskStates13.putSubtaskStateByOperatorID(opID3, subtaskState13); // acknowledge one of the three tasks checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId1, new CheckpointMetrics(), 
taskOperatorSubtaskStates12), TASK_MANAGER_LOCATION_INFO); // start the second checkpoint gateway.resetCount(); final CompletableFuture<CompletedCheckpoint> checkpointFuture2 = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture2); assertEquals(2, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending2; { Iterator<PendingCheckpoint> all = checkpointCoordinator.getPendingCheckpoints().values().iterator(); PendingCheckpoint cc1 = all.next(); PendingCheckpoint cc2 = all.next(); pending2 = pending1 == cc1 ? cc2 : cc1; } long checkpointId2 = pending2.getCheckpointId(); TaskStateSnapshot taskOperatorSubtaskStates21 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates22 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates23 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState21 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState22 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState23 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates21.putSubtaskStateByOperatorID(opID1, subtaskState21); taskOperatorSubtaskStates22.putSubtaskStateByOperatorID(opID2, subtaskState22); taskOperatorSubtaskStates23.putSubtaskStateByOperatorID(opID3, subtaskState23); // trigger messages should have been sent for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals(checkpointId2, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId); } // we acknowledge one more task from the first checkpoint and the second // checkpoint completely. 
The second checkpoint should then subsume the first checkpoint checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID3, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates23), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates21), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates11), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates22), TASK_MANAGER_LOCATION_INFO); // now, the second checkpoint should be confirmed, and the first discarded // actually both pending checkpoints are discarded, and the second has been transformed // into a successful checkpoint assertTrue(pending1.isDisposed()); assertTrue(pending2.isDisposed()); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); // validate that all received subtask states in the first checkpoint have been discarded verify(subtaskState11, times(1)).discardState(); verify(subtaskState12, times(1)).discardState(); // validate that all subtask states in the second checkpoint are not discarded verify(subtaskState21, never()).discardState(); verify(subtaskState22, never()).discardState(); verify(subtaskState23, never()).discardState(); // validate the committed checkpoints List<CompletedCheckpoint> scs = checkpointCoordinator.getSuccessfulCheckpoints(); CompletedCheckpoint success = scs.get(0); assertEquals(checkpointId2, success.getCheckpointID()); assertEquals(graph.getJobID(), success.getJobId()); assertEquals(3, 
success.getOperatorStates().size()); // the first confirm message should be out for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2, vertex3)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals( checkpointId2, gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId); } // send the last remaining ack for the first checkpoint. This should not do anything checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID3, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates13), TASK_MANAGER_LOCATION_INFO); verify(subtaskState13, times(1)).discardState(); checkpointCoordinator.shutdown(); completedCheckpointStore.shutdown(JobStatus.FINISHED, new CheckpointsCleaner()); // validate that the states in the second checkpoint have been discarded verify(subtaskState21, times(1)).discardState(); verify(subtaskState22, times(1)).discardState(); verify(subtaskState23, times(1)).discardState(); } @Test public void testCheckpointTimeoutIsolated() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2, false) .setTaskManagerGateway(gateway) .build(); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); // set up the coordinator CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setExecutionGraph(graph) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) 
.setTimer(manuallyTriggeredScheduledExecutor) .build(); // trigger a checkpoint, partially acknowledged final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); PendingCheckpoint checkpoint = checkpointCoordinator.getPendingCheckpoints().values().iterator().next(); assertFalse(checkpoint.isDisposed()); OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates1 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID1, subtaskState1); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpoint.getCheckpointId(), new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); // triggers cancelling manuallyTriggeredScheduledExecutor.triggerScheduledTasks(); assertTrue("Checkpoint was not canceled by the timeout", checkpoint.isDisposed()); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); // validate that the received states have been discarded verify(subtaskState1, times(1)).discardState(); // no confirm message must have been sent for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals(0, gateway.getNotifiedCompletedCheckpoints(attemptId).size()); } checkpointCoordinator.shutdown(); } @Test public void testHandleMessagesForNonExistingCheckpoints() throws Exception { // create some mock execution vertices and trigger some checkpoint JobVertexID jobVertexID1 = new 
JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2, false) .setTaskManagerGateway(gateway) .build(); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setExecutionGraph(graph) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); long checkpointId = checkpointCoordinator.getPendingCheckpoints().keySet().iterator().next(); // send some messages that do not belong to either the job or the any // of the vertices that need to be acknowledged. // non of the messages should throw an exception // wrong job id checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(new JobID(), attemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); // unknown checkpoint checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, 1L), TASK_MANAGER_LOCATION_INFO); // unknown ack vertex checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), new ExecutionAttemptID(), checkpointId), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.shutdown(); } /** * Tests that late acknowledge checkpoint messages are properly cleaned up. 
Furthermore it tests * that unknown checkpoint messages for the same job a are cleaned up as well. In contrast * checkpointing messages from other jobs should not be touched. A late acknowledge message is * an acknowledge message which arrives after the checkpoint has been declined. * * @throws Exception */ @Test public void testStateCleanupForLateOrUnknownMessages() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2, false) .setTaskManagerGateway(gateway) .build(); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder() .setMaxConcurrentCheckpoints(1) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setExecutionGraph(graph) .setCheckpointCoordinatorConfiguration(chkConfig) .setTimer(manuallyTriggeredScheduledExecutor) .build(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); PendingCheckpoint pendingCheckpoint = checkpointCoordinator.getPendingCheckpoints().values().iterator().next(); long checkpointId = 
pendingCheckpoint.getCheckpointId(); OperatorID opIDtrigger = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStatesTrigger = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskStateTrigger = mock(OperatorSubtaskState.class); taskOperatorSubtaskStatesTrigger.putSubtaskStateByOperatorID( opIDtrigger, subtaskStateTrigger); // acknowledge the first trigger vertex checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStatesTrigger), TASK_MANAGER_LOCATION_INFO); // verify that the subtask state has not been discarded verify(subtaskStateTrigger, never()).discardState(); TaskStateSnapshot unknownSubtaskState = mock(TaskStateSnapshot.class); // receive an acknowledge message for an unknown vertex checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), unknownSubtaskState), TASK_MANAGER_LOCATION_INFO); // we should discard acknowledge messages from an unknown vertex belonging to our job verify(unknownSubtaskState, times(1)).discardState(); TaskStateSnapshot differentJobSubtaskState = mock(TaskStateSnapshot.class); // receive an acknowledge message from an unknown job checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( new JobID(), new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), differentJobSubtaskState), TASK_MANAGER_LOCATION_INFO); // we should not interfere with different jobs verify(differentJobSubtaskState, never()).discardState(); // duplicate acknowledge message for the trigger vertex TaskStateSnapshot triggerSubtaskState = mock(TaskStateSnapshot.class); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), triggerSubtaskState), TASK_MANAGER_LOCATION_INFO); // 
duplicate acknowledge messages for a known vertex should not trigger discarding the state verify(triggerSubtaskState, never()).discardState(); // let the checkpoint fail at the first ack vertex reset(subtaskStateTrigger); checkpointCoordinator.receiveDeclineMessage( new DeclineCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointException(CHECKPOINT_DECLINED)), TASK_MANAGER_LOCATION_INFO); assertTrue(pendingCheckpoint.isDisposed()); // check that we've cleaned up the already acknowledged state verify(subtaskStateTrigger, times(1)).discardState(); TaskStateSnapshot ackSubtaskState = mock(TaskStateSnapshot.class); // late acknowledge message from the second ack vertex checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId, new CheckpointMetrics(), ackSubtaskState), TASK_MANAGER_LOCATION_INFO); // check that we also cleaned up this state verify(ackSubtaskState, times(1)).discardState(); // receive an acknowledge message from an unknown job reset(differentJobSubtaskState); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( new JobID(), new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), differentJobSubtaskState), TASK_MANAGER_LOCATION_INFO); // we should not interfere with different jobs verify(differentJobSubtaskState, never()).discardState(); TaskStateSnapshot unknownSubtaskState2 = mock(TaskStateSnapshot.class); // receive an acknowledge message for an unknown vertex checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), new ExecutionAttemptID(), checkpointId, new CheckpointMetrics(), unknownSubtaskState2), TASK_MANAGER_LOCATION_INFO); // we should discard acknowledge messages from an unknown vertex belonging to our job verify(unknownSubtaskState2, times(1)).discardState(); } @Test public void testMaxConcurrentAttempts1() { testMaxConcurrentAttempts(1); } @Test public void testMaxConcurrentAttempts2() { 
testMaxConcurrentAttempts(2); } @Test public void testMaxConcurrentAttempts5() { testMaxConcurrentAttempts(5); } @Test public void testTriggerAndConfirmSimpleSavepoint() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) .setTaskManagerGateway(gateway) .build(); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); // set up the coordinator and validate the initial state CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); // trigger the first checkpoint. 
this should succeed String savepointDir = tmpFolder.newFolder().getAbsolutePath(); CompletableFuture<CompletedCheckpoint> savepointFuture = checkpointCoordinator.triggerSavepoint(savepointDir); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(savepointFuture.isDone()); // validate that we have a pending savepoint assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); long checkpointId = checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint pending = checkpointCoordinator.getPendingCheckpoints().get(checkpointId); assertNotNull(pending); assertEquals(checkpointId, pending.getCheckpointId()); assertEquals(graph.getJobID(), pending.getJobId()); assertEquals(2, pending.getNumberOfNonAcknowledgedTasks()); assertEquals(0, pending.getNumberOfAcknowledgedTasks()); assertEquals(0, pending.getOperatorStates().size()); assertFalse(pending.isDisposed()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(pending.canBeSubsumed()); OperatorID opID1 = OperatorID.fromJobVertexID(vertex1.getJobvertexId()); OperatorID opID2 = OperatorID.fromJobVertexID(vertex2.getJobvertexId()); TaskStateSnapshot taskOperatorSubtaskStates1 = mock(TaskStateSnapshot.class); TaskStateSnapshot taskOperatorSubtaskStates2 = mock(TaskStateSnapshot.class); OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState2 = mock(OperatorSubtaskState.class); when(taskOperatorSubtaskStates1.getSubtaskStateByOperatorID(opID1)) .thenReturn(subtaskState1); when(taskOperatorSubtaskStates2.getSubtaskStateByOperatorID(opID2)) .thenReturn(subtaskState2); // acknowledge from one of the tasks AcknowledgeCheckpoint acknowledgeCheckpoint2 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); assertEquals(1, 
pending.getNumberOfAcknowledgedTasks()); assertEquals(1, pending.getNumberOfNonAcknowledgedTasks()); assertFalse(pending.isDisposed()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(savepointFuture.isDone()); // acknowledge the same task again (should not matter) checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); assertFalse(pending.isDisposed()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(savepointFuture.isDone()); // acknowledge the other task. checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); // the checkpoint is internally converted to a successful checkpoint and the // pending checkpoint object is disposed assertTrue(pending.isDisposed()); assertNotNull(savepointFuture.get()); // the now we should have a completed checkpoint assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); // validate that the relevant tasks got a confirmation message for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals(checkpointId, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId); } // validate that the shared states are registered { verify(subtaskState1, times(1)).registerSharedStates(any(SharedStateRegistry.class)); verify(subtaskState2, times(1)).registerSharedStates(any(SharedStateRegistry.class)); } CompletedCheckpoint success = checkpointCoordinator.getSuccessfulCheckpoints().get(0); assertEquals(graph.getJobID(), success.getJobId()); assertEquals(pending.getCheckpointId(), success.getCheckpointID()); assertEquals(2, success.getOperatorStates().size()); // --------------- // trigger another checkpoint and see that this one 
replaces the other checkpoint // --------------- gateway.resetCount(); savepointFuture = checkpointCoordinator.triggerSavepoint(savepointDir); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(savepointFuture.isDone()); long checkpointIdNew = checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointIdNew), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointIdNew), TASK_MANAGER_LOCATION_INFO); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); CompletedCheckpoint successNew = checkpointCoordinator.getSuccessfulCheckpoints().get(0); assertEquals(graph.getJobID(), successNew.getJobId()); assertEquals(checkpointIdNew, successNew.getCheckpointID()); assertEquals(2, successNew.getOperatorStates().size()); assertTrue(successNew.getOperatorStates().values().stream().allMatch(this::hasNoSubState)); assertNotNull(savepointFuture.get()); // validate that the first savepoint does not discard its private states. verify(subtaskState1, never()).discardState(); verify(subtaskState2, never()).discardState(); // validate that the relevant tasks got a confirmation message for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals( checkpointIdNew, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId); assertEquals( checkpointIdNew, gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId); } checkpointCoordinator.shutdown(); } /** * Triggers a savepoint and two checkpoints. The second checkpoint completes and subsumes the * first checkpoint, but not the first savepoint. Then we trigger another checkpoint and * savepoint. 
The 2nd savepoint completes and subsumes the last checkpoint, but not the first
     * savepoint.
     */
    @Test
    public void testSavepointsAreNotSubsumed() throws Exception {
        JobVertexID jobVertexID1 = new JobVertexID();
        JobVertexID jobVertexID2 = new JobVertexID();

        // Two single-subtask vertices: every checkpoint/savepoint needs an ack from both
        // attempts before it can complete.
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .addJobVertex(jobVertexID2)
                        .build();

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

        ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
        ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

        StandaloneCheckpointIDCounter counter = new StandaloneCheckpointIDCounter();

        // set up the coordinator and validate the initial state
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setExecutionGraph(graph)
                        .setCheckpointCoordinatorConfiguration(
                                CheckpointCoordinatorConfiguration.builder()
                                        // unlimited concurrency so all pending instances coexist
                                        .setMaxConcurrentCheckpoints(Integer.MAX_VALUE)
                                        .build())
                        .setCheckpointIDCounter(counter)
                        .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(10))
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();

        String savepointDir = tmpFolder.newFolder().getAbsolutePath();

        // Trigger savepoint and checkpoint
        CompletableFuture<CompletedCheckpoint> savepointFuture1 =
                checkpointCoordinator.triggerSavepoint(savepointDir);
        manuallyTriggeredScheduledExecutor.triggerAll();
        long savepointId1 = counter.getLast();
        assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());

        CompletableFuture<CompletedCheckpoint> checkpointFuture1 =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        assertEquals(2, checkpointCoordinator.getNumberOfPendingCheckpoints());
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture1);

        CompletableFuture<CompletedCheckpoint> checkpointFuture2 =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture2);
        long checkpointId2 = counter.getLast();
        assertEquals(3, checkpointCoordinator.getNumberOfPendingCheckpoints());

        // 2nd checkpoint should subsume the 1st checkpoint, but not the savepoint
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId2),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId2),
                TASK_MANAGER_LOCATION_INFO);

        // only the savepoint remains pending; the completed 2nd checkpoint is retained
        assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

        // the savepoint must not have been disposed by the subsumption
        assertFalse(checkpointCoordinator.getPendingCheckpoints().get(savepointId1).isDisposed());
        assertFalse(savepointFuture1.isDone());

        CompletableFuture<CompletedCheckpoint> checkpointFuture3 =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture3);
        assertEquals(2, checkpointCoordinator.getNumberOfPendingCheckpoints());

        CompletableFuture<CompletedCheckpoint> savepointFuture2 =
                checkpointCoordinator.triggerSavepoint(savepointDir);
        manuallyTriggeredScheduledExecutor.triggerAll();
        long savepointId2 = counter.getLast();
        FutureUtils.throwIfCompletedExceptionally(savepointFuture2);
        assertEquals(3, checkpointCoordinator.getNumberOfPendingCheckpoints());

        // 2nd savepoint should subsume the last checkpoint, but not the 1st savepoint
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, savepointId2),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, savepointId2),
                TASK_MANAGER_LOCATION_INFO);

        assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(2, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        assertFalse(checkpointCoordinator.getPendingCheckpoints().get(savepointId1).isDisposed());

        assertFalse(savepointFuture1.isDone());
        assertNotNull(savepointFuture2.get());

        // Ack first savepoint
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, savepointId1),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, savepointId1),
                TASK_MANAGER_LOCATION_INFO);

        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertEquals(3, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
        assertNotNull(savepointFuture1.get());
    }

    /**
     * Runs a coordinator with the given concurrency limit and verifies that exactly that many
     * checkpoints are triggered, that acknowledging one frees a slot for the next, and that no
     * further checkpoints are triggered beyond the limit.
     */
    private void testMaxConcurrentAttempts(int maxConcurrentAttempts) {
        try {
            JobVertexID jobVertexID1 = new JobVertexID();

            // gateway records every trigger/notify message sent to the task
            CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
                    new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();

            ExecutionGraph graph =
                    new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                            .addJobVertex(jobVertexID1)
                            .setTaskManagerGateway(gateway)
                            .build();

            ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
            ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();

            CheckpointCoordinatorConfiguration chkConfig =
                    new CheckpointCoordinatorConfiguration
                            .CheckpointCoordinatorConfigurationBuilder()
                            .setCheckpointInterval(10) // periodic interval is 10 ms
                            .setCheckpointTimeout(200000) // timeout is very long (200 s)
                            .setMinPauseBetweenCheckpoints(0L) // no extra delay
                            .setMaxConcurrentCheckpoints(maxConcurrentAttempts)
                            .build();
            CheckpointCoordinator checkpointCoordinator =
                    new CheckpointCoordinatorBuilder()
                            .setExecutionGraph(graph)
                            .setCheckpointCoordinatorConfiguration(chkConfig)
                            .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2))
                            .setTimer(manuallyTriggeredScheduledExecutor)
                            .build();

            checkpointCoordinator.startCheckpointScheduler();

            // fire the periodic trigger once per allowed concurrent attempt
            for (int i = 0; i < maxConcurrentAttempts; i++) {
                manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks();
                manuallyTriggeredScheduledExecutor.triggerAll();
            }

            assertEquals(maxConcurrentAttempts, gateway.getTriggeredCheckpoints(attemptID1).size());
            assertEquals(0, gateway.getNotifiedCompletedCheckpoints(attemptID1).size());

            // now, once we acknowledge one checkpoint, it should trigger the next one
            checkpointCoordinator.receiveAcknowledgeMessage(
                    new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, 1L),
                    TASK_MANAGER_LOCATION_INFO);

            final Collection<ScheduledFuture<?>> periodicScheduledTasks =
                    manuallyTriggeredScheduledExecutor.getActivePeriodicScheduledTask();
            assertEquals(1, periodicScheduledTasks.size());

            manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks();
            manuallyTriggeredScheduledExecutor.triggerAll();

            assertEquals(
                    maxConcurrentAttempts + 1, gateway.getTriggeredCheckpoints(attemptID1).size());

            // no further checkpoints should happen
            manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks();
            manuallyTriggeredScheduledExecutor.triggerAll();

            assertEquals(
                    maxConcurrentAttempts + 1, gateway.getTriggeredCheckpoints(attemptID1).size());

            checkpointCoordinator.shutdown();
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Verifies that with a concurrency limit of 2, acknowledging the newer pending checkpoint
     * subsumes the older one and frees capacity for two new checkpoints to be triggered.
     */
    @Test
    public void testMaxConcurrentAttemptsWithSubsumption() throws Exception {
        final int maxConcurrentAttempts = 2;
        JobVertexID jobVertexID1 = new JobVertexID();

        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .build();

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();

        CheckpointCoordinatorConfiguration chkConfig =
                new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder()
                        .setCheckpointInterval(10) // periodic interval is 10 ms
                        .setCheckpointTimeout(200000) // timeout is very long (200 s)
                        .setMinPauseBetweenCheckpoints(0L) // no extra delay
                        .setMaxConcurrentCheckpoints(maxConcurrentAttempts)
                        .build();
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setExecutionGraph(graph)
                        .setCheckpointCoordinatorConfiguration(chkConfig)
                        .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2))
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();

        checkpointCoordinator.startCheckpointScheduler();

        // keep firing periodic triggers until the concurrency limit is reached
        do {
            manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks();
            manuallyTriggeredScheduledExecutor.triggerAll();
        } while (checkpointCoordinator.getNumberOfPendingCheckpoints() < maxConcurrentAttempts);

        // validate that the pending checkpoints are there
        assertEquals(
                maxConcurrentAttempts, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertNotNull(checkpointCoordinator.getPendingCheckpoints().get(1L));
        assertNotNull(checkpointCoordinator.getPendingCheckpoints().get(2L));

        // now we acknowledge the second checkpoint, which should subsume the first checkpoint
        // and allow two more checkpoints to be triggered
        // now, once we acknowledge one checkpoint, it should trigger the next one
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, 2L),
                TASK_MANAGER_LOCATION_INFO);

        // after a while, there should be the new checkpoints
        do {
            manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks();
            manuallyTriggeredScheduledExecutor.triggerAll();
        } while (checkpointCoordinator.getNumberOfPendingCheckpoints() < maxConcurrentAttempts);

        // do the final check
        assertEquals(
                maxConcurrentAttempts, checkpointCoordinator.getNumberOfPendingCheckpoints());
        assertNotNull(checkpointCoordinator.getPendingCheckpoints().get(3L));
        assertNotNull(checkpointCoordinator.getPendingCheckpoints().get(4L));

        checkpointCoordinator.shutdown();
    }

    /**
     * Verifies that once the (initially inactive) tasks transition to RUNNING, the periodic
     * scheduler starts producing pending checkpoints.
     */
    @Test
    public void testPeriodicSchedulingWithInactiveTasks() throws Exception {
        CheckpointCoordinator checkpointCoordinator =
                setupCheckpointCoordinatorWithInactiveTasks(new MemoryStateBackend());

        // the coordinator should start checkpointing now
        manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks();
        manuallyTriggeredScheduledExecutor.triggerAll();
        assertTrue(checkpointCoordinator.getNumberOfPendingCheckpoints() > 0);
    }

    /**
     * Builds a coordinator over a graph whose tasks are NOT yet running, verifies that no
     * checkpoint is triggered while they are inactive, then moves the task to RUNNING and
     * triggers the scheduler once before handing the coordinator back to the caller.
     */
    private CheckpointCoordinator setupCheckpointCoordinatorWithInactiveTasks(
            CheckpointStorage checkpointStorage) throws Exception {
        JobVertexID jobVertexID1 = new JobVertexID();

        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        // keep tasks in their initial (non-RUNNING) state
                        .setTransitToRunning(false)
                        .build();

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];

        CheckpointCoordinatorConfiguration chkConfig =
                new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder()
                        .setCheckpointInterval(10) // periodic interval is 10 ms
                        .setCheckpointTimeout(200000) // timeout is very long (200 s)
                        .setMinPauseBetweenCheckpoints(0) // no extra delay
                        .setMaxConcurrentCheckpoints(2) // max two concurrent checkpoints
                        .build();

        CheckpointIDCounterWithOwner checkpointIDCounter = new CheckpointIDCounterWithOwner();

        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setExecutionGraph(graph)
                        .setCheckpointCoordinatorConfiguration(chkConfig)
                        .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2))
                        .setCheckpointStorage(checkpointStorage)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .setCheckpointIDCounter(checkpointIDCounter)
                        .build();
        checkpointIDCounter.setOwner(checkpointCoordinator);

        checkpointCoordinator.startCheckpointScheduler();

        manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks();
        manuallyTriggeredScheduledExecutor.triggerAll();

        // no checkpoint should have started so far
        assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());

        // now move the state to RUNNING
        vertex1.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING);

        // the coordinator should start checkpointing now
        manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks();
        manuallyTriggeredScheduledExecutor.triggerAll();

        return checkpointCoordinator;
    }

    /** Tests that the savepoints can be triggered concurrently. */
    @Test
    public void testConcurrentSavepoints() throws Exception {
        int numSavepoints = 5;

        JobVertexID jobVertexID1 = new JobVertexID();

        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .build();

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();

        StandaloneCheckpointIDCounter checkpointIDCounter = new StandaloneCheckpointIDCounter();

        CheckpointCoordinatorConfiguration chkConfig =
                new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder()
                        .setMaxConcurrentCheckpoints(
                                1) // max one checkpoint at a time => should not affect savepoints
                        .build();
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setExecutionGraph(graph)
                        .setCheckpointCoordinatorConfiguration(chkConfig)
                        .setCheckpointIDCounter(checkpointIDCounter)
                        .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2))
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();

        List<CompletableFuture<CompletedCheckpoint>> savepointFutures = new ArrayList<>();

        String savepointDir = tmpFolder.newFolder().getAbsolutePath();

        // Trigger savepoints
        for (int i = 0; i < numSavepoints; i++) {
            savepointFutures.add(checkpointCoordinator.triggerSavepoint(savepointDir));
        }

        // After triggering multiple savepoints, all should be in progress
        for (CompletableFuture<CompletedCheckpoint> savepointFuture : savepointFutures) {
            assertFalse(savepointFuture.isDone());
        }

        manuallyTriggeredScheduledExecutor.triggerAll();

        // ACK all savepoints; getLast() is the highest assigned id, so count down from it
        long checkpointId = checkpointIDCounter.getLast();
        for (int i = 0; i < numSavepoints; i++, checkpointId--) {
            checkpointCoordinator.receiveAcknowledgeMessage(
                    new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId),
                    TASK_MANAGER_LOCATION_INFO);
        }

        // After ACKs, all should be completed
        for (CompletableFuture<CompletedCheckpoint> savepointFuture : savepointFutures) {
            assertNotNull(savepointFuture.get());
        }
    }

    /** Tests that no minimum delay between savepoints is enforced. */
    @Test
    public void testMinDelayBetweenSavepoints() throws Exception {
        CheckpointCoordinatorConfiguration chkConfig =
                new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder()
                        .setMinPauseBetweenCheckpoints(
                                100000000L) // very long min delay => should not affect savepoints
                        .setMaxConcurrentCheckpoints(1)
                        .build();
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setCheckpointCoordinatorConfiguration(chkConfig)
                        .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2))
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();

        String savepointDir = tmpFolder.newFolder().getAbsolutePath();

        CompletableFuture<CompletedCheckpoint> savepoint0 =
                checkpointCoordinator.triggerSavepoint(savepointDir);
        assertFalse("Did not trigger savepoint", savepoint0.isDone());

        CompletableFuture<CompletedCheckpoint> savepoint1 =
                checkpointCoordinator.triggerSavepoint(savepointDir);
        assertFalse("Did not trigger savepoint", savepoint1.isDone());
    }

    /** Tests that the externalized checkpoint configuration is respected.
     */
    @Test
    public void testExternalizedCheckpoints() throws Exception {
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(new JobVertexID())
                        .build();

        // set up the coordinator and validate the initial state
        CheckpointCoordinatorConfiguration chkConfig =
                new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder()
                        .setCheckpointRetentionPolicy(CheckpointRetentionPolicy.RETAIN_ON_FAILURE)
                        .build();
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setExecutionGraph(graph)
                        .setCheckpointCoordinatorConfiguration(chkConfig)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();

        CompletableFuture<CompletedCheckpoint> checkpointFuture =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture);

        // every pending checkpoint must carry the configured retention policy
        for (PendingCheckpoint checkpoint :
                checkpointCoordinator.getPendingCheckpoints().values()) {
            CheckpointProperties props = checkpoint.getProps();
            CheckpointProperties expected =
                    CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.RETAIN_ON_FAILURE);

            assertEquals(expected, props);
        }

        // now we should have a completed checkpoint
        checkpointCoordinator.shutdown();
    }

    /** Exercises key-group partitioning over fixed corner cases and randomized parallelisms. */
    @Test
    public void testCreateKeyGroupPartitions() {
        testCreateKeyGroupPartitions(1, 1);
        testCreateKeyGroupPartitions(13, 1);
        testCreateKeyGroupPartitions(13, 2);
        testCreateKeyGroupPartitions(Short.MAX_VALUE, 1);
        testCreateKeyGroupPartitions(Short.MAX_VALUE, 13);
        testCreateKeyGroupPartitions(Short.MAX_VALUE, Short.MAX_VALUE);

        // fixed seed keeps the randomized sweep reproducible
        Random r = new Random(1234);
        for (int k = 0; k < 1000; ++k) {
            int maxParallelism = 1 + r.nextInt(Short.MAX_VALUE - 1);
            int parallelism = 1 + r.nextInt(maxParallelism);
            testCreateKeyGroupPartitions(maxParallelism, parallelism);
        }
    }

    /**
     * Checks that every key group is contained in the range assigned to the operator index
     * computed for it.
     */
    private void testCreateKeyGroupPartitions(int maxParallelism, int parallelism) {
        List<KeyGroupRange> ranges =
                StateAssignmentOperation.createKeyGroupPartitions(maxParallelism, parallelism);
        for (int i = 0; i < maxParallelism; ++i) {
            KeyGroupRange range =
                    ranges.get(
                            KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(
                                    maxParallelism, parallelism, i));
            if (!range.contains(i)) {
                Assert.fail("Could not find expected key-group " + i + " in range " + range);
            }
        }
    }

    /** Randomized sweep over rescaling scenarios for operator-state repartitioning. */
    @Test
    public void testPartitionableStateRepartitioning() {
        Random r = new Random(42);

        for (int run = 0; run < 10000; ++run) {
            int oldParallelism = 1 + r.nextInt(9);
            int newParallelism = 1 + r.nextInt(9);
            int numNamedStates = 1 + r.nextInt(9);
            int maxPartitionsPerState = 1 + r.nextInt(9);

            doTestPartitionableStateRepartitioning(
                    r, oldParallelism, newParallelism, numNamedStates, maxPartitionsPerState);
        }
    }

    /**
     * Builds random operator state (SPLIT_DISTRIBUTE/UNION, and BROADCAST when numNamedStates is
     * even), computes the expected multiset of (handle, state-name, offset) after repartitioning,
     * runs the round-robin repartitioner, and compares. Also checks the per-subtask partition
     * load is balanced (difference <= 1) when the parallelism actually changes.
     */
    private void doTestPartitionableStateRepartitioning(
            Random r,
            int oldParallelism,
            int newParallelism,
            int numNamedStates,
            int maxPartitionsPerState) {

        List<List<OperatorStateHandle>> previousParallelOpInstanceStates =
                new ArrayList<>(oldParallelism);

        for (int i = 0; i < oldParallelism; ++i) {
            Path fakePath = new Path("/fake-" + i);
            Map<String, OperatorStateHandle.StateMetaInfo> namedStatesToOffsets = new HashMap<>();
            int off = 0;
            for (int s = 0; s < numNamedStates - 1; ++s) {
                long[] offs = new long[1 + r.nextInt(maxPartitionsPerState)];

                for (int o = 0; o < offs.length; ++o) {
                    offs[o] = off;
                    ++off;
                }

                // ~10% of the named states are UNION-distributed
                OperatorStateHandle.Mode mode =
                        r.nextInt(10) == 0
                                ? OperatorStateHandle.Mode.UNION
                                : OperatorStateHandle.Mode.SPLIT_DISTRIBUTE;
                namedStatesToOffsets.put(
                        "State-" + s, new OperatorStateHandle.StateMetaInfo(offs, mode));
            }

            if (numNamedStates % 2 == 0) {
                // finally add a broadcast state
                long[] offs = {off + 1, off + 2, off + 3, off + 4};

                namedStatesToOffsets.put(
                        "State-" + (numNamedStates - 1),
                        new OperatorStateHandle.StateMetaInfo(
                                offs, OperatorStateHandle.Mode.BROADCAST));
            }

            previousParallelOpInstanceStates.add(
                    Collections.singletonList(
                            new OperatorStreamStateHandle(
                                    namedStatesToOffsets, new FileStateHandle(fakePath, -1))));
        }

        // expected: per delegate handle, per state name, the offsets replicated per mode
        Map<StreamStateHandle, Map<String, List<Long>>> expected = new HashMap<>();

        int taskIndex = 0;
        int expectedTotalPartitions = 0;
        for (List<OperatorStateHandle> previousParallelOpInstanceState :
                previousParallelOpInstanceStates) {
            Assert.assertEquals(1, previousParallelOpInstanceState.size());

            for (OperatorStateHandle psh : previousParallelOpInstanceState) {
                Map<String, OperatorStateHandle.StateMetaInfo> offsMap =
                        psh.getStateNameToPartitionOffsets();

                Map<String, List<Long>> offsMapWithList = new HashMap<>(offsMap.size());
                for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> e :
                        offsMap.entrySet()) {

                    long[] offs = e.getValue().getOffsets();
                    int replication;
                    switch (e.getValue().getDistributionMode()) {
                        case UNION:
                            // union state goes to every new subtask
                            replication = newParallelism;
                            break;
                        case BROADCAST:
                            // spread broadcast copies evenly over the old subtasks
                            int extra = taskIndex < (newParallelism % oldParallelism) ? 1 : 0;
                            replication = newParallelism / oldParallelism + extra;
                            break;
                        case SPLIT_DISTRIBUTE:
                            // split state is round-robin, so each partition goes to one subtask
                            replication = 1;
                            break;
                        default:
                            throw new RuntimeException(
                                    "Unknown distribution mode "
                                            + e.getValue().getDistributionMode());
                    }

                    if (replication > 0) {
                        expectedTotalPartitions += replication * offs.length;
                        List<Long> offsList = new ArrayList<>(offs.length);

                        for (long off : offs) {
                            for (int p = 0; p < replication; ++p) {
                                offsList.add(off);
                            }
                        }
                        offsMapWithList.put(e.getKey(), offsList);
                    }
                }

                if (!offsMapWithList.isEmpty()) {
                    expected.put(psh.getDelegateStateHandle(), offsMapWithList);
                }
                taskIndex++;
            }
        }

        OperatorStateRepartitioner repartitioner = RoundRobinOperatorStateRepartitioner.INSTANCE;

        List<List<OperatorStateHandle>> pshs =
                repartitioner.repartitionState(
                        previousParallelOpInstanceStates, oldParallelism, newParallelism);

        // collect the actual assignment and the per-subtask partition counts
        Map<StreamStateHandle, Map<String, List<Long>>> actual = new HashMap<>();

        int minCount = Integer.MAX_VALUE;
        int maxCount = 0;
        int actualTotalPartitions = 0;
        for (int p = 0; p < newParallelism; ++p) {
            int partitionCount = 0;

            Collection<OperatorStateHandle> pshc = pshs.get(p);
            for (OperatorStateHandle sh : pshc) {
                for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> namedState :
                        sh.getStateNameToPartitionOffsets().entrySet()) {

                    Map<String, List<Long>> stateToOffsets =
                            actual.get(sh.getDelegateStateHandle());
                    if (stateToOffsets == null) {
                        stateToOffsets = new HashMap<>();
                        actual.put(sh.getDelegateStateHandle(), stateToOffsets);
                    }

                    List<Long> actualOffs = stateToOffsets.get(namedState.getKey());
                    if (actualOffs == null) {
                        actualOffs = new ArrayList<>();
                        stateToOffsets.put(namedState.getKey(), actualOffs);
                    }
                    long[] add = namedState.getValue().getOffsets();
                    for (long l : add) {
                        actualOffs.add(l);
                    }

                    partitionCount += namedState.getValue().getOffsets().length;
                }
            }

            minCount = Math.min(minCount, partitionCount);
            maxCount = Math.max(maxCount, partitionCount);
            actualTotalPartitions += partitionCount;
        }

        // sort the offsets so the comparison against 'expected' is order-insensitive
        for (Map<String, List<Long>> v : actual.values()) {
            for (List<Long> l : v.values()) {
                Collections.sort(l);
            }
        }

        // if newParallelism equals to oldParallelism, we would only redistribute UNION state if
        // possible.
        if (oldParallelism != newParallelism) {
            int maxLoadDiff = maxCount - minCount;
            Assert.assertTrue(
                    "Difference in partition load is > 1 : " + maxLoadDiff, maxLoadDiff <= 1);
        }
        Assert.assertEquals(expectedTotalPartitions, actualTotalPartitions);
        Assert.assertEquals(expected, actual);
    }

    /** Tests that the pending checkpoint stats callbacks are created. */
    @Test
    public void testCheckpointStatsTrackerPendingCheckpointCallback() throws Exception {
        // set up the coordinator and validate the initial state
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();

        CheckpointStatsTracker tracker = mock(CheckpointStatsTracker.class);
        checkpointCoordinator.setCheckpointStatsTracker(tracker);

        when(tracker.reportPendingCheckpoint(
                        anyLong(), anyLong(), any(CheckpointProperties.class), any(Map.class)))
                .thenReturn(mock(PendingCheckpointStats.class));

        // Trigger a checkpoint and verify callback
        CompletableFuture<CompletedCheckpoint> checkpointFuture =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture);

        verify(tracker, times(1))
                .reportPendingCheckpoint(
                        eq(1L),
                        any(Long.class),
                        eq(
                                CheckpointProperties.forCheckpoint(
                                        CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION)),
                        any());
    }

    /** Tests that the restore callbacks are called if registered.
     */
    @Test
    public void testCheckpointStatsTrackerRestoreCallback() throws Exception {
        StandaloneCompletedCheckpointStore store = new StandaloneCompletedCheckpointStore(1);

        // set up the coordinator and validate the initial state
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setCompletedCheckpointStore(store)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();

        // pre-populate the store with one (empty) completed checkpoint to restore from
        store.addCheckpoint(
                new CompletedCheckpoint(
                        new JobID(),
                        0,
                        0,
                        0,
                        Collections.<OperatorID, OperatorState>emptyMap(),
                        Collections.<MasterState>emptyList(),
                        CheckpointProperties.forCheckpoint(
                                CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION),
                        new TestCompletedCheckpointStorageLocation()),
                new CheckpointsCleaner(),
                () -> {});

        CheckpointStatsTracker tracker = mock(CheckpointStatsTracker.class);
        checkpointCoordinator.setCheckpointStatsTracker(tracker);

        assertTrue(
                checkpointCoordinator.restoreLatestCheckpointedStateToAll(
                        Collections.emptySet(), true));

        verify(tracker, times(1)).reportRestoredCheckpoint(any(RestoredCheckpointStats.class));
    }

    /**
     * Performs several incremental checkpoints and verifies that shared state handles are
     * registered with the shared-state registry, survive subsumption while still referenced,
     * are re-registered after a restore, and are only discarded once no checkpoint references
     * them anymore.
     */
    @Test
    public void testSharedStateRegistrationOnRestore() throws Exception {
        JobVertexID jobVertexID1 = new JobVertexID();

        int parallelism1 = 2;
        int maxParallelism1 = 4;

        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1, parallelism1, maxParallelism1)
                        .build();

        ExecutionJobVertex jobVertex1 = graph.getJobVertex(jobVertexID1);

        final EmbeddedCompletedCheckpointStore store = new EmbeddedCompletedCheckpointStore(10);

        // remember every registry the coordinator creates, so registrations can be verified
        final List<SharedStateRegistry> createdSharedStateRegistries = new ArrayList<>(2);

        // set up the coordinator and validate the initial state
        final CheckpointCoordinatorBuilder coordinatorBuilder =
                new CheckpointCoordinatorBuilder()
                        .setExecutionGraph(graph)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .setSharedStateRegistryFactory(
                                deleteExecutor -> {
                                    SharedStateRegistry instance =
                                            new SharedStateRegistry(deleteExecutor);
                                    createdSharedStateRegistries.add(instance);
                                    return instance;
                                });
        final CheckpointCoordinator coordinator =
                coordinatorBuilder.setCompletedCheckpointStore(store).build();

        final int numCheckpoints = 3;

        List<KeyGroupRange> keyGroupPartitions1 =
                StateAssignmentOperation.createKeyGroupPartitions(maxParallelism1, parallelism1);

        for (int i = 0; i < numCheckpoints; ++i) {
            performIncrementalCheckpoint(
                    graph.getJobID(), coordinator, jobVertex1, keyGroupPartitions1, i);
        }

        List<CompletedCheckpoint> completedCheckpoints = coordinator.getSuccessfulCheckpoints();
        assertEquals(numCheckpoints, completedCheckpoints.size());

        int sharedHandleCount = 0;

        // shared handles, collected per checkpoint index
        List<Map<StateHandleID, StreamStateHandle>> sharedHandlesByCheckpoint =
                new ArrayList<>(numCheckpoints);

        for (int i = 0; i < numCheckpoints; ++i) {
            sharedHandlesByCheckpoint.add(new HashMap<>(2));
        }

        int cp = 0;
        for (CompletedCheckpoint completedCheckpoint : completedCheckpoints) {
            for (OperatorState taskState : completedCheckpoint.getOperatorStates().values()) {
                for (OperatorSubtaskState subtaskState : taskState.getStates()) {
                    for (KeyedStateHandle keyedStateHandle : subtaskState.getManagedKeyedState()) {
                        // test we are once registered with the current registry
                        verify(keyedStateHandle, times(1))
                                .registerSharedStates(createdSharedStateRegistries.get(0));
                        IncrementalRemoteKeyedStateHandle incrementalKeyedStateHandle =
                                (IncrementalRemoteKeyedStateHandle) keyedStateHandle;

                        sharedHandlesByCheckpoint
                                .get(cp)
                                .putAll(incrementalKeyedStateHandle.getSharedState());

                        for (StreamStateHandle streamStateHandle :
                                incrementalKeyedStateHandle.getSharedState().values()) {
                            assertTrue(
                                    !(streamStateHandle instanceof PlaceholderStreamStateHandle));
                            verify(streamStateHandle, never()).discardState();
                            ++sharedHandleCount;
                        }

                        for (StreamStateHandle streamStateHandle :
                                incrementalKeyedStateHandle.getPrivateState().values()) {
                            verify(streamStateHandle, never()).discardState();
                        }

                        verify(incrementalKeyedStateHandle.getMetaStateHandle(), never())
                                .discardState();
                    }

                    verify(subtaskState, never()).discardState();
                }
            }
            ++cp;
        }

        // 2 (parallelism) x (1 (CP0) + 2 (CP1) + 2 (CP2)) = 10
        assertEquals(10, sharedHandleCount);

        // discard CP0
        store.removeOldestCheckpoint();

        // we expect no shared state was discarded because the state of CP0 is still referenced by
        // CP1
        for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint) {
            for (StreamStateHandle streamStateHandle : cpList.values()) {
                verify(streamStateHandle, never()).discardState();
            }
        }

        // shutdown the store
        store.shutdown(JobStatus.SUSPENDED, new CheckpointsCleaner());

        // restore the store
        Set<ExecutionJobVertex> tasks = new HashSet<>();
        tasks.add(jobVertex1);

        assertEquals(JobStatus.SUSPENDED, store.getShutdownStatus().orElse(null));
        final EmbeddedCompletedCheckpointStore secondStore =
                new EmbeddedCompletedCheckpointStore(10, store.getAllCheckpoints());
        final CheckpointCoordinator secondCoordinator =
                coordinatorBuilder.setCompletedCheckpointStore(secondStore).build();
        assertTrue(secondCoordinator.restoreLatestCheckpointedStateToAll(tasks, false));

        // validate that all shared states are registered again after the recovery.
        cp = 0;
        for (CompletedCheckpoint completedCheckpoint : completedCheckpoints) {
            for (OperatorState taskState : completedCheckpoint.getOperatorStates().values()) {
                for (OperatorSubtaskState subtaskState : taskState.getStates()) {
                    for (KeyedStateHandle keyedStateHandle : subtaskState.getManagedKeyedState()) {
                        VerificationMode verificationMode;
                        // test we are once registered with the new registry
                        if (cp > 0) {
                            verificationMode = times(1);
                        } else {
                            verificationMode = never();
                        }

                        // check that all are registered with the new registry
                        verify(keyedStateHandle, verificationMode)
                                .registerSharedStates(
                                        Iterables.getLast(createdSharedStateRegistries));
                    }
                }
            }
            ++cp;
        }

        // discard CP1
        secondStore.removeOldestCheckpoint();

        // we expect that all shared state from CP0 is no longer referenced and discarded. CP2 is
        // still live and also
        // references the state from CP1, so we expect they are not discarded.
        for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint) {
            for (Map.Entry<StateHandleID, StreamStateHandle> entry : cpList.entrySet()) {
                String key = entry.getKey().getKeyString();
                // the last character of the key encodes the checkpoint the handle belongs to
                int belongToCP = Integer.parseInt(String.valueOf(key.charAt(key.length() - 1)));
                if (belongToCP == 0) {
                    verify(entry.getValue(), times(1)).discardState();
                } else {
                    verify(entry.getValue(), never()).discardState();
                }
            }
        }

        // discard CP2
        secondStore.removeOldestCheckpoint();

        // we expect all shared state was discarded now, because all CPs are
        for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint) {
            for (StreamStateHandle streamStateHandle : cpList.values()) {
                verify(streamStateHandle, times(1)).discardState();
            }
        }
    }

    /**
     * Verifies that declining an in-flight synchronous savepoint disposes it, fails the returned
     * future with the decline's root cause, and reports the failure to the job via the
     * {@link CheckpointFailureManager} callback exactly once.
     */
    @Test
    public void jobFailsIfInFlightSynchronousSavepointIsDiscarded() throws Exception {
        // f0 counts failJob invocations, f1 records the reported cause
        final Tuple2<Integer, Throwable> invocationCounterAndException = Tuple2.of(0, null);
        final Throwable expectedRootCause = new IOException("Custom-Exception");

        JobVertexID jobVertexID1 = new JobVertexID();
        JobVertexID jobVertexID2 = new JobVertexID();

        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .addJobVertex(jobVertexID2)
                        .build();

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

        ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
        ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

        // set up the coordinator and validate the initial state
        final CheckpointCoordinator coordinator =
                getCheckpointCoordinator(
                        graph,
                        new CheckpointFailureManager(
                                0,
                                new CheckpointFailureManager.FailJobCallback() {
                                    @Override
                                    public void failJob(Throwable cause) {
                                        invocationCounterAndException.f0 += 1;
                                        invocationCounterAndException.f1 = cause;
                                    }

                                    @Override
                                    public void failJobDueToTaskFailure(
                                            Throwable cause, ExecutionAttemptID failingTask) {
                                        throw new AssertionError(
                                                "This method should not be called for the test.");
                                    }
                                }));

        final CompletableFuture<CompletedCheckpoint> savepointFuture =
                coordinator.triggerSynchronousSavepoint(false, "test-dir");

        manuallyTriggeredScheduledExecutor.triggerAll();
        final PendingCheckpoint syncSavepoint =
                declineSynchronousSavepoint(
                        graph.getJobID(), coordinator, attemptID1, expectedRootCause);

        assertTrue(syncSavepoint.isDisposed());

        try {
            savepointFuture.get();
            fail("Expected Exception not found.");
        } catch (ExecutionException e) {
            final Throwable cause = ExceptionUtils.stripExecutionException(e);
            assertTrue(cause instanceof CheckpointException);
            assertEquals(expectedRootCause.getMessage(), cause.getCause().getCause().getMessage());
        }

        assertEquals(1L, invocationCounterAndException.f0.intValue());
        assertTrue(
                invocationCounterAndException.f1 instanceof CheckpointException
                        && invocationCounterAndException
                                .f1
                                .getCause()
                                .getCause()
                                .getMessage()
                                .equals(expectedRootCause.getMessage()));

        coordinator.shutdown();
    }

    /** Tests that do not trigger checkpoint when stop the coordinator after the eager pre-check.
     */
    @Test
    public void testTriggerCheckpointAfterStopping() throws Exception {
        // counter that stops the scheduler while a trigger is in flight
        StoppingCheckpointIDCounter testingCounter = new StoppingCheckpointIDCounter();
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setCheckpointIDCounter(testingCounter)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();
        testingCounter.setOwner(checkpointCoordinator);

        testTriggerCheckpoint(checkpointCoordinator, PERIODIC_SCHEDULER_SHUTDOWN);
    }

    /**
     * Triggers a periodic checkpoint on the given coordinator and expects it to fail with a
     * {@link CheckpointException} carrying the given failure reason; any other outcome fails
     * the test. Always shuts the coordinator down.
     */
    private void testTriggerCheckpoint(
            CheckpointCoordinator checkpointCoordinator,
            CheckpointFailureReason expectedFailureReason)
            throws Exception {
        try {
            // start the coordinator
            checkpointCoordinator.startCheckpointScheduler();
            final CompletableFuture<CompletedCheckpoint> onCompletionPromise =
                    checkpointCoordinator.triggerCheckpoint(
                            CheckpointProperties.forCheckpoint(
                                    CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION),
                            null,
                            true);
            manuallyTriggeredScheduledExecutor.triggerAll();
            try {
                onCompletionPromise.get();
                fail("should not trigger periodic checkpoint");
            } catch (ExecutionException e) {
                final Optional<CheckpointException> checkpointExceptionOptional =
                        ExceptionUtils.findThrowable(e, CheckpointException.class);
                // rethrow anything that is not the expected failure reason
                if (!checkpointExceptionOptional.isPresent()
                        || checkpointExceptionOptional.get().getCheckpointFailureReason()
                                != expectedFailureReason) {
                    throw e;
                }
            }
        } finally {
            checkpointCoordinator.shutdown();
        }
    }

    /**
     * Verifies request queueing in unaligned mode: checkpoint requests beyond the concurrency
     * limit are queued, a savepoint can be enqueued behind them, and after a decline the
     * savepoint becomes the pending (non-forced) checkpoint instance.
     */
    @Test
    public void testSavepointScheduledInUnalignedMode() throws Exception {
        int maxConcurrentCheckpoints = 1;
        int checkpointRequestsToSend = 10;
        int activeRequests = 0;

        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(new JobVertexID())
                        .build();

        CheckpointCoordinator coordinator =
                new CheckpointCoordinatorBuilder()
                        .setCheckpointCoordinatorConfiguration(
                                CheckpointCoordinatorConfiguration.builder()
                                        .setUnalignedCheckpointsEnabled(true)
                                        .setMaxConcurrentCheckpoints(maxConcurrentCheckpoints)
                                        .build())
                        .setExecutionGraph(graph)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build();
        try {
            List<Future<?>> checkpointFutures = new ArrayList<>(checkpointRequestsToSend);
            coordinator.startCheckpointScheduler();
            while (activeRequests < checkpointRequestsToSend) {
                checkpointFutures.add(coordinator.triggerCheckpoint(true));
                activeRequests++;
            }
            // all but the allowed concurrent ones must be queued
            assertEquals(
                    activeRequests - maxConcurrentCheckpoints, coordinator.getNumQueuedRequests());

            Future<?> savepointFuture = coordinator.triggerSavepoint("/tmp");
            manuallyTriggeredScheduledExecutor.triggerAll();
            assertEquals(
                    ++activeRequests - maxConcurrentCheckpoints,
                    coordinator.getNumQueuedRequests());

            coordinator.receiveDeclineMessage(
                    new DeclineCheckpoint(
                            graph.getJobID(),
                            new ExecutionAttemptID(),
                            1L,
                            new CheckpointException(CHECKPOINT_DECLINED)),
                    "none");
            manuallyTriggeredScheduledExecutor.triggerAll();

            activeRequests--; // savepoint triggered
            assertEquals(
                    activeRequests - maxConcurrentCheckpoints, coordinator.getNumQueuedRequests());
            assertEquals(1, checkpointFutures.stream().filter(Future::isDone).count());

            assertFalse(savepointFuture.isDone());
            assertEquals(maxConcurrentCheckpoints, coordinator.getNumberOfPendingCheckpoints());
            CheckpointProperties props =
                    coordinator.getPendingCheckpoints().values().iterator().next().getProps();
            assertTrue(props.isSavepoint());
            assertFalse(props.forceCheckpoint());
        } finally {
            coordinator.shutdown();
        }
    }

    /**
     * Test that the checkpoint still behave correctly when the task checkpoint is triggered by the
     * master hooks and finished before the master checkpoint. Also make sure that the operator
     * coordinators are checkpointed before starting the task checkpoint.
*/ @Test public void testExternallyInducedSourceWithOperatorCoordinator() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) .setTaskManagerGateway(gateway) .build(); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID2 = vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates1 = new TaskStateSnapshot(); TaskStateSnapshot taskOperatorSubtaskStates2 = new TaskStateSnapshot(); OperatorSubtaskState subtaskState1 = OperatorSubtaskState.builder().build(); OperatorSubtaskState subtaskState2 = OperatorSubtaskState.builder().build(); taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID1, subtaskState1); taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID2, subtaskState2); // Create a mock OperatorCoordinatorCheckpointContext which completes the checkpoint // immediately. 
AtomicBoolean coordCheckpointDone = new AtomicBoolean(false); OperatorCoordinatorCheckpointContext coordinatorCheckpointContext = new CheckpointCoordinatorTestingUtils .MockOperatorCheckpointCoordinatorContextBuilder() .setOnCallingCheckpointCoordinator( (checkpointId, result) -> { coordCheckpointDone.set(true); result.complete(new byte[0]); }) .setOperatorID(opID1) .build(); // set up the coordinator and validate the initial state CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setExecutionGraph(graph) .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .setCoordinatorsToCheckpoint( Collections.singleton(coordinatorCheckpointContext)) .build(); AtomicReference<Long> checkpointIdRef = new AtomicReference<>(); // Add a master hook which triggers and acks the task checkpoint immediately. // In this case the task checkpoints would complete before the job master checkpoint // completes. checkpointCoordinator.addMasterHook( new MasterTriggerRestoreHook<Integer>() { @Override public String getIdentifier() { return "anything"; } @Override @Nullable public CompletableFuture<Integer> triggerCheckpoint( long checkpointId, long timestamp, Executor executor) throws Exception { assertTrue( "The coordinator checkpoint should have finished.", coordCheckpointDone.get()); // Acknowledge the checkpoint in the master hooks so the task snapshots // complete before // the master state snapshot completes. 
checkpointIdRef.set(checkpointId); AcknowledgeCheckpoint acknowledgeCheckpoint1 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1); AcknowledgeCheckpoint acknowledgeCheckpoint2 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); return null; } @Override public void restoreCheckpoint(long checkpointId, Integer checkpointData) throws Exception {} @Override public SimpleVersionedSerializer<Integer> createCheckpointDataSerializer() { return new SimpleVersionedSerializer<Integer>() { @Override public int getVersion() { return 0; } @Override public byte[] serialize(Integer obj) throws IOException { return new byte[0]; } @Override public Integer deserialize(int version, byte[] serialized) throws IOException { return 1; } }; } }); // Verify initial state. assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size()); // trigger the first checkpoint. 
this should succeed final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); // now we should have a completed checkpoint assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); // the canceler should be removed now assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size()); // validate that the relevant tasks got a confirmation message long checkpointId = checkpointIdRef.get(); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals(checkpointId, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId); } CompletedCheckpoint success = checkpointCoordinator.getSuccessfulCheckpoints().get(0); assertEquals(graph.getJobID(), success.getJobId()); assertEquals(2, success.getOperatorStates().size()); checkpointCoordinator.shutdown(); } @Test public void testCompleteCheckpointFailureWithExternallyInducedSource() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) .build(); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID2 = 
vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates1 = new TaskStateSnapshot(); TaskStateSnapshot taskOperatorSubtaskStates2 = new TaskStateSnapshot(); OperatorSubtaskState subtaskState1 = OperatorSubtaskState.builder().build(); OperatorSubtaskState subtaskState2 = OperatorSubtaskState.builder().build(); taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID1, subtaskState1); taskOperatorSubtaskStates2.putSubtaskStateByOperatorID(opID2, subtaskState2); // Create a mock OperatorCoordinatorCheckpointContext which completes the checkpoint // immediately. AtomicBoolean coordCheckpointDone = new AtomicBoolean(false); OperatorCoordinatorCheckpointContext coordinatorCheckpointContext = new CheckpointCoordinatorTestingUtils .MockOperatorCheckpointCoordinatorContextBuilder() .setOnCallingCheckpointCoordinator( (checkpointId, result) -> { coordCheckpointDone.set(true); result.complete(new byte[0]); }) .setOperatorID(opID1) .build(); // set up the coordinator and validate the initial state CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setExecutionGraph(graph) .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .setCoordinatorsToCheckpoint( Collections.singleton(coordinatorCheckpointContext)) .setCheckpointStorage( new JobManagerCheckpointStorage() { private static final long serialVersionUID = 8134582566514272546L; // Throw exception when finalizing the checkpoint. 
@Override public CheckpointStorageAccess createCheckpointStorage( JobID jobId) throws IOException { return new MemoryBackendCheckpointStorageAccess( jobId, null, null, 100) { @Override public CheckpointStorageLocation initializeLocationForCheckpoint( long checkpointId) throws IOException { return new NonPersistentMetadataCheckpointStorageLocation( 1000) { @Override public CheckpointMetadataOutputStream createMetadataOutputStream() throws IOException { throw new IOException( "Artificial Exception"); } }; } }; } }) .build(); AtomicReference<Long> checkpointIdRef = new AtomicReference<>(); // Add a master hook which triggers and acks the task checkpoint immediately. // In this case the task checkpoints would complete before the job master checkpoint // completes. checkpointCoordinator.addMasterHook( new MasterTriggerRestoreHook<Integer>() { @Override public String getIdentifier() { return "anything"; } @Override @Nullable public CompletableFuture<Integer> triggerCheckpoint( long checkpointId, long timestamp, Executor executor) throws Exception { assertTrue( "The coordinator checkpoint should have finished.", coordCheckpointDone.get()); // Acknowledge the checkpoint in the master hooks so the task snapshots // complete before // the master state snapshot completes. 
checkpointIdRef.set(checkpointId); AcknowledgeCheckpoint acknowledgeCheckpoint1 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1); AcknowledgeCheckpoint acknowledgeCheckpoint2 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); return null; } @Override public void restoreCheckpoint(long checkpointId, Integer checkpointData) throws Exception {} @Override public SimpleVersionedSerializer<Integer> createCheckpointDataSerializer() { return new SimpleVersionedSerializer<Integer>() { @Override public int getVersion() { return 0; } @Override public byte[] serialize(Integer obj) throws IOException { return new byte[0]; } @Override public Integer deserialize(int version, byte[] serialized) throws IOException { return 1; } }; } }); // trigger the first checkpoint. 
this should succeed final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(checkpointFuture.isCompletedExceptionally()); assertTrue(checkpointCoordinator.getSuccessfulCheckpoints().isEmpty()); } @Test public void testResetCalledInRegionRecovery() throws Exception { CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .build(); TestResetHook hook = new TestResetHook("id"); checkpointCoordinator.addMasterHook(hook); assertFalse(hook.resetCalled); checkpointCoordinator.restoreLatestCheckpointedStateToSubtasks(Collections.emptySet()); assertTrue(hook.resetCalled); } @Test public void testNotifyCheckpointAbortionInOperatorCoordinator() throws Exception { JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .build(); ExecutionVertex executionVertex = graph.getJobVertex(jobVertexID).getTaskVertices()[0]; ExecutionAttemptID attemptID = executionVertex.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinatorTestingUtils.MockOperatorCoordinatorCheckpointContext context = new CheckpointCoordinatorTestingUtils .MockOperatorCheckpointCoordinatorContextBuilder() .setOperatorID(new OperatorID()) .setOnCallingCheckpointCoordinator( (ignored, future) -> future.complete(new byte[0])) .build(); // set up the coordinator and validate the initial state CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setExecutionGraph(graph) .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .setCoordinatorsToCheckpoint(Collections.singleton(context)) .build(); try { // Trigger checkpoint 1. 
checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); long checkpointId1 = Collections.max(checkpointCoordinator.getPendingCheckpoints().keySet()); // Trigger checkpoint 2. checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); // Acknowledge checkpoint 2. This should abort checkpoint 1. long checkpointId2 = Collections.max(checkpointCoordinator.getPendingCheckpoints().keySet()); AcknowledgeCheckpoint acknowledgeCheckpoint1 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID, checkpointId2, new CheckpointMetrics(), null); checkpointCoordinator.receiveAcknowledgeMessage(acknowledgeCheckpoint1, ""); // OperatorCoordinator should have been notified of the abortion of checkpoint 1. assertEquals(Collections.singletonList(1L), context.getAbortedCheckpoints()); assertEquals(Collections.singletonList(2L), context.getCompletedCheckpoints()); } finally { checkpointCoordinator.shutdown(); } } @Test public void testReportLatestCompletedCheckpointIdWithAbort() throws Exception { JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .setTransitToRunning(false) .build(); ExecutionVertex task = graph.getJobVertex(jobVertexID).getTaskVertices()[0]; AtomicLong reportedCheckpointId = new AtomicLong(-1); LogicalSlot slot = new TestingLogicalSlotBuilder() .setTaskManagerGateway( new SimpleAckingTaskManagerGateway() { @Override public void notifyCheckpointAborted( ExecutionAttemptID executionAttemptID, JobID jobId, long checkpointId, long latestCompletedCheckpointId, long timestamp) { reportedCheckpointId.set(latestCompletedCheckpointId); } }) .createTestingLogicalSlot(); ExecutionGraphTestUtils.setVertexResource(task, slot); task.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() 
.setExecutionGraph(graph) .setTimer(manuallyTriggeredScheduledExecutor) .setAllowCheckpointsAfterTasksFinished(true) .build(); // Trigger a successful checkpoint CompletableFuture<CompletedCheckpoint> result = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); long completedCheckpointId = checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), task.getCurrentExecutionAttempt().getAttemptId(), completedCheckpointId, new CheckpointMetrics(), new TaskStateSnapshot()), "localhost"); assertTrue(result.isDone()); assertFalse(result.isCompletedExceptionally()); result = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); long abortedCheckpointId = checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); checkpointCoordinator.receiveDeclineMessage( new DeclineCheckpoint( graph.getJobID(), task.getCurrentExecutionAttempt().getAttemptId(), abortedCheckpointId, new CheckpointException(CHECKPOINT_EXPIRED)), "localhost"); assertTrue(result.isCompletedExceptionally()); assertEquals(completedCheckpointId, reportedCheckpointId.get()); } @Test public void testBaseLocationsNotInitialized() throws Exception { File checkpointDir = tmpFolder.newFolder(); JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .setTransitToRunning(false) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setExecutionGraph(graph) .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setCheckpointInterval(Long.MAX_VALUE) .build()) .setCheckpointStorage(new FsStateBackend(checkpointDir.toURI())) .build(); Path jobCheckpointPath = new Path(checkpointDir.getAbsolutePath(), 
graph.getJobID().toString()); FileSystem fs = FileSystem.get(checkpointDir.toURI()); // directory will not be created if checkpointing is disabled Assert.assertFalse(fs.exists(jobCheckpointPath)); } private CheckpointCoordinator getCheckpointCoordinator(ExecutionGraph graph) throws Exception { return new CheckpointCoordinatorBuilder() .setExecutionGraph(graph) .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setAlignedCheckpointTimeout(Long.MAX_VALUE) .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .build(); } private CheckpointCoordinator getCheckpointCoordinator( ExecutionGraph graph, CheckpointFailureManager failureManager) throws Exception { return new CheckpointCoordinatorBuilder() .setExecutionGraph(graph) .setTimer(manuallyTriggeredScheduledExecutor) .setFailureManager(failureManager) .build(); } private CheckpointCoordinator getCheckpointCoordinator(ScheduledExecutor timer) throws Exception { ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(new JobVertexID()) .addJobVertex(new JobVertexID()) .build(); // set up the coordinator and validate the initial state return new CheckpointCoordinatorBuilder().setExecutionGraph(graph).setTimer(timer).build(); } private CheckpointFailureManager getCheckpointFailureManager(String errorMsg) { return new CheckpointFailureManager( 0, new CheckpointFailureManager.FailJobCallback() { @Override public void failJob(Throwable cause) { throw new RuntimeException(errorMsg); } @Override public void failJobDueToTaskFailure( Throwable cause, ExecutionAttemptID failingTask) { throw new RuntimeException(errorMsg); } }); } private PendingCheckpoint declineSynchronousSavepoint( final JobID jobId, final CheckpointCoordinator coordinator, final ExecutionAttemptID attemptID, final Throwable reason) { final long checkpointId = 
coordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); final PendingCheckpoint checkpoint = coordinator.getPendingCheckpoints().get(checkpointId); coordinator.receiveDeclineMessage( new DeclineCheckpoint( jobId, attemptID, checkpointId, new CheckpointException(CHECKPOINT_DECLINED, reason)), TASK_MANAGER_LOCATION_INFO); return checkpoint; } private void performIncrementalCheckpoint( JobID jobId, CheckpointCoordinator checkpointCoordinator, ExecutionJobVertex jobVertex1, List<KeyGroupRange> keyGroupPartitions1, int cpSequenceNumber) throws Exception { // trigger the checkpoint checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(1, checkpointCoordinator.getPendingCheckpoints().size()); long checkpointId = Iterables.getOnlyElement(checkpointCoordinator.getPendingCheckpoints().keySet()); for (int index = 0; index < jobVertex1.getParallelism(); index++) { KeyGroupRange keyGroupRange = keyGroupPartitions1.get(index); Map<StateHandleID, StreamStateHandle> privateState = new HashMap<>(); privateState.put( new StateHandleID("private-1"), spy(new ByteStreamStateHandle("private-1", new byte[] {'p'}))); Map<StateHandleID, StreamStateHandle> sharedState = new HashMap<>(); // let all but the first CP overlap by one shared state. 
if (cpSequenceNumber > 0) { sharedState.put( new StateHandleID("shared-" + (cpSequenceNumber - 1)), spy(new PlaceholderStreamStateHandle())); } sharedState.put( new StateHandleID("shared-" + cpSequenceNumber), spy( new ByteStreamStateHandle( "shared-" + cpSequenceNumber + "-" + keyGroupRange, new byte[] {'s'}))); IncrementalRemoteKeyedStateHandle managedState = spy( new IncrementalRemoteKeyedStateHandle( new UUID(42L, 42L), keyGroupRange, checkpointId, sharedState, privateState, spy(new ByteStreamStateHandle("meta", new byte[] {'m'})))); OperatorSubtaskState operatorSubtaskState = spy(OperatorSubtaskState.builder().setManagedKeyedState(managedState).build()); Map<OperatorID, OperatorSubtaskState> opStates = new HashMap<>(); opStates.put( jobVertex1.getOperatorIDs().get(0).getGeneratedOperatorID(), operatorSubtaskState); TaskStateSnapshot taskStateSnapshot = new TaskStateSnapshot(opStates); AcknowledgeCheckpoint acknowledgeCheckpoint = new AcknowledgeCheckpoint( jobId, jobVertex1 .getTaskVertices()[index] .getCurrentExecutionAttempt() .getAttemptId(), checkpointId, new CheckpointMetrics(), taskStateSnapshot); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint, TASK_MANAGER_LOCATION_INFO); } } private static class IOExceptionCheckpointStorage extends JobManagerCheckpointStorage { @Override public CheckpointStorageAccess createCheckpointStorage(JobID jobId) throws IOException { return new MemoryBackendCheckpointStorageAccess(jobId, null, null, 100) { @Override public CheckpointStorageLocation initializeLocationForCheckpoint(long checkpointId) throws IOException { throw new IOException("disk is error!"); } }; } } private static class StoppingCheckpointIDCounter extends CheckpointIDCounterWithOwner { @Override public long getAndIncrement() throws Exception { checkNotNull(owner); owner.stopCheckpointScheduler(); return super.getAndIncrement(); } } private static class CheckpointIDCounterWithOwner extends StandaloneCheckpointIDCounter { protected 
        CheckpointCoordinator owner; // coordinator under test; assigned after construction to break the builder cycle

        // Registers the coordinator that owns this counter; rejects null.
        void setOwner(CheckpointCoordinator coordinator) {
            this.owner = checkNotNull(coordinator);
        }
    }

    /** FailJobCallback stub that only counts how many times either failure callback was invoked. */
    private static class TestFailJobCallback implements CheckpointFailureManager.FailJobCallback {

        private int invokeCounter = 0; // incremented by both callback variants below

        @Override
        public void failJob(Throwable cause) {
            invokeCounter++;
        }

        @Override
        public void failJobDueToTaskFailure(
                final Throwable cause, final ExecutionAttemptID executionAttemptID) {
            invokeCounter++;
        }

        /** Total number of failJob / failJobDueToTaskFailure invocations observed. */
        public int getInvokeCounter() {
            return invokeCounter;
        }
    }

    /**
     * Master hook that records whether {@link #reset()} was called. All other hook methods throw,
     * because the tests using this hook never trigger them.
     */
    private static class TestResetHook implements MasterTriggerRestoreHook<String> {

        private final String id;
        boolean resetCalled; // flipped to true by reset(); asserted directly by tests

        TestResetHook(String id) {
            this.id = id;
            this.resetCalled = false;
        }

        @Override
        public String getIdentifier() {
            return id;
        }

        @Override
        public void reset() throws Exception {
            resetCalled = true;
        }

        @Override
        public CompletableFuture<String> triggerCheckpoint(
                long checkpointId, long timestamp, Executor executor) {
            throw new UnsupportedOperationException();
        }

        @Override
        public void restoreCheckpoint(long checkpointId, @Nullable String checkpointData)
                throws Exception {
            throw new UnsupportedOperationException();
        }

        @Override
        public SimpleVersionedSerializer<String> createCheckpointDataSerializer() {
            throw new UnsupportedOperationException();
        }
    }
}
/* * Certain versions of software and/or documents ("Material") accessible here may contain branding from * Hewlett-Packard Company (now HP Inc.) and Hewlett Packard Enterprise Company. As of September 1, 2017, * the Material is now offered by Micro Focus, a separately owned and operated company. Any reference to the HP * and Hewlett Packard Enterprise/HPE marks is historical in nature, and the HP and Hewlett Packard Enterprise/HPE * marks are the property of their respective owners. * __________________________________________________________________ * MIT License * * (c) Copyright 2012-2021 Micro Focus or one of its affiliates. * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated * documentation files (the "Software"), to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, * and to permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or * substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO * THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
* * ___________________________________________________________________ */ package com.microfocus.application.automation.tools.octane.tests.detection; /** * Class describing metadata of executed tests for test pushing to Octane */ public class ResultFields { private String framework; private String testingTool; private String testLevel; private String testType; public ResultFields() { } public ResultFields(final String framework, final String testingTool, final String testLevel) { this(framework, testingTool, testLevel, null); } public ResultFields(final String framework, final String testingTool, final String testLevel, final String testType) { this.framework = framework; this.testingTool = testingTool; this.testLevel = testLevel; this.testType = testType; } public String getFramework() { return framework; } public String getTestingTool() { return testingTool; } public String getTestLevel() { return testLevel; } public void setFramework(final String framework) { this.framework = framework; } public void setTestLevel(final String testLevel) { this.testLevel = testLevel; } public void setTestingTool(final String testingTool) { this.testingTool = testingTool; } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } final ResultFields that = (ResultFields) o; if (framework != null ? !framework.equals(that.framework) : that.framework != null) { return false; } if (testingTool != null ? !testingTool.equals(that.testingTool) : that.testingTool != null) { return false; } if (testType != null ? !testType.equals(that.testType) : that.testType != null) { return false; } return !(testLevel != null ? !testLevel.equals(that.testLevel) : that.testLevel != null); } @Override public int hashCode() { int result = framework != null ? framework.hashCode() : 0; result = 31 * result + (testingTool != null ? testingTool.hashCode() : 0); result = 31 * result + (testLevel != null ? 
testLevel.hashCode() : 0); result = 31 * result + (testType != null ? testType.hashCode() : 0); return result; } public String getTestType() { return testType; } public void setTestType(String testType) { this.testType = testType; } }
package net.paulek.antylogout.utils;

import net.minecraft.server.v1_12_R1.ChatMessageType;
import net.minecraft.server.v1_12_R1.IChatBaseComponent;
import net.minecraft.server.v1_12_R1.PacketPlayOutChat;
import org.bukkit.craftbukkit.v1_12_R1.entity.CraftPlayer;
import org.bukkit.entity.Player;

/**
 * Helper for sending action-bar messages through raw NMS packets.
 * Bound to the v1_12_R1 mappings via the imports above, so it only works on 1.12 servers.
 */
public class CreateText {

	/**
	 * Sends {@code text} to {@code player} as a GAME_INFO chat packet (the text displayed
	 * above the hotbar). The text is serialized as a raw chat-component JSON string and the
	 * packet is written directly to the player's connection.
	 *
	 * NOTE(review): {@code text} is concatenated into the JSON without any escaping — a
	 * message containing quotes or backslashes would produce malformed JSON; confirm callers
	 * only pass plain text.
	 */
	public static void sendPacket(Player player, String text){
		IChatBaseComponent iChatBaseComponent = IChatBaseComponent.ChatSerializer.a("{\"text\":\""+text+"\"}");
		PacketPlayOutChat packetPlayOutChat = new PacketPlayOutChat(iChatBaseComponent, ChatMessageType.GAME_INFO);
		((CraftPlayer)player).getHandle().playerConnection.sendPacket(packetPlayOutChat);
	}
}
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.ads.googleads.v1.services.stub;

import com.google.ads.googleads.v1.resources.RemarketingAction;
import com.google.ads.googleads.v1.services.GetRemarketingActionRequest;
import com.google.ads.googleads.v1.services.MutateRemarketingActionsRequest;
import com.google.ads.googleads.v1.services.MutateRemarketingActionsResponse;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.UnaryCallable;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
 * Base stub class for Google Ads API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@Generated("by gapic-generator")
@BetaApi("A restructuring of stub classes is planned, so this may break in the future")
public abstract class RemarketingActionServiceStub implements BackgroundResource {

  // Each callable throws until overridden by a concrete (e.g. gRPC) stub implementation.

  /** Callable for fetching a single remarketing action; not implemented in this base stub. */
  public UnaryCallable<GetRemarketingActionRequest, RemarketingAction>
      getRemarketingActionCallable() {
    throw new UnsupportedOperationException("Not implemented: getRemarketingActionCallable()");
  }

  /** Callable for creating/updating remarketing actions; not implemented in this base stub. */
  public UnaryCallable<MutateRemarketingActionsRequest, MutateRemarketingActionsResponse>
      mutateRemarketingActionsCallable() {
    throw new UnsupportedOperationException("Not implemented: mutateRemarketingActionsCallable()");
  }

  // Subclasses must release their transport resources here (from BackgroundResource).
  @Override
  public abstract void close();
}
package com.atguigu.gmall.ums.service;

import com.baomidou.mybatisplus.extension.service.IService;
import com.atguigu.gmall.ums.entity.MemberCollectSpuEntity;
import com.atguigu.core.bean.PageVo;
import com.atguigu.core.bean.QueryCondition;

/**
 * Service for products (SPUs) collected/favorited by a member.
 * (Original javadoc: 会员收藏的商品 — "products collected by a member".)
 *
 * @author lixianfeng
 * @email lxf@atguigu.com
 * @date 2019-12-03 18:25:04
 */
public interface MemberCollectSpuService extends IService<MemberCollectSpuEntity> {

    /**
     * Returns one page of member-collected-SPU records matching the given condition.
     *
     * @param params paging and filter parameters for the query
     * @return the requested page of results
     */
    PageVo queryPage(QueryCondition params);
}
package jakemarsden.opengl.engine.light;

import jakemarsden.opengl.engine.math.Color3;
import jakemarsden.opengl.engine.math.Vector3;
import org.checkerframework.checker.nullness.qual.NonNull;

/**
 * A light source which casts a cone of light from a fixed point along a fixed direction.
 *
 * <p>Light is cast at full intensity inside the inner cone (see {@link #getAperture()}),
 * diminishes between the inner and outer cones, and is absent outside the outer cone (see
 * {@link #getOuterAperture()}). Ambient/diffuse/specular colors are held by the {@link Light}
 * superclass.
 */
public final class SpotLight extends Light {

  /** Apex of the cone of light cast by this spotlight. */
  private @NonNull Vector3 pos;

  /** Points along the axis of the cone of light cast by this spotlight. */
  private @NonNull Vector3 dir;

  /** Aperture (angle between opposite "sides") of the inner, full-intensity cone. */
  private float aperture;

  /** Aperture of the outer cone; no light is cast beyond it. */
  private float outerAperture;

  /**
   * @param pos apex of the cone of light
   * @param dir axis the cone points along
   * @param aperture aperture of the inner (100% intensity) cone
   * @param outerAperture aperture of the outer (0% intensity) cone
   * @param ambient ambient color contribution
   * @param diffuse diffuse color contribution
   * @param specular specular color contribution
   */
  public SpotLight(
      @NonNull Vector3 pos,
      @NonNull Vector3 dir,
      float aperture,
      float outerAperture,
      @NonNull Color3 ambient,
      @NonNull Color3 diffuse,
      @NonNull Color3 specular) {

    super(ambient, diffuse, specular);
    this.pos = pos;
    this.dir = dir;
    this.aperture = aperture;
    this.outerAperture = outerAperture;
  }

  public @NonNull Vector3 getPosition() {
    return pos;
  }

  public void setPosition(@NonNull Vector3 pos) {
    this.pos = pos;
  }

  public @NonNull Vector3 getDirection() {
    return dir;
  }

  public void setDirection(@NonNull Vector3 dir) {
    this.dir = dir;
  }

  public float getAperture() {
    return this.aperture;
  }

  public void setAperture(float aperture) {
    this.aperture = aperture;
  }

  public float getOuterAperture() {
    return this.outerAperture;
  }

  public void setOuterAperture(float outerAperture) {
    this.outerAperture = outerAperture;
  }
}
/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 4.1 */
/* JavaCCOptions: */
package net.sourceforge.pmd.lang.ast;

/** Token Manager Error. */
public class TokenMgrError extends RuntimeException
{
   /*
    * Ordinals for various reasons why an Error of this type can be thrown.
    */

   /**
    * Lexical error occurred.
    */
   public static final int LEXICAL_ERROR = 0;

   /**
    * An attempt was made to create a second instance of a static token manager.
    */
   public static final int STATIC_LEXER_ERROR = 1;

   /**
    * Tried to change to an invalid lexical state.
    */
   public static final int INVALID_LEXICAL_STATE = 2;

   /**
    * Detected (and bailed out of) an infinite loop in the token manager.
    */
   public static final int LOOP_DETECTED = 3;

   /**
    * Indicates the reason why the exception is thrown. It will have
    * one of the above 4 values.
    */
   int errorCode;

   /**
    * Replaces unprintable characters by their escaped (or unicode escaped)
    * equivalents in the given string.
    * Note: NUL (0) characters are dropped entirely rather than escaped.
    */
   protected static final String addEscapes(String str) {
      StringBuffer retval = new StringBuffer();
      char ch;
      for (int i = 0; i < str.length(); i++) {
        switch (str.charAt(i))
        {
           case 0 :
              // NUL is skipped: nothing appended for this character
              continue;
           case '\b':
              retval.append("\\b");
              continue;
           case '\t':
              retval.append("\\t");
              continue;
           case '\n':
              retval.append("\\n");
              continue;
           case '\f':
              retval.append("\\f");
              continue;
           case '\r':
              retval.append("\\r");
              continue;
           case '\"':
              retval.append("\\\"");
              continue;
           case '\'':
              retval.append("\\\'");
              continue;
           case '\\':
              retval.append("\\\\");
              continue;
           default:
              // anything outside printable ASCII becomes a \-u escape padded to 4 hex digits
              if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
                 String s = "0000" + Integer.toString(ch, 16);
                 retval.append("\\u" + s.substring(s.length() - 4, s.length()));
              } else {
                 retval.append(ch);
              }
              continue;
        }
      }
      return retval.toString();
   }

   /**
    * Returns a detailed message for the Error when it is thrown by the
    * token manager to indicate a lexical error.
    * Parameters :
    *    EOFSeen     : indicates if EOF caused the lexical error
    *    curLexState : lexical state in which this error occurred
    *    errorLine   : line number when the error occurred
    *    errorColumn : column number when the error occurred
    *    errorAfter  : prefix that was seen before this error occurred
    *    curchar     : the offending character
    * Note: You can customize the lexical error message by modifying this method.
    */
   protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) {
      // The file name comes from the shared AbstractTokenManager; the offending character is
      // shown both escaped and as its numeric code point.
      return("Lexical error in file " + AbstractTokenManager.getFileName() + " at line " + errorLine + ", column " + errorColumn + ". Encountered: " + (EOFSeen ? "<EOF> " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") + "after : \"" + addEscapes(errorAfter) + "\"");
   }

   /**
    * You can also modify the body of this method to customize your error messages.
    * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
    * of end-users concern, so you can return something like :
    *
    *     "Internal Error : Please file a bug report .... "
    *
    * from this method for such cases in the release version of your parser.
    */
   public String getMessage() {
      return super.getMessage();
   }

   /*
    * Constructors of various flavors follow.
    */

   /** No arg constructor. */
   public TokenMgrError() {
   }

   /** Constructor with message and reason. */
   public TokenMgrError(String message, int reason) {
      super(message);
      errorCode = reason;
   }

   /** Full Constructor. Builds the message via LexicalError and records the reason code. */
   public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) {
      this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
   }
}
/* JavaCC - OriginalChecksum=887ef629387efaf1cf31f0dc7c077049 (do not edit this line) */
package smalltalk.compiler;

import org.antlr.symtab.Scope;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import smalltalk.misc.Utils;
import smalltalk.vm.Bytecode;

import java.util.ArrayList;
import java.util.List;

/**
 * Smalltalk compiler front end: parses source, defines and resolves symbols,
 * then drives {@link CodeGenerator} to emit bytecode into the symbol table.
 */
public class Compiler {
	protected final STSymbolTable symtab;
	/** Accumulated compile errors; inspect after {@link #compile}. */
	public final List<String> errors = new ArrayList<>();
	protected SmalltalkParser parser;
	protected CommonTokenStream tokens;
	protected SmalltalkParser.FileContext fileTree;
	protected String fileName;
	public boolean genDbg; // generate dbg file,line instructions
	protected ParseTreeWalker walker;

	public Compiler() {
		symtab = new STSymbolTable();
		fileName = "<unknown>";
	}

	public Compiler(STSymbolTable symtab) {
		this.symtab = symtab;
		fileName = "<string>";
	}

	/**
	 * Compile all classes in the given input stream into {@link #symtab}.
	 *
	 * @param input ANTLR input; if it carries a name, only its base file name
	 *              is remembered for error reporting / dbg instructions
	 * @return the populated symbol table
	 */
	public STSymbolTable compile(ANTLRInputStream input) {
		// parse class(es)
		fileTree = parseClasses(input);
		if ( null != input.name ) {
			fileName = input.name;
			// keep just the base name, not the whole path
			fileName = fileName.substring(fileName.lastIndexOf('/')+1);
		}
		// define symbols
		defSymbols(fileTree);
		// resolve symbols
		resolveSymbols(fileTree);
		// gen code
		CodeGenerator generator = new CodeGenerator(this);
		generator.visitFile(fileTree);
		return symtab;
	}

	// Convenience factories for bytecode instructions used by the code generator.
	// These mirror the VM's instruction set: nil/self/true/false pushes, typed
	// literal pushes, field/local/global access, sends, blocks and returns.

	public static Code push_nil()               { return Code.of(Bytecode.NIL); }
	public static Code push_self()              { return Code.of(Bytecode.SELF); }
	public static Code push_true()              { return Code.of(Bytecode.TRUE); }
	public static Code push_false()             { return Code.of(Bytecode.FALSE); }
	public static Code push_char(int c)         { return Code.of(Bytecode.PUSH_CHAR).join(Utils.shortToBytes(c)); }
	public static Code push_int(int v)          { return Code.of(Bytecode.PUSH_INT).join(Utils.intToBytes(v)); }
	public static Code push_float(float v)      { return Code.of(Bytecode.PUSH_FLOAT).join(Utils.floatToBytes(v)); }
	public static Code push_field(int v)        { return Code.of(Bytecode.PUSH_FIELD).join(Utils.shortToBytes(v)); }
	public static Code push_local(int v1, int v2){ return Code.of(Bytecode.PUSH_LOCAL).join(Utils.shortToBytes(v1).join(Utils.shortToBytes(v2))); }
	public static Code push_literal(int v)      { return Code.of(Bytecode.PUSH_LITERAL).join(Utils.toLiteral(v)); }
	public static Code push_global(int v)       { return Code.of(Bytecode.PUSH_GLOBAL).join(Utils.toLiteral(v)); }
	public static Code push_array(int v)        { return Code.of(Bytecode.PUSH_ARRAY).join(Utils.shortToBytes(v)); }
	public static Code store_field(int v)       { return Code.of(Bytecode.STORE_FIELD).join(Utils.shortToBytes(v)); }
	public static Code store_local(int v1, int v2){ return Code.of(Bytecode.STORE_LOCAL).join(Utils.shortToBytes(v1)).join(Utils.shortToBytes(v2)); }
	public static Code pop()                    { return Code.of(Bytecode.POP); }
	public static Code send(int s, int i)       { return Code.of(Bytecode.SEND).join(Utils.shortToBytes(s)).join(Utils.toLiteral(i)); }
	public static Code send_super(int s, int i) { return Code.of(Bytecode.SEND_SUPER).join(Utils.shortToBytes(s)).join(Utils.toLiteral(i)); }
	public static Code block(short v)           { return Code.of(Bytecode.BLOCK).join(Utils.shortToBytes(v)); }
	public static Code block_return()           { return Code.of(Bytecode.BLOCK_RETURN); }
	public static Code method_return()          { return Code.of(Bytecode.RETURN); }

	/** Emit a DBG instruction carrying the file-name literal and packed line/char position. */
	public static Code dbg(int literalIndex, int line, int charPos) {
		return Code.of(Bytecode.DBG).join(Utils.toLiteral(literalIndex).join(Utils.intToBytes(Bytecode.combineLineCharPos(line,charPos))));
	}

	// Error support

	/** Record a compile error; compilation continues so all errors are collected. */
	public void error(String msg) {
		errors.add(msg);
	}

	public String getFileName() {
		return fileName;
	}

	/** Define each instance-variable name as an STField of cl; duplicates are reported, not thrown. */
	public void defineFields(STClass cl, List<String> instanceVars) {
		if ( null != instanceVars ) {
			for (String varStr : instanceVars) {
				try {
					cl.define(new STField(varStr));
				}
				catch (IllegalArgumentException e) {
					error("redefinition of "+varStr+" in "+cl.toQualifierString(">>"));
				}
			}
		}
	}

	/** Define each argument name as an STArg of the block; duplicates are reported, not thrown. */
	public void defineArguments(STBlock stBlock, List<String> args) {
		if ( null != args ) {
			for (String argStr : args) {
				try {
					stBlock.define(new STArg(argStr));
				}
				catch (IllegalArgumentException e) {
					error("redefinition of "+argStr+" in "+stBlock.toQualifierString(">>"));
				}
			}
		}
	}

	/** Define each local-variable name as an STVariable of the scope; duplicates are reported, not thrown. */
	public void defineLocals(Scope currentScope, List<String> vars) {
		if ( null != vars ) {
			for (String varStr : vars) {
				try {
					currentScope.define(new STVariable(varStr));
				}
				catch (IllegalArgumentException e) {
					error("redefinition of "+varStr+" in "+currentScope.toQualifierString(">>"));
				}
			}
		}
	}

	public STMethod createMethod(String methodName, SmalltalkParser.MethodContext ctx) {
		return new STMethod(methodName, ctx);
	}

	public STMethod createMethod(String main, SmalltalkParser.MainContext ctx) {
		return new STMethod(main, ctx);
	}

	public STMethod createPrimitiveMethod(STClass stClass, String selector, String primitiveName,
	                                      SmalltalkParser.MethodContext methodNode) {
		return new STPrimitiveMethod(selector, methodNode, primitiveName);
	}

	public STBlock createBlock(STMethod currentBlock, SmalltalkParser.BlockContext ctx) {
		return new STBlock(currentBlock, ctx);
	}

	/** Lex and parse the input, caching the parser, token stream and parse tree. */
	public SmalltalkParser.FileContext parseClasses(ANTLRInputStream antlrInputStream) {
		Lexer l = new SmalltalkLexer(antlrInputStream);
		tokens = new CommonTokenStream(l);
		parser = new SmalltalkParser(tokens);
		fileTree = parser.file();
		return fileTree;
	}

	/** Walk the tree with the symbol-defining listener (pass 1). */
	public void defSymbols(ParserRuleContext tree) {
		walker = new ParseTreeWalker();
		walker.walk(new DefineSymbols(this), tree);
	}

	/** Walk the tree with the symbol-resolving listener (pass 2). */
	public void resolveSymbols(ParserRuleContext tree) {
		walker = new ParseTreeWalker();
		walker.walk(new ResolveSymbols(this), tree);
	}
}
// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * RequestPlatformTargetingErrorReason.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.admanager.axis.v202005; public class RequestPlatformTargetingErrorReason implements java.io.Serializable { private java.lang.String _value_; private static java.util.HashMap _table_ = new java.util.HashMap(); // Constructor protected RequestPlatformTargetingErrorReason(java.lang.String value) { _value_ = value; _table_.put(_value_,this); } public static final java.lang.String _REQUEST_PLATFORM_TYPE_NOT_SUPPORTED_BY_LINE_ITEM_TYPE = "REQUEST_PLATFORM_TYPE_NOT_SUPPORTED_BY_LINE_ITEM_TYPE"; public static final java.lang.String _UNKNOWN = "UNKNOWN"; public static final RequestPlatformTargetingErrorReason REQUEST_PLATFORM_TYPE_NOT_SUPPORTED_BY_LINE_ITEM_TYPE = new RequestPlatformTargetingErrorReason(_REQUEST_PLATFORM_TYPE_NOT_SUPPORTED_BY_LINE_ITEM_TYPE); public static final RequestPlatformTargetingErrorReason UNKNOWN = new RequestPlatformTargetingErrorReason(_UNKNOWN); public java.lang.String getValue() { return _value_;} public static RequestPlatformTargetingErrorReason fromValue(java.lang.String value) throws java.lang.IllegalArgumentException { RequestPlatformTargetingErrorReason enumeration = (RequestPlatformTargetingErrorReason) _table_.get(value); if (enumeration==null) throw new 
java.lang.IllegalArgumentException(); return enumeration; } public static RequestPlatformTargetingErrorReason fromString(java.lang.String value) throws java.lang.IllegalArgumentException { return fromValue(value); } public boolean equals(java.lang.Object obj) {return (obj == this);} public int hashCode() { return toString().hashCode();} public java.lang.String toString() { return _value_;} public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);} public static org.apache.axis.encoding.Serializer getSerializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.EnumSerializer( _javaType, _xmlType); } public static org.apache.axis.encoding.Deserializer getDeserializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.EnumDeserializer( _javaType, _xmlType); } // Type metadata private static org.apache.axis.description.TypeDesc typeDesc = new org.apache.axis.description.TypeDesc(RequestPlatformTargetingErrorReason.class); static { typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202005", "RequestPlatformTargetingError.Reason")); } /** * Return type metadata object */ public static org.apache.axis.description.TypeDesc getTypeDesc() { return typeDesc; } }
/*
 * Copyright 2016 Alex Andres
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.lecturestudio.stylus.demo.render;

import org.lecturestudio.stylus.demo.model.Shape;

/**
 * Renders a single {@link Shape} onto a rendering context.
 *
 * <p>Single-abstract-method interface, so implementations may be supplied as
 * lambdas or method references; {@code @FunctionalInterface} makes that
 * contract explicit and compiler-checked.
 *
 * @param <S> the concrete shape type this renderer handles
 * @param <T> the rendering-context type drawn onto
 */
@FunctionalInterface
public interface RenderSurfaceRenderer<S extends Shape, T> {

	/**
	 * Renders the given shape onto the given context.
	 *
	 * @param shape the shape to draw
	 * @param renderContext the target context to draw onto
	 */
	void render(S shape, T renderContext);

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db.index.keys;

import java.nio.ByteBuffer;
import java.util.Set;

import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.index.SecondaryIndex;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.db.index.AbstractSimplePerColumnSecondaryIndex;
import org.apache.cassandra.db.index.SecondaryIndexSearcher;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.utils.concurrent.OpOrder;

/**
 * Implements a secondary index for a column family using a second column family.
 * The design uses inverted index http://en.wikipedia.org/wiki/Inverted_index.
 * The row key is the indexed value. For example, if we're indexing a column named
 * city, the index value of city is the row key.
 * The column names are the keys of the records. To see a detailed example, please
 * refer to wikipedia.
 */
public class KeysIndex extends AbstractSimplePerColumnSecondaryIndex
{
    // The index table is clustered by the base table's partition key: index row key
    // is the indexed value, and each entry points back at a base partition.
    public static void addIndexClusteringColumns(CFMetaData.Builder indexMetadata, CFMetaData baseMetadata, ColumnDefinition cfDef)
    {
        indexMetadata.addClusteringColumn("partition_key", SecondaryIndex.keyComparator);
    }

    @Override
    public void indexRow(DecoratedKey key, Row row, OpOrder.Group opGroup, int nowInSec)
    {
        super.indexRow(key, row, opGroup, nowInSec);

        // This is used when building indexes, in particular when the index is first created. On thrift, this
        // potentially means the column definition just got created, and so we need to check if it's not a "dynamic"
        // row that actually corresponds to the index definition.
        assert baseCfs.metadata.isCompactTable();
        if (!row.isStatic())
        {
            Clustering clustering = row.clustering();
            // Only index the cell whose (dynamic) name equals the indexed column's name.
            if (clustering.get(0).equals(columnDef.name.bytes))
            {
                Cell cell = row.getCell(baseCfs.metadata.compactValueColumn());
                // Skip tombstoned/expired cells; only live data is inserted into the index.
                if (cell != null && cell.isLive(nowInSec))
                    insert(key.getKey(), clustering, cell, opGroup);
            }
        }
    }

    // For a KEYS index the index row key is simply the indexed cell's value.
    protected ByteBuffer getIndexedValue(ByteBuffer rowKey, Clustering clustering, ByteBuffer cellValue, CellPath path)
    {
        return cellValue;
    }

    // The index entry's clustering is the base row's partition key (see
    // addIndexClusteringColumns above); the base clustering/path are not used here.
    protected CBuilder buildIndexClusteringPrefix(ByteBuffer rowKey, ClusteringPrefix prefix, CellPath path)
    {
        CBuilder builder = CBuilder.create(getIndexComparator());
        builder.add(rowKey);
        return builder;
    }

    public SecondaryIndexSearcher createSecondaryIndexSearcher(Set<ColumnDefinition> columns)
    {
        return new KeysSearcher(baseCfs.indexManager, columns);
    }

    public void validateOptions() throws ConfigurationException
    {
        // no options used
    }
}
package com.fasterxml.jackson.databind.jsontype.deftyping;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.node.ObjectNode;

/**
 * Tests verifying that default typing preserves the concrete runtime type of
 * array values (including nested arrays, primitive arrays and tree nodes)
 * across a serialize/deserialize round trip.
 */
public class TestDefaultForArrays extends BaseMapTest
{
    static class ArrayBean {
        public Object[] values;

        public ArrayBean() { this(null); }
        public ArrayBean(Object[] v) { values = v; }
    }

    static class PrimitiveArrayBean {
        @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
        public Object stuff;

        protected PrimitiveArrayBean() { }
        public PrimitiveArrayBean(Object value) { stuff = value; }
    }

    /*
    /**********************************************************
    /* Unit tests
    /**********************************************************
     */

    /**
     * Simple unit test for verifying that we get String array
     * back, even though declared type is Object array
     */
    public void testArrayTypingSimple() throws Exception
    {
        final ObjectMapper m = jsonMapperBuilder()
                .enableDefaultTyping(DefaultTyping.NON_CONCRETE_AND_ARRAYS)
                .build();
        ArrayBean input = new ArrayBean(new String[0]);
        ArrayBean output = m.readValue(m.writeValueAsString(input), ArrayBean.class);
        assertNotNull(output.values);
        assertEquals(String[].class, output.values.getClass());
    }

    // And let's try it with deeper array as well
    public void testArrayTypingNested() throws Exception
    {
        final ObjectMapper m = jsonMapperBuilder()
                .enableDefaultTyping(DefaultTyping.NON_CONCRETE_AND_ARRAYS)
                .build();
        ArrayBean input = new ArrayBean(new String[0][0]);
        ArrayBean output = m.readValue(m.writeValueAsString(input), ArrayBean.class);
        assertNotNull(output.values);
        assertEquals(String[][].class, output.values.getClass());
    }

    public void testNodeInArray() throws Exception
    {
        final ObjectMapper m = jsonMapperBuilder()
                .enableDefaultTyping(DefaultTyping.JAVA_LANG_OBJECT)
                .build();
        JsonNode tree = objectMapper().readTree("{\"a\":3}");
        Object[] back = m.readValue(m.writeValueAsString(new Object[] { tree }),
                Object[].class);
        assertEquals(1, back.length);
        assertTrue(back[0] instanceof JsonNode);
    }

    @SuppressWarnings("deprecation")
    public void testNodeInEmptyArray() throws Exception
    {
        Map<String, List<String>> outerMap = new HashMap<String, List<String>>();
        outerMap.put("inner", new ArrayList<String>());
        ObjectMapper vanillaMapper = jsonMapperBuilder()
                .disable(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS)
                .build();
        // With empty-array writes disabled, the inner list vanishes entirely:
        JsonNode tree = vanillaMapper.convertValue(outerMap, JsonNode.class);
        assertEquals("{}", vanillaMapper.writeValueAsString(tree));

        JsonNode emptyArrayNode = vanillaMapper.readTree("{\"a\":[]}");
        ObjectMapper typingMapper = vanillaMapper.rebuild()
                .enableDefaultTyping(DefaultTyping.JAVA_LANG_OBJECT)
                .build();
        Object[] back = typingMapper.readValue(
                typingMapper.writeValueAsString(new Object[] { emptyArrayNode }),
                Object[].class);
        assertEquals(1, back.length);
        assertTrue(back[0] instanceof ObjectNode);
        assertEquals(0, ((ObjectNode) back[0]).size());
    }

    public void testArraysOfArrays() throws Exception
    {
        final ObjectMapper m = jsonMapperBuilder()
                .enableDefaultTyping(DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY)
                .build();
        String json = m.writeValueAsString(new Object[][] { new Object[] {} });

        // try with different (but valid) nominal types:
        _testArraysAs(m, json, Object[][].class);
        _testArraysAs(m, json, Object[].class);
        _testArraysAs(m, json, Object.class);
    }

    public void testArrayTypingForPrimitiveArrays() throws Exception
    {
        final ObjectMapper m = jsonMapperBuilder()
                .enableDefaultTyping(DefaultTyping.NON_CONCRETE_AND_ARRAYS)
                .build();
        _testArrayTypingForPrimitiveArrays(m, new int[] { 1, 2, 3 });
        _testArrayTypingForPrimitiveArrays(m, new long[] { 1, 2, 3 });
        _testArrayTypingForPrimitiveArrays(m, new short[] { 1, 2, 3 });
        _testArrayTypingForPrimitiveArrays(m, new double[] { 0.5, 5.5, -1.0 });
        _testArrayTypingForPrimitiveArrays(m, new float[] { 0.5f, 5.5f, -1.0f });
        _testArrayTypingForPrimitiveArrays(m, new boolean[] { true, false });
        _testArrayTypingForPrimitiveArrays(m, new byte[] { 1, 2, 3 });
        _testArrayTypingForPrimitiveArrays(m, new char[] { 'a', 'b' });
    }

    // Round-trips a single primitive-array value and checks the exact runtime type survives.
    private void _testArrayTypingForPrimitiveArrays(ObjectMapper mapper, Object v) throws Exception
    {
        PrimitiveArrayBean roundTripped = mapper.readValue(
                mapper.writeValueAsString(new PrimitiveArrayBean(v)),
                PrimitiveArrayBean.class);
        assertNotNull(roundTripped.stuff);
        assertSame(v.getClass(), roundTripped.stuff.getClass());
    }

    /*
    /**********************************************************
    /* Helper methods
    /**********************************************************
     */

    // Deserializes the given JSON as `type` and verifies the shape: a one-element
    // outer Object[] whose single element is an empty Object[].
    protected void _testArraysAs(ObjectMapper mapper, String json, Class<?> type) throws Exception
    {
        Object deserialized = mapper.readValue(json, type);
        assertNotNull(deserialized);
        assertTrue(deserialized instanceof Object[]);
        Object[] outer = (Object[]) deserialized;
        assertEquals(1, outer.length);
        Object inner = outer[0];
        assertNotNull(inner);
        assertTrue(inner instanceof Object[]);
        assertEquals(0, ((Object[]) inner).length);
    }
}
/*
 * Copyright 2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.github.sergeivisotsky.metadata.selector.filtering.config;

import io.github.sergeivisotsky.metadata.selector.filtering.UrlViewQueryParser;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Spring configuration wiring up URL-based view query filtering.
 *
 * @author Sergei Visotsky
 */
@Configuration
public class FilteringConfig {

    /**
     * Exposes the {@link UrlViewQueryParser} as a singleton bean.
     * The bean name intentionally remains {@code queryFilterService}
     * so existing injection points keep resolving.
     */
    @Bean
    public UrlViewQueryParser queryFilterService() {
        UrlViewQueryParser parser = new UrlViewQueryParser();
        return parser;
    }
}
package br.com.zupacademy.erivelton.transacao.entidade;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.validation.constraints.NotBlank;

/**
 * JPA entity describing the merchant (establishment) at which a
 * transaction took place.
 */
@Entity
public class Estabelecimento {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    @NotBlank
    private String nome;

    @NotBlank
    private String cidade;

    @NotBlank
    private String endereco;

    /**
     * No-arg constructor required by the JPA provider only;
     * not intended for application use.
     */
    @Deprecated
    public Estabelecimento() {
    }

    public Estabelecimento(String nome, String cidade, String endereco) {
        this.nome = nome;
        this.cidade = cidade;
        this.endereco = endereco;
    }

    public String getNome() {
        return nome;
    }

    public String getCidade() {
        return cidade;
    }

    public String getEndereco() {
        return endereco;
    }
}
// Copyright © 2016-2022 Andy Goryachev <andy@goryachev.com> package goryachev.common.util; import goryachev.common.log.Log; import java.io.File; import java.io.FileNotFoundException; /** * File-based Settings Provider. */ public class FileSettingsProvider extends SettingsProviderBase { protected static final Log log = Log.get("FileSettingsProvider"); private File file; public FileSettingsProvider(File f) { setFile(f); } public void setFile(File f) { file = f; } public void save() { try { String s = asString(); CKit.write(file, s); } catch(Exception e) { log.error(e); } } public void load() throws Exception { try { String s = CKit.readString(file); loadFromString(s); } catch(FileNotFoundException ignore) { } } public void loadQuiet() { try { load(); } catch(Exception e) { log.error(e); } } public void load(File f) throws Exception { setFile(f); load(); } public void loadQuiet(File f) { try { load(f); } catch(Exception e) { log.error(e); } } }
package com.weimin.common.util;

/**
 * 2015-11-19 11:43:42 Format-validation helpers (names, phone numbers, e-mail).
 *
 * @author TJ
 */
public class ValidateUtil extends Validate {

	/**
	 * Real (display) name: 2 to 128 characters drawn from word characters,
	 * CJK ideographs, spaces and common punctuation (, . # @ \ - ( ) plus
	 * their full-width forms).
	 * NOTE(review): the original comment said "2 to 30 characters" but the
	 * regex allows {2,128}; the regex is kept authoritative here.
	 */
	public static final String REGX_REAL_NAME = "[,\\.#@\\\\\\-\\(\\)\\u0020\\w\\u4e00-\\u9fa5、,。()]{2,128}";

	/** Mainland-China mobile number: 11 digits starting with 13–19. */
	public static final String REGX_PHONE_NUMBER = "1[3-9]\\d{9}";

	/** Basic e-mail shape: local part, @, domain with one or two dot-separated labels. */
	public static final String REGX_EMAIL = "[\\w\\-]{2,}@[\\w\\-]+\\.[a-zA-Z]{2,}(\\.[a-zA-Z]{2,})?";

	// Precompiled patterns: String.matches() recompiles its regex on every call,
	// which is wasteful when validating many values. Pattern.matcher().matches()
	// anchors the whole input exactly like String.matches(), so behavior is
	// unchanged. Fully qualified to avoid touching the import section.
	private static final java.util.regex.Pattern REAL_NAME_PATTERN =
			java.util.regex.Pattern.compile(REGX_REAL_NAME);
	private static final java.util.regex.Pattern PHONE_NUMBER_PATTERN =
			java.util.regex.Pattern.compile(REGX_PHONE_NUMBER);
	private static final java.util.regex.Pattern EMAIL_PATTERN =
			java.util.regex.Pattern.compile(REGX_EMAIL);

	/**
	 * Whether the given string is a valid real name.
	 *
	 * @param name candidate name; must not be null
	 * @return true if the whole string matches {@link #REGX_REAL_NAME}
	 */
	public static boolean isValidRealName(String name) {
		return REAL_NAME_PATTERN.matcher(name).matches();
	}

	/**
	 * Whether the given string is a valid mobile phone number.
	 *
	 * @param phoneno candidate phone number; must not be null
	 * @return true if the whole string matches {@link #REGX_PHONE_NUMBER}
	 */
	public static boolean isValidPhoneNumber(String phoneno) {
		return PHONE_NUMBER_PATTERN.matcher(phoneno).matches();
	}

	/**
	 * Whether the given string is a valid e-mail address.
	 *
	 * @param email candidate address; must not be null
	 * @return true if the whole string matches {@link #REGX_EMAIL}
	 */
	public static boolean isValidEmail(String email) {
		return EMAIL_PATTERN.matcher(email).matches();
	}
}
/* * Copyright 2014 The Error Prone Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns.threadsafety; import static com.google.errorprone.matchers.method.MethodMatchers.instanceMethod; import com.google.auto.value.AutoValue; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.errorprone.VisitorState; import com.google.errorprone.annotations.concurrent.UnlockMethod; import com.google.errorprone.bugpatterns.threadsafety.GuardedByExpression.Kind; import com.google.errorprone.bugpatterns.threadsafety.GuardedByExpression.Select; import com.google.errorprone.matchers.Matcher; import com.google.errorprone.matchers.Matchers; import com.google.errorprone.util.ASTHelpers; import com.sun.source.tree.ExpressionTree; import com.sun.source.tree.IdentifierTree; import com.sun.source.tree.LambdaExpressionTree; import com.sun.source.tree.MemberSelectTree; import com.sun.source.tree.MethodInvocationTree; import com.sun.source.tree.MethodTree; import com.sun.source.tree.NewClassTree; import com.sun.source.tree.SynchronizedTree; import com.sun.source.tree.Tree; import com.sun.source.tree.TryTree; import com.sun.source.tree.VariableTree; import com.sun.source.util.TreePathScanner; import com.sun.source.util.TreeScanner; import com.sun.tools.javac.code.Symbol; import com.sun.tools.javac.code.Symbol.ClassSymbol; 
import com.sun.tools.javac.tree.JCTree; import com.sun.tools.javac.tree.JCTree.JCExpression; import com.sun.tools.javac.tree.JCTree.JCNewClass; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import javax.lang.model.element.Modifier; /** * A method body analyzer. Responsible for tracking the set of held locks, and checking accesses to * guarded members. * * @author cushon@google.com (Liam Miller-Cushon) */ public class HeldLockAnalyzer { /** Listener interface for accesses to guarded members. */ public interface LockEventListener { /** * Handles a guarded member access. * * @param tree The member access expression. * @param guard The member's guard expression. * @param locks The set of held locks. */ void handleGuardedAccess(ExpressionTree tree, GuardedByExpression guard, HeldLockSet locks); } /** * Analyzes a method body, tracking the set of held locks and checking accesses to guarded * members. 
*/ public static void analyze( VisitorState state, LockEventListener listener, Predicate<Tree> isSuppressed) { HeldLockSet locks = HeldLockSet.empty(); locks = handleMonitorGuards(state, locks); new LockScanner(state, listener, isSuppressed).scan(state.getPath(), locks); } // Don't use Class#getName() for inner classes, we don't want `Monitor$Guard` private static final String MONITOR_GUARD_CLASS = "com.google.common.util.concurrent.Monitor.Guard"; private static HeldLockSet handleMonitorGuards(VisitorState state, HeldLockSet locks) { JCNewClass newClassTree = ASTHelpers.findEnclosingNode(state.getPath(), JCNewClass.class); if (newClassTree == null) { return locks; } Symbol clazzSym = ASTHelpers.getSymbol(newClassTree.clazz); if (!(clazzSym instanceof ClassSymbol)) { return locks; } if (!((ClassSymbol) clazzSym).fullname.contentEquals(MONITOR_GUARD_CLASS)) { return locks; } Optional<GuardedByExpression> lockExpression = GuardedByBinder.bindExpression( Iterables.getOnlyElement(newClassTree.getArguments()), state); if (!lockExpression.isPresent()) { return locks; } return locks.plus(lockExpression.get()); } private static class LockScanner extends TreePathScanner<Void, HeldLockSet> { private final VisitorState visitorState; private final LockEventListener listener; private final Predicate<Tree> isSuppressed; private static final GuardedByExpression.Factory F = new GuardedByExpression.Factory(); private LockScanner( VisitorState visitorState, LockEventListener listener, Predicate<Tree> isSuppressed) { this.visitorState = visitorState; this.listener = listener; this.isSuppressed = isSuppressed; } @Override public Void visitMethod(MethodTree tree, HeldLockSet locks) { // Synchronized instance methods hold the 'this' lock; synchronized static methods // hold the Class lock for the enclosing class. 
Set<Modifier> mods = tree.getModifiers().getFlags(); if (mods.contains(Modifier.SYNCHRONIZED)) { Symbol owner = (((JCTree.JCMethodDecl) tree).sym.owner); GuardedByExpression lock = mods.contains(Modifier.STATIC) ? F.classLiteral(owner) : F.thisliteral(); locks = locks.plus(lock); } // @GuardedBy annotations on methods are trusted for declarations, and checked // for invocations. for (String guard : GuardedByUtils.getGuardValues(tree, visitorState)) { Optional<GuardedByExpression> bound = GuardedByBinder.bindString(guard, GuardedBySymbolResolver.from(tree, visitorState)); if (bound.isPresent()) { locks = locks.plus(bound.get()); } } return super.visitMethod(tree, locks); } @Override public Void visitTry(TryTree tree, HeldLockSet locks) { scan(tree.getResources(), locks); List<? extends Tree> resources = tree.getResources(); scan(resources, locks); // Cheesy try/finally heuristic: assume that all locks released in the finally // are held for the entirety of the try and catch statements. Collection<GuardedByExpression> releasedLocks = ReleasedLockFinder.find(tree.getFinallyBlock(), visitorState); if (resources.isEmpty()) { scan(tree.getBlock(), locks.plusAll(releasedLocks)); } else { // We don't know what to do with the try-with-resources block. // TODO(cushon) - recognize common try-with-resources patterns. Currently there is no // standard implementation of an AutoCloseable lock resource to detect. } scan(tree.getCatches(), locks.plusAll(releasedLocks)); scan(tree.getFinallyBlock(), locks); return null; } @Override public Void visitSynchronized(SynchronizedTree tree, HeldLockSet locks) { // The synchronized expression is held in the body of the synchronized statement: Optional<GuardedByExpression> lockExpression = GuardedByBinder.bindExpression((JCExpression) tree.getExpression(), visitorState); scan(tree.getBlock(), lockExpression.isPresent() ? 
locks.plus(lockExpression.get()) : locks); return null; } @Override public Void visitMemberSelect(MemberSelectTree tree, HeldLockSet locks) { checkMatch(tree, locks); return super.visitMemberSelect(tree, locks); } @Override public Void visitIdentifier(IdentifierTree tree, HeldLockSet locks) { checkMatch(tree, locks); return super.visitIdentifier(tree, locks); } @Override public Void visitNewClass(NewClassTree tree, HeldLockSet locks) { // Don't descend into anonymous class declarations; their method declarations // will be analyzed separately. return null; } @Override public Void visitLambdaExpression(LambdaExpressionTree node, HeldLockSet heldLockSet) { // Don't descend into lambda; they will be analyzed separately. return null; } @Override public Void visitVariable(VariableTree node, HeldLockSet locks) { if (!isSuppressed.apply(node)) { return super.visitVariable(node, locks); } else { return null; } } private void checkMatch(ExpressionTree tree, HeldLockSet locks) { for (String guardString : GuardedByUtils.getGuardValues(tree, visitorState)) { GuardedByBinder.bindString(guardString, GuardedBySymbolResolver.from(tree, visitorState)) .ifPresent( guard -> { Optional<GuardedByExpression> boundGuard = ExpectedLockCalculator.from((JCTree.JCExpression) tree, guard, visitorState); if (!boundGuard.isPresent()) { // We couldn't resolve a guarded by expression in the current scope, so we can't // guarantee the access is protected and must report an error to be safe. listener.handleGuardedAccess( tree, new GuardedByExpression.Factory().error(guardString), locks); return; } listener.handleGuardedAccess(tree, boundGuard.get(), locks); }); } } } /** An abstraction over the lock classes we understand. */ @AutoValue abstract static class LockResource { /** The fully-qualified name of the lock class. */ abstract String className(); /** The method that acquires the lock. */ abstract String lockMethod(); /** The method that releases the lock. 
*/ abstract String unlockMethod(); public Matcher<ExpressionTree> createUnlockMatcher() { return instanceMethod().onDescendantOf(className()).named(unlockMethod()); } public Matcher<ExpressionTree> createLockMatcher() { return instanceMethod().onDescendantOf(className()).named(lockMethod()); } static LockResource create(String className, String lockMethod, String unlockMethod) { return new AutoValue_HeldLockAnalyzer_LockResource(className, lockMethod, unlockMethod); } } /** The set of supported lock classes. */ private static final ImmutableList<LockResource> LOCK_RESOURCES = ImmutableList.of( LockResource.create("java.util.concurrent.locks.Lock", "lock", "unlock"), LockResource.create("com.google.common.util.concurrent.Monitor", "enter", "leave"), LockResource.create("java.util.concurrent.Semaphore", "acquire", "release")); private static class LockOperationFinder extends TreeScanner<Void, Void> { static Collection<GuardedByExpression> find( Tree tree, VisitorState state, Matcher<ExpressionTree> lockOperationMatcher) { if (tree == null) { return Collections.emptyList(); } LockOperationFinder finder = new LockOperationFinder(state, lockOperationMatcher); tree.accept(finder, null); return finder.locks; } private static final String READ_WRITE_LOCK_CLASS = "java.util.concurrent.locks.ReadWriteLock"; private final Matcher<ExpressionTree> lockOperationMatcher; /** Matcher for ReadWriteLock lock accessors. 
*/ private static final Matcher<ExpressionTree> READ_WRITE_ACCESSOR_MATCHER = Matchers.<ExpressionTree>anyOf( instanceMethod().onDescendantOf(READ_WRITE_LOCK_CLASS).named("readLock"), instanceMethod().onDescendantOf(READ_WRITE_LOCK_CLASS).named("writeLock")); private final VisitorState state; private final Set<GuardedByExpression> locks = new HashSet<>(); private LockOperationFinder(VisitorState state, Matcher<ExpressionTree> lockOperationMatcher) { this.state = state; this.lockOperationMatcher = lockOperationMatcher; } @Override public Void visitMethodInvocation(MethodInvocationTree tree, Void unused) { handleReleasedLocks(tree); handleUnlockAnnotatedMethods(tree); return null; } /** * Checks for locks that are released directly. Currently only {@link * java.util.concurrent.locks.Lock#unlock()} is supported. * * <p>TODO(cushon): Semaphores, CAS, ... ? */ private void handleReleasedLocks(MethodInvocationTree tree) { if (!lockOperationMatcher.matches(tree, state)) { return; } Optional<GuardedByExpression> node = GuardedByBinder.bindExpression((JCExpression) tree, state); if (node.isPresent()) { GuardedByExpression receiver = ((GuardedByExpression.Select) node.get()).base(); locks.add(receiver); // The analysis interprets members guarded by {@link ReadWriteLock}s as requiring that // either the read or write lock is held for all accesses, but doesn't enforce a policy // for which of the two is held. Technically the write lock should be required while // writing to the guarded member and the read lock should be used for all other accesses, // but in practice the write lock is frequently held while performing a mutating operation // on the object stored in the field (e.g. inserting into a List). // TODO(cushon): investigate a better way to specify the contract for ReadWriteLocks. 
if ((tree.getMethodSelect() instanceof MemberSelectTree) && READ_WRITE_ACCESSOR_MATCHER.matches(ASTHelpers.getReceiver(tree), state)) { locks.add(((Select) receiver).base()); } } } /** Checks {@link UnlockMethod}-annotated methods. */ private void handleUnlockAnnotatedMethods(MethodInvocationTree tree) { UnlockMethod annotation = ASTHelpers.getAnnotation(tree, UnlockMethod.class); if (annotation == null) { return; } for (String lockString : annotation.value()) { Optional<GuardedByExpression> guard = GuardedByBinder.bindString(lockString, GuardedBySymbolResolver.from(tree, state)); // TODO(cushon): http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html#ifPresent if (guard.isPresent()) { Optional<GuardedByExpression> lock = ExpectedLockCalculator.from((JCExpression) tree, guard.get(), state); if (lock.isPresent()) { locks.add(lock.get()); } } } } } /** * Find the locks that are released in the given tree. (e.g. the 'finally' clause of a * try/finally) */ static class ReleasedLockFinder { /** Matcher for methods that release lock resources. */ private static final Matcher<ExpressionTree> UNLOCK_MATCHER = Matchers.<ExpressionTree>anyOf(unlockMatchers()); private static Iterable<Matcher<ExpressionTree>> unlockMatchers() { return Iterables.transform( LOCK_RESOURCES, new Function<LockResource, Matcher<ExpressionTree>>() { @Override public Matcher<ExpressionTree> apply(LockResource res) { return res.createUnlockMatcher(); } }); } static Collection<GuardedByExpression> find(Tree tree, VisitorState state) { return LockOperationFinder.find(tree, state, UNLOCK_MATCHER); } } /** * Find the locks that are acquired in the given tree. (e.g. the body of a @LockMethod-annotated * method.) */ static class AcquiredLockFinder { /** Matcher for methods that acquire lock resources. 
*/ private static final Matcher<ExpressionTree> LOCK_MATCHER = Matchers.<ExpressionTree>anyOf(unlockMatchers()); private static Iterable<Matcher<ExpressionTree>> unlockMatchers() { return Iterables.transform( LOCK_RESOURCES, new Function<LockResource, Matcher<ExpressionTree>>() { @Override public Matcher<ExpressionTree> apply(LockResource res) { return res.createLockMatcher(); } }); } static Collection<GuardedByExpression> find(Tree tree, VisitorState state) { return LockOperationFinder.find(tree, state, LOCK_MATCHER); } } static class ExpectedLockCalculator { private static final GuardedByExpression.Factory F = new GuardedByExpression.Factory(); /** * Determine the lock expression that needs to be held when accessing a specific guarded member. * * <p>If the lock expression resolves to an instance member, the result will be a select * expression with the same base as the original guarded member access. * * <p>For example: * * <pre>{@code * class MyClass { * final Object mu = new Object(); * @GuardedBy("mu") * int x; * } * void m(MyClass myClass) { * myClass.x++; * } * }</pre> * * To determine the lock that must be held when accessing myClass.x, from is called with * "myClass.x" and "mu", and returns "myClass.mu". */ static Optional<GuardedByExpression> from( JCTree.JCExpression guardedMemberExpression, GuardedByExpression guard, VisitorState state) { if (isGuardReferenceAbsolute(guard)) { return Optional.of(guard); } Optional<GuardedByExpression> guardedMember = GuardedByBinder.bindExpression(guardedMemberExpression, state); if (!guardedMember.isPresent()) { return Optional.empty(); } GuardedByExpression memberBase = ((GuardedByExpression.Select) guardedMember.get()).base(); return Optional.of(helper(guard, memberBase)); } /** * Returns true for guard expressions that require an 'absolute' reference, i.e. where the * expression to access the lock is always the same, regardless of how the guarded member is * accessed. 
* * <p>E.g.: * * <ul> * <li>class object: 'TypeName.class' * <li>static access: 'TypeName.member' * <li>enclosing instance: 'Outer.this' * <li>enclosing instance member: 'Outer.this.member' * </ul> */ private static boolean isGuardReferenceAbsolute(GuardedByExpression guard) { GuardedByExpression instance = guard.kind() == Kind.SELECT ? getSelectInstance(guard) : guard; return instance.kind() != Kind.THIS; } /** Gets the base expression of a (possibly nested) member select expression. */ private static GuardedByExpression getSelectInstance(GuardedByExpression guard) { if (guard instanceof Select) { return getSelectInstance(((Select) guard).base()); } return guard; } private static GuardedByExpression helper( GuardedByExpression lockExpression, GuardedByExpression memberAccess) { switch (lockExpression.kind()) { case SELECT: { GuardedByExpression.Select lockSelect = (GuardedByExpression.Select) lockExpression; return F.select(helper(lockSelect.base(), memberAccess), lockSelect.sym()); } case THIS: return memberAccess; default: throw new IllegalGuardedBy(lockExpression.toString()); } } } }
package org.aksw.rdfunit.model.impl;

import com.google.common.collect.ImmutableSet;
import java.util.Collection;
import java.util.Collections;
import java.util.Optional;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NonNull;
import lombok.ToString;
import org.aksw.rdfunit.model.interfaces.Binding;
import org.aksw.rdfunit.model.interfaces.Pattern;
import org.aksw.rdfunit.model.interfaces.PatternBasedTestCase;
import org.aksw.rdfunit.model.interfaces.ResultAnnotation;
import org.aksw.rdfunit.model.interfaces.TestCase;
import org.aksw.rdfunit.model.interfaces.TestCaseAnnotation;
import org.aksw.rdfunit.model.interfaces.shacl.PrefixDeclaration;
import org.aksw.rdfunit.utils.CommonNames;
import org.aksw.rdfunit.vocabulary.SHACL;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;

/**
 * A test case created by instantiating a reusable {@link Pattern} with a set of
 * {@link Binding}s: each binding's value replaces the pattern's
 * {@code %%parameterId%%} placeholders, producing the final SPARQL WHERE and
 * prevalence query strings. Both query strings are computed eagerly in the
 * constructor, so instances are effectively immutable after construction.
 *
 * @author Dimitris Kontokostas
 * @since 1/3/14 3:49 PM
 */
@ToString
@EqualsAndHashCode(of = {"pattern", "bindings", "testCaseAnnotation"})
public class PatternBasedTestCaseImpl implements TestCase, PatternBasedTestCase {

    // The pattern this test case instantiates.
    @Getter @NonNull private final Pattern pattern;
    // Defensive immutable copy of the bindings supplied at construction time.
    @NonNull private final ImmutableSet<Binding> bindings;
    // RDF resource that identifies this test case.
    @Getter @NonNull private final Resource element;
    @Getter @NonNull private final TestCaseAnnotation testCaseAnnotation;
    // SPARQL WHERE clause with all %%...%% placeholders substituted.
    @Getter @NonNull private final String sparqlWhere;
    // Prevalence query with placeholders substituted; empty string if the
    // pattern declares no prevalence query.
    @Getter @NonNull private final String sparqlPrevalence;
    // Name of the query variable that carries the focus node in results.
    private final String focusNodeVarName;

    /**
     * Creates a test case by binding the given values into the pattern.
     * The SPARQL WHERE and prevalence strings are instantiated immediately.
     *
     * @param resource   RDF resource identifying the test case
     * @param annotation test case annotations (including variable annotations)
     * @param pattern    the pattern to instantiate
     * @param bindings   parameter bindings substituted into the pattern
     */
    public PatternBasedTestCaseImpl(@NonNull Resource resource, @NonNull TestCaseAnnotation annotation, @NonNull Pattern pattern, @NonNull Collection<Binding> bindings) {
        this.element = resource;
        this.testCaseAnnotation = annotation;
        this.pattern = pattern;
        this.bindings = ImmutableSet.copyOf(bindings);
        this.sparqlWhere = initSparqlWhere();
        this.sparqlPrevalence = initSparqlPrevalence();
        // Resolve the variable that holds the focus node: prefer the variable
        // annotated with sh:focusNode, otherwise fall back to the default name.
        focusNodeVarName = testCaseAnnotation.getVariableAnnotations().stream()
            .filter(ra -> ra.getAnnotationProperty().equals(SHACL.focusNode))
            .map(ResultAnnotation::getAnnotationVarName)
            .filter(Optional::isPresent)
            .map(Optional::get)
            .findFirst()
            .orElse(CommonNames.This);

        // validate
        // NOTE(review): a bindings/parameters size mismatch is currently
        // silently ignored -- the exception below is commented out. Confirm
        // whether this should fail fast instead.
        if (bindings.size() != pattern.getParameters().size()) {
            // throw new TestCaseInstantiationException("Non valid bindings in TestCase: " + testURI);
        }
    }

    /**
     * Substitutes each binding's value into the {@code %%parameterId%%}
     * placeholders of the given query string.
     *
     * @param bindings bindings providing parameter ids and values
     * @param query    query template containing the placeholders
     * @return the query with all matching placeholders replaced
     */
    private static String instantiateBindings(Collection<Binding> bindings, String query) {
        String sparql = query;
        for (Binding b : bindings) {
            sparql = sparql.replace("%%" + b.getParameterId() + "%%", b.getValueAsString());
        }
        return sparql;
    }

    /** Delegates to the annotation's auto-generator URI. */
    public String getAutoGeneratorURI() {
        return testCaseAnnotation.getAutoGeneratorURI();
    }

    @Override
    public Collection<Binding> getBindings() {
        return bindings;
    }

    /** Always empty: this implementation tracks no prefix declarations. */
    @Override
    public Collection<PrefixDeclaration> getPrefixDeclarations() {
        return Collections.emptyList();
    }

    // Instantiates and trims the pattern's WHERE clause.
    private String initSparqlWhere() {
        return instantiateBindings(bindings, pattern.getSparqlWherePattern()).trim();
    }

    // Instantiates the prevalence query if the pattern declares one;
    // otherwise returns the empty string.
    private String initSparqlPrevalence() {
        if (pattern.getSparqlPatternPrevalence().isPresent()) {
            return instantiateBindings(bindings, pattern.getSparqlPatternPrevalence().get()).trim();
        } else {
            return "";
        }
    }

    /** Returns the value bound to the focus-node variable in the solution. */
    @Override
    public RDFNode getFocusNode(QuerySolution solution) {
        return solution.get(this.focusNodeVarName);
    }
}
package br.com.ibnetwork.guara;

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;

import org.apache.maven.project.MavenProject;

/**
 * Utility methods for working with Maven projects.
 */
public class MavenUtils
{
    /**
     * Builds a {@link ClassLoader} backed by the compile classpath of the
     * given Maven project, parented to this class' own loader.
     *
     * @param project the Maven project whose compile classpath is used
     * @return a class loader able to resolve the project's compile-time classes
     * @throws Exception if the classpath cannot be resolved or an element
     *                   cannot be converted to a URL
     */
    public static ClassLoader getProjectClassLoader(MavenProject project)
        throws Exception
    {
        // Maven exposes this as an untyped list; entries are normally Strings,
        // but Files are accepted defensively as well.
        List<?> elements = project.getCompileClasspathElements();
        // Collect into a list so that null classpath entries are skipped.
        // The previous implementation left urls[i] == null for null elements,
        // producing an array URLClassLoader cannot handle.
        List<URL> urls = new ArrayList<URL>(elements.size());
        for (Object obj : elements)
        {
            if (obj == null)
            {
                continue;
            }
            File file = (obj instanceof File) ? (File) obj : new File((String) obj);
            urls.add(file.toURI().toURL());
        }
        return new URLClassLoader(urls.toArray(new URL[urls.size()]),
                                  MavenUtils.class.getClassLoader());
    }
}
package SNAct1.cards.enemyBossCards; import SNAct1.cards.AbstractSNActCard; import SNAct1.monsters.BossNinian; import basemod.AutoAdd; import com.megacrit.cardcrawl.cards.AbstractCard; import com.megacrit.cardcrawl.characters.AbstractPlayer; import com.megacrit.cardcrawl.monsters.AbstractMonster; import static SNAct1.SNAct1Mod.makeID; @AutoAdd.Ignore public class BossNinisGrace extends AbstractSNActCard { public final static String ID = makeID(BossNinisGrace.class.getSimpleName()); BossNinian cardOwner; public BossNinisGrace(BossNinian cardOwner) { super(ID, 1, CardType.SKILL, CardRarity.UNCOMMON, CardTarget.SELF, CardColor.COLORLESS); this.cardOwner = cardOwner; this.magicNumber = baseMagicNumber = cardOwner.blockBuffDuration; this.secondMagicNumber = baseSecondMagicNumber = cardOwner.blockBuffValue; } @Override public void use(AbstractPlayer p, AbstractMonster m) { } @Override public void upp() { } @Override public AbstractCard makeCopy() { return new BossNinisGrace(cardOwner); } }
package org.torproject.android.sample;

import android.app.Application;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.widget.TextView;

import com.jrummyapps.android.shell.CommandResult;
import com.jrummyapps.android.shell.Shell;

import org.torproject.android.binary.TorResourceInstaller;

import java.io.File;

/**
 * Sample activity that installs the bundled Tor binary and torrc into the
 * app's files directory and then launches Tor via shell commands, reporting
 * progress into a single status TextView.
 */
public class SampleTorActivity extends AppCompatActivity {

    // Single on-screen status line; every logNotice() overwrites it.
    private TextView tvNotice;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_sample_tor);

        tvNotice = findViewById(R.id.lblStatus);

        try {
            // Unpack the Tor binary and default torrc from app resources.
            TorResourceInstaller torResourceInstaller = new TorResourceInstaller(this, getFilesDir());

            File fileTorBin = torResourceInstaller.installResources();
            File fileTorRc = torResourceInstaller.getTorrcFile();

            boolean success = fileTorBin != null && fileTorBin.canExecute();

            String message = "Tor install success? " + success;
            logNotice(message);

            if (success) {
                runTorShellCmd(fileTorBin, fileTorRc);
            }
        } catch (Exception e) {
            e.printStackTrace();
            // NOTE(review): e.getMessage() may be null, which would display
            // nothing useful -- confirm whether a fallback message is wanted.
            logNotice(e.getMessage());
        }
    }

    // Shows the notice in the status TextView (replaces previous text).
    private void logNotice(String notice) {
        tvNotice.setText(notice);
    }

    // Shows the notice on screen and also logs the full stack trace.
    private void logNotice(String notice, Exception e) {
        logNotice(notice);
        Log.e("SampleTor", "error occurred", e);
    }

    /**
     * Verifies the Tor configuration and then starts Tor through the shell.
     *
     * @param fileTor   path to the installed Tor binary
     * @param fileTorrc path to the installed torrc defaults file
     * @return true if Tor started with exit code 0, false otherwise
     * @throws Exception if canonical paths cannot be resolved
     */
    private boolean runTorShellCmd(File fileTor, File fileTorrc) throws Exception {
        File appCacheHome = getDir(SampleTorServiceConstants.DIRECTORY_TOR_DATA, Application.MODE_PRIVATE);

        if (!fileTorrc.exists()) {
            logNotice("torrc not installed: " + fileTorrc.getCanonicalPath());
            return false;
        }

        String torCmdString = fileTor.getCanonicalPath()
                + " DataDirectory " + appCacheHome.getCanonicalPath()
                + " --defaults-torrc " + fileTorrc;

        int exitCode = -1;

        // Dry-run first: --verify-config checks the configuration without
        // actually starting Tor. exec() throws if the command fails.
        try {
            exitCode = exec(torCmdString + " --verify-config", true);
        } catch (Exception e) {
            logNotice("Tor configuration did not verify: " + e.getMessage(), e);
            return false;
        }

        // Real launch.
        try {
            exitCode = exec(torCmdString, true);
        } catch (Exception e) {
            logNotice("Tor was unable to start: " + e.getMessage(), e);
            return false;
        }

        if (exitCode != 0) {
            logNotice("Tor did not start. Exit:" + exitCode);
            return false;
        }

        return true;
    }

    /**
     * Runs the command through the shell, throwing on a non-successful result.
     *
     * @param cmd  full command line to execute
     * @param wait NOTE(review): currently unused -- Shell.run always blocks.
     * @return the command's exit code (0 on the success path)
     * @throws Exception if the shell reports the command as unsuccessful
     */
    private int exec(String cmd, boolean wait) throws Exception {
        CommandResult shellResult = Shell.run(cmd);
        // debug("CMD: " + cmd + "; SUCCESS=" + shellResult.isSuccessful());

        if (!shellResult.isSuccessful()) {
            throw new Exception("Error: " + shellResult.exitCode + " ERR=" + shellResult.getStderr() + " OUT=" + shellResult.getStdout());
        }

        return shellResult.exitCode;
    }
}
package peacefulotter.game;

import peacefulotter.engine.components.Camera;
import peacefulotter.engine.components.GameObject;
import peacefulotter.engine.components.PhysicsObject;
import peacefulotter.engine.components.lights.DirectionalLight;
import peacefulotter.engine.components.lights.PointLight;
import peacefulotter.engine.components.renderer.MeshRenderer;
import peacefulotter.engine.components.renderer.MultiMeshRenderer;
import peacefulotter.engine.core.Game;
import peacefulotter.engine.core.maths.Quaternion;
import peacefulotter.engine.core.maths.Vector3f;
import peacefulotter.engine.rendering.graphics.Material;
import peacefulotter.engine.rendering.graphics.Mesh;
import peacefulotter.engine.rendering.graphics.Texture;
import peacefulotter.engine.rendering.shaders.Attenuation;
import peacefulotter.game.actor.FlashLight;
import peacefulotter.game.actor.Ghost;
import peacefulotter.game.actor.Player;
import peacefulotter.game.actor.Weapon;
import peacefulotter.game.map.FPSWorld;

// Hide Mouse
// int hideMouse = action == 1 ? GLFW_CURSOR_DISABLED : GLFW_CURSOR_NORMAL;
// glfwSetInputMode( window, GLFW_CURSOR, hideMouse );

/**
 * FPS demo game: wires the materials, the world, a dummy player, the
 * user-controlled ghost camera and the scene lights into the engine.
 */
public class FPSGame extends Game
{
    /**
     * @param winName   window title
     * @param winWidth  window width in pixels
     * @param winHeight window height in pixels
     */
    public FPSGame( String winName, int winWidth, int winHeight )
    {
        super( winName, winWidth, winHeight );
    }

    /**
     * Builds the whole scene, then calls super.init() so the engine can
     * finish its own initialization with the objects already registered.
     */
    public void init()
    {
        /* MATERIALS */
        // Diffuse/normal/height texture triplets; the trailing numeric
        // arguments are Material shading parameters -- exact semantics are
        // defined by Material (verify against the engine).
        Material bricks2 = new Material(
                new Texture( "bricks2.jpg" ),
                new Texture( "bricks2_normal.jpg" ),
                new Texture( "bricks2_height.png" ),
                1f, 4, 0.03f, -0.04f );
        Material alienMaterial = new Material(
                new Texture( "metal.jpg" ),
                new Texture( "metal_normal.jpg" ),
                new Texture( "metal_height.png" ),
                2, 12, 0.04f, -1f );

        /* MAP */
        FPSWorld world = FPSWorld.INSTANCE;
        setWorld( world );

        // Mesh tree1 = new Mesh( "tree3/", "Tree_OBJ.obj" );
        // GameObject tree = new GameObject().addComponent( new MeshRenderer( tree1, alienMaterial ) );
        // tree.getTransform().scale( 0.03f ).translate( new Vector3f( 200, 0, 400 ) );

        // MultiMeshRenderer mmr = new MultiMeshRenderer( "tree1/", "Tree_OBJ.obj" );
        // GameObject tree = new GameObject().addComponent( mmr );
        // tree.getTransform().scale( 0.03f ).translate( new Vector3f( 200, 0, 400 ) );

        //Mesh house = new Mesh( "house/", "house3.obj" );
        //GameObject houseObject = new GameObject().addComponent( new MeshRenderer( house, alienMaterial ) );
        //houseObject.getTransform().scale( 30 ).rotate( Vector3f.Y_AXIS, 180 );

        /* DUMMY */
        // Non-controlled player model placed in the world for testing.
        Player dummy = new Player.PlayerBuilder()
                .setPosition( new Vector3f( 10, 10, 10 ) )
                .setWeapon( new Weapon() )
//                .setFlashLight( new FlashLight( new Vector3f( 0.8f, 0.4f, 0.8f ), 0.15f, new Attenuation( 0, 1, 0f ), 0.6f ) )
                .setTerrain( world.getTerrain() )
                .setMultiMeshRenderer( new MultiMeshRenderer( "reaper/", "reaper.obj") )
                .build();

        // Decorative sphere attached as a child of the dummy.
        GameObject sphereObj = new GameObject();
        sphereObj.addComponent( new MeshRenderer( new Mesh( "sphere.obj" ), bricks2 ) );
        sphereObj.getTransform().translate( new Vector3f( 0, 6, 0 ) );
        sphereObj.getTransform().scale( new Vector3f( 0.4f, 0.4f, 0.4f ) );
        dummy.addChild( sphereObj );
        dummy.getTransform()
                .translate( new Vector3f( 0, 0, 5 ) )
                .rotate( new Vector3f(0, 1, 0 ), 180);

        /* PLAYER */
        PhysicsObject player = new Player.PlayerBuilder()
//                .setPlayerUser()
                .setPosition( new Vector3f( 50, 2, 50 ) )
//                .setCamera( Camera.CameraBuilder.getDefaultCamera() )
                .setTerrain( world.getTerrain() )
                .setMultiMeshRenderer( new MultiMeshRenderer( "reaper/", "reaper.obj") )
                .setWeapon( new Weapon() )
                .setFlashLight( new FlashLight( new Vector3f( 0.8f, 0.1f, 0.8f ), 0.03f, new Attenuation( 0.2f, 0.01f, 0f ), 0.3f ) )
                .build();

        // The ghost carries the active camera (free-fly view).
        Ghost ghost = new Ghost( world.getTerrain(), true );
        ghost.addComponent( Camera.CameraBuilder.getDefaultCamera() );
        ghost.getTransform().translate( new Vector3f( 0, 10, 0 ) );

        /* LIGHTS */
        GameObject dirLightObject = new GameObject();
        DirectionalLight dirLight = new DirectionalLight( new Vector3f( 0.55f, 0.5f,0.55f ), 1f );
        dirLightObject.addComponent( dirLight );
        dirLightObject.getTransform().setRotation( new Quaternion( new Vector3f( 1, -1, 0 ), -45 ) );

        GameObject pointLightObject = new GameObject();
        PointLight pointLight = new PointLight( new Vector3f( 0.6f, 0.1f, 0.2f ), 0.01f, new Attenuation( 0.5f, 0.1f, 0f ) );
        pointLight.getTransform().translate( new Vector3f( 20, 10, 10 ) );
        pointLightObject.addComponent( pointLight );

        // Register static scene objects and the physically simulated actors.
        addObjects( world, dirLightObject, pointLightObject );
        addPhysicalObjects( player, dummy, ghost );

        super.init();
    }

    /** Per-frame update; fully delegated to the engine. */
    public void update( float deltaTime )
    {
        super.update( deltaTime );
    }
}
/*
 * Copyright 2020 ConsenSys AG.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */

package tech.pegasys.teku.services.powchain;

import static tech.pegasys.teku.pow.api.Eth1DataCachePeriodCalculator.calculateEth1DataCacheDurationPriorToCurrentTime;
import static tech.pegasys.teku.util.config.Constants.MAXIMUM_CONCURRENT_ETH1_REQUESTS;

import java.util.concurrent.TimeUnit;
import okhttp3.OkHttpClient;
import okhttp3.logging.HttpLoggingInterceptor;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.web3j.protocol.Web3j;
import org.web3j.protocol.http.HttpService;
import tech.pegasys.teku.infrastructure.async.AsyncRunner;
import tech.pegasys.teku.infrastructure.async.SafeFuture;
import tech.pegasys.teku.pow.DepositContractAccessor;
import tech.pegasys.teku.pow.DepositFetcher;
import tech.pegasys.teku.pow.DepositProcessingController;
import tech.pegasys.teku.pow.ErrorTrackingEth1Provider;
import tech.pegasys.teku.pow.Eth1BlockFetcher;
import tech.pegasys.teku.pow.Eth1DepositManager;
import tech.pegasys.teku.pow.Eth1HeadTracker;
import tech.pegasys.teku.pow.Eth1Provider;
import tech.pegasys.teku.pow.MinimumGenesisTimeBlockFinder;
import tech.pegasys.teku.pow.ThrottlingEth1Provider;
import tech.pegasys.teku.pow.Web3jEth1Provider;
import tech.pegasys.teku.pow.api.Eth1EventsChannel;
import tech.pegasys.teku.service.serviceutils.Service;
import tech.pegasys.teku.service.serviceutils.ServiceConfig;
import tech.pegasys.teku.storage.api.Eth1DepositStorageChannel;
import tech.pegasys.teku.util.cli.VersionProvider;
import tech.pegasys.teku.util.config.TekuConfiguration;

/**
 * Service that wires up and owns the ETH1 (proof-of-work chain) components:
 * a throttled/error-tracking Web3j provider, the deposit-contract accessors
 * and fetchers, the chain-head tracker and the deposit manager. Starting and
 * stopping the service starts/stops the head tracker and deposit manager.
 */
public class PowchainService extends Service {

  private static final Logger LOG = LogManager.getLogger();

  // The two stateful components this service starts and stops.
  private final Eth1DepositManager eth1DepositManager;
  private final Eth1HeadTracker headTracker;

  /**
   * Builds the whole powchain component graph from the service configuration.
   * Construction only wires objects together; nothing is started until
   * {@link #doStart()}.
   */
  public PowchainService(final ServiceConfig config) {
    TekuConfiguration tekuConfig = config.getConfig();
    AsyncRunner asyncRunner = config.createAsyncRunner("powchain");

    Web3j web3j = createWeb3j(tekuConfig);

    // Layered provider: error tracking wrapped in a throttle that caps the
    // number of concurrent ETH1 requests.
    final Eth1Provider eth1Provider =
        new ThrottlingEth1Provider(
            new ErrorTrackingEth1Provider(
                new Web3jEth1Provider(web3j, asyncRunner), asyncRunner, config.getTimeProvider()),
            MAXIMUM_CONCURRENT_ETH1_REQUESTS);

    DepositContractAccessor depositContractAccessor =
        DepositContractAccessor.create(
            eth1Provider, web3j, config.getConfig().getEth1DepositContractAddress().toHexString());

    // Event channels used to publish ETH1 events and persist deposits.
    final Eth1EventsChannel eth1EventsChannel =
        config.getEventChannels().getPublisher(Eth1EventsChannel.class);
    final Eth1DepositStorageChannel eth1DepositStorageChannel =
        config.getEventChannels().getPublisher(Eth1DepositStorageChannel.class, asyncRunner);

    final Eth1BlockFetcher eth1BlockFetcher =
        new Eth1BlockFetcher(
            eth1EventsChannel,
            eth1Provider,
            config.getTimeProvider(),
            calculateEth1DataCacheDurationPriorToCurrentTime());

    final DepositFetcher depositFetcher =
        new DepositFetcher(
            eth1Provider,
            eth1EventsChannel,
            depositContractAccessor.getContract(),
            eth1BlockFetcher,
            asyncRunner);

    // The head tracker must exist before the processing controller, which
    // consumes it.
    headTracker = new Eth1HeadTracker(asyncRunner, eth1Provider);
    final DepositProcessingController depositProcessingController =
        new DepositProcessingController(
            eth1Provider,
            eth1EventsChannel,
            asyncRunner,
            depositFetcher,
            eth1BlockFetcher,
            headTracker);

    eth1DepositManager =
        new Eth1DepositManager(
            eth1Provider,
            asyncRunner,
            eth1EventsChannel,
            eth1DepositStorageChannel,
            depositProcessingController,
            new MinimumGenesisTimeBlockFinder(eth1Provider));
  }

  // Builds the Web3j client for the configured ETH1 endpoint, tagging
  // requests with Teku's version in the User-Agent header.
  private Web3j createWeb3j(final TekuConfiguration tekuConfig) {
    final HttpService web3jService =
        new HttpService(tekuConfig.getEth1Endpoint(), createOkHttpClient());
    web3jService.addHeader("User-Agent", VersionProvider.VERSION);
    return Web3j.build(web3jService);
  }

  // HTTP client shared by the Web3j service; body logging is only attached
  // when trace logging is enabled.
  private static OkHttpClient createOkHttpClient() {
    final OkHttpClient.Builder builder =
        new OkHttpClient.Builder()
            // Increased read timeout allows ETH1 nodes time to process large log requests
            .readTimeout(1, TimeUnit.MINUTES);
    if (LOG.isTraceEnabled()) {
      HttpLoggingInterceptor logging = new HttpLoggingInterceptor(LOG::trace);
      logging.setLevel(HttpLoggingInterceptor.Level.BODY);
      builder.addInterceptor(logging);
    }
    return builder.build();
  }

  /** Starts the head tracker and deposit manager; fails fast if either fails. */
  @Override
  protected SafeFuture<?> doStart() {
    return SafeFuture.allOfFailFast(
        SafeFuture.fromRunnable(headTracker::start),
        SafeFuture.fromRunnable(eth1DepositManager::start));
  }

  /** Stops the head tracker and deposit manager; fails fast if either fails. */
  @Override
  protected SafeFuture<?> doStop() {
    return SafeFuture.allOfFailFast(
        SafeFuture.fromRunnable(headTracker::stop),
        SafeFuture.fromRunnable(eth1DepositManager::stop));
  }
}
/*
 * JasperReports - Free Java Reporting Library.
 * Copyright (C) 2001 - 2013 Jaspersoft Corporation. All rights reserved.
 * http://www.jaspersoft.com
 *
 * Unless you have purchased a commercial license agreement from Jaspersoft,
 * the following license terms apply:
 *
 * This program is part of JasperReports.
 *
 * JasperReports is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * JasperReports is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with JasperReports. If not, see <http://www.gnu.org/licenses/>.
 */
package net.sf.jasperreports.engine.design;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.InputStream;

import net.sf.jasperreports.engine.DefaultJasperReportsContext;
import net.sf.jasperreports.engine.JRException;
import net.sf.jasperreports.engine.JasperReportsContext;


/**
 * Report compiler that invokes the external <code>javac</code> tool as a
 * separate process.
 *
 * @author Teodor Danciu (teodord@users.sourceforge.net)
 * @version $Id: JRJavacCompiler.java 5878 2013-01-07 20:23:13Z teodord $
 */
public class JRJavacCompiler extends JRAbstractMultiClassCompiler
{

	/**
	 *
	 */
	public JRJavacCompiler(JasperReportsContext jasperReportsContext)
	{
		super(jasperReportsContext);
	}

	/**
	 * @deprecated Replaced by {@link #JRJavacCompiler(JasperReportsContext)}.
	 */
	public JRJavacCompiler()
	{
		this(DefaultJasperReportsContext.getInstance());
	}

	/**
	 * Compiles the given source files by launching an external
	 * <code>javac</code> process.
	 *
	 * @param sourceFiles the report source files to compile
	 * @param classpath the compilation classpath
	 * @return the compiler diagnostic output if compilation failed,
	 * or <code>null</code> on success
	 * @throws JRException if the external process cannot be launched or read
	 */
	public String compileClasses(File[] sourceFiles, String classpath) throws JRException
	{
		String[] source = new String[sourceFiles.length + 3];
		source[0] = "javac";
		source[1] = "-classpath";
		source[2] = classpath;
		for (int i = 0; i < sourceFiles.length; i++)
		{
			source[i + 3] = sourceFiles[i].getPath();
		}
		
		try 
		{
			// Compile the source files and arrange to read the errors if any.
			Process compile = Runtime.getRuntime().exec(source);

			// Read the error messages (if any) into the ByteArrayOutputStream.
			// javac writes its diagnostics to stderr; draining the stream fully
			// also prevents the child process from blocking on a full pipe.
			ByteArrayOutputStream baos = new ByteArrayOutputStream();
			InputStream errFile = compile.getErrorStream();
			try
			{
				byte[] buffer = new byte[1024];
				int count = 0;
				do
				{
					count = errFile.read(buffer);
					if (count > 0)
					{
						baos.write(buffer, 0, count);
					}
				} while (count >= 0);
			}
			finally
			{
				// The original code leaked this stream.
				errFile.close();
			}

			// Wait for javac to terminate so its exit code is meaningful.
			// The original code never waited and decided success solely by
			// searching the output for the word "error", which misses
			// failures that produce differently-worded diagnostics.
			int exitCode = compile.waitFor();

			String errors = baos.toString();
			if (exitCode != 0 || errors.indexOf("error") != -1)
			{
				return errors;
			}

			return null;
		}
		catch (Exception e)
		{
			StringBuffer files = new StringBuffer();
			for (int i = 0; i < sourceFiles.length; ++i)
			{
				files.append(sourceFiles[i].getPath());
				files.append(' ');
			}
			throw new JRException("Error compiling report java source files : " + files, e);
		}
	}
}
package com.jbtits.otus.lecture16.frontend.webSocket.messages;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * Authentication message carrying the mandatory login and password fields
 * in addition to the base action envelope.
 */
public class AuthAction extends Action {

    @JsonProperty(required = true)
    private String login;

    @JsonProperty(required = true)
    private String password;

    /** No-arg constructor kept for JSON deserialization. */
    public AuthAction() {
        super();
    }

    public AuthAction(String uuid, String action, String login, String password) {
        super(uuid, action);
        this.login = login;
        this.password = password;
    }

    public String getLogin() {
        return this.login;
    }

    public String getPassword() {
        return this.password;
    }
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2012.08.30 at 11:39:15 AM CEST
//

package es.rickyepoderi.wbxml.bind.syncml;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;


/**
 * JAXB binding for the SyncML {@code <Exec>} command element in namespace
 * {@code SYNCML:SYNCML1.1}. The command id and item are required; the
 * no-response flag, credentials and meta information are optional.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "cmdID",
    "noResp",
    "cred",
    "meta",
    "item"
})
@XmlRootElement(name = "Exec", namespace="SYNCML:SYNCML1.1")
public class Exec {

    @XmlElement(name = "CmdID", namespace="SYNCML:SYNCML1.1", required = true)
    protected String cmdID;
    @XmlElement(name = "NoResp", namespace="SYNCML:SYNCML1.1")
    protected NoResp noResp;
    @XmlElement(name = "Cred", namespace="SYNCML:SYNCML1.1")
    protected Cred cred;
    @XmlElement(name = "Meta", namespace="SYNCML:SYNCML1.1")
    protected Meta meta;
    @XmlElement(name = "Item", namespace="SYNCML:SYNCML1.1", required = true)
    protected Item item;

    /**
     * Gets the value of the cmdID property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getCmdID() {
        return cmdID;
    }

    /**
     * Sets the value of the cmdID property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setCmdID(String value) {
        this.cmdID = value;
    }

    /**
     * Gets the value of the noResp property.
     * 
     * @return
     *     possible object is
     *     {@link NoResp }
     *     
     */
    public NoResp getNoResp() {
        return noResp;
    }

    /**
     * Sets the value of the noResp property.
     * 
     * @param value
     *     allowed object is
     *     {@link NoResp }
     *     
     */
    public void setNoResp(NoResp value) {
        this.noResp = value;
    }

    /**
     * Gets the value of the cred property.
     * 
     * @return
     *     possible object is
     *     {@link Cred }
     *     
     */
    public Cred getCred() {
        return cred;
    }

    /**
     * Sets the value of the cred property.
     * 
     * @param value
     *     allowed object is
     *     {@link Cred }
     *     
     */
    public void setCred(Cred value) {
        this.cred = value;
    }

    /**
     * Gets the value of the meta property.
     * 
     * @return
     *     possible object is
     *     {@link Meta }
     *     
     */
    public Meta getMeta() {
        return meta;
    }

    /**
     * Sets the value of the meta property.
     * 
     * @param value
     *     allowed object is
     *     {@link Meta }
     *     
     */
    public void setMeta(Meta value) {
        this.meta = value;
    }

    /**
     * Gets the value of the item property.
     * 
     * @return
     *     possible object is
     *     {@link Item }
     *     
     */
    public Item getItem() {
        return item;
    }

    /**
     * Sets the value of the item property.
     * 
     * @param value
     *     allowed object is
     *     {@link Item }
     *     
     */
    public void setItem(Item value) {
        this.item = value;
    }

}
/*
 * MIT License
 *
 * Copyright (c) 2022 MASES s.r.l.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

/**************************************************************************************
 * <auto-generated>
 *      This code was generated from a template using JCOReflector
 * 
 *      Manual changes to this file may cause unexpected behavior in your application.
 *      Manual changes to this file will be overwritten if the code is regenerated.
 * </auto-generated>
 *************************************************************************************/

package system.componentmodel.design;

import org.mases.jcobridge.*;
import org.mases.jcobridge.netreflection.*;

// Import section
import system.componentmodel.design.MenuCommandsChangedEventArgs;

/**
 * The Java interface to be implemented to receive events from the CLR using {@link MenuCommandsChangedEventHandler}.
 * <p>
 * 
 * See: <a href="https://docs.microsoft.com/en-us/dotnet/api/System.ComponentModel.Design.MenuCommandsChangedEventHandler" target="_top">https://docs.microsoft.com/en-us/dotnet/api/System.ComponentModel.Design.MenuCommandsChangedEventHandler</a>
 */
public interface IMenuCommandsChangedEventHandler {
    /**
     * Invoked from the CLR side when the event is raised.
     *
     * @param sender the CLR object that raised the event
     * @param e the event arguments describing the menu-commands change
     */
    public void Invoke(NetObject sender, MenuCommandsChangedEventArgs e);
}
package com.futurewei.alcor.route;

import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.doNothing;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static org.junit.Assert.*;

import com.futurewei.alcor.route.config.UnitTestConfig;
import com.futurewei.alcor.route.service.RouteDatabaseService;
import com.futurewei.alcor.route.service.RouteWithSubnetMapperService;
import com.futurewei.alcor.route.service.RouteWithVpcMapperService;
import com.futurewei.alcor.web.entity.route.RouteEntity;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.result.MockMvcResultMatchers;

import java.io.IOException;

/**
 * MockMvc-based tests for the route controller endpoints (/routes, /vpcs/.../routes,
 * /subnets/.../routes). The persistence layer ({@link RouteDatabaseService}) and both
 * mapper services are replaced by Mockito mocks, so these tests exercise only the
 * controller's routing, status codes, and JSON response shape — not real storage.
 */
@ComponentScan(value = "com.futurewei.alcor.common.test.config")
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, properties = {"httpbin=http://localhost:${wiremock.server.port}"})
@AutoConfigureMockMvc
public class RouteControllerTests {

    @Autowired
    private MockMvc mockMvc;

    // Mocked persistence layer; each test stubs the calls it needs.
    @MockBean
    private RouteDatabaseService routeDatabaseService;

    @MockBean
    private RouteWithVpcMapperService routeWithVpcMapperService;

    @MockBean
    private RouteWithSubnetMapperService routeWithSubnetMapperService;

    // Endpoint URIs built from shared fixture ids in UnitTestConfig.
    private String getByIdUri = "/routes/" + UnitTestConfig.routeId;
    private String createSubnetUri = "/subnets/" + UnitTestConfig.subnetId + "/routes";
    private String createVpcUri = "/vpcs/" + UnitTestConfig.vpcId + "/routes";
    private String deleteUri = "/vpcs/" + UnitTestConfig.vpcId + "/routes/" + UnitTestConfig.routeId;

    /** GET /routes/{id} returns 200 with the route's id when the store has the route. */
    @Test
    public void routeGetById_canFindRoute_pass () throws Exception {
        Mockito.when(routeDatabaseService.getByRouteId(UnitTestConfig.routeId))
                .thenReturn(new RouteEntity(){{setId(UnitTestConfig.routeId);}});
        this.mockMvc.perform(get(getByIdUri))
                .andDo(print())
                .andExpect(status().isOk())
                .andExpect(MockMvcResultMatchers.jsonPath("$.route.id").value(UnitTestConfig.routeId));
    }

    /**
     * GET /routes/{id} for an unknown id still returns 200, with a null route in the
     * body — the controller does not map a miss to 404.
     */
    @Test
    public void routeGetById_canNotFindRoute_notPass () throws Exception {
        Mockito.when(routeDatabaseService.getByRouteId(UnitTestConfig.routeId)).thenReturn(null);
        String response = this.mockMvc.perform(get(getByIdUri))
                .andDo(print())
                .andExpect(status().isOk())
                .andReturn().getResponse().getContentAsString();
        System.out.println("-----json returned = " + response);
        assertEquals("{\"route\":null}", response);
    }

    /** POST /vpcs/{vpcId}/routes with a valid payload returns 201 and echoes the destination CIDR. */
    @Test
    public void createVpcRoute_create_pass () throws Exception {
        doNothing().when(routeWithVpcMapperService).addMapperByRouteEntity(eq(UnitTestConfig.vpcId), any(RouteEntity.class));
        this.mockMvc.perform(post(createVpcUri).contentType(MediaType.APPLICATION_JSON)
                .content(UnitTestConfig.vpcResource))
                .andDo(print())
                .andExpect(status().is(201))
                .andExpect(MockMvcResultMatchers.jsonPath("$.route.destination").value(UnitTestConfig.cidr));
    }

    /**
     * NOTE(review): despite its name, this test posts the same *valid* payload as the
     * happy-path test above, so the request succeeds (201) and the catch branch is dead
     * code — the "parameter null or empty" case is never actually exercised.
     * TODO: post a null/empty payload (needs a fixture in UnitTestConfig) and assert the
     * expected error status instead of catching here.
     */
    @Test
    public void createVpcRoute_parameterNullOrEmpty_notPass () throws Exception {
        try {
            doNothing().when(routeWithVpcMapperService).addMapperByRouteEntity(eq(UnitTestConfig.vpcId), any(RouteEntity.class));
            this.mockMvc.perform(post(createVpcUri).contentType(MediaType.APPLICATION_JSON)
                    .content(UnitTestConfig.vpcResource))
                    .andDo(print())
                    .andExpect(status().is(201))
                    .andExpect(MockMvcResultMatchers.jsonPath("$.route.destination").value(UnitTestConfig.cidr));
        } catch (Exception e) {
            assertEquals("{\"route\":null}", e.getMessage());
        }
    }

    /** POST /subnets/{subnetId}/routes with a valid payload returns 201 and echoes the destination CIDR. */
    @Test
    public void createSubnetRoute_create_pass () throws Exception {
        doNothing().when(routeWithSubnetMapperService).addMapperByRouteEntity(eq(UnitTestConfig.subnetId), any(RouteEntity.class));
        this.mockMvc.perform(post(createSubnetUri).contentType(MediaType.APPLICATION_JSON)
                .content(UnitTestConfig.resource))
                .andDo(print())
                .andExpect(status().is(201))
                .andExpect(MockMvcResultMatchers.jsonPath("$.route.destination").value(UnitTestConfig.cidr));
    }

    /**
     * NOTE(review): same issue as createVpcRoute_parameterNullOrEmpty_notPass — this
     * duplicates the happy-path request with a valid payload, so the catch branch can
     * never run and no null/empty-parameter behavior is tested.
     */
    @Test
    public void createSubnetRoute_parameterNullOrEmpty_notPass () throws Exception {
        try {
            doNothing().when(routeWithSubnetMapperService).addMapperByRouteEntity(eq(UnitTestConfig.subnetId), any(RouteEntity.class));
            this.mockMvc.perform(post(createSubnetUri).contentType(MediaType.APPLICATION_JSON)
                    .content(UnitTestConfig.resource))
                    .andDo(print())
                    .andExpect(status().is(201))
                    .andExpect(MockMvcResultMatchers.jsonPath("$.route.destination").value(UnitTestConfig.cidr));
        } catch (Exception e) {
            assertEquals("{\"route\":null}", e.getMessage());
        }
    }

    /** DELETE /vpcs/{vpcId}/routes/{routeId} returns 200 and the deleted id when the route exists. */
    @Test
    public void deleteRuleById_deleteWhenIdExist_pass () throws Exception {
        Mockito.when(routeDatabaseService.getByRouteId(UnitTestConfig.routeId))
                .thenReturn(new RouteEntity(){{setId(UnitTestConfig.routeId);}});
        this.mockMvc.perform(delete(deleteUri))
                .andDo(print())
                .andExpect(status().isOk())
                .andExpect(MockMvcResultMatchers.jsonPath("$.id").value(UnitTestConfig.routeId));
    }

    /** DELETE of an unknown route id returns 200 with a null id — not a 404. */
    @Test
    public void deleteRuleById_deleteWhenIdNotExist_notPass () throws Exception {
        Mockito.when(routeDatabaseService.getByRouteId(UnitTestConfig.routeId))
                .thenReturn(null);
        String response = this.mockMvc.perform(delete(deleteUri))
                .andDo(print())
                .andExpect(status().isOk())
                .andReturn().getResponse().getContentAsString();
        assertEquals("{\"id\":null}", response);
    }

    @Before
    public void init() throws IOException {
        System.out.println("Start Test-----------------");
    }

    @After
    public void after() {
        System.out.println("End Test-----------------");
    }
}
package ddb.io.voxelnet.serial;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

/**
 * Common base for all array-typed serializable values.
 *
 * <p>This base class owns only the element count: it writes/reads the length as a
 * single {@code int} and accounts for it in the computed size. Subclasses are
 * responsible for serializing the elements themselves.
 */
public abstract class SeArrayValue extends SeValue
{
	// Number of elements in the array; filled in either by the constructor
	// (serialization path) or by deserializeFrom (deserialization path).
	private int length;
	
	protected SeArrayValue() {}
	
	protected SeArrayValue(int length)
	{
		this.length = length;
	}
	
	/**
	 * Writes the array length to the stream. Subclasses should call this first,
	 * then append their elements.
	 *
	 * @param output Destination stream
	 * @throws IOException If the underlying stream fails
	 */
	@Override
	public void serializeTo(DataOutputStream output) throws IOException
	{
		output.writeInt(this.length);
	}
	
	/**
	 * Reads the array length back from the stream.
	 *
	 * @param input Source stream
	 * @return Always {@code true}; no validation is performed on the stored length
	 * @throws IOException If the underlying stream fails
	 */
	@Override
	public boolean deserializeFrom(DataInputStream input) throws IOException
	{
		final int storedLength = input.readInt();
		this.length = storedLength;
		return true;
	}
	
	/** @return Size contributed by this base class: the 4-byte length prefix. */
	@Override
	public int getComputedSize()
	{
		return Integer.BYTES;
	}
	
	/**
	 * Gets the length of the array
	 * @return The length of the array
	 */
	public int getLength()
	{
		return this.length;
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.PrintStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LauncherSecurityManager;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * End-to-end tests for the {@code CellCounter} MapReduce tool: each test writes a
 * small two-row, two-family table into a mini cluster, runs CellCounter with a set
 * of CLI args (row prefix/regex, time range, column family filter), and asserts on
 * the text counts emitted to {@code part-r-00000}.
 */
@Category({MapReduceTests.class, LargeTests.class})
public class TestCellCounter {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestCellCounter.class);

  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  // \x01 prefix keeps the row keys binary so the prefix-matching args below are exercised.
  private static final byte[] ROW1 = Bytes.toBytesBinary("\\x01row1");
  private static final byte[] ROW2 = Bytes.toBytesBinary("\\x01row2");
  private static final String FAMILY_A_STRING = "a";
  private static final String FAMILY_B_STRING = "b";
  private static final byte[] FAMILY_A = Bytes.toBytes(FAMILY_A_STRING);
  private static final byte[] FAMILY_B = Bytes.toBytes(FAMILY_B_STRING);
  private static final byte[] QUALIFIER = Bytes.toBytes("q");

  private static Path FQ_OUTPUT_DIR;
  private static final String OUTPUT_DIR = "target" + File.separator + "test-data" + File.separator
      + "output";
  // Base timestamp for all cells; tests write versions at now, now+1, now+2.
  private static long now = System.currentTimeMillis();

  @Rule
  public TestName name = new TestName();

  @BeforeClass
  public static void beforeClass() throws Exception {
    UTIL.startMiniCluster();
    FQ_OUTPUT_DIR = new Path(OUTPUT_DIR).makeQualified(new LocalFileSystem());
    FileUtil.fullyDelete(new File(OUTPUT_DIR));
  }

  @AfterClass
  public static void afterClass() throws Exception {
    UTIL.shutdownMiniCluster();
  }

  /**
   * Test CellCounter all data should print to output
   */
  @Test
  public void testCellCounter() throws Exception {
    final TableName sourceTable = TableName.valueOf(name.getMethodName());
    byte[][] families = { FAMILY_A, FAMILY_B };
    Table t = UTIL.createTable(sourceTable, families);
    try{
      Put p = new Put(ROW1);
      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
      t.put(p);
      p = new Put(ROW2);
      p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
      t.put(p);
      // "^row1" is a row regex, so only ROW1's cells should be counted.
      String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1" };
      runCount(args);
      FileInputStream inputStream =
          new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
      String data = IOUtils.toString(inputStream);
      inputStream.close();
      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
      assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
      assertTrue(data.contains("Total ROWS" + "\t" + "1"));
      assertTrue(data.contains("b;q" + "\t" + "1"));
      assertTrue(data.contains("a;q" + "\t" + "1"));
      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
    }finally{
      t.close();
      FileUtil.fullyDelete(new File(OUTPUT_DIR));
    }
  }

  /**
   * Test CellCounter all data should print to output
   */
  @Test
  public void testCellCounterPrefix() throws Exception {
    final TableName sourceTable = TableName.valueOf(name.getMethodName());
    byte[][] families = { FAMILY_A, FAMILY_B };
    Table t = UTIL.createTable(sourceTable, families);
    try {
      Put p = new Put(ROW1);
      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
      t.put(p);
      p = new Put(ROW2);
      p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
      t.put(p);
      // Exact binary row prefix ("\x01row1") rather than a regex.
      String[] args =
          { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "\\x01row1" };
      runCount(args);
      FileInputStream inputStream =
          new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
      String data = IOUtils.toString(inputStream);
      inputStream.close();
      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
      assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
      assertTrue(data.contains("Total ROWS" + "\t" + "1"));
      assertTrue(data.contains("b;q" + "\t" + "1"));
      assertTrue(data.contains("a;q" + "\t" + "1"));
      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
    } finally {
      t.close();
      FileUtil.fullyDelete(new File(OUTPUT_DIR));
    }
  }

  /**
   * Test CellCounter with time range all data should print to output
   */
  @Test
  public void testCellCounterStartTimeRange() throws Exception {
    final TableName sourceTable = TableName.valueOf(name.getMethodName());
    byte[][] families = { FAMILY_A, FAMILY_B };
    Table t = UTIL.createTable(sourceTable, families);
    try{
      Put p = new Put(ROW1);
      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
      t.put(p);
      p = new Put(ROW2);
      p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
      t.put(p);
      // NOTE(review): "--endtime=" + now + 2 is *string concatenation*, not
      // arithmetic — the end time becomes <now> with "2" appended (a far-future
      // value), so this effectively tests only the start bound. Likely intended
      // (now + 2); confirm against CellCounter's [start, end) semantics before
      // changing, since the assertions below were written against this behavior.
      String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
          "--starttime=" + now, "--endtime=" + now + 2 };
      runCount(args);
      FileInputStream inputStream =
          new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
      String data = IOUtils.toString(inputStream);
      inputStream.close();
      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
      assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
      assertTrue(data.contains("Total ROWS" + "\t" + "1"));
      assertTrue(data.contains("b;q" + "\t" + "1"));
      assertTrue(data.contains("a;q" + "\t" + "1"));
      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
    }finally{
      t.close();
      FileUtil.fullyDelete(new File(OUTPUT_DIR));
    }
  }

  /**
   * Test CellCounter with time range all data should print to output
   */
  @Test
  public void testCellCounteEndTimeRange() throws Exception {
    final TableName sourceTable = TableName.valueOf(name.getMethodName());
    byte[][] families = { FAMILY_A, FAMILY_B };
    Table t = UTIL.createTable(sourceTable, families);
    try{
      Put p = new Put(ROW1);
      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
      t.put(p);
      p = new Put(ROW2);
      p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
      t.put(p);
      // NOTE(review): same string-concatenation issue as above — "--endtime=" + now + 1
      // appends "1" to <now> instead of computing now + 1. See note in
      // testCellCounterStartTimeRange.
      String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
          "--endtime=" + now + 1 };
      runCount(args);
      FileInputStream inputStream =
          new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
      String data = IOUtils.toString(inputStream);
      inputStream.close();
      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
      assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
      assertTrue(data.contains("Total ROWS" + "\t" + "1"));
      assertTrue(data.contains("b;q" + "\t" + "1"));
      assertTrue(data.contains("a;q" + "\t" + "1"));
      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
    }finally{
      t.close();
      FileUtil.fullyDelete(new File(OUTPUT_DIR));
    }
  }

  /**
   * Test CellCounter with time range all data should print to output
   */
  @Test
  public void testCellCounteOutOfTimeRange() throws Exception {
    final TableName sourceTable = TableName.valueOf(name.getMethodName());
    byte[][] families = { FAMILY_A, FAMILY_B };
    Table t = UTIL.createTable(sourceTable, families);
    try{
      Put p = new Put(ROW1);
      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
      t.put(p);
      p = new Put(ROW2);
      p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
      t.put(p);
      // NOTE(review): "--starttime=" + now + 1 concatenates, producing a start time
      // far beyond any written cell, so the empty-output assertion below holds because
      // of the concatenation. With the arithmetic fix (now + 1, now + 2) the cell at
      // now + 1 would fall in range and the assertion would fail — confirm intent
      // before "fixing".
      String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";",
          "--starttime=" + now + 1, "--endtime=" + now + 2 };
      runCount(args);
      FileInputStream inputStream =
          new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
      String data = IOUtils.toString(inputStream);
      inputStream.close();
      // nothing should have been emitted to the reducer
      assertTrue(data.isEmpty());
    }finally{
      t.close();
      FileUtil.fullyDelete(new File(OUTPUT_DIR));
    }
  }

  /**
   * Runs CellCounter through ToolRunner with the given CLI args.
   * @return true if the tool exited with status 0
   */
  private boolean runCount(String[] args) throws Exception {
    // need to make a copy of the configuration because to make sure
    // different temp dirs are used.
    int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new CellCounter(),
        args);
    return status == 0;
  }

  /**
   * Test main method of CellCounter
   */
  @Test
  public void testCellCounterMain() throws Exception {
    PrintStream oldPrintStream = System.err;
    SecurityManager SECURITY_MANAGER = System.getSecurityManager();
    // Intercepts System.exit so main() raises SecurityException instead of killing the JVM.
    LauncherSecurityManager newSecurityManager= new LauncherSecurityManager();
    System.setSecurityManager(newSecurityManager);
    ByteArrayOutputStream data = new ByteArrayOutputStream();
    String[] args = {};
    System.setErr(new PrintStream(data));
    try {
      // NOTE(review): System.setErr(new PrintStream(data)) is called twice (once just
      // above and again here); the duplicate is harmless but redundant.
      System.setErr(new PrintStream(data));
      try {
        CellCounter.main(args);
        fail("should be SecurityException");
      } catch (SecurityException e) {
        assertEquals(-1, newSecurityManager.getExitCode());
        assertTrue(data.toString().contains("ERROR: Wrong number of parameters:"));
        // should be information about usage
        assertTrue(data.toString().contains("Usage:"));
      }
    } finally {
      System.setErr(oldPrintStream);
      System.setSecurityManager(SECURITY_MANAGER);
    }
  }

  /**
   * Test CellCounter for complete table all data should print to output
   */
  @Test
  public void testCellCounterForCompleteTable() throws Exception {
    final TableName sourceTable = TableName.valueOf(name.getMethodName());
    // NOTE(review): no File.separator between OUTPUT_DIR and the table name — the
    // output lands in a sibling directory named "output<tableName>". Works, but is
    // presumably unintentional naming.
    String outputPath = OUTPUT_DIR + sourceTable;
    LocalFileSystem localFileSystem = new LocalFileSystem();
    Path outputDir =
        new Path(outputPath).makeQualified(localFileSystem.getUri(),
            localFileSystem.getWorkingDirectory());
    byte[][] families = { FAMILY_A, FAMILY_B };
    Table t = UTIL.createTable(sourceTable, families);
    try {
      Put p = new Put(ROW1);
      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
      t.put(p);
      p = new Put(ROW2);
      p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
      t.put(p);
      // No row filter: every cell in the table should be counted.
      String[] args = { sourceTable.getNameAsString(), outputDir.toString(), ";" };
      runCount(args);
      FileInputStream inputStream =
          new FileInputStream(outputPath + File.separator + "part-r-00000");
      String data = IOUtils.toString(inputStream);
      inputStream.close();
      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
      assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "4"));
      assertTrue(data.contains("Total ROWS" + "\t" + "2"));
      assertTrue(data.contains("b;q" + "\t" + "2"));
      assertTrue(data.contains("a;q" + "\t" + "2"));
      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
      assertTrue(data.contains("row2;a;q_Versions" + "\t" + "1"));
      assertTrue(data.contains("row2;b;q_Versions" + "\t" + "1"));
      FileUtil.fullyDelete(new File(outputPath));
      // Re-run with an explicit family list ("a, b"); output must match the unfiltered run.
      args = new String[] { "-D " + TableInputFormat.SCAN_COLUMN_FAMILY + "=a, b",
          sourceTable.getNameAsString(), outputDir.toString(), ";"};
      runCount(args);
      inputStream = new FileInputStream(outputPath + File.separator + "part-r-00000");
      String data2 = IOUtils.toString(inputStream);
      inputStream.close();
      assertEquals(data, data2);
    } finally {
      t.close();
      localFileSystem.close();
      FileUtil.fullyDelete(new File(outputPath));
    }
  }

  /** CellCounter must refuse to run (exit -1) when no output directory is given. */
  @Test
  public void TestCellCounterWithoutOutputDir() throws Exception {
    String[] args = new String[] { "tableName" };
    assertEquals("CellCounter should exit with -1 as output directory is not specified.", -1,
        ToolRunner.run(HBaseConfiguration.create(), new CellCounter(), args));
  }
}
package e.x.b; public interface p<P1, P2, R> { R invoke(P1 p1, P2 p2); }
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1 begin_comment comment|/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ end_comment begin_package DECL|package|org.apache.hadoop.fs.azure package|package name|org operator|. name|apache operator|. name|hadoop operator|. name|fs operator|. name|azure package|; end_package begin_import import|import name|org operator|. name|apache operator|. name|commons operator|. name|lang3 operator|. name|Validate import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|fs operator|. name|azure operator|. name|security operator|. name|Constants import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|fs operator|. name|azure operator|. name|security operator|. name|SpnegoToken import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|fs operator|. name|azure operator|. name|security operator|. name|WasbDelegationTokenIdentifier import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|io operator|. name|retry operator|. 
name|RetryPolicy import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|security operator|. name|UserGroupInformation import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|security operator|. name|authentication operator|. name|client operator|. name|AuthenticatedURL import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|security operator|. name|authentication operator|. name|client operator|. name|AuthenticationException import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|security operator|. name|authentication operator|. name|client operator|. name|Authenticator import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|security operator|. name|token operator|. name|Token import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|security operator|. name|token operator|. name|TokenIdentifier import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|security operator|. name|token operator|. name|delegation operator|. name|web operator|. name|KerberosDelegationTokenAuthenticator import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|http operator|. name|NameValuePair import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|http operator|. name|client operator|. name|methods operator|. name|HttpGet import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|http operator|. name|client operator|. name|methods operator|. name|HttpPost import|; end_import begin_import import|import name|org operator|. name|apache operator|. 
name|http operator|. name|client operator|. name|methods operator|. name|HttpPut import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|http operator|. name|client operator|. name|methods operator|. name|HttpUriRequest import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|http operator|. name|client operator|. name|utils operator|. name|URIBuilder import|; end_import begin_import import|import name|org operator|. name|slf4j operator|. name|Logger import|; end_import begin_import import|import name|org operator|. name|slf4j operator|. name|LoggerFactory import|; end_import begin_import import|import name|java operator|. name|io operator|. name|IOException import|; end_import begin_import import|import name|java operator|. name|net operator|. name|InetAddress import|; end_import begin_import import|import name|java operator|. name|net operator|. name|URISyntaxException import|; end_import begin_import import|import name|java operator|. name|security operator|. name|PrivilegedExceptionAction import|; end_import begin_import import|import name|java operator|. name|util operator|. name|List import|; end_import begin_comment comment|/** * Helper class the has constants and helper methods * used in WASB when integrating with a remote http cred * service which uses Kerberos and delegation tokens. * Currently, remote service will be used to generate * SAS keys, authorization and delegation token operations. */ end_comment begin_class DECL|class|SecureWasbRemoteCallHelper specifier|public class|class name|SecureWasbRemoteCallHelper extends|extends name|WasbRemoteCallHelper block|{ DECL|field|LOG specifier|public specifier|static specifier|final name|Logger name|LOG init|= name|LoggerFactory operator|. name|getLogger argument_list|( name|SecureWasbRemoteCallHelper operator|. name|class argument_list|) decl_stmt|; comment|/** * Delegation token query parameter to be used when making rest call. 
*/ DECL|field|DELEGATION_TOKEN_QUERY_PARAM_NAME specifier|private specifier|static specifier|final name|String name|DELEGATION_TOKEN_QUERY_PARAM_NAME init|= literal|"delegation" decl_stmt|; comment|/** * Delegation token to be used for making the remote call. */ DECL|field|delegationToken specifier|private name|Token argument_list|< name|? argument_list|> name|delegationToken init|= literal|null decl_stmt|; comment|/** * Does Remote Http Call requires Kerberos Authentication always, even if the delegation token is present. */ DECL|field|alwaysRequiresKerberosAuth specifier|private name|boolean name|alwaysRequiresKerberosAuth decl_stmt|; comment|/** * Enable caching of Spnego token. */ DECL|field|isSpnegoTokenCachingEnabled specifier|private name|boolean name|isSpnegoTokenCachingEnabled decl_stmt|; comment|/** * Cached SPNEGO token. */ DECL|field|spnegoToken specifier|private name|SpnegoToken name|spnegoToken decl_stmt|; DECL|method|SecureWasbRemoteCallHelper (RetryPolicy retryPolicy, boolean alwaysRequiresKerberosAuth, boolean isSpnegoTokenCachingEnabled) specifier|public name|SecureWasbRemoteCallHelper parameter_list|( name|RetryPolicy name|retryPolicy parameter_list|, name|boolean name|alwaysRequiresKerberosAuth parameter_list|, name|boolean name|isSpnegoTokenCachingEnabled parameter_list|) block|{ name|super argument_list|( name|retryPolicy argument_list|) expr_stmt|; name|this operator|. name|alwaysRequiresKerberosAuth operator|= name|alwaysRequiresKerberosAuth expr_stmt|; name|this operator|. 
name|isSpnegoTokenCachingEnabled operator|= name|isSpnegoTokenCachingEnabled expr_stmt|; block|} annotation|@ name|Override DECL|method|makeRemoteRequest (final String[] urls, final String path, final List<NameValuePair> queryParams, final String httpMethod) specifier|public name|String name|makeRemoteRequest parameter_list|( specifier|final name|String index|[] name|urls parameter_list|, specifier|final name|String name|path parameter_list|, specifier|final name|List argument_list|< name|NameValuePair argument_list|> name|queryParams parameter_list|, specifier|final name|String name|httpMethod parameter_list|) throws|throws name|IOException block|{ specifier|final name|UserGroupInformation name|ugi init|= name|UserGroupInformation operator|. name|getCurrentUser argument_list|() decl_stmt|; name|UserGroupInformation name|connectUgi init|= name|ugi operator|. name|getRealUser argument_list|() decl_stmt|; if|if condition|( name|connectUgi operator|!= literal|null condition|) block|{ name|queryParams operator|. name|add argument_list|( operator|new name|NameValuePair argument_list|() block|{ annotation|@ name|Override specifier|public name|String name|getName parameter_list|() block|{ return|return name|Constants operator|. name|DOAS_PARAM return|; block|} annotation|@ name|Override specifier|public name|String name|getValue parameter_list|() block|{ return|return name|ugi operator|. name|getShortUserName argument_list|() return|; block|} block|} argument_list|) expr_stmt|; block|} else|else block|{ name|connectUgi operator|= name|ugi expr_stmt|; block|} specifier|final name|Token name|delegationToken init|= name|getDelegationToken argument_list|( name|ugi argument_list|) decl_stmt|; if|if condition|( operator|! name|alwaysRequiresKerberosAuth operator|&& name|delegationToken operator|!= literal|null condition|) block|{ specifier|final name|String name|delegationTokenEncodedUrlString init|= name|delegationToken operator|. 
name|encodeToUrlString argument_list|() decl_stmt|; name|queryParams operator|. name|add argument_list|( operator|new name|NameValuePair argument_list|() block|{ annotation|@ name|Override specifier|public name|String name|getName parameter_list|() block|{ return|return name|DELEGATION_TOKEN_QUERY_PARAM_NAME return|; block|} annotation|@ name|Override specifier|public name|String name|getValue parameter_list|() block|{ return|return name|delegationTokenEncodedUrlString return|; block|} block|} argument_list|) expr_stmt|; block|} if|if condition|( name|delegationToken operator|== literal|null condition|) block|{ name|connectUgi operator|. name|checkTGTAndReloginFromKeytab argument_list|() expr_stmt|; block|} name|String name|s init|= literal|null decl_stmt|; try|try block|{ name|s operator|= name|connectUgi operator|. name|doAs argument_list|( operator|new name|PrivilegedExceptionAction argument_list|< name|String argument_list|> argument_list|() block|{ annotation|@ name|Override specifier|public name|String name|run parameter_list|() throws|throws name|Exception block|{ return|return name|retryableRequest argument_list|( name|urls argument_list|, name|path argument_list|, name|queryParams argument_list|, name|httpMethod argument_list|) return|; block|} block|} argument_list|) expr_stmt|; block|} catch|catch parameter_list|( name|InterruptedException name|e parameter_list|) block|{ name|Thread operator|. name|currentThread argument_list|() operator|. name|interrupt argument_list|() expr_stmt|; throw|throw operator|new name|IOException argument_list|( name|e operator|. 
name|getMessage argument_list|() argument_list|, name|e argument_list|) throw|; block|} return|return name|s return|; block|} annotation|@ name|Override DECL|method|getHttpRequest (String[] urls, String path, List<NameValuePair> queryParams, int urlIndex, String httpMethod, boolean requiresNewAuth) specifier|public name|HttpUriRequest name|getHttpRequest parameter_list|( name|String index|[] name|urls parameter_list|, name|String name|path parameter_list|, name|List argument_list|< name|NameValuePair argument_list|> name|queryParams parameter_list|, name|int name|urlIndex parameter_list|, name|String name|httpMethod parameter_list|, name|boolean name|requiresNewAuth parameter_list|) throws|throws name|URISyntaxException throws|, name|IOException block|{ name|URIBuilder name|uriBuilder init|= operator|new name|URIBuilder argument_list|( name|urls index|[ name|urlIndex index|] argument_list|) operator|. name|setPath argument_list|( name|path argument_list|) operator|. name|setParameters argument_list|( name|queryParams argument_list|) decl_stmt|; if|if condition|( name|uriBuilder operator|. name|getHost argument_list|() operator|. name|equals argument_list|( literal|"localhost" argument_list|) condition|) block|{ name|uriBuilder operator|. name|setHost argument_list|( name|InetAddress operator|. name|getLocalHost argument_list|() operator|. name|getCanonicalHostName argument_list|() argument_list|) expr_stmt|; block|} name|HttpUriRequest name|httpUriRequest init|= literal|null decl_stmt|; switch|switch condition|( name|httpMethod condition|) block|{ case|case name|HttpPut operator|. name|METHOD_NAME case|: name|httpUriRequest operator|= operator|new name|HttpPut argument_list|( name|uriBuilder operator|. name|build argument_list|() argument_list|) expr_stmt|; break|break; case|case name|HttpPost operator|. name|METHOD_NAME case|: name|httpUriRequest operator|= operator|new name|HttpPost argument_list|( name|uriBuilder operator|. 
name|build argument_list|() argument_list|) expr_stmt|; break|break; default|default: name|httpUriRequest operator|= operator|new name|HttpGet argument_list|( name|uriBuilder operator|. name|build argument_list|() argument_list|) expr_stmt|; break|break; block|} name|LOG operator|. name|debug argument_list|( literal|"SecureWasbRemoteCallHelper#getHttpRequest() {}" argument_list|, name|uriBuilder operator|. name|build argument_list|() operator|. name|toURL argument_list|() argument_list|) expr_stmt|; if|if condition|( name|alwaysRequiresKerberosAuth operator||| name|delegationToken operator|== literal|null condition|) block|{ name|AuthenticatedURL operator|. name|Token name|token init|= literal|null decl_stmt|; specifier|final name|Authenticator name|kerberosAuthenticator init|= operator|new name|KerberosDelegationTokenAuthenticator argument_list|() decl_stmt|; try|try block|{ if|if condition|( name|isSpnegoTokenCachingEnabled operator|&& operator|! name|requiresNewAuth operator|&& name|spnegoToken operator|!= literal|null operator|&& name|spnegoToken operator|. name|isTokenValid argument_list|() condition|) block|{ name|token operator|= name|spnegoToken operator|. name|getToken argument_list|() expr_stmt|; block|} else|else block|{ name|token operator|= operator|new name|AuthenticatedURL operator|. name|Token argument_list|() expr_stmt|; name|kerberosAuthenticator operator|. name|authenticate argument_list|( name|uriBuilder operator|. name|build argument_list|() operator|. name|toURL argument_list|() argument_list|, name|token argument_list|) expr_stmt|; name|spnegoToken operator|= operator|new name|SpnegoToken argument_list|( name|token argument_list|) expr_stmt|; block|} block|} catch|catch parameter_list|( name|AuthenticationException name|e parameter_list|) block|{ throw|throw operator|new name|WasbRemoteCallException argument_list|( name|Constants operator|. 
name|AUTHENTICATION_FAILED_ERROR_MESSAGE argument_list|, name|e argument_list|) throw|; block|} name|Validate operator|. name|isTrue argument_list|( name|token operator|. name|isSet argument_list|() argument_list|, literal|"Authenticated Token is NOT present. The request cannot proceed." argument_list|) expr_stmt|; name|httpUriRequest operator|. name|setHeader argument_list|( literal|"Cookie" argument_list|, name|AuthenticatedURL operator|. name|AUTH_COOKIE operator|+ literal|"=" operator|+ name|token argument_list|) expr_stmt|; block|} return|return name|httpUriRequest return|; block|} DECL|method|getDelegationToken ( UserGroupInformation userGroupInformation) specifier|private name|Token argument_list|< name|? argument_list|> name|getDelegationToken parameter_list|( name|UserGroupInformation name|userGroupInformation parameter_list|) throws|throws name|IOException block|{ if|if condition|( name|this operator|. name|delegationToken operator|== literal|null condition|) block|{ name|Token argument_list|< name|? argument_list|> name|token init|= literal|null decl_stmt|; for|for control|( name|Token name|iterToken range|: name|userGroupInformation operator|. name|getTokens argument_list|() control|) block|{ if|if condition|( name|iterToken operator|. name|getKind argument_list|() operator|. name|equals argument_list|( name|WasbDelegationTokenIdentifier operator|. name|TOKEN_KIND argument_list|) condition|) block|{ name|token operator|= name|iterToken expr_stmt|; name|LOG operator|. name|debug argument_list|( literal|"{} token found in cache : {}" argument_list|, name|WasbDelegationTokenIdentifier operator|. name|TOKEN_KIND argument_list|, name|iterToken argument_list|) expr_stmt|; break|break; block|} block|} name|LOG operator|. name|debug argument_list|( literal|"UGI Information: {}" argument_list|, name|userGroupInformation operator|. 
name|toString argument_list|() argument_list|) expr_stmt|; comment|// ugi tokens are usually indicative of a task which can't comment|// refetch tokens. even if ugi has credentials, don't attempt comment|// to get another token to match hdfs/rpc behavior if|if condition|( name|token operator|!= literal|null condition|) block|{ name|LOG operator|. name|debug argument_list|( literal|"Using UGI token: {}" argument_list|, name|token argument_list|) expr_stmt|; name|setDelegationToken argument_list|( name|token argument_list|) expr_stmt|; block|} block|} if|if condition|( name|LOG operator|. name|isDebugEnabled argument_list|() condition|) block|{ name|LOG operator|. name|debug argument_list|( literal|"Delegation token from cache - {}" argument_list|, name|delegationToken operator|!= literal|null condition|? name|delegationToken operator|. name|encodeToUrlString argument_list|() else|: literal|"null" argument_list|) expr_stmt|; block|} return|return name|this operator|. name|delegationToken return|; block|} DECL|method|setDelegationToken ( final Token<T> token) specifier|private parameter_list|< name|T extends|extends name|TokenIdentifier parameter_list|> name|void name|setDelegationToken parameter_list|( specifier|final name|Token argument_list|< name|T argument_list|> name|token parameter_list|) block|{ synchronized|synchronized init|( name|this init|) block|{ name|this operator|. name|delegationToken operator|= name|token expr_stmt|; block|} block|} block|} end_class end_unit
/*
 * Copyright 2012-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigureprocessor;

import java.io.OutputStream;

/**
 * Test configuration whose conditional annotation is placed on a method
 * rather than on the class itself.
 *
 * @author Madhura Bhave
 */
@TestConfiguration
public class TestMethodConfiguration {

	@TestConditionalOnClass(value = OutputStream.class, name = "java.io.InputStream")
	public Object method() {
		return null;
	}

}
package itesm.mx.expediciones_biosfera.behavior.activities;

import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Handler;
import android.support.design.widget.NavigationView;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import com.bumptech.glide.Glide;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;

import java.io.ByteArrayOutputStream;
import java.util.ArrayList;

import itesm.mx.expediciones_biosfera.R;
import itesm.mx.expediciones_biosfera.behavior.fragments.ReservationsListFragment;
import itesm.mx.expediciones_biosfera.behavior.fragments.PackagesFragment;
import itesm.mx.expediciones_biosfera.behavior.fragments.ProfileFragment;
import itesm.mx.expediciones_biosfera.database.operations.User;
import itesm.mx.expediciones_biosfera.database.operations.UserOperations;
import itesm.mx.expediciones_biosfera.utilities.CircleTransform;

/**
 * Main navigation-drawer host activity. Wires the toolbar, the drawer
 * toggle, and the {@link NavigationView}, and swaps the content fragment
 * (packages / reservations / profile) when a drawer entry is selected.
 */
public class DrawerActivity extends AppCompatActivity
        implements NavigationView.OnNavigationItemSelectedListener,
        ProfileFragment.OnSaveProfileListener {

    private Toolbar toolbar;
    private FirebaseAuth firebaseAuth;
    private FirebaseUser currentUser;
    private DrawerLayout drawer;
    private ActionBarDrawerToggle toggle;
    // Guards the "press back twice to exit" gesture; reset after 2 seconds.
    private boolean doubleBackToExitPressedOnce = false;
    ImageView ivPicture;
    TextView tvName;
    TextView tvMail;
    UserOperations dao;

    /**
     * Loads the current user's profile picture from the local database and
     * shows it, circle-cropped, in the drawer header.
     */
    public void setImage() {
        dao = new UserOperations(this);
        dao.open();
        ArrayList<User> users = dao.getAllUsers();
        dao.close();
        getFirebaseUser();
        String firebaseId = currentUser.getUid();
        for (int i = 0; i < users.size(); i++) {
            if (users.get(i).getFbid().equals(firebaseId)) {
                byte[] bytes = users.get(i).getPicture();
                Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
                // NOTE(review): decode + PNG re-encode looks redundant; Glide could
                // probably load `bytes` directly -- confirm the stored format first.
                ByteArrayOutputStream stream = new ByteArrayOutputStream();
                bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
                Glide.with(this)
                        .load(stream.toByteArray())
                        .asBitmap()
                        .transform(new CircleTransform(this))
                        .into(ivPicture);
                // The Firebase id is the lookup key; stop after the first match.
                break;
            }
        }
    }

    /** Installs the toolbar as the activity's action bar. */
    public void setToolbar() {
        toolbar = findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
    }

    /** Hooks the hamburger toggle up to the drawer layout. */
    public void setDrawerLayout() {
        drawer = findViewById(R.id.drawer_layout);
        toggle = new ActionBarDrawerToggle(this, drawer, toolbar,
                R.string.navigation_drawer_open, R.string.navigation_drawer_close) {
            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                // Refresh the options menu so drawer-dependent items update.
                invalidateOptionsMenu();
            }

            @Override
            public void onDrawerClosed(View view) {
                super.onDrawerClosed(view);
                invalidateOptionsMenu();
            }
        };
        toggle.setDrawerIndicatorEnabled(true);
        drawer.addDrawerListener(toggle);
        toggle.syncState();
    }

    /**
     * Populates the drawer header (name, e-mail, picture) from the signed-in
     * Firebase user and mirrors the checked item's title into the action bar.
     */
    public void configureNavigationView() {
        NavigationView navigationView = findViewById(R.id.nav_view);
        navigationView.setNavigationItemSelectedListener(this);
        navigationView.setCheckedItem(R.id.nav_packages);
        View headerView = navigationView.getHeaderView(0);
        tvName = headerView.findViewById(R.id.text_user_name);
        tvMail = headerView.findViewById(R.id.text_email);
        ivPicture = headerView.findViewById(R.id.image_profile);
        tvName.setText(currentUser.getDisplayName());
        tvMail.setText(currentUser.getEmail());
        setImage();
        Menu menu = navigationView.getMenu();
        for (int i = 0; i < menu.size(); i++) {
            MenuItem item = menu.getItem(i);
            if (item.isChecked()) {
                ActionBar supportActionBar = getSupportActionBar();
                if (supportActionBar != null) {
                    supportActionBar.setTitle(item.getTitle());
                }
            }
        }
    }

    /** Caches the FirebaseAuth instance and the currently signed-in user. */
    public void getFirebaseUser() {
        firebaseAuth = FirebaseAuth.getInstance();
        currentUser = firebaseAuth.getCurrentUser();
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_drawer);
        getFirebaseUser();
        setToolbar();
        setDrawerLayout();
        configureNavigationView();
        // Packages is the initial content fragment.
        PackagesFragment packagesFragment = new PackagesFragment();
        getSupportFragmentManager().beginTransaction()
                .add(R.id.content_frame, packagesFragment).commit();
    }

    /** Signs the user out of Firebase and returns to the authentication screen. */
    public void signOut() {
        firebaseAuth.signOut();
        Toast.makeText(this, getResources().getString(R.string.sign_out_success),
                Toast.LENGTH_LONG).show();
        Intent intent = new Intent(this, AuthenticationActivity.class);
        startActivity(intent);
        // NOTE(review): this activity stays on the back stack after sign-out;
        // consider calling finish() here -- confirm intended navigation flow.
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Let the drawer toggle consume the home/hamburger taps first.
        return toggle != null && toggle.onOptionsItemSelected(item)
                || super.onOptionsItemSelected(item);
    }

    @SuppressWarnings("StatementWithEmptyBody")
    @Override
    public boolean onNavigationItemSelected(MenuItem item) {
        int id = item.getItemId();
        Fragment fragment = null;
        if (id == R.id.nav_packages) {
            fragment = new PackagesFragment();
        } else if (id == R.id.nav_profile) {
            fragment = new ProfileFragment();
        } else if (id == R.id.nav_signout) {
            // Sign-out replaces no fragment; it navigates away instead.
            signOut();
        } else if (id == R.id.nav_reservations) {
            fragment = new ReservationsListFragment();
        }
        getSupportFragmentManager().popBackStack(null,
                FragmentManager.POP_BACK_STACK_INCLUSIVE);
        if (fragment != null) {
            getSupportFragmentManager().beginTransaction()
                    .replace(R.id.content_frame, fragment).commit();
            item.setChecked(true);
            ActionBar supportActionBar = getSupportActionBar();
            if (supportActionBar != null) {
                supportActionBar.setTitle(item.getTitle());
            }
        }
        drawer.closeDrawer(GravityCompat.START);
        return true;
    }

    @Override
    public void onBackPressed() {
        if (drawer.isDrawerOpen(GravityCompat.START)) {
            drawer.closeDrawer(GravityCompat.START);
        } else {
            if (doubleBackToExitPressedOnce) {
                // Second back press within 2s: navigate to the drawer's
                // 4th entry (presumably sign-out -- TODO confirm menu order).
                NavigationView navigationView = findViewById(R.id.nav_view);
                MenuItem item = navigationView.getMenu().getItem(3);
                onNavigationItemSelected(item);
                return;
            }
            doubleBackToExitPressedOnce = true;
            Toast.makeText(this, R.string.double_click_back, Toast.LENGTH_SHORT).show();
            new Handler().postDelayed(new Runnable() {
                @Override
                public void run() {
                    doubleBackToExitPressedOnce = false;
                }
            }, 2000);
        }
    }

    /** Callback from ProfileFragment: refresh the header picture and go back to packages. */
    public void closeProfile() {
        setImage();
        NavigationView navigationView = findViewById(R.id.nav_view);
        MenuItem item = navigationView.getMenu().getItem(0);
        onNavigationItemSelected(item);
    }
}
package com.example.goranminov.popmovies.syncMovie;

import android.content.Context;
import android.content.Intent;
import android.support.annotation.NonNull;

/**
 * Created by goranminov on 23/04/2017.
 *
 * Static helpers for triggering movie-data synchronization.
 */
public class MovieSyncUtils {

    /** Utility class: not meant to be instantiated. */
    private MovieSyncUtils() {
    }

    /**
     * Helper method to perform a sync immediately using an IntentService for asynchronous
     * execution.
     *
     * @param context The Context used to start the IntentService for the sync.
     */
    public static void startImmediateSync(@NonNull final Context context) {
        Intent intent = new Intent(context, MovieSyncIntentService.class);
        context.startService(intent);
    }
}
/*
 * JBoss, Home of Professional Open Source
 * Copyright 2013, Red Hat, Inc., and individual contributors
 * by the @authors tag. See the copyright.txt in the distribution for a
 * full listing of individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.cdi.tck.interceptors.tests.contract.aroundConstruct;

import jakarta.enterprise.context.Dependent;

/**
 * Dependent-scoped bean carrying a single immutable string, used as a
 * constructor parameter in the around-construct interceptor tests.
 */
@Dependent
public class BravoParameter {

    /** Value supplied at construction; never changes afterwards. */
    public final String value;

    public BravoParameter(String value) {
        this.value = value;
    }

    /** @return the value supplied at construction */
    public String getValue() {
        return this.value;
    }
}
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package leap.core.security;

import leap.core.AppContext;
import leap.core.security.crypto.PasswordEncoder;
import leap.lang.Args;

/**
 * Static facade over the security subsystem: current user lookup and
 * password encoding/matching via the app-scoped {@link PasswordEncoder}.
 */
public class SEC {

    /** @return the principal of the current security context's user */
    public static UserPrincipal user() {
        return SecurityContext.user();
    }

    /**
     * Encodes a plain-text password with the configured encoder.
     *
     * @param plainPassword the raw password; must not be null
     */
    public static String encodePassword(String plainPassword) {
        Args.notNull(plainPassword, "plain password");
        PasswordEncoder encoder = getPasswordEncoder();
        return encoder.encode(plainPassword);
    }

    /**
     * Checks a plain-text password against a previously encoded one.
     */
    public static boolean matchPassword(String plainPassword, String encodedPassword) {
        PasswordEncoder encoder = getPasswordEncoder();
        return encoder.matches(plainPassword, encodedPassword);
    }

    /** @return the {@link PasswordEncoder} bean from the application context */
    public static PasswordEncoder getPasswordEncoder() {
        return AppContext.factory().getBean(PasswordEncoder.class);
    }

    protected SEC() {

    }
}
package cat.xarxarepublicana.hashtagsxrep.infrastructure.repository.jdbc.mapper;

import cat.xarxarepublicana.hashtagsxrep.domain.extraction.TwitterExtraction;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

/**
 * MyBatis mapper for persisting Twitter extraction results.
 * The SQL statements are bound externally (mapper XML); only the method
 * contracts are declared here.
 */
@Mapper
public interface TwitterExtractionMapper {

    /**
     * Inserts one extraction row.
     *
     * @param twitterExtraction the extraction to persist
     * @param ranked whether the extraction counts as ranked -- NOTE(review):
     *        exact semantics depend on the bound SQL; confirm in the mapper XML
     */
    void insert(@Param("extraction") TwitterExtraction twitterExtraction, @Param("ranked") Boolean ranked);

    /**
     * Deletes all extraction data belonging to the given monitor.
     *
     * @param monitorId id of the monitor whose extraction data is removed
     */
    void deleteDataByMonitorId(@Param("monitorId") String monitorId);
}
/*
 * Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.cloudfront.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * A list of Amazon Web Services accounts and the active CloudFront key pairs in each account that CloudFront can use to
 * verify the signatures of signed URLs and signed cookies.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloudfront-2020-05-31/ActiveTrustedSigners" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ActiveTrustedSigners implements Serializable, Cloneable {

    /** True when any listed account has active key pairs CloudFront can use for signature verification. */
    private Boolean enabled;

    /** The number of Amazon Web Services accounts in the list. */
    private Integer quantity;

    /** Accounts plus identifiers of their active key pairs; lazily initialized by {@link #getItems()}. */
    private com.amazonaws.internal.SdkInternalList<Signer> items;

    /**
     * Default constructor. Populate the object afterwards with the setters or fluent with-methods.
     */
    public ActiveTrustedSigners() {
    }

    /**
     * Constructs an instance pre-populated with the given signers.
     *
     * @param items
     *        accounts and the identifiers of their active CloudFront key pairs
     */
    public ActiveTrustedSigners(java.util.List<Signer> items) {
        setItems(items);
    }

    /**
     * @param enabled
     *        true if any listed account has active key pairs usable for signature verification, false otherwise
     */
    public void setEnabled(Boolean enabled) {
        this.enabled = enabled;
    }

    /**
     * @return true if any listed account has active key pairs usable for signature verification, false otherwise
     */
    public Boolean getEnabled() {
        return this.enabled;
    }

    /**
     * Fluent variant of {@link #setEnabled(Boolean)}.
     *
     * @param enabled
     *        true if any listed account has active key pairs usable for signature verification
     * @return this object, so calls can be chained
     */
    public ActiveTrustedSigners withEnabled(Boolean enabled) {
        setEnabled(enabled);
        return this;
    }

    /**
     * @return true if any listed account has active key pairs usable for signature verification, false otherwise
     */
    public Boolean isEnabled() {
        return this.enabled;
    }

    /**
     * @param quantity
     *        the number of Amazon Web Services accounts in the list
     */
    public void setQuantity(Integer quantity) {
        this.quantity = quantity;
    }

    /**
     * @return the number of Amazon Web Services accounts in the list
     */
    public Integer getQuantity() {
        return this.quantity;
    }

    /**
     * Fluent variant of {@link #setQuantity(Integer)}.
     *
     * @param quantity
     *        the number of Amazon Web Services accounts in the list
     * @return this object, so calls can be chained
     */
    public ActiveTrustedSigners withQuantity(Integer quantity) {
        setQuantity(quantity);
        return this;
    }

    /**
     * @return the accounts and the identifiers of their active key pairs; never null (lazily initialized)
     */
    public java.util.List<Signer> getItems() {
        if (items == null) {
            items = new com.amazonaws.internal.SdkInternalList<Signer>();
        }
        return items;
    }

    /**
     * Replaces the list of accounts/key-pair identifiers with a defensive copy of the given collection.
     *
     * @param items
     *        the new contents, or null to clear
     */
    public void setItems(java.util.Collection<Signer> items) {
        this.items = (items == null) ? null : new com.amazonaws.internal.SdkInternalList<Signer>(items);
    }

    /**
     * Appends the given signers to the existing list (if any). Use {@link #setItems(java.util.Collection)} or
     * {@link #withItems(java.util.Collection)} to replace the existing values instead.
     *
     * @param items
     *        accounts and the identifiers of their active key pairs to append
     * @return this object, so calls can be chained
     */
    public ActiveTrustedSigners withItems(Signer... items) {
        if (this.items == null) {
            setItems(new com.amazonaws.internal.SdkInternalList<Signer>(items.length));
        }
        for (Signer signer : items) {
            this.items.add(signer);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setItems(java.util.Collection)} (replaces existing values).
     *
     * @param items
     *        accounts and the identifiers of their active key pairs
     * @return this object, so calls can be chained
     */
    public ActiveTrustedSigners withItems(java.util.Collection<Signer> items) {
        setItems(items);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getEnabled() != null) {
            sb.append("Enabled: ").append(getEnabled()).append(",");
        }
        if (getQuantity() != null) {
            sb.append("Quantity: ").append(getQuantity()).append(",");
        }
        if (getItems() != null) {
            sb.append("Items: ").append(getItems());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ActiveTrustedSigners)) {
            return false;
        }
        ActiveTrustedSigners other = (ActiveTrustedSigners) obj;
        // Null-safe field-by-field comparison via the public getters.
        return java.util.Objects.equals(getEnabled(), other.getEnabled())
                && java.util.Objects.equals(getQuantity(), other.getQuantity())
                && java.util.Objects.equals(getItems(), other.getItems());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + java.util.Objects.hashCode(getEnabled());
        hashCode = prime * hashCode + java.util.Objects.hashCode(getQuantity());
        hashCode = prime * hashCode + java.util.Objects.hashCode(getItems());
        return hashCode;
    }

    @Override
    public ActiveTrustedSigners clone() {
        try {
            return (ActiveTrustedSigners) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
package com.raed.drawingview.brushes.androidpathbrushes; import android.graphics.PorterDuff; import android.graphics.PorterDuffXfermode; import android.util.Log; public class Eraser extends PathBrush { private static final String TAG = "Eraser"; public Eraser(int minSizePx, int maxSizePx) { super(minSizePx, maxSizePx); mPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR)); mPaint.setColor(-1); } @Override public void setColor(int color) { Log.w(TAG,"Eraser does not has a color"); //Erasers do not have a color } }
package com.vitor.fp;

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Verifies that composing two add-by-2 functions and applying the result
 * to 2 yields 6 (2 + 2 + 2).
 */
class Example04Test {

    @Test
    public void testHigherOrderFunctionResult6() {
        final Integer base = 2;
        final Integer expected = 6;
        // Two identical increment-by-base functions, composed via Example04.
        IFunction<Integer, Integer> incrementOnce = n -> n + base;
        IFunction<Integer, Integer> incrementAgain = n -> n + base;
        IFunction<Integer, Integer> composed =
                Example04.compose.apply(incrementOnce).apply(incrementAgain);
        assertEquals(expected, composed.apply(base), "Should return 6");
    }
}
package org.opensrp.register.service.handler;

import static org.mockito.Mockito.inOrder;
import static org.mockito.MockitoAnnotations.initMocks;

import java.util.List;

import org.joda.time.LocalDate;
import org.json.JSONArray;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.opensrp.domain.Event;
import org.opensrp.register.service.handler.BaseScheduleHandler.ActionType;
import org.opensrp.register.service.scheduling.AnteNatalCareSchedulesService;
import org.opensrp.scheduler.HealthSchedulerService;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.modules.junit4.PowerMockRunner;

/**
 * Tests the ANC schedule handler against the JSON schedule configuration
 * loaded from TestResourceLoader: for every configured event matching the
 * test vaccination event, verifies that "enroll" actions enroll the mother
 * and "fulfill" actions fulfill the milestone on the mocked schedules service.
 */
@RunWith(PowerMockRunner.class)
@PowerMockIgnore({ "org.apache.log4j.*", "org.apache.commons.logging.*" })
public class ANCScheduleHandlerTest extends TestResourceLoader {

    // Mocked downstream service; the handler under test delegates to it.
    @Mock
    private AnteNatalCareSchedulesService anteNatalCareSchedulesService;

    // Handler under test; constructed in setUp with the mocked service.
    private ANCScheduleHandler aNCScheduleHandler;

    // Injected by Mockito but not used directly in this test class.
    @Mock
    private HealthSchedulerService scheduler;

    // JSON keys inside the schedule configuration file.
    private static final String JSON_KEY_TYPES = "types";
    private static final String JSON_KEY_EVENTS = "events";

    @Before
    public void setUp() throws Exception {
        initMocks(this);
        aNCScheduleHandler = new ANCScheduleHandler(anteNatalCareSchedulesService);
    }

    @Test
    public void shouldTestANCScheduleHandler() throws Exception {
        // Fixture event and schedule config come from TestResourceLoader
        // (geteventOfVaccination / getFile are defined there -- not visible here).
        Event event = geteventOfVaccination();
        JSONArray schedulesJsonObject = new JSONArray("[" + getFile() + "]");
        String scheduleName = null;
        // Walk every schedule and every configured event entry within it.
        for (int i = 0; i < schedulesJsonObject.length(); i++) {
            JSONObject scheduleJsonObject = schedulesJsonObject.getJSONObject(i);
            JSONArray eventsJsonArray = scheduleJsonObject.getJSONArray(JSON_KEY_EVENTS);
            for (int j = 0; j < eventsJsonArray.length(); j++) {
                JSONObject scheduleConfigEvent = eventsJsonArray.getJSONObject(j);
                JSONArray eventTypesJsonArray = scheduleConfigEvent.getJSONArray(JSON_KEY_TYPES);
                List<String> eventsList = jsonArrayToList(eventTypesJsonArray);
                // Only config entries whose event types include the fixture event apply.
                if (eventsList.contains(event.getEventType())) {
                    String action = aNCScheduleHandler.getAction(scheduleConfigEvent);
                    String milestone = aNCScheduleHandler.getMilestone(scheduleConfigEvent);
                    // Expected date hard-coded to match the fixture event's data.
                    LocalDate date = LocalDate.parse("2016-07-10");
                    if (milestone.equalsIgnoreCase("opv2") && action.equalsIgnoreCase(ActionType.enroll.toString())) {
                        aNCScheduleHandler.handle(event, scheduleConfigEvent, scheduleName);
                        InOrder inOrder = inOrder(anteNatalCareSchedulesService);
                        inOrder.verify(anteNatalCareSchedulesService).enrollMother(event.getBaseEntityId(), "Ante Natal Care Reminder Visit",
                            LocalDate.parse("2016-07-10"), event.getId());
                    } else if (milestone.equalsIgnoreCase("opv2") && action.equalsIgnoreCase(ActionType.fulfill.toString())) {
                        aNCScheduleHandler.handle(event, scheduleConfigEvent, scheduleName);
                        InOrder inOrder = inOrder(anteNatalCareSchedulesService);
                        inOrder.verify(anteNatalCareSchedulesService).fullfillMilestone(event.getBaseEntityId(), event.getProviderId(),
                            "Ante Natal Care Reminder Visit", date, event.getId());
                    } else {
                        // Intentionally empty: config entries with other
                        // milestones/actions are ignored by this test.
                    }
                }
            }
        }
    }
}
package ru.job4j.tracker.actions;

import ru.job4j.tracker.StartUI;
import ru.job4j.tracker.Tracker;
import ru.job4j.tracker.input.Input;

/**
 * Menu action that terminates the tracker application.
 */
public class ExitProgram extends BaseAction {

    /** Banner printed when the user chooses to quit. */
    private static final String FAREWELL = "------------ Good bye! --------------";

    public ExitProgram(int key, String info) {
        super(key, info);
    }

    /**
     * Raises the application-wide exit flag and prints the farewell banner.
     */
    @Override
    public void execute(Input input, Tracker tracker) {
        StartUI.exit = true;
        System.out.println(FAREWELL);
    }
}
/* * Copyright 2014-2020 Sayi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.deepoove.poi.config; /** * 默认模板语法 * * @author Sayi */ public enum GramerSymbol { /** * 图片 */ IMAGE('@'), /** * 文本 */ TEXT('\0'), /** * 表格 */ TABLE('#'), /** * 列表 */ NUMBERIC('*'), /** * 引用 */ DOCX_TEMPLATE('+'), /** * 循环(if & for each)语法块起始 */ ITERABLE_START('?'), /** * 语法块结束 */ BLOCK_END('/'); private char symbol; private GramerSymbol(char symbol) { this.symbol = symbol; } public char getSymbol() { return this.symbol; } @Override public String toString() { return String.valueOf(this.symbol); } }
/*
 * Copyright 2020 Anton Tananaev (anton )
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hazelcast.protocol;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import org.hazelcast.BaseFrameDecoder;
import org.hazelcast.helper.DataConverter;

import java.nio.charset.StandardCharsets;

/**
 * Splits the inbound byte stream into FutureWay protocol frames. The frame
 * length is carried as an 8-character ASCII hex field at offset 2.
 */
public class FutureWayFrameDecoder extends BaseFrameDecoder {

    @Override
    protected Object decode(
            ChannelHandlerContext ctx, Channel channel, ByteBuf buf) throws Exception {

        // Need at least the 2-byte prefix plus the 8-character hex length field.
        if (buf.readableBytes() < 10) {
            return null;
        }

        // Peek (without consuming) the hex length field at offset 2.
        String hexLength = buf.getCharSequence(
                buf.readerIndex() + 2, 8, StandardCharsets.US_ASCII).toString();
        int payloadLength = Unpooled.wrappedBuffer(DataConverter.parseHex(hexLength)).readInt();
        // +17 extra bytes — presumably fixed framing overhead around the payload (TODO confirm against protocol spec).
        int frameLength = payloadLength + 17;

        // Wait for the whole frame before emitting it downstream.
        if (buf.readableBytes() < frameLength) {
            return null;
        }
        return buf.readRetainedSlice(frameLength);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dubbo.common.serialize; import java.io.IOException; /** * Basic data type input interface. */ public interface DataInput { /** * Read boolean. * * @return boolean. * @throws IOException */ boolean readBool() throws IOException; /** * Read byte. * * @return byte value. * @throws IOException */ byte readByte() throws IOException; /** * Read short integer. * * @return short. * @throws IOException */ short readShort() throws IOException; /** * Read integer. * * @return integer. * @throws IOException */ int readInt() throws IOException; /** * Read long. * * @return long. * @throws IOException */ long readLong() throws IOException; /** * Read float. * * @return float. * @throws IOException */ float readFloat() throws IOException; /** * Read double. * * @return double. * @throws IOException */ double readDouble() throws IOException; /** * Read UTF-8 string. * * @return string. * @throws IOException */ String readUTF() throws IOException; /** * Read byte array. * * @return byte array. * @throws IOException */ byte[] readBytes() throws IOException; }
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti5.engine.impl.jobexecutor; import org.activiti.engine.runtime.Job; import org.activiti5.engine.impl.interceptor.CommandContext; import org.activiti5.engine.impl.persistence.entity.ExecutionEntity; import org.activiti5.engine.impl.pvm.runtime.AtomicOperation; /** * * @author Daniel Meyer */ public class AsyncContinuationJobHandler implements JobHandler { public final static String TYPE = "async-continuation"; public String getType() { return TYPE; } public void execute(Job job, String configuration, ExecutionEntity execution, CommandContext commandContext) { // ATM only AtomicOperationTransitionCreateScope can be performed asynchronously AtomicOperation atomicOperation = AtomicOperation.TRANSITION_CREATE_SCOPE; commandContext.performOperation(atomicOperation, execution); } }
package syntheagecco.openehr.sdk.model.generated.geccoserologischerbefundcomposition.definition;

import java.time.temporal.TemporalAmount;
import javax.annotation.processing.Generated;
import org.ehrbase.client.annotations.Entity;
import org.ehrbase.client.annotations.OptionFor;
import org.ehrbase.client.annotations.Path;
import org.ehrbase.client.classgenerator.interfaces.RMEntity;

/**
 * DV_DURATION option of the "Quantitatives Ergebnis" (quantitative result)
 * choice node of the GECCO serological-findings composition.
 *
 * NOTE: this file is generated by the openEHR SDK class generator — do not
 * edit by hand; regenerate from the archetype/template instead.
 */
@Entity
@Generated(
    value = "org.ehrbase.client.classgenerator.ClassGenerator",
    date = "2021-09-01T01:48:23.003818400+02:00",
    comments = "https://github.com/ehrbase/openEHR_SDK Version: 1.5.0"
)
@OptionFor("DV_DURATION")
public class ProAnalytQuantitativesErgebnisDvDuration implements RMEntity, ProAnalytQuantitativesErgebnisChoice {

  /**
   * Path: GECCO_Serologischer Befund/Befund/Jedes Ereignis/Labortest-Panel/Pro Analyt/Quantitatives Ergebnis/Quantitatives Ergebnis
   * Description: (Mess-)Wert des Analyt-Resultats.
   */
  // Measured value of the analyte result, expressed as a duration.
  @Path("|value")
  private TemporalAmount quantitativesErgebnisValue;

  public void setQuantitativesErgebnisValue(TemporalAmount quantitativesErgebnisValue) {
    this.quantitativesErgebnisValue = quantitativesErgebnisValue;
  }

  public TemporalAmount getQuantitativesErgebnisValue() {
    return this.quantitativesErgebnisValue;
  }
}
package com.callforcode.greenfarm.consts;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

/**
 * Exposes the Spring property {@code green-farm.session.authentication.enable}
 * through a static accessor.
 *
 * Spring cannot inject {@code @Value} into a static field directly, so the value
 * is routed through a private instance setter that writes the static field.
 * NOTE(review): this requires the class to be instantiated as a {@code @Component}
 * before the first static read — reads before context initialisation would see
 * the default {@code false}; confirm startup ordering.
 */
@Component
public class GreenFarmSwitch {

    // Backing flag for the session-authentication switch, populated by Spring.
    private static boolean sessionAuthentication;

    // Instance setter used only as an injection target for the static field above.
    @Value("${green-farm.session.authentication.enable}")
    private void setSessionAuthentication(boolean sessionSwitch) {
        sessionAuthentication = sessionSwitch;
    }

    /** Returns whether session authentication is enabled. */
    public static boolean isSessionAuthenticationOn() {
        return sessionAuthentication;
    }
}
/** * This file is a component of Quartz Powered, this license makes sure any work * associated with Quartz Powered, must follow the conditions of the license included. * * The MIT License (MIT) * * Copyright (c) 2015 Quartz Powered * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package org.quartzpowered.protocol.codec.v1_8_R1.play.client;

import org.quartzpowered.network.buffer.Buffer;
import org.quartzpowered.network.protocol.codec.Codec;
import org.quartzpowered.protocol.data.BlockPosition;
import org.quartzpowered.protocol.packet.play.client.ExplosionPacket;

import java.util.List;

/**
 * Wire codec for the 1.8 explosion packet: centre coordinates, radius, the list
 * of affected blocks (as signed byte offsets relative to the centre) and the
 * player's knock-back motion.
 */
public class ExplosionCodec implements Codec<ExplosionPacket> {

    @Override
    public void encode(Buffer buffer, ExplosionPacket packet) {
        // Coordinates and radius are doubles on the packet but floats on the wire.
        buffer.writeFloat((float) packet.getX());
        buffer.writeFloat((float) packet.getY());
        buffer.writeFloat((float) packet.getZ());
        buffer.writeFloat((float) packet.getRadius());

        List<BlockPosition> affected = packet.getRelativeBlockChanges();
        buffer.writeInt(affected.size());
        // Each affected block is written as three bytes (x, y, z offsets).
        for (int i = 0; i < affected.size(); i++) {
            BlockPosition offset = affected.get(i);
            buffer.writeByte(offset.getX());
            buffer.writeByte(offset.getY());
            buffer.writeByte(offset.getZ());
        }

        buffer.writeFloat((float) packet.getPlayerMotionX());
        buffer.writeFloat((float) packet.getPlayerMotionY());
        buffer.writeFloat((float) packet.getPlayerMotionZ());
    }

    @Override
    public void decode(Buffer buffer, ExplosionPacket packet) {
        packet.setX(buffer.readFloat());
        packet.setY(buffer.readFloat());
        packet.setZ(buffer.readFloat());
        packet.setRadius(buffer.readFloat());

        // Mirror of encode(): a count followed by that many 3-byte offsets,
        // appended to the packet's existing list.
        List<BlockPosition> affected = packet.getRelativeBlockChanges();
        int remaining = buffer.readInt();
        while (remaining-- > 0) {
            affected.add(new BlockPosition(
                    buffer.readByte(),
                    buffer.readByte(),
                    buffer.readByte()
            ));
        }

        packet.setPlayerMotionX(buffer.readFloat());
        packet.setPlayerMotionY(buffer.readFloat());
        packet.setPlayerMotionZ(buffer.readFloat());
    }
}
/*
 * Copyright 1999-2011 Alibaba Group.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.dubbo.monitor.support;

import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock;

import com.alibaba.dubbo.common.Constants;
import com.alibaba.dubbo.common.URL;
import com.alibaba.dubbo.monitor.Monitor;
import com.alibaba.dubbo.monitor.MonitorFactory;
import com.alibaba.dubbo.monitor.MonitorService;

/**
 * AbstractMonitorFactory. (SPI, Singleton, ThreadSafe)
 *
 * Caches one {@link Monitor} per monitor-service URL; subclasses only need to
 * implement {@link #createMonitor(URL)}.
 *
 * @author william.liangf
 */
public abstract class AbstractMonitorFactory implements MonitorFactory {

    // Guards monitor creation so each key is created at most once.
    private static final ReentrantLock LOCK = new ReentrantLock();

    // Monitor cache, keyed by the monitor service URL string.
    private static final Map<String, Monitor> MONITORS = new ConcurrentHashMap<String, Monitor>();

    /** Read-only view of all monitors created so far (across all factory subclasses). */
    public static Collection<Monitor> getMonitors() {
        return Collections.unmodifiableCollection(MONITORS.values());
    }

    /**
     * Returns the cached monitor for the given URL, creating it under the lock
     * on first access. The URL is first normalized to the MonitorService path
     * and interface so equivalent URLs share one cache key.
     */
    public Monitor getMonitor(URL url) {
        url = url.setPath(MonitorService.class.getName()).addParameter(Constants.INTERFACE_KEY, MonitorService.class.getName());
        String key = url.toServiceString();
        LOCK.lock();
        try {
            Monitor monitor = MONITORS.get(key);
            if (monitor != null) {
                return monitor;
            }
            monitor = createMonitor(url);
            if (monitor == null) {
                throw new IllegalStateException("Can not create monitor " + url);
            }
            MONITORS.put(key, monitor);
            return monitor;
        } finally {
            // release the lock
            LOCK.unlock();
        }
    }

    /** Creates a new monitor for the given (normalized) monitor URL; must not return null. */
    protected abstract Monitor createMonitor(URL url);
}
package com.sequenceiq.cloudbreak.repository;

import java.util.List;

import javax.inject.Inject;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.sequenceiq.cloudbreak.api.model.DetailedStackStatus;
import com.sequenceiq.cloudbreak.api.model.Status;
import com.sequenceiq.cloudbreak.cloud.store.InMemoryStateStore;
import com.sequenceiq.cloudbreak.converter.scheduler.StatusToPollGroupConverter;
import com.sequenceiq.cloudbreak.domain.Resource;
import com.sequenceiq.cloudbreak.domain.SecurityConfig;
import com.sequenceiq.cloudbreak.domain.Stack;
import com.sequenceiq.cloudbreak.domain.StackStatus;

/**
 * Persistence helper that mutates {@link Stack} aggregates: status transitions,
 * resource attachment/removal and security-config assignment.
 */
@Component
public class StackUpdater {

    // NOTE(review): LOGGER and stackStatusRepository are not referenced in this
    // class as shown — candidates for removal or pending use.
    private static final Logger LOGGER = LoggerFactory.getLogger(StackUpdater.class);

    @Inject
    private StackStatusRepository stackStatusRepository;

    @Inject
    private StackRepository stackRepository;

    @Inject
    private ResourceRepository resourceRepository;

    @Inject
    private StatusToPollGroupConverter statusToPollGroupConverter;

    @Inject
    private SecurityConfigRepository securityConfigRepository;

    /** Updates the stack status with an empty status reason. */
    public Stack updateStackStatus(Long stackId, DetailedStackStatus detailedStatus) {
        return doUpdateStackStatus(stackId, detailedStatus, "");
    }

    /** Updates the stack status, recording the supplied human-readable reason. */
    public Stack updateStackStatus(Long stackId, DetailedStackStatus detailedStatus, String statusReason) {
        return doUpdateStackStatus(stackId, detailedStatus, statusReason);
    }

    /**
     * Attaches the given resources to the stack and persists both sides of the
     * association; returns the saved stack.
     */
    public Stack addStackResources(Long stackId, List<Resource> resources) {
        Stack stack = stackRepository.findById(stackId);
        // Set the owning side before saving so the foreign key is populated.
        for (Resource resource : resources) {
            resource.setStack(stack);
        }
        resourceRepository.save(resources);
        stack.getResources().addAll(resources);
        return stackRepository.save(stack);
    }

    /** Deletes the given resources; the owning stacks are not modified here. */
    public void removeStackResources(List<Resource> resources) {
        resourceRepository.delete(resources);
    }

    /** Persists the security config first, then links it to the stack and saves the stack. */
    public Stack updateStackSecurityConfig(Stack stack, SecurityConfig securityConfig) {
        securityConfig = securityConfigRepository.save(securityConfig);
        stack.setSecurityConfig(securityConfig);
        return stackRepository.save(stack);
    }

    // Shared implementation of the two public status-update overloads.
    private Stack doUpdateStackStatus(Long stackId, DetailedStackStatus detailedStatus, String statusReason) {
        Stack stack = stackRepository.findById(stackId);
        Status status = detailedStatus.getStatus();
        // Deleted stacks are terminal: their status must never be rewritten.
        if (!stack.isDeleteCompleted()) {
            stack.setStackStatus(new StackStatus(stack, status, statusReason, detailedStatus));
            // Keep the in-memory poll-group store in sync with the persisted status.
            InMemoryStateStore.putStack(stackId, statusToPollGroupConverter.convert(status));
            if (Status.DELETE_COMPLETED.equals(status)) {
                InMemoryStateStore.deleteStack(stackId);
            }
            stack = stackRepository.save(stack);
        }
        return stack;
    }
}
/*
 * Copyright © 2013-2019 camunda services GmbH and various authors (info@camunda.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.rest.dto.runtime;

import static java.lang.Boolean.TRUE;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response.Status;

import org.camunda.bpm.engine.ProcessEngine;
import org.camunda.bpm.engine.rest.dto.AbstractQueryDto;
import org.camunda.bpm.engine.rest.dto.CamundaQueryParam;
import org.camunda.bpm.engine.rest.dto.VariableQueryParameterDto;
import org.camunda.bpm.engine.rest.dto.converter.BooleanConverter;
import org.camunda.bpm.engine.rest.dto.converter.StringListConverter;
import org.camunda.bpm.engine.rest.dto.converter.StringSetConverter;
import org.camunda.bpm.engine.rest.dto.converter.VariableListConverter;
import org.camunda.bpm.engine.rest.exception.InvalidRequestException;
import org.camunda.bpm.engine.runtime.ProcessInstanceQuery;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * REST query DTO for process instance queries: maps HTTP query parameters onto
 * a {@link ProcessInstanceQuery}. Setters are bound via
 * {@link CamundaQueryParam}; {@link #applyFilters} and {@link #applySortBy}
 * translate the captured values into engine query calls.
 */
public class ProcessInstanceQueryDto extends AbstractQueryDto<ProcessInstanceQuery> {

  // Accepted values for the sortBy query parameter.
  private static final String SORT_BY_INSTANCE_ID_VALUE = "instanceId";
  private static final String SORT_BY_DEFINITION_KEY_VALUE = "definitionKey";
  private static final String SORT_BY_DEFINITION_ID_VALUE = "definitionId";
  private static final String SORT_BY_TENANT_ID = "tenantId";
  private static final String SORT_BY_BUSINESS_KEY = "businessKey";

  private static final List<String> VALID_SORT_BY_VALUES;
  static {
    VALID_SORT_BY_VALUES = new ArrayList<String>();
    VALID_SORT_BY_VALUES.add(SORT_BY_INSTANCE_ID_VALUE);
    VALID_SORT_BY_VALUES.add(SORT_BY_DEFINITION_KEY_VALUE);
    VALID_SORT_BY_VALUES.add(SORT_BY_DEFINITION_ID_VALUE);
    VALID_SORT_BY_VALUES.add(SORT_BY_TENANT_ID);
    VALID_SORT_BY_VALUES.add(SORT_BY_BUSINESS_KEY);
  }

  // Filter criteria captured from query parameters; null/unset means "no filter".
  private String deploymentId;
  private String processDefinitionKey;
  private String businessKey;
  private String businessKeyLike;
  private String caseInstanceId;
  private String processDefinitionId;
  private String superProcessInstance;
  private String subProcessInstance;
  private String superCaseInstance;
  private String subCaseInstance;
  private Boolean active;
  private Boolean suspended;
  private Set<String> processInstanceIds;
  private String incidentId;
  private String incidentType;
  private String incidentMessage;
  private String incidentMessageLike;
  private List<String> tenantIds;
  private Boolean withoutTenantId;
  private List<String> activityIds;
  private Boolean rootProcessInstances;
  private Boolean isProcessDefinitionWithoutTenantId;
  private List<VariableQueryParameterDto> variables;

  public ProcessInstanceQueryDto() {
  }

  public ProcessInstanceQueryDto(ObjectMapper objectMapper, MultivaluedMap<String, String> queryParameters) {
    super(objectMapper, queryParameters);
  }

  // -------------------------------------------------------------------------
  // Getters/setters — each setter is bound to a query parameter of the same
  // name (or the name given in @CamundaQueryParam), with an optional converter
  // for non-string types.
  // -------------------------------------------------------------------------

  public Set<String> getProcessInstanceIds() {
    return processInstanceIds;
  }

  @CamundaQueryParam(value = "processInstanceIds", converter = StringSetConverter.class)
  public void setProcessInstanceIds(Set<String> processInstanceIds) {
    this.processInstanceIds = processInstanceIds;
  }

  public String getDeploymentId() {
    return deploymentId;
  }

  @CamundaQueryParam("deploymentId")
  public void setDeploymentId(String deploymentId) {
    this.deploymentId = deploymentId;
  }

  public String getProcessDefinitionKey() {
    return processDefinitionKey;
  }

  @CamundaQueryParam("processDefinitionKey")
  public void setProcessDefinitionKey(String processDefinitionKey) {
    this.processDefinitionKey = processDefinitionKey;
  }

  public String getBusinessKey() {
    return businessKey;
  }

  @CamundaQueryParam("businessKey")
  public void setBusinessKey(String businessKey) {
    this.businessKey = businessKey;
  }

  public String getBusinessKeyLike() {
    return businessKeyLike;
  }

  @CamundaQueryParam("businessKeyLike")
  public void setBusinessKeyLike(String businessKeyLike) {
    this.businessKeyLike = businessKeyLike;
  }

  public String getCaseInstanceId() {
    return caseInstanceId;
  }

  @CamundaQueryParam("caseInstanceId")
  public void setCaseInstanceId(String caseInstanceId) {
    this.caseInstanceId = caseInstanceId;
  }

  public String getProcessDefinitionId() {
    return processDefinitionId;
  }

  @CamundaQueryParam("processDefinitionId")
  public void setProcessDefinitionId(String processDefinitionId) {
    this.processDefinitionId = processDefinitionId;
  }

  public String getSuperProcessInstance() {
    return superProcessInstance;
  }

  @CamundaQueryParam("superProcessInstance")
  public void setSuperProcessInstance(String superProcessInstance) {
    this.superProcessInstance = superProcessInstance;
  }

  public String getSubProcessInstance() {
    return subProcessInstance;
  }

  @CamundaQueryParam("subProcessInstance")
  public void setSubProcessInstance(String subProcessInstance) {
    this.subProcessInstance = subProcessInstance;
  }

  public String getSuperCaseInstance() {
    return superCaseInstance;
  }

  @CamundaQueryParam("superCaseInstance")
  public void setSuperCaseInstance(String superCaseInstance) {
    this.superCaseInstance = superCaseInstance;
  }

  public String getSubCaseInstance() {
    return subCaseInstance;
  }

  @CamundaQueryParam("subCaseInstance")
  public void setSubCaseInstance(String subCaseInstance) {
    this.subCaseInstance = subCaseInstance;
  }

  public Boolean isActive() {
    return active;
  }

  @CamundaQueryParam(value = "active", converter = BooleanConverter.class)
  public void setActive(Boolean active) {
    this.active = active;
  }

  public Boolean isSuspended() {
    return suspended;
  }

  @CamundaQueryParam(value = "suspended", converter = BooleanConverter.class)
  public void setSuspended(Boolean suspended) {
    this.suspended = suspended;
  }

  public List<VariableQueryParameterDto> getVariables() {
    return variables;
  }

  @CamundaQueryParam(value = "variables", converter = VariableListConverter.class)
  public void setVariables(List<VariableQueryParameterDto> variables) {
    this.variables = variables;
  }

  public String getIncidentId() {
    return incidentId;
  }

  @CamundaQueryParam(value = "incidentId")
  public void setIncidentId(String incidentId) {
    this.incidentId = incidentId;
  }

  public String getIncidentType() {
    return incidentType;
  }

  @CamundaQueryParam(value = "incidentType")
  public void setIncidentType(String incidentType) {
    this.incidentType = incidentType;
  }

  public String getIncidentMessage() {
    return incidentMessage;
  }

  @CamundaQueryParam(value = "incidentMessage")
  public void setIncidentMessage(String incidentMessage) {
    this.incidentMessage = incidentMessage;
  }

  public String getIncidentMessageLike() {
    return incidentMessageLike;
  }

  @CamundaQueryParam(value = "incidentMessageLike")
  public void setIncidentMessageLike(String incidentMessageLike) {
    this.incidentMessageLike = incidentMessageLike;
  }

  public List<String> getTenantIdIn() {
    return tenantIds;
  }

  @CamundaQueryParam(value = "tenantIdIn", converter = StringListConverter.class)
  public void setTenantIdIn(List<String> tenantIds) {
    this.tenantIds = tenantIds;
  }

  public Boolean isWithoutTenantId() {
    return withoutTenantId;
  }

  @CamundaQueryParam(value = "withoutTenantId", converter = BooleanConverter.class)
  public void setWithoutTenantId(Boolean withoutTenantId) {
    this.withoutTenantId = withoutTenantId;
  }

  public List<String> getActivityIds() {
    return activityIds;
  }

  @CamundaQueryParam(value = "activityIdIn", converter = StringListConverter.class)
  public void setActivityIdIn(List<String> activityIds) {
    this.activityIds = activityIds;
  }

  public Boolean isRootProcessInstances() {
    return rootProcessInstances;
  }

  @CamundaQueryParam(value = "rootProcessInstances", converter = BooleanConverter.class)
  public void setRootProcessInstances(Boolean rootProcessInstances) {
    this.rootProcessInstances = rootProcessInstances;
  }

  public Boolean isProcessDefinitionWithoutTenantId() {
    return isProcessDefinitionWithoutTenantId;
  }

  @CamundaQueryParam(value = "processDefinitionWithoutTenantId", converter = BooleanConverter.class)
  public void setProcessDefinitionWithoutTenantId(Boolean isProcessDefinitionWithoutTenantId) {
    this.isProcessDefinitionWithoutTenantId = isProcessDefinitionWithoutTenantId;
  }

  @Override
  protected boolean isValidSortByValue(String value) {
    return VALID_SORT_BY_VALUES.contains(value);
  }

  @Override
  protected ProcessInstanceQuery createNewQuery(ProcessEngine engine) {
    return engine.getRuntimeService().createProcessInstanceQuery();
  }

  /**
   * Applies every non-null filter captured by the setters to the engine query.
   * Boolean flags are applied only when explicitly TRUE.
   */
  @Override
  protected void applyFilters(ProcessInstanceQuery query) {
    if (processInstanceIds != null) {
      query.processInstanceIds(processInstanceIds);
    }
    if (processDefinitionKey != null) {
      query.processDefinitionKey(processDefinitionKey);
    }
    if (deploymentId != null) {
      query.deploymentId(deploymentId);
    }
    if (businessKey != null) {
      query.processInstanceBusinessKey(businessKey);
    }
    if (businessKeyLike != null) {
      query.processInstanceBusinessKeyLike(businessKeyLike);
    }
    if (caseInstanceId != null) {
      query.caseInstanceId(caseInstanceId);
    }
    if (processDefinitionId != null) {
      query.processDefinitionId(processDefinitionId);
    }
    if (superProcessInstance != null) {
      query.superProcessInstanceId(superProcessInstance);
    }
    if (subProcessInstance != null) {
      query.subProcessInstanceId(subProcessInstance);
    }
    if (superCaseInstance != null) {
      query.superCaseInstanceId(superCaseInstance);
    }
    if (subCaseInstance != null) {
      query.subCaseInstanceId(subCaseInstance);
    }
    if (TRUE.equals(active)) {
      query.active();
    }
    if (TRUE.equals(suspended)) {
      query.suspended();
    }
    if (incidentId != null) {
      query.incidentId(incidentId);
    }
    if (incidentType != null) {
      query.incidentType(incidentType);
    }
    if (incidentMessage != null) {
      query.incidentMessage(incidentMessage);
    }
    if (incidentMessageLike != null) {
      query.incidentMessageLike(incidentMessageLike);
    }
    if (tenantIds != null && !tenantIds.isEmpty()) {
      query.tenantIdIn(tenantIds.toArray(new String[tenantIds.size()]));
    }
    if (TRUE.equals(withoutTenantId)) {
      query.withoutTenantId();
    }
    if (activityIds != null && !activityIds.isEmpty()) {
      query.activityIdIn(activityIds.toArray(new String[activityIds.size()]));
    }
    if (TRUE.equals(rootProcessInstances)) {
      query.rootProcessInstances();
    }
    if (TRUE.equals(isProcessDefinitionWithoutTenantId)) {
      query.processDefinitionWithoutTenantId();
    }
    // Variable filters: each entry carries a name, a comparison operator and a
    // value resolved via the shared ObjectMapper; unknown operators are rejected.
    if (variables != null) {
      for (VariableQueryParameterDto variableQueryParam : variables) {
        String variableName = variableQueryParam.getName();
        String op = variableQueryParam.getOperator();
        Object variableValue = variableQueryParam.resolveValue(objectMapper);
        if (op.equals(VariableQueryParameterDto.EQUALS_OPERATOR_NAME)) {
          query.variableValueEquals(variableName, variableValue);
        } else if (op.equals(VariableQueryParameterDto.GREATER_THAN_OPERATOR_NAME)) {
          query.variableValueGreaterThan(variableName, variableValue);
        } else if (op.equals(VariableQueryParameterDto.GREATER_THAN_OR_EQUALS_OPERATOR_NAME)) {
          query.variableValueGreaterThanOrEqual(variableName, variableValue);
        } else if (op.equals(VariableQueryParameterDto.LESS_THAN_OPERATOR_NAME)) {
          query.variableValueLessThan(variableName, variableValue);
        } else if (op.equals(VariableQueryParameterDto.LESS_THAN_OR_EQUALS_OPERATOR_NAME)) {
          query.variableValueLessThanOrEqual(variableName, variableValue);
        } else if (op.equals(VariableQueryParameterDto.NOT_EQUALS_OPERATOR_NAME)) {
          query.variableValueNotEquals(variableName, variableValue);
        } else if (op.equals(VariableQueryParameterDto.LIKE_OPERATOR_NAME)) {
          query.variableValueLike(variableName, String.valueOf(variableValue));
        } else {
          throw new InvalidRequestException(Status.BAD_REQUEST, "Invalid variable comparator specified: " + op);
        }
      }
    }
  }

  /**
   * Maps the (already validated — see {@link #isValidSortByValue}) sortBy value
   * onto the corresponding engine ordering call; unknown values are silently ignored.
   */
  @Override
  protected void applySortBy(ProcessInstanceQuery query, String sortBy, Map<String, Object> parameters, ProcessEngine engine) {
    if (sortBy.equals(SORT_BY_INSTANCE_ID_VALUE)) {
      query.orderByProcessInstanceId();
    } else if (sortBy.equals(SORT_BY_DEFINITION_KEY_VALUE)) {
      query.orderByProcessDefinitionKey();
    } else if (sortBy.equals(SORT_BY_DEFINITION_ID_VALUE)) {
      query.orderByProcessDefinitionId();
    } else if (sortBy.equals(SORT_BY_TENANT_ID)) {
      query.orderByTenantId();
    } else if (sortBy.equals(SORT_BY_BUSINESS_KEY)) {
      query.orderByBusinessKey();
    }
  }
}
package com.hubspot.singularity.data.history;

import java.util.Collections;
import java.util.List;

import com.google.common.base.Optional;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.hubspot.mesos.JavaUtils;
import com.hubspot.singularity.OrderDirection;
import com.hubspot.singularity.SingularityRequestHistory;
import com.hubspot.singularity.config.SingularityConfiguration;
import com.hubspot.singularity.data.RequestManager;

/**
 * Blends request history stored in ZooKeeper (recent entries) with history
 * persisted to the SQL database (older entries), keyed by request id.
 */
@Singleton
public class RequestHistoryHelper extends BlendedHistoryHelper<SingularityRequestHistory, String> {

  private final RequestManager requestManager;
  private final HistoryManager historyManager;

  @Inject
  public RequestHistoryHelper(RequestManager requestManager, HistoryManager historyManager, SingularityConfiguration configuration) {
    // Parent's sqlEnabled flag is driven by whether a database is configured.
    super(configuration.getDatabaseConfiguration().isPresent());
    this.requestManager = requestManager;
    this.historyManager = historyManager;
  }

  /**
   * Returns the ZK-resident history for the request, sorted via natural
   * ordering of SingularityRequestHistory (presumably newest first — confirm
   * against its compareTo).
   */
  @Override
  protected List<SingularityRequestHistory> getFromZk(String requestId) {
    List<SingularityRequestHistory> requestHistory = requestManager.getRequestHistory(requestId);
    Collections.sort(requestHistory);
    return requestHistory;
  }

  /** Returns a page of persisted history, newest first. */
  @Override
  protected List<SingularityRequestHistory> getFromHistory(String requestId, int historyStart, int numFromHistory) {
    return historyManager.getRequestHistory(requestId, Optional.of(OrderDirection.DESC), historyStart, numFromHistory);
  }

  /**
   * Returns the oldest known history entry: the first ASC row in the database
   * if any exist, otherwise the last entry still held in ZK.
   */
  public Optional<SingularityRequestHistory> getFirstHistory(String requestId) {
    Optional<SingularityRequestHistory> firstHistory = JavaUtils.getFirst(historyManager.getRequestHistory(requestId, Optional.of(OrderDirection.ASC), 0, 1));
    if (firstHistory.isPresent()) {
      return firstHistory;
    }
    return JavaUtils.getLast(getFromZk(requestId));
  }

  /**
   * Returns the newest known history entry: the head of the sorted ZK list if
   * present, otherwise the first DESC row in the database.
   */
  public Optional<SingularityRequestHistory> getLastHistory(String requestId) {
    Optional<SingularityRequestHistory> lastHistory = JavaUtils.getFirst(getFromZk(requestId));
    if (lastHistory.isPresent()) {
      return lastHistory;
    }
    return JavaUtils.getFirst(historyManager.getRequestHistory(requestId, Optional.of(OrderDirection.DESC), 0, 1));
  }

  /**
   * Total entry count across both stores; when SQL is enabled and the caller
   * allows it, the (relatively expensive) ZK read is skipped.
   */
  @Override
  protected Optional<Integer> getTotalCount(String requestId, boolean canSkipZk) {
    int numFromZk;
    if (sqlEnabled && canSkipZk) {
      numFromZk = 0;
    } else {
      numFromZk = requestManager.getRequestHistory(requestId).size();
    }
    int numFromHistory = historyManager.getRequestHistoryCount(requestId);
    return Optional.of(numFromZk + numFromHistory);
  }
}
package com.x.attendance.assemble.control.jaxrs.attendanceadmin;

import com.x.base.core.exception.PromptException;

/**
 * Prompt exception raised when deleting an attendance administrator by ID fails.
 * The message text is user-facing (Chinese: "an exception occurred while deleting
 * administrator information by ID") and must remain unchanged.
 */
class AttendanceAdminDeleteException extends PromptException {

    private static final long serialVersionUID = 1859164370743532895L;

    // Wraps the underlying cause and includes the offending admin ID in the message.
    AttendanceAdminDeleteException(Throwable e, String id) {
        super("系统在根据ID删除管理员信息时发生异常.ID:" + id, e);
    }
}
package cl.agriapp.lite.queryside.party.infraestructure.scheduler;

import cl.agriapp.lite.queryside.party.infraestructure.producer.event.PartyCreatedEvent;
import cl.agriapp.lite.queryside.party.infraestructure.producer.handler.PartyEventHandler;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

/**
 * Demo scheduler that emits a synthetic {@link PartyCreatedEvent} every two
 * seconds and forwards it to the {@link PartyEventHandler}.
 */
@Log4j2
@Component
public class PartyEventScheduler {

    @Autowired
    PartyEventHandler partyEventHandler;

    /**
     * Builds a synthetic party-created event with a random id and fixed names,
     * hands it to the event handler, and logs the emission.
     */
    @Scheduled(fixedRate = 2000)
    public void emittPartyCreatedEvent() {
        PartyCreatedEvent partyCreatedEvent = new PartyCreatedEvent(
                // String.valueOf(double) yields the same text as Double.toString() without boxing.
                String.valueOf(Math.random()),
                "Daniel",
                "Carvajal");
        partyEventHandler.handle(partyCreatedEvent);
        // Parameterized logging defers formatting until the level is known to be enabled.
        log.info("Event emitted: {} ", partyCreatedEvent);
    }
}
/******************************************************************************* * Copyright (c) 1991, 2021 IBM Corp. and others * * This program and the accompanying materials are made available under * the terms of the Eclipse Public License 2.0 which accompanies this * distribution and is available at https://www.eclipse.org/legal/epl-2.0/ * or the Apache License, Version 2.0 which accompanies this distribution * and is available at https://www.apache.org/licenses/LICENSE-2.0. * * This Source Code may also be made available under the following * Secondary Licenses when the conditions for such availability set * forth in the Eclipse Public License, v. 2.0 are satisfied: GNU * General Public License, version 2 with the GNU Classpath * Exception [1] and GNU General Public License, version 2 with the * OpenJDK Assembly Exception [2]. * * [1] https://www.gnu.org/software/classpath/license.html * [2] http://openjdk.java.net/legal/assembly-exception.html * * SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 OR LicenseRef-GPL-2.0 WITH Assembly-exception *******************************************************************************/ /** * WARNING!!! GENERATED FILE * * This class is generated. * Do not use the Eclipse "Organize Imports" feature on this class. * * It can contain user content, but that content must be delimited with the * the tags * [BEGIN USER IMPORTS] * [END USER IMPORTS] * * or * * [BEGIN USER CODE] * [END USER CODE] * * These tags are entered as comments. Characters before [ and after ] are ignored. * Lines between the tags are inserted into the newly generated file. * * IMPORTS are combined and inserted above newly generated imports. CODE is combined * and inserted immediately after the class declaration * * All lines outside these tags are lost and replaced with newly generated code. 
 */
package com.ibm.j9ddr.vm29.pointer.generated;

/*[BEGIN USER IMPORTS]*/
/*[END USER IMPORTS]*/

import com.ibm.j9ddr.CorruptDataException;
import com.ibm.j9ddr.vm29.pointer.*;
import com.ibm.j9ddr.vm29.structure.*;
import com.ibm.j9ddr.vm29.types.*;

/**
 * Structure: GC_ClassStaticsDeclarationOrderIteratorPointer
 *
 * A generated implementation of a VM structure
 *
 * This class contains generated code and MAY contain hand written user code.
 *
 * Hand written user code must be contained at the top of
 * the class file, specifically above
 * the comment line containing WARNING!!! GENERATED CODE
 *
 * ALL code below the GENERATED warning will be replaced with new generated code
 * each time the PointerGenerator utility is run.
 *
 * The generated code will provide getters for all elements in the GC_ClassStaticsDeclarationOrderIteratorPointer
 * structure. Where possible, meaningful return types are inferred.
 *
 * The user may add methods to provide meaningful return types where only pointers
 * could be automatically inferred.
 */
@com.ibm.j9ddr.GeneratedPointerClass(structureClass=GC_ClassStaticsDeclarationOrderIterator.class)
public class GC_ClassStaticsDeclarationOrderIteratorPointer extends StructurePointer {

	// NULL
	public static final GC_ClassStaticsDeclarationOrderIteratorPointer NULL = new GC_ClassStaticsDeclarationOrderIteratorPointer(0);

	/*[BEGIN USER CODE]*/
	/*[END USER CODE]*/

	// Do not call this constructor. Use static method cast instead.
	protected GC_ClassStaticsDeclarationOrderIteratorPointer(long address) {
		super(address);
	}

	// Re-types an existing pointer, reusing its target address.
	public static GC_ClassStaticsDeclarationOrderIteratorPointer cast(AbstractPointer structure) {
		return cast(structure.getAddress());
	}

	public static GC_ClassStaticsDeclarationOrderIteratorPointer cast(UDATA udata) {
		return cast(udata.longValue());
	}

	// Canonical factory: address 0 always maps to the shared NULL instance.
	public static GC_ClassStaticsDeclarationOrderIteratorPointer cast(long address) {
		if (address == 0) {
			return NULL;
		}
		return new GC_ClassStaticsDeclarationOrderIteratorPointer(address);
	}

	// Pointer arithmetic in units of the structure size (like C "ptr + count").
	public GC_ClassStaticsDeclarationOrderIteratorPointer add(long count) {
		return GC_ClassStaticsDeclarationOrderIteratorPointer.cast(address + (GC_ClassStaticsDeclarationOrderIterator.SIZEOF * count));
	}

	public GC_ClassStaticsDeclarationOrderIteratorPointer add(Scalar count) {
		return add(count.longValue());
	}

	// Pointer arithmetic in raw bytes rather than structure-sized units.
	public GC_ClassStaticsDeclarationOrderIteratorPointer addOffset(long offset) {
		return GC_ClassStaticsDeclarationOrderIteratorPointer.cast(address + offset);
	}

	public GC_ClassStaticsDeclarationOrderIteratorPointer addOffset(Scalar offset) {
		return addOffset(offset.longValue());
	}

	public GC_ClassStaticsDeclarationOrderIteratorPointer sub(long count) {
		return GC_ClassStaticsDeclarationOrderIteratorPointer.cast(address - (GC_ClassStaticsDeclarationOrderIterator.SIZEOF * count));
	}

	public GC_ClassStaticsDeclarationOrderIteratorPointer sub(Scalar count) {
		return sub(count.longValue());
	}

	public GC_ClassStaticsDeclarationOrderIteratorPointer subOffset(long offset) {
		return GC_ClassStaticsDeclarationOrderIteratorPointer.cast(address - offset);
	}

	public GC_ClassStaticsDeclarationOrderIteratorPointer subOffset(Scalar offset) {
		return subOffset(offset.longValue());
	}

	// Clears the given low-order tag bits from the address.
	public GC_ClassStaticsDeclarationOrderIteratorPointer untag(long mask) {
		return GC_ClassStaticsDeclarationOrderIteratorPointer.cast(address & ~mask);
	}

	// Default untag mask covers the bits below pointer-size alignment.
	public GC_ClassStaticsDeclarationOrderIteratorPointer untag() {
		return untag(UDATA.SIZEOF - 1);
	}

	protected long sizeOfBaseType() {
		return GC_ClassStaticsDeclarationOrderIterator.SIZEOF;
	}

	// Implementation methods

	// J9Class* _clazz
	@com.ibm.j9ddr.GeneratedFieldAccessor(offsetFieldName="__clazzOffset_", declaredType="J9Class*")
	public J9ClassPointer _clazz() throws CorruptDataException {
		return J9ClassPointer.cast(getPointerAtOffset(GC_ClassStaticsDeclarationOrderIterator.__clazzOffset_));
	}

	// J9Class* _clazz
	public PointerPointer _clazzEA() throws CorruptDataException {
		return PointerPointer.cast(nonNullFieldEA(GC_ClassStaticsDeclarationOrderIterator.__clazzOffset_));
	}

	// J9ROMFieldShape* _fieldShape
	@com.ibm.j9ddr.GeneratedFieldAccessor(offsetFieldName="__fieldShapeOffset_", declaredType="J9ROMFieldShape*")
	public J9ROMFieldShapePointer _fieldShape() throws CorruptDataException {
		return J9ROMFieldShapePointer.cast(getPointerAtOffset(GC_ClassStaticsDeclarationOrderIterator.__fieldShapeOffset_));
	}

	// J9ROMFieldShape* _fieldShape
	public PointerPointer _fieldShapeEA() throws CorruptDataException {
		return PointerPointer.cast(nonNullFieldEA(GC_ClassStaticsDeclarationOrderIterator.__fieldShapeOffset_));
	}

	// IDATA _index
	@com.ibm.j9ddr.GeneratedFieldAccessor(offsetFieldName="__indexOffset_", declaredType="IDATA")
	public IDATA _index() throws CorruptDataException {
		return getIDATAAtOffset(GC_ClassStaticsDeclarationOrderIterator.__indexOffset_);
	}

	// IDATA _index
	public IDATAPointer _indexEA() throws CorruptDataException {
		return IDATAPointer.cast(nonNullFieldEA(GC_ClassStaticsDeclarationOrderIterator.__indexOffset_));
	}

	// J9JavaVM* _javaVM
	@com.ibm.j9ddr.GeneratedFieldAccessor(offsetFieldName="__javaVMOffset_", declaredType="J9JavaVM*")
	public J9JavaVMPointer _javaVM() throws CorruptDataException {
		return J9JavaVMPointer.cast(getPointerAtOffset(GC_ClassStaticsDeclarationOrderIterator.__javaVMOffset_));
	}

	// J9JavaVM* _javaVM
	public PointerPointer _javaVMEA() throws CorruptDataException {
		return PointerPointer.cast(nonNullFieldEA(GC_ClassStaticsDeclarationOrderIterator.__javaVMOffset_));
	}

	// J9ROMFullTraversalFieldOffsetWalkState _walkState
	// Embedded (inline) struct field: the accessor returns the field's effective
	// address rather than loading a pointer value from it.
	@com.ibm.j9ddr.GeneratedFieldAccessor(offsetFieldName="__walkStateOffset_", declaredType="J9ROMFullTraversalFieldOffsetWalkState")
	public J9ROMFullTraversalFieldOffsetWalkStatePointer _walkState() throws CorruptDataException {
		return J9ROMFullTraversalFieldOffsetWalkStatePointer.cast(nonNullFieldEA(GC_ClassStaticsDeclarationOrderIterator.__walkStateOffset_));
	}

	// J9ROMFullTraversalFieldOffsetWalkState _walkState
	public PointerPointer _walkStateEA() throws CorruptDataException {
		return PointerPointer.cast(nonNullFieldEA(GC_ClassStaticsDeclarationOrderIterator.__walkStateOffset_));
	}
}
/* Copyright (C) 2001, 2010 United States Government as represented by
the Administrator of the National Aeronautics and Space Administration.
All Rights Reserved.
*/
package gov.nasa.worldwind.applications.gos.awt;

import gov.nasa.worldwind.applications.gos.*;
import gov.nasa.worldwind.applications.gos.globe.GlobeModel;
import gov.nasa.worldwind.applications.gos.html.HTMLFormatter;
import gov.nasa.worldwind.ogc.wms.*;
import gov.nasa.worldwind.util.WWUtil;

import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.util.List;

/**
 * Panel listing the named layers of a WMS service as checkboxes. Each
 * layer/style pair gets one checkbox whose action adds the layer to (or
 * removes it from) the associated {@link GlobeModel}.
 *
 * @author dcollins
 * @version $Id: WMSLayerPanel.java 13555 2010-07-15 16:49:17Z dcollins $
 */
public class WMSLayerPanel extends JPanel
{
    // Record identifying the catalog entry these layers belong to.
    protected Record record;
    // Globe model the checkbox actions add/remove layers on; may be null (controls are then omitted).
    protected GlobeModel globeModel;
    // Most recently applied capabilities document; null until setCapabilities is called.
    protected WMSCapabilities caps;

    public WMSLayerPanel(Record record, GlobeModel globeModel)
    {
        this.record = record;
        this.globeModel = globeModel;
        this.setBackground(Color.WHITE);
    }

    /** Returns the capabilities document currently displayed, or null if none. */
    public WMSCapabilities getCapabilities()
    {
        return this.caps;
    }

    /**
     * Replaces the displayed capabilities and rebuilds the layer controls.
     * Safe to call from any thread: the work is re-dispatched onto the EDT.
     *
     * @param caps the new capabilities document; may be null to clear the panel.
     */
    public void setCapabilities(final WMSCapabilities caps)
    {
        if (!SwingUtilities.isEventDispatchThread())
        {
            // Marshal onto the Event Dispatch Thread before touching Swing state.
            SwingUtilities.invokeLater(new Runnable()
            {
                public void run()
                {
                    setCapabilities(caps);
                }
            });
        }
        else
        {
            this.caps = caps;
            this.onCapabilitiesChanged();
        }
    }

    /**
     * Discards all child components and rebuilds them from {@code this.caps}:
     * a title label followed by one checkbox per named layer (or per
     * layer/style combination when a layer declares styles).
     *
     * NOTE(review): does not call revalidate()/repaint() after removeAll();
     * presumably the caller triggers a layout pass — confirm.
     */
    protected void onCapabilitiesChanged()
    {
        this.removeAll();

        if (this.caps == null)
            return;

        List<WMSLayerCapabilities> layerList = this.caps.getNamedLayers();
        if (layerList == null || layerList.size() == 0)
            return;

        this.setLayout(new BoxLayout(this, BoxLayout.Y_AXIS));

        JLabel label = new JLabel(this.createTitle());
        label.setAlignmentX(Component.LEFT_ALIGNMENT);
        this.add(label);
        this.add(Box.createVerticalStrut(10));

        layerList = this.sortLayerList(layerList);

        for (WMSLayerCapabilities layer : layerList)
        {
            if (layer == null)
                continue;

            Set<WMSLayerStyle> styleSet = layer.getStyles();
            if (styleSet == null || styleSet.size() == 0)
            {
                // Layer with no styles: a single control with style == null.
                this.addLayerControl(this.caps, layer, null);
                this.add(Box.createVerticalStrut(3));
            }
            else
            {
                // One control per declared style.
                for (WMSLayerStyle style : styleSet)
                {
                    if (style == null)
                        continue;

                    this.addLayerControl(this.caps, layer, style);
                    this.add(Box.createVerticalStrut(3));
                }
            }
        }
    }

    /**
     * Builds the HTML heading shown above the layer list: the service title
     * (or "No name") plus, when available, the GetCapabilities URL in grey.
     */
    protected String createTitle()
    {
        StringBuilder sb = new StringBuilder();
        HTMLFormatter formatter = new HTMLFormatter();
        formatter.setEnableAdvancedHTML(false);

        formatter.beginHTMLBody(sb);
        formatter.beginHeading(sb, 1);
        sb.append("Map layers");

        String s = this.caps.getServiceInformation().getServiceTitle();
        if (WWUtil.isEmpty(s))
            s = "No name";
        sb.append(" for \"").append(s).append("\"");
        formatter.endHeading(sb, 1);

        s = this.caps.getRequestURL("GetCapabilities", "http", "get");
        if (!WWUtil.isEmpty(s))
        {
            formatter.beginFont(sb, "#888888");
            sb.append(" [").append(s).append("]");
            formatter.endFont(sb);
        }

        formatter.endHTMLBody(sb);
        return sb.toString();
    }

    /**
     * Sorts layers by name, case-insensitively.
     *
     * NOTE(review): sorts the caller-supplied list in place and returns it —
     * this mutates the list obtained from the capabilities object; confirm
     * that is acceptable, or sort a copy.
     */
    protected List<WMSLayerCapabilities> sortLayerList(List<WMSLayerCapabilities> list)
    {
        Collections.sort(list, new Comparator<WMSLayerCapabilities>()
        {
            public int compare(WMSLayerCapabilities a, WMSLayerCapabilities b)
            {
                return String.CASE_INSENSITIVE_ORDER.compare(a.getName(), b.getName());
            }
        });
        return list;
    }

    /**
     * Adds one checkbox for the given layer/style. The initial action reflects
     * whether the globe model already contains the layer; performing the
     * action swaps in the opposite action (see the Action classes below).
     * Does nothing when no globe model was supplied.
     */
    protected void addLayerControl(WMSCapabilities caps, WMSLayerCapabilities layer, WMSLayerStyle style)
    {
        if (this.globeModel == null)
            return;

        Action action = this.globeModel.hasWMSLayer(this.record.getIdentifier(), layer, style) ?
            new RemoveWMSLayerAction(this, caps, layer, style) : new AddWMSLayerAction(this, caps, layer, style);
        JCheckBox jcb = new JCheckBox(action);
        jcb.setAlignmentX(Component.LEFT_ALIGNMENT);
        this.add(jcb);
    }

    /** Display name for a layer/style pair, delegated to ResourceUtil. */
    protected String makeWMSLayerDisplayName(WMSLayerCapabilities layer, WMSLayerStyle style)
    {
        return ResourceUtil.makeWMSLayerDisplayName(layer, style);
    }

    /**
     * Checkbox action that adds the layer to the globe model, then replaces
     * itself on the source button with the corresponding remove action so the
     * checkbox toggles.
     */
    protected static class AddWMSLayerAction extends AbstractAction
    {
        protected WMSLayerPanel owner;
        protected WMSCapabilities caps;
        protected WMSLayerCapabilities layer;
        protected WMSLayerStyle style;

        public AddWMSLayerAction(WMSLayerPanel owner, WMSCapabilities caps, WMSLayerCapabilities layer,
            WMSLayerStyle style)
        {
            super(owner.makeWMSLayerDisplayName(layer, style));
            // Unchecked state: the layer is not yet on the globe.
            this.putValue(Action.SELECTED_KEY, false);

            this.owner = owner;
            this.caps = caps;
            this.layer = layer;
            this.style = style;
        }

        public void actionPerformed(ActionEvent event)
        {
            this.owner.globeModel.addWMSLayer(this.owner.record.getIdentifier(), this.caps, this.layer, this.style);
            ((AbstractButton) event.getSource()).setAction(
                new RemoveWMSLayerAction(this.owner, this.caps, this.layer, this.style));
        }
    }

    /**
     * Checkbox action that removes the layer from the globe model, then
     * replaces itself on the source button with the corresponding add action.
     */
    protected static class RemoveWMSLayerAction extends AbstractAction
    {
        protected WMSLayerPanel owner;
        protected WMSCapabilities caps;
        protected WMSLayerCapabilities layer;
        protected WMSLayerStyle style;

        public RemoveWMSLayerAction(WMSLayerPanel owner, WMSCapabilities caps, WMSLayerCapabilities layer,
            WMSLayerStyle style)
        {
            super(owner.makeWMSLayerDisplayName(layer, style));
            // Checked state: the layer is currently on the globe.
            this.putValue(Action.SELECTED_KEY, true);

            this.caps = caps;
            this.owner = owner;
            this.layer = layer;
            this.style = style;
        }

        public void actionPerformed(ActionEvent event)
        {
            this.owner.globeModel.removeWMSLayer(this.owner.record.getIdentifier(), this.layer, this.style);
            ((AbstractButton) event.getSource()).setAction(
                new AddWMSLayerAction(this.owner, this.caps, this.layer, this.style));
        }
    }
}
/***************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. ****************************************************************/ package org.apache.cayenne.modeler.action; import javax.swing.JOptionPane; import javax.swing.SwingUtilities; import java.awt.event.ActionEvent; import java.sql.SQLException; import java.util.HashSet; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import org.apache.cayenne.map.DataMap; import org.apache.cayenne.modeler.Application; import org.apache.cayenne.modeler.ProjectController; import org.apache.cayenne.modeler.dialog.db.load.DbLoadResultDialog; import org.apache.cayenne.modeler.dialog.db.load.DbLoaderContext; import org.apache.cayenne.modeler.dialog.db.load.LoadDataMapTask; import org.apache.cayenne.modeler.editor.DbImportController; import org.apache.cayenne.modeler.editor.dbimport.DbImportView; import org.apache.cayenne.modeler.pref.DBConnectionInfo; /** * Action that imports database structure into a DataMap. 
 */
public class ReverseEngineeringAction extends DBConnectionAwareAction {

    private static final String ACTION_NAME = "Reengineer Database Schema";
    private static final String ICON_NAME = "icon-dbi-runImport.png";
    private static final String DIALOG_TITLE = "Reengineer DB Schema: Connect to Database";

    // View used to build the import configuration; injected via setView().
    private DbImportView view;
    // Count of imports still running; the result dialog is shown when it reaches zero.
    private AtomicInteger dataMapCount;
    // DataMaps selected for the current import run.
    protected Set<DataMap> dataMaps;

    public String getIconName() {
        return ICON_NAME;
    }

    ReverseEngineeringAction(Application application) {
        super(getActionName(), application);
    }

    public static String getActionName() {
        return ACTION_NAME;
    }

    /**
     * Reengineers every DataMap in the given set: each map in turn is made the
     * current DataMap on the project controller and an import is started for it.
     *
     * @param dataMapSet maps to import into
     */
    public void performAction(Set<DataMap> dataMapSet) {
        resetParams();
        dataMaps.addAll(dataMapSet);
        dataMapCount.set(dataMaps.size());
        ProjectController projectController = Application.getInstance().getFrameController().getProjectController();
        for(DataMap dataMap : dataMapSet) {
            projectController.setCurrentDataMap(dataMap);
            startImport();
        }
    }

    /**
     * Prompts for a DB connection, builds the loader context and runs the
     * import on a background thread. Bails out silently when the user cancels
     * the connection dialog or the configuration cannot be built.
     */
    private void startImport(){
        final DbLoaderContext context = new DbLoaderContext(application.getMetaData());
        DBConnectionInfo connectionInfo = getConnectionInfo(DIALOG_TITLE);
        if(connectionInfo == null) {
            // User cancelled the connection dialog.
            return;
        }
        context.setProjectController(getProjectController());
        try {
            context.setConnection(connectionInfo.makeDataSource(getApplication().getClassLoadingService()).getConnection());
        } catch (SQLException ex) {
            JOptionPane.showMessageDialog(
                    Application.getFrame(),
                    ex.getMessage(),
                    "Error loading schemas dialog",
                    JOptionPane.ERROR_MESSAGE);
            return;
        }
        if(!context.buildConfig(connectionInfo, view, true)) {
            // Config failed; release the connection best-effort before returning.
            try {
                context.getConnection().close();
            } catch (SQLException ignored) {}
            return;
        }

        DbImportController dbImportController = Application.getInstance().getFrameController().getDbImportController();
        DbLoadResultDialog dbLoadResultDialog = dbImportController.createDialog();
        // Callback runs on the EDT after the loader task finishes.
        runLoaderInThread(context, () -> {
            application.getUndoManager().discardAllEdits();
            try {
                context.getConnection().close();
                // Show the aggregated result dialog only once the last import
                // of this run has finished and nothing was interrupted.
                if(dataMapCount.decrementAndGet() <= 0 && !context.isInterrupted()) {
                    if (!dbLoadResultDialog.isVisible() && !dbLoadResultDialog.getTableForMap().isEmpty()) {
                        dbImportController.showDialog();
                    }
                }
            } catch (SQLException ignored) {}
        });
    }

    /**
     * Connects to DB and delegates processing to DbLoaderController, starting it asynchronously.
     */
    @Override
    public void performAction(ActionEvent event) {
        resetParams();
        dataMaps.add(Application.getInstance().getFrameController().getProjectController().getCurrentDataMap());
        dataMapCount.set(dataMaps.size());
        startImport();
    }

    // Resets per-run state so consecutive invocations don't accumulate maps.
    private void resetParams() {
        dataMapCount = new AtomicInteger();
        this.dataMaps = new HashSet<>();
    }

    // Runs the blocking load task on a fresh thread, then invokes the callback on the EDT.
    private void runLoaderInThread(final DbLoaderContext context, final Runnable callback) {
        Thread th = new Thread(() -> {
            LoadDataMapTask task = new LoadDataMapTask(Application.getFrame(), "Reengineering DB", context);
            task.startAndWait();
            SwingUtilities.invokeLater(callback);
        });
        th.start();
    }

    public void setView(DbImportView view) {
        this.view = view;
    }
}
package com.dji.sample.component.mqtt.service;

import com.dji.sample.component.mqtt.model.CommonTopicResponse;

/**
 * Outbound MQTT message publisher.
 *
 * @author sean.zhou
 * @version 0.1
 * @date 2021/11/25
 */
public interface IMessageSenderService {

    /**
     * Publish a message to a specific topic using the default QoS.
     * @param topic    target topic
     * @param response message payload to publish
     */
    void publish(String topic, CommonTopicResponse response);

    /**
     * Publish a message to a specific topic with an explicit QoS level.
     * @param topic    target topic
     * @param qos      MQTT quality-of-service level
     * @param response message payload to publish
     */
    void publish(String topic, int qos, CommonTopicResponse response);
}
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
* */ package org.apache.hc.client5.http.impl.cache; import java.util.Date; import java.util.Iterator; import org.apache.hc.client5.http.cache.HeaderConstants; import org.apache.hc.client5.http.cache.HttpCacheEntry; import org.apache.hc.client5.http.cache.Resource; import org.apache.hc.client5.http.utils.DateUtils; import org.apache.hc.core5.annotation.Contract; import org.apache.hc.core5.annotation.ThreadingBehavior; import org.apache.hc.core5.http.Header; import org.apache.hc.core5.http.HeaderElement; import org.apache.hc.core5.http.HttpHeaders; import org.apache.hc.core5.http.HttpRequest; import org.apache.hc.core5.http.MessageHeaders; import org.apache.hc.core5.http.message.MessageSupport; /** * @since 4.1 */ @Contract(threading = ThreadingBehavior.IMMUTABLE) class CacheValidityPolicy { public static final long MAX_AGE = 2147483648L; CacheValidityPolicy() { super(); } public long getCurrentAgeSecs(final HttpCacheEntry entry, final Date now) { return getCorrectedInitialAgeSecs(entry) + getResidentTimeSecs(entry, now); } public long getFreshnessLifetimeSecs(final HttpCacheEntry entry) { final long maxage = getMaxAge(entry); if (maxage > -1) { return maxage; } final Date dateValue = entry.getDate(); if (dateValue == null) { return 0L; } final Date expiry = DateUtils.parseDate(entry, HeaderConstants.EXPIRES); if (expiry == null) { return 0; } final long diff = expiry.getTime() - dateValue.getTime(); return (diff / 1000); } public boolean isResponseFresh(final HttpCacheEntry entry, final Date now) { return (getCurrentAgeSecs(entry, now) < getFreshnessLifetimeSecs(entry)); } /** * Decides if this response is fresh enough based Last-Modified and Date, if available. * This entry is meant to be used when isResponseFresh returns false. 
The algorithm is as follows: * * if last-modified and date are defined, freshness lifetime is coefficient*(date-lastModified), * else freshness lifetime is defaultLifetime * * @param entry the cache entry * @param now what time is it currently (When is right NOW) * @param coefficient Part of the heuristic for cache entry freshness * @param defaultLifetime How long can I assume a cache entry is default TTL * @return {@code true} if the response is fresh */ public boolean isResponseHeuristicallyFresh(final HttpCacheEntry entry, final Date now, final float coefficient, final long defaultLifetime) { return (getCurrentAgeSecs(entry, now) < getHeuristicFreshnessLifetimeSecs(entry, coefficient, defaultLifetime)); } public long getHeuristicFreshnessLifetimeSecs(final HttpCacheEntry entry, final float coefficient, final long defaultLifetime) { final Date dateValue = entry.getDate(); final Date lastModifiedValue = DateUtils.parseDate(entry, HeaderConstants.LAST_MODIFIED); if (dateValue != null && lastModifiedValue != null) { final long diff = dateValue.getTime() - lastModifiedValue.getTime(); if (diff < 0) { return 0; } return (long)(coefficient * (diff / 1000)); } return defaultLifetime; } public boolean isRevalidatable(final HttpCacheEntry entry) { return entry.getFirstHeader(HeaderConstants.ETAG) != null || entry.getFirstHeader(HeaderConstants.LAST_MODIFIED) != null; } public boolean mustRevalidate(final HttpCacheEntry entry) { return hasCacheControlDirective(entry, HeaderConstants.CACHE_CONTROL_MUST_REVALIDATE); } public boolean proxyRevalidate(final HttpCacheEntry entry) { return hasCacheControlDirective(entry, HeaderConstants.CACHE_CONTROL_PROXY_REVALIDATE); } public boolean mayReturnStaleWhileRevalidating(final HttpCacheEntry entry, final Date now) { final Iterator<HeaderElement> it = MessageSupport.iterate(entry, HeaderConstants.CACHE_CONTROL); while (it.hasNext()) { final HeaderElement elt = it.next(); if 
(HeaderConstants.STALE_WHILE_REVALIDATE.equalsIgnoreCase(elt.getName())) { try { final int allowedStalenessLifetime = Integer.parseInt(elt.getValue()); if (getStalenessSecs(entry, now) <= allowedStalenessLifetime) { return true; } } catch (final NumberFormatException nfe) { // skip malformed directive } } } return false; } public boolean mayReturnStaleIfError(final HttpRequest request, final HttpCacheEntry entry, final Date now) { final long stalenessSecs = getStalenessSecs(entry, now); return mayReturnStaleIfError(request, HeaderConstants.CACHE_CONTROL, stalenessSecs) || mayReturnStaleIfError(entry, HeaderConstants.CACHE_CONTROL, stalenessSecs); } private boolean mayReturnStaleIfError(final MessageHeaders headers, final String name, final long stalenessSecs) { boolean result = false; final Iterator<HeaderElement> it = MessageSupport.iterate(headers, name); while (it.hasNext()) { final HeaderElement elt = it.next(); if (HeaderConstants.STALE_IF_ERROR.equals(elt.getName())) { try { final int staleIfErrorSecs = Integer.parseInt(elt.getValue()); if (stalenessSecs <= staleIfErrorSecs) { result = true; break; } } catch (final NumberFormatException nfe) { // skip malformed directive } } } return result; } /** * This matters for deciding whether the cache entry is valid to serve as a * response. 
If these values do not match, we might have a partial response * * @param entry The cache entry we are currently working with * @return boolean indicating whether actual length matches Content-Length */ protected boolean contentLengthHeaderMatchesActualLength(final HttpCacheEntry entry) { final Header h = entry.getFirstHeader(HttpHeaders.CONTENT_LENGTH); if (h != null) { try { final long responseLen = Long.parseLong(h.getValue()); final Resource resource = entry.getResource(); if (resource == null) { return false; } final long resourceLen = resource.length(); return responseLen == resourceLen; } catch (final NumberFormatException ex) { return false; } } return true; } protected long getApparentAgeSecs(final HttpCacheEntry entry) { final Date dateValue = entry.getDate(); if (dateValue == null) { return MAX_AGE; } final long diff = entry.getResponseDate().getTime() - dateValue.getTime(); if (diff < 0L) { return 0; } return (diff / 1000); } protected long getAgeValue(final HttpCacheEntry entry) { long ageValue = 0; for (final Header hdr : entry.getHeaders(HeaderConstants.AGE)) { long hdrAge; try { hdrAge = Long.parseLong(hdr.getValue()); if (hdrAge < 0) { hdrAge = MAX_AGE; } } catch (final NumberFormatException nfe) { hdrAge = MAX_AGE; } ageValue = (hdrAge > ageValue) ? hdrAge : ageValue; } return ageValue; } protected long getCorrectedReceivedAgeSecs(final HttpCacheEntry entry) { final long apparentAge = getApparentAgeSecs(entry); final long ageValue = getAgeValue(entry); return (apparentAge > ageValue) ? 
apparentAge : ageValue; } protected long getResponseDelaySecs(final HttpCacheEntry entry) { final long diff = entry.getResponseDate().getTime() - entry.getRequestDate().getTime(); return (diff / 1000L); } protected long getCorrectedInitialAgeSecs(final HttpCacheEntry entry) { return getCorrectedReceivedAgeSecs(entry) + getResponseDelaySecs(entry); } protected long getResidentTimeSecs(final HttpCacheEntry entry, final Date now) { final long diff = now.getTime() - entry.getResponseDate().getTime(); return (diff / 1000L); } protected long getMaxAge(final HttpCacheEntry entry) { long maxage = -1; final Iterator<HeaderElement> it = MessageSupport.iterate(entry, HeaderConstants.CACHE_CONTROL); while (it.hasNext()) { final HeaderElement elt = it.next(); if (HeaderConstants.CACHE_CONTROL_MAX_AGE.equals(elt.getName()) || "s-maxage".equals(elt.getName())) { try { final long currMaxAge = Long.parseLong(elt.getValue()); if (maxage == -1 || currMaxAge < maxage) { maxage = currMaxAge; } } catch (final NumberFormatException nfe) { // be conservative if can't parse maxage = 0; } } } return maxage; } public boolean hasCacheControlDirective(final HttpCacheEntry entry, final String directive) { final Iterator<HeaderElement> it = MessageSupport.iterate(entry, HeaderConstants.CACHE_CONTROL); while (it.hasNext()) { final HeaderElement elt = it.next(); if (directive.equalsIgnoreCase(elt.getName())) { return true; } } return false; } public long getStalenessSecs(final HttpCacheEntry entry, final Date now) { final long age = getCurrentAgeSecs(entry, now); final long freshness = getFreshnessLifetimeSecs(entry); if (age <= freshness) { return 0L; } return (age - freshness); } }
package org.http4k.aws.lambda;

import java.util.Collections;
import java.util.Map;

/**
 * Mutable DTO mirroring the AWS API Gateway proxy-integration request event.
 *
 * Public fields are the deserialization contract (populated reflectively by
 * the Lambda runtime's JSON mapper), so they must stay public and mutable.
 * Collection and string fields default to empty values rather than null so
 * callers need no null checks when a field is absent from the event.
 */
public class ApiGatewayProxyRequest {
    public String path;
    public String httpMethod;
    public Map<String, String> headers = Collections.emptyMap();
    public Map<String, String> queryStringParameters = Collections.emptyMap();
    public String body = "";
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */
package org.apache.poi.hwpf;

import java.io.IOException;
import java.io.OutputStream;

import org.apache.poi.hwpf.model.ComplexFileTable;
import org.apache.poi.hwpf.model.OldCHPBinTable;
import org.apache.poi.hwpf.model.OldPAPBinTable;
import org.apache.poi.hwpf.model.OldSectionTable;
import org.apache.poi.hwpf.model.PieceDescriptor;
import org.apache.poi.hwpf.model.TextPiece;
import org.apache.poi.hwpf.model.TextPieceTable;
import org.apache.poi.hwpf.usermodel.Range;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.LittleEndian;

/**
 * Provides very simple support for old (Word 6 / Word 95)
 *  files.
 */
public class HWPFOldDocument extends HWPFDocumentCore {
    // Text piece table: maps character positions to the raw bytes that back them.
    private TextPieceTable tpt;

    // Full document text, extracted once in the constructor.
    private StringBuilder _text;

    public HWPFOldDocument(POIFSFileSystem fs) throws IOException {
        this(fs.getRoot());
    }

    /**
     * Opens an old-format (Word 6 / Word 95) document from the given
     * directory node, locating the property tables and text via fixed
     * offsets into the main stream.
     *
     * @param directory the POIFS directory holding the document streams
     * @throws IOException if the underlying streams cannot be read
     */
    public HWPFOldDocument(DirectoryNode directory)
            throws IOException {
        super(directory);

        // Where are things?
        // These are fixed byte offsets into the old-format file header (FIB)
        // giving the position and size of each property table.
        int sedTableOffset = LittleEndian.getInt(_mainStream, 0x88);
        int sedTableSize   = LittleEndian.getInt(_mainStream, 0x8c);
        int chpTableOffset = LittleEndian.getInt(_mainStream, 0xb8);
        int chpTableSize   = LittleEndian.getInt(_mainStream, 0xbc);
        int papTableOffset = LittleEndian.getInt(_mainStream, 0xc0);
        int papTableSize   = LittleEndian.getInt(_mainStream, 0xc4);
        //int shfTableOffset = LittleEndian.getInt(_mainStream, 0x60);
        //int shfTableSize   = LittleEndian.getInt(_mainStream, 0x64);
        int complexTableOffset = LittleEndian.getInt(_mainStream, 0x160);

        // We need to get hold of the text that makes up the
        //  document, which might be regular or fast-saved
        ComplexFileTable cft = null;
        // NOTE(review): this local buffer is write-only — _text is assigned
        // from tpt.getText() below and 'text' is never read. Looks like
        // leftover code; confirm before removing.
        StringBuffer text = new StringBuffer();
        if(_fib.getFibBase().isFComplex()) {
            // Fast-saved ("complex") file: text pieces come from the complex file table.
            cft = new ComplexFileTable(
                    _mainStream, _mainStream,
                    complexTableOffset, _fib.getFibBase().getFcMin()
            );
            tpt = cft.getTextPieceTable();

            for(TextPiece tp : tpt.getTextPieces()) {
                text.append( tp.getStringBuilder() );
            }
        } else {
            // TODO Discover if these older documents can ever hold Unicode Strings?
            //  (We think not, because they seem to lack a Piece table)
            // TODO Build the Piece Descriptor properly
            //  (We have to fake it, as they don't seem to have a proper Piece table)
            PieceDescriptor pd = new PieceDescriptor(new byte[] {0,0, 0,0,0,127, 0,0}, 0);
            pd.setFilePosition(_fib.getFibBase().getFcMin());

            // Generate a single Text Piece Table, with a single Text Piece
            //  which covers all the (8 bit only) text in the file
            tpt = new TextPieceTable();
            byte[] textData = new byte[_fib.getFibBase().getFcMac()-_fib.getFibBase().getFcMin()];
            System.arraycopy(_mainStream, _fib.getFibBase().getFcMin(), textData, 0, textData.length);
            TextPiece tp = new TextPiece(
                    0, textData.length, textData, pd
            );
            tpt.add(tp);

            text.append(tp.getStringBuilder());
        }

        _text = tpt.getText();

        // Now we can fetch the character and paragraph properties
        _cbt = new OldCHPBinTable(
                _mainStream, chpTableOffset, chpTableSize,
                _fib.getFibBase().getFcMin(), tpt
        );
        _pbt = new OldPAPBinTable(
                _mainStream, papTableOffset, papTableSize,
                _fib.getFibBase().getFcMin(), tpt
        );
        _st = new OldSectionTable(
                _mainStream, sedTableOffset, sedTableSize,
                _fib.getFibBase().getFcMin(), tpt
        );

        /*
         * in this mode we preserving PAPX/CHPX structure from file, so text may
         * miss from output, and text order may be corrupted
         */
        boolean preserveBinTables = false;
        try
        {
            preserveBinTables = Boolean.parseBoolean( System
                    .getProperty( HWPFDocument.PROPERTY_PRESERVE_BIN_TABLES ) );
        }
        catch ( Exception exc )
        {
            // ignore; default to rebuilding the bin tables below
        }

        if ( !preserveBinTables )
        {
            _cbt.rebuild( cft );
            _pbt.rebuild( _text, cft );
        }
    }

    public Range getOverallRange()
    {
        // Life is easy when we have no footers, headers or unicode!
        return new Range( 0, _fib.getFibBase().getFcMac() - _fib.getFibBase().getFcMin(), this );
    }

    public Range getRange()
    {
        return getOverallRange();
    }

    public TextPieceTable getTextTable()
    {
      return tpt;
    }

    @Override
    public StringBuilder getText()
    {
        return _text;
    }

    @Override
    public void write(OutputStream out) throws IOException {
        throw new IllegalStateException("Writing is not available for the older file formats");
    }
}
package models;

import java.util.Objects;

/**
 * Simple staff-member model: a name, a position and a staff role, plus a
 * database identifier that is assigned after construction via {@link #setId(int)}.
 * Equality and hashing consider all four properties, including the id.
 */
public class Users {

    private int id;
    private String name;
    private String position;
    private String staff_role;

    public Users(String name, String position, String staff_role) {
        this.name = name;
        this.position = position;
        this.staff_role = staff_role;
    }

    /** Database identifier; 0 until {@link #setId(int)} is called. */
    public int getId() {
        return this.id;
    }

    public String getName() {
        return this.name;
    }

    public String getPosition() {
        return this.position;
    }

    public String getStaff_role() {
        return this.staff_role;
    }

    public void setId(int id) {
        this.id = id;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != this.getClass()) {
            return false;
        }
        Users other = (Users) o;
        // All four properties must match, id included.
        return this.id == other.id
                && Objects.equals(this.name, other.name)
                && Objects.equals(this.position, other.position)
                && Objects.equals(this.staff_role, other.staff_role);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, name, position, staff_role);
    }
}
//////////////////////////////////////
// 15 puzzle solver                 //
//                                  //
// 2015-04-26                       //
// Leonardone @ NEETSDKASU          //
//////////////////////////////////////

import java.io.*;
import java.util.*;

/**
 * 15-puzzle solver. The 4x4 board is packed into a single {@code long}, 4 bits
 * per cell (cell index 0 = top-left is the highest nibble, see Puzzle below).
 * Strategy (as visible from the code): place tiles 1, 2, 5, 6 with the greedy
 * router solve3(), then finish with either a precomputed reverse-BFS table
 * from the solved state (solve1) or a forward BFS that meets that table
 * (solve2). Moves are printed to stdout as the number of the tile moved.
 */
class Main {
    // Cells "locked" by solve3: once a tile is placed its cell is flagged and
    // subsequent routing (seekRoot) must not pass through it.
    static boolean[] flags = new boolean[16];
    // state -> parent state along a shortest path back toward the solved board
    // (built by makeRoot from the solved state, depth <= 22).
    static Map<Long, Long> map;
    // Current board; mutated (reassigned) as moves are emitted.
    static Puzzle problem;

    // NOTE(review): static initializer performs I/O — reads the puzzle from
    // stdin and builds the BFS table before main() runs.
    static {
        problem = getProblem();
        map = makeRoot();
    }

    public static void main(String[] args) throws Exception {
        int p1 = problem.getPos(1L);
        int p2 = problem.getPos(2L);
        // Pre-shuffle: if tile 2 sits before tile 1 and deep in the board,
        // park it near row 1 first so placing tile 1 cannot trap it.
        // (heuristic — chosen targets 4/5 depend on tile 2's column)
        if (p1 < p2) {
            if (p2 > 5) {
                if ((p2 & 3) == 0) {
                    solve3(p2, 4);
                } else {
                    solve3(p2, 5);
                }
            }
        }
        // Place tiles 1, 2, 5, 6 into cells 0, 1, 4, 5 and lock each cell.
        solve3(problem.getPos(1L), 0);
        flags[0] = true;
        solve3(problem.getPos(2L), 1);
        flags[1] = true;
        solve3(problem.getPos(5L), 4);
        flags[4] = true;
        solve3(problem.getPos(6L), 5);
        flags[5] = true;
        // Pull tiles 3 and 4 out of the bottom half so the endgame search has
        // them reachable (cell 3 used as a staging target).
        if (problem.getPos(3L) > 7) {
            solve3(problem.getPos(3L), 3);
        }
        if (problem.getPos(4L) > 7) {
            solve3(problem.getPos(4L), 3);
        }
        // Finish: direct table lookup first, then meet-in-the-middle BFS.
        if (solve1()) {
            return;
        }
        if (solve2()) {
            return;
        }
    }

    /**
     * Breadth-first distance fill on the 4x4 grid from goal cell {@code g}
     * toward start cell {@code s}, avoiding cells locked in {@code flags}.
     * On return rt[i] holds 1 + distance-from-g for reached cells (0 means
     * unreached). Returns false if s is unreachable.
     */
    static boolean seekRoot(int[] rt, int s, int g) {
        int c = 1;
        rt[g] = c;
        while (rt[s] == 0) {
            boolean fz = false;  // did this wave reach any new cell?
            for (int i = 0; i < 16; i++) {
                if (rt[i] != c) {
                    continue;
                }
                // Neighbors: up, down, left, right. (i & 3) guards row edges.
                if (i - 4 >= 0 && rt[i - 4] == 0 && !flags[i - 4]) {
                    rt[i - 4] = c + 1;
                    fz = true;
                }
                if (i + 4 < 16 && rt[i + 4] == 0 && !flags[i + 4]) {
                    rt[i + 4] = c + 1;
                    fz = true;
                }
                if ((i & 3) != 0 && i - 1 >= 0 && rt[i - 1] == 0 && !flags[i - 1]) {
                    rt[i - 1] = c + 1;
                    fz = true;
                }
                if ((i & 3) != 3 && i + 1 < 16 && rt[i + 1] == 0 && !flags[i + 1]) {
                    rt[i + 1] = c + 1;
                    fz = true;
                }
            }
            c++;
            if (!fz) {
                return false;
            }
        }
        return true;
    }

    /**
     * Moves the tile currently at cell {@code pn} to cell {@code g}, printing
     * each elementary move. For every step of the tile's route (r1) the blank
     * is first walked (route r0, with the tile's cell temporarily locked) to
     * the cell the tile must enter, then the tile is swapped into it.
     * Prints "are?" / "arere?" and aborts if routing ever fails (should not
     * happen on solvable inputs).
     */
    static void solve3(int pn, int g) {
        if (flags[g] || flags[pn] || pn == g) {
            return ;
        }
        int p0 = problem.getPos(0L);  // current blank cell
        int[] r1 = new int[16];
        seekRoot(r1, pn, g);
        for (int cn = r1[pn] - 1; cn > 0; cn--) {
            // Pick the neighbor of pn that is one step closer to g.
            int pi = -1;
            // a 4
            if (pn + 4 < 16 && r1[pn + 4] == cn) {
                pi = pn + 4;
            }
            // a 3
            if (pn - 4 >= 0 && r1[pn - 4] == cn) {
                pi = pn - 4;
            }
            // a 1
            if ((pn & 3) != 0 && pn - 1 >= 0 && r1[pn - 1] == cn) {
                pi = pn - 1;
            }
            // a 2
            if ((pn & 3) != 3 && pn + 1 < 16 && r1[pn + 1] == cn) {
                pi = pn + 1;
            }
            Puzzle temp;
            int[] r0 = new int[16];
            // Route the blank to pi without disturbing the tile at pn:
            // lock pn for the duration of the blank's path-finding.
            boolean f1 = flags[pn];
            flags[pn] = true;
            seekRoot(r0, p0, pi);
            flags[pn] = f1;
            // Walk the blank along r0, printing the tile moved at each step.
            for (int c0 = r0[p0] - 1; c0 > 0; c0--) {
                // b 4
                if ((p0 & 3) != 3 && p0 + 1 < 16 && r0[p0 + 1] == c0) {
                    temp = problem.moveRight();
                    p0++;
                } else
                // b 3
                if ((p0 & 3) != 0 && p0 - 1 >= 0 && r0[p0 - 1] == c0) {
                    temp = problem.moveLeft();
                    p0--;
                } else
                // b 1
                if (p0 - 4 >= 0 && r0[p0 - 4] == c0) {
                    temp = problem.moveUp();
                    p0 -= 4;
                } else
                // b 2
                if (p0 + 4 < 16 && r0[p0 + 4] == c0) {
                    temp = problem.moveDown();
                    p0 += 4;
                } else {
                    System.out.println("are?");
                    return;
                }
                System.out.println(Puzzle.moveNumber(temp.data, problem.data));
                problem = temp;
            }
            // Blank now adjacent to the tile: swap the tile one step toward g.
            // (move direction is opposite to the tile's displacement)
            if (pi == pn - 1) {
                temp = problem.moveRight();
                p0++;
            } else if (pi == pn + 1) {
                temp = problem.moveLeft();
                p0--;
            } else if (pi == pn - 4) {
                temp = problem.moveDown();
                p0 += 4;
            } else if (pi == pn + 4) {
                temp = problem.moveUp();
                p0 -= 4;
            } else {
                System.out.println("arere?");
                return;
            }
            System.out.println(Puzzle.moveNumber(temp.data, problem.data));
            problem = temp;
            pn = pi;
        }
    }

    /**
     * Reads the 4x4 board from stdin: four lines of space-separated tokens,
     * "*" marking the blank. NOTE(review): n++ runs for every token including
     * "*", so token order maps directly to cell index; I/O errors are
     * silently swallowed and yield a partially-filled board.
     */
    static Puzzle getProblem() {
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        long[] data = new long[16];
        int n = 0;
        try {
            for (int i = 0; i < 4; i++) {
                String[] str = in.readLine().split(" ");
                for (String s : str) {
                    if (!"*".equals(s)) {
                        data[n] = Long.parseLong(s);
                    }
                    n++;
                }
            }
        } catch (IOException e) {
        }
        return new Puzzle(data);
    }

    /**
     * Reverse BFS from the solved state, 22 plies deep, restricted to states
     * whose blank stays out of the masked rows (0xFF00_FF00... nibbles) and
     * deduplicated up to the diagonal symmetry. Returns state -> parent-state
     * (parent is one move closer to solved; the solved state maps to 0).
     */
    static Map<Long, Long> makeRoot() {
        // hs1 = current frontier, hs2 = next frontier (swapped each ply).
        Map<Puzzle, Long> hs1 = new HashMap<>(200000), hs2 = new HashMap<>(200000), hs3;
        Map<Long, Long> map = new HashMap<>(400000);
        {
            Puzzle pzl = new Puzzle();
            hs1.put(pzl, pzl.data);
            map.put(pzl.data, 0L);  // sentinel parent: 0 marks the root
        }
        for (int i = 1; i <= 22; i++) {
            hs2.clear();
            for (Puzzle pzl : hs1.keySet()) {
                Long pd = hs1.get(pzl);
                for (int j = 0; j < 4; j++) {
                    Puzzle temp;
                    if ((temp = pzl.move(j)) == pzl) {
                        continue;  // move blocked at the board edge
                    }
                    if ((temp.z & 0xFF00_FF00_0000_0000L) != 0L) {
                        continue;  // prune: blank entered an excluded region
                    }
                    Long d = temp.data;
                    if (map.containsKey(d) || map.containsKey(Puzzle.symmetry(temp.data))) {
                        continue;  // already seen (directly or via symmetry)
                    }
                    map.put(d, pd);
                    hs2.put(temp, d);
                }
            }
            hs3 = hs1;
            hs1 = hs2;
            hs2 = hs3;
        }
        return map;
    }

    /**
     * If the current state (or its diagonal mirror) is in the precomputed
     * table, walks the parent chain to the solved state printing each moved
     * tile (mirrored states have their move numbers remapped). Returns true
     * on success, false when the state is outside the table.
     */
    static boolean solve1() {
        Long d = problem.data;
        Long s = Puzzle.symmetry(problem.data);
        if (map.containsKey(d)) {
            for (;;) {
                Long temp = map.get(d);
                if (temp == 0L) {
                    break;  // reached the solved-state sentinel
                }
                System.out.println(Puzzle.moveNumber(temp, d));
                d = temp;
            }
            return true;
        } else if (map.containsKey(s)) {
            d = s;
            for (;;) {
                Long temp = map.get(d);
                if (temp == 0L) {
                    break;
                }
                // Moves were found on the mirrored board; map tile numbers back.
                System.out.println(Puzzle.symmetryNumber(Puzzle.moveNumber(temp, d)));
                d = temp;
            }
            return true;
        } else {
            return false;
        }
    }

    /**
     * Forward BFS from the current state until it meets a state known to the
     * precomputed table (meet-in-the-middle). Prints the forward half of the
     * path (reconstructed via map2, reversed through a deque), then delegates
     * the remaining half to solve1(). Returns false if no meeting point is
     * found within 100 plies.
     */
    static boolean solve2() {
        Map<Puzzle, Long> hs1 = new HashMap<>(200000), hs2 = new HashMap<>(200000), hs3;
        Map<Long, Long> map2 = new HashMap<>(400000);  // forward parents
        hs1.put(problem, problem.data);
        map2.put(problem.data, 0L);
        for (int i = 0; i < 100; i++) {
            hs2.clear();
            for (Puzzle pzl : hs1.keySet()) {
                Long pd = hs1.get(pzl);
                for (int j = 0; j < 4; j++) {
                    Puzzle temp;
                    if ((temp = pzl.move(j)) == pzl) {
                        continue;
                    }
                    if ((temp.z & 0xFF00_FF00_0000_0000L) != 0L) {
                        continue;  // same blank-region pruning as makeRoot
                    }
                    Long d = temp.data;
                    Long s = Puzzle.symmetry(temp.data);
                    if (map2.containsKey(d) || map2.containsKey(s)) {
                        continue;
                    }
                    hs2.put(temp, d);
                    map2.put(d, pd);
                    if (map.containsKey(d) || map.containsKey(s)) {
                        // Met the backward table: emit the forward path
                        // start->temp in order (collected back-to-front).
                        Deque<Long> list = new ArrayDeque<>(i + 5);
                        for (;;) {
                            Long t = map2.get(d);
                            if (t.longValue() == 0L) {
                                break;
                            }
                            list.addLast(Puzzle.moveNumber(t, d));
                            d = t;
                        }
                        while (!list.isEmpty()) {
                            System.out.println(list.removeLast());
                        }
                        problem = temp;
                        solve1();  // finish via the precomputed table
                        return true;
                    }
                }
            }
            hs3 = hs1;
            hs1 = hs2;
            hs2 = hs3;
        }
        return false;
    }
}

/**
 * Immutable bitboard for the 15 puzzle. {@code data} packs 16 cells, 4 bits
 * each: cell index i (row-major, 0 = top-left) lives at bit shift 60 - 4*i,
 * value 0 = blank. {@code z} is a single-nibble mask marking the blank's
 * position. Move methods return a NEW Puzzle (or {@code this} unchanged when
 * the move is blocked), so callers can detect blocked moves with ==.
 */
final class Puzzle {
    // For the diagonal reflection: tile value v on the mirrored board becomes
    // symmetry_table[v] (row/column roles of the goal positions swap).
    private static final long[] symmetry_table = {0L, 1L, 5L, 9L, 13L, 2L, 6L, 10L, 14L, 3L, 7L, 11L, 15L, 4L, 8L, 12L};

    /**
     * Returns the board reflected across the main diagonal: every tile value
     * is remapped through symmetry_table, then cell positions are transposed
     * by the masked shift cascade (each term moves one anti-diagonal band).
     */
    public static long symmetry(long data) {
        for (int i = 60; i >= 0; i -= 4) {
            data = (data & ~(0xFL << i)) | (symmetry_table[(int)((data >> i) & 0xFL)] << i);
        }
        return (0xF000_0F00_00F0_000FL & data)
                | ((0x0F00_00F0_000F_0000L & data) >> 12)
                | ((0x0000_F000_0F00_00F0L & data) << 12)
                | ((0x00F0_000F_0000_0000L & data) >> 24)
                | ((0x0000_0000_F000_0F00L & data) << 24)
                | ((0x000F_0000_0000_0000L & data) >> 36)
                | ((0x0000_0000_0000_F000L & data) << 36);
    }

    /** Maps a single tile number to its value on the mirrored board. */
    public static long symmetryNumber(long n) {
        return symmetry_table[(int)(n & 0xFL)];
    }

    /** Returns data with the nibble at cell {@code index} replaced by n. */
    public static long set(long data, int index, long n) {
        int k = 60 - (index & 0xF) * 4;
        return (data & ~(0xFL << k)) | ((n & 0xFL) << k);
    }

    /**
     * Given two boards one move apart, returns the number of the tile that
     * moved: XOR differs in exactly two nibbles (tile and blank); the first
     * non-zero nibble scanned from the low end is the tile's value in one of
     * the boards.
     */
    public static long moveNumber(long data1, long data2) {
        long x = data1 ^ data2;
        for (int i = 0; i < 16; i++) {
            if ((x & 0xF) != 0L) {
                return x & 0xF;
            }
            x >>= 4;
        }
        return 0L;
    }

    public final long data, z;
    // Cached hash; 0xFFFF_FFFF is an int literal (-1), so the & is a no-op
    // and the (int) cast does the truncation — kept as written.
    final int hash;

    /** Solved board: tiles 1..15 in order, blank at the bottom-right. */
    public Puzzle() {
        long data = 0L;
        for (long i = 1L; i < 16L; i++) {
            data |= i << (64L - (i << 2));
        }
        this.data = data;
        this.z = 0xFL;
        hash = (int)(((data >> 32) ^ data) & 0xFFFF_FFFF);
    }

    /** Builds a board from 16 cell values (0 = blank), row-major order. */
    public Puzzle(long[] n) {
        long data = 0L;
        int zi = 0;
        for (int i = 0; i < 16; i++) {
            if (n[i] == 0) {
                zi = i;
            } else {
                data |= (n[i] & 0xFL) << (60L - (i << 2));
            }
        }
        this.data = data;
        this.z = 0xFL << (60L - (zi << 2));
        hash = (int)(((data >> 32) ^ data) & 0xFFFF_FFFF);
    }

    /**
     * Builds a board from a packed long, locating the blank as the first
     * zero nibble from the low end. NOTE(review): the z == 0xFL overflow
     * guard is unreachable (z can only grow or shift out to 0), so a board
     * with no zero nibble exits the loop with z == 0 instead of throwing.
     */
    public Puzzle(long data) {
        long z = 0xFL;
        while ((data & z) != 0L) {
            z <<= 4;
            if (z == 0xFL) {
                throw new IllegalArgumentException("data(" + data + ")");
            }
        }
        this.data = data;
        this.z = z;
        hash = (int)(((data >> 32) ^ data) & 0xFFFF_FFFF);
    }

    /** Copy constructor. */
    public Puzzle(Puzzle puzzle) {
        this.data = puzzle.data;
        this.z = puzzle.z;
        hash = (int)(((data >> 32) ^ data) & 0xFFFF_FFFF);
    }

    // Internal: trusted (data, blank-mask) pair from the move methods.
    private Puzzle(long data, long z) {
        this.data = data;
        this.z = z;
        hash = (int)(((data >> 32) ^ data) & 0xFFFF_FFFF);
    }

    /** Returns the cell index (0..15) holding value n, or -1 if absent. */
    public int getPos(long n) {
        for (int i = 0; i < 16; i++) {
            if (getNum(i) == n) {
                return i;
            }
        }
        return -1;
    }

    /** Returns the tile value at cell {@code index}. */
    public long getNum(int index) {
        return (data >> (60 - (index << 2))) & 0xFL;
    }

    /**
     * Slides the tile left of the blank into the blank (blank moves left).
     * Blocked (returns this) when the blank is in the leftmost column.
     */
    public Puzzle moveLeft() {
        if ((0xF000_F000_F000_F000L & z) == 0L) {
            return new Puzzle((data | ((data >> 4) & z)) & ~(z << 4), z << 4);
        } else {
            return this;
        }
    }

    /**
     * Blank moves right. The second branch handles the nibble straddling the
     * sign/row boundary with explicit masks instead of the generic shift —
     * kept exactly as written (the masks are position-specific).
     */
    public Puzzle moveRight() {
        if ((0xF00F_000F_000F_000FL & z) == 0L) {
            return new Puzzle((data | ((data << 4) & z)) & ~(z >> 4), z >> 4);
        } else if ((0x000F_000F_000F_000FL & z) == 0L) {
            return new Puzzle((data | ((data << 4) & z)) & 0xF0FF_FFFF_FFFF_FFFFL, 0x0F00_0000_0000_0000L);
        } else {
            return this;
        }
    }

    /** Blank moves up; blocked when the blank is in the top row. */
    public Puzzle moveUp() {
        if ((0xFFFF_0000_0000_0000L & z) == 0L) {
            return new Puzzle((data | ((data >> 16) & z)) & ~(z << 16), z << 16);
        } else {
            return this;
        }
    }

    /**
     * Blank moves down; like moveRight, the row crossing the top of the long
     * uses explicit masks in the second branch.
     */
    public Puzzle moveDown() {
        if ((0xF000_0000_0000_FFFFL & z) == 0L) {
            return new Puzzle((data | ((data << 16) & z)) & ~(z >> 16), z >> 16);
        } else if ((0x0000_0000_0000_FFFFL & z) == 0L) {
            return new Puzzle((data | ((data << 16) & z)) & 0xFFFF_0FFF_FFFF_FFFFL, 0x0000_F000_0000_0000L);
        } else {
            return this;
        }
    }

    /** Dispatch: 0 = left, 1 = up, 2 = right, 3 = down. */
    public Puzzle move(int v) {
        switch (v & 3) {
            case 0:
                return moveLeft();
            case 1:
                return moveUp();
            case 2:
                return moveRight();
            case 3:
                return moveDown();
            default:
                return this;
        }
    }

    /** 4x4 grid rendering; blank printed as '*', values right-aligned. */
    public @Override String toString() {
        StringBuilder sb = new StringBuilder(40);
        for (int j = 60; j >= 0; j -= 4) {
            long n = (data >> j) & 0xFL;
            if ((j & 0xF) == 12 && j < 60) {
                sb.append(System.lineSeparator());
            } else {
                sb.append(' ');
            }
            if (n < 10L) {
                sb.append(' ');
            }
            if (n == 0L) {
                sb.append('*');
            } else {
                sb.append(n);
            }
        }
        return sb.substring(1);  // drop the leading separator space
    }

    public @Override int hashCode() {
        return hash;
    }

    // Equality on the packed board only; the class-check is skipped because
    // Puzzle is final and only compared against other Puzzles here.
    public @Override boolean equals(Object o) {
        //if (o == this) return true;
        if (o == null) return false;
        //if (!getClass().equals(o.getClass())) return false;
        return data == ((Puzzle)o).data;
    }
}
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.chanven.lib.cptr.loadmore;

import android.annotation.TargetApi;
import android.content.Context;
import android.database.DataSetObservable;
import android.database.DataSetObserver;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.*;

import java.lang.reflect.Field;
import java.util.ArrayList;

/**
 * A {@link GridView} that supports adding header rows in a
 * very similar way to {@link android.widget.ListView}.
 * See {@link GridViewWithHeaderAndFooter#addHeaderView(View, Object, boolean)}
 * See {@link GridViewWithHeaderAndFooter#addFooterView(View, Object, boolean)}
 *
 * <p>Each header/footer occupies a full logical row: the wrapping adapter
 * inserts {@code mNumColumns} positions per fixed view, with the view itself
 * at column 0 and invisible placeholders in the remaining columns.
 */
public class GridViewWithHeaderAndFooter extends GridView {

    public static boolean DEBUG = false;

    private OnItemClickListener mOnItemClickListener;
    private OnItemLongClickListener mOnItemLongClickListener;

    /**
     * A class that represents a fixed view in a list, for example a header at the top
     * or a footer at the bottom.
     */
    private static class FixedViewInfo {
        /**
         * The view to add to the grid
         */
        public View view;
        public ViewGroup viewContainer;
        /**
         * The data backing the view. This is returned from {@link ListAdapter#getItem(int)}.
         */
        public Object data;
        /**
         * <code>true</code> if the fixed view should be selectable in the grid
         */
        public boolean isSelectable;
    }

    private int mNumColumns = AUTO_FIT;
    private View mViewForMeasureRowHeight = null;
    private int mRowHeight = -1;
    // log tag can be at most 23 characters
    private static final String LOG_TAG = "GridViewHeaderAndFooter";

    private ArrayList<FixedViewInfo> mHeaderViewInfos = new ArrayList<FixedViewInfo>();
    private ArrayList<FixedViewInfo> mFooterViewInfos = new ArrayList<FixedViewInfo>();
    private ListAdapter mOriginalAdapter;
    private ItemClickHandler mItemClickHandler;

    private void initHeaderGridView() {
    }

    public GridViewWithHeaderAndFooter(Context context) {
        super(context);
        initHeaderGridView();
    }

    public GridViewWithHeaderAndFooter(Context context, AttributeSet attrs) {
        super(context, attrs);
        initHeaderGridView();
    }

    public GridViewWithHeaderAndFooter(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        initHeaderGridView();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        // Keep the wrapping adapter's column count and row height in sync
        // with the measured grid.
        ListAdapter adapter = getAdapter();
        if (adapter != null && adapter instanceof HeaderViewGridAdapter) {
            ((HeaderViewGridAdapter) adapter).setNumColumns(getNumColumnsCompatible());
            ((HeaderViewGridAdapter) adapter).setRowHeight(getRowHeight());
        }
    }

    @Override
    public void setClipChildren(boolean clipChildren) {
        // Ignore, since the header rows depend on not being clipped
    }

    // NOTE(review): the clipChildren argument is ignored — this always
    // forces clipping off. Confirm callers expect that before changing.
    public void setClipChildrenSupper(boolean clipChildren) {
        super.setClipChildren(false);
    }

    public void addHeaderView(View v) {
        addHeaderView(v, null, true);
    }

    /**
     * Adds a header view occupying a full row above the adapter content.
     *
     * @param v            the header view
     * @param data         data to associate with the view (returned by getItem)
     * @param isSelectable whether the header row is selectable
     * @throws IllegalStateException if a non-wrapping adapter was already set
     */
    public void addHeaderView(View v, Object data, boolean isSelectable) {
        ListAdapter adapter = getAdapter();
        if (adapter != null && !(adapter instanceof HeaderViewGridAdapter)) {
            throw new IllegalStateException(
                    "Cannot add header view to grid -- setAdapter has already been called.");
        }
        ViewGroup.LayoutParams lyp = v.getLayoutParams();

        FixedViewInfo info = new FixedViewInfo();
        FrameLayout fl = new FullWidthFixedViewLayout(getContext());

        if (lyp != null) {
            v.setLayoutParams(new FrameLayout.LayoutParams(lyp.width, lyp.height));
            fl.setLayoutParams(new AbsListView.LayoutParams(lyp.width, lyp.height));
        }
        fl.addView(v);
        info.view = v;
        info.viewContainer = fl;
        info.data = data;
        info.isSelectable = isSelectable;
        mHeaderViewInfos.add(info);
        // in the case of re-adding a header view, or adding one later on,
        // we need to notify the observer
        if (adapter != null) {
            ((HeaderViewGridAdapter) adapter).notifyDataSetChanged();
        }
    }

    public void addFooterView(View v) {
        addFooterView(v, null, true);
    }

    /**
     * Adds a footer view occupying a full row below the adapter content.
     * Unlike the header variant, this silently tolerates a non-wrapping
     * adapter and detaches the view from any existing parent first.
     */
    public void addFooterView(View v, Object data, boolean isSelectable) {
        ListAdapter mAdapter = getAdapter();
        if (mAdapter != null && !(mAdapter instanceof HeaderViewGridAdapter)) {
            // throw new IllegalStateException(
            // "Cannot add header view to grid -- setAdapter has already been called.");
        }

        ViewGroup.LayoutParams lyp = v.getLayoutParams();

        FixedViewInfo info = new FixedViewInfo();
        FrameLayout fl = new FullWidthFixedViewLayout(getContext());

        if (lyp != null) {
            v.setLayoutParams(new FrameLayout.LayoutParams(lyp.width, lyp.height));
            fl.setLayoutParams(new AbsListView.LayoutParams(lyp.width, lyp.height));
        }

        if (null != v.getParent()) {
            ((ViewGroup) v.getParent()).removeView(v);
        }
        fl.addView(v);
        info.view = v;
        info.viewContainer = fl;
        info.data = data;
        info.isSelectable = isSelectable;
        mFooterViewInfos.add(info);

        if (mAdapter != null) {
            // ((HeaderViewGridAdapter) mAdapter).notifyDataSetChanged();
        }
    }

    public int getHeaderViewCount() {
        return mHeaderViewInfos.size();
    }

    public int getFooterViewCount() {
        return mFooterViewInfos.size();
    }

    /**
     * Removes a previously-added header view.
     *
     * @param v The view to remove
     * @return true if the view was removed, false if the view was not a header
     * view
     */
    public boolean removeHeaderView(View v) {
        if (mHeaderViewInfos.size() > 0) {
            boolean result = false;
            ListAdapter adapter = getAdapter();
            if (adapter != null && ((HeaderViewGridAdapter) adapter).removeHeader(v)) {
                result = true;
            }
            removeFixedViewInfo(v, mHeaderViewInfos);
            return result;
        }
        return false;
    }

    /**
     * Removes a previously-added footer view.
     *
     * @param v The view to remove
     * @return true if the view was removed, false if the view was not a header
     * view
     */
    public boolean removeFooterView(View v) {
        if (mFooterViewInfos.size() > 0) {
            boolean result = false;
            ListAdapter adapter = getAdapter();
            if (adapter != null && ((HeaderViewGridAdapter) adapter).removeFooter(v)) {
                result = true;
            }
            // NOTE(review): unlike removeHeaderView, the local info list is
            // deliberately(?) left untouched here — mFooterViewInfos retains
            // the removed entry. Confirm whether this is intentional.
            // removeFixedViewInfo(v, mFooterViewInfos);
            return result;
        }
        return false;
    }

    // Removes the FixedViewInfo wrapping v from the given list, if present.
    private void removeFixedViewInfo(View v, ArrayList<FixedViewInfo> where) {
        int len = where.size();
        for (int i = 0; i < len; ++i) {
            FixedViewInfo info = where.get(i);
            if (info.view == v) {
                where.remove(i);
                break;
            }
        }
    }

    // Column count; falls back to reflection on pre-HC platforms.
    @TargetApi(11)
    private int getNumColumnsCompatible() {
        if (Build.VERSION.SDK_INT >= 11) {
            return super.getNumColumns();
        } else {
            try {
                Field numColumns = GridView.class.getDeclaredField("mNumColumns");
                numColumns.setAccessible(true);
                return numColumns.getInt(this);
            } catch (Exception e) {
                if (mNumColumns != -1) {
                    return mNumColumns;
                }
                throw new RuntimeException("Can not determine the mNumColumns for this API platform, please call setNumColumns to set it.");
            }
        }
    }

    // Column width; falls back to reflection below API 16.
    @TargetApi(16)
    private int getColumnWidthCompatible() {
        if (Build.VERSION.SDK_INT >= 16) {
            return super.getColumnWidth();
        } else {
            try {
                Field numColumns = GridView.class.getDeclaredField("mColumnWidth");
                numColumns.setAccessible(true);
                return numColumns.getInt(this);
            } catch (NoSuchFieldException e) {
                throw new RuntimeException(e);
            } catch (IllegalAccessException e) {
                throw new RuntimeException(e);
            }
        }
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        // Drop the cached measuring view so it can be GC'd with the window.
        mViewForMeasureRowHeight = null;
    }

    /** Invalidates the cached row height so it is re-measured on next use. */
    public void invalidateRowHeight() {
        mRowHeight = -1;
    }

    public int getHeaderHeight(int row) {
        if (row >= 0) {
            return mHeaderViewInfos.get(row).view.getMeasuredHeight();
        }
        return 0;
    }

    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    public int getVerticalSpacing() {
        int value = 0;
        try {
            int currentapiVersion = android.os.Build.VERSION.SDK_INT;
            if (currentapiVersion < Build.VERSION_CODES.JELLY_BEAN) {
                Field field = GridView.class.getDeclaredField("mVerticalSpacing");
                field.setAccessible(true);
                value = field.getInt(this);
            } else {
                value = super.getVerticalSpacing();
            }
        } catch (Exception ignore) {
        }
        return value;
    }

    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    public int getHorizontalSpacing() {
        int value = 0;
        try {
            int currentapiVersion = android.os.Build.VERSION.SDK_INT;
            if (currentapiVersion < Build.VERSION_CODES.JELLY_BEAN) {
                Field field = GridView.class.getDeclaredField("mHorizontalSpacing");
                field.setAccessible(true);
                value = field.getInt(this);
            } else {
                value = super.getHorizontalSpacing();
            }
        } catch (Exception ignore) {
        }
        return value;
    }

    /**
     * Measures and caches the height of one content row by measuring the
     * first real adapter view at the current column width. Returns -1 when
     * there is no content to measure yet.
     */
    public int getRowHeight() {
        if (mRowHeight > 0) {
            return mRowHeight;
        }
        ListAdapter adapter = getAdapter();
        int numColumns = getNumColumnsCompatible();

        // adapter has not been set or has no views in it;
        if (adapter == null || adapter.getCount() <= numColumns * (mHeaderViewInfos.size() + mFooterViewInfos.size())) {
            return -1;
        }
        int mColumnWidth = getColumnWidthCompatible();
        View view = getAdapter().getView(numColumns * mHeaderViewInfos.size(), mViewForMeasureRowHeight, this);
        AbsListView.LayoutParams p = (AbsListView.LayoutParams) view.getLayoutParams();
        if (p == null) {
            p = new AbsListView.LayoutParams(-1, -2, 0);
            view.setLayoutParams(p);
        }
        int childHeightSpec = getChildMeasureSpec(
                MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED), 0, p.height);
        int childWidthSpec = getChildMeasureSpec(
                MeasureSpec.makeMeasureSpec(mColumnWidth, MeasureSpec.EXACTLY), 0, p.width);
        view.measure(childWidthSpec, childHeightSpec);
        mViewForMeasureRowHeight = view;
        mRowHeight = view.getMeasuredHeight();
        return mRowHeight;
    }

    @TargetApi(11)
    public void tryToScrollToBottomSmoothly() {
        int lastPos = getAdapter().getCount() - 1;
        if (Build.VERSION.SDK_INT >= 11) {
            smoothScrollToPositionFromTop(lastPos, 0);
        } else {
            setSelection(lastPos);
        }
    }

    @TargetApi(11)
    public void tryToScrollToBottomSmoothly(int duration) {
        int lastPos = getAdapter().getCount() - 1;
        if (Build.VERSION.SDK_INT >= 11) {
            smoothScrollToPositionFromTop(lastPos, 0, duration);
        } else {
            setSelection(lastPos);
        }
    }

    @Override
    public void setAdapter(ListAdapter adapter) {
        mOriginalAdapter = adapter;
        // Only wrap when there actually are fixed views to manage.
        if (mHeaderViewInfos.size() > 0 || mFooterViewInfos.size() > 0) {
            HeaderViewGridAdapter headerViewGridAdapter = new HeaderViewGridAdapter(mHeaderViewInfos, mFooterViewInfos, adapter);
            int numColumns = getNumColumnsCompatible();
            if (numColumns > 1) {
                headerViewGridAdapter.setNumColumns(numColumns);
            }
            headerViewGridAdapter.setRowHeight(getRowHeight());
            super.setAdapter(headerViewGridAdapter);
        } else {
            super.setAdapter(adapter);
        }
    }

    /** @return the adapter passed to setAdapter, before any wrapping */
    public ListAdapter getOriginalAdapter() {
        return mOriginalAdapter;
    }

    /**
     * full width
     */
    private class FullWidthFixedViewLayout extends FrameLayout {

        public FullWidthFixedViewLayout(Context context) {
            super(context);
        }

        @Override
        protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
            int realLeft = GridViewWithHeaderAndFooter.this.getPaddingLeft() + getPaddingLeft();
            // Try to make where it should be, from left, full width
            if (realLeft != left) {
                offsetLeftAndRight(realLeft - left);
            }
            super.onLayout(changed, left, top, right, bottom);
        }

        @Override
        protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
            // Stretch the fixed view across the grid's full content width.
            int targetWidth = GridViewWithHeaderAndFooter.this.getMeasuredWidth()
                    - GridViewWithHeaderAndFooter.this.getPaddingLeft()
                    - GridViewWithHeaderAndFooter.this.getPaddingRight();
            widthMeasureSpec = MeasureSpec.makeMeasureSpec(targetWidth,
                    MeasureSpec.getMode(widthMeasureSpec));
            super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        }
    }

    @Override
    public void setNumColumns(int numColumns) {
        super.setNumColumns(numColumns);
        mNumColumns = numColumns;
        ListAdapter adapter = getAdapter();
        if (adapter != null && adapter instanceof HeaderViewGridAdapter) {
            ((HeaderViewGridAdapter) adapter).setNumColumns(numColumns);
        }
    }

    /**
     * ListAdapter used when a HeaderGridView has header views. This ListAdapter
     * wraps another one and also keeps track of the header views and their
     * associated data objects.
     * <p>This is intended as a base class; you will probably not need to
     * use this class directly in your own code.
     */
    private static class HeaderViewGridAdapter implements WrapperListAdapter, Filterable {
        // This is used to notify the container of updates relating to number of columns
        // or headers changing, which changes the number of placeholders needed
        private final DataSetObservable mDataSetObservable = new DataSetObservable();
        private final ListAdapter mAdapter;
        static final ArrayList<FixedViewInfo> EMPTY_INFO_LIST = new ArrayList<FixedViewInfo>();

        // This ArrayList is assumed to NOT be null.
        ArrayList<FixedViewInfo> mHeaderViewInfos;
        ArrayList<FixedViewInfo> mFooterViewInfos;
        private int mNumColumns = 1;
        private int mRowHeight = -1;
        boolean mAreAllFixedViewsSelectable;
        private final boolean mIsFilterable;
        private boolean mCachePlaceHoldView = true;
        // From Recycle Bin or calling getView, this a question...
        private boolean mCacheFirstHeaderView = false;

        public HeaderViewGridAdapter(ArrayList<FixedViewInfo> headerViewInfos, ArrayList<FixedViewInfo> footViewInfos, ListAdapter adapter) {
            mAdapter = adapter;
            mIsFilterable = adapter instanceof Filterable;
            if (headerViewInfos == null) {
                mHeaderViewInfos = EMPTY_INFO_LIST;
            } else {
                mHeaderViewInfos = headerViewInfos;
            }
            if (footViewInfos == null) {
                mFooterViewInfos = EMPTY_INFO_LIST;
            } else {
                mFooterViewInfos = footViewInfos;
            }
            mAreAllFixedViewsSelectable = areAllListInfosSelectable(mHeaderViewInfos)
                    && areAllListInfosSelectable(mFooterViewInfos);
        }

        public void setNumColumns(int numColumns) {
            if (numColumns < 1) {
                return;
            }
            if (mNumColumns != numColumns) {
                mNumColumns = numColumns;
                notifyDataSetChanged();
            }
        }

        public void setRowHeight(int height) {
            mRowHeight = height;
        }

        public int getHeadersCount() {
            return mHeaderViewInfos.size();
        }

        public int getFootersCount() {
            return mFooterViewInfos.size();
        }

        /**
         * @return true if this adapter doesn't contain any data. This is used to determine
         * whether the empty view should be displayed. A typical implementation will return
         * getCount() == 0 but since getCount() includes the headers and footers, specialized
         * adapters might want a different behavior.
         */
        @Override
        public boolean isEmpty() {
            return (mAdapter == null || mAdapter.isEmpty());
        }

        private boolean areAllListInfosSelectable(ArrayList<FixedViewInfo> infos) {
            if (infos != null) {
                for (FixedViewInfo info : infos) {
                    if (!info.isSelectable) {
                        return false;
                    }
                }
            }
            return true;
        }

        public boolean removeHeader(View v) {
            for (int i = 0; i < mHeaderViewInfos.size(); i++) {
                FixedViewInfo info = mHeaderViewInfos.get(i);
                if (info.view == v) {
                    mHeaderViewInfos.remove(i);
                    mAreAllFixedViewsSelectable = areAllListInfosSelectable(mHeaderViewInfos)
                            && areAllListInfosSelectable(mFooterViewInfos);
                    mDataSetObservable.notifyChanged();
                    return true;
                }
            }
            return false;
        }

        public boolean removeFooter(View v) {
            for (int i = 0; i < mFooterViewInfos.size(); i++) {
                FixedViewInfo info = mFooterViewInfos.get(i);
                if (info.view == v) {
                    mFooterViewInfos.remove(i);
                    mAreAllFixedViewsSelectable = areAllListInfosSelectable(mHeaderViewInfos)
                            && areAllListInfosSelectable(mFooterViewInfos);
                    mDataSetObservable.notifyChanged();
                    return true;
                }
            }
            return false;
        }

        @Override
        public int getCount() {
            // Each fixed view contributes a full row of mNumColumns positions;
            // the wrapped adapter's count is rounded up to a whole row.
            if (mAdapter != null) {
                return (getFootersCount() + getHeadersCount()) * mNumColumns + getAdapterAndPlaceHolderCount();
            } else {
                return (getFootersCount() + getHeadersCount()) * mNumColumns;
            }
        }

        @Override
        public boolean areAllItemsEnabled() {
            return mAdapter == null || mAreAllFixedViewsSelectable && mAdapter.areAllItemsEnabled();
        }

        // Wrapped adapter count rounded up to a multiple of mNumColumns.
        private int getAdapterAndPlaceHolderCount() {
            return (int) (Math.ceil(1f * mAdapter.getCount() / mNumColumns) * mNumColumns);
        }

        @Override
        public boolean isEnabled(int position) {
            // Header (negative positions will throw an IndexOutOfBoundsException)
            int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
            if (position < numHeadersAndPlaceholders) {
                return position % mNumColumns == 0
                        && mHeaderViewInfos.get(position / mNumColumns).isSelectable;
            }
            // Adapter
            final int adjPosition = position - numHeadersAndPlaceholders;
            int adapterCount = 0;
            if (mAdapter != null) {
                adapterCount = getAdapterAndPlaceHolderCount();
                if (adjPosition < adapterCount) {
                    return adjPosition < mAdapter.getCount() && mAdapter.isEnabled(adjPosition);
                }
            }
            // Footer (off-limits positions will throw an IndexOutOfBoundsException)
            final int footerPosition = adjPosition - adapterCount;
            return footerPosition % mNumColumns == 0
                    && mFooterViewInfos.get(footerPosition / mNumColumns).isSelectable;
        }

        @Override
        public Object getItem(int position) {
            // Header (negative positions will throw an ArrayIndexOutOfBoundsException)
            int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
            if (position < numHeadersAndPlaceholders) {
                if (position % mNumColumns == 0) {
                    return mHeaderViewInfos.get(position / mNumColumns).data;
                }
                return null;
            }
            // Adapter
            final int adjPosition = position - numHeadersAndPlaceholders;
            int adapterCount = 0;
            if (mAdapter != null) {
                adapterCount = getAdapterAndPlaceHolderCount();
                if (adjPosition < adapterCount) {
                    if (adjPosition < mAdapter.getCount()) {
                        return mAdapter.getItem(adjPosition);
                    } else {
                        return null;
                    }
                }
            }
            // Footer (off-limits positions will throw an IndexOutOfBoundsException)
            final int footerPosition = adjPosition - adapterCount;
            if (footerPosition % mNumColumns == 0) {
                // FIX: index the footer ROW (footerPosition / mNumColumns),
                // consistent with isEnabled()/getView(). The previous code
                // used the raw footerPosition, which with more than one
                // column returns the wrong footer's data or throws
                // IndexOutOfBoundsException.
                return mFooterViewInfos.get(footerPosition / mNumColumns).data;
            } else {
                return null;
            }
        }

        @Override
        public long getItemId(int position) {
            int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
            if (mAdapter != null && position >= numHeadersAndPlaceholders) {
                int adjPosition = position - numHeadersAndPlaceholders;
                int adapterCount = mAdapter.getCount();
                if (adjPosition < adapterCount) {
                    return mAdapter.getItemId(adjPosition);
                }
            }
            return -1;
        }

        @Override
        public boolean hasStableIds() {
            return mAdapter != null && mAdapter.hasStableIds();
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            if (DEBUG) {
                Log.d(LOG_TAG, String.format("getView: %s, reused: %s", position, convertView == null));
            }
            // Header (negative positions will throw an ArrayIndexOutOfBoundsException)
            int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
            if (position < numHeadersAndPlaceholders) {
                View headerViewContainer = mHeaderViewInfos
                        .get(position / mNumColumns).viewContainer;
                if (position % mNumColumns == 0) {
                    return headerViewContainer;
                } else {
                    if (convertView == null) {
                        convertView = new View(parent.getContext());
                    }
                    // We need to do this because GridView uses the height of the last item
                    // in a row to determine the height for the entire row.
                    convertView.setVisibility(View.INVISIBLE);
                    convertView.setMinimumHeight(headerViewContainer.getHeight());
                    return convertView;
                }
            }
            // Adapter
            final int adjPosition = position - numHeadersAndPlaceholders;
            int adapterCount = 0;
            if (mAdapter != null) {
                adapterCount = getAdapterAndPlaceHolderCount();
                if (adjPosition < adapterCount) {
                    if (adjPosition < mAdapter.getCount()) {
                        return mAdapter.getView(adjPosition, convertView, parent);
                    } else {
                        // Placeholder filling out the last partial content row.
                        if (convertView == null) {
                            convertView = new View(parent.getContext());
                        }
                        convertView.setVisibility(View.INVISIBLE);
                        convertView.setMinimumHeight(mRowHeight);
                        return convertView;
                    }
                }
            }
            // Footer
            final int footerPosition = adjPosition - adapterCount;
            if (footerPosition < getCount()) {
                View footViewContainer = mFooterViewInfos
                        .get(footerPosition / mNumColumns).viewContainer;
                // position % mNumColumns == footerPosition % mNumColumns here,
                // because headers and the rounded adapter count are both
                // multiples of mNumColumns.
                if (position % mNumColumns == 0) {
                    return footViewContainer;
                } else {
                    if (convertView == null) {
                        convertView = new View(parent.getContext());
                    }
                    // We need to do this because GridView uses the height of the last item
                    // in a row to determine the height for the entire row.
                    convertView.setVisibility(View.INVISIBLE);
                    convertView.setMinimumHeight(footViewContainer.getHeight());
                    return convertView;
                }
            }
            throw new ArrayIndexOutOfBoundsException(position);
        }

        @Override
        public int getItemViewType(int position) {
            // Distinct view types are handed out for each header/footer
            // placeholder column so the recycler never mixes them with
            // content views.
            final int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
            final int adapterViewTypeStart = mAdapter == null ? 0 : mAdapter.getViewTypeCount() - 1;
            int type = AdapterView.ITEM_VIEW_TYPE_HEADER_OR_FOOTER;
            if (mCachePlaceHoldView) {
                // Header
                if (position < numHeadersAndPlaceholders) {
                    if (position == 0) {
                        if (mCacheFirstHeaderView) {
                            type = adapterViewTypeStart + mHeaderViewInfos.size() + mFooterViewInfos.size() + 1 + 1;
                        }
                    }
                    if (position % mNumColumns != 0) {
                        type = adapterViewTypeStart + (position / mNumColumns + 1);
                    }
                }
            }
            // Adapter
            final int adjPosition = position - numHeadersAndPlaceholders;
            int adapterCount = 0;
            if (mAdapter != null) {
                adapterCount = getAdapterAndPlaceHolderCount();
                if (adjPosition >= 0 && adjPosition < adapterCount) {
                    if (adjPosition < mAdapter.getCount()) {
                        type = mAdapter.getItemViewType(adjPosition);
                    } else {
                        if (mCachePlaceHoldView) {
                            type = adapterViewTypeStart + mHeaderViewInfos.size() + 1;
                        }
                    }
                }
            }
            if (mCachePlaceHoldView) {
                // Footer
                final int footerPosition = adjPosition - adapterCount;
                if (footerPosition >= 0 && footerPosition < getCount() && (footerPosition % mNumColumns) != 0) {
                    type = adapterViewTypeStart + mHeaderViewInfos.size() + 1 + (footerPosition / mNumColumns + 1);
                }
            }
            if (DEBUG) {
                // NOTE: the two trailing arguments are ignored by this format
                // string (String.format discards extras) — kept for history.
                Log.d(LOG_TAG, String.format("getItemViewType: pos: %s, result: %s", position, type, mCachePlaceHoldView, mCacheFirstHeaderView));
            }
            return type;
        }

        /**
         * content view, content view holder, header[0], header and footer placeholder(s)
         */
        @Override
        public int getViewTypeCount() {
            int count = mAdapter == null ? 1 : mAdapter.getViewTypeCount();
            if (mCachePlaceHoldView) {
                int offset = mHeaderViewInfos.size() + 1 + mFooterViewInfos.size();
                if (mCacheFirstHeaderView) {
                    offset += 1;
                }
                count += offset;
            }
            if (DEBUG) {
                Log.d(LOG_TAG, String.format("getViewTypeCount: %s", count));
            }
            return count;
        }

        @Override
        public void registerDataSetObserver(DataSetObserver observer) {
            mDataSetObservable.registerObserver(observer);
            if (mAdapter != null) {
                mAdapter.registerDataSetObserver(observer);
            }
        }

        @Override
        public void unregisterDataSetObserver(DataSetObserver observer) {
            mDataSetObservable.unregisterObserver(observer);
            if (mAdapter != null) {
                mAdapter.unregisterDataSetObserver(observer);
            }
        }

        @Override
        public Filter getFilter() {
            if (mIsFilterable) {
                return ((Filterable) mAdapter).getFilter();
            }
            return null;
        }

        @Override
        public ListAdapter getWrappedAdapter() {
            return mAdapter;
        }

        public void notifyDataSetChanged() {
            mDataSetObservable.notifyChanged();
        }
    }

    @Override
    public void setOnItemClickListener(OnItemClickListener l) {
        mOnItemClickListener = l;
        super.setOnItemClickListener(getItemClickHandler());
    }

    @Override
    public void setOnItemLongClickListener(OnItemLongClickListener listener) {
        mOnItemLongClickListener = listener;
        super.setOnItemLongClickListener(getItemClickHandler());
    }

    private ItemClickHandler getItemClickHandler() {
        if (mItemClickHandler == null) {
            mItemClickHandler = new ItemClickHandler();
        }
        return mItemClickHandler;
    }

    /**
     * Translates raw grid positions into positions of the wrapped adapter
     * (subtracting the header placeholder rows) before forwarding to the
     * user-supplied listeners.
     */
    private class ItemClickHandler implements android.widget.AdapterView.OnItemClickListener, AdapterView.OnItemLongClickListener {

        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            if (mOnItemClickListener != null) {
                int resPos = position - getHeaderViewCount() * getNumColumnsCompatible();
                if (resPos >= 0) {
                    mOnItemClickListener.onItemClick(parent, view, resPos, id);
                }
            }
        }

        @Override
        public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id) {
            if (mOnItemLongClickListener != null) {
                int resPos = position - getHeaderViewCount() * getNumColumnsCompatible();
                if (resPos >= 0) {
                    mOnItemLongClickListener.onItemLongClick(parent, view, resPos, id);
                }
            }
            return true;
        }
    }
}
package ssh;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

/**
 * Test utility that truncates {@code /home/zhangjin/share/nohup1.out} and then
 * appends a line {@code "line N"} to it every five seconds, flushing after each
 * write so external readers (e.g. {@code tail -f}) see output immediately.
 * Runs until the process is killed or the sleep is interrupted.
 */
public class WriteFile {

    /** Delay between successive writes, in milliseconds. */
    private static final long WRITE_INTERVAL_MS = 5 * 1000L;

    /**
     * Entry point. The loop is intentionally infinite.
     *
     * @param args ignored
     * @throws Exception on I/O failure or if the sleep is interrupted
     */
    public static void main(String[] args) throws Exception {
        File file = new File("/home/zhangjin/share/nohup1.out");
        if (!file.exists()) {
            file.createNewFile();
        }
        // try-with-resources closes the writer if the loop ever exits via an
        // exception (the original leaked it); UTF-8 is pinned instead of the
        // platform default charset (output here is ASCII, so bytes are identical).
        try (BufferedWriter bw = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8))) {
            int i = 0;
            while (true) {
                i++;
                Thread.sleep(WRITE_INTERVAL_MS);
                bw.write("line " + i);
                bw.newLine();
                bw.flush(); // flush per line so the file is always up to date
            }
        }
    }
}
/*
 * Copyright (c) 2004, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package sun.jvmstat.monitor;

// Redundant wildcard self-import of this package was removed: types in
// sun.jvmstat.monitor (e.g. MonitoredVm) are visible without an import.

/**
 * Interface to support asynchronous polling of the exported
 * instrumentation of a target Java Virtual Machine.
 *
 * @author Brian Doherty
 * @since 1.5
 */
public interface BufferedMonitoredVm extends MonitoredVm {

    /**
     * Interface to get the bytes associated with the instrumentation
     * for the target Java Virtual Machine.
     *
     * @return byte[] - a byte array containing the current bytes
     *                  for the instrumentation exported by the
     *                  target Java Virtual Machine.
     */
    byte[] getBytes();

    /**
     * Interface to get the size of the instrumentation buffer
     * for the target Java Virtual Machine.
     *
     * @return int - the size of the instrumentation buffer for the
     *               target Java Virtual Machine.
     */
    int getCapacity();
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.engine.common.impl;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import javax.naming.InitialContext;
import javax.sql.DataSource;

import org.apache.ibatis.builder.xml.XMLConfigBuilder;
import org.apache.ibatis.builder.xml.XMLMapperBuilder;
import org.apache.ibatis.datasource.pooled.PooledDataSource;
import org.apache.ibatis.mapping.Environment;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.defaults.DefaultSqlSessionFactory;
import org.apache.ibatis.transaction.TransactionFactory;
import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;
import org.apache.ibatis.transaction.managed.ManagedTransactionFactory;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.common.api.delegate.event.FlowableEventDispatcher;
import org.flowable.engine.common.api.delegate.event.FlowableEventListener;
import org.flowable.engine.common.impl.cfg.CommandExecutorImpl;
import org.flowable.engine.common.impl.cfg.IdGenerator;
import org.flowable.engine.common.impl.cfg.TransactionContextFactory;
import org.flowable.engine.common.impl.cfg.standalone.StandaloneMybatisTransactionContextFactory;
import org.flowable.engine.common.impl.db.CommonDbSchemaManager;
import org.flowable.engine.common.impl.db.DbSchemaManager;
import org.flowable.engine.common.impl.db.DbSqlSessionFactory;
import org.flowable.engine.common.impl.db.LogSqlExecutionTimePlugin;
import org.flowable.engine.common.impl.db.MybatisTypeAliasConfigurator;
import org.flowable.engine.common.impl.db.MybatisTypeHandlerConfigurator;
import org.flowable.engine.common.impl.event.EventDispatchAction;
import org.flowable.engine.common.impl.interceptor.CommandConfig;
import org.flowable.engine.common.impl.interceptor.CommandContextFactory;
import org.flowable.engine.common.impl.interceptor.CommandContextInterceptor;
import org.flowable.engine.common.impl.interceptor.CommandExecutor;
import org.flowable.engine.common.impl.interceptor.CommandInterceptor;
import org.flowable.engine.common.impl.interceptor.DefaultCommandInvoker;
import org.flowable.engine.common.impl.interceptor.LogInterceptor;
import org.flowable.engine.common.impl.interceptor.SessionFactory;
import org.flowable.engine.common.impl.interceptor.TransactionContextInterceptor;
import org.flowable.engine.common.impl.persistence.GenericManagerFactory;
import org.flowable.engine.common.impl.persistence.StrongUuidGenerator;
import org.flowable.engine.common.impl.persistence.cache.EntityCache;
import org.flowable.engine.common.impl.persistence.cache.EntityCacheImpl;
import org.flowable.engine.common.impl.persistence.entity.Entity;
import org.flowable.engine.common.impl.runtime.Clock;
import org.flowable.engine.common.impl.service.CommonEngineServiceImpl;
import org.flowable.engine.common.impl.util.DefaultClockImpl;
import org.flowable.engine.common.impl.util.IoUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for the configuration shared by all Flowable engines: JDBC /
 * DataSource setup, MyBatis SqlSessionFactory wiring, the command interceptor
 * chain, session factories and common services. Concrete engine configurations
 * subclass this and supply engine-specific settings.
 */
public abstract class AbstractEngineConfiguration {

    protected static final Logger LOGGER =
LoggerFactory.getLogger(AbstractEngineConfiguration.class);

    /** The tenant id indicating 'no tenant' */
    public static final String NO_TENANT_ID = "";

    /**
     * Checks the version of the DB schema against the library when the form engine is being created and throws an exception if the versions don't match.
     */
    public static final String DB_SCHEMA_UPDATE_FALSE = "false";

    public static final String DB_SCHEMA_UPDATE_CREATE = "create";

    public static final String DB_SCHEMA_UPDATE_CREATE_DROP = "create-drop";

    /**
     * Creates the schema when the form engine is being created and drops the schema when the form engine is being closed.
     */
    public static final String DB_SCHEMA_UPDATE_DROP_CREATE = "drop-create";

    /**
     * Upon building of the process engine, a check is performed and an update of the schema is performed if it is necessary.
     */
    public static final String DB_SCHEMA_UPDATE_TRUE = "true";

    // JDBC settings; used by initDataSource() only when no DataSource instance
    // or JNDI name has been supplied explicitly.
    protected String databaseType;
    protected String jdbcDriver = "org.h2.Driver";
    protected String jdbcUrl = "jdbc:h2:tcp://localhost/~/flowable";
    protected String jdbcUsername = "sa";
    protected String jdbcPassword = "";
    protected String dataSourceJndiName;
    protected int jdbcMaxActiveConnections;
    protected int jdbcMaxIdleConnections;
    protected int jdbcMaxCheckoutTime;
    protected int jdbcMaxWaitTime;
    protected boolean jdbcPingEnabled;
    protected String jdbcPingQuery;
    protected int jdbcPingConnectionNotUsedFor;
    protected int jdbcDefaultTransactionIsolationLevel;
    protected DataSource dataSource;
    protected DbSchemaManager commonDbSchemaManager;
    protected DbSchemaManager dbSchemaManager;

    protected String databaseSchemaUpdate = DB_SCHEMA_UPDATE_FALSE;

    protected String xmlEncoding = "UTF-8";

    // COMMAND EXECUTORS ///////////////////////////////////////////////

    protected CommandExecutor commandExecutor;
    protected Collection<? extends CommandInterceptor> defaultCommandInterceptors;
    protected CommandConfig defaultCommandConfig;
    protected CommandConfig schemaCommandConfig;
    protected CommandContextFactory commandContextFactory;
    protected CommandInterceptor commandInvoker;

    protected List<CommandInterceptor> customPreCommandInterceptors;
    protected List<CommandInterceptor> customPostCommandInterceptors;
    protected List<CommandInterceptor> commandInterceptors;

    // Keyed by engine configuration key; shared with CommandContextInterceptor
    // in getDefaultCommandInterceptors().
    protected Map<String, AbstractEngineConfiguration> engineConfigurations = new HashMap<>();
    protected Map<String, AbstractServiceConfiguration> serviceConfigurations = new HashMap<>();

    protected ClassLoader classLoader;

    /**
     * Either use Class.forName or ClassLoader.loadClass for class loading. See http://forums.activiti.org/content/reflectutilloadclass-and-custom- classloader
     */
    protected boolean useClassForNameClassLoading = true;

    // MYBATIS SQL SESSION FACTORY /////////////////////////////////////

    protected boolean isDbHistoryUsed = true;
    protected DbSqlSessionFactory dbSqlSessionFactory;
    protected SqlSessionFactory sqlSessionFactory;
    protected TransactionFactory transactionFactory;
    protected TransactionContextFactory transactionContextFactory;

    /**
     * If set to true, enables bulk insert (grouping sql inserts together). Default true.
     * For some databases (eg DB2+z/OS) needs to be set to false.
     */
    protected boolean isBulkInsertEnabled = true;

    /**
     * Some databases have a limit of how many parameters one sql insert can have (eg SQL Server, 2000 params (!= insert statements) ). Tweak this parameter in case of exceptions indicating too much
     * is being put into one bulk insert, or make it higher if your database can cope with it and there are inserts with a huge amount of data.
     * <p>
     * By default: 100 (75 for mssql server as it has a hard limit of 2000 parameters in a statement)
     */
    protected int maxNrOfStatementsInBulkInsert = 100;

    public int DEFAULT_MAX_NR_OF_STATEMENTS_BULK_INSERT_SQL_SERVER = 60; // currently Execution has most params (31). 2000 / 31 = 64.

    protected Set<Class<?>> customMybatisMappers;
    protected Set<String> customMybatisXMLMappers;
    protected Set<String> dependentEngineMyBatisXmlMappers;
    protected List<MybatisTypeAliasConfigurator> dependentEngineMybatisTypeAliasConfigs;
    protected List<MybatisTypeHandlerConfigurator> dependentEngineMybatisTypeHandlerConfigs;

    // SESSION FACTORIES ///////////////////////////////////////////////

    protected List<SessionFactory> customSessionFactories;
    protected Map<Class<?>, SessionFactory> sessionFactories;

    protected boolean enableEventDispatcher = true;
    protected FlowableEventDispatcher eventDispatcher;
    protected List<FlowableEventListener> eventListeners;
    protected Map<String, List<FlowableEventListener>> typedEventListeners;
    protected List<EventDispatchAction> additionalEventDispatchActions;

    protected boolean transactionsExternallyManaged;

    /**
     * Flag that can be set to configure or not a relational database is used. This is useful for custom implementations that do not use relational databases
     * at all.
     *
     * If true (default), the {@link AbstractEngineConfiguration#getDatabaseSchemaUpdate()} value will be used to determine what needs to happen wrt the database schema.
     *
     * If false, no validation or schema creation will be done. That means that the database schema must have been created 'manually' before but the engine does not validate whether the schema is
     * correct. The {@link AbstractEngineConfiguration#getDatabaseSchemaUpdate()} value will not be used.
     */
    protected boolean usingRelationalDatabase = true;

    /**
     * Allows configuring a database table prefix which is used for all runtime operations of the process engine. For example, if you specify a prefix named 'PRE1.', Flowable will query for executions
     * in a table named 'PRE1.ACT_RU_EXECUTION_'.
     *
     * <p />
     * <strong>NOTE: the prefix is not respected by automatic database schema management.
If you use {@link AbstractEngineConfiguration#DB_SCHEMA_UPDATE_CREATE_DROP} or
     * {@link AbstractEngineConfiguration#DB_SCHEMA_UPDATE_TRUE}, Flowable will create the database tables using the default names, regardless of the prefix configured here.</strong>
     */
    protected String databaseTablePrefix = "";

    /**
     * Escape character for doing wildcard searches.
     *
     * This will be added at the end of queries that include for example a LIKE clause. For example: SELECT * FROM table WHERE column LIKE '%\%%' ESCAPE '\';
     */
    protected String databaseWildcardEscapeCharacter;

    /**
     * database catalog to use
     */
    protected String databaseCatalog = "";

    /**
     * In some situations you want to set the schema to use for table checks / generation if the database metadata doesn't return that correctly, see https://jira.codehaus.org/browse/ACT-1220,
     * https://jira.codehaus.org/browse/ACT-1062
     */
    protected String databaseSchema;

    /**
     * Set to true in case the defined databaseTablePrefix is a schema-name, instead of an actual table name prefix. This is relevant for checking if Flowable-tables exist, the databaseTablePrefix
     * will not be used here - since the schema is taken into account already, adding a prefix for the table-check will result in wrong table-names.
     */
    protected boolean tablePrefixIsSchema;

    /**
     * Enables the MyBatis plugin that logs the execution time of sql statements.
     */
    protected boolean enableLogSqlExecutionTime;

    protected Properties databaseTypeMappings = getDefaultDatabaseTypeMappings();

    protected List<EngineDeployer> customPreDeployers;
    protected List<EngineDeployer> customPostDeployers;
    protected List<EngineDeployer> deployers;

    // Internal database-type keys used throughout the engine.
    public static final String DATABASE_TYPE_H2 = "h2";
    public static final String DATABASE_TYPE_HSQL = "hsql";
    public static final String DATABASE_TYPE_MYSQL = "mysql";
    public static final String DATABASE_TYPE_ORACLE = "oracle";
    public static final String DATABASE_TYPE_POSTGRES = "postgres";
    public static final String DATABASE_TYPE_MSSQL = "mssql";
    public static final String DATABASE_TYPE_DB2 = "db2";

    /**
     * Maps the product names reported by JDBC DatabaseMetaData onto the internal
     * database-type keys above; consumed by initDatabaseType(). Note the many
     * DB2 platform variants all map to the single "db2" key.
     */
    public static Properties getDefaultDatabaseTypeMappings() {
        Properties databaseTypeMappings = new Properties();
        databaseTypeMappings.setProperty("H2", DATABASE_TYPE_H2);
        databaseTypeMappings.setProperty("HSQL Database Engine", DATABASE_TYPE_HSQL);
        databaseTypeMappings.setProperty("MySQL", DATABASE_TYPE_MYSQL);
        databaseTypeMappings.setProperty("Oracle", DATABASE_TYPE_ORACLE);
        databaseTypeMappings.setProperty("PostgreSQL", DATABASE_TYPE_POSTGRES);
        databaseTypeMappings.setProperty("Microsoft SQL Server", DATABASE_TYPE_MSSQL);
        databaseTypeMappings.setProperty(DATABASE_TYPE_DB2, DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/NT", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/NT64", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2 UDP", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/LINUX", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/LINUX390", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/LINUXX8664", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/LINUXZ64", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/LINUXPPC64", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/400 SQL", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/6000", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2 UDB iSeries", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/AIX64", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/HPUX", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/HP64", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/SUN", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/SUN64", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/PTX", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2/2", DATABASE_TYPE_DB2);
        databaseTypeMappings.setProperty("DB2 UDB AS400", DATABASE_TYPE_DB2);
        return databaseTypeMappings;
    }

    protected Map<Object, Object> beans;

    protected IdGenerator idGenerator;
    protected Clock clock;

    // Variables

    public static final int DEFAULT_GENERIC_MAX_LENGTH_STRING = 4000;
    public static final int DEFAULT_ORACLE_MAX_LENGTH_STRING = 2000;

    /**
     * Define a max length for storing String variable types in the database. Mainly used for the Oracle NVARCHAR2 limit of 2000 characters
     */
    protected int maxLengthStringVariableType = -1;

    // DataSource // ///////////////////////////////////////////////////////////////

    /**
     * Resolves the DataSource in priority order: explicitly supplied instance,
     * JNDI lookup, then a MyBatis PooledDataSource built from the jdbc* fields.
     * Also triggers database-type detection if not configured.
     */
    protected void initDataSource() {
        if (dataSource == null) {
            if (dataSourceJndiName != null) {
                try {
                    dataSource = (DataSource) new InitialContext().lookup(dataSourceJndiName);
                } catch (Exception e) {
                    throw new FlowableException("couldn't lookup datasource from " + dataSourceJndiName + ": " + e.getMessage(), e);
                }

            } else if (jdbcUrl != null) {
                if ((jdbcDriver == null) || (jdbcUsername == null)) {
                    throw new FlowableException("DataSource or JDBC properties have to be specified in a process engine configuration");
                }

                LOGGER.debug("initializing datasource to db: {}", jdbcUrl);

                if (LOGGER.isInfoEnabled()) {
                    LOGGER.info("Configuring Datasource with following properties (omitted password for security)");
                    LOGGER.info("datasource driver : {}", jdbcDriver);
                    LOGGER.info("datasource url : {}", jdbcUrl);
                    LOGGER.info("datasource user name : {}",
jdbcUsername);
                }

                PooledDataSource pooledDataSource = new PooledDataSource(this.getClass().getClassLoader(), jdbcDriver, jdbcUrl, jdbcUsername, jdbcPassword);

                // Only override pool defaults for values that were actually configured (> 0).
                if (jdbcMaxActiveConnections > 0) {
                    pooledDataSource.setPoolMaximumActiveConnections(jdbcMaxActiveConnections);
                }
                if (jdbcMaxIdleConnections > 0) {
                    pooledDataSource.setPoolMaximumIdleConnections(jdbcMaxIdleConnections);
                }
                if (jdbcMaxCheckoutTime > 0) {
                    pooledDataSource.setPoolMaximumCheckoutTime(jdbcMaxCheckoutTime);
                }
                if (jdbcMaxWaitTime > 0) {
                    pooledDataSource.setPoolTimeToWait(jdbcMaxWaitTime);
                }
                if (jdbcPingEnabled) {
                    pooledDataSource.setPoolPingEnabled(true);
                    if (jdbcPingQuery != null) {
                        pooledDataSource.setPoolPingQuery(jdbcPingQuery);
                    }
                    pooledDataSource.setPoolPingConnectionsNotUsedFor(jdbcPingConnectionNotUsedFor);
                }
                if (jdbcDefaultTransactionIsolationLevel > 0) {
                    pooledDataSource.setDefaultTransactionIsolationLevel(jdbcDefaultTransactionIsolationLevel);
                }
                dataSource = pooledDataSource;
            }

            if (dataSource instanceof PooledDataSource) {
                // ACT-233: connection pool of Ibatis is not properly
                // initialized if this is not called!
                ((PooledDataSource) dataSource).forceCloseAll();
            }
        }

        if (databaseType == null) {
            initDatabaseType();
        }
    }

    /**
     * Deduces {@code databaseType} from the JDBC metadata product name via
     * {@code databaseTypeMappings}. NOTE(review): a SQLException during detection
     * is only logged, leaving databaseType null — confirm callers tolerate that.
     */
    public void initDatabaseType() {
        Connection connection = null;
        try {
            connection = dataSource.getConnection();
            DatabaseMetaData databaseMetaData = connection.getMetaData();
            String databaseProductName = databaseMetaData.getDatabaseProductName();
            LOGGER.debug("database product name: '{}'", databaseProductName);
            databaseType = databaseTypeMappings.getProperty(databaseProductName);
            if (databaseType == null) {
                throw new FlowableException("couldn't deduct database type from database product name '" + databaseProductName + "'");
            }
            LOGGER.debug("using database type: {}", databaseType);

        } catch (SQLException e) {
            LOGGER.error("Exception while initializing Database connection", e);
        } finally {
            try {
                if (connection != null) {
                    connection.close();
                }
            } catch (SQLException e) {
                LOGGER.error("Exception while closing the Database connection", e);
            }
        }

        // Special care for MSSQL, as it has a hard limit of 2000 params per statement (incl bulk statement).
        // Especially with executions, with 100 as default, this limit is passed.
        if (DATABASE_TYPE_MSSQL.equals(databaseType)) {
            maxNrOfStatementsInBulkInsert = DEFAULT_MAX_NR_OF_STATEMENTS_BULK_INSERT_SQL_SERVER;
        }
    }

    public void initDbSchemaManager() {
        if (this.commonDbSchemaManager == null) {
            this.commonDbSchemaManager = new CommonDbSchemaManager();
        }
    }

    // session factories ////////////////////////////////////////////////////////

    public void addSessionFactory(SessionFactory sessionFactory) {
        sessionFactories.put(sessionFactory.getSessionType(), sessionFactory);
    }

    public void initCommandContextFactory() {
        if (commandContextFactory == null) {
            commandContextFactory = new CommandContextFactory();
        }
    }

    public void initTransactionContextFactory() {
        if (transactionContextFactory == null) {
            transactionContextFactory = new StandaloneMybatisTransactionContextFactory();
        }
    }

    // Initializes the whole command-execution stack in dependency order.
    public void initCommandExecutors() {
        initDefaultCommandConfig();
        initSchemaCommandConfig();
        initCommandInvoker();
        initCommandInterceptors();
        initCommandExecutor();
    }

    public void initDefaultCommandConfig() {
        if (defaultCommandConfig == null) {
            defaultCommandConfig = new CommandConfig();
        }
    }

    public void initSchemaCommandConfig() {
        if (schemaCommandConfig == null) {
            schemaCommandConfig = new CommandConfig();
        }
    }

    public void initCommandInvoker() {
        if (commandInvoker == null) {
            commandInvoker = new DefaultCommandInvoker();
        }
    }

    // Assembles the chain: custom-pre, defaults, custom-post, with the invoker last.
    public void initCommandInterceptors() {
        if (commandInterceptors == null) {
            commandInterceptors = new ArrayList<>();
            if (customPreCommandInterceptors != null) {
                commandInterceptors.addAll(customPreCommandInterceptors);
            }
            commandInterceptors.addAll(getDefaultCommandInterceptors());
            if (customPostCommandInterceptors != null) {
                commandInterceptors.addAll(customPostCommandInterceptors);
            }
            commandInterceptors.add(commandInvoker);
        }
    }

    public Collection<?
extends CommandInterceptor> getDefaultCommandInterceptors() {
        if (defaultCommandInterceptors == null) {
            List<CommandInterceptor> interceptors = new ArrayList<>();
            interceptors.add(new LogInterceptor());

            CommandInterceptor transactionInterceptor = createTransactionInterceptor();
            if (transactionInterceptor != null) {
                interceptors.add(transactionInterceptor);
            }

            if (commandContextFactory != null) {
                String engineCfgKey = getEngineCfgKey();
                CommandContextInterceptor commandContextInterceptor = new CommandContextInterceptor(commandContextFactory);
                // Registers this configuration under its key so commands can find it.
                engineConfigurations.put(engineCfgKey, this);
                commandContextInterceptor.setEngineConfigurations(engineConfigurations);
                commandContextInterceptor.setServiceConfigurations(serviceConfigurations);
                commandContextInterceptor.setCurrentEngineConfigurationKey(engineCfgKey);
                interceptors.add(commandContextInterceptor);
            }

            if (transactionContextFactory != null) {
                interceptors.add(new TransactionContextInterceptor(transactionContextFactory));
            }

            List<CommandInterceptor> additionalCommandInterceptors = getAdditionalDefaultCommandInterceptors();
            if (additionalCommandInterceptors != null) {
                interceptors.addAll(additionalCommandInterceptors);
            }

            defaultCommandInterceptors = interceptors;
        }
        return defaultCommandInterceptors;
    }

    public abstract String getEngineCfgKey();

    // Hook for subclasses; the default contributes no extra interceptors.
    public List<CommandInterceptor> getAdditionalDefaultCommandInterceptors() {
        return null;
    }

    public void initCommandExecutor() {
        if (commandExecutor == null) {
            CommandInterceptor first = initInterceptorChain(commandInterceptors);
            commandExecutor = new CommandExecutorImpl(getDefaultCommandConfig(), first);
        }
    }

    // Links each interceptor to its successor and returns the head of the chain.
    public CommandInterceptor initInterceptorChain(List<CommandInterceptor> chain) {
        if (chain == null || chain.isEmpty()) {
            throw new FlowableException("invalid command interceptor chain configuration: " + chain);
        }
        for (int i = 0; i < chain.size() - 1; i++) {
            chain.get(i).setNext(chain.get(i + 1));
        }
        return chain.get(0);
    }

    public abstract CommandInterceptor createTransactionInterceptor();

    public void initBeans() {
        if (beans == null) {
            beans = new HashMap<>();
        }
    }

    // id generator // /////////////////////////////////////////////////////////////

    public void initIdGenerator() {
        if (idGenerator == null) {
            idGenerator = new StrongUuidGenerator();
        }
    }

    public void initClock() {
        if (clock == null) {
            clock = new DefaultClockImpl();
        }
    }

    // services // /////////////////////////////////////////////////////////////////

    protected void initService(Object service) {
        if (service instanceof CommonEngineServiceImpl) {
            ((CommonEngineServiceImpl) service).setCommandExecutor(commandExecutor);
        }
    }

    // myBatis SqlSessionFactory // ////////////////////////////////////////////////

    public void initSessionFactories() {
        if (sessionFactories == null) {
            sessionFactories = new HashMap<>();

            if (usingRelationalDatabase) {
                initDbSqlSessionFactory();
            }

            addSessionFactory(new GenericManagerFactory(EntityCache.class, EntityCacheImpl.class));

            commandContextFactory.setSessionFactories(sessionFactories);
        }

        // Custom factories are added last so they can replace built-in ones.
        if (customSessionFactories != null) {
            for (SessionFactory sessionFactory : customSessionFactories) {
                addSessionFactory(sessionFactory);
            }
        }
    }

    public void initDbSqlSessionFactory() {
        if (dbSqlSessionFactory == null) {
            dbSqlSessionFactory = createDbSqlSessionFactory();
        }
        dbSqlSessionFactory.setDatabaseType(databaseType);
        dbSqlSessionFactory.setSqlSessionFactory(sqlSessionFactory);
        dbSqlSessionFactory.setDbHistoryUsed(isDbHistoryUsed);
        dbSqlSessionFactory.setDatabaseTablePrefix(databaseTablePrefix);
        dbSqlSessionFactory.setTablePrefixIsSchema(tablePrefixIsSchema);
        dbSqlSessionFactory.setDatabaseCatalog(databaseCatalog);
        dbSqlSessionFactory.setDatabaseSchema(databaseSchema);
        dbSqlSessionFactory.setMaxNrOfStatementsInBulkInsert(maxNrOfStatementsInBulkInsert);
        initDbSqlSessionFactoryEntitySettings();
        addSessionFactory(dbSqlSessionFactory);
    }

    public DbSqlSessionFactory createDbSqlSessionFactory() {
        return new DbSqlSessionFactory();
    }

    protected abstract void
initDbSqlSessionFactoryEntitySettings();

    // Registers entity insert/delete ordering on the DbSqlSessionFactory; insert-order
    // classes are also marked bulk-insertable when bulk insert is enabled.
    protected void defaultInitDbSqlSessionFactoryEntitySettings(List<Class<? extends Entity>> insertOrder, List<Class<? extends Entity>> deleteOrder) {
        for (Class<? extends Entity> clazz : insertOrder) {
            dbSqlSessionFactory.getInsertionOrder().add(clazz);
            if (isBulkInsertEnabled) {
                dbSqlSessionFactory.getBulkInserteableEntityClasses().add(clazz);
            }
        }
        for (Class<? extends Entity> clazz : deleteOrder) {
            dbSqlSessionFactory.getDeletionOrder().add(clazz);
        }
    }

    public void initTransactionFactory() {
        if (transactionFactory == null) {
            if (transactionsExternallyManaged) {
                transactionFactory = new ManagedTransactionFactory();
                Properties properties = new Properties();
                properties.put("closeConnection", "false");
                this.transactionFactory.setProperties(properties);
            } else {
                transactionFactory = new JdbcTransactionFactory();
            }
        }
    }

    /**
     * Builds the MyBatis SqlSessionFactory from the engine's XML configuration
     * stream, injecting engine properties (table prefix, wildcard escape, limit
     * clauses, blob/bool types) plus database-specific overrides loaded from
     * pathToEngineDbProperties().
     */
    public void initSqlSessionFactory() {
        if (sqlSessionFactory == null) {
            InputStream inputStream = null;
            try {
                inputStream = getMyBatisXmlConfigurationStream();
                Environment environment = new Environment("default", transactionFactory, dataSource);
                Reader reader = new InputStreamReader(inputStream);
                Properties properties = new Properties();
                properties.put("prefix", databaseTablePrefix);

                String wildcardEscapeClause = "";
                if ((databaseWildcardEscapeCharacter != null) && (databaseWildcardEscapeCharacter.length() != 0)) {
                    wildcardEscapeClause = " escape '" + databaseWildcardEscapeCharacter + "'";
                }
                properties.put("wildcardEscapeClause", wildcardEscapeClause);

                // set default properties
                properties.put("limitBefore", "");
                properties.put("limitAfter", "");
                properties.put("limitBetween", "");
                properties.put("limitOuterJoinBetween", "");
                properties.put("limitBeforeNativeQuery", "");
                properties.put("blobType", "BLOB");
                properties.put("boolValue", "TRUE");

                if (databaseType != null) {
                    properties.load(getResourceAsStream(pathToEngineDbProperties()));
                }

                Configuration configuration = initMybatisConfiguration(environment, reader, properties);
                sqlSessionFactory = new DefaultSqlSessionFactory(configuration);

            } catch (Exception e) {
                throw new FlowableException("Error while building ibatis SqlSessionFactory: " + e.getMessage(), e);
            } finally {
                IoUtil.closeSilently(inputStream);
            }
        }
    }

    public String pathToEngineDbProperties() {
        return "org/flowable/common/db/properties/" + databaseType + ".properties";
    }

    public Configuration initMybatisConfiguration(Environment environment, Reader reader, Properties properties) {
        XMLConfigBuilder parser = new XMLConfigBuilder(reader, "", properties);
        Configuration configuration = parser.getConfiguration();

        if (databaseType != null) {
            configuration.setDatabaseId(databaseType);
        }

        configuration.setEnvironment(environment);

        initCustomMybatisMappers(configuration);
        initMybatisTypeHandlers(configuration);
        if (isEnableLogSqlExecutionTime()) {
            initMyBatisLogSqlExecutionTimePlugin(configuration);
        }

        configuration = parseMybatisConfiguration(parser);
        return configuration;
    }

    public void initCustomMybatisMappers(Configuration configuration) {
        if (getCustomMybatisMappers() != null) {
            for (Class<?> clazz : getCustomMybatisMappers()) {
                configuration.addMapper(clazz);
            }
        }
    }

    public void initMybatisTypeHandlers(Configuration configuration) {
        // To be extended
    }

    public void initMyBatisLogSqlExecutionTimePlugin(Configuration configuration) {
        configuration.addInterceptor(new LogSqlExecutionTimePlugin());
    }

    public Configuration parseMybatisConfiguration(XMLConfigBuilder parser) {
        Configuration configuration = parser.parse();

        if (dependentEngineMybatisTypeAliasConfigs != null) {
            for (MybatisTypeAliasConfigurator typeAliasConfig : dependentEngineMybatisTypeAliasConfigs) {
                typeAliasConfig.configure(configuration.getTypeAliasRegistry());
            }
        }
        if (dependentEngineMybatisTypeHandlerConfigs != null) {
            for (MybatisTypeHandlerConfigurator typeHandlerConfig : dependentEngineMybatisTypeHandlerConfigs) {
                typeHandlerConfig.configure(configuration.getTypeHandlerRegistry());
            }
        }

        parseDependentEngineMybatisXMLMappers(configuration);
        parseCustomMybatisXMLMappers(configuration);
        return configuration;
    }

    public void parseCustomMybatisXMLMappers(Configuration configuration) {
        if (getCustomMybatisXMLMappers() != null) {
            for (String resource : getCustomMybatisXMLMappers()) {
                parseMybatisXmlMapping(configuration, resource);
            }
        }
    }

    public void parseDependentEngineMybatisXMLMappers(Configuration configuration) {
        if (getDependentEngineMyBatisXmlMappers() != null) {
            for (String resource : getDependentEngineMyBatisXmlMappers()) {
                parseMybatisXmlMapping(configuration, resource);
            }
        }
    }

    protected void parseMybatisXmlMapping(Configuration configuration, String resource) {
        // see XMLConfigBuilder.mapperElement()
        XMLMapperBuilder mapperParser = new XMLMapperBuilder(getResourceAsStream(resource), configuration, resource, configuration.getSqlFragments());
        mapperParser.parse();
    }

    // Loads a classpath resource via the configured class loader, falling back to
    // the loader of this class when none is configured.
    protected InputStream getResourceAsStream(String resource) {
        ClassLoader classLoader = getClassLoader();
        if (classLoader != null) {
            return getClassLoader().getResourceAsStream(resource);
        } else {
            return this.getClass().getClassLoader().getResourceAsStream(resource);
        }
    }

    public abstract InputStream getMyBatisXmlConfigurationStream();

    // getters and setters // //////////////////////////////////////////////////////

    public abstract String getEngineName();

    public ClassLoader getClassLoader() {
        return classLoader;
    }

    public AbstractEngineConfiguration setClassLoader(ClassLoader classLoader) {
        this.classLoader = classLoader;
        return this;
    }

    public boolean isUseClassForNameClassLoading() {
        return useClassForNameClassLoading;
    }

    public AbstractEngineConfiguration setUseClassForNameClassLoading(boolean useClassForNameClassLoading) {
        this.useClassForNameClassLoading = useClassForNameClassLoading;
        return this;
    }

    public String getDatabaseType() {
        return databaseType;
    }

    public AbstractEngineConfiguration setDatabaseType(String databaseType) {
        this.databaseType = databaseType;
        return this;
    }

    public
DataSource getDataSource() {
        return dataSource;
    }

    // Fluent accessors: each setter returns this to allow configuration chaining.

    public AbstractEngineConfiguration setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
        return this;
    }

    public DbSchemaManager getDbSchemaManager() {
        return dbSchemaManager;
    }

    public AbstractEngineConfiguration setDbSchemaManager(DbSchemaManager dbSchemaManager) {
        this.dbSchemaManager = dbSchemaManager;
        return this;
    }

    public DbSchemaManager getCommonDbSchemaManager() {
        return commonDbSchemaManager;
    }

    public AbstractEngineConfiguration setCommonDbSchemaManager(DbSchemaManager commonDbSchemaManager) {
        this.commonDbSchemaManager = commonDbSchemaManager;
        return this;
    }

    public String getJdbcDriver() {
        return jdbcDriver;
    }

    public AbstractEngineConfiguration setJdbcDriver(String jdbcDriver) {
        this.jdbcDriver = jdbcDriver;
        return this;
    }

    public String getJdbcUrl() {
        return jdbcUrl;
    }

    public AbstractEngineConfiguration setJdbcUrl(String jdbcUrl) {
        this.jdbcUrl = jdbcUrl;
        return this;
    }

    public String getJdbcUsername() {
        return jdbcUsername;
    }

    public AbstractEngineConfiguration setJdbcUsername(String jdbcUsername) {
        this.jdbcUsername = jdbcUsername;
        return this;
    }

    public String getJdbcPassword() {
        return jdbcPassword;
    }

    public AbstractEngineConfiguration setJdbcPassword(String jdbcPassword) {
        this.jdbcPassword = jdbcPassword;
        return this;
    }

    public int getJdbcMaxActiveConnections() {
        return jdbcMaxActiveConnections;
    }

    public AbstractEngineConfiguration setJdbcMaxActiveConnections(int jdbcMaxActiveConnections) {
        this.jdbcMaxActiveConnections = jdbcMaxActiveConnections;
        return this;
    }

    public int getJdbcMaxIdleConnections() {
        return jdbcMaxIdleConnections;
    }

    public AbstractEngineConfiguration setJdbcMaxIdleConnections(int jdbcMaxIdleConnections) {
        this.jdbcMaxIdleConnections = jdbcMaxIdleConnections;
        return this;
    }

    public int getJdbcMaxCheckoutTime() {
        return jdbcMaxCheckoutTime;
    }

    public AbstractEngineConfiguration setJdbcMaxCheckoutTime(int jdbcMaxCheckoutTime) {
        this.jdbcMaxCheckoutTime = jdbcMaxCheckoutTime;
        return this;
    }

    public int getJdbcMaxWaitTime() {
        return jdbcMaxWaitTime;
    }

    public AbstractEngineConfiguration setJdbcMaxWaitTime(int jdbcMaxWaitTime) {
        this.jdbcMaxWaitTime = jdbcMaxWaitTime;
        return this;
    }

    public boolean isJdbcPingEnabled() {
        return jdbcPingEnabled;
    }

    public AbstractEngineConfiguration setJdbcPingEnabled(boolean jdbcPingEnabled) {
        this.jdbcPingEnabled = jdbcPingEnabled;
        return this;
    }

    public int getJdbcPingConnectionNotUsedFor() {
        return jdbcPingConnectionNotUsedFor;
    }

    public AbstractEngineConfiguration setJdbcPingConnectionNotUsedFor(int jdbcPingConnectionNotUsedFor) {
        this.jdbcPingConnectionNotUsedFor = jdbcPingConnectionNotUsedFor;
        return this;
    }

    public int getJdbcDefaultTransactionIsolationLevel() {
        return jdbcDefaultTransactionIsolationLevel;
    }

    public AbstractEngineConfiguration setJdbcDefaultTransactionIsolationLevel(int jdbcDefaultTransactionIsolationLevel) {
        this.jdbcDefaultTransactionIsolationLevel = jdbcDefaultTransactionIsolationLevel;
        return this;
    }

    public String getJdbcPingQuery() {
        return jdbcPingQuery;
    }

    public AbstractEngineConfiguration setJdbcPingQuery(String jdbcPingQuery) {
        this.jdbcPingQuery = jdbcPingQuery;
        return this;
    }

    public String getDataSourceJndiName() {
        return dataSourceJndiName;
    }

    public AbstractEngineConfiguration setDataSourceJndiName(String dataSourceJndiName) {
        this.dataSourceJndiName = dataSourceJndiName;
        return this;
    }

    public CommandConfig getSchemaCommandConfig() {
        return schemaCommandConfig;
    }

    public AbstractEngineConfiguration setSchemaCommandConfig(CommandConfig schemaCommandConfig) {
        this.schemaCommandConfig = schemaCommandConfig;
        return this;
    }

    public boolean isTransactionsExternallyManaged() {
        return transactionsExternallyManaged;
    }

    public AbstractEngineConfiguration setTransactionsExternallyManaged(boolean transactionsExternallyManaged) {
        this.transactionsExternallyManaged = transactionsExternallyManaged;
        return this;
    }

    public Map<Object, Object> getBeans() {
        return beans;
    }

    public AbstractEngineConfiguration setBeans(Map<Object, Object> beans) {
        this.beans = beans;
        return this;
    }

    public IdGenerator getIdGenerator() {
        return idGenerator;
    }

    public AbstractEngineConfiguration setIdGenerator(IdGenerator idGenerator) {
        this.idGenerator = idGenerator;
        return this;
    }

    public String getXmlEncoding() {
        return xmlEncoding;
    }

    public AbstractEngineConfiguration setXmlEncoding(String xmlEncoding) {
        this.xmlEncoding = xmlEncoding;
        return this;
    }

    public CommandConfig getDefaultCommandConfig() {
        return defaultCommandConfig;
    }

    public AbstractEngineConfiguration setDefaultCommandConfig(CommandConfig defaultCommandConfig) {
        this.defaultCommandConfig = defaultCommandConfig;
        return this;
    }

    public CommandExecutor getCommandExecutor() {
        return commandExecutor;
    }

    public AbstractEngineConfiguration setCommandExecutor(CommandExecutor commandExecutor) {
        this.commandExecutor = commandExecutor;
        return this;
    }

    public CommandContextFactory getCommandContextFactory() {
        return commandContextFactory;
    }

    public AbstractEngineConfiguration setCommandContextFactory(CommandContextFactory commandContextFactory) {
        this.commandContextFactory = commandContextFactory;
        return this;
    }

    public CommandInterceptor getCommandInvoker() {
        return commandInvoker;
    }

    public AbstractEngineConfiguration setCommandInvoker(CommandInterceptor commandInvoker) {
        this.commandInvoker = commandInvoker;
        return this;
    }

    public List<CommandInterceptor> getCustomPreCommandInterceptors() {
        return customPreCommandInterceptors;
    }

    public AbstractEngineConfiguration setCustomPreCommandInterceptors(List<CommandInterceptor> customPreCommandInterceptors) {
        this.customPreCommandInterceptors = customPreCommandInterceptors;
        return this;
    }

    public List<CommandInterceptor> getCustomPostCommandInterceptors() {
        return customPostCommandInterceptors;
    }

    public AbstractEngineConfiguration setCustomPostCommandInterceptors(List<CommandInterceptor> customPostCommandInterceptors) {
        this.customPostCommandInterceptors =
customPostCommandInterceptors; return this; } public List<CommandInterceptor> getCommandInterceptors() { return commandInterceptors; } public AbstractEngineConfiguration setCommandInterceptors(List<CommandInterceptor> commandInterceptors) { this.commandInterceptors = commandInterceptors; return this; } public Map<String, AbstractEngineConfiguration> getEngineConfigurations() { return engineConfigurations; } public AbstractEngineConfiguration setEngineConfigurations(Map<String, AbstractEngineConfiguration> engineConfigurations) { this.engineConfigurations = engineConfigurations; return this; } public void addEngineConfiguration(String key, AbstractEngineConfiguration engineConfiguration) { if (engineConfigurations == null) { engineConfigurations = new HashMap<>(); } engineConfigurations.put(key, engineConfiguration); } public Map<String, AbstractServiceConfiguration> getServiceConfigurations() { return serviceConfigurations; } public AbstractEngineConfiguration setServiceConfigurations(Map<String, AbstractServiceConfiguration> serviceConfigurations) { this.serviceConfigurations = serviceConfigurations; return this; } public void addServiceConfiguration(String key, AbstractServiceConfiguration serviceConfiguration) { if (serviceConfigurations == null) { serviceConfigurations = new HashMap<>(); } serviceConfigurations.put(key, serviceConfiguration); } public void setDefaultCommandInterceptors(Collection<? 
extends CommandInterceptor> defaultCommandInterceptors) { this.defaultCommandInterceptors = defaultCommandInterceptors; } public SqlSessionFactory getSqlSessionFactory() { return sqlSessionFactory; } public AbstractEngineConfiguration setSqlSessionFactory(SqlSessionFactory sqlSessionFactory) { this.sqlSessionFactory = sqlSessionFactory; return this; } public boolean isDbHistoryUsed() { return isDbHistoryUsed; } public AbstractEngineConfiguration setDbHistoryUsed(boolean isDbHistoryUsed) { this.isDbHistoryUsed = isDbHistoryUsed; return this; } public DbSqlSessionFactory getDbSqlSessionFactory() { return dbSqlSessionFactory; } public AbstractEngineConfiguration setDbSqlSessionFactory(DbSqlSessionFactory dbSqlSessionFactory) { this.dbSqlSessionFactory = dbSqlSessionFactory; return this; } public TransactionFactory getTransactionFactory() { return transactionFactory; } public AbstractEngineConfiguration setTransactionFactory(TransactionFactory transactionFactory) { this.transactionFactory = transactionFactory; return this; } public TransactionContextFactory getTransactionContextFactory() { return transactionContextFactory; } public AbstractEngineConfiguration setTransactionContextFactory(TransactionContextFactory transactionContextFactory) { this.transactionContextFactory = transactionContextFactory; return this; } public int getMaxNrOfStatementsInBulkInsert() { return maxNrOfStatementsInBulkInsert; } public AbstractEngineConfiguration setMaxNrOfStatementsInBulkInsert(int maxNrOfStatementsInBulkInsert) { this.maxNrOfStatementsInBulkInsert = maxNrOfStatementsInBulkInsert; return this; } public boolean isBulkInsertEnabled() { return isBulkInsertEnabled; } public AbstractEngineConfiguration setBulkInsertEnabled(boolean isBulkInsertEnabled) { this.isBulkInsertEnabled = isBulkInsertEnabled; return this; } public Set<Class<?>> getCustomMybatisMappers() { return customMybatisMappers; } public AbstractEngineConfiguration setCustomMybatisMappers(Set<Class<?>> 
customMybatisMappers) { this.customMybatisMappers = customMybatisMappers; return this; } public Set<String> getCustomMybatisXMLMappers() { return customMybatisXMLMappers; } public AbstractEngineConfiguration setCustomMybatisXMLMappers(Set<String> customMybatisXMLMappers) { this.customMybatisXMLMappers = customMybatisXMLMappers; return this; } public Set<String> getDependentEngineMyBatisXmlMappers() { return dependentEngineMyBatisXmlMappers; } public AbstractEngineConfiguration setDependentEngineMyBatisXmlMappers(Set<String> dependentEngineMyBatisXmlMappers) { this.dependentEngineMyBatisXmlMappers = dependentEngineMyBatisXmlMappers; return this; } public List<MybatisTypeAliasConfigurator> getDependentEngineMybatisTypeAliasConfigs() { return dependentEngineMybatisTypeAliasConfigs; } public AbstractEngineConfiguration setDependentEngineMybatisTypeAliasConfigs(List<MybatisTypeAliasConfigurator> dependentEngineMybatisTypeAliasConfigs) { this.dependentEngineMybatisTypeAliasConfigs = dependentEngineMybatisTypeAliasConfigs; return this; } public List<MybatisTypeHandlerConfigurator> getDependentEngineMybatisTypeHandlerConfigs() { return dependentEngineMybatisTypeHandlerConfigs; } public AbstractEngineConfiguration setDependentEngineMybatisTypeHandlerConfigs(List<MybatisTypeHandlerConfigurator> dependentEngineMybatisTypeHandlerConfigs) { this.dependentEngineMybatisTypeHandlerConfigs = dependentEngineMybatisTypeHandlerConfigs; return this; } public List<SessionFactory> getCustomSessionFactories() { return customSessionFactories; } public AbstractEngineConfiguration setCustomSessionFactories(List<SessionFactory> customSessionFactories) { this.customSessionFactories = customSessionFactories; return this; } public boolean isUsingRelationalDatabase() { return usingRelationalDatabase; } public AbstractEngineConfiguration setUsingRelationalDatabase(boolean usingRelationalDatabase) { this.usingRelationalDatabase = usingRelationalDatabase; return this; } public String 
getDatabaseTablePrefix() { return databaseTablePrefix; } public AbstractEngineConfiguration setDatabaseTablePrefix(String databaseTablePrefix) { this.databaseTablePrefix = databaseTablePrefix; return this; } public String getDatabaseWildcardEscapeCharacter() { return databaseWildcardEscapeCharacter; } public AbstractEngineConfiguration setDatabaseWildcardEscapeCharacter(String databaseWildcardEscapeCharacter) { this.databaseWildcardEscapeCharacter = databaseWildcardEscapeCharacter; return this; } public String getDatabaseCatalog() { return databaseCatalog; } public AbstractEngineConfiguration setDatabaseCatalog(String databaseCatalog) { this.databaseCatalog = databaseCatalog; return this; } public String getDatabaseSchema() { return databaseSchema; } public AbstractEngineConfiguration setDatabaseSchema(String databaseSchema) { this.databaseSchema = databaseSchema; return this; } public boolean isTablePrefixIsSchema() { return tablePrefixIsSchema; } public AbstractEngineConfiguration setTablePrefixIsSchema(boolean tablePrefixIsSchema) { this.tablePrefixIsSchema = tablePrefixIsSchema; return this; } public boolean isEnableLogSqlExecutionTime() { return enableLogSqlExecutionTime; } public void setEnableLogSqlExecutionTime(boolean enableLogSqlExecutionTime) { this.enableLogSqlExecutionTime = enableLogSqlExecutionTime; } public Map<Class<?>, SessionFactory> getSessionFactories() { return sessionFactories; } public AbstractEngineConfiguration setSessionFactories(Map<Class<?>, SessionFactory> sessionFactories) { this.sessionFactories = sessionFactories; return this; } public String getDatabaseSchemaUpdate() { return databaseSchemaUpdate; } public AbstractEngineConfiguration setDatabaseSchemaUpdate(String databaseSchemaUpdate) { this.databaseSchemaUpdate = databaseSchemaUpdate; return this; } public boolean isEnableEventDispatcher() { return enableEventDispatcher; } public AbstractEngineConfiguration setEnableEventDispatcher(boolean enableEventDispatcher) { 
this.enableEventDispatcher = enableEventDispatcher; return this; } public FlowableEventDispatcher getEventDispatcher() { return eventDispatcher; } public AbstractEngineConfiguration setEventDispatcher(FlowableEventDispatcher eventDispatcher) { this.eventDispatcher = eventDispatcher; return this; } public List<FlowableEventListener> getEventListeners() { return eventListeners; } public AbstractEngineConfiguration setEventListeners(List<FlowableEventListener> eventListeners) { this.eventListeners = eventListeners; return this; } public Map<String, List<FlowableEventListener>> getTypedEventListeners() { return typedEventListeners; } public AbstractEngineConfiguration setTypedEventListeners(Map<String, List<FlowableEventListener>> typedEventListeners) { this.typedEventListeners = typedEventListeners; return this; } public List<EventDispatchAction> getAdditionalEventDispatchActions() { return additionalEventDispatchActions; } public AbstractEngineConfiguration setAdditionalEventDispatchActions(List<EventDispatchAction> additionalEventDispatchActions) { this.additionalEventDispatchActions = additionalEventDispatchActions; return this; } public Clock getClock() { return clock; } public AbstractEngineConfiguration setClock(Clock clock) { this.clock = clock; return this; } public int getMaxLengthString() { if (maxLengthStringVariableType == -1) { if ("oracle".equalsIgnoreCase(databaseType)) { return DEFAULT_ORACLE_MAX_LENGTH_STRING; } else { return DEFAULT_GENERIC_MAX_LENGTH_STRING; } } else { return maxLengthStringVariableType; } } public int getMaxLengthStringVariableType() { return maxLengthStringVariableType; } public AbstractEngineConfiguration setMaxLengthStringVariableType(int maxLengthStringVariableType) { this.maxLengthStringVariableType = maxLengthStringVariableType; return this; } public List<EngineDeployer> getDeployers() { return deployers; } public AbstractEngineConfiguration setDeployers(List<EngineDeployer> deployers) { this.deployers = deployers; return 
this; } public List<EngineDeployer> getCustomPreDeployers() { return customPreDeployers; } public AbstractEngineConfiguration setCustomPreDeployers(List<EngineDeployer> customPreDeployers) { this.customPreDeployers = customPreDeployers; return this; } public List<EngineDeployer> getCustomPostDeployers() { return customPostDeployers; } public AbstractEngineConfiguration setCustomPostDeployers(List<EngineDeployer> customPostDeployers) { this.customPostDeployers = customPostDeployers; return this; } }
/*
 * Copyright 2019 http://www.hswebframework.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.hswebframework.web.dictionary.entity;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Getter;
import lombok.Setter;
import org.hswebframework.ezorm.rdb.mapping.annotation.Comment;
import org.hswebframework.ezorm.rdb.mapping.annotation.DefaultValue;
import org.hswebframework.web.api.crud.entity.GenericEntity;
import org.hswebframework.web.api.crud.entity.RecordCreationEntity;
import org.hswebframework.web.dict.DictDefine;
import org.hswebframework.web.dict.defaults.DefaultDictDefine;

import javax.persistence.Column;
import javax.persistence.Table;
import javax.validation.constraints.NotBlank;
import java.util.List;

/**
 * Data dictionary entity, persisted in the {@code s_dictionary} table.
 * Accessors are generated by Lombok ({@code @Getter}/{@code @Setter}).
 *
 * @author hsweb-generator-online
 */
@Table(name = "s_dictionary")
@Comment("数据字典")
@Getter
@Setter
public class DictionaryEntity extends GenericEntity<String> implements RecordCreationEntity {

    // Dictionary name (required, non-blank)
    @Column(nullable = false)
    @NotBlank(message = "名称不能为空")
    @Schema(description = "字典名称")
    private String name;

    // Classification identifier used to group dictionaries
    @Column(length = 64, name = "classified")
    @Schema(description = "分类标识")
    private String classified;

    // Free-form description
    @Column
    @Schema(description = "说明")
    private String describe;

    // Creation time (epoch millis presumably — TODO confirm against RecordCreationEntity contract)
    @Column(name = "create_time")
    @Schema(description = "创建时间")
    private Long createTime;

    // Creator's user id
    @Column(name = "creator_id")
    @Schema(description = "创建人ID")
    private String creatorId;

    // Status flag: 0 = disabled, 1 = enabled (defaults to 1 at the DB level)
    @Column(name = "status")
    @DefaultValue("1")
    @Schema(description = "状态,0禁用,1启用")
    private Byte status;

    // Dictionary item options; not annotated with @Column, so not mapped as a table column here
    private List<DictionaryItemEntity> items;

    /**
     * Converts this entity into a {@link DictDefine}, using the entity id as the dictionary id
     * and the name as its alias.
     */
    public DictDefine toDictDefine() {
        return DefaultDictDefine
                .builder()
                .id(this.getId())
                .alias(this.getName())
                .comments(this.getDescribe())
                .items(this.getItems())
                .build();
    }
}
/*
 * Copyright 2018 NEOautus Ltd. (http://neoautus.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.lucidj.telnetd;

import org.apache.felix.gogo.jline.Shell;
import org.apache.felix.service.command.CommandProcessor;
import org.apache.felix.service.command.CommandSession;
import org.jline.terminal.Size;
import org.jline.terminal.Terminal;
import org.jline.terminal.TerminalBuilder;
import org.lucidj.api.admind.Task;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

/**
 * Admind task that bridges a telnet connection to a Gogo (Felix) shell session,
 * using JLine for terminal handling. In-band bytes in the range 0xF0-0xFF are
 * interpreted as terminfo parameter markers (terminal type and size) and are
 * filtered out of the stream before reaching the shell.
 */
class GogoTask implements Task, Shell.Context
{
    private final static Logger log = LoggerFactory.getLogger (GogoTask.class);

    // Terminfo parameter indexes carried by the in-band escape protocol (low nibble of the marker byte)
    private final static int TERMINFO_TYPE = 0;
    private final static int TERMINFO_SIZE = 1;

    // Arguments passed to gosh when launching the shell
    private final static String[] GOSH_ARGV =
    {
        "--login",      // Please?
        "--noshutdown"  // Do NOT shutdown framework when telnet connection is closed :O
    };

    private BundleContext context;
    private Terminal terminal;
    private InputStream in;
    private OutputStream out;
    private OutputStream err;

    // Parser state for the in-band terminfo protocol: -1 / null means "not capturing"
    private int terminfo_index = -1;
    private StringBuffer terminfo_buf;

    // Terminal parameters, with conservative defaults until the client reports them
    private String terminfo_type = "dumb";
    private int terminfo_width = 80;
    private int terminfo_height = 25;

    // NOTE(review): name and options are currently unused by this task.
    public GogoTask (BundleContext context, InputStream in, OutputStream out, OutputStream err, String name, String[] options)
    {
        this.context = context;
        this.in = in;
        this.out = out;
        this.err = err;
    }

    /**
     * Looks up the Gogo CommandProcessor service, creates a session wired to the given
     * terminal and runs the shell until it exits. The service reference is always released.
     */
    private void run_gosh (Terminal terminal)
    {
        ServiceReference<CommandProcessor> cp_ref = null;

        try
        {
            if ((cp_ref = context.getServiceReference (CommandProcessor.class)) == null)
            {
                terminal.writer ().println ("Command Processor not available");
                return;
            }

            CommandProcessor cp = context.getService (cp_ref);
            CommandSession session = cp.createSession (terminal.input (), terminal.output (), terminal.output ());
            session.put (Shell.VAR_TERMINAL, terminal);
            //environment.forEach(session::put);
            new Shell (this, cp).gosh (session, GOSH_ARGV);
        }
        catch (Exception e)
        {
            terminal.writer ().println ("Error running Command Processor: " + e.toString());
        }
        finally
        {
            if (cp_ref != null)
            {
                context.ungetService (cp_ref);
            }
        }
    }

    /**
     * Applies a completed terminfo parameter: either the terminal type string or the
     * "width;height" size pair. A size update is propagated to the live terminal via WINCH.
     */
    private void terminfo_event (int index, String value)
    {
        log.debug ("terminfo_event: index={} value='{}'", index, value);

        switch (index)
        {
            case TERMINFO_TYPE:
            {
                log.debug ("Terminal type: {}", value);
                terminfo_type = value;
                break;
            }
            case TERMINFO_SIZE:
            {
                // Value format is "<width>;<height>"
                String[] args = value.split (";");
                terminfo_width = Integer.parseInt (args [0]);
                terminfo_height = Integer.parseInt (args [1]);
                log.debug ("Terminal size: {} x {}", terminfo_width, terminfo_height);

                if (terminal != null)
                {
                    terminal.setSize (new Size (terminfo_width, terminfo_height));
                    terminal.raise (Terminal.Signal.WINCH);
                }
                break;
            }
        }
    }

    /**
     * Filters one input byte. Bytes 0xF0-0xFE start capturing a terminfo field whose index
     * is the marker's low nibble; 0xFF ends the current capture. Any marker also flushes a
     * capture already in progress via terminfo_event(). Returns the byte to pass through,
     * or -1 when the byte was consumed by the protocol.
     */
    private int filter_terminfo_sequences (int ch)
    {
        log.debug ("filter: {} {} -> {}", ch, Integer.toHexString(ch), (char)ch);

        if (ch >= 0xf0 && ch <= 0xff)
        {
            // Any marker byte terminates a capture in progress
            if (terminfo_buf != null)
            {
                terminfo_event (terminfo_index, terminfo_buf.toString ());
                terminfo_index = -1;
                terminfo_buf = null;
            }

            // 0xF0-0xFE start a new capture; 0xFF is a pure terminator
            if (ch != 0xff)
            {
                terminfo_index = ch & 0x0f;
                terminfo_buf = new StringBuffer ();
            }
        }
        else if (terminfo_buf != null)
        {
            // Currently capturing: accumulate the byte into the terminfo value
            terminfo_buf.append ((char)ch);
        }
        else
        {
            return (ch);
        }
        return (-1); // Skip this char
    }

    @Override // Task
    public boolean run () throws Exception
    {
        // Wrap TelnetIO input with an InputStream that blocks (poll + sleep) instead of
        // returning EOF, and strips terminfo escape sequences from the byte stream.
        InputStream soft_in = new InputStream ()
        {
            @Override
            public int read () throws IOException
            {
                try
                {
                    int ch = -1;

                    while (ch == -1)
                    {
                        // Do NOT run into EOF and filter out terminfo sequences
                        if (in.available() == 0)
                        {
                            try
                            {
                                Thread.sleep (20);
                            }
                            catch (InterruptedException ignore) {};
                        }
                        else // We have available chars to read
                        {
                            ch = filter_terminfo_sequences (in.read () & 0xff);
                        }
                    }
                    return (ch);
                }
                catch (IOException e)
                {
                    // We assume the connection has closed
                    return (-1);
                }
            }
        };

        // Synchronize terminfo parameters: consume input (including the client's terminfo
        // announcements) until the first carriage return.
        while (soft_in.read () != '\r'); // todo: timeout

        terminal = TerminalBuilder.builder ()
            .type (terminfo_type)
            .streams (soft_in, out)
            .system (false)
            .name ("gosh")
            .size (new Size (terminfo_width, terminfo_height))
            .build ();

        // Enter the shell; returns when the session ends
        run_gosh (terminal);
        log.debug ("Connection closed");
        soft_in.close ();
        return (true);
    }

    @Override // Shell.Context
    public String getProperty (String s)
    {
        return (System.getProperty (s));
    }

    @Override // Shell.Context
    public void exit ()
    {
        try
        {
            terminal.close ();
        }
        catch (IOException ignore) {};
    }
}

// EOF
package net.voxelindustry.voidheart.client.model.portalframe;

import net.minecraft.client.texture.Sprite;
import net.minecraft.client.texture.SpriteAtlasTexture;
import net.minecraft.client.util.SpriteIdentifier;
import net.minecraft.util.Identifier;
import net.voxelindustry.voidheart.common.block.PortalFrameStateProperties.FrameConnection;

import java.text.NumberFormat;
import java.util.function.Consumer;

import static net.voxelindustry.voidheart.VoidHeart.MODID;

/**
 * Registry and lookup table for portal-frame "vein" sprites.
 *
 * 60 frame sprites ("portal_00".."portal_59") and 4 core sprites ("core_0".."core_3") are
 * registered by identifier, then resolved to atlas sprites in {@link #loadSprites()}.
 * Frame sprites are addressed by arithmetic offsets:
 * the first 30 entries appear to be the background set and the next 30 (offset 30) the
 * overlay set, with index pairs selected by which neighbours are connected
 * — assumption inferred from the index math below; TODO confirm against the texture atlas.
 */
public class PortalFrameVeinSpriteManager
{
    private static final SpriteIdentifier[] FRAME_SPRITE_IDENTIFIERS = new SpriteIdentifier[60];
    private static final Sprite[] FRAME_SPRITES = new Sprite[60];

    private static final SpriteIdentifier[] CORE_SPRITE_IDENTIFIERS = new SpriteIdentifier[4];
    private static final Sprite[] CORE_SPRITES = new Sprite[4];

    // Fallback sprite used when a side has no applicable frame sprite
    private static Sprite VOIDBRICK_SPRITE;

    static
    {
        // Zero-pad frame indexes to two digits to match the texture file names (portal_00 ... portal_59)
        var format = NumberFormat.getInstance();
        format.setMinimumIntegerDigits(2);

        for (int i = 0; i < 60; i++)
        {
            FRAME_SPRITE_IDENTIFIERS[i] = getFrameSpriteIdentifier("portal_" + format.format(i));
        }

        for (int i = 0; i < 4; i++)
        {
            CORE_SPRITE_IDENTIFIERS[i] = getCoreSpriteIdentifier("core_" + i);
        }
    }

    /** Registers every frame and core sprite texture with the given atlas registrar. */
    public static void registerSprites(Consumer<Identifier> spriteRegistrar)
    {
        for (var spriteIdentifier : FRAME_SPRITE_IDENTIFIERS)
            spriteRegistrar.accept(spriteIdentifier.getTextureId());

        for (SpriteIdentifier coreSpriteIdentifier : CORE_SPRITE_IDENTIFIERS)
            spriteRegistrar.accept(coreSpriteIdentifier.getTextureId());
    }

    /** Resolves all registered identifiers to atlas sprites. Must run after atlas stitching. */
    public static void loadSprites()
    {
        for (int i = 0; i < 60; i++)
            FRAME_SPRITES[i] = FRAME_SPRITE_IDENTIFIERS[i].getSprite();

        for (int i = 0; i < 4; i++)
            CORE_SPRITES[i] = CORE_SPRITE_IDENTIFIERS[i].getSprite();

        VOIDBRICK_SPRITE = new SpriteIdentifier(SpriteAtlasTexture.BLOCK_ATLAS_TEXTURE, new Identifier(MODID, "block/voidstone_bricks")).getSprite();
    }

    public static SpriteIdentifier[] getFrameSpriteIdentifiers()
    {
        return FRAME_SPRITE_IDENTIFIERS;
    }

    public static Sprite getCoreSprite()
    {
        return CORE_SPRITES[0];
    }

    public static Sprite getCoreOverlaySprite()
    {
        return CORE_SPRITES[3];
    }

    public static Sprite getCoreBrokenOverlaySprite()
    {
        return CORE_SPRITES[2];
    }

    // Front-face background: base index 28 within the background set, plus variant
    public static Sprite getBackgroundSpriteForFront(int variant)
    {
        return FRAME_SPRITES[28 + variant];
    }

    // Front-face overlay: same base index 28, shifted by 30 into the overlay set
    public static Sprite getOverlaySpriteForFront(int variant)
    {
        return FRAME_SPRITES[30 + 28 + variant];
    }

    /** Overlay sprite for a side face; offset 30 selects the overlay half of the table. */
    public static Sprite getOverlaySpriteForSide(FrameConnection left,
                                                 FrameConnection right,
                                                 FrameConnection up,
                                                 FrameConnection down,
                                                 int variant)
    {
        return getSpriteForSide(left, right, up, down, variant, 30);
    }

    /**
     * Background sprite for a side face; falls back to the voidstone brick sprite when no
     * connection-specific sprite applies.
     */
    public static Sprite getBackgroundSpriteForSide(FrameConnection left,
                                                    FrameConnection right,
                                                    FrameConnection up,
                                                    FrameConnection down,
                                                    int variant)
    {
        var sprite = getSpriteForSide(left, right, up, down, variant, 0);

        if (sprite == null)
            return VOIDBRICK_SPRITE;
        return sprite;
    }

    /**
     * Core lookup: maps the four neighbour connection states to a sprite index.
     * Index bases (within either the background or overlay half, chosen by {@code offset}):
     * 0-3 corner pieces, 4-7 single-side pieces (+variant), 8-11 interior-left/right pieces,
     * 28-29 interior/front pieces. Returns null when nothing is connected or no case matches.
     */
    private static Sprite getSpriteForSide(FrameConnection left,
                                           FrameConnection right,
                                           FrameConnection up,
                                           FrameConnection down,
                                           int variant,
                                           int offset)
    {
        // Fully disconnected: caller decides the fallback (voidbrick for backgrounds)
        if (!left.isConnected() && !right.isConnected() && !up.isConnected() && !down.isConnected())
            return null;

        // Opposite interiors (horizontal or vertical pass-through)
        if (left == FrameConnection.INTERIOR && right == FrameConnection.INTERIOR || up == FrameConnection.INTERIOR && down == FrameConnection.INTERIOR)
            return FRAME_SPRITES[28 + variant + offset];

        if (left == FrameConnection.INTERIOR)
        {
            if (right == FrameConnection.FRAME)
                return FRAME_SPRITES[28 + variant + offset];
            return FRAME_SPRITES[10 + variant + offset];
        }
        if (right == FrameConnection.INTERIOR)
        {
            if (left == FrameConnection.FRAME)
                return FRAME_SPRITES[28 + variant + offset];
            return FRAME_SPRITES[8 + variant + offset];
        }

        if (down == FrameConnection.INTERIOR)
            return FRAME_SPRITES[4 + variant + offset];
        if (up == FrameConnection.INTERIOR)
            return FRAME_SPRITES[6 + variant + offset];

        // Frame-to-frame junctions (no interior neighbour)
        if (left == FrameConnection.FRAME)
        {
            if (right == FrameConnection.FRAME)
                return up == FrameConnection.FRAME ? FRAME_SPRITES[6 + variant + offset] : FRAME_SPRITES[4 + variant + offset];
            if (up == FrameConnection.FRAME)
                return FRAME_SPRITES[2 + offset];
            if (down == FrameConnection.FRAME)
                return FRAME_SPRITES[0 + offset];
        }

        if (right == FrameConnection.FRAME)
        {
            if (up == FrameConnection.FRAME)
                return FRAME_SPRITES[3 + offset];
            if (down == FrameConnection.FRAME)
                return FRAME_SPRITES[1 + offset];
        }

        return null;
    }

    private static SpriteIdentifier getFrameSpriteIdentifier(String from)
    {
        return new SpriteIdentifier(SpriteAtlasTexture.BLOCK_ATLAS_TEXTURE, new Identifier(MODID, "block/portal/" + from));
    }

    private static SpriteIdentifier getCoreSpriteIdentifier(String from)
    {
        return new SpriteIdentifier(SpriteAtlasTexture.BLOCK_ATLAS_TEXTURE, new Identifier(MODID, "block/portalcore/" + from));
    }
}
/*
 * Copyright 2015 DECOIT GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.decoit.simu.cbor.ifmap.simu.deserializer.identifier;

import co.nstant.in.cbor.CborDecoder;
import co.nstant.in.cbor.model.Array;
import co.nstant.in.cbor.model.DataItem;
import de.decoit.simu.cbor.ifmap.simu.AbstractSimuTestBase;
import de.decoit.simu.cbor.ifmap.simu.identifier.CBORVulnerability;
import de.decoit.simu.cbor.ifmap.simu.util.SimuNamespaces;
import de.decoit.simu.cbor.xml.dictionary.DictionaryProvider;
import de.decoit.simu.cbor.xml.dictionary.DictionarySimpleElement;
import java.io.ByteArrayInputStream;
import java.util.List;
import javax.xml.bind.DatatypeConverter;
import lombok.extern.slf4j.Slf4j;
import org.junit.Test;
import static org.junit.Assert.*;

/**
 *
 * @author Thomas Rix (rix@decoit.de)
 */
@Slf4j
public class VulnerabilityDeserializerTest extends AbstractSimuTestBase {
	private final String administrativeDomain = "simu-adm";
	private final String id = "vuln-id";
	private final String type = "vuln-type";
	private final Double severity = 42.0;

	/**
	 * Decodes the given CBOR hex dump and feeds the attribute and value arrays of the
	 * top-level structure into the vulnerability deserializer.
	 */
	private CBORVulnerability decodeVulnerability(String hexInput) throws Exception {
		byte[] cborBytes = DatatypeConverter.parseHexBinary(hexInput);
		List<DataItem> dataItems = new CborDecoder(new ByteArrayInputStream(cborBytes)).decode();
		Array rootArray = (Array) dataItems.get(0);

		// Resolve the dictionary entry for the vulnerability element in the SIMU namespace
		String dictionaryPath = "<" + SimuNamespaces.SIMU + ">" + CBORVulnerability.XML_NAME;
		DictionarySimpleElement dictionaryEntry = DictionaryProvider.getInstance().findElementByPath(dictionaryPath);

		return VulnerabilityDeserializer
				.getInstance()
				.deserialize((Array) rootArray.getDataItems().get(2),
						(Array) rootArray.getDataItems().get(3),
						dictionaryEntry);
	}

	/** Deserialization of a vulnerability without the optional severity attribute. */
	@Test
	public void testDeserialize() throws Exception {
		CBORVulnerability result = decodeVulnerability("8404D9A4100286006873696D752D61646D026776756C6E2D6964016976" +
				"756C6E2D7479706580");

		assertEquals(administrativeDomain, result.getAdministrativeDomain());
		assertEquals(type, result.getType());
		assertEquals(id, result.getId());
		assertNull(result.getSeverity());
	}

	/** Deserialization of a vulnerability with all attributes, including severity. */
	@Test
	public void testDeserialize_full() throws Exception {
		CBORVulnerability result = decodeVulnerability("8404D9A4100288006873696D752D61646D026776756C6E2D6964016976" +
				"756C6E2D7479706503FB404500000000000080");

		assertEquals(administrativeDomain, result.getAdministrativeDomain());
		assertEquals(type, result.getType());
		assertEquals(id, result.getId());
		assertEquals(severity, result.getSeverity());
	}
}
package org.keycloak.adapters.wildfly;

import io.undertow.security.api.SecurityContext;
import io.undertow.server.HttpServerExchange;
import org.jboss.logging.Logger;
import org.jboss.security.NestableGroup;
import org.jboss.security.SecurityConstants;
import org.jboss.security.SecurityContextAssociation;
import org.jboss.security.SimpleGroup;
import org.jboss.security.SimplePrincipal;
import org.keycloak.adapters.AdapterTokenStore;
import org.keycloak.adapters.HttpFacade;
import org.keycloak.adapters.KeycloakDeployment;
import org.keycloak.adapters.undertow.KeycloakUndertowAccount;
import org.keycloak.adapters.undertow.ServletRequestAuthenticator;
import org.keycloak.adapters.undertow.UndertowUserSessionManagement;

import javax.security.auth.Subject;
import java.security.Principal;
import java.security.acl.Group;
import java.util.Collection;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Set;

/**
 * Request authenticator that propagates the Keycloak account into the WildFly/JBoss
 * security layer: it builds a JAAS {@link Subject} carrying the user principal, the
 * role groups and the CallerPrincipal group, and installs it on the active
 * {@link org.jboss.security.SecurityContext}.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class WildflyRequestAuthenticator extends ServletRequestAuthenticator {
    protected static Logger log = Logger.getLogger(WildflyRequestAuthenticator.class);

    public WildflyRequestAuthenticator(HttpFacade facade, KeycloakDeployment deployment, int sslRedirectPort,
                                       SecurityContext securityContext, HttpServerExchange exchange,
                                       AdapterTokenStore tokenStore) {
        super(facade, deployment, sslRedirectPort, securityContext, exchange, tokenStore);
    }

    /**
     * After the Undertow-level propagation, mirrors the authenticated account into a JAAS
     * Subject (principal, role groups, CallerPrincipal group) and registers it with the
     * thread-associated JBoss security context.
     */
    @Override
    protected void propagateKeycloakContext(KeycloakUndertowAccount account) {
        super.propagateKeycloakContext(account);
        SecurityInfoHelper.propagateSessionInfo(account);
        log.debug("propagate security context to wildfly");
        Subject subject = new Subject();
        Set<Principal> principals = subject.getPrincipals();
        principals.add(account.getPrincipal());
        for (Group group : getRoleSets(account.getRoles())) {
            Group subjectGroup = createGroup(group.getName(), principals);
            if (subjectGroup instanceof NestableGroup) {
                // A NestableGroup only allows Groups to be added to it, so we add a
                // SimpleGroup to subjectRoles to contain the actual role principals.
                SimpleGroup tmp = new SimpleGroup("Roles");
                subjectGroup.addMember(tmp);
                subjectGroup = tmp;
            }
            // Copy the group members to the Subject group
            Enumeration<? extends Principal> members = group.members();
            while (members.hasMoreElements()) {
                subjectGroup.addMember(members.nextElement());
            }
        }
        // add the CallerPrincipal group if none has been added in getRoleSets
        Group callerGroup = new SimpleGroup(SecurityConstants.CALLER_PRINCIPAL_GROUP);
        callerGroup.addMember(account.getPrincipal());
        principals.add(callerGroup);
        org.jboss.security.SecurityContext sc = SecurityContextAssociation.getSecurityContext();
        Principal userPrincipal = getPrincipal(subject);
        sc.getUtil().createSubjectInfo(userPrincipal, account, subject);
    }

    /**
     * Get the Principal given the authenticated Subject. Currently the first principal that is
     * not of type {@code Group} is considered, unless a single principal is found inside the
     * CallerPrincipal group, which then takes precedence.
     *
     * @param subject the authenticated subject (may be null)
     * @return the resolved principal, or null if none could be determined
     */
    protected Principal getPrincipal(Subject subject) {
        Principal principal = null;
        Principal callerPrincipal = null;
        if (subject != null) {
            Set<Principal> principals = subject.getPrincipals();
            if (principals != null && !principals.isEmpty()) {
                for (Principal p : principals) {
                    if (!(p instanceof Group) && principal == null) {
                        principal = p;
                    }
                    if (p instanceof Group) {
                        Group g = (Group) p;
                        if (g.getName().equals(SecurityConstants.CALLER_PRINCIPAL_GROUP) && callerPrincipal == null) {
                            Enumeration<? extends Principal> e = g.members();
                            if (e.hasMoreElements()) {
                                callerPrincipal = e.nextElement();
                            }
                        }
                    }
                }
            }
        }
        return callerPrincipal == null ? principal : callerPrincipal;
    }

    /**
     * Finds the group with the given name among the principals, creating (and registering)
     * a new {@link SimpleGroup} when none exists yet.
     */
    protected Group createGroup(String name, Set<Principal> principals) {
        Group roles = null;
        Iterator<Principal> iter = principals.iterator();
        while (iter.hasNext()) {
            Object next = iter.next();
            if (!(next instanceof Group)) {
                continue;
            }
            Group grp = (Group) next;
            if (grp.getName().equals(name)) {
                roles = grp;
                break;
            }
        }
        // If we did not find a group create one
        if (roles == null) {
            roles = new SimpleGroup(name);
            principals.add(roles);
        }
        return roles;
    }

    /**
     * Wraps the flat role-name collection into a single "Roles" group, as expected by the
     * JBoss security layer.
     */
    protected Group[] getRoleSets(Collection<String> roleSet) {
        SimpleGroup roles = new SimpleGroup("Roles");
        Group[] roleSets = {roles};
        for (String role : roleSet) {
            roles.addMember(new SimplePrincipal(role));
        }
        return roleSets;
    }
}
package CSharp.textGen;

/*Generated by MPS */

import jetbrains.mps.text.rt.TextGenDescriptorBase;
import jetbrains.mps.text.rt.TextGenContext;
import jetbrains.mps.text.impl.TextGenSupport;
import jetbrains.mps.lang.smodel.generator.smodelAdapter.SLinkOperations;
import jetbrains.mps.smodel.adapter.structure.MetaAdapterFactory;
import jetbrains.mps.internal.collections.runtime.ListSequence;
import org.jetbrains.mps.openapi.model.SNode;
import jetbrains.mps.internal.collections.runtime.Sequence;

/**
 * Text generator for the C# equality-expression concept.
 * NOTE(review): this file is generated by MPS — regenerate it from the
 * language model instead of editing it by hand.
 */
public class Equality_expression_TextGen extends TextGenDescriptorBase {
  @Override
  public void generateText(final TextGenContext ctx) {
    final TextGenSupport tgs = new TextGenSupport(ctx);
    // Emit the "Relational_expression_1" child, when present.
    if (SLinkOperations.getTarget(ctx.getPrimaryInput(), MetaAdapterFactory.getContainmentLink(0x5f522167449a4486L, 0x94654f30de6e5cecL, 0x70aadf795f70aeL, 0x70aadf795f73d1L, "Relational_expression_1")) != null) {
      tgs.appendNode(SLinkOperations.getTarget(ctx.getPrimaryInput(), MetaAdapterFactory.getContainmentLink(0x5f522167449a4486L, 0x94654f30de6e5cecL, 0x70aadf795f70aeL, 0x70aadf795f73d1L, "Relational_expression_1")));
    }
    // Emit the "Equality_expression_block_1_1_2" children, space-separated,
    // with a single leading space (presumably the "==/!= operand" blocks —
    // verify against the language model).
    if (ListSequence.fromList(SLinkOperations.getChildren(ctx.getPrimaryInput(), MetaAdapterFactory.getContainmentLink(0x5f522167449a4486L, 0x94654f30de6e5cecL, 0x70aadf795f70aeL, 0x70aadf795f73d2L, "Equality_expression_block_1_1_2"))).count() > 0) {
      tgs.append(" ");
      {
        Iterable<SNode> collection = SLinkOperations.getChildren(ctx.getPrimaryInput(), MetaAdapterFactory.getContainmentLink(0x5f522167449a4486L, 0x94654f30de6e5cecL, 0x70aadf795f70aeL, 0x70aadf795f73d2L, "Equality_expression_block_1_1_2"));
        final SNode lastItem = Sequence.fromIterable(collection).last();
        for (SNode item : collection) {
          tgs.appendNode(item);
          // no separator after the last element
          if (item != lastItem) {
            tgs.append(" ");
          }
        }
      }
    }
  }
}
package com.breadwallet.wallet; import android.app.Activity; import android.app.AlertDialog; import android.app.KeyguardManager; import android.content.Context; import android.content.DialogInterface; import android.graphics.Bitmap; import android.graphics.Point; import android.media.AudioManager; import android.media.MediaPlayer; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.os.Handler; import android.os.SystemClock; import android.util.Log; import android.view.Display; import android.view.MotionEvent; import android.view.View; import android.view.WindowManager; import android.widget.EditText; import android.widget.ImageView; import android.widget.Toast; import com.breadwallet.R; import com.breadwallet.BreadWalletApp; import com.breadwallet.exceptions.BRKeystoreErrorException; import com.breadwallet.presenter.activities.BreadActivity; import com.breadwallet.presenter.customviews.BRDialogView; import com.breadwallet.presenter.customviews.BRToast; import com.breadwallet.presenter.entities.BRMerkleBlockEntity; import com.breadwallet.presenter.entities.BRPeerEntity; import com.breadwallet.presenter.entities.BRTransactionEntity; import com.breadwallet.presenter.entities.ImportPrivKeyEntity; import com.breadwallet.presenter.entities.PaymentItem; import com.breadwallet.presenter.entities.TransactionListItem; import com.breadwallet.presenter.interfaces.BROnSignalCompletion; import com.breadwallet.tools.animation.BRAnimator; import com.breadwallet.tools.animation.BreadDialog; import com.breadwallet.tools.animation.SpringAnimator; import com.breadwallet.tools.qrcode.QRUtils; import com.breadwallet.tools.sqlite.MerkleBlockDataSource; import com.breadwallet.tools.sqlite.PeerDataSource; import com.breadwallet.tools.sqlite.TransactionDataSource; import com.breadwallet.tools.threads.ImportPrivKeyTask; import com.breadwallet.tools.util.BRConstants; import com.breadwallet.tools.manager.BRNotificationManager; import 
com.breadwallet.tools.manager.SharedPreferencesManager; import com.breadwallet.tools.util.TypesConverter; import com.breadwallet.tools.util.Utils; import com.breadwallet.tools.util.Bip39Reader; import com.breadwallet.tools.security.KeyStoreManager; import com.google.firebase.crash.FirebaseCrash; import junit.framework.Assert; import java.io.IOException; import java.security.SecureRandom; import java.util.ArrayList; import java.util.List; /** * BreadWallet * <p/> * Created by Mihail Gutan <mihail@breadwallet.com> on 12/10/15. * Copyright (c) 2016 breadwallet LLC * <p/> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p/> * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * <p/> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/

/**
 * Singleton facade over the wallet core. Holds the balance listeners,
 * persists seed/keys via KeyStoreManager, and declares the JNI bridge
 * methods (see the native declarations at the end of the class).
 */
public class BRWalletManager {
    private static final String TAG = BRWalletManager.class.getName();

    // Lazily created singleton (see getInstance()).
    private static BRWalletManager instance;
    private static final int WHITE = 0xFFFFFFFF;
    private static final int BLACK = 0xFF000000;
    // Listeners notified whenever the wallet balance changes.
    public List<OnBalanceChanged> balanceListeners;

    /**
     * Caches the new balance in shared preferences, refreshes the receive
     * address and notifies all registered balance listeners.
     */
    public void setBalance(Context context, long balance) {
        if (context == null) {
            Log.e(TAG, "setBalance: FAILED TO SET THE BALANCE");
            return;
        }
        // ("Catched" is a long-standing typo in SharedPreferencesManager's API)
        SharedPreferencesManager.putCatchedBalance(context, balance);
        refreshAddress(context);
        for (OnBalanceChanged listener : balanceListeners) {
            if (listener != null) listener.onBalanceChanged(balance);
        }
    }

    /** Returns the balance last cached in shared preferences. */
    public long getBalance(Context context) {
        return SharedPreferencesManager.getCatchedBalance(context);
    }

    // Incrementing id used by showSentReceivedToast to coalesce queued toasts.
    private static int messageId = 0;

    private BRWalletManager() {
        balanceListeners = new ArrayList<>();
    }

    // NOTE(review): lazy init is not synchronized — appears to assume the
    // first call happens on a single thread; verify callers.
    public static BRWalletManager getInstance() {
        if (instance == null) {
            instance = new BRWalletManager();
        }
        return instance;
    }

    /**
     * Generates 16 bytes of entropy, encodes them as a recovery phrase with
     * the localized BIP39 word list, and stores the phrase, auth key and
     * master public key via KeyStoreManager.
     *
     * @return false when the keystore refuses to store the phrase; true on success
     */
    public boolean generateRandomSeed(Context ctx) {
        SecureRandom sr = new SecureRandom();
        String[] words = new String[0];
        List<String> list;
        try {
            String languageCode = ctx.getString(R.string.lang_Android);
            list = Bip39Reader.getWordList(ctx, languageCode);
            words = list.toArray(new String[list.size()]);
        } catch (IOException e) {
            e.printStackTrace();
        }
        byte[] keyBytes = sr.generateSeed(16);
        // a usable BIP39 word list has 2048 entries; a short list means a corrupt resource
        if (words.length < 2000) {
            RuntimeException ex = new IllegalArgumentException("the list is wrong, size: " + words.length);
            FirebaseCrash.report(ex);
            throw ex;
        }
        if (keyBytes.length == 0) throw new NullPointerException("failed to create the seed");
        byte[] strPhrase = encodeSeed(keyBytes, words);
        if (strPhrase == null || strPhrase.length == 0) {
            RuntimeException ex = new NullPointerException("failed to encodeSeed");
            FirebaseCrash.report(ex);
            throw ex;
        }
        boolean success = false;
        try {
            success = KeyStoreManager.putKeyStorePhrase(strPhrase, ctx, BRConstants.PUT_PHRASE_NEW_WALLET_REQUEST_CODE);
        } catch (BRKeystoreErrorException e) {
            e.printStackTrace();
        }
        if (!success) return false;
        byte[] authKey = getAuthPrivKeyForAPI(keyBytes);
        if (authKey == null || authKey.length == 0) {
            RuntimeException ex = new IllegalArgumentException("authKey is invalid");
            FirebaseCrash.report(ex);
            throw ex;
        }
        KeyStoreManager.putAuthKey(authKey, ctx);
        KeyStoreManager.putWalletCreationTime((int) (System.currentTimeMillis() / 1000), ctx);
        byte[] strBytes = TypesConverter.getNullTerminatedPhrase(strPhrase);
        byte[] pubKey = BRWalletManager.getInstance().getMasterPubKey(strBytes);
        KeyStoreManager.putMasterPublicKey(pubKey, ctx);
        return true;
    }

    /** Wipes all wallet keys from the Android keystore. */
    public boolean wipeKeyStore(Context context) {
        Log.e(TAG, "wipeKeyStore");
        return KeyStoreManager.resetWalletKeyStore(context);
    }

    /**
     * true if keystore is available and we know that no wallet exists on it
     * (no master public key AND no stored phrase).
     */
    public boolean noWallet(Context ctx) {
        byte[] pubkey = KeyStoreManager.getMasterPublicKey(ctx);
        if (pubkey == null || pubkey.length == 0) {
            byte[] phrase;
            try {
                phrase = KeyStoreManager.getKeyStorePhrase(ctx, 0);
                //if not authenticated, an error will be thrown and returned false, so no worry about mistakenly removing the wallet
                if (phrase == null || phrase.length == 0) {
                    return true;
                }
            } catch (BRKeystoreErrorException e) {
                e.printStackTrace();
                return false;
            }
        }
        return false;
    }

    /** Weaker check than noWallet(): looks only at the master public key. */
    public boolean noWalletForPlatform(Context ctx) {
        byte[] pubkey = KeyStoreManager.getMasterPublicKey(ctx);
        return pubkey == null || pubkey.length == 0;
    }

    /**
     * true if device passcode is enabled
     */
    public boolean isPasscodeEnabled(Context ctx) {
        KeyguardManager keyguardManager = (KeyguardManager) ctx.getSystemService(Activity.KEYGUARD_SERVICE);
        return keyguardManager.isKeyguardSecure();
    }

    /** true when a network connection is up or in the process of coming up. */
    public boolean isNetworkAvailable(Context ctx) {
        if (ctx == null) return false;
        ConnectivityManager cm = (ConnectivityManager) ctx.getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo netInfo = cm.getActiveNetworkInfo();
        return netInfo != null && netInfo.isConnectedOrConnecting();
    }

    //BLOCKS

    /** Pulls the current receive address from the wallet core and caches it in prefs. */
    public static boolean
refreshAddress(Context ctx) {
        String address = getReceiveAddress();
        if (Utils.isNullOrEmpty(address)) {
            Log.e(TAG, "refreshAddress: WARNING, retrieved address:" + address);
            return false;
        }
        SharedPreferencesManager.putReceiveAddress(ctx, address);
        return true;
    }

    /** true once the user has confirmed writing down the paper key. */
    public boolean isPaperKeyWritten(Context context) {
        return SharedPreferencesManager.getPhraseWroteDown(context);
    }

    /**
     * Frees the native wallet/peer state (on a worker thread) and deletes all
     * cached transactions, blocks, peers and preferences — the keystore is
     * intentionally left untouched.
     */
    public void wipeWalletButKeystore(final Context ctx) {
        Log.e(TAG, "wipeWalletButKeystore");
        new Thread(new Runnable() {
            @Override
            public void run() {
                BRPeerManager.getInstance().peerManagerFreeEverything();
                walletFreeEverything();
            }
        }).start();
        TransactionDataSource.getInstance(ctx).deleteAllTransactions();
        MerkleBlockDataSource.getInstance(ctx).deleteAllBlocks();
        PeerDataSource.getInstance(ctx).deleteAllPeers();
        SharedPreferencesManager.clearAllPrefs(ctx);
    }

    /**
     * Starts a private-key sweep. BIP38 (password-protected) keys prompt for
     * the password and recurse with the decrypted key; plain valid keys go
     * straight to ImportPrivKeyTask.
     *
     * @return true when the key is recognized (dialog shown or import started),
     *         false for a null context or an unrecognized key
     */
    public boolean confirmSweep(final Context ctx, final String privKey) {
        if (ctx == null) return false;
        if (isValidBitcoinBIP38Key(privKey)) {
            Log.d(TAG, "isValidBitcoinBIP38Key true");
            ((Activity) ctx).runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    final AlertDialog.Builder builder = new AlertDialog.Builder(ctx);
//                    builder.setTitle("password protected key");
                    final View input = ((Activity) ctx).getLayoutInflater().inflate(R.layout.view_bip38password_dialog, null);
                    // Specify the type of input expected; this, for example, sets the input as a password, and will mask the text
                    builder.setView(input);
                    final EditText editText = (EditText) input.findViewById(R.id.bip38password_edittext);
                    // synthesize a tap on the field so the soft keyboard opens
                    (new Handler()).postDelayed(new Runnable() {
                        public void run() {
                            editText.dispatchTouchEvent(MotionEvent.obtain(SystemClock.uptimeMillis(), SystemClock.uptimeMillis(), MotionEvent.ACTION_DOWN, 0, 0, 0));
                            editText.dispatchTouchEvent(MotionEvent.obtain(SystemClock.uptimeMillis(), SystemClock.uptimeMillis(), MotionEvent.ACTION_UP, 0, 0, 0));
                        }
                    }, 100);
                    // Set up the buttons
                    builder.setPositiveButton(ctx.getString(R.string.Button_ok), new
                            DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
//                            if (!((BreadWalletApp) ((Activity) ctx).getApplication()).hasInternetAccess()) {
//                                ((Activity) ctx).runOnUiThread(new Runnable() {
//                                    @Override
//                                    public void run() {
////                                        BreadDialog.showCustomDialog(ctx, ctx.getString(R.string.warning),
////                                                ctx.getString(R.string.not_connected), ctx.getString(R.string.ok));
//                                    }
//                                });
//
//                                return;
//                            }
                            if (ctx != null)
                                ((Activity) ctx).runOnUiThread(new Runnable() {
                                    @Override
                                    public void run() {
                                        BRToast.showCustomToast(ctx, ctx.getString(R.string.BRWalletManager_checkingPrivKeyBalance_Android), 500, Toast.LENGTH_LONG, R.drawable.toast_layout_blue);
                                    }
                                });
                            if (editText == null) {
                                Log.e(TAG, "onClick: edit text is null!");
                                return;
                            }
                            final String pass = editText.getText().toString();
                            Log.e(TAG, "onClick: before");
                            // decrypt off the UI thread; empty result means wrong
                            // password — shake the field and re-prompt
                            new Thread(new Runnable() {
                                @Override
                                public void run() {
                                    String decryptedKey = decryptBip38Key(privKey, pass);
                                    Log.e(TAG, "onClick: after");
                                    if (decryptedKey.equals("")) {
                                        SpringAnimator.springView(input);
                                        confirmSweep(ctx, privKey);
                                    } else {
                                        confirmSweep(ctx, decryptedKey);
                                    }
                                }
                            }).start();
                        }
                    });
                    builder.setNegativeButton(ctx.getString(R.string.Button_cancel), new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            dialog.cancel();
                        }
                    });
                    builder.show();
                }
            });
            return true;
        } else if (isValidBitcoinPrivateKey(privKey)) {
            Log.d(TAG, "isValidBitcoinPrivateKey true");
            new ImportPrivKeyTask(((Activity) ctx)).execute(privKey);
            return true;
        } else {
            Log.e(TAG, "confirmSweep: !isValidBitcoinPrivateKey && !isValidBitcoinBIP38Key");
            return false;
        }
    }

//    public static void showWritePhraseDialog(final Context ctx, final boolean firstTime) {
//
//        if (ctx != null) {
//            ((Activity) ctx).runOnUiThread(new Runnable() {
//                @Override
//                public void run() {
//                    boolean phraseWroteDown = SharedPreferencesManager.getPhraseWroteDown(ctx);
//                    if (phraseWroteDown) return;
//                    long now = System.currentTimeMillis() / 1000;
//                    long lastMessageShow = SharedPreferencesManager.getPhraseWarningTime(ctx);
//                    if (lastMessageShow == 0 || (!firstTime && lastMessageShow > (now - 36 * 60 * 60)))
//                        return;//36 * 60 * 60//
//                    if (BRWalletManager.getInstance().getBalance(ctx) > SharedPreferencesManager.getTotalLimit(ctx)) {
////                        getInstance(ctx).animateSavePhraseFlow();
//                        return;
//                    }
//                    SharedPreferencesManager.putPhraseWarningTime(ctx, System.currentTimeMillis() / 1000);
//                    AlertDialog alert;
//                    AlertDialog.Builder builder = new AlertDialog.Builder(ctx);
//                    builder.setTitle(ctx.getString(R.string.you_received_bitcoin));
//                    builder.setMessage(String.format(ctx.getString(R.string.write_down_phrase),
//                            ctx.getString(R.string.write_down_phrase_holder1)));
//                    builder.setPositiveButton(ctx.getString(R.string.show_phrase),
//                            new DialogInterface.OnClickListener() {
//                                public void onClick(final DialogInterface dialog, int which) {
//                                    new Thread(new Runnable() {
//                                        @Override
//                                        public void run() {
//                                            dialog.dismiss();
////                                            BRWalletManager.getInstance().animateSavePhraseFlow();
//                                        }
//                                    }).start();
//                                }
//                            });
//                    builder.setNegativeButton(ctx.getString(R.string.ok),
//                            new DialogInterface.OnClickListener() {
//                                public void onClick(DialogInterface dialog, int which) {
//                                    dialog.dismiss();
//                                }
//                            });
//                    builder.setCancelable(false);
//                    alert = builder.create();
//                    alert.show();
//                }
//            });
//
//        }
//
//    }

    /**
     * Wallet callbacks
     * Reports the result of publishing a transaction: shows the success or
     * error signal after a short delay, then pops the fragment back stack.
     */
    public static void publishCallback(final String message, final int error) {
        Log.e(TAG, "publishCallback: " + message + ", err:" + error);
        final Activity app = BreadWalletApp.getBreadContext();
        app.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                new Handler().postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        BRAnimator.showBreadSignal(app, error == 0 ? "Send Confirmation" : "Error", error == 0 ? "Money Sent!" : message, error == 0 ?
                                        R.drawable.ic_check_mark_white : R.drawable.ic_error_outline_black_24dp, new BROnSignalCompletion() {
                            @Override
                            public void onComplete() {
                                if (app != null && !app.isDestroyed())
                                    app.getFragmentManager().popBackStack();
                            }
                        });
                    }
                }, 500);
            }
        });
//        PaymentProtocolPostPaymentTask.waiting = false;
//        if (error != 0) {
//            if (!PaymentProtocolPostPaymentTask.waiting && !PaymentProtocolPostPaymentTask.sent) {
//                if (PaymentProtocolPostPaymentTask.pendingErrorMessages.get(PaymentProtocolPostPaymentTask.MESSAGE) != null) {
//                    BreadDialog.showCustomDialog(ctx, PaymentProtocolPostPaymentTask.pendingErrorMessages.get(PaymentProtocolPostPaymentTask.TITLE),
//                            PaymentProtocolPostPaymentTask.pendingErrorMessages.get(PaymentProtocolPostPaymentTask.MESSAGE), ctx.getString(R.string.ok));
//                    PaymentProtocolPostPaymentTask.pendingErrorMessages = null;
//                } else {
//                    BRToast.showCustomToast(BreadActivity.app, message,
//                            BreadActivity.screenParametersPoint.y / 2, Toast.LENGTH_LONG, R.drawable.toast_layout_black);
//                }
//            }
//        } else {
//            PaymentProtocolPostPaymentTask.sent = true;
//        }
    }

    /** Balance-changed callback: forwards the new balance to the singleton. */
    public static void onBalanceChanged(final long balance) {
        Log.d(TAG, "onBalanceChanged: " + balance);
        Activity app = BreadWalletApp.getBreadContext();
        BRWalletManager.getInstance().setBalance(app, balance);
    }

    /** Transaction-added callback: persists the raw tx in the local database. */
    public static void onTxAdded(byte[] tx, int blockHeight, long timestamp, final long amount, String hash) {
        Log.d(TAG, "onTxAdded: " + String.format("tx.length: %d, blockHeight: %d, timestamp: %d, amount: %d, hash: %s", tx.length, blockHeight, timestamp, amount, hash));
//        if (getInstance().getTxCount() <= 1) {
//            SharedPreferencesManager.putPhraseWarningTime(ctx, System.currentTimeMillis() / 1000);
//            ctx.runOnUiThread(new Runnable() {
//                @Override
//                public void run() {
//                    new Handler().postDelayed(new Runnable() {
//                        @Override
//                        public void run() {
//                            showWritePhraseDialog(ctx, true);
//                        }
//                    }, 2000);
//                }
//            });
//
//        }
        Activity ctx = BreadWalletApp.getBreadContext();
        if (ctx != null)
            TransactionDataSource.getInstance(ctx).putTransaction(new BRTransactionEntity(tx, blockHeight, timestamp, hash));
        else
            Log.e(TAG, "onTxAdded: ctx is null!");
    }

    /**
     * Shows a sent/received toast after a 1s debounce (messageId coalesces
     * bursts), plays the coin sound when the ringer is on, and posts a
     * notification when the app is not visible.
     */
    private static void showSentReceivedToast(final Context ctx, final String message) {
        messageId++;
        if (ctx != null) {
            ((Activity) ctx).runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    // only the most recent request (temp == messageId) fires
                    final int temp = messageId;
                    new Handler().postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            if (temp == messageId) {
                                if (BRToast.isToastShown()) {
                                    BRToast.showCustomToast(ctx, message, BreadWalletApp.DISPLAY_HEIGHT_PX / 2, Toast.LENGTH_LONG, R.drawable.toast_layout_black);
                                    AudioManager audioManager = (AudioManager) ctx.getSystemService(Context.AUDIO_SERVICE);
                                    if (audioManager.getRingerMode() == AudioManager.RINGER_MODE_NORMAL) {
                                        final MediaPlayer mp = MediaPlayer.create(ctx, R.raw.coinflip);
                                        mp.start();
                                    }
                                    messageId = 0;
                                    if (!BreadActivity.appVisible)
                                        BRNotificationManager.sendNotification(ctx, R.drawable.notification_icon, ctx.getString(R.string.app_name), message, 1);
                                }
                            }
                        }
                    }, 1000);
                }
            });
        } else {
            Log.e(TAG, "showSentReceivedToast: failed, ctx is null");
        }
    }

    /** Transaction-updated callback: records the new block height/timestamp. */
    public static void onTxUpdated(String hash, int blockHeight, int timeStamp) {
        Log.d(TAG, "onTxUpdated: " + String.format("hash: %s, blockHeight: %d, timestamp: %d", hash, blockHeight, timeStamp));
        Activity ctx = BreadWalletApp.getBreadContext();
        if (ctx != null) {
            TransactionDataSource.getInstance(ctx).updateTxBlockHeight(hash, blockHeight, timeStamp);
        } else {
            Log.e(TAG, "onTxUpdated: Failed, ctx is null");
        }
    }

    /**
     * Transaction-deleted callback: removes the tx from the local database and,
     * when notifyUser == 1, shows a rejection dialog that can offer a rescan.
     */
    public static void onTxDeleted(String hash, int notifyUser, final int recommendRescan) {
        Log.e(TAG, "onTxDeleted: " + String.format("hash: %s, notifyUser: %d, recommendRescan: %d", hash, notifyUser, recommendRescan));
        final Activity ctx = BreadWalletApp.getBreadContext();
        if (ctx != null) {
            TransactionDataSource.getInstance(ctx).deleteTxByHash(hash);
            if (notifyUser == 1) {
                ctx.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        AlertDialog alert;
                        AlertDialog.Builder builder = new AlertDialog.Builder(ctx);
                        builder.setTitle(R.string.BRWalletManager_transactionRejected_Android);
                        builder.setMessage(recommendRescan == 1 ? ctx.getString(R.string.BRWalletManager_walletOutOfSync_Android) : "");
                        // the rescan option is only offered when the core recommends it
                        if (recommendRescan == 1)
                            builder.setPositiveButton(R.string.ReScan_alertAction, new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface dialog, int which) {
//                                    if (BRAnimator.checkTheMultipressingAvailability()) {
                                    new Thread(new Runnable() {
                                        @Override
                                        public void run() {
                                            BRPeerManager.getInstance().rescan();
                                        }
                                    }).start();
//                                    }
                                }
                            });
                        builder.setNegativeButton(ctx.getString(R.string.Button_cancel), new DialogInterface.OnClickListener() {
                            public void onClick(DialogInterface dialog, int which) {
                                dialog.dismiss();
                            }
                        });
                        alert = builder.create();
                        alert.show();
                    }
                });
            }
        } else {
            Log.e(TAG, "onTxDeleted: Failed! ctx is null");
        }
    }

    /**
     * Validates a recovery phrase: picks the word list whose language matches
     * the phrase's first word (falling back from the device language), then
     * delegates to the native validateRecoveryPhrase.
     *
     * @throws RuntimeException when the resolved word list is not 2048 words
     */
    public boolean validatePhrase(Context ctx, String phrase) {
        String[] words = new String[0];
        List<String> list;
        String[] cleanWordList = null;
        try {
            boolean isLocal = true;
            String languageCode = ctx.getString(R.string.lang_Android);
            list = Bip39Reader.getWordList(ctx, languageCode);
            String[] phraseWords = phrase.split(" ");
            // first word not in the device-language list -> detect the phrase's language
            if (!list.contains(phraseWords[0])) {
                isLocal = false;
            }
            if (!isLocal) {
                String lang = Bip39Reader.getLang(ctx, phraseWords[0]);
                if (lang != null) {
                    list = Bip39Reader.getWordList(ctx, lang);
                }
            }
            words = list.toArray(new String[list.size()]);
            cleanWordList = Bip39Reader.cleanWordList(words);
            if (cleanWordList == null) return false;
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (words.length != 2048) {
            RuntimeException ex = new IllegalArgumentException("words.length is not 2048");
            FirebaseCrash.report(ex);
            throw ex;
        }
        return validateRecoveryPhrase(cleanWordList, phrase);
    }

    /**
     * Launches BreadActivity when a wallet exists. Without a device passcode
     * the app cannot work, so a blocking warning dialog is shown instead and
     * the activity is finished.
     */
    public void startTheWalletIfExists(final Activity app) {
        final BRWalletManager m = BRWalletManager.getInstance();
        if (!m.isPasscodeEnabled(app)) {
            //Device passcode/password should be enabled for the app to work
            BreadDialog.showCustomDialog(app, "Warning", app.getString(R.string.IntroScreen_encryption_needed_Android), "close", null, new BRDialogView.BROnClickListener() {
                @Override
                public void onClick(BRDialogView brDialogView) {
                    app.finish();
                }
            }, null, new DialogInterface.OnDismissListener() {
                @Override
                public void onDismiss(DialogInterface dialog) {
                    app.finish();
                }
            }, 0);
        } else {
            if (!m.noWallet(app)) {
                BRAnimator.startBreadActivity(app, true);
            }
        }
    }

    //BLOCKS

    /**
     * Rebuilds the native wallet and peer manager from the locally cached
     * transactions/blocks/peers, then connects the peer manager and records
     * the starting block height on first run.
     */
    public void setUpTheWallet(final Context ctx) {
        Log.d(TAG, "setUpTheWallet...");
        Assert.assertNotNull(ctx);
        if (ctx == null) return;
        BRWalletManager m = BRWalletManager.getInstance();
        final BRPeerManager pm = BRPeerManager.getInstance();
        if (!m.isCreated()) {
            List<BRTransactionEntity> transactions = TransactionDataSource.getInstance(ctx).getAllTransactions();
            int transactionsCount = transactions.size();
            if (transactionsCount > 0) {
                m.createTxArrayWithCount(transactionsCount);
                for (BRTransactionEntity entity : transactions) {
                    m.putTransaction(entity.getBuff(), entity.getBlockheight(), entity.getTimestamp());
                }
            }
            byte[] pubkeyEncoded = KeyStoreManager.getMasterPublicKey(ctx);
            //Save the first address for future check
            m.createWallet(transactionsCount, pubkeyEncoded);
            String firstAddress = BRWalletManager.getFirstAddress(pubkeyEncoded);
            SharedPreferencesManager.putFirstAddress(ctx, firstAddress);
            long fee = SharedPreferencesManager.getFeePerKb(ctx);
            if (fee == 0) fee = BRConstants.DEFAULT_FEE_PER_KB;
            BRWalletManager.getInstance().setFeePerKb(fee);
        }
        if (!pm.isCreated()) {
            List<BRMerkleBlockEntity> blocks = MerkleBlockDataSource.getInstance(ctx).getAllMerkleBlocks();
            List<BRPeerEntity> peers = PeerDataSource.getInstance(ctx).getAllPeers();
            final int blocksCount = blocks.size();
            final int peersCount = peers.size();
            if (blocksCount > 0) {
                pm.createBlockArrayWithCount(blocksCount);
                for (BRMerkleBlockEntity entity : blocks) {
                    pm.putBlock(entity.getBuff(), entity.getBlockHeight());
                }
            }
            if (peersCount > 0) {
                pm.createPeerArrayWithCount(peersCount);
                for (BRPeerEntity entity : peers) {
                    pm.putPeer(entity.getAddress(), entity.getPort(), entity.getTimeStamp());
                }
            }
            Log.d(TAG, "blocksCount before connecting: " + blocksCount);
            Log.d(TAG, "peersCount before connecting: " + peersCount);
            int walletTimeString = KeyStoreManager.getWalletCreationTime(ctx);
            Log.e(TAG, "setUpTheWallet: walletTimeString: " + walletTimeString);
            pm.create(walletTimeString, blocksCount, peersCount);
        }
        pm.connect();
        // first run: remember the chain height we started syncing from
        if (SharedPreferencesManager.getStartHeight(ctx) == 0)
            new Thread(new Runnable() {
                @Override
                public void run() {
                    SharedPreferencesManager.putStartHeight(ctx, BRPeerManager.getCurrentBlockHeight());
                }
            }).start();
    }

    /**
     * Renders a bitcoin URL as a QR bitmap into the given ImageView, sized to
     * 55% of the smaller screen dimension.
     *
     * @return false for empty input or when encoding fails
     */
    public boolean generateQR(Context ctx, String bitcoinURL, ImageView qrcode) {
        if (qrcode == null || bitcoinURL == null || bitcoinURL.isEmpty()) return false;
        WindowManager manager = (WindowManager) ctx.getSystemService(Activity.WINDOW_SERVICE);
        Display display = manager.getDefaultDisplay();
        Point point = new Point();
        display.getSize(point);
        int width = point.x;
        int height = point.y;
        int smallerDimension = width < height ? width : height;
        smallerDimension = (int) (smallerDimension * 0.55f);
        Bitmap bitmap = null;
        bitmap = QRUtils.encodeAsBitmap(bitcoinURL, smallerDimension);
        //qrcode.setPadding(1, 1, 1, 1);
        //qrcode.setBackgroundResource(R.color.gray);
        if (bitmap == null) return false;
        qrcode.setImageBitmap(bitmap);
        return true;
    }

    /**
     * Insufficient-funds flow: offers to reopen the send screen so the user
     * can adjust the payment amount for the first address of the item.
     */
    public void offerToChangeTheAmount(final Context app, final PaymentItem item) {
        BreadDialog.showCustomDialog(app, app.getString(R.string.insufficient_funds), app.getString(R.string.change_payment_amount),
                app.getString(R.string.change), app.getString(R.string.Button_cancel), new BRDialogView.BROnClickListener() {
            @Override
            public void onClick(BRDialogView brDialogView) {
                BRAnimator.showSendFragment((Activity) app, Utils.createBitcoinUrl(item.addresses[0], 0, null, null, null));
                brDialogView.dismissWithAnimation();
            }
        }, new BRDialogView.BROnClickListener() {
            @Override
            public void onClick(BRDialogView brDialogView) {
                brDialogView.dismissWithAnimation();
            }
        }, null, 0);
    }

    /** Registers a balance listener; duplicates are ignored. */
    public void addBalanceChangedListener(OnBalanceChanged listener) {
        if (balanceListeners == null) {
            Log.e(TAG, "addBalanceChangedListener: statusUpdateListeners is null");
            return;
        }
        if (!balanceListeners.contains(listener)) balanceListeners.add(listener);
    }

    /** Unregisters a previously added balance listener. */
    public void removeListener(OnBalanceChanged listener) {
        if (balanceListeners == null) {
            Log.e(TAG, "addBalanceChangedListener: statusUpdateListeners is null");
            return;
        }
        balanceListeners.remove(listener);
    }

    /** Observer for wallet balance changes. */
    public interface OnBalanceChanged {
        void onBalanceChanged(long balance);
    }

    // --- native (JNI) bridge into the wallet core ---

    private native byte[] encodeSeed(byte[] seed, String[] wordList);

    public native void createWallet(int transactionCount, byte[] pubkey);

    public native void putTransaction(byte[] transaction, long blockHeight, long timeStamp);

    public native void createTxArrayWithCount(int count);

    public native byte[] getMasterPubKey(byte[] normalizedString);

    public static native String getReceiveAddress();

    public native TransactionListItem[] getTransactions();

    public static native
    boolean validateAddress(String address);

    public native boolean addressContainedInWallet(String address);

    public native boolean addressIsUsed(String address);

    public native int feeForTransaction(String addressHolder, long amountHolder);

    public native int feeForTransactionAmount(long amountHolder);

    public native long getMinOutputAmount();

    public native long getMaxOutputAmount();

    public native boolean isCreated();

//    public native boolean transactionIsVerified(String txHash);

    public native byte[] tryTransaction(String addressHolder, long amountHolder);

    // returns the given amount (amount is in satoshis) in local currency units (i.e. pennies, pence)
    // price is local currency units per bitcoin
    public native long localAmount(long amount, double price);

    // returns the given local currency amount in satoshis
    // price is local currency units (i.e. pennies, pence) per bitcoin
    public native long bitcoinAmount(long localAmount, double price);

    public native void walletFreeEverything();

    private native boolean validateRecoveryPhrase(String[] words, String phrase);

    public native static String getFirstAddress(byte[] mpk);

    public native boolean publishSerializedTransaction(byte[] serializedTransaction, byte[] phrase);

    public native long getTotalSent();

    public native long setFeePerKb(long fee);

    public native boolean isValidBitcoinPrivateKey(String key);

    public native boolean isValidBitcoinBIP38Key(String key);

    public native String getAddressFromPrivKey(String key);

    public native void createInputArray();

    public native void addInputToPrivKeyTx(byte[] hash, int vout, byte[] script, long amount);

    public native boolean confirmKeySweep(byte[] tx, String key);

    public native ImportPrivKeyEntity getPrivKeyObject();

    public native String decryptBip38Key(String privKey, String pass);

    public native String reverseTxHash(String txHash);

    public native int getTxCount();

    public native long getMinOutputAmountRequested();

    public static native byte[] getAuthPrivKeyForAPI(byte[] seed);

    public static native String getAuthPublicKeyForAPI(byte[] privKey);

    public static native byte[] getSeedFromPhrase(byte[] phrase);

    public static native boolean isTestNet();
}
package cn.jpush.b.a.b;

import cn.jpush.a.a.bg;
import cn.jpush.a.a.bi;
import cn.jpush.a.a.bj;
import com.google.gson.jpush.annotations.a;
import com.google.protobuf.jpush.c;

// NOTE(review): obfuscated JPush SDK class — all names are compiler-generated.
// Do not hand-edit; fixes belong in the un-obfuscated upstream source.
public class v extends q {
    public static final String a = v.class.getName();

    // serialized field (annotated with the obfuscated Gson annotation `a`)
    @a
    long b;
    // serialized field; optional — only appended to the builder when non-null
    @a
    String c;

    // Builds a `p` message (opcode-like constants 3, 1) from a `bj` builder
    // seeded with `b`; when `c` is set its protobuf-encoded form is attached.
    // NOTE(review): semantics inferred from shape only — verify against the
    // original (pre-obfuscation) JPush sources.
    final p a(long j, String str) {
        bj a = bi.j().a(this.b);
        if (this.c != null) {
            a.a(bg.j().a(c.a(this.c)).a());
        }
        return new p(3, 1, j, str, a.a());
    }
}
package com.uscexp.util.function; import java.util.Objects; @FunctionalInterface public interface CheckedConsumer<T> { void accept(T var1) throws Exception; default CheckedConsumer<T> andThen(CheckedConsumer<? super T> var1) { Objects.requireNonNull(var1); return (var2) -> { this.accept(var2); var1.accept(var2); }; } }
package springboot.samples.datajpa_postgresql;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Entry point of the Spring Data JPA + PostgreSQL sample application.
 * {@code @SpringBootApplication} enables auto-configuration and component
 * scanning rooted at this package.
 */
@SpringBootApplication
public class DataJpaApplication {

    /** Starts the Spring application context. */
    public static void main(String[] args) {
        SpringApplication.run(DataJpaApplication.class, args);
    }
}
package com.chuxin.law.ry.server.response; /** * Created by AMing on 16/1/27. * Company RongCloud */ public class AddGroupMemberResponse { private int code; public int getCode() { return code; } public void setCode(int code) { this.code = code; } }
package jat.coreNOSA.trajectory; /* JAT: Java Astrodynamics Toolkit * * Copyright (c) 2003 National Aeronautics and Space Administration. All rights reserved. * * This file is part of JAT. JAT is free software; you can * redistribute it and/or modify it under the terms of the * NASA Open Source Agreement * * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * NASA Open Source Agreement for more details. * * You should have received a copy of the NASA Open Source Agreement * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * */ import java.io.Serializable; /** * <P> * The TimeUnits.java Class provides the means for specifying the * time units used in creating a trajectory. * * @author * @version 1.0 */ public final class TimeUnits implements Serializable { /** * */ private static final long serialVersionUID = -6348915627577412589L; private String name; private TimeUnits(String nm) { name = nm; } public String toString() { return name; } public final static TimeUnits SECONDS = new TimeUnits("s"), DAYS = new TimeUnits("days"), OTHER = new TimeUnits("Other"); public final static TimeUnits[] index = { SECONDS, DAYS, OTHER }; public static void main(String[] args) { TimeUnits m = TimeUnits.SECONDS; System.out.println(m); m = TimeUnits.index[1]; System.out.println(m); System.out.println(m == TimeUnits.SECONDS); System.out.println(m.equals(TimeUnits.DAYS)); } }
package dal.dao;

import core.config.AppConfig;
import core.exceptions.FatalException;
import dal.dao.interfaces.DocumentDao;
import domaine.dto.ChoixMobiliteDto;
import domaine.dto.DocumentDto;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

/**
 * JDBC DAO for filled/to-fill documents attached to a mobility choice.
 *
 * <p>SQL text is looked up by key from {@code AppConfig} (e.g.
 * {@code "c_document_rempli"}) and executed through a prepared statement
 * obtained from {@code dalbs}.
 *
 * <p>NOTE(review): {@code ps} is an inherited field reused across calls and
 * is never closed in this class — confirm that {@code DaoImpl}/{@code dalbs}
 * manages the statement lifecycle, otherwise this leaks JDBC resources.
 *
 * <p>NOTE(review): every catch wraps only {@code exception.getMessage()} into
 * {@code FatalException}, dropping the {@code SQLException} cause and stack
 * trace — pass the cause too if a {@code (String, Throwable)} constructor
 * exists.
 */
public class DocumentDaoImpl extends DaoImpl<DocumentDto> implements DocumentDao {

    /**
     * Records that {@code document} has been filled for its mobility choice.
     *
     * <p>Binds (document id, candidature number, preference number) to the
     * insert statement keyed {@code "c_document_rempli"}.
     *
     * @param document the filled document (its mobility link supplies the keys)
     * @return always {@code true}; failures surface as {@code FatalException}
     */
    @Override
    public boolean ajouterDocumentRempli(DocumentDto document) {
        ps = super.dalbs.createPrepareStatement(AppConfig.getValueOf("c_document_rempli"));
        try {
            ps.setInt(1, document.getId());
            ps.setInt(2, document.getMobilite().getNumCandidature());
            ps.setInt(3, document.getMobilite().getNumPreference());
            ps.executeUpdate();
        } catch (SQLException exception) {
            // Cause is dropped here; only the message survives.
            throw new FatalException(exception.getMessage());
        }
        return true;
    }

    /**
     * Deletes the "filled" record linking {@code document} to {@code mobilite}.
     *
     * <p>Binds (document id, candidature number, preference number) to the
     * delete statement keyed {@code "d_document_rempli"}.
     *
     * @param mobilite the mobility choice the document was filled for
     * @param document the document whose filled record is removed
     * @return always {@code true}; failures surface as {@code FatalException}
     */
    @Override
    public boolean supprimerDocumentRempli(ChoixMobiliteDto mobilite, DocumentDto document) {
        ps = super.dalbs.createPrepareStatement(AppConfig.getValueOf("d_document_rempli"));
        try {
            ps.setInt(1, document.getId());
            ps.setInt(2, mobilite.getNumCandidature());
            ps.setInt(3, mobilite.getNumPreference());
            ps.executeUpdate();
        } catch (SQLException exception) {
            throw new FatalException(exception.getMessage());
        }
        return true;
    }

    /**
     * Fetches the documents still to be filled for the given mobility choice.
     *
     * <p>Runs the query keyed {@code "r_document_a_remplir"}; each row is
     * mapped via {@code fillEntityWithResult}.
     *
     * @param mobi the mobility choice to look up
     * @return the mapped documents (empty list if the query returns no rows)
     */
    @Override
    public List<DocumentDto> getListeDocumentsARemplir(ChoixMobiliteDto mobi) {
        ps = super.dalbs.createPrepareStatement(AppConfig.getValueOf("r_document_a_remplir"));
        List<DocumentDto> documents = null;
        try {
            // NOTE(review): parameters are bound 2-then-1 here, the reverse of
            // the other methods — presumably matches this query's placeholder
            // order; verify against the "r_document_a_remplir" SQL.
            ps.setInt(2, mobi.getNumPreference());
            ps.setInt(1, mobi.getNumCandidature());
            try (ResultSet res = ps.executeQuery()) {
                documents = new ArrayList<DocumentDto>();
                while (res.next()) {
                    documents.add(super.fillEntityWithResult(res));
                }
            }
        } catch (SQLException exception) {
            throw new FatalException(exception.getMessage());
        }
        return documents;
    }

    /**
     * Fetches the documents already filled for the given mobility choice.
     *
     * <p>Runs the query keyed {@code "r_document_rempli"}; each row is mapped
     * via {@code fillEntityWithResult}.
     *
     * @param mobilite the mobility choice to look up
     * @return the mapped documents (empty list if the query returns no rows)
     */
    @Override
    public List<DocumentDto> getListeDocumentsRemplis(ChoixMobiliteDto mobilite) {
        ps = super.dalbs.createPrepareStatement(AppConfig.getValueOf("r_document_rempli"));
        List<DocumentDto> documents = null;
        try {
            ps.setInt(1, mobilite.getNumCandidature());
            ps.setInt(2, mobilite.getNumPreference());
            try (ResultSet res = ps.executeQuery()) {
                documents = new ArrayList<DocumentDto>();
                while (res.next()) {
                    documents.add(super.fillEntityWithResult(res));
                }
            }
        } catch (SQLException exception) {
            throw new FatalException(exception.getMessage());
        }
        return documents;
    }
}
package persistencia; import com.thoughtworks.xstream.XStream; import dominio.Reserva; import java.io.File; import java.io.FileWriter; import java.util.ArrayList; import java.util.Objects; public class ReservaBD { private static String caminho = "C:\\Users\\Wallace\\Documents\\NetBeansProjects\\PI 2ºSemestre\\Site-Recanto-Ferraz2\\"; private static ArrayList<Reserva> lista = new ArrayList<Reserva>(); //adiciona um objeto da classe Reserva //na lista que simula o banco de dados public static void inserir(Reserva reserva){ lerXml(); if(lista.size() == 0){//se s lista estiver vazia reserva.setCodigo(1); }else{//se alista tem objetos //pegar o ultimo codigo int ultimaPosicao = lista.size()-1; Reserva ultimaReserva = lista.get(ultimaPosicao); //somar+1 no carrinho adicionado reserva.setCodigo(ultimaReserva.getCodigo()+1); } lista.add(reserva); salvarXml(); } public static void alterar(Reserva reservaAlterada){ excluir(reservaAlterada.getCodigo()); inserir(reservaAlterada); } public static void excluir(int codigo){ lerXml(); for(int i=0; i < lista.size(); i++){ Reserva cadaReserva = lista.get(i); if (cadaReserva.getCodigo() == codigo) { lista.remove(i); } } salvarXml(); } public static ArrayList<Reserva> listar(){ lerXml(); return lista; } public static Reserva getByCodigo(int codigo){ lerXml(); for(int i=0; i < lista.size(); i++){ Reserva cadaReserva = lista.get(i); if (cadaReserva.getCodigo() == codigo){ return cadaReserva; } } return null; } private static void lerXml(){ File arquivo=new File(caminho + "reservas.xml"); if (arquivo.exists()){ //armazenar XML no vetor XStream xstream=new XStream(); xstream.alias("reserva",Reserva.class); lista = (ArrayList<Reserva>) xstream.fromXML(arquivo); }else{ lista = new ArrayList<Reserva>(); } } private static void salvarXml(){ XStream xstream = new XStream(); xstream.alias("reserva",Reserva.class); try{ FileWriter escritor=new FileWriter(caminho + "reservas.xml"); escritor.write( xstream.toXML(lista) ); escritor.close(); 
}catch(Exception ex){ System.out.println(ex.getMessage()); } } }