repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
groboclown/p4ic4idea
p4java/r18-1/src/test/java/com/perforce/p4java/tests/dev/unit/features122/ProgressCallbackTest.java
5144
/**
 *
 */
package com.perforce.p4java.tests.dev.unit.features122;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;

import java.net.URISyntaxException;
import java.util.Map;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import com.perforce.p4java.client.IClient;
import com.perforce.p4java.exception.P4JavaException;
import com.perforce.p4java.server.IOptionsServer;
import com.perforce.p4java.server.callback.IProgressCallback;
import com.perforce.p4java.tests.dev.annotations.TestId;
import com.perforce.p4java.tests.dev.unit.P4JavaTestCase;

/**
 * Tests IProgressCallback - desire more info passed to method tick().
 * The 'p4 sync' command more than just the clientFile as currently available.
 * Report the sync size at the start - which is the totalFileSize.
 */
@TestId("Bugs101_Job040241Test")
public class ProgressCallbackTest extends P4JavaTestCase {

    // Progress callback that logs every start/tick/stop event and cancels the
    // running command after the third tick by returning false from tick().
    private IProgressCallback progressCallback = new IProgressCallback() {

        // Number of tick() calls observed since the last start(); reset per command.
        public int counter = 0;

        public void start(int key) {
            System.out.println("start: key = " + key);
            counter=0;
        }

        public boolean tick(int key, String tickMarker) {
            counter++;
            System.out.println("tick: key = " + key);
            System.out.println("tick: tickMarker = " + tickMarker);
            if (counter == 3) {
                System.out.println("Counter: " + counter + "... Cancel callback...");
                // Returning false requests cancellation of the server command.
                return false;
            }
            return true;
        }

        public void stop(int key) {
            System.out.println("stop: key = " + key);
        }
    };

    // Server connection and client workspace under test; set up before each test.
    IOptionsServer server = null;
    IClient client = null;

    /**
     * @BeforeClass annotation to a method to be run before all the tests in a
     * class.
     */
    @BeforeClass
    public static void oneTimeSetUp() {
        // one-time initialization code (before all the tests).
    }

    /**
     * @AfterClass annotation to a method to be run after all the tests in a
     * class.
     */
    @AfterClass
    public static void oneTimeTearDown() {
        // one-time cleanup code (after all the tests).
    }

    /**
     * @Before annotation to a method to be run before each test in a class.
     */
    @Before
    public void setUp() {
        // initialization code (before each test).
        try {
            server = getServer();
            assertNotNull(server);
            client = server.getClient("p4TestUserWS");
            assertNotNull(client);
            server.setCurrentClient(client);
        } catch (P4JavaException e) {
            fail("Unexpected exception: " + e.getLocalizedMessage());
        } catch (URISyntaxException e) {
            fail("Unexpected exception: " + e.getLocalizedMessage());
        }
    }

    /**
     * @After annotation to a method to be run after each test in a class.
     */
    @After
    public void tearDown() {
        // cleanup code (after each test).
        if (server != null) {
            this.endServerSession(server);
        }
    }

    /**
     * Test Progresscallback
     *
     * Registers the cancelling callback above, then runs a series of server
     * commands (fstat, files, sync -n, jobs, changes, users) so the callback's
     * tick() output — and its cancellation after three ticks — can be observed.
     */
    @Test
    public void testProgresscallback() {
        try {
            // No callback should have been registered before this test.
            IProgressCallback prevCallback = server.registerProgressCallback(progressCallback);
            assertNull(prevCallback);

            System.out.println("=========== fstat ===========");
            // fstat
            Map<String, Object>[] retMap = server.execInputStringMapCmd("fstat",
                    new String[]{"//depot/112Dev/CopyFilesTest/..."}, null);
            assertNotNull(retMap);
            // fstat - no such file(s)
            retMap = server.execInputStringMapCmd("fstat",
                    new String[]{"//depot/112Dev/CopyFilesTest/a/..."}, null);
            assertNotNull(retMap);
            System.out.println("==============================");

            System.out.println("=========== files ===========");
            // files
            retMap = server.execInputStringMapCmd("files",
                    new String[]{"//depot/112Dev/CopyFilesTest/..."}, null);
            assertNotNull(retMap);
            // files - no such file(s)
            retMap = server.execInputStringMapCmd("files",
                    new String[]{"//depot/112Dev/CopyFilesTest/a/..."}, null);
            assertNotNull(retMap);
            System.out.println("==============================");

            System.out.println("============ sync ============");
            // sync ("-n" keeps this a dry run; no files are actually synced)
            retMap = server.execInputStringMapCmd("sync",
                    new String[]{"-f", "-n", "//depot/112Dev/CopyFilesTest/..."}, null);
            assertNotNull(retMap);
            // sync - no such file(s)
            retMap = server.execInputStringMapCmd("sync",
                    new String[]{"-f", "-n", "//depot/112Dev/CopyFilesTest/a/..."}, null);
            assertNotNull(retMap);
            System.out.println("==============================");

            System.out.println("============ jobs ============");
            // jobs
            retMap = server.execInputStringMapCmd("jobs", null, null);
            assertNotNull(retMap);
            System.out.println("==============================");

            System.out.println("============ changes ============");
            // changes
            retMap = server.execInputStringMapCmd("changes", null, null);
            assertNotNull(retMap);
            System.out.println("==============================");

            System.out.println("============ users ============");
            // users
            retMap = server.execInputStringMapCmd("users", null, null);
            assertNotNull(retMap);
            System.out.println("==============================");
        } catch (Exception exc) {
            fail("Unexpected exception: " + exc.getLocalizedMessage());
        }
    }
}
apache-2.0
apache/openwebbeans
webbeans-impl/src/test/java/org/apache/webbeans/test/injection/injectionpoint/beans/ProducerInjectionPointInstanceOwner.java
1198
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.webbeans.test.injection.injectionpoint.beans;

import javax.enterprise.inject.Instance;
import javax.inject.Inject;

/**
 * Test bean that receives an {@code Instance<ProducerMethodInjectionPointOwner>}
 * via field injection, exposing it for injection-point related assertions.
 */
public class ProducerInjectionPointInstanceOwner {

    // Injected lazily-resolving handle to the producer-method-owned bean.
    @Inject
    private Instance<ProducerMethodInjectionPointOwner> ipOwnerInstance;

    /**
     * @return the injected {@link Instance} wrapper (never resolved here).
     */
    public Instance<ProducerMethodInjectionPointOwner> getIpOwnerInstance() {
        return ipOwnerInstance;
    }
}
apache-2.0
magat/thymeleaf
src/main/java/org/thymeleaf/standard/expression/ExpressionSequence.java
2465
/*
 * =============================================================================
 *
 *   Copyright (c) 2011-2014, The THYMELEAF team (http://www.thymeleaf.org)
 *
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 *
 * =============================================================================
 */
package org.thymeleaf.standard.expression;

import java.io.Serializable;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.thymeleaf.util.Validate;

/**
 * An immutable, iterable sequence of standard expressions. The backing list is
 * wrapped unmodifiable at construction time, so instances are safe to share.
 *
 * @author Daniel Fern&aacute;ndez
 *
 * @since 1.1
 *
 */
public final class ExpressionSequence implements Iterable<IStandardExpression>, Serializable {

    private static final long serialVersionUID = -6069208208568731809L;

    // Unmodifiable view over the expressions supplied at construction.
    private final List<IStandardExpression> expressions;

    /**
     * Creates a sequence from the given expressions.
     *
     * @param expressions the expressions; must be non-null and contain no nulls
     */
    public ExpressionSequence(final List<? extends IStandardExpression> expressions) {
        super();
        Validate.notNull(expressions, "Expression list cannot be null");
        Validate.containsNoNulls(expressions, "Expression list cannot contain any nulls");
        this.expressions = Collections.unmodifiableList(expressions);
    }

    /**
     * @return the (unmodifiable) list of expressions in this sequence.
     */
    public List<IStandardExpression> getExpressions() {
        return this.expressions;
    }

    /**
     * @return the number of expressions in this sequence.
     */
    public int size() {
        return this.expressions.size();
    }

    public Iterator<IStandardExpression> iterator() {
        return this.expressions.iterator();
    }

    /**
     * Builds the canonical comma-separated textual form of this sequence.
     *
     * @return the expressions joined by {@code ','} (empty string when empty).
     */
    public String getStringRepresentation() {
        final StringBuilder representation = new StringBuilder();
        boolean first = true;
        for (final IStandardExpression expression : this.expressions) {
            if (!first) {
                representation.append(',');
            }
            representation.append(expression);
            first = false;
        }
        return representation.toString();
    }

    @Override
    public String toString() {
        return getStringRepresentation();
    }
}
apache-2.0
lburgazzoli/spring-boot
spring-boot-project/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/trace/http/InMemoryHttpTraceRepository.java
2019
/*
 * Copyright 2012-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.actuate.trace.http;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

/**
 * In-memory implementation of {@link HttpTraceRepository}.
 * <p>
 * Keeps at most {@code capacity} traces; the oldest trace is evicted when the
 * capacity is exceeded. When {@code reverse} is {@code true} (the default) the
 * newest trace is at index 0, so the oldest lives at the end of the list.
 * All access is synchronized on the backing list.
 *
 * @author Dave Syer
 * @author Olivier Bourgain
 * @since 2.0.0
 */
public class InMemoryHttpTraceRepository implements HttpTraceRepository {

	private int capacity = 100;

	private boolean reverse = true;

	private final List<HttpTrace> traces = new LinkedList<>();

	/**
	 * Flag to say that the repository lists traces in reverse order.
	 * @param reverse flag value (default true)
	 */
	public void setReverse(boolean reverse) {
		synchronized (this.traces) {
			this.reverse = reverse;
		}
	}

	/**
	 * Set the capacity of the in-memory repository.
	 * @param capacity the capacity
	 */
	public void setCapacity(int capacity) {
		synchronized (this.traces) {
			this.capacity = capacity;
		}
	}

	@Override
	public List<HttpTrace> findAll() {
		synchronized (this.traces) {
			// Snapshot copy so callers never observe concurrent mutation.
			return Collections.unmodifiableList(new ArrayList<>(this.traces));
		}
	}

	@Override
	public void add(HttpTrace trace) {
		synchronized (this.traces) {
			while (this.traces.size() >= this.capacity) {
				// Evict the OLDEST trace: index size()-1 in reverse mode (newest
				// first), index 0 otherwise. Using capacity-1 here was wrong when
				// setCapacity() had shrunk the capacity below the current size,
				// because it removed a mid-list (newer) trace instead.
				this.traces.remove(this.reverse ? this.traces.size() - 1 : 0);
			}
			if (this.reverse) {
				this.traces.add(0, trace);
			}
			else {
				this.traces.add(trace);
			}
		}
	}

}
apache-2.0
alessandrogurgel/pedefacil
drodrolib/src/main/java/org/michenux/drodrolib/network/volley/GsonRequest.java
2302
package org.michenux.drodrolib.network.volley;

import android.location.Location;

import com.android.volley.AuthFailureError;
import com.android.volley.NetworkResponse;
import com.android.volley.ParseError;
import com.android.volley.Request;
import com.android.volley.Response;
import com.android.volley.Response.ErrorListener;
import com.android.volley.Response.Listener;
import com.android.volley.toolbox.HttpHeaderParser;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonSyntaxException;

import org.michenux.drodrolib.network.json.LocationDeserializer;
import org.michenux.drodrolib.network.json.TimestampDeserializer;

import java.io.UnsupportedEncodingException;
import java.sql.Timestamp;
import java.util.Map;

/**
 * Volley {@link Request} that deserializes the HTTP response body into an
 * object of type {@code T} using Gson, with custom deserializers registered
 * for {@link Timestamp} and {@link Location}.
 *
 * @param <T> type the JSON response is mapped to
 */
public class GsonRequest<T> extends Request<T> {

    // Gson instance configured once per request with the custom type adapters.
    private final Gson gson;
    // Target class the response JSON is deserialized into.
    private final Class<T> clazz;
    // Optional extra request headers; may be null (falls back to defaults).
    private final Map<String, String> headers;
    // Success callback invoked with the parsed object.
    private final Listener<T> listener;

    /**
     * @param method        HTTP method (one of the {@code Request.Method} constants)
     * @param url           request URL
     * @param clazz         class to deserialize the response into
     * @param headers       extra headers, or null to use the defaults
     * @param listener      success callback
     * @param errorListener error callback
     */
    public GsonRequest(int method, String url, Class<T> clazz, Map<String, String> headers,
            Listener<T> listener, ErrorListener errorListener) {
        super(method, url, errorListener);
        GsonBuilder gsonBuilder = new GsonBuilder();
        gsonBuilder.registerTypeAdapter(Timestamp.class, new TimestampDeserializer());
        gsonBuilder.registerTypeAdapter(Location.class, new LocationDeserializer());
        this.gson = gsonBuilder.create();
        this.clazz = clazz;
        this.headers = headers;
        this.listener = listener;
    }

    @Override
    public Map<String, String> getHeaders() throws AuthFailureError {
        // Use the caller-supplied headers when present, else Volley's defaults.
        return headers != null ? headers : super.getHeaders();
    }

    @Override
    protected void deliverResponse(T response) {
        listener.onResponse(response);
    }

    @Override
    protected Response<T> parseNetworkResponse(NetworkResponse response) {
        try {
            // Decode the body using the charset advertised in the response headers.
            String json = new String(
                    response.data, HttpHeaderParser.parseCharset(response.headers));
            return Response.success(
                    gson.fromJson(json, clazz), HttpHeaderParser.parseCacheHeaders(response));
        } catch (UnsupportedEncodingException | JsonSyntaxException e) {
            // Both decode and JSON-mapping failures surface as Volley parse errors.
            return Response.error(new ParseError(e));
        }
    }
}
apache-2.0
kisskys/incubator-asterixdb
asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/json/clean/ARecordPrinter.java
3650
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.asterix.om.pointables.printer.json.clean;

import java.io.IOException;
import java.io.PrintStream;
import java.util.List;

import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.om.pointables.ARecordVisitablePointable;
import org.apache.asterix.om.pointables.base.IVisitablePointable;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.EnumDeserializer;
import org.apache.hyracks.algebricks.common.utils.Pair;

/**
 * This class is to print the content of a record. It is ONLY visible to
 * APrintVisitor.
 */
class ARecordPrinter {
    // Fixed JSON punctuation; declared final so the shared constants cannot be
    // reassigned (they were previously mutable static fields).
    private static final String LEFT_PAREN = "{ ";
    private static final String RIGHT_PAREN = " }";
    private static final String COMMA = ", ";
    private static final String COLON = ": ";

    // Reusable visitor argument pairs; `first` is (re)pointed at the current
    // output stream on every printRecord call. Field names always print as STRING.
    private final Pair<PrintStream, ATypeTag> nameVisitorArg = new Pair<PrintStream, ATypeTag>(null, ATypeTag.STRING);
    private final Pair<PrintStream, ATypeTag> itemVisitorArg = new Pair<PrintStream, ATypeTag>(null, null);

    public ARecordPrinter() {
    }

    /**
     * Prints the given record to {@code ps} as clean JSON, delegating the
     * printing of each field name and value to {@code visitor}.
     *
     * @param recordAccessor the record to print
     * @param ps             destination stream
     * @param visitor        visitor used for nested field printing
     * @throws IOException      on stream failure
     * @throws AsterixException on malformed record data
     */
    public void printRecord(ARecordVisitablePointable recordAccessor, PrintStream ps, APrintVisitor visitor)
            throws IOException, AsterixException {
        List<IVisitablePointable> fieldNames = recordAccessor.getFieldNames();
        List<IVisitablePointable> fieldTags = recordAccessor.getFieldTypeTags();
        List<IVisitablePointable> fieldValues = recordAccessor.getFieldValues();

        nameVisitorArg.first = ps;
        itemVisitorArg.first = ps;

        // print the beginning part
        ps.print(LEFT_PAREN);

        // print field 0 to n-2, each followed by a comma
        for (int i = 0; i < fieldNames.size() - 1; i++) {
            printField(ps, visitor, fieldNames, fieldTags, fieldValues, i);
            // print the comma
            ps.print(COMMA);
        }

        // print field n-1 (no trailing comma); guarded for empty records
        if (fieldValues.size() > 0) {
            printField(ps, visitor, fieldNames, fieldTags, fieldValues, fieldValues.size() - 1);
        }

        // print the end part
        ps.print(RIGHT_PAREN);
    }

    // Prints a single "name: value" pair at index i.
    private void printField(PrintStream ps, APrintVisitor visitor, List<IVisitablePointable> fieldNames,
            List<IVisitablePointable> fieldTags, List<IVisitablePointable> fieldValues, int i) throws AsterixException {
        IVisitablePointable itemTypeTag = fieldTags.get(i);
        IVisitablePointable item = fieldValues.get(i);
        ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(itemTypeTag.getByteArray()[itemTypeTag
                .getStartOffset()]);
        // A value of length <= 1 carries only a tag byte, i.e. it is NULL.
        itemVisitorArg.second = item.getLength() <= 1 ? ATypeTag.NULL : typeTag;
        // print field name
        fieldNames.get(i).accept(visitor, nameVisitorArg);
        ps.print(COLON);
        // print field value
        item.accept(visitor, itemVisitorArg);
    }
}
apache-2.0
apache/incubator-sentry
sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestUpdateableAuthzPaths.java
8405
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sentry.hdfs;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;

import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.apache.sentry.hdfs.service.thrift.TPathChanges;

import static org.junit.Assert.assertTrue;

import org.junit.Test;

import com.google.common.collect.Lists;

// Unit tests for UpdateableAuthzPaths: full snapshot updates, partial
// add/delete path updates, and default-db path resolution.
public class TestUpdateableAuthzPaths {

  // A full update replaces the entire paths image: the update built from one
  // UpdateableAuthzPaths dump must reproduce its matches, and a second full
  // update must wipe all state from the first.
  @Test
  public void testFullUpdate() {
    HMSPaths hmsPaths = createBaseHMSPaths(1, 1);
    assertTrue(hmsPaths.findAuthzObjectExactMatches(new String[]{"db1"}).contains("db1"));
    assertTrue(hmsPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11"}).contains("db1.tbl11"));
    assertTrue(hmsPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11", "part111"}).contains("db1.tbl11"));
    assertTrue(hmsPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11", "part112"}).contains("db1.tbl11"));

    UpdateableAuthzPaths authzPaths = new UpdateableAuthzPaths(hmsPaths);
    PathsUpdate update = new PathsUpdate(1, true);
    update.toThrift().setPathsDump(authzPaths.getPathsDump().createPathsDump());

    UpdateableAuthzPaths authzPaths2 = new UpdateableAuthzPaths(new String[] {"/"});
    // updateFull returns a NEW instance rather than mutating in place.
    UpdateableAuthzPaths pre = authzPaths2.updateFull(update);
    assertFalse(pre == authzPaths2);
    authzPaths2 = pre;

    assertTrue(authzPaths2.findAuthzObjectExactMatches(new String[]{"db1"}).contains("db1"));
    assertTrue(authzPaths2.findAuthzObjectExactMatches(new String[]{"db1", "tbl11"}).contains("db1.tbl11"));
    assertTrue(authzPaths2.findAuthzObjectExactMatches(new String[]{"db1", "tbl11", "part111"}).contains("db1.tbl11"));
    assertTrue(authzPaths2.findAuthzObjectExactMatches(new String[]{"db1", "tbl11", "part112"}).contains("db1.tbl11"));

    // Ensure Full Update wipes old stuff
    UpdateableAuthzPaths authzPaths3 = new UpdateableAuthzPaths(createBaseHMSPaths(2, 1));
    update = new PathsUpdate(2, true);
    update.toThrift().setPathsDump(authzPaths3.getPathsDump().createPathsDump());
    pre = authzPaths2.updateFull(update);
    assertFalse(pre == authzPaths2);
    authzPaths2 = pre;

    // db1 entries must be gone after the second full snapshot.
    assertNull(authzPaths2.findAuthzObjectExactMatches(new String[]{"db1"}));
    assertNull(authzPaths2.findAuthzObjectExactMatches(new String[]{"db1", "tbl11"}));

    assertTrue(authzPaths2.findAuthzObjectExactMatches(new String[]{"db2"}).contains("db2"));
    assertTrue(authzPaths2.findAuthzObjectExactMatches(new String[]{"db2", "tbl21"}).contains("db2.tbl21"));
    assertTrue(authzPaths2.findAuthzObjectExactMatches(new String[]{"db2", "tbl21", "part211"}).contains("db2.tbl21"));
    assertTrue(authzPaths2.findAuthzObjectExactMatches(new String[]{"db2", "tbl21", "part212"}).contains("db2.tbl21"));
  }

  // Partial updates: adding table/partition paths, then renaming a table via
  // paired add+delete, must leave unrelated entries untouched.
  @Test
  public void testPartialUpdateAddPath() {
    HMSPaths hmsPaths = createBaseHMSPaths(1, 1);
    UpdateableAuthzPaths authzPaths = new UpdateableAuthzPaths(hmsPaths);
    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    // Create table
    PathsUpdate update = new PathsUpdate(2, false);
    TPathChanges pathChange = update.newPathChange("db1.tbl12");
    pathChange.addToAddPaths(PathsUpdate.parsePath("hdfs:///db1/tbl12"));
    authzPaths.updatePartial(Lists.newArrayList(update), lock);

    // Add partition
    update = new PathsUpdate(3, false);
    pathChange = update.newPathChange("db1.tbl12");
    pathChange.addToAddPaths(PathsUpdate.parsePath("hdfs:///db1/tbl12/part121"));
    authzPaths.updatePartial(Lists.newArrayList(update), lock);

    // Ensure no change in existing Paths
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1"}).contains("db1"));
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11"}).contains("db1.tbl11"));
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11", "part111"}).contains("db1.tbl11"));
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11", "part112"}).contains("db1.tbl11"));

    // Verify new Paths
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl12"}).contains("db1.tbl12"));
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl12", "part121"}).contains("db1.tbl12"));

    // Rename table (modeled as add of the new path + delete of the old one)
    update = new PathsUpdate(4, false);
    update.newPathChange("db1.xtbl11").addToAddPaths(PathsUpdate.parsePath("hdfs:///db1/xtbl11"));
    update.newPathChange("db1.tbl11").addToDelPaths(PathsUpdate.parsePath("hdfs:///db1/tbl11"));
    authzPaths.updatePartial(Lists.newArrayList(update), lock);

    // Verify name change
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1"}).contains("db1"));
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "xtbl11"}).contains("db1.xtbl11"));
    // Explicit set location has to be done on the partition else it will be associated to
    // the old location
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11", "part111"}).contains("db1.xtbl11"));
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11", "part112"}).contains("db1.xtbl11"));

    // Verify other tables are not touched
    assertNull(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "xtbl12"}));
    assertNull(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "xtbl12", "part121"}));
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl12"}).contains("db1.tbl12"));
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl12", "part121"}).contains("db1.tbl12"));
  }

  // Dropping a partition via a partial delete removes only that path.
  @Test
  public void testPartialUpdateDelPath() {
    HMSPaths hmsPaths = createBaseHMSPaths(1, 1);
    UpdateableAuthzPaths authzPaths = new UpdateableAuthzPaths(hmsPaths);
    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11"}).contains("db1.tbl11"));
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11", "part111"}).contains("db1.tbl11"));

    // Drop partition
    PathsUpdate update = new PathsUpdate(2, false);
    TPathChanges pathChange = update.newPathChange("db1.tbl11");
    pathChange.addToDelPaths(PathsUpdate.parsePath("hdfs:///db1/tbl11/part111"));
    authzPaths.updatePartial(Lists.newArrayList(update), lock);

    // Verify Paths deleted
    assertNull(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11", "part111"}));

    // Verify rest ok
    assertTrue(authzPaths.findAuthzObjectExactMatches(new String[]{"db1", "tbl11", "part112"}).contains("db1.tbl11"));
  }

  // The "default" database maps to the warehouse root itself.
  @Test
  public void testDefaultDbPath() {
    HMSPaths hmsPaths = new HMSPaths(new String[] {"/user/hive/warehouse"});
    hmsPaths._addAuthzObject("default", Lists.newArrayList("/user/hive/warehouse"));
    assertTrue(hmsPaths.findAuthzObject(new String[]{"user", "hive", "warehouse"}).contains("default"));
  }

  // Builds an HMSPaths rooted at "/" containing one db ("db<N>"), one table
  // ("db<N>.tbl<N><M>") and two partitions ("part<N><M>1", "part<N><M>2").
  private HMSPaths createBaseHMSPaths(int dbNum, int tblNum) {
    String db = "db" + dbNum;
    String tbl = "tbl" + dbNum + "" + tblNum;
    String fullTbl = db + "." + tbl;
    String dbPath = "/" + db;
    String tblPath = "/" + db + "/" + tbl;
    String partPath = tblPath + "/part" + dbNum + "" + tblNum;
    HMSPaths hmsPaths = new HMSPaths(new String[] {"/"});
    hmsPaths._addAuthzObject(db, Lists.newArrayList(dbPath));
    hmsPaths._addAuthzObject(fullTbl, Lists.newArrayList(tblPath));
    hmsPaths._addPathsToAuthzObject(fullTbl, Lists.newArrayList(
        partPath + "1", partPath + "2"
    ));
    return hmsPaths;
  }
}
apache-2.0
xdujiang/Nimingban
app/src/main/java/com/hippo/widget/SnackbarManager.java
6317
/*
 * Copyright 2015 Hippo Seven
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hippo.widget;

import android.os.Handler;
import android.os.Looper;
import android.os.Message;

import java.lang.ref.WeakReference;

// Process-wide coordinator that ensures at most one snackbar is shown at a
// time. Holds the currently-showing record plus at most one queued record;
// timeouts are driven by a main-looper Handler, with each record used as the
// message token so its callbacks can be removed individually.
public class SnackbarManager {

    private static final int MSG_TIMEOUT = 0;

    // Auto-dismiss durations (ms) for Snackbar.LENGTH_SHORT / LENGTH_LONG.
    private static final int SHORT_DURATION_MS = 1500;
    private static final int LONG_DURATION_MS = 2750;

    private static SnackbarManager sSnackbarManager;

    // Guards all record state below.
    private final Object mLock;
    private final Handler mHandler;

    // The snackbar currently on screen, and the single pending one (if any).
    private SnackbarRecord mCurrentSnackbar;
    private SnackbarRecord mNextSnackbar;

    // Lazy singleton accessor. NOTE(review): not synchronized — appears to rely
    // on being called from the main thread only; confirm before other use.
    static SnackbarManager getInstance() {
        if (sSnackbarManager == null) {
            sSnackbarManager = new SnackbarManager();
        }
        return sSnackbarManager;
    }

    private SnackbarManager() {
        mLock = new Object();
        mHandler = new Handler(Looper.getMainLooper(), new Handler.Callback() {
            @Override
            public boolean handleMessage(Message message) {
                switch (message.what) {
                    case MSG_TIMEOUT:
                        handleTimeout((SnackbarManager.SnackbarRecord)message.obj);
                        return true;
                }
                return false;
            }
        });
    }

    // Requests that the snackbar identified by `callback` be shown for
    // `duration`. If it is already current, just restart its timeout; otherwise
    // queue it and dismiss the current one (DISMISS_EVENT_CONSECUTIVE).
    public void show(int duration, Callback callback) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                mCurrentSnackbar.duration = duration;
                // Restart the timeout for the already-showing snackbar.
                mHandler.removeCallbacksAndMessages(mCurrentSnackbar);
                scheduleTimeoutLocked(mCurrentSnackbar);
                return;
            }

            if (isNextSnackbar(callback)) {
                mNextSnackbar.duration = duration;
            } else {
                mNextSnackbar = new SnackbarRecord(duration, callback);
            }

            if ((mCurrentSnackbar != null) &&
                    (cancelSnackbarLocked(mCurrentSnackbar, Snackbar.Callback.DISMISS_EVENT_CONSECUTIVE))) {
                // The current snackbar will call onDismissed(), which shows the next.
                return;
            }
            mCurrentSnackbar = null;
            showNextSnackbarLocked();
        }
    }

    // Dismisses the identified snackbar (current or queued) with `event`.
    public void dismiss(Callback callback, int event) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                cancelSnackbarLocked(mCurrentSnackbar, event);
            } else if (isNextSnackbar(callback)) {
                cancelSnackbarLocked(mNextSnackbar, event);
            }
        }
    }

    // Notification from a snackbar that it has fully disappeared; promotes the
    // queued record, if any.
    public void onDismissed(Callback callback) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                mCurrentSnackbar = null;
                if (mNextSnackbar != null) {
                    showNextSnackbarLocked();
                }
            }
        }
    }

    // Notification that the snackbar is now visible; starts its timeout.
    public void onShown(Callback callback) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                scheduleTimeoutLocked(mCurrentSnackbar);
            }
        }
    }

    // Pauses the auto-dismiss timer (e.g. while the user is touching it).
    public void cancelTimeout(Callback callback) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                mHandler.removeCallbacksAndMessages(mCurrentSnackbar);
            }
        }
    }

    // Resumes the auto-dismiss timer after cancelTimeout().
    public void restoreTimeout(Callback callback) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                scheduleTimeoutLocked(mCurrentSnackbar);
            }
        }
    }

    // Pairs a weakly-referenced callback with its requested duration; the weak
    // reference avoids leaking the snackbar's view hierarchy.
    private static class SnackbarRecord {
        private final WeakReference<Callback> callback;
        private int duration;

        SnackbarRecord(int duration, SnackbarManager.Callback callback) {
            this.callback = new WeakReference<>(callback);
            this.duration = duration;
        }

        boolean isSnackbar(SnackbarManager.Callback callback) {
            return (callback != null) && (this.callback.get() == callback);
        }
    }

    // Promotes mNextSnackbar to current and asks it to show itself. If its
    // callback has been GC'd, clears the slot instead.
    private void showNextSnackbarLocked() {
        if (mNextSnackbar != null) {
            mCurrentSnackbar = mNextSnackbar;
            mNextSnackbar = null;
            Callback callback = mCurrentSnackbar.callback.get();
            if (callback != null) {
                callback.show();
            } else {
                mCurrentSnackbar = null;
            }
        }
    }

    // Asks the record's snackbar to dismiss itself; returns false when the
    // callback has already been collected.
    private boolean cancelSnackbarLocked(SnackbarRecord record, int event) {
        Callback callback = record.callback.get();
        if (callback != null) {
            callback.dismiss(event);
            return true;
        }
        return false;
    }

    private boolean isCurrentSnackbar(Callback callback) {
        return (mCurrentSnackbar != null) && (mCurrentSnackbar.isSnackbar(callback));
    }

    private boolean isNextSnackbar(Callback callback) {
        return (mNextSnackbar != null) && (mNextSnackbar.isSnackbar(callback));
    }

    // Schedules (or re-schedules) the timeout message for record `r`. Custom
    // durations greater than LENGTH_LONG are used verbatim; LENGTH_INDEFINITE
    // never times out.
    private void scheduleTimeoutLocked(SnackbarRecord r) {
        if (r.duration == Snackbar.LENGTH_INDEFINITE) {
            return;
        }

        int durationMs;
        if (r.duration > Snackbar.LENGTH_LONG) {
            durationMs = r.duration;
        } else if (r.duration == Snackbar.LENGTH_SHORT) {
            durationMs = SHORT_DURATION_MS;
        } else {
            durationMs = LONG_DURATION_MS;
        }
        mHandler.removeCallbacksAndMessages(r);
        mHandler.sendMessageDelayed(Message.obtain(mHandler, MSG_TIMEOUT, r), durationMs);
    }

    // Fired on the main looper when a timeout message arrives; dismisses the
    // record if it is still relevant (current or queued).
    private void handleTimeout(SnackbarRecord record) {
        synchronized (mLock) {
            if ((mCurrentSnackbar == record) || (mNextSnackbar == record)) {
                cancelSnackbarLocked(record, Snackbar.Callback.DISMISS_EVENT_TIMEOUT);
            }
        }
    }

    // Implemented by the snackbar view-side to actually show/hide itself.
    interface Callback {
        void show();
        void dismiss(int paramInt);
    }
}
apache-2.0
pacozaa/BoofCV
main/feature/src/boofcv/struct/feature/NccFeature.java
1575
/*
 * Copyright (c) 2011-2013, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package boofcv.struct.feature;

/**
 * Description for normalized cross correlation (NCC). The descriptor's value
 * in a NCC feature is the pixel intensity value minus the mean pixel intensity value.
 * <p>
 * value[i] = I(x,y) - mean
 * </p>
 *
 * @author Peter Abeles
 */
public class NccFeature extends TupleDesc_F64 {

	/** Mean pixel intensity.  Can be used to reconstruct the original values of the template. */
	public double mean;
	/** Standard deviation of pixel intensity. */
	public double sigma;

	/**
	 * @param numFeatures number of elements in the descriptor
	 */
	public NccFeature(int numFeatures) {
		super(numFeatures);
	}

	// No-arg constructor for serialization / internal use only.
	protected NccFeature() {
	}

	@Override
	public NccFeature copy() {
		// Allocate a descriptor of the same length, then copy values and statistics.
		NccFeature ret = new NccFeature( value.length );
		ret.setTo(this);
		return ret;
	}

	@Override
	public void setTo(TupleDesc_F64 source) {
		super.setTo(source);
		// Also copy the NCC-specific statistics (mean and sigma).
		NccFeature ncc = (NccFeature)source;
		this.mean = ncc.mean;
		this.sigma = ncc.sigma;
	}
}
apache-2.0
alien4cloud/alien4cloud
alien4cloud-tosca/src/main/java/alien4cloud/tosca/parser/impl/advanced/RelationshipTemplateParser.java
4234
package alien4cloud.tosca.parser.impl.advanced;

import java.util.Map;

import javax.annotation.Resource;

import org.alien4cloud.tosca.model.definitions.AbstractPropertyValue;
import org.alien4cloud.tosca.model.definitions.Interface;
import org.alien4cloud.tosca.model.templates.RelationshipTemplate;
import org.springframework.stereotype.Component;
import org.yaml.snakeyaml.nodes.MappingNode;
import org.yaml.snakeyaml.nodes.Node;
import org.yaml.snakeyaml.nodes.NodeTuple;
import org.yaml.snakeyaml.nodes.ScalarNode;

import alien4cloud.tosca.parser.INodeParser;
import alien4cloud.tosca.parser.ParserUtils;
import alien4cloud.tosca.parser.ParsingContextExecution;
import alien4cloud.tosca.parser.ParsingError;
import alien4cloud.tosca.parser.ParsingErrorLevel;
import alien4cloud.tosca.parser.impl.ErrorCode;
import alien4cloud.tosca.parser.impl.base.BaseParserFactory;
import alien4cloud.tosca.parser.impl.base.MapParser;
import alien4cloud.tosca.parser.impl.base.ScalarParser;

/**
 * Parses a TOSCA requirement assignment into a {@link RelationshipTemplate}.
 * <p>
 * Two notations are supported: the short scalar notation ({@code host: compute}) where the
 * scalar is the target node, and the extended mapping notation with the keys
 * {@code node}, {@code capability}, {@code relationship}, {@code properties} and
 * {@code interfaces}.
 */
@Deprecated
@Component
public class RelationshipTemplateParser implements INodeParser<RelationshipTemplate> {
    @Resource
    private ScalarParser scalarParser;
    @Resource
    private BaseParserFactory baseParserFactory;

    /**
     * Parse a single requirement assignment node.
     *
     * @param node the YAML node; must be a single-entry mapping whose key is the requirement name.
     * @param context the parsing context used to resolve sub-parsers and collect errors.
     * @return the parsed relationship template, or {@code null} when the node is malformed
     *         (a type error is recorded in the context in that case).
     */
    @Override
    public RelationshipTemplate parse(Node node, ParsingContextExecution context) {
        // A requirement assignment must be a single-entry mapping: { requirementName: <assignment> }.
        if (!(node instanceof MappingNode) || ((MappingNode) node).getValue().size() != 1) {
            ParserUtils.addTypeError(node, context.getParsingErrors(), "Requirement assignment");
            // FIX: abort on malformed input. The previous code fell through and performed the cast
            // and get(0) anyway, throwing ClassCastException / IndexOutOfBoundsException instead of
            // surfacing the parsing error recorded above.
            return null;
        }
        MappingNode assignmentNode = (MappingNode) node;

        RelationshipTemplate relationshipTemplate = new RelationshipTemplate();
        // The single key of the mapping is the requirement name.
        relationshipTemplate.setRequirementName(scalarParser.parse(assignmentNode.getValue().get(0).getKeyNode(), context));

        // Now parse the content of the relationship assignment.
        node = assignmentNode.getValue().get(0).getValueNode();
        if (node instanceof ScalarNode) {
            // Short notation (host: compute): the scalar is the target node name.
            relationshipTemplate.setTarget(scalarParser.parse(node, context));
        } else if (node instanceof MappingNode) {
            MappingNode mappingNode = (MappingNode) node;
            for (NodeTuple nodeTuple : mappingNode.getValue()) {
                String key = scalarParser.parse(nodeTuple.getKeyNode(), context);
                switch (key) {
                case "node":
                    relationshipTemplate.setTarget(scalarParser.parse(nodeTuple.getValueNode(), context));
                    break;
                case "capability":
                    relationshipTemplate.setTargetedCapabilityName(scalarParser.parse(nodeTuple.getValueNode(), context));
                    break;
                case "relationship":
                    relationshipTemplate.setType(scalarParser.parse(nodeTuple.getValueNode(), context));
                    break;
                case "properties":
                    // Property values are parsed by the registered node_template_property parser wrapped in a map parser.
                    INodeParser<AbstractPropertyValue> propertyValueParser = context.getRegistry().get("node_template_property");
                    MapParser<AbstractPropertyValue> mapParser = baseParserFactory.getMapParser(propertyValueParser, "node_template_property");
                    relationshipTemplate.setProperties(mapParser.parse(nodeTuple.getValueNode(), context));
                    break;
                case "interfaces":
                    INodeParser<Map<String, Interface>> interfacesParser = context.getRegistry().get("interfaces");
                    relationshipTemplate.setInterfaces(interfacesParser.parse(nodeTuple.getValueNode(), context));
                    break;
                default:
                    // FIX: the warning previously said "implementation artifact" (copy-paste from another
                    // parser); report the actual parsing context.
                    context.getParsingErrors().add(new ParsingError(ParsingErrorLevel.WARNING, ErrorCode.UNKNOWN_ARTIFACT_KEY, null,
                            node.getStartMark(), "Unrecognized key while parsing relationship template", node.getEndMark(), key));
                }
            }
        } else {
            ParserUtils.addTypeError(node, context.getParsingErrors(), "Requirement assignment");
        }
        return relationshipTemplate;
    }
}
apache-2.0
minio/minio-java
api/src/main/java/io/minio/BucketExistsArgs.java
991
/*
 * MinIO Java SDK for Amazon S3 Compatible Cloud Storage, (C) 2020 MinIO, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.minio;

/**
 * Argument class of {@link MinioClient#bucketExists}. Carries only the bucket-level
 * fields inherited from {@link BucketArgs} (bucket name, region); it adds no fields
 * of its own.
 */
public class BucketExistsArgs extends BucketArgs {
  /** Returns a new builder for {@link BucketExistsArgs}. */
  public static Builder builder() {
    return new Builder();
  }

  /**
   * Argument builder of {@link BucketExistsArgs}. All builder behavior (e.g. bucket name
   * validation) is inherited from {@link BucketArgs.Builder}.
   */
  public static final class Builder extends BucketArgs.Builder<Builder, BucketExistsArgs> {}
}
apache-2.0
apache/geronimo
testsuite/webservices-testsuite/jaxws-mtom-tests/mtom-test-war/src/main/java/org/apache/geronimo/echo/Echo.java
3138
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.geronimo.echo;

import java.awt.Image;

import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.ws.RequestWrapper;
import javax.xml.ws.ResponseWrapper;

/**
 * JAX-WS service endpoint interface for the MTOM echo test service.
 * <p>
 * NOTE(review): this looks like wsimport-generated code (request/response wrapper
 * annotations, {@code arg0}-style parameter names) — if so, hand edits here will be
 * lost on regeneration; confirm before modifying the annotations.
 */
@WebService(name = "Echo", targetNamespace = "http://geronimo.apache.org/echo")
@XmlSeeAlso({ ObjectFactory.class })
public interface Echo {

    /**
     * Echoes a string back to the caller.
     *
     * @param arg0 the string to echo
     * @return the echoed string
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "hello", targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.Hello")
    @ResponseWrapper(localName = "helloResponse", targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.HelloResponse")
    public String hello(
        @WebParam(name = "arg0", targetNamespace = "")
        String arg0);

    /**
     * Echoes a byte array back to the caller, optionally using MTOM for the binary payload.
     * Note the wire parameter order is (useMTOM, bytes), not (bytes, useMTOM).
     *
     * @param useMTOM whether the client requests MTOM-optimized transfer
     * @param bytes the payload to echo
     * @return the echoed bytes
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "echoBytes", targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.EchoBytes")
    @ResponseWrapper(localName = "echoBytesResponse", targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.EchoBytesResponse")
    public byte[] echoBytes(
        @WebParam(name = "useMTOM", targetNamespace = "")
        boolean useMTOM,
        @WebParam(name = "bytes", targetNamespace = "")
        byte[] bytes);

    /**
     * Echoes an image back to the caller, optionally using MTOM for the binary payload.
     *
     * @param useMTOM whether the client requests MTOM-optimized transfer
     * @param imageBytes the image to echo (despite the name, typed as {@link Image})
     * @return the echoed image
     */
    @WebMethod
    @WebResult(targetNamespace = "")
    @RequestWrapper(localName = "echoImage", targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.EchoImage")
    @ResponseWrapper(localName = "echoImageResponse", targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.EchoImageResponse")
    public Image echoImage(
        @WebParam(name = "useMTOM", targetNamespace = "")
        boolean useMTOM,
        @WebParam(name = "imageBytes", targetNamespace = "")
        Image imageBytes);
}
apache-2.0
wouterv/orientdb
core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMinor.java
6113
/*
 *
 *  *  Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
 *  *
 *  *  Licensed under the Apache License, Version 2.0 (the "License");
 *  *  you may not use this file except in compliance with the License.
 *  *  You may obtain a copy of the License at
 *  *
 *  *       http://www.apache.org/licenses/LICENSE-2.0
 *  *
 *  *  Unless required by applicable law or agreed to in writing, software
 *  *  distributed under the License is distributed on an "AS IS" BASIS,
 *  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  *  See the License for the specific language governing permissions and
 *  *  limitations under the License.
 *  *
 *  * For more information: http://www.orientechnologies.com
 *
 */
package com.orientechnologies.orient.core.sql.operator;

import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexCursor;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexDefinitionMultiValue;
import com.orientechnologies.orient.core.index.OIndexInternal;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocumentHelper;
import com.orientechnologies.orient.core.serialization.serializer.record.binary.OBinaryField;
import com.orientechnologies.orient.core.serialization.serializer.record.binary.ORecordSerializerBinary;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterCondition;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemField;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemParameter;

import java.util.List;

/**
 * MINOR operator: the SQL "&lt;" (less-than) comparison. Supports index-assisted
 * evaluation (iterating entries below a key) and, when the record serializer allows
 * it, direct comparison of binary fields without deserialization.
 *
 * @author Luca Garulli
 *
 */
public class OQueryOperatorMinor extends OQueryOperatorEqualityNotNulls {

  // Whether the active serializer supports comparing fields in their binary form;
  // captured once at construction from the thread-local database (false if none bound).
  private boolean binaryEvaluate=false;

  public OQueryOperatorMinor() {
    // keyword "<", priority 5, not a logical operator (per superclass constructor contract —
    // TODO confirm the meaning of the last two arguments against OQueryOperator).
    super("<", 5, false);
    ODatabaseDocumentInternal db = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
    if (db != null)
      binaryEvaluate = db.getSerializer().getSupportBinaryEvaluate();
  }

  /**
   * Evaluates {@code iLeft < iRight} after converting the right operand to the left
   * operand's type. Returns false when the conversion fails (null result).
   * NOTE(review): iLeft is dereferenced without a null check — the superclass name
   * (EqualityNotNulls) suggests nulls are filtered before this is called; confirm.
   */
  @Override
  @SuppressWarnings("unchecked")
  protected boolean evaluateExpression(final OIdentifiable iRecord, final OSQLFilterCondition iCondition, final Object iLeft,
      final Object iRight, OCommandContext iContext) {
    final Object right = OType.convert(iRight, iLeft.getClass());
    if (right == null)
      return false;
    return ((Comparable<Object>) iLeft).compareTo(right) < 0;
  }

  /** An index can be used only when both operands are known (non-null). */
  @Override
  public OIndexReuseType getIndexReuseType(final Object iLeft, final Object iRight) {
    if (iRight == null || iLeft == null)
      return OIndexReuseType.NO_INDEX;
    return OIndexReuseType.INDEX_METHOD;
  }

  /**
   * Builds an index cursor returning the entries strictly below the given key(s),
   * or null when the index cannot serve this operator (no equality/range support,
   * or a null search key).
   */
  @Override
  public OIndexCursor executeIndexQuery(OCommandContext iContext, OIndex<?> index, List<Object> keyParams, boolean ascSortOrder) {
    final OIndexDefinition indexDefinition = index.getDefinition();

    final OIndexInternal<?> internalIndex = index.getInternal();
    if (!internalIndex.canBeUsedInEqualityOperators() || !internalIndex.hasRangeQuerySupport())
      return null;

    final OIndexCursor cursor;
    if (indexDefinition.getParamCount() == 1) {
      // Single-field index: iterate all entries strictly below the key (toInclusive=false).
      final Object key;
      if (indexDefinition instanceof OIndexDefinitionMultiValue)
        key = ((OIndexDefinitionMultiValue) indexDefinition).createSingleValue(keyParams.get(0));
      else
        key = indexDefinition.createValue(keyParams);

      if (key == null)
        return null;

      cursor = index.iterateEntriesMinor(key, false, ascSortOrder);
    } else {
      // Composite index, e.g. "field1 = 1 AND field2 < 2": fetch the range whose included
      // left boundary is the smallest composite key containing field1=1, and whose excluded
      // right boundary is the biggest composite key containing field1=1 and field2=2.
      final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;

      final Object keyOne = compositeIndexDefinition.createSingleValue(keyParams.subList(0, keyParams.size() - 1));

      if (keyOne == null)
        return null;

      final Object keyTwo = compositeIndexDefinition.createSingleValue(keyParams);

      if (keyTwo == null)
        return null;

      cursor = index.iterateEntriesBetween(keyOne, true, keyTwo, false, ascSortOrder);
    }

    updateProfiler(iContext, index, keyParams, indexDefinition);
    return cursor;
  }

  /** "<" imposes no lower bound on RIDs. */
  @Override
  public ORID getBeginRidRange(Object iLeft, Object iRight) {
    return null;
  }

  /**
   * When the condition is "@rid < X", X is the (exclusive upper) end of the RID range.
   * Handles both a literal ORID and a bound parameter resolving to an ORID.
   */
  @Override
  public ORID getEndRidRange(final Object iLeft, final Object iRight) {
    if (iLeft instanceof OSQLFilterItemField && ODocumentHelper.ATTRIBUTE_RID.equals(((OSQLFilterItemField) iLeft).getRoot()))
      if (iRight instanceof ORID)
        return (ORID) iRight;
      else {
        if (iRight instanceof OSQLFilterItemParameter && ((OSQLFilterItemParameter) iRight).getValue(null, null, null) instanceof ORID)
          return (ORID) ((OSQLFilterItemParameter) iRight).getValue(null, null, null);
      }

    return null;
  }

  /** Binary-form evaluation: compares serialized fields directly without deserializing. */
  @Override
  public boolean evaluate(final OBinaryField iFirstField, final OBinaryField iSecondField, OCommandContext iContext) {
    return ORecordSerializerBinary.INSTANCE.getCurrentSerializer().getComparator().compare(iFirstField, iSecondField) < 0;
  }

  @Override
  public boolean isSupportingBinaryEvaluate() {
    return binaryEvaluate;
  }
}
apache-2.0
vorburger/mifos-head
application/src/main/java/org/mifos/accounts/loan/business/LoanScheduleEntity.java
26882
/* * Copyright (c) 2005-2010 Grameen Foundation USA * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * See also http://www.apache.org/licenses/LICENSE-2.0.html for an * explanation of the license and how it is applied. */ package org.mifos.accounts.loan.business; import org.mifos.accounts.business.AccountActionDateEntity; import org.mifos.accounts.business.AccountBO; import org.mifos.accounts.business.AccountFeesActionDetailEntity; import org.mifos.accounts.business.AccountPaymentEntity; import org.mifos.accounts.loan.persistance.LoanPersistence; import org.mifos.accounts.loan.schedule.domain.Installment; import org.mifos.accounts.loan.util.helpers.LoanConstants; import org.mifos.accounts.loan.util.helpers.RepaymentScheduleInstallment; import org.mifos.accounts.util.helpers.*; import org.mifos.application.master.business.MifosCurrency; import org.mifos.customers.business.CustomerBO; import org.mifos.customers.personnel.business.PersonnelBO; import org.mifos.framework.util.DateTimeService; import org.mifos.framework.util.helpers.Money; import org.mifos.platform.util.CollectionUtils; import java.util.*; import static org.mifos.framework.util.helpers.NumberUtils.min; public class LoanScheduleEntity extends AccountActionDateEntity { private Money principal; private Money interest; // TODO: Instance variable "penalty" appears to be unused. Verify and // remove. 
private Money penalty; private Money extraInterest; private Money miscFee; private Money miscPenalty; private Money principalPaid; private Money interestPaid; private Money penaltyPaid; private Money extraInterestPaid; private Money miscFeePaid; private Money miscPenaltyPaid; private Set<AccountFeesActionDetailEntity> accountFeesActionDetails = new HashSet<AccountFeesActionDetailEntity>(); private int versionNo; private PaymentAllocation paymentAllocation; protected LoanScheduleEntity() { super(null, null, null, null, null); } public LoanScheduleEntity(AccountBO account, CustomerBO customer, Short installmentId, java.sql.Date actionDate, PaymentStatus paymentStatus, Money principal, Money interest) { super(account, customer, installmentId, actionDate, paymentStatus); this.principal = principal; this.interest = interest; reset(account.getCurrency()); } private void reset(MifosCurrency currency) { this.penalty = new Money(currency); this.extraInterest = new Money(currency); this.miscFee = new Money(currency); this.miscPenalty = new Money(currency); this.principalPaid = new Money(currency); this.interestPaid = new Money(currency); this.penaltyPaid = new Money(currency); this.extraInterestPaid = new Money(currency); this.miscFeePaid = new Money(currency); this.miscPenaltyPaid = new Money(currency); } public Money getInterest() { return interest; } public void setInterest(Money interest) { this.interest = interest; } public Money getInterestPaid() { return interestPaid; } void setInterestPaid(Money interestPaid) { this.interestPaid = interestPaid; } void setPenalty(Money penalty) { this.penalty = penalty; } public Money getPenaltyPaid() { return penaltyPaid; } void setPenaltyPaid(Money penaltyPaid) { this.penaltyPaid = penaltyPaid; } public Money getPrincipal() { return principal; } public void setPrincipal(Money principal) { this.principal = principal; } public Money getPrincipalPaid() { return principalPaid; } void setPrincipalPaid(Money principalPaid) { 
this.principalPaid = principalPaid; } public Money getPrincipalDue() { return principal.subtract(principalPaid); } public Money getInterestDue() { return interest.subtract(interestPaid); } public Money getPenalty() { return penalty; } public Set<AccountFeesActionDetailEntity> getAccountFeesActionDetails() { return accountFeesActionDetails; } public void addAccountFeesAction(AccountFeesActionDetailEntity accountFeesAction) { accountFeesActionDetails.add(accountFeesAction); } public Money getMiscFee() { return miscFee; } void setMiscFee(Money miscFee) { this.miscFee = miscFee; } public Money getMiscFeePaid() { return miscFeePaid; } void setMiscFeePaid(Money miscFeePaid) { this.miscFeePaid = miscFeePaid; } public Money getMiscPenalty() { return miscPenalty; } void setMiscPenalty(Money miscPenalty) { this.miscPenalty = miscPenalty; } public Money getMiscPenaltyPaid() { return miscPenaltyPaid; } public Money getMiscPenaltyDue() { return miscPenalty.subtract(miscPenaltyPaid); } void setMiscPenaltyPaid(Money miscPenaltyPaid) { this.miscPenaltyPaid = miscPenaltyPaid; } public Money getPenaltyDue() { return (penalty.add(miscPenalty)).subtract(penaltyPaid.add(miscPenaltyPaid)); } public Money getTotalDue() { return principal.subtract(principalPaid).add(getEffectiveInterestDue()).add(getPenaltyDue()).add(getMiscFeeDue()); } public Money getTotalDueWithoutPrincipal() { return getInterestDue().add(getPenaltyDue()).add(getMiscFeeDue()); } public Money getTotalPenalty() { return penalty.add(miscPenalty); } public Money getTotalDueWithFees() { return getTotalDue().add(getTotalFeesDue()); } public Money getTotalScheduleAmountWithFees() { return principal.add( interest.add(penalty).add(getTotalScheduledFeeAmountWithMiscFee()).add(miscPenalty)); } public OverDueAmounts getDueAmnts() { OverDueAmounts overDueAmounts = new OverDueAmounts(); overDueAmounts.setFeesOverdue(getTotalFeesDue().add(getMiscFeeDue())); overDueAmounts.setInterestOverdue(getInterestDue()); 
overDueAmounts.setPenaltyOverdue(getPenaltyDue()); overDueAmounts.setPrincipalOverDue(getPrincipalDue()); overDueAmounts.setTotalPrincipalPaid(getPrincipalPaid()); return overDueAmounts; } void makeEarlyRepaymentEnteries(String payFullOrPartial) { if (payFullOrPartial.equals(LoanConstants.PAY_FEES_PENALTY_INTEREST)) { setPrincipalPaid(getPrincipalPaid().add(getPrincipalDue())); setInterestPaid(getInterestPaid().add(getInterestDue())); setPenaltyPaid(getPenaltyPaid().add(getPenaltyDue())); setMiscFeePaid(getMiscFeePaid().add(getMiscFee())); setMiscPenaltyPaid(getMiscPenaltyPaid().add(getMiscPenalty())); makeRepaymentEntries(payFullOrPartial); } else if (payFullOrPartial.equals(LoanConstants.PAY_FEES_PENALTY)) { setPrincipalPaid(getPrincipalPaid().add(getPrincipalDue())); setPenaltyPaid(getPenaltyPaid().add(getPenaltyDue())); setMiscFeePaid(getMiscFeePaid().add(getMiscFee())); setMiscPenaltyPaid(getMiscPenaltyPaid().add(getMiscPenalty())); makeRepaymentEntries(payFullOrPartial); } else { setPrincipalPaid(getPrincipalPaid().add(getPrincipalDue())); makeRepaymentEntries(payFullOrPartial); } } private void makeRepaymentEntries(String payFullOrPartial) { setPaymentStatus(PaymentStatus.PAID); setPaymentDate(new DateTimeService().getCurrentJavaSqlDate()); Set<AccountFeesActionDetailEntity> accountFeesActionDetailSet = this.getAccountFeesActionDetails(); for (AccountFeesActionDetailEntity accountFeesActionDetailEntity : accountFeesActionDetailSet) { ((LoanFeeScheduleEntity) accountFeesActionDetailEntity).makeRepaymentEnteries(payFullOrPartial); } } public void updatePaymentDetailsForAdjustment(LoanTrxnDetailEntity loanReverseTrxn) { CalculatedInterestOnPayment interestOnPayment = loanReverseTrxn.getCalculatedInterestOnPayment(); Money overdueInterestPaid = calculateExtraInterestPaid(interestOnPayment); principalPaid = principalPaid.add(loanReverseTrxn.getPrincipalAmount()); interest = calculateAdjustedInterest(interestOnPayment, overdueInterestPaid, loanReverseTrxn); 
interestPaid = interestPaid.add(loanReverseTrxn.getInterestAmount()).add(overdueInterestPaid); penaltyPaid = penaltyPaid.add(loanReverseTrxn.getPenaltyAmount()); miscPenaltyPaid = miscPenaltyPaid.add(loanReverseTrxn.getMiscPenaltyAmount()); miscFeePaid = miscFeePaid.add(loanReverseTrxn.getMiscFeeAmount()); extraInterestPaid = extraInterestPaid.subtract(overdueInterestPaid); } private Money calculateExtraInterestPaid(CalculatedInterestOnPayment interestOnPayment) { return interestOnPayment == null ? Money.zero(getCurrency()) : interestOnPayment.getExtraInterestPaid(); } private Money calculateAdjustedInterest(CalculatedInterestOnPayment interestOnPayment, Money overdueInterestPaid, LoanTrxnDetailEntity loanReverseTrxn) { if (((LoanBO)account).isDecliningBalanceInterestRecalculation()) { return interestOnPayment.getOriginalInterest().subtract(loanReverseTrxn.getInterestAmount()).subtract(overdueInterestPaid. add(interestOnPayment.getInterestDueTillPaid())); } return interest; } Money waiveFeeCharges() { Money chargeWaived = new Money(getCurrency()); chargeWaived = chargeWaived.add(getMiscFeeDue()); setMiscFee(getMiscFeePaid()); for (AccountFeesActionDetailEntity accountFeesActionDetailEntity : getAccountFeesActionDetails()) { chargeWaived = chargeWaived.add(((LoanFeeScheduleEntity) accountFeesActionDetailEntity).waiveCharges()); } return chargeWaived; } void removeAccountFeesActionDetailEntity(AccountFeesActionDetailEntity accountFeesActionDetailEntity) { accountFeesActionDetails.remove(accountFeesActionDetailEntity); } public Money getMiscFeeDue() { return getMiscFee().subtract(getMiscFeePaid()); } public Money getTotalFeesDue() { Money totalFees = new Money(getCurrency()); for (AccountFeesActionDetailEntity obj : accountFeesActionDetails) { totalFees = totalFees.add(obj.getFeeDue()); } return totalFees; } public Money getTotalFeeAmountPaidWithMiscFee() { Money totalFees = new Money(getCurrency()); for (AccountFeesActionDetailEntity obj : accountFeesActionDetails) { 
totalFees = totalFees.add(obj.getFeeAmountPaid()); } totalFees = totalFees.add(getMiscFeePaid()); return totalFees; } public Money getTotalScheduledFeeAmountWithMiscFee() { Money totalFees = new Money(getCurrency()); for (AccountFeesActionDetailEntity obj : accountFeesActionDetails) { totalFees = totalFees.add(obj.getFeeAmount()); } totalFees = totalFees.add(getMiscFee()); return totalFees; } public Money getTotalFeesDueWithMiscFee() { return miscFee.add(getTotalFeesDue()); } public Money getTotalFees() { Money totalFees = new Money(getCurrency()); for (AccountFeesActionDetailEntity obj : accountFeesActionDetails) { totalFees = totalFees.add(obj.getFeeAmount()); } return totalFees; } public Money getTotalFeesPaid() { Money totalFees = new Money(getCurrency()); for (AccountFeesActionDetailEntity obj : accountFeesActionDetails) { totalFees = totalFees.add(obj.getFeeAmountPaid()); } return totalFees; } public Money getTotalFeeDueWithMiscFeeDue() { return getMiscFeeDue().add(getTotalFeesDue()); } public Money getTotalPaymentDue() { return getTotalDue().add(getTotalFeesDue()); } Money removeFees(Short feeId) { Money feeAmount = null; AccountFeesActionDetailEntity objectToRemove = null; Set<AccountFeesActionDetailEntity> accountFeesActionDetailSet = this.getAccountFeesActionDetails(); for (AccountFeesActionDetailEntity accountFeesActionDetailEntity : accountFeesActionDetailSet) { if (accountFeesActionDetailEntity.getFee().getFeeId().equals(feeId) && (accountFeesActionDetailEntity.getFeeAmountPaid() == null || accountFeesActionDetailEntity .getFeeAmountPaid().isZero())) { objectToRemove = accountFeesActionDetailEntity; feeAmount = objectToRemove.getFeeAmount(); break; } else if (accountFeesActionDetailEntity.getFee().getFeeId().equals(feeId) && accountFeesActionDetailEntity.getFeeAmountPaid() != null && accountFeesActionDetailEntity.getFeeAmountPaid().isGreaterThanZero()) { feeAmount = accountFeesActionDetailEntity.getFeeAmount().subtract( 
accountFeesActionDetailEntity.getFeeAmountPaid()); ((LoanFeeScheduleEntity) accountFeesActionDetailEntity).setFeeAmount(accountFeesActionDetailEntity .getFeeAmountPaid()); break; } } if (objectToRemove != null) { this.removeAccountFeesActionDetailEntity(objectToRemove); } return feeAmount; } public AccountFeesActionDetailEntity getAccountFeesAction(Short feeId) { for (AccountFeesActionDetailEntity accountFeesAction : getAccountFeesActionDetails()) { if (accountFeesAction.getFee().getFeeId().equals(feeId)) { return accountFeesAction; } } return null; } Money waivePenaltyCharges() { Money chargeWaived = new Money(getCurrency()); chargeWaived = chargeWaived.add(getMiscPenaltyDue()); setMiscPenalty(getMiscPenaltyPaid()); return chargeWaived; } void applyMiscCharge(Short chargeType, Money charge) { if (chargeType.equals(Short.valueOf(AccountConstants.MISC_FEES))) { setMiscFee(getMiscFee().add(charge)); } else if (chargeType.equals(Short.valueOf(AccountConstants.MISC_PENALTY))) { setMiscPenalty(getMiscPenalty().add(charge)); } } public boolean isPrincipalZero() { return principal.isZero(); } public boolean isFeeAlreadyAttatched(Short feeId) { for (AccountFeesActionDetailEntity accountFeesActionDetailEntity : this.getAccountFeesActionDetails()) { if (accountFeesActionDetailEntity.getFee().getFeeId().equals(feeId)) { return true; } } return false; } public boolean isPaymentAppliedToAccountFees() { Money feesPaid = new Money(getCurrency(),"0.0"); for (AccountFeesActionDetailEntity accountFeesActionDetail : getAccountFeesActionDetails()) { feesPaid = feesPaid.add(accountFeesActionDetail.getFeeAmountPaid()); } return feesPaid.isNonZero(); } public boolean isPaymentApplied() { return getPrincipalPaid().isNonZero() || getEffectiveInterestPaid().isNonZero() || getMiscFeePaid().isNonZero() || getMiscPenaltyPaid().isNonZero() || isPaymentAppliedToAccountFees(); } public void setVersionNo(int versionNo) { this.versionNo = versionNo; } public int getVersionNo() { return versionNo; } 
public List<AccountFeesActionDetailEntity> getAccountFeesActionDetailsSortedByFeeId() { List<AccountFeesActionDetailEntity> sortedList = new ArrayList<AccountFeesActionDetailEntity>(); sortedList.addAll(this.getAccountFeesActionDetails()); Collections.sort(sortedList); return sortedList; } public RepaymentScheduleInstallment toDto(Locale userLocale) { return new RepaymentScheduleInstallment(this.installmentId, this.actionDate, this.principal, this.interest, this.getTotalFeesDue(), this.miscFee, this.miscPenalty, userLocale); } public boolean isSameAs(AccountActionDateEntity accountActionDateEntity) { return getInstallmentId().equals(accountActionDateEntity.getInstallmentId()); } public Money getExtraInterest() { return extraInterest == null ? Money.zero(getCurrency()) : new Money(getCurrency(), extraInterest.getAmount()); } public void setExtraInterest(Money extraInterest) { this.extraInterest = extraInterest; } public Money getExtraInterestPaid() { return extraInterestPaid == null ? Money.zero(getCurrency()) : new Money(getCurrency(), extraInterestPaid.getAmount()); } public void setExtraInterestPaid(Money extraInterestPaid) { this.extraInterestPaid = extraInterestPaid; } public Money getExtraInterestDue() { return getExtraInterest().subtract(getExtraInterestPaid()); } public Money getEffectiveInterestPaid() { return interestPaid.add(getExtraInterestPaid()); } public Money getEffectiveInterestDue() { return getInterestDue().add(getExtraInterestDue()); } private Money payMiscPenalty(final Money amount) { Money payable = min(amount, getMiscPenaltyDue()); allocateMiscPenalty(payable); return amount.subtract(payable); } private void allocateMiscPenalty(Money payable) { paymentAllocation.allocateForMiscPenalty(payable); miscPenaltyPaid = miscPenaltyPaid.add(payable); } private Money payPenalty(final Money amount) { Money payable = min(amount, (getPenalty().subtract(getPenaltyPaid()))); allocatePenalty(payable); return amount.subtract(payable); } private void 
allocatePenalty(Money payable) { paymentAllocation.allocateForPenalty(payable); penaltyPaid = penaltyPaid.add(payable); } private Money payMiscFees(final Money amount) { Money payable = min(amount, getMiscFeeDue()); allocateMiscFees(payable); return amount.subtract(payable); } private void allocateMiscFees(Money payable) { paymentAllocation.allocateForMiscFees(payable); miscFeePaid = miscFeePaid.add(payable); } private void allocateExtraInterest(Money payable) { paymentAllocation.allocateForExtraInterest(payable); extraInterestPaid = extraInterestPaid.add(payable); } private Money payFees(final Money amount) { Money balance = amount; for (AccountFeesActionDetailEntity accountFeesActionDetailEntity : getAccountFeesActionDetails()) { balance = accountFeesActionDetailEntity.payFee(balance); Integer feeId = accountFeesActionDetailEntity.getAccountFeesActionDetailId(); Money feeAllocated = accountFeesActionDetailEntity.getFeeAllocated(); paymentAllocation.allocateForFee(feeId, feeAllocated); } return balance; } private Money payInterest(final Money amount) { Money payable = min(amount, getInterestDue()); allocateInterest(payable); return amount.subtract(payable); } private void allocateInterest(Money payable) { paymentAllocation.allocateForInterest(payable); interestPaid = interestPaid.add(payable); } private Money payPrincipal(final Money amount) { Money payable = min(amount, getPrincipalDue()); allocatePrincipal(payable); return amount.subtract(payable); } private void allocatePrincipal(Money payable) { paymentAllocation.allocateForPrincipal(payable); principalPaid = principalPaid.add(payable); } public Money payComponents(Money paymentAmount, Date paymentDate) { initPaymentAllocation(paymentAmount.getCurrency()); Money balanceAmount = paymentAmount; balanceAmount = payMiscPenalty(balanceAmount); balanceAmount = payPenalty(balanceAmount); balanceAmount = payMiscFees(balanceAmount); balanceAmount = payFees(balanceAmount); balanceAmount = payInterest(balanceAmount); 
balanceAmount = payPrincipal(balanceAmount); recordPayment(paymentDate); return balanceAmount; } public void payComponents(Installment installment, MifosCurrency currency, Date paymentDate) { initPaymentAllocation(currency); allocatePrincipal(new Money(currency, installment.getCurrentPrincipalPaid())); allocateInterest(new Money(currency, installment.getCurrentInterestPaid())); allocateExtraInterest(new Money(currency, installment.getCurrentExtraInterestPaid())); payFees(new Money(currency, installment.getCurrentFeesPaid())); allocateMiscFees(new Money(currency, installment.getCurrentMiscFeesPaid())); allocatePenalty(new Money(currency, installment.getCurrentPenaltyPaid())); allocateMiscPenalty(new Money(currency, installment.getCurrentMiscPenaltyPaid())); updateInterest(installment, currency); setExtraInterest(new Money(currency, installment.getExtraInterest())); recordPayment(paymentDate); } private void updateInterest(Installment installment, MifosCurrency currency) { if (installment.hasEffectiveInterest()) { setInterest(new Money(currency, installment.getEffectiveInterest().add(interestPaid.getAmount()))); } else { setInterest(new Money(currency, installment.getInterest())); } } private void initPaymentAllocation(MifosCurrency currency) { paymentAllocation = new PaymentAllocation(currency); } public PaymentAllocation getPaymentAllocation() { return paymentAllocation; } void recordForAdjustment() { setPaymentStatus(PaymentStatus.UNPAID); setPaymentDate(null); } void recordPayment(Date paymentDate) { setPaymentDate(new java.sql.Date(paymentDate.getTime())); setPaymentStatus(getTotalDueWithFees().isTinyAmount() ? 
PaymentStatus.PAID : PaymentStatus.UNPAID); } public double getPrincipalAsDouble() { return principal.getAmount().doubleValue(); } public double getInterestAsDouble() { return interest.getAmount().doubleValue(); } public double getPenaltyAsDouble() { return penalty.getAmount().doubleValue(); } public double getMiscFeeAsDouble() { return miscFee.getAmount().doubleValue(); } public double getMiscPenaltyAsDouble() { return miscPenalty.getAmount().doubleValue(); } public double getTotalFeesAsDouble() { return getTotalFees().getAmount().doubleValue(); } public double getPrincipalPaidAsDouble() { return principalPaid.getAmount().doubleValue(); } public double getInterestPaidAsDouble() { return interestPaid.getAmount().doubleValue(); } public double getPenaltyPaidAsDouble() { return penaltyPaid.getAmount().doubleValue(); } public double getMiscFeePaidAsDouble() { return miscFeePaid.getAmount().doubleValue(); } public double getMiscPenaltyPaidAsDouble() { return miscPenaltyPaid.getAmount().doubleValue(); } public double getTotalFeesPaidAsDouble() { return getTotalFeesPaid().getAmount().doubleValue(); } public double getPrincipalDueAsDouble() { return getPrincipalDue().getAmount().doubleValue(); } public double getInterestDueAsDouble() { return getInterestDue().getAmount().doubleValue(); } public double getPenaltyDueAsDouble() { return getPenaltyDue().getAmount().doubleValue(); } public double getMiscFeesDueAsDouble() { return getMiscFeeDue().getAmount().doubleValue(); } public double getMiscPenaltyDueAsDouble() { return getMiscPenaltyDue().getAmount().doubleValue(); } public double getTotalFeesDueAsDouble() { return getTotalFeesDue().getAmount().doubleValue(); } public LoanTrxnDetailEntity updateSummaryAndPerformanceHistory(AccountPaymentEntity accountPayment, PersonnelBO personnel, Date transactionDate) { LoanBO loanBO = (LoanBO) account; LoanPersistence loanPersistence = loanBO.getLoanPersistence(); LoanTrxnDetailEntity loanTrxnDetailEntity = 
recordTransaction(accountPayment, personnel, transactionDate, loanPersistence); loanBO.recordSummaryAndPerfHistory(isPaid(), paymentAllocation); return loanTrxnDetailEntity; } private LoanTrxnDetailEntity recordTransaction(AccountPaymentEntity accountPayment, PersonnelBO personnel, Date transactionDate, LoanPersistence loanPersistence) { // TODO: Avoid passing the persistence instance in the constructor for reference data lookup LoanTrxnDetailEntity loanTrxnDetailEntity = new LoanTrxnDetailEntity(accountPayment, this, personnel, transactionDate, AccountActionTypes.LOAN_REPAYMENT, AccountConstants.PAYMENT_RCVD, loanPersistence); accountPayment.addAccountTrxn(loanTrxnDetailEntity); return loanTrxnDetailEntity; } public Money applyPayment(AccountPaymentEntity accountPaymentEntity, Money balance, PersonnelBO personnel, Date transactionDate) { if (isNotPaid() && balance.isGreaterThanZero()) { balance = payComponents(balance, transactionDate); updateSummaryAndPerformanceHistory(accountPaymentEntity, personnel, transactionDate); } return balance; } boolean hasFees() { return CollectionUtils.isNotEmpty(accountFeesActionDetails); } public void setPaymentAllocation(PaymentAllocation paymentAllocation) { this.paymentAllocation = paymentAllocation; } double getExtraInterestPaidAsDouble() { return getExtraInterestPaid().getAmount().doubleValue(); } }
apache-2.0
kevinearls/camel
components/camel-mail/src/test/java/org/apache/camel/component/mail/MailCollectionHeaderTest.java
2329
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.mail;

import java.util.List;

import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
import org.jvnet.mock_javamail.Mailbox;

/**
 * Verifies that an array set as a message header survives the SMTP -> POP3
 * round-trip and arrives on the consumer side as a {@link List}.
 */
public class MailCollectionHeaderTest extends CamelTestSupport {

    @Test
    public void testMailHeaderWithCollection() throws Exception {
        Mailbox.clearAll();

        // Send an array-valued header; mail transport should carry it through.
        String[] beerNames = new String[] {"Carlsberg", "Heineken"};
        template.sendBodyAndHeader("direct:a", "Hello World", "beers", beerNames);

        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        mock.expectedBodiesReceived("Hello World");
        mock.message(0).header("beers").isNotNull();
        mock.assertIsSatisfied();

        // The header is expected to come back as a List preserving element order.
        Object header = mock.getReceivedExchanges().get(0).getIn().getHeader("beers");
        assertNotNull(header);

        List<?> beers = assertIsInstanceOf(List.class, header);
        assertEquals("Carlsberg", beers.get(0));
        assertEquals("Heineken", beers.get(1));
    }

    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            public void configure() throws Exception {
                // Producer side: deliver to the local mock SMTP mailbox.
                from("direct:a").to("smtp://localhost?username=james@localhost");

                // Consumer side: poll the mailbox quickly and hand off to the mock endpoint.
                from("pop3://localhost?username=james&password=secret&consumer.initialDelay=100&consumer.delay=100")
                        .to("mock:result");
            }
        };
    }
}
apache-2.0
deeplearning4j/deeplearning4j
deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/text/documentiterator/BasicLabelAwareIterator.java
5848
/*******************************************************************************
 * Copyright (c) 2015-2018 Skymind, Inc.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

package org.deeplearning4j.text.documentiterator;

import lombok.NonNull;
import org.deeplearning4j.text.documentiterator.interoperability.DocumentIteratorConverter;
import org.deeplearning4j.text.sentenceiterator.SentenceIterator;
import org.deeplearning4j.text.sentenceiterator.interoperability.SentenceIteratorConverter;
import org.deeplearning4j.text.sentenceiterator.labelaware.LabelAwareSentenceIterator;

import java.util.concurrent.atomic.AtomicLong;

/**
 * Adapter that builds Sentence-Label pairs for ParagraphVectors/Doc2Vec.
 * You supply a SentenceIterator or DocumentIterator via the {@link Builder},
 * and this class wraps it behind the {@link LabelAwareIterator} contract so the
 * resulting structure can be reused by future models.
 *
 * @author raver119@gmail.com
 */
public class BasicLabelAwareIterator implements LabelAwareIterator {
    // counter reserved for generated ("dumb") label numbering
    protected AtomicLong documentPosition = new AtomicLong(0);

    protected LabelsSource generator;

    protected transient LabelAwareIterator backendIterator;

    private BasicLabelAwareIterator() {
    }

    /**
     * Reports whether the underlying iterator can produce another LabelledDocument.
     *
     * @return true if more documents are available
     */
    public boolean hasNextDocument() {
        return backendIterator.hasNextDocument();
    }

    /**
     * Fetches the next LabelledDocument from the underlying iterator.
     *
     * @return the next document
     */
    public LabelledDocument nextDocument() {
        return backendIterator.nextDocument();
    }

    /**
     * Rewinds the underlying iterator to its beginning.
     */
    public void reset() {
        backendIterator.reset();
    }

    /**
     * Exposes the LabelsSource holding every label derived from this iterator.
     *
     * @return the labels source in use
     */
    @Override
    public LabelsSource getLabelsSource() {
        return generator;
    }

    @Override
    public boolean hasNext() {
        return hasNextDocument();
    }

    @Override
    public LabelledDocument next() {
        return nextDocument();
    }

    @Override
    public void shutdown() {
        // no-op: nothing to release here
    }

    @Override
    public void remove() {
        // no-op: removal is not supported by this iterator
    }

    public static class Builder {
        private String labelTemplate = "DOC_";
        private LabelAwareIterator sourceIterator;
        private LabelsSource labelsSource = new LabelsSource(labelTemplate);

        /**
         * This method should stay protected, since it's only viable for testing purposes
         */
        protected Builder() {
        }

        /**
         * Each sentence produced by this iterator is treated as a separate
         * document/paragraph.
         *
         * @param iterator source of sentences
         */
        public Builder(@NonNull SentenceIterator iterator) {
            this.sourceIterator = new SentenceIteratorConverter(iterator, labelsSource);
        }

        /**
         * Each inputStream produced by this iterator is treated as a separate
         * document/paragraph.
         *
         * @param iterator source of documents
         */
        public Builder(@NonNull DocumentIterator iterator) {
            this.sourceIterator = new DocumentIteratorConverter(iterator, labelsSource);
        }

        /**
         * Each sentence produced by this iterator is treated as a separate
         * document/paragraph; its labels are converted into LabelledDocument format.
         *
         * @param iterator label-aware source of sentences
         */
        public Builder(@NonNull LabelAwareSentenceIterator iterator) {
            this.sourceIterator = new SentenceIteratorConverter(iterator, labelsSource);
        }

        /**
         * Each inputStream produced by this iterator is treated as a separate
         * document/paragraph; its labels are converted into LabelledDocument format.
         *
         * @param iterator label-aware source of documents
         */
        public Builder(@NonNull LabelAwareDocumentIterator iterator) {
            this.sourceIterator = new DocumentIteratorConverter(iterator, labelsSource);
        }

        public Builder(@NonNull LabelAwareIterator iterator) {
            // Reuse both the iterator and its own labels source.
            this.sourceIterator = iterator;
            this.labelsSource = iterator.getLabelsSource();
        }

        /**
         * Template used when generating sentence labels. E.g. with template
         * "DOCUMENT_" the documents/paragraphs get labels "DOCUMENT_0" through
         * "DOCUMENT_X", where X is the total number of documents - 1.
         *
         * @param template label prefix template
         * @return this builder, for chaining
         */
        public Builder setLabelTemplate(@NonNull String template) {
            this.labelTemplate = template;
            this.labelsSource.setTemplate(template);
            return this;
        }

        /**
         * TODO: To be implemented
         *
         * @param source replacement labels source
         * @return this builder, for chaining
         */
        public Builder setLabelsSource(@NonNull LabelsSource source) {
            this.labelsSource = source;
            return this;
        }

        public BasicLabelAwareIterator build() {
            BasicLabelAwareIterator result = new BasicLabelAwareIterator();
            result.generator = this.labelsSource;
            result.backendIterator = this.sourceIterator;
            return result;
        }
    }
}
apache-2.0
sungsoo/tez-0.4.0
tez-api/src/test/java/org/apache/tez/dag/api/TestDAGPlan.java
13586
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.tez.dag.api;

import static org.junit.Assert.*;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.tez.dag.api.EdgeProperty.DataMovementType;
import org.apache.tez.dag.api.EdgeProperty.DataSourceType;
import org.apache.tez.dag.api.EdgeProperty.SchedulingType;
import org.apache.tez.dag.api.records.DAGProtos.DAGPlan;
import org.apache.tez.dag.api.records.DAGProtos.EdgePlan;
import org.apache.tez.dag.api.records.DAGProtos.PlanTaskConfiguration;
import org.apache.tez.dag.api.records.DAGProtos.PlanTaskLocationHint;
import org.apache.tez.dag.api.records.DAGProtos.PlanVertexType;
import org.apache.tez.dag.api.records.DAGProtos.VertexPlan;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

// based on TestDAGLocationHint
/**
 * Tests serialization/deserialization of the DAGPlan protobuf and of the
 * user-facing DAG API's conversion into it (payloads, edge managers,
 * vertex ordering, credentials).
 */
public class TestDAGPlan {
  @Rule
  public TemporaryFolder tempFolder = new TemporaryFolder(); //TODO: doesn't seem to be deleting this folder automatically as expected.

  /**
   * Round-trips a minimal DAGPlan through a file and verifies the
   * deserialized proto equals the original.
   */
  @Test(timeout = 5000)
  public void testBasicJobPlanSerde() throws IOException {
    DAGPlan job = DAGPlan.newBuilder()
        .setName("test")
        .addVertex(
            VertexPlan.newBuilder()
                .setName("vertex1")
                .setType(PlanVertexType.NORMAL)
                .addTaskLocationHint(
                    PlanTaskLocationHint.newBuilder().addHost("machineName").addRack("rack1")
                        .build())
                .setTaskConfig(
                    PlanTaskConfiguration.newBuilder()
                        .setNumTasks(2)
                        .setVirtualCores(4)
                        .setMemoryMb(1024)
                        .setJavaOpts("")
                        .setTaskModule("x.y")
                        .build())
                .build())
        .build();

    File file = tempFolder.newFile("jobPlan");
    FileOutputStream outStream = null;
    try {
      outStream = new FileOutputStream(file);
      job.writeTo(outStream);
    } finally {
      if (outStream != null) {
        outStream.close();
      }
    }

    DAGPlan inJob;
    FileInputStream inputStream = null;
    try {
      inputStream = new FileInputStream(file);
      inJob = DAGPlan.newBuilder().mergeFrom(inputStream).build();
    } finally {
      // BUG FIX: this finally previously called outStream.close(), which
      // double-closed the (already closed) output stream and leaked inputStream.
      if (inputStream != null) {
        inputStream.close();
      }
    }

    Assert.assertEquals(job, inJob);
  }

  /**
   * Verifies that a custom EdgeManagerDescriptor (class name + payload)
   * survives conversion to DAGPlan and back.
   */
  @Test(timeout = 5000)
  public void testEdgeManagerSerde() {
    DAG dag = new DAG("testDag");
    ProcessorDescriptor pd1 = new ProcessorDescriptor("processor1")
        .setUserPayload("processor1Bytes".getBytes());
    ProcessorDescriptor pd2 = new ProcessorDescriptor("processor2")
        .setUserPayload("processor2Bytes".getBytes());
    Vertex v1 = new Vertex("v1", pd1, 10, Resource.newInstance(1024, 1));
    Vertex v2 = new Vertex("v2", pd2, 1, Resource.newInstance(1024, 1));
    v1.setJavaOpts("").setTaskEnvironment(new HashMap<String, String>())
        .setTaskLocalResources(new HashMap<String, LocalResource>());
    v2.setJavaOpts("").setTaskEnvironment(new HashMap<String, String>())
        .setTaskLocalResources(new HashMap<String, LocalResource>());
    InputDescriptor inputDescriptor = new InputDescriptor("input")
        .setUserPayload("inputBytes".getBytes());
    OutputDescriptor outputDescriptor = new OutputDescriptor("output")
        .setUserPayload("outputBytes".getBytes());
    Edge edge = new Edge(v1, v2, new EdgeProperty(
        new EdgeManagerDescriptor("emClass").setUserPayload("emPayload".getBytes()),
        DataSourceType.PERSISTED, SchedulingType.SEQUENTIAL, outputDescriptor, inputDescriptor));
    dag.addVertex(v1).addVertex(v2).addEdge(edge);

    DAGPlan dagProto = dag.createDag(new TezConfiguration());

    EdgeProperty edgeProperty = DagTypeConverters.createEdgePropertyMapFromDAGPlan(dagProto
        .getEdgeList().get(0));
    EdgeManagerDescriptor emDesc = edgeProperty.getEdgeManagerDescriptor();
    Assert.assertNotNull(emDesc);
    Assert.assertEquals("emClass", emDesc.getClassName());
    Assert.assertTrue(Arrays.equals("emPayload".getBytes(), emDesc.getUserPayload()));
  }

  /**
   * Verifies that processor/input/output user payloads land in the right
   * places of the generated DAGPlan and convert back correctly.
   */
  @Test(timeout = 5000)
  public void testUserPayloadSerde() {
    DAG dag = new DAG("testDag");
    ProcessorDescriptor pd1 = new ProcessorDescriptor("processor1")
        .setUserPayload("processor1Bytes".getBytes());
    ProcessorDescriptor pd2 = new ProcessorDescriptor("processor2")
        .setUserPayload("processor2Bytes".getBytes());
    Vertex v1 = new Vertex("v1", pd1, 10, Resource.newInstance(1024, 1));
    Vertex v2 = new Vertex("v2", pd2, 1, Resource.newInstance(1024, 1));
    v1.setJavaOpts("").setTaskEnvironment(new HashMap<String, String>())
        .setTaskLocalResources(new HashMap<String, LocalResource>());
    v2.setJavaOpts("").setTaskEnvironment(new HashMap<String, String>())
        .setTaskLocalResources(new HashMap<String, LocalResource>());
    InputDescriptor inputDescriptor = new InputDescriptor("input")
        .setUserPayload("inputBytes".getBytes());
    OutputDescriptor outputDescriptor = new OutputDescriptor("output")
        .setUserPayload("outputBytes".getBytes());
    Edge edge = new Edge(v1, v2, new EdgeProperty(
        DataMovementType.SCATTER_GATHER, DataSourceType.PERSISTED,
        SchedulingType.SEQUENTIAL, outputDescriptor, inputDescriptor));
    dag.addVertex(v1).addVertex(v2).addEdge(edge);

    DAGPlan dagProto = dag.createDag(new TezConfiguration());

    assertEquals(2, dagProto.getVertexCount());
    assertEquals(1, dagProto.getEdgeCount());

    VertexPlan v1Proto = dagProto.getVertex(0);
    VertexPlan v2Proto = dagProto.getVertex(1);
    EdgePlan edgeProto = dagProto.getEdge(0);

    assertEquals("processor1Bytes", new String(v1Proto.getProcessorDescriptor()
        .getUserPayload().toByteArray()));
    assertEquals("processor1", v1Proto.getProcessorDescriptor().getClassName());

    assertEquals("processor2Bytes", new String(v2Proto.getProcessorDescriptor()
        .getUserPayload().toByteArray()));
    assertEquals("processor2", v2Proto.getProcessorDescriptor().getClassName());

    // Note: the edge "source" is the producing vertex's Output, and the edge
    // "destination" is the consuming vertex's Input.
    assertEquals("inputBytes", new String(edgeProto.getEdgeDestination()
        .getUserPayload().toByteArray()));
    assertEquals("input", edgeProto.getEdgeDestination().getClassName());

    assertEquals("outputBytes", new String(edgeProto.getEdgeSource()
        .getUserPayload().toByteArray()));
    assertEquals("output", edgeProto.getEdgeSource().getClassName());

    EdgeProperty edgeProperty = DagTypeConverters
        .createEdgePropertyMapFromDAGPlan(dagProto.getEdgeList().get(0));

    byte[] ib = edgeProperty.getEdgeDestination().getUserPayload();
    assertEquals("inputBytes", new String(ib));
    assertEquals("input", edgeProperty.getEdgeDestination().getClassName());

    byte[] ob = edgeProperty.getEdgeSource().getUserPayload();
    assertEquals("outputBytes", new String(ob));
    assertEquals("output", edgeProperty.getEdgeSource().getClassName());
  }

  /**
   * Verifies that the vertex order in the generated DAGPlan matches the
   * order in which the user added vertices (including a vertex added after
   * the edge).
   */
  @Test(timeout = 5000)
  public void userVertexOrderingIsMaintained() {
    DAG dag = new DAG("testDag");
    ProcessorDescriptor pd1 = new ProcessorDescriptor("processor1")
        .setUserPayload("processor1Bytes".getBytes());
    ProcessorDescriptor pd2 = new ProcessorDescriptor("processor2")
        .setUserPayload("processor2Bytes".getBytes());
    ProcessorDescriptor pd3 = new ProcessorDescriptor("processor3")
        .setUserPayload("processor3Bytes".getBytes());
    Vertex v1 = new Vertex("v1", pd1, 10, Resource.newInstance(1024, 1));
    Vertex v2 = new Vertex("v2", pd2, 1, Resource.newInstance(1024, 1));
    Vertex v3 = new Vertex("v3", pd3, 1, Resource.newInstance(1024, 1));
    v1.setJavaOpts("").setTaskEnvironment(new HashMap<String, String>())
        .setTaskLocalResources(new HashMap<String, LocalResource>());
    v2.setJavaOpts("").setTaskEnvironment(new HashMap<String, String>())
        .setTaskLocalResources(new HashMap<String, LocalResource>());
    v3.setJavaOpts("").setTaskEnvironment(new HashMap<String, String>())
        .setTaskLocalResources(new HashMap<String, LocalResource>());
    InputDescriptor inputDescriptor = new InputDescriptor("input")
        .setUserPayload("inputBytes".getBytes());
    OutputDescriptor outputDescriptor = new OutputDescriptor("output")
        .setUserPayload("outputBytes".getBytes());
    Edge edge = new Edge(v1, v2, new EdgeProperty(
        DataMovementType.SCATTER_GATHER, DataSourceType.PERSISTED,
        SchedulingType.SEQUENTIAL, outputDescriptor, inputDescriptor));
    // v3 is deliberately added after the edge; ordering must still be v1, v2, v3.
    dag.addVertex(v1).addVertex(v2).addEdge(edge).addVertex(v3);

    DAGPlan dagProto = dag.createDag(new TezConfiguration());

    assertEquals(3, dagProto.getVertexCount());
    assertEquals(1, dagProto.getEdgeCount());

    VertexPlan v1Proto = dagProto.getVertex(0);
    VertexPlan v2Proto = dagProto.getVertex(1);
    VertexPlan v3Proto = dagProto.getVertex(2);
    EdgePlan edgeProto = dagProto.getEdge(0);

    assertEquals("processor1Bytes", new String(v1Proto.getProcessorDescriptor()
        .getUserPayload().toByteArray()));
    assertEquals("processor1", v1Proto.getProcessorDescriptor().getClassName());

    assertEquals("processor2Bytes", new String(v2Proto.getProcessorDescriptor()
        .getUserPayload().toByteArray()));
    assertEquals("processor2", v2Proto.getProcessorDescriptor().getClassName());

    assertEquals("processor3Bytes", new String(v3Proto.getProcessorDescriptor()
        .getUserPayload().toByteArray()));
    assertEquals("processor3", v3Proto.getProcessorDescriptor().getClassName());

    assertEquals("inputBytes", new String(edgeProto.getEdgeDestination()
        .getUserPayload().toByteArray()));
    assertEquals("input", edgeProto.getEdgeDestination().getClassName());

    assertEquals("outputBytes", new String(edgeProto.getEdgeSource()
        .getUserPayload().toByteArray()));
    assertEquals("output", edgeProto.getEdgeSource().getClassName());

    EdgeProperty edgeProperty = DagTypeConverters
        .createEdgePropertyMapFromDAGPlan(dagProto.getEdgeList().get(0));

    byte[] ib = edgeProperty.getEdgeDestination().getUserPayload();
    assertEquals("inputBytes", new String(ib));
    assertEquals("input", edgeProperty.getEdgeDestination().getClassName());

    byte[] ob = edgeProperty.getEdgeSource().getUserPayload();
    assertEquals("outputBytes", new String(ob));
    assertEquals("output", edgeProperty.getEdgeSource().getClassName());
  }

  /**
   * Verifies that DAG-level Credentials (two named tokens) are serialized
   * into the DAGPlan's credentials binary and can be recovered.
   */
  @Test(timeout = 5000)
  public void testCredentialsSerde() {
    DAG dag = new DAG("testDag");
    ProcessorDescriptor pd1 = new ProcessorDescriptor("processor1")
        .setUserPayload("processor1Bytes".getBytes());
    ProcessorDescriptor pd2 = new ProcessorDescriptor("processor2")
        .setUserPayload("processor2Bytes".getBytes());
    Vertex v1 = new Vertex("v1", pd1, 10, Resource.newInstance(1024, 1));
    Vertex v2 = new Vertex("v2", pd2, 1, Resource.newInstance(1024, 1));
    v1.setJavaOpts("").setTaskEnvironment(new HashMap<String, String>())
        .setTaskLocalResources(new HashMap<String, LocalResource>());
    v2.setJavaOpts("").setTaskEnvironment(new HashMap<String, String>())
        .setTaskLocalResources(new HashMap<String, LocalResource>());
    InputDescriptor inputDescriptor = new InputDescriptor("input")
        .setUserPayload("inputBytes".getBytes());
    OutputDescriptor outputDescriptor = new OutputDescriptor("output")
        .setUserPayload("outputBytes".getBytes());
    Edge edge = new Edge(v1, v2, new EdgeProperty(
        DataMovementType.SCATTER_GATHER, DataSourceType.PERSISTED,
        SchedulingType.SEQUENTIAL, outputDescriptor, inputDescriptor));
    dag.addVertex(v1).addVertex(v2).addEdge(edge);

    Credentials dagCredentials = new Credentials();
    Token<TokenIdentifier> token1 = new Token<TokenIdentifier>();
    Token<TokenIdentifier> token2 = new Token<TokenIdentifier>();
    dagCredentials.addToken(new Text("Token1"), token1);
    dagCredentials.addToken(new Text("Token2"), token2);

    dag.setCredentials(dagCredentials);

    DAGPlan dagProto = dag.createDag(new TezConfiguration());

    assertTrue(dagProto.hasCredentialsBinary());
    Credentials fetchedCredentials = DagTypeConverters.convertByteStringToCredentials(dagProto
        .getCredentialsBinary());
    assertEquals(2, fetchedCredentials.numberOfTokens());
    assertNotNull(fetchedCredentials.getToken(new Text("Token1")));
    assertNotNull(fetchedCredentials.getToken(new Text("Token2")));
  }
}
apache-2.0
anavidad3/flume-ingestion
stratio-sinks/stratio-druid-sink/src/test/java/com/stratio/ingestion/sink/druid/DruidSinkIT.java
6762
/** * Copyright (C) 2014 Stratio (http://stratio.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.stratio.ingestion.sink.druid; import java.io.IOException; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.Random; import java.util.concurrent.TimeUnit; import org.apache.flume.Channel; import org.apache.flume.Context; import org.apache.flume.Event; import org.apache.flume.EventDeliveryException; import org.apache.flume.Transaction; import org.apache.flume.channel.MemoryChannel; import org.apache.flume.conf.Configurables; import org.apache.flume.event.EventBuilder; import org.fest.assertions.Assertions; import org.junit.Before; import org.junit.Test; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.base.Charsets; import com.google.common.collect.Maps; /** * Created by eambrosio on 30/03/15. 
*/ public class DruidSinkIT { private Channel channel; private DruidSink druidSink; @Before public void setup() { // Context channelContext = new Context(); // channelContext.put("checkpointDir","data/check"); // channelContext.put("dataDirs","data/data"); // channelContext.put("capacity","1000"); // channelContext.put("transactionCapacity","100"); // channelContext.put("checkpointInterval","300"); // channel = new FileChannel(); Context channelContext = new Context(); channelContext.put("capacity", "10000"); channelContext.put("transactionCapacity", "5000"); channel = new MemoryChannel(); channel.setName("junitChannel"); Configurables.configure(channel, channelContext); channel.start(); druidSink = new DruidSink(); druidSink.setChannel(channel); druidSink.configure(getMockContext()); druidSink.start(); } @Test public void processValidEvents() throws EventDeliveryException { Transaction tx = channel.getTransaction(); tx.begin(); getNTrackerEvents(1000); tx.commit(); tx.close(); for (int i = 0; i < 1; i++) { druidSink.process(); } tx = channel.getTransaction(); tx.begin(); Assertions.assertThat(channel.take()).isNull(); } @Test public void process500KValidEvents() throws EventDeliveryException { for (int i = 0; i < 10; i++) { processValidEvents(); } } private void getNEvents(int numEvents, TimeUnit timeUnit) { for (int i = 0; i < numEvents; i++) { channel.put(getEvent(getOffset(timeUnit))); } } private void getNTrackerEvents(int numEvents) { for (int i = 0; i < numEvents; i++) { channel.put(getTrackerEvent()); } } private long getOffset(TimeUnit timeUnit) { long offset = 0; switch (timeUnit) { case MILLISECONDS: offset = 1; break; case SECONDS: offset = 1000; break; case MINUTES: offset = 1000 * 60; break; case HOURS: offset = 1000 * 60 * 60; break; case DAYS: offset = 1000 * 60 * 60 * 24; break; default: offset = 0; break; } return offset; } private Event getTrackerEvent() { Random random = new Random(); String[] users = new String[] { "user1@santander.com", 
"user2@santander.com", "user3@santander.com", "user4@santander.com" }; String[] isoCode = new String[] { "DE", "ES", "US", "FR" }; TimeUnit[] offset = new TimeUnit[] { TimeUnit.DAYS, TimeUnit.HOURS, TimeUnit.SECONDS }; ObjectNode jsonBody = new ObjectNode(JsonNodeFactory.instance); Map<String, String> headers; ObjectMapper mapper = new ObjectMapper(); JsonNode jsonNode = null; final String fileName = "/trackerSample" + random.nextInt(4) + ".json"; try { jsonNode = mapper.readTree(getClass().getResourceAsStream(fileName)); } catch (IOException e) { e.printStackTrace(); } headers = mapper.convertValue(jsonNode, Map.class); headers.put("timestamp", String.valueOf(new Date().getTime() + getOffset(offset[random.nextInt(3)]) * random .nextInt(100))); headers.put("santanderID", users[random.nextInt(4)]); headers.put("isoCode", isoCode[random.nextInt(4)]); return EventBuilder.withBody(jsonBody.toString().getBytes(Charsets.UTF_8), headers); } private Event getEvent(long offset) { ObjectNode jsonBody = new ObjectNode(JsonNodeFactory.instance); jsonBody.put("field1", "foo"); jsonBody.put("field2", 32); jsonBody.put("timestamp", String.valueOf(new Date().getTime())); Map<String, String> headers = new HashMap<String, String>(); headers.put("field3", "bar"); // Overwrites the value defined in JSON body headers.put("field4", "64"); headers.put("field5", "true"); headers.put("field6", "1.0"); headers.put("field7", "11"); final long l = new Date().getTime(); headers.put("timestamp", String.valueOf(l + offset)); headers.put("myString2", "baz"); return EventBuilder.withBody(jsonBody.toString().getBytes(Charsets.UTF_8), headers); } private Context getMockContext() { Map<String, String> mapProperties = loadProperties("/context.properties"); Context context = new Context(mapProperties); return context; } private Map<String, String> loadProperties(String file) { Properties properties = new Properties(); try { properties.load(getClass().getResourceAsStream(file)); } catch (IOException e) 
{ e.printStackTrace(); } return Maps.fromProperties(properties); } }
apache-2.0
IllusionRom-deprecated/android_platform_tools_idea
platform/platform-impl/src/com/intellij/ide/actions/OpenProjectFileChooserDescriptor.java
3508
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.actions; import com.intellij.icons.AllIcons; import com.intellij.ide.highlighter.ProjectFileType; import com.intellij.openapi.application.ex.ApplicationInfoEx; import com.intellij.openapi.fileChooser.FileChooserDescriptor; import com.intellij.openapi.fileChooser.FileElement; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.projectImport.ProjectOpenProcessor; import org.jetbrains.annotations.Nullable; import javax.swing.*; public class OpenProjectFileChooserDescriptor extends FileChooserDescriptor { private static final Icon ourProjectIcon = IconLoader.getIcon(ApplicationInfoEx.getInstanceEx().getSmallIconUrl()); public OpenProjectFileChooserDescriptor(final boolean chooseFiles) { super(chooseFiles, true, chooseFiles, chooseFiles, false, false); } public boolean isFileSelectable(final VirtualFile file) { if (file == null) return false; return isProjectDirectory(file) || isProjectFile(file); } public Icon getIcon(final VirtualFile file) { if (isProjectDirectory(file)) { return dressIcon(file, ourProjectIcon); } final Icon icon = getImporterIcon(file); if (icon != null) { return dressIcon(file, icon); } return super.getIcon(file); } @Nullable private static Icon getImporterIcon(final VirtualFile virtualFile) { final ProjectOpenProcessor provider = 
ProjectOpenProcessor.getImportProvider(virtualFile); if (provider != null) { return virtualFile.isDirectory() && provider.lookForProjectsInDirectory() ? AllIcons.Nodes.IdeaModule : provider.getIcon(virtualFile); } return null; } public boolean isFileVisible(final VirtualFile file, final boolean showHiddenFiles) { if (!showHiddenFiles && FileElement.isFileHidden(file)) return false; return isProjectFile(file) || super.isFileVisible(file, showHiddenFiles) && file.isDirectory(); } public static boolean isProjectFile(final VirtualFile file) { if (isIprFile(file)) return true; final ProjectOpenProcessor importProvider = ProjectOpenProcessor.getImportProvider(file); return importProvider != null; } private static boolean isIprFile(VirtualFile file) { if ((!file.isDirectory() && file.getName().toLowerCase().endsWith(ProjectFileType.DOT_DEFAULT_EXTENSION))) { return true; } return false; } private static boolean isProjectDirectory(final VirtualFile virtualFile) { // the root directory of any drive is never an IDEA project if (virtualFile.getParent() == null) return false; // NOTE: For performance reasons, it's very important not to iterate through all of the children here. if (virtualFile.isDirectory() && virtualFile.isValid() && virtualFile.findChild(Project.DIRECTORY_STORE_FOLDER) != null) return true; return false; } }
apache-2.0
doctales/dita-ot
src/main/plugins/org.dita.pdf2/src/com/idiominc/ws/opentopic/fo/index2/IndexPreprocessorTask.java
7945
package com.idiominc.ws.opentopic.fo.index2; import static org.dita.dost.util.Constants.*; import com.idiominc.ws.opentopic.fo.index2.configuration.IndexConfiguration; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.Task; import org.apache.tools.ant.Project; import org.apache.xml.resolver.tools.CatalogResolver; import org.dita.dost.log.DITAOTAntLogger; import org.dita.dost.util.XMLUtils; import org.w3c.dom.Document; import org.xml.sax.InputSource; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import java.io.FileOutputStream; import java.util.Locale; /* Copyright (c) 2004-2006 by Idiom Technologies, Inc. All rights reserved. IDIOM is a registered trademark of Idiom Technologies, Inc. and WORLDSERVER and WORLDSTART are trademarks of Idiom Technologies, Inc. All other trademarks are the property of their respective owners. IDIOM TECHNOLOGIES, INC. IS DELIVERING THE SOFTWARE "AS IS," WITH ABSOLUTELY NO WARRANTIES WHATSOEVER, WHETHER EXPRESS OR IMPLIED, AND IDIOM TECHNOLOGIES, INC. DISCLAIMS ALL WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE AND WARRANTY OF NON-INFRINGEMENT. IDIOM TECHNOLOGIES, INC. SHALL NOT BE LIABLE FOR INDIRECT, INCIDENTAL, SPECIAL, COVER, PUNITIVE, EXEMPLARY, RELIANCE, OR CONSEQUENTIAL DAMAGES (INCLUDING BUT NOT LIMITED TO LOSS OF ANTICIPATED PROFIT), ARISING FROM ANY CAUSE UNDER OR RELATED TO OR ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF IDIOM TECHNOLOGIES, INC. HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. Idiom Technologies, Inc. 
and its licensors shall not be liable for any damages suffered by any person as a result of using and/or modifying the Software or its derivatives. In no event shall Idiom Technologies, Inc.'s liability for any damages hereunder exceed the amounts received by Idiom Technologies, Inc. as a result of this transaction. These terms and conditions supersede the terms and conditions in any licensing agreement to the extent that such terms and conditions conflict with those set forth herein. This file is part of the DITA Open Toolkit project. See the accompanying license.txt file for applicable licenses. */ public class IndexPreprocessorTask extends Task { // private String input = null; private String input = ""; private String output = ""; private String catalogs = null; private String locale = "ja"; private String indexConfig = ""; public static boolean failOnError = false; public static boolean processingFaild = false; private static final String prefix = "opentopic-index"; private static final String namespace_url = "http://www.idiominc.com/opentopic/index"; public static void main(final String[] args) { new IndexPreprocessorTask().execute(); } @Override public void execute() throws BuildException { checkParameters(); if (this.catalogs != null) { System.setProperty("xml.catalog.files", this.catalogs); } try { final DocumentBuilder documentBuilder = XMLUtils.getDocumentBuilder(); documentBuilder.setEntityResolver(new CatalogResolver() { @Override public InputSource resolveEntity(final String publicId, String systemId) { // strip path from DTD location final int slashIdx = systemId.lastIndexOf("/"); if (slashIdx >= 0) { systemId = systemId.substring(slashIdx + 1); } // resolve real location with XMLCatalogResolver return super.resolveEntity(publicId, systemId); } }); final Document doc = documentBuilder.parse(input); final IndexPreprocessor preprocessor = new IndexPreprocessor(this.prefix, this.namespace_url); preprocessor.setLogger(new DITAOTAntLogger(getProject())); 
// Walks through source document and builds an array of IndexEntry and builds // new Document with pre-processed index entries included. final IndexPreprocessResult result = preprocessor.process(doc); final Document resultDoc = result.getDocument(); // Parse index configuration from file specified from ANT script final IndexConfiguration configuration = IndexConfiguration.parse(documentBuilder.parse(this.indexConfig)); final IndexEntry[] indexEntries = result.getIndexEntries(); Locale loc; // Split passed locale string to lang and country codes if (locale.indexOf("-") == 2 || locale.indexOf("_") == 2) { loc = new Locale(locale.substring(0, 2), locale.substring(3)); } else { loc = new Locale(this.locale); } // Append index groups to the end of document preprocessor.createAndAddIndexGroups(indexEntries, configuration, resultDoc, loc); if (processingFaild) { setActiveProjectProperty("ws.runtime.index.preprocess.fail","true"); } // Serialize processed document final TransformerFactory transformerFactory = TransformerFactory.newInstance(); final Transformer transformer = transformerFactory.newTransformer(); transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no"); transformer.setOutputProperty(OutputKeys.INDENT, "no"); transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8"); transformer.setOutputProperty(OutputKeys.STANDALONE, "yes"); if (doc.getDoctype() != null) { if (null != doc.getDoctype().getPublicId()) { transformer.setOutputProperty(OutputKeys.DOCTYPE_PUBLIC, doc.getDoctype().getPublicId()); } if (null != doc.getDoctype().getSystemId()) { transformer.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM, doc.getDoctype().getSystemId()); } } final FileOutputStream out = new FileOutputStream(this.output); final StreamResult streamResult = new StreamResult(out); transformer.transform(new DOMSource(resultDoc), streamResult); out.close(); } catch (final Exception e) { e.printStackTrace(); throw new BuildException(e); } } private void checkParameters() throws 
BuildException { if (null == locale || null == input || null == output || null == indexConfig) { throw new BuildException("locale, indexConfig, input, output attributes are required"); } } public void setInput(final String theInput) { this.input = theInput; } public void setOutput(final String theOutput) { this.output = theOutput; } public void setCatalogs(final String theCatalogs) { this.catalogs = theCatalogs; } public void setLocale(final String theLocale) { this.locale = theLocale; } public void setIndexConfig(final String theIndexConfig) { this.indexConfig = theIndexConfig; } public void setFailOnError(final String theFailOnErro) { this.failOnError = theFailOnErro.equals("true"); } private void setActiveProjectProperty(final String propertyName, final String propertyValue) { final Project activeProject = getProject(); if (activeProject != null) { activeProject.setProperty(propertyName, propertyValue); } } }
apache-2.0
racker/omnibus
source/db-5.0.26.NC/java/src/com/sleepycat/db/BtreeStats.java
9461
/*-
 * Automatically built by dist/s_java_stat.
 * Only the javadoc comments can be edited.
 *
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2002, 2010 Oracle and/or its affiliates.  All rights reserved.
 */

package com.sleepycat.db;

/**
The BtreeStats object is used to return Btree or Recno database statistics.
<p>
Instances are populated by the native layer; there is no public constructor,
and all fields are read-only snapshots exposed through the getters below.
*/
public class BtreeStats extends DatabaseStats {
    // no public constructor
    /* package */ BtreeStats() {}

    private int bt_magic;
    /**
    The magic number that identifies the file as a Btree database.
    */
    public int getMagic() {
        return bt_magic;
    }

    private int bt_version;
    /**
    The version of the Btree database.
    */
    public int getVersion() {
        return bt_version;
    }

    private int bt_metaflags;
    /**
    The metadata flags.
    */
    public int getMetaFlags() {
        return bt_metaflags;
    }

    private int bt_nkeys;
    /**
    The number of keys or records in the database.
    <p>
    For the Btree Access Method, the number of keys in the database.  If
    the {@link com.sleepycat.db.Database#getStats Database.getStats} call
    was not configured by the
    {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast}
    method or the database was configured to support retrieval by record
    number, the count will be exact.  Otherwise, the count will be the last
    saved value unless it has never been calculated, in which case it will
    be 0.
    <p>
    For the Recno Access Method, the number of records in the database.
    If the database was configured with mutable record numbers the count
    will be exact.  Otherwise, if the
    {@link com.sleepycat.db.Database#getStats Database.getStats} call was
    configured by the
    {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast}
    method, the count will be exact but will include deleted records; if
    the {@link com.sleepycat.db.Database#getStats Database.getStats} call
    was not configured by the
    {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast}
    method, the count will be exact and will not include deleted records.
    */
    public int getNumKeys() {
        return bt_nkeys;
    }

    private int bt_ndata;
    /**
    The number of key/data pairs or records in the database.
    <p>
    For the Btree Access Method, the number of key/data pairs in the
    database.  If the
    {@link com.sleepycat.db.Database#getStats Database.getStats} call was
    not configured by the
    {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast}
    method, the count will be exact.  Otherwise, the count will be the
    last saved value unless it has never been calculated, in which case it
    will be 0.
    <p>
    For the Recno Access Method, the number of records in the database.
    If the database was configured with mutable record numbers, the count
    will be exact.  Otherwise, if the
    {@link com.sleepycat.db.Database#getStats Database.getStats} call was
    configured by the
    {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast}
    method, the count will be exact but will include deleted records; if
    the {@link com.sleepycat.db.Database#getStats Database.getStats} call
    was not configured by the
    {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast}
    method, the count will be exact and will not include deleted records.
    */
    public int getNumData() {
        return bt_ndata;
    }

    private int bt_pagecnt;
    /**
    The number of pages in the database.
    <p>
    Returned if {@link StatsConfig#setFast} was configured.
    */
    public int getPageCount() {
        return bt_pagecnt;
    }

    private int bt_pagesize;
    /**
    The underlying database page size, in bytes.
    */
    public int getPageSize() {
        return bt_pagesize;
    }

    private int bt_minkey;
    /**
    The minimum keys per page.
    */
    public int getMinKey() {
        return bt_minkey;
    }

    private int bt_re_len;
    /**
    The length of fixed-length records.
    */
    public int getReLen() {
        return bt_re_len;
    }

    private int bt_re_pad;
    /**
    The padding byte value for fixed-length records.
    */
    public int getRePad() {
        return bt_re_pad;
    }

    private int bt_levels;
    /**
    The number of levels in the database.
    <p>
    The information is only included if the {@link com.sleepycat.db.Database#getStats Database.getStats} call was not configured by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast} method.
    */
    public int getLevels() {
        return bt_levels;
    }

    private int bt_int_pg;
    /**
    The number of database internal pages.
    <p>
    The information is only included if the {@link com.sleepycat.db.Database#getStats Database.getStats} call was not configured by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast} method.
    */
    public int getIntPages() {
        return bt_int_pg;
    }

    private int bt_leaf_pg;
    /**
    The number of database leaf pages.
    <p>
    The information is only included if the {@link com.sleepycat.db.Database#getStats Database.getStats} call was not configured by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast} method.
    */
    public int getLeafPages() {
        return bt_leaf_pg;
    }

    private int bt_dup_pg;
    /**
    The number of database duplicate pages.
    <p>
    The information is only included if the {@link com.sleepycat.db.Database#getStats Database.getStats} call was not configured by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast} method.
    */
    public int getDupPages() {
        return bt_dup_pg;
    }

    private int bt_over_pg;
    /**
    The number of database overflow pages.
    <p>
    The information is only included if the {@link com.sleepycat.db.Database#getStats Database.getStats} call was not configured by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast} method.
    */
    public int getOverPages() {
        return bt_over_pg;
    }

    private int bt_empty_pg;
    /**
    The number of empty database pages.
    <p>
    The information is only included if the {@link com.sleepycat.db.Database#getStats Database.getStats} call was not configured by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast} method.
    */
    public int getEmptyPages() {
        return bt_empty_pg;
    }

    private int bt_free;
    /**
    The number of pages on the free list.
    <p>
    The information is only included if the {@link com.sleepycat.db.Database#getStats Database.getStats} call was not configured by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast} method.
    */
    public int getFree() {
        return bt_free;
    }

    private long bt_int_pgfree;
    /**
    The number of bytes free in database internal pages.
    <p>
    The information is only included if the {@link com.sleepycat.db.Database#getStats Database.getStats} call was not configured by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast} method.
    */
    public long getIntPagesFree() {
        return bt_int_pgfree;
    }

    private long bt_leaf_pgfree;
    /**
    The number of bytes free in database leaf pages.
    <p>
    The information is only included if the {@link com.sleepycat.db.Database#getStats Database.getStats} call was not configured by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast} method.
    */
    public long getLeafPagesFree() {
        return bt_leaf_pgfree;
    }

    private long bt_dup_pgfree;
    /**
    The number of bytes free in database duplicate pages.
    <p>
    The information is only included if the {@link com.sleepycat.db.Database#getStats Database.getStats} call was not configured by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast} method.
    */
    public long getDupPagesFree() {
        return bt_dup_pgfree;
    }

    private long bt_over_pgfree;
    /**
    The number of bytes free in database overflow pages.
    <p>
    The information is only included if the {@link com.sleepycat.db.Database#getStats Database.getStats} call was not configured by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast} method.
    */
    public long getOverPagesFree() {
        return bt_over_pgfree;
    }

    /**
    For convenience, the BtreeStats class has a toString method that lists
    all the data fields.
    */
    public String toString() {
        return "BtreeStats:"
            + "\n bt_magic=" + bt_magic
            + "\n bt_version=" + bt_version
            + "\n bt_metaflags=" + bt_metaflags
            + "\n bt_nkeys=" + bt_nkeys
            + "\n bt_ndata=" + bt_ndata
            + "\n bt_pagecnt=" + bt_pagecnt
            + "\n bt_pagesize=" + bt_pagesize
            + "\n bt_minkey=" + bt_minkey
            + "\n bt_re_len=" + bt_re_len
            + "\n bt_re_pad=" + bt_re_pad
            + "\n bt_levels=" + bt_levels
            + "\n bt_int_pg=" + bt_int_pg
            + "\n bt_leaf_pg=" + bt_leaf_pg
            + "\n bt_dup_pg=" + bt_dup_pg
            + "\n bt_over_pg=" + bt_over_pg
            + "\n bt_empty_pg=" + bt_empty_pg
            + "\n bt_free=" + bt_free
            + "\n bt_int_pgfree=" + bt_int_pgfree
            + "\n bt_leaf_pgfree=" + bt_leaf_pgfree
            + "\n bt_dup_pgfree=" + bt_dup_pgfree
            + "\n bt_over_pgfree=" + bt_over_pgfree
            ;
    }
}
apache-2.0
smslib/smslib
smslib/src/main/java/org/smslib/callback/IInboundCallCallback.java
191
package org.smslib.callback; import org.smslib.callback.events.InboundCallCallbackEvent; public interface IInboundCallCallback { public boolean process(InboundCallCallbackEvent event); }
apache-2.0
apache/tapestry3
tapestry-examples/Vlib/src/org/apache/tapestry/vlib/pages/EditBook.java
4895
// Copyright 2004 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.apache.tapestry.vlib.pages;

import java.rmi.RemoteException;
import java.util.HashMap;
import java.util.Map;

import javax.ejb.CreateException;
import javax.ejb.FinderException;

import org.apache.tapestry.ApplicationRuntimeException;
import org.apache.tapestry.IRequestCycle;
import org.apache.tapestry.Tapestry;
import org.apache.tapestry.event.PageEvent;
import org.apache.tapestry.event.PageRenderListener;
import org.apache.tapestry.vlib.Protected;
import org.apache.tapestry.vlib.VirtualLibraryEngine;
import org.apache.tapestry.vlib.ejb.IOperations;

/**
 *  Edits the properties of a book.
 *
 *  <p>Page state (book id, attribute map, publisher name) is held in the
 *  abstract accessors below, which Tapestry implements at runtime.
 *
 *  @author Howard Lewis Ship
 *  @version $Id$
 *
 **/

public abstract class EditBook extends Protected implements PageRenderListener
{
    // Map of book attributes being edited; backs the form's input fields.
    public abstract Map getAttributes();

    public abstract void setAttributes(Map attributes);

    // Publisher name typed by the user when creating a new publisher.
    public abstract String getPublisherName();

    // Primary key of the book being edited.
    public abstract Integer getBookId();

    public abstract void setBookId(Integer bookId);

    /**
     *  Invoked (from {@link MyLibrary}) to begin editing a book.
     *  Gets the attributes from the {@link org.apache.tapestry.vlib.ejb.IBook}
     *  and updates the request cycle to render this page.
     *
     *  @param cycle  the active request cycle, activated on this page at the end
     *  @param bookId primary key of the book to edit
     *  @throws ApplicationRuntimeException if the book cannot be found
     **/

    public void beginEdit(IRequestCycle cycle, Integer bookId)
    {
        setBookId(bookId);

        VirtualLibraryEngine vengine = (VirtualLibraryEngine) getEngine();

        int i = 0;
        while (true)
        {
            try
            {
                // Get the attributes as a source for our input fields.

                IOperations operations = vengine.getOperations();

                setAttributes(operations.getBookAttributes(bookId));

                break;
            }
            catch (FinderException ex)
            {
                throw new ApplicationRuntimeException(ex);
            }
            catch (RemoteException ex)
            {
                // Retry loop: rmiFailure logs the failure and, presumably,
                // throws after repeated attempts (i counts them) — verify
                // against VirtualLibraryEngine.rmiFailure.
                vengine.rmiFailure(
                    "Remote exception setting up page for book #" + bookId + ".",
                    ex,
                    i++);
            }
        }

        cycle.activate(this);
    }

    /**
     *  Used to update the book when the form is submitted.
     *
     *  <p>Validation requires exactly one of publisher id (picked from the
     *  list) or publisher name (for a new publisher) to be supplied. On
     *  success, control transfers to the MyLibrary page with a confirmation
     *  message.
     *
     **/

    public void formSubmit(IRequestCycle cycle)
    {
        Map attributes = getAttributes();

        Integer publisherId = (Integer) attributes.get("publisherId");
        String publisherName = getPublisherName();

        // Neither supplied: the user must name a publisher.
        if (publisherId == null && Tapestry.isBlank(publisherName))
        {
            setErrorField("inputPublisherName", getMessage("need-publisher-name"));
            return;
        }

        // Both supplied: ambiguous, reject.
        if (publisherId != null && Tapestry.isNonBlank(publisherName))
        {
            setErrorField("inputPublisherName", getMessage("leave-publisher-name-empty"));
            return;
        }

        // Check for an error from a validation field

        if (isInError())
            return;

        // OK, do the update.

        VirtualLibraryEngine vengine = (VirtualLibraryEngine)cycle.getEngine();
        Integer bookId = getBookId();

        int i = 0;
        while (true)
        {
            IOperations bean = vengine.getOperations();

            try
            {
                if (publisherId != null)
                    bean.updateBook(bookId, attributes);
                else
                {
                    // Creating a new publisher invalidates cached publisher
                    // lists, so the engine cache is cleared.
                    bean.updateBook(bookId, attributes, publisherName);
                    vengine.clearCache();
                }

                break;
            }
            catch (FinderException ex)
            {
                throw new ApplicationRuntimeException(ex);
            }
            catch (CreateException ex)
            {
                throw new ApplicationRuntimeException(ex);
            }
            catch (RemoteException ex)
            {
                // Same retry pattern as beginEdit; see note there.
                vengine.rmiFailure("Remote exception updating book #" + bookId + ".", ex, i++);

                continue;
            }
        }

        MyLibrary page = (MyLibrary) cycle.getPage("MyLibrary");

        page.setMessage(format("updated-book", attributes.get("title")));

        page.activate(cycle);
    }

    // Ensures the attribute map exists before the form renders.
    public void pageBeginRender(PageEvent event)
    {
        if (getAttributes() == null)
            setAttributes(new HashMap());
    }
}
apache-2.0
PinaeOS/rafiki
src/example/java/org/pinae/rafiki/trigger/CronTriggerExample.java
2660
package org.pinae.rafiki.trigger; import org.pinae.rafiki.trigger.Trigger; import org.pinae.rafiki.trigger.TriggerException; import org.pinae.rafiki.trigger.impl.CronTrigger; import static org.pinae.rafiki.trigger.helper.DateHelper.today; import org.apache.log4j.Logger; public class CronTriggerExample { private static Logger logger = Logger.getLogger(CronTriggerExample.class); public static Trigger getTrigger0(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0-30/5 * * * * * *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger1(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0 * * * * * *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger2(){ CronTrigger trigger = null; try { trigger = new CronTrigger("30 1-5 * * * * *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger3(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0 * 12-17 * * * *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger4(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0 * * 25-30 * * *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger5(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0 * * * DEC SUN *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger6(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0 * * * 
DEC * 2013"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger7(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0-30/10 * * 5-12 MAY * 2014"); trigger.setStartTime(today(15, 10, 0)); trigger.setEndTime(today(21, 30, 0)); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } }
apache-2.0
huihoo/olat
olat7.8/src/test/java/org/olat/test/functional/groups/GroupConcurrenciesTest.java
3912
package org.olat.test.functional.groups;

import org.olat.test.util.selenium.BaseSeleneseTestCase;
import org.olat.test.util.selenium.olatapi.OLATWorkflowHelper;
import org.olat.test.util.selenium.olatapi.WorkflowHelper;
import org.olat.test.util.selenium.olatapi.group.Group;
import org.olat.test.util.selenium.olatapi.group.GroupAdmin;
import org.olat.test.util.setup.SetupType;
import org.olat.test.util.setup.context.Context;

import com.thoughtworks.selenium.SeleniumException;

/**
 *
 * Group owner and participant test project group concurrencies. <br/>
 * Test setup: <br/>
 * 1. Clean-up: delete all groups from author <br/>
 * <p>
 * Test case: <br/>
 * 1. author creates group GROUP_NAME with group tool wiki <br/>
 * 2. author adds participant <br/>
 * 3. student logs in, check if he can select wiki <br/>
 * 4. author removes wiki <br/>
 * 5. student checks that wiki is no longer available and that he gets appropriate error message <br/>
 * 6. author deletes group <br/>
 *
 *
 * @author sandra
 *
 */
public class GroupConcurrenciesTest extends BaseSeleneseTestCase {

    /**
     * End-to-end concurrency scenario on a two-node cluster: the author and
     * the student each drive their own browser session (on different cluster
     * nodes) against the same project group while it is being modified.
     */
    public void testGroupConcurrencies() throws Exception {
        // Two-node cluster so that author (node 1) and student (node 2)
        // exercise cross-node state replication.
        Context context = Context.setupContext(getFullName(), SetupType.TWO_NODE_CLUSTER);

        // delete all my groups first !!!
        WorkflowHelper.deleteAllGroupsFromAuthor(context.getStandardAuthorOlatLoginInfos(1));

        // Author01 creates project group with wiki
        System.out.println("logging in browser 1...");
        OLATWorkflowHelper oLATWorkflowHelper1 = context.getOLATWorkflowHelper(context.getStandardAuthorOlatLoginInfos(1));
        GroupAdmin groupAdmin1 = oLATWorkflowHelper1.getGroups().createProjectGroup("project group selenium 4", "");
        // 6th flag selects the wiki tool for the group.
        groupAdmin1.setTools(false, false, false, false, false, true, false);
        String[] participants = { context.getStandardStudentOlatLoginInfos(1).getUsername() };
        groupAdmin1.addMembers(participants, new String[0]);
        groupAdmin1.close("project group selenium 4");

        // student01 opens group with wiki
        System.out.println("logging in browser 2...");
        // NOTE(review): the participant username comes from node 1's login
        // infos, while the student logs in via node 2 — presumably the same
        // account seen from a different cluster node; confirm with the
        // Context API.
        OLATWorkflowHelper oLATWorkflowHelper2 = context.getOLATWorkflowHelper(context.getStandardStudentOlatLoginInfos(2));
        Group group2 = oLATWorkflowHelper2.getGroups().selectGroup("project group selenium 4");
        group2.selectWiki();

        // Author01 removes wiki
        // NOTE(review): same argument pattern as at creation time — setTools
        // appears to toggle the tool checkboxes, so selecting the wiki box
        // again unchecks it; confirm against GroupAdmin.setTools semantics.
        groupAdmin1 = oLATWorkflowHelper1.getGroups().selectGroup("project group selenium 4").selectAdministration();
        groupAdmin1.setTools(false, false, false, false, false, true, false);

        // Student01 wants to click on Wiki, but wiki was removed by the group owner
        if (group2.hasWiki()) {
            group2.selectWiki();
        }
        Thread.sleep(10000);
        // wiki disappears silently/or not (why behaviour changes?) with a certain delay after removal
        assertFalse(group2.hasWiki());

        // Author01 deletes group
        oLATWorkflowHelper1.getGroups().deleteGroup("project group selenium 4");

        // student clicks on group and gets appropriate message
        try {
            if (group2.getSelenium().isElementPresent("ui=group::menu_members()")) {
                group2.selectMembers();
            }
        } catch (SeleniumException e) {
            // ok - ajax could come and refresh the group tab and notice that it has been modified right
            // after we asked 'isElementPresent' .. hence not doing anything with this exception!
        }
        // Poll up to 60 seconds for the "group was modified/deleted" banner;
        // isTextPresent may itself throw while the tab is being refreshed, so
        // exceptions are swallowed and the poll simply retries.
        for (int second = 0;; second++) {
            if (second >= 60)
                fail("timeout");
            try {
                if (group2.isTextPresent("This group's configuration has been modified (group deleted, members changed). Please close the tab."))
                    break;
            } catch (Exception e) {
            }
            Thread.sleep(1000);
        }
    }
}
apache-2.0
janstey/fuse
fabric/fabric-camel-c24io/src/test/java/org/fusesource/fabric/camel/c24io/spring/SpringWithBadElementNameTest.java
1478
/** * Copyright (C) FuseSource, Inc. * http://fusesource.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fusesource.fabric.camel.c24io.spring; import junit.framework.TestCase; import org.apache.camel.RuntimeCamelException; import org.junit.Test; import org.springframework.context.support.AbstractXmlApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; /** * @version $Revision$ */ public class SpringWithBadElementNameTest extends TestCase { @Test public void testBadElementName() throws Exception { try { AbstractXmlApplicationContext appContext = new ClassPathXmlApplicationContext("org/fusesource/fabric/camel/c24io/spring/badElementName.xml"); appContext.start(); fail("should have failed!"); } catch (RuntimeCamelException e) { System.out.println("Caught expected: " + e); e.printStackTrace(); } } }
apache-2.0
apache/jena
jena-arq/src/test/java/org/apache/jena/arq/junit/sparql/tests/SerializationTest.java
2680
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.arq.junit.sparql.tests; import org.apache.jena.arq.junit.manifest.ManifestEntry; import org.apache.jena.atlas.io.IndentedLineBuffer ; import org.apache.jena.query.Query ; import org.apache.jena.query.Syntax ; import org.apache.jena.sparql.sse.SSE_ParseException ; import org.apache.jena.sparql.util.QueryUtils ; public class SerializationTest implements Runnable { static int count = 0 ; String queryString ; ManifestEntry testEntry; public SerializationTest(ManifestEntry entry) { testEntry = entry ; } // A serialization test is: // Read query in. // Serialize to string. // Parse again. // Are they equal? @Override public void run() { Query query = SparqlTestLib.queryFromEntry(testEntry); // Whatever was read in. runTestWorker(query, query.getSyntax()) ; } protected void runTestWorker(Query query, Syntax syntax) { IndentedLineBuffer buff = new IndentedLineBuffer() ; query.serialize(buff, syntax) ; String baseURI = null ; if ( ! query.explicitlySetBaseURI() ) // Not in query - use the same one (e.g. file read from) . baseURI = query.getBaseURI() ; // Query syntax and algebra tests. 
try { QueryUtils.checkParse(query) ; } catch (RuntimeException ex) { System.err.println("**** Test: "+testEntry.getName()) ; System.err.println("** "+ex.getMessage()) ; System.err.println(query) ; throw ex ; } try { QueryUtils.checkOp(query, true) ; } catch (SSE_ParseException ex) { System.err.println("**** Test: "+testEntry.getName()) ; System.err.println("** Algebra error: "+ex.getMessage()) ; } } }
apache-2.0
JRebirth/JRebirth
org.jrebirth.af/core/src/main/java/org/jrebirth/af/core/ui/adapter/DefaultKeyAdapter.java
1671
/** * Get more info at : www.jrebirth.org . * Copyright JRebirth.org © 2011-2013 * Contact : sebastien.bordes@jrebirth.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jrebirth.af.core.ui.adapter; import javafx.scene.input.KeyEvent; import org.jrebirth.af.core.ui.AbstractBaseController; /** * The class <strong>DefaultKeyAdapter</strong>. * * @author Sébastien Bordes * * @param <C> The controller class which manage this event adapter */ public class DefaultKeyAdapter<C extends AbstractBaseController<?, ?>> extends AbstractDefaultAdapter<C> implements KeyAdapter { /** * {@inheritDoc} */ @Override public void key(final KeyEvent keyEvent) { // Nothing to do yet } /** * {@inheritDoc} */ @Override public void keyPressed(final KeyEvent keyEvent) { // Nothing to do yet } /** * {@inheritDoc} */ @Override public void keyReleased(final KeyEvent keyEvent) { // Nothing to do yet } /** * {@inheritDoc} */ @Override public void keyTyped(final KeyEvent keyEvent) { // Nothing to do yet } }
apache-2.0
apache/incubator-shardingsphere
shardingsphere-proxy/shardingsphere-proxy-frontend/shardingsphere-proxy-frontend-mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLComQueryPacketExecutorTest.java
4108
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query;

import org.apache.shardingsphere.db.protocol.mysql.constant.MySQLCharacterSet;
import org.apache.shardingsphere.db.protocol.mysql.constant.MySQLConstants;
import org.apache.shardingsphere.db.protocol.mysql.packet.command.query.text.query.MySQLComQueryPacket;
import org.apache.shardingsphere.proxy.backend.response.header.query.QueryResponseHeader;
import org.apache.shardingsphere.proxy.backend.response.header.query.impl.QueryHeader;
import org.apache.shardingsphere.proxy.backend.response.header.update.UpdateResponseHeader;
import org.apache.shardingsphere.proxy.backend.session.ConnectionSession;
import org.apache.shardingsphere.proxy.backend.text.TextProtocolBackendHandler;
import org.apache.shardingsphere.proxy.frontend.command.executor.ResponseType;
import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.internal.util.reflection.FieldSetter;
import org.mockito.junit.MockitoJUnitRunner;

import java.sql.SQLException;
import java.util.Collections;

import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests that {@link MySQLComQueryPacketExecutor} reports the correct
 * {@link ResponseType} depending on the backend handler's response header.
 */
@RunWith(MockitoJUnitRunner.class)
public final class MySQLComQueryPacketExecutorTest {

    @Mock
    private TextProtocolBackendHandler textProtocolBackendHandler;

    @Mock
    private MySQLComQueryPacket packet;

    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    private ConnectionSession connectionSession;

    @Before
    public void setUp() {
        when(packet.getSql()).thenReturn("");
        // Deep-stubbed session must yield a charset for the executor's setup.
        when(connectionSession.getAttributeMap().attr(MySQLConstants.MYSQL_CHARACTER_SET_ATTRIBUTE_KEY).get()).thenReturn(MySQLCharacterSet.UTF8MB4_GENERAL_CI);
    }

    @Test
    public void assertIsQueryResponse() throws SQLException, NoSuchFieldException {
        MySQLComQueryPacketExecutor mysqlComQueryPacketExecutor = createExecutorWithMockedHandler();
        when(textProtocolBackendHandler.execute()).thenReturn(new QueryResponseHeader(Collections.singletonList(mock(QueryHeader.class))));
        mysqlComQueryPacketExecutor.execute();
        assertThat(mysqlComQueryPacketExecutor.getResponseType(), is(ResponseType.QUERY));
    }

    @Test
    public void assertIsUpdateResponse() throws SQLException, NoSuchFieldException {
        MySQLComQueryPacketExecutor mysqlComQueryPacketExecutor = createExecutorWithMockedHandler();
        when(textProtocolBackendHandler.execute()).thenReturn(new UpdateResponseHeader(mock(SQLStatement.class)));
        mysqlComQueryPacketExecutor.execute();
        assertThat(mysqlComQueryPacketExecutor.getResponseType(), is(ResponseType.UPDATE));
    }

    /**
     * Builds the executor under test and injects the mocked backend handler
     * via reflection. Extracted to remove the setup duplicated in both tests.
     */
    private MySQLComQueryPacketExecutor createExecutorWithMockedHandler() throws NoSuchFieldException {
        MySQLComQueryPacketExecutor result = new MySQLComQueryPacketExecutor(packet, connectionSession);
        FieldSetter.setField(result, MySQLComQueryPacketExecutor.class.getDeclaredField("textProtocolBackendHandler"), textProtocolBackendHandler);
        return result;
    }
}
apache-2.0
ServiceComb/java-chassis
demo/demo-multi-registries/demo-multi-registries-client/src/main/java/org/apache/servicecomb/demo/registry/SchemaDiscoveryTestCase.java
1724
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.servicecomb.demo.registry; import org.apache.servicecomb.demo.CategorizedTestCase; import org.apache.servicecomb.demo.TestMgr; import org.apache.servicecomb.provider.pojo.RpcReference; import org.springframework.stereotype.Component; @Component public class SchemaDiscoveryTestCase implements CategorizedTestCase { @RpcReference(microserviceName = "thirdParty-no-schema-server", schemaId = "ServerEndpoint") IServerEndpoint serverEndpoint; @Override public void testRestTransport() throws Exception { // invoke thirdParty-no-schema-server(mocked by demo-multi-registries-server) TestMgr.check("hello", serverEndpoint.getName("hello")); } @Override public void testHighwayTransport() throws Exception { } @Override public void testAllTransport() throws Exception { } @Override public String getMicroserviceName() { return "thirdParty-service-center"; } }
apache-2.0
forGGe/kaa
client/client-multi/client-java-core/src/main/java/org/kaaproject/kaa/client/channel/failover/strategies/FailoverStrategy.java
2267
/*
 * Copyright 2014-2016 CyberVision, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kaaproject.kaa.client.channel.failover.strategies;

import org.kaaproject.kaa.client.channel.TransportConnectionInfo;
import org.kaaproject.kaa.client.channel.failover.FailoverDecision;
import org.kaaproject.kaa.client.channel.failover.FailoverStatus;

import java.util.concurrent.TimeUnit;

/**
 * Failover strategy is responsible for producing failover decisions based on
 * failover statuses.
 *
 * <p>Both retry periods are plain numbers; their time unit is defined by
 * {@link #getTimeUnit()}.
 */
public interface FailoverStrategy {

  /**
   * Needs to be invoked to determine a decision that resolves the failover.
   *
   * @param failoverStatus current status of the failover.
   *
   * @return decision which is meant to resolve the failover.
   *
   * @see FailoverDecision
   * @see FailoverStatus
   */
  FailoverDecision onFailover(FailoverStatus failoverStatus);

  /**
   * Needs to be invoked once client recovered after failover.
   *
   * @param connectionInfo server information
   *
   * @see org.kaaproject.kaa.client.channel.TransportConnectionInfo
   */
  void onRecover(TransportConnectionInfo connectionInfo);

  /**
   * Use the {@link #getTimeUnit()} method to get current time unit.
   *
   * @return period of time after which will be made attempt to tweak bootstrap server.
   */
  long getBootstrapServersRetryPeriod();

  /**
   * Use the {@link #getTimeUnit()} method to get current time unit.
   *
   * @return period of time after which will be made attempt to tweak operation server.
   */
  long getOperationServersRetryPeriod();

  /**
   * @return time unit used within a scope of current failover strategy.
   */
  TimeUnit getTimeUnit();
}
apache-2.0
qq254963746/memcached-session-manager
serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/Benchmark.java
12323
/*
 * Copyright 2010 Martin Grotzke
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an &quot;AS IS&quot; BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package de.javakaffee.web.msm.serializer;

import java.util.Calendar;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.catalina.core.StandardContext;
import org.apache.catalina.loader.WebappLoader;

import de.javakaffee.web.msm.JavaSerializationTranscoder;
import de.javakaffee.web.msm.MemcachedBackupSession;
import de.javakaffee.web.msm.MemcachedBackupSessionManager;
import de.javakaffee.web.msm.SessionAttributesTranscoder;
import de.javakaffee.web.msm.TranscoderService;
import de.javakaffee.web.msm.serializer.TestClasses.Address;
import de.javakaffee.web.msm.serializer.TestClasses.Component;
import de.javakaffee.web.msm.serializer.TestClasses.Person;
import de.javakaffee.web.msm.serializer.TestClasses.Person.Gender;
import de.javakaffee.web.msm.serializer.javolution.JavolutionTranscoder;
import de.javakaffee.web.msm.serializer.kryo.KryoTranscoder;

/**
 * A simple benchmark for existing serialization strategies.
 *
 * <p>Compares Java serialization, Javolution and Kryo transcoders on
 * synthetic sessions, printing min/avg/max timings as CSV.
 *
 * @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a>
 */
public class Benchmark {

    /*
     * Sample results recorded by the author (50000 iterations):
     * -- JavaSerializationTranscoder --
     * Serializing 1000 sessions took 156863 msec. serialized size is 59016 bytes.
     * -- JavolutionTranscoder --
     * Serializing 1000 sessions took 251870 msec. serialized size is 138374 bytes.
     * -- KryoTranscoder --
     * Serializing 1000 sessions took 154816 msec. serialized size is 70122 bytes.
     */

    /**
     * Entry point: warms up all three transcoders, then runs three benchmark
     * configurations of decreasing session size.
     */
    public static void main( final String[] args ) throws InterruptedException {
        //Thread.sleep( 1000 );
        final MemcachedBackupSessionManager manager = createManager();

        // some warmup
        final int warmupCycles = 100000;
        warmup( manager, new JavaSerializationTranscoder(), warmupCycles, 100, 3 );
        warmup( manager, new JavolutionTranscoder( Thread.currentThread().getContextClassLoader(), false ), warmupCycles, 100, 3 );
        warmup( manager, new KryoTranscoder(), warmupCycles, 100, 3 );
        recover();

        benchmark( manager, 10, 500, 4 /* 4^4 = 256 */ );
        benchmark( manager, 10, 100, 3 /* 3^3 = 27 */ );
        benchmark( manager, 10, 10, 2 /* 2^2 = 4 */ );

        // Thread.sleep( Integer.MAX_VALUE );
    }

    /**
     * Runs one benchmark configuration against all three transcoders and
     * prints the collected stats as one CSV table.
     *
     * @param manager      session factory
     * @param rounds       measurement rounds per transcoder
     * @param countPersons number of Person objects in the test session
     * @param nodesPerEdge component-tree fan-out used for the test session
     */
    private static void benchmark( final MemcachedBackupSessionManager manager, final int rounds, final int countPersons,
            final int nodesPerEdge ) throws InterruptedException {
        final Stats javaSerStats = new Stats();
        final Stats javaDeSerStats = new Stats();
        benchmark( manager, new JavaSerializationTranscoder(), javaSerStats, javaDeSerStats, rounds, countPersons, nodesPerEdge );

        recover();

        final Stats javolutionSerStats = new Stats();
        final Stats javolutionDeSerStats = new Stats();
        benchmark( manager, new JavolutionTranscoder( Thread.currentThread().getContextClassLoader(), false ),
                javolutionSerStats, javolutionDeSerStats, rounds, countPersons, nodesPerEdge );

        recover();

        final Stats kryoSerStats = new Stats();
        final Stats kryoDeSerStats = new Stats();
        benchmark( manager, new KryoTranscoder(), kryoSerStats, kryoDeSerStats, rounds, countPersons, nodesPerEdge );

        System.out.println( "Serialization,Size,Ser-Min,Ser-Avg,Ser-Max,Deser-Min,Deser-Avg,Deser-Max");
        System.out.println( toCSV( "Java", javaSerStats, javaDeSerStats ) );
        System.out.println( toCSV( "Javolution", javolutionSerStats, javolutionDeSerStats ) );
        System.out.println( toCSV( "Kryo", kryoSerStats, kryoDeSerStats ) );
    }

    /** Formats one transcoder's serialization + deserialization stats as a CSV row. */
    private static String toCSV( final String name, final Stats serStats, final Stats deSerStats ) {
        return name + "," + serStats.size +","+ minAvgMax( serStats ) + "," + minAvgMax( deSerStats );
    }

    /** "min,avg,max" for one Stats instance. */
    private static String minAvgMax( final Stats stats ) {
        return stats.min +","+ stats.avg +","+ stats.max;
    }

    /** Best-effort quiescence between runs: sleep, request GC, sleep again. */
    private static void recover() throws InterruptedException {
        Thread.sleep( 200 );
        System.gc();
        Thread.sleep( 200 );
    }

    /**
     * Measures one transcoder: {@code rounds} timed batches of 500
     * serializations, then {@code rounds} batches of 500 deserializations.
     *
     * NOTE(review): timings use System.currentTimeMillis (coarse-grained) and
     * the 500-iteration batch size is hard-coded — left unchanged here.
     */
    private static void benchmark( final MemcachedBackupSessionManager manager, final SessionAttributesTranscoder transcoder,
            final Stats serializationStats,
            final Stats deserializationStats,
            final int rounds, final int countPersons, final int nodesPerEdge ) throws InterruptedException {

        System.out.println( "Running benchmark for " + transcoder.getClass().getSimpleName() + "..." +
                " (rounds: "+ rounds +", persons: "+ countPersons +", nodes: "+ ((int)Math.pow( nodesPerEdge, nodesPerEdge ) + nodesPerEdge + 1 ) +")" );

        final TranscoderService transcoderService = new TranscoderService( transcoder );

        final MemcachedBackupSession session = createSession( manager, "123456789abcdefghijk987654321", countPersons, nodesPerEdge );
        // Serialized once up front to record the payload size for the CSV.
        final byte[] data = transcoderService.serialize( session );
        final int size = data.length;

        for( int r = 0; r < rounds; r++ ) {
            final long start = System.currentTimeMillis();
            for( int i = 0; i < 500; i++ ) {
                transcoderService.serialize( session );
            }
            serializationStats.registerSince( start );
            serializationStats.setSize( size );
        }

        System.gc();
        Thread.sleep( 100 );

        // deserialization
        for( int r = 0; r < rounds; r++ ) {
            final long start = System.currentTimeMillis();
            for( int i = 0; i < 500; i++ ) {
                transcoderService.deserialize( data, manager );
            }
            deserializationStats.registerSince( start );
            deserializationStats.setSize( size );
        }

    }

    /**
     * JIT warmup: runs {@code loops} untimed serializations and
     * deserializations for the given transcoder (elapsed time is printed for
     * information only).
     */
    private static void warmup( final MemcachedBackupSessionManager manager, final SessionAttributesTranscoder transcoder,
            final int loops, final int countPersons, final int nodesPerEdge )
        throws InterruptedException {

        final TranscoderService transcoderService = new TranscoderService( transcoder );
        final MemcachedBackupSession session = createSession( manager, "123456789abcdefghijk987654321", countPersons, nodesPerEdge );

        System.out.print("Performing warmup for serialization using "+ transcoder.getClass().getSimpleName() +"...");
        final long serWarmupStart = System.currentTimeMillis();
        for( int i = 0; i < loops; i++ ) transcoderService.serialize( session );
        System.out.println(" (" + (System.currentTimeMillis() - serWarmupStart) + " ms)");

        System.out.print("Performing warmup for deserialization...");
        final byte[] data = transcoderService.serialize( session );
        final long deserWarmupStart = System.currentTimeMillis();
        for( int i = 0; i < loops; i++ ) transcoderService.deserialize( data, manager );
        System.out.println(" (" + (System.currentTimeMillis() - deserWarmupStart) + " ms)");

    }

    /**
     * Builds a synthetic session holding string builders/buffers, a Person
     * array, a container object and a component tree — the mixed payload all
     * transcoders are measured against.
     */
    private static MemcachedBackupSession createSession( final MemcachedBackupSessionManager manager, final String id,
            final int countPersons, final int countNodesPerEdge ) {
        final MemcachedBackupSession session = manager.createEmptySession();
        session.setId( id );
        session.setValid( true );

        session.setAttribute( "stringbuffer", new StringBuffer( "<string\n&buffer/>" ) );
        session.setAttribute( "stringbuilder", new StringBuilder( "<string\n&buffer/>" ) );

        session.setAttribute( "persons", createPersons( countPersons ) );
        session.setAttribute( "mycontainer", new TestClasses.MyContainer() );

        session.setAttribute( "component", createComponents( countNodesPerEdge ) );

        return session;
    }

    /** Builds a two-level component tree with {@code countNodesPerEdge} fan-out. */
    private static Component createComponents( final int countNodesPerEdge ) {
        final Component root = new Component( "root" );
        for ( int i = 0; i < countNodesPerEdge; i++ ) {
            final Component node = new Component( "child" + i );
            addChildren( node, countNodesPerEdge );
            root.addChild( node );
        }
        return root;
    }

    /** Appends {@code count} leaf children to the given node. */
    private static void addChildren( final Component node, final int count ) {
        for ( int i = 0; i < count; i++ ) {
            node.addChild( new Component( node.getName() + "-" + i ) );
        }
    }

    /**
     * Builds {@code countPersons} interlinked Person objects (each person is
     * a friend of the previous one, producing an object graph rather than a
     * flat array).
     */
    private static Person[] createPersons( final int countPersons ) {
        final Person[] persons = new Person[countPersons];
        for( int i = 0; i < countPersons; i++ ) {
            final Calendar dateOfBirth = Calendar.getInstance();
            dateOfBirth.set( Calendar.YEAR, dateOfBirth.get( Calendar.YEAR ) - 42 );
            final Person person = TestClasses.createPerson( "Firstname" + i + " Lastname" + i,
                    i % 2 == 0 ? Gender.FEMALE : Gender.MALE,
                    dateOfBirth,
                    "email" + i + "-1@example.org", "email" + i + "-2@example.org", "email" + i + "-3@example.org" );
            person.addAddress( new Address( "route66", "123456", "sincity", "sincountry" ) );

            if ( i > 0 ) {
                person.addFriend( persons[i - 1] );
            }

            persons[i] = person;
        }
        return persons;
    }

    /**
     * Minimal manager wired to a StandardContext whose loader simply reuses
     * the current thread's context class loader.
     */
    private static MemcachedBackupSessionManager createManager() {
        final MemcachedBackupSessionManager manager = new MemcachedBackupSessionManager();

        final StandardContext container = new StandardContext();
        manager.setContainer( container );

        final WebappLoader webappLoader = new WebappLoader() {
            /**
             * {@inheritDoc}
             */
            @Override
            public ClassLoader getClassLoader() {
                return Thread.currentThread().getContextClassLoader();
            }
        };
        manager.getContainer().setLoader( webappLoader );

        return manager;
    }

    /**
     * Accumulates min/avg/max and payload size over registered samples.
     *
     * NOTE(review): avg uses an AtomicInteger counter but min/max/avg updates
     * themselves are not synchronized — fine for the single-threaded use in
     * this benchmark, not thread-safe in general.
     */
    static class Stats {

        long min;
        long max;
        double avg;
        int size;

        // True until the first sample; makes the first value seed min/max.
        private boolean _first = true;
        private final AtomicInteger _count = new AtomicInteger();

        /**
         * A utility method that calculates the difference of the time
         * between the given <code>startInMillis</code> and {@link System#currentTimeMillis()}
         * and registers the difference via {@link #register(long)}.
         * @param startInMillis the time in millis that shall be subtracted from {@link System#currentTimeMillis()}.
         */
        public void registerSince( final long startInMillis ) {
            register( System.currentTimeMillis() - startInMillis );
        }

        public void setSize( final int size ) {
            this.size = size;
        }

        /**
         * Register the given value.
         * @param value the value to register.
         */
        public void register( final long value ) {
            if ( value < min || _first ) {
                min = value;
            }
            if ( value > max || _first ) {
                max = value;
            }
            // Incremental running mean: (old mean * old count + value) / new count.
            avg = ( avg * _count.get() + value ) / _count.incrementAndGet();
            _first = false;
        }

        /**
         * Returns a string array with labels and values of count, min, avg and max.
         * @return a String array.
         */
        public String[] getInfo() {
            return new String[] {
                    "Count = " + _count.get(),
                    "Min = "+ min,
                    "Avg = "+ avg,
                    "Max = "+ max
            };
        }

    }

}
apache-2.0
astubbs/wicket.get-portals2
wicket-examples/src/main/java/org/apache/wicket/examples/signin2/Home.java
1174
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.examples.signin2; import org.apache.wicket.PageParameters; /** * Simple home page. * * @author Jonathan Locke */ public class Home extends AuthenticatedWebPage { /** * Constructor * * @param parameters * Page parameters (ignored since this is the home page) */ public Home(final PageParameters parameters) { } }
apache-2.0
ButterflyNetwork/bazel
src/main/java/com/google/devtools/build/lib/analysis/configuredtargets/InputFileConfiguredTarget.java
3565
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis.configuredtargets; import com.google.common.base.Preconditions; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.analysis.TargetContext; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.packages.InputFile; import com.google.devtools.build.lib.packages.License; import com.google.devtools.build.lib.packages.PackageSpecification.PackageGroupContents; import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec; import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec.Instantiator; import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec.VisibleForSerialization; import com.google.devtools.build.lib.skylarkinterface.SkylarkPrinter; import com.google.devtools.build.lib.skylarkinterface.SkylarkValue; /** * A ConfiguredTarget for an InputFile. * * <p>All InputFiles for the same target are equivalent, so configuration does not play any role * here and is always set to <b>null</b>. 
*/ @AutoCodec public final class InputFileConfiguredTarget extends FileConfiguredTarget implements SkylarkValue { private final NestedSet<TargetLicense> licenses; @Instantiator @VisibleForSerialization InputFileConfiguredTarget( Label label, NestedSet<PackageGroupContents> visibility, Artifact artifact, NestedSet<TargetLicense> licenses) { super(label, null, visibility, artifact); this.licenses = licenses; } public InputFileConfiguredTarget( TargetContext targetContext, InputFile inputFile, Artifact artifact) { this(inputFile.getLabel(), targetContext.getVisibility(), artifact, makeLicenses(inputFile)); Preconditions.checkArgument(getConfigurationKey() == null, getLabel()); Preconditions.checkArgument(targetContext.getTarget() == inputFile, getLabel()); } private static NestedSet<TargetLicense> makeLicenses(InputFile inputFile) { License license = inputFile.getLicense(); return license == License.NO_LICENSE ? NestedSetBuilder.emptySet(Order.LINK_ORDER) : NestedSetBuilder.create( Order.LINK_ORDER, new TargetLicense(inputFile.getLabel(), license)); } @Override public String toString() { return "InputFileConfiguredTarget(" + getLabel() + ")"; } @Override public final NestedSet<TargetLicense> getTransitiveLicenses() { return licenses; } @Override public TargetLicense getOutputLicenses() { return null; } @Override public boolean hasOutputLicenses() { return false; } @Override public void repr(SkylarkPrinter printer) { printer.append("<input file target " + getLabel() + ">"); } }
apache-2.0
ZeroMemes/ClientAPI
src/main/java/clientapi/load/mixin/extension/IMinecraft.java
1304
/*
 * Copyright 2018 ImpactDevelopment
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package clientapi.load.mixin.extension;

import net.minecraft.util.Session;
import net.minecraft.util.Timer;

/**
 * Extension interface applied to the Minecraft class via mixins, exposing
 * internals that are not otherwise accessible.
 *
 * @author Brady
 * @since 2/20/2017
 */
public interface IMinecraft {

    /**
     * Returns the game's {@link Timer}.
     *
     * @return the game timer
     */
    Timer getTimer();

    /**
     * Replaces the game's current session.
     *
     * @param session the new {@link Session}
     */
    void setSession(Session session);

    /**
     * Sets the delay applied before another right click is processed.
     *
     * @param delay the new right click delay
     */
    void setRightClickDelayTimer(int delay);

    /**
     * Simulates a click of the given mouse button.
     *
     * @param button the button to click (LEFT, MIDDLE, RIGHT)
     */
    void clickMouse(int button);
}
apache-2.0
Akari10032/12306
app/src/main/java/com/akari/tickets/beans/QueryTrainsResponse.java
848
package com.akari.tickets.beans;

import java.util.List;

/**
 * Response bean for a train query; mirrors the JSON payload returned by the
 * query endpoint.
 *
 * Created by Akari on 2017/2/17.
 */
public class QueryTrainsResponse {

    // Result rows of the query; presumably one entry per train — verify against caller.
    private List<Data> data;

    public List<Data> getData() {
        return data;
    }

    public void setData(List<Data> data) {
        this.data = data;
    }

    /** A single entry of the query result. */
    public static class Data {

        // Opaque token returned by the query; NOTE(review): looks like the value
        // passed back when submitting an order — confirm with the consuming code.
        private String secretStr;

        // Nested DTO carrying the detailed per-train fields.
        private QueryLeftNewDTO queryLeftNewDTO;

        public String getSecretStr() {
            return secretStr;
        }

        public void setSecretStr(String secretStr) {
            this.secretStr = secretStr;
        }

        public QueryLeftNewDTO getQueryLeftNewDTO() {
            return queryLeftNewDTO;
        }

        public void setQueryLeftNewDTO(QueryLeftNewDTO queryLeftNewDTO) {
            this.queryLeftNewDTO = queryLeftNewDTO;
        }
    }
}
apache-2.0
ServiceComb/java-chassis
integration-tests/it-consumer/src/main/java/org/apache/servicecomb/it/schema/generic/TestMyService.java
3513
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.servicecomb.it.schema.generic;

import java.util.ArrayList;
import java.util.List;

import org.apache.servicecomb.it.Consumers;
import org.junit.Assert;
import org.junit.Test;

/**
 * Integration test for the generic {@link IMyService} schema, exercised through
 * two endpoints ("MyEndpoint" and "MyEndpointWithInterface"). Both endpoints
 * must behave identically, so the assertion sequence is shared.
 */
public class TestMyService {
  private static Consumers<IMyService> myservice = new Consumers<>("MyEndpoint", IMyService.class);

  private static Consumers<IMyService> myserviceWithInterface =
      new Consumers<>("MyEndpointWithInterface", IMyService.class);

  @Test
  public void testServiceInoke() {
    verifyInvocations(myservice);
  }

  @Test
  public void testServiceWithInterfaceInoke() {
    verifyInvocations(myserviceWithInterface);
  }

  /**
   * Runs the full invocation/assertion sequence against the given consumer.
   * Previously this code was duplicated verbatim in both test methods.
   *
   * @param consumers the endpoint wrapper to invoke
   */
  private static void verifyInvocations(Consumers<IMyService> consumers) {
    PersonBean bean = new PersonBean();
    bean.setName("p");

    PersonBean resultBean = consumers.getIntf().hello(bean);
    Assert.assertEquals("p", resultBean.getName());

    resultBean = consumers.getIntf().hello(bean, "p");
    Assert.assertEquals("p:p", resultBean.getName());

    resultBean = consumers.getIntf().actual();
    Assert.assertEquals("p", resultBean.getName());

    resultBean = consumers.getIntf().objectParam("p");
    Assert.assertEquals("p", resultBean.getName());

    resultBean = consumers.getIntf().objectParamTwo("p", "p");
    Assert.assertEquals("p:p", resultBean.getName());

    // Array body round-trip.
    PersonBean[] beanArray = new PersonBean[] {bean};
    PersonBean[] beanArrayResult = consumers.getIntf().helloBody(beanArray);
    Assert.assertEquals("p", beanArrayResult[0].getName());

    // List body round-trip.
    List<PersonBean> beanList = new ArrayList<>();
    beanList.add(bean);
    List<PersonBean> beanListResult = consumers.getIntf().helloList(beanList);
    Assert.assertEquals("p", beanListResult.get(0).getName());
  }
}
apache-2.0
TkmTwoProjects/tkmtwo-sarapi
core/src/main/java/com/tkmtwo/sarapi/convert/DateInfoToValueConverter.java
989
/* * * Copyright 2014 Tom Mahaffey * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.tkmtwo.sarapi.convert; import com.bmc.arsys.api.DateInfo; import com.bmc.arsys.api.Value; import org.springframework.core.convert.converter.Converter; /** * */ public final class DateInfoToValueConverter implements Converter<DateInfo, Value> { @Override public Value convert(DateInfo di) { return (di == null) ? new Value() : new Value(di); } }
apache-2.0
hydrator/wrangler
wrangler-service/src/main/java/io/cdap/wrangler/service/database/DriverCleanup.java
1265
/* * Copyright © 2017-2019 Cask Data, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package io.cdap.wrangler.service.database; import com.google.common.base.Throwables; import io.cdap.cdap.etl.api.Destroyable; import java.sql.DriverManager; import java.sql.SQLException; import javax.annotation.Nullable; /** * Cleans up JDBC drivers. */ public class DriverCleanup implements Destroyable { private final JDBCDriverShim driverShim; DriverCleanup(@Nullable JDBCDriverShim driverShim) { this.driverShim = driverShim; } public void destroy() { if (driverShim != null) { try { DriverManager.deregisterDriver(driverShim); } catch (SQLException e) { throw Throwables.propagate(e); } } } }
apache-2.0
gladyscarrizales/manifoldcf
framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/HopFilterManager.java
8021
/* $Id: HopFilterManager.java 988245 2010-08-23 18:39:35Z kwright $ */ /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.manifoldcf.crawler.jobs; import org.apache.manifoldcf.core.interfaces.*; import org.apache.manifoldcf.agents.interfaces.*; import org.apache.manifoldcf.crawler.interfaces.*; import java.util.*; /** This class manages the "hopfilters" table, which contains the hopcount filters for each job. * It's separated from the main jobs table because we will need multiple hop filters per job. 
* * <br><br> * <b>jobhopfilters</b> * <table border="1" cellpadding="3" cellspacing="0"> * <tr class="TableHeadingColor"> * <th>Field</th><th>Type</th><th>Description&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</th> * <tr><td>ownerid</td><td>BIGINT</td><td>Reference:jobs.id</td></tr> * <tr><td>linktype</td><td>VARCHAR(255)</td><td></td></tr> * <tr><td>maxhops</td><td>BIGINT</td><td></td></tr> * </table> * <br><br> * */ public class HopFilterManager extends org.apache.manifoldcf.core.database.BaseTable { public static final String _rcsid = "@(#)$Id: HopFilterManager.java 988245 2010-08-23 18:39:35Z kwright $"; // Schema public final static String ownerIDField = "ownerid"; public final static String linkTypeField = "linktype"; public final static String maxHopsField = "maxhops"; /** Constructor. *@param threadContext is the thread context. *@param database is the database instance. */ public HopFilterManager(IThreadContext threadContext, IDBInterface database) throws ManifoldCFException { super(database,"jobhopfilters"); } /** Install or upgrade. *@param ownerTable is the name of the table that owns this one. *@param owningTablePrimaryKey is the primary key of the owning table. */ public void install(String ownerTable, String owningTablePrimaryKey) throws ManifoldCFException { // Standard practice: outer loop while (true) { Map existing = getTableSchema(null,null); if (existing == null) { HashMap map = new HashMap(); map.put(ownerIDField,new ColumnDescription("BIGINT",false,false,ownerTable,owningTablePrimaryKey,false)); // Null link types are NOT allowed here. The restrictions can only be made on a real link type. 
map.put(linkTypeField,new ColumnDescription("VARCHAR(255)",false,false,null,null,false)); map.put(maxHopsField,new ColumnDescription("BIGINT",false,false,null,null,false)); performCreate(map,null); } else { // Upgrade code goes here, as needed } // Index management IndexDescription ownerIndex = new IndexDescription(true,new String[]{ownerIDField,linkTypeField}); // Get rid of indexes that shouldn't be there Map indexes = getTableIndexes(null,null); Iterator iter = indexes.keySet().iterator(); while (iter.hasNext()) { String indexName = (String)iter.next(); IndexDescription id = (IndexDescription)indexes.get(indexName); if (ownerIndex != null && id.equals(ownerIndex)) ownerIndex = null; else if (indexName.indexOf("_pkey") == -1) // This index shouldn't be here; drop it performRemoveIndex(indexName); } // Add the ones we didn't find if (ownerIndex != null) performAddIndex(null,ownerIndex); break; } } /** Uninstall. */ public void deinstall() throws ManifoldCFException { performDrop(null); } /** Read rows for a given owner id. *@param id is the owner id. *@return a map of link type to max hop count (as a Long). */ public Map readRows(Long id) throws ManifoldCFException { ArrayList list = new ArrayList(); list.add(id); IResultSet set = performQuery("SELECT "+linkTypeField+","+maxHopsField+" FROM "+getTableName()+" WHERE "+ownerIDField+"=?",list, null,null); Map rval = new HashMap(); if (set.getRowCount() == 0) return rval; int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); String linkType = (String)row.getValue(linkTypeField); Long max = (Long)row.getValue(maxHopsField); rval.put(linkType,max); i++; } return rval; } /** Fill in a set of filters corresponding to a set of owner id's. *@param returnValues is a map keyed by ownerID, with value of JobDescription. *@param ownerIDList is the list of owner id's. *@param ownerIDParams is the corresponding set of owner id parameters. 
*/ public void getRows(Map<Long,JobDescription> returnValues, String ownerIDList, ArrayList ownerIDParams) throws ManifoldCFException { IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+ownerIDField+" IN ("+ownerIDList+")",ownerIDParams, null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long ownerID = (Long)row.getValue(ownerIDField); String linkType = (String)row.getValue(linkTypeField); Long maxHops = (Long)row.getValue(maxHopsField); returnValues.get(ownerID).addHopCountFilter(linkType,maxHops); i++; } } /** Compare a filter list against what's in a job description. *@param ownerID is the owning identifier. *@param list is the job description to write hopcount filters for. */ public boolean compareRows(Long ownerID, IJobDescription list) throws ManifoldCFException { // Compare hopcount filter criteria. Map filterRows = readRows(ownerID); Map newFilterRows = list.getHopCountFilters(); if (filterRows.size() != newFilterRows.size()) return false; for (String linkType : (Collection<String>)filterRows.keySet()) { Long oldCount = (Long)filterRows.get(linkType); Long newCount = (Long)newFilterRows.get(linkType); if (oldCount == null || newCount == null) return false; if (oldCount.longValue() != newCount.longValue()) return false; } return true; } /** Write a filter list into the database. *@param ownerID is the owning identifier. *@param list is the job description to write hopcount filters for. 
*/ public void writeRows(Long ownerID, IJobDescription list) throws ManifoldCFException { beginTransaction(); try { int i = 0; HashMap map = new HashMap(); Map filters = list.getHopCountFilters(); Iterator iter = filters.keySet().iterator(); while (iter.hasNext()) { String linkType = (String)iter.next(); Long maxHops = (Long)filters.get(linkType); map.clear(); map.put(linkTypeField,linkType); map.put(maxHopsField,maxHops); map.put(ownerIDField,ownerID); performInsert(map,null); } } catch (ManifoldCFException e) { signalRollback(); throw e; } catch (Error e) { signalRollback(); throw e; } finally { endTransaction(); } } /** Delete rows. *@param ownerID is the owner whose rows to delete. */ public void deleteRows(Long ownerID) throws ManifoldCFException { ArrayList list = new ArrayList(); list.add(ownerID); performDelete("WHERE "+ownerIDField+"=?",list,null); } }
apache-2.0
apache/logging-log4j2
log4j-1.2-api/src/test/java/org/apache/log4j/util/LineNumberFilter.java
1310
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.log4j.util; import org.apache.oro.text.perl.Perl5Util; public class LineNumberFilter implements Filter { Perl5Util util = new Perl5Util(); @Override public String filter(final String in) { if (util.match("/\\(.*:\\d{1,4}\\)/", in)) { return util.substitute("s/:\\d{1,4}\\)/:XXX)/", in); } if (in.indexOf(", Compiled Code") >= 0) { return util.substitute("s/, Compiled Code/:XXX/", in); } return in; } }
apache-2.0
eug48/hapi-fhir
hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Since.java
1368
package ca.uhn.fhir.rest.annotation; /* * #%L * HAPI FHIR - Core Library * %% * Copyright (C) 2014 - 2017 University Health Network * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import ca.uhn.fhir.rest.param.DateParam; import ca.uhn.fhir.rest.param.DateRangeParam; /** * Parameter annotation for the _since parameter, which indicates to the * server that only results dated since the given instant will be returned. * <p> * Parameters with this annotation should be of type {@link DateParam} or {@link DateRangeParam} * </p> * * @see History */ @Target(value=ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) public @interface Since { //nothing }
apache-2.0
EvilMcJerkface/crate
server/src/main/java/org/elasticsearch/common/unit/ByteSizeUnit.java
6846
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.unit; /** * A {@code SizeUnit} represents size at a given unit of * granularity and provides utility methods to convert across units. * A {@code SizeUnit} does not maintain size information, but only * helps organize and use size representations that may be maintained * separately across various contexts. 
*/ public enum ByteSizeUnit { BYTES { @Override public long toBytes(long size) { return size; } @Override public long toKB(long size) { return size / (C1 / C0); } @Override public long toMB(long size) { return size / (C2 / C0); } @Override public long toGB(long size) { return size / (C3 / C0); } @Override public long toTB(long size) { return size / (C4 / C0); } @Override public long toPB(long size) { return size / (C5 / C0); } @Override public String getSuffix() { return "b"; } }, KB { @Override public long toBytes(long size) { return x(size, C1 / C0, MAX / (C1 / C0)); } @Override public long toKB(long size) { return size; } @Override public long toMB(long size) { return size / (C2 / C1); } @Override public long toGB(long size) { return size / (C3 / C1); } @Override public long toTB(long size) { return size / (C4 / C1); } @Override public long toPB(long size) { return size / (C5 / C1); } @Override public String getSuffix() { return "kb"; } }, MB { @Override public long toBytes(long size) { return x(size, C2 / C0, MAX / (C2 / C0)); } @Override public long toKB(long size) { return x(size, C2 / C1, MAX / (C2 / C1)); } @Override public long toMB(long size) { return size; } @Override public long toGB(long size) { return size / (C3 / C2); } @Override public long toTB(long size) { return size / (C4 / C2); } @Override public long toPB(long size) { return size / (C5 / C2); } @Override public String getSuffix() { return "mb"; } }, GB { @Override public long toBytes(long size) { return x(size, C3 / C0, MAX / (C3 / C0)); } @Override public long toKB(long size) { return x(size, C3 / C1, MAX / (C3 / C1)); } @Override public long toMB(long size) { return x(size, C3 / C2, MAX / (C3 / C2)); } @Override public long toGB(long size) { return size; } @Override public long toTB(long size) { return size / (C4 / C3); } @Override public long toPB(long size) { return size / (C5 / C3); } @Override public String getSuffix() { return "gb"; } }, TB { @Override public long toBytes(long size) { 
return x(size, C4 / C0, MAX / (C4 / C0)); } @Override public long toKB(long size) { return x(size, C4 / C1, MAX / (C4 / C1)); } @Override public long toMB(long size) { return x(size, C4 / C2, MAX / (C4 / C2)); } @Override public long toGB(long size) { return x(size, C4 / C3, MAX / (C4 / C3)); } @Override public long toTB(long size) { return size; } @Override public long toPB(long size) { return size / (C5 / C4); } @Override public String getSuffix() { return "tb"; } }, PB { @Override public long toBytes(long size) { return x(size, C5 / C0, MAX / (C5 / C0)); } @Override public long toKB(long size) { return x(size, C5 / C1, MAX / (C5 / C1)); } @Override public long toMB(long size) { return x(size, C5 / C2, MAX / (C5 / C2)); } @Override public long toGB(long size) { return x(size, C5 / C3, MAX / (C5 / C3)); } @Override public long toTB(long size) { return x(size, C5 / C4, MAX / (C5 / C4)); } @Override public long toPB(long size) { return size; } @Override public String getSuffix() { return "pb"; } }; static final long C0 = 1L; static final long C1 = C0 * 1024L; static final long C2 = C1 * 1024L; static final long C3 = C2 * 1024L; static final long C4 = C3 * 1024L; static final long C5 = C4 * 1024L; static final long MAX = Long.MAX_VALUE; public static ByteSizeUnit fromId(int id) { if (id < 0 || id >= values().length) { throw new IllegalArgumentException("No byte size unit found for id [" + id + "]"); } return values()[id]; } /** * Scale d by m, checking for overflow. * This has a short name to make above code more readable. */ static long x(long d, long m, long over) { if (d > over) return Long.MAX_VALUE; if (d < -over) return Long.MIN_VALUE; return d * m; } public abstract long toBytes(long size); public abstract long toKB(long size); public abstract long toMB(long size); public abstract long toGB(long size); public abstract long toTB(long size); public abstract long toPB(long size); public abstract String getSuffix(); }
apache-2.0
onders86/camel
components/camel-spring/src/test/java/org/apache/camel/component/bean/AutowireConstructorRouteTest.java
1776
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.bean; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.spring.SpringTestSupport; import org.junit.Test; import org.springframework.context.support.AbstractXmlApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; /** * @version */ public class AutowireConstructorRouteTest extends SpringTestSupport { @Test public void testAutowireConstructor() throws Exception { MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedBodiesReceived("Hello Camel at 1", "Hello World at 2"); template.sendBody("direct:start", "Camel"); template.sendBody("direct:start", "World"); assertMockEndpointsSatisfied(); } protected AbstractXmlApplicationContext createApplicationContext() { return new ClassPathXmlApplicationContext("org/apache/camel/component/bean/AutowireConstructorRouteTest.xml"); } }
apache-2.0
dmeister/pig-cll-gz
src/org/apache/pig/newplan/logical/rules/ColumnPruneVisitor.java
20333
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.newplan.logical.rules; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.pig.LoadFunc; import org.apache.pig.LoadPushDown; import org.apache.pig.LoadPushDown.RequiredField; import org.apache.pig.LoadPushDown.RequiredFieldList; import org.apache.pig.data.DataType; import org.apache.pig.impl.logicalLayer.FrontendException; import org.apache.pig.impl.util.Pair; import org.apache.pig.newplan.Operator; import org.apache.pig.newplan.OperatorPlan; import org.apache.pig.newplan.ReverseDependencyOrderWalker; import org.apache.pig.newplan.logical.Util; import org.apache.pig.newplan.logical.expression.LogicalExpressionPlan; import org.apache.pig.newplan.logical.expression.ProjectExpression; import org.apache.pig.newplan.logical.relational.LOCogroup; import org.apache.pig.newplan.logical.relational.LOCross; import org.apache.pig.newplan.logical.relational.LOFilter; import org.apache.pig.newplan.logical.relational.LOForEach; import org.apache.pig.newplan.logical.relational.LOGenerate; import 
org.apache.pig.newplan.logical.relational.LOInnerLoad; import org.apache.pig.newplan.logical.relational.LOJoin; import org.apache.pig.newplan.logical.relational.LOLoad; import org.apache.pig.newplan.logical.relational.LOSort; import org.apache.pig.newplan.logical.relational.LOSplit; import org.apache.pig.newplan.logical.relational.LOSplitOutput; import org.apache.pig.newplan.logical.relational.LOStore; import org.apache.pig.newplan.logical.relational.LOUnion; import org.apache.pig.newplan.logical.relational.LogicalPlan; import org.apache.pig.newplan.logical.relational.LogicalRelationalNodesVisitor; import org.apache.pig.newplan.logical.relational.LogicalRelationalOperator; import org.apache.pig.newplan.logical.relational.LogicalSchema; public class ColumnPruneVisitor extends LogicalRelationalNodesVisitor { protected static final Log log = LogFactory.getLog(ColumnPruneVisitor.class); private Map<LOLoad,Pair<Map<Integer,Set<String>>,Set<Integer>>> requiredItems = new HashMap<LOLoad,Pair<Map<Integer,Set<String>>,Set<Integer>>>(); private boolean columnPrune; public ColumnPruneVisitor(OperatorPlan plan, Map<LOLoad,Pair<Map<Integer,Set<String>>,Set<Integer>>> requiredItems, boolean columnPrune) throws FrontendException { super(plan, new ReverseDependencyOrderWalker(plan)); this.columnPrune = columnPrune; this.requiredItems = requiredItems; } public void addRequiredItems(LOLoad load, Pair<Map<Integer,Set<String>>,Set<Integer>> requiredItem) { requiredItems.put(load, requiredItem); } @Override public void visit(LOLoad load) throws FrontendException { if(! 
requiredItems.containsKey( load ) ) { return; } Pair<Map<Integer,Set<String>>,Set<Integer>> required = requiredItems.get(load); RequiredFieldList requiredFields = new RequiredFieldList(); LogicalSchema s = load.getSchema(); for (int i=0;i<s.size();i++) { RequiredField requiredField = null; // As we have done processing ahead, we assume that // a column is not present in both ColumnPruner and // MapPruner if( required.first != null && required.first.containsKey(i) ) { requiredField = new RequiredField(); requiredField.setIndex(i); requiredField.setAlias(s.getField(i).alias); requiredField.setType(s.getField(i).type); List<RequiredField> subFields = new ArrayList<RequiredField>(); for( String key : required.first.get(i) ) { RequiredField subField = new RequiredField(key,-1,null,DataType.BYTEARRAY); subFields.add(subField); } requiredField.setSubFields(subFields); requiredFields.add(requiredField); } if( required.second != null && required.second.contains(i) ) { requiredField = new RequiredField(); requiredField.setIndex(i); requiredField.setAlias(s.getField(i).alias); requiredField.setType(s.getField(i).type); requiredFields.add(requiredField); } } boolean[] columnRequired = new boolean[s.size()]; for (RequiredField rf : requiredFields.getFields()) columnRequired[rf.getIndex()] = true; List<Pair<Integer, Integer>> pruneList = new ArrayList<Pair<Integer, Integer>>(); for (int i=0;i<columnRequired.length;i++) { if (!columnRequired[i]) pruneList.add(new Pair<Integer, Integer>(0, i)); } StringBuffer message = new StringBuffer(); if (pruneList.size()!=0) { message.append("Columns pruned for " + load.getAlias() + ": "); for (int i=0;i<pruneList.size();i++) { message.append("$"+pruneList.get(i).second); if (i!=pruneList.size()-1) message.append(", "); } log.info(message); } message = new StringBuffer(); for(RequiredField rf: requiredFields.getFields()) { List<RequiredField> sub = rf.getSubFields(); if (sub != null) { message.append("Map key required for " + load.getAlias() 
+ ": $" + rf.getIndex() + "->" + sub + "\n"); } } if (message.length()!=0) log.info(message); LoadPushDown.RequiredFieldResponse response = null; try { LoadFunc loadFunc = load.getLoadFunc(); if (loadFunc instanceof LoadPushDown) { response = ((LoadPushDown)loadFunc).pushProjection(requiredFields); } } catch (FrontendException e) { log.warn("pushProjection on "+load+" throw an exception, skip it"); } // Loader does not support column pruning, insert foreach if (columnPrune) { if (response==null || !response.getRequiredFieldResponse()) { LogicalPlan p = (LogicalPlan)load.getPlan(); Operator next = p.getSuccessors(load).get(0); // if there is already a LOForEach after load, we don't need to // add another LOForEach if (next instanceof LOForEach) { return; } LOForEach foreach = new LOForEach(load.getPlan()); // add foreach to the base plan p.add(foreach); p.insertBetween(load, foreach, next); LogicalPlan innerPlan = new LogicalPlan(); foreach.setInnerPlan(innerPlan); // build foreach inner plan List<LogicalExpressionPlan> exps = new ArrayList<LogicalExpressionPlan>(); LOGenerate gen = new LOGenerate(innerPlan, exps, new boolean[requiredFields.getFields().size()]); innerPlan.add(gen); for (int i=0; i<requiredFields.getFields().size(); i++) { LoadPushDown.RequiredField rf = requiredFields.getFields().get(i); LOInnerLoad innerLoad = new LOInnerLoad(innerPlan, foreach, rf.getIndex()); innerPlan.add(innerLoad); innerPlan.connect(innerLoad, gen); LogicalExpressionPlan exp = new LogicalExpressionPlan(); ProjectExpression prj = new ProjectExpression(exp, i, -1, gen); exp.add(prj); exps.add(exp); } } else { // columns are pruned, reset schema for LOLoader List<Integer> requiredIndexes = new ArrayList<Integer>(); List<LoadPushDown.RequiredField> fieldList = requiredFields.getFields(); for (int i=0; i<fieldList.size(); i++) { requiredIndexes.add(fieldList.get(i).getIndex()); } load.setRequiredFields(requiredIndexes); LogicalSchema newSchema = new LogicalSchema(); for (int i=0; 
i<fieldList.size(); i++) { newSchema.addField(s.getField(fieldList.get(i).getIndex())); } load.setSchema(newSchema); } } } @Override public void visit(LOFilter filter) throws FrontendException { } @Override public void visit(LOSplitOutput splitOutput) throws FrontendException { } @SuppressWarnings("unchecked") @Override public void visit(LOSplit split) throws FrontendException { List<Operator> branchOutputs = split.getPlan().getSuccessors(split); for (int i=0;i<branchOutputs.size();i++) { Operator branchOutput = branchOutputs.get(i); Set<Long> branchOutputUids = (Set<Long>)branchOutput.getAnnotation(ColumnPruneHelper.INPUTUIDS); if (branchOutputUids!=null) { Set<Integer> columnsToDrop = new HashSet<Integer>(); for (int j=0;j<split.getSchema().size();j++) { if (!branchOutputUids.contains(split.getSchema().getField(j).uid)) columnsToDrop.add(j); } if (!columnsToDrop.isEmpty()) { LOForEach foreach = Util.addForEachAfter((LogicalPlan)split.getPlan(), split, i, columnsToDrop); foreach.getSchema(); } } } } @Override public void visit(LOSort sort) throws FrontendException { } @Override public void visit(LOStore store) throws FrontendException { } @Override public void visit( LOCogroup cg ) throws FrontendException { addForEachIfNecessary(cg); } @Override public void visit(LOJoin join) throws FrontendException { } @Override public void visit(LOCross cross) throws FrontendException { } @Override @SuppressWarnings("unchecked") public void visit(LOForEach foreach) throws FrontendException { if (!columnPrune) { return; } // get column numbers from input uids Set<Long> inputUids = (Set<Long>)foreach.getAnnotation(ColumnPruneHelper.INPUTUIDS); // Get all top level projects LogicalPlan innerPlan = foreach.getInnerPlan(); List<LOInnerLoad> innerLoads= new ArrayList<LOInnerLoad>(); List<Operator> sources = innerPlan.getSources(); for (Operator s : sources) { if (s instanceof LOInnerLoad) innerLoads.add((LOInnerLoad)s); } // If project of the innerLoad is not in INPUTUIDS, remove 
this innerLoad Set<LOInnerLoad> innerLoadsToRemove = new HashSet<LOInnerLoad>(); for (LOInnerLoad innerLoad: innerLoads) { ProjectExpression project = innerLoad.getProjection(); if (project.isProjectStar()) { LogicalSchema.LogicalFieldSchema tupleFS = project.getFieldSchema(); // Check the first component of the star projection long uid = tupleFS.schema.getField(0).uid; if (!inputUids.contains(uid)) innerLoadsToRemove.add(innerLoad); } else { if (!inputUids.contains(project.getFieldSchema().uid)) innerLoadsToRemove.add(innerLoad); } } // Find the logical operator immediate precede LOGenerate which should be removed (the whole branch) Set<LogicalRelationalOperator> branchHeadToRemove = new HashSet<LogicalRelationalOperator>(); for (LOInnerLoad innerLoad : innerLoadsToRemove) { Operator op = innerLoad; while (!(innerPlan.getSuccessors(op).get(0) instanceof LOGenerate)) { op = innerPlan.getSuccessors(op).get(0); } branchHeadToRemove.add((LogicalRelationalOperator)op); } // Find the expression plan to remove LOGenerate gen = (LOGenerate)innerPlan.getSinks().get(0); List<LogicalExpressionPlan> genPlansToRemove = new ArrayList<LogicalExpressionPlan>(); List<LogicalExpressionPlan> genPlans = gen.getOutputPlans(); for (int i=0;i<genPlans.size();i++) { LogicalExpressionPlan expPlan = genPlans.get(i); List<Operator> expSources = expPlan.getSinks(); for (Operator expSrc : expSources) { if (expSrc instanceof ProjectExpression) { LogicalRelationalOperator reference = ((ProjectExpression)expSrc).findReferent(); if (branchHeadToRemove.contains(reference)) { genPlansToRemove.add(expPlan); } } } } // Build the temporary structure based on genPlansToRemove, which include: // * flattenList // * outputPlanSchemas // * uidOnlySchemas // * inputsRemoved // We first construct inputsNeeded, and inputsRemoved = (all inputs) - inputsNeeded. // We cannot figure out inputsRemoved directly since the inputs may be used by other output plan. 
// We can only get inputsRemoved after visiting all output plans. List<Boolean> flattenList = new ArrayList<Boolean>(); Set<Integer> inputsNeeded = new HashSet<Integer>(); Set<Integer> inputsRemoved = new HashSet<Integer>(); List<LogicalSchema> outputPlanSchemas = new ArrayList<LogicalSchema>(); List<LogicalSchema> uidOnlySchemas = new ArrayList<LogicalSchema>(); List<LogicalSchema> userDefinedSchemas = null; if (gen.getUserDefinedSchema()!=null) userDefinedSchemas = new ArrayList<LogicalSchema>(); for (int i=0;i<genPlans.size();i++) { LogicalExpressionPlan genPlan = genPlans.get(i); if (!genPlansToRemove.contains(genPlan)) { flattenList.add(gen.getFlattenFlags()[i]); outputPlanSchemas.add(gen.getOutputPlanSchemas().get(i)); uidOnlySchemas.add(gen.getUidOnlySchemas().get(i)); if (gen.getUserDefinedSchema()!=null) { userDefinedSchemas.add(gen.getUserDefinedSchema().get(i)); } List<Operator> sinks = genPlan.getSinks(); for(Operator s: sinks) { if (s instanceof ProjectExpression) { inputsNeeded.add(((ProjectExpression)s).getInputNum()); } } } } List<Operator> preds = innerPlan.getPredecessors(gen); if (preds!=null) { // otherwise, all gen plan are based on constant, no need to adjust for (int i=0;i<preds.size();i++) { if (!inputsNeeded.contains(i)) inputsRemoved.add(i); } } // Change LOGenerate: remove unneeded output expression plan // change flatten flag, outputPlanSchema, uidOnlySchemas boolean[] flatten = new boolean[flattenList.size()]; for (int i=0;i<flattenList.size();i++) flatten[i] = flattenList.get(i); gen.setFlattenFlags(flatten); gen.setOutputPlanSchemas(outputPlanSchemas); gen.setUidOnlySchemas(uidOnlySchemas); gen.setUserDefinedSchema(userDefinedSchemas); for (LogicalExpressionPlan genPlanToRemove : genPlansToRemove) { genPlans.remove(genPlanToRemove); } // shift project input if (!inputsRemoved.isEmpty()) { for (LogicalExpressionPlan genPlan : genPlans) { List<Operator> sinks = genPlan.getSinks(); for(Operator s: sinks) { if (s instanceof 
ProjectExpression) { int input = ((ProjectExpression)s).getInputNum(); int numToShift = 0; for (int i :inputsRemoved) { if (i<input) numToShift++; } ((ProjectExpression)s).setInputNum(input-numToShift); } } } } // Prune unneeded LOInnerLoad List<LogicalRelationalOperator> predToRemove = new ArrayList<LogicalRelationalOperator>(); for (int i : inputsRemoved) { predToRemove.add((LogicalRelationalOperator)preds.get(i)); } for (LogicalRelationalOperator pred : predToRemove) { removeSubTree(pred); } } @Override public void visit(LOUnion union) throws FrontendException { // AddForEach before union if necessary. List<Operator> preds = new ArrayList<Operator>(); preds.addAll(plan.getPredecessors(union)); for (Operator pred : preds) { addForEachIfNecessary((LogicalRelationalOperator)pred); } } // remove all the operators starting from an operator private void removeSubTree(LogicalRelationalOperator op) throws FrontendException { LogicalPlan p = (LogicalPlan)op.getPlan(); List<Operator> ll = p.getPredecessors(op); if (ll != null) { for(Operator pred: ll) { removeSubTree((LogicalRelationalOperator)pred); } } if (p.getSuccessors(op) != null) { Operator[] succs = p.getSuccessors(op).toArray(new Operator[0]); for(Operator s: succs) { p.disconnect(op, s); } } p.remove(op); } // Add ForEach after op to prune unnecessary columns @SuppressWarnings("unchecked") private void addForEachIfNecessary(LogicalRelationalOperator op) throws FrontendException { Set<Long> outputUids = (Set<Long>)op.getAnnotation(ColumnPruneHelper.OUTPUTUIDS); if (outputUids!=null) { LogicalSchema schema = op.getSchema(); Set<Integer> columnsToDrop = new HashSet<Integer>(); for (int i=0;i<schema.size();i++) { if (!outputUids.contains(schema.getField(i).uid)) columnsToDrop.add(i); } if (!columnsToDrop.isEmpty()) { LOForEach foreach = Util.addForEachAfter((LogicalPlan)op.getPlan(), op, 0, columnsToDrop); foreach.getSchema(); } } } }
apache-2.0
parstream/parstream-avro
decoder/src/test/java/com/parstream/adaptor/avro/test/ArrayTest.java
17043
/** * Copyright 2015 ParStream GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.parstream.adaptor.avro.test; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import java.io.File; import java.util.ArrayList; import java.util.List; import org.apache.avro.Schema; import org.apache.avro.Schema.Parser; import org.apache.avro.generic.GenericArray; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericRecord; import org.apache.avro.util.Utf8; import org.junit.Test; import com.parstream.adaptor.avro.AvroAdaptor; import com.parstream.driver.ColumnInfo; public class ArrayTest { @Test public void testArrayOfUtf8() throws Exception { Schema recordTypeSchema = new Parser().parse(new File("target/test-classes/array/StringUtf8/record.avsc")); // prepare ArrayList with elements to test List<Utf8> arrayElements = new ArrayList<Utf8>(1); arrayElements.add(new Utf8("test string")); // construct the GenericArray from the above ArrayList Schema recordMemberSchema = recordTypeSchema.getField("recordMember").schema(); GenericArray<Utf8> avroArray = new GenericData.Array<Utf8>(recordMemberSchema, arrayElements); // put the GenericArray in a GenericRecord GenericRecord recordType = new GenericData.Record(recordTypeSchema); recordType.put(0, avroArray); ColumnInfo[] colInfo = new ColumnInfo[1]; colInfo[0] = AdaptorTestUtils.constructColumnInfo("id", AdaptorTestUtils.Type.VARSTRING, 0, 0); AvroAdaptor decoder = new 
AvroAdaptor(new File("target/test-classes/array/StringUtf8/avro.ini"), colInfo); List<Object[]> res = decoder.convertRecord(recordType); assertEquals("resulting list size", 1, res.size()); assertArrayEquals("resulting item", new Object[] { "test string" }, res.get(0)); } @Test public void testArrayOfString() throws Exception { Schema recordTypeSchema = new Parser().parse(new File("target/test-classes/array/StringUtf8/record.avsc")); // prepare ArrayList with elements to test List<String> arrayElements = new ArrayList<String>(1); arrayElements.add("test string"); // construct the GenericArray from the above ArrayList Schema recordMemberSchema = recordTypeSchema.getField("recordMember").schema(); GenericArray<String> avroArray = new GenericData.Array<String>(recordMemberSchema, arrayElements); // put the GenericArray in a GenericRecord GenericRecord recordType = new GenericData.Record(recordTypeSchema); recordType.put(0, avroArray); ColumnInfo[] colInfo = new ColumnInfo[1]; colInfo[0] = AdaptorTestUtils.constructColumnInfo("id", AdaptorTestUtils.Type.VARSTRING, 0, 0); AvroAdaptor decoder = new AvroAdaptor(new File("target/test-classes/array/StringUtf8/avro.ini"), colInfo); List<Object[]> res = decoder.convertRecord(recordType); assertEquals("resulting list size", 1, res.size()); assertArrayEquals("resulting item", new Object[] { "test string" }, res.get(0)); } @Test public void testArrayOfInteger() throws Exception { Schema recordTypeSchema = new Parser().parse(new File("target/test-classes/array/Integer/record.avsc")); int intVal = 12; // prepare ArrayList with elements to test List<Integer> arrayElements = new ArrayList<Integer>(1); arrayElements.add(intVal); // construct the GenericArray from the above ArrayList Schema recordMemberSchema = recordTypeSchema.getField("recordMember").schema(); GenericArray<Integer> avroArray = new GenericData.Array<Integer>(recordMemberSchema, arrayElements); // put the GenericArray in a GenericRecord GenericRecord recordType = new 
GenericData.Record(recordTypeSchema); recordType.put(0, avroArray); ColumnInfo[] colInfo = new ColumnInfo[1]; colInfo[0] = AdaptorTestUtils.constructColumnInfo("id", AdaptorTestUtils.Type.UINT32, 0, 0); AvroAdaptor decoder = new AvroAdaptor(new File("target/test-classes/array/Integer/avro.ini"), colInfo); List<Object[]> res = decoder.convertRecord(recordType); assertEquals("resulting list size", 1, res.size()); assertArrayEquals("resulting item", new Object[] { intVal }, res.get(0)); } @Test public void testArrayOfFloat() throws Exception { Schema recordTypeSchema = new Parser().parse(new File("target/test-classes/array/Float/record.avsc")); float floatVal = 12.34f; // prepare ArrayList with elements to test List<Float> arrayElements = new ArrayList<Float>(1); arrayElements.add(floatVal); // construct the GenericArray from the above ArrayList Schema recordMemberSchema = recordTypeSchema.getField("recordMember").schema(); GenericArray<Float> avroArray = new GenericData.Array<Float>(recordMemberSchema, arrayElements); // put the GenericArray in a GenericRecord GenericRecord recordType = new GenericData.Record(recordTypeSchema); recordType.put(0, avroArray); ColumnInfo[] colInfo = new ColumnInfo[1]; colInfo[0] = AdaptorTestUtils.constructColumnInfo("id", AdaptorTestUtils.Type.FLOAT, 0, 0); AvroAdaptor decoder = new AvroAdaptor(new File("target/test-classes/array/Float/avro.ini"), colInfo); List<Object[]> res = decoder.convertRecord(recordType); assertEquals("resulting list size", 1, res.size()); assertArrayEquals("resulting item", new Object[] { floatVal }, res.get(0)); } @Test public void testArrayOfDouble() throws Exception { Schema recordTypeSchema = new Parser().parse(new File("target/test-classes/array/Double/record.avsc")); double doubleVal = 12.34d; // prepare ArrayList with elements to test List<Double> arrayElements = new ArrayList<Double>(1); arrayElements.add(doubleVal); // construct the GenericArray from the above ArrayList Schema recordMemberSchema = 
recordTypeSchema.getField("recordMember").schema(); GenericArray<Double> avroArray = new GenericData.Array<Double>(recordMemberSchema, arrayElements); // put the GenericArray in a GenericRecord GenericRecord recordType = new GenericData.Record(recordTypeSchema); recordType.put(0, avroArray); ColumnInfo[] colInfo = new ColumnInfo[1]; colInfo[0] = AdaptorTestUtils.constructColumnInfo("id", AdaptorTestUtils.Type.DOUBLE, 0, 0); AvroAdaptor decoder = new AvroAdaptor(new File("target/test-classes/array/Double/avro.ini"), colInfo); List<Object[]> res = decoder.convertRecord(recordType); assertEquals("resulting list size", 1, res.size()); assertArrayEquals("resulting item", new Object[] { doubleVal }, res.get(0)); } @Test public void testArrayOfLong() throws Exception { Schema recordTypeSchema = new Parser().parse(new File("target/test-classes/array/Long/record.avsc")); long longVal = 12l; // prepare ArrayList with elements to test List<Long> arrayElements = new ArrayList<Long>(1); arrayElements.add(longVal); // construct the GenericArray from the above ArrayList Schema recordMemberSchema = recordTypeSchema.getField("recordMember").schema(); GenericArray<Long> avroArray = new GenericData.Array<Long>(recordMemberSchema, arrayElements); // put the GenericArray in a GenericRecord GenericRecord recordType = new GenericData.Record(recordTypeSchema); recordType.put(0, avroArray); ColumnInfo[] colInfo = new ColumnInfo[1]; colInfo[0] = AdaptorTestUtils.constructColumnInfo("id", AdaptorTestUtils.Type.INT32, 0, 0); AvroAdaptor decoder = new AvroAdaptor(new File("target/test-classes/array/Long/avro.ini"), colInfo); List<Object[]> res = decoder.convertRecord(recordType); assertEquals("resulting list size", 1, res.size()); assertArrayEquals("resulting item", new Object[] { longVal }, res.get(0)); } @Test public void testArrayOfIntRecord() throws Exception { Schema schema = new Parser().parse(new File("target/test-classes/array/IntRecord/record.avsc")); Schema arraySchema = 
schema.getField("recordMember").schema(); Schema arrayMemberSchema = arraySchema.getElementType(); int intVal = 16; // construct int record GenericRecord subArrayRecord = new GenericData.Record(arrayMemberSchema); subArrayRecord.put(0, intVal); // construct array of "int record" List<GenericRecord> recordMemberArray = new ArrayList<GenericRecord>(1); recordMemberArray.add(subArrayRecord); GenericArray<GenericRecord> subArrayElementsGenericArray = new GenericData.Array<GenericRecord>(arraySchema, recordMemberArray); // construct a record with the above array GenericRecord rootRecord = new GenericData.Record(schema); rootRecord.put(0, subArrayElementsGenericArray); ColumnInfo[] colInfo = new ColumnInfo[1]; colInfo[0] = AdaptorTestUtils.constructColumnInfo("id", AdaptorTestUtils.Type.UINT32, 0, 0); AvroAdaptor decoder = new AvroAdaptor(new File("target/test-classes/array/IntRecord/avro.ini"), colInfo); List<Object[]> res = decoder.convertRecord(rootRecord); assertEquals("resulting list size", 1, res.size()); assertArrayEquals("resulting item", new Object[] { intVal }, res.get(0)); } @Test public void testMultipleArrayOfStringsBothUsedInConfig() throws Exception { /* * define a record with 2 separate arrays * * fill both arrays with data * * have both entries of the arrays in the config file */ Schema schema = new Parser().parse(new File("target/test-classes/array/MultipleArrayOfStrings/schema.avsc")); Schema arrayRecordSchema1 = schema.getField("arrayRecord1").schema(); Schema arrayElementSchema1 = arrayRecordSchema1.getField("arrayElement1").schema(); Schema arrayRecordSchema2 = schema.getField("arrayRecord2").schema(); Schema arrayElementSchema2 = arrayRecordSchema2.getField("arrayElement2").schema(); // /////// List<String> arrayElements1 = new ArrayList<String>(1); arrayElements1.add("test string 1"); GenericRecord arrayRecord1 = new GenericData.Record(arrayRecordSchema1); GenericArray<String> avroArray1 = new GenericData.Array<String>(arrayElementSchema1, 
arrayElements1); arrayRecord1.put(0, avroArray1); GenericRecord firstRecord = new GenericData.Record(schema); firstRecord.put(0, arrayRecord1); // /////// List<String> arrayElements2 = new ArrayList<String>(1); arrayElements2.add("test string 2"); GenericRecord arrayRecord2 = new GenericData.Record(arrayRecordSchema2); GenericArray<String> avroArray2 = new GenericData.Array<String>(arrayElementSchema2, arrayElements2); arrayRecord2.put(0, avroArray2); firstRecord.put(1, arrayRecord2); // /////// ColumnInfo[] colInfo = new ColumnInfo[2]; colInfo[0] = AdaptorTestUtils.constructColumnInfo("id", AdaptorTestUtils.Type.VARSTRING, 0, 0); colInfo[1] = AdaptorTestUtils.constructColumnInfo("id2", AdaptorTestUtils.Type.VARSTRING, 0, 0); AvroAdaptor decoder = new AvroAdaptor(new File( "target/test-classes/array/MultipleArrayOfStrings/avroBothArraysUsed.ini"), colInfo); List<Object[]> res = decoder.convertRecord(firstRecord); assertEquals("resulting list size", 1, res.size()); assertArrayEquals("resulting item", new Object[] { "test string 1", "test string 2" }, res.get(0)); } @Test public void testMultipleArrayOfStringsOnlyOneUsedInConfig() throws Exception { /* * define a record with 2 arrays * * fill first array with one element * * fill second array with two elements * * mapping file uses only the first array * * ensure 1 row is returned (unused array of 2 elements is not unfolded) */ ColumnInfo[] colInfo = new ColumnInfo[1]; colInfo[0] = AdaptorTestUtils.constructColumnInfo("id", AdaptorTestUtils.Type.VARSTRING, 0, 0); Schema schema = new Parser().parse(new File("target/test-classes/array/MultipleArrayOfStrings/schema.avsc")); Schema arrayRecordSchema1 = schema.getField("arrayRecord1").schema(); Schema arrayElementSchema1 = arrayRecordSchema1.getField("arrayElement1").schema(); Schema arrayRecordSchema2 = schema.getField("arrayRecord2").schema(); Schema arrayElementSchema2 = arrayRecordSchema2.getField("arrayElement2").schema(); List<String> arrayElements1 = new 
ArrayList<String>(1); arrayElements1.add("test string 1"); GenericRecord arrayRecord1 = new GenericData.Record(arrayRecordSchema1); GenericArray<String> avroArray1 = new GenericData.Array<String>(arrayElementSchema1, arrayElements1); arrayRecord1.put(0, avroArray1); GenericRecord firstRecord = new GenericData.Record(schema); firstRecord.put(0, arrayRecord1); List<String> arrayElements2 = new ArrayList<String>(2); arrayElements2.add("test string 2"); arrayElements2.add("test string 3"); GenericRecord arrayRecord2 = new GenericData.Record(arrayRecordSchema2); GenericArray<String> avroArray2 = new GenericData.Array<String>(arrayElementSchema2, arrayElements2); arrayRecord2.put(0, avroArray2); firstRecord.put(1, arrayRecord2); AvroAdaptor decoder = new AvroAdaptor(new File( "target/test-classes/array/MultipleArrayOfStrings/avroOnlyOneArrayUsed.ini"), colInfo); List<Object[]> res = decoder.convertRecord(firstRecord); assertEquals("resulting list size", 1, res.size()); assertArrayEquals("resulting item", new Object[] { "test string 1" }, res.get(0)); /** * Now create and test a second record */ GenericRecord secondRecord = new GenericData.Record(schema); secondRecord.put(0, arrayRecord1); secondRecord.put(1, arrayRecord2); List<Object[]> res2 = decoder.convertRecord(secondRecord); assertEquals("resulting list size", 1, res2.size()); assertArrayEquals("resulting item", new Object[] { "test string 1" }, res2.get(0)); } /** * define a record with: a record + an array * * fill first record with string value * * fill second array with two elements * * mapping file uses both the record and the array * * ensure 2 row is returned * * [{"a",arrayVal1}, {"a", arrayVal2}] */ @Test public void testRecordWithRecordAndArray() throws Exception { ColumnInfo[] colInfo = new ColumnInfo[2]; colInfo[0] = AdaptorTestUtils.constructColumnInfo("name", AdaptorTestUtils.Type.VARSTRING, 0, 0); colInfo[1] = AdaptorTestUtils.constructColumnInfo("address", AdaptorTestUtils.Type.VARSTRING, 0, 0); 
Schema schema = new Parser().parse(new File("target/test-classes/array/RecordWithRecordAndArray/schema.avsc")); GenericRecord firstRecord = new GenericData.Record(schema); firstRecord.put("name", "test name"); Schema addressArraySchema = schema.getField("addressArray").schema(); List<String> addressElements = new ArrayList<String>(2); addressElements.add("street 1"); addressElements.add("street 2"); GenericArray<String> avroAddressArray = new GenericData.Array<String>(addressArraySchema, addressElements); firstRecord.put("addressArray", avroAddressArray); AvroAdaptor decoder = new AvroAdaptor(new File("target/test-classes/array/RecordWithRecordAndArray/avro.ini"), colInfo); List<Object[]> res = decoder.convertRecord(firstRecord); assertEquals("resulting list size", 2, res.size()); assertArrayEquals("resulting item", new Object[] { "test name", "street 1" }, res.get(0)); assertArrayEquals("resulting item", new Object[] { "test name", "street 2" }, res.get(1)); } }
apache-2.0
Fabryprog/camel
components/camel-milo/src/test/java/org/apache/camel/component/milo/server/ServerLocalTest.java
2912
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.milo.server; import java.io.IOException; import org.apache.camel.EndpointInject; import org.apache.camel.RoutesBuilder; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.milo.Ports; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.test.junit4.CamelTestSupport; import org.eclipse.milo.opcua.stack.core.types.builtin.DataValue; import org.eclipse.milo.opcua.stack.core.types.builtin.Variant; import org.junit.Before; import org.junit.Test; /** * Unit tests for milo server component without using an actual connection */ public class ServerLocalTest extends CamelTestSupport { private static final String MILO_ITEM_1 = "milo-server:myitem1"; private static final String MOCK_TEST = "mock:test"; @EndpointInject(MOCK_TEST) protected MockEndpoint testEndpoint; @Before public void pickFreePort() throws IOException { final MiloServerComponent component = context().getComponent("milo-server", MiloServerComponent.class); component.setBindPort(Ports.pickServerPort()); } @Override protected RoutesBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { 
from(MILO_ITEM_1).to(MOCK_TEST); } }; } @Test public void shouldStartComponent() { } @Test public void testAcceptVariantString() { sendBody(MILO_ITEM_1, new Variant("Foo")); } @Test public void testAcceptVariantDouble() { sendBody(MILO_ITEM_1, new Variant(0.0)); } @Test public void testAcceptString() { sendBody(MILO_ITEM_1, "Foo"); } @Test public void testAcceptDouble() { sendBody(MILO_ITEM_1, 0.0); } @Test public void testAcceptDataValueString() { sendBody(MILO_ITEM_1, new DataValue(new Variant("Foo"))); } @Test public void testAcceptDataValueDouble() { sendBody(MILO_ITEM_1, new DataValue(new Variant(0.0))); } }
apache-2.0
USEF-Foundation/ri.usef.energy
usef-build/usef-workflow/usef-cro/src/main/java/energy/usef/cro/dto/ParticipantAction.java
1281
/* * Copyright 2015-2016 USEF Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package energy.usef.cro.dto; /** * A data transfer object for relevant Participant data. */ public class ParticipantAction { private String method; private String domain; public ParticipantAction() { // Required for json deserialisation } public ParticipantAction(String method, String domain) { this.method = method; this.domain = domain; } public String getMethod() { return method; } public void setMethod(String method) { this.method = method; } public String getDomain() { return domain; } public void setDomain(String domain) { this.domain = domain; } }
apache-2.0
velmuruganvelayutham/jpa
examples/Chapter4/22-manyToManyJoinTable/src/model/examples/stateless/ProjectServiceBean.java
754
package examples.stateless; import java.util.Collection; import javax.ejb.Stateless; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.Query; import examples.model.Project; @Stateless public class ProjectServiceBean implements ProjectService { @PersistenceContext(unitName="EmployeeService") protected EntityManager em; public Project createProject(String name) { Project proj = new Project(); proj.setName(name); em.persist(proj); return proj; } public Collection<Project> findAllProjects() { Query query = em.createQuery("SELECT p FROM Project p"); return (Collection<Project>) query.getResultList(); } }
apache-2.0
roundrop/ermaster-fast
src/org/insightech/er/db/impl/postgres/PostgresSqlTypeManager.java
363
package org.insightech.er.db.impl.postgres; import org.insightech.er.db.sqltype.SqlType; import org.insightech.er.db.sqltype.SqlTypeManagerBase; public class PostgresSqlTypeManager extends SqlTypeManagerBase { @Override public int getByteLength(final SqlType type, final Integer length, final Integer decimal) { return 0; } }
apache-2.0
practice-vishnoi/dev-spark-1
streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
78820
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.streaming; import java.io.*; import java.lang.Iterable; import java.nio.charset.Charset; import java.util.*; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; import scala.Tuple2; import org.junit.Assert; import static org.junit.Assert.*; import org.junit.Test; import com.google.common.base.Optional; import com.google.common.collect.Lists; import com.google.common.io.Files; import com.google.common.collect.Sets; import org.apache.spark.HashPartitioner; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.api.java.function.*; import org.apache.spark.storage.StorageLevel; import org.apache.spark.streaming.api.java.*; import org.apache.spark.util.Utils; import org.apache.spark.SparkConf; // The test suite itself is Serializable so that anonymous Function implementations can be // serialized, as an alternative to converting these anonymous classes to static inner classes; // see 
http://stackoverflow.com/questions/758570/. public class JavaAPISuite extends LocalJavaStreamingContext implements Serializable { public void equalIterator(Iterator<?> a, Iterator<?> b) { while (a.hasNext() && b.hasNext()) { Assert.assertEquals(a.next(), b.next()); } Assert.assertEquals(a.hasNext(), b.hasNext()); } public void equalIterable(Iterable<?> a, Iterable<?> b) { equalIterator(a.iterator(), b.iterator()); } @Test public void testInitialization() { Assert.assertNotNull(ssc.sparkContext()); } @SuppressWarnings("unchecked") @Test public void testContextState() { List<List<Integer>> inputData = Arrays.asList(Arrays.asList(1, 2, 3, 4)); Assert.assertTrue(ssc.getState() == StreamingContextState.INITIALIZED); JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaTestUtils.attachTestOutputStream(stream); Assert.assertTrue(ssc.getState() == StreamingContextState.INITIALIZED); ssc.start(); Assert.assertTrue(ssc.getState() == StreamingContextState.ACTIVE); ssc.stop(); Assert.assertTrue(ssc.getState() == StreamingContextState.STOPPED); } @SuppressWarnings("unchecked") @Test public void testCount() { List<List<Integer>> inputData = Arrays.asList( Arrays.asList(1,2,3,4), Arrays.asList(3,4,5), Arrays.asList(3)); List<List<Long>> expected = Arrays.asList( Arrays.asList(4L), Arrays.asList(3L), Arrays.asList(1L)); JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaDStream<Long> count = stream.count(); JavaTestUtils.attachTestOutputStream(count); List<List<Long>> result = JavaTestUtils.runStreams(ssc, 3, 3); assertOrderInvariantEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testMap() { List<List<String>> inputData = Arrays.asList( Arrays.asList("hello", "world"), Arrays.asList("goodnight", "moon")); List<List<Integer>> expected = Arrays.asList( Arrays.asList(5,5), Arrays.asList(9,4)); JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); 
JavaDStream<Integer> letterCount = stream.map(new Function<String, Integer>() { @Override public Integer call(String s) throws Exception { return s.length(); } }); JavaTestUtils.attachTestOutputStream(letterCount); List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 2, 2); assertOrderInvariantEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testWindow() { List<List<Integer>> inputData = Arrays.asList( Arrays.asList(1,2,3), Arrays.asList(4,5,6), Arrays.asList(7,8,9)); List<List<Integer>> expected = Arrays.asList( Arrays.asList(1,2,3), Arrays.asList(4,5,6,1,2,3), Arrays.asList(7,8,9,4,5,6), Arrays.asList(7,8,9)); JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaDStream<Integer> windowed = stream.window(new Duration(2000)); JavaTestUtils.attachTestOutputStream(windowed); List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 4, 4); assertOrderInvariantEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testWindowWithSlideDuration() { List<List<Integer>> inputData = Arrays.asList( Arrays.asList(1,2,3), Arrays.asList(4,5,6), Arrays.asList(7,8,9), Arrays.asList(10,11,12), Arrays.asList(13,14,15), Arrays.asList(16,17,18)); List<List<Integer>> expected = Arrays.asList( Arrays.asList(1,2,3,4,5,6), Arrays.asList(1,2,3,4,5,6,7,8,9,10,11,12), Arrays.asList(7,8,9,10,11,12,13,14,15,16,17,18), Arrays.asList(13,14,15,16,17,18)); JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaDStream<Integer> windowed = stream.window(new Duration(4000), new Duration(2000)); JavaTestUtils.attachTestOutputStream(windowed); List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 8, 4); assertOrderInvariantEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testFilter() { List<List<String>> inputData = Arrays.asList( Arrays.asList("giants", "dodgers"), Arrays.asList("yankees", "red socks")); List<List<String>> expected = 
Arrays.asList( Arrays.asList("giants"), Arrays.asList("yankees")); JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaDStream<String> filtered = stream.filter(new Function<String, Boolean>() { @Override public Boolean call(String s) throws Exception { return s.contains("a"); } }); JavaTestUtils.attachTestOutputStream(filtered); List<List<String>> result = JavaTestUtils.runStreams(ssc, 2, 2); assertOrderInvariantEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testRepartitionMorePartitions() { List<List<Integer>> inputData = Arrays.asList( Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 2); JavaDStreamLike<Integer,JavaDStream<Integer>,JavaRDD<Integer>> repartitioned = stream.repartition(4); JavaTestUtils.attachTestOutputStream(repartitioned); List<List<List<Integer>>> result = JavaTestUtils.runStreamsWithPartitions(ssc, 2, 2); Assert.assertEquals(2, result.size()); for (List<List<Integer>> rdd : result) { Assert.assertEquals(4, rdd.size()); Assert.assertEquals( 10, rdd.get(0).size() + rdd.get(1).size() + rdd.get(2).size() + rdd.get(3).size()); } } @SuppressWarnings("unchecked") @Test public void testRepartitionFewerPartitions() { List<List<Integer>> inputData = Arrays.asList( Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 4); JavaDStreamLike<Integer,JavaDStream<Integer>,JavaRDD<Integer>> repartitioned = stream.repartition(2); JavaTestUtils.attachTestOutputStream(repartitioned); List<List<List<Integer>>> result = JavaTestUtils.runStreamsWithPartitions(ssc, 2, 2); Assert.assertEquals(2, result.size()); for (List<List<Integer>> rdd : result) { Assert.assertEquals(2, rdd.size()); Assert.assertEquals(10, rdd.get(0).size() + rdd.get(1).size()); } } 
@SuppressWarnings("unchecked") @Test public void testGlom() { List<List<String>> inputData = Arrays.asList( Arrays.asList("giants", "dodgers"), Arrays.asList("yankees", "red socks")); List<List<List<String>>> expected = Arrays.asList( Arrays.asList(Arrays.asList("giants", "dodgers")), Arrays.asList(Arrays.asList("yankees", "red socks"))); JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaDStream<List<String>> glommed = stream.glom(); JavaTestUtils.attachTestOutputStream(glommed); List<List<List<String>>> result = JavaTestUtils.runStreams(ssc, 2, 2); Assert.assertEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testMapPartitions() { List<List<String>> inputData = Arrays.asList( Arrays.asList("giants", "dodgers"), Arrays.asList("yankees", "red socks")); List<List<String>> expected = Arrays.asList( Arrays.asList("GIANTSDODGERS"), Arrays.asList("YANKEESRED SOCKS")); JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaDStream<String> mapped = stream.mapPartitions( new FlatMapFunction<Iterator<String>, String>() { @Override public Iterable<String> call(Iterator<String> in) { String out = ""; while (in.hasNext()) { out = out + in.next().toUpperCase(); } return Lists.newArrayList(out); } }); JavaTestUtils.attachTestOutputStream(mapped); List<List<String>> result = JavaTestUtils.runStreams(ssc, 2, 2); Assert.assertEquals(expected, result); } private class IntegerSum implements Function2<Integer, Integer, Integer> { @Override public Integer call(Integer i1, Integer i2) throws Exception { return i1 + i2; } } private class IntegerDifference implements Function2<Integer, Integer, Integer> { @Override public Integer call(Integer i1, Integer i2) throws Exception { return i1 - i2; } } @SuppressWarnings("unchecked") @Test public void testReduce() { List<List<Integer>> inputData = Arrays.asList( Arrays.asList(1,2,3), Arrays.asList(4,5,6), Arrays.asList(7,8,9)); List<List<Integer>> 
expected = Arrays.asList( Arrays.asList(6), Arrays.asList(15), Arrays.asList(24)); JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaDStream<Integer> reduced = stream.reduce(new IntegerSum()); JavaTestUtils.attachTestOutputStream(reduced); List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 3, 3); Assert.assertEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testReduceByWindowWithInverse() { testReduceByWindow(true); } @SuppressWarnings("unchecked") @Test public void testReduceByWindowWithoutInverse() { testReduceByWindow(false); } @SuppressWarnings("unchecked") private void testReduceByWindow(boolean withInverse) { List<List<Integer>> inputData = Arrays.asList( Arrays.asList(1,2,3), Arrays.asList(4,5,6), Arrays.asList(7,8,9)); List<List<Integer>> expected = Arrays.asList( Arrays.asList(6), Arrays.asList(21), Arrays.asList(39), Arrays.asList(24)); JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaDStream<Integer> reducedWindowed = null; if (withInverse) { reducedWindowed = stream.reduceByWindow(new IntegerSum(), new IntegerDifference(), new Duration(2000), new Duration(1000)); } else { reducedWindowed = stream.reduceByWindow(new IntegerSum(), new Duration(2000), new Duration(1000)); } JavaTestUtils.attachTestOutputStream(reducedWindowed); List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 4, 4); Assert.assertEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testQueueStream() { ssc.stop(); // Create a new JavaStreamingContext without checkpointing SparkConf conf = new SparkConf() .setMaster("local[2]") .setAppName("test") .set("spark.streaming.clock", "org.apache.spark.util.ManualClock"); ssc = new JavaStreamingContext(conf, new Duration(1000)); List<List<Integer>> expected = Arrays.asList( Arrays.asList(1,2,3), Arrays.asList(4,5,6), Arrays.asList(7,8,9)); JavaSparkContext jsc = new JavaSparkContext(ssc.ssc().sc()); 
JavaRDD<Integer> rdd1 = ssc.sparkContext().parallelize(Arrays.asList(1, 2, 3)); JavaRDD<Integer> rdd2 = ssc.sparkContext().parallelize(Arrays.asList(4, 5, 6)); JavaRDD<Integer> rdd3 = ssc.sparkContext().parallelize(Arrays.asList(7,8,9)); LinkedList<JavaRDD<Integer>> rdds = Lists.newLinkedList(); rdds.add(rdd1); rdds.add(rdd2); rdds.add(rdd3); JavaDStream<Integer> stream = ssc.queueStream(rdds); JavaTestUtils.attachTestOutputStream(stream); List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 3, 3); Assert.assertEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testTransform() { List<List<Integer>> inputData = Arrays.asList( Arrays.asList(1,2,3), Arrays.asList(4,5,6), Arrays.asList(7,8,9)); List<List<Integer>> expected = Arrays.asList( Arrays.asList(3,4,5), Arrays.asList(6,7,8), Arrays.asList(9,10,11)); JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaDStream<Integer> transformed = stream.transform( new Function<JavaRDD<Integer>, JavaRDD<Integer>>() { @Override public JavaRDD<Integer> call(JavaRDD<Integer> in) throws Exception { return in.map(new Function<Integer, Integer>() { @Override public Integer call(Integer i) throws Exception { return i + 2; } }); } }); JavaTestUtils.attachTestOutputStream(transformed); List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 3, 3); assertOrderInvariantEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testVariousTransform() { // tests whether all variations of transform can be called from Java List<List<Integer>> inputData = Arrays.asList(Arrays.asList(1)); JavaDStream<Integer> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); List<List<Tuple2<String, Integer>>> pairInputData = Arrays.asList(Arrays.asList(new Tuple2<String, Integer>("x", 1))); JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream( JavaTestUtils.attachTestInputStream(ssc, pairInputData, 1)); JavaDStream<Integer> 
transformed1 = stream.transform( new Function<JavaRDD<Integer>, JavaRDD<Integer>>() { @Override public JavaRDD<Integer> call(JavaRDD<Integer> in) throws Exception { return null; } } ); JavaDStream<Integer> transformed2 = stream.transform( new Function2<JavaRDD<Integer>, Time, JavaRDD<Integer>>() { @Override public JavaRDD<Integer> call(JavaRDD<Integer> in, Time time) throws Exception { return null; } } ); JavaPairDStream<String, Integer> transformed3 = stream.transformToPair( new Function<JavaRDD<Integer>, JavaPairRDD<String, Integer>>() { @Override public JavaPairRDD<String, Integer> call(JavaRDD<Integer> in) throws Exception { return null; } } ); JavaPairDStream<String, Integer> transformed4 = stream.transformToPair( new Function2<JavaRDD<Integer>, Time, JavaPairRDD<String, Integer>>() { @Override public JavaPairRDD<String, Integer> call(JavaRDD<Integer> in, Time time) throws Exception { return null; } } ); JavaDStream<Integer> pairTransformed1 = pairStream.transform( new Function<JavaPairRDD<String, Integer>, JavaRDD<Integer>>() { @Override public JavaRDD<Integer> call(JavaPairRDD<String, Integer> in) throws Exception { return null; } } ); JavaDStream<Integer> pairTransformed2 = pairStream.transform( new Function2<JavaPairRDD<String, Integer>, Time, JavaRDD<Integer>>() { @Override public JavaRDD<Integer> call(JavaPairRDD<String, Integer> in, Time time) throws Exception { return null; } } ); JavaPairDStream<String, String> pairTransformed3 = pairStream.transformToPair( new Function<JavaPairRDD<String, Integer>, JavaPairRDD<String, String>>() { @Override public JavaPairRDD<String, String> call(JavaPairRDD<String, Integer> in) throws Exception { return null; } } ); JavaPairDStream<String, String> pairTransformed4 = pairStream.transformToPair( new Function2<JavaPairRDD<String, Integer>, Time, JavaPairRDD<String, String>>() { @Override public JavaPairRDD<String, String> call(JavaPairRDD<String, Integer> in, Time time) throws Exception { return null; } } ); } 
@SuppressWarnings("unchecked") @Test public void testTransformWith() { List<List<Tuple2<String, String>>> stringStringKVStream1 = Arrays.asList( Arrays.asList( new Tuple2<String, String>("california", "dodgers"), new Tuple2<String, String>("new york", "yankees")), Arrays.asList( new Tuple2<String, String>("california", "sharks"), new Tuple2<String, String>("new york", "rangers"))); List<List<Tuple2<String, String>>> stringStringKVStream2 = Arrays.asList( Arrays.asList( new Tuple2<String, String>("california", "giants"), new Tuple2<String, String>("new york", "mets")), Arrays.asList( new Tuple2<String, String>("california", "ducks"), new Tuple2<String, String>("new york", "islanders"))); List<HashSet<Tuple2<String, Tuple2<String, String>>>> expected = Arrays.asList( Sets.newHashSet( new Tuple2<String, Tuple2<String, String>>("california", new Tuple2<String, String>("dodgers", "giants")), new Tuple2<String, Tuple2<String, String>>("new york", new Tuple2<String, String>("yankees", "mets"))), Sets.newHashSet( new Tuple2<String, Tuple2<String, String>>("california", new Tuple2<String, String>("sharks", "ducks")), new Tuple2<String, Tuple2<String, String>>("new york", new Tuple2<String, String>("rangers", "islanders")))); JavaDStream<Tuple2<String, String>> stream1 = JavaTestUtils.attachTestInputStream( ssc, stringStringKVStream1, 1); JavaPairDStream<String, String> pairStream1 = JavaPairDStream.fromJavaDStream(stream1); JavaDStream<Tuple2<String, String>> stream2 = JavaTestUtils.attachTestInputStream( ssc, stringStringKVStream2, 1); JavaPairDStream<String, String> pairStream2 = JavaPairDStream.fromJavaDStream(stream2); JavaPairDStream<String, Tuple2<String, String>> joined = pairStream1.transformWithToPair( pairStream2, new Function3< JavaPairRDD<String, String>, JavaPairRDD<String, String>, Time, JavaPairRDD<String, Tuple2<String, String>> >() { @Override public JavaPairRDD<String, Tuple2<String, String>> call( JavaPairRDD<String, String> rdd1, JavaPairRDD<String, 
String> rdd2, Time time ) throws Exception { return rdd1.join(rdd2); } } ); JavaTestUtils.attachTestOutputStream(joined); List<List<Tuple2<String, Tuple2<String, String>>>> result = JavaTestUtils.runStreams(ssc, 2, 2); List<HashSet<Tuple2<String, Tuple2<String, String>>>> unorderedResult = Lists.newArrayList(); for (List<Tuple2<String, Tuple2<String, String>>> res: result) { unorderedResult.add(Sets.newHashSet(res)); } Assert.assertEquals(expected, unorderedResult); } @SuppressWarnings("unchecked") @Test public void testVariousTransformWith() { // tests whether all variations of transformWith can be called from Java List<List<Integer>> inputData1 = Arrays.asList(Arrays.asList(1)); List<List<String>> inputData2 = Arrays.asList(Arrays.asList("x")); JavaDStream<Integer> stream1 = JavaTestUtils.attachTestInputStream(ssc, inputData1, 1); JavaDStream<String> stream2 = JavaTestUtils.attachTestInputStream(ssc, inputData2, 1); List<List<Tuple2<String, Integer>>> pairInputData1 = Arrays.asList(Arrays.asList(new Tuple2<String, Integer>("x", 1))); List<List<Tuple2<Double, Character>>> pairInputData2 = Arrays.asList(Arrays.asList(new Tuple2<Double, Character>(1.0, 'x'))); JavaPairDStream<String, Integer> pairStream1 = JavaPairDStream.fromJavaDStream( JavaTestUtils.attachTestInputStream(ssc, pairInputData1, 1)); JavaPairDStream<Double, Character> pairStream2 = JavaPairDStream.fromJavaDStream( JavaTestUtils.attachTestInputStream(ssc, pairInputData2, 1)); JavaDStream<Double> transformed1 = stream1.transformWith( stream2, new Function3<JavaRDD<Integer>, JavaRDD<String>, Time, JavaRDD<Double>>() { @Override public JavaRDD<Double> call(JavaRDD<Integer> rdd1, JavaRDD<String> rdd2, Time time) throws Exception { return null; } } ); JavaDStream<Double> transformed2 = stream1.transformWith( pairStream1, new Function3<JavaRDD<Integer>, JavaPairRDD<String, Integer>, Time, JavaRDD<Double>>() { @Override public JavaRDD<Double> call(JavaRDD<Integer> rdd1, JavaPairRDD<String, Integer> rdd2, 
Time time) throws Exception { return null; } } ); JavaPairDStream<Double, Double> transformed3 = stream1.transformWithToPair( stream2, new Function3<JavaRDD<Integer>, JavaRDD<String>, Time, JavaPairRDD<Double, Double>>() { @Override public JavaPairRDD<Double, Double> call(JavaRDD<Integer> rdd1, JavaRDD<String> rdd2, Time time) throws Exception { return null; } } ); JavaPairDStream<Double, Double> transformed4 = stream1.transformWithToPair( pairStream1, new Function3<JavaRDD<Integer>, JavaPairRDD<String, Integer>, Time, JavaPairRDD<Double, Double>>() { @Override public JavaPairRDD<Double, Double> call(JavaRDD<Integer> rdd1, JavaPairRDD<String, Integer> rdd2, Time time) throws Exception { return null; } } ); JavaDStream<Double> pairTransformed1 = pairStream1.transformWith( stream2, new Function3<JavaPairRDD<String, Integer>, JavaRDD<String>, Time, JavaRDD<Double>>() { @Override public JavaRDD<Double> call(JavaPairRDD<String, Integer> rdd1, JavaRDD<String> rdd2, Time time) throws Exception { return null; } } ); JavaDStream<Double> pairTransformed2_ = pairStream1.transformWith( pairStream1, new Function3<JavaPairRDD<String, Integer>, JavaPairRDD<String, Integer>, Time, JavaRDD<Double>>() { @Override public JavaRDD<Double> call(JavaPairRDD<String, Integer> rdd1, JavaPairRDD<String, Integer> rdd2, Time time) throws Exception { return null; } } ); JavaPairDStream<Double, Double> pairTransformed3 = pairStream1.transformWithToPair( stream2, new Function3<JavaPairRDD<String, Integer>, JavaRDD<String>, Time, JavaPairRDD<Double, Double>>() { @Override public JavaPairRDD<Double, Double> call(JavaPairRDD<String, Integer> rdd1, JavaRDD<String> rdd2, Time time) throws Exception { return null; } } ); JavaPairDStream<Double, Double> pairTransformed4 = pairStream1.transformWithToPair( pairStream2, new Function3<JavaPairRDD<String, Integer>, JavaPairRDD<Double, Character>, Time, JavaPairRDD<Double, Double>>() { @Override public JavaPairRDD<Double, Double> call(JavaPairRDD<String, 
Integer> rdd1, JavaPairRDD<Double, Character> rdd2, Time time) throws Exception { return null; } } ); } @SuppressWarnings("unchecked") @Test public void testStreamingContextTransform(){ List<List<Integer>> stream1input = Arrays.asList( Arrays.asList(1), Arrays.asList(2) ); List<List<Integer>> stream2input = Arrays.asList( Arrays.asList(3), Arrays.asList(4) ); List<List<Tuple2<Integer, String>>> pairStream1input = Arrays.asList( Arrays.asList(new Tuple2<Integer, String>(1, "x")), Arrays.asList(new Tuple2<Integer, String>(2, "y")) ); List<List<Tuple2<Integer, Tuple2<Integer, String>>>> expected = Arrays.asList( Arrays.asList(new Tuple2<Integer, Tuple2<Integer, String>>(1, new Tuple2<Integer, String>(1, "x"))), Arrays.asList(new Tuple2<Integer, Tuple2<Integer, String>>(2, new Tuple2<Integer, String>(2, "y"))) ); JavaDStream<Integer> stream1 = JavaTestUtils.attachTestInputStream(ssc, stream1input, 1); JavaDStream<Integer> stream2 = JavaTestUtils.attachTestInputStream(ssc, stream2input, 1); JavaPairDStream<Integer, String> pairStream1 = JavaPairDStream.fromJavaDStream( JavaTestUtils.attachTestInputStream(ssc, pairStream1input, 1)); List<JavaDStream<?>> listOfDStreams1 = Arrays.<JavaDStream<?>>asList(stream1, stream2); // This is just to test whether this transform to JavaStream compiles JavaDStream<Long> transformed1 = ssc.transform( listOfDStreams1, new Function2<List<JavaRDD<?>>, Time, JavaRDD<Long>>() { @Override public JavaRDD<Long> call(List<JavaRDD<?>> listOfRDDs, Time time) { Assert.assertEquals(2, listOfRDDs.size()); return null; } } ); List<JavaDStream<?>> listOfDStreams2 = Arrays.<JavaDStream<?>>asList(stream1, stream2, pairStream1.toJavaDStream()); JavaPairDStream<Integer, Tuple2<Integer, String>> transformed2 = ssc.transformToPair( listOfDStreams2, new Function2<List<JavaRDD<?>>, Time, JavaPairRDD<Integer, Tuple2<Integer, String>>>() { @Override public JavaPairRDD<Integer, Tuple2<Integer, String>> call(List<JavaRDD<?>> listOfRDDs, Time time) { 
Assert.assertEquals(3, listOfRDDs.size()); JavaRDD<Integer> rdd1 = (JavaRDD<Integer>)listOfRDDs.get(0); JavaRDD<Integer> rdd2 = (JavaRDD<Integer>)listOfRDDs.get(1); JavaRDD<Tuple2<Integer, String>> rdd3 = (JavaRDD<Tuple2<Integer, String>>)listOfRDDs.get(2); JavaPairRDD<Integer, String> prdd3 = JavaPairRDD.fromJavaRDD(rdd3); PairFunction<Integer, Integer, Integer> mapToTuple = new PairFunction<Integer, Integer, Integer>() { @Override public Tuple2<Integer, Integer> call(Integer i) throws Exception { return new Tuple2<Integer, Integer>(i, i); } }; return rdd1.union(rdd2).mapToPair(mapToTuple).join(prdd3); } } ); JavaTestUtils.attachTestOutputStream(transformed2); List<List<Tuple2<Integer, Tuple2<Integer, String>>>> result = JavaTestUtils.runStreams(ssc, 2, 2); Assert.assertEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testFlatMap() { List<List<String>> inputData = Arrays.asList( Arrays.asList("go", "giants"), Arrays.asList("boo", "dodgers"), Arrays.asList("athletics")); List<List<String>> expected = Arrays.asList( Arrays.asList("g","o","g","i","a","n","t","s"), Arrays.asList("b", "o", "o", "d","o","d","g","e","r","s"), Arrays.asList("a","t","h","l","e","t","i","c","s")); JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaDStream<String> flatMapped = stream.flatMap(new FlatMapFunction<String, String>() { @Override public Iterable<String> call(String x) { return Lists.newArrayList(x.split("(?!^)")); } }); JavaTestUtils.attachTestOutputStream(flatMapped); List<List<String>> result = JavaTestUtils.runStreams(ssc, 3, 3); assertOrderInvariantEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testPairFlatMap() { List<List<String>> inputData = Arrays.asList( Arrays.asList("giants"), Arrays.asList("dodgers"), Arrays.asList("athletics")); List<List<Tuple2<Integer, String>>> expected = Arrays.asList( Arrays.asList( new Tuple2<Integer, String>(6, "g"), new Tuple2<Integer, String>(6, 
"i"), new Tuple2<Integer, String>(6, "a"), new Tuple2<Integer, String>(6, "n"), new Tuple2<Integer, String>(6, "t"), new Tuple2<Integer, String>(6, "s")), Arrays.asList( new Tuple2<Integer, String>(7, "d"), new Tuple2<Integer, String>(7, "o"), new Tuple2<Integer, String>(7, "d"), new Tuple2<Integer, String>(7, "g"), new Tuple2<Integer, String>(7, "e"), new Tuple2<Integer, String>(7, "r"), new Tuple2<Integer, String>(7, "s")), Arrays.asList( new Tuple2<Integer, String>(9, "a"), new Tuple2<Integer, String>(9, "t"), new Tuple2<Integer, String>(9, "h"), new Tuple2<Integer, String>(9, "l"), new Tuple2<Integer, String>(9, "e"), new Tuple2<Integer, String>(9, "t"), new Tuple2<Integer, String>(9, "i"), new Tuple2<Integer, String>(9, "c"), new Tuple2<Integer, String>(9, "s"))); JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaPairDStream<Integer, String> flatMapped = stream.flatMapToPair( new PairFlatMapFunction<String, Integer, String>() { @Override public Iterable<Tuple2<Integer, String>> call(String in) throws Exception { List<Tuple2<Integer, String>> out = Lists.newArrayList(); for (String letter: in.split("(?!^)")) { out.add(new Tuple2<Integer, String>(in.length(), letter)); } return out; } }); JavaTestUtils.attachTestOutputStream(flatMapped); List<List<Tuple2<Integer, String>>> result = JavaTestUtils.runStreams(ssc, 3, 3); Assert.assertEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testUnion() { List<List<Integer>> inputData1 = Arrays.asList( Arrays.asList(1,1), Arrays.asList(2,2), Arrays.asList(3,3)); List<List<Integer>> inputData2 = Arrays.asList( Arrays.asList(4,4), Arrays.asList(5,5), Arrays.asList(6,6)); List<List<Integer>> expected = Arrays.asList( Arrays.asList(1,1,4,4), Arrays.asList(2,2,5,5), Arrays.asList(3,3,6,6)); JavaDStream<Integer> stream1 = JavaTestUtils.attachTestInputStream(ssc, inputData1, 2); JavaDStream<Integer> stream2 = JavaTestUtils.attachTestInputStream(ssc, inputData2, 2); 
JavaDStream<Integer> unioned = stream1.union(stream2); JavaTestUtils.attachTestOutputStream(unioned); List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 3, 3); assertOrderInvariantEquals(expected, result); } /* * Performs an order-invariant comparison of lists representing two RDD streams. This allows * us to account for ordering variation within individual RDD's which occurs during windowing. */ public static <T> void assertOrderInvariantEquals( List<List<T>> expected, List<List<T>> actual) { List<Set<T>> expectedSets = new ArrayList<Set<T>>(); for (List<T> list: expected) { expectedSets.add(Collections.unmodifiableSet(new HashSet<T>(list))); } List<Set<T>> actualSets = new ArrayList<Set<T>>(); for (List<T> list: actual) { actualSets.add(Collections.unmodifiableSet(new HashSet<T>(list))); } Assert.assertEquals(expectedSets, actualSets); } // PairDStream Functions @SuppressWarnings("unchecked") @Test public void testPairFilter() { List<List<String>> inputData = Arrays.asList( Arrays.asList("giants", "dodgers"), Arrays.asList("yankees", "red socks")); List<List<Tuple2<String, Integer>>> expected = Arrays.asList( Arrays.asList(new Tuple2<String, Integer>("giants", 6)), Arrays.asList(new Tuple2<String, Integer>("yankees", 7))); JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaPairDStream<String, Integer> pairStream = stream.mapToPair( new PairFunction<String, String, Integer>() { @Override public Tuple2<String, Integer> call(String in) throws Exception { return new Tuple2<String, Integer>(in, in.length()); } }); JavaPairDStream<String, Integer> filtered = pairStream.filter( new Function<Tuple2<String, Integer>, Boolean>() { @Override public Boolean call(Tuple2<String, Integer> in) throws Exception { return in._1().contains("a"); } }); JavaTestUtils.attachTestOutputStream(filtered); List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 2, 2); Assert.assertEquals(expected, result); } 
@SuppressWarnings("unchecked") private List<List<Tuple2<String, String>>> stringStringKVStream = Arrays.asList( Arrays.asList(new Tuple2<String, String>("california", "dodgers"), new Tuple2<String, String>("california", "giants"), new Tuple2<String, String>("new york", "yankees"), new Tuple2<String, String>("new york", "mets")), Arrays.asList(new Tuple2<String, String>("california", "sharks"), new Tuple2<String, String>("california", "ducks"), new Tuple2<String, String>("new york", "rangers"), new Tuple2<String, String>("new york", "islanders"))); @SuppressWarnings("unchecked") private List<List<Tuple2<String, Integer>>> stringIntKVStream = Arrays.asList( Arrays.asList( new Tuple2<String, Integer>("california", 1), new Tuple2<String, Integer>("california", 3), new Tuple2<String, Integer>("new york", 4), new Tuple2<String, Integer>("new york", 1)), Arrays.asList( new Tuple2<String, Integer>("california", 5), new Tuple2<String, Integer>("california", 5), new Tuple2<String, Integer>("new york", 3), new Tuple2<String, Integer>("new york", 1))); @SuppressWarnings("unchecked") @Test public void testPairMap() { // Maps pair -> pair of different type List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream; List<List<Tuple2<Integer, String>>> expected = Arrays.asList( Arrays.asList( new Tuple2<Integer, String>(1, "california"), new Tuple2<Integer, String>(3, "california"), new Tuple2<Integer, String>(4, "new york"), new Tuple2<Integer, String>(1, "new york")), Arrays.asList( new Tuple2<Integer, String>(5, "california"), new Tuple2<Integer, String>(5, "california"), new Tuple2<Integer, String>(3, "new york"), new Tuple2<Integer, String>(1, "new york"))); JavaDStream<Tuple2<String, Integer>> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream); JavaPairDStream<Integer, String> reversed = pairStream.mapToPair( new PairFunction<Tuple2<String, Integer>, Integer, String>() 
{ @Override public Tuple2<Integer, String> call(Tuple2<String, Integer> in) throws Exception { return in.swap(); } }); JavaTestUtils.attachTestOutputStream(reversed); List<List<Tuple2<Integer, String>>> result = JavaTestUtils.runStreams(ssc, 2, 2); Assert.assertEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testPairMapPartitions() { // Maps pair -> pair of different type List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream; List<List<Tuple2<Integer, String>>> expected = Arrays.asList( Arrays.asList( new Tuple2<Integer, String>(1, "california"), new Tuple2<Integer, String>(3, "california"), new Tuple2<Integer, String>(4, "new york"), new Tuple2<Integer, String>(1, "new york")), Arrays.asList( new Tuple2<Integer, String>(5, "california"), new Tuple2<Integer, String>(5, "california"), new Tuple2<Integer, String>(3, "new york"), new Tuple2<Integer, String>(1, "new york"))); JavaDStream<Tuple2<String, Integer>> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1); JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream); JavaPairDStream<Integer, String> reversed = pairStream.mapPartitionsToPair( new PairFlatMapFunction<Iterator<Tuple2<String, Integer>>, Integer, String>() { @Override public Iterable<Tuple2<Integer, String>> call(Iterator<Tuple2<String, Integer>> in) throws Exception { LinkedList<Tuple2<Integer, String>> out = new LinkedList<Tuple2<Integer, String>>(); while (in.hasNext()) { Tuple2<String, Integer> next = in.next(); out.add(next.swap()); } return out; } }); JavaTestUtils.attachTestOutputStream(reversed); List<List<Tuple2<Integer, String>>> result = JavaTestUtils.runStreams(ssc, 2, 2); Assert.assertEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testPairMap2() { // Maps pair -> single List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream; List<List<Integer>> expected = Arrays.asList( Arrays.asList(1, 3, 4, 1), Arrays.asList(5, 5, 3, 
1));
    // (continuation — the opening of this test lies above this chunk)
    // Map each (key, value) pair down to just its value and compare batches.
    JavaDStream<Tuple2<String, Integer>> stream =
        JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
    JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);
    JavaDStream<Integer> reversed = pairStream.map(
        new Function<Tuple2<String, Integer>, Integer>() {
          @Override
          public Integer call(Tuple2<String, Integer> in) throws Exception {
            return in._2();
          }
        });

    JavaTestUtils.attachTestOutputStream(reversed);
    List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 2, 2);

    Assert.assertEquals(expected, result);
  }

  /**
   * flatMapToPair may change both the key and the value type (pair -> pair):
   * each (word, n) input pair explodes into one (n, character) pair per
   * character of the word.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testPairToPairFlatMapWithChangingTypes() {
    // Maps pair -> pair
    List<List<Tuple2<String, Integer>>> inputData = Arrays.asList(
        Arrays.asList(
            new Tuple2<String, Integer>("hi", 1),
            new Tuple2<String, Integer>("ho", 2)),
        Arrays.asList(
            new Tuple2<String, Integer>("hi", 1),
            new Tuple2<String, Integer>("ho", 2)));

    List<List<Tuple2<Integer, String>>> expected = Arrays.asList(
        Arrays.asList(
            new Tuple2<Integer, String>(1, "h"),
            new Tuple2<Integer, String>(1, "i"),
            new Tuple2<Integer, String>(2, "h"),
            new Tuple2<Integer, String>(2, "o")),
        Arrays.asList(
            new Tuple2<Integer, String>(1, "h"),
            new Tuple2<Integer, String>(1, "i"),
            new Tuple2<Integer, String>(2, "h"),
            new Tuple2<Integer, String>(2, "o")));

    JavaDStream<Tuple2<String, Integer>> stream =
        JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
    JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);
    JavaPairDStream<Integer, String> flatMapped = pairStream.flatMapToPair(
        new PairFlatMapFunction<Tuple2<String, Integer>, Integer, String>() {
          @Override
          public Iterable<Tuple2<Integer, String>> call(Tuple2<String, Integer> in) throws Exception {
            List<Tuple2<Integer, String>> out = new LinkedList<Tuple2<Integer, String>>();
            for (Character s : in._1().toCharArray()) {
              out.add(new Tuple2<Integer, String>(in._2(), s.toString()));
            }
            return out;
          }
        });

    JavaTestUtils.attachTestOutputStream(flatMapped);
    List<List<Tuple2<Integer, String>>> result = JavaTestUtils.runStreams(ssc, 2, 2);

    Assert.assertEquals(expected, result);
  }

  /** groupByKey collects all values for each key within a batch. */
  @SuppressWarnings("unchecked")
  @Test
  public void testPairGroupByKey() {
    List<List<Tuple2<String, String>>> inputData = stringStringKVStream;

    List<List<Tuple2<String, List<String>>>> expected = Arrays.asList(
        Arrays.asList(
            new Tuple2<String, List<String>>("california", Arrays.asList("dodgers", "giants")),
            new Tuple2<String, List<String>>("new york", Arrays.asList("yankees", "mets"))),
        Arrays.asList(
            new Tuple2<String, List<String>>("california", Arrays.asList("sharks", "ducks")),
            new Tuple2<String, List<String>>("new york", Arrays.asList("rangers", "islanders"))));

    JavaDStream<Tuple2<String, String>> stream =
        JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
    JavaPairDStream<String, String> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaPairDStream<String, Iterable<String>> grouped = pairStream.groupByKey();
    JavaTestUtils.attachTestOutputStream(grouped);
    List<List<Tuple2<String, Iterable<String>>>> result = JavaTestUtils.runStreams(ssc, 2, 2);

    // groupByKey returns Iterables of unspecified concrete type, so compare
    // element-by-element via equalIterable rather than List.equals.
    Assert.assertEquals(expected.size(), result.size());
    Iterator<List<Tuple2<String, Iterable<String>>>> resultItr = result.iterator();
    Iterator<List<Tuple2<String, List<String>>>> expectedItr = expected.iterator();
    while (resultItr.hasNext() && expectedItr.hasNext()) {
      Iterator<Tuple2<String, Iterable<String>>> resultElements = resultItr.next().iterator();
      Iterator<Tuple2<String, List<String>>> expectedElements = expectedItr.next().iterator();
      while (resultElements.hasNext() && expectedElements.hasNext()) {
        Tuple2<String, Iterable<String>> resultElement = resultElements.next();
        Tuple2<String, List<String>> expectedElement = expectedElements.next();
        Assert.assertEquals(expectedElement._1(), resultElement._1());
        equalIterable(expectedElement._2(), resultElement._2());
      }
      // Both iterators must be exhausted together (same number of elements).
      Assert.assertEquals(resultElements.hasNext(), expectedElements.hasNext());
    }
  }

  /** reduceByKey sums the values for each key within a batch. */
  @SuppressWarnings("unchecked")
  @Test
  public
  void testPairReduceByKey() {
    List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;

    List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
        Arrays.asList(
            new Tuple2<String, Integer>("california", 4),
            new Tuple2<String, Integer>("new york", 5)),
        Arrays.asList(
            new Tuple2<String, Integer>("california", 10),
            new Tuple2<String, Integer>("new york", 4)));

    JavaDStream<Tuple2<String, Integer>> stream = JavaTestUtils.attachTestInputStream(
        ssc, inputData, 1);
    JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaPairDStream<String, Integer> reduced = pairStream.reduceByKey(new IntegerSum());

    JavaTestUtils.attachTestOutputStream(reduced);
    List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 2, 2);

    Assert.assertEquals(expected, result);
  }

  /**
   * combineByKey with an identity createCombiner and IntegerSum for both merge
   * functions behaves like reduceByKey; expected values match the test above.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testCombineByKey() {
    List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;

    List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
        Arrays.asList(
            new Tuple2<String, Integer>("california", 4),
            new Tuple2<String, Integer>("new york", 5)),
        Arrays.asList(
            new Tuple2<String, Integer>("california", 10),
            new Tuple2<String, Integer>("new york", 4)));

    JavaDStream<Tuple2<String, Integer>> stream = JavaTestUtils.attachTestInputStream(
        ssc, inputData, 1);
    JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaPairDStream<String, Integer> combined = pairStream.<Integer>combineByKey(
        new Function<Integer, Integer>() {
          @Override
          public Integer call(Integer i) throws Exception {
            return i;
          }
        }, new IntegerSum(), new IntegerSum(), new HashPartitioner(2));

    JavaTestUtils.attachTestOutputStream(combined);
    List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 2, 2);

    Assert.assertEquals(expected, result);
  }

  /** countByValue counts occurrences of each distinct element per batch. */
  @SuppressWarnings("unchecked")
  @Test
  public void testCountByValue() {
    List<List<String>> inputData = Arrays.asList(
        Arrays.asList("hello", "world"),
        Arrays.asList("hello", "moon"),
        Arrays.asList("hello"));

    List<List<Tuple2<String, Long>>> expected = Arrays.asList(
        Arrays.asList(
            new Tuple2<String, Long>("hello", 1L),
            new Tuple2<String, Long>("world", 1L)),
        Arrays.asList(
            new Tuple2<String, Long>("hello", 1L),
            new Tuple2<String, Long>("moon", 1L)),
        Arrays.asList(
            new Tuple2<String, Long>("hello", 1L)));

    JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
    JavaPairDStream<String, Long> counted = stream.countByValue();
    JavaTestUtils.attachTestOutputStream(counted);
    List<List<Tuple2<String, Long>>> result = JavaTestUtils.runStreams(ssc, 3, 3);

    Assert.assertEquals(expected, result);
  }

  /**
   * groupByKeyAndWindow over a 2000ms window sliding every 1000ms. Grouped values
   * have no deterministic order, so batches are converted to sets before comparing.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testGroupByKeyAndWindow() {
    List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;

    List<List<Tuple2<String, List<Integer>>>> expected = Arrays.asList(
        Arrays.asList(
            new Tuple2<String, List<Integer>>("california", Arrays.asList(1, 3)),
            new Tuple2<String, List<Integer>>("new york", Arrays.asList(1, 4))
        ),
        Arrays.asList(
            new Tuple2<String, List<Integer>>("california", Arrays.asList(1, 3, 5, 5)),
            new Tuple2<String, List<Integer>>("new york", Arrays.asList(1, 1, 3, 4))
        ),
        Arrays.asList(
            new Tuple2<String, List<Integer>>("california", Arrays.asList(5, 5)),
            new Tuple2<String, List<Integer>>("new york", Arrays.asList(1, 3))
        )
    );

    JavaDStream<Tuple2<String, Integer>> stream =
        JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
    JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaPairDStream<String, Iterable<Integer>> groupWindowed =
        pairStream.groupByKeyAndWindow(new Duration(2000), new Duration(1000));
    JavaTestUtils.attachTestOutputStream(groupWindowed);
    List<List<Tuple2<String, List<Integer>>>> result = JavaTestUtils.runStreams(ssc, 3, 3);

    Assert.assertEquals(expected.size(), result.size());
    for (int i = 0; i < result.size(); i++) {
      // convert() turns (key, list) pairs into (key, set) inside a set, making
      // the comparison order-insensitive.
      Assert.assertEquals(convert(expected.get(i)), convert(result.get(i)));
    }
  }
private
  /**
   * Converts a batch of (key, value-list) pairs into a set of (key, value-set)
   * pairs so window-grouping results can be compared order-insensitively.
   */
  HashSet<Tuple2<String, HashSet<Integer>>> convert(List<Tuple2<String, List<Integer>>> listOfTuples) {
    List<Tuple2<String, HashSet<Integer>>> newListOfTuples =
        new ArrayList<Tuple2<String, HashSet<Integer>>>();
    for (Tuple2<String, List<Integer>> tuple: listOfTuples) {
      newListOfTuples.add(convert(tuple));
    }
    return new HashSet<Tuple2<String, HashSet<Integer>>>(newListOfTuples);
  }

  /** Converts a single (key, value-list) tuple into a (key, value-set) tuple. */
  private Tuple2<String, HashSet<Integer>> convert(Tuple2<String, List<Integer>> tuple) {
    return new Tuple2<String, HashSet<Integer>>(tuple._1(), new HashSet<Integer>(tuple._2()));
  }

  /** reduceByKeyAndWindow (no inverse function) over a 2000ms window sliding every 1000ms. */
  @SuppressWarnings("unchecked")
  @Test
  public void testReduceByKeyAndWindow() {
    List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;

    List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
        Arrays.asList(new Tuple2<String, Integer>("california", 4),
            new Tuple2<String, Integer>("new york", 5)),
        Arrays.asList(new Tuple2<String, Integer>("california", 14),
            new Tuple2<String, Integer>("new york", 9)),
        Arrays.asList(new Tuple2<String, Integer>("california", 10),
            new Tuple2<String, Integer>("new york", 4)));

    JavaDStream<Tuple2<String, Integer>> stream =
        JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
    JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaPairDStream<String, Integer> reduceWindowed =
        pairStream.reduceByKeyAndWindow(new IntegerSum(), new Duration(2000), new Duration(1000));
    JavaTestUtils.attachTestOutputStream(reduceWindowed);
    List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 3, 3);

    Assert.assertEquals(expected, result);
  }

  /** updateStateByKey maintains a running per-key sum across batches. */
  @SuppressWarnings("unchecked")
  @Test
  public void testUpdateStateByKey() {
    List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;

    List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
        Arrays.asList(new Tuple2<String, Integer>("california", 4),
            new Tuple2<String, Integer>("new york", 5)),
        Arrays.asList(new Tuple2<String, Integer>("california", 14),
            new Tuple2<String,
            Integer>("new york", 9)),
        Arrays.asList(new Tuple2<String, Integer>("california", 14),
            new Tuple2<String, Integer>("new york", 9)));

    JavaDStream<Tuple2<String, Integer>> stream =
        JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
    JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaPairDStream<String, Integer> updated = pairStream.updateStateByKey(
        new Function2<List<Integer>, Optional<Integer>, Optional<Integer>>() {
          @Override
          public Optional<Integer> call(List<Integer> values, Optional<Integer> state) {
            // New state = previous state (if any) + sum of this batch's values.
            int out = 0;
            if (state.isPresent()) {
              out = out + state.get();
            }
            for (Integer v : values) {
              out = out + v;
            }
            return Optional.of(out);
          }
        });
    JavaTestUtils.attachTestOutputStream(updated);
    List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 3, 3);

    Assert.assertEquals(expected, result);
  }

  /** updateStateByKey seeded with an initial RDD of per-key state. */
  @SuppressWarnings("unchecked")
  @Test
  public void testUpdateStateByKeyWithInitial() {
    List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;

    // Initial state: california starts at 1, new york at 2; expected sums below
    // are shifted up by these seeds relative to testUpdateStateByKey.
    List<Tuple2<String, Integer>> initial = Arrays.asList (
        new Tuple2<String, Integer> ("california", 1),
        new Tuple2<String, Integer> ("new york", 2));

    JavaRDD<Tuple2<String, Integer>> tmpRDD = ssc.sparkContext().parallelize(initial);
    JavaPairRDD<String, Integer> initialRDD = JavaPairRDD.fromJavaRDD (tmpRDD);

    List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
        Arrays.asList(new Tuple2<String, Integer>("california", 5),
            new Tuple2<String, Integer>("new york", 7)),
        Arrays.asList(new Tuple2<String, Integer>("california", 15),
            new Tuple2<String, Integer>("new york", 11)),
        Arrays.asList(new Tuple2<String, Integer>("california", 15),
            new Tuple2<String, Integer>("new york", 11)));

    JavaDStream<Tuple2<String, Integer>> stream =
        JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
    JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaPairDStream<String, Integer> updated = pairStream.updateStateByKey(
        new Function2<List<Integer>,
            Optional<Integer>, Optional<Integer>>() {
          @Override
          public Optional<Integer> call(List<Integer> values, Optional<Integer> state) {
            int out = 0;
            if (state.isPresent()) {
              out = out + state.get();
            }
            for (Integer v : values) {
              out = out + v;
            }
            return Optional.of(out);
          }
        }, new HashPartitioner(1), initialRDD);
    JavaTestUtils.attachTestOutputStream(updated);
    List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 3, 3);

    assertOrderInvariantEquals(expected, result);
  }

  /** reduceByKeyAndWindow with an inverse function (incremental window reduction). */
  @SuppressWarnings("unchecked")
  @Test
  public void testReduceByKeyAndWindowWithInverse() {
    List<List<Tuple2<String, Integer>>> inputData = stringIntKVStream;

    List<List<Tuple2<String, Integer>>> expected = Arrays.asList(
        Arrays.asList(new Tuple2<String, Integer>("california", 4),
            new Tuple2<String, Integer>("new york", 5)),
        Arrays.asList(new Tuple2<String, Integer>("california", 14),
            new Tuple2<String, Integer>("new york", 9)),
        Arrays.asList(new Tuple2<String, Integer>("california", 10),
            new Tuple2<String, Integer>("new york", 4)));

    JavaDStream<Tuple2<String, Integer>> stream =
        JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
    JavaPairDStream<String, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaPairDStream<String, Integer> reduceWindowed =
        pairStream.reduceByKeyAndWindow(new IntegerSum(), new IntegerDifference(),
            new Duration(2000), new Duration(1000));
    JavaTestUtils.attachTestOutputStream(reduceWindowed);
    List<List<Tuple2<String, Integer>>> result = JavaTestUtils.runStreams(ssc, 3, 3);

    Assert.assertEquals(expected, result);
  }

  /**
   * countByValueAndWindow over a 2000ms window sliding every 1000ms; per-batch
   * output order is unspecified, so batches are compared as sets.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testCountByValueAndWindow() {
    List<List<String>> inputData = Arrays.asList(
        Arrays.asList("hello", "world"),
        Arrays.asList("hello", "moon"),
        Arrays.asList("hello"));

    List<HashSet<Tuple2<String, Long>>> expected = Arrays.asList(
        Sets.newHashSet(
            new Tuple2<String, Long>("hello", 1L),
            new Tuple2<String, Long>("world", 1L)),
        Sets.newHashSet(
            new Tuple2<String, Long>("hello", 2L),
            new Tuple2<String,
            Long>("world", 1L),
            new Tuple2<String, Long>("moon", 1L)),
        Sets.newHashSet(
            new Tuple2<String, Long>("hello", 2L),
            new Tuple2<String, Long>("moon", 1L)));

    JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(
        ssc, inputData, 1);
    JavaPairDStream<String, Long> counted =
        stream.countByValueAndWindow(new Duration(2000), new Duration(1000));
    JavaTestUtils.attachTestOutputStream(counted);
    List<List<Tuple2<String, Long>>> result = JavaTestUtils.runStreams(ssc, 3, 3);

    List<HashSet<Tuple2<String, Long>>> unorderedResult = Lists.newArrayList();
    for (List<Tuple2<String, Long>> res: result) {
      unorderedResult.add(Sets.newHashSet(res));
    }

    Assert.assertEquals(expected, unorderedResult);
  }

  /**
   * transformToPair runs an arbitrary RDD-to-RDD function on each batch;
   * here every batch is sorted by key.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testPairTransform() {
    List<List<Tuple2<Integer, Integer>>> inputData = Arrays.asList(
        Arrays.asList(
            new Tuple2<Integer, Integer>(3, 5),
            new Tuple2<Integer, Integer>(1, 5),
            new Tuple2<Integer, Integer>(4, 5),
            new Tuple2<Integer, Integer>(2, 5)),
        Arrays.asList(
            new Tuple2<Integer, Integer>(2, 5),
            new Tuple2<Integer, Integer>(3, 5),
            new Tuple2<Integer, Integer>(4, 5),
            new Tuple2<Integer, Integer>(1, 5)));

    List<List<Tuple2<Integer, Integer>>> expected = Arrays.asList(
        Arrays.asList(
            new Tuple2<Integer, Integer>(1, 5),
            new Tuple2<Integer, Integer>(2, 5),
            new Tuple2<Integer, Integer>(3, 5),
            new Tuple2<Integer, Integer>(4, 5)),
        Arrays.asList(
            new Tuple2<Integer, Integer>(1, 5),
            new Tuple2<Integer, Integer>(2, 5),
            new Tuple2<Integer, Integer>(3, 5),
            new Tuple2<Integer, Integer>(4, 5)));

    JavaDStream<Tuple2<Integer, Integer>> stream = JavaTestUtils.attachTestInputStream(
        ssc, inputData, 1);
    JavaPairDStream<Integer, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaPairDStream<Integer, Integer> sorted = pairStream.transformToPair(
        new Function<JavaPairRDD<Integer, Integer>, JavaPairRDD<Integer, Integer>>() {
          @Override
          public JavaPairRDD<Integer, Integer> call(JavaPairRDD<Integer, Integer> in) throws Exception {
            return
in.sortByKey();  // (continuation) sort each batch's RDD by key
          }
        });

    JavaTestUtils.attachTestOutputStream(sorted);
    List<List<Tuple2<Integer, Integer>>> result = JavaTestUtils.runStreams(ssc, 2, 2);

    Assert.assertEquals(expected, result);
  }

  /** transform can also turn a pair DStream into a non-pair DStream (keys only here). */
  @SuppressWarnings("unchecked")
  @Test
  public void testPairToNormalRDDTransform() {
    List<List<Tuple2<Integer, Integer>>> inputData = Arrays.asList(
        Arrays.asList(
            new Tuple2<Integer, Integer>(3, 5),
            new Tuple2<Integer, Integer>(1, 5),
            new Tuple2<Integer, Integer>(4, 5),
            new Tuple2<Integer, Integer>(2, 5)),
        Arrays.asList(
            new Tuple2<Integer, Integer>(2, 5),
            new Tuple2<Integer, Integer>(3, 5),
            new Tuple2<Integer, Integer>(4, 5),
            new Tuple2<Integer, Integer>(1, 5)));

    List<List<Integer>> expected = Arrays.asList(
        Arrays.asList(3,1,4,2),
        Arrays.asList(2,3,4,1));

    JavaDStream<Tuple2<Integer, Integer>> stream = JavaTestUtils.attachTestInputStream(
        ssc, inputData, 1);
    JavaPairDStream<Integer, Integer> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaDStream<Integer> firstParts = pairStream.transform(
        new Function<JavaPairRDD<Integer, Integer>, JavaRDD<Integer>>() {
          @Override
          public JavaRDD<Integer> call(JavaPairRDD<Integer, Integer> in) throws Exception {
            return in.map(new Function<Tuple2<Integer, Integer>, Integer>() {
              @Override
              public Integer call(Tuple2<Integer, Integer> in) {
                return in._1();
              }
            });
          }
        });

    JavaTestUtils.attachTestOutputStream(firstParts);
    List<List<Integer>> result = JavaTestUtils.runStreams(ssc, 2, 2);

    Assert.assertEquals(expected, result);
  }

  /** mapValues transforms only values; keys pass through untouched. */
  @SuppressWarnings("unchecked")
  @Test
  public void testMapValues() {
    List<List<Tuple2<String, String>>> inputData = stringStringKVStream;

    List<List<Tuple2<String, String>>> expected = Arrays.asList(
        Arrays.asList(new Tuple2<String, String>("california", "DODGERS"),
            new Tuple2<String, String>("california", "GIANTS"),
            new Tuple2<String, String>("new york", "YANKEES"),
            new Tuple2<String, String>("new york", "METS")),
        Arrays.asList(new Tuple2<String, String>("california", "SHARKS"),
            new Tuple2<String, String>("california",
                "DUCKS"),
            new Tuple2<String, String>("new york", "RANGERS"),
            new Tuple2<String, String>("new york", "ISLANDERS")));

    JavaDStream<Tuple2<String, String>> stream = JavaTestUtils.attachTestInputStream(
        ssc, inputData, 1);
    JavaPairDStream<String, String> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaPairDStream<String, String> mapped = pairStream.mapValues(new Function<String, String>() {
      @Override
      public String call(String s) throws Exception {
        return s.toUpperCase();
      }
    });

    JavaTestUtils.attachTestOutputStream(mapped);
    List<List<Tuple2<String, String>>> result = JavaTestUtils.runStreams(ssc, 2, 2);

    Assert.assertEquals(expected, result);
  }

  /** flatMapValues expands each value into multiple values under the same key. */
  @SuppressWarnings("unchecked")
  @Test
  public void testFlatMapValues() {
    List<List<Tuple2<String, String>>> inputData = stringStringKVStream;

    List<List<Tuple2<String, String>>> expected = Arrays.asList(
        Arrays.asList(new Tuple2<String, String>("california", "dodgers1"),
            new Tuple2<String, String>("california", "dodgers2"),
            new Tuple2<String, String>("california", "giants1"),
            new Tuple2<String, String>("california", "giants2"),
            new Tuple2<String, String>("new york", "yankees1"),
            new Tuple2<String, String>("new york", "yankees2"),
            new Tuple2<String, String>("new york", "mets1"),
            new Tuple2<String, String>("new york", "mets2")),
        Arrays.asList(new Tuple2<String, String>("california", "sharks1"),
            new Tuple2<String, String>("california", "sharks2"),
            new Tuple2<String, String>("california", "ducks1"),
            new Tuple2<String, String>("california", "ducks2"),
            new Tuple2<String, String>("new york", "rangers1"),
            new Tuple2<String, String>("new york", "rangers2"),
            new Tuple2<String, String>("new york", "islanders1"),
            new Tuple2<String, String>("new york", "islanders2")));

    JavaDStream<Tuple2<String, String>> stream = JavaTestUtils.attachTestInputStream(
        ssc, inputData, 1);
    JavaPairDStream<String, String> pairStream = JavaPairDStream.fromJavaDStream(stream);

    JavaPairDStream<String, String> flatMapped = pairStream.flatMapValues(
        new
        Function<String, Iterable<String>>() {
          @Override
          public Iterable<String> call(String in) {
            // Each value v becomes v1 and v2 under the same key.
            List<String> out = new ArrayList<String>();
            out.add(in + "1");
            out.add(in + "2");
            return out;
          }
        });

    JavaTestUtils.attachTestOutputStream(flatMapped);
    List<List<Tuple2<String, String>>> result = JavaTestUtils.runStreams(ssc, 2, 2);

    Assert.assertEquals(expected, result);
  }

  /** cogroup pairs up all values from both streams per key: (key, (values1, values2)). */
  @SuppressWarnings("unchecked")
  @Test
  public void testCoGroup() {
    List<List<Tuple2<String, String>>> stringStringKVStream1 = Arrays.asList(
        Arrays.asList(new Tuple2<String, String>("california", "dodgers"),
            new Tuple2<String, String>("new york", "yankees")),
        Arrays.asList(new Tuple2<String, String>("california", "sharks"),
            new Tuple2<String, String>("new york", "rangers")));

    List<List<Tuple2<String, String>>> stringStringKVStream2 = Arrays.asList(
        Arrays.asList(new Tuple2<String, String>("california", "giants"),
            new Tuple2<String, String>("new york", "mets")),
        Arrays.asList(new Tuple2<String, String>("california", "ducks"),
            new Tuple2<String, String>("new york", "islanders")));

    List<List<Tuple2<String, Tuple2<List<String>, List<String>>>>> expected = Arrays.asList(
        Arrays.asList(
            new Tuple2<String, Tuple2<List<String>, List<String>>>("california",
                new Tuple2<List<String>, List<String>>(Arrays.asList("dodgers"), Arrays.asList("giants"))),
            new Tuple2<String, Tuple2<List<String>, List<String>>>("new york",
                new Tuple2<List<String>, List<String>>(Arrays.asList("yankees"), Arrays.asList("mets")))),
        Arrays.asList(
            new Tuple2<String, Tuple2<List<String>, List<String>>>("california",
                new Tuple2<List<String>, List<String>>(Arrays.asList("sharks"), Arrays.asList("ducks"))),
            new Tuple2<String, Tuple2<List<String>, List<String>>>("new york",
                new Tuple2<List<String>, List<String>>(Arrays.asList("rangers"), Arrays.asList("islanders")))));

    JavaDStream<Tuple2<String, String>> stream1 = JavaTestUtils.attachTestInputStream(
        ssc, stringStringKVStream1, 1);
    JavaPairDStream<String, String> pairStream1 =
        JavaPairDStream.fromJavaDStream(stream1);

    JavaDStream<Tuple2<String, String>> stream2 = JavaTestUtils.attachTestInputStream(
        ssc, stringStringKVStream2, 1);
    JavaPairDStream<String, String> pairStream2 = JavaPairDStream.fromJavaDStream(stream2);

    JavaPairDStream<String, Tuple2<Iterable<String>, Iterable<String>>> grouped =
        pairStream1.cogroup(pairStream2);
    JavaTestUtils.attachTestOutputStream(grouped);
    List<List<Tuple2<String, Tuple2<Iterable<String>, Iterable<String>>>>> result =
        JavaTestUtils.runStreams(ssc, 2, 2);

    // cogroup returns Iterables of unspecified concrete type, so compare
    // element-by-element with equalIterable instead of List.equals.
    Assert.assertEquals(expected.size(), result.size());
    Iterator<List<Tuple2<String, Tuple2<Iterable<String>, Iterable<String>>>>> resultItr =
        result.iterator();
    Iterator<List<Tuple2<String, Tuple2<List<String>, List<String>>>>> expectedItr =
        expected.iterator();
    while (resultItr.hasNext() && expectedItr.hasNext()) {
      Iterator<Tuple2<String, Tuple2<Iterable<String>, Iterable<String>>>> resultElements =
          resultItr.next().iterator();
      Iterator<Tuple2<String, Tuple2<List<String>, List<String>>>> expectedElements =
          expectedItr.next().iterator();
      while (resultElements.hasNext() && expectedElements.hasNext()) {
        Tuple2<String, Tuple2<Iterable<String>, Iterable<String>>> resultElement =
            resultElements.next();
        Tuple2<String, Tuple2<List<String>, List<String>>> expectedElement =
            expectedElements.next();
        Assert.assertEquals(expectedElement._1(), resultElement._1());
        equalIterable(expectedElement._2()._1(), resultElement._2()._1());
        equalIterable(expectedElement._2()._2(), resultElement._2()._2());
      }
      Assert.assertEquals(resultElements.hasNext(), expectedElements.hasNext());
    }
  }

  /** join yields (key, (value1, value2)) for keys present in both streams. */
  @SuppressWarnings("unchecked")
  @Test
  public void testJoin() {
    List<List<Tuple2<String, String>>> stringStringKVStream1 = Arrays.asList(
        Arrays.asList(new Tuple2<String, String>("california", "dodgers"),
            new Tuple2<String, String>("new york", "yankees")),
        Arrays.asList(new Tuple2<String, String>("california", "sharks"),
            new Tuple2<String, String>("new york", "rangers")));

    List<List<Tuple2<String,
        String>>> stringStringKVStream2 = Arrays.asList(
        Arrays.asList(new Tuple2<String, String>("california", "giants"),
            new Tuple2<String, String>("new york", "mets")),
        Arrays.asList(new Tuple2<String, String>("california", "ducks"),
            new Tuple2<String, String>("new york", "islanders")));

    List<List<Tuple2<String, Tuple2<String, String>>>> expected = Arrays.asList(
        Arrays.asList(
            new Tuple2<String, Tuple2<String, String>>("california",
                new Tuple2<String, String>("dodgers", "giants")),
            new Tuple2<String, Tuple2<String, String>>("new york",
                new Tuple2<String, String>("yankees", "mets"))),
        Arrays.asList(
            new Tuple2<String, Tuple2<String, String>>("california",
                new Tuple2<String, String>("sharks", "ducks")),
            new Tuple2<String, Tuple2<String, String>>("new york",
                new Tuple2<String, String>("rangers", "islanders"))));

    JavaDStream<Tuple2<String, String>> stream1 = JavaTestUtils.attachTestInputStream(
        ssc, stringStringKVStream1, 1);
    JavaPairDStream<String, String> pairStream1 = JavaPairDStream.fromJavaDStream(stream1);

    JavaDStream<Tuple2<String, String>> stream2 = JavaTestUtils.attachTestInputStream(
        ssc, stringStringKVStream2, 1);
    JavaPairDStream<String, String> pairStream2 = JavaPairDStream.fromJavaDStream(stream2);

    JavaPairDStream<String, Tuple2<String, String>> joined = pairStream1.join(pairStream2);
    JavaTestUtils.attachTestOutputStream(joined);
    List<List<Tuple2<String, Tuple2<String, String>>>> result = JavaTestUtils.runStreams(ssc, 2, 2);

    Assert.assertEquals(expected, result);
  }

  /**
   * leftOuterJoin keeps all keys from the left stream; this test only checks the
   * joined record count per batch, not the records themselves.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testLeftOuterJoin() {
    List<List<Tuple2<String, String>>> stringStringKVStream1 = Arrays.asList(
        Arrays.asList(new Tuple2<String, String>("california", "dodgers"),
            new Tuple2<String, String>("new york", "yankees")),
        Arrays.asList(new Tuple2<String, String>("california", "sharks") ));

    List<List<Tuple2<String, String>>> stringStringKVStream2 = Arrays.asList(
        Arrays.asList(new Tuple2<String, String>("california", "giants") ),
        Arrays.asList(new
Tuple2<String, String>("new york", "islanders") ) );  // (continuation of the stream-2 fixture)

    // Batch 1: both left keys join; batch 2: only california is present on the left.
    List<List<Long>> expected = Arrays.asList(Arrays.asList(2L), Arrays.asList(1L));

    JavaDStream<Tuple2<String, String>> stream1 = JavaTestUtils.attachTestInputStream(
        ssc, stringStringKVStream1, 1);
    JavaPairDStream<String, String> pairStream1 = JavaPairDStream.fromJavaDStream(stream1);

    JavaDStream<Tuple2<String, String>> stream2 = JavaTestUtils.attachTestInputStream(
        ssc, stringStringKVStream2, 1);
    JavaPairDStream<String, String> pairStream2 = JavaPairDStream.fromJavaDStream(stream2);

    JavaPairDStream<String, Tuple2<String, Optional<String>>> joined =
        pairStream1.leftOuterJoin(pairStream2);
    JavaDStream<Long> counted = joined.count();
    JavaTestUtils.attachTestOutputStream(counted);
    List<List<Long>> result = JavaTestUtils.runStreams(ssc, 2, 2);

    Assert.assertEquals(expected, result);
  }

  /**
   * Checkpoints to a temp dir, stops the context, recreates it from the
   * checkpoint, and verifies the batches produced after recovery.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testCheckpointMasterRecovery() throws InterruptedException {
    List<List<String>> inputData = Arrays.asList(
        Arrays.asList("this", "is"),
        Arrays.asList("a", "test"),
        Arrays.asList("counting", "letters"));

    List<List<Integer>> expectedInitial = Arrays.asList(
        Arrays.asList(4,2));
    List<List<Integer>> expectedFinal = Arrays.asList(
        Arrays.asList(1,4),
        Arrays.asList(8,7));

    File tempDir = Files.createTempDir();
    tempDir.deleteOnExit();
    ssc.checkpoint(tempDir.getAbsolutePath());

    JavaDStream<String> stream = JavaCheckpointTestUtils.attachTestInputStream(ssc, inputData, 1);
    JavaDStream<Integer> letterCount = stream.map(new Function<String, Integer>() {
      @Override
      public Integer call(String s) throws Exception {
        return s.length();
      }
    });
    JavaCheckpointTestUtils.attachTestOutputStream(letterCount);
    List<List<Integer>> initialResult = JavaTestUtils.runStreams(ssc, 1, 1);

    assertOrderInvariantEquals(expectedInitial, initialResult);
    Thread.sleep(1000);
    ssc.stop();

    // Recreate the context from the checkpoint directory.
    ssc = new JavaStreamingContext(tempDir.getAbsolutePath());
    // Tweak to take into consideration that the last batch before failure
    // will be re-processed after recovery
    List<List<Integer>> finalResult = JavaCheckpointTestUtils.runStreams(ssc, 2, 3);
    assertOrderInvariantEquals(expectedFinal, finalResult.subList(1, 3));
    Utils.deleteRecursively(tempDir);
  }

  /**
   * Exercises JavaStreamingContext.getOrCreate against an empty directory,
   * a corrupted checkpoint, and a valid checkpoint, checking when a new
   * context is created versus when the old one is recovered.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testContextGetOrCreate() throws InterruptedException {
    ssc.stop();

    final SparkConf conf = new SparkConf()
        .setMaster("local[2]")
        .setAppName("test")
        .set("newContext", "true");

    File emptyDir = Files.createTempDir();
    emptyDir.deleteOnExit();
    StreamingContextSuite contextSuite = new StreamingContextSuite();
    String corruptedCheckpointDir = contextSuite.createCorruptedCheckpoint();
    String checkpointDir = contextSuite.createValidCheckpoint();

    // Function to create JavaStreamingContext without any output operations
    // (used to detect the new context)
    final AtomicBoolean newContextCreated = new AtomicBoolean(false);
    Function0<JavaStreamingContext> creatingFunc = new Function0<JavaStreamingContext>() {
      public JavaStreamingContext call() {
        newContextCreated.set(true);
        return new JavaStreamingContext(conf, Seconds.apply(1));
      }
    };

    // Empty dir: nothing to recover, so the creating function must run.
    newContextCreated.set(false);
    ssc = JavaStreamingContext.getOrCreate(emptyDir.getAbsolutePath(), creatingFunc);
    Assert.assertTrue("new context not created", newContextCreated.get());
    ssc.stop();

    // Corrupted checkpoint with createOnError=true: falls back to a new context.
    newContextCreated.set(false);
    ssc = JavaStreamingContext.getOrCreate(corruptedCheckpointDir, creatingFunc,
        new org.apache.hadoop.conf.Configuration(), true);
    Assert.assertTrue("new context not created", newContextCreated.get());
    ssc.stop();

    // Valid checkpoint: the old context must be recovered, not recreated.
    newContextCreated.set(false);
    ssc = JavaStreamingContext.getOrCreate(checkpointDir, creatingFunc,
        new org.apache.hadoop.conf.Configuration());
    Assert.assertTrue("old context not recovered", !newContextCreated.get());
    ssc.stop();

    // Same again, with an existing SparkContext already constructed.
    newContextCreated.set(false);
    JavaSparkContext sc = new JavaSparkContext(conf);
    ssc = JavaStreamingContext.getOrCreate(checkpointDir, creatingFunc,
        new org.apache.hadoop.conf.Configuration());
    Assert.assertTrue("old context not recovered", !newContextCreated.get());
    ssc.stop();
  }

  /* TEST DISABLED: Pending a discussion about checkpoint() semantics with TD
  @SuppressWarnings("unchecked")
  @Test
  public void testCheckpointofIndividualStream() throws InterruptedException {
    List<List<String>> inputData = Arrays.asList(
        Arrays.asList("this", "is"),
        Arrays.asList("a", "test"),
        Arrays.asList("counting", "letters"));

    List<List<Integer>> expected = Arrays.asList(
        Arrays.asList(4,2),
        Arrays.asList(1,4),
        Arrays.asList(8,7));

    JavaDStream stream = JavaCheckpointTestUtils.attachTestInputStream(ssc, inputData, 1);
    JavaDStream letterCount = stream.map(new Function<String, Integer>() {
      @Override
      public Integer call(String s) throws Exception {
        return s.length();
      }
    });
    JavaCheckpointTestUtils.attachTestOutputStream(letterCount);
    letterCount.checkpoint(new Duration(1000));

    List<List<Integer>> result1 = JavaCheckpointTestUtils.runStreams(ssc, 3, 3);
    assertOrderInvariantEquals(expected, result1);
  }
  */

  // Input stream tests. These mostly just test that we can instantiate a given InputStream with
  // Java arguments and assign it to a JavaDStream without producing type errors. Testing of the
  // InputStream functionality is deferred to the existing Scala tests.
@Test public void testSocketTextStream() { JavaReceiverInputDStream<String> test = ssc.socketTextStream("localhost", 12345); } @Test public void testSocketString() { class Converter implements Function<InputStream, Iterable<String>> { public Iterable<String> call(InputStream in) throws IOException { BufferedReader reader = new BufferedReader(new InputStreamReader(in)); List<String> out = new ArrayList<String>(); while (true) { String line = reader.readLine(); if (line == null) { break; } out.add(line); } return out; } } JavaDStream<String> test = ssc.socketStream( "localhost", 12345, new Converter(), StorageLevel.MEMORY_ONLY()); } @SuppressWarnings("unchecked") @Test public void testTextFileStream() throws IOException { File testDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark"); List<List<String>> expected = fileTestPrepare(testDir); JavaDStream<String> input = ssc.textFileStream(testDir.toString()); JavaTestUtils.attachTestOutputStream(input); List<List<String>> result = JavaTestUtils.runStreams(ssc, 1, 1); assertOrderInvariantEquals(expected, result); } @SuppressWarnings("unchecked") @Test public void testFileStream() throws IOException { File testDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark"); List<List<String>> expected = fileTestPrepare(testDir); JavaPairInputDStream<LongWritable, Text> inputStream = ssc.fileStream( testDir.toString(), LongWritable.class, Text.class, TextInputFormat.class, new Function<Path, Boolean>() { @Override public Boolean call(Path v1) throws Exception { return Boolean.TRUE; } }, true); JavaDStream<String> test = inputStream.map( new Function<Tuple2<LongWritable, Text>, String>() { @Override public String call(Tuple2<LongWritable, Text> v1) throws Exception { return v1._2().toString(); } }); JavaTestUtils.attachTestOutputStream(test); List<List<String>> result = JavaTestUtils.runStreams(ssc, 1, 1); assertOrderInvariantEquals(expected, result); } @Test public void testRawSocketStream() { 
JavaReceiverInputDStream<String> test = ssc.rawSocketStream("localhost", 12345); } private List<List<String>> fileTestPrepare(File testDir) throws IOException { File existingFile = new File(testDir, "0"); Files.write("0\n", existingFile, Charset.forName("UTF-8")); assertTrue(existingFile.setLastModified(1000) && existingFile.lastModified() == 1000); List<List<String>> expected = Arrays.asList( Arrays.asList("0") ); return expected; } @SuppressWarnings("unchecked") // SPARK-5795: no logic assertions, just testing that intended API invocations compile private void compileSaveAsJavaAPI(JavaPairDStream<LongWritable,Text> pds) { pds.saveAsNewAPIHadoopFiles( "", "", LongWritable.class, Text.class, org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat.class); pds.saveAsHadoopFiles( "", "", LongWritable.class, Text.class, org.apache.hadoop.mapred.SequenceFileOutputFormat.class); // Checks that a previous common workaround for this API still compiles pds.saveAsNewAPIHadoopFiles( "", "", LongWritable.class, Text.class, (Class) org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat.class); pds.saveAsHadoopFiles( "", "", LongWritable.class, Text.class, (Class) org.apache.hadoop.mapred.SequenceFileOutputFormat.class); } }
apache-2.0
jorgebay/tinkerpop
gremlin-server/src/test/java/org/apache/tinkerpop/gremlin/server/util/DefaultGraphManagerTest.java
5854
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.server.util; import org.apache.tinkerpop.gremlin.server.GraphManager; import org.apache.tinkerpop.gremlin.server.Settings; import org.apache.tinkerpop.gremlin.structure.Graph; import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph; import org.junit.Test; import javax.script.Bindings; import java.util.Map; import java.util.Set; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertSame; /** * @author Stephen Mallette (http://stephen.genoprime.com) */ public class DefaultGraphManagerTest { @Test public void shouldReturnGraphs() { final Settings settings = Settings.read(DefaultGraphManagerTest.class.getResourceAsStream("../gremlin-server-integration.yaml")); final GraphManager graphManager = new DefaultGraphManager(settings); final Set<String> graphNames = graphManager.getGraphNames(); assertNotNull(graphNames); assertEquals(1, graphNames.size()); assertEquals(graphNames.toArray()[0], "graph"); 
assertThat(graphManager.getGraph("graph"), instanceOf(TinkerGraph.class)); } @Test public void shouldGetAsBindings() { final Settings settings = Settings.read(DefaultGraphManagerTest.class.getResourceAsStream("../gremlin-server-integration.yaml")); final GraphManager graphManager = new DefaultGraphManager(settings); final Bindings bindings = graphManager.getAsBindings(); assertNotNull(bindings); assertEquals(1, bindings.size()); assertThat(bindings.get("graph"), instanceOf(TinkerGraph.class)); assertThat(bindings.containsKey("graph"), is(true)); } @Test public void shouldGetGraph() { final Settings settings = Settings.read(DefaultGraphManagerTest.class.getResourceAsStream("../gremlin-server-integration.yaml")); final GraphManager graphManager = new DefaultGraphManager(settings); final Graph graph = graphManager.getGraph("graph"); assertNotNull(graph); assertThat(graph, instanceOf(TinkerGraph.class)); } @Test public void shouldGetDynamicallyAddedGraph() { final Settings settings = Settings.read(DefaultGraphManagerTest.class.getResourceAsStream("../gremlin-server-integration.yaml")); final GraphManager graphManager = new DefaultGraphManager(settings); final Graph graph = graphManager.getGraph("graph"); //fake out a graph instance graphManager.putGraph("newGraph", graph); final Set<String> graphNames = graphManager.getGraphNames(); assertNotNull(graphNames); assertEquals(2, graphNames.size()); assertThat(graphNames.contains("newGraph"), is(true)); assertThat(graphManager.getGraph("newGraph"), instanceOf(TinkerGraph.class)); } @Test public void shouldNotGetRemovedGraph() throws Exception { final Settings settings = Settings.read(DefaultGraphManagerTest.class.getResourceAsStream("../gremlin-server-integration.yaml")); final GraphManager graphManager = new DefaultGraphManager(settings); final Graph graph = graphManager.getGraph("graph"); //fake out a graph instance graphManager.putGraph("newGraph", graph); final Set<String> graphNames = graphManager.getGraphNames(); 
assertNotNull(graphNames); assertEquals(2, graphNames.size()); assertThat(graphNames.contains("newGraph"), is(true)); assertThat(graphManager.getGraph("newGraph"), instanceOf(TinkerGraph.class)); graphManager.removeGraph("newGraph"); final Set<String> graphNames2 = graphManager.getGraphNames(); assertEquals(1, graphNames2.size()); assertThat(graphNames2.contains("newGraph"), is(false)); } @Test public void openGraphShouldReturnExistingGraph() { final Settings settings = Settings.read(DefaultGraphManagerTest.class.getResourceAsStream("../gremlin-server-integration.yaml")); final GraphManager graphManager = new DefaultGraphManager(settings); final Graph graph = graphManager.openGraph("graph", null); assertNotNull(graph); assertThat(graph, instanceOf(TinkerGraph.class)); } @Test public void openGraphShouldReturnNewGraphUsingThunk() { final Settings settings = Settings.read(DefaultGraphManagerTest.class.getResourceAsStream("../gremlin-server-integration.yaml")); final GraphManager graphManager = new DefaultGraphManager(settings); final Graph graph = graphManager.getGraph("graph"); //fake out graph instance final Graph newGraph = graphManager.openGraph("newGraph", (String gName) -> { return graph; }); assertNotNull(graph); assertThat(graph, instanceOf(TinkerGraph.class)); assertSame(graph, newGraph); } }
apache-2.0
jswudi/alluxio
core/server/worker/src/main/java/alluxio/worker/block/evictor/AbstractEvictor.java
9007
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.worker.block.evictor; import alluxio.Sessions; import alluxio.collections.Pair; import alluxio.exception.BlockDoesNotExistException; import alluxio.worker.block.AbstractBlockStoreEventListener; import alluxio.worker.block.BlockMetadataManagerView; import alluxio.worker.block.BlockStoreLocation; import alluxio.worker.block.allocator.Allocator; import alluxio.worker.block.meta.BlockMeta; import alluxio.worker.block.meta.StorageDirView; import alluxio.worker.block.meta.StorageTierView; import com.google.common.base.Preconditions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import javax.annotation.concurrent.NotThreadSafe; /** * Provides the basic implementation for every evictor. */ @NotThreadSafe public abstract class AbstractEvictor extends AbstractBlockStoreEventListener implements Evictor { private static final Logger LOG = LoggerFactory.getLogger(AbstractEvictor.class); protected final Allocator mAllocator; protected BlockMetadataManagerView mManagerView; /** * Creates a new instance of {@link AbstractEvictor}. * * @param view a view of block metadata information * @param allocator an allocation policy */ public AbstractEvictor(BlockMetadataManagerView view, Allocator allocator) { mManagerView = Preconditions.checkNotNull(view, "view"); mAllocator = Preconditions.checkNotNull(allocator, "allocator"); } /** * A recursive implementation of cascading eviction. 
* * This method uses a specific eviction strategy to find blocks to evict in the requested * location. After eviction, one {@link alluxio.worker.block.meta.StorageDir} in the location has * the specific amount of free space. It then uses an allocation strategy to allocate space in the * next tier to move each evicted blocks. If the next tier fails to allocate space for the evicted * blocks, the next tier will continue to evict its blocks to free space. * * This method is only used in * {@link #freeSpaceWithView(long, BlockStoreLocation, BlockMetadataManagerView)}. * * @param bytesToBeAvailable bytes to be available after eviction * @param location target location to evict blocks from * @param plan the plan to be recursively updated, is empty when first called in * {@link #freeSpaceWithView(long, BlockStoreLocation, BlockMetadataManagerView)} * @return the first {@link StorageDirView} in the range of location to evict/move bytes from, or * null if there is no plan */ protected StorageDirView cascadingEvict(long bytesToBeAvailable, BlockStoreLocation location, EvictionPlan plan) { location = updateBlockStoreLocation(bytesToBeAvailable, location); // 1. If bytesToBeAvailable can already be satisfied without eviction, return the eligible // StoargeDirView StorageDirView candidateDirView = EvictorUtils.selectDirWithRequestedSpace(bytesToBeAvailable, location, mManagerView); if (candidateDirView != null) { return candidateDirView; } // 2. 
Iterate over blocks in order until we find a StorageDirView that is in the range of // location and can satisfy bytesToBeAvailable after evicting its blocks iterated so far EvictionDirCandidates dirCandidates = new EvictionDirCandidates(); Iterator<Long> it = getBlockIterator(); while (it.hasNext() && dirCandidates.candidateSize() < bytesToBeAvailable) { long blockId = it.next(); try { BlockMeta block = mManagerView.getBlockMeta(blockId); if (block != null) { // might not present in this view if (block.getBlockLocation().belongsTo(location)) { String tierAlias = block.getParentDir().getParentTier().getTierAlias(); int dirIndex = block.getParentDir().getDirIndex(); dirCandidates.add(mManagerView.getTierView(tierAlias).getDirView(dirIndex), blockId, block.getBlockSize()); } } } catch (BlockDoesNotExistException e) { LOG.warn("Remove block {} from evictor cache because {}", blockId, e); it.remove(); onRemoveBlockFromIterator(blockId); } } // 3. If there is no eligible StorageDirView, return null if (dirCandidates.candidateSize() < bytesToBeAvailable) { return null; } // 4. cascading eviction: try to allocate space in the next tier to move candidate blocks // there. If allocation fails, the next tier will continue to evict its blocks to free space. // Blocks are only evicted from the last tier or it can not be moved to the next tier. candidateDirView = dirCandidates.candidateDir(); List<Long> candidateBlocks = dirCandidates.candidateBlocks(); StorageTierView nextTierView = mManagerView.getNextTier(candidateDirView.getParentTierView()); if (nextTierView == null) { // This is the last tier, evict all the blocks. 
for (Long blockId : candidateBlocks) { try { BlockMeta block = mManagerView.getBlockMeta(blockId); if (block != null) { candidateDirView.markBlockMoveOut(blockId, block.getBlockSize()); plan.toEvict().add(new Pair<>(blockId, candidateDirView.toBlockStoreLocation())); } } catch (BlockDoesNotExistException e) { continue; } } } else { for (Long blockId : candidateBlocks) { try { BlockMeta block = mManagerView.getBlockMeta(blockId); if (block == null) { continue; } StorageDirView nextDirView = mAllocator.allocateBlockWithView( Sessions.MIGRATE_DATA_SESSION_ID, block.getBlockSize(), BlockStoreLocation.anyDirInTier(nextTierView.getTierViewAlias()), mManagerView); if (nextDirView == null) { nextDirView = cascadingEvict(block.getBlockSize(), BlockStoreLocation.anyDirInTier(nextTierView.getTierViewAlias()), plan); } if (nextDirView == null) { // If we failed to find a dir in the next tier to move this block, evict it and // continue. Normally this should not happen. plan.toEvict().add(new Pair<>(blockId, block.getBlockLocation())); candidateDirView.markBlockMoveOut(blockId, block.getBlockSize()); continue; } plan.toMove().add(new BlockTransferInfo(blockId, block.getBlockLocation(), nextDirView.toBlockStoreLocation())); candidateDirView.markBlockMoveOut(blockId, block.getBlockSize()); nextDirView.markBlockMoveIn(blockId, block.getBlockSize()); } catch (BlockDoesNotExistException e) { continue; } } } return candidateDirView; } @Override public EvictionPlan freeSpaceWithView(long bytesToBeAvailable, BlockStoreLocation location, BlockMetadataManagerView view) { mManagerView = view; List<BlockTransferInfo> toMove = new ArrayList<>(); List<Pair<Long, BlockStoreLocation>> toEvict = new ArrayList<>(); EvictionPlan plan = new EvictionPlan(toMove, toEvict); StorageDirView candidateDir = cascadingEvict(bytesToBeAvailable, location, plan); mManagerView.clearBlockMarks(); if (candidateDir == null) { return null; } return plan; } /** * Returns an iterator for evictor cache blocks. 
The evictor is responsible for specifying the * iteration order using its own strategy. For example, {@link LRUEvictor} returns an iterator * that iterates through the block ids in LRU order. * * @return an iterator over the ids of the blocks in the evictor cache */ protected abstract Iterator<Long> getBlockIterator(); /** * Performs additional cleanup when a block is removed from the iterator returned by * {@link #getBlockIterator()}. */ protected void onRemoveBlockFromIterator(long blockId) {} /** * Updates the block store location if the evictor wants to free space in a specific location. For * example, {@link PartialLRUEvictor} always evicts blocks from a dir with max free space. * * @param bytesToBeAvailable bytes to be available after eviction * @param location the original block store location * @return the updated block store location */ protected BlockStoreLocation updateBlockStoreLocation(long bytesToBeAvailable, BlockStoreLocation location) { return location; } }
apache-2.0
forGGe/kaa
server/common/dao/src/test/java/org/kaaproject/kaa/server/common/dao/service/ConfigurationServiceImplTest.java
19793
/* * Copyright 2014-2016 CyberVision, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaaproject.kaa.server.common.dao.service; import java.io.IOException; import java.util.List; import org.apache.avro.generic.GenericContainer; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.kaaproject.kaa.common.avro.GenericAvroConverter; import org.kaaproject.kaa.common.dto.ApplicationDto; import org.kaaproject.kaa.common.dto.ChangeConfigurationNotification; import org.kaaproject.kaa.common.dto.ConfigurationDto; import org.kaaproject.kaa.common.dto.ConfigurationRecordDto; import org.kaaproject.kaa.common.dto.ConfigurationSchemaDto; import org.kaaproject.kaa.common.dto.EndpointGroupDto; import org.kaaproject.kaa.common.dto.StructureRecordDto; import org.kaaproject.kaa.common.dto.UpdateStatus; import org.kaaproject.kaa.common.dto.VersionDto; import org.kaaproject.kaa.server.common.core.algorithms.generation.ConfigurationGenerationException; import org.kaaproject.kaa.server.common.core.algorithms.schema.SchemaCreationException; import org.kaaproject.kaa.server.common.core.algorithms.schema.SchemaGenerationAlgorithm; import org.kaaproject.kaa.server.common.core.algorithms.schema.SchemaGenerationAlgorithmFactory; import org.kaaproject.kaa.server.common.core.algorithms.schema.SchemaGenerationAlgorithmFactoryImpl; import org.kaaproject.kaa.server.common.core.schema.DataSchema; import 
org.kaaproject.kaa.server.common.core.schema.KaaSchema; import org.kaaproject.kaa.server.common.dao.AbstractTest; import org.kaaproject.kaa.server.common.dao.exception.IncorrectParameterException; import org.kaaproject.kaa.server.common.dao.exception.UpdateStatusConflictException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Ignore("This test should be extended and initialized with proper context in each NoSQL submodule") public class ConfigurationServiceImplTest extends AbstractTest { private static final Logger LOG = LoggerFactory.getLogger(ConfigurationServiceImplTest.class); private static final String INCORRECT_SQL_ID = "incorrect id"; @Before public void before() throws Exception { clearDBData(); } @Test(expected = IncorrectParameterException.class) public void saveConfigurationWithIncorrectIdTestFail() throws SchemaCreationException { ConfigurationDto configurationDto = new ConfigurationDto(); configurationDto.setId(INCORRECT_SQL_ID); configurationService.saveConfiguration(configurationDto); } @Test(expected = UpdateStatusConflictException.class) public void saveConfigurationWithIncorrectStatusTestFail() throws SchemaCreationException { List<ConfigurationDto> configurations = generateConfigurationDto(null, null, 1, true, false); ConfigurationDto configurationDto = configurationService.findConfigurationById(configurations.get(0).getId()); configurationService.saveConfiguration(configurationDto); } @Test public void saveConfigurationObjectWithIdTest() throws SchemaCreationException, IOException, ConfigurationGenerationException { List<ConfigurationDto> configs = generateConfigurationDto(null, null, 1, false, false); ConfigurationDto saved = configurationService.findConfigurationById(configs.get(0).getId()); ConfigurationDto updated = configurationService.saveConfiguration(saved); // update one more time (nothing should change) updated = configurationService.saveConfiguration(updated); Assert.assertNotNull(saved); 
Assert.assertEquals(updated.getStatus(), UpdateStatus.INACTIVE); } @Test public void saveConfigurationObjectWithoutIdTest() throws SchemaCreationException { List<ConfigurationDto> configs = generateConfigurationDto(null, null, 1, false, false); ConfigurationDto saved = configurationService.findConfigurationById(configs.get(0).getId()); String inactiveId = saved.getId(); saved.setId(null); ConfigurationDto updated = configurationService.saveConfiguration(saved); Assert.assertNotNull(saved); Assert.assertEquals(saved.getStatus(), UpdateStatus.INACTIVE); Assert.assertEquals(inactiveId, updated.getId()); } @Test public void saveConfigurationObjectWithoutInactiveConfigurationTest() throws SchemaCreationException { List<ConfigurationDto> configs = generateConfigurationDto(null, null, 3, true, false); ConfigurationDto saved = configurationService.findConfigurationById(configs.get(2).getId()); String id = saved.getId(); saved.setId(null); ConfigurationDto updated = configurationService.saveConfiguration(saved); Assert.assertNotNull(updated); Assert.assertEquals(updated.getStatus(), UpdateStatus.INACTIVE); Assert.assertEquals(saved.getSequenceNumber(), updated.getSequenceNumber()); Assert.assertNotEquals(id, updated.getId()); } @Test(expected = IncorrectParameterException.class) public void saveConfigurationObjectWithIncorrectSchemaIdTest() throws SchemaCreationException { List<ConfigurationDto> configurations = generateConfigurationDto(null, null, 1, false, false); ConfigurationDto configurationDto = configurationService.findConfigurationById(configurations.get(0).getId()); configurationDto.setId(null); configurationDto.setSchemaId(100500 + ""); configurationService.saveConfiguration(configurationDto); } @Test public void findConfSchemaByIdTest() { List<ConfigurationSchemaDto> schemas = generateConfSchemaDto(null, 1); ConfigurationSchemaDto schema = schemas.get(0); ConfigurationSchemaDto foundSchema = configurationService.findConfSchemaById(schema.getId()); 
Assert.assertNotNull(foundSchema); Assert.assertEquals(schema, foundSchema); } @Test(expected = IncorrectParameterException.class) public void findConfSchemaByIdTestFail() { configurationService.findConfSchemaById(INCORRECT_SQL_ID); } @Test public void findLatestConfigurationByAppIdTest() { List<ConfigurationDto> configurations = generateConfigurationDto(null, null, 1, false, false); ConfigurationDto expected = configurations.get(0); ConfigurationDto found = configurationService.findConfigurationByAppIdAndVersion(expected.getApplicationId(), 1); Assert.assertNotNull(found); } @Test(expected = IncorrectParameterException.class) public void findLatestConfigurationByAppIdTestFail() { configurationService.findConfigurationByAppIdAndVersion(INCORRECT_SQL_ID, 1); } @Test public void findConfigurationByIdTest() { List<ConfigurationDto> configurations = generateConfigurationDto(null, null, 1, false, false); ConfigurationDto configuration = configurations.get(0); ConfigurationDto foundConfiguration = configurationService.findConfigurationById(configuration.getId()); Assert.assertNotNull(foundConfiguration); Assert.assertEquals(configuration, foundConfiguration); } @Test(expected = IncorrectParameterException.class) public void findConfigurationByIdTestFail() { configurationService.findConfigurationById(INCORRECT_SQL_ID); } @Test public void activateConfiguration() { List<ConfigurationDto> configurations = generateConfigurationDto(null, null, 1, false, false); String configId = configurations.get(0).getId(); ConfigurationDto found = configurationService.findConfigurationById(configId); ChangeConfigurationNotification notification = configurationService.activateConfiguration(configId, null); Assert.assertNotNull(notification); ConfigurationDto dto = notification.getConfigurationDto(); Assert.assertNotNull(dto); Assert.assertEquals(dto.getStatus(), UpdateStatus.ACTIVE); Assert.assertEquals(dto.getId(), configId); Assert.assertNotEquals(dto.getSequenceNumber(), 
found.getSequenceNumber()); } @Test(expected = UpdateStatusConflictException.class) public void activateConfigurationTestFail() { List<ConfigurationDto> configurations = generateConfigurationDto(null, null, 1, true, false); ConfigurationDto configuration = configurations.get(0); configurationService.activateConfiguration(configuration.getId(), null); } @Test(expected = IncorrectParameterException.class) public void activateConfigurationWithIncorrectIdTestFail() { configurationService.activateConfiguration(INCORRECT_SQL_ID, null); } @Test public void findConfigurationsByEndpointGroupIdTest() { EndpointGroupDto group = generateEndpointGroupDto(null); List<ConfigurationDto> configurations = generateConfigurationDto(null, group.getId(), 1, true, false); List<ConfigurationDto> dtoList = configurationService.findConfigurationsByEndpointGroupId(group.getId()); Assert.assertNotNull(dtoList); Assert.assertEquals(configurations, dtoList); } @Test public void findConfSchemaByAppIdAndVersionTest() { String appId = generateApplicationDto().getId(); ConfigurationSchemaDto dto = configurationService.findConfSchemaByAppIdAndVersion(appId, 1); Assert.assertNotNull(dto); } @Test public void saveConfSchemaTest() throws SchemaCreationException, IOException { String id = generateConfSchemaDto(null, 1).get(0).getId(); ConfigurationSchemaDto schema = configurationService.findConfSchemaById(id); Assert.assertNotNull(schema); schema.setId(null); schema.setSchema(new DataSchema(readSchemaFileAsString("dao/configuration/default_schema.json")).getRawSchema()); ConfigurationSchemaDto saved = configurationService.saveConfSchema(schema); Assert.assertNotNull(saved); Assert.assertNotEquals(schema.getId(), saved.getId()); } @Test public void removeConfSchemasByAppIdTest() { ApplicationDto application = generateApplicationDto(); String appId = application.getId(); List<ConfigurationSchemaDto> dtoList = configurationService.findConfSchemasByAppId(appId); Assert.assertNotNull(dtoList); 
Assert.assertFalse(dtoList.isEmpty()); configurationService.removeConfSchemasByAppId(appId); dtoList = configurationService.findConfSchemasByAppId(appId); Assert.assertNotNull(dtoList); Assert.assertTrue(dtoList.isEmpty()); } @Test public void createApplicationTest() throws IOException { String schema = readSchemaFileAsString("dao/schema/testOverrideSchema.json"); String config = readSchemaFileAsString("dao/schema/testOverrideData.json"); GenericAvroConverter<GenericContainer> converter = new GenericAvroConverter<GenericContainer>(schema); GenericContainer container = converter.decodeJson(config); LOG.debug("JSON {}", container); LOG.debug("Converted JSON {} ", new String(converter.encodeToJsonBytes(container))); Assert.assertEquals(converter.encodeToJson(container), new String(converter.encodeToJsonBytes(container))); } @Test public void createSchemaTest() throws Exception { DataSchema schema = new DataSchema(readSchemaFileAsString("dao/schema/dataSchema.json")); SchemaGenerationAlgorithmFactory factory = new SchemaGenerationAlgorithmFactoryImpl(); SchemaGenerationAlgorithm generator = factory.createSchemaGenerator(schema); KaaSchema protocolSchema = generator.getProtocolSchema(); KaaSchema baseSchema = generator.getBaseSchema(); KaaSchema overrideSchema = generator.getOverrideSchema(); LOG.debug("Created Override schema JSON {} ", overrideSchema.getRawSchema()); LOG.debug("Created Base schema JSON {} ", baseSchema.getRawSchema()); LOG.debug("Created Protocol schema JSON {} ", protocolSchema.getRawSchema()); } @Test public void createDefaultSchemaTest() { String id = generateApplicationDto().getId(); ConfigurationSchemaDto schema = generateConfSchemaDto(id, 1).get(0); ConfigurationDto config = configurationService.findConfigurationByAppIdAndVersion(id, schema.getVersion()); Assert.assertEquals(config.getStatus(), UpdateStatus.ACTIVE); } @Test public void findDefaultConfigurationBySchemaIdTest() { ConfigurationSchemaDto schema = generateConfSchemaDto(null, 
1).get(0); ConfigurationDto configuration = configurationService.findDefaultConfigurationBySchemaId(schema.getId()); Assert.assertNotNull(configuration); Assert.assertEquals(UpdateStatus.ACTIVE, configuration.getStatus()); Assert.assertEquals(schema.getId(), configuration.getSchemaId()); } @Test public void findConfigurationByEndpointGroupIdAndVersionTest() { ConfigurationSchemaDto schema = generateConfSchemaDto(null, 1).get(0); String groupId = generateEndpointGroupDto(schema.getApplicationId()).getId(); ConfigurationDto config = generateConfigurationDto(schema.getId(), groupId, 1, true, false).get(0); ConfigurationDto configuration = configurationService.findConfigurationByEndpointGroupIdAndVersion(groupId, schema.getVersion()); Assert.assertNotNull(configuration); Assert.assertEquals(config, configuration); } @Test(expected = IncorrectParameterException.class) public void deactivateInactiveConfigurationTest() { ConfigurationDto config = generateConfigurationDto(null, null, 1, false, false).get(0); configurationService.deactivateConfiguration(config.getId(), null); } @Test(expected = IncorrectParameterException.class) public void deactivateIncorrectConfigurationTest() { configurationService.deactivateConfiguration(INCORRECT_SQL_ID, null); } @Test public void deactivateConfigurationTest() { ConfigurationDto config = generateConfigurationDto(null, null, 1, true, false).get(0); configurationService.deactivateConfiguration(config.getId(), null); config = configurationService.findConfigurationById(config.getId()); Assert.assertNotNull(config); Assert.assertEquals(UpdateStatus.DEPRECATED, config.getStatus()); } @Test public void deleteConfigurationRecordTest() { ConfigurationSchemaDto schemaDto = generateConfSchemaDto(null, 1).get(0); EndpointGroupDto group = generateEndpointGroupDto(schemaDto.getApplicationId()); generateConfigurationDto(schemaDto.getId(), group.getId(), 1, true, false); ChangeConfigurationNotification notification = 
configurationService.deleteConfigurationRecord(schemaDto.getId(), group.getId(), null); Assert.assertNotNull(notification); ConfigurationDto configurationDto = notification.getConfigurationDto(); Assert.assertEquals(UpdateStatus.DEPRECATED, configurationDto.getStatus()); StructureRecordDto<ConfigurationDto> records = configurationService .findConfigurationRecordBySchemaIdAndEndpointGroupId(schemaDto.getId(), group.getId()); Assert.assertNull(records.getInactiveStructureDto()); Assert.assertEquals(UpdateStatus.DEPRECATED, records.getActiveStructureDto().getStatus()); } @Test public void findAllConfigurationRecordsByEndpointGroupIdTest() { String id = generateApplicationDto().getId(); ConfigurationSchemaDto schema = generateConfSchemaDto(id, 1).get(0); EndpointGroupDto group = generateEndpointGroupDto(id); generateConfigurationDto(schema.getId(), group.getId(), 1, true, false); List<ConfigurationRecordDto> records = (List<ConfigurationRecordDto>) configurationService .findAllConfigurationRecordsByEndpointGroupId(group.getId(), false); Assert.assertNotNull(records); Assert.assertEquals(1, records.size()); ConfigurationDto activeConfiguration = records.get(0).getActiveStructureDto(); Assert.assertEquals(UpdateStatus.ACTIVE, activeConfiguration.getStatus()); ConfigurationDto inactiveConfiguration = records.get(0).getInactiveStructureDto(); Assert.assertNull(inactiveConfiguration); } @Test public void findConfigurationRecordBySchemaIdAndEndpointGroupIdTest() { ConfigurationSchemaDto schema = generateConfSchemaDto(null, 1).get(0); EndpointGroupDto group = generateEndpointGroupDto(schema.getApplicationId()); ConfigurationDto activeConfig = generateConfigurationDto(schema.getId(), group.getId(), 1, true, false).get(0); ConfigurationDto inactiveConfig = generateConfigurationDto(schema.getId(), group.getId(), 1, false, false).get(0); StructureRecordDto<ConfigurationDto> record = configurationService .findConfigurationRecordBySchemaIdAndEndpointGroupId(schema.getId(), 
group.getId()); Assert.assertEquals(activeConfig, record.getActiveStructureDto()); Assert.assertEquals(inactiveConfig, record.getInactiveStructureDto()); } @Test public void findVacantSchemasByEndpointGroupIdTest() { ApplicationDto application = generateApplicationDto(); List<ConfigurationSchemaDto> schemas = generateConfSchemaDto(application.getId(), 4); EndpointGroupDto groupOne = generateEndpointGroupDto(application.getId()); ConfigurationSchemaDto schemaOne = schemas.get(0); generateConfigurationDto(schemaOne.getId(), groupOne.getId(), 1, true, false); EndpointGroupDto groupTwo = generateEndpointGroupDto(application.getId()); List<VersionDto> schemasOne = configurationService.findVacantSchemasByEndpointGroupId(groupOne.getId()); Assert.assertFalse(schemasOne.isEmpty()); Assert.assertEquals(4, schemasOne.size()); List<VersionDto> schemasTwo = configurationService.findVacantSchemasByEndpointGroupId(groupTwo.getId()); Assert.assertFalse(schemasTwo.isEmpty()); Assert.assertEquals(5, schemasTwo.size()); } @Test public void findConfigurationSchemaVersionsByAppIdTest() { ConfigurationSchemaDto schemaDto = generateConfSchemaDto(null, 1).get(0); List<VersionDto> versions = configurationService.findConfigurationSchemaVersionsByAppId(schemaDto.getApplicationId()); Assert.assertFalse(versions.isEmpty()); Assert.assertEquals(2, versions.size()); Assert.assertEquals(versions.get(0).getVersion(), 1); Assert.assertEquals(versions.get(1).getVersion(), 2); } @Test(expected = IncorrectParameterException.class) public void validateConfigurationWithoutGroupIdTest() { ConfigurationDto configuration = new ConfigurationDto(); configuration.setSchemaId("Incorrect Id"); configurationService.saveConfiguration(configuration); } @Test(expected = IncorrectParameterException.class) public void validateConfigurationWithoutSchemaIdTest() { ConfigurationDto configuration = new ConfigurationDto(); configurationService.saveConfiguration(configuration); } }
apache-2.0
gunnarmorling/beanvalidation-tck
tests/src/main/java/org/hibernate/beanvalidation/tck/tests/constraints/application/method/OnlineCalendarService.java
1055
/**
 * Bean Validation TCK
 *
 * License: Apache License, Version 2.0
 * See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
 */
package org.hibernate.beanvalidation.tck.tests.constraints.application.method;

import static java.lang.annotation.ElementType.ANNOTATION_TYPE;
import static java.lang.annotation.ElementType.CONSTRUCTOR;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;

import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;

import javax.validation.Constraint;
import javax.validation.Payload;

/**
 * Custom constraint annotation applicable to methods, constructors and other
 * annotations, validated by {@link OnlineCalendarServiceValidator}.
 * <p>
 * Retained at runtime so the Bean Validation provider can discover it via
 * reflection when validating method/constructor declarations.
 *
 * @author Gunnar Morling
 */
@Constraint(validatedBy = OnlineCalendarServiceValidator.class)
@Target({ METHOD, CONSTRUCTOR, ANNOTATION_TYPE })
@Retention(RUNTIME)
@Documented
public @interface OnlineCalendarService {

	/** The interpolated message key reported on constraint violation. */
	String message() default "{validation.onlineCalendarService}";

	/** Validation groups this constraint belongs to (Bean Validation mandatory attribute). */
	Class<?>[] groups() default { };

	/** Payload metadata consumable by validation clients (Bean Validation mandatory attribute). */
	Class<? extends Payload>[] payload() default { };
}
apache-2.0
cfibmers/open-Autoscaler
api/src/main/java/org/cloudfoundry/autoscaler/api/nls/APIServerMessages_fr.java
4681
/**
 * WARNING! THIS FILE IS AUTOMATICALLY GENERATED! DO NOT MODIFY IT!
 * Generated on Fri Feb 26 16:36:10 CST 2016
 */
package org.cloudfoundry.autoscaler.api.nls;

/**
 * French ({@code fr}) message catalog for the API server, looked up through the
 * standard {@link java.util.ResourceBundle} mechanism via its
 * {@code APIServerMessages} base name.
 * <p>
 * NOTE(review): this file is generated — textual fixes belong in the source the
 * generator consumes, not here. In particular the key
 * {@code RestResponseErrorMsg_retrieve_org_sapce_information_context} contains
 * the typo "sapce", but it is a lookup key shared with the other locales and
 * must not be changed in isolation.
 */
public class APIServerMessages_fr extends java.util.ListResourceBundle {

	/** Returns the { key, French text } pairs backing this bundle. */
	public Object[][] getContents() {
		return resources;
	}

	// Message catalog. Entries whose text contains {0}-style placeholders use
	// doubled apostrophes ('') as required by java.text.MessageFormat escaping;
	// plain context strings use single apostrophes.
	private final static Object[][] resources= {
		{ "RestResponseErrorMsg_API_Server_retrieve_service_information_context", "Le serveur d'API extrait les informations sur le service" },
		{ "RestResponseErrorMsg_Create_Update_Policy_context", "Cr\u00e9er/mettre \u00e0 jour une strat\u00e9gie" },
		{ "RestResponseErrorMsg_Enable_Policy_context", "Activer une strat\u00e9gie" },
		{ "RestResponseErrorMsg_Get_Metric_Data_context", "Obtenir les donn\u00e9es de mesure" },
		{ "RestResponseErrorMsg_Get_Policy_context", "Obtenir une strat\u00e9gie" },
		{ "RestResponseErrorMsg_Get_Scaling_History_context", "Obtenir l'historique de mise \u00e0 l'\u00e9chelle" },
		{ "RestResponseErrorMsg_app_info_not_found_error", "CWSCV6008E: L''erreur suivante est survenue lors de l''extraction des informations pour l''application {0} : {1}." },
		{ "RestResponseErrorMsg_app_not_found_error", "CWSCV6007E: L''application est introuvable : {0}." },
		{ "RestResponseErrorMsg_attach_policy_in_Create_Policy_context", "association d'une strat\u00e9gie dans l'API Cr\u00e9er/mettre \u00e0 jour une strat\u00e9gie" },
		{ "RestResponseErrorMsg_cloud_error", "CWSCV6006E: L''appel des API CloudFoundry a \u00e9chou\u00e9 : {0}" },
		{ "RestResponseErrorMsg_create_policy_in_Create_Policy_context", "cr\u00e9ation d'une strat\u00e9gie dans l'API Cr\u00e9er/mettre \u00e0 jour une strat\u00e9gie" },
		{ "RestResponseErrorMsg_delete_policy_in_Delete_Policy_context", "suppression d'une strat\u00e9gie dans l'API Supprimer une strat\u00e9gie" },
		{ "RestResponseErrorMsg_detach_policy_in_Delete_Policy_context", "d\u00e9tachement d'une strat\u00e9gie dans l'API Cr\u00e9er/mettre \u00e0 jour une strat\u00e9gie" },
		{ "RestResponseErrorMsg_enable_policy_in_Enable_Policy_context", "activation d'une strat\u00e9gie dans l'API Activer une strat\u00e9gie" },
		{ "RestResponseErrorMsg_get_history_in_Get_History_context", "obtention de l'historique dans l'API Obtenir l'historique" },
		{ "RestResponseErrorMsg_get_metric_in_Get_Metric_context", "obtention des mesures dans l'API Obtenir les donn\u00e9es de mesure" },
		{ "RestResponseErrorMsg_get_policy_in_Get_Policy_context", "obtention d'une strat\u00e9gie dans l'API Obtenir une strat\u00e9gie" },
		{ "RestResponseErrorMsg_input_json_format_error", "CWSCV6003E: Erreur de format des cha\u00eenes JSON d''entr\u00e9e {0} dans le JSON d''entr\u00e9e pour l''API : {1}." },
		{ "RestResponseErrorMsg_input_json_format_location_error", "CWSCV6012E: Erreur de format \u00e0 la ligne {1} colonne {2} dans les cha\u00eenes JSON d''entr\u00e9e pour l''API: {0}." },
		{ "RestResponseErrorMsg_input_json_parse_error", "CWSCV6001E: Le serveur d''API ne peut pas analyser syntaxiquement les cha\u00eenes JSON d''entr\u00e9e pour l''API : {0}." },
		{ "RestResponseErrorMsg_internal_authentication_failed_error", "CWSCV6011E: L''authentification interne a \u00e9chou\u00e9 au cours de l''op\u00e9ration suivante : {0}." },
		{ "RestResponseErrorMsg_internal_server_error", "CWSCV6005E: Une erreur de serveur interne est survenue au cours de l''op\u00e9ration suivante : {0}." },
		{ "RestResponseErrorMsg_output_json_format_error", "CWSCV6004E: Erreur de format des cha\u00eenes JSON de sortie {0} dans le JSON de sortie pour l''API : {1}." },
		{ "RestResponseErrorMsg_output_json_parse_error", "CWSCV6002E: Le serveur d''API ne peut pas analyser syntaxiquement les cha\u00eenes JSON de sortie pour l''API : {0}." },
		{ "RestResponseErrorMsg_parse_input_json_context", "analyse syntaxique des cha\u00eenes JSON d'entr\u00e9e" },
		{ "RestResponseErrorMsg_policy_not_exist_error", "CWSCV6010E: La strat\u00e9gie pour l''application {0} est introuvable." },
		{ "RestResponseErrorMsg_retrieve_application_service_information_context", "extraction des informations sur le service d'application" },
		{ "RestResponseErrorMsg_retrieve_org_sapce_information_context", "extraction des informations sur l'organisation/l'espace" },
		{ "RestResponseErrorMsg_service_not_found_error", "CWSCV6009E: Le service {0} pour l''application {1} est introuvable." },
		{ "RestResponseErrorMsg_update_policy_in_Create_Policy_context", "mise \u00e0 jour de la strat\u00e9gie dans l'API Cr\u00e9er/mettre \u00e0 jour une strat\u00e9gie" }
	};
}
apache-2.0
grgrzybek/karaf
features/core/src/main/java/org/apache/karaf/features/internal/model/processing/ObjectFactory.java
1069
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.karaf.features.internal.model.processing;

import javax.xml.bind.annotation.XmlRegistry;

/**
 * JAXB object factory for the features-processing model package, registered
 * via {@link XmlRegistry} so the JAXB context can create root elements.
 */
@XmlRegistry
public class ObjectFactory {

    /**
     * Creates an empty {@link FeaturesProcessing} root element instance.
     *
     * @return a fresh {@code FeaturesProcessing}
     */
    public FeaturesProcessing createFeaturesProcessing() {
        FeaturesProcessing processing = new FeaturesProcessing();
        return processing;
    }
}
apache-2.0
skoulouzis/lobcder
milton2/milton-server-ce/src/main/java/io/milton/http/fs/SimpleLockManager.java
5398
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.milton.http.fs;

import io.milton.http.LockManager;
import io.milton.http.LockInfo;
import io.milton.http.LockResult;
import io.milton.http.LockTimeout;
import io.milton.http.LockToken;
import io.milton.resource.LockableResource;
import io.milton.http.exceptions.NotAuthorizedException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * In-memory {@link LockManager} implementation. Keys on getUniqueID of the
 * locked resource, with a secondary index by lock token id so refresh and
 * unlock are O(1).
 * <p>
 * Thread-safety: every public method is synchronized on this instance.
 * (Fix: {@code getCurrentToken} previously read the maps without holding the
 * monitor, racing against concurrent lock/unlock calls.)
 */
public class SimpleLockManager implements LockManager {

    private static final Logger log = LoggerFactory.getLogger(SimpleLockManager.class);

    /** Current locks keyed by the unique id of the locked resource. */
    Map<String, CurrentLock> locksByUniqueId;
    /** The same lock entries, keyed by lock token id. */
    Map<String, CurrentLock> locksByToken;

    public SimpleLockManager() {
        locksByUniqueId = new HashMap<String, CurrentLock>();
        locksByToken = new HashMap<String, CurrentLock>();
    }

    /**
     * Attempts to lock the given resource with a newly generated token id.
     *
     * @return a successful result carrying the new token, or a failure with
     *         {@code ALREADY_LOCKED} if an unexpired lock exists
     */
    @Override
    public synchronized LockResult lock(LockTimeout timeout, LockInfo lockInfo, LockableResource r) {
        String token = UUID.randomUUID().toString();
        return lock(timeout, lockInfo, r, token);
    }

    /**
     * Records a lock under the supplied token id. Must be called while holding
     * this instance's monitor (all callers are synchronized methods).
     */
    private LockResult lock(LockTimeout timeout, LockInfo lockInfo, LockableResource r, String token) {
        // Refuse if an unexpired lock is already held on the resource.
        LockToken currentLock = currentLock(r);
        if (currentLock != null) {
            return LockResult.failed(LockResult.FailureReason.ALREADY_LOCKED);
        }
        LockToken newToken = new LockToken(token, lockInfo, timeout);
        CurrentLock newLock = new CurrentLock(r.getUniqueId(), newToken, lockInfo.lockedByUser);
        locksByUniqueId.put(r.getUniqueId(), newLock);
        locksByToken.put(newToken.tokenId, newLock);
        return LockResult.success(newToken);
    }

    /**
     * Refreshes the timeout of an existing lock. If the token is unknown
     * (e.g. it expired or the server restarted), a new one-hour exclusive
     * write lock is created re-using the client's token id rather than
     * failing the refresh.
     */
    @Override
    public synchronized LockResult refresh(String tokenId, LockableResource resource) {
        CurrentLock curLock = locksByToken.get(tokenId);
        if (curLock == null || curLock.token == null) {
            log.warn("attempt to refresh missing token: {} on resource: {} will create a new lock",
                    tokenId, resource.getName());
            LockTimeout timeout = new LockTimeout(60 * 60l);
            LockInfo lockInfo = new LockInfo(LockInfo.LockScope.EXCLUSIVE, LockInfo.LockType.WRITE,
                    tokenId, LockInfo.LockDepth.ZERO);
            return lock(timeout, lockInfo, resource, tokenId);
        }
        // Restart the timeout window from now.
        curLock.token.setFrom(new Date());
        return LockResult.success(curLock.token);
    }

    /**
     * Releases the lock on {@code r} if {@code tokenId} matches the current
     * lock; a no-op if the resource is not locked.
     *
     * @throws NotAuthorizedException if the resource is locked under a
     *         different token
     */
    @Override
    public synchronized void unlock(String tokenId, LockableResource r) throws NotAuthorizedException {
        LockToken lockToken = currentLock(r);
        if (lockToken == null) {
            log.debug("not locked");
            return;
        }
        if (lockToken.tokenId.equals(tokenId)) {
            removeLock(lockToken);
        } else {
            throw new NotAuthorizedException(r);
        }
    }

    /**
     * Returns the live token for the resource, lazily evicting it if expired.
     * Must be called while holding this instance's monitor.
     */
    private LockToken currentLock(LockableResource resource) {
        CurrentLock curLock = locksByUniqueId.get(resource.getUniqueId());
        if (curLock == null) {
            return null;
        }
        LockToken token = curLock.token;
        if (token.isExpired()) {
            removeLock(token);
            return null;
        } else {
            return token;
        }
    }

    /** Drops the lock from both indexes. Must be called under the monitor. */
    private void removeLock(LockToken token) {
        log.debug("removeLock: {}", token.tokenId);
        CurrentLock currentLock = locksByToken.get(token.tokenId);
        if (currentLock != null) {
            locksByUniqueId.remove(currentLock.id);
            locksByToken.remove(currentLock.token.tokenId);
        } else {
            log.warn("couldnt find lock: {}", token.tokenId);
        }
    }

    /**
     * Returns a detached copy of the current lock token for {@code r}, or
     * {@code null} if the resource is not locked.
     * <p>
     * Fix: now synchronized — this method reads both lock maps and previously
     * did so without the monitor held, which is unsafe against concurrent
     * lock/refresh/unlock calls.
     * <p>
     * NOTE(review): unlike {@link #currentLock}, this does not filter out
     * expired locks; that behavior is preserved as-is — confirm whether
     * callers rely on seeing expired tokens.
     */
    public synchronized LockToken getCurrentToken(LockableResource r) {
        CurrentLock lock = locksByUniqueId.get(r.getUniqueId());
        if (lock == null) {
            return null;
        }
        LockToken token = new LockToken();
        token.info = new LockInfo(LockInfo.LockScope.EXCLUSIVE, LockInfo.LockType.WRITE,
                lock.lockedByUser, LockInfo.LockDepth.ZERO);
        token.info.lockedByUser = lock.lockedByUser;
        token.timeout = lock.token.timeout;
        token.tokenId = lock.token.tokenId;
        return token;
    }

    /** Immutable pairing of resource id, token and owning user for one lock. */
    class CurrentLock {

        final String id;
        final LockToken token;
        final String lockedByUser;

        public CurrentLock(String id, LockToken token, String lockedByUser) {
            this.id = id;
            this.token = token;
            this.lockedByUser = lockedByUser;
        }
    }
}
apache-2.0
NibiruOS/ui
org.nibiru.ui.ios/src/main/java/org/nibiru/ui/ios/widget/IOSPopup.java
1155
package org.nibiru.ui.ios.widget; import org.nibiru.ui.core.api.Popup; import org.nibiru.ui.core.api.Viewport; import org.nibiru.ui.core.impl.BasePopup; import javax.inject.Inject; import apple.uikit.UIView; public class IOSPopup extends BasePopup<Overlay, UIView> implements Popup { @Inject public IOSPopup(Viewport viewport) { super(Overlay.create(), viewport); } @Override public void show() { control.show(); requestLayout(); } @Override public void hide() { control.hide(); } @Override public void setAutoHide(boolean autoHide) { control.setAutoHide(autoHide); } @Override public void requestLayout() { super.requestLayout(); control.centerContent(); } @Override protected void setNativeContent(UIView nativeContent) { control.setContent(nativeContent); control.centerContent(); } @Override protected void setNativeSize(int width, int height) { if (getContent() != null) { WidgetUtils.setNativeSize((UIView) getContent().asNative(), width, height); } } }
apache-2.0
TangHao1987/intellij-community
platform/diff-impl/src/com/intellij/diff/tools/simple/SimpleDiffViewer.java
28600
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.diff.tools.simple; import com.intellij.diff.DiffContext; import com.intellij.diff.actions.BufferedLineIterator; import com.intellij.diff.actions.NavigationContextChecker; import com.intellij.diff.comparison.DiffTooBigException; import com.intellij.diff.fragments.LineFragment; import com.intellij.diff.requests.ContentDiffRequest; import com.intellij.diff.requests.DiffRequest; import com.intellij.diff.tools.util.*; import com.intellij.diff.tools.util.base.HighlightPolicy; import com.intellij.diff.tools.util.base.TextDiffViewerUtil; import com.intellij.diff.tools.util.side.TwosideTextDiffViewer; import com.intellij.diff.util.*; import com.intellij.diff.util.DiffUserDataKeysEx.ScrollToPolicy; import com.intellij.icons.AllIcons; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.actionSystem.Separator; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.diff.DiffNavigationContext; import com.intellij.openapi.editor.Caret; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.event.DocumentEvent; import com.intellij.openapi.editor.ex.EditorEx; 
import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.UserDataHolder; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.Function; import org.jetbrains.annotations.*; import javax.swing.*; import java.awt.*; import java.util.ArrayList; import java.util.BitSet; import java.util.Iterator; import java.util.List; import static com.intellij.diff.util.DiffUtil.getLineCount; public class SimpleDiffViewer extends TwosideTextDiffViewer { public static final Logger LOG = Logger.getInstance(SimpleDiffViewer.class); @NotNull private final SyncScrollSupport.SyncScrollable mySyncScrollable; @NotNull private final PrevNextDifferenceIterable myPrevNextDifferenceIterable; @NotNull private final StatusPanel myStatusPanel; @NotNull private final List<SimpleDiffChange> myDiffChanges = new ArrayList<SimpleDiffChange>(); @NotNull private final List<SimpleDiffChange> myInvalidDiffChanges = new ArrayList<SimpleDiffChange>(); @NotNull private final MyFoldingModel myFoldingModel; @NotNull private final MyInitialScrollHelper myInitialScrollHelper = new MyInitialScrollHelper(); @NotNull private final ModifierProvider myModifierProvider; public SimpleDiffViewer(@NotNull DiffContext context, @NotNull DiffRequest request) { super(context, (ContentDiffRequest)request); mySyncScrollable = new MySyncScrollable(); myPrevNextDifferenceIterable = new MyPrevNextDifferenceIterable(); myStatusPanel = new MyStatusPanel(); myFoldingModel = new MyFoldingModel(getEditors(), this); myModifierProvider = new ModifierProvider(); } @Override @CalledInAwt protected void onInit() { super.onInit(); myContentPanel.setPainter(new MyDividerPainter()); myModifierProvider.init(); } @Override @CalledInAwt protected void onDispose() { destroyChangedBlocks(); 
super.onDispose(); } @NotNull @Override protected List<AnAction> createToolbarActions() { List<AnAction> group = new ArrayList<AnAction>(); group.add(new MyIgnorePolicySettingAction()); group.add(new MyHighlightPolicySettingAction()); group.add(new MyToggleExpandByDefaultAction()); group.add(new MyToggleAutoScrollAction()); group.add(new MyReadOnlyLockAction()); group.add(myEditorSettingsAction); return group; } @Nullable @Override protected List<AnAction> createPopupActions() { List<AnAction> group = new ArrayList<AnAction>(); group.add(Separator.getInstance()); group.add(new MyIgnorePolicySettingAction().getPopupGroup()); group.add(Separator.getInstance()); group.add(new MyHighlightPolicySettingAction().getPopupGroup()); group.add(Separator.getInstance()); group.add(new MyToggleAutoScrollAction()); group.add(new MyToggleExpandByDefaultAction()); return group; } @NotNull @Override protected List<AnAction> createEditorPopupActions() { List<AnAction> group = new ArrayList<AnAction>(); group.add(new ReplaceSelectedChangesAction()); group.add(new AppendSelectedChangesAction()); group.add(new RevertSelectedChangesAction()); group.add(Separator.getInstance()); group.addAll(super.createEditorPopupActions()); return group; } @Override @CalledInAwt protected void processContextHints() { super.processContextHints(); myInitialScrollHelper.processContext(myRequest); } @Override @CalledInAwt protected void updateContextHints() { super.updateContextHints(); myFoldingModel.updateContext(myRequest, getFoldingModelSettings()); myInitialScrollHelper.updateContext(myRequest); } // // Diff // @NotNull public FoldingModelSupport.Settings getFoldingModelSettings() { return TextDiffViewerUtil.getFoldingModelSettings(myContext); } @Override protected void onSlowRediff() { super.onSlowRediff(); myStatusPanel.setBusy(true); myInitialScrollHelper.onSlowRediff(); } @Override @NotNull protected Runnable performRediff(@NotNull final ProgressIndicator indicator) { try { 
indicator.checkCanceled(); final Document document1 = getContent1().getDocument(); final Document document2 = getContent2().getDocument(); CharSequence[] texts = ApplicationManager.getApplication().runReadAction(new Computable<CharSequence[]>() { @Override public CharSequence[] compute() { return new CharSequence[]{document1.getImmutableCharSequence(), document2.getImmutableCharSequence()}; } }); List<LineFragment> lineFragments = null; if (getHighlightPolicy().isShouldCompare()) { lineFragments = DiffUtil.compare(texts[0], texts[1], getDiffConfig(), indicator); } boolean isEqualContents = (lineFragments == null || lineFragments.isEmpty()) && StringUtil.equals(document1.getCharsSequence(), document2.getCharsSequence()); return apply(new CompareData(lineFragments, isEqualContents)); } catch (DiffTooBigException e) { return applyNotification(DiffNotifications.DIFF_TOO_BIG); } catch (ProcessCanceledException e) { throw e; } catch (Throwable e) { LOG.error(e); return applyNotification(DiffNotifications.ERROR); } } @NotNull private Runnable apply(@NotNull final CompareData data) { return new Runnable() { @Override public void run() { myFoldingModel.updateContext(myRequest, getFoldingModelSettings()); clearDiffPresentation(); if (data.isEqualContent()) myPanel.addNotification(DiffNotifications.EQUAL_CONTENTS); if (data.getFragments() != null) { for (LineFragment fragment : data.getFragments()) { myDiffChanges.add(new SimpleDiffChange(SimpleDiffViewer.this, fragment, getHighlightPolicy().isFineFragments())); } } myFoldingModel.install(data.getFragments(), myRequest, getFoldingModelSettings()); myInitialScrollHelper.onRediff(); myContentPanel.repaintDivider(); myStatusPanel.update(); } }; } @NotNull private Runnable applyNotification(@Nullable final JComponent notification) { return new Runnable() { @Override public void run() { clearDiffPresentation(); if (notification != null) myPanel.addNotification(notification); } }; } private void clearDiffPresentation() { 
myStatusPanel.setBusy(false); myPanel.resetNotifications(); destroyChangedBlocks(); } @NotNull private DiffUtil.DiffConfig getDiffConfig() { return new DiffUtil.DiffConfig(getTextSettings().getIgnorePolicy(), getHighlightPolicy()); } @NotNull private HighlightPolicy getHighlightPolicy() { return getTextSettings().getHighlightPolicy(); } // // Impl // private void destroyChangedBlocks() { for (SimpleDiffChange change : myDiffChanges) { change.destroyHighlighter(); } myDiffChanges.clear(); for (SimpleDiffChange change : myInvalidDiffChanges) { change.destroyHighlighter(); } myInvalidDiffChanges.clear(); myFoldingModel.destroy(); myContentPanel.repaintDivider(); myStatusPanel.update(); } @Override @CalledInAwt protected void onBeforeDocumentChange(@NotNull DocumentEvent e) { super.onBeforeDocumentChange(e); if (myDiffChanges.isEmpty()) return; Side side = null; if (e.getDocument() == getEditor(Side.LEFT).getDocument()) side = Side.LEFT; if (e.getDocument() == getEditor(Side.RIGHT).getDocument()) side = Side.RIGHT; if (side == null) { LOG.warn("Unknown document changed"); return; } int line1 = e.getDocument().getLineNumber(e.getOffset()); int line2 = e.getDocument().getLineNumber(e.getOffset() + e.getOldLength()) + 1; int shift = DiffUtil.countLinesShift(e); List<SimpleDiffChange> invalid = new ArrayList<SimpleDiffChange>(); for (SimpleDiffChange change : myDiffChanges) { if (change.processChange(line1, line2, shift, side)) { invalid.add(change); } } if (!invalid.isEmpty()) { myDiffChanges.removeAll(invalid); myInvalidDiffChanges.addAll(invalid); } } @Override protected void onDocumentChange(@NotNull DocumentEvent e) { super.onDocumentChange(e); myFoldingModel.onDocumentChanged(e); } @CalledInAwt protected boolean doScrollToChange(@NotNull ScrollToPolicy scrollToPolicy) { SimpleDiffChange targetChange = scrollToPolicy.select(myDiffChanges); if (targetChange == null) return false; doScrollToChange(targetChange, false); return true; } private void 
doScrollToChange(@NotNull SimpleDiffChange change, final boolean animated) { final int line1 = change.getStartLine(Side.LEFT); final int line2 = change.getStartLine(Side.RIGHT); final int endLine1 = change.getEndLine(Side.LEFT); final int endLine2 = change.getEndLine(Side.RIGHT); DiffUtil.moveCaret(getEditor1(), line1); DiffUtil.moveCaret(getEditor2(), line2); getSyncScrollSupport().makeVisible(getCurrentSide(), line1, endLine1, line2, endLine2, animated); } protected boolean doScrollToContext(@NotNull DiffNavigationContext context) { ChangedLinesIterator changedLinesIterator = new ChangedLinesIterator(Side.RIGHT); NavigationContextChecker checker = new NavigationContextChecker(changedLinesIterator, context); int line = checker.contextMatchCheck(); if (line == -1) { // this will work for the case, when spaces changes are ignored, and corresponding fragments are not reported as changed // just try to find target line -> +- AllLinesIterator allLinesIterator = new AllLinesIterator(Side.RIGHT); NavigationContextChecker checker2 = new NavigationContextChecker(allLinesIterator, context); line = checker2.contextMatchCheck(); } if (line == -1) return false; scrollToLine(Side.RIGHT, line); return true; } // // Getters // @NotNull protected List<SimpleDiffChange> getDiffChanges() { return myDiffChanges; } @NotNull @Override protected SyncScrollSupport.SyncScrollable getSyncScrollable() { return mySyncScrollable; } @NotNull @Override protected JComponent getStatusPanel() { return myStatusPanel; } @NotNull public ModifierProvider getModifierProvider() { return myModifierProvider; } @NotNull @Override public SyncScrollSupport.TwosideSyncScrollSupport getSyncScrollSupport() { //noinspection ConstantConditions return super.getSyncScrollSupport(); } // // Misc // @SuppressWarnings("MethodOverridesStaticMethodOfSuperclass") public static boolean canShowRequest(@NotNull DiffContext context, @NotNull DiffRequest request) { return TwosideTextDiffViewer.canShowRequest(context, 
request); } @NotNull @CalledInAwt private List<SimpleDiffChange> getSelectedChanges(@NotNull Side side) { final BitSet lines = DiffUtil.getSelectedLines(getEditor(side)); List<SimpleDiffChange> affectedChanges = new ArrayList<SimpleDiffChange>(); for (int i = myDiffChanges.size() - 1; i >= 0; i--) { SimpleDiffChange change = myDiffChanges.get(i); int line1 = change.getStartLine(side); int line2 = change.getEndLine(side); if (DiffUtil.isSelectedByLine(lines, line1, line2)) { affectedChanges.add(change); } } return affectedChanges; } @Nullable @CalledInAwt private SimpleDiffChange getSelectedChange(@NotNull Side side) { int caretLine = getEditor(side).getCaretModel().getLogicalPosition().line; for (SimpleDiffChange change : myDiffChanges) { int line1 = change.getStartLine(side); int line2 = change.getEndLine(side); if (DiffUtil.isSelectedByLine(caretLine, line1, line2)) return change; } return null; } // // Actions // private class MyPrevNextDifferenceIterable extends PrevNextDifferenceIterableBase<SimpleDiffChange> { @NotNull @Override protected List<SimpleDiffChange> getChanges() { return myDiffChanges; } @NotNull @Override protected EditorEx getEditor() { return getCurrentEditor(); } @Override protected int getStartLine(@NotNull SimpleDiffChange change) { return change.getStartLine(getCurrentSide()); } @Override protected int getEndLine(@NotNull SimpleDiffChange change) { return change.getEndLine(getCurrentSide()); } @Override protected void scrollToChange(@NotNull SimpleDiffChange change) { doScrollToChange(change, true); } } private class MyReadOnlyLockAction extends TextDiffViewerUtil.EditorReadOnlyLockAction { public MyReadOnlyLockAction() { super(getContext(), getEditableEditors()); } @Override protected void doApply(boolean readOnly) { super.doApply(readOnly); for (SimpleDiffChange change : myDiffChanges) { change.updateGutterActions(true); } } } // // Modification operations // private abstract class ApplySelectedChangesActionBase extends AnAction 
implements DumbAware { private final boolean myModifyOpposite; public ApplySelectedChangesActionBase(@Nullable String text, @Nullable String description, @Nullable Icon icon, boolean modifyOpposite) { super(text, description, icon); myModifyOpposite = modifyOpposite; } @Override public void update(@NotNull AnActionEvent e) { Editor editor = e.getData(CommonDataKeys.EDITOR); if (editor != getEditor1() && editor != getEditor2()) { e.getPresentation().setEnabledAndVisible(false); return; } Side side = Side.fromLeft(editor == getEditor(Side.LEFT)); Editor modifiedEditor = getEditor(side.other(myModifyOpposite)); if (!DiffUtil.isEditable(modifiedEditor)) { e.getPresentation().setEnabledAndVisible(false); return; } e.getPresentation().setIcon(getIcon(side)); e.getPresentation().setVisible(true); e.getPresentation().setEnabled(isSomeChangeSelected(side)); } @Override public void actionPerformed(@NotNull final AnActionEvent e) { Editor editor = e.getRequiredData(CommonDataKeys.EDITOR); final Side side = Side.fromLeft(editor == getEditor(Side.LEFT)); final List<SimpleDiffChange> selectedChanges = getSelectedChanges(side); Editor modifiedEditor = getEditor(side.other(myModifyOpposite)); String title = e.getPresentation().getText() + " selected changes"; DiffUtil.executeWriteCommand(modifiedEditor.getDocument(), e.getProject(), title, new Runnable() { @Override public void run() { apply(side, selectedChanges); } }); } protected boolean isSomeChangeSelected(@NotNull Side side) { if (myDiffChanges.isEmpty()) return false; EditorEx editor = getEditor(side); List<Caret> carets = editor.getCaretModel().getAllCarets(); if (carets.size() != 1) return true; Caret caret = carets.get(0); if (caret.hasSelection()) return true; int line = editor.getDocument().getLineNumber(editor.getExpectedCaretOffset()); for (SimpleDiffChange change : myDiffChanges) { if (change.isSelectedByLine(line, side)) return true; } return false; } @NotNull protected abstract Icon getIcon(@NotNull Side side); 
@CalledWithWriteLock protected abstract void apply(@NotNull Side side, @NotNull List<SimpleDiffChange> changes); } private class ReplaceSelectedChangesAction extends ApplySelectedChangesActionBase { public ReplaceSelectedChangesAction() { super("Replace", null, AllIcons.Diff.Arrow, true); } @NotNull @Override protected Icon getIcon(@NotNull Side side) { return side.isLeft() ? AllIcons.Diff.ArrowRight : AllIcons.Diff.Arrow; } @Override protected void apply(@NotNull Side side, @NotNull List<SimpleDiffChange> changes) { for (SimpleDiffChange change : changes) { replaceChange(change, side); } } } private class AppendSelectedChangesAction extends ApplySelectedChangesActionBase { public AppendSelectedChangesAction() { super("Insert", null, AllIcons.Diff.ArrowLeftDown, true); } @NotNull @Override protected Icon getIcon(@NotNull Side side) { return side.isLeft() ? AllIcons.Diff.ArrowRightDown : AllIcons.Diff.ArrowLeftDown; } @Override protected void apply(@NotNull Side side, @NotNull List<SimpleDiffChange> changes) { for (SimpleDiffChange change : changes) { appendChange(change, side); } } } private class RevertSelectedChangesAction extends ApplySelectedChangesActionBase { public RevertSelectedChangesAction() { super("Revert", null, AllIcons.Diff.Remove, false); } @NotNull @Override protected Icon getIcon(@NotNull Side side) { return AllIcons.Diff.Remove; } @Override protected void apply(@NotNull Side side, @NotNull List<SimpleDiffChange> changes) { for (SimpleDiffChange change : changes) { replaceChange(change, side.other()); } } } @CalledWithWriteLock public void replaceChange(@NotNull SimpleDiffChange change, @NotNull final Side sourceSide) { if (!change.isValid()) return; Side outputSide = sourceSide.other(); DiffUtil.applyModification(getEditor(outputSide).getDocument(), change.getStartLine(outputSide), change.getEndLine(outputSide), getEditor(sourceSide).getDocument(), change.getStartLine(sourceSide), change.getEndLine(sourceSide)); change.destroyHighlighter(); 
myDiffChanges.remove(change); } @CalledWithWriteLock public void appendChange(@NotNull SimpleDiffChange change, @NotNull final Side sourceSide) { if (!change.isValid()) return; if (change.getStartLine(sourceSide) == change.getEndLine(sourceSide)) return; Side outputSide = sourceSide.other(); DiffUtil.applyModification(getEditor(outputSide).getDocument(), change.getEndLine(outputSide), change.getEndLine(outputSide), getEditor(sourceSide).getDocument(), change.getStartLine(sourceSide), change.getEndLine(sourceSide)); change.destroyHighlighter(); myDiffChanges.remove(change); } private class MyHighlightPolicySettingAction extends TextDiffViewerUtil.HighlightPolicySettingAction { public MyHighlightPolicySettingAction() { super(getTextSettings()); } @Override protected void onSettingsChanged() { rediff(); } } private class MyIgnorePolicySettingAction extends TextDiffViewerUtil.IgnorePolicySettingAction { public MyIgnorePolicySettingAction() { super(getTextSettings()); } @Override protected void onSettingsChanged() { rediff(); } } private class MyToggleExpandByDefaultAction extends TextDiffViewerUtil.ToggleExpandByDefaultAction { public MyToggleExpandByDefaultAction() { super(getTextSettings()); } @Override protected void expandAll(boolean expand) { myFoldingModel.expandAll(expand); } } // // Scroll from annotate // private class AllLinesIterator implements Iterator<Pair<Integer, CharSequence>> { @NotNull private final Side mySide; @NotNull private final Document myDocument; private int myLine = 0; private AllLinesIterator(@NotNull Side side) { mySide = side; myDocument = getEditor(mySide).getDocument(); } @Override public boolean hasNext() { return myLine < getLineCount(myDocument); } @Override public Pair<Integer, CharSequence> next() { int offset1 = myDocument.getLineStartOffset(myLine); int offset2 = myDocument.getLineEndOffset(myLine); CharSequence text = myDocument.getImmutableCharSequence().subSequence(offset1, offset2); Pair<Integer, CharSequence> pair = new 
Pair<Integer, CharSequence>(myLine, text); myLine++; return pair; } @Override public void remove() { throw new UnsupportedOperationException(); } } private class ChangedLinesIterator extends BufferedLineIterator { @NotNull private final Side mySide; private int myIndex = 0; private ChangedLinesIterator(@NotNull Side side) { mySide = side; init(); } @Override public boolean hasNextBlock() { return myIndex < myDiffChanges.size(); } @Override public void loadNextBlock() { SimpleDiffChange change = myDiffChanges.get(myIndex); myIndex++; int line1 = change.getStartLine(mySide); int line2 = change.getEndLine(mySide); Document document = getEditor(mySide).getDocument(); for (int i = line1; i < line2; i++) { int offset1 = document.getLineStartOffset(i); int offset2 = document.getLineEndOffset(i); CharSequence text = document.getImmutableCharSequence().subSequence(offset1, offset2); addLine(i, text); } } } // // Helpers // @Nullable @Override public Object getData(@NonNls String dataId) { if (DiffDataKeys.PREV_NEXT_DIFFERENCE_ITERABLE.is(dataId)) { return myPrevNextDifferenceIterable; } else if (DiffDataKeys.CURRENT_CHANGE_RANGE.is(dataId)) { SimpleDiffChange change = getSelectedChange(getCurrentSide()); if (change != null) { return new LineRange(change.getStartLine(getCurrentSide()), change.getEndLine(getCurrentSide())); } } return super.getData(dataId); } private class MySyncScrollable extends BaseSyncScrollable { @Override public boolean isSyncScrollEnabled() { return getTextSettings().isEnableSyncScroll(); } public int transfer(@NotNull Side baseSide, int line) { if (myDiffChanges.isEmpty()) { return line; } return super.transfer(baseSide, line); } @Override protected void processHelper(@NotNull ScrollHelper helper) { if (!helper.process(0, 0)) return; for (SimpleDiffChange diffChange : myDiffChanges) { if (!helper.process(diffChange.getStartLine(Side.LEFT), diffChange.getStartLine(Side.RIGHT))) return; if (!helper.process(diffChange.getEndLine(Side.LEFT), 
diffChange.getEndLine(Side.RIGHT))) return; } helper.process(getEditor1().getDocument().getLineCount(), getEditor2().getDocument().getLineCount()); } } private class MyDividerPainter implements DiffSplitter.Painter, DiffDividerDrawUtil.DividerPaintable { @Override public void paint(@NotNull Graphics g, @NotNull JComponent divider) { Graphics2D gg = DiffDividerDrawUtil.getDividerGraphics(g, divider, getEditor1().getComponent()); gg.setColor(DiffDrawUtil.getDividerColor(getEditor1())); gg.fill(gg.getClipBounds()); //DividerPolygonUtil.paintSimplePolygons(gg, divider.getWidth(), getEditor1(), getEditor2(), this); DiffDividerDrawUtil.paintPolygons(gg, divider.getWidth(), getEditor1(), getEditor2(), this); myFoldingModel.paintOnDivider(gg, divider); gg.dispose(); } @Override public void process(@NotNull Handler handler) { for (SimpleDiffChange diffChange : myDiffChanges) { if (!handler.process(diffChange.getStartLine(Side.LEFT), diffChange.getEndLine(Side.LEFT), diffChange.getStartLine(Side.RIGHT), diffChange.getEndLine(Side.RIGHT), diffChange.getDiffType().getColor(getEditor1()))) { return; } } } } private class MyStatusPanel extends StatusPanel { @Override protected int getChangesCount() { return myDiffChanges.size() + myInvalidDiffChanges.size(); } } private static class CompareData { @Nullable private final List<LineFragment> myFragments; private final boolean myEqualContent; public CompareData(@Nullable List<LineFragment> fragments, boolean equalContent) { myFragments = fragments; myEqualContent = equalContent; } @Nullable public List<LineFragment> getFragments() { return myFragments; } public boolean isEqualContent() { return myEqualContent; } } public class ModifierProvider extends KeyboardModifierListener { public void init() { init(myPanel, SimpleDiffViewer.this); } @Override public void onModifiersChanged() { for (SimpleDiffChange change : myDiffChanges) { change.updateGutterActions(false); } } } private static class MyFoldingModel extends FoldingModelSupport 
{ private final MyPaintable myPaintable = new MyPaintable(0, 1); public MyFoldingModel(@NotNull List<? extends EditorEx> editors, @NotNull Disposable disposable) { super(editors.toArray(new EditorEx[2]), disposable); } public void install(@Nullable final List<LineFragment> fragments, @NotNull UserDataHolder context, @NotNull FoldingModelSupport.Settings settings) { Iterator<int[]> it = map(fragments, new Function<LineFragment, int[]>() { @Override public int[] fun(LineFragment fragment) { return new int[]{ fragment.getStartLine1(), fragment.getEndLine1(), fragment.getStartLine2(), fragment.getEndLine2()}; } }); install(it, context, settings); } public void paintOnDivider(@NotNull Graphics2D gg, @NotNull Component divider) { myPaintable.paintOnDivider(gg, divider); } } private class MyInitialScrollHelper extends MyInitialScrollPositionHelper { @Override protected boolean doScrollToChange() { if (myScrollToChange == null) return false; return SimpleDiffViewer.this.doScrollToChange(myScrollToChange); } @Override protected boolean doScrollToFirstChange() { return SimpleDiffViewer.this.doScrollToChange(ScrollToPolicy.FIRST_CHANGE); } @Override protected boolean doScrollToContext() { if (myNavigationContext == null) return false; return SimpleDiffViewer.this.doScrollToContext(myNavigationContext); } } }
apache-2.0
apache/samza
samza-core/src/main/java/org/apache/samza/table/batching/AsyncBatchingTable.java
6955
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.samza.table.batching;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;

import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ScheduledExecutorService;

import org.apache.samza.SamzaException;
import org.apache.samza.context.Context;
import org.apache.samza.storage.kv.Entry;
import org.apache.samza.table.AsyncReadWriteUpdateTable;
import org.apache.samza.table.utils.TableMetricsUtil;
import org.apache.samza.util.HighResolutionClock;

/**
 * A wrapper of a {@link AsyncReadWriteUpdateTable} that supports batch operations.
 *
 * This batching table does not guarantee any ordering of different operation types within the batch.
 * For instance, if query(Q) and put/delete(U) operations arrive in the following sequence, Q1, U1, Q2, U2,
 * it does not mean the remote data store will receive the messages in the same order. Instead,
 * the operations will be grouped by type and sent via micro batches. For this sequence, Q1 and Q2 will
 * be grouped to micro batch B1; U1 and U2 will be grouped to micro batch B2, and the implementation class
 * can decide the order of the micro batches.
 *
 * Synchronized table operations (get/put/update/delete) should be used with caution for the batching feature.
 * If the table is used by a single thread, there will be at most one operation in the batch, and the
 * batch will be performed when the TTL of the batch window expires. Batching does not make sense in this scenario.
 *
 * The Batch implementation class can throw {@link BatchingNotSupportedException} if it thinks the operation is
 * not batch-able. When receiving this exception, {@link AsyncBatchingTable} will send the operation directly to
 * the underlying {@link AsyncReadWriteUpdateTable}.
 *
 * Note: only the single-key operations (get/put/update/delete) participate in batching; the bulk
 * operations (getAll/putAll/updateAll/deleteAll) and the free-form read/write operations are always
 * delegated straight to the underlying table.
 *
 * @param <K> The type of the key.
 * @param <V> The type of the value.
 * @param <U> the type of the update applied to this table
 */
public class AsyncBatchingTable<K, V, U> implements AsyncReadWriteUpdateTable<K, V, U> {
  /** The underlying table that ultimately serves all operations. */
  private final AsyncReadWriteUpdateTable<K, V, U> table;
  private final String tableId;
  /** Factory for batch instances; decides batch policy (size/TTL) and batch-ability. */
  private final BatchProvider<K, V, U> batchProvider;
  /** Executor used to fire the batch-window timer. */
  private final ScheduledExecutorService batchTimerExecutorService;
  // Created lazily in init() because it needs metrics/clock from the Context;
  // null until init() has been called.
  private BatchProcessor<K, V, U> batchProcessor;

  /**
   * @param tableId The id of the table.
   * @param table The target table that serves the batch operations.
   * @param batchProvider Batch provider to create a batch instance.
   * @param batchTimerExecutorService Executor service for batch timer.
   */
  public AsyncBatchingTable(String tableId, AsyncReadWriteUpdateTable<K, V, U> table,
      BatchProvider<K, V, U> batchProvider, ScheduledExecutorService batchTimerExecutorService) {
    Preconditions.checkNotNull(tableId);
    Preconditions.checkNotNull(table);
    Preconditions.checkNotNull(batchProvider);
    Preconditions.checkNotNull(batchTimerExecutorService);

    this.tableId = tableId;
    this.table = table;
    this.batchProvider = batchProvider;
    this.batchTimerExecutorService = batchTimerExecutorService;
  }

  /**
   * Queues the get in the current batch; falls back to a direct table read when the
   * batch implementation rejects the operation as not batch-able.
   */
  @Override
  public CompletableFuture<V> getAsync(K key, Object... args) {
    try {
      return batchProcessor.processQueryOperation(new GetOperation<>(key, args));
    } catch (BatchingNotSupportedException e) {
      return table.getAsync(key, args);
    } catch (Exception e) {
      throw new SamzaException(e);
    }
  }

  /** Bulk gets bypass batching and go straight to the underlying table. */
  @Override
  public CompletableFuture<Map<K, V>> getAllAsync(List<K> keys, Object... args) {
    // FIX: forward args — they were previously dropped, so any implementation-specific
    // read arguments never reached the underlying table for bulk gets.
    return table.getAllAsync(keys, args);
  }

  /** Free-form reads bypass batching. */
  @Override
  public <T> CompletableFuture<T> readAsync(int opId, Object ... args) {
    return table.readAsync(opId, args);
  }

  /**
   * Queues the put in the current batch; falls back to a direct table write when the
   * batch implementation rejects the operation as not batch-able.
   */
  @Override
  public CompletableFuture<Void> putAsync(K key, V value, Object... args) {
    try {
      return batchProcessor.processPutDeleteOrUpdateOperations(new PutOperation<>(key, value, args));
    } catch (BatchingNotSupportedException e) {
      return table.putAsync(key, value, args);
    } catch (Exception e) {
      throw new SamzaException(e);
    }
  }

  /** Bulk puts bypass batching and go straight to the underlying table. */
  @Override
  public CompletableFuture<Void> putAllAsync(List<Entry<K, V>> entries, Object... args) {
    // FIX: forward args — previously dropped (see getAllAsync).
    return table.putAllAsync(entries, args);
  }

  /**
   * Queues the update in the current batch; falls back to a direct table update when the
   * batch implementation rejects the operation as not batch-able.
   */
  @Override
  public CompletableFuture<Void> updateAsync(K key, U update) {
    try {
      return batchProcessor.processPutDeleteOrUpdateOperations(new UpdateOperation<>(key, update));
    } catch (BatchingNotSupportedException e) {
      return table.updateAsync(key, update);
    } catch (Exception e) {
      throw new SamzaException(e);
    }
  }

  /** Bulk updates bypass batching. */
  @Override
  public CompletableFuture<Void> updateAllAsync(List<Entry<K, U>> updates) {
    return table.updateAllAsync(updates);
  }

  /**
   * Queues the delete in the current batch; falls back to a direct table delete when the
   * batch implementation rejects the operation as not batch-able.
   */
  @Override
  public CompletableFuture<Void> deleteAsync(K key, Object... args) {
    try {
      return batchProcessor.processPutDeleteOrUpdateOperations(new DeleteOperation<>(key, args));
    } catch (BatchingNotSupportedException e) {
      return table.deleteAsync(key, args);
    } catch (Exception e) {
      throw new SamzaException(e);
    }
  }

  /** Bulk deletes bypass batching and go straight to the underlying table. */
  @Override
  public CompletableFuture<Void> deleteAllAsync(List<K> keys, Object... args) {
    // FIX: forward args — previously dropped (see getAllAsync).
    return table.deleteAllAsync(keys, args);
  }

  /**
   * Initializes the underlying table and creates the batch processor.
   * Must be called before any get/put/update/delete operation.
   */
  @Override
  public void init(Context context) {
    table.init(context);
    final TableMetricsUtil metricsUtil = new TableMetricsUtil(context, this, tableId);
    createBatchProcessor(TableMetricsUtil.mayCreateHighResolutionClock(context.getJobContext().getConfig()),
        new BatchMetrics(metricsUtil));
  }

  /** Free-form writes bypass batching. */
  @Override
  public <T> CompletableFuture<T> writeAsync(int opId, Object ... args) {
    return table.writeAsync(opId, args);
  }

  @Override
  public void flush() {
    table.flush();
  }

  @Override
  public void close() {
    // batchProcessor is only created in init(); guard so that close() without a prior
    // init() (e.g. during teardown after a failed startup) does not NPE.
    if (batchProcessor != null) {
      batchProcessor.stop();
    }
    table.close();
  }

  @VisibleForTesting
  void createBatchProcessor(HighResolutionClock clock, BatchMetrics batchMetrics) {
    batchProcessor = new BatchProcessor<>(batchMetrics, new TableBatchHandler<>(table),
        batchProvider, clock, batchTimerExecutorService);
  }

  @VisibleForTesting
  BatchProcessor<K, V, U> getBatchProcessor() {
    return batchProcessor;
  }
}
apache-2.0
dahlstrom-g/intellij-community
java/java-tests/testData/inspection/java9CollectionFactory/beforeHashSetAsListRepeating.java
282
// "Replace with 'Set.of' call" "false" import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Set; public class Test { static final String CONST = "b"; public static final Set<String> MY_SET = Set.o<caret>f("a", "b", "c", CONST); }
apache-2.0
goodwinnk/intellij-community
platform/diff-impl/src/com/intellij/diff/tools/fragmented/UnifiedDiffViewer.java
47029
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.diff.tools.fragmented; import com.intellij.diff.DiffContext; import com.intellij.diff.actions.AllLinesIterator; import com.intellij.diff.actions.BufferedLineIterator; import com.intellij.diff.actions.impl.OpenInEditorWithMouseAction; import com.intellij.diff.actions.impl.SetEditorSettingsAction; import com.intellij.diff.comparison.DiffTooBigException; import com.intellij.diff.contents.DocumentContent; import com.intellij.diff.fragments.LineFragment; import com.intellij.diff.requests.ContentDiffRequest; import com.intellij.diff.requests.DiffRequest; import com.intellij.diff.tools.util.*; import com.intellij.diff.tools.util.base.InitialScrollPositionSupport; import com.intellij.diff.tools.util.base.ListenerDiffViewerBase; import com.intellij.diff.tools.util.base.TextDiffSettingsHolder.TextDiffSettings; import com.intellij.diff.tools.util.base.TextDiffViewerUtil; import com.intellij.diff.tools.util.side.TwosideTextDiffViewer; import com.intellij.diff.tools.util.text.TwosideTextDiffProvider; import com.intellij.diff.util.*; import com.intellij.diff.util.DiffUserDataKeysEx.ScrollToPolicy; import com.intellij.icons.AllIcons; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.command.undo.UndoManager; import com.intellij.openapi.diff.DiffBundle; import com.intellij.openapi.diff.LineTokenizer; import com.intellij.openapi.editor.*; import com.intellij.openapi.editor.actionSystem.EditorActionManager; import com.intellij.openapi.editor.actionSystem.ReadonlyFragmentModificationHandler; import com.intellij.openapi.editor.colors.EditorColors; import com.intellij.openapi.editor.event.DocumentEvent; import 
com.intellij.openapi.editor.event.DocumentListener; import com.intellij.openapi.editor.ex.EditorEx; import com.intellij.openapi.editor.highlighter.EditorHighlighter; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.UserDataHolder; import com.intellij.pom.Navigatable; import com.intellij.util.containers.ContainerUtil; import gnu.trove.TIntFunction; import org.jetbrains.annotations.*; import javax.swing.*; import java.util.*; import static com.intellij.diff.util.DiffUtil.getLinesContent; public class UnifiedDiffViewer extends ListenerDiffViewerBase { @NotNull protected final EditorEx myEditor; @NotNull protected final Document myDocument; @NotNull private final UnifiedDiffPanel myPanel; @NotNull private final SetEditorSettingsAction myEditorSettingsAction; @NotNull private final PrevNextDifferenceIterable myPrevNextDifferenceIterable; @NotNull private final MyStatusPanel myStatusPanel; @NotNull private final MyInitialScrollHelper myInitialScrollHelper = new MyInitialScrollHelper(); @NotNull private final MyFoldingModel myFoldingModel; @NotNull private final TwosideTextDiffProvider.NoIgnore myTextDiffProvider; @NotNull protected Side myMasterSide = Side.RIGHT; @Nullable private ChangedBlockData myChangedBlockData; private final boolean[] myForceReadOnlyFlags; private boolean myReadOnlyLockSet = false; private boolean myDuringOnesideDocumentModification; private boolean myDuringTwosideDocumentModification; private boolean myStateIsOutOfDate; // whether something was changed since last rediff private boolean mySuppressEditorTyping; // our state is inconsistent. 
No typing can be handled correctly public UnifiedDiffViewer(@NotNull DiffContext context, @NotNull DiffRequest request) { super(context, (ContentDiffRequest)request); myPrevNextDifferenceIterable = new MyPrevNextDifferenceIterable(); myStatusPanel = new MyStatusPanel(); myForceReadOnlyFlags = TextDiffViewerUtil.checkForceReadOnly(myContext, myRequest); boolean leftEditable = isEditable(Side.LEFT, false); boolean rightEditable = isEditable(Side.RIGHT, false); if (leftEditable && !rightEditable) myMasterSide = Side.LEFT; if (!leftEditable && rightEditable) myMasterSide = Side.RIGHT; myDocument = EditorFactory.getInstance().createDocument(""); myEditor = DiffUtil.createEditor(myDocument, myProject, true, true); List<JComponent> titles = DiffUtil.createTextTitles(myRequest, ContainerUtil.list(myEditor, myEditor)); UnifiedContentPanel contentPanel = new UnifiedContentPanel(titles, myEditor); myPanel = new UnifiedDiffPanel(myProject, contentPanel, this, myContext); myFoldingModel = new MyFoldingModel(myEditor, this); myEditorSettingsAction = new SetEditorSettingsAction(getTextSettings(), getEditors()); myEditorSettingsAction.applyDefaults(); myTextDiffProvider = DiffUtil.createNoIgnoreTextDiffProvider(getProject(), getRequest(), getTextSettings(), this::rediff, this); new MyOpenInEditorWithMouseAction().install(getEditors()); TextDiffViewerUtil.checkDifferentDocuments(myRequest); DiffUtil.registerAction(new ReplaceSelectedChangesAction(Side.LEFT, true), myPanel); DiffUtil.registerAction(new AppendSelectedChangesAction(Side.LEFT, true), myPanel); DiffUtil.registerAction(new ReplaceSelectedChangesAction(Side.RIGHT, true), myPanel); DiffUtil.registerAction(new AppendSelectedChangesAction(Side.RIGHT, true), myPanel); } @Override @CalledInAwt protected void onInit() { super.onInit(); installEditorListeners(); installTypingSupport(); myPanel.setLoadingContent(); // We need loading panel only for initial rediff() 
myPanel.setPersistentNotifications(DiffUtil.getCustomNotifications(myContext, myRequest)); } @Override @CalledInAwt protected void onDispose() { super.onDispose(); EditorFactory.getInstance().releaseEditor(myEditor); } @Override @CalledInAwt protected void processContextHints() { super.processContextHints(); Side side = DiffUtil.getUserData(myRequest, myContext, DiffUserDataKeys.MASTER_SIDE); if (side != null) myMasterSide = side; myInitialScrollHelper.processContext(myRequest); } @Override @CalledInAwt protected void updateContextHints() { super.updateContextHints(); myInitialScrollHelper.updateContext(myRequest); myFoldingModel.updateContext(myRequest, getFoldingModelSettings()); } @CalledInAwt protected void updateEditorCanBeTyped() { myEditor.setViewer(mySuppressEditorTyping || !isEditable(myMasterSide, true)); } private void installTypingSupport() { if (!isEditable(myMasterSide, false)) return; updateEditorCanBeTyped(); myEditor.getColorsScheme().setColor(EditorColors.READONLY_FRAGMENT_BACKGROUND_COLOR, null); // guarded blocks EditorActionManager.getInstance().setReadonlyFragmentModificationHandler(myDocument, new MyReadonlyFragmentModificationHandler()); myDocument.putUserData(UndoManager.ORIGINAL_DOCUMENT, getDocument(myMasterSide)); // use undo of master document myDocument.addDocumentListener(new MyOnesideDocumentListener()); } @NotNull @Override @CalledInAwt public List<AnAction> createToolbarActions() { List<AnAction> group = new ArrayList<>(myTextDiffProvider.getToolbarActions()); group.add(new MyToggleExpandByDefaultAction()); group.add(new MyReadOnlyLockAction()); group.add(myEditorSettingsAction); group.add(Separator.getInstance()); group.addAll(super.createToolbarActions()); return group; } @NotNull @Override @CalledInAwt public List<AnAction> createPopupActions() { List<AnAction> group = new ArrayList<>(myTextDiffProvider.getPopupActions()); group.add(new MyToggleExpandByDefaultAction()); group.add(Separator.getInstance()); 
group.addAll(super.createPopupActions()); return group; } @NotNull protected List<AnAction> createEditorPopupActions() { List<AnAction> group = new ArrayList<>(); if (isEditable(Side.RIGHT, false)) { group.add(new ReplaceSelectedChangesAction(Side.LEFT, false)); group.add(new ReplaceSelectedChangesAction(Side.RIGHT, false)); } group.add(Separator.getInstance()); group.addAll(TextDiffViewerUtil.createEditorPopupActions()); return group; } @CalledInAwt protected void installEditorListeners() { new TextDiffViewerUtil.EditorActionsPopup(createEditorPopupActions()).install(getEditors()); } // // Diff // @Override @CalledInAwt protected void onSlowRediff() { super.onSlowRediff(); myStatusPanel.setBusy(true); } @Override @NotNull protected Runnable performRediff(@NotNull final ProgressIndicator indicator) { try { indicator.checkCanceled(); final Document document1 = getContent1().getDocument(); final Document document2 = getContent2().getDocument(); final CharSequence[] texts = ReadAction.compute(() -> { return new CharSequence[]{document1.getImmutableCharSequence(), document2.getImmutableCharSequence()}; }); final List<LineFragment> fragments = myTextDiffProvider.compare(texts[0], texts[1], indicator); final DocumentContent content1 = getContent1(); final DocumentContent content2 = getContent2(); indicator.checkCanceled(); TwosideDocumentData data = ReadAction.compute(() -> { indicator.checkCanceled(); UnifiedFragmentBuilder builder = new UnifiedFragmentBuilder(fragments, document1, document2, myMasterSide); builder.exec(); indicator.checkCanceled(); EditorHighlighter highlighter = buildHighlighter(myProject, content1, content2, texts[0], texts[1], builder.getRanges(), builder.getText().length()); UnifiedEditorRangeHighlighter rangeHighlighter = new UnifiedEditorRangeHighlighter(myProject, document1, document2, builder.getRanges()); return new TwosideDocumentData(builder, highlighter, rangeHighlighter); }); UnifiedFragmentBuilder builder = data.getBuilder(); FileType 
fileType = content2.getContentType() == null ? content1.getContentType() : content2.getContentType(); LineNumberConvertor convertor1 = builder.getConvertor1(); LineNumberConvertor convertor2 = builder.getConvertor2(); List<LineRange> changedLines = builder.getChangedLines(); boolean isContentsEqual = builder.isEqual(); CombinedEditorData editorData = new CombinedEditorData(builder.getText(), data.getHighlighter(), data.getRangeHighlighter(), fileType, convertor1.createConvertor(), convertor2.createConvertor()); return apply(editorData, builder.getBlocks(), convertor1, convertor2, changedLines, isContentsEqual); } catch (DiffTooBigException e) { return () -> { clearDiffPresentation(); myPanel.setTooBigContent(); }; } catch (ProcessCanceledException e) { throw e; } catch (Throwable e) { LOG.error(e); return () -> { clearDiffPresentation(); myPanel.setErrorContent(); }; } } private void clearDiffPresentation() { myPanel.resetNotifications(); myStatusPanel.setBusy(false); destroyChangedBlockData(); myStateIsOutOfDate = false; mySuppressEditorTyping = false; updateEditorCanBeTyped(); } @CalledInAwt protected void markSuppressEditorTyping() { mySuppressEditorTyping = true; updateEditorCanBeTyped(); } @CalledInAwt protected void markStateIsOutOfDate() { myStateIsOutOfDate = true; if (myChangedBlockData != null) { for (UnifiedDiffChange diffChange : myChangedBlockData.getDiffChanges()) { diffChange.updateGutterActions(); } } } @Nullable private EditorHighlighter buildHighlighter(@Nullable Project project, @NotNull DocumentContent content1, @NotNull DocumentContent content2, @NotNull CharSequence text1, @NotNull CharSequence text2, @NotNull List<HighlightRange> ranges, int textLength) { EditorHighlighter highlighter1 = DiffUtil.initEditorHighlighter(project, content1, text1); EditorHighlighter highlighter2 = DiffUtil.initEditorHighlighter(project, content2, text2); if (highlighter1 == null && highlighter2 == null) return null; if (highlighter1 == null) highlighter1 = 
DiffUtil.initEmptyEditorHighlighter(text1); if (highlighter2 == null) highlighter2 = DiffUtil.initEmptyEditorHighlighter(text2); return new UnifiedEditorHighlighter(myDocument, highlighter1, highlighter2, ranges, textLength); } @NotNull private Runnable apply(@NotNull final CombinedEditorData data, @NotNull final List<ChangedBlock> blocks, @NotNull final LineNumberConvertor convertor1, @NotNull final LineNumberConvertor convertor2, @NotNull final List<LineRange> changedLines, final boolean isContentsEqual) { return () -> { myFoldingModel.updateContext(myRequest, getFoldingModelSettings()); LineCol oldCaretPosition = LineCol.fromOffset(myDocument, myEditor.getCaretModel().getPrimaryCaret().getOffset()); Pair<int[], Side> oldCaretLineTwoside = transferLineFromOneside(oldCaretPosition.line); clearDiffPresentation(); if (isContentsEqual) { boolean equalCharsets = TextDiffViewerUtil.areEqualCharsets(getContents()); boolean equalSeparators = TextDiffViewerUtil.areEqualLineSeparators(getContents()); myPanel.addNotification(DiffNotifications.createEqualContents(equalCharsets, equalSeparators)); } TIntFunction foldingLineConvertor = myFoldingModel.getLineNumberConvertor(); TIntFunction contentConvertor1 = DiffUtil.getContentLineConvertor(getContent1()); TIntFunction contentConvertor2 = DiffUtil.getContentLineConvertor(getContent2()); myEditor.getGutterComponentEx().setLineNumberConvertor( mergeLineConverters(contentConvertor1, data.getLineConvertor1(), foldingLineConvertor), mergeLineConverters(contentConvertor2, data.getLineConvertor2(), foldingLineConvertor)); ApplicationManager.getApplication().runWriteAction(() -> { myDuringOnesideDocumentModification = true; try { myDocument.setText(data.getText()); } finally { myDuringOnesideDocumentModification = false; } }); if (data.getHighlighter() != null) myEditor.setHighlighter(data.getHighlighter()); DiffUtil.setEditorCodeStyle(myProject, myEditor, data.getFileType()); if (data.getRangeHighlighter() != null) 
data.getRangeHighlighter().apply(myProject, myDocument); ArrayList<UnifiedDiffChange> diffChanges = new ArrayList<>(blocks.size()); for (ChangedBlock block : blocks) { diffChanges.add(new UnifiedDiffChange(this, block)); } List<RangeMarker> guarderRangeBlocks = new ArrayList<>(); if (!myEditor.isViewer()) { for (ChangedBlock block : blocks) { LineRange range = myMasterSide.select(block.getRange2(), block.getRange1()); if (range.isEmpty()) continue; TextRange textRange = DiffUtil.getLinesRange(myDocument, range.start, range.end); guarderRangeBlocks.add(createGuardedBlock(textRange.getStartOffset(), textRange.getEndOffset())); } int textLength = myDocument.getTextLength(); // there are 'fake' newline at the very end guarderRangeBlocks.add(createGuardedBlock(textLength, textLength)); } myChangedBlockData = new ChangedBlockData(diffChanges, guarderRangeBlocks, convertor1, convertor2, isContentsEqual); int newCaretLine = transferLineToOneside(oldCaretLineTwoside.second, oldCaretLineTwoside.second.select(oldCaretLineTwoside.first)); myEditor.getCaretModel().moveToOffset(LineCol.toOffset(myDocument, newCaretLine, oldCaretPosition.column)); myFoldingModel.install(changedLines, myRequest, getFoldingModelSettings()); myInitialScrollHelper.onRediff(); myStatusPanel.update(); myPanel.setGoodContent(); myEditor.getGutterComponentEx().revalidateMarkup(); }; } @NotNull private RangeMarker createGuardedBlock(int start, int end) { RangeMarker block = myDocument.createGuardedBlock(start, end); block.setGreedyToLeft(true); block.setGreedyToRight(true); return block; } private static TIntFunction mergeLineConverters(@Nullable TIntFunction contentConvertor, @NotNull TIntFunction unifiedConvertor, @NotNull TIntFunction foldingConvertor) { return DiffUtil.mergeLineConverters(DiffUtil.mergeLineConverters(contentConvertor, unifiedConvertor), foldingConvertor); } /* * This convertor returns -1 if exact matching is impossible */ @CalledInAwt public int transferLineToOnesideStrict(@NotNull 
Side side, int line) { if (myChangedBlockData == null) return -1; return myChangedBlockData.getLineNumberConvertor(side).convertInv(line); } /* * This convertor returns -1 if exact matching is impossible */ @CalledInAwt public int transferLineFromOnesideStrict(@NotNull Side side, int line) { if (myChangedBlockData == null) return -1; return myChangedBlockData.getLineNumberConvertor(side).convert(line); } /* * This convertor returns 'good enough' position, even if exact matching is impossible */ @CalledInAwt public int transferLineToOneside(@NotNull Side side, int line) { if (myChangedBlockData == null) return line; return myChangedBlockData.getLineNumberConvertor(side).convertApproximateInv(line); } /* * This convertor returns 'good enough' position, even if exact matching is impossible */ @CalledInAwt @NotNull public Pair<int[], Side> transferLineFromOneside(int line) { int[] lines = new int[2]; if (myChangedBlockData == null) { lines[0] = line; lines[1] = line; return Pair.create(lines, myMasterSide); } LineNumberConvertor lineConvertor1 = myChangedBlockData.getLineNumberConvertor(Side.LEFT); LineNumberConvertor lineConvertor2 = myChangedBlockData.getLineNumberConvertor(Side.RIGHT); Side side = myMasterSide; lines[0] = lineConvertor1.convert(line); lines[1] = lineConvertor2.convert(line); if (lines[0] == -1 && lines[1] == -1) { lines[0] = lineConvertor1.convertApproximate(line); lines[1] = lineConvertor2.convertApproximate(line); } else if (lines[0] == -1) { lines[0] = lineConvertor1.convertApproximate(line); side = Side.RIGHT; } else if (lines[1] == -1) { lines[1] = lineConvertor2.convertApproximate(line); side = Side.LEFT; } return Pair.create(lines, side); } @CalledInAwt private void destroyChangedBlockData() { if (myChangedBlockData == null) return; for (UnifiedDiffChange change : myChangedBlockData.getDiffChanges()) { change.destroyHighlighter(); } for (RangeMarker block : myChangedBlockData.getGuardedRangeBlocks()) { myDocument.removeGuardedBlock(block); } 
myChangedBlockData = null; UnifiedEditorRangeHighlighter.erase(myProject, myDocument); myFoldingModel.destroy(); myStatusPanel.update(); } // // Typing // private class MyOnesideDocumentListener implements DocumentListener { @Override public void beforeDocumentChange(@NotNull DocumentEvent e) { if (myDuringOnesideDocumentModification) return; if (myChangedBlockData == null) { LOG.warn("oneside beforeDocumentChange - myChangedBlockData == null"); return; } // TODO: modify Document guard range logic - we can handle case, when whole read-only block is modified (ex: my replacing selection). try { myDuringTwosideDocumentModification = true; Document twosideDocument = getDocument(myMasterSide); LineCol onesideStartPosition = LineCol.fromOffset(myDocument, e.getOffset()); LineCol onesideEndPosition = LineCol.fromOffset(myDocument, e.getOffset() + e.getOldLength()); int line1 = onesideStartPosition.line; int line2 = onesideEndPosition.line + 1; int shift = DiffUtil.countLinesShift(e); int twosideStartLine = transferLineFromOnesideStrict(myMasterSide, onesideStartPosition.line); int twosideEndLine = transferLineFromOnesideStrict(myMasterSide, onesideEndPosition.line); if (twosideStartLine == -1 || twosideEndLine == -1) { // this should never happen logDebugInfo(e, onesideStartPosition, onesideEndPosition, twosideStartLine, twosideEndLine); markSuppressEditorTyping(); return; } int twosideStartOffset = twosideDocument.getLineStartOffset(twosideStartLine) + onesideStartPosition.column; int twosideEndOffset = twosideDocument.getLineStartOffset(twosideEndLine) + onesideEndPosition.column; twosideDocument.replaceString(twosideStartOffset, twosideEndOffset, e.getNewFragment()); for (UnifiedDiffChange change : myChangedBlockData.getDiffChanges()) { change.processChange(line1, line2, shift); } LineNumberConvertor masterConvertor = myChangedBlockData.getLineNumberConvertor(myMasterSide); LineNumberConvertor slaveConvertor = 
myChangedBlockData.getLineNumberConvertor(myMasterSide.other()); masterConvertor.handleMasterChange(line1, line2, shift, true); slaveConvertor.handleMasterChange(line1, line2, shift, false); } finally { // TODO: we can avoid marking state out-of-date in some simple cases (like in SimpleDiffViewer) // but this will greatly increase complexity, so let's wait if it's actually required by users markStateIsOutOfDate(); scheduleRediff(); myDuringTwosideDocumentModification = false; } } private void logDebugInfo(DocumentEvent e, LineCol onesideStartPosition, LineCol onesideEndPosition, int twosideStartLine, int twosideEndLine) { StringBuilder info = new StringBuilder(); Document document1 = getDocument(Side.LEFT); Document document2 = getDocument(Side.RIGHT); info.append("==== UnifiedDiffViewer Debug Info ===="); info.append("myMasterSide - ").append(myMasterSide).append('\n'); info.append("myLeftDocument.length() - ").append(document1.getTextLength()).append('\n'); info.append("myRightDocument.length() - ").append(document2.getTextLength()).append('\n'); info.append("myDocument.length() - ").append(myDocument.getTextLength()).append('\n'); info.append("e.getOffset() - ").append(e.getOffset()).append('\n'); info.append("e.getNewLength() - ").append(e.getNewLength()).append('\n'); info.append("e.getOldLength() - ").append(e.getOldLength()).append('\n'); info.append("onesideStartPosition - ").append(onesideStartPosition).append('\n'); info.append("onesideEndPosition - ").append(onesideEndPosition).append('\n'); info.append("twosideStartLine - ").append(twosideStartLine).append('\n'); info.append("twosideEndLine - ").append(twosideEndLine).append('\n'); Pair<int[], Side> pair1 = transferLineFromOneside(onesideStartPosition.line); Pair<int[], Side> pair2 = transferLineFromOneside(onesideEndPosition.line); info.append("non-strict transferStartLine - ").append(pair1.first[0]).append("-").append(pair1.first[1]) .append(":").append(pair1.second).append('\n'); 
info.append("non-strict transferEndLine - ").append(pair2.first[0]).append("-").append(pair2.first[1]) .append(":").append(pair2.second).append('\n'); info.append("---- UnifiedDiffViewer Debug Info ----"); LOG.warn(info.toString()); } } @Override protected void onDocumentChange(@NotNull DocumentEvent e) { if (myDuringTwosideDocumentModification) return; markStateIsOutOfDate(); markSuppressEditorTyping(); scheduleRediff(); } // // Modification operations // private abstract class ApplySelectedChangesActionBase extends AnAction implements DumbAware { @NotNull protected final Side myModifiedSide; protected final boolean myShortcut; ApplySelectedChangesActionBase(@NotNull Side modifiedSide, boolean shortcut) { myModifiedSide = modifiedSide; myShortcut = shortcut; } @Override public void update(@NotNull AnActionEvent e) { if (myShortcut) { // consume shortcut even if there are nothing to do - avoid calling some other action e.getPresentation().setEnabledAndVisible(true); return; } Editor editor = e.getData(CommonDataKeys.EDITOR); if (editor != getEditor()) { e.getPresentation().setEnabledAndVisible(false); return; } if (!isEditable(myModifiedSide, true) || isStateIsOutOfDate()) { e.getPresentation().setEnabledAndVisible(false); return; } e.getPresentation().setVisible(true); e.getPresentation().setEnabled(isSomeChangeSelected()); } @Override public void actionPerformed(@NotNull final AnActionEvent e) { final List<UnifiedDiffChange> selectedChanges = getSelectedChanges(); if (selectedChanges.isEmpty()) return; if (!isEditable(myModifiedSide, true)) return; if (isStateIsOutOfDate()) return; String title = e.getPresentation().getText() + " selected changes"; DiffUtil.executeWriteCommand(getDocument(myModifiedSide), e.getProject(), title, () -> { // state is invalidated during apply(), but changes are in reverse order, so they should not conflict with each other apply(ContainerUtil.reverse(selectedChanges)); scheduleRediff(); }); } protected boolean isSomeChangeSelected() { 
if (myChangedBlockData == null) return false; List<UnifiedDiffChange> changes = myChangedBlockData.getDiffChanges(); if (changes.isEmpty()) return false; return DiffUtil.isSomeRangeSelected(getEditor(), lines -> { return ContainerUtil.exists(changes, change -> isChangeSelected(change, lines)); }); } @NotNull @CalledInAwt private List<UnifiedDiffChange> getSelectedChanges() { if (myChangedBlockData == null) return Collections.emptyList(); final BitSet lines = DiffUtil.getSelectedLines(myEditor); List<UnifiedDiffChange> changes = myChangedBlockData.getDiffChanges(); return ContainerUtil.filter(changes, change -> isChangeSelected(change, lines)); } private boolean isChangeSelected(@NotNull UnifiedDiffChange change, @NotNull BitSet lines) { return DiffUtil.isSelectedByLine(lines, change.getLine1(), change.getLine2()); } @CalledWithWriteLock protected abstract void apply(@NotNull List<UnifiedDiffChange> changes); } private class ReplaceSelectedChangesAction extends ApplySelectedChangesActionBase { ReplaceSelectedChangesAction(@NotNull Side focusedSide, boolean shortcut) { super(focusedSide.other(), shortcut); setShortcutSet(ActionManager.getInstance().getAction(focusedSide.select("Diff.ApplyLeftSide", "Diff.ApplyRightSide")).getShortcutSet()); getTemplatePresentation().setText(focusedSide.select("Revert", "Accept")); getTemplatePresentation().setIcon(focusedSide.select(AllIcons.Diff.Remove, AllIcons.Actions.Checked)); } @Override protected void apply(@NotNull List<UnifiedDiffChange> changes) { for (UnifiedDiffChange change : changes) { replaceChange(change, myModifiedSide.other()); } } } private class AppendSelectedChangesAction extends ApplySelectedChangesActionBase { AppendSelectedChangesAction(@NotNull Side focusedSide, boolean shortcut) { super(focusedSide.other(), shortcut); setShortcutSet(ActionManager.getInstance().getAction(focusedSide.select("Diff.AppendLeftSide", "Diff.AppendRightSide")).getShortcutSet()); getTemplatePresentation().setText("Append"); 
getTemplatePresentation().setIcon(DiffUtil.getArrowDownIcon(focusedSide)); } @Override protected void apply(@NotNull List<UnifiedDiffChange> changes) { for (UnifiedDiffChange change : changes) { appendChange(change, myModifiedSide.other()); } } } @CalledWithWriteLock public void replaceChange(@NotNull UnifiedDiffChange change, @NotNull Side sourceSide) { Side outputSide = sourceSide.other(); Document document1 = getDocument(Side.LEFT); Document document2 = getDocument(Side.RIGHT); LineFragment lineFragment = change.getLineFragment(); DiffUtil.applyModification(outputSide.select(document1, document2), outputSide.getStartLine(lineFragment), outputSide.getEndLine(lineFragment), sourceSide.select(document1, document2), sourceSide.getStartLine(lineFragment), sourceSide.getEndLine(lineFragment)); // no need to mark myStateIsOutOfDate - it will be made by DocumentListener // TODO: we can apply change manually, without marking state out-of-date. But we'll have to schedule rediff anyway. } @CalledWithWriteLock public void appendChange(@NotNull UnifiedDiffChange change, @NotNull final Side sourceSide) { Side outputSide = sourceSide.other(); Document document1 = getDocument(Side.LEFT); Document document2 = getDocument(Side.RIGHT); LineFragment lineFragment = change.getLineFragment(); if (sourceSide.getStartLine(lineFragment) == sourceSide.getEndLine(lineFragment)) return; DiffUtil.applyModification(outputSide.select(document1, document2), outputSide.getEndLine(lineFragment), outputSide.getEndLine(lineFragment), sourceSide.select(document1, document2), sourceSide.getStartLine(lineFragment), sourceSide.getEndLine(lineFragment)); } // // Impl // @NotNull public TextDiffSettings getTextSettings() { return TextDiffViewerUtil.getTextSettings(myContext); } @NotNull public FoldingModelSupport.Settings getFoldingModelSettings() { return TextDiffViewerUtil.getFoldingModelSettings(myContext); } // // Getters // @NotNull public Side getMasterSide() { return myMasterSide; } @NotNull 
public EditorEx getEditor() { return myEditor; } @NotNull protected List<? extends EditorEx> getEditors() { return Collections.singletonList(myEditor); } @NotNull protected List<? extends DocumentContent> getContents() { //noinspection unchecked return (List<? extends DocumentContent>)(List)myRequest.getContents(); } @NotNull protected DocumentContent getContent(@NotNull Side side) { return side.select(getContents()); } @NotNull protected DocumentContent getContent1() { return getContent(Side.LEFT); } @NotNull protected DocumentContent getContent2() { return getContent(Side.RIGHT); } @CalledInAwt @Nullable protected List<UnifiedDiffChange> getDiffChanges() { return myChangedBlockData == null ? null : myChangedBlockData.getDiffChanges(); } @NotNull @Override public JComponent getComponent() { return myPanel; } @Nullable @Override public JComponent getPreferredFocusedComponent() { if (!myPanel.isGoodContent()) return null; return myEditor.getContentComponent(); } @NotNull @Override protected JComponent getStatusPanel() { return myStatusPanel; } @CalledInAwt public boolean isEditable(@NotNull Side side, boolean respectReadOnlyLock) { if (myReadOnlyLockSet && respectReadOnlyLock) return false; if (side.select(myForceReadOnlyFlags)) return false; return DiffUtil.canMakeWritable(getDocument(side)); } @NotNull public Document getDocument(@NotNull Side side) { return getContent(side).getDocument(); } protected boolean isStateIsOutOfDate() { return myStateIsOutOfDate; } // // Misc // @Nullable @Override protected Navigatable getNavigatable() { return getNavigatable(LineCol.fromCaret(myEditor)); } @CalledInAwt @Nullable protected UnifiedDiffChange getCurrentChange() { if (myChangedBlockData == null) return null; int caretLine = myEditor.getCaretModel().getLogicalPosition().line; for (UnifiedDiffChange change : myChangedBlockData.getDiffChanges()) { if (DiffUtil.isSelectedByLine(caretLine, change.getLine1(), change.getLine2())) return change; } return null; } @CalledInAwt 
@Nullable protected Navigatable getNavigatable(@NotNull LineCol position) { Pair<int[], Side> pair = transferLineFromOneside(position.line); int line1 = pair.first[0]; int line2 = pair.first[1]; Navigatable navigatable1 = getContent1().getNavigatable(new LineCol(line1, position.column)); Navigatable navigatable2 = getContent2().getNavigatable(new LineCol(line2, position.column)); if (navigatable1 == null) return navigatable2; if (navigatable2 == null) return navigatable1; return pair.second.select(navigatable1, navigatable2); } public static boolean canShowRequest(@NotNull DiffContext context, @NotNull DiffRequest request) { return TwosideTextDiffViewer.canShowRequest(context, request); } // // Actions // private class MyPrevNextDifferenceIterable extends PrevNextDifferenceIterableBase<UnifiedDiffChange> { @NotNull @Override protected List<UnifiedDiffChange> getChanges() { return ContainerUtil.notNullize(getDiffChanges()); } @NotNull @Override protected EditorEx getEditor() { return myEditor; } @Override protected int getStartLine(@NotNull UnifiedDiffChange change) { return change.getLine1(); } @Override protected int getEndLine(@NotNull UnifiedDiffChange change) { return change.getLine2(); } } private class MyOpenInEditorWithMouseAction extends OpenInEditorWithMouseAction { @Override protected Navigatable getNavigatable(@NotNull Editor editor, int line) { if (editor != myEditor) return null; return UnifiedDiffViewer.this.getNavigatable(new LineCol(line)); } } private class MyToggleExpandByDefaultAction extends TextDiffViewerUtil.ToggleExpandByDefaultAction { MyToggleExpandByDefaultAction() { super(getTextSettings()); } @Override protected void expandAll(boolean expand) { myFoldingModel.expandAll(expand); } } private class MyReadOnlyLockAction extends TextDiffViewerUtil.ReadOnlyLockAction { MyReadOnlyLockAction() { super(getContext()); applyDefaults(); } @Override protected void doApply(boolean readOnly) { myReadOnlyLockSet = readOnly; if (myChangedBlockData != 
null) { for (UnifiedDiffChange unifiedDiffChange : myChangedBlockData.getDiffChanges()) { unifiedDiffChange.updateGutterActions(); } } updateEditorCanBeTyped(); } @Override protected boolean canEdit() { return !myForceReadOnlyFlags[0] && DiffUtil.canMakeWritable(getContent1().getDocument()) || !myForceReadOnlyFlags[1] && DiffUtil.canMakeWritable(getContent2().getDocument()); } } // // Scroll from annotate // private class ChangedLinesIterator extends BufferedLineIterator { @NotNull private final List<UnifiedDiffChange> myChanges; private int myIndex = 0; private ChangedLinesIterator(@NotNull List<UnifiedDiffChange> changes) { myChanges = changes; init(); } @Override public boolean hasNextBlock() { return myIndex < myChanges.size(); } @Override public void loadNextBlock() { LOG.assertTrue(!myStateIsOutOfDate); UnifiedDiffChange change = myChanges.get(myIndex); myIndex++; LineFragment lineFragment = change.getLineFragment(); Document document = getContent2().getDocument(); CharSequence insertedText = getLinesContent(document, lineFragment.getStartLine2(), lineFragment.getEndLine2()); int lineNumber = lineFragment.getStartLine2(); LineTokenizer tokenizer = new LineTokenizer(insertedText.toString()); for (String line : tokenizer.execute()) { addLine(lineNumber, line); lineNumber++; } } } // // Helpers // @Nullable @Override public Object getData(@NotNull @NonNls String dataId) { if (DiffDataKeys.PREV_NEXT_DIFFERENCE_ITERABLE.is(dataId)) { return myPrevNextDifferenceIterable; } else if (DiffDataKeys.CURRENT_EDITOR.is(dataId)) { return myEditor; } else if (DiffDataKeys.CURRENT_CHANGE_RANGE.is(dataId)) { UnifiedDiffChange change = getCurrentChange(); if (change != null) { return new LineRange(change.getLine1(), change.getLine2()); } } return super.getData(dataId); } private class MyStatusPanel extends StatusPanel { @Nullable @Override protected String getMessage() { if (myChangedBlockData == null) return null; int changesCount = myChangedBlockData.getDiffChanges().size(); 
if (changesCount == 0 && !myChangedBlockData.isContentsEqual()) { return DiffBundle.message("diff.all.differences.ignored.text"); } return DiffBundle.message("diff.count.differences.status.text", changesCount); } } private static class TwosideDocumentData { @NotNull private final UnifiedFragmentBuilder myBuilder; @Nullable private final EditorHighlighter myHighlighter; @Nullable private final UnifiedEditorRangeHighlighter myRangeHighlighter; TwosideDocumentData(@NotNull UnifiedFragmentBuilder builder, @Nullable EditorHighlighter highlighter, @Nullable UnifiedEditorRangeHighlighter rangeHighlighter) { myBuilder = builder; myHighlighter = highlighter; myRangeHighlighter = rangeHighlighter; } @NotNull public UnifiedFragmentBuilder getBuilder() { return myBuilder; } @Nullable public EditorHighlighter getHighlighter() { return myHighlighter; } @Nullable public UnifiedEditorRangeHighlighter getRangeHighlighter() { return myRangeHighlighter; } } private static class ChangedBlockData { @NotNull private final List<UnifiedDiffChange> myDiffChanges; @NotNull private final List<RangeMarker> myGuardedRangeBlocks; @NotNull private final LineNumberConvertor myLineNumberConvertor1; @NotNull private final LineNumberConvertor myLineNumberConvertor2; private final boolean myIsContentsEqual; ChangedBlockData(@NotNull List<UnifiedDiffChange> diffChanges, @NotNull List<RangeMarker> guarderRangeBlocks, @NotNull LineNumberConvertor lineNumberConvertor1, @NotNull LineNumberConvertor lineNumberConvertor2, boolean isContentsEqual) { myDiffChanges = diffChanges; myGuardedRangeBlocks = guarderRangeBlocks; myLineNumberConvertor1 = lineNumberConvertor1; myLineNumberConvertor2 = lineNumberConvertor2; myIsContentsEqual = isContentsEqual; } @NotNull public List<UnifiedDiffChange> getDiffChanges() { return myDiffChanges; } @NotNull public List<RangeMarker> getGuardedRangeBlocks() { return myGuardedRangeBlocks; } @NotNull public LineNumberConvertor getLineNumberConvertor(@NotNull Side side) { return 
side.select(myLineNumberConvertor1, myLineNumberConvertor2); } public boolean isContentsEqual() { return myIsContentsEqual; } } private static class CombinedEditorData { @NotNull private final CharSequence myText; @Nullable private final EditorHighlighter myHighlighter; @Nullable private final UnifiedEditorRangeHighlighter myRangeHighlighter; @Nullable private final FileType myFileType; @NotNull private final TIntFunction myLineConvertor1; @NotNull private final TIntFunction myLineConvertor2; CombinedEditorData(@NotNull CharSequence text, @Nullable EditorHighlighter highlighter, @Nullable UnifiedEditorRangeHighlighter rangeHighlighter, @Nullable FileType fileType, @NotNull TIntFunction convertor1, @NotNull TIntFunction convertor2) { myText = text; myHighlighter = highlighter; myRangeHighlighter = rangeHighlighter; myFileType = fileType; myLineConvertor1 = convertor1; myLineConvertor2 = convertor2; } @NotNull public CharSequence getText() { return myText; } @Nullable public EditorHighlighter getHighlighter() { return myHighlighter; } @Nullable public UnifiedEditorRangeHighlighter getRangeHighlighter() { return myRangeHighlighter; } @Nullable public FileType getFileType() { return myFileType; } @NotNull public TIntFunction getLineConvertor1() { return myLineConvertor1; } @NotNull public TIntFunction getLineConvertor2() { return myLineConvertor2; } } private class MyInitialScrollHelper extends InitialScrollPositionSupport.TwosideInitialScrollHelper { @NotNull @Override protected List<? 
extends Editor> getEditors() { return UnifiedDiffViewer.this.getEditors(); } @Override protected void disableSyncScroll(boolean value) { } @Override public void onSlowRediff() { // Will not happen for initial rediff } @Nullable @Override protected LogicalPosition[] getCaretPositions() { LogicalPosition position = myEditor.getCaretModel().getLogicalPosition(); Pair<int[], Side> pair = transferLineFromOneside(position.line); LogicalPosition[] carets = new LogicalPosition[2]; carets[0] = getPosition(pair.first[0], position.column); carets[1] = getPosition(pair.first[1], position.column); return carets; } @Override protected boolean doScrollToPosition() { if (myCaretPosition == null) return false; LogicalPosition twosidePosition = myMasterSide.selectNotNull(myCaretPosition); int onesideLine = transferLineToOneside(myMasterSide, twosidePosition.line); LogicalPosition position = new LogicalPosition(onesideLine, twosidePosition.column); myEditor.getCaretModel().moveToLogicalPosition(position); if (myEditorsPosition != null && myEditorsPosition.isSame(position)) { DiffUtil.scrollToPoint(myEditor, myEditorsPosition.myPoints[0], false); } else { DiffUtil.scrollToCaret(myEditor, false); } return true; } @NotNull private LogicalPosition getPosition(int line, int column) { if (line == -1) return new LogicalPosition(0, 0); return new LogicalPosition(line, column); } private void doScrollToLine(@NotNull Side side, @NotNull LogicalPosition position) { int onesideLine = transferLineToOneside(side, position.line); DiffUtil.scrollEditor(myEditor, onesideLine, position.column, false); } @Override protected boolean doScrollToLine() { if (myScrollToLine == null) return false; doScrollToLine(myScrollToLine.first, new LogicalPosition(myScrollToLine.second, 0)); return true; } private boolean doScrollToChange(@NotNull ScrollToPolicy scrollToChangePolicy) { if (myChangedBlockData == null) return false; List<UnifiedDiffChange> changes = myChangedBlockData.getDiffChanges(); UnifiedDiffChange 
targetChange = scrollToChangePolicy.select(changes); if (targetChange == null) return false; DiffUtil.scrollEditor(myEditor, targetChange.getLine1(), false); return true; } @Override protected boolean doScrollToChange() { if (myScrollToChange == null) return false; return doScrollToChange(myScrollToChange); } @Override protected boolean doScrollToFirstChange() { return doScrollToChange(ScrollToPolicy.FIRST_CHANGE); } @Override protected boolean doScrollToContext() { if (myNavigationContext == null) return false; if (myChangedBlockData == null) return false; ChangedLinesIterator changedLinesIterator = new ChangedLinesIterator(myChangedBlockData.getDiffChanges()); int line = myNavigationContext.contextMatchCheck(changedLinesIterator); if (line == -1) { // this will work for the case, when spaces changes are ignored, and corresponding fragments are not reported as changed // just try to find target line -> +- AllLinesIterator allLinesIterator = new AllLinesIterator(getContent2().getDocument()); line = myNavigationContext.contextMatchCheck(allLinesIterator); } if (line == -1) return false; doScrollToLine(Side.RIGHT, new LogicalPosition(line, 0)); return true; } } private static class MyFoldingModel extends FoldingModelSupport { MyFoldingModel(@NotNull EditorEx editor, @NotNull Disposable disposable) { super(new EditorEx[]{editor}, disposable); } public void install(@Nullable List<LineRange> changedLines, @NotNull UserDataHolder context, @NotNull FoldingModelSupport.Settings settings) { Iterator<int[]> it = map(changedLines, line -> new int[]{ line.start, line.end }); install(it, context, settings); } @NotNull public TIntFunction getLineNumberConvertor() { return getLineConvertor(0); } } private static class MyReadonlyFragmentModificationHandler implements ReadonlyFragmentModificationHandler { @Override public void handle(ReadOnlyFragmentModificationException e) { // do nothing } } }
apache-2.0
jwren/intellij-community
plugins/properties/properties-psi-api/src/com/intellij/lang/properties/provider/PropertiesProviderImpl.java
1127
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license. package com.intellij.lang.properties.provider; import com.intellij.lang.properties.psi.Property; import com.intellij.lang.properties.psi.PropertyKeyIndex; import com.intellij.openapi.project.Project; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.util.containers.ContainerUtil; import com.intellij.properties.provider.PropertiesProvider; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Collection; public class PropertiesProviderImpl implements PropertiesProvider { @Override public @Nullable String getPropertyValue(@NotNull String propertyKey, @NotNull GlobalSearchScope scope) { Project project = scope.getProject(); if (project == null) return null; Collection<Property> property = PropertyKeyIndex.getInstance().get(propertyKey, project, scope); if (property == null) return null; Property item = ContainerUtil.getFirstItem(property); return item != null ? item.getValue() : null; } }
apache-2.0
mehdi149/OF_COMPILER_0.1
gen-src/main/java/org/projectfloodlight/openflow/protocol/OFAsyncConfigPropExperimenterMaster.java
1789
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_interface.java // Do not modify package org.projectfloodlight.openflow.protocol; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.stat.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.oxs.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import io.netty.buffer.ByteBuf; public interface OFAsyncConfigPropExperimenterMaster extends OFObject, OFAsyncConfigProp { int getType(); OFVersion getVersion(); void writeTo(ByteBuf channelBuffer); Builder createBuilder(); public interface Builder extends OFAsyncConfigProp.Builder { OFAsyncConfigPropExperimenterMaster build(); int getType(); OFVersion getVersion(); } }
apache-2.0
argv0/cloudstack
plugins/network-elements/elastic-loadbalancer/src/com/cloud/network/lb/dao/ElasticLbVmMapDao.java
1527
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.network.lb.dao; import java.util.List; import com.cloud.network.ElasticLbVmMapVO; import com.cloud.network.LoadBalancerVO; import com.cloud.utils.db.GenericDao; import com.cloud.vm.DomainRouterVO; public interface ElasticLbVmMapDao extends GenericDao<ElasticLbVmMapVO, Long> { ElasticLbVmMapVO findOneByLbIdAndElbVmId(long lbId, long elbVmId); ElasticLbVmMapVO findOneByIpIdAndElbVmId(long ipId, long elbVmId); ElasticLbVmMapVO findOneByIp(long ipId); List<ElasticLbVmMapVO> listByElbVmId(long elbVmId); List<ElasticLbVmMapVO> listByLbId(long lbId); int deleteLB(long lbId); List<DomainRouterVO> listUnusedElbVms(); List<LoadBalancerVO> listLbsForElbVm(long elbVmId); }
apache-2.0
pengzong1111/solr4
solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java
10590
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.solr.handler;

import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.Locale;

import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.DateField;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Ping Request Handler for reporting SolrCore health to a Load Balancer.
 *
 * <p>
 * This handler is designed to be used as the endpoint for an HTTP
 * Load-Balancer to use when checking the "health" or "up status" of a
 * Solr server.
 * </p>
 *
 * <p>
 * In its simplest form, the PingRequestHandler should be
 * configured with some defaults indicating a request that should be
 * executed.  If the request succeeds, then the PingRequestHandler
 * will respond back with a simple "OK" status.  If the request fails,
 * then the PingRequestHandler will respond back with the
 * corresponding HTTP Error code.  Clients (such as load balancers)
 * can be configured to poll the PingRequestHandler monitoring for
 * these types of responses (or for a simple connection failure) to
 * know if there is a problem with the Solr server.
 * </p>
 *
 * <pre class="prettyprint">
 * &lt;requestHandler name="/admin/ping" class="solr.PingRequestHandler"&gt;
 *   &lt;lst name="invariants"&gt;
 *     &lt;str name="qt"&gt;/search&lt;/str&gt;&lt;!-- handler to delegate to --&gt;
 *     &lt;str name="q"&gt;some test query&lt;/str&gt;
 *   &lt;/lst&gt;
 * &lt;/requestHandler&gt;
 * </pre>
 *
 * <p>
 * A more advanced option available, is to configure the handler with a
 * "healthcheckFile" which can be used to enable/disable the PingRequestHandler.
 * </p>
 *
 * <pre class="prettyprint">
 * &lt;requestHandler name="/admin/ping" class="solr.PingRequestHandler"&gt;
 *   &lt;!-- relative paths are resolved against the data dir --&gt;
 *   &lt;str name="healthcheckFile"&gt;server-enabled.txt&lt;/str&gt;
 *   &lt;lst name="invariants"&gt;
 *     &lt;str name="qt"&gt;/search&lt;/str&gt;&lt;!-- handler to delegate to --&gt;
 *     &lt;str name="q"&gt;some test query&lt;/str&gt;
 *   &lt;/lst&gt;
 * &lt;/requestHandler&gt;
 * </pre>
 *
 * <ul>
 *   <li>If the health check file exists, the handler will execute the
 *       delegated query and return status as described above.
 *   </li>
 *   <li>If the health check file does not exist, the handler will return
 *       an HTTP error even if the server is working fine and the delegated
 *       query would have succeeded
 *   </li>
 * </ul>
 *
 * <p>
 * This health check file feature can be used as a way to indicate
 * to some Load Balancers that the server should be "removed from
 * rotation" for maintenance, or upgrades, or whatever reason you may
 * wish.
 * </p>
 *
 * <p>
 * The health check file may be created/deleted by any external
 * system, or the PingRequestHandler itself can be used to
 * create/delete the file by specifying an "action" param in a
 * request:
 * </p>
 *
 * <ul>
 *   <li><code>http://.../ping?action=enable</code>
 *       - creates the health check file if it does not already exist
 *   </li>
 *   <li><code>http://.../ping?action=disable</code>
 *       - deletes the health check file if it exists
 *   </li>
 *   <li><code>http://.../ping?action=status</code>
 *       - returns a status code indicating if the healthcheck file exists
 *       ("<code>enabled</code>") or not ("<code>disabled</code>")
 *   </li>
 * </ul>
 *
 * @since solr 1.3
 */
public class PingRequestHandler extends RequestHandlerBase implements SolrCoreAware
{
  public static Logger log = LoggerFactory.getLogger(PingRequestHandler.class);

  // Init-arg name for the optional enable/disable flag file.
  public static final String HEALTHCHECK_FILE_PARAM = "healthcheckFile";

  // Supported values of the "action" request param (PING is the default).
  protected enum ACTIONS {STATUS, ENABLE, DISABLE, PING};

  // Raw value of the healthcheckFile init arg; null when not configured.
  private String healthFileName = null;
  // Resolved flag file; stays null when no healthcheckFile was configured.
  private File healthcheck = null;

  /**
   * Reads the optional {@link #HEALTHCHECK_FILE_PARAM} init arg.
   * The path is not resolved here — {@link #inform(SolrCore)} does that,
   * because relative paths need the core's data dir.
   */
  @Override
  public void init(NamedList args) {
    super.init(args);
    Object tmp = args.get(HEALTHCHECK_FILE_PARAM);
    healthFileName = (null == tmp ? null : tmp.toString());
  }

  /**
   * Resolves the configured healthcheck file name against the core's data
   * dir (when relative) and warns if the parent directory is not writable.
   */
  @Override
  public void inform( SolrCore core ) {
    if (null != healthFileName) {
      healthcheck = new File(healthFileName);
      if ( ! healthcheck.isAbsolute()) {
        healthcheck = new File(core.getDataDir(), healthFileName);
        healthcheck = healthcheck.getAbsoluteFile();
      }

      if ( ! healthcheck.getParentFile().canWrite()) {
        // this is not fatal, users may not care about enable/disable via
        // solr request, file might be touched/deleted by an external system
        log.warn("Directory for configured healthcheck file is not writable by solr, PingRequestHandler will not be able to control enable/disable: {}", healthcheck.getParentFile().getAbsolutePath());
      }
    }
  }

  /**
   * Returns true if the healthcheck flag-file is enabled but does not exist,
   * otherwise (no file configured, or file configured and exists)
   * returns false.
   */
  public boolean isPingDisabled() {
    return (null != healthcheck && ! healthcheck.exists() );
  }

  /**
   * Dispatches on the "action" request param: PING (default), ENABLE,
   * DISABLE, or STATUS. Unknown action values produce a BAD_REQUEST.
   *
   * @throws Exception propagated from the delegated ping query
   */
  @Override
  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception
  {
    SolrParams params = req.getParams();

    // in this case, we want to default distrib to false so
    // we only ping the single node
    Boolean distrib = params.getBool("distrib");
    if (distrib == null)   {
      ModifiableSolrParams mparams = new ModifiableSolrParams(params);
      mparams.set("distrib", false);
      req.setParams(mparams);
    }

    String actionParam = params.get("action");
    ACTIONS action = null;
    if (actionParam == null){
      action = ACTIONS.PING;
    }
    else {
      try {
        // Locale.ROOT: action names must parse the same regardless of default locale.
        action = ACTIONS.valueOf(actionParam.toUpperCase(Locale.ROOT));
      }
      catch (IllegalArgumentException iae){
        throw new SolrException
          (SolrException.ErrorCode.BAD_REQUEST,
           "Unknown action: " + actionParam);
      }
    }
    switch(action){
      case PING:
        if( isPingDisabled() ) {
          // flag file configured but absent: report SERVICE_UNAVAILABLE
          // via rsp.setException rather than throwing.
          SolrException e = new SolrException
            (SolrException.ErrorCode.SERVICE_UNAVAILABLE,
             "Service disabled");
          rsp.setException(e);
          return;
        }
        handlePing(req, rsp);
        break;
      case ENABLE:
        handleEnable(true);
        break;
      case DISABLE:
        handleEnable(false);
        break;
      case STATUS:
        // last case — no break needed
        if( healthcheck == null ){
          SolrException e = new SolrException
            (SolrException.ErrorCode.SERVICE_UNAVAILABLE,
             "healthcheck not configured");
          rsp.setException(e);
        } else {
          rsp.add( "status",
                   isPingDisabled() ? "disabled" : "enabled" );
        }
    }
  }

  /**
   * Executes the configured ping query against the delegate handler and
   * adds "status"="OK" on success; any failure is rethrown as a
   * SERVER_ERROR with the original exception as cause.
   */
  protected void handlePing(SolrQueryRequest req, SolrQueryResponse rsp)
    throws Exception
  {
    SolrParams params = req.getParams();
    SolrCore core = req.getCore();

    // Get the RequestHandler
    String qt = params.get( CommonParams.QT );//optional; you get the default otherwise
    SolrRequestHandler handler = core.getRequestHandler( qt );
    if( handler == null ) {
      throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,
          "Unknown RequestHandler (qt): "+qt );
    }

    if( handler instanceof PingRequestHandler ) {
      // guard against a ping delegating to itself (infinite recursion)
      throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,
          "Cannot execute the PingRequestHandler recursively" );
    }

    // Execute the ping query and catch any possible exception
    Throwable ex = null;
    try {
      SolrQueryResponse pingrsp = new SolrQueryResponse();
      core.execute(handler, req, pingrsp );
      ex = pingrsp.getException();
    }
    catch( Exception e ) {
      ex = e;
    }

    // Send an error or an 'OK' message (response code will be 200)
    if( ex != null ) {
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
          "Ping query caused exception: "+ex.getMessage(), ex );
    }
    rsp.add( "status", "OK" );
  }

  /**
   * Creates (enable=true) or deletes (enable=false) the healthcheck flag
   * file. The file's content on enable is the creation timestamp.
   *
   * @throws SolrException SERVICE_UNAVAILABLE when no file is configured,
   *         SERVER_ERROR on write failure, NOT_FOUND on delete failure
   */
  protected void handleEnable(boolean enable) throws SolrException {
    if (healthcheck == null) {
      throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE,
        "No healthcheck file defined.");
    }
    if ( enable ) {
      try {
        // write out when the file was created
        FileUtils.write(healthcheck, DateField.formatExternal(new Date()), "UTF-8");
      } catch (IOException e) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                                "Unable to write healthcheck flag file", e);
      }
    } else {
      if (healthcheck.exists() && !healthcheck.delete()){
        throw new SolrException(SolrException.ErrorCode.NOT_FOUND,
                                "Did not successfully delete healthcheck file: "
                                +healthcheck.getAbsolutePath());
      }
    }
  }

  //////////////////////// SolrInfoMBeans methods //////////////////////

  @Override
  public String getDescription() {
    return "Reports application health to a load-balancer";
  }

  @Override
  public String getSource() {
    return "$URL: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene_solr_4_7/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java $";
  }
}
apache-2.0
lucastheisen/apache-directory-server
kerberos-codec/src/main/java/org/apache/directory/server/kerberos/shared/replay/ReplayCacheImpl.java
7871
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.kerberos.shared.replay; import java.io.Serializable; import javax.security.auth.kerberos.KerberosPrincipal; import net.sf.ehcache.Cache; import net.sf.ehcache.Element; import net.sf.ehcache.store.AbstractPolicy; import org.apache.directory.shared.kerberos.KerberosTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * "The replay cache will store at least the server name, along with the client name, * time, and microsecond fields from the recently-seen authenticators, and if a * matching tuple is found, the KRB_AP_ERR_REPEAT error is returned." 
* * We will store the entries in Ehacache instance * * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a> */ public class ReplayCacheImpl implements ReplayCache { private static final Logger LOG = LoggerFactory.getLogger( ReplayCacheImpl.class ); /** ehcache based storage to store the entries */ private Cache cache; /** default clock skew */ private static final long DEFAULT_CLOCK_SKEW = 5 * KerberosTime.MINUTE; /** The clock skew */ private long clockSkew = DEFAULT_CLOCK_SKEW; /** * A structure to hold an entry */ public class ReplayCacheEntry implements Serializable { private static final long serialVersionUID = 1L; /** The server principal */ private KerberosPrincipal serverPrincipal; /** The client principal */ private KerberosPrincipal clientPrincipal; /** The client time */ private KerberosTime clientTime; /** The client micro seconds */ private int clientMicroSeconds; /** * Creates a new instance of ReplayCacheEntry. * * @param serverPrincipal * @param clientPrincipal * @param clientTime * @param clientMicroSeconds */ public ReplayCacheEntry( KerberosPrincipal serverPrincipal, KerberosPrincipal clientPrincipal, KerberosTime clientTime, int clientMicroSeconds ) { this.serverPrincipal = serverPrincipal; this.clientPrincipal = clientPrincipal; this.clientTime = clientTime; this.clientMicroSeconds = clientMicroSeconds; } /** * Returns whether this {@link ReplayCacheEntry} is equal to another {@link ReplayCacheEntry}. * {@link ReplayCacheEntry}'s are equal when the server name, client name, client time, and * the client microseconds are equal. * * @param that * @return true if the ReplayCacheEntry's are equal. */ public boolean equals( ReplayCacheEntry that ) { return serverPrincipal.equals( that.serverPrincipal ) && clientPrincipal.equals( that.clientPrincipal ) && clientTime.equals( that.clientTime ) && clientMicroSeconds == that.clientMicroSeconds; } /** * Returns whether this {@link ReplayCacheEntry} is older than a given time. 
* * @param clockSkew * @return true if the {@link ReplayCacheEntry}'s client time is outside the clock skew time. */ public boolean isOutsideClockSkew( long clockSkew ) { return !clientTime.isInClockSkew( clockSkew ); } /** * @return create a key to be used while storing in the cache */ private String createKey() { StringBuilder sb = new StringBuilder(); sb.append( ( clientPrincipal == null ) ? "null" : clientPrincipal.getName() ); sb.append( '#' ); sb.append( ( serverPrincipal == null ) ? "null" : serverPrincipal.getName() ); sb.append( '#' ); sb.append( ( clientTime == null ) ? "null" : clientTime.getDate() ); sb.append( '#' ); sb.append( clientMicroSeconds ); return sb.toString(); } } /** * an expiration policy based on the clockskew */ private class ClockskewExpirationPolicy extends AbstractPolicy { /** * {@inheritDoc} */ public String getName() { return "CLOCK-SKEW"; } /** * {@inheritDoc} */ public boolean compare( Element element1, Element element2 ) { ReplayCacheEntry entry = ( ReplayCacheEntry ) element2.getValue(); if ( entry.isOutsideClockSkew( clockSkew ) ) { return true; } return false; } } /** * Creates a new instance of InMemoryReplayCache. Sets the * delay between each cleaning run to 5 seconds. */ public ReplayCacheImpl( Cache cache ) { this.cache = cache; this.cache.setMemoryStoreEvictionPolicy( new ClockskewExpirationPolicy() ); } /** * Creates a new instance of InMemoryReplayCache. Sets the * delay between each cleaning run to 5 seconds. Sets the * clockSkew to the given value * * @param clockSkew the allowed skew (milliseconds) */ public ReplayCacheImpl( Cache cache, long clockSkew ) { this.cache = cache; this.clockSkew = clockSkew; this.cache.setMemoryStoreEvictionPolicy( new ClockskewExpirationPolicy() ); } /** * Sets the clock skew. * * @param clockSkew */ public void setClockSkew( long clockSkew ) { this.clockSkew = clockSkew; } /** * Check if an entry is a replay or not. 
*/ public synchronized boolean isReplay( KerberosPrincipal serverPrincipal, KerberosPrincipal clientPrincipal, KerberosTime clientTime, int clientMicroSeconds ) { ReplayCacheEntry entry = new ReplayCacheEntry( serverPrincipal, clientPrincipal, clientTime, clientMicroSeconds ); Element element = cache.get( entry.createKey() ); if ( element == null ) { return false; } entry = ( ReplayCacheEntry ) element.getValue(); if ( serverPrincipal.equals( entry.serverPrincipal ) && clientTime.equals( entry.clientTime ) && ( clientMicroSeconds == entry.clientMicroSeconds ) ) { return true; } return false; } /** * Add a new entry into the cache. A thread will clean all the timed out * entries. */ public synchronized void save( KerberosPrincipal serverPrincipal, KerberosPrincipal clientPrincipal, KerberosTime clientTime, int clientMicroSeconds ) { ReplayCacheEntry entry = new ReplayCacheEntry( serverPrincipal, clientPrincipal, clientTime, clientMicroSeconds ); Element element = new Element( entry.createKey(), entry ); cache.put( element ); } /** * {@inheritDoc} */ public void clear() { LOG.debug( "removing all the elements from cache" ); cache.removeAll(); } }
apache-2.0
longjl/JFinal_Authority
jfinal-authority/src/main/java/com/ccb/project/vo/planvo.java
772
package com.ccb.project.vo; import java.sql.Date; /** * Created by han on 2015/6/29. */ public class planvo { private String uid; private String prjId; private Date endDate; public Date getBgnDate() { return bgnDate; } public void setBgnDate(Date bgnDate) { this.bgnDate = bgnDate; } public Date getEndDate() { return endDate; } public void setEndDate(Date endDate) { this.endDate = endDate; } public String getPrjId() { return prjId; } public void setPrjId(String prjId) { this.prjId = prjId; } public String getUid() { return uid; } public void setUid(String uid) { this.uid = uid; } private Date bgnDate; }
apache-2.0
meetdestiny/geronimo-trader
modules/mail/src/java/org/apache/geronimo/mail/NullTransport.java
1308
/**
 *
 * Copyright 2004 The Apache Software Foundation
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.apache.geronimo.mail;

import javax.mail.Address;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.URLName;

/**
 * A no-op JavaMail {@link Transport}: every connection attempt succeeds and
 * every message is silently discarded. Useful as a stand-in when outbound
 * mail must be disabled without changing calling code.
 *
 * @version $Rev$ $Date$
 */
public class NullTransport extends Transport {
    /**
     * @param session the JavaMail session this transport belongs to
     * @param urlName the provider URL this transport was looked up under
     */
    public NullTransport(Session session, URLName urlName) {
        super(session, urlName);
    }

    /**
     * Discards the message; nothing is delivered to any of the addresses.
     */
    public void sendMessage(Message message, Address[] addresses) throws MessagingException {
        // do nothing
    }

    /**
     * Always reports a successful connection regardless of credentials.
     */
    protected boolean protocolConnect(String host, int port, String user, String password) throws MessagingException {
        return true; // always connect
    }
}
apache-2.0
variacode/rundeck
core/src/main/java/com/dtolabs/rundeck/core/cluster/ClusterInfoService.java
988
/*
 * Copyright 2018 Rundeck, Inc. (http://rundeck.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.dtolabs.rundeck.core.cluster;

import org.rundeck.app.spi.AppService;

/**
 * Provides info about cluster configuration
 */
public interface ClusterInfoService
    extends AppService
{
    /**
     * True if cluster mode is enabled
     *
     * @return whether this instance is running as part of a cluster
     */
    boolean isClusterModeEnabled();

    /**
     * This cluster member's UUID
     *
     * @return the UUID identifying this server within the cluster
     */
    String getServerUUID();
}
apache-2.0
raksha-rao/gluster-ovirt
frontend/webadmin/modules/uicommon/src/main/java/org/ovirt/engine/ui/uicommon/models/datacenters/DataCenterNetworkListModel.java
17841
package org.ovirt.engine.ui.uicommon.models.datacenters; import java.util.Collections; import org.ovirt.engine.core.compat.*; import org.ovirt.engine.ui.uicompat.*; import org.ovirt.engine.core.common.businessentities.*; import org.ovirt.engine.core.common.vdscommands.*; import org.ovirt.engine.core.common.queries.*; import org.ovirt.engine.core.common.action.*; import org.ovirt.engine.ui.frontend.*; import org.ovirt.engine.ui.uicommon.*; import org.ovirt.engine.ui.uicommon.models.*; import org.ovirt.engine.core.common.*; import org.ovirt.engine.ui.uicommon.models.common.*; import org.ovirt.engine.ui.uicompat.*; import org.ovirt.engine.core.common.interfaces.*; import org.ovirt.engine.core.common.businessentities.*; import org.ovirt.engine.core.common.queries.*; import org.ovirt.engine.ui.uicommon.*; import org.ovirt.engine.ui.uicommon.models.*; @SuppressWarnings("unused") public class DataCenterNetworkListModel extends SearchableListModel implements IFrontendMultipleQueryAsyncCallback { private static final String ENGINE_NETWORK = "engine"; private UICommand privateNewCommand; public UICommand getNewCommand() { return privateNewCommand; } private void setNewCommand(UICommand value) { privateNewCommand = value; } private UICommand privateEditCommand; public UICommand getEditCommand() { return privateEditCommand; } private void setEditCommand(UICommand value) { privateEditCommand = value; } private UICommand privateRemoveCommand; public UICommand getRemoveCommand() { return privateRemoveCommand; } private void setRemoveCommand(UICommand value) { privateRemoveCommand = value; } public storage_pool getEntity() { return (storage_pool)super.getEntity(); } public void setEntity(storage_pool value) { super.setEntity(value); } private Model window; public Model getWindow() { return window; } public void setWindow(Model value) { if (window != value) { window = value; OnPropertyChanged(new PropertyChangedEventArgs("Window")); } } private Model confirmWindow; public Model 
getConfirmWindow() { return confirmWindow; } public void setConfirmWindow(Model value) { if (confirmWindow != value) { confirmWindow = value; OnPropertyChanged(new PropertyChangedEventArgs("ConfirmWindow")); } } private java.util.ArrayList<VDSGroup> privateClusterList; public java.util.ArrayList<VDSGroup> getClusterList() { return privateClusterList; } public void setClusterList(java.util.ArrayList<VDSGroup> value) { privateClusterList = value; } private java.util.ArrayList<SelectionTreeNodeModel> privateSelectionNodeList; public java.util.ArrayList<SelectionTreeNodeModel> getSelectionNodeList() { return privateSelectionNodeList; } public void setSelectionNodeList(java.util.ArrayList<SelectionTreeNodeModel> value) { privateSelectionNodeList = value; } public DataCenterNetworkListModel() { setTitle("Logical Networks"); setNewCommand(new UICommand("New", this)); setEditCommand(new UICommand("Edit", this)); setRemoveCommand(new UICommand("Remove", this)); UpdateActionAvailability(); } @Override protected void OnEntityChanged() { super.OnEntityChanged(); getSearchCommand().Execute(); } @Override public void Search() { if (getEntity() != null) { super.Search(); } } @Override protected void SyncSearch() { super.SyncSearch(); AsyncQuery _asyncQuery = new AsyncQuery(); _asyncQuery.setModel(this); _asyncQuery.asyncCallback = new INewAsyncCallback() { public void OnSuccess(Object model, Object ReturnValue) { SearchableListModel searchableListModel = (SearchableListModel)model; searchableListModel.setItems((java.util.ArrayList<network>)((VdcQueryReturnValue)ReturnValue).getReturnValue()); }}; Frontend.RunQuery(VdcQueryType.GetAllNetworks, new GetAllNetworkQueryParamenters(getEntity().getId()), _asyncQuery); } @Override protected void AsyncSearch() { super.AsyncSearch(); setAsyncResult(Frontend.RegisterQuery(VdcQueryType.GetAllNetworks, new GetAllNetworkQueryParamenters(getEntity().getId()))); setItems(getAsyncResult().getData()); } public void remove() { if (getWindow() != 
null) { return; } ConfirmationModel model = new ConfirmationModel(); setWindow(model); model.setTitle("Remove Logical Network(s)"); model.setHashName("remove_logical_network"); model.setMessage("Logical Network(s)"); java.util.ArrayList<String> list = new java.util.ArrayList<String>(); for (network a : Linq.<network>Cast(getSelectedItems())) { list.add(a.getname()); } model.setItems(list); UICommand tempVar = new UICommand("OnRemove", this); tempVar.setTitle("OK"); tempVar.setIsDefault(true); model.getCommands().add(tempVar); UICommand tempVar2 = new UICommand("Cancel", this); tempVar2.setTitle("Cancel"); tempVar2.setIsCancel(true); model.getCommands().add(tempVar2); } public void OnRemove() { java.util.ArrayList<VdcActionParametersBase> pb = new java.util.ArrayList<VdcActionParametersBase>(); for (network a : Linq.<network>Cast(getSelectedItems())) { pb.add((VdcActionParametersBase)new AddNetworkStoragePoolParameters(getEntity().getId(), a)); } Frontend.RunMultipleAction(VdcActionType.RemoveNetwork, pb); Cancel(); } public void Edit() { network network = (network)getSelectedItem(); if (getWindow() != null) { return; } DataCenterNetworkModel model = new DataCenterNetworkModel(); setWindow(model); model.setTitle("Edit Logical Network"); model.setHashName("edit_logical_network"); model.getName().setEntity(network.getname()); model.getDescription().setEntity(network.getdescription()); model.setIsStpEnabled(network.getstp()); model.setHasVLanTag(network.getvlan_id() != null); model.getVLanTag().setEntity((network.getvlan_id() == null ? 
0 : network.getvlan_id())); setClusterList(DataProvider.GetClusterList(getEntity().getId())); setSelectionNodeList(new java.util.ArrayList<SelectionTreeNodeModel>()); java.util.ArrayList<VdcQueryParametersBase> parametersList = new java.util.ArrayList<VdcQueryParametersBase>(); java.util.ArrayList<VdcQueryType> queryTypeList = new java.util.ArrayList<VdcQueryType>(); for (VDSGroup vdsGroup : getClusterList()) { queryTypeList.add(VdcQueryType.GetAllNetworksByClusterId); parametersList.add(new VdsGroupQueryParamenters(vdsGroup.getID())); SelectionTreeNodeModel tempVar = new SelectionTreeNodeModel(); tempVar.setIsSelectedNullable(false); tempVar.setEntity(vdsGroup); tempVar.setDescription(vdsGroup.getname()); getSelectionNodeList().add(tempVar); } Frontend.RunMultipleQueries(queryTypeList, parametersList, this); model.setDetachAllCommand(new UICommand("DetachClusters", this)); //cannot detach engine networks from clusters if (StringHelper.stringsEqual(network.getname(), ENGINE_NETWORK)) { for (SelectionTreeNodeModel nodeModel : getSelectionNodeList()) { nodeModel.setIsChangable(false); } model.getDetachAllCommand().setIsAvailable(false); model.getName().setIsChangable(false); model.setMessage("Cannot detach Management Network from Clusters"); } } public void New() { if (getWindow() != null) { return; } DataCenterNetworkModel model = new DataCenterNetworkModel(); setWindow(model); model.setTitle("New Logical Network"); model.setHashName("new_logical_network"); model.setIsNew(true); model.setClusters(DataProvider.GetClusterList(getEntity().getId())); UICommand tempVar = new UICommand("OnSave", this); tempVar.setTitle("OK"); tempVar.setIsDefault(true); model.getCommands().add(tempVar); model.setDetachAllCommand(new UICommand("DetachClusters", this)); model.getDetachAllAvailable().setEntity(false); UICommand tempVar2 = new UICommand("Cancel", this); tempVar2.setTitle("Cancel"); tempVar2.setIsCancel(true); model.getCommands().add(tempVar2); } public void OnSave() { 
DataCenterNetworkModel model = (DataCenterNetworkModel)getWindow(); if (getEntity() == null || (!model.getIsNew() && getSelectedItem() == null)) { Cancel(); return; } model.setcurrentNetwork(model.getIsNew() ? new network() : (network)Cloner.clone(getSelectedItem())); if (!model.Validate()) { return; } //Save changes. model.getcurrentNetwork().setstorage_pool_id(getEntity().getId()); model.getcurrentNetwork().setname((String)model.getName().getEntity()); model.getcurrentNetwork().setstp(model.getIsStpEnabled()); model.getcurrentNetwork().setdescription((String)model.getDescription().getEntity()); model.getcurrentNetwork().setvlan_id(null); if (model.getHasVLanTag()) { model.getcurrentNetwork().setvlan_id(Integer.parseInt(model.getVLanTag().getEntity().toString())); } model.setnewClusters(new java.util.ArrayList<VDSGroup>()); for (SelectionTreeNodeModel selectionTreeNodeModel : model.getClusterTreeNodes()) { //C# TO JAVA CONVERTER TODO TASK: Comparisons involving nullable type instances are not converted to null-value logic: if (selectionTreeNodeModel.getIsSelectedNullable() != null && selectionTreeNodeModel.getIsSelectedNullable().equals(true)) { model.getnewClusters().add((VDSGroup)selectionTreeNodeModel.getEntity()); } } java.util.ArrayList<VDSGroup> detachNetworkFromClusters = Linq.Except(model.getOriginalClusters(), model.getnewClusters()); java.util.ArrayList<VdcActionParametersBase> actionParameters = new java.util.ArrayList<VdcActionParametersBase>(); for (VDSGroup detachNetworkFromCluster : detachNetworkFromClusters) { actionParameters.add((VdcActionParametersBase) new AttachNetworkToVdsGroupParameter(detachNetworkFromCluster, model.getcurrentNetwork())); } model.StartProgress(null); Frontend.RunMultipleAction(VdcActionType.DetachNetworkToVdsGroup, actionParameters, new IFrontendMultipleActionAsyncCallback() { @Override public void Executed(FrontendMultipleActionAsyncResult result) { DataCenterNetworkModel networkModel = 
(DataCenterNetworkModel)result.getState(); network network = networkModel.getcurrentNetwork(); VdcReturnValueBase returnValue; if (networkModel.getIsNew()) { returnValue = Frontend.RunAction(VdcActionType.AddNetwork, new AddNetworkStoragePoolParameters(getEntity().getId(), network)); } else { if ((Boolean)networkModel.getIsEnabled().getEntity()) { returnValue = Frontend.RunAction(VdcActionType.UpdateNetwork, new AddNetworkStoragePoolParameters(getEntity().getId(), network)); } else { VdcReturnValueBase tempVar = new VdcReturnValueBase(); tempVar.setSucceeded(true); returnValue = tempVar; } } if (returnValue != null && returnValue.getSucceeded()) { Guid networkId = networkModel.getIsNew() ? (Guid)returnValue.getActionReturnValue() : network.getId(); java.util.ArrayList<VDSGroup> attachNetworkToClusters = Linq.Except(networkModel.getnewClusters(), networkModel.getOriginalClusters()); java.util.ArrayList<VdcActionParametersBase> actionParameters1 = new java.util.ArrayList<VdcActionParametersBase>(); for (VDSGroup attachNetworkToCluster : attachNetworkToClusters) { network tempVar2 = new network(); tempVar2.setId(networkId); tempVar2.setname(network.getname()); actionParameters1.add((VdcActionParametersBase) new AttachNetworkToVdsGroupParameter(attachNetworkToCluster, tempVar2)); } Frontend.RunMultipleAction(VdcActionType.AttachNetworkToVdsGroup, actionParameters1); } if (returnValue != null && returnValue.getSucceeded()) { Cancel(); } networkModel.StopProgress(); } }, model); } public void DetachClusters() { ConfirmationModel confirmModel = new ConfirmationModel(); setConfirmWindow(confirmModel); confirmModel.setTitle("Detach Network from ALL Clusters"); confirmModel.setHashName("detach_network_from_all_clusters"); confirmModel.setMessage("You are about to detach the Network from all of the Clusters to which it is currentlyattached.\nAs a result, the Clusters' Hosts might become unreachable.\n\nAre you sure you want to continue?"); 
confirmModel.getLatch().setIsAvailable(true); UICommand tempVar = new UICommand("OnDetachClusters", this); tempVar.setTitle("OK"); tempVar.setIsDefault(true); confirmModel.getCommands().add(tempVar); UICommand tempVar2 = new UICommand("CancelConfirmation", this); tempVar2.setTitle("Cancel"); tempVar2.setIsCancel(true); confirmModel.getCommands().add(tempVar2); } public void CancelConfirmation() { setConfirmWindow(null); } public void OnDetachClusters() { ConfirmationModel confirmationModel = (ConfirmationModel)getConfirmWindow(); if (!confirmationModel.Validate()) { return; } DataCenterNetworkModel model = (DataCenterNetworkModel)getWindow(); network network = (network)getSelectedItem(); java.util.ArrayList<VdcActionParametersBase> actionParameters = new java.util.ArrayList<VdcActionParametersBase>(); for (SelectionTreeNodeModel selectionTreeNodeModel : model.getClusterTreeNodes()) { //C# TO JAVA CONVERTER TODO TASK: Comparisons involving nullable type instances are not converted to null-value logic: if (selectionTreeNodeModel.getIsSelectedNullable() != null && selectionTreeNodeModel.getIsSelectedNullable().equals(true)) { selectionTreeNodeModel.setIsSelectedNullable(false); actionParameters.add((VdcActionParametersBase)new AttachNetworkToVdsGroupParameter((VDSGroup)selectionTreeNodeModel.getEntity(), network)); } } java.util.ArrayList<VdcReturnValueBase> returnValueList = Frontend.RunMultipleAction(VdcActionType.DetachNetworkToVdsGroup, actionParameters); boolean isSucceded = true; for (VdcReturnValueBase vdcReturnValueBase : returnValueList) { isSucceded &= vdcReturnValueBase.getSucceeded(); } CancelConfirmation(); if (isSucceded) { model.setOriginalClusters(new java.util.ArrayList<VDSGroup>()); model.getIsEnabled().setEntity(true); model.getDetachAllAvailable().setEntity(!(Boolean)model.getIsEnabled().getEntity()); } else { Cancel(); } } public void Cancel() { setWindow(null); } @Override protected void OnSelectedItemChanged() { super.OnSelectedItemChanged(); 
UpdateActionAvailability(); } @Override protected void SelectedItemsChanged() { super.SelectedItemsChanged(); UpdateActionAvailability(); } private void UpdateActionAvailability() { java.util.List tempVar = getSelectedItems(); java.util.ArrayList selectedItems = (java.util.ArrayList)((tempVar != null) ? tempVar : new java.util.ArrayList()); boolean anyEngine = false; for (Object item : selectedItems) { network network = (network)item; if (StringHelper.stringsEqual(network.getname(), ENGINE_NETWORK)) { anyEngine = true; break; } } getEditCommand().setIsExecutionAllowed(selectedItems.size() == 1); getRemoveCommand().setIsExecutionAllowed(selectedItems.size() > 0 && !anyEngine); } @Override public void ExecuteCommand(UICommand command) { super.ExecuteCommand(command); if (command == getNewCommand()) { New(); } else if (command == getEditCommand()) { Edit(); } else if (command == getRemoveCommand()) { remove(); } else if (StringHelper.stringsEqual(command.getName(), "OnSave")) { OnSave(); } else if (StringHelper.stringsEqual(command.getName(), "Cancel")) { Cancel(); } else if (StringHelper.stringsEqual(command.getName(), "OnRemove")) { OnRemove(); } else if(StringHelper.stringsEqual(command.getName(), "DetachClusters")) { DetachClusters(); } else if(StringHelper.stringsEqual(command.getName(), "OnDetachClusters")) { OnDetachClusters(); } else if (StringHelper.stringsEqual(command.getName(), "CancelConfirmation")) { CancelConfirmation(); } } public void Executed(FrontendMultipleQueryAsyncResult result) { network network = (network)getSelectedItem(); java.util.List<VdcQueryReturnValue> returnValueList = result.getReturnValues(); DataCenterNetworkModel model = (DataCenterNetworkModel)getWindow(); java.util.ArrayList<network> clusterNetworkList = null; boolean networkHasAttachedClusters = false; for (int i = 0; i < returnValueList.size(); i++) { VdcQueryReturnValue returnValue = returnValueList.get(i); if (returnValue.getSucceeded() && returnValue.getReturnValue() != null) 
{ clusterNetworkList = (java.util.ArrayList<network>)returnValue.getReturnValue(); for (network clusterNetwork : clusterNetworkList) { if (clusterNetwork.getId().equals(network.getId())) { model.getOriginalClusters().add((VDSGroup)getSelectionNodeList().get(i).getEntity()); getSelectionNodeList().get(i).setIsSelectedNullable(true); networkHasAttachedClusters = true; break; } } } } if (networkHasAttachedClusters) { model.getIsEnabled().setEntity(false); if (!StringHelper.stringsEqual(network.getname(), ENGINE_NETWORK)) { model.getDetachAllAvailable().setEntity(!(Boolean)model.getIsEnabled().getEntity()); } } model.setClusterTreeNodes(getSelectionNodeList()); if (StringHelper.stringsEqual(network.getname(), ENGINE_NETWORK) && getSelectionNodeList().size() > 0) { UICommand tempVar = new UICommand("Cancel", this); tempVar.setTitle("Close"); tempVar.setIsDefault(true); tempVar.setIsCancel(true); model.getCommands().add(tempVar); } else { UICommand tempVar2 = new UICommand("OnSave", this); tempVar2.setTitle("OK"); tempVar2.setIsDefault(true); model.getCommands().add(tempVar2); UICommand tempVar3 = new UICommand("Cancel", this); tempVar3.setTitle("Cancel"); tempVar3.setIsCancel(true); model.getCommands().add(tempVar3); } } }
apache-2.0
janstey/fuse
fabric/fabric-openshift/src/main/java/org/fusesource/fabric/openshift/commands/support/OpenshiftConnectionListener.java
503
package org.fusesource.fabric.openshift.commands.support;

import com.openshift.client.IOpenShiftConnection;

/**
 * Holds the currently bound OpenShift connection for use by shell commands.
 * Bind/unbind are expected to be invoked by the container as the backing
 * service comes and goes (whiteboard-style injection — TODO confirm against
 * the blueprint/DS wiring that registers this listener).
 */
public class OpenshiftConnectionListener {

    private IOpenShiftConnection connection;

    /**
     * @return the currently bound connection, or {@code null} if none is bound
     */
    public IOpenShiftConnection getConnection() {
        return connection;
    }

    /**
     * Records the newly bound connection, replacing any previous one.
     */
    public void bindConnection(IOpenShiftConnection connection) {
        this.connection = connection;
    }

    /**
     * Clears the held connection, but only if the connection being unbound
     * is the one currently held. During a service swap the container may
     * bind the replacement before unbinding the old instance; clearing
     * unconditionally would discard the newer binding.
     */
    public void unbindConnection(IOpenShiftConnection connection) {
        if (this.connection == connection) {
            this.connection = null;
        }
    }
}
apache-2.0
luchuangbin/test1
src/com/mit/dstore/ui/chat/ImageBucket.java
279
package com.mit.dstore.ui.chat;

import java.util.List;

/**
 * Value object describing one photo album ("image bucket") on the device,
 * used by the chat image picker.
 *
 * @author Nana
 * @date 2014-5-9
 */
public class ImageBucket {
    // Number of images contained in this album.
    public int count = 0;
    // Display name of the album.
    public String bucketName;
    // The images belonging to this album.
    public List<ImageItem> imageList;
}
apache-2.0
kierarad/gocd
test/test-utils/src/main/java/com/thoughtworks/go/mail/SysOutStreamConsumer.java
1103
/*
 * Copyright 2019 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.mail;

import com.thoughtworks.go.util.command.StreamConsumer;
import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer;

/**
 * A {@link ProcessOutputStreamConsumer} that echoes both the standard-output
 * and standard-error streams of a process straight to {@code System.out}.
 */
public class SysOutStreamConsumer extends ProcessOutputStreamConsumer {

    public SysOutStreamConsumer() {
        // A separate echoing consumer for each of stdout and stderr.
        super(new StdOutEcho(), new StdOutEcho());
    }

    /** Writes every consumed line to {@code System.out}. */
    private static class StdOutEcho implements StreamConsumer {
        @Override
        public void consumeLine(String line) {
            System.out.println(line);
        }
    }
}
apache-2.0
apache/sanselan
src/main/java/org/apache/commons/imaging/common/RationalNumber.java
3776
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.imaging.common;

import java.text.DecimalFormat;
import java.text.NumberFormat;

/**
 * An immutable rational number (numerator / divisor) stored as two ints.
 * A divisor of zero is representable (see {@link #isValid()}) because image
 * metadata may contain such degenerate values.
 */
public class RationalNumber extends Number {

    private static final long serialVersionUID = -1;

    public final int numerator;
    public final int divisor;

    public RationalNumber(int numerator, int divisor) {
        this.numerator = numerator;
        this.divisor = divisor;
    }

    /**
     * Safer than the constructor: handles values outside the int range by
     * truncation, and reduces the fraction by its greatest common divisor.
     *
     * @param n numerator
     * @param d divisor
     * @return the reduced rational; may be invalid (0/0 input yields an
     *         invalid rational rather than throwing)
     * @throws NumberFormatException if truncation collapses the divisor to 0
     */
    public static final RationalNumber factoryMethod(long n, long d) {
        // safer than constructor - handles values outside min/max range.
        // also does some simple finding of common denominators.
        if (n > Integer.MAX_VALUE || n < Integer.MIN_VALUE
                || d > Integer.MAX_VALUE || d < Integer.MIN_VALUE) {
            while ((n > Integer.MAX_VALUE || n < Integer.MIN_VALUE
                    || d > Integer.MAX_VALUE || d < Integer.MIN_VALUE)
                    && (Math.abs(n) > 1) && (Math.abs(d) > 1)) {
                // brutal, imprecise truncation =(
                // use the sign-preserving right shift operator.
                n >>= 1;
                d >>= 1;
            }

            if (d == 0) {
                throw new NumberFormatException("Invalid value, numerator: " + n + ", divisor: " + d);
            }
        }

        // Take the absolute value so a negative gcd (possible when the inputs
        // are negative) cannot flip the signs of both components, and guard
        // against gcd == 0 (n == 0 && d == 0), which previously caused an
        // unguarded division-by-zero ArithmeticException.
        long gcd = Math.abs(gcd(n, d));
        if (gcd != 0) {
            d = d / gcd;
            n = n / gcd;
        }

        return new RationalNumber((int) n, (int) d);
    }

    /**
     * Return the greatest common divisor (Euclid's algorithm). The result
     * carries the sign of the inputs; callers normalize with Math.abs().
     */
    private static long gcd(long a, long b) {
        if (b == 0) {
            return a;
        }
        return gcd(b, a % b);
    }

    /**
     * @return a new rational with the numerator negated.
     *         NOTE: negating Integer.MIN_VALUE overflows back to itself.
     */
    public RationalNumber negate() {
        return new RationalNumber(-numerator, divisor);
    }

    @Override
    public double doubleValue() {
        return (double) numerator / (double) divisor;
    }

    @Override
    public float floatValue() {
        return (float) numerator / (float) divisor;
    }

    @Override
    public int intValue() {
        return numerator / divisor;
    }

    @Override
    public long longValue() {
        return (long) numerator / (long) divisor;
    }

    /**
     * @return false when the divisor is zero (degenerate metadata value)
     */
    public boolean isValid() {
        return divisor != 0;
    }

    @Override
    public String toString() {
        if (divisor == 0) {
            return "Invalid rational (" + numerator + "/" + divisor + ")";
        }
        // DecimalFormat is not thread-safe, so use a local instance instead
        // of a shared static field (consistent with toDisplayString()).
        NumberFormat nf = DecimalFormat.getInstance();
        if ((numerator % divisor) == 0) {
            return nf.format(numerator / divisor);
        }
        return numerator + "/" + divisor + " (" + nf.format((double) numerator / divisor) + ")";
    }

    /**
     * @return a short human-readable form: the exact integer when the
     *         fraction reduces to one, otherwise a 3-decimal approximation
     */
    public String toDisplayString() {
        if ((numerator % divisor) == 0) {
            return "" + (numerator / divisor);
        }
        NumberFormat nf = DecimalFormat.getInstance();
        nf.setMaximumFractionDigits(3);
        return nf.format((double) numerator / (double) divisor);
    }
}
apache-2.0
vincent99/cattle
code/implementation/host-api/src/main/java/io/cattle/platform/host/service/HostApiRSAKeyProvider.java
4380
package io.cattle.platform.host.service;

import io.cattle.platform.archaius.util.ArchaiusUtil;
import io.cattle.platform.core.dao.DataDao;
import io.cattle.platform.ssh.common.SshKeyGen;
import io.cattle.platform.token.CertSet;
import io.cattle.platform.token.impl.RSAKeyProvider;
import io.cattle.platform.token.impl.RSAPrivateKeyHolder;
import io.cattle.platform.util.exception.ExceptionUtils;
import io.cattle.platform.util.type.InitializationTask;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.security.KeyPair;
import java.security.PublicKey;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.security.interfaces.RSAPrivateKey;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;

import javax.inject.Inject;

import org.bouncycastle.openssl.jcajce.JcaPEMWriter;

import com.netflix.config.DynamicBooleanProperty;

/**
 * RSAKeyProvider whose key material lives in the data store: the host-api RSA
 * key pair and a root CA certificate are generated on demand, persisted
 * PEM-encoded under well-known data keys, and decoded again on each request.
 */
public class HostApiRSAKeyProvider implements RSAKeyProvider, InitializationTask {

    // When true, generate the key pair and CA certificate eagerly at startup
    // instead of lazily on first use.
    private static final DynamicBooleanProperty GEN_ON_STARTUP = ArchaiusUtil.getBoolean("host.api.keygen.on.startup");

    // Data-store keys under which the PEM-encoded key pair and CA cert are kept.
    private static final String KEY = "host.api.key";
    private static final String CERT = "host.api.key.cert";
    // Name of the single key pair this provider manages.
    private static final String DEFAULT = "default";

    DataDao dataDao;

    /**
     * @return the private half of the stored key pair wrapped with the
     *         "default" key id, or null if no key pair could be obtained
     */
    @Override
    public RSAPrivateKeyHolder getPrivateKey() {
        KeyPair kp = getKeyPair();
        if (kp == null) {
            return null;
        }
        return new RSAPrivateKeyHolder(DEFAULT, (RSAPrivateKey) kp.getPrivate());
    }

    /**
     * InitializationTask hook: optionally pre-generates the key material so
     * the first real request does not pay the generation cost.
     */
    @Override
    public void start() {
        if (GEN_ON_STARTUP.get()) {
            getPrivateKey();
            getCACertificate();
        }
    }

    /**
     * Loads the key pair from the data store, generating and persisting a new
     * one first if none exists (the Callable presumably runs only on a miss —
     * per the getOrCreate name; TODO confirm DataDao semantics).
     */
    protected KeyPair getKeyPair() {
        String encoded = dataDao.getOrCreate(KEY, false, new Callable<String>() {
            @Override
            public String call() throws Exception {
                KeyPair kp = SshKeyGen.generateKeyPair();
                return SshKeyGen.toPEM(kp);
            }
        });
        try {
            return SshKeyGen.readKeyPair(encoded);
        } catch (Exception e) {
            ExceptionUtils.throwRuntime("Failed to read key pair from PEM", e);
            // throwRuntime always throws; this return only satisfies the compiler.
            return null;
        }
    }

    /**
     * Loads the root CA certificate from the data store, creating one (signed
     * with the stored key pair) and persisting it on first use.
     */
    @Override
    public X509Certificate getCACertificate() {
        final KeyPair kp = getKeyPair();
        String encoded = dataDao.getOrCreate(CERT, false, new Callable<String>() {
            @Override
            public String call() throws Exception {
                X509Certificate cert = SshKeyGen.createRootCACert(kp);
                return SshKeyGen.toPEM(cert);
            }
        });
        try {
            return SshKeyGen.readCACert(encoded);
        } catch (Exception e) {
            ExceptionUtils.throwRuntime("Failed to CA cert from PEM", e);
            // throwRuntime always throws; this return only satisfies the compiler.
            return null;
        }
    }

    /**
     * Issues a fresh client certificate signed by the stored CA.
     *
     * @param subject certificate subject
     * @param sans    subject alternative names
     * @return the CA cert, the new client cert and its private key
     * @throws Exception if key generation or signing fails
     */
    @Override
    public CertSet generateCertificate(String subject, String... sans) throws Exception {
        KeyPair caKp = getKeyPair();
        X509Certificate caCert = getCACertificate();
        KeyPair clientKp = SshKeyGen.generateKeyPair();
        X509Certificate clientCert = SshKeyGen.generateClientCert(subject, clientKp.getPublic(), caKp.getPrivate(), caCert, sans);
        CertSet result = new CertSet(caCert, clientCert, clientKp.getPrivate());
        return result;
    }

    /**
     * @return the public key registered under the "default" id, or null
     */
    @Override
    public PublicKey getDefaultPublicKey() {
        return getPublicKeys().get(DEFAULT);
    }

    /**
     * @return a map of key id to public key; contains at most the single
     *         "default" entry for this provider
     */
    @Override
    public Map<String, PublicKey> getPublicKeys() {
        Map<String, PublicKey> result = new HashMap<>();
        KeyPair defaultKp = getKeyPair();
        if (defaultKp != null) {
            result.put(DEFAULT, defaultKp.getPublic());
        }
        return result;
    }

    public DataDao getDataDao() {
        return dataDao;
    }

    // Injection point for the persistence DAO.
    @Inject
    public void setDataDao(DataDao dataDao) {
        this.dataDao = dataDao;
    }

    /**
     * PEM-encodes a certificate to bytes using BouncyCastle's PEM writer.
     */
    @Override
    public byte[] toBytes(Certificate cert) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (JcaPEMWriter writer = new JcaPEMWriter(new OutputStreamWriter(baos))) {
            writer.writeObject(cert);
        }
        return baos.toByteArray();
    }
}
apache-2.0
apache/logging-log4j2
log4j-core/src/test/java/org/apache/logging/log4j/core/pattern/HighlightConverterTest.java
7123
package org.apache.logging.log4j.core.pattern;/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache license, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the license for the specific language governing permissions and * limitations under the license. */ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.impl.Log4jLogEvent; import org.apache.logging.log4j.message.SimpleMessage; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.*; /** * Tests the HighlightConverter. 
*/ public class HighlightConverterTest { @Test public void testAnsiEmpty() { final String[] options = {"", PatternParser.NO_CONSOLE_NO_ANSI + "=false, " + PatternParser.DISABLE_ANSI + "=false"}; final HighlightConverter converter = HighlightConverter.newInstance(null, options); assertNotNull(converter); final LogEvent event = Log4jLogEvent.newBuilder().setLevel(Level.INFO).setLoggerName("a.b.c").setMessage( new SimpleMessage("message in a bottle")).build(); final StringBuilder buffer = new StringBuilder(); converter.format(event, buffer); assertEquals("", buffer.toString()); } @Test public void testAnsiNonEmpty() { final String[] options = {"%-5level: %msg", PatternParser.NO_CONSOLE_NO_ANSI + "=false, " + PatternParser.DISABLE_ANSI + "=false"}; final HighlightConverter converter = HighlightConverter.newInstance(null, options); assertNotNull(converter); final LogEvent event = Log4jLogEvent.newBuilder().setLevel(Level.INFO).setLoggerName("a.b.c").setMessage( new SimpleMessage("message in a bottle")).build(); final StringBuilder buffer = new StringBuilder(); converter.format(event, buffer); assertEquals("\u001B[32mINFO : message in a bottle\u001B[m", buffer.toString()); } @Test public void testLevelNamesBad() { final String colorName = "red"; final String[] options = { "%-5level: %msg", PatternParser.NO_CONSOLE_NO_ANSI + "=false, " + PatternParser.DISABLE_ANSI + "=false, " + "BAD_LEVEL_A=" + colorName + ", BAD_LEVEL_B=" + colorName }; final HighlightConverter converter = HighlightConverter.newInstance(null, options); assertNotNull(converter); assertNotNull(converter.getLevelStyle(Level.TRACE)); assertNotNull(converter.getLevelStyle(Level.DEBUG)); } @Test public void testLevelNamesGood() { final String colorName = "red"; final String[] options = { "%-5level: %msg", PatternParser.NO_CONSOLE_NO_ANSI + "=false, " + PatternParser.DISABLE_ANSI + "=false, " + "DEBUG=" + colorName + ", TRACE=" + colorName }; final HighlightConverter converter = 
HighlightConverter.newInstance(null, options); assertNotNull(converter); assertEquals(AnsiEscape.createSequence(colorName), converter.getLevelStyle(Level.TRACE)); assertEquals(AnsiEscape.createSequence(colorName), converter.getLevelStyle(Level.DEBUG)); } @Test public void testLevelNamesUnknown() { final String colorName = "blue"; final String[] options = { "%level", PatternParser.NO_CONSOLE_NO_ANSI + "=false, " + PatternParser.DISABLE_ANSI + "=false, " + "DEBUG=" + colorName + ", CUSTOM1=" + colorName }; final HighlightConverter converter = HighlightConverter.newInstance(null, options); assertNotNull(converter); assertNotNull(converter.getLevelStyle(Level.INFO)); assertNotNull(converter.getLevelStyle(Level.DEBUG)); assertNotNull(converter.getLevelStyle(Level.forName("CUSTOM1", 412))); assertNull(converter.getLevelStyle(Level.forName("CUSTOM2", 512))); assertArrayEquals(new byte[] { 27, '[', '3', '4', 'm', 'D', 'E', 'B', 'U', 'G', 27, '[', 'm' }, toFormattedCharSeq(converter, Level.DEBUG).toString().getBytes()); assertArrayEquals(new byte[] { 27, '[', '3', '2', 'm', 'I', 'N', 'F', 'O', 27, '[', 'm' }, toFormattedCharSeq(converter, Level.INFO).toString().getBytes()); assertArrayEquals(new byte[] { 27, '[', '3', '4', 'm', 'C', 'U', 'S', 'T', 'O', 'M', '1', 27, '[', 'm' }, toFormattedCharSeq(converter, Level.forName("CUSTOM1", 412)).toString().getBytes()); assertArrayEquals(new byte[] { 'C', 'U', 'S', 'T', 'O', 'M', '2' }, toFormattedCharSeq(converter, Level.forName("CUSTOM2", 512)).toString().getBytes()); } @Test public void testLevelNamesNone() { final String[] options = { "%-5level: %msg", PatternParser.NO_CONSOLE_NO_ANSI + "=false, " + PatternParser.DISABLE_ANSI + "=false" }; final HighlightConverter converter = HighlightConverter.newInstance(null, options); assertNotNull(converter); assertNotNull(converter.getLevelStyle(Level.TRACE)); assertNotNull(converter.getLevelStyle(Level.DEBUG)); } @Test public void testNoAnsiEmpty() { final String[] options = {"", 
PatternParser.DISABLE_ANSI + "=true"}; final HighlightConverter converter = HighlightConverter.newInstance(null, options); assertNotNull(converter); final LogEvent event = Log4jLogEvent.newBuilder().setLevel(Level.INFO).setLoggerName("a.b.c").setMessage( new SimpleMessage("message in a bottle")).build(); final StringBuilder buffer = new StringBuilder(); converter.format(event, buffer); assertEquals("", buffer.toString()); } @Test public void testNoAnsiNonEmpty() { final String[] options = {"%-5level: %msg", PatternParser.DISABLE_ANSI + "=true"}; final HighlightConverter converter = HighlightConverter.newInstance(null, options); assertNotNull(converter); final LogEvent event = Log4jLogEvent.newBuilder().setLevel(Level.INFO).setLoggerName("a.b.c").setMessage( new SimpleMessage("message in a bottle")).build(); final StringBuilder buffer = new StringBuilder(); converter.format(event, buffer); assertEquals("INFO : message in a bottle", buffer.toString()); } private CharSequence toFormattedCharSeq(final HighlightConverter converter, final Level level) { final StringBuilder sb = new StringBuilder(); converter.format(Log4jLogEvent.newBuilder().setLevel(level).build(), sb); return sb; } }
apache-2.0
apache/openejb
container/openejb-jee/src/main/java/org/apache/openejb/jee/jba/cmp/Where.java
2101
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.openejb.jee.jba.cmp; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.XmlValue; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "content" }) @XmlRootElement(name = "where") public class Where { @XmlValue protected String content; /** * Gets the value of the content property. * * @return * possible object is * {@link String } * */ public String getContent() { return content; } /** * Sets the value of the content property. * * @param value * allowed object is * {@link String } * */ public void setContent(String value) { this.content = value; } }
apache-2.0
dmgcodevil/Hystrix
hystrix-core/src/test/java/com/netflix/hystrix/CommonHystrixCommandTests.java
39912
/** * Copyright 2015 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.hystrix; import com.netflix.hystrix.HystrixCommandProperties.ExecutionIsolationStrategy; import com.netflix.hystrix.AbstractTestHystrixCommand.CacheEnabled; import com.netflix.hystrix.AbstractTestHystrixCommand.ExecutionResult; import com.netflix.hystrix.AbstractTestHystrixCommand.FallbackResult; import com.netflix.hystrix.exception.HystrixBadRequestException; import com.netflix.hystrix.strategy.HystrixPlugins; import com.netflix.hystrix.strategy.concurrency.HystrixContextScheduler; import com.netflix.hystrix.strategy.properties.HystrixProperty; import org.junit.Test; import rx.Observable; import rx.Scheduler; import rx.Subscriber; import rx.functions.Action1; import rx.functions.Func0; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.SynchronousQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import static org.junit.Assert.*; /** * Place to share code and tests between {@link HystrixCommandTest} and {@link HystrixObservableCommandTest}. 
* @param <C> */ public abstract class CommonHystrixCommandTests<C extends AbstractTestHystrixCommand<Integer>> { /** * Run the command in multiple modes and check that the hook assertions hold in each and that the command succeeds * @param ctor {@link AbstractTestHystrixCommand} constructor * @param assertion sequence of assertions to check after the command has completed */ abstract void assertHooksOnSuccess(Func0<C> ctor, Action1<C> assertion); /** * Run the command in multiple modes and check that the hook assertions hold in each and that the command fails * @param ctor {@link AbstractTestHystrixCommand} constructor * @param assertion sequence of assertions to check after the command has completed */ abstract void assertHooksOnFailure(Func0<C> ctor, Action1<C> assertion); /** * Run the command in multiple modes and check that the hook assertions hold in each and that the command fails * @param ctor {@link AbstractTestHystrixCommand} constructor * @param assertion sequence of assertions to check after the command has completed */ abstract void assertHooksOnFailure(Func0<C> ctor, Action1<C> assertion, boolean failFast); /** * Run the command in multiple modes and check that the hook assertions hold in each and that the command fails as soon as possible * @param ctor {@link AbstractTestHystrixCommand} constructor * @param assertion sequence of assertions to check after the command has completed */ protected void assertHooksOnFailFast(Func0<C> ctor, Action1<C> assertion) { assertHooksOnFailure(ctor, assertion, true); } /** * Run the command via {@link com.netflix.hystrix.HystrixCommand#observe()}, immediately block and then assert * @param command command to run * @param assertion assertions to check * @param isSuccess should the command succeed? 
*/ protected void assertBlockingObserve(C command, Action1<C> assertion, boolean isSuccess) { System.out.println("Running command.observe(), immediately blocking and then running assertions..."); if (isSuccess) { try { command.observe().toList().toBlocking().single(); } catch (Exception ex) { throw new RuntimeException(ex); } } else { try { command.observe().toList().toBlocking().single(); fail("Expected a command failure!"); } catch (Exception ex) { System.out.println("Received expected ex : " + ex); ex.printStackTrace(); } } assertion.call(command); } /** * Run the command via {@link com.netflix.hystrix.HystrixCommand#observe()}, let the {@link rx.Observable} terminal * states unblock a {@link java.util.concurrent.CountDownLatch} and then assert * @param command command to run * @param assertion assertions to check * @param isSuccess should the command succeed? */ protected void assertNonBlockingObserve(C command, Action1<C> assertion, boolean isSuccess) { System.out.println("Running command.observe(), awaiting terminal state of Observable, then running assertions..."); final CountDownLatch latch = new CountDownLatch(1); Observable<Integer> o = command.observe(); o.subscribe(new Subscriber<Integer>() { @Override public void onCompleted() { latch.countDown(); } @Override public void onError(Throwable e) { latch.countDown(); } @Override public void onNext(Integer i) { //do nothing } }); try { latch.await(3, TimeUnit.SECONDS); assertion.call(command); } catch (Exception e) { throw new RuntimeException(e); } if (isSuccess) { try { o.toList().toBlocking().single(); } catch (Exception ex) { throw new RuntimeException(ex); } } else { try { o.toList().toBlocking().single(); fail("Expected a command failure!"); } catch (Exception ex) { System.out.println("Received expected ex : " + ex); ex.printStackTrace(); } } } protected void assertSaneHystrixRequestLog(final int numCommands) { HystrixRequestLog currentRequestLog = HystrixRequestLog.getCurrentRequest(); try { 
assertEquals(numCommands, currentRequestLog.getAllExecutedCommands().size()); assertFalse(currentRequestLog.getExecutedCommandsAsString().contains("Executed")); assertTrue(currentRequestLog.getAllExecutedCommands().iterator().next().getExecutionEvents().size() >= 1); //Most commands should have 1 execution event, but fallbacks / responses from cache can cause more than 1. They should never have 0 } catch (Throwable ex) { System.out.println("Problematic Request log : " + currentRequestLog.getExecutedCommandsAsString() + " , expected : " + numCommands); throw new RuntimeException(ex); } } protected void assertCommandExecutionEvents(HystrixInvokableInfo<?> command, HystrixEventType... expectedEventTypes) { boolean emitExpected = false; int expectedEmitCount = 0; boolean fallbackEmitExpected = false; int expectedFallbackEmitCount = 0; List<HystrixEventType> condensedEmitExpectedEventTypes = new ArrayList<HystrixEventType>(); for (HystrixEventType expectedEventType: expectedEventTypes) { if (expectedEventType.equals(HystrixEventType.EMIT)) { if (!emitExpected) { //first EMIT encountered, add it to condensedEmitExpectedEventTypes condensedEmitExpectedEventTypes.add(HystrixEventType.EMIT); } emitExpected = true; expectedEmitCount++; } else if (expectedEventType.equals(HystrixEventType.FALLBACK_EMIT)) { if (!fallbackEmitExpected) { //first FALLBACK_EMIT encountered, add it to condensedEmitExpectedEventTypes condensedEmitExpectedEventTypes.add(HystrixEventType.FALLBACK_EMIT); } fallbackEmitExpected = true; expectedFallbackEmitCount++; } else { condensedEmitExpectedEventTypes.add(expectedEventType); } } List<HystrixEventType> actualEventTypes = command.getExecutionEvents(); assertEquals(expectedEmitCount, command.getNumberEmissions()); assertEquals(expectedFallbackEmitCount, command.getNumberFallbackEmissions()); assertEquals(condensedEmitExpectedEventTypes, actualEventTypes); } /** * Threadpool with 1 thread, queue of size 1 */ protected static class 
SingleThreadedPoolWithQueue implements HystrixThreadPool { final LinkedBlockingQueue<Runnable> queue; final ThreadPoolExecutor pool; private final int rejectionQueueSizeThreshold; public SingleThreadedPoolWithQueue(int queueSize) { this(queueSize, 100); } public SingleThreadedPoolWithQueue(int queueSize, int rejectionQueueSizeThreshold) { queue = new LinkedBlockingQueue<Runnable>(queueSize); pool = new ThreadPoolExecutor(1, 1, 1, TimeUnit.MINUTES, queue); this.rejectionQueueSizeThreshold = rejectionQueueSizeThreshold; } @Override public ThreadPoolExecutor getExecutor() { return pool; } @Override public Scheduler getScheduler() { return new HystrixContextScheduler(HystrixPlugins.getInstance().getConcurrencyStrategy(), this); } @Override public Scheduler getScheduler(Func0<Boolean> shouldInterruptThread) { return new HystrixContextScheduler(HystrixPlugins.getInstance().getConcurrencyStrategy(), this, shouldInterruptThread); } @Override public void markThreadExecution() { // not used for this test } @Override public void markThreadCompletion() { // not used for this test } @Override public void markThreadRejection() { // not used for this test } @Override public boolean isQueueSpaceAvailable() { return queue.size() < rejectionQueueSizeThreshold; } } /** * Threadpool with 1 thread, queue of size 1 */ protected static class SingleThreadedPoolWithNoQueue implements HystrixThreadPool { final SynchronousQueue<Runnable> queue; final ThreadPoolExecutor pool; public SingleThreadedPoolWithNoQueue() { queue = new SynchronousQueue<Runnable>(); pool = new ThreadPoolExecutor(1, 1, 1, TimeUnit.MINUTES, queue); } @Override public ThreadPoolExecutor getExecutor() { return pool; } @Override public Scheduler getScheduler() { return new HystrixContextScheduler(HystrixPlugins.getInstance().getConcurrencyStrategy(), this); } @Override public Scheduler getScheduler(Func0<Boolean> shouldInterruptThread) { return new 
HystrixContextScheduler(HystrixPlugins.getInstance().getConcurrencyStrategy(), this, shouldInterruptThread); } @Override public void markThreadExecution() { // not used for this test } @Override public void markThreadCompletion() { // not used for this test } @Override public void markThreadRejection() { // not used for this test } @Override public boolean isQueueSpaceAvailable() { return true; //let the thread pool reject } } /** ********************* SEMAPHORE-ISOLATED Execution Hook Tests *********************************** */ /** * Short-circuit? : NO * Thread/semaphore: SEMAPHORE * Semaphore Permit reached? : NO * Execution Result: SUCCESS */ @Test public void testExecutionHookSemaphoreSuccess() { assertHooksOnSuccess( new Func0<C>() { @Override public C call() { return getCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.SUCCESS, FallbackResult.SUCCESS); } }, new Action1<C>() { @Override public void call(C command) { TestableExecutionHook hook = command.getBuilder().executionHook; assertTrue(hook.commandEmissionsMatch(1, 0, 1)); assertTrue(hook.executionEventsMatch(1, 0, 1)); assertTrue(hook.fallbackEventsMatch(0, 0, 0)); assertEquals("onStart - !onRunStart - onExecutionStart - onExecutionEmit - !onRunSuccess - !onComplete - onEmit - onExecutionSuccess - onSuccess - ", hook.executionSequence.toString()); } }); } /** * Short-circuit? : NO * Thread/semaphore: SEMAPHORE * Semaphore Permit reached? 
: NO * Execution Result: synchronous HystrixBadRequestException */ @Test public void testExecutionHookSemaphoreBadRequestException() { assertHooksOnFailure( new Func0<C>() { @Override public C call() { return getCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.BAD_REQUEST, FallbackResult.SUCCESS); } }, new Action1<C>() { @Override public void call(C command) { TestableExecutionHook hook = command.getBuilder().executionHook; assertTrue(hook.commandEmissionsMatch(0, 1, 0)); assertTrue(hook.executionEventsMatch(0, 1, 0)); assertTrue(hook.fallbackEventsMatch(0, 0, 0)); assertEquals(HystrixBadRequestException.class, hook.getCommandException().getClass()); assertEquals(HystrixBadRequestException.class, hook.getExecutionException().getClass()); assertEquals("onStart - !onRunStart - onExecutionStart - onExecutionError - !onRunError - onError - ", hook.executionSequence.toString()); } }); } /** * Short-circuit? : NO * Thread/semaphore: SEMAPHORE * Semaphore Permit reached? : NO * Execution Result: synchronous HystrixRuntimeException * Fallback: UnsupportedOperationException */ @Test public void testExecutionHookSemaphoreExceptionNoFallback() { assertHooksOnFailure( new Func0<C>() { @Override public C call() { return getCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.FAILURE, FallbackResult.UNIMPLEMENTED); } }, new Action1<C>() { @Override public void call(C command) { TestableExecutionHook hook = command.getBuilder().executionHook; assertTrue(hook.commandEmissionsMatch(0, 1, 0)); assertTrue(hook.executionEventsMatch(0, 1, 0)); assertTrue(hook.fallbackEventsMatch(0, 0, 0)); assertEquals(RuntimeException.class, hook.getCommandException().getClass()); assertEquals(RuntimeException.class, hook.getExecutionException().getClass()); assertNull(hook.getFallbackException()); assertEquals("onStart - !onRunStart - onExecutionStart - onExecutionError - !onRunError - onError - ", hook.executionSequence.toString()); } }); } /** * Short-circuit? 
: NO * Thread/semaphore: SEMAPHORE * Semaphore Permit reached? : NO * Execution Result: synchronous HystrixRuntimeException * Fallback: SUCCESS */ @Test public void testExecutionHookSemaphoreExceptionSuccessfulFallback() { assertHooksOnSuccess( new Func0<C>() { @Override public C call() { return getCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.FAILURE, FallbackResult.SUCCESS); } }, new Action1<C>() { @Override public void call(C command) { TestableExecutionHook hook = command.getBuilder().executionHook; assertTrue(hook.commandEmissionsMatch(1, 0, 1)); assertTrue(hook.executionEventsMatch(0, 1, 0)); assertTrue(hook.fallbackEventsMatch(1, 0, 1)); assertEquals(RuntimeException.class, hook.getExecutionException().getClass()); assertEquals("onStart - !onRunStart - onExecutionStart - onExecutionError - !onRunError - onFallbackStart - onFallbackEmit - !onFallbackSuccess - !onComplete - onEmit - onFallbackSuccess - onSuccess - ", hook.executionSequence.toString()); } }); } /** * Short-circuit? : NO * Thread/semaphore: SEMAPHORE * Semaphore Permit reached? 
: NO * Execution Result: synchronous HystrixRuntimeException * Fallback: synchronous HystrixRuntimeException */ @Test public void testExecutionHookSemaphoreExceptionUnsuccessfulFallback() { assertHooksOnFailure( new Func0<C>() { @Override public C call() { return getCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.FAILURE, FallbackResult.FAILURE); } }, new Action1<C>() { @Override public void call(C command) { TestableExecutionHook hook = command.getBuilder().executionHook; assertTrue(hook.commandEmissionsMatch(0, 1, 0)); assertTrue(hook.executionEventsMatch(0, 1, 0)); assertTrue(hook.fallbackEventsMatch(0, 1, 0)); assertEquals(RuntimeException.class, hook.getCommandException().getClass()); assertEquals(RuntimeException.class, hook.getExecutionException().getClass()); assertEquals(RuntimeException.class, hook.getFallbackException().getClass()); assertEquals("onStart - !onRunStart - onExecutionStart - onExecutionError - !onRunError - onFallbackStart - onFallbackError - onError - ", hook.executionSequence.toString()); } }); } /** * Short-circuit? : NO * Thread/semaphore: SEMAPHORE * Semaphore Permit reached? 
: YES * Fallback: UnsupportedOperationException */ @Test public void testExecutionHookSemaphoreRejectedNoFallback() { assertHooksOnFailFast( new Func0<C>() { @Override public C call() { AbstractCommand.TryableSemaphore semaphore = new AbstractCommand.TryableSemaphoreActual(HystrixProperty.Factory.asProperty(2)); final C cmd1 = getLatentCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.SUCCESS, 500, FallbackResult.UNIMPLEMENTED, semaphore); final C cmd2 = getLatentCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.SUCCESS, 500, FallbackResult.UNIMPLEMENTED, semaphore); //saturate the semaphore new Thread() { @Override public void run() { cmd1.observe(); } }.start(); new Thread() { @Override public void run() { cmd2.observe(); } }.start(); try { //give the saturating threads a chance to run before we run the command we want to get rejected Thread.sleep(200); } catch (InterruptedException ie) { throw new RuntimeException(ie); } return getLatentCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.SUCCESS, 500, FallbackResult.UNIMPLEMENTED, semaphore); } }, new Action1<C>() { @Override public void call(C command) { TestableExecutionHook hook = command.getBuilder().executionHook; assertTrue(hook.commandEmissionsMatch(0, 1, 0)); assertTrue(hook.executionEventsMatch(0, 0, 0)); assertTrue(hook.fallbackEventsMatch(0, 0, 0)); assertEquals(RuntimeException.class, hook.getCommandException().getClass()); assertNull(hook.getFallbackException()); assertEquals("onStart - onError - ", hook.executionSequence.toString()); } }); } /** * Short-circuit? : NO * Thread/semaphore: SEMAPHORE * Semaphore Permit reached? 
: YES * Fallback: SUCCESS */ @Test public void testExecutionHookSemaphoreRejectedSuccessfulFallback() { assertHooksOnSuccess( new Func0<C>() { @Override public C call() { AbstractCommand.TryableSemaphore semaphore = new AbstractCommand.TryableSemaphoreActual(HystrixProperty.Factory.asProperty(2)); final C cmd1 = getLatentCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.SUCCESS, 1500, FallbackResult.SUCCESS, semaphore); final C cmd2 = getLatentCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.SUCCESS, 1500, FallbackResult.SUCCESS, semaphore); //saturate the semaphore new Thread() { @Override public void run() { cmd1.observe(); } }.start(); new Thread() { @Override public void run() { cmd2.observe(); } }.start(); try { //give the saturating threads a chance to run before we run the command we want to get rejected Thread.sleep(200); } catch (InterruptedException ie) { throw new RuntimeException(ie); } return getLatentCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.SUCCESS, 500, FallbackResult.SUCCESS, semaphore); } }, new Action1<C>() { @Override public void call(C command) { TestableExecutionHook hook = command.getBuilder().executionHook; assertTrue(hook.commandEmissionsMatch(1, 0, 1)); assertTrue(hook.executionEventsMatch(0, 0, 0)); assertTrue(hook.fallbackEventsMatch(1, 0, 1)); assertEquals("onStart - onFallbackStart - onFallbackEmit - !onFallbackSuccess - !onComplete - onEmit - onFallbackSuccess - onSuccess - ", hook.executionSequence.toString()); } }); } /** * Short-circuit? : NO * Thread/semaphore: SEMAPHORE * Semaphore Permit reached? 
: YES * Fallback: synchronous HystrixRuntimeException */ @Test public void testExecutionHookSemaphoreRejectedUnsuccessfulFallback() { assertHooksOnFailFast( new Func0<C>() { @Override public C call() { AbstractCommand.TryableSemaphore semaphore = new AbstractCommand.TryableSemaphoreActual(HystrixProperty.Factory.asProperty(2)); final C cmd1 = getLatentCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.SUCCESS, 500, FallbackResult.FAILURE, semaphore); final C cmd2 = getLatentCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.SUCCESS, 500, FallbackResult.FAILURE, semaphore); //saturate the semaphore new Thread() { @Override public void run() { cmd1.observe(); } }.start(); new Thread() { @Override public void run() { cmd2.observe(); } }.start(); try { //give the saturating threads a chance to run before we run the command we want to get rejected Thread.sleep(200); } catch (InterruptedException ie) { throw new RuntimeException(ie); } return getLatentCommand(ExecutionIsolationStrategy.SEMAPHORE, ExecutionResult.SUCCESS, 500, FallbackResult.FAILURE, semaphore); } }, new Action1<C>() { @Override public void call(C command) { TestableExecutionHook hook = command.getBuilder().executionHook; assertTrue(hook.commandEmissionsMatch(0, 1, 0)); assertTrue(hook.executionEventsMatch(0, 0, 0)); assertTrue(hook.fallbackEventsMatch(0, 1, 0)); assertEquals(RuntimeException.class, hook.getCommandException().getClass()); assertEquals(RuntimeException.class, hook.getFallbackException().getClass()); assertEquals("onStart - onFallbackStart - onFallbackError - onError - ", hook.executionSequence.toString()); } }); } /** * Short-circuit? 
: YES * Thread/semaphore: SEMAPHORE * Fallback: UnsupportedOperationException */ @Test public void testExecutionHookSemaphoreShortCircuitNoFallback() { assertHooksOnFailFast( new Func0<C>() { @Override public C call() { return getCircuitOpenCommand(ExecutionIsolationStrategy.SEMAPHORE, FallbackResult.UNIMPLEMENTED); } }, new Action1<C>() { @Override public void call(C command) { TestableExecutionHook hook = command.getBuilder().executionHook; assertTrue(hook.commandEmissionsMatch(0, 1, 0)); assertTrue(hook.executionEventsMatch(0, 0, 0)); assertTrue(hook.fallbackEventsMatch(0, 0, 0)); assertEquals(RuntimeException.class, hook.getCommandException().getClass()); assertNull(hook.getFallbackException()); assertEquals("onStart - onError - ", hook.executionSequence.toString()); } }); } /** * Short-circuit? : YES * Thread/semaphore: SEMAPHORE * Fallback: SUCCESS */ @Test public void testExecutionHookSemaphoreShortCircuitSuccessfulFallback() { assertHooksOnSuccess( new Func0<C>() { @Override public C call() { return getCircuitOpenCommand(ExecutionIsolationStrategy.SEMAPHORE, FallbackResult.SUCCESS); } }, new Action1<C>() { @Override public void call(C command) { TestableExecutionHook hook = command.getBuilder().executionHook; assertTrue(hook.commandEmissionsMatch(1, 0, 1)); assertTrue(hook.executionEventsMatch(0, 0, 0)); assertTrue(hook.fallbackEventsMatch(1, 0, 1)); assertEquals("onStart - onFallbackStart - onFallbackEmit - !onFallbackSuccess - !onComplete - onEmit - onFallbackSuccess - onSuccess - ", hook.executionSequence.toString()); } }); } /** * Short-circuit? 
: YES
 * Thread/semaphore: SEMAPHORE
 * Fallback: synchronous HystrixRuntimeException
 */
@Test
public void testExecutionHookSemaphoreShortCircuitUnsuccessfulFallback() {
    assertHooksOnFailFast(
            new Func0<C>() {
                @Override
                public C call() {
                    // Open circuit forces a fail-fast; fallback itself then fails.
                    return getCircuitOpenCommand(ExecutionIsolationStrategy.SEMAPHORE, FallbackResult.FAILURE);
                }
            },
            new Action1<C>() {
                @Override
                public void call(C command) {
                    TestableExecutionHook hook = command.getBuilder().executionHook;
                    // No execution events: the command was short-circuited before running.
                    assertTrue(hook.commandEmissionsMatch(0, 1, 0));
                    assertTrue(hook.executionEventsMatch(0, 0, 0));
                    assertTrue(hook.fallbackEventsMatch(0, 1, 0));
                    assertEquals(RuntimeException.class, hook.getCommandException().getClass());
                    assertEquals(RuntimeException.class, hook.getFallbackException().getClass());
                    assertEquals("onStart - onFallbackStart - onFallbackError - onError - ", hook.executionSequence.toString());
                }
            });
}

/** ********************* END SEMAPHORE-ISOLATED Execution Hook Tests *********************************** */

/**
 * Abstract methods defining a way to instantiate each of the described commands.
 * {@link HystrixCommandTest} and {@link HystrixObservableCommandTest} should each provide concrete impls for
 * {@link HystrixCommand}s and {@link HystrixObservableCommand}s, respectively.
 */

// Convenience overload: unimplemented fallback, no latency.
C getCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult) {
    return getCommand(isolationStrategy, executionResult, FallbackResult.UNIMPLEMENTED);
}

// Convenience overload: zero execution latency.
C getCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, FallbackResult fallbackResult) {
    return getCommand(isolationStrategy, executionResult, 0, fallbackResult);
}

// Convenience overload: fresh circuit breaker, timeout derived from the execution latency
// ((latency * 2) + 200ms) so slow executions do not spuriously time out.
C getCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult) {
    return getCommand(isolationStrategy, executionResult, executionLatency, fallbackResult, 0, new HystrixCircuitBreakerTest.TestCircuitBreaker(), null, (executionLatency * 2) + 200, CacheEnabled.NO, "foo", 10, 10);
}

// Convenience overload: explicit timeout, fresh circuit breaker.
C getCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult, int timeout) {
    return getCommand(isolationStrategy, executionResult, executionLatency, fallbackResult, 0, new HystrixCircuitBreakerTest.TestCircuitBreaker(), null, timeout, CacheEnabled.NO, "foo", 10, 10);
}

// Circuit breaker enabled (circuitBreakerDisabled defaults to false).
C getCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult, int fallbackLatency, HystrixCircuitBreakerTest.TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout, CacheEnabled cacheEnabled, Object value, int executionSemaphoreCount, int fallbackSemaphoreCount) {
    return getCommand(isolationStrategy, executionResult, executionLatency, fallbackResult, fallbackLatency, circuitBreaker, threadPool, timeout, cacheEnabled, value, executionSemaphoreCount, fallbackSemaphoreCount, false);
}

// Keyed variant: builds real semaphores from the supplied counts before delegating
// to the abstract keyed factory.
C getCommand(HystrixCommandKey key, ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult, int fallbackLatency, HystrixCircuitBreakerTest.TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout, CacheEnabled cacheEnabled, Object value, int executionSemaphoreCount, int fallbackSemaphoreCount) {
    AbstractCommand.TryableSemaphoreActual executionSemaphore = new AbstractCommand.TryableSemaphoreActual(HystrixProperty.Factory.asProperty(executionSemaphoreCount));
    AbstractCommand.TryableSemaphoreActual fallbackSemaphore = new AbstractCommand.TryableSemaphoreActual(HystrixProperty.Factory.asProperty(fallbackSemaphoreCount));
    return getCommand(key, isolationStrategy, executionResult, executionLatency, fallbackResult, fallbackLatency, circuitBreaker, threadPool, timeout, cacheEnabled, value, executionSemaphore, fallbackSemaphore, false);
}

// Builds real semaphores from counts, then delegates to the abstract un-keyed factory.
C getCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult, int fallbackLatency, HystrixCircuitBreakerTest.TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout, CacheEnabled cacheEnabled, Object value, int executionSemaphoreCount, int fallbackSemaphoreCount, boolean circuitBreakerDisabled) {
    AbstractCommand.TryableSemaphoreActual executionSemaphore = new AbstractCommand.TryableSemaphoreActual(HystrixProperty.Factory.asProperty(executionSemaphoreCount));
    AbstractCommand.TryableSemaphoreActual fallbackSemaphore = new AbstractCommand.TryableSemaphoreActual(HystrixProperty.Factory.asProperty(fallbackSemaphoreCount));
    return getCommand(isolationStrategy, executionResult, executionLatency, fallbackResult, fallbackLatency, circuitBreaker, threadPool, timeout, cacheEnabled, value, executionSemaphore, fallbackSemaphore, circuitBreakerDisabled);
}

// Variant that accepts a caller-supplied fallback semaphore but builds the execution
// semaphore from a count.
C getCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult, int fallbackLatency, HystrixCircuitBreakerTest.TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout, CacheEnabled cacheEnabled, Object value, int executionSemaphoreCount, AbstractCommand.TryableSemaphore fallbackSemaphore, boolean circuitBreakerDisabled) {
    AbstractCommand.TryableSemaphoreActual executionSemaphore = new AbstractCommand.TryableSemaphoreActual(HystrixProperty.Factory.asProperty(executionSemaphoreCount));
    return getCommand(isolationStrategy, executionResult, executionLatency, fallbackResult, fallbackLatency, circuitBreaker, threadPool, timeout, cacheEnabled, value, executionSemaphore, fallbackSemaphore, circuitBreakerDisabled);
}

// Concrete subclasses supply the actual command construction (HystrixCommand vs
// HystrixObservableCommand).
abstract C getCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult, int fallbackLatency, HystrixCircuitBreakerTest.TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout, CacheEnabled cacheEnabled, Object value, AbstractCommand.TryableSemaphore executionSemaphore, AbstractCommand.TryableSemaphore fallbackSemaphore, boolean circuitBreakerDisabled);

abstract C getCommand(HystrixCommandKey commandKey, ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult, int fallbackLatency, HystrixCircuitBreakerTest.TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout, CacheEnabled cacheEnabled, Object value, AbstractCommand.TryableSemaphore executionSemaphore, AbstractCommand.TryableSemaphore fallbackSemaphore, boolean circuitBreakerDisabled);

// Latent command with an explicit timeout and a fresh circuit breaker.
C getLatentCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult, int timeout) {
    return getCommand(isolationStrategy, executionResult, executionLatency, fallbackResult, 0, new HystrixCircuitBreakerTest.TestCircuitBreaker(), null, timeout, CacheEnabled.NO, "foo", 10, 10);
}

// Latent command sharing a caller-supplied circuit breaker / thread pool.
C getLatentCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult, HystrixCircuitBreakerTest.TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout) {
    return getCommand(isolationStrategy, executionResult, executionLatency, fallbackResult, 0, circuitBreaker, threadPool, timeout, CacheEnabled.NO, "foo", 10, 10);
}

// Latent command with a caller-supplied execution semaphore (fallback semaphore of 10);
// timeout again derived as (latency * 2) + 200ms.
C getLatentCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult, AbstractCommand.TryableSemaphore executionSemaphore) {
    AbstractCommand.TryableSemaphoreActual fallbackSemaphore = new AbstractCommand.TryableSemaphoreActual(HystrixProperty.Factory.asProperty(10));
    return getCommand(isolationStrategy, executionResult, executionLatency, fallbackResult, 0, new HystrixCircuitBreakerTest.TestCircuitBreaker(), null, (executionLatency * 2) + 200, CacheEnabled.NO, "foo", executionSemaphore, fallbackSemaphore, false);
}

// Command whose circuit breaker is forced open so it fails fast without executing.
C getCircuitOpenCommand(ExecutionIsolationStrategy isolationStrategy, FallbackResult fallbackResult) {
    HystrixCircuitBreakerTest.TestCircuitBreaker openCircuit = new HystrixCircuitBreakerTest.TestCircuitBreaker().setForceShortCircuit(true);
    return getCommand(isolationStrategy, ExecutionResult.SUCCESS, 0, fallbackResult, 0, openCircuit, null, 500, CacheEnabled.NO, "foo", 10, 10, false);
}

// Failing command wired to a shared circuit breaker (for cross-command breaker tests).
C getSharedCircuitBreakerCommand(HystrixCommandKey commandKey, ExecutionIsolationStrategy isolationStrategy, FallbackResult fallbackResult, HystrixCircuitBreakerTest.TestCircuitBreaker circuitBreaker) {
    return getCommand(commandKey, isolationStrategy, ExecutionResult.FAILURE, 0, fallbackResult, 0, circuitBreaker, null, 500, CacheEnabled.NO, "foo", 10, 10);
}

// Command with circuit-breaker logic disabled entirely.
C getCircuitBreakerDisabledCommand(ExecutionIsolationStrategy isolationStrategy, ExecutionResult executionResult) {
    return getCommand(isolationStrategy, executionResult, 0, FallbackResult.UNIMPLEMENTED, 0, new HystrixCircuitBreakerTest.TestCircuitBreaker(), null, 500, CacheEnabled.NO, "foo", 10, 10, true);
}

// Command that throws an error Hystrix treats as recoverable (fallback may run).
C getRecoverableErrorCommand(ExecutionIsolationStrategy isolationStrategy, FallbackResult fallbackResult) {
    return getCommand(isolationStrategy, ExecutionResult.RECOVERABLE_ERROR, 0, fallbackResult);
}

// Command that throws an error Hystrix treats as unrecoverable.
C getUnrecoverableErrorCommand(ExecutionIsolationStrategy isolationStrategy, FallbackResult fallbackResult) {
    return getCommand(isolationStrategy, ExecutionResult.UNRECOVERABLE_ERROR, 0, fallbackResult);
}
}
apache-2.0
CC4401-TeraCity/TeraCity
engine/src/main/java/org/terasology/logic/particles/BlockParticleEmitterSystem.java
15781
/*
 * Copyright 2013 MovingBlocks
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.terasology.logic.particles;

import org.lwjgl.BufferUtils;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL13;
import org.terasology.asset.Assets;
import org.terasology.config.Config;
import org.terasology.entitySystem.entity.EntityManager;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.entitySystem.entity.lifecycleEvents.BeforeDeactivateComponent;
import org.terasology.entitySystem.entity.lifecycleEvents.OnActivatedComponent;
import org.terasology.entitySystem.event.ReceiveEvent;
import org.terasology.entitySystem.systems.BaseComponentSystem;
import org.terasology.entitySystem.systems.RegisterMode;
import org.terasology.entitySystem.systems.RegisterSystem;
import org.terasology.entitySystem.systems.RenderSystem;
import org.terasology.entitySystem.systems.UpdateSubscriberSystem;
import org.terasology.logic.location.LocationComponent;
import org.terasology.logic.particles.BlockParticleEffectComponent.Particle;
import org.terasology.math.geom.Vector2f;
import org.terasology.math.geom.Vector3f;
import org.terasology.math.geom.Vector3i;
import org.terasology.math.geom.Vector4f;
import org.terasology.registry.In;
import org.terasology.rendering.assets.material.Material;
import org.terasology.rendering.assets.texture.Texture;
import org.terasology.rendering.logic.NearestSortingList;
import org.terasology.rendering.world.WorldRenderer;
import org.terasology.utilities.random.FastRandom;
import org.terasology.utilities.random.Random;
import org.terasology.world.WorldProvider;
import org.terasology.world.biomes.Biome;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockManager;
import org.terasology.world.block.BlockPart;
import org.terasology.world.block.loader.WorldAtlas;

import java.nio.FloatBuffer;
import java.util.Arrays;
import java.util.Iterator;

import static org.lwjgl.opengl.GL11.GL_ONE;
import static org.lwjgl.opengl.GL11.GL_ONE_MINUS_SRC_ALPHA;
import static org.lwjgl.opengl.GL11.GL_QUADS;
import static org.lwjgl.opengl.GL11.GL_SRC_ALPHA;
import static org.lwjgl.opengl.GL11.glBegin;
import static org.lwjgl.opengl.GL11.glBindTexture;
import static org.lwjgl.opengl.GL11.glBlendFunc;
import static org.lwjgl.opengl.GL11.glCallList;
import static org.lwjgl.opengl.GL11.glDeleteLists;
import static org.lwjgl.opengl.GL11.glDisable;
import static org.lwjgl.opengl.GL11.glEnable;
import static org.lwjgl.opengl.GL11.glEnd;
import static org.lwjgl.opengl.GL11.glEndList;
import static org.lwjgl.opengl.GL11.glGenLists;
import static org.lwjgl.opengl.GL11.glNewList;
import static org.lwjgl.opengl.GL11.glPopMatrix;
import static org.lwjgl.opengl.GL11.glPushMatrix;
import static org.lwjgl.opengl.GL11.glScalef;
import static org.lwjgl.opengl.GL11.glTranslated;
import static org.lwjgl.opengl.GL11.glTranslatef;

/**
 * Client-side system that simulates and renders block-based particle effects.
 * Each tick it ages, accelerates and moves the particles of every entity carrying a
 * {@link BlockParticleEffectComponent} and a {@link LocationComponent}, spawns newly
 * requested particles, and draws them as camera-facing textured quads via a shared
 * OpenGL display list.
 *
 * @author Immortius
 */
// TODO: Generalise for non-block particles
// TODO: Dispose display lists
@RegisterSystem(RegisterMode.CLIENT)
public class BlockParticleEmitterSystem extends BaseComponentSystem implements UpdateSubscriberSystem, RenderSystem {
    // Cap on particles spawned per effect per update, to bound per-frame work.
    private static final int PARTICLES_PER_UPDATE = 32;

    @In
    private EntityManager entityManager;
    @In
    private WorldProvider worldProvider;
    @In
    private WorldAtlas worldAtlas;
    @In
    private BlockManager blockManager;
    // TODO: lose dependency on worldRenderer?
    @In
    private WorldRenderer worldRenderer;
    @In
    private Config config;

    private Random random = new FastRandom();
    // Keeps particle-effect entities sorted by distance to the active camera so the
    // nearest effects can be rendered when the effect limit is enabled.
    private NearestSortingList sorter = new NearestSortingList();
    // Display list holding the unit particle quad; 0 until initialise() compiles it.
    private int displayList;

    /**
     * Compiles the shared particle quad into a display list (once) and starts
     * distance-sorting against the active camera.
     */
    public void initialise() {
        if (displayList == 0) {
            displayList = glGenLists(1);
            glNewList(displayList, GL11.GL_COMPILE);
            drawParticle();
            glEndList();
        }
        sorter.initialise(worldRenderer.getActiveCamera());
    }

    @Override
    public void shutdown() {
        glDeleteLists(displayList, 1);
        sorter.stop();
    }

    /**
     * Per-frame simulation step: ages particles (removing expired ones), integrates
     * velocity/position, spawns up to {@link #PARTICLES_PER_UPDATE} pending particles,
     * and destroys the entity once an effect marked destroyEntityOnCompletion is empty.
     *
     * @param delta seconds elapsed since the previous update
     */
    public void update(float delta) {
        for (EntityRef entity : entityManager.getEntitiesWith(BlockParticleEffectComponent.class, LocationComponent.class)) {
            BlockParticleEffectComponent particleEffect = entity.getComponent(BlockParticleEffectComponent.class);
            Iterator<Particle> iterator = particleEffect.particles.iterator();
            while (iterator.hasNext()) {
                BlockParticleEffectComponent.Particle p = iterator.next();
                p.lifeRemaining -= delta;
                if (p.lifeRemaining <= 0) {
                    iterator.remove();
                } else {
                    updateVelocity(entity, particleEffect, p, delta);
                    updatePosition(p, delta);
                }
            }

            for (int i = 0; particleEffect.spawnCount > 0 && i < PARTICLES_PER_UPDATE; ++i) {
                spawnParticle(particleEffect);
            }

            if (particleEffect.particles.isEmpty() && particleEffect.destroyEntityOnCompletion) {
                entity.destroy();
            } else {
                entity.saveComponent(particleEffect);
            }
        }
    }

    @ReceiveEvent(components = {BlockParticleEffectComponent.class, LocationComponent.class})
    public void onActivated(OnActivatedComponent event, EntityRef entity) {
        sorter.add(entity);
    }

    @ReceiveEvent(components = {BlockParticleEffectComponent.class, LocationComponent.class})
    public void onDeactivated(BeforeDeactivateComponent event, EntityRef entity) {
        sorter.remove(entity);
    }

    /**
     * Creates one particle with randomized lifespan, velocity, size and position
     * (within the effect's spawn range), decrementing the effect's pending spawn count.
     * For block-typed effects the particle's texture window is taken from the block's
     * FRONT face in the terrain atlas, optionally jittered within the tile.
     */
    private void spawnParticle(BlockParticleEffectComponent particleEffect) {
        Particle p = new Particle();
        p.lifeRemaining = random.nextFloat() * (particleEffect.maxLifespan - particleEffect.minLifespan) + particleEffect.minLifespan;
        p.velocity = random.nextVector3f();
        p.size = random.nextFloat() * (particleEffect.maxSize - particleEffect.minSize) + particleEffect.minSize;
        p.position.set(
                random.nextFloat(-particleEffect.spawnRange.x, particleEffect.spawnRange.x),
                random.nextFloat(-particleEffect.spawnRange.y, particleEffect.spawnRange.y),
                random.nextFloat(-particleEffect.spawnRange.z, particleEffect.spawnRange.z));
        p.color = particleEffect.color;

        if (particleEffect.blockType != null) {
            final float tileSize = worldAtlas.getRelativeTileSize();
            p.texSize.set(tileSize, tileSize);

            Block b = particleEffect.blockType.getArchetypeBlock();
            p.texOffset.set(b.getPrimaryAppearance().getTextureAtlasPos(BlockPart.FRONT));

            if (particleEffect.randBlockTexDisplacement) {
                final float relTileSize = worldAtlas.getRelativeTileSize();
                // BUG FIX: the x component previously used randBlockTexDisplacementScale.y
                // (copy-paste error); it must use the x scale, matching the texSize.x
                // scaling below.
                Vector2f particleTexSize = new Vector2f(
                        relTileSize * particleEffect.randBlockTexDisplacementScale.x,
                        relTileSize * particleEffect.randBlockTexDisplacementScale.y);
                p.texSize.x *= particleEffect.randBlockTexDisplacementScale.x;
                p.texSize.y *= particleEffect.randBlockTexDisplacementScale.y;
                // Jitter the sub-tile window while keeping it inside the block's tile.
                p.texOffset.set(
                        p.texOffset.x + random.nextFloat() * (tileSize - particleTexSize.x),
                        p.texOffset.y + random.nextFloat() * (tileSize - particleTexSize.y));
            }
        }
        //p.texSize.set(TEX_SIZE,TEX_SIZE);
        particleEffect.particles.add(p);
        particleEffect.spawnCount--;
    }

    /**
     * Eases the particle's velocity towards the effect's target velocity using the
     * per-axis acceleration, and zeroes vertical velocity when the particle is about
     * to move into a solid block (if collideWithBlocks is set).
     */
    protected void updateVelocity(EntityRef entity, BlockParticleEffectComponent particleEffect, Particle particle, float delta) {
        Vector3f diff = new Vector3f(particleEffect.targetVelocity);
        diff.sub(particle.velocity);
        diff.x *= particleEffect.acceleration.x * delta;
        diff.y *= particleEffect.acceleration.y * delta;
        diff.z *= particleEffect.acceleration.z * delta;
        particle.velocity.add(diff);
        if (particleEffect.collideWithBlocks) {
            LocationComponent location = entity.getComponent(LocationComponent.class);
            Vector3f pos = location.getWorldPosition();
            pos.add(particle.position);
            // Probe slightly ahead of the particle in its vertical travel direction;
            // a non-air block (id != 0) stops vertical motion.
            if (worldProvider.getBlock(new Vector3f(pos.x, pos.y + 2 * Math.signum(particle.velocity.y) * particle.size, pos.z)).getId() != 0x0) {
                particle.velocity.y = 0;
            }
        }
    }

    /** Simple Euler integration of position from velocity. */
    protected void updatePosition(Particle particle, float delta) {
        particle.position.x += particle.velocity.x * delta;
        particle.position.y += particle.velocity.y * delta;
        particle.position.z += particle.velocity.z * delta;
    }

    public void renderAlphaBlend() {
        if (config.getRendering().isRenderNearest()) {
            // Only render the configured number of nearest effects.
            render(Arrays.asList(sorter.getNearest(config.getRendering().getParticleEffectLimit())));
        } else {
            render(entityManager.getEntitiesWith(BlockParticleEffectComponent.class, LocationComponent.class));
        }
    }

    /**
     * Renders the given particle-effect entities with the particle shader, binding
     * either the effect's own texture or the terrain atlas, and honoring additive
     * blend mode per effect. Entities without a location or outside relevant world
     * space are skipped.
     */
    private void render(Iterable<EntityRef> particleEntities) {
        Assets.getMaterial("engine:prog.particle").enable();
        glDisable(GL11.GL_CULL_FACE);
        Vector3f cameraPosition = worldRenderer.getActiveCamera().getPosition();
        for (EntityRef entity : particleEntities) {
            LocationComponent location = entity.getComponent(LocationComponent.class);
            if (null == location) {
                continue;
            }
            Vector3f worldPos = location.getWorldPosition();
            if (!worldProvider.isBlockRelevant(worldPos)) {
                continue;
            }
            BlockParticleEffectComponent particleEffect = entity.getComponent(BlockParticleEffectComponent.class);
            if (particleEffect.texture == null) {
                Texture terrainTex = Assets.getTexture("engine:terrain");
                if (terrainTex == null) {
                    // Terrain atlas not loaded yet; nothing sensible can be drawn.
                    return;
                }
                GL13.glActiveTexture(GL13.GL_TEXTURE0);
                glBindTexture(GL11.GL_TEXTURE_2D, terrainTex.getId());
            } else {
                GL13.glActiveTexture(GL13.GL_TEXTURE0);
                glBindTexture(GL11.GL_TEXTURE_2D, particleEffect.texture.getId());
            }
            if (particleEffect.blendMode == BlockParticleEffectComponent.ParticleBlendMode.ADD) {
                glBlendFunc(GL_ONE, GL_ONE);
            }
            if (particleEffect.blockType != null) {
                renderBlockParticles(worldPos, cameraPosition, particleEffect);
            } else {
                renderParticles(worldPos, cameraPosition, particleEffect);
            }
            if (particleEffect.blendMode == BlockParticleEffectComponent.ParticleBlendMode.ADD) {
                // Restore the default alpha blend function.
                glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
            }
        }
        glEnable(GL11.GL_CULL_FACE);
    }

    /**
     * Draws block-textured particles, tinting them by the biome-dependent color of
     * the source block's FRONT face and the local lighting.
     */
    private void renderBlockParticles(Vector3f worldPos, Vector3f cameraPosition, BlockParticleEffectComponent particleEffect) {
        Vector3i worldPos3i = new Vector3i(worldPos, 0.5f);
        Biome biome = worldProvider.getBiome(worldPos3i);

        glPushMatrix();
        // Translate relative to the camera to keep coordinates small.
        glTranslated(worldPos.x - cameraPosition.x, worldPos.y - cameraPosition.y, worldPos.z - cameraPosition.z);

        for (Particle particle : particleEffect.particles) {
            glPushMatrix();
            glTranslatef(particle.position.x, particle.position.y, particle.position.z);
            applyOrientation();
            glScalef(particle.size, particle.size, particle.size);

            float light = worldRenderer.getRenderingLightValueAt(new Vector3f(worldPos.x + particle.position.x, worldPos.y + particle.position.y, worldPos.z + particle.position.z));
            renderParticle(particle, particleEffect.blockType.getArchetypeBlock(), biome, light);
            glPopMatrix();
        }
        glPopMatrix();
    }

    /** Draws plain (non-block) particles lit by the local light value. */
    private void renderParticles(Vector3f worldPos, Vector3f cameraPosition, BlockParticleEffectComponent particleEffect) {
        glPushMatrix();
        glTranslated(worldPos.x - cameraPosition.x, worldPos.y - cameraPosition.y, worldPos.z - cameraPosition.z);

        for (Particle particle : particleEffect.particles) {
            glPushMatrix();
            glTranslatef(particle.position.x, particle.position.y, particle.position.z);
            applyOrientation();
            glScalef(particle.size, particle.size, particle.size);

            float light = worldRenderer.getRenderingLightValueAt(new Vector3f(worldPos.x + particle.position.x, worldPos.y + particle.position.y, worldPos.z + particle.position.z));
            renderParticle(particle, light);
            glPopMatrix();
        }
        glPopMatrix();
    }

    /**
     * Billboards the current quad towards the camera by overwriting the rotation/scale
     * part of the modelview matrix with the identity, keeping only translation.
     */
    private void applyOrientation() {
        // Fetch the current modelview matrix
        final FloatBuffer model = BufferUtils.createFloatBuffer(16);
        GL11.glGetFloat(GL11.GL_MODELVIEW_MATRIX, model);

        // And undo all rotations and scaling
        for (int i = 0; i < 3; i++) {
            for (int j = 0; j < 3; j++) {
                if (i == j) {
                    model.put(i * 4 + j, 1.0f);
                } else {
                    model.put(i * 4 + j, 0.0f);
                }
            }
        }
        GL11.glLoadMatrix(model);
    }

    /** Uploads the particle's color/texture uniforms and draws the shared quad. */
    protected void renderParticle(Particle particle, float light) {
        Material mat = Assets.getMaterial("engine:prog.particle");
        mat.setFloat4("colorOffset", particle.color.x, particle.color.y, particle.color.z, particle.color.w, true);
        mat.setFloat2("texOffset", particle.texOffset.x, particle.texOffset.y, true);
        mat.setFloat2("texScale", particle.texSize.x, particle.texSize.y, true);
        mat.setFloat("light", light, true);
        glCallList(displayList);
    }

    /**
     * As {@link #renderParticle(Particle, float)}, additionally modulating the color
     * by the block's biome-dependent FRONT-face color offset.
     */
    protected void renderParticle(Particle particle, Block block, Biome biome, float light) {
        Material mat = Assets.getMaterial("engine:prog.particle");
        Vector4f colorMod = block.calcColorOffsetFor(BlockPart.FRONT, biome);
        mat.setFloat4("colorOffset", particle.color.x * colorMod.x, particle.color.y * colorMod.y, particle.color.z * colorMod.z, particle.color.w * colorMod.w, true);
        mat.setFloat2("texOffset", particle.texOffset.x, particle.texOffset.y, true);
        mat.setFloat2("texScale", particle.texSize.x, particle.texSize.y, true);
        mat.setFloat("light", light, true);
        glCallList(displayList);
    }

    /** Emits the immediate-mode unit quad compiled into the display list. */
    private void drawParticle() {
        glBegin(GL_QUADS);
        GL11.glTexCoord2f(0.0f, 0.0f);
        GL11.glVertex3f(-0.5f, 0.5f, 0.0f);
        GL11.glTexCoord2f(1.0f, 0.0f);
        GL11.glVertex3f(0.5f, 0.5f, 0.0f);
        GL11.glTexCoord2f(1.0f, 1.0f);
        GL11.glVertex3f(0.5f, -0.5f, 0.0f);
        GL11.glTexCoord2f(0.0f, 1.0f);
        GL11.glVertex3f(-0.5f, -0.5f, 0.0f);
        glEnd();
    }

    public void renderOpaque() {
    }

    public void renderOverlay() {
    }

    public void renderFirstPerson() {
    }

    @Override
    public void renderShadows() {
    }
}
apache-2.0
derekhiggins/ovirt-engine
backend/manager/modules/utils/src/main/java/org/ovirt/engine/core/utils/ipa/ITDSUserContextMapper.java
922
package org.ovirt.engine.core.utils.ipa; import static org.ovirt.engine.core.utils.kerberos.InstallerConstants.ERROR_PREFIX; import javax.naming.NamingException; import javax.naming.directory.Attributes; import org.springframework.ldap.core.ContextMapper; import org.springframework.ldap.core.DirContextAdapter; public class ITDSUserContextMapper implements ContextMapper { @Override public Object mapFromContext(Object ctx) { if (ctx == null) { return null; } DirContextAdapter searchResult = (DirContextAdapter) ctx; Attributes attributes = searchResult.getAttributes(); if (attributes == null) { return null; } try { return attributes.get("uid").get(0); } catch (NamingException e) { System.err.println(ERROR_PREFIX + "Failed getting user GUID"); return null; } } }
apache-2.0
gustavoanatoly/hbase
hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/NormalizationPlan.java
1367
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master.normalizer; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Admin; /** * Interface for normalization plan. */ @InterfaceAudience.Private public interface NormalizationPlan { enum PlanType { SPLIT, MERGE, NONE } /** * Executes normalization plan on cluster (does actual splitting/merging work). * @param admin instance of Admin */ void execute(Admin admin); /** * @return the type of this plan */ PlanType getType(); }
apache-2.0
aesteve/nubes
src/test/java/mock/services/ParrotService.java
250
package mock.services; import io.vertx.codegen.annotations.ProxyGen; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; @ProxyGen public interface ParrotService { void echo(String original, Handler<AsyncResult<String>> handler); }
apache-2.0
andrhamm/Singularity
SingularityExecutor/src/main/java/com/hubspot/singularity/executor/TemplateManager.java
3350
package com.hubspot.singularity.executor;

import static java.nio.charset.StandardCharsets.UTF_8;

import java.io.BufferedWriter;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;

import com.github.jknack.handlebars.Template;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import com.hubspot.singularity.executor.config.SingularityExecutorModule;
import com.hubspot.singularity.executor.models.DockerContext;
import com.hubspot.singularity.executor.models.EnvironmentContext;
import com.hubspot.singularity.executor.models.LogrotateCronTemplateContext;
import com.hubspot.singularity.executor.models.LogrotateTemplateContext;
import com.hubspot.singularity.executor.models.RunnerContext;

/**
 * Renders the executor's Handlebars templates (runner script, environment script,
 * logrotate config/cron entry, docker script) to files on disk, UTF-8 encoded.
 */
@Singleton
public class TemplateManager {

  private final Template runnerTemplate;
  private final Template environmentTemplate;
  private final Template logrotateTemplate;
  private final Template logrotateCronTemplate;
  private final Template dockerTemplate;

  @Inject
  public TemplateManager(@Named(SingularityExecutorModule.RUNNER_TEMPLATE) Template runnerTemplate,
                         @Named(SingularityExecutorModule.ENVIRONMENT_TEMPLATE) Template environmentTemplate,
                         @Named(SingularityExecutorModule.LOGROTATE_TEMPLATE) Template logrotateTemplate,
                         @Named(SingularityExecutorModule.LOGROTATE_CRON_TEMPLATE) Template logrotateCronTemplate,
                         @Named(SingularityExecutorModule.DOCKER_TEMPLATE) Template dockerTemplate
  ) {
    this.runnerTemplate = runnerTemplate;
    this.environmentTemplate = environmentTemplate;
    this.logrotateTemplate = logrotateTemplate;
    this.logrotateCronTemplate = logrotateCronTemplate;
    this.dockerTemplate = dockerTemplate;
  }

  /** Writes the task runner script for the given context to {@code destination}. */
  public void writeRunnerScript(Path destination, RunnerContext runnerContext) {
    writeTemplate(destination, runnerTemplate, runnerContext);
  }

  /** Writes the environment-setup script for the given context to {@code destination}. */
  public void writeEnvironmentScript(Path destination, EnvironmentContext environmentContext) {
    writeTemplate(destination, environmentTemplate, environmentContext);
  }

  /** Writes the logrotate configuration for the given context to {@code destination}. */
  public void writeLogrotateFile(Path destination, LogrotateTemplateContext logRotateContext) {
    writeTemplate(destination, logrotateTemplate, logRotateContext);
  }

  /**
   * Writes the cron entry that triggers logrotate and forces its permissions to 644
   * (world-readable, owner-writable, not executable).
   *
   * @return true only if every permission change succeeded
   */
  public boolean writeCronEntryForLogrotate(Path destination, LogrotateCronTemplateContext logrotateCronTemplateContext) {
    writeTemplate(destination, logrotateCronTemplate, logrotateCronTemplateContext);

    final File destinationFile = destination.toFile();

    // ensure file is 644 -- java file permissions are so lame :/
    return destinationFile.setExecutable(false, false)
        && destinationFile.setReadable(true, false)
        && destinationFile.setWritable(false, false)
        && destinationFile.setWritable(true);
  }

  /** Writes the docker wrapper script for the given context to {@code destination}. */
  public void writeDockerScript(Path destination, DockerContext dockerContext) {
    writeTemplate(destination, dockerTemplate, dockerContext);
  }

  /**
   * Applies {@code template} with {@code context} and writes the result to {@code path}
   * as UTF-8, closing the writer via try-with-resources.
   *
   * @throws RuntimeException wrapping any checked exception from rendering or I/O
   */
  private void writeTemplate(Path path, Template template, Object context) {
    try (final BufferedWriter writer = Files.newBufferedWriter(path, UTF_8)) {
      template.apply(context, writer);
    } catch (RuntimeException e) {
      // Replaces deprecated Guava Throwables.propagate(e) with its documented
      // equivalent: rethrow unchecked exceptions as-is...
      throw e;
    } catch (Exception e) {
      // ...and wrap checked exceptions in a RuntimeException, preserving the cause.
      throw new RuntimeException(e);
    }
  }
}
apache-2.0
redkale/redkale
src/main/java/org/redkale/asm/MethodWriter.java
88118
/* * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ /* * * * * * * ASM: a very small and fast Java bytecode manipulation framework * Copyright (c) 2000-2011 INRIA, France Telecom * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. */ package org.redkale.asm; /** * A {@link MethodVisitor} that generates methods in bytecode form. Each visit * method of this class appends the bytecode corresponding to the visited * instruction to a byte vector, in the order these methods are called. 
* * @author Eric Bruneton * @author Eugene Kuleshov */ class MethodWriter extends MethodVisitor { /** * Pseudo access flag used to denote constructors. */ static final int ACC_CONSTRUCTOR = 0x80000; /** * Frame has exactly the same locals as the previous stack map frame and * number of stack items is zero. */ static final int SAME_FRAME = 0; // to 63 (0-3f) /** * Frame has exactly the same locals as the previous stack map frame and * number of stack items is 1 */ static final int SAME_LOCALS_1_STACK_ITEM_FRAME = 64; // to 127 (40-7f) /** * Reserved for future use */ static final int RESERVED = 128; /** * Frame has exactly the same locals as the previous stack map frame and * number of stack items is 1. Offset is bigger then 63; */ static final int SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED = 247; // f7 /** * Frame where current locals are the same as the locals in the previous * frame, except that the k last locals are absent. The value of k is given * by the formula 251-frame_type. */ static final int CHOP_FRAME = 248; // to 250 (f8-fA) /** * Frame has exactly the same locals as the previous stack map frame and * number of stack items is zero. Offset is bigger then 63; */ static final int SAME_FRAME_EXTENDED = 251; // fb /** * Frame where current locals are the same as the locals in the previous * frame, except that k additional locals are defined. The value of k is * given by the formula frame_type-251. */ static final int APPEND_FRAME = 252; // to 254 // fc-fe /** * Full frame */ static final int FULL_FRAME = 255; // ff /** * Indicates that the stack map frames must be recomputed from scratch. In * this case the maximum stack size and number of local variables is also * recomputed from scratch. * * @see #compute */ static final int FRAMES = 0; /** * Indicates that the stack map frames of type F_INSERT must be computed. * The other frames are not (re)computed. 
     * They should all be of type F_NEW and should be sufficient to compute
     * the content of the F_INSERT frames, together with the bytecode
     * instructions between a F_NEW and a F_INSERT frame - and without any
     * knowledge of the type hierarchy (by definition of F_INSERT).
     *
     * @see #compute
     */
    static final int INSERTED_FRAMES = 1;

    /**
     * Indicates that the maximum stack size and number of local variables must
     * be automatically computed.
     *
     * @see #compute
     */
    static final int MAXS = 2;

    /**
     * Indicates that nothing must be automatically computed.
     *
     * @see #compute
     */
    static final int NOTHING = 3;

    /**
     * The class writer to which this method must be added.
     */
    final ClassWriter cw;

    /**
     * Access flags of this method.
     */
    private int access;

    /**
     * The index of the constant pool item that contains the name of this
     * method.
     */
    private final int name;

    /**
     * The index of the constant pool item that contains the descriptor of this
     * method.
     */
    private final int desc;

    /**
     * The descriptor of this method.
     */
    private final String descriptor;

    /**
     * The signature of this method.
     */
    String signature;

    /**
     * If not zero, indicates that the code of this method must be copied from
     * the ClassReader associated to this writer in <code>cw.cr</code>. More
     * precisely, this field gives the index of the first byte to copied from
     * <code>cw.cr.b</code>.
     */
    int classReaderOffset;

    /**
     * If not zero, indicates that the code of this method must be copied from
     * the ClassReader associated to this writer in <code>cw.cr</code>. More
     * precisely, this field gives the number of bytes to copied from
     * <code>cw.cr.b</code>.
     */
    int classReaderLength;

    /**
     * Number of exceptions that can be thrown by this method.
     */
    int exceptionCount;

    /**
     * The exceptions that can be thrown by this method. More precisely, this
     * array contains the indexes of the constant pool items that contain the
     * internal names of these exception classes.
     */
    int[] exceptions;

    /**
     * The annotation default attribute of this method. May be <tt>null</tt>.
     */
    private ByteVector annd;

    /**
     * The runtime visible annotations of this method. May be <tt>null</tt>.
     */
    private AnnotationWriter anns;

    /**
     * The runtime invisible annotations of this method. May be <tt>null</tt>.
     */
    private AnnotationWriter ianns;

    /**
     * The runtime visible type annotations of this method. May be
     * <tt>null</tt>.
     */
    private AnnotationWriter tanns;

    /**
     * The runtime invisible type annotations of this method. May be
     * <tt>null</tt>.
     */
    private AnnotationWriter itanns;

    /**
     * The runtime visible parameter annotations of this method. May be
     * <tt>null</tt>.
     */
    private AnnotationWriter[] panns;

    /**
     * The runtime invisible parameter annotations of this method. May be
     * <tt>null</tt>.
     */
    private AnnotationWriter[] ipanns;

    /**
     * The number of synthetic parameters of this method.
     */
    private int synthetics;

    /**
     * The non standard attributes of the method.
     */
    private Attribute attrs;

    /**
     * The bytecode of this method.
     */
    private ByteVector code = new ByteVector();

    /**
     * Maximum stack size of this method.
     */
    private int maxStack;

    /**
     * Maximum number of local variables for this method.
     */
    private int maxLocals;

    /**
     * Number of local variables in the current stack map frame.
     */
    private int currentLocals;

    /**
     * Number of stack map frames in the StackMapTable attribute.
     */
    int frameCount;

    /**
     * The StackMapTable attribute.
     */
    private ByteVector stackMap;

    /**
     * The offset of the last frame that was written in the StackMapTable
     * attribute.
     */
    private int previousFrameOffset;

    /**
     * The last frame that was written in the StackMapTable attribute.
     *
     * @see #frame
     */
    private int[] previousFrame;

    /**
     * The current stack map frame. The first element contains the offset of
     * the instruction to which the frame corresponds, the second element is
     * the number of locals and the third one is the number of stack elements.
     * The local variables start at index 3 and are followed by the operand
     * stack values. In summary frame[0] = offset, frame[1] = nLocal, frame[2]
     * = nStack, frame[3] = nLocal. All types are encoded as integers, with
     * the same format as the one used in {@link Label}, but limited to BASE
     * types.
     */
    private int[] frame;

    /**
     * Number of elements in the exception handler list.
     */
    private int handlerCount;

    /**
     * The first element in the exception handler list.
     */
    private Handler firstHandler;

    /**
     * The last element in the exception handler list.
     */
    private Handler lastHandler;

    /**
     * Number of entries in the MethodParameters attribute.
     */
    private int methodParametersCount;

    /**
     * The MethodParameters attribute.
     */
    private ByteVector methodParameters;

    /**
     * Number of entries in the LocalVariableTable attribute.
     */
    private int localVarCount;

    /**
     * The LocalVariableTable attribute.
     */
    private ByteVector localVar;

    /**
     * Number of entries in the LocalVariableTypeTable attribute.
     */
    private int localVarTypeCount;

    /**
     * The LocalVariableTypeTable attribute.
     */
    private ByteVector localVarType;

    /**
     * Number of entries in the LineNumberTable attribute.
     */
    private int lineNumberCount;

    /**
     * The LineNumberTable attribute.
     */
    private ByteVector lineNumber;

    /**
     * The start offset of the last visited instruction.
     */
    private int lastCodeOffset;

    /**
     * The runtime visible type annotations of the code. May be <tt>null</tt>.
     */
    private AnnotationWriter ctanns;

    /**
     * The runtime invisible type annotations of the code. May be
     * <tt>null</tt>.
     */
    private AnnotationWriter ictanns;

    /**
     * The non standard attributes of the method's code.
     */
    private Attribute cattrs;

    /**
     * The number of subroutines in this method.
     */
    private int subroutines;

    // ------------------------------------------------------------------------

    /*
     * Fields for the control flow graph analysis algorithm (used to compute
     * the maximum stack size).
     * A control flow graph contains one node per "basic block", and one edge
     * per "jump" from one basic block to another. Each node (i.e., each basic
     * block) is represented by the Label object that corresponds to the first
     * instruction of this basic block. Each node also stores the list of its
     * successors in the graph, as a linked list of Edge objects.
     */

    /**
     * Indicates what must be automatically computed.
     *
     * @see #FRAMES
     * @see #INSERTED_FRAMES
     * @see #MAXS
     * @see #NOTHING
     */
    private final int compute;

    /**
     * A list of labels. This list is the list of basic blocks in the method,
     * i.e. a list of Label objects linked to each other by their
     * {@link Label#successor} field, in the order they are visited by
     * {@link MethodVisitor#visitLabel}, and starting with the first basic
     * block.
     */
    private Label labels;

    /**
     * The previous basic block.
     */
    private Label previousBlock;

    /**
     * The current basic block.
     */
    private Label currentBlock;

    /**
     * The (relative) stack size after the last visited instruction. This size
     * is relative to the beginning of the current basic block, i.e., the true
     * stack size after the last visited instruction is equal to the
     * {@link Label#inputStackTop beginStackSize} of the current basic block
     * plus <tt>stackSize</tt>.
     */
    private int stackSize;

    /**
     * The (relative) maximum stack size after the last visited instruction.
     * This size is relative to the beginning of the current basic block,
     * i.e., the true maximum stack size after the last visited instruction is
     * equal to the {@link Label#inputStackTop beginStackSize} of the current
     * basic block plus <tt>stackSize</tt>.
     */
    private int maxStackSize;

    // ------------------------------------------------------------------------
    // Constructor
    // ------------------------------------------------------------------------

    /**
     * Constructs a new {@link MethodWriter}.
     *
     * @param cw
     *            the class writer in which the method must be added.
     * @param access
     *            the method's access flags (see {@link Opcodes}).
     * @param name
     *            the method's name.
     * @param desc
     *            the method's descriptor (see {@link Type}).
     * @param signature
     *            the method's signature. May be <tt>null</tt>.
     * @param exceptions
     *            the internal names of the method's exceptions. May be
     *            <tt>null</tt>.
     * @param compute
     *            Indicates what must be automatically computed (see #compute).
     */
    MethodWriter(final ClassWriter cw, final int access, final String name,
            final String desc, final String signature,
            final String[] exceptions, final int compute) {
        super(Opcodes.ASM6);
        // appends this writer to the class writer's linked list of methods
        if (cw.firstMethod == null) {
            cw.firstMethod = this;
        } else {
            cw.lastMethod.mv = this;
        }
        cw.lastMethod = this;
        this.cw = cw;
        this.access = access;
        if ("<init>".equals(name)) {
            this.access |= ACC_CONSTRUCTOR;
        }
        this.name = cw.newUTF8(name);
        this.desc = cw.newUTF8(desc);
        this.descriptor = desc;
        this.signature = signature;
        if (exceptions != null && exceptions.length > 0) {
            exceptionCount = exceptions.length;
            this.exceptions = new int[exceptionCount];
            for (int i = 0; i < exceptionCount; ++i) {
                this.exceptions[i] = cw.newClass(exceptions[i]);
            }
        }
        this.compute = compute;
        if (compute != NOTHING) {
            // updates maxLocals
            int size = Type.getArgumentsAndReturnSizes(descriptor) >> 2;
            if ((access & Opcodes.ACC_STATIC) != 0) {
                --size;
            }
            maxLocals = size;
            currentLocals = size;
            // creates and visits the label for the first basic block
            labels = new Label();
            labels.status |= Label.PUSHED;
            visitLabel(labels);
        }
    }

    // ------------------------------------------------------------------------
    // Implementation of the MethodVisitor abstract class
    // ------------------------------------------------------------------------

    @Override
    public void visitParameter(String name, int access) {
        if (methodParameters == null) {
            methodParameters = new ByteVector();
        }
        ++methodParametersCount;
        // a zero name index means an anonymous parameter
        methodParameters.putShort((name == null) ? 0 : cw.newUTF8(name))
                .putShort(access);
    }

    @Override
    public AnnotationVisitor visitAnnotationDefault() {
        annd = new ByteVector();
        return new AnnotationWriter(cw, false, annd, null, 0);
    }

    @Override
    public AnnotationVisitor visitAnnotation(final String desc,
            final boolean visible) {
        ByteVector bv = new ByteVector();
        // write type, and reserve space for values count
        bv.putShort(cw.newUTF8(desc)).putShort(0);
        AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, 2);
        if (visible) {
            aw.next = anns;
            anns = aw;
        } else {
            aw.next = ianns;
            ianns = aw;
        }
        return aw;
    }

    @Override
    public AnnotationVisitor visitTypeAnnotation(final int typeRef,
            final TypePath typePath, final String desc, final boolean visible) {
        ByteVector bv = new ByteVector();
        // write target_type and target_info
        AnnotationWriter.putTarget(typeRef, typePath, bv);
        // write type, and reserve space for values count
        bv.putShort(cw.newUTF8(desc)).putShort(0);
        AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
                bv.length - 2);
        if (visible) {
            aw.next = tanns;
            tanns = aw;
        } else {
            aw.next = itanns;
            itanns = aw;
        }
        return aw;
    }

    @Override
    public AnnotationVisitor visitParameterAnnotation(final int parameter,
            final String desc, final boolean visible) {
        ByteVector bv = new ByteVector();
        if ("Ljava/lang/Synthetic;".equals(desc)) {
            // workaround for a bug in javac with synthetic parameters
            // see ClassReader.readParameterAnnotations
            synthetics = Math.max(synthetics, parameter + 1);
            return new AnnotationWriter(cw, false, bv, null, 0);
        }
        // write type, and reserve space for values count
        bv.putShort(cw.newUTF8(desc)).putShort(0);
        AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, 2);
        if (visible) {
            if (panns == null) {
                panns = new AnnotationWriter[Type.getArgumentTypes(descriptor).length];
            }
            aw.next = panns[parameter];
            panns[parameter] = aw;
        } else {
            if (ipanns == null) {
                ipanns = new AnnotationWriter[Type.getArgumentTypes(descriptor).length];
            }
            aw.next = ipanns[parameter];
            ipanns[parameter] = aw;
        }
        return aw;
    }

    @Override
    public void visitAttribute(final Attribute attr) {
        // code attributes and method attributes go into separate lists
        if (attr.isCodeAttribute()) {
            attr.next = cattrs;
            cattrs = attr;
        } else {
            attr.next = attrs;
            attrs = attr;
        }
    }

    @Override
    public void visitCode() {
    }

    @Override
    public void visitFrame(final int type, final int nLocal,
            final Object[] local, final int nStack, final Object[] stack) {
        if (compute == FRAMES) {
            // frames are recomputed from scratch; visited frames are ignored
            return;
        }

        if (compute == INSERTED_FRAMES) {
            if (currentBlock.frame == null) {
                // This should happen only once, for the implicit first frame
                // (which is explicitly visited in ClassReader if the
                // EXPAND_ASM_INSNS option is used).
                currentBlock.frame = new CurrentFrame();
                currentBlock.frame.owner = currentBlock;
                currentBlock.frame.initInputFrame(cw, access,
                        Type.getArgumentTypes(descriptor), nLocal);
                visitImplicitFirstFrame();
            } else {
                if (type == Opcodes.F_NEW) {
                    currentBlock.frame.set(cw, nLocal, local, nStack, stack);
                } else {
                    // In this case type is equal to F_INSERT by hypothesis,
                    // and currentBlock.frame contains the stack map frame at
                    // the current instruction, computed from the last F_NEW
                    // frame and the bytecode instructions in between (via
                    // calls to CurrentFrame#execute).
                }
                visitFrame(currentBlock.frame);
            }
        } else if (type == Opcodes.F_NEW) {
            // an expanded frame: encode locals and stack into the 'frame'
            // array, then write it out
            if (previousFrame == null) {
                visitImplicitFirstFrame();
            }
            currentLocals = nLocal;
            int frameIndex = startFrame(code.length, nLocal, nStack);
            for (int i = 0; i < nLocal; ++i) {
                if (local[i] instanceof String) {
                    String desc = Type.getObjectType((String) local[i])
                            .getDescriptor();
                    frame[frameIndex++] = Frame.type(cw, desc);
                } else if (local[i] instanceof Integer) {
                    frame[frameIndex++] = Frame.BASE
                            | ((Integer) local[i]).intValue();
                } else {
                    frame[frameIndex++] = Frame.UNINITIALIZED
                            | cw.addUninitializedType("",
                                    ((Label) local[i]).position);
                }
            }
            for (int i = 0; i < nStack; ++i) {
                if (stack[i] instanceof String) {
                    String desc = Type.getObjectType((String) stack[i])
                            .getDescriptor();
                    frame[frameIndex++] = Frame.type(cw, desc);
                } else if (stack[i] instanceof Integer) {
                    frame[frameIndex++] = Frame.BASE
                            | ((Integer) stack[i]).intValue();
                } else {
                    frame[frameIndex++] = Frame.UNINITIALIZED
                            | cw.addUninitializedType("",
                                    ((Label) stack[i]).position);
                }
            }
            endFrame();
        } else {
            // a compressed frame: write it directly in the StackMapTable
            // attribute format
            int delta;
            if (stackMap == null) {
                stackMap = new ByteVector();
                delta = code.length;
            } else {
                delta = code.length - previousFrameOffset - 1;
                if (delta < 0) {
                    if (type == Opcodes.F_SAME) {
                        return;
                    } else {
                        throw new IllegalStateException();
                    }
                }
            }
            switch (type) {
            case Opcodes.F_FULL:
                currentLocals = nLocal;
                stackMap.putByte(FULL_FRAME).putShort(delta).putShort(nLocal);
                for (int i = 0; i < nLocal; ++i) {
                    writeFrameType(local[i]);
                }
                stackMap.putShort(nStack);
                for (int i = 0; i < nStack; ++i) {
                    writeFrameType(stack[i]);
                }
                break;
            case Opcodes.F_APPEND:
                currentLocals += nLocal;
                stackMap.putByte(SAME_FRAME_EXTENDED + nLocal).putShort(delta);
                for (int i = 0; i < nLocal; ++i) {
                    writeFrameType(local[i]);
                }
                break;
            case Opcodes.F_CHOP:
                currentLocals -= nLocal;
                stackMap.putByte(SAME_FRAME_EXTENDED - nLocal).putShort(delta);
                break;
            case Opcodes.F_SAME:
                if (delta < 64) {
                    stackMap.putByte(delta);
                } else {
                    stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
                }
                break;
            case Opcodes.F_SAME1:
                if (delta < 64) {
                    stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
                } else {
                    stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
                            .putShort(delta);
                }
                writeFrameType(stack[0]);
                break;
            }

            previousFrameOffset = code.length;
            ++frameCount;
        }

        maxStack = Math.max(maxStack, nStack);
        maxLocals = Math.max(maxLocals, currentLocals);
    }

    @Override
    public void visitInsn(final int opcode) {
        lastCodeOffset = code.length;
        // adds the instruction to the bytecode of the method
        code.putByte(opcode);
        // update currentBlock
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, 0, null, null);
            } else {
                // updates current and max stack sizes
                int size = stackSize + Frame.SIZE[opcode];
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
            // if opcode == ATHROW or xRETURN, ends current block (no successor)
            if ((opcode >= Opcodes.IRETURN && opcode <= Opcodes.RETURN)
                    || opcode == Opcodes.ATHROW) {
                noSuccessor();
            }
        }
    }

    @Override
    public void visitIntInsn(final int opcode, final int operand) {
        lastCodeOffset = code.length;
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, operand, null, null);
            } else if (opcode != Opcodes.NEWARRAY) {
                // updates current and max stack sizes only for BIPUSH or
                // SIPUSH (stack size variation = 0 for NEWARRAY, which pops
                // the length and pushes the array reference)
                int size = stackSize + 1;
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        if (opcode == Opcodes.SIPUSH) {
            code.put12(opcode, operand);
        } else { // BIPUSH or NEWARRAY
            code.put11(opcode, operand);
        }
    }

    @Override
    public void visitVarInsn(final int opcode, final int var) {
        lastCodeOffset = code.length;
        // Label currentBlock = this.currentBlock;
        if (currentBlock !=
                null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, var, null, null);
            } else {
                // updates current and max stack sizes
                if (opcode == Opcodes.RET) {
                    // no stack change, but end of current block (no successor)
                    currentBlock.status |= Label.RET;
                    // save 'stackSize' here for future use
                    // (see {@link #findSubroutineSuccessors})
                    currentBlock.inputStackTop = stackSize;
                    noSuccessor();
                } else { // xLOAD or xSTORE
                    int size = stackSize + Frame.SIZE[opcode];
                    if (size > maxStackSize) {
                        maxStackSize = size;
                    }
                    stackSize = size;
                }
            }
        }
        if (compute != NOTHING) {
            // updates max locals (long and double occupy two slots)
            int n;
            if (opcode == Opcodes.LLOAD || opcode == Opcodes.DLOAD
                    || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE) {
                n = var + 2;
            } else {
                n = var + 1;
            }
            if (n > maxLocals) {
                maxLocals = n;
            }
        }
        // adds the instruction to the bytecode of the method
        if (var < 4 && opcode != Opcodes.RET) {
            // uses the compact one-byte xLOAD_n / xSTORE_n forms
            int opt;
            if (opcode < Opcodes.ISTORE) {
                /* ILOAD_0 */
                opt = 26 + ((opcode - Opcodes.ILOAD) << 2) + var;
            } else {
                /* ISTORE_0 */
                opt = 59 + ((opcode - Opcodes.ISTORE) << 2) + var;
            }
            code.putByte(opt);
        } else if (var >= 256) {
            code.putByte(196 /* WIDE */).put12(opcode, var);
        } else {
            code.put11(opcode, var);
        }
        if (opcode >= Opcodes.ISTORE && compute == FRAMES && handlerCount > 0) {
            visitLabel(new Label());
        }
    }

    @Override
    public void visitTypeInsn(final int opcode, final String type) {
        lastCodeOffset = code.length;
        Item i = cw.newStringishItem(ClassWriter.CLASS, type);
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, code.length, cw, i);
            } else if (opcode == Opcodes.NEW) {
                // updates current and max stack sizes only if opcode == NEW
                // (no stack change for ANEWARRAY, CHECKCAST, INSTANCEOF)
                int size = stackSize + 1;
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        code.put12(opcode, i.index);
    }

    @Override
    public void visitFieldInsn(final int opcode, final String owner,
            final String name, final String desc) {
        lastCodeOffset = code.length;
        Item i = cw.newFieldItem(owner, name, desc);
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, 0, cw, i);
            } else {
                int size;
                // computes the stack size variation (long/double count for 2)
                char c = desc.charAt(0);
                switch (opcode) {
                case Opcodes.GETSTATIC:
                    size = stackSize + (c == 'D' || c == 'J' ? 2 : 1);
                    break;
                case Opcodes.PUTSTATIC:
                    size = stackSize + (c == 'D' || c == 'J' ? -2 : -1);
                    break;
                case Opcodes.GETFIELD:
                    size = stackSize + (c == 'D' || c == 'J' ? 1 : 0);
                    break;
                // case Constants.PUTFIELD:
                default:
                    size = stackSize + (c == 'D' || c == 'J' ? -3 : -2);
                    break;
                }
                // updates current and max stack sizes
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        code.put12(opcode, i.index);
    }

    @Override
    public void visitMethodInsn(final int opcode, final String owner,
            final String name, final String desc, final boolean itf) {
        lastCodeOffset = code.length;
        Item i = cw.newMethodItem(owner, name, desc, itf);
        int argSize = i.intVal;
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, 0, cw, i);
            } else {
                /*
                 * computes the stack size variation. In order not to recompute
                 * several times this variation for the same Item, we use the
                 * intVal field of this item to store this variation, once it
                 * has been computed. More precisely this intVal field stores
                 * the sizes of the arguments and of the return value
                 * corresponding to desc.
                 */
                if (argSize == 0) {
                    // the above sizes have not been computed yet,
                    // so we compute them...
                    argSize = Type.getArgumentsAndReturnSizes(desc);
                    // ... and we save them in order
                    // not to recompute them in the future
                    i.intVal = argSize;
                }
                int size;
                if (opcode == Opcodes.INVOKESTATIC) {
                    // no receiver is popped for static calls, hence the +1
                    size = stackSize - (argSize >> 2) + (argSize & 0x03) + 1;
                } else {
                    size = stackSize - (argSize >> 2) + (argSize & 0x03);
                }
                // updates current and max stack sizes
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        if (opcode == Opcodes.INVOKEINTERFACE) {
            if (argSize == 0) {
                argSize = Type.getArgumentsAndReturnSizes(desc);
                i.intVal = argSize;
            }
            code.put12(Opcodes.INVOKEINTERFACE, i.index).put11(argSize >> 2, 0);
        } else {
            code.put12(opcode, i.index);
        }
    }

    @Override
    public void visitInvokeDynamicInsn(final String name, final String desc,
            final Handle bsm, final Object... bsmArgs) {
        lastCodeOffset = code.length;
        Item i = cw.newInvokeDynamicItem(name, desc, bsm, bsmArgs);
        int argSize = i.intVal;
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(Opcodes.INVOKEDYNAMIC, 0, cw, i);
            } else {
                /*
                 * computes the stack size variation. In order not to recompute
                 * several times this variation for the same Item, we use the
                 * intVal field of this item to store this variation, once it
                 * has been computed. More precisely this intVal field stores
                 * the sizes of the arguments and of the return value
                 * corresponding to desc.
                 */
                if (argSize == 0) {
                    // the above sizes have not been computed yet,
                    // so we compute them...
                    argSize = Type.getArgumentsAndReturnSizes(desc);
                    // ...
                    // and we save them in order
                    // not to recompute them in the future
                    i.intVal = argSize;
                }
                int size = stackSize - (argSize >> 2) + (argSize & 0x03) + 1;
                // updates current and max stack sizes
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        code.put12(Opcodes.INVOKEDYNAMIC, i.index);
        code.putShort(0);
    }

    @Override
    public void visitJumpInsn(int opcode, final Label label) {
        boolean isWide = opcode >= 200; // GOTO_W
        opcode = isWide ? opcode - 33 : opcode;
        lastCodeOffset = code.length;
        Label nextInsn = null;
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES) {
                currentBlock.frame.execute(opcode, 0, null, null);
                // 'label' is the target of a jump instruction
                label.getFirst().status |= Label.TARGET;
                // adds 'label' as a successor of this basic block
                addSuccessor(Edge.NORMAL, label);
                if (opcode != Opcodes.GOTO) {
                    // creates a Label for the next basic block
                    nextInsn = new Label();
                }
            } else if (compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, 0, null, null);
            } else {
                if (opcode == Opcodes.JSR) {
                    if ((label.status & Label.SUBROUTINE) == 0) {
                        label.status |= Label.SUBROUTINE;
                        ++subroutines;
                    }
                    currentBlock.status |= Label.JSR;
                    addSuccessor(stackSize + 1, label);
                    // creates a Label for the next basic block
                    nextInsn = new Label();
                    /*
                     * note that, by construction in this method, a JSR block
                     * has at least two successors in the control flow graph:
                     * the first one leads the next instruction after the JSR,
                     * while the second one leads to the JSR target.
                     */
                } else {
                    // updates current stack size (max stack size unchanged
                    // because stack size variation always negative in this
                    // case)
                    stackSize += Frame.SIZE[opcode];
                    addSuccessor(stackSize, label);
                }
            }
        }
        // adds the instruction to the bytecode of the method
        if ((label.status & Label.RESOLVED) != 0
                && label.position - code.length < Short.MIN_VALUE) {
            /*
             * case of a backward jump with an offset < -32768. In this case
             * we automatically replace GOTO with GOTO_W, JSR with JSR_W and
             * IFxxx <l> with IFNOTxxx <L> GOTO_W <l> L:..., where IFNOTxxx is
             * the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ) and where
             * <L> designates the instruction just after the GOTO_W.
             */
            if (opcode == Opcodes.GOTO) {
                code.putByte(200); // GOTO_W
            } else if (opcode == Opcodes.JSR) {
                code.putByte(201); // JSR_W
            } else {
                // if the IF instruction is transformed into IFNOT GOTO_W the
                // next instruction becomes the target of the IFNOT instruction
                if (nextInsn != null) {
                    nextInsn.status |= Label.TARGET;
                }
                code.putByte(opcode <= 166 ? ((opcode + 1) ^ 1) - 1
                        : opcode ^ 1);
                code.putShort(8); // jump offset
                // ASM pseudo GOTO_W insn, see ClassReader. We don't use a
                // real GOTO_W because we might need to insert a frame just
                // after (as the target of the IFNOTxxx jump instruction).
                code.putByte(220);
                cw.hasAsmInsns = true;
            }
            label.put(this, code, code.length - 1, true);
        } else if (isWide) {
            /*
             * case of a GOTO_W or JSR_W specified by the user (normally
             * ClassReader when used to resize instructions). In this case we
             * keep the original instruction.
             */
            code.putByte(opcode + 33);
            label.put(this, code, code.length - 1, true);
        } else {
            /*
             * case of a backward jump with an offset >= -32768, or of a
             * forward jump with, of course, an unknown offset. In these cases
             * we store the offset in 2 bytes (which will be increased in
             * resizeInstructions, if needed).
             */
            code.putByte(opcode);
            label.put(this, code, code.length - 1, false);
        }
        if (currentBlock != null) {
            if (nextInsn != null) {
                // if the jump instruction is not a GOTO, the next instruction
                // is also a successor of this instruction. Calling visitLabel
                // adds the label of this next instruction as a successor of
                // the current block, and starts a new basic block
                visitLabel(nextInsn);
            }
            if (opcode == Opcodes.GOTO) {
                noSuccessor();
            }
        }
    }

    @Override
    public void visitLabel(final Label label) {
        // resolves previous forward references to label, if any
        cw.hasAsmInsns |= label.resolve(this, code.length, code.data);
        // updates currentBlock
        if ((label.status & Label.DEBUG) != 0) {
            return;
        }
        if (compute == FRAMES) {
            if (currentBlock != null) {
                if (label.position == currentBlock.position) {
                    // successive labels, do not start a new basic block
                    currentBlock.status |= (label.status & Label.TARGET);
                    label.frame = currentBlock.frame;
                    return;
                }
                // ends current block (with one new successor)
                addSuccessor(Edge.NORMAL, label);
            }
            // begins a new current block
            currentBlock = label;
            if (label.frame == null) {
                label.frame = new Frame();
                label.frame.owner = label;
            }
            // updates the basic block list
            if (previousBlock != null) {
                if (label.position == previousBlock.position) {
                    previousBlock.status |= (label.status & Label.TARGET);
                    label.frame = previousBlock.frame;
                    currentBlock = previousBlock;
                    return;
                }
                previousBlock.successor = label;
            }
            previousBlock = label;
        } else if (compute == INSERTED_FRAMES) {
            if (currentBlock == null) {
                // This case should happen only once, for the visitLabel call
                // in the constructor. Indeed, if compute is equal to
                // INSERTED_FRAMES currentBlock can not be set back to null
                // (see #noSuccessor).
                currentBlock = label;
            } else {
                // Updates the frame owner so that a correct frame offset is
                // computed in visitFrame(Frame).
                currentBlock.frame.owner = label;
            }
        } else if (compute == MAXS) {
            if (currentBlock != null) {
                // ends current block (with one new successor)
                currentBlock.outputStackMax = maxStackSize;
                addSuccessor(stackSize, label);
            }
            // begins a new current block
            currentBlock = label;
            // resets the relative current and max stack sizes
            stackSize = 0;
            maxStackSize = 0;
            // updates the basic block list
            if (previousBlock != null) {
                previousBlock.successor = label;
            }
            previousBlock = label;
        }
    }

    @Override
    public void visitLdcInsn(final Object cst) {
        lastCodeOffset = code.length;
        Item i = cw.newConstItem(cst);
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(Opcodes.LDC, 0, cw, i);
            } else {
                int size;
                // computes the stack size variation (2 for long/double)
                if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE) {
                    size = stackSize + 2;
                } else {
                    size = stackSize + 1;
                }
                // updates current and max stack sizes
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        int index = i.index;
        if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE) {
            code.put12(20 /* LDC2_W */, index);
        } else if (index >= 256) {
            code.put12(19 /* LDC_W */, index);
        } else {
            code.put11(Opcodes.LDC, index);
        }
    }

    @Override
    public void visitIincInsn(final int var, final int increment) {
        lastCodeOffset = code.length;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(Opcodes.IINC, var, null, null);
            }
        }
        if (compute != NOTHING) {
            // updates max locals
            int n = var + 1;
            if (n > maxLocals) {
                maxLocals = n;
            }
        }
        // adds the instruction to the bytecode of the method
        if ((var > 255) || (increment > 127) || (increment < -128)) {
            code.putByte(196 /* WIDE */).put12(Opcodes.IINC, var)
                    .putShort(increment);
        } else {
            code.putByte(Opcodes.IINC).put11(var, increment);
        }
    }

    @Override
    public void visitTableSwitchInsn(final int min, final int max,
            final Label dflt, final Label... labels) {
        lastCodeOffset = code.length;
        // adds the instruction to the bytecode of the method
        int source = code.length;
        code.putByte(Opcodes.TABLESWITCH);
        // 0 to 3 padding bytes so that the following offsets are 4-aligned
        code.putByteArray(null, 0, (4 - code.length % 4) % 4);
        dflt.put(this, code, source, true);
        code.putInt(min).putInt(max);
        for (int i = 0; i < labels.length; ++i) {
            labels[i].put(this, code, source, true);
        }
        // updates currentBlock
        visitSwitchInsn(dflt, labels);
    }

    @Override
    public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
            final Label[] labels) {
        lastCodeOffset = code.length;
        // adds the instruction to the bytecode of the method
        int source = code.length;
        code.putByte(Opcodes.LOOKUPSWITCH);
        // 0 to 3 padding bytes so that the following offsets are 4-aligned
        code.putByteArray(null, 0, (4 - code.length % 4) % 4);
        dflt.put(this, code, source, true);
        code.putInt(labels.length);
        for (int i = 0; i < labels.length; ++i) {
            code.putInt(keys[i]);
            labels[i].put(this, code, source, true);
        }
        // updates currentBlock
        visitSwitchInsn(dflt, labels);
    }

    /**
     * Updates the control flow graph (or the relative stack size, in MAXS
     * mode) for a TABLESWITCH or LOOKUPSWITCH instruction, and ends the
     * current basic block.
     *
     * @param dflt
     *            the default switch target.
     * @param labels
     *            the other switch targets.
     */
    private void visitSwitchInsn(final Label dflt, final Label[] labels) {
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES) {
                currentBlock.frame.execute(Opcodes.LOOKUPSWITCH, 0, null, null);
                // adds current block successors
                addSuccessor(Edge.NORMAL, dflt);
                dflt.getFirst().status |= Label.TARGET;
                for (int i = 0; i < labels.length; ++i) {
                    addSuccessor(Edge.NORMAL, labels[i]);
                    labels[i].getFirst().status |= Label.TARGET;
                }
            } else {
                // updates current stack size (max stack size unchanged)
                --stackSize;
                // adds current block successors
                addSuccessor(stackSize, dflt);
                for (int i = 0; i < labels.length; ++i) {
                    addSuccessor(stackSize, labels[i]);
                }
            }
            // ends current block
            noSuccessor();
        }
    }

    @Override
    public void visitMultiANewArrayInsn(final String desc, final int dims) {
        lastCodeOffset = code.length;
        Item i = cw.newStringishItem(ClassWriter.CLASS, desc);
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(Opcodes.MULTIANEWARRAY, dims, cw, i);
            } else {
                // updates current stack size (max stack size unchanged
                // because stack size variation always negative or null)
                stackSize += 1 - dims;
            }
        }
        // adds the instruction to the bytecode of the method
        code.put12(Opcodes.MULTIANEWARRAY, i.index).putByte(dims);
    }

    @Override
    public AnnotationVisitor visitInsnAnnotation(int typeRef,
            TypePath typePath, String desc, boolean visible) {
        ByteVector bv = new ByteVector();
        // write target_type and target_info
        typeRef = (typeRef & 0xFF0000FF) | (lastCodeOffset << 8);
        AnnotationWriter.putTarget(typeRef, typePath, bv);
        // write type, and reserve space for values count
        bv.putShort(cw.newUTF8(desc)).putShort(0);
        AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
                bv.length - 2);
        if (visible) {
            aw.next = ctanns;
            ctanns = aw;
        } else {
            aw.next = ictanns;
            ictanns = aw;
        }
        return aw;
    }

    @Override
    public void visitTryCatchBlock(final Label start, final Label end,
            final Label handler, final String type) {
        ++handlerCount;
        Handler h = new Handler();
        h.start = start;
        h.end = end;
        h.handler = handler;
        h.desc = type;
        // a zero type index means a catch-all (finally) handler
        h.type = type != null ? cw.newClass(type) : 0;
        if (lastHandler == null) {
            firstHandler = h;
        } else {
            lastHandler.next = h;
        }
        lastHandler = h;
    }

    @Override
    public AnnotationVisitor visitTryCatchAnnotation(int typeRef,
            TypePath typePath, String desc, boolean visible) {
        ByteVector bv = new ByteVector();
        // write target_type and target_info
        AnnotationWriter.putTarget(typeRef, typePath, bv);
        // write type, and reserve space for values count
        bv.putShort(cw.newUTF8(desc)).putShort(0);
        AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
                bv.length - 2);
        if (visible) {
            aw.next = ctanns;
            ctanns = aw;
        } else {
            aw.next = ictanns;
            ictanns = aw;
        }
        return aw;
    }

    @Override
    public void visitLocalVariable(final String name, final String desc,
            final String signature, final Label start, final Label end,
            final int index) {
        if (signature != null) {
            if (localVarType == null) {
                localVarType = new ByteVector();
            }
            ++localVarTypeCount;
            localVarType.putShort(start.position)
                    .putShort(end.position - start.position)
                    .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(signature))
                    .putShort(index);
        }
        if (localVar == null) {
            localVar = new ByteVector();
        }
        ++localVarCount;
        localVar.putShort(start.position)
                .putShort(end.position - start.position)
                .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(desc))
                .putShort(index);
        if (compute != NOTHING) {
            // updates max locals (long and double occupy two slots)
            char c = desc.charAt(0);
            int n = index + (c == 'J' || c == 'D' ?
                    2 : 1);
            if (n > maxLocals) {
                maxLocals = n;
            }
        }
    }

    @Override
    public AnnotationVisitor visitLocalVariableAnnotation(int typeRef,
            TypePath typePath, Label[] start, Label[] end, int[] index,
            String desc, boolean visible) {
        ByteVector bv = new ByteVector();
        // write target_type and target_info
        bv.putByte(typeRef >>> 24).putShort(start.length);
        for (int i = 0; i < start.length; ++i) {
            bv.putShort(start[i].position)
                    .putShort(end[i].position - start[i].position)
                    .putShort(index[i]);
        }
        if (typePath == null) {
            bv.putByte(0);
        } else {
            int length = typePath.b[typePath.offset] * 2 + 1;
            bv.putByteArray(typePath.b, typePath.offset, length);
        }
        // write type, and reserve space for values count
        bv.putShort(cw.newUTF8(desc)).putShort(0);
        AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
                bv.length - 2);
        if (visible) {
            aw.next = ctanns;
            ctanns = aw;
        } else {
            aw.next = ictanns;
            ictanns = aw;
        }
        return aw;
    }

    @Override
    public void visitLineNumber(final int line, final Label start) {
        if (lineNumber == null) {
            lineNumber = new ByteVector();
        }
        ++lineNumberCount;
        lineNumber.putShort(start.position);
        lineNumber.putShort(line);
    }

    @Override
    public void visitMaxs(final int maxStack, final int maxLocals) {
        if (compute == FRAMES) {
            // completes the control flow graph with exception handler blocks
            Handler handler = firstHandler;
            while (handler != null) {
                Label l = handler.start.getFirst();
                Label h = handler.handler.getFirst();
                Label e = handler.end.getFirst();
                // computes the kind of the edges to 'h'
                String t = handler.desc == null ?
"java/lang/Throwable" : handler.desc; int kind = Frame.OBJECT | cw.addType(t); // h is an exception handler h.status |= Label.TARGET; // adds 'h' as a successor of labels between 'start' and 'end' while (l != e) { // creates an edge to 'h' Edge b = new Edge(); b.info = kind; b.successor = h; // adds it to the successors of 'l' b.next = l.successors; l.successors = b; // goes to the next label l = l.successor; } handler = handler.next; } // creates and visits the first (implicit) frame Frame f = labels.frame; f.initInputFrame(cw, access, Type.getArgumentTypes(descriptor), this.maxLocals); visitFrame(f); /* * fix point algorithm: mark the first basic block as 'changed' * (i.e. put it in the 'changed' list) and, while there are changed * basic blocks, choose one, mark it as unchanged, and update its * successors (which can be changed in the process). */ int max = 0; Label changed = labels; while (changed != null) { // removes a basic block from the list of changed basic blocks Label l = changed; changed = changed.next; l.next = null; f = l.frame; // a reachable jump target must be stored in the stack map if ((l.status & Label.TARGET) != 0) { l.status |= Label.STORE; } // all visited labels are reachable, by definition l.status |= Label.REACHABLE; // updates the (absolute) maximum stack size int blockMax = f.inputStack.length + l.outputStackMax; if (blockMax > max) { max = blockMax; } // updates the successors of the current basic block Edge e = l.successors; while (e != null) { Label n = e.successor.getFirst(); boolean change = f.merge(cw, n.frame, e.info); if (change && n.next == null) { // if n has changed and is not already in the 'changed' // list, adds it to this list n.next = changed; changed = n; } e = e.next; } } // visits all the frames that must be stored in the stack map Label l = labels; while (l != null) { f = l.frame; if ((l.status & Label.STORE) != 0) { visitFrame(f); } if ((l.status & Label.REACHABLE) == 0) { // finds start and end of dead basic block 
Label k = l.successor; int start = l.position; int end = (k == null ? code.length : k.position) - 1; // if non empty basic block if (end >= start) { max = Math.max(max, 1); // replaces instructions with NOP ... NOP ATHROW for (int i = start; i < end; ++i) { code.data[i] = Opcodes.NOP; } code.data[end] = (byte) Opcodes.ATHROW; // emits a frame for this unreachable block int frameIndex = startFrame(start, 0, 1); frame[frameIndex] = Frame.OBJECT | cw.addType("java/lang/Throwable"); endFrame(); // removes the start-end range from the exception // handlers firstHandler = Handler.remove(firstHandler, l, k); } } l = l.successor; } handler = firstHandler; handlerCount = 0; while (handler != null) { handlerCount += 1; handler = handler.next; } this.maxStack = max; } else if (compute == MAXS) { // completes the control flow graph with exception handler blocks Handler handler = firstHandler; while (handler != null) { Label l = handler.start; Label h = handler.handler; Label e = handler.end; // adds 'h' as a successor of labels between 'start' and 'end' while (l != e) { // creates an edge to 'h' Edge b = new Edge(); b.info = Edge.EXCEPTION; b.successor = h; // adds it to the successors of 'l' if ((l.status & Label.JSR) == 0) { b.next = l.successors; l.successors = b; } else { // if l is a JSR block, adds b after the first two edges // to preserve the hypothesis about JSR block successors // order (see {@link #visitJumpInsn}) b.next = l.successors.next.next; l.successors.next.next = b; } // goes to the next label l = l.successor; } handler = handler.next; } if (subroutines > 0) { // completes the control flow graph with the RET successors /* * first step: finds the subroutines. This step determines, for * each basic block, to which subroutine(s) it belongs. 
*/ // finds the basic blocks that belong to the "main" subroutine int id = 0; labels.visitSubroutine(null, 1, subroutines); // finds the basic blocks that belong to the real subroutines Label l = labels; while (l != null) { if ((l.status & Label.JSR) != 0) { // the subroutine is defined by l's TARGET, not by l Label subroutine = l.successors.next.successor; // if this subroutine has not been visited yet... if ((subroutine.status & Label.VISITED) == 0) { // ...assigns it a new id and finds its basic blocks id += 1; subroutine.visitSubroutine(null, (id / 32L) << 32 | (1L << (id % 32)), subroutines); } } l = l.successor; } // second step: finds the successors of RET blocks l = labels; while (l != null) { if ((l.status & Label.JSR) != 0) { Label L = labels; while (L != null) { L.status &= ~Label.VISITED2; L = L.successor; } // the subroutine is defined by l's TARGET, not by l Label subroutine = l.successors.next.successor; subroutine.visitSubroutine(l, 0, subroutines); } l = l.successor; } } /* * control flow analysis algorithm: while the block stack is not * empty, pop a block from this stack, update the max stack size, * compute the true (non relative) begin stack size of the * successors of this block, and push these successors onto the * stack (unless they have already been pushed onto the stack). * Note: by hypothesis, the {@link Label#inputStackTop} of the * blocks in the block stack are the true (non relative) beginning * stack sizes of these blocks. 
*/ int max = 0; Label stack = labels; while (stack != null) { // pops a block from the stack Label l = stack; stack = stack.next; // computes the true (non relative) max stack size of this block int start = l.inputStackTop; int blockMax = start + l.outputStackMax; // updates the global max stack size if (blockMax > max) { max = blockMax; } // analyzes the successors of the block Edge b = l.successors; if ((l.status & Label.JSR) != 0) { // ignores the first edge of JSR blocks (virtual successor) b = b.next; } while (b != null) { l = b.successor; // if this successor has not already been pushed... if ((l.status & Label.PUSHED) == 0) { // computes its true beginning stack size... l.inputStackTop = b.info == Edge.EXCEPTION ? 1 : start + b.info; // ...and pushes it onto the stack l.status |= Label.PUSHED; l.next = stack; stack = l; } b = b.next; } } this.maxStack = Math.max(maxStack, max); } else { this.maxStack = maxStack; this.maxLocals = maxLocals; } } @Override public void visitEnd() { } // ------------------------------------------------------------------------ // Utility methods: control flow analysis algorithm // ------------------------------------------------------------------------ /** * Adds a successor to the {@link #currentBlock currentBlock} block. * * @param info * information about the control flow edge to be added. * @param successor * the successor block to be added to the current block. */ private void addSuccessor(final int info, final Label successor) { // creates and initializes an Edge object... Edge b = new Edge(); b.info = info; b.successor = successor; // ...and adds it to the successor list of the currentBlock block b.next = currentBlock.successors; currentBlock.successors = b; } /** * Ends the current basic block. This method must be used in the case where * the current basic block does not have any successor. 
*/
private void noSuccessor() {
    if (compute == FRAMES) {
        // FRAMES mode: terminate the current block by appending a fresh
        // label (with its own empty frame) at the current end of the code,
        // and link it into the chain of blocks via 'previousBlock'.
        Label l = new Label();
        l.frame = new Frame();
        l.frame.owner = l;
        l.resolve(this, code.length, code.data);
        previousBlock.successor = l;
        previousBlock = l;
    } else {
        // MAXS mode: only the maximum output stack size of the finished
        // block is recorded.
        currentBlock.outputStackMax = maxStackSize;
    }
    if (compute != INSERTED_FRAMES) {
        // no current block anymore until the next label/instruction starts one
        currentBlock = null;
    }
}

// ------------------------------------------------------------------------
// Utility methods: stack map frames
// ------------------------------------------------------------------------

/**
 * Visits a frame that has been computed from scratch.
 *
 * @param f
 *            the frame that must be visited.
 */
private void visitFrame(final Frame f) {
    int i, t;
    int nTop = 0;
    int nLocal = 0;
    int nStack = 0;
    int[] locals = f.inputLocals;
    int[] stacks = f.inputStack;
    // computes the number of locals (ignores TOP types that are just after
    // a LONG or a DOUBLE, and all trailing TOP types)
    for (i = 0; i < locals.length; ++i) {
        t = locals[i];
        if (t == Frame.TOP) {
            // TOPs are only counted once a concrete type follows them
            ++nTop;
        } else {
            nLocal += nTop + 1;
            nTop = 0;
        }
        if (t == Frame.LONG || t == Frame.DOUBLE) {
            // category-2 types occupy two slots; skip the implicit second one
            ++i;
        }
    }
    // computes the stack size (ignores TOP types that are just after
    // a LONG or a DOUBLE)
    for (i = 0; i < stacks.length; ++i) {
        t = stacks[i];
        ++nStack;
        if (t == Frame.LONG || t == Frame.DOUBLE) {
            ++i;
        }
    }
    // visits the frame and its content
    int frameIndex = startFrame(f.owner.position, nLocal, nStack);
    for (i = 0; nLocal > 0; ++i, --nLocal) {
        t = locals[i];
        frame[frameIndex++] = t;
        if (t == Frame.LONG || t == Frame.DOUBLE) {
            ++i;
        }
    }
    for (i = 0; i < stacks.length; ++i) {
        t = stacks[i];
        frame[frameIndex++] = t;
        if (t == Frame.LONG || t == Frame.DOUBLE) {
            ++i;
        }
    }
    endFrame();
}

/**
 * Visit the implicit first frame of this method.
*/
private void visitImplicitFirstFrame() {
    // There can be at most descriptor.length() + 1 locals
    int frameIndex = startFrame(0, descriptor.length() + 1, 0);
    if ((access & Opcodes.ACC_STATIC) == 0) {
        // instance methods get 'this' in local 0: either the declaring
        // class, or UNINITIALIZED_THIS inside a constructor
        if ((access & ACC_CONSTRUCTOR) == 0) {
            frame[frameIndex++] = Frame.OBJECT | cw.addType(cw.thisName);
        } else {
            frame[frameIndex++] = Frame.UNINITIALIZED_THIS;
        }
    }
    // walks the method descriptor, starting after the opening '(' (index 1),
    // converting each parameter type into a frame type
    int i = 1;
    loop: while (true) {
        int j = i;
        switch (descriptor.charAt(i++)) {
        case 'Z':
        case 'C':
        case 'B':
        case 'S':
        case 'I':
            frame[frameIndex++] = Frame.INTEGER;
            break;
        case 'F':
            frame[frameIndex++] = Frame.FLOAT;
            break;
        case 'J':
            frame[frameIndex++] = Frame.LONG;
            break;
        case 'D':
            frame[frameIndex++] = Frame.DOUBLE;
            break;
        case '[':
            // array type: consume all '[' dimensions, then the element type
            while (descriptor.charAt(i) == '[') {
                ++i;
            }
            if (descriptor.charAt(i) == 'L') {
                ++i;
                while (descriptor.charAt(i) != ';') {
                    ++i;
                }
            }
            frame[frameIndex++] = Frame.type(cw, descriptor.substring(j, ++i));
            break;
        case 'L':
            // object type: internal name between 'L' and ';'
            while (descriptor.charAt(i) != ';') {
                ++i;
            }
            frame[frameIndex++] = Frame.OBJECT
                    | cw.addType(descriptor.substring(j + 1, i++));
            break;
        default:
            // ')' reached: no more parameters
            break loop;
        }
    }
    // records the actual number of locals written (header occupies 3 slots)
    frame[1] = frameIndex - 3;
    endFrame();
}

/**
 * Starts the visit of a stack map frame.
 *
 * @param offset
 *            the offset of the instruction to which the frame corresponds.
 * @param nLocal
 *            the number of local variables in the frame.
 * @param nStack
 *            the number of stack elements in the frame.
 * @return the index of the next element to be written in this frame.
 */
private int startFrame(final int offset, final int nLocal, final int nStack) {
    int n = 3 + nLocal + nStack;
    if (frame == null || frame.length < n) {
        frame = new int[n];
    }
    // frame layout: [0]=bytecode offset, [1]=nLocal, [2]=nStack, then types
    frame[0] = offset;
    frame[1] = nLocal;
    frame[2] = nStack;
    return 3;
}

/**
 * Checks if the visit of the current frame {@link #frame} is finished, and
 * if yes, write it in the StackMapTable attribute.
*/
private void endFrame() {
    if (previousFrame != null) { // do not write the first frame
        if (stackMap == null) {
            stackMap = new ByteVector();
        }
        writeFrame();
        ++frameCount;
    }
    // the just-visited frame becomes the reference for delta compression
    previousFrame = frame;
    frame = null;
}

/**
 * Compress and writes the current frame {@link #frame} in the StackMapTable
 * attribute.
 */
private void writeFrame() {
    int clocalsSize = frame[1];
    int cstackSize = frame[2];
    if ((cw.version & 0xFFFF) < Opcodes.V1_6) {
        // pre-1.6 class files use the uncompressed "StackMap" format:
        // offset, locals count + types, stack count + types
        stackMap.putShort(frame[0]).putShort(clocalsSize);
        writeFrameTypes(3, 3 + clocalsSize);
        stackMap.putShort(cstackSize);
        writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
        return;
    }
    int localsSize = previousFrame[1];
    int type = FULL_FRAME;
    int k = 0;
    int delta;
    if (frameCount == 0) {
        // first frame: delta is the absolute offset
        delta = frame[0];
    } else {
        delta = frame[0] - previousFrame[0] - 1;
    }
    if (cstackSize == 0) {
        // empty stack: candidate for CHOP (1-3 locals removed), SAME,
        // or APPEND (1-3 locals added), depending on the locals delta k
        k = clocalsSize - localsSize;
        switch (k) {
        case -3:
        case -2:
        case -1:
            type = CHOP_FRAME;
            localsSize = clocalsSize;
            break;
        case 0:
            type = delta < 64 ? SAME_FRAME : SAME_FRAME_EXTENDED;
            break;
        case 1:
        case 2:
        case 3:
            type = APPEND_FRAME;
            break;
        }
    } else if (clocalsSize == localsSize && cstackSize == 1) {
        // same locals, single stack item
        type = delta < 63 ? SAME_LOCALS_1_STACK_ITEM_FRAME
                : SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED;
    }
    if (type != FULL_FRAME) {
        // verify if locals are the same
        int l = 3;
        for (int j = 0; j < localsSize; j++) {
            if (frame[l] != previousFrame[l]) {
                // any mismatch forces the uncompressed form
                type = FULL_FRAME;
                break;
            }
            l++;
        }
    }
    switch (type) {
    case SAME_FRAME:
        stackMap.putByte(delta);
        break;
    case SAME_LOCALS_1_STACK_ITEM_FRAME:
        stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
        writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
        break;
    case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED:
        stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED).putShort(
                delta);
        writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
        break;
    case SAME_FRAME_EXTENDED:
        stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
        break;
    case CHOP_FRAME:
        // CHOP tags are SAME_FRAME_EXTENDED + k with k in [-3,-1]
        stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
        break;
    case APPEND_FRAME:
        // APPEND tags are SAME_FRAME_EXTENDED + k with k in [1,3]
        stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
        writeFrameTypes(3 + localsSize, 3 + clocalsSize);
        break;
    // case FULL_FRAME:
    default:
        stackMap.putByte(FULL_FRAME).putShort(delta).putShort(clocalsSize);
        writeFrameTypes(3, 3 + clocalsSize);
        stackMap.putShort(cstackSize);
        writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
    }
}

/**
 * Writes some types of the current frame {@link #frame} into the
 * StackMapTableAttribute. This method converts types from the format used
 * in {@link Label} to the format used in StackMapTable attributes. In
 * particular, it converts type table indexes to constant pool indexes.
 *
 * @param start
 *            index of the first type in {@link #frame} to write.
 * @param end
 *            index of last type in {@link #frame} to write (exclusive).
*/
private void writeFrameTypes(final int start, final int end) {
    for (int i = start; i < end; ++i) {
        int t = frame[i];
        // high bits hold the array dimension; zero means a scalar type
        int d = t & Frame.DIM;
        if (d == 0) {
            int v = t & Frame.BASE_VALUE;
            switch (t & Frame.BASE_KIND) {
            case Frame.OBJECT:
                // verification_type_info tag 7 = Object, followed by a
                // constant pool class index
                stackMap.putByte(7).putShort(
                        cw.newClass(cw.typeTable[v].strVal1));
                break;
            case Frame.UNINITIALIZED:
                // tag 8 = Uninitialized, followed by the NEW instruction offset
                stackMap.putByte(8).putShort(cw.typeTable[v].intVal);
                break;
            default:
                // primitive/base tags are stored directly in BASE_VALUE
                stackMap.putByte(v);
            }
        } else {
            // array type: rebuild its descriptor string, then emit it as an
            // Object entry
            StringBuilder sb = new StringBuilder();
            d >>= 28;
            while (d-- > 0) {
                sb.append('[');
            }
            if ((t & Frame.BASE_KIND) == Frame.OBJECT) {
                sb.append('L');
                sb.append(cw.typeTable[t & Frame.BASE_VALUE].strVal1);
                sb.append(';');
            } else {
                // maps the internal element-type code to its descriptor char
                switch (t & 0xF) {
                case 1:
                    sb.append('I');
                    break;
                case 2:
                    sb.append('F');
                    break;
                case 3:
                    sb.append('D');
                    break;
                case 9:
                    sb.append('Z');
                    break;
                case 10:
                    sb.append('B');
                    break;
                case 11:
                    sb.append('C');
                    break;
                case 12:
                    sb.append('S');
                    break;
                default:
                    sb.append('J');
                }
            }
            stackMap.putByte(7).putShort(cw.newClass(sb.toString()));
        }
    }
}

// Writes a single frame type given in the visitFrame input format
// (String = class internal name, Integer = primitive tag, Label = offset
// of the corresponding NEW instruction for uninitialized types).
private void writeFrameType(final Object type) {
    if (type instanceof String) {
        stackMap.putByte(7).putShort(cw.newClass((String) type));
    } else if (type instanceof Integer) {
        stackMap.putByte(((Integer) type).intValue());
    } else {
        stackMap.putByte(8).putShort(((Label) type).position);
    }
}

// ------------------------------------------------------------------------
// Utility methods: dump bytecode array
// ------------------------------------------------------------------------

/**
 * Returns the size of the bytecode of this method.
 *
 * @return the size of the bytecode of this method.
*/
final int getSize() {
    if (classReaderOffset != 0) {
        // method copied verbatim from an existing class file: fixed 6-byte
        // method_info header plus the copied region
        return 6 + classReaderLength;
    }
    // 8 = access_flags + name_index + descriptor_index + attributes_count
    int size = 8;
    if (code.length > 0) {
        if (code.length > 65535) {
            // the code_length field of the Code attribute is a u2-addressable
            // u4; the JVM spec caps method bytecode at 65535 bytes
            throw new RuntimeException("Method code too large!");
        }
        // NOTE: each newUTF8 call below also reserves the attribute name in
        // the constant pool, so getSize() must stay in sync with put()
        cw.newUTF8("Code");
        size += 18 + code.length + 8 * handlerCount;
        if (localVar != null) {
            cw.newUTF8("LocalVariableTable");
            size += 8 + localVar.length;
        }
        if (localVarType != null) {
            cw.newUTF8("LocalVariableTypeTable");
            size += 8 + localVarType.length;
        }
        if (lineNumber != null) {
            cw.newUTF8("LineNumberTable");
            size += 8 + lineNumber.length;
        }
        if (stackMap != null) {
            // pre-1.6 class files use the legacy "StackMap" attribute name
            boolean zip = (cw.version & 0xFFFF) >= Opcodes.V1_6;
            cw.newUTF8(zip ? "StackMapTable" : "StackMap");
            size += 8 + stackMap.length;
        }
        if (ctanns != null) {
            cw.newUTF8("RuntimeVisibleTypeAnnotations");
            size += 8 + ctanns.getSize();
        }
        if (ictanns != null) {
            cw.newUTF8("RuntimeInvisibleTypeAnnotations");
            size += 8 + ictanns.getSize();
        }
        if (cattrs != null) {
            size += cattrs.getSize(cw, code.data, code.length, maxStack,
                    maxLocals);
        }
    }
    if (exceptionCount > 0) {
        cw.newUTF8("Exceptions");
        size += 8 + 2 * exceptionCount;
    }
    if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
        if ((cw.version & 0xFFFF) < Opcodes.V1_5
                || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
            // pre-1.5 targets need an explicit Synthetic attribute
            cw.newUTF8("Synthetic");
            size += 6;
        }
    }
    if ((access & Opcodes.ACC_DEPRECATED) != 0) {
        cw.newUTF8("Deprecated");
        size += 6;
    }
    if (signature != null) {
        cw.newUTF8("Signature");
        cw.newUTF8(signature);
        size += 8;
    }
    if (methodParameters != null) {
        cw.newUTF8("MethodParameters");
        size += 7 + methodParameters.length;
    }
    if (annd != null) {
        cw.newUTF8("AnnotationDefault");
        size += 6 + annd.length;
    }
    if (anns != null) {
        cw.newUTF8("RuntimeVisibleAnnotations");
        size += 8 + anns.getSize();
    }
    if (ianns != null) {
        cw.newUTF8("RuntimeInvisibleAnnotations");
        size += 8 + ianns.getSize();
    }
    if (tanns != null) {
        cw.newUTF8("RuntimeVisibleTypeAnnotations");
        size += 8 + tanns.getSize();
    }
    if (itanns != null) {
        cw.newUTF8("RuntimeInvisibleTypeAnnotations");
        size += 8 + itanns.getSize();
    }
    if (panns != null) {
        cw.newUTF8("RuntimeVisibleParameterAnnotations");
        size += 7 + 2 * (panns.length - synthetics);
        for (int i = panns.length - 1; i >= synthetics; --i) {
            size += panns[i] == null ? 0 : panns[i].getSize();
        }
    }
    if (ipanns != null) {
        cw.newUTF8("RuntimeInvisibleParameterAnnotations");
        size += 7 + 2 * (ipanns.length - synthetics);
        for (int i = ipanns.length - 1; i >= synthetics; --i) {
            size += ipanns[i] == null ? 0 : ipanns[i].getSize();
        }
    }
    if (attrs != null) {
        size += attrs.getSize(cw, null, 0, -1, -1);
    }
    return size;
}

/**
 * Puts the bytecode of this method in the given byte vector.
 *
 * @param out
 *            the byte vector into which the bytecode of this method must be
 *            copied.
 */
final void put(final ByteVector out) {
    final int FACTOR = ClassWriter.TO_ACC_SYNTHETIC;
    // masks out pseudo access flags that must not appear in the class file
    int mask = ACC_CONSTRUCTOR | Opcodes.ACC_DEPRECATED
            | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
            | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / FACTOR);
    out.putShort(access & ~mask).putShort(name).putShort(desc);
    if (classReaderOffset != 0) {
        // method copied verbatim from the class being read
        out.putByteArray(cw.cr.b, classReaderOffset, classReaderLength);
        return;
    }
    // first pass: count the method attributes (must match the emission below)
    int attributeCount = 0;
    if (code.length > 0) {
        ++attributeCount;
    }
    if (exceptionCount > 0) {
        ++attributeCount;
    }
    if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
        if ((cw.version & 0xFFFF) < Opcodes.V1_5
                || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
            ++attributeCount;
        }
    }
    if ((access & Opcodes.ACC_DEPRECATED) != 0) {
        ++attributeCount;
    }
    if (signature != null) {
        ++attributeCount;
    }
    if (methodParameters != null) {
        ++attributeCount;
    }
    if (annd != null) {
        ++attributeCount;
    }
    if (anns != null) {
        ++attributeCount;
    }
    if (ianns != null) {
        ++attributeCount;
    }
    if (tanns != null) {
        ++attributeCount;
    }
    if (itanns != null) {
        ++attributeCount;
    }
    if (panns != null) {
        ++attributeCount;
    }
    if (ipanns != null) {
        ++attributeCount;
    }
    if (attrs != null) {
        attributeCount += attrs.getCount();
    }
    out.putShort(attributeCount);
    if (code.length > 0) {
        // computes the Code attribute length (everything after its
        // 6-byte attribute header)
        int size = 12 + code.length + 8 * handlerCount;
        if (localVar != null) {
            size += 8 + localVar.length;
        }
        if (localVarType != null) {
            size += 8 + localVarType.length;
        }
        if (lineNumber != null) {
            size += 8 + lineNumber.length;
        }
        if (stackMap != null) {
            size += 8 + stackMap.length;
        }
        if (ctanns != null) {
            size += 8 + ctanns.getSize();
        }
        if (ictanns != null) {
            size += 8 + ictanns.getSize();
        }
        if (cattrs != null) {
            size += cattrs.getSize(cw, code.data, code.length, maxStack,
                    maxLocals);
        }
        out.putShort(cw.newUTF8("Code")).putInt(size);
        out.putShort(maxStack).putShort(maxLocals);
        out.putInt(code.length).putByteArray(code.data, 0, code.length);
        out.putShort(handlerCount);
        if (handlerCount > 0) {
            Handler h = firstHandler;
            while (h != null) {
                out.putShort(h.start.position).putShort(h.end.position)
                        .putShort(h.handler.position).putShort(h.type);
                h = h.next;
            }
        }
        // counts then emits the attributes nested inside Code
        attributeCount = 0;
        if (localVar != null) {
            ++attributeCount;
        }
        if (localVarType != null) {
            ++attributeCount;
        }
        if (lineNumber != null) {
            ++attributeCount;
        }
        if (stackMap != null) {
            ++attributeCount;
        }
        if (ctanns != null) {
            ++attributeCount;
        }
        if (ictanns != null) {
            ++attributeCount;
        }
        if (cattrs != null) {
            attributeCount += cattrs.getCount();
        }
        out.putShort(attributeCount);
        if (localVar != null) {
            out.putShort(cw.newUTF8("LocalVariableTable"));
            out.putInt(localVar.length + 2).putShort(localVarCount);
            out.putByteArray(localVar.data, 0, localVar.length);
        }
        if (localVarType != null) {
            out.putShort(cw.newUTF8("LocalVariableTypeTable"));
            out.putInt(localVarType.length + 2).putShort(localVarTypeCount);
            out.putByteArray(localVarType.data, 0, localVarType.length);
        }
        if (lineNumber != null) {
            out.putShort(cw.newUTF8("LineNumberTable"));
            out.putInt(lineNumber.length + 2).putShort(lineNumberCount);
            out.putByteArray(lineNumber.data, 0, lineNumber.length);
        }
        if (stackMap != null) {
            boolean zip = (cw.version & 0xFFFF) >= Opcodes.V1_6;
            out.putShort(cw.newUTF8(zip ? "StackMapTable" : "StackMap"));
            out.putInt(stackMap.length + 2).putShort(frameCount);
            out.putByteArray(stackMap.data, 0, stackMap.length);
        }
        if (ctanns != null) {
            out.putShort(cw.newUTF8("RuntimeVisibleTypeAnnotations"));
            ctanns.put(out);
        }
        if (ictanns != null) {
            out.putShort(cw.newUTF8("RuntimeInvisibleTypeAnnotations"));
            ictanns.put(out);
        }
        if (cattrs != null) {
            cattrs.put(cw, code.data, code.length, maxLocals, maxStack, out);
        }
    }
    if (exceptionCount > 0) {
        out.putShort(cw.newUTF8("Exceptions")).putInt(
                2 * exceptionCount + 2);
        out.putShort(exceptionCount);
        for (int i = 0; i < exceptionCount; ++i) {
            out.putShort(exceptions[i]);
        }
    }
    if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
        if ((cw.version & 0xFFFF) < Opcodes.V1_5
                || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
            out.putShort(cw.newUTF8("Synthetic")).putInt(0);
        }
    }
    if ((access & Opcodes.ACC_DEPRECATED) != 0) {
        out.putShort(cw.newUTF8("Deprecated")).putInt(0);
    }
    if (signature != null) {
        out.putShort(cw.newUTF8("Signature")).putInt(2)
                .putShort(cw.newUTF8(signature));
    }
    if (methodParameters != null) {
        out.putShort(cw.newUTF8("MethodParameters"));
        out.putInt(methodParameters.length + 1).putByte(
                methodParametersCount);
        out.putByteArray(methodParameters.data, 0, methodParameters.length);
    }
    if (annd != null) {
        out.putShort(cw.newUTF8("AnnotationDefault"));
        out.putInt(annd.length);
        out.putByteArray(annd.data, 0, annd.length);
    }
    if (anns != null) {
        out.putShort(cw.newUTF8("RuntimeVisibleAnnotations"));
        anns.put(out);
    }
    if (ianns != null) {
        out.putShort(cw.newUTF8("RuntimeInvisibleAnnotations"));
        ianns.put(out);
    }
    if (tanns != null) {
        out.putShort(cw.newUTF8("RuntimeVisibleTypeAnnotations"));
        tanns.put(out);
    }
    if (itanns != null) {
        out.putShort(cw.newUTF8("RuntimeInvisibleTypeAnnotations"));
        itanns.put(out);
    }
    if (panns != null) {
        out.putShort(cw.newUTF8("RuntimeVisibleParameterAnnotations"));
        AnnotationWriter.put(panns, synthetics, out);
    }
    if (ipanns != null) {
        out.putShort(cw.newUTF8("RuntimeInvisibleParameterAnnotations"));
        AnnotationWriter.put(ipanns, synthetics, out);
    }
    if (attrs != null) {
        attrs.put(cw, null, 0, -1, -1, out);
    }
}
}
apache-2.0
dropbox/bazel
src/test/java/com/google/devtools/build/lib/skyframe/serialization/DynamicCodecTest.java
12490
// Copyright 2018 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe.serialization; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.fail; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.lib.skyframe.serialization.testutils.SerializationTester; import java.io.BufferedInputStream; import java.util.Arrays; import java.util.Objects; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link DynamicCodec}. 
*/
@RunWith(JUnit4.class)
public final class DynamicCodecTest {

  // Fixture: plain immutable class with reference and primitive fields.
  private static class SimpleExample {
    private final String elt;
    private final String elt2;
    private final int x;

    private SimpleExample(String elt, String elt2, int x) {
      this.elt = elt;
      this.elt2 = elt2;
      this.x = x;
    }

    @SuppressWarnings("EqualsHashCode") // Testing
    @Override
    public boolean equals(Object other) {
      if (!(other instanceof SimpleExample)) {
        return false;
      }
      SimpleExample that = (SimpleExample) other;
      return Objects.equals(elt, that.elt) && Objects.equals(elt2, that.elt2) && x == that.x;
    }
  }

  @Test
  public void testExample() throws Exception {
    new SerializationTester(new SimpleExample("a", "b", -5), new SimpleExample("a", null, 10))
        .addCodec(new DynamicCodec(SimpleExample.class))
        .makeMemoizing()
        .runTests();
  }

  // Fixture: subclass whose field name shadows the superclass field, to
  // exercise per-class field resolution in DynamicCodec.
  private static class ExampleSubclass extends SimpleExample {
    private final String elt; // duplicate name with superclass

    private ExampleSubclass(String elt1, String elt2, String elt3, int x) {
      super(elt1, elt2, x);
      this.elt = elt3;
    }

    @SuppressWarnings("EqualsHashCode") // Testing
    @Override
    public boolean equals(Object other) {
      if (!(other instanceof ExampleSubclass)) {
        return false;
      }
      if (!super.equals(other)) {
        return false;
      }
      ExampleSubclass that = (ExampleSubclass) other;
      return Objects.equals(elt, that.elt);
    }
  }

  @Test
  public void testExampleSubclass() throws Exception {
    new SerializationTester(
            new ExampleSubclass("a", "b", "c", 0), new ExampleSubclass("a", null, null, 15))
        .addCodec(new DynamicCodec(ExampleSubclass.class))
        .makeMemoizing()
        .runTests();
  }

  // Fixture: sub-int-sized primitives (plus Void) to exercise narrow
  // field encodings.
  private static class ExampleSmallPrimitives {
    private final Void v;
    private final boolean bit;
    private final byte b;
    private final short s;
    private final char c;

    private ExampleSmallPrimitives(boolean bit, byte b, short s, char c) {
      this.v = null;
      this.bit = bit;
      this.b = b;
      this.s = s;
      this.c = c;
    }

    @SuppressWarnings("EqualsHashCode") // Testing
    @Override
    public boolean equals(Object other) {
      if (!(other instanceof ExampleSmallPrimitives)) {
        return false;
      }
      ExampleSmallPrimitives that = (ExampleSmallPrimitives) other;
      return v == that.v && bit == that.bit && b == that.b && s == that.s && c == that.c;
    }
  }

  @Test
  public void testExampleSmallPrimitives() throws Exception {
    new SerializationTester(
            new ExampleSmallPrimitives(false, (byte) 0, (short) 0, 'a'),
            new ExampleSmallPrimitives(false, (byte) 120, (short) 18000, 'x'),
            new ExampleSmallPrimitives(true, Byte.MIN_VALUE, Short.MIN_VALUE, Character.MIN_VALUE),
            new ExampleSmallPrimitives(true, Byte.MAX_VALUE, Short.MAX_VALUE, Character.MAX_VALUE))
        .addCodec(new DynamicCodec(ExampleSmallPrimitives.class))
        .makeMemoizing()
        .runTests();
  }

  // Fixture: 32-bit primitives.
  private static class ExampleMediumPrimitives {
    private final int i;
    private final float f;

    private ExampleMediumPrimitives(int i, float f) {
      this.i = i;
      this.f = f;
    }

    @SuppressWarnings("EqualsHashCode") // Testing
    @Override
    public boolean equals(Object other) {
      if (!(other instanceof ExampleMediumPrimitives)) {
        return false;
      }
      ExampleMediumPrimitives that = (ExampleMediumPrimitives) other;
      return i == that.i && f == that.f;
    }
  }

  @Test
  public void testExampleMediumPrimitives() throws Exception {
    new SerializationTester(
            new ExampleMediumPrimitives(12345, 1e12f),
            new ExampleMediumPrimitives(67890, -6e9f),
            new ExampleMediumPrimitives(Integer.MIN_VALUE, Float.MIN_VALUE),
            new ExampleMediumPrimitives(Integer.MAX_VALUE, Float.MAX_VALUE))
        .addCodec(new DynamicCodec(ExampleMediumPrimitives.class))
        .makeMemoizing()
        .runTests();
  }

  // Fixture: 64-bit primitives.
  private static class ExampleLargePrimitives {
    private final long l;
    private final double d;

    private ExampleLargePrimitives(long l, double d) {
      this.l = l;
      this.d = d;
    }

    @SuppressWarnings("EqualsHashCode") // Testing
    @Override
    public boolean equals(Object other) {
      if (!(other instanceof ExampleLargePrimitives)) {
        return false;
      }
      ExampleLargePrimitives that = (ExampleLargePrimitives) other;
      return l == that.l && d == that.d;
    }
  }

  @Test
  public void testExampleLargePrimitives() throws Exception {
    new SerializationTester(
            new ExampleLargePrimitives(12345346523453L, 1e300),
            new ExampleLargePrimitives(678900093045L, -9e180),
            new ExampleLargePrimitives(Long.MIN_VALUE, Double.MIN_VALUE),
            new ExampleLargePrimitives(Long.MAX_VALUE, Double.MAX_VALUE))
        .addCodec(new DynamicCodec(ExampleLargePrimitives.class))
        .makeMemoizing()
        .runTests();
  }

  // Fixture: reference and primitive array fields.
  private static class ArrayExample {
    String[] text;
    byte[] numbers;
    char[] chars;
    long[] longs;

    private ArrayExample(String[] text, byte[] numbers, char[] chars, long[] longs) {
      this.text = text;
      this.numbers = numbers;
      this.chars = chars;
      this.longs = longs;
    }

    @SuppressWarnings("EqualsHashCode") // Testing
    @Override
    public boolean equals(Object other) {
      if (!(other instanceof ArrayExample)) {
        return false;
      }
      ArrayExample that = (ArrayExample) other;
      return Arrays.equals(text, that.text)
          && Arrays.equals(numbers, that.numbers)
          && Arrays.equals(chars, that.chars)
          && Arrays.equals(longs, that.longs);
    }
  }

  @Test
  public void testArray() throws Exception {
    new SerializationTester(
            new ArrayExample(null, null, null, null),
            new ArrayExample(new String[] {}, new byte[] {}, new char[] {}, new long[] {}),
            new ArrayExample(
                new String[] {"a", "b", "cde"},
                new byte[] {-1, 0, 1},
                new char[] {'a', 'b', 'c', 'x', 'y', 'z'},
                new long[] {Long.MAX_VALUE, Long.MIN_VALUE, 27983741982341L, 52893748523495834L}))
        .addCodec(new DynamicCodec(ArrayExample.class))
        .runTests();
  }

  // Fixture: nested (jagged) arrays, including a null inner array.
  private static class NestedArrayExample {
    int[][] numbers;

    private NestedArrayExample(int[][] numbers) {
      this.numbers = numbers;
    }

    @SuppressWarnings("EqualsHashCode") // Testing
    @Override
    public boolean equals(Object other) {
      if (!(other instanceof NestedArrayExample)) {
        return false;
      }
      NestedArrayExample that = (NestedArrayExample) other;
      return Arrays.deepEquals(numbers, that.numbers);
    }
  }

  @Test
  public void testNestedArray() throws Exception {
    new SerializationTester(
            new NestedArrayExample(null),
            new NestedArrayExample(
                new int[][] {
                  {1, 2, 3},
                  {4, 5, 6, 9},
                  {7}
                }),
            new NestedArrayExample(new int[][] {{1, 2, 3}, null, {7}}))
        .addCodec(new DynamicCodec(NestedArrayExample.class))
        .runTests();
  }

  // Fixtures: two mutually-referencing classes forming a reference cycle,
  // which requires memoizing serialization to terminate.
  private static class CycleA {
    private final int value;
    private CycleB b;

    private CycleA(int value) {
      this.value = value;
    }

    @SuppressWarnings("EqualsHashCode") // Testing
    @Override
    public boolean equals(Object other) {
      // Integrity check. Not really part of equals.
      assertThat(b.a).isEqualTo(this);
      if (!(other instanceof CycleA)) {
        return false;
      }
      CycleA that = (CycleA) other;
      // Consistency check. Not really part of equals.
      assertThat(that.b.a).isEqualTo(that);
      return value == that.value && b.value() == that.b.value;
    }
  }

  private static class CycleB {
    private final int value;
    private CycleA a;

    private CycleB(int value) {
      this.value = value;
    }

    public int value() {
      return value;
    }
  }

  // Builds an A<->B cycle with the given payload values.
  private static CycleA createCycle(int valueA, int valueB) {
    CycleA a = new CycleA(valueA);
    a.b = new CycleB(valueB);
    a.b.a = a;
    return a;
  }

  @Test
  public void testCyclic() throws Exception {
    new SerializationTester(createCycle(1, 2), createCycle(3, 4))
        .addCodec(new DynamicCodec(CycleA.class))
        .addCodec(new DynamicCodec(CycleB.class))
        .makeMemoizing()
        .runTests();
  }

  enum EnumExample {
    ZERO,
    ONE,
    TWO,
    THREE
  }

  // Fixture: mix of primitives, an enum and a String, used with an explicit
  // EnumCodec registered alongside the DynamicCodec.
  static class PrimitiveExample {
    private final boolean booleanValue;
    private final int intValue;
    private final double doubleValue;
    private final EnumExample enumValue;
    private final String stringValue;

    PrimitiveExample(
        boolean booleanValue,
        int intValue,
        double doubleValue,
        EnumExample enumValue,
        String stringValue) {
      this.booleanValue = booleanValue;
      this.intValue = intValue;
      this.doubleValue = doubleValue;
      this.enumValue = enumValue;
      this.stringValue = stringValue;
    }

    @SuppressWarnings("EqualsHashCode") // Testing
    @Override
    public boolean equals(Object object) {
      if (object == null) {
        return false;
      }
      PrimitiveExample that = (PrimitiveExample) object;
      return booleanValue == that.booleanValue
          && intValue == that.intValue
          && doubleValue == that.doubleValue
          && Objects.equals(enumValue, that.enumValue)
          && Objects.equals(stringValue, that.stringValue);
    }
  }

  @Test
  public void testPrimitiveExample() throws Exception {
    new SerializationTester(
            new PrimitiveExample(true, 1, 1.1, EnumExample.ZERO, "foo"),
            new PrimitiveExample(false, -1, -5.5, EnumExample.ONE, "bar"),
            new PrimitiveExample(true, 5, 20.0, EnumExample.THREE, null),
            new PrimitiveExample(true, 100, 100, null, "hello"))
        .addCodec(new DynamicCodec(PrimitiveExample.class))
        .addCodec(new EnumCodec<>(EnumExample.class))
        .setRepetitions(100000)
        .runTests();
  }

  // Fixtures: classes reachable only through a field type that has no codec,
  // to verify the error path and its diagnostic message.
  private static class NoCodecExample2 {
    @SuppressWarnings("unused")
    private final BufferedInputStream noCodec = new BufferedInputStream(null);
  }

  private static class NoCodecExample1 {
    @SuppressWarnings("unused")
    private final NoCodecExample2 noCodec = new NoCodecExample2();
  }

  @Test
  public void testNoCodecExample() throws Exception {
    ObjectCodecs codecs = new ObjectCodecs(AutoRegistry.get(), ImmutableMap.of());
    try {
      codecs.serializeMemoized(new NoCodecExample1());
      fail();
    } catch (SerializationException.NoCodecException expected) {
      // The message lists the offending type followed by the reference path
      // that reached it.
      assertThat(expected)
          .hasMessageThat()
          .contains(
              "java.io.BufferedInputStream ["
                  + "java.io.BufferedInputStream, "
                  + "com.google.devtools.build.lib.skyframe.serialization."
                  + "DynamicCodecTest$NoCodecExample2, "
                  + "com.google.devtools.build.lib.skyframe.serialization."
                  + "DynamicCodecTest$NoCodecExample1]");
    }
  }
}
apache-2.0
apache/commons-dbcp
src/test/java/org/apache/commons/dbcp2/TesterUtils.java
1413
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.dbcp2; import java.lang.reflect.Field; public class TesterUtils { /** * Access a private field. Do it this way rather than increasing the * visibility of the field in the public API. */ public static Object getField(final Object target, final String fieldName) throws Exception { final Class<?> clazz = target.getClass(); final Field f = clazz.getDeclaredField(fieldName); f.setAccessible(true); return f.get(target); } private TesterUtils() { // Utility class - hide default constructor } }
apache-2.0
drewnoakes/metadata-extractor
Source/com/drew/metadata/exif/makernotes/LeicaMakernoteDirectory.java
3940
/*
 * Copyright 2002-2019 Drew Noakes and contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * More information about this project is available at:
 *
 *    https://drewnoakes.com/code/exif/
 *    https://github.com/drewnoakes/metadata-extractor
 */
package com.drew.metadata.exif.makernotes;

import com.drew.lang.annotations.NotNull;
import com.drew.metadata.Directory;

import java.util.HashMap;

/**
 * Describes tags specific to certain Leica cameras.
 * <p>
 * Tag reference from: http://gvsoft.homedns.org/exif/makernote-leica-type1.html
 *
 * @author Drew Noakes https://drewnoakes.com
 */
@SuppressWarnings("WeakerAccess")
public class LeicaMakernoteDirectory extends Directory
{
    // Tag identifiers as they appear in the Leica makernote (0x03xx range).
    public static final int TAG_QUALITY = 0x0300;
    public static final int TAG_USER_PROFILE = 0x0302;
    public static final int TAG_SERIAL_NUMBER = 0x0303;
    public static final int TAG_WHITE_BALANCE = 0x0304;

    public static final int TAG_LENS_TYPE = 0x0310;
    public static final int TAG_EXTERNAL_SENSOR_BRIGHTNESS_VALUE = 0x0311;
    public static final int TAG_MEASURED_LV = 0x0312;
    public static final int TAG_APPROXIMATE_F_NUMBER = 0x0313;

    public static final int TAG_CAMERA_TEMPERATURE = 0x0320;
    public static final int TAG_COLOR_TEMPERATURE = 0x0321;
    public static final int TAG_WB_RED_LEVEL = 0x0322;
    public static final int TAG_WB_GREEN_LEVEL = 0x0323;
    public static final int TAG_WB_BLUE_LEVEL = 0x0324;

    public static final int TAG_CCD_VERSION = 0x0330;
    public static final int TAG_CCD_BOARD_VERSION = 0x0331;
    public static final int TAG_CONTROLLER_BOARD_VERSION = 0x0332;
    public static final int TAG_M16_C_VERSION = 0x0333;

    public static final int TAG_IMAGE_ID_NUMBER = 0x0340;

    // Maps each tag identifier to its human-readable name.
    @NotNull
    private static final HashMap<Integer, String> _tagNameMap = new HashMap<Integer, String>();

    static
    {
        _tagNameMap.put(TAG_QUALITY, "Quality");
        _tagNameMap.put(TAG_USER_PROFILE, "User Profile");
        _tagNameMap.put(TAG_SERIAL_NUMBER, "Serial Number");
        _tagNameMap.put(TAG_WHITE_BALANCE, "White Balance");

        _tagNameMap.put(TAG_LENS_TYPE, "Lens Type");
        _tagNameMap.put(TAG_EXTERNAL_SENSOR_BRIGHTNESS_VALUE, "External Sensor Brightness Value");
        _tagNameMap.put(TAG_MEASURED_LV, "Measured LV");
        _tagNameMap.put(TAG_APPROXIMATE_F_NUMBER, "Approximate F Number");

        _tagNameMap.put(TAG_CAMERA_TEMPERATURE, "Camera Temperature");
        _tagNameMap.put(TAG_COLOR_TEMPERATURE, "Color Temperature");
        _tagNameMap.put(TAG_WB_RED_LEVEL, "WB Red Level");
        _tagNameMap.put(TAG_WB_GREEN_LEVEL, "WB Green Level");
        _tagNameMap.put(TAG_WB_BLUE_LEVEL, "WB Blue Level");

        _tagNameMap.put(TAG_CCD_VERSION, "CCD Version");
        _tagNameMap.put(TAG_CCD_BOARD_VERSION, "CCD Board Version");
        _tagNameMap.put(TAG_CONTROLLER_BOARD_VERSION, "Controller Board Version");
        _tagNameMap.put(TAG_M16_C_VERSION, "M16 C Version");

        _tagNameMap.put(TAG_IMAGE_ID_NUMBER, "Image ID Number");
    }

    public LeicaMakernoteDirectory()
    {
        // Descriptor converts raw tag values into display strings.
        this.setDescriptor(new LeicaMakernoteDescriptor(this));
    }

    @Override
    @NotNull
    public String getName()
    {
        return "Leica Makernote";
    }

    @Override
    @NotNull
    protected HashMap<Integer, String> getTagNameMap()
    {
        return _tagNameMap;
    }
}
apache-2.0
flofreud/aws-sdk-java
aws-java-sdk-cloudfront/src/main/java/com/amazonaws/services/cloudfront/package-info.java
643
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ /** * */ package com.amazonaws.services.cloudfront;
apache-2.0
SourcePond/fileobserver-api
fileobserver-impl/src/main/java/ch/sourcepond/io/fileobserver/impl/dispatch/DefaultDispatchKey.java
1911
/*
 * Copyright (C) 2017 Roland Hauser, <sourcepond@gmail.com>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ch.sourcepond.io.fileobserver.impl.dispatch;

import ch.sourcepond.io.fileobserver.api.DispatchKey;

import java.nio.file.Path;
import java.util.Objects;

import static java.lang.String.format;

/**
 * Immutable {@link DispatchKey} implementation pairing a watched-directory key
 * with a path relative to that directory. Equality and hash code are defined
 * over both components.
 */
final class DefaultDispatchKey implements DispatchKey {

    // Both components are immutable once assigned.
    private final Object directoryKey;
    private final Path relativePath;

    public DefaultDispatchKey(final Object pDirectoryKey, final Path pRelativePath) {
        directoryKey = pDirectoryKey;
        relativePath = pRelativePath;
    }

    @Override
    public Object getDirectoryKey() {
        return directoryKey;
    }

    @Override
    public Path getRelativePath() {
        return relativePath;
    }

    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        // The class is final, so an instanceof check is equivalent to the
        // usual getClass() comparison here.
        if (!(o instanceof DefaultDispatchKey)) {
            return false;
        }
        final DefaultDispatchKey key = (DefaultDispatchKey) o;
        return Objects.equals(directoryKey, key.directoryKey)
                && Objects.equals(relativePath, key.relativePath);
    }

    @Override
    public int hashCode() {
        return Objects.hash(directoryKey, relativePath);
    }

    @Override
    public String toString() {
        return format("[%s:%s]", directoryKey, relativePath);
    }
}
apache-2.0
cowthan/UI-Robot
app/src/main/java/org/ayo/robot/canvas/shape/RectView.java
2195
package org.ayo.robot.canvas.shape; import android.content.Context; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.RectF; import org.ayo.robot.BaseView; public class RectView extends BaseView { public RectView(Context context) { super(context); init(); } private void init(){ } int centerX, centerY; int rw, rh; boolean isInited = false; @Override protected void drawShape(Canvas canvas, int w, int h, Paint paint) { // Rect rect = new Rect(100, 100, 200, 200); // RectF rectF = new RectF(100, 100, 200, 200); if(!isInited){ centerX = w/2; centerY = h/2; rw = w - 200; rh = h - 200; isInited = true; } drawRect(canvas, centerX, centerY, rw, rh, paint); /* Rect的四个顶点是int RectF的四个顶点float RectF和rx(x-radius),ry(y-radius)构成了圆角Rect rx The x-radius of the oval used to round the corners ry The y-radius of the oval used to round the corners Rect和RectF包含的方法: inset union 是否包含点或矩形 */ } public static void drawRect(Canvas canvas, int centerX, int centerY, int w, int h, Paint p){ int left = centerX - w/2; int top = centerY - h/2; int right = left + w; int bottom = top + h; canvas.drawRect(new RectF(left, top, right, bottom), p); } public void moveCenter(int dx, int dy){ centerX += dx; centerY += dy; invalidate(); } public void changeSize(int dw, int dh){ rw += dw; rh += dh; invalidate(); } @Override public String getTitle() { return "canvas.drawRect(rectF, paint)"; } @Override public String getMethod() { return "画矩形"; } @Override public String getComment() { return "画个矩形\n" + "Rect处理int\n" + "RectF处理float\n" + "二者都有inset,union,contains点或矩形的方法"; } }
apache-2.0
ShailShah/alluxio
core/server/master/src/main/java/alluxio/master/file/options/LoadMetadataOptions.java
3539
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */
package alluxio.master.file.options;

import alluxio.underfs.UfsStatus;

import com.google.common.base.Objects;

import javax.annotation.concurrent.NotThreadSafe;

/**
 * Method options for loading metadata.
 */
@NotThreadSafe
public final class LoadMetadataOptions {
  // Whether missing parent directories should be created (recursive load).
  private boolean mCreateAncestors;
  // Whether the direct children of the path should also be loaded.
  private boolean mLoadDirectChildren;
  // UFS status of the path, or null if unknown.
  private UfsStatus mUfsStatus;

  /**
   * @return the default {@link LoadMetadataOptions}
   */
  public static LoadMetadataOptions defaults() {
    return new LoadMetadataOptions();
  }

  private LoadMetadataOptions() {
    mCreateAncestors = false;
    mLoadDirectChildren = false;
    mUfsStatus = null;
  }

  /**
   * @return null if unknown, else the status of UFS path for which loading metadata
   */
  public UfsStatus getUfsStatus() {
    return mUfsStatus;
  }

  /**
   * @return the recursive flag value; it specifies whether parent directories should be created if
   *         they do not already exist
   */
  public boolean isCreateAncestors() {
    return mCreateAncestors;
  }

  /**
   * @return the load direct children flag. It specifies whether the direct children should
   *         be loaded.
   */
  public boolean isLoadDirectChildren() {
    return mLoadDirectChildren;
  }

  /**
   * Sets the recursive flag.
   *
   * @param createAncestors the recursive flag value to use; it specifies whether parent directories
   *        should be created if they do not already exist
   * @return the updated options object
   */
  public LoadMetadataOptions setCreateAncestors(boolean createAncestors) {
    mCreateAncestors = createAncestors;
    return this;
  }

  /**
   * Sets the load direct children flag.
   *
   * @param loadDirectChildren the load direct children flag. It specifies whether the direct
   *        children should be loaded.
   * @return the updated object
   */
  public LoadMetadataOptions setLoadDirectChildren(boolean loadDirectChildren) {
    mLoadDirectChildren = loadDirectChildren;
    return this;
  }

  /**
   * Sets the UFS status of path.
   *
   * @param status UFS status of path
   * @return the updated object
   */
  public LoadMetadataOptions setUfsStatus(UfsStatus status) {
    mUfsStatus = status;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof LoadMetadataOptions)) {
      return false;
    }
    LoadMetadataOptions that = (LoadMetadataOptions) o;
    // Fix: compare primitive booleans directly instead of via Guava
    // Objects.equal, which needlessly autoboxes both operands.
    return mCreateAncestors == that.mCreateAncestors
        && mLoadDirectChildren == that.mLoadDirectChildren
        && Objects.equal(mUfsStatus, that.mUfsStatus);
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(mCreateAncestors, mLoadDirectChildren, mUfsStatus);
  }

  @Override
  public String toString() {
    return Objects.toStringHelper(this).add("createAncestors", mCreateAncestors)
        .add("loadDirectChildren", mLoadDirectChildren)
        .add("ufsStatus", mUfsStatus).toString();
  }
}
apache-2.0
noties/Storm
library/src/main/java/ru/noties/storm/sd/IntSerializer.java
1033
/*
 * Copyright 2015 Dimitry Ivanov (mail@dimitryivanov.ru)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ru.noties.storm.sd;

import ru.noties.storm.FieldType;

/**
 * Base serializer for values of type {@code T} that are stored as
 * {@link FieldType#INT} fields. Subclasses supply the two directions of the
 * conversion via {@link #serialize(Object)} and {@link #deserialize(int)}.
 *
 * Created by Dimitry Ivanov (mail@dimitryivanov.ru) on 16.02.2015.
 */
public abstract class IntSerializer<T> extends AbsSerializer<T> {

    /** Restores a value from its {@code int} representation. */
    public abstract T deserialize (int value);

    /** Converts a value to its {@code int} representation. */
    public abstract int serialize (T value);

    /** Always {@link FieldType#INT} for this serializer family. */
    @Override
    public final FieldType getSerializedFieldType() {
        return FieldType.INT;
    }
}
apache-2.0
jmostella/armeria
core/src/main/java/com/linecorp/armeria/server/annotation/ProducesOctetStream.java
1082
/*
 * Copyright 2018 LINE Corporation
 *
 * LINE Corporation licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package com.linecorp.armeria.server.annotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * An alias for {@code @Produces("application/octet-stream")}.
 * <p>
 * May be placed on a type or a method; the meta-annotation below carries the
 * actual media type, so annotating with this is exactly equivalent to writing
 * {@link Produces} with {@code "application/octet-stream"} directly.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE, ElementType.METHOD })
@Produces("application/octet-stream")
public @interface ProducesOctetStream {
}
apache-2.0
emmalanguage/emma
emma-mitos/src/main/java/org/emmalanguage/mitos/operators/GroupBy0ReduceTupleIntDouble.java
2114
/*
 * Copyright © 2014 TU Berlin (emma@dima.tu-berlin.de)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.emmalanguage.mitos.operators;

import org.emmalanguage.mitos.util.TupleIntDouble;
import it.unimi.dsi.fastutil.ints.Int2DoubleMap;
import it.unimi.dsi.fastutil.ints.Int2DoubleRBTreeMap;

import java.util.function.Consumer;

/**
 * Groups {@link TupleIntDouble} elements by their int field (f0) and combines
 * values for the same key via the subclass-provided {@link #reduceFunc}.
 * On bag close, emits one (key, value) tuple per key in key order (the backing
 * map is a red-black tree) and releases the map.
 */
public abstract class GroupBy0ReduceTupleIntDouble extends BagOperator<TupleIntDouble, TupleIntDouble> {

    // key -> current aggregate value for the open out-bag
    protected Int2DoubleRBTreeMap hm;

    @Override
    public void openOutBag() {
        super.openOutBag();
        hm = new Int2DoubleRBTreeMap();
        // Kept for backward compatibility: subclasses may consult
        // hm.defaultReturnValue(). It is no longer used as a presence sentinel
        // here (see pushInElement).
        hm.defaultReturnValue(Double.MIN_VALUE);
    }

    @Override
    public void pushInElement(TupleIntDouble e, int logicalInputId) {
        super.pushInElement(e, logicalInputId);
        // Fix: the previous implementation used putIfAbsent and compared the
        // result against the Double.MIN_VALUE sentinel. A key whose stored
        // value happened to equal the sentinel was indistinguishable from an
        // absent key, so the incoming element was silently dropped. An
        // explicit containsKey check has no such blind spot.
        if (hm.containsKey(e.f0)) {
            reduceFunc(e, hm.get(e.f0));
        } else {
            hm.put(e.f0, e.f1);
        }
    }

    /**
     * Combines incoming element {@code e} with the current aggregate {@code g}
     * for key {@code e.f0}. Implementations are expected to update {@code hm}.
     */
    protected abstract void reduceFunc(TupleIntDouble e, double g);

    @Override
    public void closeInBag(int inputId) {
        super.closeInBag(inputId);
        hm.int2DoubleEntrySet().forEach(new Consumer<Int2DoubleMap.Entry>() {
            @Override
            public void accept(Int2DoubleMap.Entry e) {
                out.collectElement(TupleIntDouble.of(e.getIntKey(), e.getDoubleValue()));
            }
        });
        hm = null; // allow the map to be garbage collected between bags
        out.closeBag();
    }
}
apache-2.0
visallo/vertexium
core/src/main/java/org/vertexium/ElementId.java
567
package org.vertexium;

/**
 * Identifies an element (vertex or edge) by element type and id, without
 * requiring the element itself to be loaded.
 */
public interface ElementId extends VertexiumObjectId {
    /**
     * Creates an id referring to a vertex.
     *
     * @param id the vertex id
     * @return an {@link ElementId} with type {@link ElementType#VERTEX}
     */
    static ElementId vertex(String id) {
        return new DefaultElementId(ElementType.VERTEX, id);
    }

    /**
     * Creates an id referring to an edge.
     *
     * @param id the edge id
     * @return an {@link ElementId} with type {@link ElementType#EDGE}
     */
    static ElementId edge(String id) {
        return new DefaultElementId(ElementType.EDGE, id);
    }

    /**
     * Creates an id referring to an element of the given type.
     *
     * @param elementType the type of the element
     * @param id          the element id
     * @return an {@link ElementId} for the given type and id
     */
    static ElementId create(ElementType elementType, String id) {
        return new DefaultElementId(elementType, id);
    }

    /**
     * @return the type of the element
     */
    ElementType getElementType();

    /**
     * @return the id of the element
     */
    String getId();
}
apache-2.0
jk1/intellij-community
platform/lang-impl/src/com/intellij/openapi/projectRoots/impl/DependentSdkType.java
3053
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.projectRoots.impl;

import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkModel;
import com.intellij.openapi.projectRoots.SdkType;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Ref;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.util.Arrays;

/**
 * An SDK type that can only be created when an SDK of another type (its
 * dependency) already exists in the model. If the dependency is missing, the
 * user is offered the chance to create it first.
 *
 * @author Dmitry Avdeev
 */
public abstract class DependentSdkType extends SdkType {

  public DependentSdkType(@NonNls String name) {
    super(name);
  }

  /**
   * Checks if dependencies satisfied.
   */
  protected boolean checkDependency(SdkModel sdkModel) {
    return ContainerUtil.find(sdkModel.getSdks(), this::isValidDependency) != null;
  }

  /** @return true if the given SDK satisfies this type's dependency. */
  protected abstract boolean isValidDependency(Sdk sdk);

  /** @return the message shown when no satisfying dependency SDK exists. */
  public abstract String getUnsatisfiedDependencyMessage();

  @Override
  public boolean supportsCustomCreateUI() {
    return true;
  }

  @Override
  public void showCustomCreateUI(@NotNull final SdkModel sdkModel,
                                 @NotNull JComponent parentComponent,
                                 @NotNull final Consumer<Sdk> sdkCreatedCallback) {
    if (!checkDependency(sdkModel)) {
      // Ask the user whether to create the missing dependency first.
      final int answer = Messages.showOkCancelDialog(
        parentComponent, getUnsatisfiedDependencyMessage(), "Cannot Create SDK", Messages.getWarningIcon());
      if (answer != Messages.OK) {
        return;
      }
      if (fixDependency(sdkModel, sdkCreatedCallback) == null) {
        // User cancelled the dependency creation dialog.
        return;
      }
    }
    createSdkOfType(sdkModel, this, sdkCreatedCallback);
  }

  /** @return the SDK type this type depends on. */
  public abstract SdkType getDependencyType();

  /** Creates the missing dependency SDK; returns null if the user cancels. */
  protected Sdk fixDependency(SdkModel sdkModel, Consumer<Sdk> sdkCreatedCallback) {
    return createSdkOfType(sdkModel, getDependencyType(), sdkCreatedCallback);
  }

  /**
   * Prompts for an SDK home, creates an SDK of the given type there, and
   * reports it through the callback. Returns null if no home was selected.
   */
  protected static Sdk createSdkOfType(final SdkModel sdkModel,
                                       final SdkType sdkType,
                                       final Consumer<Sdk> sdkCreatedCallback) {
    final Ref<Sdk> created = new Ref<>(null);
    SdkConfigurationUtil.selectSdkHome(sdkType, homePath -> {
      String newSdkName = SdkConfigurationUtil.createUniqueSdkName(sdkType, homePath, Arrays.asList(sdkModel.getSdks()));
      final ProjectJdkImpl newJdk = new ProjectJdkImpl(newSdkName, sdkType);
      newJdk.setHomePath(homePath);
      sdkCreatedCallback.consume(newJdk);
      created.set(newJdk);
    });
    return created.get();
  }
}
apache-2.0
chavdar/gobblin-1
gobblin-runtime/src/main/java/gobblin/runtime/instance/StandardGobblinInstanceDriver.java
17339
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package gobblin.runtime.instance;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Service;
import com.google.common.util.concurrent.ServiceManager;
import com.typesafe.config.ConfigFactory;

import gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import gobblin.broker.SimpleScope;
import gobblin.broker.SharedResourcesBrokerFactory;
import gobblin.broker.SharedResourcesBrokerImpl;
import gobblin.broker.iface.SharedResourcesBroker;
import gobblin.instrumented.Instrumented;
import gobblin.metrics.GobblinMetrics;
import gobblin.metrics.MetricContext;
import gobblin.metrics.Tag;
import gobblin.runtime.api.Configurable;
import gobblin.runtime.api.GobblinInstanceEnvironment;
import gobblin.runtime.api.GobblinInstanceLauncher;
import gobblin.runtime.api.GobblinInstancePlugin;
import gobblin.runtime.api.GobblinInstancePluginFactory;
import gobblin.runtime.api.JobCatalog;
import gobblin.runtime.api.JobExecutionLauncher;
import gobblin.runtime.api.JobSpecScheduler;
import gobblin.runtime.job_catalog.FSJobCatalog;
import gobblin.runtime.job_catalog.ImmutableFSJobCatalog;
import gobblin.runtime.job_catalog.InMemoryJobCatalog;
import gobblin.runtime.job_exec.JobLauncherExecutionDriver;
import gobblin.runtime.plugins.email.EmailNotificationPlugin;
import gobblin.runtime.scheduler.ImmediateJobSpecScheduler;
import gobblin.runtime.scheduler.QuartzJobSpecScheduler;
import gobblin.runtime.std.DefaultConfigurableImpl;
import gobblin.util.ClassAliasResolver;
import gobblin.util.ConfigUtils;

/** A simple wrapper {@link DefaultGobblinInstanceDriverImpl} that will instantiate necessary
 * sub-components (e.g. {@link JobCatalog}, {@link JobSpecScheduler}, {@link JobExecutionLauncher}
 * and it will manage their lifecycle. */
public class StandardGobblinInstanceDriver extends DefaultGobblinInstanceDriverImpl {
  public static final String INSTANCE_CFG_PREFIX = "gobblin.instance";
  /** A comma-separated list of class names or aliases of {@link GobblinInstancePluginFactory} for
   * plugins to be instantiated with this instance. */
  public static final String PLUGINS_KEY = "plugins";
  public static final String PLUGINS_FULL_KEY = INSTANCE_CFG_PREFIX + "." + PLUGINS_KEY;

  // Manages the lifecycle of all Service-implementing components; null when there are none.
  private ServiceManager _subservices;
  private final List<GobblinInstancePlugin> _plugins;

  protected StandardGobblinInstanceDriver(String instanceName, Configurable sysConfig,
      JobCatalog jobCatalog,
      JobSpecScheduler jobScheduler, JobExecutionLauncher jobLauncher,
      Optional<MetricContext> instanceMetricContext,
      Optional<Logger> log,
      List<GobblinInstancePluginFactory> plugins,
      SharedResourcesBroker<GobblinScopeTypes> instanceBroker) {
    super(instanceName, sysConfig, jobCatalog, jobScheduler, jobLauncher, instanceMetricContext,
        log, instanceBroker);
    List<Service> componentServices = new ArrayList<>();
    // Components that implement Service get their lifecycle managed by _subservices.
    checkComponentService(getJobCatalog(), componentServices);
    checkComponentService(getJobScheduler(), componentServices);
    checkComponentService(getJobLauncher(), componentServices);
    _plugins = createPlugins(plugins, componentServices);

    if (componentServices.size() > 0) {
      _subservices = new ServiceManager(componentServices);
    }
  }

  /** Instantiates each plugin factory; failed plugins are logged and skipped. */
  private List<GobblinInstancePlugin> createPlugins(List<GobblinInstancePluginFactory> plugins,
      List<Service> componentServices) {
    List<GobblinInstancePlugin> res = new ArrayList<>();
    for (GobblinInstancePluginFactory pluginFactory: plugins) {
      Optional<GobblinInstancePlugin> plugin = createPlugin(this, pluginFactory, componentServices);
      if (plugin.isPresent()) {
        res.add(plugin.get());
      }
    }
    return res;
  }

  /**
   * Creates a plugin from the given factory and registers it as a managed sub-service.
   * Returns absent (and logs a warning) if the factory throws.
   */
  static Optional<GobblinInstancePlugin> createPlugin(StandardGobblinInstanceDriver instance,
      GobblinInstancePluginFactory pluginFactory, List<Service> componentServices) {
    instance.getLog().info("Instantiating a plugin of type: " + pluginFactory);
    try {
      GobblinInstancePlugin plugin = pluginFactory.createPlugin(instance);
      componentServices.add(plugin);
      instance.getLog().info("Instantiated plugin: " + plugin);
      return Optional.of(plugin);
    } catch (RuntimeException e) {
      instance.getLog().warn("Failed to create plugin: " + e, e);
    }
    return Optional.absent();
  }

  @Override
  protected void startUp() throws Exception {
    getLog().info("Starting driver ...");
    if (null != _subservices) {
      getLog().info("Starting subservices");
      _subservices.startAsync();
      _subservices.awaitHealthy(getInstanceCfg().getStartTimeoutMs(), TimeUnit.MILLISECONDS);
      getLog().info("All subservices have been started.");
    }
    else {
      getLog().info("No subservices found.");
    }
    super.startUp();
  }

  /** Adds the component to the managed-service list if it implements {@link Service}. */
  private void checkComponentService(Object component, List<Service> componentServices) {
    if (component instanceof Service) {
      componentServices.add((Service)component);
    }
  }

  @Override
  protected void shutDown() throws Exception {
    getLog().info("Shutting down driver ...");
    // Shut down the driver first, then the sub-services it depends on.
    super.shutDown();
    if (null != _subservices) {
      getLog().info("Shutting down subservices ...");
      _subservices.stopAsync();
      _subservices.awaitStopped(getInstanceCfg().getShutdownTimeoutMs(), TimeUnit.MILLISECONDS);
      getLog().info("All subservices have been shutdown.");
    }
  }

  public static Builder builder() {
    return new Builder();
  }

  /**
   * A builder for StandardGobblinInstanceDriver instances. The goal is to be convention driven
   * rather than configuration.
   *
   * <p>Conventions:
   * <ul>
   * <li> Logger uses the instance name as a category
   * <li> Default implementations of JobCatalog, JobSpecScheduler, JobExecutionLauncher use the
   *      logger as their logger.
   * </ul>
   */
  public static class Builder implements GobblinInstanceEnvironment {
    private static final AtomicInteger INSTANCE_COUNTER = new AtomicInteger(0);

    private Optional<GobblinInstanceEnvironment> _instanceEnv =
        Optional.<GobblinInstanceEnvironment>absent();
    private Optional<String> _instanceName = Optional.absent();
    private Optional<Logger> _log = Optional.absent();
    private Optional<JobCatalog> _jobCatalog = Optional.absent();
    private Optional<JobSpecScheduler> _jobScheduler = Optional.absent();
    private Optional<JobExecutionLauncher> _jobLauncher = Optional.absent();
    private Optional<MetricContext> _metricContext = Optional.absent();
    private Optional<Boolean> _instrumentationEnabled = Optional.absent();
    private Optional<SharedResourcesBroker<GobblinScopeTypes>> _instanceBroker = Optional.absent();
    private List<GobblinInstancePluginFactory> _plugins = new ArrayList<>();
    private final ClassAliasResolver<GobblinInstancePluginFactory> _aliasResolver =
        new ClassAliasResolver<>(GobblinInstancePluginFactory.class);

    public Builder(Optional<GobblinInstanceEnvironment> instanceLauncher) {
      _instanceEnv = instanceLauncher;
    }

    /** Constructor with no Gobblin instance launcher */
    public Builder() {
    }

    /** Constructor with a launcher */
    public Builder(GobblinInstanceLauncher instanceLauncher) {
      this();
      withInstanceEnvironment(instanceLauncher);
    }

    public Builder withInstanceEnvironment(GobblinInstanceEnvironment instanceLauncher) {
      Preconditions.checkNotNull(instanceLauncher);
      _instanceEnv = Optional.of(instanceLauncher);
      return this;
    }

    public Optional<GobblinInstanceEnvironment> getInstanceEnvironment() {
      return _instanceEnv;
    }

    public String getDefaultInstanceName() {
      if (_instanceEnv.isPresent()) {
        return _instanceEnv.get().getInstanceName();
      }
      else {
        return StandardGobblinInstanceDriver.class.getName() + "-" +
            INSTANCE_COUNTER.getAndIncrement();
      }
    }

    @Override
    public String getInstanceName() {
      if (! _instanceName.isPresent()) {
        _instanceName = Optional.of(getDefaultInstanceName());
      }
      return _instanceName.get();
    }

    public Builder withInstanceName(String instanceName) {
      _instanceName = Optional.of(instanceName);
      return this;
    }

    public Logger getDefaultLog() {
      return _instanceEnv.isPresent() ? _instanceEnv.get().getLog() :
          LoggerFactory.getLogger(getInstanceName());
    }

    @Override
    public Logger getLog() {
      if (! _log.isPresent()) {
        _log = Optional.of(getDefaultLog());
      }
      return _log.get();
    }

    public Builder withLog(Logger log) {
      _log = Optional.of(log);
      return this;
    }

    public JobCatalog getDefaultJobCatalog() {
      return new InMemoryJobCatalog(this);
    }

    public JobCatalog getJobCatalog() {
      if (! _jobCatalog.isPresent()) {
        _jobCatalog = Optional.of(getDefaultJobCatalog());
      }
      return _jobCatalog.get();
    }

    public Builder withJobCatalog(JobCatalog jobCatalog) {
      _jobCatalog = Optional.of(jobCatalog);
      return this;
    }

    public Builder withInMemoryJobCatalog() {
      return withJobCatalog(new InMemoryJobCatalog(this));
    }

    public Builder withFSJobCatalog() {
      try {
        return withJobCatalog(new FSJobCatalog(this));
      } catch (IOException e) {
        // FIX: propagate the cause; previously it was dropped, making the
        // underlying I/O failure undiagnosable.
        throw new RuntimeException("Unable to create FS Job Catalog", e);
      }
    }

    public Builder withImmutableFSJobCatalog() {
      try {
        return withJobCatalog(new ImmutableFSJobCatalog(this));
      } catch (IOException e) {
        // FIX: propagate the cause (see withFSJobCatalog).
        throw new RuntimeException("Unable to create FS Job Catalog", e);
      }
    }

    public JobSpecScheduler getDefaultJobScheduler() {
      return new ImmediateJobSpecScheduler(Optional.of(getLog()));
    }

    public JobSpecScheduler getJobScheduler() {
      if (!_jobScheduler.isPresent()) {
        _jobScheduler = Optional.of(getDefaultJobScheduler());
      }
      return _jobScheduler.get();
    }

    public Builder withJobScheduler(JobSpecScheduler jobScheduler) {
      _jobScheduler = Optional.of(jobScheduler);
      return this;
    }

    public Builder withImmediateJobScheduler() {
      return withJobScheduler(new ImmediateJobSpecScheduler(Optional.of(getLog())));
    }

    public Builder withQuartzJobScheduler() {
      return withJobScheduler(new QuartzJobSpecScheduler(this));
    }

    public JobExecutionLauncher getDefaultJobLauncher() {
      JobLauncherExecutionDriver.Launcher res =
          new JobLauncherExecutionDriver.Launcher().withGobblinInstanceEnvironment(this);
      return res;
    }

    public JobExecutionLauncher getJobLauncher() {
      if (! _jobLauncher.isPresent()) {
        _jobLauncher = Optional.of(getDefaultJobLauncher());
      }
      return _jobLauncher.get();
    }

    public Builder withJobLauncher(JobExecutionLauncher jobLauncher) {
      _jobLauncher = Optional.of(jobLauncher);
      return this;
    }

    public Builder withMetricContext(MetricContext instanceMetricContext) {
      _metricContext = Optional.of(instanceMetricContext);
      return this;
    }

    @Override
    public MetricContext getMetricContext() {
      if (!_metricContext.isPresent()) {
        _metricContext = Optional.of(getDefaultMetricContext());
      }
      return _metricContext.get();
    }

    public MetricContext getDefaultMetricContext() {
      // A throw-away State is used only as a vehicle to hand the config to Instrumented.
      gobblin.configuration.State fakeState =
          new gobblin.configuration.State(getSysConfig().getConfigAsProperties());
      List<Tag<?>> tags = new ArrayList<>();
      tags.add(new Tag<>(StandardMetrics.INSTANCE_NAME_TAG, getInstanceName()));
      MetricContext res = Instrumented.getMetricContext(fakeState,
          StandardGobblinInstanceDriver.class, tags);
      return res;
    }

    public Builder withInstanceBroker(SharedResourcesBroker<GobblinScopeTypes> broker) {
      _instanceBroker = Optional.of(broker);
      return this;
    }

    public SharedResourcesBroker<GobblinScopeTypes> getInstanceBroker() {
      if (!_instanceBroker.isPresent()) {
        _instanceBroker = Optional.of(getDefaultInstanceBroker());
      }
      return _instanceBroker.get();
    }

    public SharedResourcesBroker<GobblinScopeTypes> getDefaultInstanceBroker() {
      SharedResourcesBrokerImpl<GobblinScopeTypes> globalBroker =
          SharedResourcesBrokerFactory.createDefaultTopLevelBroker(getSysConfig().getConfig(),
              GobblinScopeTypes.GLOBAL.defaultScopeInstance());
      return globalBroker.newSubscopedBuilder(
          new SimpleScope<>(GobblinScopeTypes.INSTANCE, getInstanceName())).build();
    }

    public StandardGobblinInstanceDriver build() {
      Configurable sysConfig = getSysConfig();
      return new StandardGobblinInstanceDriver(getInstanceName(), sysConfig,
          getJobCatalog(),
          getJobScheduler(),
          getJobLauncher(),
          isInstrumentationEnabled() ?
              Optional.of(getMetricContext()) :
              Optional.<MetricContext>absent(),
          Optional.of(getLog()),
          getPlugins(),
          getInstanceBroker()
          );
    }

    @Override
    public Configurable getSysConfig() {
      return _instanceEnv.isPresent() ? _instanceEnv.get().getSysConfig() :
          DefaultConfigurableImpl.createFromConfig(ConfigFactory.load());
    }

    public Builder withInstrumentationEnabled(boolean enabled) {
      _instrumentationEnabled = Optional.of(enabled);
      return this;
    }

    public boolean getDefaultInstrumentationEnabled() {
      return GobblinMetrics.isEnabled(getSysConfig().getConfig());
    }

    @Override
    public boolean isInstrumentationEnabled() {
      if (!_instrumentationEnabled.isPresent()) {
        _instrumentationEnabled = Optional.of(getDefaultInstrumentationEnabled());
      }
      return _instrumentationEnabled.get();
    }

    @Override
    public List<Tag<?>> generateTags(gobblin.configuration.State state) {
      return Collections.emptyList();
    }

    @Override
    public void switchMetricContext(List<Tag<?>> tags) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void switchMetricContext(MetricContext context) {
      throw new UnsupportedOperationException();
    }

    /**
     * Returns the list of plugins as defined in the system configuration. These are the
     * defined in the PLUGINS_FULL_KEY config option.
     * The list also includes plugins that are automatically added by gobblin.
     */
    public List<GobblinInstancePluginFactory> getDefaultPlugins() {
      List<String> pluginNames =
          ConfigUtils.getStringList(getSysConfig().getConfig(), PLUGINS_FULL_KEY);

      List<GobblinInstancePluginFactory> pluginFactories = Lists.newArrayList();

      // By default email notification plugin is added.
      if (!ConfigUtils.getBoolean(getSysConfig().getConfig(),
          EmailNotificationPlugin.EMAIL_NOTIFICATIONS_DISABLED_KEY,
          EmailNotificationPlugin.EMAIL_NOTIFICATIONS_DISABLED_DEFAULT)) {
        pluginFactories.add(new EmailNotificationPlugin.Factory());
      }

      pluginFactories.addAll(Lists.transform(pluginNames,
          new Function<String, GobblinInstancePluginFactory>() {
        @Override
        public GobblinInstancePluginFactory apply(String input) {
          Class<? extends GobblinInstancePluginFactory> factoryClass;
          try {
            factoryClass = _aliasResolver.resolveClass(input);
            return factoryClass.newInstance();
          } catch (ClassNotFoundException|InstantiationException|IllegalAccessException e) {
            throw new RuntimeException("Unable to instantiate plugin factory " + input + ": " + e, e);
          }
        }
      }));

      return pluginFactories;
    }

    public List<GobblinInstancePluginFactory> getPlugins() {
      List<GobblinInstancePluginFactory> res = new ArrayList<>(getDefaultPlugins());
      res.addAll(_plugins);
      return res;
    }

    public Builder addPlugin(GobblinInstancePluginFactory pluginFactory) {
      _plugins.add(pluginFactory);
      return this;
    }
  }

  // NOTE(review): returns the internal mutable list; callers are presumed not to
  // mutate it — confirm before changing to an unmodifiable view.
  public List<GobblinInstancePlugin> getPlugins() {
    return _plugins;
  }
}
apache-2.0
ruhan1/pnc
rest-model/src/main/java/org/jboss/pnc/rest/restmodel/causeway/BuildImportResultRest.java
1612
/**
 * JBoss, Home of Professional Open Source.
 * Copyright 2014 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.pnc.rest.restmodel.causeway;

import com.fasterxml.jackson.annotation.JsonInclude;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

/**
 * REST model for the result of importing a single build into Brew (Koji).
 * Lombok generates getters/setters/equals/hashCode/toString ({@code @Data})
 * and both a no-args and an all-args constructor; the all-args constructor's
 * parameter order follows the field declaration order below, so fields must
 * not be reordered.
 *
 * Author: Michal Szynkiewicz, michal.l.szynkiewicz@gmail.com
 * Date: 8/25/16
 * Time: 2:48 PM
 *
 * @deprecated TODO(review): no replacement type is referenced from this file;
 *             point callers at the current import-result model.
 */
@Deprecated
@Data
@JsonInclude(JsonInclude.Include.NON_NULL)
@NoArgsConstructor
@AllArgsConstructor
public class BuildImportResultRest {

    /**
     * id of pnc build record
     */
    private Integer buildRecordId;

    /**
     * build id assigned by brew
     */
    private Integer brewBuildId;

    /**
     * link to brew
     */
    private String brewBuildUrl;

    // Overall status of the import (serialized only when non-null, per @JsonInclude).
    private BuildImportStatus status;

    /**
     * global errors
     */
    private String errorMessage;

    /**
     * list of errors for artifact imports
     */
    private List<ArtifactImportError> errors;
}
apache-2.0